Dataset columns:

| column | dtype | values / lengths |
|---|---|---|
| lang | string | 2 classes |
| license | string | 13 classes |
| stderr | string | length 0-343 |
| commit | string | length 40 |
| returncode | int64 | 0-128 |
| repos | string | length 6-87.7k |
| new_contents | string | length 0-6.23M |
| new_file | string | length 3-311 |
| old_contents | string | length 0-6.23M |
| message | string | length 6-9.1k |
| old_file | string | length 3-311 |
| subject | string | length 0-4k |
| git_diff | string | length 0-6.31M |
Row 1
- lang: Java
- license: apache-2.0
- stderr: (empty)
- commit: 5dd37b38741d1f42f9eb947fefa9756f667117aa
- returncode: 0
- repos: gentics/mesh,gentics/mesh,gentics/mesh,gentics/mesh
- new_contents: (empty)
- new_file: core/src/main/java/io/vertx/core/http/impl/HttpChunkContentCompressor.java
- old_contents:
/*
* Copied from https://github.com/eclipse/vert.x/blob/1ea29f558425713cfd9ee77a3c000be86c91efde/src/main/java/io/vertx/core/http/impl/HttpChunkContentCompressor.java
* to patch https://github.com/eclipse/vert.x/issues/2184 which caused http requests to fail
* when "Connection: close" header is set and compression is enabled.
*
* TODO: remove this patch when upgrading to Vert.x 3.5.1
*
* Copyright (c) 2011-2013 The original author or authors
* ------------------------------------------------------
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* and Apache License v2.0 which accompanies this distribution.
*
* The Eclipse Public License is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* The Apache License v2.0 is available at
* http://www.opensource.org/licenses/apache2.0.php
*
* You may elect to redistribute this code under either of these licenses.
*/
package io.vertx.core.http.impl;
import io.netty.buffer.ByteBuf;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelPromise;
import io.netty.handler.codec.http.DefaultHttpContent;
import io.netty.handler.codec.http.HttpContentCompressor;
/**
* @author <a href="mailto:[email protected]">Norman Maurer</a>
*/
final class HttpChunkContentCompressor extends HttpContentCompressor {
@Override
public void write(ChannelHandlerContext ctx, Object msg, ChannelPromise promise) throws Exception {
if (msg instanceof ByteBuf) {
// convert ByteBuf to HttpContent to make it work with compression. This is needed as we use the
// ChunkedWriteHandler to send files when compression is enabled.
ByteBuf buff = (ByteBuf) msg;
if (buff.isReadable()) {
// We only encode non empty buffers, as empty buffers can be used for determining when
// the content has been flushed and it confuses the HttpContentCompressor
// if we let it go
msg = new DefaultHttpContent(buff);
}
}
super.write(ctx, msg, promise);
}
HttpChunkContentCompressor(int compressionLevel) {
super(compressionLevel);
}
}
- message: Remove patch
- old_file: core/src/main/java/io/vertx/core/http/impl/HttpChunkContentCompressor.java
- subject: Remove patch
- git_diff:

<ide><path>core/src/main/java/io/vertx/core/http/impl/HttpChunkContentCompressor.java
<del>/*
<del> * Copied from https://github.com/eclipse/vert.x/blob/1ea29f558425713cfd9ee77a3c000be86c91efde/src/main/java/io/vertx/core/http/impl/HttpChunkContentCompressor.java
<del> * to patch https://github.com/eclipse/vert.x/issues/2184 which caused http requests to fail
<del> * when "Connection: close" header is set and compression is enabled.
<del> *
<del> * TODO: remove this patch when upgrading to Vert.x 3.5.1
<del> *
<del> * Copyright (c) 2011-2013 The original author or authors
<del> * ------------------------------------------------------
<del> * All rights reserved. This program and the accompanying materials
<del> * are made available under the terms of the Eclipse Public License v1.0
<del> * and Apache License v2.0 which accompanies this distribution.
<del> *
<del> * The Eclipse Public License is available at
<del> * http://www.eclipse.org/legal/epl-v10.html
<del> *
<del> * The Apache License v2.0 is available at
<del> * http://www.opensource.org/licenses/apache2.0.php
<del> *
<del> * You may elect to redistribute this code under either of these licenses.
<del> */
<del>package io.vertx.core.http.impl;
<del>
<del>import io.netty.buffer.ByteBuf;
<del>import io.netty.channel.ChannelHandlerContext;
<del>import io.netty.channel.ChannelPromise;
<del>import io.netty.handler.codec.http.DefaultHttpContent;
<del>import io.netty.handler.codec.http.HttpContentCompressor;
<del>
<del>/**
<del> * @author <a href="mailto:[email protected]">Norman Maurer</a>
<del> */
<del>final class HttpChunkContentCompressor extends HttpContentCompressor {
<del>
<del> @Override
<del> public void write(ChannelHandlerContext ctx, Object msg, ChannelPromise promise) throws Exception {
<del> if (msg instanceof ByteBuf) {
<del> // convert ByteBuf to HttpContent to make it work with compression. This is needed as we use the
<del> // ChunkedWriteHandler to send files when compression is enabled.
<del> ByteBuf buff = (ByteBuf) msg;
<del> if (buff.isReadable()) {
<del> // We only encode non empty buffers, as empty buffers can be used for determining when
<del> // the content has been flushed and it confuses the HttpContentCompressor
<del> // if we let it go
<del> msg = new DefaultHttpContent(buff);
<del> }
<del> }
<del> super.write(ctx, msg, promise);
<del> }
<del>
<del> HttpChunkContentCompressor(int compressionLevel) {
<del> super(compressionLevel);
<del> }
<del>
<del>}
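
The comments in the removed class above explain its purpose: when compression is enabled, Vert.x streams file data through Netty's ChunkedWriteHandler as raw ByteBuf chunks, which a stock HttpContentCompressor ignores, so the patched class wraps readable ByteBufs in DefaultHttpContent. The following is a minimal, illustrative sketch of where such a compressor sits in a Netty pipeline; the class name, wiring, and the application-handler placeholder are assumptions for illustration and are not taken from the gentics/mesh commit.

```java
import io.netty.channel.ChannelInitializer;
import io.netty.channel.ChannelPipeline;
import io.netty.channel.socket.SocketChannel;
import io.netty.handler.codec.http.HttpContentCompressor;
import io.netty.handler.codec.http.HttpServerCodec;
import io.netty.handler.stream.ChunkedWriteHandler;

// Illustrative pipeline setup, not part of the commit shown above.
public class CompressingHttpInitializer extends ChannelInitializer<SocketChannel> {
    @Override
    protected void initChannel(SocketChannel ch) {
        ChannelPipeline p = ch.pipeline();
        p.addLast(new HttpServerCodec());
        // Outbound writes pass through the compressor before reaching the codec.
        // A stock HttpContentCompressor only compresses HttpContent messages,
        // which is why the patched class wraps readable ByteBufs first.
        p.addLast(new HttpContentCompressor());
        // Emits file contents as ByteBuf chunks when large bodies are streamed.
        p.addLast(new ChunkedWriteHandler());
        // p.addLast(new MyRequestHandler()); // application handler goes here
    }
}
```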
Row 2
- lang: Java
- license: lgpl-2.1
- stderr: (empty)
- commit: 16499fc06983a4347f3b734be2e4be93bd908227
- returncode: 0
- repos:
olvidalo/exist,windauer/exist,RemiKoutcherawy/exist,RemiKoutcherawy/exist,wshager/exist,ljo/exist,opax/exist,wshager/exist,hungerburg/exist,adamretter/exist,wolfgangmm/exist,shabanovd/exist,hungerburg/exist,jensopetersen/exist,olvidalo/exist,adamretter/exist,shabanovd/exist,kohsah/exist,ambs/exist,RemiKoutcherawy/exist,RemiKoutcherawy/exist,patczar/exist,lcahlander/exist,wolfgangmm/exist,patczar/exist,ambs/exist,eXist-db/exist,dizzzz/exist,olvidalo/exist,jensopetersen/exist,dizzzz/exist,kohsah/exist,wshager/exist,joewiz/exist,hungerburg/exist,eXist-db/exist,kohsah/exist,eXist-db/exist,ljo/exist,windauer/exist,opax/exist,lcahlander/exist,RemiKoutcherawy/exist,MjAbuz/exist,patczar/exist,adamretter/exist,ambs/exist,patczar/exist,RemiKoutcherawy/exist,jessealama/exist,wolfgangmm/exist,wolfgangmm/exist,lcahlander/exist,zwobit/exist,joewiz/exist,shabanovd/exist,opax/exist,wshager/exist,shabanovd/exist,ambs/exist,ljo/exist,zwobit/exist,zwobit/exist,patczar/exist,MjAbuz/exist,jessealama/exist,hungerburg/exist,wshager/exist,adamretter/exist,zwobit/exist,eXist-db/exist,opax/exist,wolfgangmm/exist,windauer/exist,ljo/exist,MjAbuz/exist,joewiz/exist,kohsah/exist,shabanovd/exist,ambs/exist,jensopetersen/exist,lcahlander/exist,joewiz/exist,hungerburg/exist,opax/exist,eXist-db/exist,kohsah/exist,joewiz/exist,windauer/exist,windauer/exist,zwobit/exist,adamretter/exist,shabanovd/exist,kohsah/exist,adamretter/exist,MjAbuz/exist,jessealama/exist,MjAbuz/exist,jensopetersen/exist,MjAbuz/exist,ljo/exist,lcahlander/exist,jessealama/exist,lcahlander/exist,dizzzz/exist,jensopetersen/exist,jessealama/exist,ljo/exist,jessealama/exist,wolfgangmm/exist,windauer/exist,dizzzz/exist,eXist-db/exist,zwobit/exist,jensopetersen/exist,ambs/exist,wshager/exist,patczar/exist,dizzzz/exist,olvidalo/exist,dizzzz/exist,joewiz/exist,olvidalo/exist
- new_contents:
/*
* eXist Open Source Native XML Database
* Copyright (C) 2001-06 The eXist Project
* http://exist-db.org
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
*
* $Id$
*/
package org.exist.collections;
import org.apache.log4j.Logger;
import org.exist.EXistException;
import org.exist.Indexer;
import org.exist.collections.triggers.DocumentTrigger;
import org.exist.collections.triggers.Trigger;
import org.exist.collections.triggers.TriggerException;
import org.exist.dom.BinaryDocument;
import org.exist.dom.DefaultDocumentSet;
import org.exist.dom.DocumentImpl;
import org.exist.dom.DocumentMetadata;
import org.exist.dom.DocumentSet;
import org.exist.dom.MutableDocumentSet;
import org.exist.dom.QName;
import org.exist.security.Group;
import org.exist.security.Permission;
import org.exist.security.PermissionDeniedException;
import org.exist.security.PermissionFactory;
import org.exist.security.SecurityManager;
import org.exist.security.User;
import org.exist.security.XMLSecurityManager;
import org.exist.storage.DBBroker;
import org.exist.storage.FulltextIndexSpec;
import org.exist.storage.GeneralRangeIndexSpec;
import org.exist.storage.IndexSpec;
import org.exist.storage.NodePath;
import org.exist.storage.QNameRangeIndexSpec;
import org.exist.storage.UpdateListener;
import org.exist.storage.ProcessMonitor;
import org.exist.storage.cache.Cacheable;
import org.exist.storage.index.BFile;
import org.exist.storage.io.VariableByteInput;
import org.exist.storage.io.VariableByteOutputStream;
import org.exist.storage.lock.Lock;
import org.exist.storage.lock.LockedDocumentMap;
import org.exist.storage.lock.ReentrantReadWriteLock;
import org.exist.storage.sync.Sync;
import org.exist.storage.txn.Txn;
import org.exist.util.Configuration;
import org.exist.util.LockException;
import org.exist.util.MimeType;
import org.exist.util.SyntaxException;
import org.exist.util.XMLReaderObjectFactory;
import org.exist.util.hashtable.ObjectHashSet;
import org.exist.util.serializer.DOMStreamer;
import org.exist.xmldb.XmldbURI;
import org.exist.xquery.Constants;
import org.w3c.dom.Node;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.xml.sax.XMLReader;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.Reader;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Observable;
import java.util.Observer;
import java.util.TreeMap;
/**
* This class represents a collection in the database. A collection maintains a list of
* sub-collections and documents, and provides the methods to store/remove resources.
*
* Collections are shared between {@link org.exist.storage.DBBroker} instances. The caller
* is responsible for locking/unlocking the collection. Call {@link DBBroker#openCollection(XmldbURI, int)}
* to get a collection with a read or write lock and {@link #release(int)} to release the lock.
*
* @author wolf
*/
public class Collection extends Observable implements Comparable, Cacheable
{
public static int LENGTH_COLLECTION_ID = 4; //sizeof int
public static final int POOL_PARSER_THRESHOLD = 500;
private final static int SHALLOW_SIZE = 550;
private final static int DOCUMENT_SIZE = 450;
private final static Logger LOG = Logger.getLogger(Collection.class);
//private final static int VALIDATION_ENABLED = 0;
//private final static int VALIDATION_AUTO = 1;
//private final static int VALIDATION_DISABLED = 2;
public final static int UNKNOWN_COLLECTION_ID = -1;
// Internal id
private int collectionId = UNKNOWN_COLLECTION_ID;
// the documents contained in this collection
private Map documents = new TreeMap();
// the path of this collection
private XmldbURI path;
// the permissions assigned to this collection
private Permission permissions = PermissionFactory.getPermission(0775);
// stores child-collections with their storage address
private ObjectHashSet subcollections = new ObjectHashSet(19);
// Storage address of the collection in the BFile
private long address = BFile.UNKNOWN_ADDRESS;
// creation time
private long created = 0;
private Observer[] observers = null;
private boolean collectionConfEnabled = true;
private boolean triggersEnabled = true;
// fields required by the collections cache
private int refCount = 0;
private int timestamp = 0;
private Lock lock = null;
/** user-defined Reader */
private XMLReader userReader = null;
/** is this a temporary collection? */
private boolean isTempCollection = false;
public Collection(){
}
public Collection(XmldbURI path) {
setPath(path);
lock = new ReentrantReadWriteLock(path);
}
public void setPath(XmldbURI path) {
path = path.toCollectionPathURI();
//TODO : see if the URI resolves against DBBroker.TEMP_COLLECTION
isTempCollection = path.getRawCollectionPath().equals(DBBroker.TEMP_COLLECTION);
this.path=path;
}
public Lock getLock() {
return lock;
}
/**
* Add a new sub-collection to the collection.
*
*/
public void addCollection(DBBroker broker, Collection child, boolean isNew) {
XmldbURI childName = child.getURI().lastSegment();
if (!subcollections.contains(childName))
subcollections.add(childName);
if (isNew) {
User user = broker.getUser();
child.setCreationTime(System.currentTimeMillis());
child.permissions.setOwner(user);
CollectionConfiguration config = getConfiguration(broker);
String group = user.getPrimaryGroup();
if (config != null){
group = config.getDefCollGroup(user);
child.permissions.setPermissions(config.getDefCollPermissions());
}
child.permissions.setGroup(group);
}
}
public boolean hasChildCollection(XmldbURI path) {
return subcollections.contains(path);
}
/**
* Returns true if this is a temporary collection. By default,
* the temporary collection is in /db/system/temp.
*
* @return A boolean where true means the collection is temporary.
*/
public boolean isTempCollection() {
return isTempCollection;
}
/**
* Closes the collection, i.e. releases the lock held by
* the current thread. This is a shortcut for getLock().release().
*/
public void release(int mode) {
getLock().release(mode);
}
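/*
 * Editorial usage sketch, not part of the original eXist source: the class
 * javadoc above states that callers must pair DBBroker#openCollection(XmldbURI, int)
 * with release(int). A typical read access therefore looks like:
 *
 *     Collection col = broker.openCollection(uri, Lock.READ_LOCK);
 *     try {
 *         if (col != null) {
 *             // read documents, list sub-collections, ...
 *         }
 *     } finally {
 *         if (col != null)
 *             col.release(Lock.READ_LOCK);
 *     }
 */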
/**
* Update the specified child-collection.
*
* @param child
*/
public void update(Collection child) {
final XmldbURI childName = child.getURI().lastSegment();
subcollections.remove(childName);
subcollections.add(childName);
}
/**
* Add a document to the collection.
*
*@param doc
*/
public void addDocument(Txn transaction, DBBroker broker, DocumentImpl doc) {
if (doc.getDocId() == DocumentImpl.UNKNOWN_DOCUMENT_ID)
doc.setDocId(broker.getNextResourceId(transaction, this));
documents.put(doc.getFileURI().getRawCollectionPath(), doc);
}
/**
* Removes the document from the internal list of resources, but
* doesn't delete the document object itself.
*
* @param doc
*/
public void unlinkDocument(DocumentImpl doc) {
documents.remove(doc.getFileURI().getRawCollectionPath());
}
/**
* Return an iterator over all subcollections.
*
* The list of subcollections is copied first, so modifications
* via the iterator have no effect.
*
*@return Description of the Return Value
*/
public Iterator collectionIterator() {
try {
getLock().acquire(Lock.READ_LOCK);
return subcollections.stableIterator();
} catch (LockException e) {
LOG.warn(e.getMessage(), e);
return null;
} finally {
getLock().release(Lock.READ_LOCK);
}
}
/**
* Load all collections below this collection
* and return them in a List.
*
* @return List
*/
public List getDescendants(DBBroker broker, User user) {
final ArrayList cl = new ArrayList(subcollections.size());
try {
getLock().acquire(Lock.READ_LOCK);
Collection child;
XmldbURI childName;
for (Iterator i = subcollections.iterator(); i.hasNext(); ) {
childName = (XmldbURI) i.next();
//TODO : resolve URI !
child = broker.getCollection(path.append(childName));
if (permissions.validate(user, Permission.READ)) {
cl.add(child);
if (child.getChildCollectionCount() > 0)
cl.addAll(child.getDescendants(broker, user));
}
}
} catch (LockException e) {
LOG.warn(e.getMessage(), e);
} finally {
getLock().release(Lock.READ_LOCK);
}
return cl;
}
public MutableDocumentSet allDocs(DBBroker broker, MutableDocumentSet docs, boolean recursive,
boolean checkPermissions) {
return allDocs(broker, docs, recursive, checkPermissions, null);
}
/**
* Retrieve all documents contained in this collection.
*
* If recursive is true, documents from sub-collections are
* included.
*
* @param broker
* @param docs
* @param recursive
* @param checkPermissions
* @return The set of documents.
*/
public MutableDocumentSet allDocs(DBBroker broker, MutableDocumentSet docs, boolean recursive,
boolean checkPermissions, LockedDocumentMap protectedDocs) {
if (permissions.validate(broker.getUser(), Permission.READ)) {
List subColls = null;
try {
// acquire a lock on the collection
getLock().acquire(Lock.READ_LOCK);
// add all docs in this collection to the returned set
getDocuments(broker, docs, checkPermissions);
// get a list of subcollection URIs. We will process them after unlocking this collection.
// otherwise we may deadlock ourselves
subColls = subcollections.keys();
} catch (LockException e) {
LOG.warn(e.getMessage(), e);
} finally {
getLock().release(Lock.READ_LOCK);
}
if (recursive && subColls != null) {
// process the child collections
for (int i = 0; i < subColls.size(); i++) {
XmldbURI childName = (XmldbURI) subColls.get(i);
//TODO : resolve URI !
Collection child = broker.openCollection(path.appendInternal(childName), Lock.NO_LOCK);
// a collection may have been removed in the meantime, so check first
if (child != null)
child.allDocs(broker, docs, recursive, checkPermissions, protectedDocs);
}
}
}
return docs;
}
public DocumentSet allDocs(DBBroker broker, MutableDocumentSet docs, boolean recursive, LockedDocumentMap lockMap, int lockType) throws LockException {
if (permissions.validate(broker.getUser(), Permission.READ)) {
List subColls = null;
XmldbURI uris[] = null;
try {
// acquire a lock on the collection
getLock().acquire(Lock.READ_LOCK);
// add all docs in this collection to the returned set
getDocuments(broker, docs, lockMap, lockType);
// get a list of subcollection URIs. We will process them after unlocking this collection.
// otherwise we may deadlock ourselves
subColls = subcollections.keys();
if (subColls != null) {
uris = new XmldbURI[subColls.size()];
for (int i = 0; i < subColls.size(); i++) {
XmldbURI childName = (XmldbURI) subColls.get(i);
uris[i] = path.appendInternal(childName);
}
}
} catch (LockException e) {
LOG.warn(e.getMessage());
throw e;
} finally {
getLock().release(Lock.READ_LOCK);
}
if (recursive && uris != null) {
// process the child collections
for (int i = 0; i < uris.length; i++) {
//TODO : resolve URI !
Collection child = broker.openCollection(uris[i], Lock.NO_LOCK);
// a collection may have been removed in the meantime, so check first
if (child != null)
child.allDocs(broker, docs, recursive, lockMap, lockType);
}
}
}
return docs;
}
/**
* Add all documents to the specified document set.
*
* @param docs
*/
public DocumentSet getDocuments(DBBroker broker, MutableDocumentSet docs, boolean checkPermissions) {
try {
getLock().acquire(Lock.READ_LOCK);
docs.addCollection(this);
docs.addAll(broker, this, getDocumentPaths(), checkPermissions);
} catch (LockException e) {
LOG.warn(e.getMessage(), e);
} finally {
getLock().release(Lock.READ_LOCK);
}
return docs;
}
public DocumentSet getDocuments(DBBroker broker, MutableDocumentSet docs, LockedDocumentMap lockMap, int lockType) throws LockException {
try {
getLock().acquire(Lock.READ_LOCK);
docs.addCollection(this);
docs.addAll(broker, this, getDocumentPaths(), lockMap, lockType);
} catch (LockException e) {
throw e;
} finally {
getLock().release(Lock.READ_LOCK);
}
return docs;
}
private String[] getDocumentPaths() {
String paths[] = new String[documents.size()];
int i = 0;
for (Iterator iter = documents.keySet().iterator(); iter.hasNext(); i++) {
paths[i] = (String) iter.next();
}
return paths;
}
/**
* Check if this collection may be safely removed from the
* cache. Returns false if there are ongoing write operations,
* i.e. one or more of the documents is locked for
* write.
*
* @return A boolean value where true indicates it may be unloaded.
*/
public boolean allowUnload() {
for (Iterator i = documents.values().iterator(); i.hasNext(); ) {
DocumentImpl doc = (DocumentImpl) i.next();
if (doc.isLockedForWrite())
return false;
}
return true;
// try {
// lock.acquire(Lock.WRITE_LOCK);
// for (Iterator i = documents.values().iterator(); i.hasNext(); ) {
// DocumentImpl doc = (DocumentImpl) i.next();
// if (doc.isLockedForWrite())
// return false;
// }
// return true;
// } catch (LockException e) {
// LOG.warn("Failed to acquire lock on collection: " + getName(), e);
// } finally {
// lock.release();
// }
// return false;
}
public int compareTo(Object obj) {
Collection other = (Collection) obj;
if (collectionId == other.collectionId)
return Constants.EQUAL;
else if (collectionId < other.collectionId)
return Constants.INFERIOR;
else
return Constants.SUPERIOR;
}
public boolean equals(Object obj) {
if (!(obj instanceof Collection))
return false;
return ((Collection) obj).collectionId == collectionId;
}
/**
* Returns the estimated amount of memory used by this collection
* and its documents. This information is required by the
* {@link org.exist.storage.CollectionCacheManager} to be able
* to resize the caches.
*
* @return estimated amount of memory in bytes
*/
public int getMemorySize() {
return SHALLOW_SIZE + documents.size() * DOCUMENT_SIZE;
}
/**
* Return the number of child-collections managed by this
* collection.
*
*@return The childCollectionCount value
*/
public int getChildCollectionCount() {
try {
getLock().acquire(Lock.READ_LOCK);
return subcollections.size();
} catch (LockException e) {
LOG.warn(e.getMessage(), e);
return 0;
} finally {
getLock().release(Lock.READ_LOCK);
}
}
/**
* Get a child resource as identified by path. This method doesn't put
* a lock on the document nor does it recognize locks held by other threads.
* There's no guarantee that the document still exists when accessing it.
*
*@param broker
*@param path The name of the document (without collection path)
*@return the document
*/
public DocumentImpl getDocument(DBBroker broker, XmldbURI path) {
try {
getLock().acquire(Lock.READ_LOCK);
DocumentImpl doc = (DocumentImpl) documents.get(path.getRawCollectionPath());
if(doc == null)
LOG.debug("Document " + path + " not found!");
return doc;
} catch (LockException e) {
LOG.warn(e.getMessage(), e);
return null;
} finally {
getLock().release(Lock.READ_LOCK);
}
}
/**
* Retrieve a child resource after putting a read lock on it. With this method,
* access to the received document object is safe.
*
* @deprecated Use other method
* @param broker
* @param name
* @return The document that was locked.
* @throws LockException
*/
public DocumentImpl getDocumentWithLock(DBBroker broker, XmldbURI name) throws LockException {
return getDocumentWithLock(broker,name,Lock.READ_LOCK);
}
/**
* Retrieve a child resource after putting a read lock on it. With this method,
* access to the received document object is safe.
*
* @param broker
* @param uri
* @param lockMode
* @return The document that was locked.
* @throws LockException
*/
public DocumentImpl getDocumentWithLock(DBBroker broker, XmldbURI uri, int lockMode)
throws LockException {
try {
getLock().acquire(Lock.READ_LOCK);
DocumentImpl doc = (DocumentImpl) documents.get(uri.getRawCollectionPath());
if(doc != null)
doc.getUpdateLock().acquire(lockMode);
return doc;
} finally {
getLock().release(Lock.READ_LOCK);
}
}
public DocumentImpl getDocumentNoLock(String rawPath) {
return (DocumentImpl) documents.get(rawPath);
}
/**
* Release any locks held on the document.
* @deprecated Use other method
* @param doc
*/
public void releaseDocument(DocumentImpl doc) {
if(doc != null) {
doc.getUpdateLock().release(Lock.READ_LOCK);
}
}
/**
* Release any locks held on the document.
*
* @param doc
*/
public void releaseDocument(DocumentImpl doc, int mode) {
if(doc != null) {
doc.getUpdateLock().release(mode);
}
}
/**
* Returns the number of documents in this collection.
*
*@return The documentCount value
*/
public int getDocumentCount() {
try {
getLock().acquire(Lock.READ_LOCK);
return documents.size();
} catch (LockException e) {
LOG.warn(e.getMessage(), e);
return 0;
} finally {
getLock().release(Lock.READ_LOCK);
}
}
/**
* Get the internal id.
*
*@return The id value
*/
public int getId() {
return collectionId;
}
/**
* Get the name of this collection.
*
*@return The name value
*/
public XmldbURI getURI() {
return path;
}
/**
* Returns the parent-collection.
*
*@return The parent-collection or null if this
*is the root collection.
*/
public XmldbURI getParentURI() {
if (path.equals(XmldbURI.ROOT_COLLECTION_URI))
return null;
//TODO : resolve URI against ".." !
return path.removeLastSegment();
}
/**
* Gets the permissions attribute of the Collection object
*
*@return The permissions value
*/
public Permission getPermissions() {
try {
getLock().acquire(Lock.READ_LOCK);
return permissions;
} catch (LockException e) {
LOG.warn(e.getMessage(), e);
return permissions;
} finally {
getLock().release(Lock.READ_LOCK);
}
}
public Permission getPermissionsNoLock() {
return permissions;
}
/**
* Check if the collection has a child document.
*
*@param uri the name (without path) of the document
*@return A value of true when the collection has the document identified.
*/
public boolean hasDocument(XmldbURI uri) {
return documents.containsKey(uri.getRawCollectionPath());
}
/**
* Check if the collection has a sub-collection.
*
*@param name the name of the subcollection (without path).
*@return A value of true when the subcollection exists.
*/
public boolean hasSubcollection(XmldbURI name) {
try {
getLock().acquire(Lock.READ_LOCK);
return subcollections.contains(name);
} catch (LockException e) {
LOG.warn(e.getMessage(), e);
//TODO : ouch ! -pb
return subcollections.contains(name);
} finally {
getLock().release(Lock.READ_LOCK);
}
}
public boolean hasSubcollectionNoLock(XmldbURI name) {
return subcollections.contains(name);
}
/**
* Returns an iterator on the child-documents in this collection.
*
*@return An iterator of all the documents in the collection.
*/
public Iterator iterator(DBBroker broker) {
return getDocuments(broker, new DefaultDocumentSet(), false).getDocumentIterator();
}
/**
* Read collection contents from the stream.
*
* @param istream
* @throws IOException
*/
public void read(DBBroker broker, VariableByteInput istream) throws IOException {
collectionId = istream.readInt();
final int collLen = istream.readInt();
subcollections = new ObjectHashSet(collLen == 0 ? 19 : collLen);
for (int i = 0; i < collLen; i++)
subcollections.add(XmldbURI.create(istream.readUTF()));
final int uid = istream.readInt();
final int gid = istream.readInt();
final int perm = istream.readInt();
created = istream.readLong();
final SecurityManager secman = broker.getBrokerPool().getSecurityManager();
if (secman == null) {
//TODO : load default permissions ? -pb
permissions.setOwner(SecurityManager.DBA_USER);
permissions.setGroup(SecurityManager.DBA_GROUP);
} else {
permissions.setOwner(secman.getUser(uid));
Group group = secman.getGroup(gid);
if (group != null)
permissions.setGroup(group.getName());
}
///TODO : why this mask ? -pb
permissions.setPermissions(perm & 0777);
broker.getCollectionResources(this);
}
/**
* Remove the specified sub-collection.
*
*@param name Description of the Parameter
*/
public void removeCollection(XmldbURI name) throws LockException {
try {
getLock().acquire(Lock.WRITE_LOCK);
subcollections.remove(name);
} finally {
getLock().release(Lock.WRITE_LOCK);
}
}
/**
* Remove the specified document from the collection.
*
*@param transaction
*@param broker
*@param docUri
*/
public void removeXMLResource(Txn transaction, DBBroker broker, XmldbURI docUri)
throws PermissionDeniedException, TriggerException, LockException {
DocumentImpl doc = null;
try {
broker.getBrokerPool().getProcessMonitor().startJob(ProcessMonitor.ACTION_REMOVE_XML, docUri);
//Doh ! READ lock ?
getLock().acquire(Lock.READ_LOCK);
doc = (DocumentImpl) documents.get(docUri.getRawCollectionPath());
if (doc == null)
return; //TODO should throw an exception!!! Otherwise we don't know if the document was removed
doc.getUpdateLock().acquire(Lock.WRITE_LOCK);
if (!getPermissions().validate(broker.getUser(), Permission.WRITE))
throw new PermissionDeniedException(
"Write access to collection denied; user=" + broker.getUser().getName());
if (!doc.getPermissions().validate(broker.getUser(), Permission.WRITE))
throw new PermissionDeniedException("Permission to remove document denied");
DocumentTrigger trigger = null;
if (!CollectionConfiguration.DEFAULT_COLLECTION_CONFIG_FILE_URI.equals(docUri)) {
if (triggersEnabled) {
CollectionConfiguration config = getConfiguration(broker);
if (config != null)
try {
trigger = (DocumentTrigger) config.newTrigger(Trigger.REMOVE_DOCUMENT_EVENT, broker, this);
} catch (CollectionConfigurationException e) {
LOG.debug("An error occurred while initializing a trigger for collection " + getURI() + ": " + e.getMessage(), e);
}
}
} else {
// we remove a collection.xconf configuration file: tell the configuration manager to
// reload the configuration.
CollectionConfigurationManager confMgr = broker.getBrokerPool().getConfigurationManager();
confMgr.invalidateAll(getURI());
}
if (trigger != null) {
trigger.prepare(Trigger.REMOVE_DOCUMENT_EVENT, broker, transaction,
getURI().append(docUri), doc);
}
broker.removeXMLResource(transaction, doc);
documents.remove(docUri.getRawCollectionPath());
if (trigger != null) {
trigger.finish(Trigger.REMOVE_DOCUMENT_EVENT, broker, transaction, getURI().append(docUri), null);
}
broker.getBrokerPool().getNotificationService().notifyUpdate(doc, UpdateListener.REMOVE);
} finally {
broker.getBrokerPool().getProcessMonitor().endJob();
if (doc != null)
doc.getUpdateLock().release(Lock.WRITE_LOCK);
//Doh ! A READ lock ?
getLock().release(Lock.READ_LOCK);
}
}
public void removeBinaryResource(Txn transaction, DBBroker broker, XmldbURI uri)
throws PermissionDeniedException, LockException, TriggerException {
try {
getLock().acquire(Lock.WRITE_LOCK);
DocumentImpl doc = getDocument(broker, uri);
if(doc.isLockedForWrite())
throw new PermissionDeniedException("Document " + doc.getFileURI() +
" is locked for write");
if (!getPermissions().validate(broker.getUser(), Permission.WRITE))
throw new PermissionDeniedException(
"write access to collection denied; user=" + broker.getUser().getName());
if (!doc.getPermissions().validate(broker.getUser(), Permission.WRITE))
throw new PermissionDeniedException("permission to remove document denied");
removeBinaryResource(transaction, broker, doc);
} finally {
getLock().release(Lock.WRITE_LOCK);
}
}
public void removeBinaryResource(Txn transaction, DBBroker broker, DocumentImpl doc)
throws PermissionDeniedException, LockException, TriggerException {
if (doc == null)
return; //TODO should throw an exception!!! Otherwise we don't know if the document was removed
try {
broker.getBrokerPool().getProcessMonitor().startJob(ProcessMonitor.ACTION_REMOVE_BINARY, doc.getFileURI());
getLock().acquire(Lock.WRITE_LOCK);
if (doc.getResourceType() != DocumentImpl.BINARY_FILE)
throw new PermissionDeniedException("document " + doc.getFileURI()
+ " is not a binary object");
if(doc.isLockedForWrite())
throw new PermissionDeniedException("Document " + doc.getFileURI() +
" is locked for write");
if (!getPermissions().validate(broker.getUser(), Permission.WRITE))
throw new PermissionDeniedException(
"write access to collection denied; user=" + broker.getUser().getName());
if (!doc.getPermissions().validate(broker.getUser(), Permission.WRITE))
throw new PermissionDeniedException("permission to remove document denied");
DocumentTrigger trigger = null;
if (triggersEnabled) {
CollectionConfiguration config = getConfiguration(broker);
if (config != null) {
try {
trigger = (DocumentTrigger) config.newTrigger(Trigger.REMOVE_DOCUMENT_EVENT, broker, this);
} catch (CollectionConfigurationException e) {
LOG.debug("An error occurred while initializing a trigger for collection " + getURI() + ": " + e.getMessage(), e);
}
}
}
if (trigger != null)
trigger.prepare(Trigger.REMOVE_DOCUMENT_EVENT, broker, transaction, doc.getURI(), doc);
try {
broker.removeBinaryResource(transaction, (BinaryDocument) doc);
} catch (IOException ex) {
throw new PermissionDeniedException("Cannot delete file.");
}
documents.remove(doc.getFileURI().getRawCollectionPath());
if (trigger != null) {
trigger.finish(Trigger.REMOVE_DOCUMENT_EVENT, broker, transaction, doc.getURI(), null);
}
} finally {
broker.getBrokerPool().getProcessMonitor().endJob();
getLock().release(Lock.WRITE_LOCK);
}
}
/** Stores an XML document in the database. {@link #validateXMLResourceInternal(org.exist.storage.txn.Txn,
* org.exist.storage.DBBroker, org.exist.xmldb.XmldbURI, CollectionConfiguration, org.exist.collections.Collection.ValidateBlock)}
* should have been called previously in order to acquire a write lock for the document. Launches the finish trigger.
* @param transaction
* @param broker
* @param info
* @param source
* @param privileged
* @throws EXistException
* @throws PermissionDeniedException
* @throws TriggerException
* @throws SAXException
* @throws LockException
*/
public void store(Txn transaction, final DBBroker broker, final IndexInfo info, final InputSource source, boolean privileged)
throws EXistException, PermissionDeniedException, TriggerException, SAXException, LockException {
storeXMLInternal(transaction, broker, info, privileged, new StoreBlock() {
public void run() throws EXistException, SAXException {
try {
final InputStream is = source.getByteStream();
if (is != null && is.markSupported())
is.reset();
else {
final Reader cs = source.getCharacterStream();
if (cs != null && cs.markSupported())
cs.reset();
}
} catch (IOException e) {
// mark is not supported: exception is expected, do nothing
}
XMLReader reader = getReader(broker, false, info.getCollectionConfig());
info.setReader(reader, null);
try {
reader.parse(source);
} catch (IOException e) {
throw new EXistException(e);
} finally {
releaseReader(broker, info, reader);
}
}
});
}
/** Stores an XML document in the database. {@link #validateXMLResourceInternal(org.exist.storage.txn.Txn,
* org.exist.storage.DBBroker, org.exist.xmldb.XmldbURI, CollectionConfiguration, org.exist.collections.Collection.ValidateBlock)}
* should have been called previously in order to acquire a write lock for the document. Launches the finish trigger.
* @param transaction
* @param broker
* @param info
* @param data
* @param privileged
* @throws EXistException
* @throws PermissionDeniedException
* @throws TriggerException
* @throws SAXException
* @throws LockException
*/
public void store(Txn transaction, final DBBroker broker, final IndexInfo info, final String data, boolean privileged)
throws EXistException, PermissionDeniedException, TriggerException, SAXException, LockException {
storeXMLInternal(transaction, broker, info, privileged, new StoreBlock() {
public void run() throws SAXException, EXistException {
CollectionConfiguration colconf
= info.getDocument().getCollection().getConfiguration(broker);
XMLReader reader = getReader(broker, false, colconf);
info.setReader(reader, null);
try {
reader.parse(new InputSource(new StringReader(data)));
} catch (IOException e) {
throw new EXistException(e);
} finally {
releaseReader(broker, info, reader);
}
}
});
}
/** Stores an XML document in the database. {@link #validateXMLResourceInternal(org.exist.storage.txn.Txn,
* org.exist.storage.DBBroker, org.exist.xmldb.XmldbURI, CollectionConfiguration, org.exist.collections.Collection.ValidateBlock)}
* should have been called previously in order to acquire a write lock for the document. Launches the finish trigger.
* @param transaction
* @param broker
* @param info
* @param node
* @param privileged
* @throws EXistException
* @throws PermissionDeniedException
* @throws TriggerException
* @throws SAXException
* @throws LockException
*/
public void store(Txn transaction, DBBroker broker, final IndexInfo info, final Node node, boolean privileged)
throws EXistException, PermissionDeniedException, TriggerException, SAXException, LockException {
storeXMLInternal(transaction, broker, info, privileged, new StoreBlock() {
public void run() throws EXistException, SAXException {
info.getDOMStreamer().serialize(node, true);
}
});
}
private interface StoreBlock {
public void run() throws EXistException, SAXException;
}
/** Stores an XML document in the database. {@link #validateXMLResourceInternal(org.exist.storage.txn.Txn,
* org.exist.storage.DBBroker, org.exist.xmldb.XmldbURI, CollectionConfiguration, org.exist.collections.Collection.ValidateBlock)}
* should have been called previously in order to acquire a write lock for the document. Launches the finish trigger.
* @param transaction
* @param broker
* @param info
* @param privileged
* @param doParse
* @throws EXistException
* @throws SAXException
*/
private void storeXMLInternal(Txn transaction, DBBroker broker, IndexInfo info, boolean privileged, StoreBlock doParse) throws EXistException, SAXException {
DocumentImpl document = info.getIndexer().getDocument();
LOG.debug("storing document " + document.getDocId() + " ...");
//Sanity check
if (!document.getUpdateLock().isLockedForWrite()) {
LOG.warn("document is not locked for write !");
}
try {
broker.getBrokerPool().getProcessMonitor().startJob(ProcessMonitor.ACTION_STORE_DOC, document.getFileURI());
doParse.run();
broker.storeXMLResource(transaction, document);
broker.flush();
broker.closeDocument();
// broker.checkTree(document);
LOG.debug("document stored.");
// if we are running in privileged mode (e.g. backup/restore), notify the SecurityManager about changes
if (getURI().equals(XmldbURI.SYSTEM_COLLECTION_URI)
&& document.getFileURI().equals(XMLSecurityManager.ACL_FILE_URI)
&& privileged == false) {
// inform the security manager that system data has changed
LOG.debug("users.xml changed");
broker.getBrokerPool().reloadSecurityManager(broker);
}
} finally {
//This lock has been acquired in validateXMLResourceInternal()
document.getUpdateLock().release(Lock.WRITE_LOCK);
broker.getBrokerPool().getProcessMonitor().endJob();
}
collectionConfEnabled = true;
broker.deleteObservers();
info.finishTrigger(broker, transaction, document.getURI(), document);
broker.getBrokerPool().getNotificationService().notifyUpdate(document,
(info.getEvent() == Trigger.UPDATE_DOCUMENT_EVENT ? UpdateListener.UPDATE : UpdateListener.ADD));
//Is it a collection configuration file ?
XmldbURI docName = document.getFileURI();
//WARNING : there is no reason to lock the collection since setPath() is normally called in a safe way
//TODO: *resolve* URI against CollectionConfigurationManager.CONFIG_COLLECTION_URI
if (getURI().startsWith(XmldbURI.CONFIG_COLLECTION_URI)
&& docName.endsWith(CollectionConfiguration.COLLECTION_CONFIG_SUFFIX_URI)) {
broker.sync(Sync.MAJOR_SYNC);
CollectionConfigurationManager manager = broker.getBrokerPool().getConfigurationManager();
if (manager != null) {
try {
manager.invalidateAll(getURI());
manager.loadConfiguration(broker, this);
} catch (CollectionConfigurationException e) {
// DIZ: should this exception really been thrown? bugid=1807744
throw new EXistException("Error while reading new collection configuration: " + e.getMessage(), e);
}
}
}
}
private interface ValidateBlock {
public void run(IndexInfo info) throws SAXException, EXistException;
}
/** Validates an XML document and prepares it for further storage. Launches prepare and postValidate triggers.
* Since the process depends on the collection configuration, the collection acquires a write lock during the process.
* @param transaction
* @param broker
* @param docUri
* @param data
* @return An {@link IndexInfo} with a write lock on the document.
* @throws EXistException
* @throws PermissionDeniedException
* @throws TriggerException
* @throws SAXException
* @throws LockException
*/
public IndexInfo validateXMLResource(Txn transaction, DBBroker broker, XmldbURI docUri, String data)
throws EXistException, PermissionDeniedException, TriggerException,
SAXException, LockException, IOException {
return validateXMLResource(transaction, broker, docUri, new InputSource(new StringReader(data)));
}
/** Validates an XML document and prepares it for further storage. Launches prepare and postValidate triggers.
* Since the process depends on the collection configuration, the collection acquires a write lock during the process.
* @param transaction
* @param broker
* @param docUri
* @param source
* @return An {@link IndexInfo} with a write lock on the document.
* @throws EXistException
* @throws PermissionDeniedException
* @throws TriggerException
* @throws SAXException
* @throws LockException
*/
public IndexInfo validateXMLResource(Txn transaction, final DBBroker broker, XmldbURI docUri, final InputSource source)
throws EXistException, PermissionDeniedException, TriggerException,
SAXException, LockException, IOException {
final CollectionConfiguration colconf = getConfiguration(broker);
return validateXMLResourceInternal(transaction, broker, docUri, colconf, new ValidateBlock() {
public void run(IndexInfo info) throws SAXException, EXistException {
XMLReader reader = getReader(broker, true, colconf);
info.setReader(reader, null);
try {
reader.parse(source);
} catch (SAXException e) {
throw new SAXException("The XML parser reported a problem: " + e.getMessage(), e);
} catch (IOException e) {
throw new EXistException(e);
} finally {
releaseReader(broker, info, reader);
}
}
});
}
/** Validates an XML document and prepares it for further storage. Launches prepare and postValidate triggers.
* Since the process depends on the collection configuration, the collection acquires a write lock during the process.
* @param transaction
* @param broker
* @param docUri
* @param node
* @return An {@link IndexInfo} with a write lock on the document.
* @throws EXistException
* @throws PermissionDeniedException
* @throws TriggerException
* @throws SAXException
* @throws LockException
*/
public IndexInfo validateXMLResource(Txn transaction, final DBBroker broker, XmldbURI docUri, final Node node)
throws EXistException, PermissionDeniedException, TriggerException,
SAXException, LockException, IOException {
return validateXMLResourceInternal(transaction, broker, docUri, getConfiguration(broker), new ValidateBlock() {
public void run(IndexInfo info) throws SAXException {
info.setDOMStreamer(new DOMStreamer());
info.getDOMStreamer().serialize(node, true);
}
});
}
/** Validates an XML document and prepares it for further storage. Launches prepare and postValidate triggers.
* Since the process depends on the collection configuration, the collection acquires a write lock during the process.
* @param transaction
* @param broker
* @param docUri
* @param doValidate
* @return An {@link IndexInfo} with a write lock on the document.
* @throws EXistException
* @throws PermissionDeniedException
* @throws TriggerException
* @throws SAXException
* @throws LockException
*/
private IndexInfo validateXMLResourceInternal(Txn transaction, DBBroker broker, XmldbURI docUri, CollectionConfiguration config, ValidateBlock doValidate)
throws EXistException, PermissionDeniedException, TriggerException, SAXException, LockException, IOException {
//Make the necessary operations if we process a collection configuration document
checkConfigurationDocument(transaction, broker, docUri);
if (broker.isReadOnly()) throw new PermissionDeniedException("Database is read-only");
DocumentImpl oldDoc = null;
boolean oldDocLocked = false;
try {
broker.getBrokerPool().getProcessMonitor().startJob(ProcessMonitor.ACTION_VALIDATE_DOC, docUri);
getLock().acquire(Lock.WRITE_LOCK);
DocumentImpl document = new DocumentImpl(broker.getBrokerPool(), this, docUri);
oldDoc = (DocumentImpl) documents.get(docUri.getRawCollectionPath());
checkPermissions(transaction, broker, oldDoc);
manageDocumentInformation(broker, oldDoc, document );
Indexer indexer = new Indexer(broker, transaction);
IndexInfo info = new IndexInfo(indexer, config);
indexer.setDocument(document, config);
addObserversToIndexer(broker, indexer);
indexer.setValidating(true);
// if !triggersEnabled, setupTriggers will return null anyway, so no need to check
info.setTrigger(
setupTriggers(broker, docUri, oldDoc != null, config),
oldDoc == null ? Trigger.STORE_DOCUMENT_EVENT : Trigger.UPDATE_DOCUMENT_EVENT);
info.prepareTrigger(broker, transaction, getURI().append(docUri), oldDoc);
LOG.debug("Scanning document " + getURI().append(docUri));
doValidate.run(info);
// new document is valid: remove old document
if (oldDoc != null) {
LOG.debug("removing old document " + oldDoc.getFileURI());
oldDoc.getUpdateLock().acquire(Lock.WRITE_LOCK);
oldDocLocked = true;
if (oldDoc.getResourceType() == DocumentImpl.BINARY_FILE) {
//TODO : use a more elaborated method ? No triggers...
broker.removeBinaryResource(transaction, (BinaryDocument) oldDoc);
documents.remove(oldDoc.getFileURI().getRawCollectionPath());
//This lock is released in storeXMLInternal()
//TODO : check that we go until there to ensure the lock is released
document.getUpdateLock().acquire(Lock.WRITE_LOCK);
document.setDocId(broker.getNextResourceId(transaction, this));
addDocument(transaction, broker, document);
} else {
//TODO : use a more elaborated method ? No triggers...
broker.removeXMLResource(transaction, oldDoc, false);
oldDoc.copyOf(document);
indexer.setDocumentObject(oldDoc);
//old has become new at this point
document = oldDoc;
oldDocLocked = false;
}
LOG.debug("removed old document " + oldDoc.getFileURI());
} else {
//This lock is released in storeXMLInternal()
//TODO : check that we go until there to ensure the lock is released
document.getUpdateLock().acquire(Lock.WRITE_LOCK);
document.setDocId(broker.getNextResourceId(transaction, this));
addDocument(transaction, broker, document);
}
indexer.setValidating(false);
info.postValidateTrigger();
return info;
} finally {
if (oldDocLocked)
oldDoc.getUpdateLock().release(Lock.WRITE_LOCK);
getLock().release(Lock.WRITE_LOCK);
broker.getBrokerPool().getProcessMonitor().endJob();
}
}
private void checkConfigurationDocument(Txn transaction, DBBroker broker, XmldbURI docUri) throws EXistException, PermissionDeniedException,
IOException {
//Is it a collection configuration file ?
//TODO : use XmldbURI.resolve() !
if (!getURI().startsWith(XmldbURI.CONFIG_COLLECTION_URI))
return;
if (!docUri.endsWith(CollectionConfiguration.COLLECTION_CONFIG_SUFFIX_URI))
return;
//Allow just one configuration document per collection
//TODO : do not throw the exception if a system property allows several ones -pb
for(Iterator i = iterator(broker); i.hasNext(); ) {
DocumentImpl confDoc = (DocumentImpl) i.next();
XmldbURI currentConfDocName = confDoc.getFileURI();
if(currentConfDocName != null && !currentConfDocName.equals(docUri)) {
throw new EXistException("Could not store configuration '" + docUri + "': A configuration document with a different name ("
+ currentConfDocName + ") already exists in this collection (" + getURI() + ")");
}
}
// broker.saveCollection(transaction, this);
// CollectionConfigurationManager confMgr = broker.getBrokerPool().getConfigurationManager();
// if(confMgr != null)
// try {
// confMgr.reload(broker, this);
// } catch (CollectionConfigurationException e) {
// throw new EXistException("An error occurred while reloading the updated collection configuration: " + e.getMessage(), e);
// }
}
/** add observers to the indexer
* @param broker
* @param indexer
*/
private void addObserversToIndexer(DBBroker broker, Indexer indexer) {
broker.deleteObservers();
if (observers != null) {
for (int i = 0; i < observers.length; i++) {
indexer.addObserver(observers[i]);
broker.addObserver(observers[i]);
}
}
}
/** If an old document exists, keep information about the document.
* @param broker
* @param document
*/
private void manageDocumentInformation(DBBroker broker, DocumentImpl oldDoc,
DocumentImpl document) {
DocumentMetadata metadata = new DocumentMetadata();
if (oldDoc != null) {
metadata = oldDoc.getMetadata();
metadata.setCreated(oldDoc.getMetadata().getCreated());
metadata.setLastModified(System.currentTimeMillis());
document.setPermissions(oldDoc.getPermissions());
} else {
User user = broker.getUser();
metadata.setCreated(System.currentTimeMillis());
document.getPermissions().setOwner(user);
String group;
CollectionConfiguration config = getConfiguration(broker);
if (config != null) {
document.setPermissions(config.getDefResPermissions());
group = config.getDefResGroup(user);
} else {
group = user.getPrimaryGroup();
}
document.getPermissions().setGroup(group);
}
document.setMetadata(metadata);
}
/**
* Check permissions for the user and document, and throw exceptions if necessary.
*
* @param broker
* @param oldDoc old Document existing in database prior to adding a new one with same name.
* @throws LockException
* @throws PermissionDeniedException
*/
private void checkPermissions(Txn transaction, DBBroker broker, DocumentImpl oldDoc) throws LockException, PermissionDeniedException {
if (oldDoc != null) {
LOG.debug("Found old doc " + oldDoc.getDocId());
// check if the document is locked by another user
User lockUser = oldDoc.getUserLock();
if(lockUser != null && !lockUser.equals(broker.getUser()))
throw new PermissionDeniedException("The document is locked by user " +
lockUser.getName());
// do we have permissions for update?
if (!oldDoc.getPermissions().validate(broker.getUser(),
Permission.UPDATE))
throw new PermissionDeniedException(
"Document exists and update is not allowed");
if (!(getPermissions().validate(broker.getUser(), Permission.UPDATE) ||
getPermissions().validate(broker.getUser(), Permission.WRITE)))
throw new PermissionDeniedException(
"Document exists and update is not allowed for the collection");
// do we have write permissions?
} else if (!getPermissions().validate(broker.getUser(), Permission.WRITE))
throw new PermissionDeniedException(
"User '" + broker.getUser().getName() + "' not allowed to write to collection '" + getURI() + "'");
}
private DocumentTrigger setupTriggers(DBBroker broker, XmldbURI docUri, boolean update, CollectionConfiguration config) {
//TODO : is this the right place for such a task ? -pb
if (CollectionConfiguration.DEFAULT_COLLECTION_CONFIG_FILE_URI.equals(docUri)) {
// we are updating collection.xconf. Notify configuration manager
// CollectionConfigurationManager confMgr = broker.getBrokerPool().getConfigurationManager();
// confMgr.invalidateAll(getURI());
collectionConfEnabled = false;
return null;
}
if (!triggersEnabled)
return null;
if (config == null)
return null;
DocumentTrigger trigger = null;
try {
if (update)
trigger = (DocumentTrigger) config.newTrigger(Trigger.UPDATE_DOCUMENT_EVENT, broker, this);
else
trigger = (DocumentTrigger) config.newTrigger(Trigger.STORE_DOCUMENT_EVENT, broker, this);
} catch (CollectionConfigurationException e) {
LOG.debug("An error occurred while initializing a trigger for collection " + getURI() + ": " + e.getMessage(), e);
}
if(trigger == null)
return null;
if (update)
LOG.debug("Using update trigger '" + trigger.getClass().getName() + "'");
else
LOG.debug("Using store trigger '" + trigger.getClass().getName() + "'");
return trigger;
}
// Blob
public BinaryDocument addBinaryResource(Txn transaction, DBBroker broker,
XmldbURI docUri, byte[] data, String mimeType)
throws EXistException, PermissionDeniedException, LockException, TriggerException,IOException {
return addBinaryResource(transaction, broker, docUri, data, mimeType, null, null);
}
// Blob
public BinaryDocument addBinaryResource(Txn transaction, DBBroker broker,
XmldbURI docUri, byte[] data, String mimeType, Date created, Date modified)
throws EXistException, PermissionDeniedException, LockException, TriggerException,IOException {
return addBinaryResource(transaction, broker, docUri,
new ByteArrayInputStream(data), mimeType, data.length, created, modified);
}
// Streaming
public BinaryDocument addBinaryResource(Txn transaction, DBBroker broker,
XmldbURI docUri, InputStream is, String mimeType, int size)
throws EXistException, PermissionDeniedException, LockException, TriggerException,IOException {
return addBinaryResource(transaction, broker, docUri, is, mimeType, size, null, null);
}
// Streaming
public BinaryDocument addBinaryResource(Txn transaction, DBBroker broker,
XmldbURI docUri, InputStream is, String mimeType, int size, Date created, Date modified)
throws EXistException, PermissionDeniedException, LockException, TriggerException,IOException {
if (broker.isReadOnly())
throw new PermissionDeniedException("Database is read-only");
BinaryDocument blob = new BinaryDocument(broker.getBrokerPool(), this, docUri);
//TODO : move later, i.e. after the collection lock is acquired ?
DocumentImpl oldDoc = getDocument(broker, docUri);
try {
broker.getBrokerPool().getProcessMonitor().startJob(ProcessMonitor.ACTION_STORE_BINARY, docUri);
getLock().acquire(Lock.WRITE_LOCK);
checkPermissions(transaction, broker, oldDoc);
DocumentTrigger trigger = null;
int event = 0;
if (triggersEnabled) {
CollectionConfiguration config = getConfiguration(broker);
if (config != null) {
event = oldDoc != null ? Trigger.UPDATE_DOCUMENT_EVENT : Trigger.STORE_DOCUMENT_EVENT;
try {
trigger = (DocumentTrigger) config.newTrigger(event, broker, this);
} catch (CollectionConfigurationException e) {
LOG.debug("An error occurred while initializing a trigger for collection " + getURI() + ": " + e.getMessage(), e);
}
if (trigger != null) {
trigger.prepare(event, broker, transaction, getURI().append(docUri), oldDoc);
}
}
}
manageDocumentInformation(broker, oldDoc, blob );
DocumentMetadata metadata = blob.getMetadata();
metadata.setMimeType(mimeType == null ? MimeType.BINARY_TYPE.getName() : mimeType);
if (oldDoc != null) {
LOG.debug("removing old document " + oldDoc.getFileURI());
if (oldDoc instanceof BinaryDocument)
broker.removeBinaryResource(transaction, (BinaryDocument) oldDoc);
else
broker.removeXMLResource(transaction, oldDoc);
}
if(created != null)
metadata.setCreated(created.getTime());
if(modified != null)
metadata.setLastModified(modified.getTime());
blob.setContentLength(size);
broker.storeBinaryResource(transaction, blob, is);
addDocument(transaction, broker, blob);
broker.storeXMLResource(transaction, blob);
/*
if (triggersEnabled) {
CollectionConfiguration config = getConfiguration(broker);
if (config != null) {
event = oldDoc != null ? Trigger.UPDATE_DOCUMENT_EVENT : Trigger.STORE_DOCUMENT_EVENT;
try {
trigger = (DocumentTrigger) config.newTrigger(event, broker, this);
} catch (CollectionConfigurationException e) {
LOG.debug("An error occurred while initializing a trigger for collection " + getURI() + ": " + e.getMessage(), e);
}
if (trigger != null) {
trigger.prepare(event, broker, transaction, blob.getURI(), blob);
}
}
}*/
// This is no longer needed as the dom.dbx isn't used
//broker.closeDocument();
if (trigger != null) {
trigger.finish(event, broker, transaction, blob.getURI(), blob);
}
return blob;
} finally {
broker.getBrokerPool().getProcessMonitor().endJob();
getLock().release(Lock.WRITE_LOCK);
}
}
public void setId(int id) {
this.collectionId = id;
}
public void setPermissions(int mode) throws LockException {
try {
getLock().acquire(Lock.WRITE_LOCK);
permissions.setPermissions(mode);
} finally {
getLock().release(Lock.WRITE_LOCK);
}
}
public void setPermissions(String mode) throws SyntaxException, LockException {
try {
getLock().acquire(Lock.WRITE_LOCK);
permissions.setPermissions(mode);
} finally {
getLock().release(Lock.WRITE_LOCK);
}
}
/**
* Set permissions for the collection.
*
* @param permissions
*/
public void setPermissions(Permission permissions) throws LockException {
try {
getLock().acquire(Lock.WRITE_LOCK);
this.permissions = permissions;
} finally {
getLock().release(Lock.WRITE_LOCK);
}
}
/**
* Write collection contents to stream.
*
* @param ostream
* @throws IOException
*/
public void write(DBBroker broker, VariableByteOutputStream ostream) throws IOException {
ostream.writeInt(collectionId);
ostream.writeInt(subcollections.size());
XmldbURI childColl;
for (Iterator i = subcollections.iterator(); i.hasNext(); ) {
childColl = (XmldbURI)i.next();
ostream.writeUTF(childColl.toString());
}
SecurityManager secman = broker.getBrokerPool().getSecurityManager();
if (secman == null) {
ostream.writeInt(1);
ostream.writeInt(1);
} else {
User user = secman.getUser(permissions.getOwner());
Group group = secman.getGroup(permissions.getOwnerGroup());
if (user==null) {
throw new IllegalStateException("The user "+permissions.getOwner()+" for the collection cannot be found.");
}
if (group == null)
group = secman.getGroup(SecurityManager.GUEST_GROUP);
ostream.writeInt(user.getUID());
ostream.writeInt(group.getId());
}
ostream.writeInt(permissions.getPermissions());
ostream.writeLong(created);
}
public CollectionConfiguration getConfiguration(DBBroker broker) {
if (!collectionConfEnabled)
return null;
//System collection has no configuration
if (DBBroker.SYSTEM_COLLECTION.equals(getURI().getRawCollectionPath()))
return null;
CollectionConfigurationManager manager = broker.getBrokerPool().getConfigurationManager();
if (manager == null)
return null;
//Attempt to get configuration
CollectionConfiguration configuration = null;
collectionConfEnabled = false;
try {
//TODO: AR: if a Trigger throws CollectionConfigurationException from its configure() method, is the rest of the collection configuration (indexes etc.) ignored even though they might be fine?
configuration = manager.getConfiguration(broker, this);
collectionConfEnabled = true;
} catch (CollectionConfigurationException e) {
LOG.warn("Failed to load collection configuration for '" + getURI() + "'", e);
}
// LOG.debug("Loaded configuration for collection: " + getURI());
return configuration;
}
/**
* Should the collection configuration document be enabled
* for this collection? Called by {@link org.exist.storage.NativeBroker}
* before doing a reindex.
*
* @param enabled
*/
public void setConfigEnabled(boolean enabled) {
collectionConfEnabled = enabled;
}
/**
* Set the internal storage address of the collection data.
*
* @param addr
*/
public void setAddress(long addr) {
this.address = addr;
}
public long getAddress() {
return this.address;
}
public void setCreationTime(long ms) {
created = ms;
}
public long getCreationTime() {
return created;
}
public void setTriggersEnabled(boolean enabled) {
try {
getLock().acquire(Lock.WRITE_LOCK);
this.triggersEnabled = enabled;
} catch (LockException e) {
LOG.warn(e.getMessage(), e);
//Ouch ! -pb
this.triggersEnabled = enabled;
} finally {
getLock().release(Lock.WRITE_LOCK);
}
}
/** set user-defined Reader */
public void setReader(XMLReader reader){
userReader = reader;
}
// /**
// * If user-defined Reader is set, return it; otherwise return JAXP
// * default XMLReader configured by eXist.
// */
// private XMLReader getReader(DBBroker broker) throws EXistException,
// SAXException {
//
// if(userReader != null){
// return userReader;
// }
//
// return broker.getBrokerPool().getParserPool().borrowXMLReader();
// }
/**
* Get an XML reader from the reader pool and set up validation when needed.
*/
private XMLReader getReader(DBBroker broker, boolean validation, CollectionConfiguration colconfig) throws EXistException,
SAXException {
// If user-defined Reader is set, return it;
if(userReader != null){
return userReader;
}
// Get reader from readerpool.
XMLReader reader= broker.getBrokerPool().getParserPool().borrowXMLReader();
// If Collection configuration exists (try to) get validation mode
// and setup reader with this information.
if (!validation)
XMLReaderObjectFactory.setReaderValidationMode(XMLReaderObjectFactory.VALIDATION_DISABLED, reader);
else if( colconfig!=null ) {
int mode=colconfig.getValidationMode();
XMLReaderObjectFactory.setReaderValidationMode(mode, reader);
}
// Return configured reader.
return reader;
}
/**
* Reset validation mode of reader and return reader to reader pool.
*/
private void releaseReader(DBBroker broker, IndexInfo info, XMLReader reader) {
if(userReader != null){
return;
}
if (info.getIndexer().getDocSize() > POOL_PARSER_THRESHOLD)
return;
// Get validation mode from static configuration
Configuration config = broker.getConfiguration();
String optionValue = (String) config.getProperty(XMLReaderObjectFactory.PROPERTY_VALIDATION_MODE);
int validationMode = XMLReaderObjectFactory.convertValidationMode(optionValue);
// Restore default validation mode
XMLReaderObjectFactory.setReaderValidationMode(validationMode, reader);
// Return reader
broker.getBrokerPool().getParserPool().returnXMLReader(reader);
}
/* (non-Javadoc)
* @see java.util.Observable#addObserver(java.util.Observer)
*/
public void addObserver(Observer o) {
if (hasObserver(o)) return;
if (observers == null) {
observers = new Observer[1];
observers[0] = o;
} else {
Observer n[] = new Observer[observers.length + 1];
System.arraycopy(observers, 0, n, 0, observers.length);
n[observers.length] = o;
observers = n;
}
}
private boolean hasObserver(Observer o) {
if (observers == null)
return false;
for (int i = 0; i < observers.length; i++) {
if (observers[i] == o)
return true;
}
return false;
}
/* (non-Javadoc)
* @see java.util.Observable#deleteObservers()
*/
public void deleteObservers() {
if (observers != null)
observers = null;
}
/* (non-Javadoc)
* @see org.exist.storage.cache.Cacheable#getKey()
*/
public long getKey() {
return collectionId;
}
/* (non-Javadoc)
* @see org.exist.storage.cache.Cacheable#getReferenceCount()
*/
public int getReferenceCount() {
return refCount;
}
/* (non-Javadoc)
* @see org.exist.storage.cache.Cacheable#incReferenceCount()
*/
public int incReferenceCount() {
return ++refCount;
}
/* (non-Javadoc)
* @see org.exist.storage.cache.Cacheable#decReferenceCount()
*/
public int decReferenceCount() {
return refCount > 0 ? --refCount : 0;
}
/* (non-Javadoc)
* @see org.exist.storage.cache.Cacheable#setReferenceCount(int)
*/
public void setReferenceCount(int count) {
refCount = count;
}
/* (non-Javadoc)
* @see org.exist.storage.cache.Cacheable#setTimestamp(int)
*/
public void setTimestamp(int timestamp) {
this.timestamp = timestamp;
}
/* (non-Javadoc)
* @see org.exist.storage.cache.Cacheable#getTimestamp()
*/
public int getTimestamp() {
return timestamp;
}
/* (non-Javadoc)
* @see org.exist.storage.cache.Cacheable#release()
*/
public boolean sync(boolean syncJournal) {
return false;
}
/* (non-Javadoc)
* @see org.exist.storage.cache.Cacheable#isDirty()
*/
public boolean isDirty() {
return false;
}
public String toString() {
StringBuilder buf = new StringBuilder();
buf.append( getURI() );
buf.append("[");
for(Iterator i = documents.keySet().iterator(); i.hasNext(); ) {
buf.append(i.next());
if(i.hasNext())
buf.append(", ");
}
buf.append("]");
return buf.toString();
}
/**
* (Make private?)
* @param broker
*/
public IndexSpec getIndexConfiguration(DBBroker broker) {
CollectionConfiguration conf = getConfiguration(broker);
//If the collection has its own config...
if (conf == null) {
return broker.getIndexConfiguration();
//... otherwise return the general config (the broker's one)
} else {
return conf.getIndexConfiguration();
}
}
public GeneralRangeIndexSpec getIndexByPathConfiguration(DBBroker broker, NodePath path) {
IndexSpec idxSpec = getIndexConfiguration(broker);
return (idxSpec == null) ? null : idxSpec.getIndexByPath(path);
}
public QNameRangeIndexSpec getIndexByQNameConfiguration(DBBroker broker, QName qname) {
IndexSpec idxSpec = getIndexConfiguration(broker);
return (idxSpec == null) ? null : idxSpec.getIndexByQName(qname);
}
public FulltextIndexSpec getFulltextIndexConfiguration(DBBroker broker) {
IndexSpec idxSpec = getIndexConfiguration(broker);
return (idxSpec == null) ? null : idxSpec.getFulltextIndexSpec();
}
}
|
src/org/exist/collections/Collection.java
|
/*
* eXist Open Source Native XML Database
* Copyright (C) 2001-06 The eXist Project
* http://exist-db.org
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
*
* $Id$
*/
package org.exist.collections;
import org.apache.log4j.Logger;
import org.exist.EXistException;
import org.exist.Indexer;
import org.exist.collections.triggers.DocumentTrigger;
import org.exist.collections.triggers.Trigger;
import org.exist.collections.triggers.TriggerException;
import org.exist.dom.BinaryDocument;
import org.exist.dom.DefaultDocumentSet;
import org.exist.dom.DocumentImpl;
import org.exist.dom.DocumentMetadata;
import org.exist.dom.DocumentSet;
import org.exist.dom.MutableDocumentSet;
import org.exist.dom.QName;
import org.exist.security.Group;
import org.exist.security.Permission;
import org.exist.security.PermissionDeniedException;
import org.exist.security.PermissionFactory;
import org.exist.security.SecurityManager;
import org.exist.security.User;
import org.exist.security.XMLSecurityManager;
import org.exist.storage.DBBroker;
import org.exist.storage.FulltextIndexSpec;
import org.exist.storage.GeneralRangeIndexSpec;
import org.exist.storage.IndexSpec;
import org.exist.storage.NodePath;
import org.exist.storage.QNameRangeIndexSpec;
import org.exist.storage.UpdateListener;
import org.exist.storage.ProcessMonitor;
import org.exist.storage.cache.Cacheable;
import org.exist.storage.index.BFile;
import org.exist.storage.io.VariableByteInput;
import org.exist.storage.io.VariableByteOutputStream;
import org.exist.storage.lock.Lock;
import org.exist.storage.lock.LockedDocumentMap;
import org.exist.storage.lock.ReentrantReadWriteLock;
import org.exist.storage.sync.Sync;
import org.exist.storage.txn.Txn;
import org.exist.util.Configuration;
import org.exist.util.LockException;
import org.exist.util.MimeType;
import org.exist.util.SyntaxException;
import org.exist.util.XMLReaderObjectFactory;
import org.exist.util.hashtable.ObjectHashSet;
import org.exist.util.serializer.DOMStreamer;
import org.exist.xmldb.XmldbURI;
import org.exist.xquery.Constants;
import org.w3c.dom.Node;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.xml.sax.XMLReader;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.Reader;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Observable;
import java.util.Observer;
import java.util.TreeMap;
/**
* This class represents a collection in the database. A collection maintains a list of
* sub-collections and documents, and provides the methods to store/remove resources.
*
* Collections are shared between {@link org.exist.storage.DBBroker} instances. The caller
* is responsible to lock/unlock the collection. Call {@link DBBroker#openCollection(XmldbURI, int)}
* to get a collection with a read or write lock and {@link #release(int)} to release the lock.
*
* @author wolf
*/
public class Collection extends Observable implements Comparable, Cacheable
{
public static int LENGTH_COLLECTION_ID = 4; //sizeof int
public static final int POOL_PARSER_THRESHOLD = 500;
private final static int SHALLOW_SIZE = 550;
private final static int DOCUMENT_SIZE = 450;
private final static Logger LOG = Logger.getLogger(Collection.class);
//private final static int VALIDATION_ENABLED = 0;
//private final static int VALIDATION_AUTO = 1;
//private final static int VALIDATION_DISABLED = 2;
public final static int UNKNOWN_COLLECTION_ID = -1;
// Internal id
private int collectionId = UNKNOWN_COLLECTION_ID;
// the documents contained in this collection
private Map documents = new TreeMap();
// the path of this collection
private XmldbURI path;
// the permissions assigned to this collection
private Permission permissions = PermissionFactory.getPermission(0775);
// stores child-collections with their storage address
private ObjectHashSet subcollections = new ObjectHashSet(19);
// Storage address of the collection in the BFile
private long address = BFile.UNKNOWN_ADDRESS;
// creation time
private long created = 0;
private Observer[] observers = null;
private boolean collectionConfEnabled = true;
private boolean triggersEnabled = true;
// fields required by the collections cache
private int refCount = 0;
private int timestamp = 0;
private Lock lock = null;
/** user-defined Reader */
private XMLReader userReader = null;
/** is this a temporary collection? */
private boolean isTempCollection = false;
public Collection(){
}
public Collection(XmldbURI path) {
setPath(path);
lock = new ReentrantReadWriteLock(path);
}
public void setPath(XmldbURI path) {
path = path.toCollectionPathURI();
//TODO : see if the URI resolves against DBBroker.TEMP_COLLECTION
isTempCollection = path.getRawCollectionPath().equals(DBBroker.TEMP_COLLECTION);
this.path=path;
}
public Lock getLock() {
return lock;
}
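// Minimal usage sketch of the locking convention described in the class Javadoc
// (hypothetical caller code; "broker" is assumed to be an open DBBroker):
//
//   Collection col = broker.openCollection(XmldbURI.create("/db/my-data"), Lock.READ_LOCK);
//   try {
//       if (col != null) {
//           int resources = col.getDocumentCount();
//           // ... work with the collection while the read lock is held ...
//       }
//   } finally {
//       if (col != null)
//           col.release(Lock.READ_LOCK);
//   }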
/**
* Add a new sub-collection to the collection.
*
*/
public void addCollection(DBBroker broker, Collection child, boolean isNew) {
XmldbURI childName = child.getURI().lastSegment();
if (!subcollections.contains(childName))
subcollections.add(childName);
if (isNew) {
User user = broker.getUser();
child.setCreationTime(System.currentTimeMillis());
child.permissions.setOwner(user);
CollectionConfiguration config = getConfiguration(broker);
String group = user.getPrimaryGroup();
if (config != null){
group = config.getDefCollGroup(user);
child.permissions.setPermissions(config.getDefCollPermissions());
}
child.permissions.setGroup(group);
}
}
public boolean hasChildCollection(XmldbURI path) {
return subcollections.contains(path);
}
/**
* Returns true if this is a temporary collection. By default,
* the temporary collection is in /db/system/temp.
*
* @return A boolean where true means the collection is temporary.
*/
public boolean isTempCollection() {
return isTempCollection;
}
/**
* Closes the collection, i.e. releases the lock held by
* the current thread. This is a shortcut for getLock().release().
*/
public void release(int mode) {
getLock().release(mode);
}
/**
* Update the specified child-collection.
*
* @param child
*/
public void update(Collection child) {
final XmldbURI childName = child.getURI().lastSegment();
subcollections.remove(childName);
subcollections.add(childName);
}
/**
* Add a document to the collection.
*
*@param doc
*/
public void addDocument(Txn transaction, DBBroker broker, DocumentImpl doc) {
if (doc.getDocId() == DocumentImpl.UNKNOWN_DOCUMENT_ID)
doc.setDocId(broker.getNextResourceId(transaction, this));
documents.put(doc.getFileURI().getRawCollectionPath(), doc);
}
/**
* Removes the document from the internal list of resources, but
* doesn't delete the document object itself.
*
* @param doc
*/
public void unlinkDocument(DocumentImpl doc) {
documents.remove(doc.getFileURI().getRawCollectionPath());
}
/**
* Return an iterator over all subcollections.
*
* The list of subcollections is copied first, so modifications
* via the iterator have no effect.
*
*@return an iterator over the names of the subcollections
*/
public Iterator collectionIterator() {
try {
getLock().acquire(Lock.READ_LOCK);
return subcollections.stableIterator();
} catch (LockException e) {
LOG.warn(e.getMessage(), e);
return null;
} finally {
getLock().release(Lock.READ_LOCK);
}
}
/**
* Load all collections below this collection
* and return them in a List.
*
* @return List
*/
public List getDescendants(DBBroker broker, User user) {
final ArrayList cl = new ArrayList(subcollections.size());
try {
getLock().acquire(Lock.READ_LOCK);
Collection child;
XmldbURI childName;
for (Iterator i = subcollections.iterator(); i.hasNext(); ) {
childName = (XmldbURI) i.next();
//TODO : resolve URI !
child = broker.getCollection(path.append(childName));
if (permissions.validate(user, Permission.READ)) {
cl.add(child);
if (child.getChildCollectionCount() > 0)
cl.addAll(child.getDescendants(broker, user));
}
}
} catch (LockException e) {
LOG.warn(e.getMessage(), e);
} finally {
getLock().release(Lock.READ_LOCK);
}
return cl;
}
public MutableDocumentSet allDocs(DBBroker broker, MutableDocumentSet docs, boolean recursive,
boolean checkPermissions) {
return allDocs(broker, docs, recursive, checkPermissions, null);
}
/**
* Retrieve all documents contained in this collection.
*
* If recursive is true, documents from sub-collections are
* included.
*
* @param broker
* @param docs
* @param recursive
* @param checkPermissions
* @return The set of documents.
*/
public MutableDocumentSet allDocs(DBBroker broker, MutableDocumentSet docs, boolean recursive,
boolean checkPermissions, LockedDocumentMap protectedDocs) {
if (permissions.validate(broker.getUser(), Permission.READ)) {
List subColls = null;
try {
// acquire a lock on the collection
getLock().acquire(Lock.READ_LOCK);
// add all docs in this collection to the returned set
getDocuments(broker, docs, checkPermissions);
// get a list of subcollection URIs. We will process them after unlocking this collection.
// otherwise we may deadlock ourselves
subColls = subcollections.keys();
} catch (LockException e) {
LOG.warn(e.getMessage(), e);
} finally {
getLock().release(Lock.READ_LOCK);
}
if (recursive && subColls != null) {
// process the child collections
for (int i = 0; i < subColls.size(); i++) {
XmldbURI childName = (XmldbURI) subColls.get(i);
//TODO : resolve URI !
Collection child = broker.openCollection(path.appendInternal(childName), Lock.NO_LOCK);
// a collection may have been removed in the meantime, so check first
if (child != null)
child.allDocs(broker, docs, recursive, checkPermissions, protectedDocs);
}
}
}
return docs;
}
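// Illustrative sketch (hypothetical caller code): collecting every readable
// document below this collection, including sub-collections.
//
//   MutableDocumentSet docs = new DefaultDocumentSet();
//   collection.allDocs(broker, docs, true /* recursive */, true /* checkPermissions */);
//   for (Iterator i = docs.getDocumentIterator(); i.hasNext(); ) {
//       DocumentImpl d = (DocumentImpl) i.next();
//       // ... inspect d ...
//   }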
public DocumentSet allDocs(DBBroker broker, MutableDocumentSet docs, boolean recursive, LockedDocumentMap lockMap, int lockType) throws LockException {
if (permissions.validate(broker.getUser(), Permission.READ)) {
List subColls = null;
XmldbURI uris[] = null;
try {
// acquire a lock on the collection
getLock().acquire(Lock.READ_LOCK);
// add all docs in this collection to the returned set
getDocuments(broker, docs, lockMap, lockType);
// get a list of subcollection URIs. We will process them after unlocking this collection.
// otherwise we may deadlock ourselves
subColls = subcollections.keys();
if (subColls != null) {
uris = new XmldbURI[subColls.size()];
for (int i = 0; i < subColls.size(); i++) {
XmldbURI childName = (XmldbURI) subColls.get(i);
uris[i] = path.appendInternal(childName);
}
}
} catch (LockException e) {
LOG.warn(e.getMessage());
throw e;
} finally {
getLock().release(Lock.READ_LOCK);
}
if (recursive && uris != null) {
// process the child collections
for (int i = 0; i < uris.length; i++) {
//TODO : resolve URI !
Collection child = broker.openCollection(uris[i], Lock.NO_LOCK);
// a collection may have been removed in the meantime, so check first
if (child != null)
child.allDocs(broker, docs, recursive, lockMap, lockType);
}
}
}
return docs;
}
/**
* Add all documents to the specified document set.
*
* @param docs
*/
public DocumentSet getDocuments(DBBroker broker, MutableDocumentSet docs, boolean checkPermissions) {
try {
getLock().acquire(Lock.READ_LOCK);
docs.addCollection(this);
docs.addAll(broker, this, getDocumentPaths(), checkPermissions);
} catch (LockException e) {
LOG.warn(e.getMessage(), e);
} finally {
getLock().release(Lock.READ_LOCK);
}
return docs;
}
public DocumentSet getDocuments(DBBroker broker, MutableDocumentSet docs, LockedDocumentMap lockMap, int lockType) throws LockException {
try {
getLock().acquire(Lock.READ_LOCK);
docs.addCollection(this);
docs.addAll(broker, this, getDocumentPaths(), lockMap, lockType);
} catch (LockException e) {
throw e;
} finally {
getLock().release(Lock.READ_LOCK);
}
return docs;
}
private String[] getDocumentPaths() {
String paths[] = new String[documents.size()];
int i = 0;
for (Iterator iter = documents.keySet().iterator(); iter.hasNext(); i++) {
paths[i] = (String) iter.next();
}
return paths;
}
/**
* Check if this collection may be safely removed from the
* cache. Returns false if there are ongoing write operations,
* i.e. one or more of the documents is locked for
* write.
*
* @return A boolean value where true indicates it may be unloaded.
*/
public boolean allowUnload() {
for (Iterator i = documents.values().iterator(); i.hasNext(); ) {
DocumentImpl doc = (DocumentImpl) i.next();
if (doc.isLockedForWrite())
return false;
}
return true;
// try {
// lock.acquire(Lock.WRITE_LOCK);
// for (Iterator i = documents.values().iterator(); i.hasNext(); ) {
// DocumentImpl doc = (DocumentImpl) i.next();
// if (doc.isLockedForWrite())
// return false;
// }
// return true;
// } catch (LockException e) {
// LOG.warn("Failed to acquire lock on collection: " + getName(), e);
// } finally {
// lock.release();
// }
// return false;
}
public int compareTo(Object obj) {
Collection other = (Collection) obj;
if (collectionId == other.collectionId)
return Constants.EQUAL;
else if (collectionId < other.collectionId)
return Constants.INFERIOR;
else
return Constants.SUPERIOR;
}
public boolean equals(Object obj) {
if (!(obj instanceof Collection))
return false;
return ((Collection) obj).collectionId == collectionId;
}
/**
* Returns the estimated amount of memory used by this collection
* and its documents. This information is required by the
* {@link org.exist.storage.CollectionCacheManager} to be able
* to resize the caches.
*
* @return estimated amount of memory in bytes
*/
public int getMemorySize() {
return SHALLOW_SIZE + documents.size() * DOCUMENT_SIZE;
}
/**
* Return the number of child-collections managed by this
* collection.
*
*@return The childCollectionCount value
*/
public int getChildCollectionCount() {
try {
getLock().acquire(Lock.READ_LOCK);
return subcollections.size();
} catch (LockException e) {
LOG.warn(e.getMessage(), e);
return 0;
} finally {
getLock().release(Lock.READ_LOCK);
}
}
/**
* Get a child resource as identified by path. This method doesn't put
* a lock on the document nor does it recognize locks held by other threads.
* There's no guarantee that the document still exists when accessing it.
*
*@param broker
*@param path The name of the document (without collection path)
*@return the document
*/
public DocumentImpl getDocument(DBBroker broker, XmldbURI path) {
try {
getLock().acquire(Lock.READ_LOCK);
DocumentImpl doc = (DocumentImpl) documents.get(path.getRawCollectionPath());
if(doc == null)
LOG.debug("Document " + path + " not found!");
return doc;
} catch (LockException e) {
LOG.warn(e.getMessage(), e);
return null;
} finally {
getLock().release(Lock.READ_LOCK);
}
}
/**
* Retrieve a child resource after putting a read lock on it. With this method,
* access to the received document object is safe.
*
* @deprecated Use other method
* @param broker
* @param name
* @return The document that was locked.
* @throws LockException
*/
public DocumentImpl getDocumentWithLock(DBBroker broker, XmldbURI name) throws LockException {
return getDocumentWithLock(broker,name,Lock.READ_LOCK);
}
/**
* Retrieve a child resource after putting a read lock on it. With this method,
* access to the received document object is safe.
*
* @param broker
* @param uri
* @param lockMode
* @return The document that was locked.
* @throws LockException
*/
public DocumentImpl getDocumentWithLock(DBBroker broker, XmldbURI uri, int lockMode)
throws LockException {
try {
getLock().acquire(Lock.READ_LOCK);
DocumentImpl doc = (DocumentImpl) documents.get(uri.getRawCollectionPath());
if(doc != null)
doc.getUpdateLock().acquire(lockMode);
return doc;
} finally {
getLock().release(Lock.READ_LOCK);
}
}
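// Illustrative sketch (hypothetical caller code): reading a resource under a
// read lock and releasing it afterwards.
//
//   DocumentImpl doc = null;
//   try {
//       doc = collection.getDocumentWithLock(broker, XmldbURI.create("data.xml"), Lock.READ_LOCK);
//       if (doc != null) {
//           // ... the document can be read safely here ...
//       }
//   } finally {
//       collection.releaseDocument(doc, Lock.READ_LOCK);
//   }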
public DocumentImpl getDocumentNoLock(String rawPath) {
return (DocumentImpl) documents.get(rawPath);
}
/**
* Release any locks held on the document.
* @deprecated Use other method
* @param doc
*/
public void releaseDocument(DocumentImpl doc) {
if(doc != null) {
doc.getUpdateLock().release(Lock.READ_LOCK);
}
}
/**
* Release any locks held on the document.
*
* @param doc
*/
public void releaseDocument(DocumentImpl doc, int mode) {
if(doc != null) {
doc.getUpdateLock().release(mode);
}
}
/**
* Returns the number of documents in this collection.
*
*@return The documentCount value
*/
public int getDocumentCount() {
try {
getLock().acquire(Lock.READ_LOCK);
return documents.size();
} catch (LockException e) {
LOG.warn(e.getMessage(), e);
return 0;
} finally {
getLock().release(Lock.READ_LOCK);
}
}
/**
* Get the internal id.
*
*@return The id value
*/
public int getId() {
return collectionId;
}
/**
* Get the name of this collection.
*
*@return The name value
*/
public XmldbURI getURI() {
return path;
}
/**
* Returns the parent-collection.
*
*@return The parent-collection or null if this
* is the root collection.
*/
public XmldbURI getParentURI() {
if (path.equals(XmldbURI.ROOT_COLLECTION_URI))
return null;
//TODO : resolve URI against ".." !
return path.removeLastSegment();
}
/**
* Gets the permissions attribute of the Collection object
*
*@return The permissions value
*/
public Permission getPermissions() {
try {
getLock().acquire(Lock.READ_LOCK);
return permissions;
} catch (LockException e) {
LOG.warn(e.getMessage(), e);
return permissions;
} finally {
getLock().release(Lock.READ_LOCK);
}
}
public Permission getPermissionsNoLock() {
return permissions;
}
/**
* Check if the collection has a child document.
*
*@param uri the name (without path) of the document
*@return A value of true when the collection has the document identified.
*/
public boolean hasDocument(XmldbURI uri) {
return documents.containsKey(uri.getRawCollectionPath());
}
/**
* Check if the collection has a sub-collection.
*
*@param name the name of the subcollection (without path).
*@return A value of true when the subcollection exists.
*/
public boolean hasSubcollection(XmldbURI name) {
try {
getLock().acquire(Lock.READ_LOCK);
return subcollections.contains(name);
} catch (LockException e) {
LOG.warn(e.getMessage(), e);
//TODO : ouch ! -pb
return subcollections.contains(name);
} finally {
getLock().release(Lock.READ_LOCK);
}
}
public boolean hasSubcollectionNoLock(XmldbURI name) {
return subcollections.contains(name);
}
/**
* Returns an iterator on the child-documents in this collection.
*
*@return An iterator over all the documents in the collection.
*/
public Iterator iterator(DBBroker broker) {
return getDocuments(broker, new DefaultDocumentSet(), false).getDocumentIterator();
}
/**
* Read collection contents from the stream.
*
* @param istream
* @throws IOException
*/
public void read(DBBroker broker, VariableByteInput istream) throws IOException {
collectionId = istream.readInt();
final int collLen = istream.readInt();
subcollections = new ObjectHashSet(collLen == 0 ? 19 : collLen);
for (int i = 0; i < collLen; i++)
subcollections.add(XmldbURI.create(istream.readUTF()));
final int uid = istream.readInt();
final int gid = istream.readInt();
final int perm = istream.readInt();
created = istream.readLong();
final SecurityManager secman = broker.getBrokerPool().getSecurityManager();
if (secman == null) {
//TODO : load default permissions ? -pb
permissions.setOwner(SecurityManager.DBA_USER);
permissions.setGroup(SecurityManager.DBA_GROUP);
} else {
permissions.setOwner(secman.getUser(uid));
Group group = secman.getGroup(gid);
if (group != null)
permissions.setGroup(group.getName());
}
///TODO : why this mask ? -pb
permissions.setPermissions(perm & 0777);
broker.getCollectionResources(this);
}
/**
* Remove the specified sub-collection.
*
*@param name the name of the subcollection to remove (without path)
*/
public void removeCollection(XmldbURI name) throws LockException {
try {
getLock().acquire(Lock.WRITE_LOCK);
subcollections.remove(name);
} finally {
getLock().release(Lock.WRITE_LOCK);
}
}
/**
* Remove the specified document from the collection.
*
*@param transaction
*@param broker
*@param docUri
*/
public void removeXMLResource(Txn transaction, DBBroker broker, XmldbURI docUri)
throws PermissionDeniedException, TriggerException, LockException {
DocumentImpl doc = null;
try {
broker.getBrokerPool().getProcessMonitor().startJob(ProcessMonitor.ACTION_REMOVE_XML, docUri);
//Doh ! READ lock ?
getLock().acquire(Lock.READ_LOCK);
doc = (DocumentImpl) documents.get(docUri.getRawCollectionPath());
if (doc == null)
return; //TODO should throw an exception!!! Otherwise we don't know if the document was removed
doc.getUpdateLock().acquire(Lock.WRITE_LOCK);
if (!getPermissions().validate(broker.getUser(), Permission.WRITE))
throw new PermissionDeniedException(
"Write access to collection denied; user=" + broker.getUser().getName());
if (!doc.getPermissions().validate(broker.getUser(), Permission.WRITE))
throw new PermissionDeniedException("Permission to remove document denied");
DocumentTrigger trigger = null;
if (!CollectionConfiguration.DEFAULT_COLLECTION_CONFIG_FILE_URI.equals(docUri)) {
if (triggersEnabled) {
CollectionConfiguration config = getConfiguration(broker);
if (config != null)
try {
trigger = (DocumentTrigger) config.newTrigger(Trigger.REMOVE_DOCUMENT_EVENT, broker, this);
} catch (CollectionConfigurationException e) {
LOG.debug("An error occurred while initializing a trigger for collection " + getURI() + ": " + e.getMessage(), e);
}
}
} else {
// we remove a collection.xconf configuration file: tell the configuration manager to
// reload the configuration.
CollectionConfigurationManager confMgr = broker.getBrokerPool().getConfigurationManager();
confMgr.invalidateAll(getURI());
}
if (trigger != null) {
trigger.prepare(Trigger.REMOVE_DOCUMENT_EVENT, broker, transaction,
getURI().append(docUri), doc);
}
broker.removeXMLResource(transaction, doc);
documents.remove(docUri.getRawCollectionPath());
if (trigger != null) {
trigger.finish(Trigger.REMOVE_DOCUMENT_EVENT, broker, transaction, getURI().append(docUri), null);
}
broker.getBrokerPool().getNotificationService().notifyUpdate(doc, UpdateListener.REMOVE);
} finally {
broker.getBrokerPool().getProcessMonitor().endJob();
if (doc != null)
doc.getUpdateLock().release(Lock.WRITE_LOCK);
//Doh ! A READ lock ?
getLock().release(Lock.READ_LOCK);
}
}
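// Illustrative sketch (hypothetical caller code): removing an XML resource by
// its name relative to this collection.
//
//   collection.removeXMLResource(transaction, broker, XmldbURI.create("data.xml"));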
public void removeBinaryResource(Txn transaction, DBBroker broker, XmldbURI uri)
throws PermissionDeniedException, LockException, TriggerException {
try {
getLock().acquire(Lock.WRITE_LOCK);
DocumentImpl doc = getDocument(broker, uri);
if(doc.isLockedForWrite())
throw new PermissionDeniedException("Document " + doc.getFileURI() +
" is locked for write");
if (!getPermissions().validate(broker.getUser(), Permission.WRITE))
throw new PermissionDeniedException(
"write access to collection denied; user=" + broker.getUser().getName());
if (!doc.getPermissions().validate(broker.getUser(), Permission.WRITE))
throw new PermissionDeniedException("permission to remove document denied");
removeBinaryResource(transaction, broker, doc);
} finally {
getLock().release(Lock.WRITE_LOCK);
}
}
public void removeBinaryResource(Txn transaction, DBBroker broker, DocumentImpl doc)
throws PermissionDeniedException, LockException, TriggerException {
if (doc == null)
return; //TODO should throw an exception!!! Otherwise we don't know if the document was removed
try {
broker.getBrokerPool().getProcessMonitor().startJob(ProcessMonitor.ACTION_REMOVE_BINARY, doc.getFileURI());
getLock().acquire(Lock.WRITE_LOCK);
if (doc.getResourceType() != DocumentImpl.BINARY_FILE)
throw new PermissionDeniedException("document " + doc.getFileURI()
+ " is not a binary object");
if(doc.isLockedForWrite())
throw new PermissionDeniedException("Document " + doc.getFileURI() +
" is locked for write");
if (!getPermissions().validate(broker.getUser(), Permission.WRITE))
throw new PermissionDeniedException(
"write access to collection denied; user=" + broker.getUser().getName());
if (!doc.getPermissions().validate(broker.getUser(), Permission.WRITE))
throw new PermissionDeniedException("permission to remove document denied");
DocumentTrigger trigger = null;
if (triggersEnabled) {
CollectionConfiguration config = getConfiguration(broker);
if (config != null) {
try {
trigger = (DocumentTrigger) config.newTrigger(Trigger.REMOVE_DOCUMENT_EVENT, broker, this);
} catch (CollectionConfigurationException e) {
LOG.debug("An error occurred while initializing a trigger for collection " + getURI() + ": " + e.getMessage(), e);
}
}
}
if (trigger != null)
trigger.prepare(Trigger.REMOVE_DOCUMENT_EVENT, broker, transaction, doc.getURI(), doc);
try {
broker.removeBinaryResource(transaction, (BinaryDocument) doc);
} catch (IOException ex) {
throw new PermissionDeniedException("Cannot delete file.");
}
documents.remove(doc.getFileURI().getRawCollectionPath());
if (trigger != null) {
trigger.finish(Trigger.REMOVE_DOCUMENT_EVENT, broker, transaction, doc.getURI(), null);
}
} finally {
broker.getBrokerPool().getProcessMonitor().endJob();
getLock().release(Lock.WRITE_LOCK);
}
}
/** Stores an XML document in the database. {@link #validateXMLResourceInternal(org.exist.storage.txn.Txn,
* org.exist.storage.DBBroker, org.exist.xmldb.XmldbURI, CollectionConfiguration, org.exist.collections.Collection.ValidateBlock)}
* should have been called previously in order to acquire a write lock for the document. Launches the finish trigger.
* @param transaction
* @param broker
* @param info
* @param source
* @param privileged
* @throws EXistException
* @throws PermissionDeniedException
* @throws TriggerException
* @throws SAXException
* @throws LockException
*/
public void store(Txn transaction, final DBBroker broker, final IndexInfo info, final InputSource source, boolean privileged)
throws EXistException, PermissionDeniedException, TriggerException, SAXException, LockException {
storeXMLInternal(transaction, broker, info, privileged, new StoreBlock() {
public void run() throws EXistException, SAXException {
try {
final InputStream is = source.getByteStream();
if (is != null && is.markSupported())
is.reset();
else {
final Reader cs = source.getCharacterStream();
if (cs != null && cs.markSupported())
cs.reset();
}
} catch (IOException e) {
// mark is not supported: exception is expected, do nothing
}
XMLReader reader = getReader(broker, false, info.getCollectionConfig());
info.setReader(reader, null);
try {
reader.parse(source);
} catch (IOException e) {
throw new EXistException(e);
} finally {
releaseReader(broker, info, reader);
}
}
});
}
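// Illustrative sketch (hypothetical caller code) of the two-phase protocol
// described in the Javadoc above: validate first (this acquires the write lock
// on the new document), then store. "transaction" is assumed to be an active Txn.
//
//   String data = "<root><child/></root>";
//   IndexInfo info = collection.validateXMLResource(transaction, broker,
//           XmldbURI.create("data.xml"), data);
//   collection.store(transaction, broker, info, data, false);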
/** Stores an XML document in the database. {@link #validateXMLResourceInternal(org.exist.storage.txn.Txn,
* org.exist.storage.DBBroker, org.exist.xmldb.XmldbURI, CollectionConfiguration, org.exist.collections.Collection.ValidateBlock)}
* should have been called previously in order to acquire a write lock for the document. Launches the finish trigger.
* @param transaction
* @param broker
* @param info
* @param data
* @param privileged
* @throws EXistException
* @throws PermissionDeniedException
* @throws TriggerException
* @throws SAXException
* @throws LockException
*/
public void store(Txn transaction, final DBBroker broker, final IndexInfo info, final String data, boolean privileged)
throws EXistException, PermissionDeniedException, TriggerException, SAXException, LockException {
storeXMLInternal(transaction, broker, info, privileged, new StoreBlock() {
public void run() throws SAXException, EXistException {
CollectionConfiguration colconf
= info.getDocument().getCollection().getConfiguration(broker);
XMLReader reader = getReader(broker, false, colconf);
info.setReader(reader, null);
try {
reader.parse(new InputSource(new StringReader(data)));
} catch (IOException e) {
throw new EXistException(e);
} finally {
releaseReader(broker, info, reader);
}
}
});
}
/** Stores an XML document in the database. {@link #validateXMLResourceInternal(org.exist.storage.txn.Txn,
* org.exist.storage.DBBroker, org.exist.xmldb.XmldbURI, CollectionConfiguration, org.exist.collections.Collection.ValidateBlock)}
* should have been called previously in order to acquire a write lock for the document. Launches the finish trigger.
* @param transaction
* @param broker
* @param info
* @param node
* @param privileged
* @throws EXistException
* @throws PermissionDeniedException
* @throws TriggerException
* @throws SAXException
* @throws LockException
*/
public void store(Txn transaction, DBBroker broker, final IndexInfo info, final Node node, boolean privileged)
throws EXistException, PermissionDeniedException, TriggerException, SAXException, LockException {
storeXMLInternal(transaction, broker, info, privileged, new StoreBlock() {
public void run() throws EXistException, SAXException {
info.getDOMStreamer().serialize(node, true);
}
});
}
private interface StoreBlock {
public void run() throws EXistException, SAXException;
}
/** Stores an XML document in the database. {@link #validateXMLResourceInternal(org.exist.storage.txn.Txn,
* org.exist.storage.DBBroker, org.exist.xmldb.XmldbURI, CollectionConfiguration, org.exist.collections.Collection.ValidateBlock)}
* should have been called previously in order to acquire a write lock for the document. Launches the finish trigger.
* @param transaction
* @param broker
* @param info
* @param privileged
* @param doParse
* @throws EXistException
* @throws SAXException
*/
private void storeXMLInternal(Txn transaction, DBBroker broker, IndexInfo info, boolean privileged, StoreBlock doParse) throws EXistException, SAXException {
DocumentImpl document = info.getIndexer().getDocument();
LOG.debug("storing document " + document.getDocId() + " ...");
//Sanity check
if (!document.getUpdateLock().isLockedForWrite()) {
LOG.warn("document is not locked for write !");
}
try {
broker.getBrokerPool().getProcessMonitor().startJob(ProcessMonitor.ACTION_STORE_DOC, document.getFileURI());
doParse.run();
broker.storeXMLResource(transaction, document);
broker.flush();
broker.closeDocument();
// broker.checkTree(document);
LOG.debug("document stored.");
// if we are running in privileged mode (e.g. backup/restore), notify the SecurityManager about changes
if (getURI().equals(XmldbURI.SYSTEM_COLLECTION_URI)
&& document.getFileURI().equals(XMLSecurityManager.ACL_FILE_URI)
&& privileged == false) {
// inform the security manager that system data has changed
LOG.debug("users.xml changed");
broker.getBrokerPool().reloadSecurityManager(broker);
}
} finally {
//This lock has been acquired in validateXMLResourceInternal()
document.getUpdateLock().release(Lock.WRITE_LOCK);
broker.getBrokerPool().getProcessMonitor().endJob();
}
collectionConfEnabled = true;
broker.deleteObservers();
info.finishTrigger(broker, transaction, document.getURI(), document);
broker.getBrokerPool().getNotificationService().notifyUpdate(document,
(info.getEvent() == Trigger.UPDATE_DOCUMENT_EVENT ? UpdateListener.UPDATE : UpdateListener.ADD));
//Is it a collection configuration file ?
XmldbURI docName = document.getFileURI();
//WARNING : there is no reason to lock the collection since setPath() is normally called in a safe way
//TODO: *resolve* URI against CollectionConfigurationManager.CONFIG_COLLECTION_URI
if (getURI().startsWith(XmldbURI.CONFIG_COLLECTION_URI)
&& docName.endsWith(CollectionConfiguration.COLLECTION_CONFIG_SUFFIX_URI)) {
broker.sync(Sync.MAJOR_SYNC);
CollectionConfigurationManager manager = broker.getBrokerPool().getConfigurationManager();
if (manager != null) {
try {
manager.invalidateAll(getURI());
manager.loadConfiguration(broker, this);
} catch (CollectionConfigurationException e) {
// DIZ: should this exception really been thrown? bugid=1807744
throw new EXistException("Error while reading new collection configuration: " + e.getMessage(), e);
}
}
}
}
private interface ValidateBlock {
public void run(IndexInfo info) throws SAXException, EXistException;
}
/** Validates an XML document and prepares it for further storage. Launches prepare and postValidate triggers.
* Since the process depends on the collection configuration, the collection acquires a write lock during the process.
* @param transaction
* @param broker
* @param docUri
* @param data
* @return An {@link IndexInfo} with a write lock on the document.
* @throws EXistException
* @throws PermissionDeniedException
* @throws TriggerException
* @throws SAXException
* @throws LockException
*/
public IndexInfo validateXMLResource(Txn transaction, DBBroker broker, XmldbURI docUri, String data)
throws EXistException, PermissionDeniedException, TriggerException,
SAXException, LockException, IOException {
return validateXMLResource(transaction, broker, docUri, new InputSource(new StringReader(data)));
}
/** Validates an XML document and prepares it for further storage. Launches prepare and postValidate triggers.
* Since the process depends on the collection configuration, the collection acquires a write lock during the process.
* @param transaction
* @param broker
* @param docUri
* @param source
* @return An {@link IndexInfo} with a write lock on the document.
* @throws EXistException
* @throws PermissionDeniedException
* @throws TriggerException
* @throws SAXException
* @throws LockException
*/
public IndexInfo validateXMLResource(Txn transaction, final DBBroker broker, XmldbURI docUri, final InputSource source)
throws EXistException, PermissionDeniedException, TriggerException,
SAXException, LockException, IOException {
final CollectionConfiguration colconf = getConfiguration(broker);
return validateXMLResourceInternal(transaction, broker, docUri, colconf, new ValidateBlock() {
public void run(IndexInfo info) throws SAXException, EXistException {
XMLReader reader = getReader(broker, true, colconf);
info.setReader(reader, null);
try {
reader.parse(source);
} catch (SAXException e) {
throw new SAXException("The XML parser reported a problem: " + e.getMessage(), e);
} catch (IOException e) {
throw new EXistException(e);
} finally {
releaseReader(broker, info, reader);
}
}
});
}
/** Validates an XML document and prepares it for further storage. Launches prepare and postValidate triggers.
* Since the process depends on the collection configuration, the collection acquires a write lock during the process.
* @param transaction
* @param broker
* @param docUri
* @param node
* @return An {@link IndexInfo} with a write lock on the document.
* @throws EXistException
* @throws PermissionDeniedException
* @throws TriggerException
* @throws SAXException
* @throws LockException
*/
public IndexInfo validateXMLResource(Txn transaction, final DBBroker broker, XmldbURI docUri, final Node node)
throws EXistException, PermissionDeniedException, TriggerException,
SAXException, LockException, IOException {
return validateXMLResourceInternal(transaction, broker, docUri, getConfiguration(broker), new ValidateBlock() {
public void run(IndexInfo info) throws SAXException {
info.setDOMStreamer(new DOMStreamer());
info.getDOMStreamer().serialize(node, true);
}
});
}
/** Validates an XML document and prepares it for further storage. Launches prepare and postValidate triggers.
* Since the process depends on the collection configuration, the collection acquires a write lock during the process.
* @param transaction
* @param broker
* @param docUri
* @param doValidate
* @return An {@link IndexInfo} with a write lock on the document.
* @throws EXistException
* @throws PermissionDeniedException
* @throws TriggerException
* @throws SAXException
* @throws LockException
*/
private IndexInfo validateXMLResourceInternal(Txn transaction, DBBroker broker, XmldbURI docUri, CollectionConfiguration config, ValidateBlock doValidate)
throws EXistException, PermissionDeniedException, TriggerException, SAXException, LockException, IOException {
//Perform the necessary operations if we are processing a collection configuration document
checkConfigurationDocument(transaction, broker, docUri);
if (broker.isReadOnly()) throw new PermissionDeniedException("Database is read-only");
DocumentImpl oldDoc = null;
boolean oldDocLocked = false;
try {
broker.getBrokerPool().getProcessMonitor().startJob(ProcessMonitor.ACTION_VALIDATE_DOC, docUri);
getLock().acquire(Lock.WRITE_LOCK);
DocumentImpl document = new DocumentImpl(broker.getBrokerPool(), this, docUri);
oldDoc = (DocumentImpl) documents.get(docUri.getRawCollectionPath());
checkPermissions(transaction, broker, oldDoc);
manageDocumentInformation(broker, oldDoc, document );
Indexer indexer = new Indexer(broker, transaction);
IndexInfo info = new IndexInfo(indexer, config);
indexer.setDocument(document, config);
addObserversToIndexer(broker, indexer);
indexer.setValidating(true);
// if !triggersEnabled, setupTriggers will return null anyway, so no need to check
info.setTrigger(
setupTriggers(broker, docUri, oldDoc != null, config),
oldDoc == null ? Trigger.STORE_DOCUMENT_EVENT : Trigger.UPDATE_DOCUMENT_EVENT);
info.prepareTrigger(broker, transaction, getURI().append(docUri), oldDoc);
LOG.debug("Scanning document " + getURI().append(docUri));
doValidate.run(info);
// new document is valid: remove old document
if (oldDoc != null) {
LOG.debug("removing old document " + oldDoc.getFileURI());
oldDoc.getUpdateLock().acquire(Lock.WRITE_LOCK);
oldDocLocked = true;
if (oldDoc.getResourceType() == DocumentImpl.BINARY_FILE) {
//TODO : use a more elaborated method ? No triggers...
broker.removeBinaryResource(transaction, (BinaryDocument) oldDoc);
documents.remove(oldDoc.getFileURI().getRawCollectionPath());
//This lock is released in storeXMLInternal()
//TODO : check that we go until there to ensure the lock is released
document.getUpdateLock().acquire(Lock.WRITE_LOCK);
document.setDocId(broker.getNextResourceId(transaction, this));
addDocument(transaction, broker, document);
} else {
//TODO : use a more elaborated method ? No triggers...
broker.removeXMLResource(transaction, oldDoc, false);
oldDoc.copyOf(document);
indexer.setDocumentObject(oldDoc);
//old has become new at this point
document = oldDoc;
oldDocLocked = false;
}
LOG.debug("removed old document " + oldDoc.getFileURI());
} else {
//This lock is released in storeXMLInternal()
//TODO : check that we go until there to ensure the lock is released
document.getUpdateLock().acquire(Lock.WRITE_LOCK);
document.setDocId(broker.getNextResourceId(transaction, this));
addDocument(transaction, broker, document);
}
indexer.setValidating(false);
info.postValidateTrigger();
return info;
} finally {
if (oldDocLocked)
oldDoc.getUpdateLock().release(Lock.WRITE_LOCK);
getLock().release(Lock.WRITE_LOCK);
broker.getBrokerPool().getProcessMonitor().endJob();
}
}
private void checkConfigurationDocument(Txn transaction, DBBroker broker, XmldbURI docUri) throws EXistException, PermissionDeniedException,
IOException {
//Is it a collection configuration file ?
//TODO : use XmldbURI.resolve() !
if (!getURI().startsWith(XmldbURI.CONFIG_COLLECTION_URI))
return;
if (!docUri.endsWith(CollectionConfiguration.COLLECTION_CONFIG_SUFFIX_URI))
return;
//Allow just one configuration document per collection
//TODO : do not throw the exception if a system property allows several ones -pb
for(Iterator i = iterator(broker); i.hasNext(); ) {
DocumentImpl confDoc = (DocumentImpl) i.next();
XmldbURI currentConfDocName = confDoc.getFileURI();
if(currentConfDocName != null && !currentConfDocName.equals(docUri)) {
throw new EXistException("Could not store configuration '" + docUri + "': A configuration document with a different name ("
+ currentConfDocName + ") already exists in this collection (" + getURI() + ")");
}
}
// broker.saveCollection(transaction, this);
// CollectionConfigurationManager confMgr = broker.getBrokerPool().getConfigurationManager();
// if(confMgr != null)
// try {
// confMgr.reload(broker, this);
// } catch (CollectionConfigurationException e) {
// throw new EXistException("An error occurred while reloading the updated collection configuration: " + e.getMessage(), e);
// }
}
/** add observers to the indexer
* @param broker
* @param indexer
*/
private void addObserversToIndexer(DBBroker broker, Indexer indexer) {
broker.deleteObservers();
if (observers != null) {
for (int i = 0; i < observers.length; i++) {
indexer.addObserver(observers[i]);
broker.addObserver(observers[i]);
}
}
}
/** If an old document exists, keep information about the document.
* @param broker
* @param document
*/
private void manageDocumentInformation(DBBroker broker, DocumentImpl oldDoc,
DocumentImpl document) {
DocumentMetadata metadata = new DocumentMetadata();
if (oldDoc != null) {
metadata = oldDoc.getMetadata();
metadata.setCreated(oldDoc.getMetadata().getCreated());
metadata.setLastModified(System.currentTimeMillis());
document.setPermissions(oldDoc.getPermissions());
} else {
User user = broker.getUser();
metadata.setCreated(System.currentTimeMillis());
document.getPermissions().setOwner(user);
String group;
CollectionConfiguration config = getConfiguration(broker);
if (config != null) {
document.setPermissions(config.getDefResPermissions());
group = config.getDefResGroup(user);
} else {
group = user.getPrimaryGroup();
}
document.getPermissions().setGroup(group);
}
document.setMetadata(metadata);
}
/**
* Check permissions for the user and the document, and throw exceptions if necessary.
*
* @param broker
* @param oldDoc old Document existing in database prior to adding a new one with same name.
* @throws LockException
* @throws PermissionDeniedException
*/
private void checkPermissions(Txn transaction, DBBroker broker, DocumentImpl oldDoc) throws LockException, PermissionDeniedException {
if (oldDoc != null) {
LOG.debug("Found old doc " + oldDoc.getDocId());
// check if the document is locked by another user
User lockUser = oldDoc.getUserLock();
if(lockUser != null && !lockUser.equals(broker.getUser()))
throw new PermissionDeniedException("The document is locked by user " +
lockUser.getName());
// do we have permissions for update?
if (!oldDoc.getPermissions().validate(broker.getUser(),
Permission.UPDATE))
throw new PermissionDeniedException(
"Document exists and update is not allowed");
if (!(getPermissions().validate(broker.getUser(), Permission.UPDATE) ||
getPermissions().validate(broker.getUser(), Permission.WRITE)))
throw new PermissionDeniedException(
"Document exists and update is not allowed for the collection");
// do we have write permissions?
} else if (!getPermissions().validate(broker.getUser(), Permission.WRITE))
throw new PermissionDeniedException(
"User '" + broker.getUser().getName() + "' not allowed to write to collection '" + getURI() + "'");
}
private DocumentTrigger setupTriggers(DBBroker broker, XmldbURI docUri, boolean update, CollectionConfiguration config) {
//TODO : is this the right place for such a task ? -pb
if (CollectionConfiguration.DEFAULT_COLLECTION_CONFIG_FILE_URI.equals(docUri)) {
// we are updating collection.xconf. Notify configuration manager
// CollectionConfigurationManager confMgr = broker.getBrokerPool().getConfigurationManager();
// confMgr.invalidateAll(getURI());
collectionConfEnabled = false;
return null;
}
if (!triggersEnabled)
return null;
if (config == null)
return null;
DocumentTrigger trigger = null;
try {
if (update)
trigger = (DocumentTrigger) config.newTrigger(Trigger.UPDATE_DOCUMENT_EVENT, broker, this);
else
trigger = (DocumentTrigger) config.newTrigger(Trigger.STORE_DOCUMENT_EVENT, broker, this);
} catch (CollectionConfigurationException e) {
LOG.debug("An error occurred while initializing a trigger for collection " + getURI() + ": " + e.getMessage(), e);
}
if(trigger == null)
return null;
if (update)
LOG.debug("Using update trigger '" + trigger.getClass().getName() + "'");
else
LOG.debug("Using store trigger '" + trigger.getClass().getName() + "'");
return trigger;
}
// Blob
public BinaryDocument addBinaryResource(Txn transaction, DBBroker broker,
XmldbURI docUri, byte[] data, String mimeType)
throws EXistException, PermissionDeniedException, LockException, TriggerException,IOException {
return addBinaryResource(transaction, broker, docUri, data, mimeType, null, null);
}
// Blob
public BinaryDocument addBinaryResource(Txn transaction, DBBroker broker,
XmldbURI docUri, byte[] data, String mimeType, Date created, Date modified)
throws EXistException, PermissionDeniedException, LockException, TriggerException,IOException {
return addBinaryResource(transaction, broker, docUri,
new ByteArrayInputStream(data), mimeType, data.length, created, modified);
}
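// Illustrative sketch (hypothetical caller code): storing a small binary
// resource from a byte array. "transaction" is assumed to be an active Txn.
//
//   byte[] bytes = "hello, world".getBytes();
//   BinaryDocument blob = collection.addBinaryResource(transaction, broker,
//           XmldbURI.create("hello.txt"), bytes, "text/plain");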
// Streaming
public BinaryDocument addBinaryResource(Txn transaction, DBBroker broker,
XmldbURI docUri, InputStream is, String mimeType, int size)
throws EXistException, PermissionDeniedException, LockException, TriggerException,IOException {
return addBinaryResource(transaction, broker, docUri, is, mimeType, size, null, null);
}
// Streaming
public BinaryDocument addBinaryResource(Txn transaction, DBBroker broker,
XmldbURI docUri, InputStream is, String mimeType, int size, Date created, Date modified)
throws EXistException, PermissionDeniedException, LockException, TriggerException,IOException {
if (broker.isReadOnly())
throw new PermissionDeniedException("Database is read-only");
BinaryDocument blob = new BinaryDocument(broker.getBrokerPool(), this, docUri);
//TODO : move later, i.e. after the collection lock is acquired ?
DocumentImpl oldDoc = getDocument(broker, docUri);
try {
broker.getBrokerPool().getProcessMonitor().startJob(ProcessMonitor.ACTION_STORE_BINARY, docUri);
getLock().acquire(Lock.WRITE_LOCK);
checkPermissions(transaction, broker, oldDoc);
DocumentTrigger trigger = null;
int event = 0;
/*
if (triggersEnabled) {
CollectionConfiguration config = getConfiguration(broker);
if (config != null) {
event = oldDoc != null ? Trigger.UPDATE_DOCUMENT_EVENT : Trigger.STORE_DOCUMENT_EVENT;
try {
trigger = (DocumentTrigger) config.newTrigger(event, broker, this);
} catch (CollectionConfigurationException e) {
LOG.debug("An error occurred while initializing a trigger for collection " + getURI() + ": " + e.getMessage(), e);
}
if (trigger != null) {
trigger.prepare(event, broker, transaction, blob.getURI(), blob);
}
}
}
*/
manageDocumentInformation(broker, oldDoc, blob );
DocumentMetadata metadata = blob.getMetadata();
metadata.setMimeType(mimeType == null ? MimeType.BINARY_TYPE.getName() : mimeType);
if (oldDoc != null) {
LOG.debug("removing old document " + oldDoc.getFileURI());
if (oldDoc instanceof BinaryDocument)
broker.removeBinaryResource(transaction, (BinaryDocument) oldDoc);
else
broker.removeXMLResource(transaction, oldDoc);
}
if(created != null)
metadata.setCreated(created.getTime());
if(modified != null)
metadata.setLastModified(modified.getTime());
blob.setContentLength(size);
broker.storeBinaryResource(transaction, blob, is);
addDocument(transaction, broker, blob);
broker.storeXMLResource(transaction, blob);
if (triggersEnabled) {
CollectionConfiguration config = getConfiguration(broker);
if (config != null) {
event = oldDoc != null ? Trigger.UPDATE_DOCUMENT_EVENT : Trigger.STORE_DOCUMENT_EVENT;
try {
trigger = (DocumentTrigger) config.newTrigger(event, broker, this);
} catch (CollectionConfigurationException e) {
LOG.debug("An error occurred while initializing a trigger for collection " + getURI() + ": " + e.getMessage(), e);
}
if (trigger != null) {
trigger.prepare(event, broker, transaction, blob.getURI(), blob);
}
}
}
// This is no longer needed as the dom.dbx isn't used
//broker.closeDocument();
if (trigger != null) {
trigger.finish(event, broker, transaction, blob.getURI(), blob);
}
return blob;
} finally {
broker.getBrokerPool().getProcessMonitor().endJob();
getLock().release(Lock.WRITE_LOCK);
}
}
public void setId(int id) {
this.collectionId = id;
}
public void setPermissions(int mode) throws LockException {
try {
getLock().acquire(Lock.WRITE_LOCK);
permissions.setPermissions(mode);
} finally {
getLock().release(Lock.WRITE_LOCK);
}
}
public void setPermissions(String mode) throws SyntaxException, LockException {
try {
getLock().acquire(Lock.WRITE_LOCK);
permissions.setPermissions(mode);
} finally {
getLock().release(Lock.WRITE_LOCK);
}
}
/**
* Set permissions for the collection.
*
* @param permissions
*/
public void setPermissions(Permission permissions) throws LockException {
try {
getLock().acquire(Lock.WRITE_LOCK);
this.permissions = permissions;
} finally {
getLock().release(Lock.WRITE_LOCK);
}
}
/**
* Write collection contents to stream.
*
* @param ostream
* @throws IOException
*/
public void write(DBBroker broker, VariableByteOutputStream ostream) throws IOException {
ostream.writeInt(collectionId);
ostream.writeInt(subcollections.size());
XmldbURI childColl;
for (Iterator i = subcollections.iterator(); i.hasNext(); ) {
childColl = (XmldbURI)i.next();
ostream.writeUTF(childColl.toString());
}
SecurityManager secman = broker.getBrokerPool().getSecurityManager();
if (secman == null) {
ostream.writeInt(1);
ostream.writeInt(1);
} else {
User user = secman.getUser(permissions.getOwner());
Group group = secman.getGroup(permissions.getOwnerGroup());
if (user==null) {
throw new IllegalStateException("The user "+permissions.getOwner()+" for the collection cannot be found.");
}
if (group == null)
group = secman.getGroup(SecurityManager.GUEST_GROUP);
ostream.writeInt(user.getUID());
ostream.writeInt(group.getId());
}
ostream.writeInt(permissions.getPermissions());
ostream.writeLong(created);
}
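// For reference, the record layout produced above (and consumed by read()) is:
//   int    collectionId
//   int    number of subcollections, followed by one UTF string per child name
//   int    owner uid
//   int    owner gid
//   int    permission bits
//   long   creation time (ms)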
public CollectionConfiguration getConfiguration(DBBroker broker) {
if (!collectionConfEnabled)
return null;
//System collection has no configuration
if (DBBroker.SYSTEM_COLLECTION.equals(getURI().getRawCollectionPath()))
return null;
CollectionConfigurationManager manager = broker.getBrokerPool().getConfigurationManager();
if (manager == null)
return null;
//Attempt to get configuration
CollectionConfiguration configuration = null;
collectionConfEnabled = false;
try {
            //TODO: AR: if a Trigger throws CollectionConfigurationException from its configure() method, is the rest of the collection configuration (indexes etc.) ignored even though they might be fine?
configuration = manager.getConfiguration(broker, this);
collectionConfEnabled = true;
} catch (CollectionConfigurationException e) {
LOG.warn("Failed to load collection configuration for '" + getURI() + "'", e);
}
// LOG.debug("Loaded configuration for collection: " + getURI());
return configuration;
}
/**
* Should the collection configuration document be enabled
* for this collection? Called by {@link org.exist.storage.NativeBroker}
* before doing a reindex.
*
* @param enabled
*/
public void setConfigEnabled(boolean enabled) {
collectionConfEnabled = enabled;
}
/**
* Set the internal storage address of the collection data.
*
* @param addr
*/
public void setAddress(long addr) {
this.address = addr;
}
public long getAddress() {
return this.address;
}
public void setCreationTime(long ms) {
created = ms;
}
public long getCreationTime() {
return created;
}
public void setTriggersEnabled(boolean enabled) {
try {
getLock().acquire(Lock.WRITE_LOCK);
this.triggersEnabled = enabled;
} catch (LockException e) {
LOG.warn(e.getMessage(), e);
//Ouch ! -pb
this.triggersEnabled = enabled;
} finally {
getLock().release(Lock.WRITE_LOCK);
}
}
/** set user-defined Reader */
public void setReader(XMLReader reader){
userReader = reader;
}
// /**
// * If user-defined Reader is set, return it; otherwise return JAXP
// * default XMLReader configured by eXist.
// */
// private XMLReader getReader(DBBroker broker) throws EXistException,
// SAXException {
//
// if(userReader != null){
// return userReader;
// }
//
// return broker.getBrokerPool().getParserPool().borrowXMLReader();
// }
/**
* Get xml reader from readerpool and setup validation when needed.
*/
private XMLReader getReader(DBBroker broker, boolean validation, CollectionConfiguration colconfig) throws EXistException,
SAXException {
// If user-defined Reader is set, return it;
if(userReader != null){
return userReader;
}
// Get reader from readerpool.
XMLReader reader= broker.getBrokerPool().getParserPool().borrowXMLReader();
// If Collection configuration exists (try to) get validation mode
// and setup reader with this information.
if (!validation)
XMLReaderObjectFactory.setReaderValidationMode(XMLReaderObjectFactory.VALIDATION_DISABLED, reader);
else if( colconfig!=null ) {
int mode=colconfig.getValidationMode();
XMLReaderObjectFactory.setReaderValidationMode(mode, reader);
}
// Return configured reader.
return reader;
}
/**
* Reset validation mode of reader and return reader to reader pool.
*/
private void releaseReader(DBBroker broker, IndexInfo info, XMLReader reader) {
if(userReader != null){
return;
}
if (info.getIndexer().getDocSize() > POOL_PARSER_THRESHOLD)
return;
// Get validation mode from static configuration
Configuration config = broker.getConfiguration();
String optionValue = (String) config.getProperty(XMLReaderObjectFactory.PROPERTY_VALIDATION_MODE);
int validationMode = XMLReaderObjectFactory.convertValidationMode(optionValue);
// Restore default validation mode
XMLReaderObjectFactory.setReaderValidationMode(validationMode, reader);
// Return reader
broker.getBrokerPool().getParserPool().returnXMLReader(reader);
}
/* (non-Javadoc)
* @see java.util.Observable#addObserver(java.util.Observer)
*/
public void addObserver(Observer o) {
if (hasObserver(o)) return;
if (observers == null) {
observers = new Observer[1];
observers[0] = o;
} else {
Observer n[] = new Observer[observers.length + 1];
System.arraycopy(observers, 0, n, 0, observers.length);
n[observers.length] = o;
observers = n;
}
}
private boolean hasObserver(Observer o) {
if (observers == null)
return false;
for (int i = 0; i < observers.length; i++) {
if (observers[i] == o)
return true;
}
return false;
}
/* (non-Javadoc)
* @see java.util.Observable#deleteObservers()
*/
public void deleteObservers() {
if (observers != null)
observers = null;
}
/* (non-Javadoc)
* @see org.exist.storage.cache.Cacheable#getKey()
*/
public long getKey() {
return collectionId;
}
/* (non-Javadoc)
* @see org.exist.storage.cache.Cacheable#getReferenceCount()
*/
public int getReferenceCount() {
return refCount;
}
/* (non-Javadoc)
* @see org.exist.storage.cache.Cacheable#incReferenceCount()
*/
public int incReferenceCount() {
return ++refCount;
}
/* (non-Javadoc)
* @see org.exist.storage.cache.Cacheable#decReferenceCount()
*/
public int decReferenceCount() {
return refCount > 0 ? --refCount : 0;
}
/* (non-Javadoc)
* @see org.exist.storage.cache.Cacheable#setReferenceCount(int)
*/
public void setReferenceCount(int count) {
refCount = count;
}
/* (non-Javadoc)
* @see org.exist.storage.cache.Cacheable#setTimestamp(int)
*/
public void setTimestamp(int timestamp) {
this.timestamp = timestamp;
}
/* (non-Javadoc)
* @see org.exist.storage.cache.Cacheable#getTimestamp()
*/
public int getTimestamp() {
return timestamp;
}
/* (non-Javadoc)
* @see org.exist.storage.cache.Cacheable#release()
*/
public boolean sync(boolean syncJournal) {
return false;
}
/* (non-Javadoc)
* @see org.exist.storage.cache.Cacheable#isDirty()
*/
public boolean isDirty() {
return false;
}
public String toString() {
StringBuilder buf = new StringBuilder();
buf.append( getURI() );
buf.append("[");
for(Iterator i = documents.keySet().iterator(); i.hasNext(); ) {
buf.append(i.next());
if(i.hasNext())
buf.append(", ");
}
buf.append("]");
return buf.toString();
}
/**
* (Make private?)
* @param broker
*/
public IndexSpec getIndexConfiguration(DBBroker broker) {
CollectionConfiguration conf = getConfiguration(broker);
        //If the collection has no configuration of its own, fall back to the general index config (the broker's one)...
        if (conf == null) {
            return broker.getIndexConfiguration();
        //... otherwise return the collection's own index configuration
        } else {
            return conf.getIndexConfiguration();
}
}
public GeneralRangeIndexSpec getIndexByPathConfiguration(DBBroker broker, NodePath path) {
IndexSpec idxSpec = getIndexConfiguration(broker);
return (idxSpec == null) ? null : idxSpec.getIndexByPath(path);
}
public QNameRangeIndexSpec getIndexByQNameConfiguration(DBBroker broker, QName qname) {
IndexSpec idxSpec = getIndexConfiguration(broker);
return (idxSpec == null) ? null : idxSpec.getIndexByQName(qname);
}
public FulltextIndexSpec getFulltextIndexConfiguration(DBBroker broker) {
IndexSpec idxSpec = getIndexConfiguration(broker);
return (idxSpec == null) ? null : idxSpec.getFulltextIndexSpec();
}
}
|
[bugfix] When preparing to store a binary document, send the old document to the trigger and not the new one.
svn path=/trunk/eXist/; revision=10217
|
src/org/exist/collections/Collection.java
|
[bugfix] When preparing to store a binary document, send the old document to the trigger and not the new one.
|
<ide><path>rc/org/exist/collections/Collection.java
<ide> checkPermissions(transaction, broker, oldDoc);
<ide> DocumentTrigger trigger = null;
<ide> int event = 0;
<del>/*
<add>
<ide> if (triggersEnabled) {
<add> CollectionConfiguration config = getConfiguration(broker);
<add> if (config != null) {
<add> event = oldDoc != null ? Trigger.UPDATE_DOCUMENT_EVENT : Trigger.STORE_DOCUMENT_EVENT;
<add> try {
<add> trigger = (DocumentTrigger) config.newTrigger(event, broker, this);
<add> } catch (CollectionConfigurationException e) {
<add> LOG.debug("An error occurred while initializing a trigger for collection " + getURI() + ": " + e.getMessage(), e);
<add> }
<add> if (trigger != null) {
<add> trigger.prepare(event, broker, transaction, getURI().append(docUri), oldDoc);
<add> }
<add> }
<add> }
<add>
<add> manageDocumentInformation(broker, oldDoc, blob );
<add> DocumentMetadata metadata = blob.getMetadata();
<add> metadata.setMimeType(mimeType == null ? MimeType.BINARY_TYPE.getName() : mimeType);
<add>
<add> if (oldDoc != null) {
<add> LOG.debug("removing old document " + oldDoc.getFileURI());
<add> if (oldDoc instanceof BinaryDocument)
<add> broker.removeBinaryResource(transaction, (BinaryDocument) oldDoc);
<add> else
<add> broker.removeXMLResource(transaction, oldDoc);
<add> }
<add>
<add> if(created != null)
<add> metadata.setCreated(created.getTime());
<add>
<add> if(modified != null)
<add> metadata.setLastModified(modified.getTime());
<add>
<add> blob.setContentLength(size);
<add> broker.storeBinaryResource(transaction, blob, is);
<add> addDocument(transaction, broker, blob);
<add>
<add> broker.storeXMLResource(transaction, blob);
<add>
<add> /*
<add> if (triggersEnabled) {
<ide> CollectionConfiguration config = getConfiguration(broker);
<ide> if (config != null) {
<ide> event = oldDoc != null ? Trigger.UPDATE_DOCUMENT_EVENT : Trigger.STORE_DOCUMENT_EVENT;
<ide> trigger.prepare(event, broker, transaction, blob.getURI(), blob);
<ide> }
<ide> }
<del> }
<del>*/
<del>
<del> manageDocumentInformation(broker, oldDoc, blob );
<del> DocumentMetadata metadata = blob.getMetadata();
<del> metadata.setMimeType(mimeType == null ? MimeType.BINARY_TYPE.getName() : mimeType);
<del>
<del> if (oldDoc != null) {
<del> LOG.debug("removing old document " + oldDoc.getFileURI());
<del> if (oldDoc instanceof BinaryDocument)
<del> broker.removeBinaryResource(transaction, (BinaryDocument) oldDoc);
<del> else
<del> broker.removeXMLResource(transaction, oldDoc);
<del> }
<del>
<del> if(created != null)
<del> metadata.setCreated(created.getTime());
<del>
<del> if(modified != null)
<del> metadata.setLastModified(modified.getTime());
<del>
<del> blob.setContentLength(size);
<del> broker.storeBinaryResource(transaction, blob, is);
<del> addDocument(transaction, broker, blob);
<del>
<del> broker.storeXMLResource(transaction, blob);
<del> if (triggersEnabled) {
<del> CollectionConfiguration config = getConfiguration(broker);
<del> if (config != null) {
<del> event = oldDoc != null ? Trigger.UPDATE_DOCUMENT_EVENT : Trigger.STORE_DOCUMENT_EVENT;
<del> try {
<del> trigger = (DocumentTrigger) config.newTrigger(event, broker, this);
<del> } catch (CollectionConfigurationException e) {
<del> LOG.debug("An error occurred while initializing a trigger for collection " + getURI() + ": " + e.getMessage(), e);
<del> }
<del> if (trigger != null) {
<del> trigger.prepare(event, broker, transaction, blob.getURI(), blob);
<del> }
<del> }
<del> }
<add> }*/
<ide>
<ide>
<ide> // This is no longer needed as the dom.dbx isn't used
|
|
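The commit above moves the trigger block ahead of the binary store so that prepare() receives the document that is about to be replaced (oldDoc, possibly null), while finish() still receives the newly stored blob. A minimal sketch of that ordering follows; Doc, DocTrigger and BinaryStore are hypothetical stand-ins used only for illustration, not eXist classes.

// Illustrative only -- shows the prepare(old) / store / finish(new) ordering.
interface DocTrigger {
    void prepare(String uri, Doc oldDoc);   // fired before the store; oldDoc may be null on first store
    void finish(String uri, Doc newDoc);    // fired after the store with the new version
}

class Doc {
    final String uri;
    final byte[] content;
    Doc(String uri, byte[] content) { this.uri = uri; this.content = content; }
}

class BinaryStore {
    private final java.util.Map<String, Doc> docs = new java.util.HashMap<>();

    void store(Doc replacement, DocTrigger trigger) {
        Doc old = docs.get(replacement.uri);              // previous version, if any
        if (trigger != null) {
            trigger.prepare(replacement.uri, old);        // old document, as in the fix above
        }
        docs.put(replacement.uri, replacement);           // overwrite or create
        if (trigger != null) {
            trigger.finish(replacement.uri, replacement); // newly stored document
        }
    }
}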
Java
|
apache-2.0
|
76801caffec0b75ae2741a7efce5cd289db38e62
| 0 |
huitseeker/deeplearning4j,kinbod/deeplearning4j,dmmiller612/deeplearning4j,kinbod/deeplearning4j,huitseeker/deeplearning4j,dmmiller612/deeplearning4j,huitseeker/deeplearning4j,kinbod/deeplearning4j,kinbod/deeplearning4j,kinbod/deeplearning4j,huitseeker/deeplearning4j,dmmiller612/deeplearning4j,dmmiller612/deeplearning4j,kinbod/deeplearning4j,dmmiller612/deeplearning4j,huitseeker/deeplearning4j,dmmiller612/deeplearning4j,huitseeker/deeplearning4j,huitseeker/deeplearning4j
|
/*
*
* * Copyright 2015 Skymind,Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
*
*/
package org.deeplearning4j.nn.layers.convolution.subsampling;
import com.google.common.primitives.Ints;
import org.deeplearning4j.berkeley.Pair;
import org.deeplearning4j.nn.api.Layer;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.gradient.DefaultGradient;
import org.deeplearning4j.nn.gradient.Gradient;
import org.deeplearning4j.nn.layers.BaseLayer;
import org.deeplearning4j.optimize.api.ConvexOptimizer;
import org.deeplearning4j.util.Dropout;
import org.nd4j.linalg.api.iter.NdIndexIterator;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.convolution.Convolution;
import org.nd4j.linalg.factory.Nd4j;
import org.nd4j.linalg.indexing.INDArrayIndex;
import org.nd4j.linalg.indexing.NDArrayIndex;
import org.nd4j.linalg.util.ArrayUtil;
import java.util.*;
/**
* Subsampling layer.
*
* Used for downsampling a convolution
*
* @author Adam Gibson
*/
public class SubsamplingLayer extends BaseLayer {
private INDArray maxIndexes;
public SubsamplingLayer(NeuralNetConfiguration conf) {
super(conf);
}
public SubsamplingLayer(NeuralNetConfiguration conf, INDArray input) {
super(conf, input);
}
@Override
public double calcL2() {
return 0;
}
@Override
public double calcL1() {
return 0;
}
@Override
public Type type() {
return Type.CONVOLUTIONAL;
}
@Override
public Pair<Gradient, INDArray> backpropGradient(INDArray epsilon) {
//subsampling doesn't have weights and thus gradients are not calculated for this layer
//only scale and reshape epsilon
int inputHeight = input().size(-2);
int inputWidth = input().size(-1);
INDArray reshapeEpsilon, retE;
Gradient retGradient = new DefaultGradient();
switch(conf.getPoolingType()) {
case MAX:
int n = epsilon.size(0);
int c = epsilon.size(1);
int outH = epsilon.size(2);
int outW = epsilon.size(3);
//compute backwards kernel based on rearranging the given error
retE = Nd4j.zeros(n, c, conf.getKernelSize()[0], conf.getKernelSize()[1], outH, outW);
reshapeEpsilon = Nd4j.rollAxis(retE.reshape(n,c,-1,outH,outW),2);
Iterator<int[]> iter = new NdIndexIterator(n,c,outH,outW);
while(iter.hasNext()) {
int[] next = iter.next();
double epsGet = epsilon.getDouble(next);
int idx = maxIndexes.getInt(next);
reshapeEpsilon.putScalar(idx,epsGet);
}
reshapeEpsilon = Convolution.col2im(reshapeEpsilon,conf.getStride(),conf.getPadding(),inputHeight, inputWidth);
return new Pair<>(retGradient,reshapeEpsilon);
case AVG:
//compute reverse average error
retE = epsilon.get(
NDArrayIndex.point(0)
, NDArrayIndex.all()
, NDArrayIndex.all()
, NDArrayIndex.newAxis()
, NDArrayIndex.newAxis());
reshapeEpsilon = Nd4j.tile(retE,1,1,conf.getKernelSize()[0],conf.getKernelSize()[1],1,1);
reshapeEpsilon = Convolution.col2im(reshapeEpsilon, conf.getStride(), conf.getPadding(), inputHeight, inputWidth);
reshapeEpsilon.divi(ArrayUtil.prod(conf.getKernelSize()));
return new Pair<>(retGradient, reshapeEpsilon);
case NONE:
return new Pair<>(retGradient, epsilon);
default: throw new IllegalStateException("Un supported pooling type");
}
}
@Override
public INDArray activate(boolean training) {
INDArray pooled, ret;
// n = num examples, c = num channels or depth
int n, c, kh, kw, outWidth, outHeight;
if(training && conf.getDropOut() > 0) {
this.dropoutMask = Dropout.applyDropout(input,conf.getDropOut(),dropoutMask);
}
pooled = Convolution.im2col(input,conf.getKernelSize(),conf.getStride(),conf.getPadding());
switch(conf.getPoolingType()) {
case AVG:
return pooled.mean(2,3);
case MAX:
n = pooled.size(0);
c = pooled.size(1);
kh = pooled.size(2);
kw = pooled.size(3);
outWidth = pooled.size(4);
outHeight = pooled.size(5);
ret = pooled.reshape(n, c, kh * kw, outHeight, outWidth);
maxIndexes = Nd4j.argMax(ret, 2);
return ret.max(2);
case NONE:
return input;
default: throw new IllegalStateException("Pooling type not supported!");
}
}
@Override
public Gradient error(INDArray input) {
throw new UnsupportedOperationException();
}
@Override
public Gradient calcGradient(Gradient layerError, INDArray indArray) {
throw new UnsupportedOperationException();
}
@Override
public void merge(Layer layer, int batchSize) {
throw new UnsupportedOperationException();
}
@Override
public INDArray activationMean() {
throw new UnsupportedOperationException();
}
@Override
public Layer transpose() {
throw new UnsupportedOperationException();
}
@Override
public void iterate(INDArray input) {
throw new UnsupportedOperationException();
}
@Override
public ConvexOptimizer getOptimizer() {
throw new UnsupportedOperationException();
}
@Override
public void fit() {}
@Override
public void fit(INDArray input) {}
@Override
public void computeGradientAndScore() {}
@Override
public double score() {
return 0;
}
@Override
public void accumulateScore(double accum) { throw new UnsupportedOperationException(); }
@Override
public void update(INDArray gradient, String paramType) {
throw new UnsupportedOperationException();
}
@Override
public INDArray params() {
throw new UnsupportedOperationException();
}
@Override
public INDArray getParam(String param) {
throw new UnsupportedOperationException();
}
@Override
public void setParams(INDArray params) {
throw new UnsupportedOperationException();
}
}
|
deeplearning4j-core/src/main/java/org/deeplearning4j/nn/layers/convolution/subsampling/SubsamplingLayer.java
|
/*
*
* * Copyright 2015 Skymind,Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
*
*/
package org.deeplearning4j.nn.layers.convolution.subsampling;
import com.google.common.primitives.Ints;
import org.deeplearning4j.berkeley.Pair;
import org.deeplearning4j.nn.api.Layer;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.gradient.DefaultGradient;
import org.deeplearning4j.nn.gradient.Gradient;
import org.deeplearning4j.nn.layers.BaseLayer;
import org.deeplearning4j.optimize.api.ConvexOptimizer;
import org.deeplearning4j.util.Dropout;
import org.nd4j.linalg.api.iter.NdIndexIterator;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.convolution.Convolution;
import org.nd4j.linalg.factory.Nd4j;
import org.nd4j.linalg.indexing.INDArrayIndex;
import org.nd4j.linalg.indexing.NDArrayIndex;
import org.nd4j.linalg.util.ArrayUtil;
import java.util.*;
/**
* Subsampling layer.
*
* Used for downsampling a convolution
*
* @author Adam Gibson
*/
public class SubsamplingLayer extends BaseLayer {
private INDArray maxIndexes;
public SubsamplingLayer(NeuralNetConfiguration conf) {
super(conf);
}
public SubsamplingLayer(NeuralNetConfiguration conf, INDArray input) {
super(conf, input);
}
@Override
public double calcL2() {
return 0;
}
@Override
public double calcL1() {
return 0;
}
@Override
public Type type() {
return Type.CONVOLUTIONAL;
}
@Override
public Pair<Gradient, INDArray> backpropGradient(INDArray epsilon) {
//subsampling doesn't have weights and thus gradients are not calculated for this layer
//only scale and reshape epsilon
int inputHeight = input().size(-2);
int inputWidth = input().size(-1);
INDArray reshapeEpsilon, retE;
Gradient retGradient = new DefaultGradient();
switch(conf.getPoolingType()) {
case MAX:
int n = epsilon.size(0);
int c = epsilon.size(1);
int outH = epsilon.size(2);
int outW = epsilon.size(3);
//compute backwards kernel based on rearranging the given error
retE = Nd4j.zeros(n, c, conf.getKernelSize()[0], conf.getKernelSize()[1], outH, outW);
reshapeEpsilon = Nd4j.rollAxis(retE.reshape(n,c,-1,outH,outW),2);
Iterator<int[]> iter = new NdIndexIterator(n,c,outH,outW);
while(iter.hasNext()) {
int[] next = iter.next();
double epsGet = epsilon.getDouble(next);
double get = maxIndexes.getDouble(next);
reshapeEpsilon.putScalar(next,epsilon.getDouble(next));
}
reshapeEpsilon = Convolution.col2im(reshapeEpsilon,conf.getStride(),conf.getPadding(),inputHeight, inputWidth);
return new Pair<>(retGradient,reshapeEpsilon);
case AVG:
//compute reverse average error
retE = epsilon.get(
NDArrayIndex.point(0)
, NDArrayIndex.all()
, NDArrayIndex.all()
, NDArrayIndex.newAxis()
, NDArrayIndex.newAxis());
reshapeEpsilon = Nd4j.tile(retE,1,1,conf.getKernelSize()[0],conf.getKernelSize()[1],1,1);
reshapeEpsilon = Convolution.col2im(reshapeEpsilon, conf.getStride(), conf.getPadding(), inputHeight, inputWidth);
reshapeEpsilon.divi(ArrayUtil.prod(conf.getKernelSize()));
return new Pair<>(retGradient, reshapeEpsilon);
case NONE:
return new Pair<>(retGradient, epsilon);
default: throw new IllegalStateException("Un supported pooling type");
}
}
@Override
public INDArray activate(boolean training) {
INDArray pooled, ret;
// n = num examples, c = num channels or depth
int n, c, kh, kw, outWidth, outHeight;
if(training && conf.getDropOut() > 0) {
this.dropoutMask = Dropout.applyDropout(input,conf.getDropOut(),dropoutMask);
}
pooled = Convolution.im2col(input,conf.getKernelSize(),conf.getStride(),conf.getPadding());
switch(conf.getPoolingType()) {
case AVG:
return pooled.mean(2,3);
case MAX:
n = pooled.size(0);
c = pooled.size(1);
kh = pooled.size(2);
kw = pooled.size(3);
outWidth = pooled.size(4);
outHeight = pooled.size(5);
ret = pooled.reshape(n, c, kh * kw, outHeight, outWidth);
maxIndexes = Nd4j.argMax(ret, 2);
return ret.max(2);
case NONE:
return input;
default: throw new IllegalStateException("Pooling type not supported!");
}
}
@Override
public Gradient error(INDArray input) {
throw new UnsupportedOperationException();
}
@Override
public Gradient calcGradient(Gradient layerError, INDArray indArray) {
throw new UnsupportedOperationException();
}
@Override
public void merge(Layer layer, int batchSize) {
throw new UnsupportedOperationException();
}
@Override
public INDArray activationMean() {
throw new UnsupportedOperationException();
}
@Override
public Layer transpose() {
throw new UnsupportedOperationException();
}
@Override
public void iterate(INDArray input) {
throw new UnsupportedOperationException();
}
@Override
public ConvexOptimizer getOptimizer() {
throw new UnsupportedOperationException();
}
@Override
public void fit() {}
@Override
public void fit(INDArray input) {}
@Override
public void computeGradientAndScore() {}
@Override
public double score() {
return 0;
}
@Override
public void accumulateScore(double accum) { throw new UnsupportedOperationException(); }
@Override
public void update(INDArray gradient, String paramType) {
throw new UnsupportedOperationException();
}
@Override
public INDArray params() {
throw new UnsupportedOperationException();
}
@Override
public INDArray getParam(String param) {
throw new UnsupportedOperationException();
}
@Override
public void setParams(INDArray params) {
throw new UnsupportedOperationException();
}
}
|
max index changes on pooling
Former-commit-id: aeef51a1d556742954be96dc91ed6385704b9cab
|
deeplearning4j-core/src/main/java/org/deeplearning4j/nn/layers/convolution/subsampling/SubsamplingLayer.java
|
max index changes on pooling
|
<ide><path>eeplearning4j-core/src/main/java/org/deeplearning4j/nn/layers/convolution/subsampling/SubsamplingLayer.java
<ide> while(iter.hasNext()) {
<ide> int[] next = iter.next();
<ide> double epsGet = epsilon.getDouble(next);
<del>
<del> double get = maxIndexes.getDouble(next);
<del> reshapeEpsilon.putScalar(next,epsilon.getDouble(next));
<add> int idx = maxIndexes.getInt(next);
<add> reshapeEpsilon.putScalar(idx,epsGet);
<ide> }
<ide> reshapeEpsilon = Convolution.col2im(reshapeEpsilon,conf.getStride(),conf.getPadding(),inputHeight, inputWidth);
<ide> return new Pair<>(retGradient,reshapeEpsilon);
|
|
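The diff above replaces the no-op copy of epsilon with a write into the kernel position recorded by argMax during the forward pass, which is what routes the pooled gradient back to the element that actually produced the maximum. A small self-contained sketch of that idea using plain Java arrays (not the nd4j API; MaxPoolBackpropSketch is a made-up example class):

public class MaxPoolBackpropSketch {
    public static void main(String[] args) {
        double[][] window = { {0.1, 0.9}, {0.4, 0.2} };    // one 2x2 pooling window
        int maxRow = 0, maxCol = 0;                        // forward pass: remember the argmax
        for (int r = 0; r < 2; r++)
            for (int c = 0; c < 2; c++)
                if (window[r][c] > window[maxRow][maxCol]) { maxRow = r; maxCol = c; }

        double epsilon = 0.5;                              // gradient w.r.t. the pooled output
        double[][] grad = new double[2][2];                // backward pass: gradient w.r.t. the window
        grad[maxRow][maxCol] = epsilon;                    // only the winning element receives it

        System.out.println(java.util.Arrays.deepToString(grad)); // [[0.0, 0.5], [0.0, 0.0]]
    }
}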
Java
|
apache-2.0
|
ebe5ff5ed1a361501f5c4e653ad8aa5eb85d7cad
| 0 |
CMPUT301W14T07/Team7Project,CMPUT301W14T07/Team7Project
|
package ca.ualberta.team7project.alertviews;
import android.app.AlertDialog;
import android.app.Dialog;
import android.app.DialogFragment;
import android.content.Context;
import android.content.DialogInterface;
import android.os.Bundle;
import android.text.InputType;
import android.view.LayoutInflater;
import android.view.View;
import android.widget.Button;
import android.widget.EditText;
import ca.ualberta.team7project.MainActivity;
import ca.ualberta.team7project.controllers.ThreadListController;
import ca.ualberta.team7project.views.ThreadListView;
/**
* ThreadAlertView prompts the user to reply to a comment or create a new topic.
* <p>
 * There exist two conditions in which this dialog is called.
* <ul>
* <li> The user is replying to a topic. Click event is called through a ThreadListView item.
* <li> The user is creating a new topic. Click event is called through an ActionBarIcon.
* </ul>
* <p>
* Some of the layout is defined in the builder, while the remainder is in create_thread.xml
* All button clicks are handled with the ThreadListener in ThreadListView
*
* @see ThreadListView.java
* @author raypold
*
*/
public class ThreadAlertView extends DialogFragment
{
private Boolean replying;
private Boolean editing;
private ThreadListController controller;
public interface ThreadAlertListener
{
public void createThread(String title, String comment);
public void insertImage();
}
ThreadAlertListener listener;
public ThreadAlertView()
{
super();
Context mainContext = MainActivity.getMainContext();
this.listener = ((ca.ualberta.team7project.MainActivity)mainContext).getListController().getListView();
controller = ((ca.ualberta.team7project.MainActivity)mainContext).getListController();
replying = ((ca.ualberta.team7project.MainActivity)mainContext).getListController().getInTopic();
editing = ((ca.ualberta.team7project.MainActivity)mainContext).getListController().getEditingTopic();
}
@Override
public Dialog onCreateDialog(Bundle savedInstanceState)
{
/* Create the builder, inflate the layout and set the view to the appropriate xml file */
AlertDialog.Builder builder = new AlertDialog.Builder(getActivity());
LayoutInflater inflator = getActivity().getLayoutInflater();
View v = inflator.inflate(ca.ualberta.team7project.R.layout.create_thread, null);
builder.setView(v);
final EditText titleInput = (EditText) v.findViewById(ca.ualberta.team7project.R.id.thread_title);
final EditText bodyInput = (EditText) v.findViewById(ca.ualberta.team7project.R.id.thread_body);
final Button insertImage = (Button) v.findViewById(ca.ualberta.team7project.R.id.thread_image);
/* User is replying to a topic */
if(replying == true && editing == false)
{
builder.setMessage(ca.ualberta.team7project.R.string.reply_thread);
/* Title is optional */
titleInput.setHint(ca.ualberta.team7project.R.string.enter_title_optional);
titleInput.setInputType(InputType.TYPE_CLASS_TEXT
| InputType.TYPE_TEXT_VARIATION_NORMAL);
//controller.setInTopic(false);
}
/* User is creating a new topic */
else if(replying == false && editing == false)
{
builder.setMessage(ca.ualberta.team7project.R.string.create_thread);
titleInput.setHint(ca.ualberta.team7project.R.string.enter_title);
titleInput.setInputType(InputType.TYPE_CLASS_TEXT
| InputType.TYPE_TEXT_VARIATION_NORMAL);
}
/* User is editing an existing thread or topic */
else if(editing == true)
{
builder.setMessage(ca.ualberta.team7project.R.string.edit_thread);
/* Show existing title */
titleInput.setText(controller.getOpenThread().getTitle());
titleInput.setInputType(InputType.TYPE_CLASS_TEXT
| InputType.TYPE_TEXT_VARIATION_NORMAL);
/* Show existing comment body */
bodyInput.setText(controller.getOpenThread().getComment());
bodyInput.setInputType(InputType.TYPE_CLASS_TEXT
| InputType.TYPE_TEXT_VARIATION_NORMAL);
//controller.setEditingTopic(false);
}
/* User wishes to insert an image. Show a new prompt with image selection options */
insertImage.setOnClickListener(new Button.OnClickListener(){
@Override
public void onClick(View v)
{
/*
* This is going to be a tricky bit of code.
*
				 * We will have to hide this dialog box, open up a new one to select the image and then reopen this current
* dialog box after the user has chosen the image.
*/
listener.insertImage();
}
});
/* Exit out of prompt through cancel or confirm buttons */
builder.setCancelable(true);
builder.setPositiveButton(
ca.ualberta.team7project.R.string.confirm,
new DialogInterface.OnClickListener()
{
public void onClick(DialogInterface dialog, int id)
{
String title = titleInput.getText().toString();
String body = bodyInput.getText().toString();
listener.createThread(title, body);
}
});
builder.setNegativeButton(
ca.ualberta.team7project.R.string.cancel,
new DialogInterface.OnClickListener()
{
public void onClick(DialogInterface dialog, int id)
{
// Nothing needs to happen if user selects cancel.
}
});
return builder.create();
}
}
|
Team7Project/src/ca/ualberta/team7project/alertviews/ThreadAlertView.java
|
package ca.ualberta.team7project.alertviews;
import android.app.AlertDialog;
import android.app.Dialog;
import android.app.DialogFragment;
import android.content.Context;
import android.content.DialogInterface;
import android.os.Bundle;
import android.text.InputType;
import android.view.LayoutInflater;
import android.view.View;
import android.widget.Button;
import android.widget.EditText;
import ca.ualberta.team7project.MainActivity;
import ca.ualberta.team7project.controllers.ThreadListController;
import ca.ualberta.team7project.views.ThreadListView;
/**
* ThreadAlertView prompts the user to reply to a comment or create a new topic.
* <p>
 * There exist two conditions in which this dialog is called.
* <ul>
* <li> The user is replying to a topic. Click event is called through a ThreadListView item.
* <li> The user is creating a new topic. Click event is called through an ActionBarIcon.
* </ul>
* <p>
* Some of the layout is defined in the builder, while the remainder is in create_thread.xml
* All button clicks are handled with the ThreadListener in ThreadListView
*
* @see ThreadListView.java
* @author raypold
*
*/
public class ThreadAlertView extends DialogFragment
{
private Boolean replying;
private Boolean editing;
private ThreadListController controller;
public interface ThreadAlertListener
{
public void createThread(String title, String comment);
public void insertImage();
}
ThreadAlertListener listener;
public ThreadAlertView()
{
super();
Context mainContext = MainActivity.getMainContext();
this.listener = ((ca.ualberta.team7project.MainActivity)mainContext).getListController().getListView();
controller = ((ca.ualberta.team7project.MainActivity)mainContext).getListController();
replying = ((ca.ualberta.team7project.MainActivity)mainContext).getListController().getInTopic();
editing = ((ca.ualberta.team7project.MainActivity)mainContext).getListController().getEditingTopic();
}
@Override
public Dialog onCreateDialog(Bundle savedInstanceState)
{
/* Create the builder, inflate the layout and set the view to the appropriate xml file */
AlertDialog.Builder builder = new AlertDialog.Builder(getActivity());
LayoutInflater inflator = getActivity().getLayoutInflater();
View v = inflator.inflate(ca.ualberta.team7project.R.layout.create_thread, null);
builder.setView(v);
final EditText titleInput = (EditText) v.findViewById(ca.ualberta.team7project.R.id.thread_title);
final EditText bodyInput = (EditText) v.findViewById(ca.ualberta.team7project.R.id.thread_body);
final Button insertImage = (Button) v.findViewById(ca.ualberta.team7project.R.id.thread_image);
/* User is replying to a topic */
if(replying == true & editing == false)
{
builder.setMessage(ca.ualberta.team7project.R.string.reply_thread);
/* Title is optional */
titleInput.setHint(ca.ualberta.team7project.R.string.enter_title_optional);
titleInput.setInputType(InputType.TYPE_CLASS_TEXT
| InputType.TYPE_TEXT_VARIATION_NORMAL);
//controller.setInTopic(false);
}
/* User is creating a new topic */
else if(replying == false & editing == false)
{
builder.setMessage(ca.ualberta.team7project.R.string.create_thread);
titleInput.setHint(ca.ualberta.team7project.R.string.enter_title);
titleInput.setInputType(InputType.TYPE_CLASS_TEXT
| InputType.TYPE_TEXT_VARIATION_NORMAL);
}
/* User is editing an existing thread or topic */
else if(replying == false & editing == true)
{
builder.setMessage(ca.ualberta.team7project.R.string.edit_thread);
/* Show existing title */
titleInput.setText(controller.getOpenThread().getTitle());
titleInput.setInputType(InputType.TYPE_CLASS_TEXT
| InputType.TYPE_TEXT_VARIATION_NORMAL);
/* Show existing comment body */
bodyInput.setText(controller.getOpenThread().getComment());
bodyInput.setInputType(InputType.TYPE_CLASS_TEXT
| InputType.TYPE_TEXT_VARIATION_NORMAL);
//controller.setEditingTopic(false);
}
/* User wishes to insert an image. Show a new prompt with image selection options */
insertImage.setOnClickListener(new Button.OnClickListener(){
@Override
public void onClick(View v)
{
/*
* This is going to be a tricky bit of code.
*
				 * We will have to hide this dialog box, open up a new one to select the image and then reopen this current
* dialog box after the user has chosen the image.
*/
listener.insertImage();
}
});
/* Exit out of prompt through cancel or confirm buttons */
builder.setCancelable(true);
builder.setPositiveButton(
ca.ualberta.team7project.R.string.confirm,
new DialogInterface.OnClickListener()
{
public void onClick(DialogInterface dialog, int id)
{
String title = titleInput.getText().toString();
String body = bodyInput.getText().toString();
listener.createThread(title, body);
}
});
builder.setNegativeButton(
ca.ualberta.team7project.R.string.cancel,
new DialogInterface.OnClickListener()
{
public void onClick(DialogInterface dialog, int id)
{
// Nothing needs to happen if user selects cancel.
}
});
return builder.create();
}
}
|
fixed up logic for edit thread alert view
|
Team7Project/src/ca/ualberta/team7project/alertviews/ThreadAlertView.java
|
fixed up logic for edit thread alert view
|
<ide><path>eam7Project/src/ca/ualberta/team7project/alertviews/ThreadAlertView.java
<ide> final Button insertImage = (Button) v.findViewById(ca.ualberta.team7project.R.id.thread_image);
<ide>
<ide> /* User is replying to a topic */
<del> if(replying == true & editing == false)
<add> if(replying == true && editing == false)
<ide> {
<ide> builder.setMessage(ca.ualberta.team7project.R.string.reply_thread);
<ide>
<ide>
<ide> }
<ide> /* User is creating a new topic */
<del> else if(replying == false & editing == false)
<add> else if(replying == false && editing == false)
<ide> {
<ide> builder.setMessage(ca.ualberta.team7project.R.string.create_thread);
<ide>
<ide>
<ide> }
<ide> /* User is editing an existing thread or topic */
<del> else if(replying == false & editing == true)
<add> else if(editing == true)
<ide> {
<ide> builder.setMessage(ca.ualberta.team7project.R.string.edit_thread);
<ide>
|
|
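The diff above swaps the non-short-circuiting '&' for '&&' and lets the editing branch fire whenever editing is true, regardless of replying. On boolean operands both operators yield the same truth value, but '&' always evaluates its right-hand side while '&&' stops as soon as the left side is false. A stand-alone illustration (not project code; ShortCircuitSketch and rightSide are made up for the example):

public class ShortCircuitSketch {
    static boolean rightSide() {
        System.out.println("right-hand side evaluated");
        return true;
    }

    public static void main(String[] args) {
        boolean replying = false;
        System.out.println(replying & rightSide());   // evaluates rightSide(), prints the message, then false
        System.out.println(replying && rightSide());  // skips rightSide(), prints only false
    }
}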
JavaScript
|
mit
|
104124b51802ab2575046dcab07c138518ac5cab
| 0 |
NCSU-Libraries/custom-modules,NCSU-Libraries/custom-modules
|
var d = {
init : function(){
d.catkey_raw = Drupal.settings.sirsi_parser.catkey;
$.ajax({
url: Drupal.settings.sirsi_parser.base_url + '/sites/default/files/techlending/devices_data/' + d.catkey_raw +'.json',
error: function (e, t, n) {
console.log(t + ": " + n);
console.log(e);
},
success: function (e, t, n) {
console.log(Drupal.settings.sirsi_parser.base_url + '/sites/default/files/techlending/devices_data/' + d.catkey_raw +'.json')
d.data = e;
// make sure there is actual data for device
if(e.buildings){
d.compileHTML();
}
}
});
},
compileHTML : function(){
var p = d.data.buildings;
// build html string
d.str = '<div class="available">';
d.str += '<table>';
d.str += '<thead>';
d.str += '<tr>';
d.str += '<th>Library</th>';
d.str += '<th>Lending Period</th>';
d.str += '<th>Available for Checkout</th>';
d.str += '</tr>';
d.str += '</thead>';
d.str += '<tbody>';
// loop through to get buildings and lending periods
for (var key in p) {
if (p.hasOwnProperty(key)) {
var building = d.getBuilding(key);
var lendPeriod = p[key]['lending-periods'];
for(var lend in lendPeriod){
var total = lendPeriod[lend]['total'];
var available = lendPeriod[lend]['techlend'];
if(!available){
var checkedout = lendPeriod[lend]['checkedout'];
available = checkedout - total;
}
d.str += '<tr class="building">';
d.str += '<td>'+building+'</td>';
d.str += '<td>'+d.getLendPeriod(lend)+'</td>';
d.str += '<td>'+available+' of '+total+'</td>';
d.str += '</tr>';
}
}
}
d.str += '</tbody>';
d.str += '</table>';
d.str += '</div>';
$(".hb").html(d.str);
},
getBuilding : function(val){
switch (val) {
case "hunt":
return "James B. Hunt Jr. Library";
case "dhhill":
return "D. H. Hill Library";
case "design":
return "Design Library";
case "vetmed":
return "Veternary Medicine Library";
case "nrl":
return "Natural Resources Library";
case "textiles":
return "Textiles Library";
default:
break;
}
},
getLendPeriod : function(val){
switch (val) {
case "equip-4hr":
return "4 hour";
case "equ-4h-low":
return "4 hour";
case "laptop-pat":
return "4 hour";
case "equip-1day":
return "1 day";
case "equ-1w-low":
return "1 week";
case "equip-1wk":
return "1 week";
case "laptop-any":
return "2 hour";
default:
break;
}
}
}
$(function(){
d.init();
})
|
sirsi_parser/js/device-template.js
|
Drupal.behaviors.sirsi_parser = {
attach : function() {
(function($) {
var catkey_raw = Drupal.settings.sirsi_parser.catkey;
var device_template = Handlebars.compile([
'{{#if buildings}}',
'<div class="available">',
'<table>',
'<thead>',
'<tr>',
'<th>Library</th>',
'<th>Lending Period</th>',
'<th>Available for Checkout</th>',
'</tr>',
'</thead>',
'<tbody>',
'{{#each buildings}}',
'<tr class="building">',
'<td>{{library @key}}</td>',
'{{#each lending-periods}}',
'<td class="lending">{{lend @key}}</td> {{! the lending period array key }}',
'<td class="availability">',
'{{#unless this.techlend}}',
'0 of {{this.total}}</td>',
'{{/unless}}',
'{{#if this.techlend}}',
'{{this.techlend}} of {{this.total}}</td>',
'{{/if}}',
'</tr>',
'{{/each}}',
'{{/each}}',
'</tbody>',
'</table>',
'</div>',
'{{/if}}'
].join(''));
// Do the AJAX load of the device's availability in Sirsi, output according
// to a few Handlebars helpers.
$.ajax({
url: Drupal.settings.sirsi_parser.base_url + '/sites/default/files/techlending/devices_data/' + catkey_raw +'.json',
error: function (e, t, n) {
console.log(t + ": " + n);
console.log(e);
},
success: function (e, t, n) {
console.log(Drupal.settings.sirsi_parser.base_url + '/sites/default/files/techlending/devices_data/' + catkey_raw +'.json')
jQuery(".hb").html(device_template(e));
}
});
Handlebars.registerHelper("library", function (e) {
switch (e) {
case "hunt":
return "James B. Hunt Jr. Library";
case "dhhill":
return "D. H. Hill Library";
case "design":
return "Design Library";
case "vetmed":
return "Veternary Medicine Library";
case "nrl":
return "Natural Resources Library";
default:
break;
}
});
Handlebars.registerHelper("lend", function (e) {
switch (e) {
case "equip-4hr":
case "equ-4h-low":
case "laptop-pat":
return "4 hour";
case "equip-1day":
return "1 day";
case "equip-1wk":
return "1 week";
case "laptop-any":
return "2 hour";
default:
// ga('send', 'event', 'techlending errors', 'lending period error for '+ document.URL.slice(document.URL.lastIndexOf('/')+1),e, undefined, true);
// _gaq = _gaq || [];
// _gaq.push(
// ['_setAccount', 'UA-17138302-1'],
// ['_trackEvent', 'techlending errors', 'lending period error for '+ document.URL.slice(document.URL.lastIndexOf('/')+1), e, undefined, true]
// );
break;
}
});
})(jQuery);
}
};
|
device availability fix
|
sirsi_parser/js/device-template.js
|
device availability fix
|
<ide><path>irsi_parser/js/device-template.js
<del>Drupal.behaviors.sirsi_parser = {
<del> attach : function() {
<del> (function($) {
<del> var catkey_raw = Drupal.settings.sirsi_parser.catkey;
<del> var device_template = Handlebars.compile([
<del> '{{#if buildings}}',
<del> '<div class="available">',
<del> '<table>',
<del> '<thead>',
<del> '<tr>',
<del> '<th>Library</th>',
<del> '<th>Lending Period</th>',
<del> '<th>Available for Checkout</th>',
<del> '</tr>',
<del> '</thead>',
<del> '<tbody>',
<del> '{{#each buildings}}',
<del> '<tr class="building">',
<del> '<td>{{library @key}}</td>',
<del> '{{#each lending-periods}}',
<del> '<td class="lending">{{lend @key}}</td> {{! the lending period array key }}',
<del> '<td class="availability">',
<del> '{{#unless this.techlend}}',
<del> '0 of {{this.total}}</td>',
<del> '{{/unless}}',
<del> '{{#if this.techlend}}',
<del> '{{this.techlend}} of {{this.total}}</td>',
<del> '{{/if}}',
<del> '</tr>',
<del> '{{/each}}',
<del> '{{/each}}',
<del> '</tbody>',
<del> '</table>',
<del> '</div>',
<del> '{{/if}}'
<del> ].join(''));
<add>var d = {
<add> init : function(){
<add> d.catkey_raw = Drupal.settings.sirsi_parser.catkey;
<ide>
<del> // Do the AJAX load of the device's availability in Sirsi, output according
<del> // to a few Handlebars helpers.
<del> $.ajax({
<del> url: Drupal.settings.sirsi_parser.base_url + '/sites/default/files/techlending/devices_data/' + catkey_raw +'.json',
<add> $.ajax({
<add> url: Drupal.settings.sirsi_parser.base_url + '/sites/default/files/techlending/devices_data/' + d.catkey_raw +'.json',
<ide> error: function (e, t, n) {
<ide> console.log(t + ": " + n);
<ide> console.log(e);
<ide> },
<ide> success: function (e, t, n) {
<del> console.log(Drupal.settings.sirsi_parser.base_url + '/sites/default/files/techlending/devices_data/' + catkey_raw +'.json')
<del> jQuery(".hb").html(device_template(e));
<add> console.log(Drupal.settings.sirsi_parser.base_url + '/sites/default/files/techlending/devices_data/' + d.catkey_raw +'.json')
<add> d.data = e;
<add> // make sure there is actual data for device
<add> if(e.buildings){
<add> d.compileHTML();
<add> }
<ide> }
<del> });
<del> Handlebars.registerHelper("library", function (e) {
<del> switch (e) {
<del> case "hunt":
<del> return "James B. Hunt Jr. Library";
<del> case "dhhill":
<del> return "D. H. Hill Library";
<del> case "design":
<del> return "Design Library";
<del> case "vetmed":
<del> return "Veternary Medicine Library";
<del> case "nrl":
<del> return "Natural Resources Library";
<del> default:
<del> break;
<del> }
<ide> });
<del> Handlebars.registerHelper("lend", function (e) {
<del> switch (e) {
<add> },
<add>
<add> compileHTML : function(){
<add> var p = d.data.buildings;
<add> // build html string
<add> d.str = '<div class="available">';
<add> d.str += '<table>';
<add> d.str += '<thead>';
<add> d.str += '<tr>';
<add> d.str += '<th>Library</th>';
<add> d.str += '<th>Lending Period</th>';
<add> d.str += '<th>Available for Checkout</th>';
<add> d.str += '</tr>';
<add> d.str += '</thead>';
<add> d.str += '<tbody>';
<add>
<add> // loop through to get buildings and lending periods
<add> for (var key in p) {
<add> if (p.hasOwnProperty(key)) {
<add> var building = d.getBuilding(key);
<add> var lendPeriod = p[key]['lending-periods'];
<add> for(var lend in lendPeriod){
<add> var total = lendPeriod[lend]['total'];
<add> var available = lendPeriod[lend]['techlend'];
<add> if(!available){
<add> var checkedout = lendPeriod[lend]['checkedout'];
<add> available = checkedout - total;
<add> }
<add> d.str += '<tr class="building">';
<add> d.str += '<td>'+building+'</td>';
<add> d.str += '<td>'+d.getLendPeriod(lend)+'</td>';
<add> d.str += '<td>'+available+' of '+total+'</td>';
<add> d.str += '</tr>';
<add> }
<add>
<add> }
<add> }
<add>
<add> d.str += '</tbody>';
<add> d.str += '</table>';
<add> d.str += '</div>';
<add>
<add> $(".hb").html(d.str);
<add> },
<add>
<add> getBuilding : function(val){
<add> switch (val) {
<add> case "hunt":
<add> return "James B. Hunt Jr. Library";
<add> case "dhhill":
<add> return "D. H. Hill Library";
<add> case "design":
<add> return "Design Library";
<add> case "vetmed":
<add> return "Veternary Medicine Library";
<add> case "nrl":
<add> return "Natural Resources Library";
<add> case "textiles":
<add> return "Textiles Library";
<add> default:
<add> break;
<add> }
<add> },
<add>
<add> getLendPeriod : function(val){
<add> switch (val) {
<ide> case "equip-4hr":
<add> return "4 hour";
<ide> case "equ-4h-low":
<add> return "4 hour";
<ide> case "laptop-pat":
<ide> return "4 hour";
<ide> case "equip-1day":
<ide> return "1 day";
<add> case "equ-1w-low":
<add> return "1 week";
<ide> case "equip-1wk":
<ide> return "1 week";
<ide> case "laptop-any":
<ide> return "2 hour";
<ide> default:
<del> // ga('send', 'event', 'techlending errors', 'lending period error for '+ document.URL.slice(document.URL.lastIndexOf('/')+1),e, undefined, true);
<del> // _gaq = _gaq || [];
<del> // _gaq.push(
<del> // ['_setAccount', 'UA-17138302-1'],
<del> // ['_trackEvent', 'techlending errors', 'lending period error for '+ document.URL.slice(document.URL.lastIndexOf('/')+1), e, undefined, true]
<del> // );
<ide> break;
<del> }
<del> });
<del> })(jQuery);
<del> }
<del>};
<add> }
<add> }
<add>}
<add>
<add>$(function(){
<add> d.init();
<add>})
<ide>
<ide>
<add>
<add>
|
|
JavaScript
|
mit
|
5a35a79d157eb2a5ddc91e923b94eb67490bc12b
| 0 |
yuanying/WebIRC,yuanying/WebIRC
|
var request
var connections = new Object()
var last_read = new Object()
var current = new Object()
var bookmarks = new Object()
var jar = new CookieJar({expires:10080, path:"/"});
var input_history = new Object()
input_history["data"] = new Array()
input_history["position"] = -1
var tab_completion = new Object()
tab_completion["repeat"] = false
tab_completion["search_term"] = null
tab_completion["user_lookup"] = new Array()
tab_completion["response"] = false
tab_completion["last_match"] = 0
const PRIVMSG = "p"
const ACTION = "a"
const NOTICE = "n"
const JOIN = "j"
const SELF_JOIN = "J"
const PART = "l"
const SELF_PART = "L"
const KICK = "k"
const QUIT = "q"
const TOPIC = "t"
const NICK = ">"
const SERVER = "-"
const SERVER_ERROR = "*"
const CLIENT_ERROR = "!"
const MODE = "m"
const CTCP = "c"
window.onresize = function() {
update_activity_width()
}
window.onfocus = function() {
$("msg").focus()
}
function update_activity_width() {
if (current.target && connections[current.connection_id].targets[current.target].is_channel) {
$("activity").style.width = document.body.offsetWidth - 417 + "px"
} else {
$("activity").style.width = document.body.offsetWidth - 247 + "px"
}
}
function init() {
var stored_bookmarks = jar.get("bookmarks")
if (stored_bookmarks) {
bookmarks = stored_bookmarks
}
hide("users")
request = get("all", first_time_irc_handler)
$("server_name").focus()
setInterval("update_request()", 10000)
update_activity_width()
}
function send_msg(text) {
input_history.data[input_history.data.length] = text
input_history.position = input_history.data.length
if (text.indexOf("/") == 0) {
var command = double_arg(text)
if (command) {
input_command(command.first.toUpperCase(), command.remainder)
} else {
if (current.connection_id) {
switch (text.toUpperCase()) {
case "/PART":
if (current.target) {
part_request(current.connection_id, current.target)
}
break
case "/QUIT":
close_request(current.connection_id, null)
break
case "/CLOSE":
if (current.target && connections[current.connection_id].targets[current.target]) {
if (connections[current.connection_id].targets[current.target].is_channel) {
part_request(current.connection_id, current.target)
} else {
close_request(current.connection_id, current.target)
}
}
break
case "/MOTD":
command_request(current.connection_id, "MOTD", 1)
break
}
}
}
} else {
send_privmsg(text, false)
}
}
function send_privmsg(text, action) {
if (current.target) {
if (text != "") {
privmsg_request(current.connection_id, current.target, text, action)
}
} else {
local_error("Only commands may be entered when viewing the server log")
}
}
function input_command(cmd, param) {
if (current.connection_id) {
switch (cmd) {
case "/ME":
if (param) {
send_privmsg(param, true)
}
break
case "/NICK":
command_request(current.connection_id, "NICK " + param, 1)
break
case "/MSG":
var command = double_arg(param)
if (command) {
if (!command.first.match(/^[&#!+.~]/)) {
privmsg_request(current.connection_id, command.first, command.remainder, false)
} else {
local_error("Outside messages have been prevented by the local client")
}
} else {
local_error("Usage is: /msg ≪user≫ ≪text≫")
}
break
case "/NOTICE":
var command = double_arg(param)
if (command) {
if (!command.first.match(/^[&#!+.~]/)) {
notice_request(current.connection_id, command.first, command.remainder, false)
} else {
local_error("Outside notices have been prevented by the local client")
}
} else {
local_error("Usage is: /notice ≪user≫ ≪text≫")
}
break
case "/JOIN":
join_request(current.connection_id, param)
break
case "/PART":
part_request(current.connection_id, param)
break
case "/WHOIS":
whois_user(current.connection_id, param)
break
case "/TOPIC":
command_channel_check("TOPIC " + current.target + " :" + param, 1)
break
case "/OP":
current_channel_mode_change("+o", param)
break
case "/DEOP":
current_channel_mode_change("-o", param)
break
case "/VOICE":
current_channel_mode_change("+v", param)
break
case "/DEVOICE":
current_channel_mode_change("-v", param)
break
case "/USER":
click_on_user(current.connection_id, param)
break
case "/MODE":
var command = double_arg(param)
if (command) {
current_channel_mode_change(command.first, command.remainder)
} else {
current_channel_mode_change(param, "")
}
break
case "/RAW":
command_request(current.connection_id, param, 1)
break
}
}
}
function current_channel_mode_change(mode, param) {
command_channel_check("MODE " + current.target + " " + mode + " :" + param, 2)
}
function command_channel_check(command, wait) {
if (current.connection_id && current.target && connections[current.connection_id] && connections[current.connection_id].targets[current.target]) {
if (connections[current.connection_id].targets[current.target].is_channel) {
command_request(current.connection_id, command, wait)
}
}
}
function double_arg(text) {
if (text) {
var parsed_string = text.match(/^(\S+)\s?(.*)?$/)
if (parsed_string[1] && parsed_string[2]) {
var out_strings = new Object()
out_strings["first"] = parsed_string[1]
out_strings["remainder"] = parsed_string[2]
return out_strings
} else {
return null
}
} else {
return null
}
}
const LEFT_CURSOR = 37
const UP_CURSOR = 38
const RIGHT_CURSOR = 39
const DOWN_CURSOR = 40
const DELETE = 8
const TAB = 9
function detect_keypress(event) {
if (!event.ctrlKey && event.which == TAB) {
if ((tab_completion.repeat && tab_completion.response) || $("msg").value.match(/^\S+$/)) {
tab_completion.response = true
} else {
tab_completion.response = false
}
if (tab_completion.response) {
$("msg").value = complete_nick()
} else {
$("msg").value = $("msg").value.gsub(/(\S+)$/, function(match) {return complete_nick()})
}
return false
} else {
tab_completion.repeat = false
}
if (event.ctrlKey) {
switch (event.which) {
case LEFT_CURSOR:
goto_prev_server()
return false
break
case UP_CURSOR:
goto_prev_target()
return false
break
case RIGHT_CURSOR:
goto_next_server()
return false
break
case DOWN_CURSOR:
goto_next_target()
return false
break
case DELETE:
change_to(current.connection_id, null)
return false
break
}
} else {
switch (event.which) {
case UP_CURSOR:
prev_history()
return false
break
case DOWN_CURSOR:
next_history()
return false
break
}
}
return true
}
function goto_prev_server() {
var new_connection_id = move_in_element(current.connection_id, connections, -1)
change_to(new_connection_id, null)
}
function goto_prev_target() {
var new_target = move_in_element(current.target, connections[current.connection_id].targets, -1)
change_to(current.connection_id, new_target)
}
function goto_next_server() {
var new_connection_id = move_in_element(current.connection_id, connections, 1)
change_to(new_connection_id, null)
}
function goto_next_target() {
var new_target = move_in_element(current.target, connections[current.connection_id].targets, 1)
change_to(current.connection_id, new_target)
}
function complete_nick() {
if (tab_completion.repeat) {
var match = tab_complete_lookup(tab_completion.last_match + 1)
if (match) {
if (tab_completion.response) {
return match + ": "
} else {
return match
}
} else {
tab_completion.repeat = false
return tab_completion.search_term
}
} else {
var search_term = $("msg").value.match(/(\S+)$/)
if (search_term) {
tab_completion.user_lookup = combined_users(current.connection_id, current.target)
tab_completion.search_term = search_term[1].toLowerCase()
tab_completion.repeat = true
var match = tab_complete_lookup(0)
if (match) {
if (tab_completion.response) {
return match + ": "
} else {
return match
}
} else {
tab_completion.repeat = false
return tab_completion.search_term
}
}
}
}
function tab_complete_lookup(from) {
for (var i = from; i < tab_completion.user_lookup.length; i++) {
if (tab_completion.user_lookup[i].toLowerCase().indexOf(tab_completion.search_term) == 0) {
tab_completion.last_match = i
return tab_completion.user_lookup[i]
}
}
return null
}
function combined_users(connection_id, target) {
if (connections[connection_id].targets[target] && connections[connection_id].targets[target].is_channel) {
return connections[connection_id].targets[target].opers.concat(connections[connection_id].targets[target].voicers, connections[connection_id].targets[target].users)
}
return new Array()
}
function prev_history() {
input_history.position = Math.max(input_history.position - 1, 0)
if (input_history.data[input_history.position]) {
$("msg").value = input_history.data[input_history.position]
}
}
function next_history() {
input_history.position = Math.min(input_history.position + 1, input_history.data.length)
if (input_history.data[input_history.position]) {
$("msg").value = input_history.data[input_history.position]
} else {
$("msg").value = ""
}
}
function build_array(parent) {
var element_array = new Array()
for (element in parent) {
if (parent[element]) {
element_array[element_array.length] = element
}
}
return element_array
}
function move_in_array(reference, array_of_items, position) {
if (reference) {
var current_location = array_of_items.indexOf(reference)
if (current_location != -1) {
var new_position = current_location + position
if (new_position >= array_of_items.length) {
return array_of_items[new_position - array_of_items.length]
} else if (new_position < 0) {
return array_of_items[new_position + array_of_items.length]
} else {
return array_of_items[new_position]
}
} else {
return null
}
} else {
return array_of_items[0]
}
}
function move_in_element(reference, element, position) {
return move_in_array(reference, build_array(element), position)
}
function create_target_element(connection_id, target_name, is_channel) {
connections[connection_id].targets[target_name] = new Object()
connections[connection_id].targets[target_name]["is_channel"] = is_channel
connections[connection_id].targets[target_name]["div_activity"] = create_div()
if (is_channel) {
connections[connection_id].open_channels.push(target_name)
var activity_header = create_activity_div("activity_header", "Channel activity for " + target_name)
connections[connection_id].targets[target_name]["div_users"] = create_div()
} else {
connections[connection_id].open_privmsgs.push(target_name)
var activity_header = create_activity_div("activity_header", "Private messages with " + target_name)
}
div_activity(connection_id, target_name).appendChild(activity_header)
connections[connection_id].targets[target_name]["div_target"] = create_div("connection_item clickable")
connections[connection_id].targets[target_name]["div_target"].setAttribute("onclick", "change_to(\"" + connection_id + "\", \"" + target_name + "\")")
var name = create_div("target")
name.textContent = is_channel ? target_name : "@" + target_name
var close_button = create_div("close_button")
close_button.setAttribute("onclick", "close_window(\"" + connection_id + "\", \"" + target_name + "\")")
connections[connection_id].targets[target_name]["div_unread"] = create_div("small unread_count")
clear_unread(connection_id, target_name, false)
connections[connection_id].targets[target_name].div_target.appendChild(name)
connections[connection_id].targets[target_name].div_target.appendChild(close_button)
connections[connection_id].targets[target_name].div_target.appendChild(connections[connection_id].targets[target_name].div_unread)
connections[connection_id].div_group.appendChild(connections[connection_id].targets[target_name].div_target)
}
function create_connection_element(connection_id, connection_count, connection_name, connection_port, nickname, real_name, last_activity) {
last_read[connection_id] = -1
connections[connection_id] = new Object()
connections[connection_id]["open_channels"] = new Array()
connections[connection_id]["open_privmsgs"] = new Array()
connections[connection_id]["targets"] = new Object()
connections[connection_id]["connection_count"] = connection_count
connections[connection_id]["server_name"] = connection_name
connections[connection_id]["server_port"] = connection_port
connections[connection_id]["nickname"] = nickname
connections[connection_id]["real_name"] = real_name
connections[connection_id]["last_activity"] = last_activity
connections[connection_id]["div_meta_group"] = create_div()
connections[connection_id]["div_group"] = create_div()
connections[connection_id]["join_button"] = create_div("tiny clickable header join_button with_hover")
connections[connection_id]["join_channel"] = create_div()
connections[connection_id].join_channel.style.display = "none"
connections[connection_id]["join_input"] = create_element("input", "regular join_input")
connections[connection_id]["join_input"].setAttribute("onkeypress", "if (event.which == 10 || event.which == 13) {join_input(\"" + connection_id + "\")}")
var close_button = create_div("tiny clickable header join_button with_hover cancel_button")
close_button.textContent = "Cancel"
close_button.setAttribute("onclick", "cancel_join(\"" + connection_id + "\")")
connections[connection_id].join_channel.appendChild(connections[connection_id]["join_input"])
connections[connection_id].join_channel.appendChild(close_button)
connections[connection_id].join_button.textContent = "Join channel..."
connections[connection_id].join_button.setAttribute("onclick", "join(\"" + connection_id + "\")")
connections[connection_id].div_meta_group.appendChild(connections[connection_id].div_group)
connections[connection_id].div_meta_group.appendChild(connections[connection_id].join_button)
connections[connection_id].div_meta_group.appendChild(connections[connection_id].join_channel)
$("connections").appendChild(connections[connection_id].div_meta_group)
add_connection_server(connection_id, connection_name)
}
function add_connection_server(connection_id, target_name) {
connections[connection_id]["div_activity"] = create_div()
connections[connection_id].div_activity.appendChild(create_activity_div("activity_header", "IRC Server connection with " + connections[connection_id].server_name))
connections[connection_id]["div_server"] = create_div("connection_item clickable")
connections[connection_id].div_server.setAttribute("onclick", "change_to(\"" + connection_id + "\")")
var name = create_div("server")
name.textContent = target_name
var close_button = create_div("close_button")
close_button.setAttribute("onclick", "disconnect(\"" + connection_id + "\")")
connections[connection_id]["div_unread"] = create_div("small unread_count")
clear_unread(connection_id, null, false)
connections[connection_id].div_server.appendChild(name)
connections[connection_id].div_server.appendChild(close_button)
connections[connection_id].div_server.appendChild(connections[connection_id].div_unread)
connections[connection_id].div_group.appendChild(connections[connection_id].div_server)
}
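// Switches the main view to the given connection and target: swaps the activity pane, updates the
// topic, title, user list and unread bookmark; called with no arguments it shows the new-connection form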
function change_to(connection_id, target) {
if (current.connection_id) {
remove_all($("activity"))
div_connection_item(current.connection_id, current.target).className = "connection_item clickable"
} else {
hide("new_connection")
show("activity")
show("msg")
}
if (connection_id) {
$("activity").appendChild(div_activity(connection_id, target))
div_connection_item(connection_id, target).className = "connection_item selected"
show("activity")
scroll_to_bottom()
$("msg").focus()
clear_unread(connection_id, target, true)
} else {
show("new_connection")
hide("activity")
$("server_name").focus()
hide("msg")
}
update_title(connection_id, target)
update_topic(connection_id, target)
show_or_hide_users(connection_id, target)
update_activity(connection_id, target)
current["connection_id"] = connection_id
current["target"] = target
update_activity_width()
jar.put("current", current)
}
function show_or_hide_users(connection_id, target) {
if (connection_id && target && connections[connection_id].targets[target].is_channel) {
remove_all($("users"))
update_user_list(connection_id, target)
$("users").appendChild(connections[connection_id].targets[target].div_users)
show("users")
} else {
hide("users")
}
}
function update_user_list(connection_id, channel) {
if (connections[connection_id].targets[channel].is_channel) {
remove_all(connections[connection_id].targets[channel].div_users)
var opers = connections[connection_id].targets[channel].opers.sort()
var voicers = connections[connection_id].targets[channel].voicers.sort()
var users = connections[connection_id].targets[channel].users.sort()
for (var i = 0; i < opers.length; i++) { add_oper(connection_id, channel, opers[i]) }
for (var i = 0; i < voicers.length; i++) { add_voicer(connection_id, channel, voicers[i]) }
for (var i = 0; i < users.length; i++) { add_user(connection_id, channel, users[i]) }
}
}
function add_user_element(connection_id, channel, name, type) {
var div_op = create_div("user " + type)
var div_span = create_element("span", "clickable with_hover")
div_span.setAttribute("onclick", "click_on_user(\"" + connection_id + "\", \"" + name + "\")")
div_span.textContent = name
var div_whois = create_element("span", "small user_menu clickable")
div_whois.innerHTML = "?"
div_whois.setAttribute("onclick", "whois_user(\"" + connection_id + "\", \"" + name + "\")")
div_op.appendChild(div_span)
div_op.appendChild(div_whois)
connections[connection_id].targets[channel].div_users.appendChild(div_op)
}
function click_on_user(connection_id, user) {
if (!connections[connection_id].targets[user.toLowerCase()]) {
new_window_request(connection_id, user)
} else {
change_to(connection_id, user.toLowerCase())
}
}
function whois_user(connection_id, user) {
command_request(connection_id, "WHOIS " + user, 1)
change_to(connection_id)
}
function add_oper(connection_id, channel, name) {
add_user_element(connection_id, channel, name, "op")
}
function add_voicer(connection_id, channel, name) {
add_user_element(connection_id, channel, name, "voice")
}
function add_user(connection_id, channel, name) {
add_user_element(connection_id, channel, name, "")
}
function update_topic(connection_id, target) {
if (target) {
if (!connections[connection_id].targets[target].is_channel) {
$("topic").textContent = "Private messages with " + target
} else {
if (connections[connection_id].targets[target].topic) {
$("topic").textContent = target + " - " + connections[connection_id].targets[target].topic + topic_appendix(connections[connection_id].targets[target].topic_creator, connections[connection_id].targets[target].topic_creation_time)
linkify($("topic"))
} else {
$("topic").textContent = target
}
}
} else {
if (connection_id) {
$("topic").textContent = "Server log for " + connections[connection_id].server_name
} else {
$("topic").textContent = "Create a new connection"
}
}
}
function topic_appendix(creator, creation_time) {
return ((creator && creation_time) ? " (" + creator + ", " + get_brief_date(creation_time) + ")" : "")
}
function update_title(connection_id, target) {
if (connection_id) {
if (target) {
set_title(target + ", " + connections[connection_id].server_name + ":" + connections[connection_id].server_port + ", " + connections[connection_id].nickname + " (" + connections[connection_id].real_name + ")")
} else {
set_title(connections[connection_id].server_name + ":" + connections[connection_id].server_port + ", " + connections[connection_id].nickname + " (" + connections[connection_id].real_name + ")")
}
} else {
set_title("Web IRC")
}
}
function update_activity(connection_id, target) {
if (connection_id) {
if (target) {
set_activity(connections[connection_id].targets[target].last_activity)
} else {
set_activity(connections[connection_id].last_activity)
}
} else {
set_activity()
}
}
function set_activity(time) {
if (time) {
$("activity_info").textContent = "Last activity: " + timestamp_long(time)
} else {
$("activity_info").textContent = ""
}
}
function set_title(text) {
if (text) {
$("title_text").textContent = text
} else {
$("title_text").textContent = ""
}
}
function first_time_irc_handler(event) {
if (request_done()) {
irc_handler(event, true)
var stored_current = jar.get("current")
if (stored_current && connections[stored_current.connection_id]) {
if (!stored_current.target || connections[stored_current.connection_id].targets[stored_current.target]) {
change_to(stored_current.connection_id, stored_current.target)
}
}
}
}
function irc_handler(event, first_time) {
if (request_done()) {
var response = request_to_json()
process_history(response.history, !first_time)
if (response.sync) {process_sync(response.sync)}
}
}
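// Closes the connections, channels and private-message windows listed in the server's sync response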
function process_sync(close) {
for (var i = 0; i < close.connections.length; i++) { close_connection(close.connections[i]) }
for (var connection_id in close.targets) {
for (var i = 0; i < close.targets[connection_id].channels.length; i++) {
if (connections[connection_id]) {
close_channel(connection_id, close.targets[connection_id].channels[i])
}
}
for (var i = 0; i < close.targets[connection_id].privmsgs.length; i++) {
if (connections[connection_id]) {
close_privmsg(connection_id, close.targets[connection_id].privmsgs[i])
}
}
}
}
function remove_target(connection_id, target) {
if (is_current(connection_id, target)) {
change_to(connection_id)
}
if (connections[connection_id].targets[target]) {
connections[connection_id].div_group.removeChild(connections[connection_id].targets[target]["div_target"])
connections[connection_id].targets[target] = undefined
destroy_bookmarks(connection_id, target)
}
}
function close_channel(connection_id, channel) {
connections[connection_id].open_channels.splice(connections[connection_id].open_channels.indexOf(channel), 1)
remove_target(connection_id, channel)
}
function close_privmsg(connection_id, privmsg) {
connections[connection_id].open_privmsgs.splice(connections[connection_id].open_privmsgs.indexOf(privmsg), 1)
remove_target(connection_id, privmsg)
}
function close_connection(connection_id) {
change_to()
$("connections").removeChild(connections[connection_id].div_meta_group)
connections[connection_id] = undefined
destroy_bookmarks(connection_id, null)
}
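// Walks the polled history for every connection, creating connection and target elements as needed
// and rendering only lines whose msg_id has not been seen before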
function process_history(history, auto_open) {
for (var connection_id in history) {
var new_target = create_connection_if_necessary(connection_id, history[connection_id])
if (history[connection_id].connection_count != connections[connection_id].connection_count) {
connections[connection_id].connection_count = history[connection_id].connection_count
close_all_targets(connection_id)
}
if (connections[connection_id].nickname != history[connection_id].nickname) {
connections[connection_id].nickname = history[connection_id].nickname
                if (current.connection_id == connection_id) {
update_title(connection_id, current.target)
}
}
connections[connection_id].last_activity = history[connection_id].history.root_log.last_activity
for (var n = 0; n < history[connection_id].history.root_log.data.length; n++) {
var line = history[connection_id].history.root_log.data[n]
if (check_if_new(connection_id, null, line.msg_id)) {
update_read_count(connection_id, line.msg_id)
root_log(connection_id, line)
scroll_if_necessary(connection_id)
}
}
if (auto_open && new_target) {change_to(connection_id)}
for (var channel in history[connection_id].history.channels) {
var new_target = create_target_if_necessary(connection_id, channel, true)
update_users(connection_id, channel, history[connection_id].history.users[channel])
update_target_attributes(connection_id, channel, history[connection_id].history.channels[channel])
for (var n = 0; n < history[connection_id].history.channels[channel].data.length; n++) {
var line = history[connection_id].history.channels[channel].data[n]
if (check_if_new(connection_id, channel, line.msg_id)) {
update_read_count(connection_id, line.msg_id)
channel_log(connection_id, channel, line)
scroll_if_necessary(connection_id, channel)
}
}
if (auto_open && new_target) {change_to(connection_id, channel)}
}
for (var privmsg in history[connection_id].history.privmsgs) {
var new_target = create_target_if_necessary(connection_id, privmsg, false)
update_target_attributes(connection_id, privmsg, history[connection_id].history.privmsgs[privmsg])
for (var n = 0; n < history[connection_id].history.privmsgs[privmsg].data.length; n++) {
var line = history[connection_id].history.privmsgs[privmsg].data[n]
if (check_if_new(connection_id, privmsg, line.msg_id)) {
update_read_count(connection_id, line.msg_id)
channel_log(connection_id, privmsg, line)
scroll_if_necessary(connection_id, privmsg)
}
}
if (auto_open && new_target) {change_to(connection_id, privmsg)}
}
}
}
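// Remembers the highest msg_id rendered per connection or target and reports whether the given id is new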
function check_if_new(connection_id, target, check_id) {
if (check_id > get_last_id(connection_id, target)) {
if (target) {
connections[connection_id].targets[target].msg_id = check_id
} else {
connections[connection_id].msg_id = check_id
}
return true
} else {
return false
}
}
function get_last_id(connection_id, target) {
if (target) {
var last_id = connections[connection_id].targets[target].msg_id
if (last_id) {
return last_id
} else {
connections[connection_id].targets[target].msg_id = 0
return 0
}
} else {
var last_id = connections[connection_id].msg_id
if (last_id) {
return last_id
} else {
connections[connection_id].msg_id = 0
return 0
}
}
}
function close_all_targets(connection_id) {
    for (var target in connections[connection_id].targets) {
if (connections[connection_id].targets[target] && connections[connection_id].targets[target].is_channel) {
close_channel(connection_id, target)
} else {
close_privmsg(connection_id, target)
}
}
}
function create_connection_if_necessary(connection_id, element) {
if (!connections[connection_id]) {
create_connection_element(connection_id, element.connection_count, element.server_name, element.server_port, element.nickname, element.real_name, element.last_activity)
return true
} else {
return false
}
}
function update_users(connection_id, channel, userlist) {
connections[connection_id].targets[channel]["opers"] = userlist.opers
connections[connection_id].targets[channel]["voicers"] = userlist.voicers
connections[connection_id].targets[channel]["users"] = userlist.users
}
function update_target_attributes(connection_id, target, history_element) {
connections[connection_id].targets[target]["topic"] = history_element.topic
connections[connection_id].targets[target]["topic_creator"] = history_element.topic_creator
connections[connection_id].targets[target]["topic_creation_time"] = history_element.topic_creation_time
connections[connection_id].targets[target]["last_activity"] = history_element.last_activity
}
function update_read_count(connection_id, msg_id) {
if (last_read[connection_id] && msg_id > last_read[connection_id]) {
last_read[connection_id] = msg_id
}
}
function root_log(connection_id, line) {
check_for_timestamp(connection_id, null, line.timestamp)
switch (line.type) {
case NOTICE:
root_notice(connection_id, line.source, line.msg)
add_unread(connection_id, null, line.msg_id, false)
break
case SERVER:
irc_server(connection_id, line.source, line.tag, line.params)
add_unread(connection_id, null, line.msg_id, false)
break
case SERVER_ERROR:
irc_server_error(connection_id, line.source, line.tag, line.params)
add_unread(connection_id, null, line.msg_id, true)
break
case MODE:
irc_user_mode(connection_id, line.source, line.target, line.add_mode, line.mode_char, line.param)
break
case CTCP:
irc_ctcp(connection_id, line.source, line.cmd, line.param, line.response)
add_unread(connection_id, null, line.msg_id)
break
case JOIN:
irc_self_join(connection_id, line.channel)
break
case PART:
irc_self_part(connection_id, line.channel, line.msg)
break
case KICK:
irc_self_kick(connection_id, line.source, line.channel, line.reason)
break
case NICK:
irc_self_nick(connection_id, line.new_nickname)
break
case CLIENT_ERROR:
irc_client_error(connection_id, line.tag, line.params)
add_unread(connection_id, null, line.msg_id, true)
break
}
}
function get_unread_div(connection_id, target) {
if (target) {
return connections[connection_id].targets[target].div_unread
} else {
return connections[connection_id].div_unread
}
}
function is_current(connection_id, target) {
if (connection_id == current.connection_id && target == current.target) {
return true
} else {
return false
}
}
function create_bookmark(connection_id, target, value) {
if (!bookmarks[connection_id]) {
bookmarks[connection_id] = new Object()
bookmarks[connection_id].targets = new Object()
}
if (target) {
bookmarks[connection_id].targets[target] = value
} else {
bookmarks[connection_id]["msg_id"] = value
}
}
function get_bookmark(connection_id, target) {
if (!bookmarks[connection_id]) {
return 0
}
if (target) {
if (!bookmarks[connection_id].targets[target]) {
return 0
} else {
return bookmarks[connection_id].targets[target]
}
} else {
if (!bookmarks[connection_id].msg_id) {
return 0
} else {
return bookmarks[connection_id].msg_id
}
}
}
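// For the active window the message is bookmarked as read immediately; otherwise the unread badge is
// shown and incremented, with a highlight class for errors and nickname mentions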
function add_unread(connection_id, target, msg_id, highlighted) {
var div_unread = get_unread_div(connection_id, target)
if (is_current(connection_id, target)) {
create_bookmark(connection_id, target, msg_id)
} else {
if (get_bookmark(connection_id, target) < msg_id) {
var div_unread = get_unread_div(connection_id, target)
set_display(div_unread, "block")
div_unread.textContent = parseInt(div_unread.textContent) + 1
if (highlighted) {
div_unread.className = "small unread_count highlighted"
}
}
}
}
function clear_unread(connection_id, target, update_bookmark) {
var div_unread = get_unread_div(connection_id, target)
div_unread.className = "small unread_count"
set_display(div_unread, "none")
div_unread.textContent = 0
if (update_bookmark) {
create_bookmark(connection_id, target, last_read[connection_id])
jar.put("bookmarks", bookmarks)
}
}
function destroy_bookmarks(connection_id, target) {
if (target) {
if (bookmarks[connection_id] && bookmarks[connection_id].targets[target]) {
bookmarks[connection_id].targets[target] = undefined
jar.put("bookmarks", bookmarks)
}
} else {
if (bookmarks[connection_id]) {
bookmarks[connection_id] = undefined
jar.put("bookmarks", bookmarks)
}
}
}
function add_timestamp(connection_id, target, time, is_long) {
    var div_timestamp = create_div("tiny timestamp")
    if (is_long) {
        div_timestamp.textContent = timestamp_long(time)
    } else {
        div_timestamp.textContent = timestamp_short(time)
    }
    add_activity(connection_id, target, div_timestamp)
return true
}
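// Inserts a timestamp row when more than 15 minutes have passed since the previous line
// (long format after 24 hours or for the very first line); returns true if one was added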
function check_for_timestamp(connection_id, target, time) {
if (target) {
var last_time = connections[connection_id].targets[target].last_timestamp
connections[connection_id].targets[target].last_timestamp = time
} else {
var last_time = connections[connection_id].last_timestamp
connections[connection_id]["last_timestamp"] = time
}
if (last_time) {
var diff_time = time - last_time
if (diff_time > 900) { // 15 minutes
if (diff_time > 86400) { // 24 hours
return add_timestamp(connection_id, target, time, true)
} else {
return add_timestamp(connection_id, target, time, false)
}
}
} else {
return add_timestamp(connection_id, target, time, true)
}
return false
}
function local_error(text) {
div_activity(current.connection_id, current.target).appendChild(create_activity_span("small server error", "INPUTERROR", "small server_info", text))
scroll_if_necessary(current.connection_id, current.target)
}
function irc_client_error(connection_id, tag, params) {
div_activity(connection_id).appendChild(create_activity_span("small server error", tag, "small server_info", params))
}
function irc_ctcp(connection_id, source, ctcp_cmd, ctcp_param, response) {
add_activity(connection_id, undefined, create_activity_span("small server source", source, "small server ctcp", "CTCP " + ctcp_cmd, "small server_info", "Response: " + response))
}
function mention_me(connection_id, text) {
if (text.toLowerCase().indexOf(connections[connection_id].nickname.toLowerCase()) == 0) {
return true
} else {
return false
}
}
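// Renders one channel or private-message history line according to its type code,
// updating unread counts and the user list as needed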
function channel_log(connection_id, channel, line) {
var timestamp = check_for_timestamp(connection_id, channel, line.timestamp)
switch (line.type) {
case JOIN:
create_divider_if_necessary(connection_id, channel, false, timestamp)
irc_join(connection_id, channel, line.user, line)
update_user_list(connection_id, channel)
break
case PRIVMSG:
create_divider_if_necessary(connection_id, channel, true, timestamp)
irc_privmsg(connection_id, channel, line.source, line.msg)
add_unread(connection_id, channel, line.msg_id, mention_me(connection_id, line.msg))
break
case ACTION:
create_divider_if_necessary(connection_id, channel, true, timestamp)
irc_action(connection_id, channel, line.source, line.msg)
add_unread(connection_id, channel, line.msg_id, false)
break
case NOTICE:
create_divider_if_necessary(connection_id, channel, true, timestamp)
irc_notice(connection_id, channel, line.source, line.msg)
add_unread(connection_id, channel, line.msg_id, false)
break
case PART:
create_divider_if_necessary(connection_id, channel, false, timestamp)
irc_part(connection_id, channel, line.source, line.msg)
update_user_list(connection_id, channel)
break
case MODE:
create_divider_if_necessary(connection_id, channel, false, timestamp)
irc_channel_mode(connection_id, channel, line.source, line.target, line.add_mode, line.mode_char, line.param)
break
case TOPIC:
create_divider_if_necessary(connection_id, channel, false, timestamp)
irc_topic(connection_id, channel, line.source, line.text)
break
case KICK:
create_divider_if_necessary(connection_id, channel, false, timestamp)
irc_kick(connection_id, channel, line.source, line.user, line.reason)
update_user_list(connection_id, channel)
break
case NICK:
create_divider_if_necessary(connection_id, channel, false, timestamp)
irc_nick(connection_id, channel, line.user, line.new_nickname)
update_user_list(connection_id, channel)
break
case QUIT:
create_divider_if_necessary(connection_id, channel, false, timestamp)
irc_quit(connection_id, channel, line.user, line.msg)
update_user_list(connection_id, channel)
break
}
}
function irc_self_nick(connection_id, new_nickname) {
irc_server_narrative(connection_id, "You have changed your nickname to " + new_nickname)
}
function irc_nick(connection_id, channel, user, new_nickname) {
add_channel_narrative(connection_id, channel, user + " has changed their nickname to " + new_nickname)
}
function irc_quit(connection_id, channel, user, msg) {
add_channel_narrative(connection_id, channel, user + " has quit IRC" + extra_msg(msg))
}
function irc_kick(connection_id, channel, source, user, reason) {
add_channel_narrative(connection_id, channel, source + " has kicked " + user + " from " + channel + extra_msg(reason))
}
function add_channel_narrative(connection_id, channel, text) {
var div_narrative = create_div("activity_element small narrative")
div_narrative.textContent = text
linkify(div_narrative)
add_activity(connection_id, channel, div_narrative)
}
function irc_topic(connection_id, channel, source, text) {
if (text == "") {
add_channel_narrative(connection_id, channel, source + " has cleared the current topic")
} else {
add_channel_narrative(connection_id, channel, source + " has set the topic to “" + text + "”")
}
}
function mode_operator(is_plus) {
return (is_plus ? "+" : "-")
}
function mode_param(param) {
return (param ? " with the value of " + param : "")
}
function irc_channel_mode(connection_id, channel, source, target, add_mode, mode_char, param) {
switch(mode_char) {
case "o":
if (add_mode) {
op_user(connection_id, channel, source, param)
} else {
deop_user(connection_id, channel, source, param)
}
update_user_list(connection_id, channel)
break
case "v":
if (add_mode) {
voice_user(connection_id, channel, source, param)
} else {
devoice_user(connection_id, channel, source, param)
}
update_user_list(connection_id, channel)
break
default:
add_channel_narrative(connection_id, channel, source + " has set the mode of " + target + " to " + mode_operator(add_mode) + mode_char + mode_param(param))
}
}
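// Appends a divider before a line of user text and before the first line that follows user text,
// unless a timestamp row was just added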
function create_divider_if_necessary(connection_id, target, user_text, timestamp) {
if (!timestamp && (user_text || connections[connection_id].targets[target].divider_required)) {
div_activity(connection_id, target).appendChild(create_div("divider"))
}
connections[connection_id].targets[target].divider_required = user_text
}
function op_user(connection_id, channel, source, target) {
add_channel_narrative(connection_id, channel, source + " has given operator status to " + target)
}
function deop_user(connection_id, channel, source, target) {
add_channel_narrative(connection_id, channel, source + " has removed operator status from " + target)
}
function voice_user(connection_id, channel, source, target) {
add_channel_narrative(connection_id, channel, source + " has given voice status to " + target)
}
function devoice_user(connection_id, channel, source, target) {
add_channel_narrative(connection_id, channel, source + " has removed voice status from " + target)
}
function irc_privmsg(connection_id, channel, user, msg) {
add_activity(connection_id, channel, linkify(create_activity_span(nick_color(connection_id, user), user + ":", "conversation", msg)))
}
function add_link(text) {
if (text.match(/^www\./i)) {
return "<a href=\"http://" + text + "\" target=\"_blank\">" + text + "</a>"
} else {
return "<a href=\"" + text + "\" target=\"_blank\">" + text + "</a>"
}
}
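// Replaces http:// and www. URLs in the element's HTML with links that open in a new tab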
function linkify(element) {
    element.innerHTML = element.innerHTML.gsub(/((http:\/\/|www\.)([A-Za-z0-9._\/?=+\-:%]|&)+)/i, function(match){return add_link(match[1])})
return element
}
function irc_action(connection_id, channel, user, msg) {
var span_action = create_activity_span(nick_color(connection_id, user), " * " + user + " " + msg)
linkify(span_action)
add_activity(connection_id, channel, span_action)
}
function irc_notice(connection_id, channel, user, msg) {
add_activity(connection_id, channel, create_activity_span(nick_color(connection_id, user), "[" + user + "]:", "conversation", msg))
}
function nick_color(connection_id, user) {
return ((user == connections[connection_id].nickname) ? "nick self" : "nick")
}
function irc_join(connection_id, channel, user) {
add_channel_narrative(connection_id, channel, user + " has joined " + channel)
}
function extra_msg(msg) {
return (msg ? " - “" + msg + "”" : "")
}
function irc_part(connection_id, channel, user, msg) {
add_channel_narrative(connection_id, channel, user + " has left " + channel + extra_msg(msg))
}
function irc_user_mode(connection_id, source, target, add_mode, mode_char, param) {
irc_server_narrative(connection_id, source + " has set the mode of " + target + " to " + mode_operator(add_mode) + mode_char)
}
function irc_self_join(connection_id, channel) {
irc_server_narrative(connection_id, "You have joined " + channel)
}
function irc_self_part(connection_id, channel, msg) {
irc_server_narrative(connection_id, "You have left " + channel + extra_msg(msg))
}
function irc_self_kick(connection_id, user, channel, reason) {
irc_server_narrative(connection_id, "You have been kicked from " + channel + " by " + user + extra_msg(reason))
}
function irc_server_narrative(connection_id, text) {
add_activity(connection_id, null, create_activity_div("small server_narrative", text))
}
function root_notice(connection_id, source, msg) {
if (source) {
var span_root_notice = create_activity_span("small server server_source", source, "small server notice", "NOTICE", "small server_info", msg)
} else {
var span_root_notice = create_activity_span("small server notice", "NOTICE", "small server_info", msg)
}
linkify(span_root_notice)
div_activity(connection_id).appendChild(span_root_notice)
}
function create_target_if_necessary(connection_id, target, is_channel) {
if (!connections[connection_id].targets[target]) {
create_target_element(connection_id, target, is_channel)
return true
} else {
return false
}
}
function scroll_if_necessary(connection_id, target) {
if (is_current(connection_id, target)) {
scroll_to_bottom()
update_topic(connection_id, target)
update_activity(connection_id, target)
}
}
function irc_server(connection_id, source, tag, text) {
if (tag == "MOTD") {
var div_motd = create_div("motd small")
div_motd.textContent = text
no_breaking_spaces(div_motd)
linkify(div_motd)
div_activity(connection_id).appendChild(div_motd)
} else {
var span_server = create_activity_span("small server server_source", source, "small server", tag, "small server_info", text)
linkify(span_server)
div_activity(connection_id).appendChild(span_server)
}
}
function no_breaking_spaces(element) {
    element.innerHTML = element.innerHTML.gsub(" ", " ")
}
function irc_server_error(connection_id, source, tag, text) {
div_activity(connection_id).appendChild(create_activity_span("small server server_source", source, "small server error", tag, "small server_info", text))
}
function create_element(type, class_name, id) {
var element = document.createElement(type)
if (class_name) {element.setAttribute("class", class_name)}
if (id) {element.setAttribute("id", id)}
return element
}
function create_div(class_name, id) {
return create_element("div", class_name, id)
}
function activity_element() {
return create_div("activity_element small")
}
function create_activity_div(type, text) {
var div_text = create_element("div", type)
div_text.textContent = text
return div_text
}
function create_activity_span(type_1, text_1, type_2, text_2, type_3, text_3) {
var div_activity_element = activity_element()
var span_text_1 = create_element("span", type_1)
span_text_1.textContent = text_1
div_activity_element.appendChild(span_text_1)
if (type_2 && text_2) {
var span_text_2 = create_element("span", type_2)
span_text_2.textContent = text_2
div_activity_element.appendChild(span_text_2)
}
if (type_3 && text_3) {
var span_text_3 = create_element("span", type_3)
span_text_3.textContent = text_3
div_activity_element.appendChild(span_text_3)
}
return div_activity_element
}
function div_activity(connection_id, target) {
if (target) {
return connections[connection_id].targets[target].div_activity
} else {
return connections[connection_id].div_activity
}
}
function add_activity(connection_id, target, child) {
div_activity(connection_id, target).appendChild(child)
}
function join(connection_id) {
connections[connection_id].join_button.style.display = "none"
connections[connection_id].join_channel.style.display = "block"
connections[connection_id].join_input.value = "#"
connections[connection_id].join_input.focus()
}
function cancel_join(connection_id) {
connections[connection_id].join_button.style.display = "block"
connections[connection_id].join_channel.style.display = "none"
}
function join_input(connection_id) {
join_request(connection_id, connections[connection_id].join_input.value)
cancel_join(connection_id)
}
function div_connection_item(connection_id, target) {
if (target) {
return connections[connection_id].targets[target].div_target
} else {
return connections[connection_id].div_server
}
}
function disconnect(connection_id) {
close_request(connection_id)
}
function close_window(connection_id, target) {
if (connections[connection_id].targets[target].is_channel) {
part_request(connection_id, target)
} else {
close_request(connection_id, target)
}
}
function scroll_to_bottom() {
var div_element = $("activity")
div_element.scrollTop = div_element.scrollHeight
}
function show(element) {
set_display($(element), "block")
}
function hide(element) {
set_display($(element), "none")
}
function set_display(element, value) {
element.style.display = value
}
function remove_all(element) {
while(element.firstChild) {
element.removeChild(element.firstChild)
}
}
|
public/javascript/script.js
|
var request
var connections = new Object()
var last_read = new Object()
var current = new Object()
var bookmarks = new Object()
var jar = new CookieJar({expires:10080, path:"/"});
var input_history = new Object()
input_history["data"] = new Array()
input_history["position"] = -1
var tab_completion = new Object()
tab_completion["repeat"] = false
tab_completion["search_term"] = null
tab_completion["user_lookup"] = new Array()
tab_completion["response"] = false
tab_completion["last_match"] = 0
const PRIVMSG = "p"
const ACTION = "a"
const NOTICE = "n"
const JOIN = "j"
const SELF_JOIN = "J"
const PART = "l"
const SELF_PART = "L"
const KICK = "k"
const QUIT = "q"
const TOPIC = "t"
const NICK = ">"
const SERVER = "-"
const SERVER_ERROR = "*"
const CLIENT_ERROR = "!"
const MODE = "m"
const CTCP = "c"
window.onresize = function() {
update_activity_width()
}
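// Resizes the activity pane to fill the window, leaving extra room for the user list when a channel is selected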
function update_activity_width() {
if (current.target && connections[current.connection_id].targets[current.target].is_channel) {
$("activity").style.width = document.body.offsetWidth - 417 + "px"
} else {
$("activity").style.width = document.body.offsetWidth - 247 + "px"
}
}
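// Entry point: restores bookmarks from the cookie jar, issues the initial history request
// and starts the 10-second polling loop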
function init() {
var stored_bookmarks = jar.get("bookmarks")
if (stored_bookmarks) {
bookmarks = stored_bookmarks
}
hide("users")
request = get("all", first_time_irc_handler)
$("server_name").focus()
setInterval("update_request()", 10000)
update_activity_width()
}
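// Handles input box submissions: records the line in the input history, dispatches slash commands
// and sends anything else as a message to the current target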
function send_msg(text) {
input_history.data[input_history.data.length] = text
input_history.position = input_history.data.length
if (text.indexOf("/") == 0) {
var command = double_arg(text)
if (command) {
input_command(command.first.toUpperCase(), command.remainder)
} else {
if (current.connection_id) {
switch (text.toUpperCase()) {
case "/PART":
if (current.target) {
part_request(current.connection_id, current.target)
}
break
case "/QUIT":
close_request(current.connection_id, null)
break
case "/CLOSE":
if (current.target && connections[current.connection_id].targets[current.target]) {
if (connections[current.connection_id].targets[current.target].is_channel) {
part_request(current.connection_id, current.target)
} else {
close_request(current.connection_id, current.target)
}
}
break
case "/MOTD":
command_request(current.connection_id, "MOTD", 1)
break
}
}
}
} else {
send_privmsg(text, false)
}
}
function send_privmsg(text, action) {
if (current.target) {
if (text != "") {
privmsg_request(current.connection_id, current.target, text, action)
}
} else {
local_error("Only commands may be entered when viewing the server log")
}
}
function input_command(cmd, param) {
if (current.connection_id) {
switch (cmd) {
case "/ME":
if (param) {
send_privmsg(param, true)
}
break
case "/NICK":
command_request(current.connection_id, "NICK " + param, 1)
break
case "/MSG":
var command = double_arg(param)
if (command) {
if (!command.first.match(/^[&#!+.~]/)) {
privmsg_request(current.connection_id, command.first, command.remainder, false)
} else {
local_error("Outside messages have been prevented by the local client")
}
} else {
local_error("Usage is: /msg ≪user≫ ≪text≫")
}
break
case "/NOTICE":
var command = double_arg(param)
if (command) {
if (!command.first.match(/^[&#!+.~]/)) {
notice_request(current.connection_id, command.first, command.remainder, false)
} else {
local_error("Outside notices have been prevented by the local client")
}
} else {
local_error("Usage is: /notice ≪user≫ ≪text≫")
}
break
case "/JOIN":
join_request(current.connection_id, param)
break
case "/PART":
part_request(current.connection_id, param)
break
case "/WHOIS":
whois_user(current.connection_id, param)
break
case "/TOPIC":
command_channel_check("TOPIC " + current.target + " :" + param, 1)
break
case "/OP":
current_channel_mode_change("+o", param)
break
case "/DEOP":
current_channel_mode_change("-o", param)
break
case "/VOICE":
current_channel_mode_change("+v", param)
break
case "/DEVOICE":
current_channel_mode_change("-v", param)
break
case "/USER":
click_on_user(current.connection_id, param)
break
case "/MODE":
var command = double_arg(param)
if (command) {
current_channel_mode_change(command.first, command.remainder)
} else {
current_channel_mode_change(param, "")
}
break
case "/RAW":
command_request(current.connection_id, param, 1)
break
}
}
}
function current_channel_mode_change(mode, param) {
command_channel_check("MODE " + current.target + " " + mode + " :" + param, 2)
}
function command_channel_check(command, wait) {
if (current.connection_id && current.target && connections[current.connection_id] && connections[current.connection_id].targets[current.target]) {
if (connections[current.connection_id].targets[current.target].is_channel) {
command_request(current.connection_id, command, wait)
}
}
}
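// Splits input into its first word and the remainder; returns null when there is no remainder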
function double_arg(text) {
if (text) {
var parsed_string = text.match(/^(\S+)\s?(.*)?$/)
if (parsed_string[1] && parsed_string[2]) {
var out_strings = new Object()
out_strings["first"] = parsed_string[1]
out_strings["remainder"] = parsed_string[2]
return out_strings
} else {
return null
}
} else {
return null
}
}
const LEFT_CURSOR = 37
const UP_CURSOR = 38
const RIGHT_CURSOR = 39
const DOWN_CURSOR = 40
const DELETE = 8
const TAB = 9
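// Keyboard handler for the input box: Tab runs nickname completion, Ctrl+cursor keys switch between
// servers and targets, plain up/down browse the input history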
function detect_keypress(event) {
if (!event.ctrlKey && event.which == TAB) {
if ((tab_completion.repeat && tab_completion.response) || $("msg").value.match(/^\S+$/)) {
tab_completion.response = true
} else {
tab_completion.response = false
}
if (tab_completion.response) {
$("msg").value = complete_nick()
} else {
$("msg").value = $("msg").value.gsub(/(\S+)$/, function(match) {return complete_nick()})
}
return false
} else {
tab_completion.repeat = false
}
if (event.ctrlKey) {
switch (event.which) {
case LEFT_CURSOR:
goto_prev_server()
return false
break
case UP_CURSOR:
goto_prev_target()
return false
break
case RIGHT_CURSOR:
goto_next_server()
return false
break
case DOWN_CURSOR:
goto_next_target()
return false
break
case DELETE:
change_to(current.connection_id, null)
return false
break
}
} else {
switch (event.which) {
case UP_CURSOR:
prev_history()
return false
break
case DOWN_CURSOR:
next_history()
return false
break
}
}
return true
}
function goto_prev_server() {
var new_connection_id = move_in_element(current.connection_id, connections, -1)
change_to(new_connection_id, null)
}
function goto_prev_target() {
var new_target = move_in_element(current.target, connections[current.connection_id].targets, -1)
change_to(current.connection_id, new_target)
}
function goto_next_server() {
var new_connection_id = move_in_element(current.connection_id, connections, 1)
change_to(new_connection_id, null)
}
function goto_next_target() {
var new_target = move_in_element(current.target, connections[current.connection_id].targets, 1)
change_to(current.connection_id, new_target)
}
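// Tab completion: the first Tab snapshots the channel's user list and the partial word, repeated Tabs
// cycle through further matches; a match at the start of the line is completed with ": " appended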
function complete_nick() {
if (tab_completion.repeat) {
var match = tab_complete_lookup(tab_completion.last_match + 1)
if (match) {
if (tab_completion.response) {
return match + ": "
} else {
return match
}
} else {
tab_completion.repeat = false
return tab_completion.search_term
}
} else {
var search_term = $("msg").value.match(/(\S+)$/)
if (search_term) {
tab_completion.user_lookup = combined_users(current.connection_id, current.target)
tab_completion.search_term = search_term[1].toLowerCase()
tab_completion.repeat = true
var match = tab_complete_lookup(0)
if (match) {
if (tab_completion.response) {
return match + ": "
} else {
return match
}
} else {
tab_completion.repeat = false
return tab_completion.search_term
}
}
}
}
function tab_complete_lookup(from) {
for (var i = from; i < tab_completion.user_lookup.length; i++) {
if (tab_completion.user_lookup[i].toLowerCase().indexOf(tab_completion.search_term) == 0) {
tab_completion.last_match = i
return tab_completion.user_lookup[i]
}
}
return null
}
function combined_users(connection_id, target) {
if (connections[connection_id].targets[target] && connections[connection_id].targets[target].is_channel) {
return connections[connection_id].targets[target].opers.concat(connections[connection_id].targets[target].voicers, connections[connection_id].targets[target].users)
}
return new Array()
}
function prev_history() {
input_history.position = Math.max(input_history.position - 1, 0)
if (input_history.data[input_history.position]) {
$("msg").value = input_history.data[input_history.position]
}
}
function next_history() {
input_history.position = Math.min(input_history.position + 1, input_history.data.length)
if (input_history.data[input_history.position]) {
$("msg").value = input_history.data[input_history.position]
} else {
$("msg").value = ""
}
}
function build_array(parent) {
var element_array = new Array()
    for (var element in parent) {
if (parent[element]) {
element_array[element_array.length] = element
}
}
return element_array
}
function move_in_array(reference, array_of_items, position) {
if (reference) {
var current_location = array_of_items.indexOf(reference)
if (current_location != -1) {
var new_position = current_location + position
if (new_position >= array_of_items.length) {
return array_of_items[new_position - array_of_items.length]
} else if (new_position < 0) {
return array_of_items[new_position + array_of_items.length]
} else {
return array_of_items[new_position]
}
} else {
return null
}
} else {
return array_of_items[0]
}
}
function move_in_element(reference, element, position) {
return move_in_array(reference, build_array(element), position)
}
function create_target_element(connection_id, target_name, is_channel) {
connections[connection_id].targets[target_name] = new Object()
connections[connection_id].targets[target_name]["is_channel"] = is_channel
connections[connection_id].targets[target_name]["div_activity"] = create_div()
if (is_channel) {
connections[connection_id].open_channels.push(target_name)
var activity_header = create_activity_div("activity_header", "Channel activity for " + target_name)
connections[connection_id].targets[target_name]["div_users"] = create_div()
} else {
connections[connection_id].open_privmsgs.push(target_name)
var activity_header = create_activity_div("activity_header", "Private messages with " + target_name)
}
div_activity(connection_id, target_name).appendChild(activity_header)
connections[connection_id].targets[target_name]["div_target"] = create_div("connection_item clickable")
connections[connection_id].targets[target_name]["div_target"].setAttribute("onclick", "change_to(\"" + connection_id + "\", \"" + target_name + "\")")
var name = create_div("target")
name.textContent = is_channel ? target_name : "@" + target_name
var close_button = create_div("close_button")
close_button.setAttribute("onclick", "close_window(\"" + connection_id + "\", \"" + target_name + "\")")
connections[connection_id].targets[target_name]["div_unread"] = create_div("small unread_count")
clear_unread(connection_id, target_name, false)
connections[connection_id].targets[target_name].div_target.appendChild(name)
connections[connection_id].targets[target_name].div_target.appendChild(close_button)
connections[connection_id].targets[target_name].div_target.appendChild(connections[connection_id].targets[target_name].div_unread)
connections[connection_id].div_group.appendChild(connections[connection_id].targets[target_name].div_target)
}
function create_connection_element(connection_id, connection_count, connection_name, connection_port, nickname, real_name, last_activity) {
last_read[connection_id] = -1
connections[connection_id] = new Object()
connections[connection_id]["open_channels"] = new Array()
connections[connection_id]["open_privmsgs"] = new Array()
connections[connection_id]["targets"] = new Object()
connections[connection_id]["connection_count"] = connection_count
connections[connection_id]["server_name"] = connection_name
connections[connection_id]["server_port"] = connection_port
connections[connection_id]["nickname"] = nickname
connections[connection_id]["real_name"] = real_name
connections[connection_id]["last_activity"] = last_activity
connections[connection_id]["div_meta_group"] = create_div()
connections[connection_id]["div_group"] = create_div()
connections[connection_id]["join_button"] = create_div("tiny clickable header join_button with_hover")
connections[connection_id]["join_channel"] = create_div()
connections[connection_id].join_channel.style.display = "none"
connections[connection_id]["join_input"] = create_element("input", "regular join_input")
connections[connection_id]["join_input"].setAttribute("onkeypress", "if (event.which == 10 || event.which == 13) {join_input(\"" + connection_id + "\")}")
var close_button = create_div("tiny clickable header join_button with_hover cancel_button")
close_button.textContent = "Cancel"
close_button.setAttribute("onclick", "cancel_join(\"" + connection_id + "\")")
connections[connection_id].join_channel.appendChild(connections[connection_id]["join_input"])
connections[connection_id].join_channel.appendChild(close_button)
connections[connection_id].join_button.textContent = "Join channel..."
connections[connection_id].join_button.setAttribute("onclick", "join(\"" + connection_id + "\")")
connections[connection_id].div_meta_group.appendChild(connections[connection_id].div_group)
connections[connection_id].div_meta_group.appendChild(connections[connection_id].join_button)
connections[connection_id].div_meta_group.appendChild(connections[connection_id].join_channel)
$("connections").appendChild(connections[connection_id].div_meta_group)
add_connection_server(connection_id, connection_name)
}
function add_connection_server(connection_id, target_name) {
connections[connection_id]["div_activity"] = create_div()
connections[connection_id].div_activity.appendChild(create_activity_div("activity_header", "IRC Server connection with " + connections[connection_id].server_name))
connections[connection_id]["div_server"] = create_div("connection_item clickable")
connections[connection_id].div_server.setAttribute("onclick", "change_to(\"" + connection_id + "\")")
var name = create_div("server")
name.textContent = target_name
var close_button = create_div("close_button")
close_button.setAttribute("onclick", "disconnect(\"" + connection_id + "\")")
connections[connection_id]["div_unread"] = create_div("small unread_count")
clear_unread(connection_id, null, false)
connections[connection_id].div_server.appendChild(name)
connections[connection_id].div_server.appendChild(close_button)
connections[connection_id].div_server.appendChild(connections[connection_id].div_unread)
connections[connection_id].div_group.appendChild(connections[connection_id].div_server)
}
function change_to(connection_id, target) {
if (current.connection_id) {
remove_all($("activity"))
div_connection_item(current.connection_id, current.target).className = "connection_item clickable"
} else {
hide("new_connection")
show("activity")
show("msg")
}
if (connection_id) {
$("activity").appendChild(div_activity(connection_id, target))
div_connection_item(connection_id, target).className = "connection_item selected"
show("activity")
scroll_to_bottom()
$("msg").focus()
clear_unread(connection_id, target, true)
} else {
show("new_connection")
hide("activity")
$("server_name").focus()
hide("msg")
}
update_title(connection_id, target)
update_topic(connection_id, target)
show_or_hide_users(connection_id, target)
update_activity(connection_id, target)
current["connection_id"] = connection_id
current["target"] = target
update_activity_width()
jar.put("current", current)
}
function show_or_hide_users(connection_id, target) {
if (connection_id && target && connections[connection_id].targets[target].is_channel) {
remove_all($("users"))
update_user_list(connection_id, target)
$("users").appendChild(connections[connection_id].targets[target].div_users)
show("users")
} else {
hide("users")
}
}
function update_user_list(connection_id, channel) {
if (connections[connection_id].targets[channel].is_channel) {
remove_all(connections[connection_id].targets[channel].div_users)
var opers = connections[connection_id].targets[channel].opers.sort()
var voicers = connections[connection_id].targets[channel].voicers.sort()
var users = connections[connection_id].targets[channel].users.sort()
for (var i = 0; i < opers.length; i++) { add_oper(connection_id, channel, opers[i]) }
for (var i = 0; i < voicers.length; i++) { add_voicer(connection_id, channel, voicers[i]) }
for (var i = 0; i < users.length; i++) { add_user(connection_id, channel, users[i]) }
}
}
function add_user_element(connection_id, channel, name, type) {
var div_op = create_div("user " + type)
var div_span = create_element("span", "clickable with_hover")
div_span.setAttribute("onclick", "click_on_user(\"" + connection_id + "\", \"" + name + "\")")
div_span.textContent = name
var div_whois = create_element("span", "small user_menu clickable")
div_whois.innerHTML = "?"
div_whois.setAttribute("onclick", "whois_user(\"" + connection_id + "\", \"" + name + "\")")
div_op.appendChild(div_span)
div_op.appendChild(div_whois)
connections[connection_id].targets[channel].div_users.appendChild(div_op)
}
function click_on_user(connection_id, user) {
if (!connections[connection_id].targets[user.toLowerCase()]) {
new_window_request(connection_id, user)
} else {
change_to(connection_id, user.toLowerCase())
}
}
function whois_user(connection_id, user) {
command_request(connection_id, "WHOIS " + user, 1)
change_to(connection_id)
}
function add_oper(connection_id, channel, name) {
add_user_element(connection_id, channel, name, "op")
}
function add_voicer(connection_id, channel, name) {
add_user_element(connection_id, channel, name, "voice")
}
function add_user(connection_id, channel, name) {
add_user_element(connection_id, channel, name, "")
}
function update_topic(connection_id, target) {
if (target) {
if (!connections[connection_id].targets[target].is_channel) {
$("topic").textContent = "Private messages with " + target
} else {
if (connections[connection_id].targets[target].topic) {
$("topic").textContent = target + " - " + connections[connection_id].targets[target].topic + topic_appendix(connections[connection_id].targets[target].topic_creator, connections[connection_id].targets[target].topic_creation_time)
linkify($("topic"))
} else {
$("topic").textContent = target
}
}
} else {
if (connection_id) {
$("topic").textContent = "Server log for " + connections[connection_id].server_name
} else {
$("topic").textContent = "Create a new connection"
}
}
}
function topic_appendix(creator, creation_time) {
return ((creator && creation_time) ? " (" + creator + ", " + get_brief_date(creation_time) + ")" : "")
}
function update_title(connection_id, target) {
if (connection_id) {
if (target) {
set_title(target + ", " + connections[connection_id].server_name + ":" + connections[connection_id].server_port + ", " + connections[connection_id].nickname + " (" + connections[connection_id].real_name + ")")
} else {
set_title(connections[connection_id].server_name + ":" + connections[connection_id].server_port + ", " + connections[connection_id].nickname + " (" + connections[connection_id].real_name + ")")
}
} else {
set_title("Web IRC")
}
}
function update_activity(connection_id, target) {
if (connection_id) {
if (target) {
set_activity(connections[connection_id].targets[target].last_activity)
} else {
set_activity(connections[connection_id].last_activity)
}
} else {
set_activity()
}
}
function set_activity(time) {
if (time) {
$("activity_info").textContent = "Last activity: " + timestamp_long(time)
} else {
$("activity_info").textContent = ""
}
}
function set_title(text) {
if (text) {
$("title_text").textContent = text
} else {
$("title_text").textContent = ""
}
}
function first_time_irc_handler(event) {
if (request_done()) {
irc_handler(event, true)
var stored_current = jar.get("current")
if (stored_current && connections[stored_current.connection_id]) {
if (!stored_current.target || connections[stored_current.connection_id].targets[stored_current.target]) {
change_to(stored_current.connection_id, stored_current.target)
}
}
}
}
function irc_handler(event, first_time) {
if (request_done()) {
var response = request_to_json()
process_history(response.history, !first_time)
if (response.sync) {process_sync(response.sync)}
}
}
function process_sync(close) {
for (var i = 0; i < close.connections.length; i++) { close_connection(close.connections[i]) }
for (var connection_id in close.targets) {
for (var i = 0; i < close.targets[connection_id].channels.length; i++) {
if (connections[connection_id]) {
close_channel(connection_id, close.targets[connection_id].channels[i])
}
}
for (var i = 0; i < close.targets[connection_id].privmsgs.length; i++) {
if (connections[connection_id]) {
close_privmsg(connection_id, close.targets[connection_id].privmsgs[i])
}
}
}
}
function remove_target(connection_id, target) {
if (is_current(connection_id, target)) {
change_to(connection_id)
}
if (connections[connection_id].targets[target]) {
connections[connection_id].div_group.removeChild(connections[connection_id].targets[target]["div_target"])
connections[connection_id].targets[target] = undefined
destroy_bookmarks(connection_id, target)
}
}
function close_channel(connection_id, channel) {
connections[connection_id].open_channels.splice(connections[connection_id].open_channels.indexOf(channel), 1)
remove_target(connection_id, channel)
}
function close_privmsg(connection_id, privmsg) {
connections[connection_id].open_privmsgs.splice(connections[connection_id].open_privmsgs.indexOf(privmsg), 1)
remove_target(connection_id, privmsg)
}
function close_connection(connection_id) {
change_to()
$("connections").removeChild(connections[connection_id].div_meta_group)
connections[connection_id] = undefined
destroy_bookmarks(connection_id, null)
}
function process_history(history, auto_open) {
for (var connection_id in history) {
var new_target = create_connection_if_necessary(connection_id, history[connection_id])
if (history[connection_id].connection_count != connections[connection_id].connection_count) {
connections[connection_id].connection_count = history[connection_id].connection_count
close_all_targets(connection_id)
}
if (connections[connection_id].nickname != history[connection_id].nickname) {
connections[connection_id].nickname = history[connection_id].nickname
            if (current.connection_id == connection_id) {
update_title(connection_id, current.target)
}
}
connections[connection_id].last_activity = history[connection_id].history.root_log.last_activity
for (var n = 0; n < history[connection_id].history.root_log.data.length; n++) {
var line = history[connection_id].history.root_log.data[n]
if (check_if_new(connection_id, null, line.msg_id)) {
update_read_count(connection_id, line.msg_id)
root_log(connection_id, line)
scroll_if_necessary(connection_id)
}
}
if (auto_open && new_target) {change_to(connection_id)}
for (var channel in history[connection_id].history.channels) {
var new_target = create_target_if_necessary(connection_id, channel, true)
update_users(connection_id, channel, history[connection_id].history.users[channel])
update_target_attributes(connection_id, channel, history[connection_id].history.channels[channel])
for (var n = 0; n < history[connection_id].history.channels[channel].data.length; n++) {
var line = history[connection_id].history.channels[channel].data[n]
if (check_if_new(connection_id, channel, line.msg_id)) {
update_read_count(connection_id, line.msg_id)
channel_log(connection_id, channel, line)
scroll_if_necessary(connection_id, channel)
}
}
if (auto_open && new_target) {change_to(connection_id, channel)}
}
for (var privmsg in history[connection_id].history.privmsgs) {
var new_target = create_target_if_necessary(connection_id, privmsg, false)
update_target_attributes(connection_id, privmsg, history[connection_id].history.privmsgs[privmsg])
for (var n = 0; n < history[connection_id].history.privmsgs[privmsg].data.length; n++) {
var line = history[connection_id].history.privmsgs[privmsg].data[n]
if (check_if_new(connection_id, privmsg, line.msg_id)) {
update_read_count(connection_id, line.msg_id)
channel_log(connection_id, privmsg, line)
scroll_if_necessary(connection_id, privmsg)
}
}
if (auto_open && new_target) {change_to(connection_id, privmsg)}
}
}
}
function check_if_new(connection_id, target, check_id) {
if (check_id > get_last_id(connection_id, target)) {
if (target) {
connections[connection_id].targets[target].msg_id = check_id
} else {
connections[connection_id].msg_id = check_id
}
return true
} else {
return false
}
}
function get_last_id(connection_id, target) {
if (target) {
var last_id = connections[connection_id].targets[target].msg_id
if (last_id) {
return last_id
} else {
connections[connection_id].targets[target].msg_id = 0
return 0
}
} else {
var last_id = connections[connection_id].msg_id
if (last_id) {
return last_id
} else {
connections[connection_id].msg_id = 0
return 0
}
}
}
function close_all_targets(connection_id) {
    for (var target in connections[connection_id].targets) {
if (connections[connection_id].targets[target] && connections[connection_id].targets[target].is_channel) {
close_channel(connection_id, target)
} else {
close_privmsg(connection_id, target)
}
}
}
function create_connection_if_necessary(connection_id, element) {
if (!connections[connection_id]) {
create_connection_element(connection_id, element.connection_count, element.server_name, element.server_port, element.nickname, element.real_name, element.last_activity)
return true
} else {
return false
}
}
function update_users(connection_id, channel, userlist) {
connections[connection_id].targets[channel]["opers"] = userlist.opers
connections[connection_id].targets[channel]["voicers"] = userlist.voicers
connections[connection_id].targets[channel]["users"] = userlist.users
}
function update_target_attributes(connection_id, target, history_element) {
connections[connection_id].targets[target]["topic"] = history_element.topic
connections[connection_id].targets[target]["topic_creator"] = history_element.topic_creator
connections[connection_id].targets[target]["topic_creation_time"] = history_element.topic_creation_time
connections[connection_id].targets[target]["last_activity"] = history_element.last_activity
}
function update_read_count(connection_id, msg_id) {
if (last_read[connection_id] && msg_id > last_read[connection_id]) {
last_read[connection_id] = msg_id
}
}
function root_log(connection_id, line) {
check_for_timestamp(connection_id, null, line.timestamp)
switch (line.type) {
case NOTICE:
root_notice(connection_id, line.source, line.msg)
add_unread(connection_id, null, line.msg_id, false)
break
case SERVER:
irc_server(connection_id, line.source, line.tag, line.params)
add_unread(connection_id, null, line.msg_id, false)
break
case SERVER_ERROR:
irc_server_error(connection_id, line.source, line.tag, line.params)
add_unread(connection_id, null, line.msg_id, true)
break
case MODE:
irc_user_mode(connection_id, line.source, line.target, line.add_mode, line.mode_char, line.param)
break
case CTCP:
irc_ctcp(connection_id, line.source, line.cmd, line.param, line.response)
add_unread(connection_id, null, line.msg_id)
break
case JOIN:
irc_self_join(connection_id, line.channel)
break
case PART:
irc_self_part(connection_id, line.channel, line.msg)
break
case KICK:
irc_self_kick(connection_id, line.source, line.channel, line.reason)
break
case NICK:
irc_self_nick(connection_id, line.new_nickname)
break
case CLIENT_ERROR:
irc_client_error(connection_id, line.tag, line.params)
add_unread(connection_id, null, line.msg_id, true)
break
}
}
function get_unread_div(connection_id, target) {
if (target) {
return connections[connection_id].targets[target].div_unread
} else {
return connections[connection_id].div_unread
}
}
function is_current(connection_id, target) {
if (connection_id == current.connection_id && target == current.target) {
return true
} else {
return false
}
}
function create_bookmark(connection_id, target, value) {
if (!bookmarks[connection_id]) {
bookmarks[connection_id] = new Object()
bookmarks[connection_id].targets = new Object()
}
if (target) {
bookmarks[connection_id].targets[target] = value
} else {
bookmarks[connection_id]["msg_id"] = value
}
}
function get_bookmark(connection_id, target) {
if (!bookmarks[connection_id]) {
return 0
}
if (target) {
if (!bookmarks[connection_id].targets[target]) {
return 0
} else {
return bookmarks[connection_id].targets[target]
}
} else {
if (!bookmarks[connection_id].msg_id) {
return 0
} else {
return bookmarks[connection_id].msg_id
}
}
}
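// Increments the unread badge for a target that is not currently displayed (and only for
// messages past its bookmark); for the visible target it just advances the bookmark.
// Highlighted lines also add the "highlighted" class to the badge.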
function add_unread(connection_id, target, msg_id, highlighted) {
var div_unread = get_unread_div(connection_id, target)
if (is_current(connection_id, target)) {
create_bookmark(connection_id, target, msg_id)
} else {
if (get_bookmark(connection_id, target) < msg_id) {
set_display(div_unread, "block")
div_unread.textContent = parseInt(div_unread.textContent) + 1
if (highlighted) {
div_unread.className = "small unread_count highlighted"
}
}
}
}
function clear_unread(connection_id, target, update_bookmark) {
var div_unread = get_unread_div(connection_id, target)
div_unread.className = "small unread_count"
set_display(div_unread, "none")
div_unread.textContent = 0
if (update_bookmark) {
create_bookmark(connection_id, target, last_read[connection_id])
jar.put("bookmarks", bookmarks)
}
}
function destroy_bookmarks(connection_id, target) {
if (target) {
if (bookmarks[connection_id] && bookmarks[connection_id].targets[target]) {
bookmarks[connection_id].targets[target] = undefined
jar.put("bookmarks", bookmarks)
}
} else {
if (bookmarks[connection_id]) {
bookmarks[connection_id] = undefined
jar.put("bookmarks", bookmarks)
}
}
}
function add_timestamp(connection_id, target, time, is_long) {
var div_timestamp = create_div("tiny timestamp")
if (is_long) {
div_timestamp.textContent = timestamp_long(time)
} else {
div_timestamp.textContent = timestamp_short(time)
}
add_activity(connection_id, target, div_timestamp)
return true
}
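// Inserts a timestamp divider when more than 15 minutes have passed since the previous
// line for this connection/target, using the long format after 24 hours or for the first
// line. Returns true if a timestamp was added.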
function check_for_timestamp(connection_id, target, time) {
if (target) {
var last_time = connections[connection_id].targets[target].last_timestamp
connections[connection_id].targets[target].last_timestamp = time
} else {
var last_time = connections[connection_id].last_timestamp
connections[connection_id]["last_timestamp"] = time
}
if (last_time) {
var diff_time = time - last_time
if (diff_time > 900) { // 15 minutes
if (diff_time > 86400) { // 24 hours
return add_timestamp(connection_id, target, time, true)
} else {
return add_timestamp(connection_id, target, time, false)
}
}
} else {
return add_timestamp(connection_id, target, time, true)
}
return false
}
function local_error(text) {
div_activity(current.connection_id, current.target).appendChild(create_activity_span("small server error", "INPUTERROR", "small server_info", text))
scroll_if_necessary(current.connection_id, current.target)
}
function irc_client_error(connection_id, tag, params) {
div_activity(connection_id).appendChild(create_activity_span("small server error", tag, "small server_info", params))
}
function irc_ctcp(connection_id, source, ctcp_cmd, ctcp_param, response) {
add_activity(connection_id, undefined, create_activity_span("small server source", source, "small server ctcp", "CTCP " + ctcp_cmd, "small server_info", "Response: " + response))
}
function mention_me(connection_id, text) {
if (text.toLowerCase().indexOf(connections[connection_id].nickname.toLowerCase()) == 0) {
return true
} else {
return false
}
}
function channel_log(connection_id, channel, line) {
var timestamp = check_for_timestamp(connection_id, channel, line.timestamp)
switch (line.type) {
case JOIN:
create_divider_if_necessary(connection_id, channel, false, timestamp)
irc_join(connection_id, channel, line.user, line)
update_user_list(connection_id, channel)
break
case PRIVMSG:
create_divider_if_necessary(connection_id, channel, true, timestamp)
irc_privmsg(connection_id, channel, line.source, line.msg)
add_unread(connection_id, channel, line.msg_id, mention_me(connection_id, line.msg))
break
case ACTION:
create_divider_if_necessary(connection_id, channel, true, timestamp)
irc_action(connection_id, channel, line.source, line.msg)
add_unread(connection_id, channel, line.msg_id, false)
break
case NOTICE:
create_divider_if_necessary(connection_id, channel, true, timestamp)
irc_notice(connection_id, channel, line.source, line.msg)
add_unread(connection_id, channel, line.msg_id, false)
break
case PART:
create_divider_if_necessary(connection_id, channel, false, timestamp)
irc_part(connection_id, channel, line.source, line.msg)
update_user_list(connection_id, channel)
break
case MODE:
create_divider_if_necessary(connection_id, channel, false, timestamp)
irc_channel_mode(connection_id, channel, line.source, line.target, line.add_mode, line.mode_char, line.param)
break
case TOPIC:
create_divider_if_necessary(connection_id, channel, false, timestamp)
irc_topic(connection_id, channel, line.source, line.text)
break
case KICK:
create_divider_if_necessary(connection_id, channel, false, timestamp)
irc_kick(connection_id, channel, line.source, line.user, line.reason)
update_user_list(connection_id, channel)
break
case NICK:
create_divider_if_necessary(connection_id, channel, false, timestamp)
irc_nick(connection_id, channel, line.user, line.new_nickname)
update_user_list(connection_id, channel)
break
case QUIT:
create_divider_if_necessary(connection_id, channel, false, timestamp)
irc_quit(connection_id, channel, line.user, line.msg)
update_user_list(connection_id, channel)
break
}
}
function irc_self_nick(connection_id, new_nickname) {
irc_server_narrative(connection_id, "You have changed your nickname to " + new_nickname)
}
function irc_nick(connection_id, channel, user, new_nickname) {
add_channel_narrative(connection_id, channel, user + " has changed their nickname to " + new_nickname)
}
function irc_quit(connection_id, channel, user, msg) {
add_channel_narrative(connection_id, channel, user + " has quit IRC" + extra_msg(msg))
}
function irc_kick(connection_id, channel, source, user, reason) {
add_channel_narrative(connection_id, channel, source + " has kicked " + user + " from " + channel + extra_msg(reason))
}
function add_channel_narrative(connection_id, channel, text) {
var div_narrative = create_div("activity_element small narrative")
div_narrative.textContent = text
linkify(div_narrative)
add_activity(connection_id, channel, div_narrative)
}
function irc_topic(connection_id, channel, source, text) {
if (text == "") {
add_channel_narrative(connection_id, channel, source + " has cleared the current topic")
} else {
add_channel_narrative(connection_id, channel, source + " has set the topic to “" + text + "”")
}
}
function mode_operator(is_plus) {
return (is_plus ? "+" : "-")
}
function mode_param(param) {
return (param ? " with the value of " + param : "")
}
function irc_channel_mode(connection_id, channel, source, target, add_mode, mode_char, param) {
switch(mode_char) {
case "o":
if (add_mode) {
op_user(connection_id, channel, source, param)
} else {
deop_user(connection_id, channel, source, param)
}
update_user_list(connection_id, channel)
break
case "v":
if (add_mode) {
voice_user(connection_id, channel, source, param)
} else {
devoice_user(connection_id, channel, source, param)
}
update_user_list(connection_id, channel)
break
default:
add_channel_narrative(connection_id, channel, source + " has set the mode of " + target + " to " + mode_operator(add_mode) + mode_char + mode_param(param))
}
}
function create_divider_if_necessary(connection_id, target, user_text, timestamp) {
if (!timestamp && (user_text || connections[connection_id].targets[target].divider_required)) {
div_activity(connection_id, target).appendChild(create_div("divider"))
}
connections[connection_id].targets[target].divider_required = user_text
}
function op_user(connection_id, channel, source, target) {
add_channel_narrative(connection_id, channel, source + " has given operator status to " + target)
}
function deop_user(connection_id, channel, source, target) {
add_channel_narrative(connection_id, channel, source + " has removed operator status from " + target)
}
function voice_user(connection_id, channel, source, target) {
add_channel_narrative(connection_id, channel, source + " has given voice status to " + target)
}
function devoice_user(connection_id, channel, source, target) {
add_channel_narrative(connection_id, channel, source + " has removed voice status from " + target)
}
function irc_privmsg(connection_id, channel, user, msg) {
add_activity(connection_id, channel, linkify(create_activity_span(nick_color(connection_id, user), user + ":", "conversation", msg)))
}
function add_link(text) {
if (text.match(/^www\./i)) {
return "<a href=\"http://" + text + "\" target=\"_blank\">" + text + "</a>"
} else {
return "<a href=\"" + text + "\" target=\"_blank\">" + text + "</a>"
}
}
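// Replaces http:// and www. URLs in the element's HTML with target="_blank" anchors,
// using Prototype's String#gsub.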
function linkify(element) {
element.innerHTML = element.innerHTML.gsub(/((http:\/\/|www\.)([A-z0-9.\/?=+-:%]|&)+)/i, function(match){return add_link(match[1])})
return element
}
function irc_action(connection_id, channel, user, msg) {
var span_action = create_activity_span(nick_color(connection_id, user), " * " + user + " " + msg)
linkify(span_action)
add_activity(connection_id, channel, span_action)
}
function irc_notice(connection_id, channel, user, msg) {
add_activity(connection_id, channel, create_activity_span(nick_color(connection_id, user), "[" + user + "]:", "conversation", msg))
}
function nick_color(connection_id, user) {
return ((user == connections[connection_id].nickname) ? "nick self" : "nick")
}
function irc_join(connection_id, channel, user) {
add_channel_narrative(connection_id, channel, user + " has joined " + channel)
}
function extra_msg(msg) {
return (msg ? " - “" + msg + "”" : "")
}
function irc_part(connection_id, channel, user, msg) {
add_channel_narrative(connection_id, channel, user + " has left " + channel + extra_msg(msg))
}
function irc_user_mode(connection_id, source, target, add_mode, mode_char, param) {
irc_server_narrative(connection_id, source + " has set the mode of " + target + " to " + mode_operator(add_mode) + mode_char)
}
function irc_self_join(connection_id, channel) {
irc_server_narrative(connection_id, "You have joined " + channel)
}
function irc_self_part(connection_id, channel, msg) {
irc_server_narrative(connection_id, "You have left " + channel + extra_msg(msg))
}
function irc_self_kick(connection_id, user, channel, reason) {
irc_server_narrative(connection_id, "You have been kicked from " + channel + " by " + user + extra_msg(reason))
}
function irc_server_narrative(connection_id, text) {
add_activity(connection_id, null, create_activity_div("small server_narrative", text))
}
function root_notice(connection_id, source, msg) {
if (source) {
var span_root_notice = create_activity_span("small server server_source", source, "small server notice", "NOTICE", "small server_info", msg)
} else {
var span_root_notice = create_activity_span("small server notice", "NOTICE", "small server_info", msg)
}
linkify(span_root_notice)
div_activity(connection_id).appendChild(span_root_notice)
}
function create_target_if_necessary(connection_id, target, is_channel) {
if (!connections[connection_id].targets[target]) {
create_target_element(connection_id, target, is_channel)
return true
} else {
return false
}
}
function scroll_if_necessary(connection_id, target) {
if (is_current(connection_id, target)) {
scroll_to_bottom()
update_topic(connection_id, target)
update_activity(connection_id, target)
}
}
function irc_server(connection_id, source, tag, text) {
if (tag == "MOTD") {
var div_motd = create_div("motd small")
div_motd.textContent = text
no_breaking_spaces(div_motd)
linkify(div_motd)
div_activity(connection_id).appendChild(div_motd)
} else {
var span_server = create_activity_span("small server server_source", source, "small server", tag, "small server_info", text)
linkify(span_server)
div_activity(connection_id).appendChild(span_server)
}
}
function no_breaking_spaces(element) {
element.innerHTML = element.innerHTML.gsub(" ", " ")
}
function irc_server_error(connection_id, source, tag, text) {
div_activity(connection_id).appendChild(create_activity_span("small server server_source", source, "small server error", tag, "small server_info", text))
}
function create_element(type, class_name, id) {
var element = document.createElement(type)
if (class_name) {element.setAttribute("class", class_name)}
if (id) {element.setAttribute("id", id)}
return element
}
function create_div(class_name, id) {
return create_element("div", class_name, id)
}
function activity_element() {
return create_div("activity_element small")
}
function create_activity_div(type, text) {
var div_text = create_element("div", type)
div_text.textContent = text
return div_text
}
function create_activity_span(type_1, text_1, type_2, text_2, type_3, text_3) {
var div_activity_element = activity_element()
var span_text_1 = create_element("span", type_1)
span_text_1.textContent = text_1
div_activity_element.appendChild(span_text_1)
if (type_2 && text_2) {
var span_text_2 = create_element("span", type_2)
span_text_2.textContent = text_2
div_activity_element.appendChild(span_text_2)
}
if (type_3 && text_3) {
var span_text_3 = create_element("span", type_3)
span_text_3.textContent = text_3
div_activity_element.appendChild(span_text_3)
}
return div_activity_element
}
function div_activity(connection_id, target) {
if (target) {
return connections[connection_id].targets[target].div_activity
} else {
return connections[connection_id].div_activity
}
}
function add_activity(connection_id, target, child) {
div_activity(connection_id, target).appendChild(child)
}
function join(connection_id) {
connections[connection_id].join_button.style.display = "none"
connections[connection_id].join_channel.style.display = "block"
connections[connection_id].join_input.value = "#"
connections[connection_id].join_input.focus()
}
function cancel_join(connection_id) {
connections[connection_id].join_button.style.display = "block"
connections[connection_id].join_channel.style.display = "none"
}
function join_input(connection_id) {
join_request(connection_id, connections[connection_id].join_input.value)
cancel_join(connection_id)
}
function div_connection_item(connection_id, target) {
if (target) {
return connections[connection_id].targets[target].div_target
} else {
return connections[connection_id].div_server
}
}
function disconnect(connection_id) {
close_request(connection_id)
}
function close_window(connection_id, target) {
if (connections[connection_id].targets[target].is_channel) {
part_request(connection_id, target)
} else {
close_request(connection_id, target)
}
}
function scroll_to_bottom() {
var div_element = $("activity")
div_element.scrollTop = div_element.scrollHeight
}
function show(element) {
set_display($(element), "block")
}
function hide(element) {
set_display($(element), "none")
}
function set_display(element, value) {
element.style.display = value
}
function remove_all(element) {
while(element.firstChild) {
element.removeChild(element.firstChild)
}
}
|
give the text input control focus when the browser window gains focus.
|
public/javascript/script.js
|
give the text input control focus when the browser window gains focus.
|
<ide><path>ublic/javascript/script.js
<ide>
<ide> window.onresize = function() {
<ide> update_activity_width()
<add>}
<add>
<add>window.onfocus = function() {
<add> $("msg").focus()
<ide> }
<ide>
<ide> function update_activity_width() {
|
|
Java
|
apache-2.0
|
deec45c6ce378b4f00df1559ee94154282e0cd66
| 0 |
infojulio/androidbible,yukuku/androidbible,Jaden-J/androidbible,yukuku/androidbible,yukuku/androidbible,infojulio/androidbible,yukuku/androidbible,infojulio/androidbible,arnotixe/androidbible,Jaden-J/androidbible,infojulio/androidbible,infojulio/androidbible,arnotixe/androidbible,Jaden-J/androidbible,arnotixe/androidbible,Jaden-J/androidbible,infojulio/androidbible,infojulio/androidbible,yukuku/androidbible,yukuku/androidbible,yukuku/androidbible,Jaden-J/androidbible,yukuku/androidbible,Jaden-J/androidbible,infojulio/androidbible,arnotixe/androidbible,arnotixe/androidbible,Jaden-J/androidbible
|
package yuku.alkitab.base.ac;
import android.app.AlertDialog;
import android.app.DatePickerDialog;
import android.content.DialogInterface;
import android.content.Intent;
import android.os.Bundle;
import android.support.v7.app.ActionBar;
import android.support.v7.app.ActionBarActivity;
import android.support.v7.widget.PopupMenu;
import android.text.SpannableStringBuilder;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.BaseAdapter;
import android.widget.Button;
import android.widget.CheckBox;
import android.widget.CompoundButton;
import android.widget.DatePicker;
import android.widget.FrameLayout;
import android.widget.ImageButton;
import android.widget.LinearLayout;
import android.widget.ListView;
import android.widget.ProgressBar;
import android.widget.TextView;
import yuku.afw.App;
import yuku.afw.V;
import yuku.afw.storage.Preferences;
import yuku.alkitab.base.S;
import yuku.alkitab.base.config.AppConfig;
import yuku.alkitab.base.model.Ari;
import yuku.alkitab.base.model.ReadingPlan;
import yuku.alkitab.base.model.Version;
import yuku.alkitab.base.storage.Prefkey;
import yuku.alkitab.base.util.IntArrayList;
import yuku.alkitab.base.util.ReadingPlanManager;
import yuku.alkitab.debug.R;
import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.List;
public class ReadingPlanActivity extends ActionBarActivity {
public static final String READING_PLAN_ARI_RANGES = "reading_plan_ari_ranges";
public static final String READING_PLAN_ID = "reading_plan_id";
public static final String READING_PLAN_DAY_NUMBER = "reading_plan_day_number";
private ReadingPlan readingPlan;
private List<ReadingPlan.ReadingPlanInfo> downloadedReadingPlanInfos;
private int todayNumber;
private int dayNumber;
private IntArrayList readingCodes;
private boolean newDropDownItems;
private ImageButton bLeft;
private ImageButton bRight;
private Button bToday;
private ListView lsReadingPlan;
private ReadingPlanAdapter readingPlanAdapter;
private ActionBar actionBar;
private LinearLayout llNavigations;
private FrameLayout flNoData;
private Button bDownload;
private boolean showDetail;
public static Intent createIntent(int dayNumber) {
Intent intent = new Intent(App.context, ReadingPlanActivity.class);
intent.putExtra(READING_PLAN_DAY_NUMBER, dayNumber);
return intent;
}
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_reading_plan);
llNavigations = V.get(this, R.id.llNavigations);
flNoData = V.get(this, R.id.flNoDataContainer);
lsReadingPlan = V.get(this, R.id.lsTodayReadings);
bToday = V.get(this, R.id.bToday);
bLeft = V.get(this, R.id.bLeft);
bRight = V.get(this, R.id.bRight);
bDownload = V.get(this, R.id.bDownload);
actionBar = getSupportActionBar();
long id = Preferences.getLong(Prefkey.active_reading_plan, 0);
loadReadingPlan(id);
loadReadingPlanProgress();
loadDayNumber();
prepareDropDownNavigation();
prepareDisplay();
}
@Override
public boolean onCreateOptionsMenu(final Menu menu) {
getMenuInflater().inflate(R.menu.activity_reading_plan, menu);
return true;
}
@Override
public boolean onOptionsItemSelected(final MenuItem item) {
int itemId = item.getItemId();
if (itemId == R.id.menuReset) {
resetReadingPlan();
return true;
} else if (itemId == R.id.menuDownload) {
downloadReadingPlan();
return true;
} else if (itemId == R.id.menuDelete) {
deleteReadingPlan();
return true;
} else if (itemId == R.id.menuAbout) {
showAbout();
}
return super.onOptionsItemSelected(item);
}
private void loadReadingPlan(long id) {
downloadedReadingPlanInfos = S.getDb().listAllReadingPlanInfo();
if (downloadedReadingPlanInfos.size() == 0) {
return;
}
long startDate = 0;
if (id == 0) {
id = downloadedReadingPlanInfos.get(0).id;
startDate = downloadedReadingPlanInfos.get(0).startDate;
} else {
for (ReadingPlan.ReadingPlanInfo info : downloadedReadingPlanInfos) {
if (id == info.id) {
startDate = info.startDate;
}
}
}
byte[] binaryReadingPlan = S.getDb().getBinaryReadingPlanById(id);
InputStream inputStream = new ByteArrayInputStream(binaryReadingPlan);
ReadingPlan res = ReadingPlanManager.readVersion1(inputStream);
res.info.id = id;
res.info.startDate = startDate;
readingPlan = res;
Preferences.setLong(Prefkey.active_reading_plan, id);
}
private void loadReadingPlanProgress() {
if (readingPlan == null) {
return;
}
readingCodes = S.getDb().getAllReadingCodesByReadingPlanId(readingPlan.info.id);
}
public void goToIsiActivity(final int dayNumber, final int sequence) {
final int[] selectedVerses = readingPlan.dailyVerses.get(dayNumber);
int ari = selectedVerses[sequence * 2];
Intent intent = new Intent();
intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
intent.putExtra("ari", ari);
intent.putExtra(READING_PLAN_ID, readingPlan.info.id);
intent.putExtra(READING_PLAN_DAY_NUMBER, dayNumber);
intent.putExtra(READING_PLAN_ARI_RANGES, selectedVerses);
setResult(RESULT_OK, intent);
finish();
}
private void loadDayNumber() {
if (readingPlan == null) {
return;
}
Calendar startCalendar = GregorianCalendar.getInstance();
startCalendar.setTime(new Date(readingPlan.info.startDate));
todayNumber = calculateDaysDiff(startCalendar, GregorianCalendar.getInstance());
dayNumber = getIntent().getIntExtra(READING_PLAN_DAY_NUMBER, -1);
if (dayNumber == -1) {
dayNumber = todayNumber;
}
}
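// Normalizes both calendars to midnight and returns the difference in whole days.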
private int calculateDaysDiff(Calendar startCalendar, Calendar endCalendar) {
startCalendar.set(Calendar.HOUR_OF_DAY, 0);
startCalendar.set(Calendar.MINUTE, 0);
startCalendar.set(Calendar.SECOND, 0);
startCalendar.set(Calendar.MILLISECOND, 0);
endCalendar.set(Calendar.HOUR_OF_DAY, 0);
endCalendar.set(Calendar.MINUTE, 0);
endCalendar.set(Calendar.SECOND, 0);
endCalendar.set(Calendar.MILLISECOND, 0);
return (int) ((endCalendar.getTime().getTime() - startCalendar.getTime().getTime()) / (1000 * 60 * 60 * 24));
}
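// Configures the action bar: plain title when no plans are downloaded, otherwise a
// list-navigation drop-down of the downloaded plans with the active one pre-selected.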
public boolean prepareDropDownNavigation() {
if (downloadedReadingPlanInfos.size() == 0) {
actionBar.setDisplayShowTitleEnabled(true);
actionBar.setNavigationMode(ActionBar.NAVIGATION_MODE_STANDARD);
return true;
}
actionBar.setDisplayShowTitleEnabled(false);
actionBar.setNavigationMode(ActionBar.NAVIGATION_MODE_LIST);
long id = Preferences.getLong(Prefkey.active_reading_plan, 0);
int itemNumber = 0;
//Drop-down navigation
List<String> titles = new ArrayList<String>();
for (int i = 0; i < downloadedReadingPlanInfos.size(); i++) {
ReadingPlan.ReadingPlanInfo info = downloadedReadingPlanInfos.get(i);
titles.add(info.title);
if (info.id == id) {
itemNumber = i;
}
}
ArrayAdapter<String> navigationAdapter = new ArrayAdapter<String>(this, R.layout.item_dropdown_reading_plan, titles);
newDropDownItems = false;
actionBar.setListNavigationCallbacks(navigationAdapter, new ActionBar.OnNavigationListener() {
@Override
public boolean onNavigationItemSelected(final int i, final long l) {
if (newDropDownItems) {
loadReadingPlan(downloadedReadingPlanInfos.get(i).id);
loadReadingPlanProgress();
prepareDisplay();
}
return true;
}
});
actionBar.setSelectedNavigationItem(itemNumber);
newDropDownItems = true;
return false;
}
public void prepareDisplay() {
if (readingPlan == null) {
llNavigations.setVisibility(View.GONE);
lsReadingPlan.setVisibility(View.GONE);
flNoData.setVisibility(View.VISIBLE);
bDownload.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(final View v) {
downloadReadingPlan();
}
});
return;
}
llNavigations.setVisibility(View.VISIBLE);
lsReadingPlan.setVisibility(View.VISIBLE);
flNoData.setVisibility(View.GONE);
//Listviews
readingPlanAdapter = new ReadingPlanAdapter();
readingPlanAdapter.load();
lsReadingPlan.setAdapter(readingPlanAdapter);
lsReadingPlan.setOnItemClickListener(new AdapterView.OnItemClickListener() {
@Override
public void onItemClick(final AdapterView<?> parent, final View view, final int position, final long id) {
final int todayReadingsSize = readingPlan.dailyVerses.get(dayNumber).length / 2;
if (position < todayReadingsSize) {
goToIsiActivity(dayNumber, position);
} else if (position > todayReadingsSize) {
goToIsiActivity(position - todayReadingsSize - 1, 0);
}
}
});
//buttons
updateButtonStatus();
bToday.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(final View v) {
final PopupMenu popupMenu = new PopupMenu(ReadingPlanActivity.this, v);
popupMenu.getMenu().add(Menu.NONE, 1, 1, getString(R.string.rp_showCalendar));
popupMenu.getMenu().add(Menu.NONE, 2, 2, getString(R.string.rp_gotoFirstUnread));
popupMenu.getMenu().add(Menu.NONE, 3, 3, getString(R.string.rp_gotoToday));
popupMenu.setOnMenuItemClickListener(new PopupMenu.OnMenuItemClickListener() {
@Override
public boolean onMenuItemClick(final MenuItem menuItem) {
popupMenu.dismiss();
int itemId = menuItem.getItemId();
if (itemId == 1) {
showCalendar();
} else if (itemId == 2) {
gotoFirstUnread();
} else if (itemId == 3) {
gotoToday();
}
return true;
}
});
popupMenu.show();
}
private void gotoToday() {
loadDayNumber();
changeDay(0);
}
private void gotoFirstUnread() {
dayNumber = findFirstUnreadDay(readingPlan.info.duration - 1);
changeDay(0);
}
private void showCalendar() {
Calendar calendar = Calendar.getInstance();
calendar.setTime(new Date(readingPlan.info.startDate));
calendar.add(Calendar.DATE, dayNumber);
DatePickerDialog.OnDateSetListener dateSetListener = new DatePickerDialog.OnDateSetListener() {
@Override
public void onDateSet(final DatePicker view, final int year, final int monthOfYear, final int dayOfMonth) {
Calendar newCalendar = new GregorianCalendar(year, monthOfYear, dayOfMonth);
Calendar startCalendar = GregorianCalendar.getInstance();
startCalendar.setTime(new Date(readingPlan.info.startDate));
int newDay = calculateDaysDiff(startCalendar, newCalendar);
if (newDay < 0) {
newDay = 0;
} else if (newDay >= readingPlan.info.duration) {
newDay = readingPlan.info.duration - 1;
}
dayNumber = newDay;
changeDay(0);
}
};
DatePickerDialog datePickerDialog = new DatePickerDialog(ReadingPlanActivity.this, dateSetListener, calendar.get(Calendar.YEAR), calendar.get(Calendar.MONTH), calendar.get(Calendar.DAY_OF_MONTH));
datePickerDialog.show();
}
});
bLeft.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(final View v) {
changeDay(-1);
}
});
bRight.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(final View v) {
changeDay(+1);
}
});
}
private void resetReadingPlan() {
new AlertDialog.Builder(this)
.setMessage(getString(R.string.rp_reset))
.setPositiveButton(R.string.ok, new DialogInterface.OnClickListener() {
@Override
public void onClick(final DialogInterface dialog, final int which) {
int lastUnreadDay = findFirstUnreadDay(dayNumber);
Calendar calendar = GregorianCalendar.getInstance();
calendar.add(Calendar.DATE, -lastUnreadDay);
S.getDb().updateStartDate(readingPlan.info.id, calendar.getTime().getTime());
loadReadingPlan(readingPlan.info.id);
loadDayNumber();
readingPlanAdapter.load();
readingPlanAdapter.notifyDataSetChanged();
updateButtonStatus();
}
})
.setNegativeButton(R.string.cancel, null)
.show();
}
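// Scans days 0..dayUntil and returns the first day that still has an unread reading,
// or dayUntil itself when everything before it has been read.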
private int findFirstUnreadDay(final int dayUntil) {
int firstUnreadDay = dayUntil;
loop1:
for (int i = 0; i < dayUntil; i++) {
boolean[] readMarks = new boolean[readingPlan.dailyVerses.get(i).length];
ReadingPlanManager.writeReadMarksByDay(readingCodes, readMarks, i);
for (boolean readMark : readMarks) {
if (!readMark) {
firstUnreadDay = i;
break loop1;
}
}
}
return firstUnreadDay;
}
private void deleteReadingPlan() {
new AlertDialog.Builder(this)
.setMessage(getString(R.string.rp_deletePlan, readingPlan.info.title))
.setPositiveButton(R.string.ok, new DialogInterface.OnClickListener() {
@Override
public void onClick(final DialogInterface dialog, final int which) {
S.getDb().deleteReadingPlanById(readingPlan.info.id);
readingPlan = null;
Preferences.remove(Prefkey.active_reading_plan);
loadReadingPlan(0);
loadReadingPlanProgress();
loadDayNumber();
prepareDropDownNavigation();
prepareDisplay();
}
})
.setNegativeButton(R.string.cancel, null)
.show();
}
private void showAbout() {
SpannableStringBuilder sb = new SpannableStringBuilder();
sb.append("Title: " + readingPlan.info.title);
sb.append("\nDescription: " + readingPlan.info.description);
sb.append("\nDuration: " + readingPlan.info.duration);
new AlertDialog.Builder(ReadingPlanActivity.this)
.setMessage(sb)
.setPositiveButton(R.string.ok, null)
.show();
}
private void changeDay(int day) {
dayNumber += day;
readingPlanAdapter.load();
readingPlanAdapter.notifyDataSetChanged();
updateButtonStatus();
}
private void updateButtonStatus() { //TODO look disabled
if (dayNumber == 0) {
bLeft.setEnabled(false);
bRight.setEnabled(true);
} else if (dayNumber == readingPlan.info.duration - 1) {
bLeft.setEnabled(true);
bRight.setEnabled(false);
} else {
bLeft.setEnabled(true);
bRight.setEnabled(true);
}
bToday.setText(getReadingDateHeader(dayNumber));
}
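// Offers the bundled reading plans that are not yet in the database (filenames resolved
// to raw resources) and installs the selected one as the active plan.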
private void downloadReadingPlan() {
AppConfig config = AppConfig.get();
final List<ReadingPlan.ReadingPlanInfo> infos = config.readingPlanInfos;
final List<String> readingPlanTitles = new ArrayList<String>();
final List<Integer> resources = new ArrayList<Integer>();
for (int i = 0; i < infos.size(); i++) {
String title = infos.get(i).title;
boolean downloaded = false;
for (ReadingPlan.ReadingPlanInfo downloadedReadingPlanInfo : downloadedReadingPlanInfos) {
if (title.equals(downloadedReadingPlanInfo.title)) {
downloaded = true;
break;
}
}
if (!downloaded) {
readingPlanTitles.add(title);
String filename = infos.get(i).filename.replace(".rpb", ""); //TODO: proper method. testing only
resources.add(getResources().getIdentifier(filename, "raw", getPackageName())); //TODO: proper method
}
}
if (readingPlanTitles.size() == 0) {
new AlertDialog.Builder(this)
.setMessage(getString(R.string.rp_noReadingPlanAvailable))
.setPositiveButton(R.string.ok, null)
.show();
} else {
AlertDialog.Builder builder = new AlertDialog.Builder(this);
builder.setAdapter(new ArrayAdapter<String>(this, android.R.layout.simple_list_item_1, readingPlanTitles), new DialogInterface.OnClickListener() {
@Override
public void onClick(final DialogInterface dialog, final int which) {
long id = ReadingPlanManager.copyReadingPlanToDb(resources.get(which));
Preferences.setLong(Prefkey.active_reading_plan, id);
loadDayNumber();
loadReadingPlan(id);
loadReadingPlanProgress();
prepareDropDownNavigation();
prepareDisplay();
dialog.dismiss();
}
})
.setNegativeButton("Cancel", null)
.show();
}
}
private float getActualPercentage() {
float res = (float) countRead() / (float) countAllReadings() * 100;
res = (float)Math.round(res * 100) / 100;
return res;
}
private float getTargetPercentage() {
float res = (float) countTarget() / (float) countAllReadings() * 100;
res = (float)Math.round(res * 100) / 100;
return res;
}
private int countRead() {
IntArrayList filteredReadingCodes = ReadingPlanManager.filterReadingCodesByDayStartEnd(readingCodes, 0, todayNumber);
return filteredReadingCodes.size();
}
private int countTarget() {
int res = 0;
for (int i = 0; i <= todayNumber; i++) {
res += readingPlan.dailyVerses.get(i).length / 2;
}
return res;
}
private int countAllReadings() {
int res = 0;
for (int i = 0; i < readingPlan.info.duration; i++) {
res += readingPlan.dailyVerses.get(i).length / 2;
}
return res;
}
public String getReadingDateHeader(final int dayNumber) {
Calendar calendar = Calendar.getInstance();
calendar.setTime(new Date(readingPlan.info.startDate));
calendar.add(Calendar.DATE, dayNumber);
String date = getString(R.string.rp_dayHeader, (dayNumber + 1), new SimpleDateFormat("MMMM dd, yyyy").format(calendar.getTime()));
return date;
}
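// Builds a human-readable reference (book, chapter and verse range) from an ari
// start/end pair; a start verse of 0 denotes whole chapters.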
public static SpannableStringBuilder getReference(Version version, int[] ari) {
SpannableStringBuilder sb = new SpannableStringBuilder();
String book = version.getBook(Ari.toBook(ari[0])).shortName;
sb.append(book);
int startChapter = Ari.toChapter(ari[0]);
int startVerse = Ari.toVerse(ari[0]);
int lastVerse = Ari.toVerse(ari[1]);
int lastChapter = Ari.toChapter(ari[1]);
sb.append(" " + startChapter);
if (startVerse == 0) {
if (lastVerse == 0) {
if (startChapter != lastChapter) {
sb.append("-" + lastChapter);
}
} else {
sb.append("-" + lastChapter + ":" + lastVerse);
}
} else {
if (startChapter == lastChapter) {
sb.append(":" + startVerse + "-" + lastVerse);
} else {
sb.append(":" + startVerse + "-" + lastChapter + ":" + lastVerse);
}
}
return sb;
}
class ReadingPlanAdapter extends BaseAdapter {
private int[] todayReadings;
public void load() {
todayReadings = readingPlan.dailyVerses.get(dayNumber);
}
@Override
public int getCount() {
if (showDetail) {
return (todayReadings.length / 2) + readingPlan.info.duration + 1;
} else {
return (todayReadings.length / 2) + 1;
}
}
@Override
public View getView(final int position, View convertView, final ViewGroup parent) {
final int itemViewType = getItemViewType(position);
if (itemViewType == 0) {
CheckBox checkBox = new CheckBox(ReadingPlanActivity.this);
LinearLayout layout = new LinearLayout(ReadingPlanActivity.this);
layout.addView(checkBox);
convertView = layout;
boolean[] readMarks = new boolean[todayReadings.length];
ReadingPlanManager.writeReadMarksByDay(readingCodes, readMarks, dayNumber);
if (readMarks[position * 2]) {
checkBox.setChecked(true);
} else {
checkBox.setChecked(false);
}
checkBox.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() {
@Override
public void onCheckedChanged(final CompoundButton buttonView, final boolean isChecked) {
ReadingPlanManager.updateReadingPlanProgress(readingPlan.info.id, dayNumber, position, isChecked);
loadReadingPlanProgress();
load();
notifyDataSetChanged();
}
});
int start = position * 2;
int[] aris = {todayReadings[start], todayReadings[start + 1]};
checkBox.setText(getReference(S.activeVersion, aris));
checkBox.setFocusable(false);
LinearLayout.LayoutParams layoutParams = new LinearLayout.LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT);
checkBox.setLayoutParams(layoutParams);
} else if (itemViewType == 1) {
if (convertView == null) {
convertView = getLayoutInflater().inflate(R.layout.item_reading_plan_summary, parent, false);
}
final ProgressBar pbReadingProgress = V.get(convertView, R.id.pbReadingProgress);
final TextView tActual = V.get(convertView, R.id.tActual);
final TextView tTarget = V.get(convertView, R.id.tTarget);
final TextView tComment = V.get(convertView, R.id.tComment);
final TextView tDetail = V.get(convertView, R.id.tDetail);
float actualPercentage = getActualPercentage();
float targetPercentage = getTargetPercentage();
pbReadingProgress.setMax(100);
pbReadingProgress.setProgress((int) actualPercentage);
pbReadingProgress.setSecondaryProgress((int) targetPercentage);
tActual.setText(getString(R.string.rp_commentActual, actualPercentage));
tTarget.setText(getString(R.string.rp_commentTarget, targetPercentage));
String comment;
if (actualPercentage == targetPercentage) {
comment = getString(R.string.rp_commentOnSchedule);
} else {
float diff = (float) Math.round((targetPercentage - actualPercentage) * 100) / 100;
comment = getString(R.string.rp_commentBehindSchedule, diff);
}
tComment.setText(comment);
tDetail.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(final View v) {
showDetail = !showDetail;
if (showDetail) {
tDetail.setText(R.string.rp_hideDetail);
} else {
tDetail.setText(R.string.rp_showDetail);
}
notifyDataSetChanged();
}
});
} else if (itemViewType == 2) {
if (convertView == null) {
convertView = getLayoutInflater().inflate(R.layout.item_reading_plan_one_day, parent, false);
}
final LinearLayout layout = V.get(convertView, R.id.llOneDayReadingPlan);
final int currentViewTypePosition = position - todayReadings.length / 2 - 1;
//Text title
TextView tTitle = V.get(convertView, android.R.id.text1);
tTitle.setText(getReadingDateHeader(currentViewTypePosition));
//Text reading
while (true) {
final View reading = layout.findViewWithTag("reading");
if (reading != null) {
layout.removeView(reading);
} else {
break;
}
}
int[] aris = readingPlan.dailyVerses.get(currentViewTypePosition);
for (int i = 0; i < aris.length / 2; i++) {
final int ariPosition = i;
int[] ariStartEnd = {aris[i * 2], aris[i * 2 + 1]};
final SpannableStringBuilder reference = getReference(S.activeVersion, ariStartEnd);
CheckBox checkBox = new CheckBox(ReadingPlanActivity.this);
checkBox.setText(reference);
checkBox.setTag("reading");
boolean[] readMarks = new boolean[aris.length];
ReadingPlanManager.writeReadMarksByDay(readingCodes, readMarks, currentViewTypePosition);
checkBox.setChecked(readMarks[ariPosition * 2]);
checkBox.setFocusable(false);
LinearLayout.LayoutParams layoutParams = new LinearLayout.LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.MATCH_PARENT);
checkBox.setLayoutParams(layoutParams);
checkBox.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() {
@Override
public void onCheckedChanged(final CompoundButton buttonView, final boolean isChecked) {
ReadingPlanManager.updateReadingPlanProgress(readingPlan.info.id, currentViewTypePosition, ariPosition, isChecked);
loadReadingPlanProgress();
load();
notifyDataSetChanged();
}
});
layout.addView(checkBox);
}
}
return convertView;
}
@Override
public Object getItem(final int position) {
return null;
}
@Override
public long getItemId(final int position) {
return 0;
}
@Override
public int getViewTypeCount() {
return 3;
}
@Override
public int getItemViewType(final int position) {
if (position < todayReadings.length / 2) {
return 0;
} else if (position == todayReadings.length / 2) {
return 1;
} else {
return 2;
}
}
}
}
|
Alkitab/src/yuku/alkitab/base/ac/ReadingPlanActivity.java
|
package yuku.alkitab.base.ac;
import android.app.AlertDialog;
import android.app.DatePickerDialog;
import android.content.DialogInterface;
import android.content.Intent;
import android.os.Bundle;
import android.support.v7.app.ActionBar;
import android.support.v7.app.ActionBarActivity;
import android.support.v7.widget.PopupMenu;
import android.text.SpannableStringBuilder;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.BaseAdapter;
import android.widget.Button;
import android.widget.CheckBox;
import android.widget.CompoundButton;
import android.widget.DatePicker;
import android.widget.FrameLayout;
import android.widget.ImageButton;
import android.widget.LinearLayout;
import android.widget.ListView;
import android.widget.ProgressBar;
import android.widget.TextView;
import yuku.afw.App;
import yuku.afw.V;
import yuku.afw.storage.Preferences;
import yuku.alkitab.base.S;
import yuku.alkitab.base.config.AppConfig;
import yuku.alkitab.base.model.Ari;
import yuku.alkitab.base.model.ReadingPlan;
import yuku.alkitab.base.model.Version;
import yuku.alkitab.base.storage.Prefkey;
import yuku.alkitab.base.util.IntArrayList;
import yuku.alkitab.base.util.ReadingPlanManager;
import yuku.alkitab.debug.R;
import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.List;
public class ReadingPlanActivity extends ActionBarActivity {
public static final String READING_PLAN_ARI_RANGES = "reading_plan_ari_ranges";
public static final String READING_PLAN_ID = "reading_plan_id";
public static final String READING_PLAN_DAY_NUMBER = "reading_plan_day_number";
private ReadingPlan readingPlan;
private List<ReadingPlan.ReadingPlanInfo> downloadedReadingPlanInfos;
private int todayNumber;
private int dayNumber;
private IntArrayList readingCodes;
private boolean newDropDownItems;
private ImageButton bLeft;
private ImageButton bRight;
private Button bToday;
private ListView lsReadingPlan;
private ReadingPlanAdapter readingPlanAdapter;
private ActionBar actionBar;
private LinearLayout llNavigations;
private FrameLayout flNoData;
private Button bDownload;
private boolean showDetail;
public static Intent createIntent(int dayNumber) {
Intent intent = new Intent(App.context, ReadingPlanActivity.class);
intent.putExtra(READING_PLAN_DAY_NUMBER, dayNumber);
return intent;
}
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_reading_plan);
llNavigations = V.get(this, R.id.llNavigations);
flNoData = V.get(this, R.id.flNoDataContainer);
lsReadingPlan = V.get(this, R.id.lsTodayReadings);
bToday = V.get(this, R.id.bToday);
bLeft = V.get(this, R.id.bLeft);
bRight = V.get(this, R.id.bRight);
bDownload = V.get(this, R.id.bDownload);
actionBar = getSupportActionBar();
long id = Preferences.getLong(Prefkey.active_reading_plan, 0);
loadReadingPlan(id);
loadReadingPlanProgress();
loadDayNumber();
prepareDropDownNavigation();
prepareDisplay();
}
@Override
public boolean onCreateOptionsMenu(final Menu menu) {
getMenuInflater().inflate(R.menu.activity_reading_plan, menu);
return true;
}
@Override
public boolean onOptionsItemSelected(final MenuItem item) {
int itemId = item.getItemId();
if (itemId == R.id.menuReset) {
resetReadingPlan();
return true;
} else if (itemId == R.id.menuDownload) {
downloadReadingPlan();
return true;
} else if (itemId == R.id.menuDelete) {
deleteReadingPlan();
return true;
} else if (itemId == R.id.menuAbout) {
showAbout();
}
return super.onOptionsItemSelected(item);
}
private void loadReadingPlan(long id) {
downloadedReadingPlanInfos = S.getDb().listAllReadingPlanInfo();
if (downloadedReadingPlanInfos.size() == 0) {
return;
}
long startDate = 0;
if (id == 0) {
id = downloadedReadingPlanInfos.get(0).id;
startDate = downloadedReadingPlanInfos.get(0).startDate;
} else {
for (ReadingPlan.ReadingPlanInfo info : downloadedReadingPlanInfos) {
if (id == info.id) {
startDate = info.startDate;
}
}
}
byte[] binaryReadingPlan = S.getDb().getBinaryReadingPlanById(id);
InputStream inputStream = new ByteArrayInputStream(binaryReadingPlan);
ReadingPlan res = ReadingPlanManager.readVersion1(inputStream);
res.info.id = id;
res.info.startDate = startDate;
readingPlan = res;
Preferences.setLong(Prefkey.active_reading_plan, id);
}
private void loadReadingPlanProgress() {
if (readingPlan == null) {
return;
}
readingCodes = S.getDb().getAllReadingCodesByReadingPlanId(readingPlan.info.id);
}
public void goToIsiActivity(final int dayNumber, final int sequence) {
final int[] selectedVerses = readingPlan.dailyVerses.get(dayNumber);
int ari = selectedVerses[sequence * 2];
Intent intent = new Intent();
intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
intent.putExtra("ari", ari);
intent.putExtra(READING_PLAN_ID, readingPlan.info.id);
intent.putExtra(READING_PLAN_DAY_NUMBER, dayNumber);
intent.putExtra(READING_PLAN_ARI_RANGES, selectedVerses);
setResult(RESULT_OK, intent);
finish();
}
private void loadDayNumber() {
if (readingPlan == null) {
return;
}
Calendar newCalendar = GregorianCalendar.getInstance();
newCalendar.set(Calendar.HOUR_OF_DAY, 0);
newCalendar.set(Calendar.MINUTE, 1); //TODO: find another way to calculate difference
newCalendar.set(Calendar.SECOND, 0);
Calendar startCalendar = GregorianCalendar.getInstance();
startCalendar.setTime(new Date(readingPlan.info.startDate));
startCalendar.set(Calendar.HOUR_OF_DAY, 0);
startCalendar.set(Calendar.MINUTE, 0);
startCalendar.set(Calendar.SECOND, 0);
todayNumber = (int) ((newCalendar.getTime().getTime() - startCalendar.getTime().getTime()) / (1000 * 60 * 60 * 24));
dayNumber = getIntent().getIntExtra(READING_PLAN_DAY_NUMBER, -1);
if (dayNumber == -1) {
dayNumber = todayNumber;
}
}
public boolean prepareDropDownNavigation() {
if (downloadedReadingPlanInfos.size() == 0) {
actionBar.setDisplayShowTitleEnabled(true);
actionBar.setNavigationMode(ActionBar.NAVIGATION_MODE_STANDARD);
return true;
}
actionBar.setDisplayShowTitleEnabled(false);
actionBar.setNavigationMode(ActionBar.NAVIGATION_MODE_LIST);
long id = Preferences.getLong(Prefkey.active_reading_plan, 0);
int itemNumber = 0;
//Drop-down navigation
List<String> titles = new ArrayList<String>();
for (int i = 0; i < downloadedReadingPlanInfos.size(); i++) {
ReadingPlan.ReadingPlanInfo info = downloadedReadingPlanInfos.get(i);
titles.add(info.title);
if (info.id == id) {
itemNumber = i;
}
}
ArrayAdapter<String> navigationAdapter = new ArrayAdapter<String>(this, R.layout.item_dropdown_reading_plan, titles);
newDropDownItems = false;
actionBar.setListNavigationCallbacks(navigationAdapter, new ActionBar.OnNavigationListener() {
@Override
public boolean onNavigationItemSelected(final int i, final long l) {
if (newDropDownItems) {
loadReadingPlan(downloadedReadingPlanInfos.get(i).id);
loadReadingPlanProgress();
prepareDisplay();
}
return true;
}
});
actionBar.setSelectedNavigationItem(itemNumber);
newDropDownItems = true;
return false;
}
public void prepareDisplay() {
if (readingPlan == null) {
llNavigations.setVisibility(View.GONE);
lsReadingPlan.setVisibility(View.GONE);
flNoData.setVisibility(View.VISIBLE);
bDownload.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(final View v) {
downloadReadingPlan();
}
});
return;
}
llNavigations.setVisibility(View.VISIBLE);
lsReadingPlan.setVisibility(View.VISIBLE);
flNoData.setVisibility(View.GONE);
//Listviews
readingPlanAdapter = new ReadingPlanAdapter();
readingPlanAdapter.load();
lsReadingPlan.setAdapter(readingPlanAdapter);
lsReadingPlan.setOnItemClickListener(new AdapterView.OnItemClickListener() {
@Override
public void onItemClick(final AdapterView<?> parent, final View view, final int position, final long id) {
final int todayReadingsSize = readingPlan.dailyVerses.get(dayNumber).length / 2;
if (position < todayReadingsSize) {
goToIsiActivity(dayNumber, position);
} else if (position > todayReadingsSize) {
goToIsiActivity(position - todayReadingsSize - 1, 0);
}
}
});
//buttons
updateButtonStatus();
bToday.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(final View v) {
final PopupMenu popupMenu = new PopupMenu(ReadingPlanActivity.this, v);
popupMenu.getMenu().add(Menu.NONE, 1, 1, getString(R.string.rp_showCalendar));
popupMenu.getMenu().add(Menu.NONE, 2, 2, getString(R.string.rp_gotoFirstUnread));
popupMenu.getMenu().add(Menu.NONE, 3, 3, getString(R.string.rp_gotoToday));
popupMenu.setOnMenuItemClickListener(new PopupMenu.OnMenuItemClickListener() {
@Override
public boolean onMenuItemClick(final MenuItem menuItem) {
popupMenu.dismiss();
int itemId = menuItem.getItemId();
if (itemId == 1) {
showCalendar();
} else if (itemId == 2) {
gotoFirstUnread();
} else if (itemId == 3) {
gotoToday();
}
return true;
}
});
popupMenu.show();
}
private void gotoToday() {
loadDayNumber();
changeDay(0);
}
private void gotoFirstUnread() {
dayNumber = findFirstUnreadDay(readingPlan.info.duration - 1);
changeDay(0);
}
private void showCalendar() {
Calendar calendar = Calendar.getInstance();
calendar.setTime(new Date(readingPlan.info.startDate));
calendar.add(Calendar.DATE, dayNumber);
DatePickerDialog.OnDateSetListener dateSetListener = new DatePickerDialog.OnDateSetListener() {
@Override
public void onDateSet(final DatePicker view, final int year, final int monthOfYear, final int dayOfMonth) {
Calendar newCalendar = new GregorianCalendar(year, monthOfYear, dayOfMonth);
newCalendar.set(Calendar.HOUR_OF_DAY, 0);
newCalendar.set(Calendar.MINUTE, 1); //TODO: find another way to calculate difference
newCalendar.set(Calendar.SECOND, 0);
Calendar startCalendar = GregorianCalendar.getInstance();
startCalendar.setTime(new Date(readingPlan.info.startDate));
startCalendar.set(Calendar.HOUR_OF_DAY, 0);
startCalendar.set(Calendar.MINUTE, 0);
startCalendar.set(Calendar.SECOND, 0);
int newDay = (int) ((newCalendar.getTime().getTime() - startCalendar.getTime().getTime()) / (1000 * 60 * 60 * 24));
if (newDay < 0) {
newDay = 0;
} else if (newDay >= readingPlan.info.duration) {
newDay = readingPlan.info.duration - 1;
}
dayNumber = newDay;
changeDay(0);
}
};
DatePickerDialog datePickerDialog = new DatePickerDialog(ReadingPlanActivity.this, dateSetListener, calendar.get(Calendar.YEAR), calendar.get(Calendar.MONTH), calendar.get(Calendar.DAY_OF_MONTH));
datePickerDialog.show();
}
});
bLeft.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(final View v) {
changeDay(-1);
}
});
bRight.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(final View v) {
changeDay(+1);
}
});
}
private void resetReadingPlan() {
new AlertDialog.Builder(this)
.setMessage(getString(R.string.rp_reset))
.setPositiveButton(R.string.ok, new DialogInterface.OnClickListener() {
@Override
public void onClick(final DialogInterface dialog, final int which) {
int lastUnreadDay = findFirstUnreadDay(dayNumber);
Calendar calendar = GregorianCalendar.getInstance();
calendar.add(Calendar.DATE, -lastUnreadDay);
S.getDb().updateStartDate(readingPlan.info.id, calendar.getTime().getTime());
loadReadingPlan(readingPlan.info.id);
loadDayNumber();
readingPlanAdapter.load();
readingPlanAdapter.notifyDataSetChanged();
updateButtonStatus();
}
})
.setNegativeButton(R.string.cancel, null)
.show();
}
private int findFirstUnreadDay(final int dayUntil) {
int firstUnreadDay = dayUntil;
loop1:
for (int i = 0; i < dayUntil; i++) {
boolean[] readMarks = new boolean[readingPlan.dailyVerses.get(i).length];
ReadingPlanManager.writeReadMarksByDay(readingCodes, readMarks, i);
for (boolean readMark : readMarks) {
if (!readMark) {
firstUnreadDay = i;
break loop1;
}
}
}
return firstUnreadDay;
}
private void deleteReadingPlan() {
new AlertDialog.Builder(this)
.setMessage(getString(R.string.rp_deletePlan, readingPlan.info.title))
.setPositiveButton(R.string.ok, new DialogInterface.OnClickListener() {
@Override
public void onClick(final DialogInterface dialog, final int which) {
S.getDb().deleteReadingPlanById(readingPlan.info.id);
readingPlan = null;
Preferences.remove(Prefkey.active_reading_plan);
loadReadingPlan(0);
loadReadingPlanProgress();
loadDayNumber();
prepareDropDownNavigation();
prepareDisplay();
}
})
.setNegativeButton(R.string.cancel, null)
.show();
}
private void showAbout() {
SpannableStringBuilder sb = new SpannableStringBuilder();
sb.append("Title: " + readingPlan.info.title);
sb.append("\nDescription: " + readingPlan.info.description);
sb.append("\nDuration: " + readingPlan.info.duration);
new AlertDialog.Builder(ReadingPlanActivity.this)
.setMessage(sb)
.setPositiveButton(R.string.ok, null)
.show();
}
private void changeDay(int day) {
dayNumber += day;
readingPlanAdapter.load();
readingPlanAdapter.notifyDataSetChanged();
updateButtonStatus();
}
private void updateButtonStatus() { //TODO look disabled
if (dayNumber == 0) {
bLeft.setEnabled(false);
bRight.setEnabled(true);
} else if (dayNumber == readingPlan.info.duration - 1) {
bLeft.setEnabled(true);
bRight.setEnabled(false);
} else {
bLeft.setEnabled(true);
bRight.setEnabled(true);
}
bToday.setText(getReadingDateHeader(dayNumber));
}
private void downloadReadingPlan() {
AppConfig config = AppConfig.get();
final List<ReadingPlan.ReadingPlanInfo> infos = config.readingPlanInfos;
final List<String> readingPlanTitles = new ArrayList<String>();
final List<Integer> resources = new ArrayList<Integer>();
for (int i = 0; i < infos.size(); i++) {
String title = infos.get(i).title;
boolean downloaded = false;
for (ReadingPlan.ReadingPlanInfo downloadedReadingPlanInfo : downloadedReadingPlanInfos) {
if (title.equals(downloadedReadingPlanInfo.title)) {
downloaded = true;
break;
}
}
if (!downloaded) {
readingPlanTitles.add(title);
String filename = infos.get(i).filename.replace(".rpb", ""); //TODO: proper method. testing only
resources.add(getResources().getIdentifier(filename, "raw", getPackageName())); //TODO: proper method
}
}
if (readingPlanTitles.size() == 0) {
new AlertDialog.Builder(this)
.setMessage(getString(R.string.rp_noReadingPlanAvailable))
.setPositiveButton(R.string.ok, null)
.show();
} else {
AlertDialog.Builder builder = new AlertDialog.Builder(this);
builder.setAdapter(new ArrayAdapter<String>(this, android.R.layout.simple_list_item_1, readingPlanTitles), new DialogInterface.OnClickListener() {
@Override
public void onClick(final DialogInterface dialog, final int which) {
long id = ReadingPlanManager.copyReadingPlanToDb(resources.get(which));
Preferences.setLong(Prefkey.active_reading_plan, id);
loadDayNumber();
loadReadingPlan(id);
loadReadingPlanProgress();
prepareDropDownNavigation();
prepareDisplay();
dialog.dismiss();
}
})
.setNegativeButton("Cancel", null)
.show();
}
}
private float getActualPercentage() {
float res = (float) countRead() / (float) countAllReadings() * 100;
res = (float)Math.round(res * 100) / 100;
return res;
}
private float getTargetPercentage() {
float res = (float) countTarget() / (float) countAllReadings() * 100;
res = (float)Math.round(res * 100) / 100;
return res;
}
private int countRead() {
IntArrayList filteredReadingCodes = ReadingPlanManager.filterReadingCodesByDayStartEnd(readingCodes, 0, todayNumber);
return filteredReadingCodes.size();
}
private int countTarget() {
int res = 0;
for (int i = 0; i <= todayNumber; i++) {
res += readingPlan.dailyVerses.get(i).length / 2;
}
return res;
}
private int countAllReadings() {
int res = 0;
for (int i = 0; i < readingPlan.info.duration; i++) {
res += readingPlan.dailyVerses.get(i).length / 2;
}
return res;
}
public String getReadingDateHeader(final int dayNumber) {
Calendar calendar = Calendar.getInstance();
calendar.setTime(new Date(readingPlan.info.startDate));
calendar.add(Calendar.DATE, dayNumber);
String date = getString(R.string.rp_dayHeader, (dayNumber + 1), new SimpleDateFormat("MMMM dd, yyyy").format(calendar.getTime()));
return date;
}
public static SpannableStringBuilder getReference(Version version, int[] ari) {
SpannableStringBuilder sb = new SpannableStringBuilder();
String book = version.getBook(Ari.toBook(ari[0])).shortName;
sb.append(book);
int startChapter = Ari.toChapter(ari[0]);
int startVerse = Ari.toVerse(ari[0]);
int lastVerse = Ari.toVerse(ari[1]);
int lastChapter = Ari.toChapter(ari[1]);
sb.append(" " + startChapter);
if (startVerse == 0) {
if (lastVerse == 0) {
if (startChapter != lastChapter) {
sb.append("-" + lastChapter);
}
} else {
sb.append("-" + lastChapter + ":" + lastVerse);
}
} else {
if (startChapter == lastChapter) {
sb.append(":" + startVerse + "-" + lastVerse);
} else {
sb.append(":" + startVerse + "-" + lastChapter + ":" + lastVerse);
}
}
return sb;
}
class ReadingPlanAdapter extends BaseAdapter {
private int[] todayReadings;
public void load() {
todayReadings = readingPlan.dailyVerses.get(dayNumber);
}
@Override
public int getCount() {
if (showDetail) {
return (todayReadings.length / 2) + readingPlan.info.duration + 1;
} else {
return (todayReadings.length / 2) + 1;
}
}
@Override
public View getView(final int position, View convertView, final ViewGroup parent) {
final int itemViewType = getItemViewType(position);
if (itemViewType == 0) {
CheckBox checkBox = new CheckBox(ReadingPlanActivity.this);
LinearLayout layout = new LinearLayout(ReadingPlanActivity.this);
layout.addView(checkBox);
convertView = layout;
boolean[] readMarks = new boolean[todayReadings.length];
ReadingPlanManager.writeReadMarksByDay(readingCodes, readMarks, dayNumber);
if (readMarks[position * 2]) {
checkBox.setChecked(true);
} else {
checkBox.setChecked(false);
}
checkBox.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() {
@Override
public void onCheckedChanged(final CompoundButton buttonView, final boolean isChecked) {
ReadingPlanManager.updateReadingPlanProgress(readingPlan.info.id, dayNumber, position, isChecked);
loadReadingPlanProgress();
load();
notifyDataSetChanged();
}
});
int start = position * 2;
int[] aris = {todayReadings[start], todayReadings[start + 1]};
checkBox.setText(getReference(S.activeVersion, aris));
checkBox.setFocusable(false);
LinearLayout.LayoutParams layoutParams = new LinearLayout.LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT);
checkBox.setLayoutParams(layoutParams);
} else if (itemViewType == 1) {
if (convertView == null) {
convertView = getLayoutInflater().inflate(R.layout.item_reading_plan_summary, parent, false);
}
final ProgressBar pbReadingProgress = V.get(convertView, R.id.pbReadingProgress);
final TextView tActual = V.get(convertView, R.id.tActual);
final TextView tTarget = V.get(convertView, R.id.tTarget);
final TextView tComment = V.get(convertView, R.id.tComment);
final TextView tDetail = V.get(convertView, R.id.tDetail);
float actualPercentage = getActualPercentage();
float targetPercentage = getTargetPercentage();
pbReadingProgress.setMax(100);
pbReadingProgress.setProgress((int) actualPercentage);
pbReadingProgress.setSecondaryProgress((int) targetPercentage);
tActual.setText(getString(R.string.rp_commentActual, actualPercentage));
tTarget.setText(getString(R.string.rp_commentTarget, targetPercentage));
String comment;
if (actualPercentage == targetPercentage) {
comment = getString(R.string.rp_commentOnSchedule);
} else {
float diff = (float) Math.round((targetPercentage - actualPercentage) * 100) / 100;
comment = getString(R.string.rp_commentBehindSchedule, diff);
}
tComment.setText(comment);
tDetail.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(final View v) {
showDetail = !showDetail;
if (showDetail) {
tDetail.setText(R.string.rp_hideDetail);
} else {
tDetail.setText(R.string.rp_showDetail);
}
notifyDataSetChanged();
}
});
} else if (itemViewType == 2) {
if (convertView == null) {
convertView = getLayoutInflater().inflate(R.layout.item_reading_plan_one_day, parent, false);
}
final LinearLayout layout = V.get(convertView, R.id.llOneDayReadingPlan);
final int currentViewTypePosition = position - todayReadings.length / 2 - 1;
//Text title
TextView tTitle = V.get(convertView, android.R.id.text1);
tTitle.setText(getReadingDateHeader(currentViewTypePosition));
//Text reading
while (true) {
final View reading = layout.findViewWithTag("reading");
if (reading != null) {
layout.removeView(reading);
} else {
break;
}
}
int[] aris = readingPlan.dailyVerses.get(currentViewTypePosition);
for (int i = 0; i < aris.length / 2; i++) {
final int ariPosition = i;
int[] ariStartEnd = {aris[i * 2], aris[i * 2 + 1]};
final SpannableStringBuilder reference = getReference(S.activeVersion, ariStartEnd);
CheckBox checkBox = new CheckBox(ReadingPlanActivity.this);
checkBox.setText(reference);
checkBox.setTag("reading");
boolean[] readMarks = new boolean[aris.length];
ReadingPlanManager.writeReadMarksByDay(readingCodes, readMarks, currentViewTypePosition);
checkBox.setChecked(readMarks[ariPosition * 2]);
checkBox.setFocusable(false);
LinearLayout.LayoutParams layoutParams = new LinearLayout.LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.MATCH_PARENT);
checkBox.setLayoutParams(layoutParams);
checkBox.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() {
@Override
public void onCheckedChanged(final CompoundButton buttonView, final boolean isChecked) {
ReadingPlanManager.updateReadingPlanProgress(readingPlan.info.id, currentViewTypePosition, ariPosition, isChecked);
loadReadingPlanProgress();
load();
notifyDataSetChanged();
}
});
layout.addView(checkBox);
}
}
return convertView;
}
@Override
public Object getItem(final int position) {
return null;
}
@Override
public long getItemId(final int position) {
return 0;
}
@Override
public int getViewTypeCount() {
return 3;
}
@Override
public int getItemViewType(final int position) {
if (position < todayReadings.length / 2) {
return 0;
} else if (position == todayReadings.length / 2) {
return 1;
} else {
return 2;
}
}
}
}
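For reference, the two-decimal rounding used by getActualPercentage() and getTargetPercentage() above reduces to a few lines. The following standalone sketch repeats that arithmetic with hypothetical counts; it is not part of the activity, and the class name and numbers are invented for illustration only:

public class ProgressMathSketch {
    // Rounds to two decimal places, the same way getActualPercentage()/getTargetPercentage() do.
    static float percentage(int done, int total) {
        float res = (float) done / (float) total * 100;
        return (float) Math.round(res * 100) / 100;
    }

    public static void main(String[] args) {
        // Hypothetical numbers: 7 ranges read, 9 scheduled so far, 365 in the whole plan.
        System.out.println(percentage(7, 365)); // actual progress, prints 1.92
        System.out.println(percentage(9, 365)); // target progress, prints 2.47
    }
}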
|
Extract to method.
|
Alkitab/src/yuku/alkitab/base/ac/ReadingPlanActivity.java
|
Extract to method.
|
<ide><path>lkitab/src/yuku/alkitab/base/ac/ReadingPlanActivity.java
<ide> return;
<ide> }
<ide>
<del> Calendar newCalendar = GregorianCalendar.getInstance();
<del> newCalendar.set(Calendar.HOUR_OF_DAY, 0);
<del> newCalendar.set(Calendar.MINUTE, 1); //TODO: find another way to calculate difference
<del> newCalendar.set(Calendar.SECOND, 0);
<del>
<ide> Calendar startCalendar = GregorianCalendar.getInstance();
<ide> startCalendar.setTime(new Date(readingPlan.info.startDate));
<add>
<add> todayNumber = calculateDaysDiff(startCalendar, GregorianCalendar.getInstance());
<add> dayNumber = getIntent().getIntExtra(READING_PLAN_DAY_NUMBER, -1);
<add> if (dayNumber == -1) {
<add> dayNumber = todayNumber;
<add> }
<add> }
<add>
<add> private int calculateDaysDiff(Calendar startCalendar, Calendar endCalendar) {
<ide> startCalendar.set(Calendar.HOUR_OF_DAY, 0);
<ide> startCalendar.set(Calendar.MINUTE, 0);
<ide> startCalendar.set(Calendar.SECOND, 0);
<del>
<del> todayNumber = (int) ((newCalendar.getTime().getTime() - startCalendar.getTime().getTime()) / (1000 * 60 * 60 * 24));
<del> dayNumber = getIntent().getIntExtra(READING_PLAN_DAY_NUMBER, -1);
<del> if (dayNumber == -1) {
<del> dayNumber = todayNumber;
<del> }
<del> }
<add> startCalendar.set(Calendar.MILLISECOND, 0);
<add>
<add> endCalendar.set(Calendar.HOUR_OF_DAY, 0);
<add> endCalendar.set(Calendar.MINUTE, 0);
<add> endCalendar.set(Calendar.SECOND, 0);
<add> endCalendar.set(Calendar.MILLISECOND, 0);
<add>
<add> return (int) ((endCalendar.getTime().getTime() - startCalendar.getTime().getTime()) / (1000 * 60 * 60 * 24));
<add> }
<add>
<ide>
<ide> public boolean prepareDropDownNavigation() {
<ide> if (downloadedReadingPlanInfos.size() == 0) {
<ide> @Override
<ide> public void onDateSet(final DatePicker view, final int year, final int monthOfYear, final int dayOfMonth) {
<ide> Calendar newCalendar = new GregorianCalendar(year, monthOfYear, dayOfMonth);
<del> newCalendar.set(Calendar.HOUR_OF_DAY, 0);
<del> newCalendar.set(Calendar.MINUTE, 1); //TODO: find another way to calculate difference
<del> newCalendar.set(Calendar.SECOND, 0);
<del>
<ide> Calendar startCalendar = GregorianCalendar.getInstance();
<ide> startCalendar.setTime(new Date(readingPlan.info.startDate));
<del> startCalendar.set(Calendar.HOUR_OF_DAY, 0);
<del> startCalendar.set(Calendar.MINUTE, 0);
<del> startCalendar.set(Calendar.SECOND, 0);
<del>
<del> int newDay = (int) ((newCalendar.getTime().getTime() - startCalendar.getTime().getTime()) / (1000 * 60 * 60 * 24));
<add>
<add> int newDay = calculateDaysDiff(startCalendar, newCalendar);
<ide> if (newDay < 0) {
<ide> newDay = 0;
<ide> } else if (newDay >= readingPlan.info.duration) {
|
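The diff above replaces the old 00:01 minute offset with a calculateDaysDiff() helper that normalizes both calendars to midnight before dividing the millisecond difference by one day. A self-contained sketch of that extracted logic follows, with hypothetical dates; the helper body mirrors the added lines, except that getTimeInMillis() is used in place of getTime().getTime(), which is equivalent:

import java.util.Calendar;
import java.util.GregorianCalendar;

public class DaysDiffSketch {
    // Mirrors the calculateDaysDiff() introduced in the diff: zero out the time-of-day
    // fields on both calendars, then divide the millisecond difference by one day.
    static int calculateDaysDiff(Calendar start, Calendar end) {
        for (Calendar c : new Calendar[] { start, end }) {
            c.set(Calendar.HOUR_OF_DAY, 0);
            c.set(Calendar.MINUTE, 0);
            c.set(Calendar.SECOND, 0);
            c.set(Calendar.MILLISECOND, 0);
        }
        return (int) ((end.getTimeInMillis() - start.getTimeInMillis()) / (1000 * 60 * 60 * 24));
    }

    public static void main(String[] args) {
        Calendar start = new GregorianCalendar(2014, Calendar.JANUARY, 1, 23, 59);
        Calendar end = new GregorianCalendar(2014, Calendar.JANUARY, 8, 0, 5);
        System.out.println(calculateDaysDiff(start, end)); // prints 7
    }
}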
|
JavaScript
|
mit
|
ff16cf6d8c3c7dd49737a0aa89530b0f449bd714
| 0 |
fernandopasik/generator-startmeup,fernandopasik/generator-startmeup,fernandopasik/generator-startmeup
|
'use strict';
/**
* Copy project initial files.
*/
module.exports = function () {
let bower;
this.fs.writeJSON(this.destinationPath('package.json'), this.pkg);
if (this.pkg.devDependencies.bower) {
this.template('_bower.json', 'bower.json');
bower = Object.assign(
this.fs.readJSON(this.templatePath('_bower.json'), {}),
{
name: this.pkg.name,
description: this.pkg.description,
homepage: this.pkg.homepage,
authors: this.pkg.author,
repository: { type: 'git', url: this.pkg.repository.url }
}
);
this.fs.writeJSON(this.destinationPath('bower.json'), bower);
}
this.template('README.md');
this.copy('LICENSE');
};
|
generators/app/writing.js
|
'use strict';
/**
* Copy project initial files.
*/
module.exports = function () {
this.fs.writeJSON(this.destinationPath('package.json'), this.pkg);
if (this.pkg.devDependencies.bower) {
this.template('_bower.json', 'bower.json');
}
this.template('README.md');
this.copy('LICENSE');
};
|
Better initial bower.json
|
generators/app/writing.js
|
Better initial bower.json
|
<ide><path>enerators/app/writing.js
<ide> */
<ide> module.exports = function () {
<ide>
<add> let bower;
<add>
<ide> this.fs.writeJSON(this.destinationPath('package.json'), this.pkg);
<add>
<ide> if (this.pkg.devDependencies.bower) {
<add>
<ide> this.template('_bower.json', 'bower.json');
<add>
<add> bower = Object.assign(
<add> this.fs.readJSON(this.templatePath('_bower.json'), {}),
<add> {
<add> name: this.pkg.name,
<add> description: this.pkg.description,
<add> homepage: this.pkg.homepage,
<add> authors: this.pkg.author,
<add> repository: { type: 'git', url: this.pkg.repository.url }
<add> }
<add> );
<add>
<add> this.fs.writeJSON(this.destinationPath('bower.json'), bower);
<ide> }
<ide> this.template('README.md');
<ide> this.copy('LICENSE');
|
|
Java
|
apache-2.0
|
a7735efd7553c9ce162deb8cef3b90ea7702b063
| 0 |
samthor/intellij-community,SerCeMan/intellij-community,kool79/intellij-community,signed/intellij-community,joewalnes/idea-community,akosyakov/intellij-community,vladmm/intellij-community,orekyuu/intellij-community,muntasirsyed/intellij-community,diorcety/intellij-community,slisson/intellij-community,allotria/intellij-community,TangHao1987/intellij-community,amith01994/intellij-community,joewalnes/idea-community,vvv1559/intellij-community,MER-GROUP/intellij-community,fengbaicanhe/intellij-community,Distrotech/intellij-community,clumsy/intellij-community,ryano144/intellij-community,jagguli/intellij-community,hurricup/intellij-community,MichaelNedzelsky/intellij-community,idea4bsd/idea4bsd,ernestp/consulo,salguarnieri/intellij-community,youdonghai/intellij-community,pwoodworth/intellij-community,pwoodworth/intellij-community,suncycheng/intellij-community,fitermay/intellij-community,retomerz/intellij-community,nicolargo/intellij-community,Lekanich/intellij-community,nicolargo/intellij-community,TangHao1987/intellij-community,fnouama/intellij-community,caot/intellij-community,retomerz/intellij-community,SerCeMan/intellij-community,consulo/consulo,izonder/intellij-community,pwoodworth/intellij-community,consulo/consulo,youdonghai/intellij-community,dslomov/intellij-community,ernestp/consulo,fitermay/intellij-community,mglukhikh/intellij-community,hurricup/intellij-community,supersven/intellij-community,caot/intellij-community,amith01994/intellij-community,clumsy/intellij-community,slisson/intellij-community,blademainer/intellij-community,muntasirsyed/intellij-community,ibinti/intellij-community,alphafoobar/intellij-community,asedunov/intellij-community,muntasirsyed/intellij-community,supersven/intellij-community,FHannes/intellij-community,salguarnieri/intellij-community,ibinti/intellij-community,amith01994/intellij-community,signed/intellij-community,youdonghai/intellij-community,idea4bsd/idea4bsd,FHannes/intellij-community,supersven/intellij-community,izonder/intellij-community,da1z/intellij-community,caot/intellij-community,xfournet/intellij-community,joewalnes/idea-community,semonte/intellij-community,semonte/intellij-community,vladmm/intellij-community,fnouama/intellij-community,diorcety/intellij-community,mglukhikh/intellij-community,apixandru/intellij-community,adedayo/intellij-community,ol-loginov/intellij-community,fengbaicanhe/intellij-community,hurricup/intellij-community,ahb0327/intellij-community,gnuhub/intellij-community,lucafavatella/intellij-community,supersven/intellij-community,idea4bsd/idea4bsd,ivan-fedorov/intellij-community,Distrotech/intellij-community,ol-loginov/intellij-community,supersven/intellij-community,MichaelNedzelsky/intellij-community,asedunov/intellij-community,dslomov/intellij-community,orekyuu/intellij-community,dslomov/intellij-community,ahb0327/intellij-community,fitermay/intellij-community,slisson/intellij-community,apixandru/intellij-community,wreckJ/intellij-community,joewalnes/idea-community,allotria/intellij-community,amith01994/intellij-community,kdwink/intellij-community,ftomassetti/intellij-community,alphafoobar/intellij-community,blademainer/intellij-community,SerCeMan/intellij-community,wreckJ/intellij-community,blademainer/intellij-community,retomerz/intellij-community,diorcety/intellij-community,xfournet/intellij-community,youdonghai/intellij-community,signed/intellij-community,vvv1559/intellij-community,petteyg/intellij-community,ivan-fedorov/intellij-community,vladmm/intellij-community,ThiagoGarciaAlves/intellij-community,hurricup/intellij-commun
ity,pwoodworth/intellij-community,tmpgit/intellij-community,nicolargo/intellij-community,TangHao1987/intellij-community,ahb0327/intellij-community,fnouama/intellij-community,idea4bsd/idea4bsd,Lekanich/intellij-community,dslomov/intellij-community,ivan-fedorov/intellij-community,nicolargo/intellij-community,idea4bsd/idea4bsd,ol-loginov/intellij-community,asedunov/intellij-community,signed/intellij-community,tmpgit/intellij-community,suncycheng/intellij-community,xfournet/intellij-community,robovm/robovm-studio,asedunov/intellij-community,michaelgallacher/intellij-community,izonder/intellij-community,hurricup/intellij-community,youdonghai/intellij-community,FHannes/intellij-community,samthor/intellij-community,wreckJ/intellij-community,retomerz/intellij-community,jagguli/intellij-community,TangHao1987/intellij-community,tmpgit/intellij-community,consulo/consulo,adedayo/intellij-community,TangHao1987/intellij-community,dslomov/intellij-community,signed/intellij-community,wreckJ/intellij-community,Distrotech/intellij-community,ernestp/consulo,pwoodworth/intellij-community,gnuhub/intellij-community,robovm/robovm-studio,MichaelNedzelsky/intellij-community,akosyakov/intellij-community,blademainer/intellij-community,jagguli/intellij-community,izonder/intellij-community,akosyakov/intellij-community,blademainer/intellij-community,FHannes/intellij-community,retomerz/intellij-community,gnuhub/intellij-community,MichaelNedzelsky/intellij-community,blademainer/intellij-community,salguarnieri/intellij-community,amith01994/intellij-community,youdonghai/intellij-community,supersven/intellij-community,ibinti/intellij-community,michaelgallacher/intellij-community,mglukhikh/intellij-community,semonte/intellij-community,supersven/intellij-community,orekyuu/intellij-community,allotria/intellij-community,da1z/intellij-community,robovm/robovm-studio,semonte/intellij-community,salguarnieri/intellij-community,fengbaicanhe/intellij-community,slisson/intellij-community,TangHao1987/intellij-community,ftomassetti/intellij-community,lucafavatella/intellij-community,ryano144/intellij-community,hurricup/intellij-community,diorcety/intellij-community,adedayo/intellij-community,vladmm/intellij-community,ivan-fedorov/intellij-community,FHannes/intellij-community,alphafoobar/intellij-community,clumsy/intellij-community,ol-loginov/intellij-community,jagguli/intellij-community,retomerz/intellij-community,hurricup/intellij-community,adedayo/intellij-community,caot/intellij-community,fengbaicanhe/intellij-community,muntasirsyed/intellij-community,ryano144/intellij-community,asedunov/intellij-community,fengbaicanhe/intellij-community,kool79/intellij-community,Lekanich/intellij-community,da1z/intellij-community,tmpgit/intellij-community,pwoodworth/intellij-community,petteyg/intellij-community,suncycheng/intellij-community,clumsy/intellij-community,wreckJ/intellij-community,FHannes/intellij-community,ThiagoGarciaAlves/intellij-community,SerCeMan/intellij-community,holmes/intellij-community,supersven/intellij-community,asedunov/intellij-community,ftomassetti/intellij-community,ThiagoGarciaAlves/intellij-community,signed/intellij-community,robovm/robovm-studio,blademainer/intellij-community,TangHao1987/intellij-community,semonte/intellij-community,youdonghai/intellij-community,tmpgit/intellij-community,robovm/robovm-studio,muntasirsyed/intellij-community,muntasirsyed/intellij-community,michaelgallacher/intellij-community,suncycheng/intellij-community,allotria/intellij-community,MER-GROUP/intellij-community,asedunov/intellij-community,s
igned/intellij-community,kool79/intellij-community,samthor/intellij-community,adedayo/intellij-community,holmes/intellij-community,orekyuu/intellij-community,michaelgallacher/intellij-community,caot/intellij-community,dslomov/intellij-community,ibinti/intellij-community,michaelgallacher/intellij-community,dslomov/intellij-community,slisson/intellij-community,asedunov/intellij-community,adedayo/intellij-community,petteyg/intellij-community,SerCeMan/intellij-community,fengbaicanhe/intellij-community,nicolargo/intellij-community,kdwink/intellij-community,vvv1559/intellij-community,izonder/intellij-community,kool79/intellij-community,MER-GROUP/intellij-community,dslomov/intellij-community,orekyuu/intellij-community,retomerz/intellij-community,adedayo/intellij-community,clumsy/intellij-community,holmes/intellij-community,Lekanich/intellij-community,asedunov/intellij-community,vladmm/intellij-community,retomerz/intellij-community,kdwink/intellij-community,mglukhikh/intellij-community,suncycheng/intellij-community,allotria/intellij-community,tmpgit/intellij-community,idea4bsd/idea4bsd,xfournet/intellij-community,ryano144/intellij-community,apixandru/intellij-community,akosyakov/intellij-community,ol-loginov/intellij-community,da1z/intellij-community,kdwink/intellij-community,ibinti/intellij-community,Distrotech/intellij-community,lucafavatella/intellij-community,xfournet/intellij-community,lucafavatella/intellij-community,lucafavatella/intellij-community,fitermay/intellij-community,blademainer/intellij-community,jagguli/intellij-community,pwoodworth/intellij-community,mglukhikh/intellij-community,apixandru/intellij-community,MichaelNedzelsky/intellij-community,muntasirsyed/intellij-community,fitermay/intellij-community,akosyakov/intellij-community,ol-loginov/intellij-community,holmes/intellij-community,adedayo/intellij-community,caot/intellij-community,ryano144/intellij-community,fnouama/intellij-community,ahb0327/intellij-community,gnuhub/intellij-community,fnouama/intellij-community,FHannes/intellij-community,TangHao1987/intellij-community,diorcety/intellij-community,holmes/intellij-community,caot/intellij-community,retomerz/intellij-community,ol-loginov/intellij-community,gnuhub/intellij-community,youdonghai/intellij-community,allotria/intellij-community,ahb0327/intellij-community,fnouama/intellij-community,Lekanich/intellij-community,TangHao1987/intellij-community,robovm/robovm-studio,vvv1559/intellij-community,wreckJ/intellij-community,idea4bsd/idea4bsd,retomerz/intellij-community,salguarnieri/intellij-community,caot/intellij-community,samthor/intellij-community,blademainer/intellij-community,petteyg/intellij-community,apixandru/intellij-community,vladmm/intellij-community,wreckJ/intellij-community,diorcety/intellij-community,FHannes/intellij-community,fitermay/intellij-community,apixandru/intellij-community,adedayo/intellij-community,slisson/intellij-community,fitermay/intellij-community,orekyuu/intellij-community,diorcety/intellij-community,signed/intellij-community,kdwink/intellij-community,vladmm/intellij-community,MER-GROUP/intellij-community,fnouama/intellij-community,semonte/intellij-community,ftomassetti/intellij-community,ahb0327/intellij-community,ThiagoGarciaAlves/intellij-community,diorcety/intellij-community,dslomov/intellij-community,samthor/intellij-community,ivan-fedorov/intellij-community,amith01994/intellij-community,ol-loginov/intellij-community,joewalnes/idea-community,akosyakov/intellij-community,tmpgit/intellij-community,michaelgallacher/intellij-community,joewalnes/idea
-community,FHannes/intellij-community,ivan-fedorov/intellij-community,ThiagoGarciaAlves/intellij-community,holmes/intellij-community,SerCeMan/intellij-community,ryano144/intellij-community,lucafavatella/intellij-community,da1z/intellij-community,hurricup/intellij-community,signed/intellij-community,samthor/intellij-community,ol-loginov/intellij-community,petteyg/intellij-community,ivan-fedorov/intellij-community,MER-GROUP/intellij-community,kool79/intellij-community,mglukhikh/intellij-community,vladmm/intellij-community,suncycheng/intellij-community,consulo/consulo,nicolargo/intellij-community,izonder/intellij-community,da1z/intellij-community,kdwink/intellij-community,kool79/intellij-community,wreckJ/intellij-community,xfournet/intellij-community,idea4bsd/idea4bsd,ahb0327/intellij-community,hurricup/intellij-community,samthor/intellij-community,slisson/intellij-community,suncycheng/intellij-community,holmes/intellij-community,signed/intellij-community,nicolargo/intellij-community,hurricup/intellij-community,slisson/intellij-community,gnuhub/intellij-community,wreckJ/intellij-community,lucafavatella/intellij-community,gnuhub/intellij-community,amith01994/intellij-community,ThiagoGarciaAlves/intellij-community,salguarnieri/intellij-community,ol-loginov/intellij-community,semonte/intellij-community,akosyakov/intellij-community,MER-GROUP/intellij-community,asedunov/intellij-community,vvv1559/intellij-community,dslomov/intellij-community,petteyg/intellij-community,Distrotech/intellij-community,tmpgit/intellij-community,Lekanich/intellij-community,youdonghai/intellij-community,pwoodworth/intellij-community,SerCeMan/intellij-community,Distrotech/intellij-community,mglukhikh/intellij-community,da1z/intellij-community,joewalnes/idea-community,ahb0327/intellij-community,suncycheng/intellij-community,salguarnieri/intellij-community,idea4bsd/idea4bsd,robovm/robovm-studio,holmes/intellij-community,holmes/intellij-community,izonder/intellij-community,Lekanich/intellij-community,muntasirsyed/intellij-community,amith01994/intellij-community,semonte/intellij-community,signed/intellij-community,fengbaicanhe/intellij-community,signed/intellij-community,ThiagoGarciaAlves/intellij-community,supersven/intellij-community,vvv1559/intellij-community,dslomov/intellij-community,petteyg/intellij-community,vvv1559/intellij-community,alphafoobar/intellij-community,lucafavatella/intellij-community,amith01994/intellij-community,mglukhikh/intellij-community,caot/intellij-community,ivan-fedorov/intellij-community,fnouama/intellij-community,akosyakov/intellij-community,Distrotech/intellij-community,clumsy/intellij-community,Distrotech/intellij-community,gnuhub/intellij-community,clumsy/intellij-community,retomerz/intellij-community,jagguli/intellij-community,nicolargo/intellij-community,FHannes/intellij-community,samthor/intellij-community,salguarnieri/intellij-community,fengbaicanhe/intellij-community,hurricup/intellij-community,izonder/intellij-community,ibinti/intellij-community,fitermay/intellij-community,alphafoobar/intellij-community,muntasirsyed/intellij-community,ibinti/intellij-community,akosyakov/intellij-community,gnuhub/intellij-community,clumsy/intellij-community,SerCeMan/intellij-community,akosyakov/intellij-community,ivan-fedorov/intellij-community,joewalnes/idea-community,ThiagoGarciaAlves/intellij-community,vladmm/intellij-community,allotria/intellij-community,Lekanich/intellij-community,ernestp/consulo,kool79/intellij-community,diorcety/intellij-community,youdonghai/intellij-community,TangHao1987/intellij-
community,MER-GROUP/intellij-community,nicolargo/intellij-community,xfournet/intellij-community,lucafavatella/intellij-community,salguarnieri/intellij-community,ernestp/consulo,consulo/consulo,asedunov/intellij-community,alphafoobar/intellij-community,gnuhub/intellij-community,samthor/intellij-community,Lekanich/intellij-community,jagguli/intellij-community,Distrotech/intellij-community,salguarnieri/intellij-community,MER-GROUP/intellij-community,signed/intellij-community,kdwink/intellij-community,samthor/intellij-community,ol-loginov/intellij-community,robovm/robovm-studio,apixandru/intellij-community,consulo/consulo,Distrotech/intellij-community,kool79/intellij-community,jagguli/intellij-community,vvv1559/intellij-community,ftomassetti/intellij-community,michaelgallacher/intellij-community,ftomassetti/intellij-community,idea4bsd/idea4bsd,Lekanich/intellij-community,mglukhikh/intellij-community,ftomassetti/intellij-community,FHannes/intellij-community,muntasirsyed/intellij-community,idea4bsd/idea4bsd,ibinti/intellij-community,da1z/intellij-community,vvv1559/intellij-community,MER-GROUP/intellij-community,apixandru/intellij-community,blademainer/intellij-community,fitermay/intellij-community,vladmm/intellij-community,nicolargo/intellij-community,amith01994/intellij-community,michaelgallacher/intellij-community,alphafoobar/intellij-community,clumsy/intellij-community,ryano144/intellij-community,slisson/intellij-community,FHannes/intellij-community,izonder/intellij-community,da1z/intellij-community,robovm/robovm-studio,gnuhub/intellij-community,apixandru/intellij-community,fitermay/intellij-community,da1z/intellij-community,tmpgit/intellij-community,allotria/intellij-community,vladmm/intellij-community,retomerz/intellij-community,caot/intellij-community,alphafoobar/intellij-community,izonder/intellij-community,fitermay/intellij-community,blademainer/intellij-community,allotria/intellij-community,vvv1559/intellij-community,ibinti/intellij-community,MER-GROUP/intellij-community,robovm/robovm-studio,robovm/robovm-studio,ryano144/intellij-community,MER-GROUP/intellij-community,petteyg/intellij-community,alphafoobar/intellij-community,holmes/intellij-community,fnouama/intellij-community,da1z/intellij-community,samthor/intellij-community,allotria/intellij-community,semonte/intellij-community,orekyuu/intellij-community,fnouama/intellij-community,pwoodworth/intellij-community,xfournet/intellij-community,vvv1559/intellij-community,alphafoobar/intellij-community,ThiagoGarciaAlves/intellij-community,apixandru/intellij-community,MichaelNedzelsky/intellij-community,diorcety/intellij-community,ivan-fedorov/intellij-community,asedunov/intellij-community,slisson/intellij-community,supersven/intellij-community,jagguli/intellij-community,xfournet/intellij-community,TangHao1987/intellij-community,lucafavatella/intellij-community,suncycheng/intellij-community,joewalnes/idea-community,Distrotech/intellij-community,hurricup/intellij-community,apixandru/intellij-community,xfournet/intellij-community,kdwink/intellij-community,ahb0327/intellij-community,michaelgallacher/intellij-community,kool79/intellij-community,youdonghai/intellij-community,ryano144/intellij-community,MichaelNedzelsky/intellij-community,suncycheng/intellij-community,slisson/intellij-community,ftomassetti/intellij-community,lucafavatella/intellij-community,vladmm/intellij-community,MichaelNedzelsky/intellij-community,xfournet/intellij-community,diorcety/intellij-community,samthor/intellij-community,orekyuu/intellij-community,amith01994/intellij-com
munity,kool79/intellij-community,idea4bsd/idea4bsd,wreckJ/intellij-community,ThiagoGarciaAlves/intellij-community,da1z/intellij-community,fnouama/intellij-community,akosyakov/intellij-community,fengbaicanhe/intellij-community,asedunov/intellij-community,salguarnieri/intellij-community,izonder/intellij-community,caot/intellij-community,ibinti/intellij-community,gnuhub/intellij-community,adedayo/intellij-community,ahb0327/intellij-community,ibinti/intellij-community,lucafavatella/intellij-community,ol-loginov/intellij-community,fitermay/intellij-community,vvv1559/intellij-community,allotria/intellij-community,fengbaicanhe/intellij-community,petteyg/intellij-community,semonte/intellij-community,blademainer/intellij-community,tmpgit/intellij-community,semonte/intellij-community,kdwink/intellij-community,supersven/intellij-community,Lekanich/intellij-community,TangHao1987/intellij-community,dslomov/intellij-community,retomerz/intellij-community,salguarnieri/intellij-community,caot/intellij-community,nicolargo/intellij-community,clumsy/intellij-community,suncycheng/intellij-community,kdwink/intellij-community,pwoodworth/intellij-community,ftomassetti/intellij-community,ftomassetti/intellij-community,lucafavatella/intellij-community,ftomassetti/intellij-community,ivan-fedorov/intellij-community,MER-GROUP/intellij-community,adedayo/intellij-community,SerCeMan/intellij-community,Lekanich/intellij-community,muntasirsyed/intellij-community,mglukhikh/intellij-community,orekyuu/intellij-community,kdwink/intellij-community,ibinti/intellij-community,holmes/intellij-community,vvv1559/intellij-community,mglukhikh/intellij-community,allotria/intellij-community,wreckJ/intellij-community,xfournet/intellij-community,slisson/intellij-community,supersven/intellij-community,jagguli/intellij-community,kool79/intellij-community,allotria/intellij-community,MichaelNedzelsky/intellij-community,diorcety/intellij-community,fnouama/intellij-community,muntasirsyed/intellij-community,SerCeMan/intellij-community,semonte/intellij-community,jagguli/intellij-community,amith01994/intellij-community,da1z/intellij-community,adedayo/intellij-community,clumsy/intellij-community,mglukhikh/intellij-community,ftomassetti/intellij-community,robovm/robovm-studio,mglukhikh/intellij-community,orekyuu/intellij-community,apixandru/intellij-community,MichaelNedzelsky/intellij-community,fengbaicanhe/intellij-community,tmpgit/intellij-community,michaelgallacher/intellij-community,xfournet/intellij-community,petteyg/intellij-community,clumsy/intellij-community,fitermay/intellij-community,FHannes/intellij-community,petteyg/intellij-community,kool79/intellij-community,ivan-fedorov/intellij-community,fengbaicanhe/intellij-community,michaelgallacher/intellij-community,kdwink/intellij-community,MichaelNedzelsky/intellij-community,petteyg/intellij-community,alphafoobar/intellij-community,apixandru/intellij-community,wreckJ/intellij-community,ernestp/consulo,nicolargo/intellij-community,ThiagoGarciaAlves/intellij-community,youdonghai/intellij-community,SerCeMan/intellij-community,Distrotech/intellij-community,MichaelNedzelsky/intellij-community,jagguli/intellij-community,ibinti/intellij-community,tmpgit/intellij-community,SerCeMan/intellij-community,alphafoobar/intellij-community,izonder/intellij-community,ryano144/intellij-community,idea4bsd/idea4bsd,holmes/intellij-community,suncycheng/intellij-community,youdonghai/intellij-community,apixandru/intellij-community,pwoodworth/intellij-community,orekyuu/intellij-community,ryano144/intellij-community,mich
aelgallacher/intellij-community,akosyakov/intellij-community,hurricup/intellij-community,pwoodworth/intellij-community,ahb0327/intellij-community,ahb0327/intellij-community,ThiagoGarciaAlves/intellij-community,semonte/intellij-community,orekyuu/intellij-community,ryano144/intellij-community
|
package org.jetbrains.android;
import com.android.ddmlib.AndroidDebugBridge;
import com.intellij.openapi.components.ApplicationComponent;
import com.intellij.openapi.diagnostic.Logger;
import org.jetbrains.android.ddms.AdbManager;
import org.jetbrains.android.ddms.AdbNotRespondingException;
import org.jetbrains.annotations.NotNull;
/**
* @author coyote
*/
public class AndroidPlugin implements ApplicationComponent {
private static final Logger LOG = Logger.getInstance("#org.jetbrains.android.AndroidPlugin");
@NotNull
public String getComponentName() {
return "AndroidApplicationComponent";
}
public void initComponent() {
}
public void disposeComponent() {
try {
AdbManager.run(new Runnable() {
public void run() {
AndroidDebugBridge.terminate();
}
}, false);
}
catch (AdbNotRespondingException e) {
LOG.info(e);
}
}
}
|
plugins/android/src/org/jetbrains/android/AndroidPlugin.java
|
package org.jetbrains.android;
import com.android.ddmlib.AndroidDebugBridge;
import com.intellij.openapi.components.ApplicationComponent;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.ultimate.PluginVerifier;
import com.intellij.ultimate.UltimateVerifier;
import org.jetbrains.android.ddms.AdbManager;
import org.jetbrains.android.ddms.AdbNotRespondingException;
import org.jetbrains.annotations.NotNull;
/**
* @author coyote
*/
public class AndroidPlugin implements ApplicationComponent {
private static final Logger LOG = Logger.getInstance("#org.jetbrains.android.AndroidPlugin");
@NotNull
public String getComponentName() {
return "AndroidApplicationComponent";
}
public void initComponent() {
}
public void disposeComponent() {
try {
AdbManager.run(new Runnable() {
public void run() {
AndroidDebugBridge.terminate();
}
}, false);
}
catch (AdbNotRespondingException e) {
LOG.info(e);
}
}
}
|
Fix CE compilation
|
plugins/android/src/org/jetbrains/android/AndroidPlugin.java
|
Fix CE compilation
|
<ide><path>lugins/android/src/org/jetbrains/android/AndroidPlugin.java
<ide> import com.android.ddmlib.AndroidDebugBridge;
<ide> import com.intellij.openapi.components.ApplicationComponent;
<ide> import com.intellij.openapi.diagnostic.Logger;
<del>import com.intellij.ultimate.PluginVerifier;
<del>import com.intellij.ultimate.UltimateVerifier;
<ide> import org.jetbrains.android.ddms.AdbManager;
<ide> import org.jetbrains.android.ddms.AdbNotRespondingException;
<ide> import org.jetbrains.annotations.NotNull;
|
|
Java
|
apache-2.0
|
b34858c1f88969a74accb7b45f63fd18da01b432
| 0 |
hasonhai/hadoop,mix/hadoop,hasonhai/hadoop,myeoje/PhillyYarn,huiyi-learning/hadoop,an3m0na/hadoop,MjAbuz/hadoop,satishpatil2k13/ODPI-Hadoop,zjshen/hadoop-in-docker,Microsoft-CISL/hadoop-prototype,jokes000/hadoop,Microsoft-CISL/hadoop-prototype,zjshen/hadoop-in-docker,an3m0na/hadoop,zrccxyb62/hadoop,an3m0na/hadoop,MjAbuz/hadoop,hasonhai/hadoop,satishpatil2k13/ODPI-Hadoop,huiyi-learning/hadoop,jokes000/hadoop,jokes000/hadoop,zrccxyb62/hadoop,satishpatil2k13/ODPI-Hadoop,mix/hadoop,zrccxyb62/hadoop,Microsoft-CISL/hadoop-prototype,jokes000/hadoop,zrccxyb62/hadoop,huiyi-learning/hadoop,myeoje/PhillyYarn,MjAbuz/hadoop,an3m0na/hadoop,intel-hadoop/hadoop,MjAbuz/hadoop,myeoje/PhillyYarn,intel-hadoop/hadoop,mix/hadoop,hasonhai/hadoop,an3m0na/hadoop,huiyi-learning/hadoop,zrccxyb62/hadoop,an3m0na/hadoop,huiyi-learning/hadoop,satishpatil2k13/ODPI-Hadoop,zjshen/hadoop-in-docker,zjshen/hadoop-in-docker,satishpatil2k13/ODPI-Hadoop,hasonhai/hadoop,an3m0na/hadoop,hasonhai/hadoop,zjshen/hadoop-in-docker,jokes000/hadoop,intel-hadoop/hadoop,mix/hadoop,Microsoft-CISL/hadoop-prototype,jokes000/hadoop,intel-hadoop/hadoop,zrccxyb62/hadoop,jokes000/hadoop,huiyi-learning/hadoop,mix/hadoop,zjshen/hadoop-in-docker,satishpatil2k13/ODPI-Hadoop,jokes000/hadoop,zjshen/hadoop-in-docker,Microsoft-CISL/hadoop-prototype,huiyi-learning/hadoop,MjAbuz/hadoop,Microsoft-CISL/hadoop-prototype,mix/hadoop,hasonhai/hadoop,intel-hadoop/hadoop,MjAbuz/hadoop,mix/hadoop,zjshen/hadoop-in-docker,intel-hadoop/hadoop,Microsoft-CISL/hadoop-prototype,intel-hadoop/hadoop,myeoje/PhillyYarn,satishpatil2k13/ODPI-Hadoop,hasonhai/hadoop,myeoje/PhillyYarn,satishpatil2k13/ODPI-Hadoop,mix/hadoop,MjAbuz/hadoop,huiyi-learning/hadoop,an3m0na/hadoop,myeoje/PhillyYarn,zrccxyb62/hadoop,myeoje/PhillyYarn,myeoje/PhillyYarn,Microsoft-CISL/hadoop-prototype
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.security.token.delegation;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.io.Text;
import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.IOException;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import javax.crypto.SecretKey;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.security.AccessControlException;
import org.apache.hadoop.security.HadoopKerberosName;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.SecretManager;
import org.apache.hadoop.util.Daemon;
import org.apache.hadoop.util.Time;
import com.google.common.base.Preconditions;
@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce", "Hive"})
@InterfaceStability.Evolving
public abstract
class AbstractDelegationTokenSecretManager<TokenIdent
extends AbstractDelegationTokenIdentifier>
extends SecretManager<TokenIdent> {
private static final Log LOG = LogFactory
.getLog(AbstractDelegationTokenSecretManager.class);
/**
* Cache of currently valid tokens, mapping from DelegationTokenIdentifier
* to DelegationTokenInformation. Protected by this object lock.
*/
protected final Map<TokenIdent, DelegationTokenInformation> currentTokens
= new HashMap<TokenIdent, DelegationTokenInformation>();
/**
* Sequence number to create DelegationTokenIdentifier.
* Protected by this object lock.
*/
protected int delegationTokenSequenceNumber = 0;
/**
* Access to allKeys is protected by this object lock
*/
protected final Map<Integer, DelegationKey> allKeys
= new HashMap<Integer, DelegationKey>();
/**
* Access to currentId is protected by this object lock.
*/
protected int currentId = 0;
/**
* Access to currentKey is protected by this object lock
*/
private DelegationKey currentKey;
private long keyUpdateInterval;
private long tokenMaxLifetime;
private long tokenRemoverScanInterval;
private long tokenRenewInterval;
/**
* Whether to store a token's tracking ID in its TokenInformation.
* Can be overridden by a subclass.
*/
protected boolean storeTokenTrackingId;
private Thread tokenRemoverThread;
protected volatile boolean running;
/**
* If the delegation token update thread holds this lock, it will
* not get interrupted.
*/
protected Object noInterruptsLock = new Object();
public AbstractDelegationTokenSecretManager(long delegationKeyUpdateInterval,
long delegationTokenMaxLifetime, long delegationTokenRenewInterval,
long delegationTokenRemoverScanInterval) {
this.keyUpdateInterval = delegationKeyUpdateInterval;
this.tokenMaxLifetime = delegationTokenMaxLifetime;
this.tokenRenewInterval = delegationTokenRenewInterval;
this.tokenRemoverScanInterval = delegationTokenRemoverScanInterval;
this.storeTokenTrackingId = false;
}
/** should be called before this object is used */
public void startThreads() throws IOException {
Preconditions.checkState(!running);
updateCurrentKey();
synchronized (this) {
running = true;
tokenRemoverThread = new Daemon(new ExpiredTokenRemover());
tokenRemoverThread.start();
}
}
/**
* Reset all data structures and mutable state.
*/
public synchronized void reset() {
currentId = 0;
allKeys.clear();
setDelegationTokenSeqNum(0);
currentTokens.clear();
}
/**
* Add a previously used master key to cache (when NN restarts),
* should be called before activate().
* */
public synchronized void addKey(DelegationKey key) throws IOException {
if (running) // a safety check
throw new IOException("Can't add delegation key to a running SecretManager.");
if (key.getKeyId() > currentId) {
currentId = key.getKeyId();
}
allKeys.put(key.getKeyId(), key);
}
public synchronized DelegationKey[] getAllKeys() {
return allKeys.values().toArray(new DelegationKey[0]);
}
// HDFS
protected void logUpdateMasterKey(DelegationKey key) throws IOException {
return;
}
// HDFS
protected void logExpireToken(TokenIdent ident) throws IOException {
return;
}
// RM
protected void storeNewMasterKey(DelegationKey key) throws IOException {
return;
}
// for ZK based secretManager
protected void updateMasterKey(DelegationKey key) throws IOException{
return;
}
// RM
protected void removeStoredMasterKey(DelegationKey key) {
return;
}
// RM
protected void storeNewToken(TokenIdent ident, long renewDate) throws IOException{
return;
}
// RM
protected void removeStoredToken(TokenIdent ident) throws IOException {
}
// RM
protected void updateStoredToken(TokenIdent ident, long renewDate) throws IOException {
return;
}
/**
* For subclasses externalizing the storage, for example Zookeeper
* based implementations
*/
protected int getDelegationTokenSeqNum() {
return delegationTokenSequenceNumber;
}
/**
* For subclasses externalizing the storage, for example Zookeeper
* based implementations
*/
protected int incrementDelegationTokenSeqNum() {
return ++delegationTokenSequenceNumber;
}
/**
* For subclasses externalizing the storage, for example Zookeeper
* based implementations
*/
protected void setDelegationTokenSeqNum(int seqNum) {
delegationTokenSequenceNumber = seqNum;
}
/**
* For subclasses externalizing the storage, for example Zookeeper
* based implementations
*/
protected DelegationKey getDelegationKey(int keyId) {
return allKeys.get(keyId);
}
/**
* For subclasses externalizing the storage, for example Zookeeper
* based implementations
*/
protected void storeDelegationKey(DelegationKey key) throws IOException {
allKeys.put(key.getKeyId(), key);
storeNewMasterKey(key);
}
/**
* For subclasses externalizing the storage, for example Zookeeper
* based implementations
*/
protected void updateDelegationKey(DelegationKey key) throws IOException {
allKeys.put(key.getKeyId(), key);
updateMasterKey(key);
}
/**
* For subclasses externalizing the storage, for example Zookeeper
* based implementations
*/
protected DelegationTokenInformation getTokenInfo(TokenIdent ident) {
return currentTokens.get(ident);
}
/**
* For subclasses externalizing the storage, for example Zookeeper
* based implementations
*/
protected void storeToken(TokenIdent ident,
DelegationTokenInformation tokenInfo) throws IOException {
currentTokens.put(ident, tokenInfo);
storeNewToken(ident, tokenInfo.getRenewDate());
}
/**
* For subclasses externalizing the storage, for example Zookeeper
* based implementations
*/
protected void updateToken(TokenIdent ident,
DelegationTokenInformation tokenInfo) throws IOException {
currentTokens.put(ident, tokenInfo);
updateStoredToken(ident, tokenInfo.getRenewDate());
}
/**
* This method is intended to be used for recovering persisted delegation
* tokens. It must be called before this secret manager is activated (before
* startThreads() is called).
* @param identifier identifier read from persistent storage
* @param renewDate token renew time
* @throws IOException
*/
public synchronized void addPersistedDelegationToken(
TokenIdent identifier, long renewDate) throws IOException {
if (running) {
// a safety check
throw new IOException(
"Can't add persisted delegation token to a running SecretManager.");
}
int keyId = identifier.getMasterKeyId();
DelegationKey dKey = allKeys.get(keyId);
if (dKey == null) {
LOG.warn("No KEY found for persisted identifier " + identifier.toString());
return;
}
byte[] password = createPassword(identifier.getBytes(), dKey.getKey());
if (identifier.getSequenceNumber() > delegationTokenSequenceNumber) {
delegationTokenSequenceNumber = identifier.getSequenceNumber();
}
if (getTokenInfo(identifier) == null) {
currentTokens.put(identifier, new DelegationTokenInformation(renewDate,
password, getTrackingIdIfEnabled(identifier)));
} else {
throw new IOException("Same delegation token being added twice.");
}
}
/**
* Update the current master key
* This is called once by startThreads before tokenRemoverThread is created,
* and only by tokenRemoverThread afterwards.
*/
private void updateCurrentKey() throws IOException {
LOG.info("Updating the current master key for generating delegation tokens");
/* Create a new currentKey with an estimated expiry date. */
int newCurrentId;
synchronized (this) {
newCurrentId = currentId+1;
}
DelegationKey newKey = new DelegationKey(newCurrentId, System
.currentTimeMillis()
+ keyUpdateInterval + tokenMaxLifetime, generateSecret());
//Log must be invoked outside the lock on 'this'
logUpdateMasterKey(newKey);
synchronized (this) {
currentId = newKey.getKeyId();
currentKey = newKey;
storeDelegationKey(currentKey);
}
}
/**
* Update the current master key for generating delegation tokens
* It should be called only by tokenRemoverThread.
*/
void rollMasterKey() throws IOException {
synchronized (this) {
removeExpiredKeys();
/* set final expiry date for retiring currentKey */
currentKey.setExpiryDate(Time.now() + tokenMaxLifetime);
/*
* currentKey might have been removed by removeExpiredKeys(), if
* updateMasterKey() isn't called at expected interval. Add it back to
* allKeys just in case.
*/
updateDelegationKey(currentKey);
}
updateCurrentKey();
}
private synchronized void removeExpiredKeys() {
long now = Time.now();
for (Iterator<Map.Entry<Integer, DelegationKey>> it = allKeys.entrySet()
.iterator(); it.hasNext();) {
Map.Entry<Integer, DelegationKey> e = it.next();
if (e.getValue().getExpiryDate() < now) {
it.remove();
// ensure the tokens generated by this current key can be recovered
// with this current key after this current key is rolled
if(!e.getValue().equals(currentKey))
removeStoredMasterKey(e.getValue());
}
}
}
@Override
protected synchronized byte[] createPassword(TokenIdent identifier) {
int sequenceNum;
long now = Time.now();
sequenceNum = incrementDelegationTokenSeqNum();
identifier.setIssueDate(now);
identifier.setMaxDate(now + tokenMaxLifetime);
identifier.setMasterKeyId(currentId);
identifier.setSequenceNumber(sequenceNum);
LOG.info("Creating password for identifier: " + identifier);
byte[] password = createPassword(identifier.getBytes(), currentKey.getKey());
DelegationTokenInformation tokenInfo = new DelegationTokenInformation(now
+ tokenRenewInterval, password, getTrackingIdIfEnabled(identifier));
try {
storeToken(identifier, tokenInfo);
} catch (IOException ioe) {
LOG.error("Could not store token !!", ioe);
}
return password;
}
/**
* Find the DelegationTokenInformation for the given token id, and verify that
* if the token is expired. Note that this method should be called with
* acquiring the secret manager's monitor.
*/
protected DelegationTokenInformation checkToken(TokenIdent identifier)
throws InvalidToken {
assert Thread.holdsLock(this);
DelegationTokenInformation info = getTokenInfo(identifier);
if (info == null) {
throw new InvalidToken("token (" + identifier.toString()
+ ") can't be found in cache");
}
if (info.getRenewDate() < Time.now()) {
throw new InvalidToken("token (" + identifier.toString() + ") is expired");
}
return info;
}
@Override
public synchronized byte[] retrievePassword(TokenIdent identifier)
throws InvalidToken {
return checkToken(identifier).getPassword();
}
protected String getTrackingIdIfEnabled(TokenIdent ident) {
if (storeTokenTrackingId) {
return ident.getTrackingId();
}
return null;
}
public synchronized String getTokenTrackingId(TokenIdent identifier) {
DelegationTokenInformation info = getTokenInfo(identifier);
if (info == null) {
return null;
}
return info.getTrackingId();
}
/**
* Verifies that the given identifier and password are valid and match.
* @param identifier Token identifier.
* @param password Password in the token.
* @throws InvalidToken
*/
public synchronized void verifyToken(TokenIdent identifier, byte[] password)
throws InvalidToken {
byte[] storedPassword = retrievePassword(identifier);
if (!Arrays.equals(password, storedPassword)) {
throw new InvalidToken("token (" + identifier
+ ") is invalid, password doesn't match");
}
}
/**
* Renew a delegation token.
* @param token the token to renew
* @param renewer the full principal name of the user doing the renewal
* @return the new expiration time
* @throws InvalidToken if the token is invalid
* @throws AccessControlException if the user can't renew token
*/
public synchronized long renewToken(Token<TokenIdent> token,
String renewer) throws InvalidToken, IOException {
ByteArrayInputStream buf = new ByteArrayInputStream(token.getIdentifier());
DataInputStream in = new DataInputStream(buf);
TokenIdent id = createIdentifier();
id.readFields(in);
LOG.info("Token renewal for identifier: " + id + "; total currentTokens "
+ currentTokens.size());
long now = Time.now();
if (id.getMaxDate() < now) {
throw new InvalidToken(renewer + " tried to renew an expired token");
}
if ((id.getRenewer() == null) || (id.getRenewer().toString().isEmpty())) {
throw new AccessControlException(renewer +
" tried to renew a token without a renewer");
}
if (!id.getRenewer().toString().equals(renewer)) {
throw new AccessControlException(renewer +
" tries to renew a token with renewer " + id.getRenewer());
}
DelegationKey key = getDelegationKey(id.getMasterKeyId());
if (key == null) {
throw new InvalidToken("Unable to find master key for keyId="
+ id.getMasterKeyId()
+ " from cache. Failed to renew an unexpired token"
+ " with sequenceNumber=" + id.getSequenceNumber());
}
byte[] password = createPassword(token.getIdentifier(), key.getKey());
if (!Arrays.equals(password, token.getPassword())) {
throw new AccessControlException(renewer +
" is trying to renew a token with wrong password");
}
long renewTime = Math.min(id.getMaxDate(), now + tokenRenewInterval);
String trackingId = getTrackingIdIfEnabled(id);
DelegationTokenInformation info = new DelegationTokenInformation(renewTime,
password, trackingId);
if (getTokenInfo(id) == null) {
throw new InvalidToken("Renewal request for unknown token");
}
updateToken(id, info);
return renewTime;
}
/**
* Cancel a token by removing it from cache.
* @return Identifier of the canceled token
* @throws InvalidToken for invalid token
* @throws AccessControlException if the user isn't allowed to cancel
*/
public synchronized TokenIdent cancelToken(Token<TokenIdent> token,
String canceller) throws IOException {
ByteArrayInputStream buf = new ByteArrayInputStream(token.getIdentifier());
DataInputStream in = new DataInputStream(buf);
TokenIdent id = createIdentifier();
id.readFields(in);
LOG.info("Token cancelation requested for identifier: "+id);
if (id.getUser() == null) {
throw new InvalidToken("Token with no owner");
}
String owner = id.getUser().getUserName();
Text renewer = id.getRenewer();
HadoopKerberosName cancelerKrbName = new HadoopKerberosName(canceller);
String cancelerShortName = cancelerKrbName.getShortName();
if (!canceller.equals(owner)
&& (renewer == null || renewer.toString().isEmpty() || !cancelerShortName
.equals(renewer.toString()))) {
throw new AccessControlException(canceller
+ " is not authorized to cancel the token");
}
DelegationTokenInformation info = currentTokens.remove(id);
if (info == null) {
throw new InvalidToken("Token not found");
}
removeStoredToken(id);
return id;
}
/**
* Convert the byte[] to a secret key
* @param key the byte[] to create the secret key from
* @return the secret key
*/
public static SecretKey createSecretKey(byte[] key) {
return SecretManager.createSecretKey(key);
}
/** Class to encapsulate a token's renew date and password. */
@InterfaceStability.Evolving
public static class DelegationTokenInformation {
long renewDate;
byte[] password;
String trackingId;
public DelegationTokenInformation(long renewDate, byte[] password) {
this(renewDate, password, null);
}
public DelegationTokenInformation(long renewDate, byte[] password,
String trackingId) {
this.renewDate = renewDate;
this.password = password;
this.trackingId = trackingId;
}
/** returns renew date */
public long getRenewDate() {
return renewDate;
}
/** returns password */
byte[] getPassword() {
return password;
}
/** returns tracking id */
public String getTrackingId() {
return trackingId;
}
}
/** Remove expired delegation tokens from cache */
private void removeExpiredToken() throws IOException {
long now = Time.now();
Set<TokenIdent> expiredTokens = new HashSet<TokenIdent>();
synchronized (this) {
Iterator<Map.Entry<TokenIdent, DelegationTokenInformation>> i =
currentTokens.entrySet().iterator();
while (i.hasNext()) {
Map.Entry<TokenIdent, DelegationTokenInformation> entry = i.next();
long renewDate = entry.getValue().getRenewDate();
if (renewDate < now) {
expiredTokens.add(entry.getKey());
i.remove();
}
}
}
// don't hold lock on 'this' to avoid edit log updates blocking token ops
for (TokenIdent ident : expiredTokens) {
logExpireToken(ident);
removeStoredToken(ident);
}
}
public void stopThreads() {
if (LOG.isDebugEnabled())
LOG.debug("Stopping expired delegation token remover thread");
running = false;
if (tokenRemoverThread != null) {
synchronized (noInterruptsLock) {
tokenRemoverThread.interrupt();
}
try {
tokenRemoverThread.join();
} catch (InterruptedException e) {
throw new RuntimeException(
"Unable to join on token removal thread", e);
}
}
}
/**
* Is the secret manager running?
* @return true if the secret manager is running
*/
public synchronized boolean isRunning() {
return running;
}
private class ExpiredTokenRemover extends Thread {
private long lastMasterKeyUpdate;
private long lastTokenCacheCleanup;
@Override
public void run() {
LOG.info("Starting expired delegation token remover thread, "
+ "tokenRemoverScanInterval=" + tokenRemoverScanInterval
/ (60 * 1000) + " min(s)");
try {
while (running) {
long now = Time.now();
if (lastMasterKeyUpdate + keyUpdateInterval < now) {
try {
rollMasterKey();
lastMasterKeyUpdate = now;
} catch (IOException e) {
LOG.error("Master key updating failed: ", e);
}
}
if (lastTokenCacheCleanup + tokenRemoverScanInterval < now) {
removeExpiredToken();
lastTokenCacheCleanup = now;
}
try {
Thread.sleep(Math.min(5000, keyUpdateInterval)); // 5 seconds
} catch (InterruptedException ie) {
LOG.error("ExpiredTokenRemover received " + ie);
}
}
} catch (Throwable t) {
LOG.error("ExpiredTokenRemover thread received unexpected exception", t);
Runtime.getRuntime().exit(-1);
}
}
}
}
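The no-op protected methods above (storeNewMasterKey, removeStoredMasterKey, storeNewToken, updateStoredToken, removeStoredToken, and the sequence-number accessors) are the hooks a subclass overrides to externalize state, as the ZooKeeper comments suggest. A minimal illustrative subclass is sketched below; it assumes the same package as the class above so DelegationKey and the identifier base class resolve, it assumes the usual no-arg constructor and abstract getKind() of AbstractDelegationTokenIdentifier, and MyTokenIdent plus the in-memory key map are invented placeholders rather than Hadoop classes:

// Hypothetical token identifier; only the kind text is needed for this sketch.
class MyTokenIdent extends AbstractDelegationTokenIdentifier {
  static final org.apache.hadoop.io.Text KIND = new org.apache.hadoop.io.Text("MY_TOKEN");
  @Override
  public org.apache.hadoop.io.Text getKind() { return KIND; }
}

// Hypothetical subclass showing where externalized state would be written.
class InMemoryStoreSecretManager
    extends AbstractDelegationTokenSecretManager<MyTokenIdent> {

  // Stand-in for an external store (ZooKeeper, a database, ...).
  private final java.util.Map<Integer, DelegationKey> keyStore =
      new java.util.concurrent.ConcurrentHashMap<Integer, DelegationKey>();

  InMemoryStoreSecretManager(long keyUpdateInterval, long tokenMaxLifetime,
      long tokenRenewInterval, long tokenRemoverScanInterval) {
    super(keyUpdateInterval, tokenMaxLifetime, tokenRenewInterval, tokenRemoverScanInterval);
  }

  @Override
  public MyTokenIdent createIdentifier() {
    return new MyTokenIdent();
  }

  @Override
  protected void storeNewMasterKey(DelegationKey key) {
    keyStore.put(key.getKeyId(), key); // a real subclass would persist this externally
  }

  @Override
  protected void removeStoredMasterKey(DelegationKey key) {
    keyStore.remove(key.getKeyId());
  }

  @Override
  protected void storeNewToken(MyTokenIdent ident, long renewDate) {
    // persist (ident, renewDate) so addPersistedDelegationToken() can replay it on restart
  }

  @Override
  protected void removeStoredToken(MyTokenIdent ident) {
    // delete the persisted entry for a cancelled or expired token
  }
}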
|
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.security.token.delegation;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.io.Text;
import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.IOException;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import javax.crypto.SecretKey;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.security.AccessControlException;
import org.apache.hadoop.security.HadoopKerberosName;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.SecretManager;
import org.apache.hadoop.util.Daemon;
import org.apache.hadoop.util.Time;
import com.google.common.base.Preconditions;
@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce", "Hive"})
@InterfaceStability.Evolving
public abstract
class AbstractDelegationTokenSecretManager<TokenIdent
extends AbstractDelegationTokenIdentifier>
extends SecretManager<TokenIdent> {
private static final Log LOG = LogFactory
.getLog(AbstractDelegationTokenSecretManager.class);
/**
* Cache of currently valid tokens, mapping from DelegationTokenIdentifier
* to DelegationTokenInformation. Protected by this object lock.
*/
protected final Map<TokenIdent, DelegationTokenInformation> currentTokens
= new HashMap<TokenIdent, DelegationTokenInformation>();
/**
* Sequence number to create DelegationTokenIdentifier.
* Protected by this object lock.
*/
protected int delegationTokenSequenceNumber = 0;
/**
* Access to allKeys is protected by this object lock
*/
protected final Map<Integer, DelegationKey> allKeys
= new HashMap<Integer, DelegationKey>();
/**
* Access to currentId is protected by this object lock.
*/
protected int currentId = 0;
/**
* Access to currentKey is protected by this object lock
*/
private DelegationKey currentKey;
private long keyUpdateInterval;
private long tokenMaxLifetime;
private long tokenRemoverScanInterval;
private long tokenRenewInterval;
/**
* Whether to store a token's tracking ID in its TokenInformation.
* Can be overridden by a subclass.
*/
protected boolean storeTokenTrackingId;
private Thread tokenRemoverThread;
protected volatile boolean running;
/**
* If the delegation token update thread holds this lock, it will
* not get interrupted.
*/
protected Object noInterruptsLock = new Object();
public AbstractDelegationTokenSecretManager(long delegationKeyUpdateInterval,
long delegationTokenMaxLifetime, long delegationTokenRenewInterval,
long delegationTokenRemoverScanInterval) {
this.keyUpdateInterval = delegationKeyUpdateInterval;
this.tokenMaxLifetime = delegationTokenMaxLifetime;
this.tokenRenewInterval = delegationTokenRenewInterval;
this.tokenRemoverScanInterval = delegationTokenRemoverScanInterval;
this.storeTokenTrackingId = false;
}
/** should be called before this object is used */
public void startThreads() throws IOException {
Preconditions.checkState(!running);
updateCurrentKey();
synchronized (this) {
running = true;
tokenRemoverThread = new Daemon(new ExpiredTokenRemover());
tokenRemoverThread.start();
}
}
/**
* Reset all data structures and mutable state.
*/
public synchronized void reset() {
currentId = 0;
allKeys.clear();
setDelegationTokenSeqNum(0);
currentTokens.clear();
}
/**
* Add a previously used master key to cache (when NN restarts),
* should be called before activate().
* */
public synchronized void addKey(DelegationKey key) throws IOException {
if (running) // a safety check
throw new IOException("Can't add delegation key to a running SecretManager.");
if (key.getKeyId() > currentId) {
currentId = key.getKeyId();
}
storeDelegationKey(key);
}
public synchronized DelegationKey[] getAllKeys() {
return allKeys.values().toArray(new DelegationKey[0]);
}
// HDFS
protected void logUpdateMasterKey(DelegationKey key) throws IOException {
return;
}
// HDFS
protected void logExpireToken(TokenIdent ident) throws IOException {
return;
}
// RM
protected void storeNewMasterKey(DelegationKey key) throws IOException {
return;
}
// for ZK based secretManager
protected void updateMasterKey(DelegationKey key) throws IOException{
return;
}
// RM
protected void removeStoredMasterKey(DelegationKey key) {
return;
}
// RM
protected void storeNewToken(TokenIdent ident, long renewDate) throws IOException{
return;
}
// RM
protected void removeStoredToken(TokenIdent ident) throws IOException {
}
// RM
protected void updateStoredToken(TokenIdent ident, long renewDate) throws IOException {
return;
}
/**
* For subclasses externalizing the storage, for example Zookeeper
* based implementations
*/
protected int getDelegationTokenSeqNum() {
return delegationTokenSequenceNumber;
}
/**
* For subclasses externalizing the storage, for example Zookeeper
* based implementations
*/
protected int incrementDelegationTokenSeqNum() {
return ++delegationTokenSequenceNumber;
}
/**
* For subclasses externalizing the storage, for example Zookeeper
* based implementations
*/
protected void setDelegationTokenSeqNum(int seqNum) {
delegationTokenSequenceNumber = seqNum;
}
/**
* For subclasses externalizing the storage, for example Zookeeper
* based implementations
*/
protected DelegationKey getDelegationKey(int keyId) {
return allKeys.get(keyId);
}
/**
* For subclasses externalizing the storage, for example Zookeeper
* based implementations
*/
protected void storeDelegationKey(DelegationKey key) throws IOException {
allKeys.put(key.getKeyId(), key);
storeNewMasterKey(key);
}
/**
* For subclasses externalizing the storage, for example Zookeeper
* based implementations
*/
protected void updateDelegationKey(DelegationKey key) throws IOException {
allKeys.put(key.getKeyId(), key);
updateMasterKey(key);
}
/**
* For subclasses externalizing the storage, for example Zookeeper
* based implementations
*/
protected DelegationTokenInformation getTokenInfo(TokenIdent ident) {
return currentTokens.get(ident);
}
/**
* For subclasses externalizing the storage, for example Zookeeper
* based implementations
*/
protected void storeToken(TokenIdent ident,
DelegationTokenInformation tokenInfo) throws IOException {
currentTokens.put(ident, tokenInfo);
storeNewToken(ident, tokenInfo.getRenewDate());
}
/**
* For subclasses externalizing the storage, for example Zookeeper
* based implementations
*/
protected void updateToken(TokenIdent ident,
DelegationTokenInformation tokenInfo) throws IOException {
currentTokens.put(ident, tokenInfo);
updateStoredToken(ident, tokenInfo.getRenewDate());
}
/**
* This method is intended to be used for recovering persisted delegation
* tokens
* @param identifier identifier read from persistent storage
* @param renewDate token renew time
* @throws IOException
*/
public synchronized void addPersistedDelegationToken(
TokenIdent identifier, long renewDate) throws IOException {
if (running) {
// a safety check
throw new IOException(
"Can't add persisted delegation token to a running SecretManager.");
}
int keyId = identifier.getMasterKeyId();
DelegationKey dKey = getDelegationKey(keyId);
if (dKey == null) {
LOG.warn("No KEY found for persisted identifier " + identifier.toString());
return;
}
byte[] password = createPassword(identifier.getBytes(), dKey.getKey());
int delegationTokenSeqNum = getDelegationTokenSeqNum();
if (identifier.getSequenceNumber() > delegationTokenSeqNum) {
setDelegationTokenSeqNum(identifier.getSequenceNumber());
}
if (getTokenInfo(identifier) == null) {
storeToken(identifier, new DelegationTokenInformation(renewDate,
password, getTrackingIdIfEnabled(identifier)));
} else {
throw new IOException("Same delegation token being added twice.");
}
}
/**
* Update the current master key
* This is called once by startThreads before tokenRemoverThread is created,
* and only by tokenRemoverThread afterwards.
*/
private void updateCurrentKey() throws IOException {
LOG.info("Updating the current master key for generating delegation tokens");
/* Create a new currentKey with an estimated expiry date. */
int newCurrentId;
synchronized (this) {
newCurrentId = currentId+1;
}
DelegationKey newKey = new DelegationKey(newCurrentId, System
.currentTimeMillis()
+ keyUpdateInterval + tokenMaxLifetime, generateSecret());
//Log must be invoked outside the lock on 'this'
logUpdateMasterKey(newKey);
storeNewMasterKey(newKey);
synchronized (this) {
currentId = newKey.getKeyId();
currentKey = newKey;
storeDelegationKey(currentKey);
}
}
/**
* Update the current master key for generating delegation tokens
* It should be called only by tokenRemoverThread.
*/
void rollMasterKey() throws IOException {
synchronized (this) {
removeExpiredKeys();
/* set final expiry date for retiring currentKey */
currentKey.setExpiryDate(Time.now() + tokenMaxLifetime);
/*
* currentKey might have been removed by removeExpiredKeys(), if
* updateMasterKey() isn't called at expected interval. Add it back to
* allKeys just in case.
*/
updateDelegationKey(currentKey);
}
updateCurrentKey();
}
private synchronized void removeExpiredKeys() {
long now = Time.now();
for (Iterator<Map.Entry<Integer, DelegationKey>> it = allKeys.entrySet()
.iterator(); it.hasNext();) {
Map.Entry<Integer, DelegationKey> e = it.next();
if (e.getValue().getExpiryDate() < now) {
it.remove();
// ensure the tokens generated by this current key can be recovered
// with this current key after this current key is rolled
if(!e.getValue().equals(currentKey))
removeStoredMasterKey(e.getValue());
}
}
}
@Override
protected synchronized byte[] createPassword(TokenIdent identifier) {
int sequenceNum;
long now = Time.now();
sequenceNum = incrementDelegationTokenSeqNum();
identifier.setIssueDate(now);
identifier.setMaxDate(now + tokenMaxLifetime);
identifier.setMasterKeyId(currentId);
identifier.setSequenceNumber(sequenceNum);
LOG.info("Creating password for identifier: " + identifier);
byte[] password = createPassword(identifier.getBytes(), currentKey.getKey());
DelegationTokenInformation tokenInfo = new DelegationTokenInformation(now
+ tokenRenewInterval, password, getTrackingIdIfEnabled(identifier));
try {
storeToken(identifier, tokenInfo);
} catch (IOException ioe) {
LOG.error("Could not store token !!", ioe);
}
return password;
}
/**
   * Find the DelegationTokenInformation for the given token id, and verify
   * whether the token has expired. Note that this method should be called
   * while holding the secret manager's monitor.
*/
protected DelegationTokenInformation checkToken(TokenIdent identifier)
throws InvalidToken {
assert Thread.holdsLock(this);
DelegationTokenInformation info = getTokenInfo(identifier);
if (info == null) {
throw new InvalidToken("token (" + identifier.toString()
+ ") can't be found in cache");
}
if (info.getRenewDate() < Time.now()) {
throw new InvalidToken("token (" + identifier.toString() + ") is expired");
}
return info;
}
@Override
public synchronized byte[] retrievePassword(TokenIdent identifier)
throws InvalidToken {
return checkToken(identifier).getPassword();
}
protected String getTrackingIdIfEnabled(TokenIdent ident) {
if (storeTokenTrackingId) {
return ident.getTrackingId();
}
return null;
}
public synchronized String getTokenTrackingId(TokenIdent identifier) {
DelegationTokenInformation info = getTokenInfo(identifier);
if (info == null) {
return null;
}
return info.getTrackingId();
}
/**
* Verifies that the given identifier and password are valid and match.
* @param identifier Token identifier.
* @param password Password in the token.
* @throws InvalidToken
*/
public synchronized void verifyToken(TokenIdent identifier, byte[] password)
throws InvalidToken {
byte[] storedPassword = retrievePassword(identifier);
if (!Arrays.equals(password, storedPassword)) {
throw new InvalidToken("token (" + identifier
+ ") is invalid, password doesn't match");
}
}
/**
* Renew a delegation token.
* @param token the token to renew
* @param renewer the full principal name of the user doing the renewal
* @return the new expiration time
* @throws InvalidToken if the token is invalid
* @throws AccessControlException if the user can't renew token
*/
public synchronized long renewToken(Token<TokenIdent> token,
String renewer) throws InvalidToken, IOException {
ByteArrayInputStream buf = new ByteArrayInputStream(token.getIdentifier());
DataInputStream in = new DataInputStream(buf);
TokenIdent id = createIdentifier();
id.readFields(in);
LOG.info("Token renewal for identifier: " + id + "; total currentTokens "
+ currentTokens.size());
long now = Time.now();
if (id.getMaxDate() < now) {
throw new InvalidToken(renewer + " tried to renew an expired token");
}
if ((id.getRenewer() == null) || (id.getRenewer().toString().isEmpty())) {
throw new AccessControlException(renewer +
" tried to renew a token without a renewer");
}
if (!id.getRenewer().toString().equals(renewer)) {
throw new AccessControlException(renewer +
" tries to renew a token with renewer " + id.getRenewer());
}
DelegationKey key = getDelegationKey(id.getMasterKeyId());
if (key == null) {
throw new InvalidToken("Unable to find master key for keyId="
+ id.getMasterKeyId()
+ " from cache. Failed to renew an unexpired token"
+ " with sequenceNumber=" + id.getSequenceNumber());
}
byte[] password = createPassword(token.getIdentifier(), key.getKey());
if (!Arrays.equals(password, token.getPassword())) {
throw new AccessControlException(renewer +
" is trying to renew a token with wrong password");
}
long renewTime = Math.min(id.getMaxDate(), now + tokenRenewInterval);
String trackingId = getTrackingIdIfEnabled(id);
DelegationTokenInformation info = new DelegationTokenInformation(renewTime,
password, trackingId);
if (getTokenInfo(id) == null) {
throw new InvalidToken("Renewal request for unknown token");
}
updateToken(id, info);
return renewTime;
}
/**
* Cancel a token by removing it from cache.
* @return Identifier of the canceled token
* @throws InvalidToken for invalid token
* @throws AccessControlException if the user isn't allowed to cancel
*/
public synchronized TokenIdent cancelToken(Token<TokenIdent> token,
String canceller) throws IOException {
ByteArrayInputStream buf = new ByteArrayInputStream(token.getIdentifier());
DataInputStream in = new DataInputStream(buf);
TokenIdent id = createIdentifier();
id.readFields(in);
LOG.info("Token cancelation requested for identifier: "+id);
if (id.getUser() == null) {
throw new InvalidToken("Token with no owner");
}
String owner = id.getUser().getUserName();
Text renewer = id.getRenewer();
HadoopKerberosName cancelerKrbName = new HadoopKerberosName(canceller);
String cancelerShortName = cancelerKrbName.getShortName();
if (!canceller.equals(owner)
&& (renewer == null || renewer.toString().isEmpty() || !cancelerShortName
.equals(renewer.toString()))) {
throw new AccessControlException(canceller
+ " is not authorized to cancel the token");
}
DelegationTokenInformation info = currentTokens.remove(id);
if (info == null) {
throw new InvalidToken("Token not found");
}
removeStoredToken(id);
return id;
}
/**
* Convert the byte[] to a secret key
* @param key the byte[] to create the secret key from
* @return the secret key
*/
public static SecretKey createSecretKey(byte[] key) {
return SecretManager.createSecretKey(key);
}
/** Class to encapsulate a token's renew date and password. */
@InterfaceStability.Evolving
public static class DelegationTokenInformation {
long renewDate;
byte[] password;
String trackingId;
public DelegationTokenInformation(long renewDate, byte[] password) {
this(renewDate, password, null);
}
public DelegationTokenInformation(long renewDate, byte[] password,
String trackingId) {
this.renewDate = renewDate;
this.password = password;
this.trackingId = trackingId;
}
/** returns renew date */
public long getRenewDate() {
return renewDate;
}
/** returns password */
byte[] getPassword() {
return password;
}
/** returns tracking id */
public String getTrackingId() {
return trackingId;
}
}
/** Remove expired delegation tokens from cache */
private void removeExpiredToken() throws IOException {
long now = Time.now();
Set<TokenIdent> expiredTokens = new HashSet<TokenIdent>();
synchronized (this) {
Iterator<Map.Entry<TokenIdent, DelegationTokenInformation>> i =
currentTokens.entrySet().iterator();
while (i.hasNext()) {
Map.Entry<TokenIdent, DelegationTokenInformation> entry = i.next();
long renewDate = entry.getValue().getRenewDate();
if (renewDate < now) {
expiredTokens.add(entry.getKey());
i.remove();
}
}
}
// don't hold lock on 'this' to avoid edit log updates blocking token ops
for (TokenIdent ident : expiredTokens) {
logExpireToken(ident);
removeStoredToken(ident);
}
}
public void stopThreads() {
if (LOG.isDebugEnabled())
LOG.debug("Stopping expired delegation token remover thread");
running = false;
if (tokenRemoverThread != null) {
synchronized (noInterruptsLock) {
tokenRemoverThread.interrupt();
}
try {
tokenRemoverThread.join();
} catch (InterruptedException e) {
throw new RuntimeException(
"Unable to join on token removal thread", e);
}
}
}
/**
   * Is the secret manager running?
   * @return true if the secret manager is running
*/
public synchronized boolean isRunning() {
return running;
}
private class ExpiredTokenRemover extends Thread {
private long lastMasterKeyUpdate;
private long lastTokenCacheCleanup;
@Override
public void run() {
LOG.info("Starting expired delegation token remover thread, "
+ "tokenRemoverScanInterval=" + tokenRemoverScanInterval
/ (60 * 1000) + " min(s)");
try {
while (running) {
long now = Time.now();
if (lastMasterKeyUpdate + keyUpdateInterval < now) {
try {
rollMasterKey();
lastMasterKeyUpdate = now;
} catch (IOException e) {
LOG.error("Master key updating failed: ", e);
}
}
if (lastTokenCacheCleanup + tokenRemoverScanInterval < now) {
removeExpiredToken();
lastTokenCacheCleanup = now;
}
try {
Thread.sleep(Math.min(5000, keyUpdateInterval)); // 5 seconds
} catch (InterruptedException ie) {
LOG.error("ExpiredTokenRemover received " + ie);
}
}
} catch (Throwable t) {
LOG.error("ExpiredTokenRemover thread received unexpected exception", t);
Runtime.getRuntime().exit(-1);
}
}
}
}
|
HADOOP-11017. Addendum to fix RM HA. KMS delegation token secret manager should be able to use zookeeper as store. (Arun Suresh via kasha)
(cherry picked from commit ef784a2e08c2452026a85ae382a956ff7deecbd0)
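For readers unfamiliar with the extension points involved, the sketch below (hypothetical class and field names; this is not the actual ZooKeeper-backed implementation) shows how a subclass can externalize the shared state by overriding the protected hooks of AbstractDelegationTokenSecretManager, using in-memory maps as stand-ins for a store such as ZooKeeper. As the diff further down shows, this addendum switches the restart-recovery paths (addKey, addPersistedDelegationToken) back to the local in-memory maps rather than these overridable hooks.
// Hypothetical sketch only; not the real ZooKeeper-backed secret manager.
package org.example.security;
import java.io.IOException;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenIdentifier;
import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSecretManager;
import org.apache.hadoop.security.token.delegation.DelegationKey;
public abstract class ExternalStoreSecretManagerSketch<T extends AbstractDelegationTokenIdentifier>
    extends AbstractDelegationTokenSecretManager<T> {
  // Stand-ins for a shared external store (for example ZooKeeper znodes).
  private final AtomicInteger sharedSeqNum = new AtomicInteger();
  private final Map<Integer, DelegationKey> sharedKeys = new ConcurrentHashMap<>();
  private final Map<T, Long> sharedTokens = new ConcurrentHashMap<>();
  protected ExternalStoreSecretManagerSketch(long keyUpdateInterval, long tokenMaxLifetime,
      long tokenRenewInterval, long tokenRemoverScanInterval) {
    super(keyUpdateInterval, tokenMaxLifetime, tokenRenewInterval, tokenRemoverScanInterval);
  }
  @Override
  protected int getDelegationTokenSeqNum() {
    return sharedSeqNum.get(); // read the shared counter instead of the local field
  }
  @Override
  protected int incrementDelegationTokenSeqNum() {
    return sharedSeqNum.incrementAndGet(); // every manager instance sees the same sequence
  }
  @Override
  protected void setDelegationTokenSeqNum(int seqNum) {
    sharedSeqNum.set(seqNum);
  }
  @Override
  protected void storeNewMasterKey(DelegationKey key) throws IOException {
    sharedKeys.put(key.getKeyId(), key); // write-through of newly rolled master keys
  }
  @Override
  protected void removeStoredMasterKey(DelegationKey key) {
    sharedKeys.remove(key.getKeyId());
  }
  @Override
  protected void storeNewToken(T ident, long renewDate) throws IOException {
    sharedTokens.put(ident, renewDate); // persist so a peer manager can recover the token
  }
  @Override
  protected void updateStoredToken(T ident, long renewDate) throws IOException {
    sharedTokens.put(ident, renewDate);
  }
  @Override
  protected void removeStoredToken(T ident) throws IOException {
    sharedTokens.remove(ident);
  }
}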
|
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java
|
HADOOP-11017. Addendum to fix RM HA. KMS delegation token secret manager should be able to use zookeeper as store. (Arun Suresh via kasha)
|
<ide><path>adoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java
<ide>
<ide> /**
<ide> * Add a previously used master key to cache (when NN restarts),
<del> * should be called before activate().
<add> * should be called before activate().
<ide> * */
<ide> public synchronized void addKey(DelegationKey key) throws IOException {
<ide> if (running) // a safety check
<ide> if (key.getKeyId() > currentId) {
<ide> currentId = key.getKeyId();
<ide> }
<del> storeDelegationKey(key);
<add> allKeys.put(key.getKeyId(), key);
<ide> }
<ide>
<ide> public synchronized DelegationKey[] getAllKeys() {
<ide> /**
<ide> * This method is intended to be used for recovering persisted delegation
<ide> * tokens
<add> * This method must be called before this secret manager is activated (before
<add> * startThreads() is called)
<ide> * @param identifier identifier read from persistent storage
<ide> * @param renewDate token renew time
<ide> * @throws IOException
<ide> "Can't add persisted delegation token to a running SecretManager.");
<ide> }
<ide> int keyId = identifier.getMasterKeyId();
<del> DelegationKey dKey = getDelegationKey(keyId);
<add> DelegationKey dKey = allKeys.get(keyId);
<ide> if (dKey == null) {
<ide> LOG.warn("No KEY found for persisted identifier " + identifier.toString());
<ide> return;
<ide> }
<ide> byte[] password = createPassword(identifier.getBytes(), dKey.getKey());
<del> int delegationTokenSeqNum = getDelegationTokenSeqNum();
<del> if (identifier.getSequenceNumber() > delegationTokenSeqNum) {
<del> setDelegationTokenSeqNum(identifier.getSequenceNumber());
<add> if (identifier.getSequenceNumber() > delegationTokenSequenceNumber) {
<add> delegationTokenSequenceNumber = identifier.getSequenceNumber();
<ide> }
<ide> if (getTokenInfo(identifier) == null) {
<del> storeToken(identifier, new DelegationTokenInformation(renewDate,
<add> currentTokens.put(identifier, new DelegationTokenInformation(renewDate,
<ide> password, getTrackingIdIfEnabled(identifier)));
<ide> } else {
<ide> throw new IOException("Same delegation token being added twice.");
<ide> + keyUpdateInterval + tokenMaxLifetime, generateSecret());
<ide> //Log must be invoked outside the lock on 'this'
<ide> logUpdateMasterKey(newKey);
<del> storeNewMasterKey(newKey);
<ide> synchronized (this) {
<ide> currentId = newKey.getKeyId();
<ide> currentKey = newKey;
|
|
Java
|
bsd-2-clause
|
4d4de60745d0da6c8595a6696eeb435de699dba8
| 0 |
vicky-katara/fuse-jna,vicky-katara/fuse-jna,vicky-katara/fuse-jna
|
package net.vicky;
import java.io.File;
import java.nio.ByteBuffer;
import java.util.Arrays;
import net.fusejna.DirectoryFiller;
import net.fusejna.ErrorCodes;
import net.fusejna.FlockCommand;
import net.fusejna.FuseException;
import net.fusejna.StructFlock.FlockWrapper;
import net.fusejna.StructFuseFileInfo.FileInfoWrapper;
import net.fusejna.StructStat.StatWrapper;
import net.fusejna.StructStatvfs.StatvfsWrapper;
import net.fusejna.StructTimeBuffer.TimeBufferWrapper;
import net.fusejna.XattrFiller;
import net.fusejna.XattrListFiller;
import net.fusejna.types.TypeMode.ModeWrapper;
import net.fusejna.types.TypeMode.NodeType;
public class VRamdisk extends net.fusejna.FuseFilesystem
{
public static void main(final String[] args) throws FuseException
{
if (args.length < 2) {
System.err.println("Usage: ramdisk <mountpoint> <size>");
			System.err.println("Arguments given: " + Arrays.toString(args));
System.exit(1);
}
final int capacity = Integer.parseInt(args[1]) * 1024 * 1024;
System.out.println("Ramdisk of size " + capacity + " bytes loaded at " + args[0] + ".");
new VRamdisk(capacity).mount(args[0]);
}
VickyFS openVFS;
VRamdisk(final int size)
{
openVFS = new VickyFS(size);
}
@Override
public int access(final String path, final int access)
{
// TODO Auto-generated method stub
return 0;
}
@Override
public void afterUnmount(final File mountPoint)
{
// TODO Auto-generated method stub
}
@Override
public void beforeMount(final File mountPoint)
{
// TODO Auto-generated method stub
}
@Override
public int bmap(final String path, final FileInfoWrapper info)
{
// TODO Auto-generated method stub
return 0;
}
@Override
public int chmod(final String path, final ModeWrapper mode)
{
// TODO Auto-generated method stub
return 0;
}
@Override
public int chown(final String path, final long uid, final long gid)
{
// TODO Auto-generated method stub
return 0;
}
@Override
public int create(final String path, final ModeWrapper mode, final FileInfoWrapper info)
{
System.out.println("create called with " + path + " on " + openVFS);
final int existing = open(path, info);
if (existing == -ErrorCodes.ENOENT()) {
return mknod(path, mode, 0);
}
else {
return 0;
}
}
@Override
public void destroy()
{
// TODO Auto-generated method stub
}
@Override
public int fgetattr(final String path, final StatWrapper stat, final FileInfoWrapper info)
{
// TODO Auto-generated method stub
return 0;
}
@Override
public int flush(final String path, final FileInfoWrapper info)
{
System.out.println("flush called with " + path);
if (openVFS.close_file_point((int) info.fh()) == true) {
return 0;
}
return -1;
}
@Override
public int fsync(final String path, final int datasync, final FileInfoWrapper info)
{
// TODO Auto-generated method stub
return 0;
}
@Override
public int fsyncdir(final String path, final int datasync, final FileInfoWrapper info)
{
// TODO Auto-generated method stub
return 0;
}
@Override
public int ftruncate(final String path, final long offset, final FileInfoWrapper info)
{
// TODO Auto-generated method stub
return 0;
}
@Override
public int getattr(final String path, final StatWrapper stat)
{
System.out.println("getattr called with " + path);
VPoint point;
if (path.equals("/") && openVFS.currentDir.name.equals("/")) {
point = openVFS.currentDir;
}
else {
point = openVFS.return_point_fully_qualified(path);
}
if (point == null) {
return -ErrorCodes.ENOENT();
}
stat.ino(point.hashCode());
if (point.isDirectory()) {
stat.nlink(2);
stat.size(point.name.length() * 2);
stat.setMode(NodeType.DIRECTORY);
}
else {
stat.nlink(1);
stat.size(point.name.length() * 2 + point.contents.size());
stat.setMode(NodeType.FILE);
}
return 0;
}
@Override
protected String getName()
{
// TODO Auto-generated method stub
return null;
}
@Override
protected String[] getOptions()
{
// TODO Auto-generated method stub
return null;
}
@Override
public int getxattr(final String path, final String xattr, final XattrFiller filler, final long size, final long position)
{
// TODO Auto-generated method stub
return 0;
}
@Override
public void init()
{
}
@Override
public int link(final String path, final String target)
{
// TODO Auto-generated method stub
return 0;
}
@Override
public int listxattr(final String path, final XattrListFiller filler)
{
// TODO Auto-generated method stub
return 0;
}
@Override
public int lock(final String path, final FileInfoWrapper info, final FlockCommand command, final FlockWrapper flock)
{
// TODO Auto-generated method stub
return 0;
}
@Override
public int mkdir(final String path, final ModeWrapper mode)
{
System.out.println("mkdir called with " + path);
return openVFS.create_point(path, VPoint.IS_DIRECTORY);
}
@Override
public int mknod(final String path, final ModeWrapper mode, final long dev)
{
System.out.println("mknod called with " + path);
return openVFS.create_point(path, VPoint.IS_FILE);
}
@Override
public int open(final String path, final FileInfoWrapper info)
{
System.out.println("open called with " + path + " on " + openVFS);
final int existingFD = openVFS.open_file(path);
System.out.println("Open: existingFD > 0:" + existingFD);
info.fh(existingFD);
return 0;
}
@Override
public int opendir(final String path, final FileInfoWrapper info)
{
System.out.println("opendir called with " + path);
return openVFS.change_dir(path) == true ? 0 : -1;
}
@Override
public int read(final String path, final ByteBuffer buffer, final long size, final long offset, final FileInfoWrapper info)
{
System.out.println("read called with " + path);
return openVFS.vread((int) info.fh(), (int) size, (int) offset, buffer);
}
@Override
public int readdir(final String path, final DirectoryFiller filler)
{
System.out.println("readdir called with " + path);
final VPoint toBeRead = openVFS.return_point(path);
if (toBeRead == null) {
System.err.println("No file returned by return_point: " + path);
return -ErrorCodes.ENOENT();
}
else if (toBeRead.isFile()) {
return -ErrorCodes.ENOTDIR();
}
else {
filler.add(toBeRead.returnChildPoints());
return 0;
}
}
@Override
public int readlink(final String path, final ByteBuffer buffer, final long size)
{
// TODO Auto-generated method stub
return 0;
}
@Override
public int release(final String path, final FileInfoWrapper info)
{
// TODO Auto-generated method stub
return 0;
}
@Override
public int releasedir(final String path, final FileInfoWrapper info)
{
// TODO Auto-generated method stub
return 0;
}
@Override
public int removexattr(final String path, final String xattr)
{
// TODO Auto-generated method stub
return 0;
}
@Override
public int rename(final String path, final String newName)
{
// TODO Auto-generated method stub
return 0;
}
@Override
public int rmdir(final String path)
{
System.out.println("rmdir called with " + path);
return openVFS.remove_point(path) == true ? 0 : -1;
}
@Override
public int setxattr(final String path, final String xattr, final ByteBuffer value, final long size, final int flags,
final int position)
{
// TODO Auto-generated method stub
return 0;
}
@Override
public int statfs(final String path, final StatvfsWrapper wrapper)
{
// TODO Auto-generated method stub
return 0;
}
@Override
public int symlink(final String path, final String target)
{
// TODO Auto-generated method stub
return 0;
}
@Override
public int truncate(final String path, final long offset)
{
// TODO Auto-generated method stub
return 0;
}
@Override
public int unlink(final String path)
{
System.out.println("unlink called with " + path);
return openVFS.remove_point(path) == true ? 0 : -1;
}
@Override
public int utimens(final String path, final TimeBufferWrapper wrapper)
{
// TODO Auto-generated method stub
return 0;
}
@Override
public int write(final String path, final ByteBuffer buf, final long bufSize, final long writeOffset,
final FileInfoWrapper info)
{
System.out.println("write called with " + path);
return openVFS.vwrite((int) info.fh(), (int) bufSize, (int) writeOffset, buf);
}
}
|
src/main/java/net/vicky/VRamdisk.java
|
package net.vicky;
import java.io.File;
import java.nio.ByteBuffer;
import java.util.Arrays;
import net.fusejna.DirectoryFiller;
import net.fusejna.ErrorCodes;
import net.fusejna.FlockCommand;
import net.fusejna.FuseException;
import net.fusejna.StructFlock.FlockWrapper;
import net.fusejna.StructFuseFileInfo.FileInfoWrapper;
import net.fusejna.StructStat.StatWrapper;
import net.fusejna.StructStatvfs.StatvfsWrapper;
import net.fusejna.StructTimeBuffer.TimeBufferWrapper;
import net.fusejna.XattrFiller;
import net.fusejna.XattrListFiller;
import net.fusejna.types.TypeMode.ModeWrapper;
import net.fusejna.types.TypeMode.NodeType;
public class VRamdisk extends net.fusejna.FuseFilesystem
{
public static void main(final String[] args) throws FuseException
{
if (args.length < 2) {
System.err.println("Usage: ramdisk <mountpoint> <size>");
			System.err.println("Arguments given: " + Arrays.toString(args));
System.exit(1);
}
final int capacity = Integer.parseInt(args[1]) * 1024 * 1024;
System.out.println("Ramdisk of size " + capacity + " bytes loaded at " + args[0] + ".");
new VRamdisk(capacity).mount(args[0]);
}
VickyFS openVFS;
VRamdisk(final int size)
{
openVFS = new VickyFS(size);
}
@Override
public int access(final String path, final int access)
{
// TODO Auto-generated method stub
return 0;
}
@Override
public void afterUnmount(final File mountPoint)
{
// TODO Auto-generated method stub
}
@Override
public void beforeMount(final File mountPoint)
{
// TODO Auto-generated method stub
}
@Override
public int bmap(final String path, final FileInfoWrapper info)
{
// TODO Auto-generated method stub
return 0;
}
@Override
public int chmod(final String path, final ModeWrapper mode)
{
// TODO Auto-generated method stub
return 0;
}
@Override
public int chown(final String path, final long uid, final long gid)
{
// TODO Auto-generated method stub
return 0;
}
@Override
public int create(final String path, final ModeWrapper mode, final FileInfoWrapper info)
{
System.out.println("create called with " + path + " on " + openVFS);
return open(path, info);
}
@Override
public void destroy()
{
// TODO Auto-generated method stub
}
@Override
public int fgetattr(final String path, final StatWrapper stat, final FileInfoWrapper info)
{
// TODO Auto-generated method stub
return 0;
}
@Override
public int flush(final String path, final FileInfoWrapper info)
{
System.out.println("flush called with " + path);
if (openVFS.close_file_point((int) info.fh()) == true) {
return 0;
}
return -1;
}
@Override
public int fsync(final String path, final int datasync, final FileInfoWrapper info)
{
// TODO Auto-generated method stub
return 0;
}
@Override
public int fsyncdir(final String path, final int datasync, final FileInfoWrapper info)
{
// TODO Auto-generated method stub
return 0;
}
@Override
public int ftruncate(final String path, final long offset, final FileInfoWrapper info)
{
// TODO Auto-generated method stub
return 0;
}
@Override
public int getattr(final String path, final StatWrapper stat)
{
System.out.println("getattr called with " + path);
VPoint point;
if (path.equals("/") && openVFS.currentDir.name.equals("/")) {
point = openVFS.currentDir;
}
else {
point = openVFS.return_point_fully_qualified(path);
}
if (point == null) {
return -ErrorCodes.ENOENT();
}
stat.ino(point.hashCode());
if (point.isDirectory()) {
stat.nlink(2);
stat.size(point.name.length() * 2);
stat.setMode(NodeType.DIRECTORY);
}
else {
stat.nlink(1);
stat.size(point.name.length() * 2 + point.contents.size());
stat.setMode(NodeType.FILE);
}
return 0;
}
@Override
protected String getName()
{
// TODO Auto-generated method stub
return null;
}
@Override
protected String[] getOptions()
{
// TODO Auto-generated method stub
return null;
}
@Override
public int getxattr(final String path, final String xattr, final XattrFiller filler, final long size, final long position)
{
// TODO Auto-generated method stub
return 0;
}
@Override
public void init()
{
}
@Override
public int link(final String path, final String target)
{
// TODO Auto-generated method stub
return 0;
}
@Override
public int listxattr(final String path, final XattrListFiller filler)
{
// TODO Auto-generated method stub
return 0;
}
@Override
public int lock(final String path, final FileInfoWrapper info, final FlockCommand command, final FlockWrapper flock)
{
// TODO Auto-generated method stub
return 0;
}
@Override
public int mkdir(final String path, final ModeWrapper mode)
{
System.out.println("mkdir called with " + path);
return openVFS.create_point(path, VPoint.IS_DIRECTORY);
}
@Override
public int mknod(final String path, final ModeWrapper mode, final long dev)
{
System.out.println("mknod called with " + path);
return openVFS.create_point(path, VPoint.IS_FILE);
}
@Override
public int open(final String path, final FileInfoWrapper info)
{
System.out.println("open called with " + path + " on " + openVFS);
final int existingFD = openVFS.open_file(path);
if (existingFD > 0) {
System.out.println("Open: existingFD > 0:" + existingFD);
info.fh(existingFD);
return existingFD;
}
else {
System.out.println("Open: existingFD !!!> 0:" + existingFD);
openVFS.create_point(path, VPoint.IS_FILE);
final int newFD = openVFS.open_file(path);
info.fh(newFD);
return existingFD;
}
}
@Override
public int opendir(final String path, final FileInfoWrapper info)
{
System.out.println("opendir called with " + path);
return openVFS.change_dir(path) == true ? 0 : -1;
}
@Override
public int read(final String path, final ByteBuffer buffer, final long size, final long offset, final FileInfoWrapper info)
{
System.out.println("read called with " + path);
return openVFS.vread((int) info.fh(), (int) size, (int) offset, buffer);
}
@Override
public int readdir(final String path, final DirectoryFiller filler)
{
System.out.println("readdir called with " + path);
final VPoint toBeRead = openVFS.return_point(path);
if (toBeRead == null) {
System.err.println("No file returned by return_point: " + path);
return -ErrorCodes.ENOENT();
}
else if (toBeRead.isFile()) {
return -ErrorCodes.ENOTDIR();
}
else {
filler.add(toBeRead.returnChildPoints());
return 0;
}
}
@Override
public int readlink(final String path, final ByteBuffer buffer, final long size)
{
// TODO Auto-generated method stub
return 0;
}
@Override
public int release(final String path, final FileInfoWrapper info)
{
// TODO Auto-generated method stub
return 0;
}
@Override
public int releasedir(final String path, final FileInfoWrapper info)
{
// TODO Auto-generated method stub
return 0;
}
@Override
public int removexattr(final String path, final String xattr)
{
// TODO Auto-generated method stub
return 0;
}
@Override
public int rename(final String path, final String newName)
{
// TODO Auto-generated method stub
return 0;
}
@Override
public int rmdir(final String path)
{
System.out.println("rmdir called with " + path);
return openVFS.remove_point(path) == true ? 0 : -1;
}
@Override
public int setxattr(final String path, final String xattr, final ByteBuffer value, final long size, final int flags,
final int position)
{
// TODO Auto-generated method stub
return 0;
}
@Override
public int statfs(final String path, final StatvfsWrapper wrapper)
{
// TODO Auto-generated method stub
return 0;
}
@Override
public int symlink(final String path, final String target)
{
// TODO Auto-generated method stub
return 0;
}
@Override
public int truncate(final String path, final long offset)
{
// TODO Auto-generated method stub
return 0;
}
@Override
public int unlink(final String path)
{
System.out.println("unlink called with " + path);
return openVFS.remove_point(path) == true ? 0 : -1;
}
@Override
public int utimens(final String path, final TimeBufferWrapper wrapper)
{
// TODO Auto-generated method stub
return 0;
}
@Override
public int write(final String path, final ByteBuffer buf, final long bufSize, final long writeOffset,
final FileInfoWrapper info)
{
System.out.println("write called with " + path);
return openVFS.vwrite((int) info.fh(), (int) bufSize, (int) writeOffset, buf);
}
}
|
Added
|
src/main/java/net/vicky/VRamdisk.java
|
Added
|
<ide><path>rc/main/java/net/vicky/VRamdisk.java
<ide> public int create(final String path, final ModeWrapper mode, final FileInfoWrapper info)
<ide> {
<ide> System.out.println("create called with " + path + " on " + openVFS);
<del> return open(path, info);
<add> final int existing = open(path, info);
<add> if (existing == -ErrorCodes.ENOENT()) {
<add> return mknod(path, mode, 0);
<add> }
<add> else {
<add> return 0;
<add> }
<ide> }
<ide>
<ide> @Override
<ide> {
<ide> System.out.println("open called with " + path + " on " + openVFS);
<ide> final int existingFD = openVFS.open_file(path);
<del> if (existingFD > 0) {
<del> System.out.println("Open: existingFD > 0:" + existingFD);
<del> info.fh(existingFD);
<del> return existingFD;
<del> }
<del> else {
<del> System.out.println("Open: existingFD !!!> 0:" + existingFD);
<del> openVFS.create_point(path, VPoint.IS_FILE);
<del> final int newFD = openVFS.open_file(path);
<del> info.fh(newFD);
<del> return existingFD;
<del> }
<add> System.out.println("Open: existingFD > 0:" + existingFD);
<add> info.fh(existingFD);
<add> return 0;
<ide> }
<ide>
<ide> @Override
|
|
Java
|
apache-2.0
|
1093d9f5cb3ccec4d1d3cf3be59bb17b4eda5b7d
| 0 |
erichwang/presto,hgschmie/presto,haozhun/presto,twitter-forks/presto,ptkool/presto,11xor6/presto,hgschmie/presto,treasure-data/presto,jxiang/presto,stewartpark/presto,sopel39/presto,prateek1306/presto,jxiang/presto,sopel39/presto,yuananf/presto,smartnews/presto,sopel39/presto,facebook/presto,shixuan-fan/presto,raghavsethi/presto,ebyhr/presto,dain/presto,smartnews/presto,wyukawa/presto,stewartpark/presto,martint/presto,Yaliang/presto,martint/presto,yuananf/presto,ptkool/presto,sopel39/presto,miniway/presto,mvp/presto,wyukawa/presto,arhimondr/presto,treasure-data/presto,jxiang/presto,Teradata/presto,prestodb/presto,prestodb/presto,elonazoulay/presto,zzhao0/presto,prestodb/presto,Teradata/presto,electrum/presto,prateek1306/presto,miniway/presto,gh351135612/presto,gh351135612/presto,prestodb/presto,facebook/presto,jxiang/presto,dain/presto,losipiuk/presto,losipiuk/presto,Teradata/presto,Praveen2112/presto,stewartpark/presto,martint/presto,twitter-forks/presto,erichwang/presto,11xor6/presto,elonazoulay/presto,treasure-data/presto,twitter-forks/presto,hgschmie/presto,shixuan-fan/presto,nezihyigitbasi/presto,EvilMcJerkface/presto,11xor6/presto,prestodb/presto,arhimondr/presto,zzhao0/presto,wyukawa/presto,ebyhr/presto,arhimondr/presto,hgschmie/presto,electrum/presto,ptkool/presto,electrum/presto,nezihyigitbasi/presto,ebyhr/presto,mvp/presto,miniway/presto,ptkool/presto,raghavsethi/presto,prateek1306/presto,Yaliang/presto,miniway/presto,losipiuk/presto,arhimondr/presto,arhimondr/presto,treasure-data/presto,electrum/presto,stewartpark/presto,prateek1306/presto,haozhun/presto,zzhao0/presto,EvilMcJerkface/presto,haozhun/presto,ptkool/presto,shixuan-fan/presto,facebook/presto,raghavsethi/presto,twitter-forks/presto,Teradata/presto,stewartpark/presto,Yaliang/presto,zzhao0/presto,martint/presto,Teradata/presto,dain/presto,raghavsethi/presto,youngwookim/presto,erichwang/presto,jxiang/presto,twitter-forks/presto,haozhun/presto,Praveen2112/presto,gh351135612/presto,nezihyigitbasi/presto,raghavsethi/presto,shixuan-fan/presto,elonazoulay/presto,nezihyigitbasi/presto,11xor6/presto,youngwookim/presto,martint/presto,gh351135612/presto,Yaliang/presto,prateek1306/presto,gh351135612/presto,mvp/presto,facebook/presto,youngwookim/presto,Yaliang/presto,losipiuk/presto,EvilMcJerkface/presto,treasure-data/presto,yuananf/presto,mvp/presto,Praveen2112/presto,treasure-data/presto,wyukawa/presto,yuananf/presto,ebyhr/presto,dain/presto,Praveen2112/presto,losipiuk/presto,prestodb/presto,yuananf/presto,elonazoulay/presto,smartnews/presto,11xor6/presto,EvilMcJerkface/presto,erichwang/presto,EvilMcJerkface/presto,smartnews/presto,facebook/presto,mvp/presto,elonazoulay/presto,electrum/presto,zzhao0/presto,nezihyigitbasi/presto,hgschmie/presto,haozhun/presto,erichwang/presto,wyukawa/presto,youngwookim/presto,dain/presto,smartnews/presto,Praveen2112/presto,miniway/presto,youngwookim/presto,shixuan-fan/presto,sopel39/presto,ebyhr/presto
|
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.server.security;
import com.google.common.base.Throwables;
import com.sun.security.auth.module.Krb5LoginModule;
import io.airlift.log.Logger;
import org.ietf.jgss.GSSContext;
import org.ietf.jgss.GSSCredential;
import org.ietf.jgss.GSSException;
import org.ietf.jgss.GSSManager;
import org.ietf.jgss.GSSName;
import org.ietf.jgss.Oid;
import javax.annotation.PreDestroy;
import javax.inject.Inject;
import javax.security.auth.Subject;
import javax.security.auth.kerberos.KerberosPrincipal;
import javax.security.auth.login.AppConfigurationEntry;
import javax.security.auth.login.Configuration;
import javax.security.auth.login.LoginContext;
import javax.security.auth.login.LoginException;
import javax.servlet.Filter;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletRequestWrapper;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.io.InputStream;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.security.Principal;
import java.security.PrivilegedAction;
import java.util.Base64;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import java.util.Optional;
import static com.google.common.io.ByteStreams.copy;
import static com.google.common.io.ByteStreams.nullOutputStream;
import static com.google.common.net.HttpHeaders.AUTHORIZATION;
import static com.google.common.net.HttpHeaders.WWW_AUTHENTICATE;
import static java.lang.String.format;
import static javax.security.auth.login.AppConfigurationEntry.LoginModuleControlFlag.REQUIRED;
import static javax.servlet.http.HttpServletResponse.SC_UNAUTHORIZED;
import static org.ietf.jgss.GSSCredential.ACCEPT_ONLY;
import static org.ietf.jgss.GSSCredential.INDEFINITE_LIFETIME;
public class SpnegoFilter
implements Filter
{
private static final Logger LOG = Logger.get(SpnegoFilter.class);
private static final String NEGOTIATE_SCHEME = "Negotiate";
private final GSSManager gssManager = GSSManager.getInstance();
private final LoginContext loginContext;
private final GSSCredential serverCredential;
@Inject
public SpnegoFilter(KerberosConfig config)
{
System.setProperty("java.security.krb5.conf", config.getKerberosConfig().getAbsolutePath());
try {
String hostname = InetAddress.getLocalHost().getCanonicalHostName().toLowerCase(Locale.US);
String servicePrincipal = config.getServiceName() + "/" + hostname;
loginContext = new LoginContext("", null, null, new Configuration()
{
@Override
public AppConfigurationEntry[] getAppConfigurationEntry(String name)
{
Map<String, String> options = new HashMap<>();
options.put("refreshKrb5Config", "true");
options.put("doNotPrompt", "true");
if (LOG.isDebugEnabled()) {
options.put("debug", "true");
}
if (config.getKeytab() != null) {
options.put("keyTab", config.getKeytab().getAbsolutePath());
}
options.put("isInitiator", "false");
options.put("useKeyTab", "true");
options.put("principal", servicePrincipal);
options.put("storeKey", "true");
return new AppConfigurationEntry[] {new AppConfigurationEntry(Krb5LoginModule.class.getName(), REQUIRED, options)};
}
});
loginContext.login();
serverCredential = doAs(loginContext.getSubject(), () -> gssManager.createCredential(
gssManager.createName(config.getServiceName() + "@" + hostname, GSSName.NT_HOSTBASED_SERVICE),
INDEFINITE_LIFETIME,
new Oid[] {
new Oid("1.2.840.113554.1.2.2"), // kerberos 5
new Oid("1.3.6.1.5.5.2") // spnego
},
ACCEPT_ONLY));
}
catch (LoginException | UnknownHostException e) {
throw Throwables.propagate(e);
}
}
@PreDestroy
public void shutdown()
{
try {
loginContext.logout();
}
catch (LoginException e) {
Throwables.propagate(e);
}
}
@Override
public void doFilter(ServletRequest servletRequest, ServletResponse servletResponse, FilterChain nextFilter)
throws IOException, ServletException
{
// skip auth for http
if (!servletRequest.isSecure()) {
nextFilter.doFilter(servletRequest, servletResponse);
return;
}
HttpServletRequest request = (HttpServletRequest) servletRequest;
HttpServletResponse response = (HttpServletResponse) servletResponse;
String header = request.getHeader(AUTHORIZATION);
String requestSpnegoToken = null;
if (header != null) {
String[] parts = header.split("\\s+");
if (parts.length == 2 && parts[0].equals(NEGOTIATE_SCHEME)) {
try {
requestSpnegoToken = parts[1];
Optional<Principal> principal = authenticate(parts[1]);
if (principal.isPresent()) {
nextFilter.doFilter(new HttpServletRequestWrapper(request)
{
@Override
public Principal getUserPrincipal()
{
return principal.get();
}
}, servletResponse);
return;
}
}
catch (GSSException | RuntimeException e) {
throw new RuntimeException("Authentication error for token: " + parts[1], e);
}
}
}
sendChallenge(request, response, requestSpnegoToken);
}
private Optional<Principal> authenticate(String token)
throws GSSException
{
GSSContext context = doAs(loginContext.getSubject(), () -> gssManager.createContext(serverCredential));
try {
byte[] inputToken = Base64.getDecoder().decode(token);
context.acceptSecContext(inputToken, 0, inputToken.length);
// We can't hold on to the GSS context because HTTP is stateless, so fail
// if it can't be set up in a single challenge-response cycle
if (context.isEstablished()) {
return Optional.of(new KerberosPrincipal(context.getSrcName().toString()));
}
LOG.debug("Failed to establish GSS context for token %s", token);
}
catch (GSSException e) {
// ignore and fail the authentication
LOG.debug(e, "Authentication failed for token %s", token);
}
finally {
try {
context.dispose();
}
catch (GSSException e) {
// ignore
}
}
return Optional.empty();
}
private static void sendChallenge(HttpServletRequest request, HttpServletResponse response, String invalidSpnegoToken)
throws IOException
{
// If we send the challenge without consuming the body of the request,
// the Jetty server will close the connection after sending the response.
// The client interprets this as a failed request and does not resend
// the request with the authentication header.
// We can avoid this behavior in the Jetty client by reading and discarding
// the entire body of the unauthenticated request before sending the response.
skipRequestBody(request);
if (invalidSpnegoToken != null) {
response.sendError(SC_UNAUTHORIZED, format("Authentication failed for token %s", invalidSpnegoToken));
}
else {
response.setStatus(SC_UNAUTHORIZED);
}
response.setHeader(WWW_AUTHENTICATE, NEGOTIATE_SCHEME);
}
private static void skipRequestBody(HttpServletRequest request)
throws IOException
{
try (InputStream inputStream = request.getInputStream()) {
copy(inputStream, nullOutputStream());
}
}
@Override
public void init(FilterConfig filterConfig)
throws ServletException
{
}
@Override
public void destroy()
{
}
private interface GssSupplier<T>
{
T get()
throws GSSException;
}
private static <T> T doAs(Subject subject, GssSupplier<T> action)
{
return Subject.doAs(subject, (PrivilegedAction<T>) () -> {
try {
return action.get();
}
catch (GSSException e) {
throw Throwables.propagate(e);
}
});
}
}
|
presto-main/src/main/java/com/facebook/presto/server/security/SpnegoFilter.java
|
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.server.security;
import com.google.common.base.Throwables;
import com.sun.security.auth.module.Krb5LoginModule;
import io.airlift.log.Logger;
import org.ietf.jgss.GSSContext;
import org.ietf.jgss.GSSCredential;
import org.ietf.jgss.GSSException;
import org.ietf.jgss.GSSManager;
import org.ietf.jgss.GSSName;
import org.ietf.jgss.Oid;
import javax.annotation.PreDestroy;
import javax.inject.Inject;
import javax.security.auth.Subject;
import javax.security.auth.kerberos.KerberosPrincipal;
import javax.security.auth.login.AppConfigurationEntry;
import javax.security.auth.login.Configuration;
import javax.security.auth.login.LoginContext;
import javax.security.auth.login.LoginException;
import javax.servlet.Filter;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletRequestWrapper;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.io.InputStream;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.security.Principal;
import java.security.PrivilegedAction;
import java.util.Base64;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import java.util.Optional;
import static com.google.common.io.ByteStreams.copy;
import static com.google.common.io.ByteStreams.nullOutputStream;
import static com.google.common.net.HttpHeaders.AUTHORIZATION;
import static com.google.common.net.HttpHeaders.WWW_AUTHENTICATE;
import static java.lang.String.format;
import static javax.security.auth.login.AppConfigurationEntry.LoginModuleControlFlag.REQUIRED;
import static javax.servlet.http.HttpServletResponse.SC_UNAUTHORIZED;
import static org.ietf.jgss.GSSCredential.ACCEPT_ONLY;
import static org.ietf.jgss.GSSCredential.INDEFINITE_LIFETIME;
public class SpnegoFilter
implements Filter
{
private static final Logger LOG = Logger.get(SpnegoFilter.class);
private static final String NEGOTIATE_SCHEME = "Negotiate";
private final GSSManager gssManager = GSSManager.getInstance();
private final LoginContext loginContext;
private final GSSCredential serverCredential;
@Inject
public SpnegoFilter(KerberosConfig config)
{
System.setProperty("java.security.krb5.conf", config.getKerberosConfig().getAbsolutePath());
try {
String hostname = InetAddress.getLocalHost().getCanonicalHostName().toLowerCase(Locale.US);
String servicePrincipal = config.getServiceName() + "/" + hostname;
loginContext = new LoginContext("", null, null, new Configuration()
{
@Override
public AppConfigurationEntry[] getAppConfigurationEntry(String name)
{
Map<String, String> options = new HashMap<>();
options.put("refreshKrb5Config", "true");
options.put("doNotPrompt", "true");
if (LOG.isDebugEnabled()) {
options.put("debug", "true");
}
if (config.getKeytab() != null) {
options.put("keyTab", config.getKeytab().getAbsolutePath());
}
options.put("isInitiator", "false");
options.put("useKeyTab", "true");
options.put("principal", servicePrincipal);
options.put("storeKey", "true");
return new AppConfigurationEntry[] {new AppConfigurationEntry(Krb5LoginModule.class.getName(), REQUIRED, options)};
}
});
loginContext.login();
serverCredential = doAs(loginContext.getSubject(), () -> gssManager.createCredential(
gssManager.createName(config.getServiceName() + "@" + hostname, GSSName.NT_HOSTBASED_SERVICE),
INDEFINITE_LIFETIME,
new Oid[] {
new Oid("1.2.840.113554.1.2.2"), // kerberos 5
new Oid("1.3.6.1.5.5.2") // spnego
},
ACCEPT_ONLY));
}
catch (LoginException | UnknownHostException e) {
throw Throwables.propagate(e);
}
}
@PreDestroy
public void shutdown()
{
try {
loginContext.logout();
}
catch (LoginException e) {
Throwables.propagate(e);
}
}
@Override
public void doFilter(ServletRequest servletRequest, ServletResponse servletResponse, FilterChain nextFilter)
throws IOException, ServletException
{
// skip auth for http
if (!servletRequest.isSecure()) {
nextFilter.doFilter(servletRequest, servletResponse);
return;
}
HttpServletRequest request = (HttpServletRequest) servletRequest;
HttpServletResponse response = (HttpServletResponse) servletResponse;
String header = request.getHeader(AUTHORIZATION);
String requestSpnegoToken = null;
if (header != null) {
String[] parts = header.split("\\s+");
if (parts.length == 2 && parts[0].equals(NEGOTIATE_SCHEME)) {
try {
requestSpnegoToken = parts[1];
Optional<Result> authentication = authenticate(parts[1]);
if (authentication.isPresent()) {
authentication.get().getToken()
.map(token -> NEGOTIATE_SCHEME + " " + Base64.getEncoder().encodeToString(token))
.ifPresent(value -> response.setHeader(WWW_AUTHENTICATE, value));
nextFilter.doFilter(new HttpServletRequestWrapper(request)
{
@Override
public Principal getUserPrincipal()
{
return authentication.get().getPrincipal();
}
}, servletResponse);
return;
}
}
catch (GSSException | RuntimeException e) {
throw new RuntimeException("Authentication error for token: " + parts[1], e);
}
}
}
sendChallenge(request, response, requestSpnegoToken);
}
private Optional<Result> authenticate(String token)
throws GSSException
{
GSSContext context = doAs(loginContext.getSubject(), () -> gssManager.createContext(serverCredential));
try {
byte[] inputToken = Base64.getDecoder().decode(token);
byte[] outputToken = context.acceptSecContext(inputToken, 0, inputToken.length);
// We can't hold on to the GSS context because HTTP is stateless, so fail
// if it can't be set up in a single challenge-response cycle
if (context.isEstablished()) {
return Optional.of(new Result(
Optional.ofNullable(outputToken),
new KerberosPrincipal(context.getSrcName().toString())));
}
LOG.debug("Failed to establish GSS context for token %s", token);
}
catch (GSSException e) {
// ignore and fail the authentication
LOG.debug(e, "Authentication failed for token %s", token);
}
finally {
try {
context.dispose();
}
catch (GSSException e) {
// ignore
}
}
return Optional.empty();
}
private static void sendChallenge(HttpServletRequest request, HttpServletResponse response, String invalidSpnegoToken)
throws IOException
{
// If we send the challenge without consuming the body of the request,
// the Jetty server will close the connection after sending the response.
// The client interprets this as a failed request and does not resend
// the request with the authentication header.
// We can avoid this behavior in the Jetty client by reading and discarding
// the entire body of the unauthenticated request before sending the response.
skipRequestBody(request);
if (invalidSpnegoToken != null) {
response.sendError(SC_UNAUTHORIZED, format("Authentication failed for token %s", invalidSpnegoToken));
}
else {
response.setStatus(SC_UNAUTHORIZED);
}
response.setHeader(WWW_AUTHENTICATE, NEGOTIATE_SCHEME);
}
private static void skipRequestBody(HttpServletRequest request)
throws IOException
{
try (InputStream inputStream = request.getInputStream()) {
copy(inputStream, nullOutputStream());
}
}
@Override
public void init(FilterConfig filterConfig)
throws ServletException
{
}
@Override
public void destroy()
{
}
private interface GssSupplier<T>
{
T get()
throws GSSException;
}
private static <T> T doAs(Subject subject, GssSupplier<T> action)
{
return Subject.doAs(subject, (PrivilegedAction<T>) () -> {
try {
return action.get();
}
catch (GSSException e) {
throw Throwables.propagate(e);
}
});
}
private static class Result
{
private final Optional<byte[]> token;
private final KerberosPrincipal principal;
public Result(Optional<byte[]> token, KerberosPrincipal principal)
{
this.token = token;
this.principal = principal;
}
public Optional<byte[]> getToken()
{
return token;
}
public KerberosPrincipal getPrincipal()
{
return principal;
}
}
}
|
Skip sending final leg of SPNEGO authentication
After authenticating the client, the server can include a response header
containing the final leg of the authentication, which the client can use
to authenticate the server. This is not needed with TLS, since the client
has already validated the identity of the server during the handshake. No
clients use this today, so it can be removed.
|
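For illustration only, not part of the commit record above: a minimal sketch of the final SPNEGO leg that this change removes. Under the old behavior the server Base64-encoded the token returned by acceptSecContext into the WWW-Authenticate response header so the client could verify the server; with TLS that verification already happened during the handshake, so the header is simply dropped. The types used below (GSSContext, HttpServletResponse, java.util.Base64) match the filter listing, but the helper itself is hypothetical.

// Hypothetical helper showing the removed mutual-authentication leg.
private static void sendMutualAuthHeader(GSSContext context, byte[] outputToken, HttpServletResponse response)
{
    if (context.isEstablished() && outputToken != null) {
        // Only useful when the client wants to authenticate the server at the
        // HTTP layer instead of relying on the TLS handshake.
        response.setHeader("WWW-Authenticate",
                "Negotiate " + Base64.getEncoder().encodeToString(outputToken));
    }
}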
presto-main/src/main/java/com/facebook/presto/server/security/SpnegoFilter.java
|
Skip sending final leg of SPNEGO authentication
|
<ide><path>resto-main/src/main/java/com/facebook/presto/server/security/SpnegoFilter.java
<ide> if (parts.length == 2 && parts[0].equals(NEGOTIATE_SCHEME)) {
<ide> try {
<ide> requestSpnegoToken = parts[1];
<del> Optional<Result> authentication = authenticate(parts[1]);
<del> if (authentication.isPresent()) {
<del> authentication.get().getToken()
<del> .map(token -> NEGOTIATE_SCHEME + " " + Base64.getEncoder().encodeToString(token))
<del> .ifPresent(value -> response.setHeader(WWW_AUTHENTICATE, value));
<del>
<add> Optional<Principal> principal = authenticate(parts[1]);
<add> if (principal.isPresent()) {
<ide> nextFilter.doFilter(new HttpServletRequestWrapper(request)
<ide> {
<ide> @Override
<ide> public Principal getUserPrincipal()
<ide> {
<del> return authentication.get().getPrincipal();
<add> return principal.get();
<ide> }
<ide> }, servletResponse);
<ide> return;
<ide> sendChallenge(request, response, requestSpnegoToken);
<ide> }
<ide>
<del> private Optional<Result> authenticate(String token)
<add> private Optional<Principal> authenticate(String token)
<ide> throws GSSException
<ide> {
<ide> GSSContext context = doAs(loginContext.getSubject(), () -> gssManager.createContext(serverCredential));
<ide>
<ide> try {
<ide> byte[] inputToken = Base64.getDecoder().decode(token);
<del> byte[] outputToken = context.acceptSecContext(inputToken, 0, inputToken.length);
<add> context.acceptSecContext(inputToken, 0, inputToken.length);
<ide>
<ide> // We can't hold on to the GSS context because HTTP is stateless, so fail
<ide> // if it can't be set up in a single challenge-response cycle
<ide> if (context.isEstablished()) {
<del> return Optional.of(new Result(
<del> Optional.ofNullable(outputToken),
<del> new KerberosPrincipal(context.getSrcName().toString())));
<add> return Optional.of(new KerberosPrincipal(context.getSrcName().toString()));
<ide> }
<ide> LOG.debug("Failed to establish GSS context for token %s", token);
<ide> }
<ide> }
<ide> });
<ide> }
<del>
<del> private static class Result
<del> {
<del> private final Optional<byte[]> token;
<del> private final KerberosPrincipal principal;
<del>
<del> public Result(Optional<byte[]> token, KerberosPrincipal principal)
<del> {
<del> this.token = token;
<del> this.principal = principal;
<del> }
<del>
<del> public Optional<byte[]> getToken()
<del> {
<del> return token;
<del> }
<del>
<del> public KerberosPrincipal getPrincipal()
<del> {
<del> return principal;
<del> }
<del> }
<ide> }
|
|
Java
|
apache-2.0
|
f4b71aa340852537c7de0e2e8efe56a8b6ee80ef
| 0 |
fengshao0907/async-http-client,jxauchengchao/async-http-client,Aulust/async-http-client,hgl888/async-http-client,wyyl1/async-http-client,ooon/async-http-client,liuyb02/async-http-client,nemoyixin/async-http-client,dotta/async-http-client,craigwblake/async-http-client-1,olksdr/async-http-client,afelisatti/async-http-client,ALEXGUOQ/async-http-client,junjiemars/async-http-client,bomgar/async-http-client,magiccao/async-http-client,Aulust/async-http-client,thinker-fang/async-http-client,stepancheg/async-http-client,elijah513/async-http-client,drmaas/async-http-client
|
/*
* Copyright (c) 2010-2012 Sonatype, Inc. All rights reserved.
*
* This program is licensed to you under the Apache License Version 2.0,
* and you may not use this file except in compliance with the Apache License Version 2.0.
* You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the Apache License Version 2.0 is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the Apache License Version 2.0 for the specific language governing permissions and limitations there under.
*/
package org.asynchttpclient.util;
import static org.asynchttpclient.util.MiscUtils.isNonEmpty;
import org.asynchttpclient.AsyncHttpClientConfig;
import org.asynchttpclient.ProxyServer;
import org.asynchttpclient.ProxyServer.Protocol;
import org.asynchttpclient.ProxyServerSelector;
import org.asynchttpclient.Request;
import org.asynchttpclient.uri.Uri;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.net.InetSocketAddress;
import java.net.Proxy;
import java.net.ProxySelector;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.List;
import java.util.Properties;
/**
* Utilities for Proxy handling.
*
* @author cstamas
*/
public final class ProxyUtils {
private final static Logger log = LoggerFactory.getLogger(ProxyUtils.class);
private static final String PROPERTY_PREFIX = "org.asynchttpclient.AsyncHttpClientConfig.proxy.";
/**
* The host to use as proxy.
*/
public static final String PROXY_HOST = "http.proxyHost";
/**
* The port to use for the proxy.
*/
public static final String PROXY_PORT = "http.proxyPort";
/**
* The protocol to use. Is mapped to the {@link Protocol} enum.
*/
public static final String PROXY_PROTOCOL = PROPERTY_PREFIX + "protocol";
/**
* A specification of non-proxy hosts. See http://download.oracle.com/javase/1.4.2/docs/guide/net/properties.html
*/
public static final String PROXY_NONPROXYHOSTS = "http.nonProxyHosts";
/**
* The username to use for authentication for the proxy server.
*/
public static final String PROXY_USER = PROPERTY_PREFIX + "user";
/**
* The password to use for authentication for the proxy server.
*/
public static final String PROXY_PASSWORD = PROPERTY_PREFIX + "password";
private ProxyUtils() {
}
/**
* @param config the global config
* @param request the request
* @return the proxy server to be used for this request (can be null)
*/
public static ProxyServer getProxyServer(AsyncHttpClientConfig config, Request request) {
ProxyServer proxyServer = request.getProxyServer();
if (proxyServer == null) {
ProxyServerSelector selector = config.getProxyServerSelector();
if (selector != null) {
proxyServer = selector.select(request.getUri());
}
}
return ProxyUtils.avoidProxy(proxyServer, request) ? null : proxyServer;
}
/**
* @see #avoidProxy(ProxyServer, String)
*/
public static boolean avoidProxy(final ProxyServer proxyServer, final Request request) {
return avoidProxy(proxyServer, request.getUri().getHost());
}
private static boolean matchNonProxyHost(String targetHost, String nonProxyHost) {
if (nonProxyHost.length() > 1) {
if (nonProxyHost.charAt(0) == '*')
return targetHost.regionMatches(true, targetHost.length() - nonProxyHost.length() + 1, nonProxyHost, 1,
nonProxyHost.length() - 1);
else if (nonProxyHost.charAt(nonProxyHost.length() - 1) == '*')
return targetHost.regionMatches(true, 0, nonProxyHost, 0, nonProxyHost.length() - 1);
}
return nonProxyHost.equalsIgnoreCase(targetHost);
}
/**
* Checks whether proxy should be used according to nonProxyHosts settings of it, or we want to go directly to
* target host. If <code>null</code> proxy is passed in, this method returns true -- since there is NO proxy, we
 * should avoid using it. Simple hostname pattern matching using "*" is supported, but only as prefixes.
* See http://download.oracle.com/javase/1.4.2/docs/guide/net/properties.html
*
* @param proxyServer
* @param hostname the hostname
* @return true if we have to avoid proxy use (obeying non-proxy hosts settings), false otherwise.
*/
public static boolean avoidProxy(final ProxyServer proxyServer, final String hostname) {
if (proxyServer != null) {
if (hostname == null)
throw new NullPointerException("hostname");
List<String> nonProxyHosts = proxyServer.getNonProxyHosts();
if (isNonEmpty(nonProxyHosts)) {
for (String nonProxyHost : nonProxyHosts) {
if (matchNonProxyHost(hostname, nonProxyHost))
return true;
}
}
return false;
} else {
return true;
}
}
/**
* Creates a proxy server instance from the given properties.
* Currently the default http.* proxy properties are supported as well as properties specific for AHC.
*
* @param properties the properties to evaluate. Must not be null.
* @return a ProxyServer instance or null, if no valid properties were set.
* @see <a href="http://download.oracle.com/javase/1.4.2/docs/guide/net/properties.html">Networking Properties</a>
* @see #PROXY_HOST
* @see #PROXY_PORT
* @see #PROXY_PROTOCOL
* @see #PROXY_NONPROXYHOSTS
*/
public static ProxyServerSelector createProxyServerSelector(Properties properties) {
String host = properties.getProperty(PROXY_HOST);
if (host != null) {
int port = Integer.valueOf(properties.getProperty(PROXY_PORT, "80"));
Protocol protocol;
try {
protocol = Protocol.valueOf(properties.getProperty(PROXY_PROTOCOL, "HTTP"));
} catch (IllegalArgumentException e) {
protocol = Protocol.HTTP;
}
ProxyServer proxyServer = new ProxyServer(protocol, host, port, properties.getProperty(PROXY_USER),
properties.getProperty(PROXY_PASSWORD));
String nonProxyHosts = properties.getProperty(PROXY_NONPROXYHOSTS);
if (nonProxyHosts != null) {
for (String spec : nonProxyHosts.split("\\|")) {
proxyServer.addNonProxyHost(spec);
}
}
return createProxyServerSelector(proxyServer);
}
return ProxyServerSelector.NO_PROXY_SELECTOR;
}
/**
* Get a proxy server selector based on the JDK default proxy selector.
*
* @return The proxy server selector.
*/
public static ProxyServerSelector getJdkDefaultProxyServerSelector() {
return createProxyServerSelector(ProxySelector.getDefault());
}
/**
* Create a proxy server selector based on the passed in JDK proxy selector.
*
* @param proxySelector The proxy selector to use. Must not be null.
* @return The proxy server selector.
*/
public static ProxyServerSelector createProxyServerSelector(final ProxySelector proxySelector) {
return new ProxyServerSelector() {
public ProxyServer select(Uri uri) {
try {
URI javaUri = uri.toJavaNetURI();
List<Proxy> proxies = proxySelector.select(javaUri);
if (proxies != null) {
// Loop through them until we find one that we know how to use
for (Proxy proxy : proxies) {
switch (proxy.type()) {
case HTTP:
if (!(proxy.address() instanceof InetSocketAddress)) {
log.warn("Don't know how to connect to address " + proxy.address());
return null;
} else {
InetSocketAddress address = (InetSocketAddress) proxy.address();
return new ProxyServer(Protocol.HTTP, address.getHostName(), address.getPort());
}
case DIRECT:
return null;
default:
log.warn("ProxySelector returned proxy type that we don't know how to use: " + proxy.type());
break;
}
}
}
return null;
} catch (URISyntaxException e) {
log.warn(uri + " couldn't be turned into a java.net.URI", e);
return null;
}
}
};
}
/**
* Create a proxy server selector that always selects a single proxy server.
*
* @param proxyServer The proxy server to select.
* @return The proxy server selector.
*/
public static ProxyServerSelector createProxyServerSelector(final ProxyServer proxyServer) {
return new ProxyServerSelector() {
public ProxyServer select(Uri uri) {
return proxyServer;
}
};
}
}
|
api/src/main/java/org/asynchttpclient/util/ProxyUtils.java
|
/*
* Copyright (c) 2010-2012 Sonatype, Inc. All rights reserved.
*
* This program is licensed to you under the Apache License Version 2.0,
* and you may not use this file except in compliance with the Apache License Version 2.0.
* You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the Apache License Version 2.0 is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the Apache License Version 2.0 for the specific language governing permissions and limitations there under.
*/
package org.asynchttpclient.util;
import static org.asynchttpclient.util.MiscUtils.isNonEmpty;
import org.asynchttpclient.AsyncHttpClientConfig;
import org.asynchttpclient.ProxyServer;
import org.asynchttpclient.ProxyServer.Protocol;
import org.asynchttpclient.ProxyServerSelector;
import org.asynchttpclient.Request;
import org.asynchttpclient.uri.Uri;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.net.InetSocketAddress;
import java.net.Proxy;
import java.net.ProxySelector;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.List;
import java.util.Properties;
/**
* Utilities for Proxy handling.
*
* @author cstamas
*/
public final class ProxyUtils {
private final static Logger log = LoggerFactory.getLogger(ProxyUtils.class);
private static final String PROPERTY_PREFIX = "org.asynchttpclient.AsyncHttpClientConfig.proxy.";
/**
* The host to use as proxy.
*/
public static final String PROXY_HOST = "http.proxyHost";
/**
* The port to use for the proxy.
*/
public static final String PROXY_PORT = "http.proxyPort";
/**
* The protocol to use. Is mapped to the {@link Protocol} enum.
*/
public static final String PROXY_PROTOCOL = PROPERTY_PREFIX + "protocol";
/**
* A specification of non-proxy hosts. See http://download.oracle.com/javase/1.4.2/docs/guide/net/properties.html
*/
public static final String PROXY_NONPROXYHOSTS = "http.nonProxyHosts";
/**
* The username to use for authentication for the proxy server.
*/
public static final String PROXY_USER = PROPERTY_PREFIX + "user";
/**
* The password to use for authentication for the proxy server.
*/
public static final String PROXY_PASSWORD = PROPERTY_PREFIX + "password";
private ProxyUtils() {
}
/**
* @param config the global config
* @param request the request
* @return the proxy server to be used for this request (can be null)
*/
public static ProxyServer getProxyServer(AsyncHttpClientConfig config, Request request) {
ProxyServer proxyServer = request.getProxyServer();
if (proxyServer == null) {
ProxyServerSelector selector = config.getProxyServerSelector();
if (selector != null) {
proxyServer = selector.select(request.getUri());
}
}
return ProxyUtils.avoidProxy(proxyServer, request) ? null : proxyServer;
}
/**
* @see #avoidProxy(ProxyServer, String)
*/
public static boolean avoidProxy(final ProxyServer proxyServer, final Request request) {
return avoidProxy(proxyServer, request.getUri().getHost());
}
private static boolean matchNonProxyHost(String targetHost, String nonProxyHost) {
if (nonProxyHost.length() > 1) {
if (nonProxyHost.charAt(0) == '*')
return targetHost.regionMatches(true, targetHost.length() - nonProxyHost.length() + 1, nonProxyHost, 1,
nonProxyHost.length() - 1);
else if (nonProxyHost.charAt(nonProxyHost.length() - 1) == '*')
return targetHost.regionMatches(true, 0, nonProxyHost, 0, nonProxyHost.length() - 1);
}
return nonProxyHost.equalsIgnoreCase(targetHost);
}
/**
* Checks whether proxy should be used according to nonProxyHosts settings of it, or we want to go directly to
* target host. If <code>null</code> proxy is passed in, this method returns true -- since there is NO proxy, we
 * should avoid using it. Simple hostname pattern matching using "*" is supported, but only as prefixes.
* See http://download.oracle.com/javase/1.4.2/docs/guide/net/properties.html
*
* @param proxyServer
* @param hostname the hostname
* @return true if we have to avoid proxy use (obeying non-proxy hosts settings), false otherwise.
*/
public static boolean avoidProxy(final ProxyServer proxyServer, final String hostname) {
if (proxyServer != null) {
if (hostname == null)
throw new NullPointerException("hostname");
List<String> nonProxyHosts = proxyServer.getNonProxyHosts();
if (isNonEmpty(nonProxyHosts)) {
for (String nonProxyHost : nonProxyHosts) {
if (matchNonProxyHost(hostname, nonProxyHost))
return true;
}
}
return false;
} else {
return true;
}
}
/**
* Creates a proxy server instance from the given properties.
* <p/>
* Currently the default http.* proxy properties are supported as well as properties specific for AHC.
*
* @param properties the properties to evaluate. Must not be null.
* @return a ProxyServer instance or null, if no valid properties were set.
* @see <a href="http://download.oracle.com/javase/1.4.2/docs/guide/net/properties.html">Networking Properties</a>
* @see #PROXY_HOST
* @see #PROXY_PORT
* @see #PROXY_PROTOCOL
* @see #PROXY_NONPROXYHOSTS
*/
public static ProxyServerSelector createProxyServerSelector(Properties properties) {
String host = properties.getProperty(PROXY_HOST);
if (host != null) {
int port = Integer.valueOf(properties.getProperty(PROXY_PORT, "80"));
Protocol protocol;
try {
protocol = Protocol.valueOf(properties.getProperty(PROXY_PROTOCOL, "HTTP"));
} catch (IllegalArgumentException e) {
protocol = Protocol.HTTP;
}
ProxyServer proxyServer = new ProxyServer(protocol, host, port, properties.getProperty(PROXY_USER),
properties.getProperty(PROXY_PASSWORD));
String nonProxyHosts = properties.getProperty(PROXY_NONPROXYHOSTS);
if (nonProxyHosts != null) {
for (String spec : nonProxyHosts.split("\\|")) {
proxyServer.addNonProxyHost(spec);
}
}
return createProxyServerSelector(proxyServer);
}
return ProxyServerSelector.NO_PROXY_SELECTOR;
}
/**
* Get a proxy server selector based on the JDK default proxy selector.
*
* @return The proxy server selector.
*/
public static ProxyServerSelector getJdkDefaultProxyServerSelector() {
return createProxyServerSelector(ProxySelector.getDefault());
}
/**
* Create a proxy server selector based on the passed in JDK proxy selector.
*
* @param proxySelector The proxy selector to use. Must not be null.
* @return The proxy server selector.
*/
public static ProxyServerSelector createProxyServerSelector(final ProxySelector proxySelector) {
return new ProxyServerSelector() {
public ProxyServer select(Uri uri) {
try {
URI javaUri = uri.toJavaNetURI();
List<Proxy> proxies = proxySelector.select(javaUri);
if (proxies != null) {
// Loop through them until we find one that we know how to use
for (Proxy proxy : proxies) {
switch (proxy.type()) {
case HTTP:
if (!(proxy.address() instanceof InetSocketAddress)) {
log.warn("Don't know how to connect to address " + proxy.address());
return null;
} else {
InetSocketAddress address = (InetSocketAddress) proxy.address();
return new ProxyServer(Protocol.HTTP, address.getHostName(), address.getPort());
}
case DIRECT:
return null;
default:
log.warn("ProxySelector returned proxy type that we don't know how to use: " + proxy.type());
break;
}
}
}
return null;
} catch (URISyntaxException e) {
log.warn(uri + " couldn't be turned into a java.net.URI", e);
return null;
}
}
};
}
/**
* Create a proxy server selector that always selects a single proxy server.
*
* @param proxyServer The proxy server to select.
* @return The proxy server selector.
*/
public static ProxyServerSelector createProxyServerSelector(final ProxyServer proxyServer) {
return new ProxyServerSelector() {
public ProxyServer select(Uri uri) {
return proxyServer;
}
};
}
}
|
Damn JDK8!
|
api/src/main/java/org/asynchttpclient/util/ProxyUtils.java
|
Damn JDK8!
|
<ide><path>pi/src/main/java/org/asynchttpclient/util/ProxyUtils.java
<ide>
<ide> /**
<ide> * Creates a proxy server instance from the given properties.
<del> * <p/>
<ide> * Currently the default http.* proxy properties are supported as well as properties specific for AHC.
<ide> *
<ide> * @param properties the properties to evaluate. Must not be null.
|
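As an aside on the ProxyUtils record above, the following hedged sketch shows how its nonProxyHosts wildcard matching behaves in practice. The ProxyServer constructor, addNonProxyHost and avoidProxy calls are taken from the listing; the host names and port are made up.

// "*" at the start of a pattern matches by suffix, "*" at the end matches by
// prefix; anything else is a case-insensitive exact comparison.
ProxyServer proxy = new ProxyServer(ProxyServer.Protocol.HTTP, "proxy.corp.example", 8080);
proxy.addNonProxyHost("*.example.com");
proxy.addNonProxyHost("localhost");
ProxyUtils.avoidProxy(proxy, "api.example.com"); // true  -> bypass the proxy
ProxyUtils.avoidProxy(proxy, "localhost");       // true  -> bypass the proxy
ProxyUtils.avoidProxy(proxy, "github.com");      // false -> route through the proxy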
|
JavaScript
|
mit
|
f2c5620890b703de9e2b840fe0f5ea982213ac52
| 0 |
morynicz/Ippon,morynicz/tournament-service,morynicz/Ippon,morynicz/tournament-service,morynicz/tournament-service,morynicz/Ippon
|
angular.module('ippon').controller('PlayersController',[
'$scope',
'$stateParams',
'$location',
'$resource',
'$state',
'Auth',
function($scope, $stateParams, $location, $resource, $state, Auth){
var controllerRoot = "/players/";
var player = $resource(controllerRoot + ':playerId',
{
playerId: "@id",
format: "json"
}, {
'save' : {
method: 'PUT'
},
'create': {
method: 'POST'
}
});
if($state.is('players_show') || $state.is('players_edit')) {
if(!$stateParams.playerId) {
$state.go('players');
} else {
player.get({
playerId: $stateParams.playerId
}, function(player) {
$scope.player = player;
}, function(httpResponse) {
$scope.player = null;
//flash.error = 'There is no club with Id + $routeParams.clubId'
});
}
} else {
if($state.is('players')){
player.query(function(results) {
return $scope.players = results;
});
}
$scope.player = {};
}
$scope.index = function() {
$state.go('players');
}
$scope.save = function() {
var onError = function(_httpResponse) {
//TODO flash.error
}
if($scope.player.id) {
$scope.player.$save((function() {
$state.go('players_show',{playerId: $scope.player.id});
}), onError)
} else {
player.create($scope.player, (function(newPlayer) {
$state.go('players_show',{playerId: newPlayer.id});
}), onError);
}
};
$scope["delete"] = function() {
$scope.player.$delete();
$scope.index();
}
}]);
|
app/assets/javascripts/controllers/PlayersController.js
|
angular.module('ippon').controller('PlayersController',[
'$scope',
'$stateParams',
'$location',
'$resource',
'$state',
'Auth',
function($scope, $stateParams, $location, $resource, $state, Auth){
var controllerRoot = "/players/";
var player = $resource(controllerRoot + ':playerId',
{
playerId: "@id",
format: "json"
}, {
'save' : {
method: 'PUT'
},
'create': {
method: 'POST'
}
});
if($state.is('players_show') || $state.is('players_edit')) {
if(!$stateParams.playerId) {
$state.go('players');
} else {
player.get({
playerId: $stateParams.playerId
}, function(player) {
$scope.player = player;
}, function(httpResponse) {
$scope.player = null;
//flash.error = 'There is no club with Id + $routeParams.clubId'
});
}
} else {
if($state.is('players')){
player.query(function(results) {
return $scope.players = results;
});
}
$scope.player = {};
}
$scope.save = function() {
var onError = function(_httpResponse) {
//TODO flash.error
}
if($scope.player.id) {
$scope.player.$save((function() {
$state.go('players_show',{playerId: $scope.player.id});
}), onError)
} else {
player.create($scope.player, (function(newPlayer) {
$state.go('players_show',{playerId: newPlayer.id});
}), onError);
}
};
}]);
|
Pass PlayersController.delete spec
|
app/assets/javascripts/controllers/PlayersController.js
|
Pass PlayersController.delete spec
|
<ide><path>pp/assets/javascripts/controllers/PlayersController.js
<ide> $scope.player = {};
<ide> }
<ide>
<add> $scope.index = function() {
<add> $state.go('players');
<add> }
<add>
<ide> $scope.save = function() {
<ide> var onError = function(_httpResponse) {
<ide> //TODO flash.error
<ide> }), onError);
<ide> }
<ide> };
<add>
<add> $scope["delete"] = function() {
<add> $scope.player.$delete();
<add> $scope.index();
<add> }
<ide> }]);
|
|
Java
|
bsd-3-clause
|
15e8f7e849c4a76e306e8076a25a1f47229d2cc2
| 0 |
NCIP/c3pr,NCIP/c3pr,NCIP/c3pr
|
package edu.duke.cabig.c3pr.domain;
import java.util.ArrayList;
import java.util.List;
import javax.persistence.FetchType;
import javax.persistence.JoinColumn;
import javax.persistence.MappedSuperclass;
import javax.persistence.OneToMany;
import javax.persistence.OneToOne;
import javax.persistence.Transient;
import org.apache.commons.collections15.functors.InstantiateFactory;
import org.apache.commons.collections15.list.LazyList;
import org.hibernate.annotations.Cascade;
import org.hibernate.annotations.CascadeType;
import edu.duke.cabig.c3pr.utils.StringUtils;
import gov.nih.nci.cabig.ctms.collections.LazyListHelper;
/**
* @author Ram Chilukuri Kulasekaran
*/
@MappedSuperclass
public abstract class Organization extends AbstractMutableDeletableDomainObject implements
Comparable<HealthcareSite> {
private String name;
private String descriptionText;
private Address address;
private String trimmedName;
private List<StudyOrganization> studyOrganizations = new ArrayList<StudyOrganization>();
private List<OrganizationAssignedIdentifier> identifiers = new ArrayList<OrganizationAssignedIdentifier>();
private LazyListHelper lazyListHelper;
public Organization() {
address = new Address();
lazyListHelper = new LazyListHelper();
lazyListHelper.add(PlannedNotification.class, new InstantiateFactory<PlannedNotification>(
PlannedNotification.class));
}
public Organization(boolean initialise) {
address = new Address();
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getDescriptionText() {
return descriptionText;
}
public void setDescriptionText(String descriptionText) {
this.descriptionText = descriptionText;
}
@OneToMany(mappedBy = "healthcareSite", fetch = FetchType.LAZY)
@Cascade(value = { CascadeType.ALL, CascadeType.DELETE_ORPHAN })
public List<StudyOrganization> getStudyOrganizations() {
return studyOrganizations;
}
public void setStudyOrganizations(List<StudyOrganization> studyOrganizations) {
this.studyOrganizations = studyOrganizations;
}
@OneToMany(mappedBy = "healthcareSite", fetch = FetchType.LAZY)
@Cascade(value = { CascadeType.ALL, CascadeType.DELETE_ORPHAN })
public List<OrganizationAssignedIdentifier> getIdentifiers() {
return identifiers;
}
public void setIdentifiers(List<OrganizationAssignedIdentifier> identifiers) {
this.identifiers = identifiers;
}
@OneToOne(cascade = { javax.persistence.CascadeType.ALL }, optional = false)
@JoinColumn(name = "ADDRESS_ID", nullable = false)
public Address getAddress() {
return address;
}
public void setAddress(Address address) {
this.address = address;
}
@Transient
public String getTrimmedName() {
return StringUtils.getTrimmedText(name, 25);
}
public int compareTo(HealthcareSite o) {
if (this.equals((HealthcareSite) o)) return 0;
return 1;
}
@Override
public int hashCode() {
final int PRIME = 31;
int result = 1;
result = PRIME * result + ((name == null) ? 0 : name.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj) return true;
if (obj == null) return false;
if (getClass() != obj.getClass()) return false;
final Organization other = (Organization) obj;
if (name == null) {
if (other.name != null) return false;
}
else if (!name.equals(other.name)) return false;
return true;
}
@OneToMany(fetch = FetchType.LAZY)
@JoinColumn(name = "organizations_id")
@Cascade(value = { CascadeType.ALL, CascadeType.DELETE_ORPHAN })
public List<PlannedNotification> getPlannedNotificationsInternal() {
return lazyListHelper.getInternalList(PlannedNotification.class);
}
public void setPlannedNotificationsInternal(List<PlannedNotification> plannedNotifications) {
lazyListHelper.setInternalList(PlannedNotification.class, plannedNotifications);
}
@Transient
public List<PlannedNotification> getPlannedNotifications() {
return lazyListHelper.getLazyList(PlannedNotification.class);
}
public void setPlannedNotifications(List<PlannedNotification> plannedNotifications) {
}
}
|
codebase/projects/core/src/java/edu/duke/cabig/c3pr/domain/Organization.java
|
package edu.duke.cabig.c3pr.domain;
import java.util.ArrayList;
import java.util.List;
import javax.persistence.FetchType;
import javax.persistence.JoinColumn;
import javax.persistence.MappedSuperclass;
import javax.persistence.OneToMany;
import javax.persistence.OneToOne;
import javax.persistence.Transient;
import org.hibernate.annotations.Cascade;
import org.hibernate.annotations.CascadeType;
import edu.duke.cabig.c3pr.utils.StringUtils;
/**
* @author Ram Chilukuri Kulasekaran
*/
@MappedSuperclass
public abstract class Organization extends AbstractMutableDeletableDomainObject implements
Comparable<HealthcareSite> {
private String name;
private String descriptionText;
private Address address;
private String trimmedName;
private List<StudyOrganization> studyOrganizations = new ArrayList<StudyOrganization>();
private List<OrganizationAssignedIdentifier> identifiers = new ArrayList<OrganizationAssignedIdentifier>();
public Organization() {
address = new Address();
}
public Organization(boolean initialise) {
address = new Address();
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getDescriptionText() {
return descriptionText;
}
public void setDescriptionText(String descriptionText) {
this.descriptionText = descriptionText;
}
@OneToMany(mappedBy = "healthcareSite", fetch = FetchType.LAZY)
@Cascade(value = { CascadeType.ALL, CascadeType.DELETE_ORPHAN })
public List<StudyOrganization> getStudyOrganizations() {
return studyOrganizations;
}
public void setStudyOrganizations(List<StudyOrganization> studyOrganizations) {
this.studyOrganizations = studyOrganizations;
}
@OneToMany(mappedBy = "healthcareSite", fetch = FetchType.LAZY)
@Cascade(value = { CascadeType.ALL, CascadeType.DELETE_ORPHAN })
public List<OrganizationAssignedIdentifier> getIdentifiers() {
return identifiers;
}
public void setIdentifiers(List<OrganizationAssignedIdentifier> identifiers) {
this.identifiers = identifiers;
}
@OneToOne(cascade = { javax.persistence.CascadeType.ALL }, optional = false)
@JoinColumn(name = "ADDRESS_ID", nullable = false)
public Address getAddress() {
return address;
}
public void setAddress(Address address) {
this.address = address;
}
@Transient
public String getTrimmedName() {
return StringUtils.getTrimmedText(name, 25);
}
public int compareTo(HealthcareSite o) {
if (this.equals((HealthcareSite) o)) return 0;
return 1;
}
@Override
public int hashCode() {
final int PRIME = 31;
int result = 1;
result = PRIME * result + ((name == null) ? 0 : name.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj) return true;
if (obj == null) return false;
if (getClass() != obj.getClass()) return false;
final Organization other = (Organization) obj;
if (name == null) {
if (other.name != null) return false;
}
else if (!name.equals(other.name)) return false;
return true;
}
}
|
changed due to notifications
|
codebase/projects/core/src/java/edu/duke/cabig/c3pr/domain/Organization.java
|
changed due to notifications
|
<ide><path>odebase/projects/core/src/java/edu/duke/cabig/c3pr/domain/Organization.java
<ide> import javax.persistence.OneToOne;
<ide> import javax.persistence.Transient;
<ide>
<add>import org.apache.commons.collections15.functors.InstantiateFactory;
<add>import org.apache.commons.collections15.list.LazyList;
<ide> import org.hibernate.annotations.Cascade;
<ide> import org.hibernate.annotations.CascadeType;
<ide>
<ide> import edu.duke.cabig.c3pr.utils.StringUtils;
<add>import gov.nih.nci.cabig.ctms.collections.LazyListHelper;
<ide>
<ide> /**
<ide> * @author Ram Chilukuri Kulasekaran
<ide> private List<StudyOrganization> studyOrganizations = new ArrayList<StudyOrganization>();
<ide>
<ide> private List<OrganizationAssignedIdentifier> identifiers = new ArrayList<OrganizationAssignedIdentifier>();
<del>
<add>
<add> private LazyListHelper lazyListHelper;
<add>
<ide> public Organization() {
<ide> address = new Address();
<add> lazyListHelper = new LazyListHelper();
<add> lazyListHelper.add(PlannedNotification.class, new InstantiateFactory<PlannedNotification>(
<add> PlannedNotification.class));
<ide> }
<ide>
<ide> public Organization(boolean initialise) {
<ide> return true;
<ide> }
<ide>
<add> @OneToMany(fetch = FetchType.LAZY)
<add> @JoinColumn(name = "organizations_id")
<add> @Cascade(value = { CascadeType.ALL, CascadeType.DELETE_ORPHAN })
<add> public List<PlannedNotification> getPlannedNotificationsInternal() {
<add> return lazyListHelper.getInternalList(PlannedNotification.class);
<add> }
<add>
<add> public void setPlannedNotificationsInternal(List<PlannedNotification> plannedNotifications) {
<add> lazyListHelper.setInternalList(PlannedNotification.class, plannedNotifications);
<add> }
<add>
<add> @Transient
<add> public List<PlannedNotification> getPlannedNotifications() {
<add> return lazyListHelper.getLazyList(PlannedNotification.class);
<add> }
<add>
<add> public void setPlannedNotifications(List<PlannedNotification> plannedNotifications) {
<add> }
<add>
<ide> }
|
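A short aside on the LazyListHelper pattern introduced in the Organization record above, written as a minimal sketch. It assumes the usual behavior of gov.nih.nci.cabig.ctms LazyListHelper backed by commons-collections15 LazyList: the lazy view instantiates missing elements on access, while the internal accessor exposes the plain list that Hibernate maps.

// Factory registered per element type, as in the Organization constructor above.
LazyListHelper helper = new LazyListHelper();
helper.add(PlannedNotification.class,
        new InstantiateFactory<PlannedNotification>(PlannedNotification.class));
List<PlannedNotification> lazyView = helper.getLazyList(PlannedNotification.class);
PlannedNotification third = lazyView.get(2);        // created on demand rather than throwing IndexOutOfBoundsException
List<PlannedNotification> persisted = helper.getInternalList(PlannedNotification.class);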
|
Java
|
apache-2.0
|
948a889a098cbf2155f27445394aec7e3e010321
| 0 |
anHALytics/anHALytics-core,anHALytics/anHALytics-core
|
package fr.inria.anhalytics.commons.dao;
import fr.inria.anhalytics.commons.exceptions.ServiceException;
import fr.inria.anhalytics.commons.properties.CommonsProperties;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
/**
* @author azhar
*/
public class DatabaseConnection {
private static final Logger logger = LoggerFactory.getLogger(DatabaseConnection.class);
private static Connection connectDB;
private static Connection connectBiblioDB;
public static Connection getDBInstance() {
try {
if (connectDB == null) {
final String mysqlPort = CommonsProperties.getMysql_port().isEmpty() ? "" : ":" + CommonsProperties.getMysql_port();
final String url = "jdbc:mysql://"
+ CommonsProperties.getMysql_host() +
mysqlPort + "/" + CommonsProperties.getMysql_db() + "?characterEncoding=utf8";
connectDB = DriverManager.getConnection(url, CommonsProperties.getMysql_user(), CommonsProperties.getMysql_pass());
}
} catch (SQLException e) {
throw new ServiceException("Can't connect to MySQL. ", e);
}
return connectDB;
}
public static Connection getBiblioDBInstance() {
try {
if (connectBiblioDB == null) {
final String mysqlPort = CommonsProperties.getMysql_port().isEmpty() ? "" : ":" + CommonsProperties.getMysql_port();
final String url = "jdbc:mysql://"
+ CommonsProperties.getMysql_host() +
mysqlPort + "/" + CommonsProperties.getMysql_bibliodb() + "?characterEncoding=utf8";
                connectBiblioDB = DriverManager.getConnection(url, CommonsProperties.getMysql_user(), CommonsProperties.getMysql_pass());
}
} catch (SQLException e) {
throw new ServiceException("Can't connect to MySQL. ", e);
}
return connectBiblioDB;
}
}
|
anhalytics-commons/src/main/java/fr/inria/anhalytics/commons/dao/DatabaseConnection.java
|
package fr.inria.anhalytics.commons.dao;
import fr.inria.anhalytics.commons.exceptions.PropertyException;
import fr.inria.anhalytics.commons.exceptions.ServiceException;
import fr.inria.anhalytics.commons.properties.CommonsProperties;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
*
* @author azhar
*/
public class DatabaseConnection {
private static final Logger logger = LoggerFactory.getLogger(DatabaseConnection.class);
private static Connection connectDB;
private static Connection connectBiblioDB;
public static Connection getDBInstance() {
try {
if (connectDB == null ) {
connectDB = DriverManager.getConnection("jdbc:mysql://"+CommonsProperties.getMysql_host() +
(CommonsProperties.getMysql_port().isEmpty() ? "":":" + CommonsProperties.getMysql_port())+"/"+ CommonsProperties.getMysql_db(), CommonsProperties.getMysql_user(), CommonsProperties.getMysql_pass());
}
} catch (SQLException e) {
throw new ServiceException("Can't connect to MySQL. ", e);
}
return connectDB;
}
public static Connection getBiblioDBInstance() {
try {
if (connectBiblioDB == null ) {
connectBiblioDB = DriverManager.getConnection("jdbc:mysql://"+CommonsProperties.getMysql_host() +
(CommonsProperties.getMysql_port().isEmpty() ? "":":" + CommonsProperties.getMysql_port())+"/"+ CommonsProperties.getMysql_bibliodb(), CommonsProperties.getMysql_user(), CommonsProperties.getMysql_pass());
}
} catch (SQLException e) {
throw new ServiceException("Can't connect to MySQL. ", e);
}
return connectBiblioDB;
}
}
|
Fix #68
http://stackoverflow.com/questions/3040597/jdbc-character-encoding
|
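For context on the fix referenced above, here is a small self-contained sketch (hypothetical host, database and credentials) of a MySQL JDBC URL carrying the characterEncoding parameter that the commit appends:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;

public class Utf8ConnectionExample {
    public static void main(String[] args) throws SQLException {
        // Without characterEncoding the driver negotiates the platform/server
        // default charset and non-ASCII text can be silently mangled.
        String url = "jdbc:mysql://localhost:3306/anhalytics?characterEncoding=utf8";
        try (Connection connection = DriverManager.getConnection(url, "user", "secret")) {
            // Statements on this connection now exchange strings as UTF-8.
        }
    }
}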
anhalytics-commons/src/main/java/fr/inria/anhalytics/commons/dao/DatabaseConnection.java
|
Fix #68 http://stackoverflow.com/questions/3040597/jdbc-character-encoding
|
<ide><path>nhalytics-commons/src/main/java/fr/inria/anhalytics/commons/dao/DatabaseConnection.java
<ide> package fr.inria.anhalytics.commons.dao;
<ide>
<del>import fr.inria.anhalytics.commons.exceptions.PropertyException;
<ide> import fr.inria.anhalytics.commons.exceptions.ServiceException;
<ide> import fr.inria.anhalytics.commons.properties.CommonsProperties;
<add>import org.slf4j.Logger;
<add>import org.slf4j.LoggerFactory;
<add>
<ide> import java.sql.Connection;
<ide> import java.sql.DriverManager;
<ide> import java.sql.SQLException;
<del>import org.slf4j.Logger;
<del>import org.slf4j.LoggerFactory;
<ide>
<ide> /**
<del> *
<ide> * @author azhar
<ide> */
<ide> public class DatabaseConnection {
<ide> private static final Logger logger = LoggerFactory.getLogger(DatabaseConnection.class);
<ide> private static Connection connectDB;
<ide> private static Connection connectBiblioDB;
<del>
<add>
<ide> public static Connection getDBInstance() {
<ide> try {
<del> if (connectDB == null ) {
<add> if (connectDB == null) {
<ide>
<del> connectDB = DriverManager.getConnection("jdbc:mysql://"+CommonsProperties.getMysql_host() +
<del> (CommonsProperties.getMysql_port().isEmpty() ? "":":" + CommonsProperties.getMysql_port())+"/"+ CommonsProperties.getMysql_db(), CommonsProperties.getMysql_user(), CommonsProperties.getMysql_pass());
<add> final String mysqlPort = CommonsProperties.getMysql_port().isEmpty() ? "" : ":" + CommonsProperties.getMysql_port();
<add> final String url = "jdbc:mysql://"
<add> + CommonsProperties.getMysql_host() +
<add> mysqlPort + "/" + CommonsProperties.getMysql_db() + "?characterEncoding=utf8";
<ide>
<add> connectDB = DriverManager.getConnection(url, CommonsProperties.getMysql_user(), CommonsProperties.getMysql_pass());
<ide> }
<ide> } catch (SQLException e) {
<ide> throw new ServiceException("Can't connect to MySQL. ", e);
<ide> }
<ide> return connectDB;
<ide> }
<del>
<add>
<ide> public static Connection getBiblioDBInstance() {
<ide> try {
<del> if (connectBiblioDB == null ) {
<add> if (connectBiblioDB == null) {
<ide>
<del> connectBiblioDB = DriverManager.getConnection("jdbc:mysql://"+CommonsProperties.getMysql_host() +
<del> (CommonsProperties.getMysql_port().isEmpty() ? "":":" + CommonsProperties.getMysql_port())+"/"+ CommonsProperties.getMysql_bibliodb(), CommonsProperties.getMysql_user(), CommonsProperties.getMysql_pass());
<add> final String mysqlPort = CommonsProperties.getMysql_port().isEmpty() ? "" : ":" + CommonsProperties.getMysql_port();
<add> final String url = "jdbc:mysql://"
<add> + CommonsProperties.getMysql_host() +
<add> mysqlPort + "/" + CommonsProperties.getMysql_bibliodb() + "?characterEncoding=utf8";
<add>
<add>                connectBiblioDB = DriverManager.getConnection(url, CommonsProperties.getMysql_user(), CommonsProperties.getMysql_pass());
<ide>
<ide> }
<ide> } catch (SQLException e) {
<ide> }
<ide> return connectBiblioDB;
<ide> }
<del>
<add>
<ide> }
|
|
Java
|
apache-2.0
|
db384da070ad013f876cd2805fe68b5b3870506b
| 0 |
SeleniumHQ/buck,Addepar/buck,facebook/buck,Addepar/buck,shs96c/buck,nguyentruongtho/buck,romanoid/buck,JoelMarcey/buck,brettwooldridge/buck,nguyentruongtho/buck,rmaz/buck,romanoid/buck,Addepar/buck,shs96c/buck,JoelMarcey/buck,facebook/buck,romanoid/buck,facebook/buck,brettwooldridge/buck,rmaz/buck,facebook/buck,shs96c/buck,shs96c/buck,kageiit/buck,JoelMarcey/buck,rmaz/buck,romanoid/buck,zpao/buck,SeleniumHQ/buck,shs96c/buck,SeleniumHQ/buck,zpao/buck,romanoid/buck,Addepar/buck,romanoid/buck,nguyentruongtho/buck,zpao/buck,brettwooldridge/buck,rmaz/buck,Addepar/buck,rmaz/buck,SeleniumHQ/buck,SeleniumHQ/buck,SeleniumHQ/buck,SeleniumHQ/buck,brettwooldridge/buck,rmaz/buck,SeleniumHQ/buck,zpao/buck,JoelMarcey/buck,JoelMarcey/buck,rmaz/buck,Addepar/buck,shs96c/buck,Addepar/buck,brettwooldridge/buck,brettwooldridge/buck,kageiit/buck,romanoid/buck,brettwooldridge/buck,zpao/buck,SeleniumHQ/buck,facebook/buck,brettwooldridge/buck,zpao/buck,kageiit/buck,romanoid/buck,nguyentruongtho/buck,rmaz/buck,SeleniumHQ/buck,zpao/buck,Addepar/buck,nguyentruongtho/buck,rmaz/buck,brettwooldridge/buck,JoelMarcey/buck,JoelMarcey/buck,JoelMarcey/buck,romanoid/buck,Addepar/buck,romanoid/buck,Addepar/buck,shs96c/buck,JoelMarcey/buck,kageiit/buck,nguyentruongtho/buck,SeleniumHQ/buck,kageiit/buck,rmaz/buck,JoelMarcey/buck,shs96c/buck,rmaz/buck,Addepar/buck,rmaz/buck,JoelMarcey/buck,facebook/buck,shs96c/buck,kageiit/buck,romanoid/buck,shs96c/buck,shs96c/buck,kageiit/buck,Addepar/buck,nguyentruongtho/buck,brettwooldridge/buck,shs96c/buck,romanoid/buck,rmaz/buck,Addepar/buck,romanoid/buck,facebook/buck,SeleniumHQ/buck,brettwooldridge/buck,shs96c/buck,brettwooldridge/buck,brettwooldridge/buck,SeleniumHQ/buck,JoelMarcey/buck,JoelMarcey/buck
|
/*
* Copyright 2017-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.core.rules.pipeline;
import com.facebook.buck.core.build.buildable.context.BuildableContext;
import com.facebook.buck.core.build.context.BuildContext;
import com.facebook.buck.core.rules.BuildRule;
import com.facebook.buck.step.Step;
import com.google.common.collect.ImmutableList;
import javax.annotation.Nullable;
/**
* The steps required to build this rule locally can run more efficiently when executed immediately
* after those of a dependency.
*
* @param <T> the type that is used to share build state between rules in the pipeline
* @deprecated Rule pipelining couples rules in a way that makes it very easy to violate buck's
* assumptions and makes it nearly impossible for buck to understand and restrict what rules are
* doing.
*/
@Deprecated
public interface SupportsPipelining<T extends RulePipelineState> extends BuildRule {
static boolean isSupported(BuildRule rule) {
if (!(rule instanceof SupportsPipelining)) {
return false;
}
SupportsPipelining<?> supportsPipelining = (SupportsPipelining<?>) rule;
return supportsPipelining.useRulePipelining();
}
boolean useRulePipelining();
@Nullable
SupportsPipelining<T> getPreviousRuleInPipeline();
ImmutableList<? extends Step> getPipelinedBuildSteps(
BuildContext context, BuildableContext buildableContext, T state);
RulePipelineStateFactory<T> getPipelineStateFactory();
}
|
src/com/facebook/buck/core/rules/pipeline/SupportsPipelining.java
|
/*
* Copyright 2017-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.core.rules.pipeline;
import com.facebook.buck.core.build.buildable.context.BuildableContext;
import com.facebook.buck.core.build.context.BuildContext;
import com.facebook.buck.core.rules.BuildRule;
import com.facebook.buck.step.Step;
import com.google.common.collect.ImmutableList;
import javax.annotation.Nullable;
/**
* The steps required to build this rule locally can run more efficiently when executed immediately
* after those of a dependency.
*
* @param <T> the type that is used to share build state between rules in the pipeline
*/
public interface SupportsPipelining<T extends RulePipelineState> extends BuildRule {
static boolean isSupported(BuildRule rule) {
if (!(rule instanceof SupportsPipelining)) {
return false;
}
SupportsPipelining<?> supportsPipelining = (SupportsPipelining<?>) rule;
return supportsPipelining.useRulePipelining();
}
static <T extends RulePipelineState> SupportsPipelining<T> getRootRule(
SupportsPipelining<T> rule) {
SupportsPipelining<T> result = rule;
while (result.getPreviousRuleInPipeline() != null) {
result = result.getPreviousRuleInPipeline();
}
return result;
}
boolean useRulePipelining();
@Nullable
SupportsPipelining<T> getPreviousRuleInPipeline();
ImmutableList<? extends Step> getPipelinedBuildSteps(
BuildContext context, BuildableContext buildableContext, T state);
RulePipelineStateFactory<T> getPipelineStateFactory();
}
|
Deprecate SupportsPipelining
Summary:
Rule pipelining couples rules in a way that makes it very easy to
violate buck's assumptions and makes it nearly impossible for buck to
understand and restrict what rules are doing.
Deprecate to at least try discouraging anyone else from using it.
Reviewed By: ttsugriy
fbshipit-source-id: 786180fa53
|
src/com/facebook/buck/core/rules/pipeline/SupportsPipelining.java
|
Deprecate SupportsPipelining
|
<ide><path>rc/com/facebook/buck/core/rules/pipeline/SupportsPipelining.java
<ide> * after those of a dependency.
<ide> *
<ide> * @param <T> the type that is used to share build state between rules in the pipeline
<add> * @deprecated Rule pipelining couples rules in a way that makes it very easy to violate buck's
<add> * assumptions and makes it nearly impossible for buck to understand and restrict what rules are
<add> * doing.
<ide> */
<add>@Deprecated
<ide> public interface SupportsPipelining<T extends RulePipelineState> extends BuildRule {
<ide> static boolean isSupported(BuildRule rule) {
<ide> if (!(rule instanceof SupportsPipelining)) {
<ide>
<ide> SupportsPipelining<?> supportsPipelining = (SupportsPipelining<?>) rule;
<ide> return supportsPipelining.useRulePipelining();
<del> }
<del>
<del> static <T extends RulePipelineState> SupportsPipelining<T> getRootRule(
<del> SupportsPipelining<T> rule) {
<del> SupportsPipelining<T> result = rule;
<del> while (result.getPreviousRuleInPipeline() != null) {
<del> result = result.getPreviousRuleInPipeline();
<del> }
<del>
<del> return result;
<ide> }
<ide>
<ide> boolean useRulePipelining();
|
|
Java
|
apache-2.0
|
4f973cc8be1aee754fa849318c9bde542a47b9d3
| 0 |
uronce-cc/alluxio,maboelhassan/alluxio,ChangerYoung/alluxio,Reidddddd/alluxio,aaudiber/alluxio,riversand963/alluxio,PasaLab/tachyon,Reidddddd/mo-alluxio,bf8086/alluxio,aaudiber/alluxio,maobaolong/alluxio,aaudiber/alluxio,calvinjia/tachyon,maobaolong/alluxio,Alluxio/alluxio,jsimsa/alluxio,aaudiber/alluxio,WilliamZapata/alluxio,Reidddddd/mo-alluxio,wwjiang007/alluxio,riversand963/alluxio,ShailShah/alluxio,Alluxio/alluxio,uronce-cc/alluxio,jswudi/alluxio,calvinjia/tachyon,madanadit/alluxio,Reidddddd/alluxio,maobaolong/alluxio,jswudi/alluxio,maboelhassan/alluxio,EvilMcJerkface/alluxio,maboelhassan/alluxio,aaudiber/alluxio,ChangerYoung/alluxio,Alluxio/alluxio,maboelhassan/alluxio,Reidddddd/alluxio,yuluo-ding/alluxio,maobaolong/alluxio,maboelhassan/alluxio,apc999/alluxio,madanadit/alluxio,ChangerYoung/alluxio,ShailShah/alluxio,riversand963/alluxio,jsimsa/alluxio,apc999/alluxio,Reidddddd/mo-alluxio,jswudi/alluxio,uronce-cc/alluxio,riversand963/alluxio,wwjiang007/alluxio,PasaLab/tachyon,maobaolong/alluxio,WilliamZapata/alluxio,maobaolong/alluxio,maobaolong/alluxio,riversand963/alluxio,PasaLab/tachyon,ShailShah/alluxio,wwjiang007/alluxio,apc999/alluxio,calvinjia/tachyon,maboelhassan/alluxio,madanadit/alluxio,bf8086/alluxio,calvinjia/tachyon,Alluxio/alluxio,bf8086/alluxio,PasaLab/tachyon,calvinjia/tachyon,yuluo-ding/alluxio,Reidddddd/mo-alluxio,EvilMcJerkface/alluxio,madanadit/alluxio,WilliamZapata/alluxio,ShailShah/alluxio,wwjiang007/alluxio,Reidddddd/alluxio,maobaolong/alluxio,EvilMcJerkface/alluxio,riversand963/alluxio,aaudiber/alluxio,Reidddddd/mo-alluxio,EvilMcJerkface/alluxio,Alluxio/alluxio,aaudiber/alluxio,WilliamZapata/alluxio,jsimsa/alluxio,jswudi/alluxio,ChangerYoung/alluxio,Alluxio/alluxio,apc999/alluxio,jsimsa/alluxio,wwjiang007/alluxio,uronce-cc/alluxio,wwjiang007/alluxio,calvinjia/tachyon,apc999/alluxio,calvinjia/tachyon,Alluxio/alluxio,calvinjia/tachyon,jsimsa/alluxio,madanadit/alluxio,Alluxio/alluxio,ChangerYoung/alluxio,wwjiang007/alluxio,WilliamZapata/alluxio,maboelhassan/alluxio,maobaolong/alluxio,Alluxio/alluxio,yuluo-ding/alluxio,uronce-cc/alluxio,bf8086/alluxio,PasaLab/tachyon,bf8086/alluxio,madanadit/alluxio,bf8086/alluxio,wwjiang007/alluxio,WilliamZapata/alluxio,yuluo-ding/alluxio,madanadit/alluxio,apc999/alluxio,apc999/alluxio,jswudi/alluxio,Reidddddd/alluxio,jswudi/alluxio,Reidddddd/mo-alluxio,EvilMcJerkface/alluxio,jsimsa/alluxio,ShailShah/alluxio,wwjiang007/alluxio,EvilMcJerkface/alluxio,Alluxio/alluxio,madanadit/alluxio,ChangerYoung/alluxio,maobaolong/alluxio,bf8086/alluxio,PasaLab/tachyon,PasaLab/tachyon,Reidddddd/alluxio,ShailShah/alluxio,EvilMcJerkface/alluxio,uronce-cc/alluxio,wwjiang007/alluxio,yuluo-ding/alluxio,EvilMcJerkface/alluxio,Reidddddd/alluxio,bf8086/alluxio,yuluo-ding/alluxio
|
/*
* Licensed to the University of California, Berkeley under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package tachyon.master.journal;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Arrays;
import java.util.List;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import com.google.common.base.Function;
import com.google.common.collect.ContiguousSet;
import com.google.common.collect.DiscreteDomain;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import com.google.common.collect.Range;
import com.google.protobuf.ByteString;
import tachyon.Constants;
import tachyon.TachyonURI;
import tachyon.proto.JournalEntryProtos.AddMountPointEntry;
import tachyon.proto.JournalEntryProtos.AsyncCompleteFileEntry;
import tachyon.proto.JournalEntryProtos.BlockContainerIdGeneratorEntry;
import tachyon.proto.JournalEntryProtos.BlockInfoEntry;
import tachyon.proto.JournalEntryProtos.CompleteFileEntry;
import tachyon.proto.JournalEntryProtos.DeleteFileEntry;
import tachyon.proto.JournalEntryProtos.DeleteLineageEntry;
import tachyon.proto.JournalEntryProtos.DeleteMountPointEntry;
import tachyon.proto.JournalEntryProtos.InodeDirectoryEntry;
import tachyon.proto.JournalEntryProtos.InodeDirectoryIdGeneratorEntry;
import tachyon.proto.JournalEntryProtos.InodeFileEntry;
import tachyon.proto.JournalEntryProtos.InodeLastModificationTimeEntry;
import tachyon.proto.JournalEntryProtos.JournalEntry;
import tachyon.proto.JournalEntryProtos.LineageEntry;
import tachyon.proto.JournalEntryProtos.LineageFileState;
import tachyon.proto.JournalEntryProtos.LineageIdGeneratorEntry;
import tachyon.proto.JournalEntryProtos.PersistDirectoryEntry;
import tachyon.proto.JournalEntryProtos.PersistFileEntry;
import tachyon.proto.JournalEntryProtos.PersistFilesEntry;
import tachyon.proto.JournalEntryProtos.PersistFilesRequestEntry;
import tachyon.proto.JournalEntryProtos.RawTableEntry;
import tachyon.proto.JournalEntryProtos.ReinitializeFileEntry;
import tachyon.proto.JournalEntryProtos.RenameEntry;
import tachyon.proto.JournalEntryProtos.SetStateEntry;
import tachyon.proto.JournalEntryProtos.UpdateMetadataEntry;
import tachyon.util.io.BufferUtils;
/**
* Base class for testing different {@link JournalFormatter}'s serialization/deserialization
* correctness of each entry type defined in {@link JournalEntryType}.
* <p>
* To test an implementation of {@link JournalFormatter} like {@link JsonJournalFormatter}, extend
* this class and override method {@link #getFormatter()}.
* <p>
* See example usage in {@link ProtoJournalFormatterTest}.
*/
public abstract class JournalFormatterTestBase {
protected static final long TEST_CONTAINER_ID = 2011L;
protected static final long TEST_BLOCK_ID = 2015L;
protected static final long TEST_FILE_ID = 1L;
protected static final long TEST_LINEAGE_ID = 1L;
protected static final String TEST_FILE_NAME = "journalFormatter.test";
protected static final long TEST_LENGTH_BYTES = 256L;
protected static final long TEST_BLOCK_SIZE_BYTES = 256L;
protected static final long TEST_TABLE_ID = 2L;
protected static final long TEST_OP_TIME_MS = 1409349750338L;
protected static final long TEST_SEQUENCE_NUMBER = 1945L;
protected static final TachyonURI TEST_TACHYON_PATH = new TachyonURI("/test/path");
protected static final long TEST_TTL = 624L;
protected static final TachyonURI TEST_UFS_PATH = new TachyonURI("hdfs://host:port/test/path");
protected static final String TEST_JOB_COMMAND = "Command";
protected static final String TEST_JOB_OUTPUT_PATH = "/test/path";
protected JournalFormatter mFormatter = getFormatter();
protected OutputStream mOs;
protected InputStream mIs;
// set that holds test journal entries
protected static final List<JournalEntry> DATA_SET;
static {
List<JournalEntry> entries = ImmutableList.<JournalEntry>builder()
.add(
JournalEntry.newBuilder()
.setBlockContainerIdGenerator(
BlockContainerIdGeneratorEntry.newBuilder()
.setNextContainerId(TEST_CONTAINER_ID))
.build())
.add(
JournalEntry.newBuilder()
.setBlockInfo(BlockInfoEntry.newBuilder()
.setBlockId(TEST_BLOCK_ID)
.setLength(TEST_LENGTH_BYTES))
.build())
.add(JournalEntry.newBuilder()
.setInodeFileEntry(InodeFileEntry.newBuilder()
.setCreationTimeMs(TEST_OP_TIME_MS)
.setId(TEST_FILE_ID)
.setName(TEST_FILE_NAME)
.setParentId(TEST_FILE_ID)
.setPersisted(true)
.setPinned(true)
.setLastModificationTimeMs(TEST_OP_TIME_MS)
.setBlockSizeBytes(TEST_BLOCK_SIZE_BYTES)
.setLength(TEST_LENGTH_BYTES)
.setCompleted(true)
.setCacheable(true)
.addAllBlocks(ContiguousSet.create(
Range.closedOpen(TEST_BLOCK_ID, TEST_BLOCK_ID + 10), DiscreteDomain.longs())
.asList())
.setTtl(Constants.NO_TTL))
.build())
.add(JournalEntry.newBuilder()
.setInodeDirectory(InodeDirectoryEntry.newBuilder()
.setCreationTimeMs(TEST_OP_TIME_MS)
.setId(TEST_FILE_ID)
.setName(TEST_FILE_NAME)
.setParentId(TEST_FILE_ID)
.setPersisted(true)
.setPinned(true)
.setLastModificationTimeMs(TEST_OP_TIME_MS))
.build())
.add(JournalEntry.newBuilder()
.setInodeLastModificationTime(InodeLastModificationTimeEntry.newBuilder()
.setId(TEST_FILE_ID)
.setLastModificationTimeMs(TEST_OP_TIME_MS))
.build())
.add(JournalEntry.newBuilder()
.setPersistDirectory(PersistDirectoryEntry.newBuilder()
.setId(TEST_FILE_ID))
.build())
.add(JournalEntry.newBuilder()
.setPersistFile(PersistFileEntry.newBuilder()
.setId(TEST_FILE_ID)
.setLength(TEST_LENGTH_BYTES)
.setOpTimeMs(TEST_OP_TIME_MS))
.build())
.add(
JournalEntry.newBuilder()
.setCompleteFile(CompleteFileEntry.newBuilder()
.addAllBlockIds(Arrays.asList(1L, 2L, 3L))
.setId(TEST_FILE_ID)
.setLength(TEST_LENGTH_BYTES)
.setOpTimeMs(TEST_OP_TIME_MS))
.build())
.add(JournalEntry.newBuilder()
.setDeleteFile(DeleteFileEntry.newBuilder()
.setId(TEST_FILE_ID)
.setRecursive(true)
.setOpTimeMs(TEST_OP_TIME_MS))
.build())
.add(JournalEntry.newBuilder()
.setRename(RenameEntry.newBuilder()
.setId(TEST_FILE_ID)
.setDstPath(TEST_FILE_NAME)
.setOpTimeMs(TEST_OP_TIME_MS))
.build())
.add(JournalEntry.newBuilder()
.setInodeDirectoryIdGenerator(InodeDirectoryIdGeneratorEntry.newBuilder()
.setContainerId(TEST_CONTAINER_ID)
.setSequenceNumber(TEST_SEQUENCE_NUMBER))
.build())
.add(JournalEntry.newBuilder()
.setAddMountPoint(AddMountPointEntry.newBuilder()
.setTachyonPath(TEST_TACHYON_PATH.toString())
.setUfsPath(TEST_UFS_PATH.toString()))
.build())
.add(
JournalEntry.newBuilder()
.setDeleteMountPoint(DeleteMountPointEntry.newBuilder()
.setTachyonPath(TEST_TACHYON_PATH.toString()))
.build())
.add(JournalEntry.newBuilder()
.setRawTable(RawTableEntry.newBuilder()
.setId(TEST_BLOCK_ID)
.setColumns(100)
.setMetadata(ByteString.copyFrom(BufferUtils.getIncreasingByteBuffer(10))))
.build())
.add(JournalEntry.newBuilder()
.setUpdateMetadata(UpdateMetadataEntry.newBuilder()
.setId(TEST_BLOCK_ID)
.setMetadata(ByteString.copyFrom(new byte[10])))
.build())
.add(JournalEntry.newBuilder()
.setReinitializeFile(ReinitializeFileEntry.newBuilder()
.setPath(TEST_FILE_NAME)
.setBlockSizeBytes(TEST_BLOCK_SIZE_BYTES)
.setTtl(TEST_TTL))
.build())
.add(
JournalEntry.newBuilder()
.setAsyncCompleteFile(AsyncCompleteFileEntry.newBuilder()
.setFileId(TEST_FILE_ID))
.build())
.add(
JournalEntry.newBuilder()
.setDeleteLineage(DeleteLineageEntry.newBuilder()
.setLineageId(TEST_LINEAGE_ID)
.setCascade(false))
.build())
.add(JournalEntry.newBuilder()
.setLineage(LineageEntry.newBuilder()
.setId(TEST_LINEAGE_ID)
.addAllInputFiles(Arrays.asList(TEST_FILE_ID))
.addAllOutputFileIds(Arrays.asList(TEST_FILE_ID))
.addAllOutputFileStates(Arrays.asList(LineageFileState.CREATED))
.setJobCommand(TEST_JOB_COMMAND)
.setJobOutputPath(TEST_JOB_OUTPUT_PATH)
.setCreationTimeMs(TEST_OP_TIME_MS))
.build())
.add(
JournalEntry.newBuilder()
.setLineageIdGenerator(LineageIdGeneratorEntry.newBuilder()
.setSequenceNumber(TEST_SEQUENCE_NUMBER))
.build())
.add(
JournalEntry.newBuilder()
.setPersistFiles(PersistFilesEntry.newBuilder()
.addAllFileIds(Arrays.asList(1L, 2L)))
.build())
.add(JournalEntry.newBuilder()
.setPersistFilesRequest(PersistFilesRequestEntry.newBuilder()
.addAllFileIds(Arrays.asList(1L, 2L)))
.build())
.add(
JournalEntry.newBuilder()
.setSetState(SetStateEntry.newBuilder()
.setId(TEST_FILE_ID)
.setOpTimeMs(TEST_OP_TIME_MS)
.setPinned(true)
.setTtl(TEST_TTL))
.build())
.build();
DATA_SET = Lists.transform(entries, new Function<JournalEntry, JournalEntry>() {
@Override
public JournalEntry apply(JournalEntry entry) {
return entry.toBuilder().setSequenceNumber(TEST_SEQUENCE_NUMBER).build();
}
});
}
/**
 * Returns the implementation of {@link JournalFormatter} that is to be tested.
*/
protected abstract JournalFormatter getFormatter();
@Rule
public TemporaryFolder mTestFolder = new TemporaryFolder();
@Before
public void before() throws Exception {
String path = mTestFolder.newFile().getAbsolutePath();
mOs = new FileOutputStream(path);
mIs = new FileInputStream(path);
}
@After
public final void after() throws Exception {
mOs.close();
mIs.close();
}
protected void write(JournalEntry entry) throws IOException {
mFormatter.serialize(entry, mOs);
}
protected JournalEntry read() throws IOException {
JournalInputStream jis = mFormatter.deserialize(mIs);
JournalEntry entry = jis.getNextEntry();
Assert.assertEquals(TEST_SEQUENCE_NUMBER, jis.getLatestSequenceNumber());
return entry;
}
protected void assertSameEntry(JournalEntry entry1, JournalEntry entry2) {
Assert.assertEquals(entry1, entry2);
}
protected void entryTest(JournalEntry entry) throws IOException {
write(entry);
JournalEntry readEntry = read();
assertSameEntry(entry, readEntry);
}
// check if every entry is covered by this test
@Test
public void checkEntriesNumberTest() {
// Subtract one to exclude ENTRY_NOT_SET
Assert.assertEquals(JournalEntry.EntryCase.values().length - 1, DATA_SET.size());
}
@Test
public void entriesTest() throws IOException {
for (JournalEntry entry : DATA_SET) {
entryTest(entry);
}
}
}
|
servers/src/test/java/tachyon/master/journal/JournalFormatterTestBase.java
|
/*
* Licensed to the University of California, Berkeley under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package tachyon.master.journal;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Arrays;
import java.util.Set;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import com.google.common.collect.ContiguousSet;
import com.google.common.collect.DiscreteDomain;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Range;
import com.google.protobuf.ByteString;
import tachyon.Constants;
import tachyon.TachyonURI;
import tachyon.proto.JournalEntryProtos.AddMountPointEntry;
import tachyon.proto.JournalEntryProtos.AsyncCompleteFileEntry;
import tachyon.proto.JournalEntryProtos.BlockContainerIdGeneratorEntry;
import tachyon.proto.JournalEntryProtos.BlockInfoEntry;
import tachyon.proto.JournalEntryProtos.CompleteFileEntry;
import tachyon.proto.JournalEntryProtos.DeleteFileEntry;
import tachyon.proto.JournalEntryProtos.DeleteLineageEntry;
import tachyon.proto.JournalEntryProtos.DeleteMountPointEntry;
import tachyon.proto.JournalEntryProtos.InodeDirectoryEntry;
import tachyon.proto.JournalEntryProtos.InodeDirectoryIdGeneratorEntry;
import tachyon.proto.JournalEntryProtos.InodeFileEntry;
import tachyon.proto.JournalEntryProtos.InodeLastModificationTimeEntry;
import tachyon.proto.JournalEntryProtos.JournalEntry;
import tachyon.proto.JournalEntryProtos.LineageEntry;
import tachyon.proto.JournalEntryProtos.LineageFileState;
import tachyon.proto.JournalEntryProtos.PersistDirectoryEntry;
import tachyon.proto.JournalEntryProtos.PersistFileEntry;
import tachyon.proto.JournalEntryProtos.PersistFilesEntry;
import tachyon.proto.JournalEntryProtos.PersistFilesRequestEntry;
import tachyon.proto.JournalEntryProtos.RawTableEntry;
import tachyon.proto.JournalEntryProtos.ReinitializeFileEntry;
import tachyon.proto.JournalEntryProtos.RenameEntry;
import tachyon.proto.JournalEntryProtos.SetStateEntry;
import tachyon.proto.JournalEntryProtos.UpdateMetadataEntry;
import tachyon.util.io.BufferUtils;
/**
 * Base class for testing the serialization/deserialization correctness of different
 * {@link JournalFormatter} implementations for each entry type defined in {@link JournalEntryType}.
* <p>
* To test an implementation of {@link JournalFormatter} like {@link JsonJournalFormatter}, extend
* this class and override method {@link #getFormatter()}.
* <p>
* See example usage in {@link ProtoJournalFormatterTest}.
*/
public abstract class JournalFormatterTestBase {
protected static final long TEST_CONTAINER_ID = 2011L;
protected static final long TEST_BLOCK_ID = 2015L;
protected static final long TEST_FILE_ID = 1L;
protected static final long TEST_LINEAGE_ID = 1L;
protected static final String TEST_FILE_NAME = "journalFormatter.test";
protected static final long TEST_LENGTH_BYTES = 256L;
protected static final long TEST_BLOCK_SIZE_BYTES = 256L;
protected static final long TEST_TABLE_ID = 2L;
protected static final long TEST_OP_TIME_MS = 1409349750338L;
protected static final long TEST_SEQUENCE_NUMBER = 1945L;
protected static final TachyonURI TEST_TACHYON_PATH = new TachyonURI("/test/path");
protected static final long TEST_TTL = 624L;
protected static final TachyonURI TEST_UFS_PATH = new TachyonURI("hdfs://host:port/test/path");
protected static final String TEST_JOB_COMMAND = "Command";
protected static final String TEST_JOB_OUTPUT_PATH = "/test/path";
protected JournalFormatter mFormatter = getFormatter();
protected OutputStream mOs;
protected InputStream mIs;
// set that holds test journal entries
protected Set<JournalEntry> mDataSet =
ImmutableSet.<JournalEntry>builder()
.add(
JournalEntry.newBuilder()
.setBlockContainerIdGenerator(
BlockContainerIdGeneratorEntry.newBuilder()
.setNextContainerId(TEST_CONTAINER_ID))
.build())
.add(
JournalEntry.newBuilder()
.setBlockInfo(
BlockInfoEntry.newBuilder()
.setBlockId(
TEST_BLOCK_ID)
.setLength(
TEST_LENGTH_BYTES))
.build())
.add(JournalEntry.newBuilder().setInodeFileEntry(InodeFileEntry.newBuilder()
.setCreationTimeMs(TEST_OP_TIME_MS).setId(TEST_FILE_ID).setName(TEST_FILE_NAME)
.setParentId(TEST_FILE_ID).setPersisted(true).setPinned(true)
.setLastModificationTimeMs(TEST_OP_TIME_MS).setBlockSizeBytes(TEST_BLOCK_SIZE_BYTES)
.setLength(TEST_LENGTH_BYTES).setCompleted(true).setCacheable(true)
.addAllBlocks(ContiguousSet
.create(Range.closedOpen(TEST_BLOCK_ID, TEST_BLOCK_ID + 10), DiscreteDomain.longs())
.asList())
.setTtl(Constants.NO_TTL)).build())
.add(JournalEntry.newBuilder()
.setInodeDirectory(InodeDirectoryEntry.newBuilder().setCreationTimeMs(TEST_OP_TIME_MS)
.setId(TEST_FILE_ID).setName(TEST_FILE_NAME).setParentId(TEST_FILE_ID)
.setPersisted(true).setPinned(true).setLastModificationTimeMs(TEST_OP_TIME_MS))
.build())
.add(JournalEntry.newBuilder()
.setInodeLastModificationTime(InodeLastModificationTimeEntry.newBuilder()
.setId(TEST_FILE_ID).setLastModificationTimeMs(TEST_OP_TIME_MS))
.build())
.add(JournalEntry.newBuilder()
.setPersistDirectory(PersistDirectoryEntry.newBuilder().setId(TEST_FILE_ID)).build())
.add(JournalEntry.newBuilder()
.setPersistFile(PersistFileEntry.newBuilder().setId(TEST_FILE_ID)
.setLength(TEST_LENGTH_BYTES).setOpTimeMs(TEST_OP_TIME_MS))
.build())
.add(JournalEntry.newBuilder()
.setCompleteFile(CompleteFileEntry.newBuilder().addAllBlockIds(Arrays.asList(1L, 2L, 3L))
.setId(TEST_FILE_ID).setLength(TEST_LENGTH_BYTES).setOpTimeMs(TEST_OP_TIME_MS))
.build())
.add(JournalEntry.newBuilder()
.setDeleteFile(DeleteFileEntry.newBuilder().setId(TEST_FILE_ID).setRecursive(true)
.setOpTimeMs(TEST_OP_TIME_MS))
.build())
.add(JournalEntry.newBuilder()
.setRename(RenameEntry.newBuilder().setId(TEST_FILE_ID).setDstPath(TEST_FILE_NAME)
.setOpTimeMs(TEST_OP_TIME_MS))
.build())
.add(JournalEntry.newBuilder()
.setInodeDirectoryIdGenerator(InodeDirectoryIdGeneratorEntry.newBuilder()
.setContainerId(TEST_CONTAINER_ID).setSequenceNumber(TEST_SEQUENCE_NUMBER))
.build())
.add(JournalEntry.newBuilder()
.setAddMountPoint(AddMountPointEntry.newBuilder()
.setTachyonPath(TEST_TACHYON_PATH.toString()).setUfsPath(TEST_UFS_PATH.toString()))
.build()).add(
JournalEntry.newBuilder()
.setDeleteMountPoint(DeleteMountPointEntry.newBuilder()
.setTachyonPath(TEST_TACHYON_PATH.toString()))
.build())
.add(JournalEntry.newBuilder()
.setRawTable(RawTableEntry.newBuilder().setId(TEST_BLOCK_ID).setColumns(100)
.setMetadata(ByteString.copyFrom(BufferUtils.getIncreasingByteBuffer(10))))
.build())
.add(JournalEntry.newBuilder()
.setUpdateMetadata(UpdateMetadataEntry.newBuilder().setId(TEST_BLOCK_ID)
.setMetadata(ByteString.copyFrom(new byte[10])))
.build())
.add(JournalEntry.newBuilder()
.setReinitializeFile(ReinitializeFileEntry.newBuilder().setPath(TEST_FILE_NAME)
.setBlockSizeBytes(TEST_BLOCK_SIZE_BYTES).setTtl(TEST_TTL))
.build())
.add(
JournalEntry.newBuilder()
.setAsyncCompleteFile(AsyncCompleteFileEntry.newBuilder().setFileId(TEST_FILE_ID))
.build())
.add(
JournalEntry.newBuilder()
.setDeleteLineage(DeleteLineageEntry.newBuilder().setLineageId(TEST_LINEAGE_ID)
.setCascade(false))
.build())
.add(JournalEntry.newBuilder()
.setLineage(LineageEntry.newBuilder().setId(TEST_LINEAGE_ID)
.addAllInputFiles(Arrays.asList(TEST_FILE_ID))
.addAllOutputFileIds(Arrays.asList(TEST_FILE_ID))
.addAllOutputFileStates(Arrays.asList(LineageFileState.CREATED))
.setJobCommand(TEST_JOB_COMMAND).setJobOutputPath(TEST_JOB_OUTPUT_PATH)
.setCreationTimeMs(TEST_OP_TIME_MS))
.build())
.add(
JournalEntry.newBuilder()
.setPersistFiles(
PersistFilesEntry.newBuilder().addAllFileIds(Arrays.asList(1L, 2L)))
.build())
.add(JournalEntry.newBuilder()
.setPersistFilesRequest(
PersistFilesRequestEntry.newBuilder().addAllFileIds(Arrays.asList(1L, 2L)))
.build())
.add(
JournalEntry.newBuilder()
.setSetState(SetStateEntry.newBuilder().setId(TEST_FILE_ID)
.setOpTimeMs(TEST_OP_TIME_MS).setPinned(true).setTtl(TEST_TTL))
.build())
.build();
/**
 * Returns the implementation of {@link JournalFormatter} that is to be tested.
*/
protected abstract JournalFormatter getFormatter();
@Rule
public TemporaryFolder mTestFolder = new TemporaryFolder();
@Before
public void before() throws Exception {
String path = mTestFolder.newFile().getAbsolutePath();
mOs = new FileOutputStream(path);
mIs = new FileInputStream(path);
}
@After
public final void after() throws Exception {
mOs.close();
mIs.close();
}
protected void write(JournalEntry entry) throws IOException {
mFormatter.serialize(entry.toBuilder().setSequenceNumber(TEST_SEQUENCE_NUMBER).build(), mOs);
}
protected JournalEntry read() throws IOException {
JournalInputStream jis = mFormatter.deserialize(mIs);
JournalEntry entry = jis.getNextEntry();
Assert.assertEquals(TEST_SEQUENCE_NUMBER, jis.getLatestSequenceNumber());
return entry;
}
protected void assertSameEntry(JournalEntry entry1, JournalEntry entry2) {
Assert.assertEquals(entry1, entry2);
}
protected void entryTest(JournalEntry entry) throws IOException {
write(entry);
JournalEntry readEntry = read();
assertSameEntry(entry, readEntry);
}
// check if every entry is covered by this test
@Test
public void checkEntriesNumberTest() {
Assert.assertEquals(JournalEntry.EntryCase.values().length, mDataSet.size());
}
@Test
public void entriesTest() throws IOException {
for (JournalEntry entry : mDataSet) {
entryTest(entry);
}
}
}
|
Clean up JournalFormatterTestBase and add a missing JournalEntry
|
servers/src/test/java/tachyon/master/journal/JournalFormatterTestBase.java
|
Clean up JournalFormatterTestBase and add a missing JournalEntry
|
<ide><path>ervers/src/test/java/tachyon/master/journal/JournalFormatterTestBase.java
<ide> import java.io.InputStream;
<ide> import java.io.OutputStream;
<ide> import java.util.Arrays;
<del>import java.util.Set;
<add>import java.util.List;
<ide>
<ide> import org.junit.After;
<ide> import org.junit.Assert;
<ide> import org.junit.Test;
<ide> import org.junit.rules.TemporaryFolder;
<ide>
<add>import com.google.common.base.Function;
<ide> import com.google.common.collect.ContiguousSet;
<ide> import com.google.common.collect.DiscreteDomain;
<del>import com.google.common.collect.ImmutableSet;
<add>import com.google.common.collect.ImmutableList;
<add>import com.google.common.collect.Lists;
<ide> import com.google.common.collect.Range;
<ide> import com.google.protobuf.ByteString;
<ide>
<ide> import tachyon.proto.JournalEntryProtos.JournalEntry;
<ide> import tachyon.proto.JournalEntryProtos.LineageEntry;
<ide> import tachyon.proto.JournalEntryProtos.LineageFileState;
<add>import tachyon.proto.JournalEntryProtos.LineageIdGeneratorEntry;
<ide> import tachyon.proto.JournalEntryProtos.PersistDirectoryEntry;
<ide> import tachyon.proto.JournalEntryProtos.PersistFileEntry;
<ide> import tachyon.proto.JournalEntryProtos.PersistFilesEntry;
<ide> protected InputStream mIs;
<ide>
<ide> // set that holds test journal entries
<del> protected Set<JournalEntry> mDataSet =
<del> ImmutableSet.<JournalEntry>builder()
<del> .add(
<del> JournalEntry.newBuilder()
<del> .setBlockContainerIdGenerator(
<del> BlockContainerIdGeneratorEntry.newBuilder()
<del> .setNextContainerId(TEST_CONTAINER_ID))
<del> .build())
<del> .add(
<del> JournalEntry.newBuilder()
<del> .setBlockInfo(
<del> BlockInfoEntry.newBuilder()
<del> .setBlockId(
<del> TEST_BLOCK_ID)
<del> .setLength(
<del> TEST_LENGTH_BYTES))
<del> .build())
<del> .add(JournalEntry.newBuilder().setInodeFileEntry(InodeFileEntry.newBuilder()
<del> .setCreationTimeMs(TEST_OP_TIME_MS).setId(TEST_FILE_ID).setName(TEST_FILE_NAME)
<del> .setParentId(TEST_FILE_ID).setPersisted(true).setPinned(true)
<del> .setLastModificationTimeMs(TEST_OP_TIME_MS).setBlockSizeBytes(TEST_BLOCK_SIZE_BYTES)
<del> .setLength(TEST_LENGTH_BYTES).setCompleted(true).setCacheable(true)
<del> .addAllBlocks(ContiguousSet
<del> .create(Range.closedOpen(TEST_BLOCK_ID, TEST_BLOCK_ID + 10), DiscreteDomain.longs())
<del> .asList())
<del> .setTtl(Constants.NO_TTL)).build())
<del> .add(JournalEntry.newBuilder()
<del> .setInodeDirectory(InodeDirectoryEntry.newBuilder().setCreationTimeMs(TEST_OP_TIME_MS)
<del> .setId(TEST_FILE_ID).setName(TEST_FILE_NAME).setParentId(TEST_FILE_ID)
<del> .setPersisted(true).setPinned(true).setLastModificationTimeMs(TEST_OP_TIME_MS))
<del> .build())
<del> .add(JournalEntry.newBuilder()
<del> .setInodeLastModificationTime(InodeLastModificationTimeEntry.newBuilder()
<del> .setId(TEST_FILE_ID).setLastModificationTimeMs(TEST_OP_TIME_MS))
<del> .build())
<del> .add(JournalEntry.newBuilder()
<del> .setPersistDirectory(PersistDirectoryEntry.newBuilder().setId(TEST_FILE_ID)).build())
<del> .add(JournalEntry.newBuilder()
<del> .setPersistFile(PersistFileEntry.newBuilder().setId(TEST_FILE_ID)
<del> .setLength(TEST_LENGTH_BYTES).setOpTimeMs(TEST_OP_TIME_MS))
<del> .build())
<del> .add(JournalEntry.newBuilder()
<del> .setCompleteFile(CompleteFileEntry.newBuilder().addAllBlockIds(Arrays.asList(1L, 2L, 3L))
<del> .setId(TEST_FILE_ID).setLength(TEST_LENGTH_BYTES).setOpTimeMs(TEST_OP_TIME_MS))
<del> .build())
<del> .add(JournalEntry.newBuilder()
<del> .setDeleteFile(DeleteFileEntry.newBuilder().setId(TEST_FILE_ID).setRecursive(true)
<del> .setOpTimeMs(TEST_OP_TIME_MS))
<del> .build())
<del> .add(JournalEntry.newBuilder()
<del> .setRename(RenameEntry.newBuilder().setId(TEST_FILE_ID).setDstPath(TEST_FILE_NAME)
<del> .setOpTimeMs(TEST_OP_TIME_MS))
<del> .build())
<del> .add(JournalEntry.newBuilder()
<del> .setInodeDirectoryIdGenerator(InodeDirectoryIdGeneratorEntry.newBuilder()
<del> .setContainerId(TEST_CONTAINER_ID).setSequenceNumber(TEST_SEQUENCE_NUMBER))
<del> .build())
<del> .add(JournalEntry.newBuilder()
<del> .setAddMountPoint(AddMountPointEntry.newBuilder()
<del> .setTachyonPath(TEST_TACHYON_PATH.toString()).setUfsPath(TEST_UFS_PATH.toString()))
<del> .build()).add(
<del> JournalEntry.newBuilder()
<del> .setDeleteMountPoint(DeleteMountPointEntry.newBuilder()
<del> .setTachyonPath(TEST_TACHYON_PATH.toString()))
<del> .build())
<del> .add(JournalEntry.newBuilder()
<del> .setRawTable(RawTableEntry.newBuilder().setId(TEST_BLOCK_ID).setColumns(100)
<del> .setMetadata(ByteString.copyFrom(BufferUtils.getIncreasingByteBuffer(10))))
<del> .build())
<del> .add(JournalEntry.newBuilder()
<del> .setUpdateMetadata(UpdateMetadataEntry.newBuilder().setId(TEST_BLOCK_ID)
<del> .setMetadata(ByteString.copyFrom(new byte[10])))
<del> .build())
<del> .add(JournalEntry.newBuilder()
<del> .setReinitializeFile(ReinitializeFileEntry.newBuilder().setPath(TEST_FILE_NAME)
<del> .setBlockSizeBytes(TEST_BLOCK_SIZE_BYTES).setTtl(TEST_TTL))
<del> .build())
<del> .add(
<del> JournalEntry.newBuilder()
<del> .setAsyncCompleteFile(AsyncCompleteFileEntry.newBuilder().setFileId(TEST_FILE_ID))
<del> .build())
<del> .add(
<del> JournalEntry.newBuilder()
<del> .setDeleteLineage(DeleteLineageEntry.newBuilder().setLineageId(TEST_LINEAGE_ID)
<del> .setCascade(false))
<del> .build())
<del> .add(JournalEntry.newBuilder()
<del> .setLineage(LineageEntry.newBuilder().setId(TEST_LINEAGE_ID)
<del> .addAllInputFiles(Arrays.asList(TEST_FILE_ID))
<del> .addAllOutputFileIds(Arrays.asList(TEST_FILE_ID))
<del> .addAllOutputFileStates(Arrays.asList(LineageFileState.CREATED))
<del> .setJobCommand(TEST_JOB_COMMAND).setJobOutputPath(TEST_JOB_OUTPUT_PATH)
<del> .setCreationTimeMs(TEST_OP_TIME_MS))
<del> .build())
<del> .add(
<del> JournalEntry.newBuilder()
<del> .setPersistFiles(
<del> PersistFilesEntry.newBuilder().addAllFileIds(Arrays.asList(1L, 2L)))
<del> .build())
<del> .add(JournalEntry.newBuilder()
<del> .setPersistFilesRequest(
<del> PersistFilesRequestEntry.newBuilder().addAllFileIds(Arrays.asList(1L, 2L)))
<del> .build())
<del> .add(
<del> JournalEntry.newBuilder()
<del> .setSetState(SetStateEntry.newBuilder().setId(TEST_FILE_ID)
<del> .setOpTimeMs(TEST_OP_TIME_MS).setPinned(true).setTtl(TEST_TTL))
<del> .build())
<del> .build();
<add> protected static final List<JournalEntry> DATA_SET;
<add>
<add> static {
<add> List<JournalEntry> entries = ImmutableList.<JournalEntry>builder()
<add> .add(
<add> JournalEntry.newBuilder()
<add> .setBlockContainerIdGenerator(
<add> BlockContainerIdGeneratorEntry.newBuilder()
<add> .setNextContainerId(TEST_CONTAINER_ID))
<add> .build())
<add> .add(
<add> JournalEntry.newBuilder()
<add> .setBlockInfo(BlockInfoEntry.newBuilder()
<add> .setBlockId(TEST_BLOCK_ID)
<add> .setLength(TEST_LENGTH_BYTES))
<add> .build())
<add> .add(JournalEntry.newBuilder()
<add> .setInodeFileEntry(InodeFileEntry.newBuilder()
<add> .setCreationTimeMs(TEST_OP_TIME_MS)
<add> .setId(TEST_FILE_ID)
<add> .setName(TEST_FILE_NAME)
<add> .setParentId(TEST_FILE_ID)
<add> .setPersisted(true)
<add> .setPinned(true)
<add> .setLastModificationTimeMs(TEST_OP_TIME_MS)
<add> .setBlockSizeBytes(TEST_BLOCK_SIZE_BYTES)
<add> .setLength(TEST_LENGTH_BYTES)
<add> .setCompleted(true)
<add> .setCacheable(true)
<add> .addAllBlocks(ContiguousSet.create(
<add> Range.closedOpen(TEST_BLOCK_ID, TEST_BLOCK_ID + 10), DiscreteDomain.longs())
<add> .asList())
<add> .setTtl(Constants.NO_TTL))
<add> .build())
<add> .add(JournalEntry.newBuilder()
<add> .setInodeDirectory(InodeDirectoryEntry.newBuilder()
<add> .setCreationTimeMs(TEST_OP_TIME_MS)
<add> .setId(TEST_FILE_ID)
<add> .setName(TEST_FILE_NAME)
<add> .setParentId(TEST_FILE_ID)
<add> .setPersisted(true)
<add> .setPinned(true)
<add> .setLastModificationTimeMs(TEST_OP_TIME_MS))
<add> .build())
<add> .add(JournalEntry.newBuilder()
<add> .setInodeLastModificationTime(InodeLastModificationTimeEntry.newBuilder()
<add> .setId(TEST_FILE_ID)
<add> .setLastModificationTimeMs(TEST_OP_TIME_MS))
<add> .build())
<add> .add(JournalEntry.newBuilder()
<add> .setPersistDirectory(PersistDirectoryEntry.newBuilder()
<add> .setId(TEST_FILE_ID))
<add> .build())
<add> .add(JournalEntry.newBuilder()
<add> .setPersistFile(PersistFileEntry.newBuilder()
<add> .setId(TEST_FILE_ID)
<add> .setLength(TEST_LENGTH_BYTES)
<add> .setOpTimeMs(TEST_OP_TIME_MS))
<add> .build())
<add> .add(
<add> JournalEntry.newBuilder()
<add> .setCompleteFile(CompleteFileEntry.newBuilder()
<add> .addAllBlockIds(Arrays.asList(1L, 2L, 3L))
<add> .setId(TEST_FILE_ID)
<add> .setLength(TEST_LENGTH_BYTES)
<add> .setOpTimeMs(TEST_OP_TIME_MS))
<add> .build())
<add> .add(JournalEntry.newBuilder()
<add> .setDeleteFile(DeleteFileEntry.newBuilder()
<add> .setId(TEST_FILE_ID)
<add> .setRecursive(true)
<add> .setOpTimeMs(TEST_OP_TIME_MS))
<add> .build())
<add> .add(JournalEntry.newBuilder()
<add> .setRename(RenameEntry.newBuilder()
<add> .setId(TEST_FILE_ID)
<add> .setDstPath(TEST_FILE_NAME)
<add> .setOpTimeMs(TEST_OP_TIME_MS))
<add> .build())
<add> .add(JournalEntry.newBuilder()
<add> .setInodeDirectoryIdGenerator(InodeDirectoryIdGeneratorEntry.newBuilder()
<add> .setContainerId(TEST_CONTAINER_ID)
<add> .setSequenceNumber(TEST_SEQUENCE_NUMBER))
<add> .build())
<add> .add(JournalEntry.newBuilder()
<add> .setAddMountPoint(AddMountPointEntry.newBuilder()
<add> .setTachyonPath(TEST_TACHYON_PATH.toString())
<add> .setUfsPath(TEST_UFS_PATH.toString()))
<add> .build())
<add> .add(
<add> JournalEntry.newBuilder()
<add> .setDeleteMountPoint(DeleteMountPointEntry.newBuilder()
<add> .setTachyonPath(TEST_TACHYON_PATH.toString()))
<add> .build())
<add> .add(JournalEntry.newBuilder()
<add> .setRawTable(RawTableEntry.newBuilder()
<add> .setId(TEST_BLOCK_ID)
<add> .setColumns(100)
<add> .setMetadata(ByteString.copyFrom(BufferUtils.getIncreasingByteBuffer(10))))
<add> .build())
<add> .add(JournalEntry.newBuilder()
<add> .setUpdateMetadata(UpdateMetadataEntry.newBuilder()
<add> .setId(TEST_BLOCK_ID)
<add> .setMetadata(ByteString.copyFrom(new byte[10])))
<add> .build())
<add> .add(JournalEntry.newBuilder()
<add> .setReinitializeFile(ReinitializeFileEntry.newBuilder()
<add> .setPath(TEST_FILE_NAME)
<add> .setBlockSizeBytes(TEST_BLOCK_SIZE_BYTES)
<add> .setTtl(TEST_TTL))
<add> .build())
<add> .add(
<add> JournalEntry.newBuilder()
<add> .setAsyncCompleteFile(AsyncCompleteFileEntry.newBuilder()
<add> .setFileId(TEST_FILE_ID))
<add> .build())
<add> .add(
<add> JournalEntry.newBuilder()
<add> .setDeleteLineage(DeleteLineageEntry.newBuilder()
<add> .setLineageId(TEST_LINEAGE_ID)
<add> .setCascade(false))
<add> .build())
<add> .add(JournalEntry.newBuilder()
<add> .setLineage(LineageEntry.newBuilder()
<add> .setId(TEST_LINEAGE_ID)
<add> .addAllInputFiles(Arrays.asList(TEST_FILE_ID))
<add> .addAllOutputFileIds(Arrays.asList(TEST_FILE_ID))
<add> .addAllOutputFileStates(Arrays.asList(LineageFileState.CREATED))
<add> .setJobCommand(TEST_JOB_COMMAND)
<add> .setJobOutputPath(TEST_JOB_OUTPUT_PATH)
<add> .setCreationTimeMs(TEST_OP_TIME_MS))
<add> .build())
<add> .add(
<add> JournalEntry.newBuilder()
<add> .setLineageIdGenerator(LineageIdGeneratorEntry.newBuilder()
<add> .setSequenceNumber(TEST_SEQUENCE_NUMBER))
<add> .build())
<add> .add(
<add> JournalEntry.newBuilder()
<add> .setPersistFiles(PersistFilesEntry.newBuilder()
<add> .addAllFileIds(Arrays.asList(1L, 2L)))
<add> .build())
<add> .add(JournalEntry.newBuilder()
<add> .setPersistFilesRequest(PersistFilesRequestEntry.newBuilder()
<add> .addAllFileIds(Arrays.asList(1L, 2L)))
<add> .build())
<add> .add(
<add> JournalEntry.newBuilder()
<add> .setSetState(SetStateEntry.newBuilder()
<add> .setId(TEST_FILE_ID)
<add> .setOpTimeMs(TEST_OP_TIME_MS)
<add> .setPinned(true)
<add> .setTtl(TEST_TTL))
<add> .build())
<add> .build();
<add> DATA_SET = Lists.transform(entries, new Function<JournalEntry, JournalEntry>() {
<add> @Override
<add> public JournalEntry apply(JournalEntry entry) {
<add> return entry.toBuilder().setSequenceNumber(TEST_SEQUENCE_NUMBER).build();
<add> }
<add> });
<add> }
<ide>
<ide> /**
<ide> * Returns the implementation of {@link JournalFormatter} that is to be tested.
<ide> }
<ide>
<ide> protected void write(JournalEntry entry) throws IOException {
<del> mFormatter.serialize(entry.toBuilder().setSequenceNumber(TEST_SEQUENCE_NUMBER).build(), mOs);
<add> mFormatter.serialize(entry, mOs);
<ide> }
<ide>
<ide> protected JournalEntry read() throws IOException {
<ide> // check if every entry is covered by this test
<ide> @Test
<ide> public void checkEntriesNumberTest() {
<del> Assert.assertEquals(JournalEntry.EntryCase.values().length, mDataSet.size());
<add> // Subtract one to exclude ENTRY_NOT_SET
<add> Assert.assertEquals(JournalEntry.EntryCase.values().length - 1, DATA_SET.size());
<ide> }
<ide>
<ide> @Test
<ide> public void entriesTest() throws IOException {
<del> for (JournalEntry entry : mDataSet) {
<add> for (JournalEntry entry : DATA_SET) {
<ide> entryTest(entry);
<ide> }
<ide> }
|
|
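The Tachyon commit above relies on the extend-and-override pattern described in the base class javadoc: a concrete test supplies its formatter through getFormatter() and inherits the round-trip checks over DATA_SET. The concrete subclasses are not included in this record, so the following is only an assumed sketch of such a subclass; the ProtoBufJournalFormatter name is illustrative rather than taken from the repository.

package tachyon.master.journal;

// Assumed sketch of a concrete subclass: it only has to supply the formatter under test,
// while JournalFormatterTestBase drives serialization and deserialization of DATA_SET.
public class ProtoJournalFormatterTest extends JournalFormatterTestBase {

  @Override
  protected JournalFormatter getFormatter() {
    // Illustrative implementation class name; any JournalFormatter would fit here.
    return new ProtoBufJournalFormatter();
  }
}

Making DATA_SET a static immutable list that is stamped with TEST_SEQUENCE_NUMBER once in the static initializer keeps write() a plain pass-through to serialize(), which is what the diff's change from entry.toBuilder().setSequenceNumber(...) to serialize(entry, mOs) reflects.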
Java
|
agpl-3.0
|
e90cea46e7e66bb3cfc25527e0f4d459fa801e22
| 0 |
duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test
|
3bf280ae-2e60-11e5-9284-b827eb9e62be
|
hello.java
|
3bed1f74-2e60-11e5-9284-b827eb9e62be
|
3bf280ae-2e60-11e5-9284-b827eb9e62be
|
hello.java
|
3bf280ae-2e60-11e5-9284-b827eb9e62be
|
<ide><path>ello.java
<del>3bed1f74-2e60-11e5-9284-b827eb9e62be
<add>3bf280ae-2e60-11e5-9284-b827eb9e62be
|
|
Java
|
bsd-3-clause
|
ae6bea8f82ed9177fa23f5a8587edeb52ed7fe11
| 0 |
steveliles/dsl4xml
|
package com.sjl.dsl4xml;
import org.xmlpull.v1.*;
import com.sjl.dsl4xml.support.*;
public interface ReadingContext {
public abstract void registerConverters(Converter<?>... aConverters);
public abstract <T> T peek();
public abstract void push(Object aContext);
public abstract <T> T pop();
public abstract XmlPullParser getParser();
public abstract boolean hasMoreTags();
public abstract int next();
public abstract boolean isTagNamed(String aTagName);
public abstract boolean isNotEndTag(String aTagName);
public abstract boolean isStartTag();
public abstract String getAttributeValue(String anAttributeName);
public abstract String getAttributeValue(int anIndex);
public abstract boolean isTextNode();
public abstract boolean isStartTagNamed(String aTagName);
public abstract <T> Converter<T> getConverter(Class<T> aArgType);
}
|
src/main/java/com/sjl/dsl4xml/ReadingContext.java
|
package com.sjl.dsl4xml;
import java.util.*;
import org.xmlpull.v1.*;
import com.sjl.dsl4xml.support.*;
import com.sjl.dsl4xml.support.convert.*;
public final class ReadingContext {
private XmlPullParser parser;
private Stack<Object> stack;
private List<Converter<?>> converters;
public ReadingContext(XmlPullParser aParser) {
stack = new Stack<Object>();
parser = aParser;
converters = new ArrayList<Converter<?>>();
registerConverters(
new PrimitiveBooleanConverter(),
new PrimitiveByteConverter(),
new PrimitiveShortConverter(),
new PrimitiveIntConverter(),
new PrimitiveLongConverter(),
new PrimitiveCharConverter(),
new PrimitiveFloatConverter(),
new PrimitiveDoubleConverter(),
new BooleanConverter(),
new ByteConverter(),
new ShortConverter(),
new IntegerConverter(),
new LongConverter(),
new CharacterConverter(),
new FloatConverter(),
new DoubleConverter(),
new ClassConverter(),
new StringConverter()
);
}
public void registerConverters(Converter<?>... aConverters) {
// push any registered converters on ahead of existing converters (allows simple override)
converters.addAll(0, Arrays.asList(aConverters));
}
@SuppressWarnings("unchecked")
public <T> T peek() {
return (T) stack.peek();
}
public void push(Object aContext) {
stack.push(aContext);
}
@SuppressWarnings("unchecked")
public <T> T pop() {
return (T) stack.pop();
}
public XmlPullParser getParser() {
return parser;
}
public boolean hasMoreTags() {
try {
return parser.getEventType() != XmlPullParser.END_DOCUMENT;
} catch (XmlPullParserException anExc) {
throw new XmlReadingException(anExc);
}
}
public int next() {
try {
return parser.next();
} catch (Exception anExc) {
throw new XmlReadingException(anExc);
}
}
public boolean isTagNamed(String aTagName) {
return aTagName.equals(parser.getName());
}
public boolean isNotEndTag(String aTagName) {
try {
return !(
(parser.getEventType() == XmlPullParser.END_TAG) &&
(aTagName.equals(parser.getName()))
);
} catch (XmlPullParserException anExc) {
throw new XmlReadingException(anExc);
}
}
public boolean isStartTag() {
try {
return parser.getEventType() == XmlPullParser.START_TAG;
} catch (XmlPullParserException anExc) {
throw new XmlReadingException(anExc);
}
}
public String getAttributeValue(String anAttributeName) {
return parser.getAttributeValue(null, anAttributeName);
}
public String getAttributeValue(int anIndex) {
return parser.getAttributeValue(anIndex);
}
public boolean isTextNode() {
try {
return parser.getEventType() == XmlPullParser.TEXT;
} catch (XmlPullParserException anExc) {
throw new XmlReadingException(anExc);
}
}
public boolean isStartTagNamed(String aTagName) {
try {
return (
(parser.getEventType() == XmlPullParser.START_TAG) &&
(aTagName.equals(parser.getName()))
);
} catch (Exception anExc) {
throw new XmlReadingException(anExc);
}
}
@SuppressWarnings("unchecked")
public <T> Converter<T> getConverter(Class<T> aArgType) {
for (Converter<?> _c : converters) {
if (_c.canConvertTo(aArgType)) {
return (Converter<T>) _c;
}
}
throw new RuntimeException("No converter registered that can convert to " + aArgType);
}
}
|
Extracted interface ReadingContext
|
src/main/java/com/sjl/dsl4xml/ReadingContext.java
|
Extracted interface ReadingContext
|
<ide><path>rc/main/java/com/sjl/dsl4xml/ReadingContext.java
<ide> package com.sjl.dsl4xml;
<del>
<del>import java.util.*;
<ide>
<ide> import org.xmlpull.v1.*;
<ide>
<ide> import com.sjl.dsl4xml.support.*;
<del>import com.sjl.dsl4xml.support.convert.*;
<ide>
<del>public final class ReadingContext {
<add>public interface ReadingContext {
<ide>
<del> private XmlPullParser parser;
<del> private Stack<Object> stack;
<del> private List<Converter<?>> converters;
<del>
<del> public ReadingContext(XmlPullParser aParser) {
<del> stack = new Stack<Object>();
<del> parser = aParser;
<del> converters = new ArrayList<Converter<?>>();
<del>
<del> registerConverters(
<del> new PrimitiveBooleanConverter(),
<del> new PrimitiveByteConverter(),
<del> new PrimitiveShortConverter(),
<del> new PrimitiveIntConverter(),
<del> new PrimitiveLongConverter(),
<del> new PrimitiveCharConverter(),
<del> new PrimitiveFloatConverter(),
<del> new PrimitiveDoubleConverter(),
<del> new BooleanConverter(),
<del> new ByteConverter(),
<del> new ShortConverter(),
<del> new IntegerConverter(),
<del> new LongConverter(),
<del> new CharacterConverter(),
<del> new FloatConverter(),
<del> new DoubleConverter(),
<del> new ClassConverter(),
<del> new StringConverter()
<del> );
<del> }
<del>
<del> public void registerConverters(Converter<?>... aConverters) {
<del> // push any registered converters on ahead of existing converters (allows simple override)
<del> converters.addAll(0, Arrays.asList(aConverters));
<del> }
<del>
<del> @SuppressWarnings("unchecked")
<del> public <T> T peek() {
<del> return (T) stack.peek();
<del> }
<del>
<del> public void push(Object aContext) {
<del> stack.push(aContext);
<del> }
<del>
<del> @SuppressWarnings("unchecked")
<del> public <T> T pop() {
<del> return (T) stack.pop();
<del> }
<del>
<del> public XmlPullParser getParser() {
<del> return parser;
<del> }
<del>
<del> public boolean hasMoreTags() {
<del> try {
<del> return parser.getEventType() != XmlPullParser.END_DOCUMENT;
<del> } catch (XmlPullParserException anExc) {
<del> throw new XmlReadingException(anExc);
<del> }
<del> }
<del>
<del> public int next() {
<del> try {
<del> return parser.next();
<del> } catch (Exception anExc) {
<del> throw new XmlReadingException(anExc);
<del> }
<del> }
<add> public abstract void registerConverters(Converter<?>... aConverters);
<ide>
<del> public boolean isTagNamed(String aTagName) {
<del> return aTagName.equals(parser.getName());
<del> }
<add> public abstract <T> T peek();
<ide>
<del> public boolean isNotEndTag(String aTagName) {
<del> try {
<del> return !(
<del> (parser.getEventType() == XmlPullParser.END_TAG) &&
<del> (aTagName.equals(parser.getName()))
<del> );
<del> } catch (XmlPullParserException anExc) {
<del> throw new XmlReadingException(anExc);
<del> }
<del> }
<add> public abstract void push(Object aContext);
<ide>
<del> public boolean isStartTag() {
<del> try {
<del> return parser.getEventType() == XmlPullParser.START_TAG;
<del> } catch (XmlPullParserException anExc) {
<del> throw new XmlReadingException(anExc);
<del> }
<del> }
<del>
<del> public String getAttributeValue(String anAttributeName) {
<del> return parser.getAttributeValue(null, anAttributeName);
<del> }
<del>
<del> public String getAttributeValue(int anIndex) {
<del> return parser.getAttributeValue(anIndex);
<del> }
<del>
<del> public boolean isTextNode() {
<del> try {
<del> return parser.getEventType() == XmlPullParser.TEXT;
<del> } catch (XmlPullParserException anExc) {
<del> throw new XmlReadingException(anExc);
<del> }
<del> }
<add> public abstract <T> T pop();
<ide>
<del> public boolean isStartTagNamed(String aTagName) {
<del> try {
<del> return (
<del> (parser.getEventType() == XmlPullParser.START_TAG) &&
<del> (aTagName.equals(parser.getName()))
<del> );
<del> } catch (Exception anExc) {
<del> throw new XmlReadingException(anExc);
<del> }
<del> }
<add> public abstract XmlPullParser getParser();
<ide>
<del> @SuppressWarnings("unchecked")
<del> public <T> Converter<T> getConverter(Class<T> aArgType) {
<del> for (Converter<?> _c : converters) {
<del> if (_c.canConvertTo(aArgType)) {
<del> return (Converter<T>) _c;
<del> }
<del> }
<del> throw new RuntimeException("No converter registered that can convert to " + aArgType);
<del> }
<add> public abstract boolean hasMoreTags();
<add>
<add> public abstract int next();
<add>
<add> public abstract boolean isTagNamed(String aTagName);
<add>
<add> public abstract boolean isNotEndTag(String aTagName);
<add>
<add> public abstract boolean isStartTag();
<add>
<add> public abstract String getAttributeValue(String anAttributeName);
<add>
<add> public abstract String getAttributeValue(int anIndex);
<add>
<add> public abstract boolean isTextNode();
<add>
<add> public abstract boolean isStartTagNamed(String aTagName);
<add>
<add> public abstract <T> Converter<T> getConverter(Class<T> aArgType);
<add>
<ide> }
|
|
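The "Extracted interface ReadingContext" commit above turns the concrete pull-parser context into an interface; the record does not show where the original implementation moved, so the example below only sketches the main benefit of the extraction, namely that collaborators can now be written against the interface and exercised with a stub. Only methods declared in the interface (plus XmlPullParser.getText()) are used, and the TagTextReader class itself is hypothetical.

package com.sjl.dsl4xml;

// Hypothetical collaborator written against the extracted ReadingContext interface rather
// than the former concrete class, so it can be unit-tested with a stub context.
public class TagTextReader {

    private final ReadingContext context;

    public TagTextReader(ReadingContext aContext) {
        context = aContext;
    }

    // Collects the text nodes of the current element until its end tag is reached.
    public String readText(String aTagName) {
        StringBuilder text = new StringBuilder();
        while (context.hasMoreTags() && context.isNotEndTag(aTagName)) {
            if (context.isTextNode()) {
                text.append(context.getParser().getText());
            }
            context.next();
        }
        return text.toString();
    }
}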
Java
|
apache-2.0
|
35c18778ea40abd3295248564b3ed27630d42661
| 0 |
vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa
|
// Copyright Yahoo. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
package com.yahoo.vespa.hosted.controller.deployment;
import com.google.common.collect.ImmutableMap;
import com.yahoo.component.Version;
import com.yahoo.config.application.api.DeploymentInstanceSpec;
import com.yahoo.config.application.api.DeploymentSpec;
import com.yahoo.config.application.api.DeploymentSpec.DeclaredTest;
import com.yahoo.config.application.api.DeploymentSpec.DeclaredZone;
import com.yahoo.config.provision.ApplicationId;
import com.yahoo.config.provision.InstanceName;
import com.yahoo.config.provision.SystemName;
import com.yahoo.config.provision.zone.ZoneId;
import com.yahoo.vespa.hosted.controller.Application;
import com.yahoo.vespa.hosted.controller.Instance;
import com.yahoo.vespa.hosted.controller.api.integration.deployment.ApplicationVersion;
import com.yahoo.vespa.hosted.controller.api.integration.deployment.JobId;
import com.yahoo.vespa.hosted.controller.api.integration.deployment.JobType;
import com.yahoo.vespa.hosted.controller.application.Change;
import com.yahoo.vespa.hosted.controller.application.Deployment;
import java.time.Duration;
import java.time.Instant;
import java.time.temporal.ChronoUnit;
import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import static com.yahoo.config.provision.Environment.prod;
import static com.yahoo.config.provision.Environment.staging;
import static com.yahoo.config.provision.Environment.test;
import static com.yahoo.vespa.hosted.controller.api.integration.deployment.JobType.stagingTest;
import static com.yahoo.vespa.hosted.controller.api.integration.deployment.JobType.systemTest;
import static java.util.Comparator.comparing;
import static java.util.Comparator.naturalOrder;
import static java.util.Objects.requireNonNull;
import static java.util.function.BinaryOperator.maxBy;
import static java.util.stream.Collectors.collectingAndThen;
import static java.util.stream.Collectors.toMap;
import static java.util.stream.Collectors.toUnmodifiableList;
/**
* Status of the deployment jobs of an {@link Application}.
*
* @author jonmv
*/
public class DeploymentStatus {
public static List<JobId> jobsFor(Application application, SystemName system) {
if (DeploymentSpec.empty.equals(application.deploymentSpec()))
return List.of();
return application.deploymentSpec().instances().stream()
.flatMap(spec -> Stream.concat(Stream.of(systemTest, stagingTest),
flatten(spec).filter(step -> step.concerns(prod))
.map(step -> {
if (step instanceof DeclaredZone)
return JobType.from(system, prod, ((DeclaredZone) step).region().get());
return JobType.testFrom(system, ((DeclaredTest) step).region());
})
.flatMap(Optional::stream))
.map(type -> new JobId(application.id().instance(spec.name()), type)))
.collect(toUnmodifiableList());
}
private static Stream<DeploymentSpec.Step> flatten(DeploymentSpec.Step step) {
return step instanceof DeploymentSpec.Steps ? step.steps().stream().flatMap(DeploymentStatus::flatten) : Stream.of(step);
}
private static <T> List<T> union(List<T> first, List<T> second) {
return Stream.concat(first.stream(), second.stream()).distinct().collect(toUnmodifiableList());
}
private final Application application;
private final JobList allJobs;
private final SystemName system;
private final Version systemVersion;
private final Instant now;
private final Map<JobId, StepStatus> jobSteps;
private final List<StepStatus> allSteps;
public DeploymentStatus(Application application, Map<JobId, JobStatus> allJobs, SystemName system,
Version systemVersion, Instant now) {
this.application = requireNonNull(application);
this.allJobs = JobList.from(allJobs.values());
this.system = requireNonNull(system);
this.systemVersion = requireNonNull(systemVersion);
this.now = requireNonNull(now);
List<StepStatus> allSteps = new ArrayList<>();
this.jobSteps = jobDependencies(application.deploymentSpec(), allSteps);
this.allSteps = Collections.unmodifiableList(allSteps);
}
/** The application this deployment status concerns. */
public Application application() {
return application;
}
/** A filterable list of the status of all jobs for this application. */
public JobList jobs() {
return allJobs;
}
    /** Whether any jobs of this application are failing with errors other than lack of capacity in a test zone. */
public boolean hasFailures() {
return ! allJobs.failing()
.not().withStatus(RunStatus.outOfCapacity)
.isEmpty();
}
/** All job statuses, by job type, for the given instance. */
public Map<JobType, JobStatus> instanceJobs(InstanceName instance) {
return allJobs.asList().stream()
.filter(job -> job.id().application().equals(application.id().instance(instance)))
.collect(Collectors.toUnmodifiableMap(job -> job.id().type(),
Function.identity()));
}
/** Filterable job status lists for each instance of this application. */
public Map<ApplicationId, JobList> instanceJobs() {
return allJobs.groupingBy(job -> job.id().application());
}
/**
* The set of jobs that need to run for the changes of each instance of the application to be considered complete,
* and any test jobs for any outstanding change, which will likely be needed to later deploy this change.
*/
public Map<JobId, List<Versions>> jobsToRun() {
Map<InstanceName, Change> changes = new LinkedHashMap<>();
for (InstanceName instance : application.deploymentSpec().instanceNames())
changes.put(instance, application.require(instance).change());
Map<JobId, List<Versions>> jobs = jobsToRun(changes);
// Add test jobs for any outstanding change.
for (InstanceName instance : application.deploymentSpec().instanceNames())
changes.put(instance, outstandingChange(instance).onTopOf(application.require(instance).change()));
var testJobs = jobsToRun(changes, true).entrySet().stream()
.filter(entry -> ! entry.getKey().type().isProduction());
return Stream.concat(jobs.entrySet().stream(), testJobs)
.collect(collectingAndThen(toMap(Map.Entry::getKey,
Map.Entry::getValue,
DeploymentStatus::union,
LinkedHashMap::new),
Collections::unmodifiableMap));
}
private Map<JobId, List<Versions>> jobsToRun(Map<InstanceName, Change> changes, boolean eagerTests) {
Map<JobId, Versions> productionJobs = new LinkedHashMap<>();
changes.forEach((instance, change) -> productionJobs.putAll(productionJobs(instance, change, eagerTests)));
Map<JobId, List<Versions>> testJobs = testJobs(productionJobs);
Map<JobId, List<Versions>> jobs = new LinkedHashMap<>(testJobs);
productionJobs.forEach((job, versions) -> jobs.put(job, List.of(versions)));
// Add runs for idle, declared test jobs if they have no successes on their instance's change's versions.
jobSteps.forEach((job, step) -> {
if ( ! step.isDeclared() || jobs.containsKey(job))
return;
Change change = changes.get(job.application().instance());
if (change == null || ! change.hasTargets())
return;
Optional<JobId> firstProductionJobWithDeployment = jobSteps.keySet().stream()
.filter(jobId -> jobId.type().isProduction() && jobId.type().isDeployment())
.filter(jobId -> deploymentFor(jobId).isPresent())
.findFirst();
Versions versions = Versions.from(change, application, firstProductionJobWithDeployment.flatMap(this::deploymentFor), systemVersion);
if (step.completedAt(change, firstProductionJobWithDeployment).isEmpty())
jobs.merge(job, List.of(versions), DeploymentStatus::union);
});
return Collections.unmodifiableMap(jobs);
}
/** The set of jobs that need to run for the given changes to be considered complete. */
public Map<JobId, List<Versions>> jobsToRun(Map<InstanceName, Change> changes) {
return jobsToRun(changes, false);
}
/** The step status for all steps in the deployment spec of this, which are jobs, in the same order as in the deployment spec. */
public Map<JobId, StepStatus> jobSteps() { return jobSteps; }
public Map<InstanceName, StepStatus> instanceSteps() {
ImmutableMap.Builder<InstanceName, StepStatus> instances = ImmutableMap.builder();
for (StepStatus status : allSteps)
if (status instanceof InstanceStatus)
instances.put(status.instance(), status);
return instances.build();
}
/** The step status for all relevant steps in the deployment spec of this, in the same order as in the deployment spec. */
public List<StepStatus> allSteps() {
if (allSteps.isEmpty())
return List.of();
List<JobId> firstTestJobs = List.of(firstDeclaredOrElseImplicitTest(systemTest),
firstDeclaredOrElseImplicitTest(stagingTest));
return allSteps.stream()
.filter(step -> step.isDeclared() || firstTestJobs.contains(step.job().orElseThrow()))
.collect(toUnmodifiableList());
}
public Optional<Deployment> deploymentFor(JobId job) {
return Optional.ofNullable(application.require(job.application().instance())
.deployments().get(job.type().zone(system)));
}
/**
* The change of this application's latest submission, if this upgrades any of its production deployments,
* and has not yet started rolling out, due to some other change or a block window being present at the time of submission.
*/
public Change outstandingChange(InstanceName instance) {
return nextVersion(instance).map(Change::of)
.filter(change -> application.require(instance).change().application().map(change::upgrades).orElse(true))
.filter(change -> ! jobsToRun(Map.of(instance, change)).isEmpty())
.orElse(Change.empty());
}
/** The next application version to roll out to instance. */
private Optional<ApplicationVersion> nextVersion(InstanceName instance) {
return Optional.ofNullable(instanceSteps().get(instance)).stream()
.flatMap(this::allDependencies)
.flatMap(step -> step.instance.latestDeployed().stream())
.min(naturalOrder())
.or(application::latestVersion);
}
private Stream<InstanceStatus> allDependencies(StepStatus step) {
return step.dependencies.stream()
.flatMap(dep -> Stream.concat(Stream.of(dep), allDependencies(dep)))
.filter(InstanceStatus.class::isInstance)
.map(InstanceStatus.class::cast)
.distinct();
}
/**
* True if the job has already been triggered on the given versions, or if all test types (systemTest, stagingTest),
* restricted to the job's instance if declared in that instance, have successful runs on the given versions.
*/
public boolean isTested(JobId job, Change change) {
Versions versions = Versions.from(change, application, deploymentFor(job), systemVersion);
return allJobs.triggeredOn(versions).get(job).isPresent()
|| Stream.of(systemTest, stagingTest)
.noneMatch(testType -> declaredTest(job.application(), testType).map(__ -> allJobs.instance(job.application().instance()))
.orElse(allJobs)
.type(testType)
.successOn(versions).isEmpty());
}
private Map<JobId, Versions> productionJobs(InstanceName instance, Change change, boolean assumeUpgradesSucceed) {
Map<JobId, Versions> jobs = new LinkedHashMap<>();
jobSteps.forEach((job, step) -> {
// When computing eager test jobs for outstanding changes, assume current upgrade completes successfully.
Optional<Deployment> deployment = deploymentFor(job)
.map(existing -> assumeUpgradesSucceed ? new Deployment(existing.zone(),
existing.applicationVersion(),
change.platform().orElse(existing.version()),
existing.at(),
existing.metrics(),
existing.activity(),
existing.quota(),
existing.cost())
: existing);
if ( job.application().instance().equals(instance)
&& job.type().isProduction()
&& step.completedAt(change, Optional.of(job)).isEmpty()) // Signal strict completion criterion by depending on job itself.
jobs.put(job, Versions.from(change, application, deployment, systemVersion));
});
return jobs;
}
/** The test jobs that need to run prior to the given production deployment jobs. */
public Map<JobId, List<Versions>> testJobs(Map<JobId, Versions> jobs) {
Map<JobId, List<Versions>> testJobs = new LinkedHashMap<>();
for (JobType testType : List.of(systemTest, stagingTest)) {
jobs.forEach((job, versions) -> {
if (job.type().isProduction() && job.type().isDeployment()) {
declaredTest(job.application(), testType).ifPresent(testJob -> {
if (allJobs.successOn(versions).get(testJob).isEmpty())
testJobs.merge(testJob, List.of(versions), DeploymentStatus::union);
});
}
});
jobs.forEach((job, versions) -> {
if ( job.type().isProduction() && job.type().isDeployment()
&& allJobs.successOn(versions).type(testType).isEmpty()
&& testJobs.keySet().stream()
.noneMatch(test -> test.type() == testType
&& testJobs.get(test).contains(versions)))
testJobs.merge(firstDeclaredOrElseImplicitTest(testType), List.of(versions), DeploymentStatus::union);
});
}
return Collections.unmodifiableMap(testJobs);
}
private JobId firstDeclaredOrElseImplicitTest(JobType testJob) {
return application.deploymentSpec().instanceNames().stream()
.map(name -> new JobId(application.id().instance(name), testJob))
.min(comparing(id -> ! jobSteps.get(id).isDeclared())).orElseThrow();
}
/** JobId of any declared test of the given type, for the given instance. */
private Optional<JobId> declaredTest(ApplicationId instanceId, JobType testJob) {
JobId jobId = new JobId(instanceId, testJob);
return jobSteps.get(jobId).isDeclared() ? Optional.of(jobId) : Optional.empty();
}
/** A DAG of the dependencies between the primitive steps in the spec, with iteration order equal to declaration order. */
private Map<JobId, StepStatus> jobDependencies(DeploymentSpec spec, List<StepStatus> allSteps) {
if (DeploymentSpec.empty.equals(spec))
return Map.of();
Map<JobId, StepStatus> dependencies = new LinkedHashMap<>();
List<StepStatus> previous = List.of();
for (DeploymentSpec.Step step : spec.steps())
previous = fillStep(dependencies, allSteps, step, previous, null);
return Collections.unmodifiableMap(dependencies);
}
/** Adds the primitive steps contained in the given step, which depend on the given previous primitives, to the dependency graph. */
private List<StepStatus> fillStep(Map<JobId, StepStatus> dependencies, List<StepStatus> allSteps,
DeploymentSpec.Step step, List<StepStatus> previous, InstanceName instance) {
if (step.steps().isEmpty() && ! (step instanceof DeploymentInstanceSpec)) {
if (instance == null)
return previous; // Ignore test and staging outside all instances.
if ( ! step.delay().isZero()) {
StepStatus stepStatus = new DelayStatus((DeploymentSpec.Delay) step, previous, instance);
allSteps.add(stepStatus);
return List.of(stepStatus);
}
JobType jobType;
StepStatus stepStatus;
if (step.concerns(test) || step.concerns(staging)) {
jobType = JobType.from(system, ((DeclaredZone) step).environment(), null)
.orElseThrow(() -> new IllegalStateException(application + " specifies " + step + ", but this has no job in " + system));
stepStatus = JobStepStatus.ofTestDeployment((DeclaredZone) step, List.of(), this, instance, jobType, true);
previous = new ArrayList<>(previous);
previous.add(stepStatus);
}
else if (step.isTest()) {
jobType = JobType.testFrom(system, ((DeclaredTest) step).region())
.orElseThrow(() -> new IllegalStateException(application + " specifies " + step + ", but this has no job in " + system));
JobType preType = JobType.from(system, prod, ((DeclaredTest) step).region())
.orElseThrow(() -> new IllegalStateException(application + " specifies " + step + ", but this has no job in " + system));
stepStatus = JobStepStatus.ofProductionTest((DeclaredTest) step, previous, this, instance, jobType, preType);
previous = List.of(stepStatus);
}
else if (step.concerns(prod)) {
jobType = JobType.from(system, ((DeclaredZone) step).environment(), ((DeclaredZone) step).region().get())
.orElseThrow(() -> new IllegalStateException(application + " specifies " + step + ", but this has no job in " + system));
stepStatus = JobStepStatus.ofProductionDeployment((DeclaredZone) step, previous, this, instance, jobType);
previous = List.of(stepStatus);
}
else return previous; // Empty container steps end up here, and are simply ignored.
JobId jobId = new JobId(application.id().instance(instance), jobType);
allSteps.removeIf(existing -> existing.job().equals(Optional.of(jobId))); // Replace implicit tests with explicit ones.
allSteps.add(stepStatus);
dependencies.put(jobId, stepStatus);
return previous;
}
if (step instanceof DeploymentInstanceSpec) {
DeploymentInstanceSpec spec = ((DeploymentInstanceSpec) step);
StepStatus instanceStatus = new InstanceStatus(spec, previous, now, application.require(spec.name()), this);
instance = spec.name();
allSteps.add(instanceStatus);
previous = List.of(instanceStatus);
for (JobType test : List.of(systemTest, stagingTest)) {
JobId job = new JobId(application.id().instance(instance), test);
if ( ! dependencies.containsKey(job)) {
var testStatus = JobStepStatus.ofTestDeployment(new DeclaredZone(test.environment()), List.of(),
this, job.application().instance(), test, false);
dependencies.put(job, testStatus);
allSteps.add(testStatus);
}
}
}
if (step.isOrdered()) {
for (DeploymentSpec.Step nested : step.steps())
previous = fillStep(dependencies, allSteps, nested, previous, instance);
return previous;
}
List<StepStatus> parallel = new ArrayList<>();
for (DeploymentSpec.Step nested : step.steps())
parallel.addAll(fillStep(dependencies, allSteps, nested, previous, instance));
return List.copyOf(parallel);
}
public enum StepType {
/** An instance — completion marks a change as ready for the jobs contained in it. */
instance,
/** A timed delay. */
delay,
/** A system, staging or production test. */
test,
/** A production deployment. */
deployment,
}
/**
* Used to represent all steps — explicit and implicit — that may run in order to complete deployment of a change.
*
* Each node contains a step describing the node,
* a list of steps which need to be complete before the step may start,
* a list of jobs from which completion of the step is computed, and
* optionally, an instance name used to identify a job type for the step,
*
* The completion criterion for each type of step is implemented in subclasses of this.
*/
public static abstract class StepStatus {
private final StepType type;
private final DeploymentSpec.Step step;
private final List<StepStatus> dependencies; // All direct dependencies of this step.
private final InstanceName instance;
private StepStatus(StepType type, DeploymentSpec.Step step, List<StepStatus> dependencies, InstanceName instance) {
this.type = requireNonNull(type);
this.step = requireNonNull(step);
this.dependencies = List.copyOf(dependencies);
this.instance = instance;
}
/** The type of step this is. */
public final StepType type() { return type; }
/** The step defining this. */
public final DeploymentSpec.Step step() { return step; }
/** The list of steps that need to be complete before this may start. */
public final List<StepStatus> dependencies() { return dependencies; }
/** The instance of this. */
public final InstanceName instance() { return instance; }
/** The id of the job this corresponds to, if any. */
public Optional<JobId> job() { return Optional.empty(); }
/** The time at which this is, or was, complete on the given change and / or versions. */
public Optional<Instant> completedAt(Change change) { return completedAt(change, Optional.empty()); }
/** The time at which this is, or was, complete on the given change and / or versions. */
abstract Optional<Instant> completedAt(Change change, Optional<JobId> dependent);
/** The time at which this step is ready to run the specified change and / or versions. */
public Optional<Instant> readyAt(Change change) { return readyAt(change, Optional.empty()); }
/** The time at which this step is ready to run the specified change and / or versions. */
Optional<Instant> readyAt(Change change, Optional<JobId> dependent) {
return dependenciesCompletedAt(change, dependent)
.map(ready -> Stream.of(blockedUntil(change),
pausedUntil(),
coolingDownUntil(change))
.flatMap(Optional::stream)
.reduce(ready, maxBy(naturalOrder())));
}
/** The time at which all dependencies completed on the given change and / or versions. */
Optional<Instant> dependenciesCompletedAt(Change change, Optional<JobId> dependent) {
return dependencies.stream().allMatch(step -> step.completedAt(change, dependent).isPresent())
? dependencies.stream().map(step -> step.completedAt(change, dependent).get())
.max(naturalOrder())
.or(() -> Optional.of(Instant.EPOCH))
: Optional.empty();
}
/** The time until which this step is blocked by a change blocker. */
public Optional<Instant> blockedUntil(Change change) { return Optional.empty(); }
/** The time until which this step is paused by user intervention. */
public Optional<Instant> pausedUntil() { return Optional.empty(); }
/** The time until which this step is cooling down, due to consecutive failures. */
public Optional<Instant> coolingDownUntil(Change change) { return Optional.empty(); }
/** Whether this step is declared in the deployment spec, or is an implicit step. */
public boolean isDeclared() { return true; }
}
private static class DelayStatus extends StepStatus {
private DelayStatus(DeploymentSpec.Delay step, List<StepStatus> dependencies, InstanceName instance) {
super(StepType.delay, step, dependencies, instance);
}
@Override
Optional<Instant> completedAt(Change change, Optional<JobId> dependent) {
return readyAt(change, dependent).map(completion -> completion.plus(step().delay()));
}
}
private static class InstanceStatus extends StepStatus {
private final DeploymentInstanceSpec spec;
private final Instant now;
private final Instance instance;
private final DeploymentStatus status;
private InstanceStatus(DeploymentInstanceSpec spec, List<StepStatus> dependencies, Instant now,
Instance instance, DeploymentStatus status) {
super(StepType.instance, spec, dependencies, spec.name());
this.spec = spec;
this.now = now;
this.instance = instance;
this.status = status;
}
/**
* Time of completion of its dependencies, if all parts of the given change are contained in the change
* for this instance, or if no more jobs should run for this instance for the given change.
*/
@Override
Optional<Instant> completedAt(Change change, Optional<JobId> dependent) {
return ( (change.platform().isEmpty() || change.platform().equals(instance.change().platform()))
&& (change.application().isEmpty() || change.application().equals(instance.change().application()))
|| step().steps().stream().noneMatch(step -> step.concerns(prod)))
? dependenciesCompletedAt(change, dependent)
: Optional.empty();
}
// TODO jonmv: complete for p-jobs: last is XXX, but ready/verified uses any is XXX.
@Override
public Optional<Instant> blockedUntil(Change change) {
for (Instant current = now; now.plus(Duration.ofDays(7)).isAfter(current); ) {
boolean blocked = false;
for (DeploymentSpec.ChangeBlocker blocker : spec.changeBlocker()) {
while ( blocker.window().includes(current)
&& now.plus(Duration.ofDays(7)).isAfter(current)
&& ( change.platform().isPresent() && blocker.blocksVersions()
|| change.application().isPresent() && blocker.blocksRevisions())) {
blocked = true;
current = current.plus(Duration.ofHours(1)).truncatedTo(ChronoUnit.HOURS);
}
}
if ( ! blocked)
return current == now ? Optional.empty() : Optional.of(current);
}
return Optional.of(now.plusSeconds(1 << 30)); // Some time in the future that doesn't look like anything you'd expect.
}
}
private static abstract class JobStepStatus extends StepStatus {
private final JobStatus job;
private final DeploymentStatus status;
private JobStepStatus(StepType type, DeploymentSpec.Step step, List<StepStatus> dependencies, JobStatus job,
DeploymentStatus status) {
super(type, step, dependencies, job.id().application().instance());
this.job = requireNonNull(job);
this.status = requireNonNull(status);
}
@Override
public Optional<JobId> job() { return Optional.of(job.id()); }
@Override
public Optional<Instant> pausedUntil() {
return status.application().require(job.id().application().instance()).jobPause(job.id().type());
}
@Override
public Optional<Instant> coolingDownUntil(Change change) {
if (job.lastTriggered().isEmpty()) return Optional.empty();
if (job.lastCompleted().isEmpty()) return Optional.empty();
if (job.firstFailing().isEmpty() || ! job.firstFailing().get().hasEnded()) return Optional.empty();
Versions lastVersions = job.lastCompleted().get().versions();
if (change.platform().isPresent() && ! change.platform().get().equals(lastVersions.targetPlatform())) return Optional.empty();
if (change.application().isPresent() && ! change.application().get().equals(lastVersions.targetApplication())) return Optional.empty();
if (job.id().type().environment().isTest() && job.isOutOfCapacity()) return Optional.empty();
Instant firstFailing = job.firstFailing().get().end().get();
Instant lastCompleted = job.lastCompleted().get().end().get();
return firstFailing.equals(lastCompleted) ? Optional.of(lastCompleted)
: Optional.of(lastCompleted.plus(Duration.ofMinutes(10))
.plus(Duration.between(firstFailing, lastCompleted)
.dividedBy(2)))
.filter(status.now::isBefore);
}
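        // Worked example of the back-off computed above: if the first failing run ended at 12:00 and
        // the latest completed run at 12:30, the job cools down until 12:30 + 10 min + (30 min / 2),
        // i.e. 12:55; when the first failure is also the latest completion, no extra delay is added.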
private static JobStepStatus ofProductionDeployment(DeclaredZone step, List<StepStatus> dependencies,
DeploymentStatus status, InstanceName instance, JobType jobType) {
ZoneId zone = ZoneId.from(step.environment(), step.region().get());
JobStatus job = status.instanceJobs(instance).get(jobType);
Optional<Deployment> existingDeployment = Optional.ofNullable(status.application().require(instance)
.deployments().get(zone));
return new JobStepStatus(StepType.deployment, step, dependencies, job, status) {
@Override
public Optional<Instant> readyAt(Change change, Optional<JobId> dependent) {
return super.readyAt(change, Optional.of(job.id()))
.filter(__ -> status.isTested(job.id(), change));
}
/** Complete if deployment is on pinned version, and last successful deployment, or if given versions is strictly a downgrade, and this isn't forced by a pin. */
@Override
Optional<Instant> completedAt(Change change, Optional<JobId> dependent) {
if ( change.isPinned()
&& change.platform().isPresent()
&& ! existingDeployment.map(Deployment::version).equals(change.platform()))
return Optional.empty();
if ( change.application().isPresent()
&& ! existingDeployment.map(Deployment::applicationVersion).equals(change.application())
&& dependent.equals(job())) // Job should (re-)run in this case, but other dependents need not wait.
return Optional.empty();
Change fullChange = status.application().require(instance).change();
if (existingDeployment.map(deployment -> ! (change.upgrades(deployment.version()) || change.upgrades(deployment.applicationVersion()))
&& (fullChange.downgrades(deployment.version()) || fullChange.downgrades(deployment.applicationVersion())))
.orElse(false))
return job.lastCompleted().flatMap(Run::end);
return (dependent.equals(job()) ? job.lastSuccess().stream()
: RunList.from(job).status(RunStatus.success).asList().stream())
.filter(run -> change.platform().map(run.versions().targetPlatform()::equals).orElse(true)
&& change.application().map(run.versions().targetApplication()::equals).orElse(true))
.findFirst()
.flatMap(Run::end);
}
};
}
private static JobStepStatus ofProductionTest(DeclaredTest step, List<StepStatus> dependencies,
DeploymentStatus status, InstanceName instance, JobType testType, JobType prodType) {
JobStatus job = status.instanceJobs(instance).get(testType);
return new JobStepStatus(StepType.test, step, dependencies, job, status) {
@Override
Optional<Instant> completedAt(Change change, Optional<JobId> dependent) {
Versions versions = Versions.from(change, status.application, status.deploymentFor(job.id()), status.systemVersion);
return dependent.equals(job()) ? job.lastSuccess()
.filter(run -> versions.targetsMatch(run.versions()))
.filter(run -> ! status.jobs()
.instance(instance)
.type(prodType)
.lastCompleted().endedNoLaterThan(run.start())
.isEmpty())
.map(run -> run.end().get())
: RunList.from(job)
.matching(run -> versions.targetsMatch(run.versions()))
.status(RunStatus.success)
.first()
.map(run -> run.end().get());
}
};
}
private static JobStepStatus ofTestDeployment(DeclaredZone step, List<StepStatus> dependencies,
DeploymentStatus status, InstanceName instance,
JobType jobType, boolean declared) {
JobStatus job = status.instanceJobs(instance).get(jobType);
return new JobStepStatus(StepType.test, step, dependencies, job, status) {
@Override
Optional<Instant> completedAt(Change change, Optional<JobId> dependent) {
return RunList.from(job)
.matching(run -> run.versions().targetsMatch(Versions.from(change,
status.application,
dependent.flatMap(status::deploymentFor),
status.systemVersion)))
.status(RunStatus.success)
.asList().stream()
.map(run -> run.end().get())
.max(naturalOrder());
}
@Override
public boolean isDeclared() { return declared; }
};
}
}
}
|
controller-server/src/main/java/com/yahoo/vespa/hosted/controller/deployment/DeploymentStatus.java
|
// Copyright Yahoo. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
package com.yahoo.vespa.hosted.controller.deployment;
import com.google.common.collect.ImmutableMap;
import com.yahoo.component.Version;
import com.yahoo.config.application.api.DeploymentInstanceSpec;
import com.yahoo.config.application.api.DeploymentSpec;
import com.yahoo.config.application.api.DeploymentSpec.DeclaredTest;
import com.yahoo.config.application.api.DeploymentSpec.DeclaredZone;
import com.yahoo.config.provision.ApplicationId;
import com.yahoo.config.provision.InstanceName;
import com.yahoo.config.provision.SystemName;
import com.yahoo.config.provision.zone.ZoneId;
import com.yahoo.vespa.hosted.controller.Application;
import com.yahoo.vespa.hosted.controller.Instance;
import com.yahoo.vespa.hosted.controller.api.integration.deployment.ApplicationVersion;
import com.yahoo.vespa.hosted.controller.api.integration.deployment.JobId;
import com.yahoo.vespa.hosted.controller.api.integration.deployment.JobType;
import com.yahoo.vespa.hosted.controller.application.Change;
import com.yahoo.vespa.hosted.controller.application.Deployment;
import java.time.Duration;
import java.time.Instant;
import java.time.temporal.ChronoUnit;
import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import static com.yahoo.config.provision.Environment.prod;
import static com.yahoo.config.provision.Environment.staging;
import static com.yahoo.config.provision.Environment.test;
import static com.yahoo.vespa.hosted.controller.api.integration.deployment.JobType.stagingTest;
import static com.yahoo.vespa.hosted.controller.api.integration.deployment.JobType.systemTest;
import static java.util.Comparator.comparing;
import static java.util.Comparator.naturalOrder;
import static java.util.Objects.requireNonNull;
import static java.util.function.BinaryOperator.maxBy;
import static java.util.stream.Collectors.collectingAndThen;
import static java.util.stream.Collectors.toMap;
import static java.util.stream.Collectors.toUnmodifiableList;
/**
* Status of the deployment jobs of an {@link Application}.
*
* @author jonmv
*/
public class DeploymentStatus {
public static List<JobId> jobsFor(Application application, SystemName system) {
if (DeploymentSpec.empty.equals(application.deploymentSpec()))
return List.of();
return application.deploymentSpec().instances().stream()
.flatMap(spec -> Stream.concat(Stream.of(systemTest, stagingTest),
flatten(spec).filter(step -> step.concerns(prod))
.map(step -> {
if (step instanceof DeclaredZone)
return JobType.from(system, prod, ((DeclaredZone) step).region().get());
return JobType.testFrom(system, ((DeclaredTest) step).region());
})
.flatMap(Optional::stream))
.map(type -> new JobId(application.id().instance(spec.name()), type)))
.collect(toUnmodifiableList());
}
private static Stream<DeploymentSpec.Step> flatten(DeploymentSpec.Step step) {
return step instanceof DeploymentSpec.Steps ? step.steps().stream().flatMap(DeploymentStatus::flatten) : Stream.of(step);
}
private static <T> List<T> union(List<T> first, List<T> second) {
return Stream.concat(first.stream(), second.stream()).distinct().collect(toUnmodifiableList());
}
private final Application application;
private final JobList allJobs;
private final SystemName system;
private final Version systemVersion;
private final Instant now;
private final Map<JobId, StepStatus> jobSteps;
private final List<StepStatus> allSteps;
public DeploymentStatus(Application application, Map<JobId, JobStatus> allJobs, SystemName system,
Version systemVersion, Instant now) {
this.application = requireNonNull(application);
this.allJobs = JobList.from(allJobs.values());
this.system = requireNonNull(system);
this.systemVersion = requireNonNull(systemVersion);
this.now = requireNonNull(now);
List<StepStatus> allSteps = new ArrayList<>();
this.jobSteps = jobDependencies(application.deploymentSpec(), allSteps);
this.allSteps = Collections.unmodifiableList(allSteps);
}
/** The application this deployment status concerns. */
public Application application() {
return application;
}
/** A filterable list of the status of all jobs for this application. */
public JobList jobs() {
return allJobs;
}
/** Whether any jobs of this application are failing with other errors than lack of capacity in a test zone. */
public boolean hasFailures() {
return ! allJobs.failing()
.not().withStatus(RunStatus.outOfCapacity)
.isEmpty();
}
/** All job statuses, by job type, for the given instance. */
public Map<JobType, JobStatus> instanceJobs(InstanceName instance) {
return allJobs.asList().stream()
.filter(job -> job.id().application().equals(application.id().instance(instance)))
.collect(Collectors.toUnmodifiableMap(job -> job.id().type(),
Function.identity()));
}
/** Filterable job status lists for each instance of this application. */
public Map<ApplicationId, JobList> instanceJobs() {
return allJobs.groupingBy(job -> job.id().application());
}
/**
* The set of jobs that need to run for the changes of each instance of the application to be considered complete,
* and any test jobs for any outstanding change, which will likely be needed to later deploy this change.
*/
public Map<JobId, List<Versions>> jobsToRun() {
Map<InstanceName, Change> changes = new LinkedHashMap<>();
for (InstanceName instance : application.deploymentSpec().instanceNames())
changes.put(instance, application.require(instance).change());
Map<JobId, List<Versions>> jobs = jobsToRun(changes);
// Add test jobs for any outstanding change.
for (InstanceName instance : application.deploymentSpec().instanceNames())
changes.put(instance, outstandingChange(instance).onTopOf(application.require(instance).change()));
var testJobs = jobsToRun(changes, true).entrySet().stream()
.filter(entry -> ! entry.getKey().type().isProduction());
return Stream.concat(jobs.entrySet().stream(), testJobs)
.collect(collectingAndThen(toMap(Map.Entry::getKey,
Map.Entry::getValue,
DeploymentStatus::union,
LinkedHashMap::new),
Collections::unmodifiableMap));
}
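    // Minimal usage sketch, assuming the jobsToRun() contract documented above; the helper name is
    // illustrative only. Counts how many job runs are still outstanding, one per (job, versions) pair.
    private int outstandingRunCount() {
        int count = 0;
        for (List<Versions> versions : jobsToRun().values())
            count += versions.size();
        return count;
    }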
private Map<JobId, List<Versions>> jobsToRun(Map<InstanceName, Change> changes, boolean eagerTests) {
Map<JobId, Versions> productionJobs = new LinkedHashMap<>();
changes.forEach((instance, change) -> productionJobs.putAll(productionJobs(instance, change, eagerTests)));
Map<JobId, List<Versions>> testJobs = testJobs(productionJobs);
Map<JobId, List<Versions>> jobs = new LinkedHashMap<>(testJobs);
productionJobs.forEach((job, versions) -> jobs.put(job, List.of(versions)));
// Add runs for idle, declared test jobs if they have no successes on their instance's change's versions.
jobSteps.forEach((job, step) -> {
if ( ! step.isDeclared() || jobs.containsKey(job))
return;
Change change = changes.get(job.application().instance());
if (change == null || ! change.hasTargets())
return;
Optional<JobId> firstProductionJobWithDeployment = jobSteps.keySet().stream()
.filter(jobId -> jobId.type().isProduction() && jobId.type().isDeployment())
.filter(jobId -> deploymentFor(jobId).isPresent())
.findFirst();
Versions versions = Versions.from(change, application, firstProductionJobWithDeployment.flatMap(this::deploymentFor), systemVersion);
if (step.completedAt(change, firstProductionJobWithDeployment).isEmpty())
jobs.merge(job, List.of(versions), DeploymentStatus::union);
});
return Collections.unmodifiableMap(jobs);
}
/** The set of jobs that need to run for the given changes to be considered complete. */
public Map<JobId, List<Versions>> jobsToRun(Map<InstanceName, Change> changes) {
return jobsToRun(changes, false);
}
/** The step status for all steps in the deployment spec of this, which are jobs, in the same order as in the deployment spec. */
public Map<JobId, StepStatus> jobSteps() { return jobSteps; }
public Map<InstanceName, StepStatus> instanceSteps() {
ImmutableMap.Builder<InstanceName, StepStatus> instances = ImmutableMap.builder();
for (StepStatus status : allSteps)
if (status instanceof InstanceStatus)
instances.put(status.instance(), status);
return instances.build();
}
/** The step status for all relevant steps in the deployment spec of this, in the same order as in the deployment spec. */
public List<StepStatus> allSteps() {
if (allSteps.isEmpty())
return List.of();
List<JobId> firstTestJobs = List.of(firstDeclaredOrElseImplicitTest(systemTest),
firstDeclaredOrElseImplicitTest(stagingTest));
return allSteps.stream()
.filter(step -> step.isDeclared() || firstTestJobs.contains(step.job().orElseThrow()))
.collect(toUnmodifiableList());
}
public Optional<Deployment> deploymentFor(JobId job) {
return Optional.ofNullable(application.require(job.application().instance())
.deployments().get(job.type().zone(system)));
}
/**
* The change of this application's latest submission, if this upgrades any of its production deployments,
* and has not yet started rolling out, due to some other change or a block window being present at the time of submission.
*/
public Change outstandingChange(InstanceName instance) {
return nextVersion(instance).map(Change::of)
.filter(change -> application.require(instance).change().application().map(change::upgrades).orElse(true))
.filter(change -> ! jobsToRun(Map.of(instance, change)).isEmpty())
.orElse(Change.empty());
}
/** The next application version to roll out to instance. */
private Optional<ApplicationVersion> nextVersion(InstanceName instance) {
return Optional.ofNullable(instanceSteps().get(instance)).stream()
.flatMap(this::allDependencies)
.flatMap(step -> step.instance.latestDeployed().stream())
.min(naturalOrder())
.or(application::latestVersion);
}
private Stream<InstanceStatus> allDependencies(StepStatus step) {
return step.dependencies.stream()
.flatMap(dep -> Stream.concat(Stream.of(dep), allDependencies(dep)))
.filter(InstanceStatus.class::isInstance)
.map(InstanceStatus.class::cast)
.distinct();
}
/**
* True if the job has already been triggered on the given versions, or if all test types (systemTest, stagingTest),
* restricted to the job's instance if declared in that instance, have successful runs on the given versions.
*/
public boolean isTested(JobId job, Change change) {
Versions versions = Versions.from(change, application, deploymentFor(job), systemVersion);
return allJobs.triggeredOn(versions).get(job).isPresent()
|| Stream.of(systemTest, stagingTest)
.noneMatch(testType -> declaredTest(job.application(), testType).map(__ -> allJobs.instance(job.application().instance()))
.orElse(allJobs)
.type(testType)
.successOn(versions).isEmpty());
}
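    // Minimal sketch building on the isTested criterion above; the helper name is illustrative only.
    // True if every production job of the given instance is already covered by tests for that
    // instance's current change.
    private boolean allProductionJobsTested(InstanceName instance) {
        Change change = application.require(instance).change();
        return jobSteps.keySet().stream()
                       .filter(job -> job.application().instance().equals(instance))
                       .filter(job -> job.type().isProduction())
                       .allMatch(job -> isTested(job, change));
    }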
private Map<JobId, Versions> productionJobs(InstanceName instance, Change change, boolean assumeUpgradesSucceed) {
Map<JobId, Versions> jobs = new LinkedHashMap<>();
jobSteps.forEach((job, step) -> {
// When computing eager test jobs for outstanding changes, assume current upgrade completes successfully.
Optional<Deployment> deployment = deploymentFor(job)
.map(existing -> assumeUpgradesSucceed ? new Deployment(existing.zone(),
existing.applicationVersion(),
change.platform().orElse(existing.version()),
existing.at(),
existing.metrics(),
existing.activity(),
existing.quota(),
existing.cost())
: existing);
if ( job.application().instance().equals(instance)
&& job.type().isProduction()
&& step.completedAt(change, Optional.of(job)).isEmpty()) // Signal strict completion criterion by depending on job itself.
jobs.put(job, Versions.from(change, application, deployment, systemVersion));
});
return jobs;
}
/** The test jobs that need to run prior to the given production deployment jobs. */
public Map<JobId, List<Versions>> testJobs(Map<JobId, Versions> jobs) {
Map<JobId, List<Versions>> testJobs = new LinkedHashMap<>();
for (JobType testType : List.of(systemTest, stagingTest)) {
jobs.forEach((job, versions) -> {
if (job.type().isProduction() && job.type().isDeployment()) {
declaredTest(job.application(), testType).ifPresent(testJob -> {
if (allJobs.successOn(versions).get(testJob).isEmpty())
testJobs.merge(testJob, List.of(versions), DeploymentStatus::union);
});
}
});
jobs.forEach((job, versions) -> {
if ( job.type().isProduction() && job.type().isDeployment()
&& allJobs.successOn(versions).type(testType).isEmpty()
&& testJobs.keySet().stream()
.noneMatch(test -> test.type() == testType
&& testJobs.get(test).contains(versions)))
testJobs.merge(firstDeclaredOrElseImplicitTest(testType), List.of(versions), DeploymentStatus::union);
});
}
return Collections.unmodifiableMap(testJobs);
}
private JobId firstDeclaredOrElseImplicitTest(JobType testJob) {
return application.deploymentSpec().instanceNames().stream()
.map(name -> new JobId(application.id().instance(name), testJob))
.min(comparing(id -> ! jobSteps.get(id).isDeclared())).orElseThrow();
}
/** JobId of any declared test of the given type, for the given instance. */
private Optional<JobId> declaredTest(ApplicationId instanceId, JobType testJob) {
JobId jobId = new JobId(instanceId, testJob);
return jobSteps.get(jobId).isDeclared() ? Optional.of(jobId) : Optional.empty();
}
/** A DAG of the dependencies between the primitive steps in the spec, with iteration order equal to declaration order. */
private Map<JobId, StepStatus> jobDependencies(DeploymentSpec spec, List<StepStatus> allSteps) {
if (DeploymentSpec.empty.equals(spec))
return Map.of();
Map<JobId, StepStatus> dependencies = new LinkedHashMap<>();
List<StepStatus> previous = List.of();
for (DeploymentSpec.Step step : spec.steps())
previous = fillStep(dependencies, allSteps, step, previous, null);
return Collections.unmodifiableMap(dependencies);
}
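    // Minimal sketch over the dependency graph built above; the helper name is illustrative only.
    // Lists the ids of the jobs a given job directly depends on, i.e. the job-shaped dependencies
    // of its step.
    private List<JobId> directJobDependencies(JobId job) {
        return jobSteps.get(job).dependencies().stream()
                       .map(StepStatus::job)
                       .flatMap(Optional::stream)
                       .collect(toUnmodifiableList());
    }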
/** Adds the primitive steps contained in the given step, which depend on the given previous primitives, to the dependency graph. */
private List<StepStatus> fillStep(Map<JobId, StepStatus> dependencies, List<StepStatus> allSteps,
DeploymentSpec.Step step, List<StepStatus> previous, InstanceName instance) {
if (step.steps().isEmpty() && ! (step instanceof DeploymentInstanceSpec)) {
if (instance == null)
return previous; // Ignore test and staging outside all instances.
if ( ! step.delay().isZero()) {
StepStatus stepStatus = new DelayStatus((DeploymentSpec.Delay) step, previous, instance);
allSteps.add(stepStatus);
return List.of(stepStatus);
}
JobType jobType;
StepStatus stepStatus;
if (step.concerns(test) || step.concerns(staging)) {
jobType = JobType.from(system, ((DeclaredZone) step).environment(), null)
.orElseThrow(() -> new IllegalStateException(application + " specifies " + step + ", but this has no job in " + system));
stepStatus = JobStepStatus.ofTestDeployment((DeclaredZone) step, List.of(), this, instance, jobType, true);
previous = new ArrayList<>(previous);
previous.add(stepStatus);
}
else if (step.isTest()) {
jobType = JobType.testFrom(system, ((DeclaredTest) step).region())
.orElseThrow(() -> new IllegalStateException(application + " specifies " + step + ", but this has no job in " + system));
JobType preType = JobType.from(system, prod, ((DeclaredTest) step).region())
.orElseThrow(() -> new IllegalStateException(application + " specifies " + step + ", but this has no job in " + system));
stepStatus = JobStepStatus.ofProductionTest((DeclaredTest) step, previous, this, instance, jobType, preType);
previous = List.of(stepStatus);
}
else if (step.concerns(prod)) {
jobType = JobType.from(system, ((DeclaredZone) step).environment(), ((DeclaredZone) step).region().get())
.orElseThrow(() -> new IllegalStateException(application + " specifies " + step + ", but this has no job in " + system));
stepStatus = JobStepStatus.ofProductionDeployment((DeclaredZone) step, previous, this, instance, jobType);
previous = List.of(stepStatus);
}
else return previous; // Empty container steps end up here, and are simply ignored.
JobId jobId = new JobId(application.id().instance(instance), jobType);
allSteps.removeIf(existing -> existing.job().equals(Optional.of(jobId))); // Replace implicit tests with explicit ones.
allSteps.add(stepStatus);
dependencies.put(jobId, stepStatus);
return previous;
}
if (step instanceof DeploymentInstanceSpec) {
DeploymentInstanceSpec spec = ((DeploymentInstanceSpec) step);
StepStatus instanceStatus = new InstanceStatus(spec, previous, now, application.require(spec.name()), this);
instance = spec.name();
allSteps.add(instanceStatus);
previous = List.of(instanceStatus);
for (JobType test : List.of(systemTest, stagingTest)) {
JobId job = new JobId(application.id().instance(instance), test);
if ( ! dependencies.containsKey(job)) {
var testStatus = JobStepStatus.ofTestDeployment(new DeclaredZone(test.environment()), List.of(),
this, job.application().instance(), test, false);
dependencies.put(job, testStatus);
allSteps.add(testStatus);
}
}
}
if (step.isOrdered()) {
for (DeploymentSpec.Step nested : step.steps())
previous = fillStep(dependencies, allSteps, nested, previous, instance);
return previous;
}
List<StepStatus> parallel = new ArrayList<>();
for (DeploymentSpec.Step nested : step.steps())
parallel.addAll(fillStep(dependencies, allSteps, nested, previous, instance));
return List.copyOf(parallel);
}
public enum StepType {
/** An instance — completion marks a change as ready for the jobs contained in it. */
instance,
/** A timed delay. */
delay,
/** A system, staging or production test. */
test,
/** A production deployment. */
deployment,
}
/**
* Used to represent all steps — explicit and implicit — that may run in order to complete deployment of a change.
*
* Each node contains a step describing the node,
* a list of steps which need to be complete before the step may start,
* a list of jobs from which completion of the step is computed, and
* optionally, an instance name used to identify a job type for the step,
*
* The completion criterion for each type of step is implemented in subclasses of this.
*/
public static abstract class StepStatus {
private final StepType type;
private final DeploymentSpec.Step step;
private final List<StepStatus> dependencies; // All direct dependencies of this step.
private final InstanceName instance;
private StepStatus(StepType type, DeploymentSpec.Step step, List<StepStatus> dependencies, InstanceName instance) {
this.type = requireNonNull(type);
this.step = requireNonNull(step);
this.dependencies = List.copyOf(dependencies);
this.instance = instance;
}
/** The type of step this is. */
public final StepType type() { return type; }
/** The step defining this. */
public final DeploymentSpec.Step step() { return step; }
/** The list of steps that need to be complete before this may start. */
public final List<StepStatus> dependencies() { return dependencies; }
/** The instance of this. */
public final InstanceName instance() { return instance; }
/** The id of the job this corresponds to, if any. */
public Optional<JobId> job() { return Optional.empty(); }
/** The time at which this is, or was, complete on the given change and / or versions. */
public Optional<Instant> completedAt(Change change) { return completedAt(change, Optional.empty()); }
/** The time at which this is, or was, complete on the given change and / or versions. */
abstract Optional<Instant> completedAt(Change change, Optional<JobId> dependent);
/** The time at which this step is ready to run the specified change and / or versions. */
public Optional<Instant> readyAt(Change change) { return readyAt(change, Optional.empty()); }
/** The time at which this step is ready to run the specified change and / or versions. */
Optional<Instant> readyAt(Change change, Optional<JobId> dependent) {
return dependenciesCompletedAt(change, dependent)
.map(ready -> Stream.of(blockedUntil(change),
pausedUntil(),
coolingDownUntil(change))
.flatMap(Optional::stream)
.reduce(ready, maxBy(naturalOrder())));
}
/** The time at which all dependencies completed on the given change and / or versions. */
Optional<Instant> dependenciesCompletedAt(Change change, Optional<JobId> dependent) {
return dependencies.stream().allMatch(step -> step.completedAt(change, dependent).isPresent())
? dependencies.stream().map(step -> step.completedAt(change, dependent).get())
.max(naturalOrder())
.or(() -> Optional.of(Instant.EPOCH))
: Optional.empty();
}
/** The time until which this step is blocked by a change blocker. */
public Optional<Instant> blockedUntil(Change change) { return Optional.empty(); }
/** The time until which this step is paused by user intervention. */
public Optional<Instant> pausedUntil() { return Optional.empty(); }
/** The time until which this step is cooling down, due to consecutive failures. */
public Optional<Instant> coolingDownUntil(Change change) { return Optional.empty(); }
/** Whether this step is declared in the deployment spec, or is an implicit step. */
public boolean isDeclared() { return true; }
}
private static class DelayStatus extends StepStatus {
private DelayStatus(DeploymentSpec.Delay step, List<StepStatus> dependencies, InstanceName instance) {
super(StepType.delay, step, dependencies, instance);
}
@Override
Optional<Instant> completedAt(Change change, Optional<JobId> dependent) {
return readyAt(change, dependent).map(completion -> completion.plus(step().delay()));
}
}
private static class InstanceStatus extends StepStatus {
private final DeploymentInstanceSpec spec;
private final Instant now;
private final Instance instance;
private final DeploymentStatus status;
private InstanceStatus(DeploymentInstanceSpec spec, List<StepStatus> dependencies, Instant now,
Instance instance, DeploymentStatus status) {
super(StepType.instance, spec, dependencies, spec.name());
this.spec = spec;
this.now = now;
this.instance = instance;
this.status = status;
}
/**
* Time of completion of its dependencies, if all parts of the given change are contained in the change
* for this instance, or if no more jobs should run for this instance for the given change.
*/
@Override
Optional<Instant> completedAt(Change change, Optional<JobId> dependent) {
return ( (change.platform().isEmpty() || change.platform().equals(instance.change().platform()))
&& (change.application().isEmpty() || change.application().equals(instance.change().application()))
|| status.jobsToRun(Map.of(instance.name(), change)).isEmpty())
? dependenciesCompletedAt(change, dependent)
: Optional.empty();
}
// TODO jonmv: complete for p-jobs: last is XXX, but ready/verified uses any is XXX.
@Override
public Optional<Instant> blockedUntil(Change change) {
for (Instant current = now; now.plus(Duration.ofDays(7)).isAfter(current); ) {
boolean blocked = false;
for (DeploymentSpec.ChangeBlocker blocker : spec.changeBlocker()) {
while ( blocker.window().includes(current)
&& now.plus(Duration.ofDays(7)).isAfter(current)
&& ( change.platform().isPresent() && blocker.blocksVersions()
|| change.application().isPresent() && blocker.blocksRevisions())) {
blocked = true;
current = current.plus(Duration.ofHours(1)).truncatedTo(ChronoUnit.HOURS);
}
}
if ( ! blocked)
return current == now ? Optional.empty() : Optional.of(current);
}
return Optional.of(now.plusSeconds(1 << 30)); // Some time in the future that doesn't look like anything you'd expect.
}
}
private static abstract class JobStepStatus extends StepStatus {
private final JobStatus job;
private final DeploymentStatus status;
private JobStepStatus(StepType type, DeploymentSpec.Step step, List<StepStatus> dependencies, JobStatus job,
DeploymentStatus status) {
super(type, step, dependencies, job.id().application().instance());
this.job = requireNonNull(job);
this.status = requireNonNull(status);
}
@Override
public Optional<JobId> job() { return Optional.of(job.id()); }
@Override
public Optional<Instant> pausedUntil() {
return status.application().require(job.id().application().instance()).jobPause(job.id().type());
}
@Override
public Optional<Instant> coolingDownUntil(Change change) {
if (job.lastTriggered().isEmpty()) return Optional.empty();
if (job.lastCompleted().isEmpty()) return Optional.empty();
if (job.firstFailing().isEmpty() || ! job.firstFailing().get().hasEnded()) return Optional.empty();
Versions lastVersions = job.lastCompleted().get().versions();
if (change.platform().isPresent() && ! change.platform().get().equals(lastVersions.targetPlatform())) return Optional.empty();
if (change.application().isPresent() && ! change.application().get().equals(lastVersions.targetApplication())) return Optional.empty();
if (job.id().type().environment().isTest() && job.isOutOfCapacity()) return Optional.empty();
Instant firstFailing = job.firstFailing().get().end().get();
Instant lastCompleted = job.lastCompleted().get().end().get();
return firstFailing.equals(lastCompleted) ? Optional.of(lastCompleted)
: Optional.of(lastCompleted.plus(Duration.ofMinutes(10))
.plus(Duration.between(firstFailing, lastCompleted)
.dividedBy(2)))
.filter(status.now::isBefore);
}
private static JobStepStatus ofProductionDeployment(DeclaredZone step, List<StepStatus> dependencies,
DeploymentStatus status, InstanceName instance, JobType jobType) {
ZoneId zone = ZoneId.from(step.environment(), step.region().get());
JobStatus job = status.instanceJobs(instance).get(jobType);
Optional<Deployment> existingDeployment = Optional.ofNullable(status.application().require(instance)
.deployments().get(zone));
return new JobStepStatus(StepType.deployment, step, dependencies, job, status) {
@Override
public Optional<Instant> readyAt(Change change, Optional<JobId> dependent) {
return super.readyAt(change, Optional.of(job.id()))
.filter(__ -> status.isTested(job.id(), change));
}
/** Complete if deployment is on pinned version, and last successful deployment, or if given versions is strictly a downgrade, and this isn't forced by a pin. */
@Override
Optional<Instant> completedAt(Change change, Optional<JobId> dependent) {
if ( change.isPinned()
&& change.platform().isPresent()
&& ! existingDeployment.map(Deployment::version).equals(change.platform()))
return Optional.empty();
if ( change.application().isPresent()
&& ! existingDeployment.map(Deployment::applicationVersion).equals(change.application())
&& dependent.equals(job())) // Job should (re-)run in this case, but other dependents need not wait.
return Optional.empty();
Change fullChange = status.application().require(instance).change();
if (existingDeployment.map(deployment -> ! (change.upgrades(deployment.version()) || change.upgrades(deployment.applicationVersion()))
&& (fullChange.downgrades(deployment.version()) || fullChange.downgrades(deployment.applicationVersion())))
.orElse(false))
return job.lastCompleted().flatMap(Run::end);
return (dependent.equals(job()) ? job.lastSuccess().stream()
: RunList.from(job).status(RunStatus.success).asList().stream())
.filter(run -> change.platform().map(run.versions().targetPlatform()::equals).orElse(true)
&& change.application().map(run.versions().targetApplication()::equals).orElse(true))
.findFirst()
.flatMap(Run::end);
}
};
}
private static JobStepStatus ofProductionTest(DeclaredTest step, List<StepStatus> dependencies,
DeploymentStatus status, InstanceName instance, JobType testType, JobType prodType) {
JobStatus job = status.instanceJobs(instance).get(testType);
return new JobStepStatus(StepType.test, step, dependencies, job, status) {
@Override
Optional<Instant> completedAt(Change change, Optional<JobId> dependent) {
Versions versions = Versions.from(change, status.application, status.deploymentFor(job.id()), status.systemVersion);
return dependent.equals(job()) ? job.lastSuccess()
.filter(run -> versions.targetsMatch(run.versions()))
.filter(run -> ! status.jobs()
.instance(instance)
.type(prodType)
.lastCompleted().endedNoLaterThan(run.start())
.isEmpty())
.map(run -> run.end().get())
: RunList.from(job)
.matching(run -> versions.targetsMatch(run.versions()))
.status(RunStatus.success)
.first()
.map(run -> run.end().get());
}
};
}
private static JobStepStatus ofTestDeployment(DeclaredZone step, List<StepStatus> dependencies,
DeploymentStatus status, InstanceName instance,
JobType jobType, boolean declared) {
JobStatus job = status.instanceJobs(instance).get(jobType);
return new JobStepStatus(StepType.test, step, dependencies, job, status) {
@Override
Optional<Instant> completedAt(Change change, Optional<JobId> dependent) {
return RunList.from(job)
.matching(run -> run.versions().targetsMatch(Versions.from(change,
status.application,
dependent.flatMap(status::deploymentFor),
status.systemVersion)))
.status(RunStatus.success)
.asList().stream()
.map(run -> run.end().get())
.max(naturalOrder());
}
@Override
public boolean isDeclared() { return declared; }
};
}
}
}
|
Simpler completion check for non-prod instances
|
controller-server/src/main/java/com/yahoo/vespa/hosted/controller/deployment/DeploymentStatus.java
|
Simpler completion check for non-prod instances
|
<ide><path>ontroller-server/src/main/java/com/yahoo/vespa/hosted/controller/deployment/DeploymentStatus.java
<ide> Optional<Instant> completedAt(Change change, Optional<JobId> dependent) {
<ide> return ( (change.platform().isEmpty() || change.platform().equals(instance.change().platform()))
<ide> && (change.application().isEmpty() || change.application().equals(instance.change().application()))
<del> || status.jobsToRun(Map.of(instance.name(), change)).isEmpty())
<add> || step().steps().stream().noneMatch(step -> step.concerns(prod)))
<ide> ? dependenciesCompletedAt(change, dependent)
<ide> : Optional.empty();
<ide> }
|
|
Java
|
agpl-3.0
|
8352d963e01c50231533ab97336657339b975722
| 0 |
VietOpenCPS/opencps-v2,VietOpenCPS/opencps-v2
|
/**
* Copyright (c) 2000-present Liferay, Inc. All rights reserved.
*
* This library is free software; you can redistribute it and/or modify it under
* the terms of the GNU Lesser General Public License as published by the Free
* Software Foundation; either version 2.1 of the License, or (at your option)
* any later version.
*
* This library is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
* details.
*/
package org.opencps.dossiermgt.service.impl;
import com.liferay.counter.kernel.service.CounterLocalServiceUtil;
import com.liferay.petra.string.StringPool;
import com.liferay.portal.kernel.dao.orm.QueryUtil;
import com.liferay.portal.kernel.exception.PortalException;
import com.liferay.portal.kernel.json.JSONFactoryUtil;
import com.liferay.portal.kernel.json.JSONObject;
import com.liferay.portal.kernel.log.Log;
import com.liferay.portal.kernel.log.LogFactoryUtil;
import com.liferay.portal.kernel.model.User;
import com.liferay.portal.kernel.search.BaseBooleanQueryImpl;
import com.liferay.portal.kernel.search.BooleanClause;
import com.liferay.portal.kernel.search.BooleanClauseOccur;
import com.liferay.portal.kernel.search.BooleanQuery;
import com.liferay.portal.kernel.search.BooleanQueryFactoryUtil;
import com.liferay.portal.kernel.search.Document;
import com.liferay.portal.kernel.search.Field;
import com.liferay.portal.kernel.search.Hits;
import com.liferay.portal.kernel.search.IndexSearcherHelperUtil;
import com.liferay.portal.kernel.search.Indexable;
import com.liferay.portal.kernel.search.IndexableType;
import com.liferay.portal.kernel.search.Indexer;
import com.liferay.portal.kernel.search.IndexerRegistryUtil;
import com.liferay.portal.kernel.search.ParseException;
import com.liferay.portal.kernel.search.Query;
import com.liferay.portal.kernel.search.SearchContext;
import com.liferay.portal.kernel.search.SearchException;
import com.liferay.portal.kernel.search.Sort;
import com.liferay.portal.kernel.search.TermQuery;
import com.liferay.portal.kernel.search.TermRangeQuery;
import com.liferay.portal.kernel.search.WildcardQuery;
import com.liferay.portal.kernel.search.filter.Filter;
import com.liferay.portal.kernel.search.filter.FilterTranslator;
import com.liferay.portal.kernel.search.filter.RangeTermFilter;
import com.liferay.portal.kernel.search.filter.TermFilter;
import com.liferay.portal.kernel.search.filter.TermsFilter;
import com.liferay.portal.kernel.search.generic.BooleanQueryImpl;
import com.liferay.portal.kernel.search.generic.MultiMatchQuery;
import com.liferay.portal.kernel.search.generic.TermQueryImpl;
import com.liferay.portal.kernel.search.generic.TermRangeQueryImpl;
import com.liferay.portal.kernel.search.generic.WildcardQueryImpl;
import com.liferay.portal.kernel.service.ServiceContext;
import com.liferay.portal.kernel.util.GetterUtil;
import com.liferay.portal.kernel.util.PwdGenerator;
import com.liferay.portal.kernel.util.StringUtil;
import com.liferay.portal.kernel.util.Validator;
import com.liferay.portal.kernel.uuid.PortalUUIDUtil;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import org.opencps.auth.utils.APIDateTimeUtils;
import org.opencps.communication.model.ServerConfig;
import org.opencps.communication.service.ServerConfigLocalServiceUtil;
import org.opencps.datamgt.constants.DataMGTConstants;
import org.opencps.datamgt.model.DictCollection;
import org.opencps.datamgt.model.DictItem;
import org.opencps.datamgt.service.DictCollectionLocalServiceUtil;
import org.opencps.datamgt.service.DictItemLocalServiceUtil;
import org.opencps.datamgt.util.HolidayUtils;
import org.opencps.datamgt.utils.DictCollectionUtils;
import org.opencps.dossiermgt.action.util.DossierMgtUtils;
import org.opencps.dossiermgt.action.util.DossierNumberGenerator;
import org.opencps.dossiermgt.constants.ConstantsTerm;
import org.opencps.dossiermgt.constants.DossierActionTerm;
import org.opencps.dossiermgt.constants.DossierStatusConstants;
import org.opencps.dossiermgt.constants.DossierTerm;
import org.opencps.dossiermgt.constants.PaymentFileTerm;
import org.opencps.dossiermgt.constants.ServiceInfoTerm;
import org.opencps.dossiermgt.exception.NoSuchDossierException;
import org.opencps.dossiermgt.model.Dossier;
import org.opencps.dossiermgt.model.DossierAction;
import org.opencps.dossiermgt.model.DossierFile;
import org.opencps.dossiermgt.model.DossierPart;
import org.opencps.dossiermgt.model.DossierTemplate;
import org.opencps.dossiermgt.model.ProcessOption;
import org.opencps.dossiermgt.model.ProcessStep;
import org.opencps.dossiermgt.model.ServiceConfig;
import org.opencps.dossiermgt.model.ServiceInfo;
import org.opencps.dossiermgt.model.ServiceProcess;
import org.opencps.dossiermgt.service.DossierActionLocalServiceUtil;
import org.opencps.dossiermgt.service.DossierLocalServiceUtil;
import org.opencps.dossiermgt.service.ProcessOptionLocalServiceUtil;
import org.opencps.dossiermgt.service.ProcessStepLocalServiceUtil;
import org.opencps.dossiermgt.service.ServiceConfigLocalServiceUtil;
import org.opencps.dossiermgt.service.ServiceProcessLocalServiceUtil;
import org.opencps.dossiermgt.service.base.DossierLocalServiceBaseImpl;
import aQute.bnd.annotation.ProviderType;
/**
* The implementation of the dossier local service.
*
* <p>
* All custom service methods should be put in this class. Whenever methods are
* added, rerun ServiceBuilder to copy their definitions into the
* {@link org.opencps.dossiermgt.service.DossierLocalService} interface.
*
* <p>
* This is a local service. Methods of this service will not have security
* checks based on the propagated JAAS credentials because this service can only
* be accessed from within the same VM.
* </p>
*
* @author huymq
* @see DossierLocalServiceBaseImpl
* @see org.opencps.dossiermgt.service.DossierLocalServiceUtil
*/
@ProviderType
public class DossierLocalServiceImpl extends DossierLocalServiceBaseImpl {
/*
* NOTE FOR DEVELOPERS:
*
* Never reference this class directly. Always use {@link
* org.opencps.dossiermgt.service.DossierLocalServiceUtil} to access the
* dossier local service.
*/
protected Log _log = LogFactoryUtil.getLog(DossierLocalServiceImpl.class);
@Indexable(type = IndexableType.REINDEX)
public Dossier syncDossier(Dossier dossier) throws PortalException {
dossierPersistence.update(dossier);
return dossier;
}
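	// Minimal usage sketch, assuming the access pattern described in the class javadoc: callers
	// outside this module reach these methods through DossierLocalServiceUtil, e.g.
	//
	//     Dossier dossier = DossierLocalServiceUtil.fetchDossier(dossierId);
	//     if (dossier != null) {
	//         DossierLocalServiceUtil.syncDossier(dossier); // may throw PortalException
	//     }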
@Indexable(type = IndexableType.REINDEX)
public Dossier initDossier(long groupId, long dossierId, String referenceUid, long counter, String serviceCode,
String serviceName, String govAgencyCode, String govAgencyName, String applicantName,
String applicantIdType, String applicantIdNo, Date applicantIdDate, String address, String cityCode,
String cityName, String districtCode, String districtName, String wardCode, String wardName,
String contactName, String contactTelNo, String contactEmail, String dossierTemplateNo, String password,
int viaPostal, String postalAddress, String postalCityCode, String postalCityName, String postalTelNo,
boolean online, boolean notification, String applicantNote, int originality, ServiceContext context) throws PortalException {
Date now = new Date();
long userId = context.getUserId();
User auditUser = userPersistence.fetchByPrimaryKey(userId);
validateInit(groupId, dossierId, referenceUid, serviceCode, govAgencyCode, address, cityCode, districtCode,
wardCode, contactName, contactTelNo, contactEmail, dossierTemplateNo);
Dossier dossier = null;
if (dossierId == 0) {
String dossierTemplateName = getDossierTemplateName(groupId, dossierTemplateNo);
dossierId = counterLocalService.increment(Dossier.class.getName());
String dossierNote = getDossierNote(serviceCode, govAgencyCode, dossierTemplateNo, groupId);
dossier = dossierPersistence.create(dossierId);
dossier.setCreateDate(now);
dossier.setModifiedDate(now);
dossier.setCompanyId(context.getCompanyId());
dossier.setGroupId(groupId);
dossier.setUserId(userId);
dossier.setUserName(auditUser.getFullName());
// Add extent fields
dossier.setReferenceUid(referenceUid);
dossier.setCounter(counter);
dossier.setServiceCode(serviceCode);
dossier.setServiceName(serviceName);
dossier.setGovAgencyCode(govAgencyCode);
dossier.setGovAgencyName(govAgencyName);
dossier.setDossierTemplateNo(dossierTemplateNo);
dossier.setDossierTemplateName(dossierTemplateName);
dossier.setApplicantName(applicantName);
dossier.setApplicantIdType(applicantIdType);
dossier.setApplicantIdNo(applicantIdNo);
dossier.setApplicantIdDate(applicantIdDate);
dossier.setPassword(password);
dossier.setOnline(online);
dossier.setDossierNote(dossierNote);
dossier.setAddress(address);
dossier.setCityCode(cityCode);
dossier.setCityName(cityName);
dossier.setDistrictCode(districtCode);
dossier.setDistrictName(districtName);
dossier.setWardCode(wardCode);
dossier.setWardName(wardName);
dossier.setContactName(contactName);
dossier.setContactEmail(contactEmail);
dossier.setContactTelNo(contactTelNo);
dossier.setViaPostal(viaPostal);
dossier.setPostalAddress(postalAddress);
dossier.setPostalCityCode(postalCityCode);
dossier.setPostalCityName(postalCityName);
dossier.setPostalTelNo(postalTelNo);
dossier.setApplicantNote(applicantNote);
// dossier.setServerNo(getServerNo(groupId));
dossier.setOriginality(originality);
//Update sampleCount
ProcessOption option = getProcessOption(serviceCode, govAgencyCode, dossierTemplateNo, groupId);
if (option != null) {
dossier.setSampleCount(option.getSampleCount());
}
dossierPersistence.update(dossier);
// create DossierFile if it is eForm
// List<DossierPart> dossierParts = new ArrayList<DossierPart>();
//
// dossierParts = dossierPartPersistence.findByTP_NO(groupId, dossierTemplateNo);
// for (DossierPart part : dossierParts) {
// if (Validator.isNotNull(part.getFormScript()) && part.getPartType() != 2) {
// String dossierFileUUID = PortalUUIDUtil.generate();
// TODO HotFix
// if (groupId != 55301) {
// if (originality == DossierTerm.ORIGINALITY_DVCTT || originality == DossierTerm.ORIGINALITY_MOTCUA) {
// dossierFileLocalService.addDossierFile(groupId, dossierId, dossierFileUUID, dossierTemplateNo,
// part.getPartNo(), part.getFileTemplateNo(), part.getPartName(), StringPool.BLANK, 0l,
// null, StringPool.BLANK, StringPool.TRUE, context);
// }
// }
// }
// if (originality == DossierTerm.ORIGINALITY_MOTCUA) {
LinkedHashMap<String, Object> params = new LinkedHashMap<String, Object>();
params.put(DossierTerm.GOV_AGENCY_CODE, dossier.getGovAgencyCode());
params.put(DossierTerm.SERVICE_CODE, dossier.getServiceCode());
params.put(DossierTerm.DOSSIER_TEMPLATE_NO, dossier.getDossierTemplateNo());
params.put(DossierTerm.DOSSIER_STATUS, StringPool.BLANK);
ServiceProcess serviceProcess = null;
_log.debug("option: "+option);
if (option != null) {
//Process submition note
_log.debug("option: "+option.getSubmissionNote());
dossier.setSubmissionNote(option.getSubmissionNote());
_log.debug("option: "+true);
long serviceProcessId = option.getServiceProcessId();
serviceProcess = serviceProcessPersistence.findByPrimaryKey(serviceProcessId);
String dossierRef = DossierNumberGenerator.generateDossierNumber(groupId, dossier.getCompanyId(),
dossierId, option.getProcessOptionId(), serviceProcess.getDossierNoPattern(), params);
dossier.setDossierNo(dossierRef.trim());
dossier.setServerNo(serviceProcess.getServerNo());
}
//Update submit date
// now = new Date();
// dossier.setSubmitDate(now);
Double durationCount;
Integer durationUnit = 0;
if (serviceProcess != null ) {
durationCount = serviceProcess.getDurationCount();
durationUnit = serviceProcess.getDurationUnit();
// _log.debug("durationCount: "+durationCount);
// _log.debug("durationUnit: "+durationUnit);
// int durationDays = 0;
//
// if (durationUnit == 0) {
// durationDays = durationCount;
// } else {
// durationDays = Math.round(durationCount / 8);
// }
// Date dueDate = null;
// if (Validator.isNotNull(durationCount) && durationCount > 0) {
// dueDate = HolidayUtils.getDueDate(now, durationCount, durationUnit, groupId);
// }
//
// _log.debug("dueDate: "+dueDate);
// if (durationDays > 0) {
// dueDate = DossierOverDueUtils.calculateEndDate(now, durationDays);
// }
// dossier.setDueDate(dueDate);
// dossier.setReceiveDate(now);
dossier.setDurationCount(durationCount);
dossier.setDurationUnit(durationUnit);
// }
}
dossier.setViaPostal(viaPostal);
if (viaPostal == 1) {
dossier.setPostalAddress(StringPool.BLANK);
dossier.setPostalCityCode(StringPool.BLANK);
dossier.setPostalTelNo(StringPool.BLANK);
} else if (viaPostal == 2) {
if (Validator.isNotNull(postalAddress))
dossier.setPostalAddress(postalAddress);
if (Validator.isNotNull(postalCityCode))
dossier.setPostalCityCode(postalCityCode);
if (Validator.isNotNull(postalTelNo))
dossier.setPostalTelNo(postalTelNo);
if (Validator.isNotNull(postalCityName))
dossier.setPostalCityName(postalCityName);
} else {
dossier.setPostalAddress(StringPool.BLANK);
dossier.setPostalCityCode(StringPool.BLANK);
dossier.setPostalTelNo(StringPool.BLANK);
}
dossier = dossierPersistence.update(dossier);
} else {
dossier = dossierPersistence.fetchByPrimaryKey(dossierId);
dossier.setModifiedDate(now);
String dossierNote = getDossierNote(serviceCode, govAgencyCode, dossierTemplateNo, groupId);
dossier.setDossierNote(dossierNote);
if (Validator.isNotNull(address))
dossier.setAddress(address);
if (Validator.isNotNull(cityCode))
dossier.setCityCode(cityCode);
if (Validator.isNotNull(cityName))
dossier.setCityName(cityName);
if (Validator.isNotNull(districtCode))
dossier.setDistrictCode(districtCode);
if (Validator.isNotNull(districtName))
dossier.setDistrictName(districtName);
if (Validator.isNotNull(wardCode))
dossier.setWardCode(wardCode);
if (Validator.isNotNull(wardName))
dossier.setWardName(wardName);
if (Validator.isNotNull(contactName))
dossier.setContactName(contactName);
if (Validator.isNotNull(contactEmail))
dossier.setContactEmail(contactEmail);
if (Validator.isNotNull(contactTelNo))
dossier.setContactTelNo(contactTelNo);
dossier.setViaPostal(viaPostal);
if (viaPostal == 1) {
dossier.setPostalAddress(StringPool.BLANK);
dossier.setPostalCityCode(StringPool.BLANK);
dossier.setPostalTelNo(StringPool.BLANK);
} else if (viaPostal == 2) {
if (Validator.isNotNull(postalAddress))
dossier.setPostalAddress(postalAddress);
if (Validator.isNotNull(postalCityCode))
dossier.setPostalCityCode(postalCityCode);
if (Validator.isNotNull(postalTelNo))
dossier.setPostalTelNo(postalTelNo);
if (Validator.isNotNull(postalCityName))
dossier.setPostalCityName(postalCityName);
} else {
dossier.setPostalAddress(StringPool.BLANK);
dossier.setPostalCityCode(StringPool.BLANK);
dossier.setPostalTelNo(StringPool.BLANK);
}
// if (Validator.isNotNull(applicantNote))
dossier.setApplicantNote(applicantNote);
dossier = dossierPersistence.update(dossier);
}
return dossier;
}
@Indexable(type = IndexableType.REINDEX)
public Dossier initDossier(long groupId, long dossierId, String referenceUid, int counter, String serviceCode,
String serviceName, String govAgencyCode, String govAgencyName, String applicantName,
String applicantIdType, String applicantIdNo, Date applicantIdDate, String address, String cityCode,
String cityName, String districtCode, String districtName, String wardCode, String wardName,
String contactName, String contactTelNo, String contactEmail, String dossierTemplateNo, String password,
int viaPostal, String postalAddress, String postalCityCode, String postalCityName, String postalTelNo,
boolean online, boolean notification, String applicantNote, int originality,
ServiceInfo service,
ServiceProcess serviceProcess,
ProcessOption processOption,
ServiceContext context) throws PortalException {
Date now = new Date();
long userId = context.getUserId();
User auditUser = userPersistence.fetchByPrimaryKey(userId);
Dossier dossier = null;
if (originality == 9) {
if (dossierId == 0) {
String dossierTemplateName = getDossierTemplateName(groupId, dossierTemplateNo);
dossierId = counterLocalService.increment(Dossier.class.getName());
dossier = dossierPersistence.create(dossierId);
//String dossierNote = getDossierNote(service, processOption);
dossier.setCreateDate(now);
dossier.setModifiedDate(now);
dossier.setCompanyId(context.getCompanyId());
dossier.setGroupId(groupId);
dossier.setUserId(userId);
dossier.setUserName(auditUser.getFullName());
// Add extent fields
dossier.setReferenceUid(referenceUid);
dossier.setCounter(counter);
dossier.setServiceCode(serviceCode);
dossier.setServiceName(serviceName);
dossier.setGovAgencyCode(govAgencyCode);
dossier.setGovAgencyName(govAgencyName);
dossier.setDossierTemplateNo(dossierTemplateNo);
dossier.setDossierTemplateName(dossierTemplateName);
dossier.setApplicantName(applicantName);
dossier.setApplicantIdType(applicantIdType);
dossier.setApplicantIdNo(applicantIdNo);
dossier.setApplicantIdDate(applicantIdDate);
dossier.setPassword(password);
dossier.setOnline(online);
//dossier.setDossierNote(dossierNote);
dossier.setAddress(address);
dossier.setCityCode(cityCode);
dossier.setCityName(cityName);
dossier.setDistrictCode(districtCode);
dossier.setDistrictName(districtName);
dossier.setWardCode(wardCode);
dossier.setWardName(wardName);
dossier.setContactName(contactName);
dossier.setContactEmail(contactEmail);
dossier.setContactTelNo(contactTelNo);
dossier.setViaPostal(viaPostal);
dossier.setPostalAddress(postalAddress);
dossier.setPostalCityCode(postalCityCode);
dossier.setPostalCityName(postalCityName);
dossier.setPostalTelNo(postalTelNo);
dossier.setApplicantNote(applicantNote);
dossier.setOriginality(originality);
dossier.setSampleCount(processOption != null ? processOption.getSampleCount(): 0);
String registerBookCode = processOption != null ? processOption.getRegisterBookCode() : StringPool.BLANK;
dossier.setRegisterBookCode(registerBookCode);
dossier.setRegisterBookName(Validator.isNotNull(registerBookCode) ? getDictItemName(groupId, "REGISTER_BOOK", registerBookCode) : StringPool.BLANK);
dossier.setProcessNo(serviceProcess != null ? serviceProcess.getProcessNo() : StringPool.BLANK);
dossierPersistence.update(dossier);
}
return dossier;
} else {
validateInit(groupId, dossierId, referenceUid, serviceCode, govAgencyCode, address, cityCode, districtCode,
wardCode, contactName, contactTelNo, contactEmail, dossierTemplateNo);
if (dossierId == 0) {
String dossierTemplateName = getDossierTemplateName(groupId, dossierTemplateNo);
dossierId = counterLocalService.increment(Dossier.class.getName());
String dossierNote = getDossierNote(service, processOption);
dossier = dossierPersistence.create(dossierId);
dossier.setCreateDate(now);
dossier.setModifiedDate(now);
dossier.setCompanyId(context.getCompanyId());
dossier.setGroupId(groupId);
dossier.setUserId(userId);
dossier.setUserName(auditUser.getFullName());
// Add extent fields
dossier.setReferenceUid(referenceUid);
dossier.setCounter(counter);
dossier.setServiceCode(serviceCode);
dossier.setServiceName(serviceName);
dossier.setGovAgencyCode(govAgencyCode);
dossier.setGovAgencyName(govAgencyName);
dossier.setDossierTemplateNo(dossierTemplateNo);
dossier.setDossierTemplateName(dossierTemplateName);
dossier.setApplicantName(applicantName);
dossier.setApplicantIdType(applicantIdType);
dossier.setApplicantIdNo(applicantIdNo);
dossier.setApplicantIdDate(applicantIdDate);
dossier.setPassword(password);
dossier.setOnline(online);
dossier.setDossierNote(dossierNote);
dossier.setAddress(address);
dossier.setCityCode(cityCode);
dossier.setCityName(cityName);
dossier.setDistrictCode(districtCode);
dossier.setDistrictName(districtName);
dossier.setWardCode(wardCode);
dossier.setWardName(wardName);
dossier.setContactName(contactName);
dossier.setContactEmail(contactEmail);
dossier.setContactTelNo(contactTelNo);
dossier.setViaPostal(viaPostal);
dossier.setPostalAddress(postalAddress);
dossier.setPostalCityCode(postalCityCode);
dossier.setPostalCityName(postalCityName);
dossier.setPostalTelNo(postalTelNo);
dossier.setApplicantNote(applicantNote);
// dossier.setServerNo(getServerNo(groupId));
dossier.setOriginality(originality);
String registerBookCode = processOption != null ? processOption.getRegisterBookCode() : StringPool.BLANK;
dossier.setRegisterBookCode(registerBookCode);
dossier.setRegisterBookName(Validator.isNotNull(registerBookCode) ? getDictItemName(groupId, "REGISTER_BOOK", registerBookCode) : StringPool.BLANK);
dossier.setProcessNo(serviceProcess != null ? serviceProcess.getProcessNo() : StringPool.BLANK);
//Update sampleCount
// ProcessOption option = getProcessOption(serviceCode, govAgencyCode, dossierTemplateNo, groupId);
ProcessOption option = processOption;
if (option != null) {
dossier.setSampleCount(option.getSampleCount());
dossier.setSubmissionNote(option.getSubmissionNote());
}
Double durationCount;
Integer durationUnit = 0;
if (serviceProcess != null ) {
durationCount = serviceProcess.getDurationCount();
durationUnit = serviceProcess.getDurationUnit();
dossier.setDurationCount(durationCount);
dossier.setDurationUnit(durationUnit);
dossier.setServerNo(serviceProcess.getServerNo());
}
dossier.setViaPostal(viaPostal);
if (viaPostal == 1) {
dossier.setPostalAddress(StringPool.BLANK);
dossier.setPostalCityCode(StringPool.BLANK);
dossier.setPostalTelNo(StringPool.BLANK);
} else if (viaPostal == 2) {
if (Validator.isNotNull(postalAddress))
dossier.setPostalAddress(postalAddress);
if (Validator.isNotNull(postalCityCode))
dossier.setPostalCityCode(postalCityCode);
if (Validator.isNotNull(postalTelNo))
dossier.setPostalTelNo(postalTelNo);
if (Validator.isNotNull(postalCityName))
dossier.setPostalCityName(postalCityName);
} else {
dossier.setPostalAddress(StringPool.BLANK);
dossier.setPostalCityCode(StringPool.BLANK);
dossier.setPostalTelNo(StringPool.BLANK);
}
dossierPersistence.update(dossier);
} else {
dossier = dossierPersistence.fetchByPrimaryKey(dossierId);
dossier.setModifiedDate(now);
// String dossierNote = getDossierNote(serviceCode, govAgencyCode, dossierTemplateNo, groupId);
String dossierNote = getDossierNote(service, processOption);
dossier.setDossierNote(dossierNote);
if (Validator.isNotNull(address))
dossier.setAddress(address);
if (Validator.isNotNull(cityCode))
dossier.setCityCode(cityCode);
if (Validator.isNotNull(cityName))
dossier.setCityName(cityName);
if (Validator.isNotNull(districtCode))
dossier.setDistrictCode(districtCode);
if (Validator.isNotNull(districtName))
dossier.setDistrictName(districtName);
if (Validator.isNotNull(wardCode))
dossier.setWardCode(wardCode);
if (Validator.isNotNull(wardName))
dossier.setWardName(wardName);
if (Validator.isNotNull(contactName))
dossier.setContactName(contactName);
if (Validator.isNotNull(contactEmail))
dossier.setContactEmail(contactEmail);
if (Validator.isNotNull(contactTelNo))
dossier.setContactTelNo(contactTelNo);
dossier.setViaPostal(viaPostal);
if (viaPostal == 1) {
dossier.setPostalAddress(StringPool.BLANK);
dossier.setPostalCityCode(StringPool.BLANK);
dossier.setPostalTelNo(StringPool.BLANK);
} else if (viaPostal == 2) {
if (Validator.isNotNull(postalAddress))
dossier.setPostalAddress(postalAddress);
if (Validator.isNotNull(postalCityCode))
dossier.setPostalCityCode(postalCityCode);
if (Validator.isNotNull(postalTelNo))
dossier.setPostalTelNo(postalTelNo);
if (Validator.isNotNull(postalCityName))
dossier.setPostalCityName(postalCityName);
} else {
dossier.setPostalAddress(StringPool.BLANK);
dossier.setPostalCityCode(StringPool.BLANK);
dossier.setPostalTelNo(StringPool.BLANK);
}
// if (Validator.isNotNull(applicantNote))
dossier.setApplicantNote(applicantNote);
dossierPersistence.update(dossier);
}
return dossier;
}
}
//initMultipleDossier(groupId, 0l, referenceUid, counter, input.getServiceCode(), serviceName,
//input.getGovAgencyCode(), govAgencyName, applicantName, applicantIdType,
//applicantIdNo, appIdDate, address,
//contactName, contactTelNo, contactEmail,
//input.getDossierTemplateNo(), password,
//viaPostal,postalServiceCode,postalServiceName, postalAddress, postalCityCode, postalCityName,
//postalDistrictCode,postalDistrictName,postalWardCode,postalWardName,
//postalTelNo,
//online, process.getDirectNotification(), applicantNote,
//input.getOriginality(),
//delegateIdNo, delegateName,delegateTelNo,delegateEmail,delegateEmail,delegateAddress,
//delegateCityCode,delegateCityName,delegateDistrictCode,delegateDistrictName,delegateWardCode,delegateWardName,
//registerBookCode,registerBookName,sampleCount,
//dossierName,
//service, process, option,
//serviceContext);
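	/**
	 * Initializes a {@link Dossier} for the multiple-dossier flow. When {@code dossierId == 0} a new
	 * primary key is generated, the applicant, contact, delegate and postal fields are copied onto the
	 * entity, the process number, server number and duration are taken from the given
	 * {@link ServiceProcess}, and the record is persisted. When a non-zero {@code dossierId} is passed
	 * nothing is created and {@code null} is returned.
	 */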
@Indexable(type = IndexableType.REINDEX)
public Dossier initMultipleDossier(long groupId, long dossierId, String referenceUid, int counter,
String serviceCode, String serviceName, String govAgencyCode, String govAgencyName, String applicantName,
String applicantIdType, String applicantIdNo, Date applicantIdDate, String address, String contactName,
String contactTelNo, String contactEmail, String dossierTemplateNo, String password, int viaPostal,
String postalServiceCode, String postalServiceName, String postalAddress, String postalCityCode,
String postalCityName, String postalDistrictCode, String postalDistrictName, String postalWardCode,
String postalWardName, String postalTelNo, boolean online, boolean notification, String applicantNote,
int originality, String delegateIdNo, String delegateName, String delegateTelNo, String delegateEmail,
String delegateAddress, String delegateCityCode, String delegateCityName, String delegateDistrictCode,
String delegateDistrictName, String delegateWardCode, String delegateWardName, String registerBookCode,
String registerBookName, int sampleCount, String dossierName, ServiceInfo service,
ServiceProcess process, ProcessOption option, ServiceContext context) throws PortalException {
Date now = new Date();
long userId = context.getUserId();
User auditUser = userPersistence.fetchByPrimaryKey(userId);
Dossier dossier = null;
if (dossierId == 0) {
dossierId = counterLocalService.increment(Dossier.class.getName());
dossier = dossierPersistence.create(dossierId);
String dossierTemplateName = getDossierTemplateName(groupId, dossierTemplateNo);
String dossierNote = getDossierNote(service, option);
dossier.setCreateDate(now);
dossier.setModifiedDate(now);
dossier.setCompanyId(context.getCompanyId());
dossier.setGroupId(groupId);
dossier.setUserId(userId);
dossier.setUserName(auditUser.getFullName());
// Add extent fields
dossier.setReferenceUid(referenceUid);
dossier.setCounter(counter);
dossier.setServiceCode(serviceCode);
dossier.setServiceName(serviceName);
dossier.setGovAgencyCode(govAgencyCode);
dossier.setGovAgencyName(govAgencyName);
dossier.setDossierTemplateNo(dossierTemplateNo);
dossier.setDossierTemplateName(dossierTemplateName);
dossier.setApplicantName(applicantName);
dossier.setApplicantIdType(applicantIdType);
dossier.setApplicantIdNo(applicantIdNo);
dossier.setApplicantIdDate(applicantIdDate);
dossier.setAddress(address);
dossier.setContactName(contactName);
dossier.setContactEmail(contactEmail);
dossier.setContactTelNo(contactTelNo);
dossier.setPassword(password);
dossier.setOnline(online);
dossier.setDossierNote(dossierNote);
dossier.setViaPostal(viaPostal);
if (viaPostal == 1) {
dossier.setPostalAddress(StringPool.BLANK);
dossier.setPostalCityCode(StringPool.BLANK);
dossier.setPostalTelNo(StringPool.BLANK);
} else if (viaPostal == 2) {
if (Validator.isNotNull(postalAddress))
dossier.setPostalAddress(postalAddress);
if (Validator.isNotNull(postalCityCode))
dossier.setPostalCityCode(postalCityCode);
if (Validator.isNotNull(postalTelNo))
dossier.setPostalTelNo(postalTelNo);
if (Validator.isNotNull(postalCityName))
dossier.setPostalCityName(postalCityName);
} else {
dossier.setPostalAddress(StringPool.BLANK);
dossier.setPostalCityCode(StringPool.BLANK);
dossier.setPostalTelNo(StringPool.BLANK);
}
dossier.setPostalServiceCode(postalServiceCode);
dossier.setPostalServiceName(postalServiceName);
dossier.setPostalAddress(postalAddress);
dossier.setPostalCityCode(postalCityCode);
dossier.setPostalCityName(postalCityName);
dossier.setPostalDistrictCode(postalDistrictCode);
dossier.setPostalDistrictName(postalDistrictName);
dossier.setPostalWardCode(postalWardCode);
dossier.setPostalWardName(postalWardName);
dossier.setPostalTelNo(postalTelNo);
dossier.setApplicantNote(applicantNote);
dossier.setSampleCount(sampleCount);
dossier.setOriginality(originality);
dossier.setDelegateIdNo(delegateIdNo);
dossier.setDelegateName(delegateName);
dossier.setDelegateTelNo(delegateTelNo);
dossier.setDelegateEmail(delegateEmail);
dossier.setDelegateAddress(delegateAddress);
dossier.setDelegateCityCode(delegateCityCode);
dossier.setDelegateCityName(delegateCityName);
dossier.setDelegateDistrictCode(delegateDistrictCode);
dossier.setDelegateDistrictName(delegateDistrictName);
dossier.setDelegateWardCode(delegateWardCode);
dossier.setDelegateWardName(delegateWardName);
dossier.setNotification(notification);
dossier.setRegisterBookCode(registerBookCode);
dossier.setRegisterBookName(registerBookName);
dossier.setDossierName(dossierName);
			//Update process info and submit duration; guard against a null process before dereferencing it
			if (process != null) {
				dossier.setProcessNo(process.getProcessNo());
				dossier.setServerNo(process.getServerNo());
				dossier.setDurationCount(process.getDurationCount());
				dossier.setDurationUnit(Validator.isNotNull(process.getDurationUnit()) ? process.getDurationUnit() : 0);
			}
dossierPersistence.update(dossier);
//LinkedHashMap<String, Object> params = new LinkedHashMap<String, Object>();
//params.put(DossierTerm.GOV_AGENCY_CODE, dossier.getGovAgencyCode());
//params.put(DossierTerm.SERVICE_CODE, dossier.getServiceCode());
//params.put(DossierTerm.DOSSIER_TEMPLATE_NO, dossier.getDossierTemplateNo());
//params.put(DossierTerm.DOSSIER_STATUS, StringPool.BLANK);
// if (option != null) {
// String dossierRef = DossierNumberGenerator.generateDossierNumber(groupId, dossier.getCompanyId(),
// dossierId, option.getProcessOptionId(), process.getDossierNoPattern(), params);
// dossier.setDossierNo(dossierRef.trim());
// dossier.setSubmissionNote(option.getSubmissionNote());
//
// }
//dossierPersistence.update(dossier);
}
return dossier;
}
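	/**
	 * Initializes a {@link Dossier} with the full set of applicant, delegate and postal fields. The
	 * logic mirrors {@link #initMultipleDossier}: a new entity is created and persisted only when
	 * {@code dossierId == 0}; otherwise {@code null} is returned.
	 */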
@Indexable(type = IndexableType.REINDEX)
public Dossier initFullDossier(long groupId, long dossierId, String referenceUid, int counter,
String serviceCode, String serviceName, String govAgencyCode, String govAgencyName, String applicantName,
String applicantIdType, String applicantIdNo, Date applicantIdDate, String address, String contactName,
String contactTelNo, String contactEmail, String dossierTemplateNo, String password, int viaPostal,
String postalServiceCode, String postalServiceName, String postalAddress, String postalCityCode,
String postalCityName, String postalDistrictCode, String postalDistrictName, String postalWardCode,
String postalWardName, String postalTelNo, boolean online, boolean notification, String applicantNote,
int originality, String delegateIdNo, String delegateName, String delegateTelNo, String delegateEmail,
String delegateAddress, String delegateCityCode, String delegateCityName, String delegateDistrictCode,
String delegateDistrictName, String delegateWardCode, String delegateWardName, String registerBookCode,
String registerBookName, int sampleCount, String dossierName, ServiceInfo service,
ServiceProcess process, ProcessOption option, ServiceContext context) throws PortalException {
Date now = new Date();
long userId = context.getUserId();
User auditUser = userPersistence.fetchByPrimaryKey(userId);
Dossier dossier = null;
if (dossierId == 0) {
dossierId = counterLocalService.increment(Dossier.class.getName());
dossier = dossierPersistence.create(dossierId);
String dossierTemplateName = getDossierTemplateName(groupId, dossierTemplateNo);
String dossierNote = getDossierNote(service, option);
dossier.setCreateDate(now);
dossier.setModifiedDate(now);
dossier.setCompanyId(context.getCompanyId());
dossier.setGroupId(groupId);
dossier.setUserId(userId);
dossier.setUserName(auditUser.getFullName());
// Add extent fields
dossier.setReferenceUid(referenceUid);
dossier.setCounter(counter);
dossier.setServiceCode(serviceCode);
dossier.setServiceName(serviceName);
dossier.setGovAgencyCode(govAgencyCode);
dossier.setGovAgencyName(govAgencyName);
dossier.setDossierTemplateNo(dossierTemplateNo);
dossier.setDossierTemplateName(dossierTemplateName);
dossier.setApplicantName(applicantName);
dossier.setApplicantIdType(applicantIdType);
dossier.setApplicantIdNo(applicantIdNo);
dossier.setApplicantIdDate(applicantIdDate);
dossier.setAddress(address);
dossier.setContactName(contactName);
dossier.setContactEmail(contactEmail);
dossier.setContactTelNo(contactTelNo);
dossier.setPassword(password);
dossier.setOnline(online);
dossier.setDossierNote(dossierNote);
dossier.setViaPostal(viaPostal);
if (viaPostal == 1) {
dossier.setPostalAddress(StringPool.BLANK);
dossier.setPostalCityCode(StringPool.BLANK);
dossier.setPostalTelNo(StringPool.BLANK);
} else if (viaPostal == 2) {
if (Validator.isNotNull(postalAddress))
dossier.setPostalAddress(postalAddress);
if (Validator.isNotNull(postalCityCode))
dossier.setPostalCityCode(postalCityCode);
if (Validator.isNotNull(postalTelNo))
dossier.setPostalTelNo(postalTelNo);
if (Validator.isNotNull(postalCityName))
dossier.setPostalCityName(postalCityName);
} else {
dossier.setPostalAddress(StringPool.BLANK);
dossier.setPostalCityCode(StringPool.BLANK);
dossier.setPostalTelNo(StringPool.BLANK);
}
dossier.setPostalServiceCode(postalServiceCode);
dossier.setPostalServiceName(postalServiceName);
dossier.setPostalAddress(postalAddress);
dossier.setPostalCityCode(postalCityCode);
dossier.setPostalCityName(postalCityName);
dossier.setPostalDistrictCode(postalDistrictCode);
dossier.setPostalDistrictName(postalDistrictName);
dossier.setPostalWardCode(postalWardCode);
dossier.setPostalWardName(postalWardName);
dossier.setPostalTelNo(postalTelNo);
dossier.setApplicantNote(applicantNote);
dossier.setSampleCount(sampleCount);
dossier.setOriginality(originality);
dossier.setDelegateIdNo(delegateIdNo);
dossier.setDelegateName(delegateName);
dossier.setDelegateTelNo(delegateTelNo);
dossier.setDelegateEmail(delegateEmail);
dossier.setDelegateAddress(delegateAddress);
dossier.setDelegateCityCode(delegateCityCode);
dossier.setDelegateCityName(delegateCityName);
dossier.setDelegateDistrictCode(delegateDistrictCode);
dossier.setDelegateDistrictName(delegateDistrictName);
dossier.setDelegateWardCode(delegateWardCode);
dossier.setDelegateWardName(delegateWardName);
dossier.setNotification(notification);
dossier.setRegisterBookCode(registerBookCode);
dossier.setRegisterBookName(registerBookName);
dossier.setDossierName(dossierName);
			//Update process info and submit duration; guard against a null process before dereferencing it
			if (process != null) {
				dossier.setProcessNo(process.getProcessNo());
				dossier.setServerNo(process.getServerNo());
				dossier.setDurationCount(process.getDurationCount());
				dossier.setDurationUnit(Validator.isNotNull(process.getDurationUnit()) ? process.getDurationUnit() : 0);
			}
dossierPersistence.update(dossier);
//LinkedHashMap<String, Object> params = new LinkedHashMap<String, Object>();
//params.put(DossierTerm.GOV_AGENCY_CODE, dossier.getGovAgencyCode());
//params.put(DossierTerm.SERVICE_CODE, dossier.getServiceCode());
//params.put(DossierTerm.DOSSIER_TEMPLATE_NO, dossier.getDossierTemplateNo());
//params.put(DossierTerm.DOSSIER_STATUS, StringPool.BLANK);
//if (option != null) {
// String dossierRef = DossierNumberGenerator.generateDossierNumber(groupId, dossier.getCompanyId(),
// dossierId, option.getProcessOptionId(), process.getDossierNoPattern(), params);
// dossier.setDossierNo(dossierRef.trim());
// dossier.setSubmissionNote(option.getSubmissionNote());
//}
//dossierPersistence.update(dossier);
}
return dossier;
}
private final String ADMINISTRATIVE_REGION = "ADMINISTRATIVE_REGION";
// private final String POSTAL_ADMINISTRATIVE_REGION = "VNPOST_CODE";
private final String GOVERNMENT_AGENCY = "GOVERNMENT_AGENCY";
// private final int DUE_DATE_DEFAULT = 5;
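	/**
	 * Looks up the display name of a dictionary item (for example an administrative region or a
	 * government agency) by collection code and item code. Returns an empty string when either the
	 * collection or the item cannot be found.
	 */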
private String getDictItemName(long groupId, String collectionCode, String itemCode) {
DictCollection dc = DictCollectionLocalServiceUtil.fetchByF_dictCollectionCode(collectionCode, groupId);
		if (dc == null) {
			return StringPool.BLANK;
		}
		_log.debug("COLLECTION UPDATE DOSSIER: " + dc + "," + collectionCode);
		_log.debug("COLLECTION UPDATE DOSSIER: " + dc.getCollectionCode() + "," + dc.getDictCollectionId() + "," + dc.getPrimaryKey());
		DictItem it = DictItemLocalServiceUtil.fetchByF_dictItemCode(itemCode, dc.getPrimaryKey(), groupId);
		if (it == null) {
			return StringPool.BLANK;
		}
		_log.debug("ITEM: " + itemCode + "," + it);
		return it.getItemName();
}
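	/**
	 * Updates a dossier received at the one-gate counter: regenerates the dossier register and
	 * password, refreshes applicant, contact and delegate information (copying the applicant data when
	 * {@code isSameAsApplicant} is set), resolves city/district/ward names from the
	 * ADMINISTRATIVE_REGION dictionary, stores postal delivery details when {@code viaPostal == 1},
	 * marks the dossier as offline and persists it.
	 */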
@Indexable(type = IndexableType.REINDEX)
public Dossier updateDossierOneGate(long dossierId, String applicantName, String applicantIdType,
String applicantIdNo, Date applicantIdDate, String address, String cityCode, String districtCode,
String wardCode, String contactName, String contactTelNo, String contactEmail, boolean isSameAsApplicant,
String delegateName, String delegateIdNo, String delegateTelNo, String delegateEmail,
String delegateAddress, String delegateCityCode, String delegateDistrictCode, String delegateWardCode,
String applicantNote, String briefNote, String dossierNo, int viaPostal, String postalServiceCode,
String postalServiceName, String postalAddress, String postalCityCode, String postalDistrictCode,
String postalWardCode, String postalTelNo, long dossierActionId, String paymentFee, String paymentFeeNote,
ServiceContext context) throws PortalException {
Date now = new Date();
Dossier dossier = dossierLocalService.fetchDossier(dossierId);
dossier.setModifiedDate(now);
// create dossierRegister
String dossierRegister = PwdGenerator.getPassword(10).toUpperCase();
dossier.setDossierRegister(dossierRegister);
dossier.setApplicantName(applicantName);
dossier.setApplicantIdType(applicantIdType);
dossier.setApplicantIdNo(applicantIdNo);
dossier.setApplicantIdDate(applicantIdDate);
dossier.setAddress(address);
if (Validator.isNotNull(cityCode)) {
dossier.setCityCode(cityCode);
dossier.setCityName(getDictItemName(dossier.getGroupId(), ADMINISTRATIVE_REGION, cityCode));
}
if (Validator.isNotNull(districtCode)) {
dossier.setDistrictCode(districtCode);
dossier.setDistrictName(getDictItemName(dossier.getGroupId(), ADMINISTRATIVE_REGION, districtCode));
}
if (Validator.isNotNull(wardCode)) {
dossier.setWardCode(wardCode);
dossier.setWardName(getDictItemName(dossier.getGroupId(), ADMINISTRATIVE_REGION, wardCode));
}
dossier.setContactEmail(contactEmail);
dossier.setContactName(contactName);
dossier.setContactTelNo(contactTelNo);
if (isSameAsApplicant) {
dossier.setDelegateName(applicantName);
dossier.setDelegateIdNo(applicantIdNo);
dossier.setDelegateTelNo(contactTelNo);
dossier.setDelegateAddress(address);
if (Validator.isNotNull(cityCode)) {
dossier.setDelegateCityCode(cityCode);
dossier.setDelegateCityName(getDictItemName(dossier.getGroupId(), ADMINISTRATIVE_REGION, cityCode));
}
if (Validator.isNotNull(districtCode)) {
dossier.setDelegateDistrictCode(districtCode);
dossier.setDelegateDistrictName(
getDictItemName(dossier.getGroupId(), ADMINISTRATIVE_REGION, districtCode));
}
if (Validator.isNotNull(wardCode)) {
dossier.setDelegateWardCode(wardCode);
dossier.setDelegateWardName(getDictItemName(dossier.getGroupId(), ADMINISTRATIVE_REGION, wardCode));
}
} else {
dossier.setDelegateName(delegateName);
dossier.setDelegateIdNo(delegateIdNo);
dossier.setDelegateTelNo(delegateTelNo);
dossier.setDelegateAddress(delegateAddress);
if (Validator.isNotNull(delegateCityCode)) {
dossier.setDelegateCityCode(delegateCityCode);
dossier.setDelegateCityName(
getDictItemName(dossier.getGroupId(), ADMINISTRATIVE_REGION, delegateCityCode));
}
if (Validator.isNotNull(delegateDistrictCode)) {
dossier.setDelegateDistrictCode(delegateDistrictCode);
dossier.setDelegateDistrictName(
getDictItemName(dossier.getGroupId(), ADMINISTRATIVE_REGION, delegateDistrictCode));
}
if (Validator.isNotNull(delegateWardCode)) {
dossier.setDelegateWardCode(delegateWardCode);
dossier.setDelegateWardName(
getDictItemName(dossier.getGroupId(), ADMINISTRATIVE_REGION, delegateWardCode));
}
}
dossier.setApplicantNote(applicantNote);
dossier.setBriefNote(briefNote);
dossier.setDossierNo(dossierNo);
		// viaPostal: 0 disable, 1: unselected, 2: selected
if (viaPostal == 1) {
dossier.setViaPostal(viaPostal);
dossier.setPostalServiceCode(postalServiceCode);
dossier.setPostalServiceName(postalServiceName);
dossier.setPostalAddress(postalAddress);
dossier.setPostalCityCode(postalCityCode);
dossier.setPostalCityName(
getDictItemName(dossier.getGroupId(), ADMINISTRATIVE_REGION, postalCityCode));
dossier.setPostalDistrictCode(postalDistrictCode);
dossier.setPostalDistrictName(
getDictItemName(dossier.getGroupId(), ADMINISTRATIVE_REGION, postalDistrictCode));
dossier.setPostalWardCode(postalWardCode);
dossier.setPostalWardName(
getDictItemName(dossier.getGroupId(), ADMINISTRATIVE_REGION, postalWardCode));
dossier.setPostalTelNo(postalTelNo);
}
String password = PwdGenerator.getPassword(8).toUpperCase();
dossier.setPassword(password);
dossier.setOnline(false);
//LamTV_Process
// if (dossierActionId > 0) {
// DossierAction dAction = DossierActionLocalServiceUtil.fetchDossierAction(dossierActionId);
// ProcessAction process = ProcessActionLocalServiceUtil.getByServiceProcess(dAction.getServiceProcessId(),
// dAction.getActionCode());
// if (process != null) {
// process.setPaymentFee(paymentFee);
// ProcessActionLocalServiceUtil.updateProcessAction(process);
// }
// } else {
// ServiceProcess serProcess = ServiceProcessLocalServiceUtil.getServiceByCode(dossier.getGroupId(), dossier.getServiceCode(), dossier.getGovAgencyCode(),
// dossier.getDossierTemplateNo());
// if (serProcess != null) {
// ProcessAction process = ProcessActionLocalServiceUtil.getByServiceProcess(serProcess.getServiceProcessId(),
// String.valueOf(10000));
// if (process != null) {
// process.setPaymentFee(paymentFee);
// ProcessActionLocalServiceUtil.updateProcessAction(process);
// }
// }
// }
//LamTV_ Process Post payment
// long userId = context.getUserId();
// long groupId = dossier.getGroupId();
// String referenceUid = StringPool.BLANK;
// if (Validator.isNull(referenceUid)) {
// referenceUid = PortalUUIDUtil.generate();
// }
// String govAgencyCode = dossier.getGovAgencyCode();
// String govAgencyName = dossier.getGovAgencyName();
// long paymentAmount = 0;
// String epaymentProfile = StringPool.BLANK;
// String bankInfo = StringPool.BLANK;
// PaymentFileLocalServiceUtil.createPaymentFiles(userId, groupId, dossierId,
// referenceUid, govAgencyCode, govAgencyName, applicantName, applicantIdNo, paymentFee, paymentAmount,
// paymentFeeNote, epaymentProfile, bankInfo, context);
dossierPersistence.update(dossier);
return dossier;
}
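	/**
	 * Creates a brand new dossier in the NEW status for the given service and agency: generates the
	 * reference UID, counter, dossier register and password, resolves dictionary names for the agency
	 * and address codes, computes the due date from the service process duration, pre-creates the
	 * e-form dossier files declared by the dossier template, generates the dossier number from the
	 * service process pattern and persists the entity.
	 */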
@Indexable(type = IndexableType.REINDEX)
public Dossier createDossier(long groupId, String serviceCode, String govAgencyCode, String applicantName,
String applicantIdType, String applicantIdNo, Date applicantIdDate, String address, String cityCode,
String districtCode, String wardCode, String contactName, String contactTelNo, String contactEmail,
boolean isSameAsApplicant, String delegateName, String delegateIdNo, String delegateTelNo,
String delegateEmail, String delegateAddress, String delegateCityCode, String delegateDistrictCode,
String delegateWardCode, String applicantNote, String briefNote, String dossierNo, String dossierTemplateNo,
int viaPostal, String postalServiceCode, String postalServiceName, String postalAddress,
String postalCityCode, String postalDistrictCode, String postalWardCode, String postalTelNo,
int originality,
ServiceContext context) throws PortalException {
Date now = new Date();
long dossierId = counterLocalService.increment(Dossier.class.getName());
long userId = context.getUserId();
// create referentUid
String referenceUid = PortalUUIDUtil.generate();
// create counterId
int counter = DossierNumberGenerator.counterDossier(userId, groupId);
Dossier dossier = dossierLocalService.createDossier(dossierId);
//setDossierStatus = new
dossier.setDossierStatus(DossierStatusConstants.NEW);
dossier.setCreateDate(now);
dossier.setModifiedDate(now);
dossier.setCompanyId(context.getCompanyId());
dossier.setGroupId(groupId);
dossier.setReferenceUid(referenceUid);
dossier.setCounter(counter);
String dossierTemplateName = getDossierTemplateName(groupId, dossierTemplateNo);
// create dossierRegister
String dossierRegister = PwdGenerator.getPassword(10).toUpperCase();
dossier.setDossierRegister(dossierRegister);
ServiceInfo serviceInfo = serviceInfoLocalService.getByCode(groupId, serviceCode);
dossier.setServiceCode(serviceCode);
dossier.setServiceName(serviceInfo.getServiceName());
dossier.setGovAgencyCode(govAgencyCode);
dossier.setGovAgencyName(getDictItemName(groupId, GOVERNMENT_AGENCY, govAgencyCode));
dossier.setApplicantName(applicantName);
dossier.setApplicantIdType(applicantIdType);
dossier.setApplicantIdNo(applicantIdNo);
dossier.setApplicantIdDate(applicantIdDate);
dossier.setAddress(address);
dossier.setDossierTemplateNo(dossierTemplateNo);
dossier.setDossierTemplateName(dossierTemplateName);
dossier.setOriginality(originality);
if (Validator.isNotNull(cityCode)) {
dossier.setCityCode(cityCode);
dossier.setCityName(getDictItemName(groupId, ADMINISTRATIVE_REGION, cityCode));
}
if (Validator.isNotNull(districtCode)) {
dossier.setDistrictCode(districtCode);
dossier.setDistrictName(getDictItemName(groupId, ADMINISTRATIVE_REGION, districtCode));
}
if (Validator.isNotNull(wardCode)) {
dossier.setWardCode(wardCode);
dossier.setWardName(getDictItemName(groupId, ADMINISTRATIVE_REGION, wardCode));
}
dossier.setContactEmail(contactEmail);
dossier.setContactName(contactName);
dossier.setContactTelNo(contactTelNo);
if (isSameAsApplicant) {
dossier.setDelegateName(applicantName);
dossier.setDelegateIdNo(applicantIdNo);
dossier.setDelegateTelNo(contactTelNo);
dossier.setDelegateAddress(address);
if (Validator.isNotNull(cityCode)) {
dossier.setDelegateCityCode(cityCode);
dossier.setDelegateCityName(getDictItemName(groupId, ADMINISTRATIVE_REGION, cityCode));
}
if (Validator.isNotNull(districtCode)) {
dossier.setDelegateDistrictCode(districtCode);
dossier.setDelegateDistrictName(getDictItemName(groupId, ADMINISTRATIVE_REGION, districtCode));
}
if (Validator.isNotNull(wardCode)) {
dossier.setDelegateWardCode(wardCode);
dossier.setDelegateWardName(getDictItemName(groupId, ADMINISTRATIVE_REGION, wardCode));
}
} else {
dossier.setDelegateName(delegateName);
dossier.setDelegateIdNo(delegateIdNo);
dossier.setDelegateTelNo(delegateTelNo);
dossier.setDelegateAddress(delegateAddress);
if (Validator.isNotNull(delegateCityCode)) {
dossier.setDelegateCityCode(delegateCityCode);
dossier.setDelegateCityName(getDictItemName(groupId, ADMINISTRATIVE_REGION, delegateCityCode));
}
if (Validator.isNotNull(delegateDistrictCode)) {
dossier.setDelegateDistrictCode(delegateDistrictCode);
dossier.setDelegateDistrictName(getDictItemName(groupId, ADMINISTRATIVE_REGION, delegateDistrictCode));
}
if (Validator.isNotNull(delegateWardCode)) {
dossier.setDelegateWardCode(delegateWardCode);
dossier.setDelegateWardName(getDictItemName(groupId, ADMINISTRATIVE_REGION, delegateWardCode));
}
}
ProcessOption option = getProcessOption(serviceCode, govAgencyCode, dossierTemplateNo, groupId);
long serviceProcessId = option.getServiceProcessId();
ServiceProcess serviceProcess = serviceProcessPersistence.findByPrimaryKey(serviceProcessId);
double durationCount = 0;
int durationUnit = 0;
if (serviceProcess != null ) {
durationCount = serviceProcess.getDurationCount();
durationUnit = serviceProcess.getDurationUnit();
}
// _log.debug("durationCount: "+durationCount);
// _log.debug("durationUnit: "+durationUnit);
Date dueDate = HolidayUtils.getDueDate(now, durationCount, durationUnit, groupId);
// set dueDate
dossier.setDueDate(dueDate);
// set receivedDate
dossier.setReceiveDate(now);
dossier.setDossierNote(option.getInstructionNote());
dossier.setSubmissionNote(option.getSubmissionNote());
dossier.setApplicantNote(applicantNote);
dossier.setBriefNote(briefNote);
//dossier.setDossierNo(dossierNo);
		// viaPostal: 0 disable, 1: unselected, 2: selected
// if (viaPostal == 2) {
//LamTV_Hot fix
if (viaPostal == 1) {
dossier.setViaPostal(viaPostal);
dossier.setPostalServiceCode(postalServiceCode);
dossier.setPostalServiceName(postalServiceName);
dossier.setPostalAddress(postalAddress);
dossier.setPostalCityCode(postalCityCode);
dossier.setPostalCityName(getDictItemName(groupId, ADMINISTRATIVE_REGION, postalCityCode));
dossier.setPostalDistrictCode(postalDistrictCode);
dossier.setPostalDistrictName(getDictItemName(groupId, ADMINISTRATIVE_REGION, postalDistrictCode));
dossier.setPostalWardCode(postalWardCode);
dossier.setPostalWardName(getDictItemName(groupId, ADMINISTRATIVE_REGION, postalWardCode));
dossier.setPostalTelNo(postalTelNo);
}
String password = PwdGenerator.getPassword(8).toUpperCase();
dossier.setPassword(password);
dossier.setOnline(false);
dossierPersistence.update(dossier);
// init DossierFile
List<DossierPart> dossierParts;
dossierParts = dossierPartPersistence.findByTP_NO(groupId, dossierTemplateNo);
for (DossierPart part : dossierParts) {
if (Validator.isNotNull(part.getFormScript()) && part.getPartType() != 2) {
String dossierFileUUID = PortalUUIDUtil.generate();
dossierFileLocalService.addDossierFile(groupId, dossierId, dossierFileUUID, dossierTemplateNo,
part.getPartNo(), part.getFileTemplateNo(), part.getPartName(), StringPool.BLANK, 0l, null,
StringPool.BLANK, StringPool.TRUE, context);
}
}
LinkedHashMap<String, Object> params = new LinkedHashMap<String, Object>();
params.put(DossierTerm.GOV_AGENCY_CODE, dossier.getGovAgencyCode());
params.put(DossierTerm.SERVICE_CODE, dossier.getServiceCode());
params.put(DossierTerm.DOSSIER_TEMPLATE_NO, dossier.getDossierTemplateNo());
params.put(DossierTerm.DOSSIER_STATUS, StringPool.BLANK);
String dossierRef = DossierNumberGenerator.generateDossierNumber(groupId, dossier.getCompanyId(),
dossierId, option.getProcessOptionId(), serviceProcess != null ? serviceProcess.getDossierNoPattern() : StringPool.BLANK, params);
//LamTV_ Process Post payment
// String referenceUid = StringPool.BLANK;
// if (Validator.isNull(referenceUid)) {
// referenceUid = PortalUUIDUtil.generate();
// }
// String govAgencyCode = dossier.getGovAgencyCode();
// String govAgencyName = dossier.getGovAgencyName();
// String paymentNote = StringPool.BLANK;
// String epaymentProfile = StringPool.BLANK;
// String bankInfo = StringPool.BLANK;
// String paymentFee;
// long paymentAmount = 0;
if (serviceProcess != null) {
// paymentFee = serviceProcess.getPaymentFee();
// _log.debug("paymentFee: "+paymentFee);
}
// PaymentFileLocalServiceUtil.createPaymentFiles(userId, groupId, dossierId, referenceUid, govAgencyCode,
// govAgencyName, applicantName, applicantIdNo, paymentFee, paymentAmount, paymentNote, epaymentProfile,
// bankInfo, context);
// _log.debug("SERVICEPROCESS"+ serviceProcess.getDossierNoPattern());
//
// _log.debug("DOSSIER_NO_"+ dossierRef);
dossier.setDossierNo(dossierRef.trim());
dossierPersistence.update(dossier);
return dossier;
}
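	/**
	 * Resolves the {@link ProcessOption} configured for a service code, government agency and dossier
	 * template within a group, going through the corresponding {@link ServiceConfig}.
	 */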
private ProcessOption getProcessOption(String serviceInfoCode, String govAgencyCode, String dossierTemplateNo,
long groupId) throws PortalException {
ServiceConfig config = ServiceConfigLocalServiceUtil.getBySICodeAndGAC(groupId, serviceInfoCode, govAgencyCode);
return ProcessOptionLocalServiceUtil.getByDTPLNoAndServiceCF(groupId, dossierTemplateNo,
config.getServiceConfigId());
}
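	/**
	 * Creates or updates a dossier submitted through the portal. When {@code dossierId == 0} a new
	 * entity is created: the submission note, dossier number and server number are derived from the
	 * matching {@link ProcessOption} and {@link ServiceProcess}, and the e-form dossier files declared
	 * by the template are pre-created (skipped for group 55301 as a hot fix). Otherwise the existing
	 * dossier's address, contact and postal fields are refreshed.
	 */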
@Indexable(type = IndexableType.REINDEX)
public Dossier postDossier(long groupId, long dossierId, String referenceUid, int counter, String serviceCode,
String serviceName, String govAgencyCode, String govAgencyName, String applicantName,
String applicantIdType, String applicantIdNo, Date applicantIdDate, String address, String cityCode,
String cityName, String districtCode, String districtName, String wardCode, String wardName,
String contactName, String contactTelNo, String contactEmail, String dossierTemplateNo, String password,
int viaPostal, String postalAddress, String postalCityCode, String postalCityName, String postalTelNo,
boolean online, boolean notification, String applicantNote, int originality, ServiceContext context) throws PortalException {
Date now = new Date();
long userId = context.getUserId();
User auditUser = userPersistence.fetchByPrimaryKey(userId);
validateInit(groupId, dossierId, referenceUid, serviceCode, govAgencyCode, address, cityCode, districtCode,
wardCode, contactName, contactTelNo, contactEmail, dossierTemplateNo);
Dossier dossier = null;
if (dossierId == 0) {
String dossierTemplateName = getDossierTemplateName(groupId, dossierTemplateNo);
dossierId = counterLocalService.increment(Dossier.class.getName());
String dossierNote = getDossierNote(serviceCode, govAgencyCode, dossierTemplateNo, groupId);
dossier = dossierPersistence.create(dossierId);
dossier.setCreateDate(now);
dossier.setModifiedDate(now);
dossier.setCompanyId(context.getCompanyId());
dossier.setGroupId(groupId);
dossier.setUserId(userId);
dossier.setUserName(auditUser.getFullName());
// Add extent fields
dossier.setReferenceUid(referenceUid);
dossier.setCounter(counter);
dossier.setServiceCode(serviceCode);
dossier.setServiceName(serviceName);
dossier.setGovAgencyCode(govAgencyCode);
dossier.setGovAgencyName(govAgencyName);
dossier.setDossierTemplateNo(dossierTemplateNo);
dossier.setDossierTemplateName(dossierTemplateName);
dossier.setApplicantName(applicantName);
dossier.setApplicantIdType(applicantIdType);
dossier.setApplicantIdNo(applicantIdNo);
dossier.setApplicantIdDate(applicantIdDate);
dossier.setPassword(password);
dossier.setOnline(online);
dossier.setDossierNote(dossierNote);
dossier.setAddress(address);
dossier.setCityCode(cityCode);
dossier.setCityName(cityName);
dossier.setDistrictCode(districtCode);
dossier.setDistrictName(districtName);
dossier.setWardCode(wardCode);
dossier.setWardName(wardName);
dossier.setContactName(contactName);
dossier.setContactEmail(contactEmail);
dossier.setContactTelNo(contactTelNo);
dossier.setViaPostal(viaPostal);
dossier.setPostalAddress(postalAddress);
dossier.setPostalCityCode(postalCityCode);
dossier.setPostalCityName(postalCityName);
dossier.setPostalTelNo(postalTelNo);
dossier.setApplicantNote(applicantNote);
ProcessOption option = getProcessOption(serviceCode, govAgencyCode, dossierTemplateNo, groupId);
// if (originality == DossierTerm.ORIGINALITY_MOTCUA) {
LinkedHashMap<String, Object> params = new LinkedHashMap<String, Object>();
params.put(DossierTerm.GOV_AGENCY_CODE, dossier.getGovAgencyCode());
params.put(DossierTerm.SERVICE_CODE, dossier.getServiceCode());
params.put(DossierTerm.DOSSIER_TEMPLATE_NO, dossier.getDossierTemplateNo());
params.put(DossierTerm.DOSSIER_STATUS, StringPool.BLANK);
ServiceProcess serviceProcess = null;
_log.debug("option: "+option);
if (option != null) {
				//Process submission note
_log.debug("option: "+option.getSubmissionNote());
dossier.setSubmissionNote(option.getSubmissionNote());
_log.debug("option: "+true);
long serviceProcessId = option.getServiceProcessId();
serviceProcess = serviceProcessPersistence.findByPrimaryKey(serviceProcessId);
String dossierRef = DossierNumberGenerator.generateDossierNumber(groupId, dossier.getCompanyId(),
dossierId, option.getProcessOptionId(), serviceProcess.getDossierNoPattern(), params);
dossier.setDossierNo(dossierRef.trim());
dossier.setServerNo(serviceProcess.getServerNo());
}
// dossier.setServerNo(getServerNo(groupId));
dossier.setOriginality(originality);
dossierPersistence.update(dossier);
// create DossierFile if it is eForm
List<DossierPart> dossierParts;
dossierParts = dossierPartPersistence.findByTP_NO(groupId, dossierTemplateNo);
for (DossierPart part : dossierParts) {
if (Validator.isNotNull(part.getFormScript()) && part.getPartType() != 2) {
String dossierFileUUID = PortalUUIDUtil.generate();
// TODO HotFix
if (groupId != 55301) {
dossierFileLocalService.addDossierFile(groupId, dossierId, dossierFileUUID, dossierTemplateNo,
part.getPartNo(), part.getFileTemplateNo(), part.getPartName(), StringPool.BLANK, 0l,
null, StringPool.BLANK, StringPool.TRUE, context);
}
}
}
} else {
dossier = dossierPersistence.fetchByPrimaryKey(dossierId);
dossier.setModifiedDate(now);
String dossierNote = getDossierNote(serviceCode, govAgencyCode, dossierTemplateNo, groupId);
dossier.setDossierNote(dossierNote);
if (Validator.isNotNull(address))
dossier.setAddress(address);
if (Validator.isNotNull(cityCode))
dossier.setCityCode(cityCode);
if (Validator.isNotNull(cityName))
dossier.setCityName(cityName);
if (Validator.isNotNull(districtCode))
dossier.setDistrictCode(districtCode);
if (Validator.isNotNull(districtName))
dossier.setDistrictName(districtName);
if (Validator.isNotNull(wardCode))
dossier.setWardCode(wardCode);
if (Validator.isNotNull(wardName))
dossier.setWardName(wardName);
if (Validator.isNotNull(contactName))
dossier.setContactName(contactName);
if (Validator.isNotNull(contactEmail))
dossier.setContactEmail(contactEmail);
if (Validator.isNotNull(contactTelNo))
dossier.setContactTelNo(contactTelNo);
dossier.setViaPostal(viaPostal);
if (viaPostal == 1) {
dossier.setPostalAddress(StringPool.BLANK);
dossier.setPostalCityCode(StringPool.BLANK);
dossier.setPostalTelNo(StringPool.BLANK);
} else if (viaPostal == 2) {
if (Validator.isNotNull(postalAddress))
dossier.setPostalAddress(postalAddress);
if (Validator.isNotNull(postalCityCode))
dossier.setPostalCityCode(postalCityCode);
if (Validator.isNotNull(postalTelNo))
dossier.setPostalTelNo(postalTelNo);
if (Validator.isNotNull(postalCityName))
dossier.setPostalCityName(postalCityName);
} else {
dossier.setPostalAddress(StringPool.BLANK);
dossier.setPostalCityCode(StringPool.BLANK);
dossier.setPostalTelNo(StringPool.BLANK);
}
// if (Validator.isNotNull(applicantNote))
dossier.setApplicantNote(applicantNote);
dossierPersistence.update(dossier);
}
return dossier;
}
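	/**
	 * Assigns process-related attributes to an existing dossier: notes, dossier number, folder,
	 * current dossier action and server number.
	 */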
@Indexable(type = IndexableType.REINDEX)
public Dossier assignToProcess(long dossierId, String dossierNote, String submissionNote, String briefNote,
String dossierNo, long folderId, long dossierActionId, String serverNo, ServiceContext context) {
Dossier dossier = dossierPersistence.fetchByPrimaryKey(dossierId);
dossier.setDossierNote(dossierNote);
dossier.setSubmissionNote(submissionNote);
dossier.setBriefNote(briefNote);
dossier.setDossierNo(dossierNo);
dossier.setFolderId(folderId);
dossier.setDossierActionId(dossierActionId);
dossier.setServerNo(serverNo);
dossierPersistence.update(dossier);
return dossier;
}
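	/**
	 * Creates or updates a dossier with the given applicant, address, contact and postal information.
	 * When {@code dossierId == 0} a new record is created with the supplied status, folder and action
	 * fields; otherwise only the applicant, contact and postal fields of the existing record are
	 * refreshed.
	 */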
@Indexable(type = IndexableType.REINDEX)
public Dossier updateDossier(long groupId, long dossierId, String referenceUid, int counter, String serviceCode,
String serviceName, String govAgencyCode, String govAgencyName, String applicantName,
String applicantIdType, String applicantIdNo, Date applicantIdDate, String address, String cityCode,
String cityName, String districtCode, String districtName, String wardCode, String wardName,
String contactName, String contactTelNo, String contactEmail, String dossierTemplateNo, String dossierNote,
String submissionNote, String applicantNote, String briefNote, String dossierNo, boolean submitting,
Date correctingDate, String dossierStatus, String dossierStatusText, String dossierSubStatus,
String dossierSubStatusText, long folderId, long dossierActionId, int viaPostal, String postalAddress,
String postalCityCode, String postalCityName, String postalTelNo, String password, boolean notification,
boolean online, String serverNo, ServiceContext context) throws PortalException {
Date now = new Date();
long userId = context.getUserId();
User auditUser = userPersistence.fetchByPrimaryKey(userId);
validateUpdateDossier(groupId, dossierId, referenceUid, serviceCode, govAgencyCode, address, cityCode,
districtCode, wardCode, contactName, contactTelNo, contactEmail, dossierTemplateNo, dossierNote,
submissionNote, dossierNo, submitting, dossierStatusText, dossierSubStatusText, postalAddress,
postalCityCode, postalTelNo, serverNo);
Dossier dossier = null;
if (dossierId == 0) {
dossierId = counterLocalService.increment(Dossier.class.getName());
dossier = dossierPersistence.create(dossierId);
dossier.setCreateDate(now);
dossier.setModifiedDate(now);
dossier.setCompanyId(context.getCompanyId());
dossier.setGroupId(groupId);
dossier.setUserId(userId);
if (Validator.isNotNull(auditUser))
dossier.setUserName(auditUser.getFullName());
// Add extent fields
dossier.setReferenceUid(referenceUid);
dossier.setCounter(counter);
dossier.setServiceCode(serviceCode);
dossier.setServiceName(serviceName);
dossier.setGovAgencyCode(govAgencyCode);
dossier.setGovAgencyName(govAgencyName);
dossier.setDossierTemplateNo(dossierTemplateNo);
DossierTemplate dt = dossierTemplatePersistence.findByG_DT_TPLNO(groupId, dossierTemplateNo);
if (Validator.isNotNull(dt)) {
dossier.setDossierTemplateName(dt.getTemplateName());
}
dossier.setApplicantName(applicantName);
dossier.setApplicantIdType(applicantIdType);
dossier.setApplicantIdNo(applicantIdNo);
dossier.setApplicantIdDate(applicantIdDate);
dossier.setDossierNo(dossierNo);
dossier.setApplicantNote(applicantNote);
dossier.setBriefNote(briefNote);
dossier.setDossierStatus(dossierStatus);
dossier.setDossierStatusText(dossierStatusText);
dossier.setDossierSubStatus(dossierSubStatus);
dossier.setDossierSubStatusText(dossierSubStatusText);
dossier.setAddress(address);
dossier.setCityCode(cityCode);
dossier.setCityName(cityName);
dossier.setDistrictCode(districtCode);
dossier.setDistrictName(districtName);
dossier.setWardCode(wardCode);
dossier.setWardName(wardName);
dossier.setContactName(contactName);
dossier.setContactEmail(contactEmail);
dossier.setFolderId(folderId);
dossier.setDossierActionId(dossierActionId);
dossier.setViaPostal(viaPostal);
dossier.setPostalAddress(postalAddress);
dossier.setPostalCityCode(postalCityCode);
dossier.setPostalCityName(postalCityName);
dossier.setPostalTelNo(postalTelNo);
dossier.setPassword(password);
dossier.setNotification(notification);
dossier.setOnline(online);
dossier.setServerNo(serverNo);
} else {
dossier = dossierPersistence.fetchByPrimaryKey(dossierId);
dossier.setModifiedDate(now);
dossier.setApplicantName(applicantName);
dossier.setApplicantIdType(applicantIdType);
dossier.setApplicantIdNo(applicantIdNo);
dossier.setApplicantIdDate(applicantIdDate);
dossier.setAddress(address);
dossier.setCityCode(cityCode);
dossier.setCityName(cityName);
dossier.setDistrictCode(districtCode);
dossier.setDistrictName(districtName);
dossier.setWardCode(wardCode);
dossier.setWardName(wardName);
dossier.setContactName(contactName);
dossier.setContactEmail(contactEmail);
dossier.setViaPostal(viaPostal);
dossier.setPostalAddress(postalAddress);
dossier.setPostalCityCode(postalCityCode);
dossier.setPostalCityName(postalCityName);
dossier.setPostalTelNo(postalTelNo);
dossier.setApplicantNote(applicantNote);
dossier.setNotification(notification);
}
dossierPersistence.update(dossier);
return dossier;
}
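	/**
	 * Same as the {@code updateDossier} overload above, but additionally stores the submit date on
	 * newly created dossiers.
	 */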
@Indexable(type = IndexableType.REINDEX)
public Dossier updateDossier(long groupId, long dossierId, String referenceUid, int counter, String serviceCode,
String serviceName, String govAgencyCode, String govAgencyName, String applicantName,
String applicantIdType, String applicantIdNo, Date applicantIdDate, String address, String cityCode,
String cityName, String districtCode, String districtName, String wardCode, String wardName,
String contactName, String contactTelNo, String contactEmail, String dossierTemplateNo, String dossierNote,
String submissionNote, String applicantNote, String briefNote, String dossierNo, boolean submitting,
Date correctingDate, String dossierStatus, String dossierStatusText, String dossierSubStatus,
String dossierSubStatusText, long folderId, long dossierActionId, int viaPostal, String postalAddress,
String postalCityCode, String postalCityName, String postalTelNo, String password, boolean notification,
boolean online, String serverNo, Date submitDate, ServiceContext context) throws PortalException {
Date now = new Date();
long userId = context.getUserId();
User auditUser = userPersistence.fetchByPrimaryKey(userId);
validateUpdateDossier(groupId, dossierId, referenceUid, serviceCode, govAgencyCode, address, cityCode,
districtCode, wardCode, contactName, contactTelNo, contactEmail, dossierTemplateNo, dossierNote,
submissionNote, dossierNo, submitting, dossierStatusText, dossierSubStatusText, postalAddress,
postalCityCode, postalTelNo, serverNo);
Dossier dossier = null;
if (dossierId == 0) {
dossierId = counterLocalService.increment(Dossier.class.getName());
dossier = dossierPersistence.create(dossierId);
dossier.setCreateDate(now);
dossier.setModifiedDate(now);
dossier.setCompanyId(context.getCompanyId());
dossier.setGroupId(groupId);
dossier.setUserId(userId);
if (Validator.isNotNull(auditUser))
dossier.setUserName(auditUser.getFullName());
// Add extent fields
dossier.setReferenceUid(referenceUid);
dossier.setCounter(counter);
dossier.setServiceCode(serviceCode);
dossier.setServiceName(serviceName);
dossier.setGovAgencyCode(govAgencyCode);
dossier.setGovAgencyName(govAgencyName);
dossier.setDossierTemplateNo(dossierTemplateNo);
DossierTemplate dt = dossierTemplatePersistence.findByG_DT_TPLNO(groupId, dossierTemplateNo);
if (Validator.isNotNull(dt)) {
dossier.setDossierTemplateName(dt.getTemplateName());
}
dossier.setApplicantName(applicantName);
dossier.setApplicantIdType(applicantIdType);
dossier.setApplicantIdNo(applicantIdNo);
dossier.setApplicantIdDate(applicantIdDate);
dossier.setDossierNo(dossierNo);
dossier.setApplicantNote(applicantNote);
dossier.setBriefNote(briefNote);
dossier.setDossierStatus(dossierStatus);
dossier.setDossierStatusText(dossierStatusText);
dossier.setDossierSubStatus(dossierSubStatus);
dossier.setDossierSubStatusText(dossierSubStatusText);
dossier.setAddress(address);
dossier.setCityCode(cityCode);
dossier.setCityName(cityName);
dossier.setDistrictCode(districtCode);
dossier.setDistrictName(districtName);
dossier.setWardCode(wardCode);
dossier.setWardName(wardName);
dossier.setContactName(contactName);
dossier.setContactEmail(contactEmail);
dossier.setFolderId(folderId);
dossier.setDossierActionId(dossierActionId);
dossier.setViaPostal(viaPostal);
dossier.setPostalAddress(postalAddress);
dossier.setPostalCityCode(postalCityCode);
dossier.setPostalCityName(postalCityName);
dossier.setPostalTelNo(postalTelNo);
dossier.setPassword(password);
dossier.setNotification(notification);
dossier.setOnline(online);
dossier.setServerNo(serverNo);
dossier.setSubmitDate(submitDate);
} else {
dossier = dossierPersistence.fetchByPrimaryKey(dossierId);
dossier.setModifiedDate(now);
dossier.setApplicantName(applicantName);
dossier.setApplicantIdType(applicantIdType);
dossier.setApplicantIdNo(applicantIdNo);
dossier.setApplicantIdDate(applicantIdDate);
dossier.setAddress(address);
dossier.setCityCode(cityCode);
dossier.setCityName(cityName);
dossier.setDistrictCode(districtCode);
dossier.setDistrictName(districtName);
dossier.setWardCode(wardCode);
dossier.setWardName(wardName);
dossier.setContactName(contactName);
dossier.setContactEmail(contactEmail);
dossier.setViaPostal(viaPostal);
dossier.setPostalAddress(postalAddress);
dossier.setPostalCityCode(postalCityCode);
dossier.setPostalCityName(postalCityName);
dossier.setPostalTelNo(postalTelNo);
dossier.setApplicantNote(applicantNote);
dossier.setNotification(notification);
}
dossierPersistence.update(dossier);
return dossier;
}
private static final String LOCK_ALL = "LOCK ALL";
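	/**
	 * Marks a dossier as submitting: sets the submit date when the current status is NEW and locks the
	 * dossier ({@code LOCK ALL}) before persisting. The dossier is resolved by primary key when
	 * {@code id != 0}, otherwise by reference UID.
	 *
	 * <p>Illustrative usage (identifiers are hypothetical and assume the method is exposed on the
	 * generated service interface):</p>
	 * <pre>{@code
	 * Dossier submitted = dossierLocalService.submitting(groupId, dossierId, null, serviceContext);
	 * }</pre>
	 */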
@Indexable(type = IndexableType.REINDEX)
public Dossier submitting(long groupId, long id, String refId, ServiceContext context) throws PortalException {
validateSubmitting(groupId, id, refId);
Date now = new Date();
Dossier dossier = null;
if (id != 0) {
dossier = dossierPersistence.fetchByPrimaryKey(id);
} else {
dossier = dossierPersistence.fetchByG_REF(groupId, refId);
}
dossier.setModifiedDate(now);
dossier.setSubmitting(true);
/*
* if (Validator.isNull(dossier.getSubmitDate())) {
* dossier.setSubmitDate(now); }
*/
if (dossier.getDossierStatus().contentEquals(DossierStatusConstants.NEW)) {
dossier.setSubmitDate(now);
}
// long dActionId = 0;
// String stepCode = StringPool.BLANK;
// long serviceProcessId = 0;
// String lockState = StringPool.BLANK;
// if (dossier != null) {
// dActionId = dossier.getDossierActionId();
// }
// if (dActionId > 0) {
// DossierAction dAction =
// DossierActionLocalServiceUtil.fetchDossierAction(dActionId);
// if (dAction != null) {
// stepCode = dAction.getStepCode();
// serviceProcessId = dAction.getServiceProcessId();
// }
// }
// if (Validator.isNotNull(stepCode) && serviceProcessId > 0) {
// ProcessStep proStep =
// ProcessStepLocalServiceUtil.fetchBySC_GID(stepCode, groupId,
// serviceProcessId);
// if (proStep != null) {
// lockState = proStep.getLockState();
// }
// }
dossier.setLockState(LOCK_ALL);
dossierPersistence.update(dossier);
return dossier;
}
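	/**
	 * Resets the submitting flag of a dossier and clears the {@code isNew} flag on all of its dossier
	 * files.
	 */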
@Indexable(type = IndexableType.REINDEX)
public Dossier reset(long groupId, long id, String refId, ServiceContext context) throws PortalException {
validateReset(groupId, id, refId);
Date now = new Date();
Dossier dossier = null;
if (id != 0) {
dossier = dossierPersistence.fetchByPrimaryKey(id);
} else {
dossier = dossierPersistence.fetchByG_REF(groupId, refId);
}
dossier.setModifiedDate(now);
dossier.setSubmitting(false);
// dossier.setSubmitDate(null);
dossierPersistence.update(dossier);
// TODO add reset for DossierFile and PaymentFile (isNew => false)
// TODO add remove DossierFile out system
List<DossierFile> lsDF = dossierFileLocalService.getDossierFilesByDossierId(id);
for (DossierFile df : lsDF) {
if (df.getIsNew()) {
df.setIsNew(false);
dossierFileLocalService.updateDossierFile(df);
}
}
// List<PaymentFile> lsPF = paymentFileLocalService.getByDossierId(id);
// for (PaymentFile pf : lsPF) {
// if (pf.getIsNew()) {
// pf.setIsNew(false);
//
// paymentFileLocalService.updatePaymentFile(pf);
// }
// }
return dossier;
}
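	/**
	 * Updates the status, sub-status, lock state and step instruction of a dossier, stamping the
	 * release date when the status is RELEASING and the finish date when it is DONE.
	 *
	 * <p>Illustrative usage (values are hypothetical and assume the method is exposed on the generated
	 * service interface):</p>
	 * <pre>{@code
	 * dossierLocalService.updateStatus(groupId, dossierId, null, DossierStatusConstants.RELEASING,
	 * 	"Releasing", StringPool.BLANK, StringPool.BLANK, StringPool.BLANK,
	 * 	"Return the result to the applicant", serviceContext);
	 * }</pre>
	 */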
// @Indexable(type = IndexableType.REINDEX)
public Dossier updateStatus(long groupId, long id, String refId, String status, String statusText, String subStatus,
String subStatusText, String lockState, String stepInstruction, ServiceContext context)
throws PortalException {
validateUpdateStatus(groupId, id, refId, status, statusText, subStatus, subStatusText);
Date now = new Date();
Dossier dossier = null;
if (id != 0) {
dossier = dossierPersistence.findByPrimaryKey(id);
} else {
dossier = dossierPersistence.fetchByG_REF(groupId, refId);
}
dossier.setModifiedDate(now);
dossier.setDossierStatus(status);
dossier.setDossierStatusText(statusText);
dossier.setDossierSubStatus(subStatus);
dossier.setDossierSubStatusText(subStatusText);
dossier.setLockState(lockState);
dossier.setDossierNote(stepInstruction);
/*
* if (status.equalsIgnoreCase(DossierStatusConstants.RECEIVING)) {
* dossier.setReceiveDate(now); }
*/
if (status.equalsIgnoreCase(DossierStatusConstants.RELEASING)) {
dossier.setReleaseDate(now);
}
if (status.equalsIgnoreCase(DossierStatusConstants.DONE)) {
dossier.setFinishDate(now);
}
dossierPersistence.update(dossier);
return dossier;
}
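	/**
	 * Updates the process date of a dossier resolved by primary key or by reference UID. The
	 * updateXxxDate methods that follow (submitting, receiving, due, release, cancelling, endorsement,
	 * finish, correcting) follow the same pattern for their respective milestone dates.
	 */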
@Indexable(type = IndexableType.REINDEX)
public Dossier updateProcessDate(long groupId, long id, String refId, Date date, ServiceContext context)
throws PortalException {
Date now = new Date();
Dossier dossier = null;
if (id != 0) {
dossier = dossierPersistence.fetchByPrimaryKey(id);
} else {
dossier = dossierPersistence.fetchByG_REF(groupId, refId);
}
dossier.setModifiedDate(now);
dossier.setProcessDate(date);
return dossierPersistence.update(dossier);
}
@Indexable(type = IndexableType.REINDEX)
public Dossier updateSubmittingDate(long groupId, long id, String refId, Date date, ServiceContext context)
throws PortalException {
validateSubmittingDate(groupId, id, refId, date);
Date now = new Date();
Dossier dossier = null;
if (id != 0) {
dossier = dossierPersistence.fetchByPrimaryKey(id);
} else {
dossier = dossierPersistence.fetchByG_REF(groupId, refId);
}
dossier.setModifiedDate(now);
dossier.setSubmitDate(date);
return dossierPersistence.update(dossier);
}
@Indexable(type = IndexableType.REINDEX)
public Dossier updateReceivingDate(long groupId, long id, String refId, Date date, ServiceContext context)
throws PortalException {
validateReceivingDate(groupId, id, refId, date);
Date now = new Date();
Dossier dossier = null;
if (id != 0) {
dossier = dossierPersistence.fetchByPrimaryKey(id);
} else {
dossier = dossierPersistence.fetchByG_REF(groupId, refId);
}
dossier.setModifiedDate(now);
dossier.setReceiveDate(date);
return dossierPersistence.update(dossier);
}
@Indexable(type = IndexableType.REINDEX)
public Dossier updateDueDate(long groupId, long id, String refId, Date date, ServiceContext context)
throws PortalException {
validateDueDate(groupId, id, refId, date);
Date now = new Date();
Dossier dossier = null;
if (id != 0) {
dossier = dossierPersistence.fetchByPrimaryKey(id);
} else {
dossier = dossierPersistence.fetchByG_REF(groupId, refId);
}
dossier.setModifiedDate(now);
dossier.setDueDate(date);
return dossierPersistence.update(dossier);
}
@Indexable(type = IndexableType.REINDEX)
public Dossier updateReleaseDate(long groupId, long id, String refId, Date date, ServiceContext context)
throws PortalException {
validateReleaseDate(groupId, id, refId, date);
Date now = new Date();
Dossier dossier = null;
if (id != 0) {
dossier = dossierPersistence.fetchByPrimaryKey(id);
} else {
dossier = dossierPersistence.fetchByG_REF(groupId, refId);
}
dossier.setModifiedDate(now);
dossier.setReleaseDate(date);
return dossierPersistence.update(dossier);
}
@Indexable(type = IndexableType.REINDEX)
public Dossier updateCancellingDate(long groupId, long id, String refId, Date date, ServiceContext context)
throws PortalException {
validateCancellingDate(groupId, id, refId, date);
Date now = new Date();
Dossier dossier = null;
if (id != 0) {
dossier = dossierPersistence.fetchByPrimaryKey(id);
} else {
dossier = dossierPersistence.fetchByG_REF(groupId, refId);
}
dossier.setModifiedDate(now);
dossier.setCancellingDate(date);
dossier.setSubmitting(true);
return dossierPersistence.update(dossier);
}
@Indexable(type = IndexableType.REINDEX)
public Dossier updateEndosementDate(long groupId, long id, String refId, Date date, ServiceContext context)
throws PortalException {
validateCancellingDate(groupId, id, refId, date);
Date now = new Date();
Dossier dossier = null;
if (id != 0) {
dossier = dossierPersistence.fetchByPrimaryKey(id);
} else {
dossier = dossierPersistence.fetchByG_REF(groupId, refId);
}
dossier.setModifiedDate(now);
// dossier.setCancellingDate(date);
dossier.setEndorsementDate(date);
dossier.setSubmitting(true);
return dossierPersistence.update(dossier);
}
@Indexable(type = IndexableType.REINDEX)
public Dossier updateFinishDate(long groupId, long id, String refId, Date date, ServiceContext context)
throws PortalException {
validateFinishDate(groupId, id, refId, date);
Date now = new Date();
Dossier dossier = null;
if (id != 0) {
dossier = dossierPersistence.fetchByPrimaryKey(id);
} else {
dossier = dossierPersistence.fetchByG_REF(groupId, refId);
}
dossier.setModifiedDate(now);
dossier.setFinishDate(date);
return dossierPersistence.update(dossier);
}
@Indexable(type = IndexableType.REINDEX)
public Dossier updateCorrectingDate(long groupId, long id, String refId, Date date, ServiceContext context)
throws PortalException {
validateCorrectingDate(groupId, id, refId, date);
Date now = new Date();
Dossier dossier = null;
if (id != 0) {
dossier = dossierPersistence.fetchByPrimaryKey(id);
} else {
dossier = dossierPersistence.fetchByG_REF(groupId, refId);
}
dossier.setModifiedDate(now);
dossier.setCorrecttingDate(date);
dossier.setSubmitting(true);
return dossierPersistence.update(dossier);
}
@Indexable(type = IndexableType.REINDEX)
public Dossier updateDossierAction(long groupId, long id, String refId, long dossierActionId,
ServiceContext context) throws PortalException {
validateDossierAction(groupId, id, refId, dossierActionId);
Date now = new Date();
Dossier dossier = null;
if (id != 0) {
dossier = dossierPersistence.fetchByPrimaryKey(id);
} else {
dossier = dossierPersistence.fetchByG_REF(groupId, refId);
}
dossier.setModifiedDate(now);
dossier.setDossierActionId(dossierActionId);
dossierPersistence.update(dossier);
return dossier;
}
//sondt start
@Indexable(type = IndexableType.REINDEX)
public Dossier updateViaPostal(long groupId, long id, String refId, int viaPostal,
ServiceContext context) throws PortalException {
validateViaPostal(groupId, id, refId, viaPostal);
Date now = new Date();
Dossier dossier = null;
if (id != 0) {
dossier = dossierPersistence.fetchByPrimaryKey(id);
} else {
dossier = dossierPersistence.fetchByG_REF(groupId, refId);
}
dossier.setModifiedDate(now);
dossier.setViaPostal(viaPostal);
dossierPersistence.update(dossier);
return dossier;
}
//sondt end
public Dossier getByRef(long groupId, String refId) {
return dossierPersistence.fetchByG_REF(groupId, refId);
}
@Indexable(type = IndexableType.DELETE)
public Dossier removeDossier(long groupId, long dossierId, String refId) throws PortalException {
// TODO remove dossierLog
// TODO remove dossierFile
// TODO remove dossierAction
// TODO remove PaymentFile
validateRemoveDossier(groupId, dossierId, refId);
Dossier dossier = null;
if (dossierId != 0) {
dossier = dossierPersistence.fetchByPrimaryKey(dossierId);
} else {
dossier = dossierPersistence.findByG_REF(groupId, refId);
}
return dossierPersistence.remove(dossier);
}
@Indexable(type = IndexableType.REINDEX)
public Dossier updateDossierBriefNote(long dossierId, String dossierBriefNote) throws PortalException {
Dossier dossier = dossierPersistence.findByPrimaryKey(dossierId);
dossier.setBriefNote(dossierBriefNote);
return dossierPersistence.update(dossier);
}
public int countByUserId(long userId, long groupId) {
return dossierPersistence.countByG_UID(groupId, userId);
}
private void validateViaPostal(long groupId, long id, String refId, int viaPostal)
throws PortalException {
// TODO add validate for submitting
}
private void validateRemoveDossier(long groupId, long dossierId, String refId) throws PortalException {
// TODO add validate for remove Dossier
}
private void validateDossierAction(long groupId, long id, String refId, long dossierActionId)
throws PortalException {
// TODO add validate for submitting
}
private void validateSubmittingDate(long groupId, long id, String refId, Date date) throws PortalException {
// TODO add validate
}
private void validateReceivingDate(long groupId, long id, String refId, Date date) throws PortalException {
// TODO add validate
}
private void validateReleaseDate(long groupId, long id, String refId, Date date) throws PortalException {
// TODO add validate
}
private void validateFinishDate(long groupId, long id, String refId, Date date) throws PortalException {
// TODO add validate
}
private void validateCancellingDate(long groupId, long id, String refId, Date date) throws PortalException {
// TODO add validate
}
private void validateDueDate(long groupId, long id, String refId, Date date) throws PortalException {
// TODO add validate
}
private void validateCorrectingDate(long groupId, long id, String refId, Date date) throws PortalException {
// TODO add validate
}
private void validateUpdateStatus(long groupId, long id, String refId, String status, String statusText,
String subStatus, String subStatusText) throws PortalException {
// TODO add validate
}
private void validateSubmitting(long groupId, long id, String refId) throws PortalException {
// TODO add validate for submitting
// Check dossier status
// Check DossierFile, PaymentFile
}
private void validateReset(long groupId, long id, String refId) throws PortalException {
// TODO add validate for submitting
}
private void validateInit(long groupId, long dossierId, String referenceUid, String serviceCode,
String govAgencyCode, String address, String cityCode, String districtCode, String wardCode,
String contactName, String contactTelNo, String contactEmail, String dossierTemplateNo)
throws PortalException {
}
private void validateUpdateDossier(long groupId, long dossierId, String referenceUid, String serviceCode,
String govAgencyCode, String address, String cityCode, String districtCode, String wardCode,
String contactName, String contactTelNo, String contactEmail, String dossierTemplateNo, String dossierNote,
String submissionNote, String dossierNo, boolean submitting, String dossierStatus, String dossierSubStatus,
String postalAddress, String postalCityCode, String postalTelNo, String serverNo) throws PortalException {
}
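/**
 * Looks up the indexed search {@link Document} of a dossier by its primary key within the
 * given company, returning the first hit or <code>null</code> if the dossier is not indexed.
 */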
public Document getDossierById(long dossierId, long companyId) throws PortalException {
// Document document = null;
Indexer<Dossier> indexer = IndexerRegistryUtil.nullSafeGetIndexer(Dossier.class);
SearchContext searchContext = new SearchContext();
searchContext.setCompanyId(companyId);
// SearchContext searchContext =
// SearchContextFactory.getInstance(request);
searchContext.setEnd(QueryUtil.ALL_POS);
searchContext.setKeywords(StringPool.BLANK);
searchContext.setStart(QueryUtil.ALL_POS);
// searchContext.set
BooleanQuery booleanQuery = null;
booleanQuery = indexer.getFullQuery(searchContext);
if (dossierId != 0) {
MultiMatchQuery query = new MultiMatchQuery(String.valueOf(dossierId));
query.addField(DossierTerm.DOSSIER_ID);
booleanQuery.add(query, BooleanClauseOccur.MUST);
}
booleanQuery.addRequiredTerm(Field.ENTRY_CLASS_NAME, CLASS_NAME);
Hits hits = IndexSearcherHelperUtil.search(searchContext, booleanQuery);
List<Document> documents = hits.toList();
if (documents.size() > 0) {
return documents.get(0);
} else {
return null;
}
}
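/**
 * Searches the Dossier index using the filter values collected in <code>params</code>
 * (keyword, group, status, agency, service, date ranges, ...) and returns a page of hits.
 *
 * <p>Illustrative usage (a minimal sketch; the caller is assumed to prepare the
 * SearchContext and the Sort array):</p>
 * <pre>
 * LinkedHashMap params = new LinkedHashMap();
 * params.put(Field.GROUP_ID, String.valueOf(groupId));
 * params.put(DossierTerm.STATUS, "done");
 * Hits hits = searchLucene(params, sorts, 0, 20, searchContext);
 * </pre>
 */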
@SuppressWarnings("deprecation")
public Hits searchLucene(LinkedHashMap<String, Object> params, Sort[] sorts, int start, int end,
SearchContext searchContext) throws ParseException, SearchException {
String keywords = (String) params.get(Field.KEYWORD_SEARCH);
String groupId = (String) params.get(Field.GROUP_ID);
String secetKey = GetterUtil.getString(params.get("secetKey"));
String status = GetterUtil.getString(params.get(DossierTerm.STATUS));
String subStatus = GetterUtil.getString(params.get(DossierTerm.SUBSTATUS));
String agency = GetterUtil.getString(params.get(DossierTerm.AGENCY));
String service = GetterUtil.getString(params.get(DossierTerm.SERVICE));
String template = GetterUtil.getString(params.get(DossierTerm.TEMPLATE));
String step = GetterUtil.getString(params.get(DossierTerm.STEP));
String state = GetterUtil.getString(params.get(DossierTerm.STATE));
String follow = GetterUtil.getString(params.get(DossierTerm.FOLLOW));
String dossierNo = GetterUtil.getString(params.get(DossierTerm.DOSSIER_NO));
// Get by certificate number
String certificateNo = (String) params.get(DossierTerm.DOSSIER_ID_CTN);
String top = GetterUtil.getString(params.get(DossierTerm.TOP));
String owner = GetterUtil.getString(params.get(DossierTerm.OWNER));
String submitting = GetterUtil.getString(params.get(DossierTerm.SUBMITTING));
int year = GetterUtil.getInteger(params.get(DossierTerm.YEAR));
int month = GetterUtil.getInteger(params.get(DossierTerm.MONTH));
int day = GetterUtil.getInteger(params.get(DossierTerm.DAY));
long userId = GetterUtil.getLong(params.get(DossierTerm.USER_ID));
String strDossierActionId = GetterUtil.getString(params.get(DossierTerm.DOSSIER_ACTION_ID));
String fromReceiveDate = GetterUtil.getString(params.get(DossierTerm.FROM_RECEIVEDATE));
String toReceiveDate = GetterUtil.getString(params.get(DossierTerm.TO_RECEIVEDATE));
String certNo = GetterUtil.getString(params.get(DossierTerm.CERT_NO));
String fromCertDate = GetterUtil.getString(params.get(DossierTerm.FROM_CERT_DATE));
String toCertDate = GetterUtil.getString(params.get(DossierTerm.TO_CERT_DATE));
String fromSubmitDate = GetterUtil.getString(params.get(DossierTerm.FROM_SUBMIT_DATE));
String toSubmitDate = GetterUtil.getString(params.get(DossierTerm.TO_SUBMIT_DATE));
String notState = GetterUtil.getString(params.get(DossierTerm.NOT_STATE));
Long statusReg = GetterUtil.getLong(params.get(DossierTerm.STATUS_REG));
Long notStatusReg = GetterUtil.getLong(params.get(DossierTerm.NOT_STATUS_REG));
String online = GetterUtil.getString(params.get(DossierTerm.ONLINE));
String originality = GetterUtil.getString(params.get(DossierTerm.ORIGINALLITY));
String assigned = GetterUtil.getString(params.get(DossierTerm.ASSIGNED));
//LamTV_ADD
String statusStep = GetterUtil.getString(params.get(DossierTerm.DOSSIER_STATUS_STEP));
String subStatusStep = GetterUtil.getString(params.get(DossierTerm.DOSSIER_SUBSTATUS_STEP));
String permission = GetterUtil.getString(params.get(DossierTerm.MAPPING_PERMISSION));
String domain = GetterUtil.getString(params.get(DossierTerm.DOMAIN_CODE));
String domainName = GetterUtil.getString(params.get(DossierTerm.DOMAIN_NAME));
String applicantName = GetterUtil.getString(params.get(DossierTerm.APPLICANT_NAME));
String applicantIdNo = GetterUtil.getString(params.get(DossierTerm.APPLICANT_ID_NO));
String serviceName = GetterUtil.getString(params.get(DossierTerm.SERVICE_NAME));
String emailLogin = GetterUtil.getString(params.get(DossierTerm.EMAIL_USER_LOGIN));
String fromReleaseDate = GetterUtil.getString(params.get(DossierTerm.FROM_RELEASE_DATE));
String toReleaseDate = GetterUtil.getString(params.get(DossierTerm.TO_RELEASE_DATE));
//
String fromFinishDate = GetterUtil.getString(params.get(DossierTerm.FROM_FINISH_DATE));
String toFinishDate = GetterUtil.getString(params.get(DossierTerm.TO_FINISH_DATE));
String fromReceiveNotDoneDate = GetterUtil.getString(params.get(DossierTerm.FROM_RECEIVE_NOTDONE_DATE));
String toReceiveNotDoneDate = GetterUtil.getString(params.get(DossierTerm.TO_RECEIVE_NOTDONE_DATE));
String paymentStatus = GetterUtil.getString(params.get(PaymentFileTerm.PAYMENT_STATUS));
String origin = GetterUtil.getString(params.get(DossierTerm.ORIGIN));
String fromStatisticDate = GetterUtil.getString(params.get(DossierTerm.FROM_STATISTIC_DATE));
String toStatisticDate = GetterUtil.getString(params.get(DossierTerm.TO_STATISTIC_DATE));
Integer originDossierId = (params.get(DossierTerm.ORIGIN_DOSSIER_ID) != null)
? GetterUtil.getInteger(params.get(DossierTerm.ORIGIN_DOSSIER_ID))
: null;
String time = GetterUtil.getString(params.get(DossierTerm.TIME));
String register = GetterUtil.getString(params.get(DossierTerm.REGISTER));
Long groupDossierId = GetterUtil.getLong(params.get(DossierTerm.GROUP_DOSSIER_ID));
String applicantFollowIdNo = GetterUtil.getString(params.get(DossierTerm.APPLICANT_FOLLOW_ID_NO));
String assignedUserId = GetterUtil.getString(params.get(DossierTerm.ASSIGNED_USER_ID));
Indexer<Dossier> indexer = IndexerRegistryUtil.nullSafeGetIndexer(Dossier.class);
searchContext.addFullQueryEntryClassName(CLASS_NAME);
searchContext.setEntryClassNames(new String[] { CLASS_NAME });
searchContext.setAttribute("paginationType", "regular");
searchContext.setLike(true);
searchContext.setStart(start);
searchContext.setEnd(end);
searchContext.setAndSearch(true);
searchContext.setSorts(sorts);
BooleanQuery booleanQuery = null;
if (Validator.isNotNull(keywords)) {
booleanQuery = BooleanQueryFactoryUtil.create(searchContext);
} else {
booleanQuery = indexer.getFullQuery(searchContext);
}
//Search follow params default
BooleanQuery booleanCommon = processSearchCommon(keywords, secetKey, groupId, owner, userId, follow, step,
template, top, emailLogin, originality, applicantFollowIdNo, booleanQuery);
// Search follow param input
BooleanQuery booleanInput = processSearchInput(status, subStatus, state, online, submitting, agency, service,
userId, top, year, month, dossierNo, certificateNo, strDossierActionId, fromReceiveDate, toReceiveDate,
certNo, fromCertDate, toCertDate, fromSubmitDate, toSubmitDate, notState, statusReg, notStatusReg,
follow, originality, assigned, statusStep, subStatusStep, permission, domain, domainName, applicantName,
applicantIdNo, serviceName, fromReleaseDate, toReleaseDate, fromFinishDate, toFinishDate,
fromReceiveNotDoneDate, toReceiveNotDoneDate, paymentStatus, origin, fromStatisticDate, toStatisticDate,
originDossierId, time, register, day, groupDossierId, assignedUserId, booleanCommon);
booleanQuery.addRequiredTerm(Field.ENTRY_CLASS_NAME, CLASS_NAME);
return IndexSearcherHelperUtil.search(searchContext, booleanInput);
}
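/**
 * Counts the dossiers matching the same filter parameters accepted by
 * {@link #searchLucene(LinkedHashMap, Sort[], int, int, SearchContext)}.
 */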
@SuppressWarnings("deprecation")
public long countLucene(LinkedHashMap<String, Object> params, SearchContext searchContext)
throws ParseException, SearchException {
String keywords = (String) params.get(Field.KEYWORD_SEARCH);
String groupId = (String) params.get(Field.GROUP_ID);
String secetKey = GetterUtil.getString(params.get("secetKey"));
String status = GetterUtil.getString(params.get(DossierTerm.STATUS));
String subStatus = GetterUtil.getString(params.get(DossierTerm.SUBSTATUS));
String agency = GetterUtil.getString(params.get(DossierTerm.AGENCY));
String service = GetterUtil.getString(params.get(DossierTerm.SERVICE));
String template = GetterUtil.getString(params.get(DossierTerm.TEMPLATE));
String state = GetterUtil.getString(params.get(DossierTerm.STATE));
String step = GetterUtil.getString(params.get(DossierTerm.STEP));
String dossierNo = GetterUtil.getString(params.get(DossierTerm.DOSSIER_NO));
// Get by certificate number
String certificateNo = (String) params.get(DossierTerm.DOSSIER_ID_CTN);
String online = GetterUtil.getString(params.get(DossierTerm.ONLINE));
String follow = GetterUtil.getString(params.get(DossierTerm.FOLLOW));
String top = GetterUtil.getString(params.get(DossierTerm.TOP));
String owner = GetterUtil.getString(params.get(DossierTerm.OWNER));
String submitting = GetterUtil.getString(params.get(DossierTerm.SUBMITTING));
long userId = GetterUtil.getLong(params.get(DossierTerm.USER_ID));
int year = GetterUtil.getInteger(params.get(DossierTerm.YEAR));
int month = GetterUtil.getInteger(params.get(DossierTerm.MONTH));
int day = GetterUtil.getInteger(params.get(DossierTerm.DAY));
String strDossierActionId = GetterUtil.getString(params.get(DossierTerm.DOSSIER_ACTION_ID));
String fromReceiveDate = GetterUtil.getString(params.get(DossierTerm.FROM_RECEIVEDATE));
String toReceiveDate = GetterUtil.getString(params.get(DossierTerm.TO_RECEIVEDATE));
String certNo = GetterUtil.getString(params.get(DossierTerm.CERT_NO));
String fromCertDate = GetterUtil.getString(params.get(DossierTerm.FROM_CERT_DATE));
String toCertDate = GetterUtil.getString(params.get(DossierTerm.TO_CERT_DATE));
String fromSubmitDate = GetterUtil.getString(params.get(DossierTerm.FROM_SUBMIT_DATE));
String toSubmitDate = GetterUtil.getString(params.get(DossierTerm.TO_SUBMIT_DATE));
String notState = GetterUtil.getString(params.get(DossierTerm.NOT_STATE));
Long statusReg = GetterUtil.getLong(params.get(DossierTerm.STATUS_REG));
Long notStatusReg = GetterUtil.getLong(params.get(DossierTerm.NOT_STATUS_REG));
String originality = GetterUtil.getString(params.get(DossierTerm.ORIGINALLITY));
String assigned = GetterUtil.getString(params.get(DossierTerm.ASSIGNED));
//LamTV_ADD
String statusStep = GetterUtil.getString(params.get(DossierTerm.DOSSIER_STATUS_STEP));
String subStatusStep = GetterUtil.getString(params.get(DossierTerm.DOSSIER_SUBSTATUS_STEP));
String permission = GetterUtil.getString(params.get(DossierTerm.MAPPING_PERMISSION));
String domain = GetterUtil.getString(params.get(DossierTerm.DOMAIN_CODE));
String domainName = GetterUtil.getString(params.get(DossierTerm.DOMAIN_NAME));
String applicantName = GetterUtil.getString(params.get(DossierTerm.APPLICANT_NAME));
String applicantIdNo = GetterUtil.getString(params.get(DossierTerm.APPLICANT_ID_NO));
String serviceName = GetterUtil.getString(params.get(DossierTerm.SERVICE_NAME));
String emailLogin = GetterUtil.getString(params.get(DossierTerm.EMAIL_USER_LOGIN));
String fromReleaseDate = GetterUtil.getString(params.get(DossierTerm.FROM_RELEASE_DATE));
String toReleaseDate = GetterUtil.getString(params.get(DossierTerm.TO_RELEASE_DATE));
//
String fromFinishDate = GetterUtil.getString(params.get(DossierTerm.FROM_FINISH_DATE));
String toFinishDate = GetterUtil.getString(params.get(DossierTerm.TO_FINISH_DATE));
String fromReceiveNotDoneDate = GetterUtil.getString(params.get(DossierTerm.FROM_RECEIVE_NOTDONE_DATE));
String toReceiveNotDoneDate = GetterUtil.getString(params.get(DossierTerm.TO_RECEIVE_NOTDONE_DATE));
String paymentStatus = GetterUtil.getString(params.get(PaymentFileTerm.PAYMENT_STATUS));
//
String fromStatisticDate = GetterUtil.getString(params.get(DossierTerm.FROM_STATISTIC_DATE));
String toStatisticDate = GetterUtil.getString(params.get(DossierTerm.TO_STATISTIC_DATE));
String origin = GetterUtil.getString(params.get(DossierTerm.ORIGIN));
Integer originDossierId = (params.get(DossierTerm.ORIGIN_DOSSIER_ID) != null
? GetterUtil.getInteger(params.get(DossierTerm.ORIGIN_DOSSIER_ID))
: null);
String time = GetterUtil.getString(params.get(DossierTerm.TIME));
String register = GetterUtil.getString(params.get(DossierTerm.REGISTER));
Long groupDossierId = GetterUtil.getLong(params.get(DossierTerm.GROUP_DOSSIER_ID));
String applicantFollowIdNo = GetterUtil.getString(params.get(DossierTerm.APPLICANT_FOLLOW_ID_NO));
String assignedUserId = GetterUtil.getString(params.get(DossierTerm.ASSIGNED_USER_ID));
Indexer<Dossier> indexer = IndexerRegistryUtil.nullSafeGetIndexer(Dossier.class);
searchContext.addFullQueryEntryClassName(CLASS_NAME);
searchContext.setEntryClassNames(new String[] { CLASS_NAME });
searchContext.setAttribute("paginationType", "regular");
searchContext.setLike(true);
searchContext.setAndSearch(true);
BooleanQuery booleanQuery = null;
if (Validator.isNotNull(keywords)) {
booleanQuery = BooleanQueryFactoryUtil.create(searchContext);
} else {
booleanQuery = indexer.getFullQuery(searchContext);
}
//Search follow params default
BooleanQuery booleanCommon = processSearchCommon(keywords, secetKey, groupId, owner, userId, follow, step,
template, top, emailLogin, originality, applicantFollowIdNo, booleanQuery);
// Search follow param input
BooleanQuery booleanInput = processSearchInput(status, subStatus, state, online, submitting, agency, service,
userId, top, year, month, dossierNo, certificateNo, strDossierActionId, fromReceiveDate, toReceiveDate,
certNo, fromCertDate, toCertDate, fromSubmitDate, toSubmitDate, notState, statusReg, notStatusReg,
follow, originality, assigned, statusStep, subStatusStep, permission, domain, domainName, applicantName,
applicantIdNo, serviceName, fromReleaseDate, toReleaseDate, fromFinishDate, toFinishDate,
fromReceiveNotDoneDate, toReceiveNotDoneDate, paymentStatus, origin, fromStatisticDate, toStatisticDate,
originDossierId, time, register, day, groupDossierId, assignedUserId, booleanCommon);
booleanQuery.addRequiredTerm(Field.ENTRY_CLASS_NAME, CLASS_NAME);
return IndexSearcherHelperUtil.searchCount(searchContext, booleanInput);
}
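/**
 * Adds the filter clauses shared by searchLucene and countLucene (keyword wildcard search,
 * group, owner, follow, step and template) to the given boolean query.
 */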
private BooleanQuery processSearchCommon(String keywords, String secetKey, String groupId, String owner,
long userId, String follow, String step, String template, String top, String emailLogin, String originality,
String applicantFollowIdNo, BooleanQuery booleanQuery) throws ParseException {
// LamTV: Process search LIKE
if (Validator.isNotNull(keywords)) {
BooleanQuery queryBool = new BooleanQueryImpl();
String[] subQuerieArr = new String[] { DossierTerm.SERVICE_NAME_SEARCH, DossierTerm.APPLICANT_NAME,
DossierTerm.DOSSIER_NO_SEARCH, DossierTerm.DOSSIER_ID_CTN, DossierTerm.BRIEF_NOTE,
DossierTerm.DOSSIER_NAME_SEARCH, DossierTerm.CURRENT_ACTION_USER,
DossierTerm.ORIGIN_DOSSIER_NO_SEARCH, ServiceInfoTerm.SERVICE_CODE_SEARCH,
DossierTerm.DELEGATE_NAME_SEARCH};
String[] keywordArr = keywords.split(StringPool.SPACE);
for (String fieldSearch : subQuerieArr) {
BooleanQuery query = new BooleanQueryImpl();
for (String key : keywordArr) {
WildcardQuery wildQuery = new WildcardQueryImpl(fieldSearch,
StringPool.STAR + key.toLowerCase() + StringPool.STAR);
query.add(wildQuery, BooleanClauseOccur.MUST);
}
queryBool.add(query, BooleanClauseOccur.SHOULD);
}
booleanQuery.add(queryBool, BooleanClauseOccur.MUST);
}
if (!(Validator.isNotNull(secetKey) && secetKey.contentEquals("OPENCPSV2"))) {
if (Validator.isNotNull(groupId)) {
MultiMatchQuery query = new MultiMatchQuery(groupId);
query.addFields(Field.GROUP_ID);
booleanQuery.add(query, BooleanClauseOccur.MUST);
}
}
if (Validator.isNotNull(owner) && Boolean.parseBoolean(owner) && userId > 0) {
MultiMatchQuery query = new MultiMatchQuery(String.valueOf(userId));
query.addField(DossierTerm.USER_ID);
booleanQuery.add(query, BooleanClauseOccur.MUST);
}
if (Validator.isNotNull(follow) && Boolean.parseBoolean(follow) && userId > 0) {
if (Validator.isNotNull(originality) && Long.valueOf(originality) == DossierTerm.ORIGINALITY_PUBLISH) {
//_log.info("applicantFollowIdNo: "+applicantFollowIdNo);
MultiMatchQuery query = new MultiMatchQuery(applicantFollowIdNo);
query.addField(DossierTerm.APPLICANT_ID_NO);
booleanQuery.add(query, BooleanClauseOccur.MUST);
} else {
MultiMatchQuery query = new MultiMatchQuery(String.valueOf(userId));
query.addField(DossierTerm.ACTION_MAPPING_USERID);
booleanQuery.add(query, BooleanClauseOccur.MUST);
}
}
if (Validator.isNotNull(step)) {
String[] stepArr = StringUtil.split(step);
if (stepArr != null && stepArr.length > 0) {
BooleanQuery subQuery = new BooleanQueryImpl();
for (int i = 0; i < stepArr.length; i++) {
MultiMatchQuery query = new MultiMatchQuery(stepArr[i]);
query.addField(DossierTerm.STEP_CODE);
subQuery.add(query, BooleanClauseOccur.SHOULD);
}
booleanQuery.add(subQuery, BooleanClauseOccur.MUST);
} else {
MultiMatchQuery query = new MultiMatchQuery(step);
query.addFields(DossierTerm.STEP_CODE);
booleanQuery.add(query, BooleanClauseOccur.MUST);
}
}
if (Validator.isNotNull(template)) {
MultiMatchQuery query = new MultiMatchQuery(template);
query.addFields(DossierTerm.TEMPLATE);
booleanQuery.add(query, BooleanClauseOccur.MUST);
}
// //OriginDossierId = 0
// MultiMatchQuery queryOrigin = new MultiMatchQuery(String.valueOf(0));
// queryOrigin.addField(DossierTerm.ORIGIN_DOSSIER_ID);
// booleanQuery.add(queryOrigin, BooleanClauseOccur.MUST);
return booleanQuery;
}
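/**
 * Adds the request-specific filter clauses (status, sub-status, state, agency, service,
 * date ranges, originality, permission, statistic windows, ...) on top of the common
 * clauses built by processSearchCommon.
 */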
private BooleanQuery processSearchInput(String status, String subStatus, String state, String online,
String submitting, String agency, String service, long userId, String top, int year, int month,
String dossierNo, String certificateNo, String strDossierActionId, String fromReceiveDate,
String toReceiveDate, String certNo, String fromCertDate, String toCertDate, String fromSubmitDate,
String toSubmitDate, String notState, Long statusReg, Long notStatusReg, String follow, String originality,
String assigned, String statusStep, String subStatusStep, String permission, String domain,
String domainName, String applicantName, String applicantIdNo, String serviceName, String fromReleaseDate,
String toReleaseDate, String fromFinishDate, String toFinishDate, String fromReceiveNotDoneDate,
String toReceiveNotDoneDate, String paymentStatus, String origin, String fromStatisticDate,
String toStatisticDate, Integer originDossierId, String time, String register, int day, Long groupDossierId,
String assignedUserId, BooleanQuery booleanQuery) throws ParseException {
if (Validator.isNotNull(status)) {
String[] lstStatus = StringUtil.split(status);
if (lstStatus != null && lstStatus.length > 0) {
BooleanQuery subQuery = new BooleanQueryImpl();
for (int i = 0; i < lstStatus.length; i++) {
MultiMatchQuery query = new MultiMatchQuery(lstStatus[i]);
query.addField(DossierTerm.DOSSIER_STATUS);
subQuery.add(query, BooleanClauseOccur.SHOULD);
}
booleanQuery.add(subQuery, BooleanClauseOccur.MUST);
} else {
MultiMatchQuery query = new MultiMatchQuery(status);
query.addFields(DossierTerm.DOSSIER_STATUS);
booleanQuery.add(query, BooleanClauseOccur.MUST);
}
}
if (Validator.isNotNull(subStatus)) {
String[] lstSubStatus = StringUtil.split(subStatus);
if (lstSubStatus != null && lstSubStatus.length > 0) {
BooleanQuery subQuery = new BooleanQueryImpl();
for (int i = 0; i < lstSubStatus.length; i++) {
MultiMatchQuery query = new MultiMatchQuery(lstSubStatus[i]);
query.addField(DossierTerm.DOSSIER_SUB_STATUS);
subQuery.add(query, BooleanClauseOccur.SHOULD);
}
booleanQuery.add(subQuery, BooleanClauseOccur.MUST);
} else {
MultiMatchQuery query = new MultiMatchQuery(subStatus);
query.addFields(DossierTerm.DOSSIER_SUB_STATUS);
booleanQuery.add(query, BooleanClauseOccur.MUST);
}
}
if (Validator.isNotNull(state)) {
if (state.equals(ConstantsTerm.CANCELLING)) {
BooleanQuery subQuery = new BooleanQueryImpl();
MultiMatchQuery query1 = new MultiMatchQuery(String.valueOf(0));
query1.addField(DossierTerm.CANCELLING_DATE_TIMESTAMP);
MultiMatchQuery query2 = new MultiMatchQuery(ConstantsTerm.CANCELLED);
query2.addField(DossierTerm.DOSSIER_STATUS);
subQuery.add(query1, BooleanClauseOccur.MUST_NOT);
subQuery.add(query2, BooleanClauseOccur.MUST_NOT);
booleanQuery.add(subQuery, BooleanClauseOccur.MUST);
}
if (state.equals(ConstantsTerm.CORRECTING)) {
MultiMatchQuery query = new MultiMatchQuery(String.valueOf(0));
query.addField(DossierTerm.CORRECTING_DATE_TIMESTAMP);
booleanQuery.add(query, BooleanClauseOccur.MUST_NOT);
}
if (state.equals(ConstantsTerm.ENDORSEMENT)) {
MultiMatchQuery query = new MultiMatchQuery(String.valueOf(0));
query.addField(DossierTerm.ENDORSEMENT_DATE_TIMESTAMP);
booleanQuery.add(query, BooleanClauseOccur.MUST_NOT);
}
}
if (Validator.isNotNull(online)) {
MultiMatchQuery query = new MultiMatchQuery(String.valueOf(online));
query.addField(DossierTerm.ONLINE);
booleanQuery.add(query, BooleanClauseOccur.MUST);
}
if (Validator.isNotNull(submitting) && Boolean.parseBoolean(submitting)) {
MultiMatchQuery query = new MultiMatchQuery(String.valueOf(submitting));
query.addField(DossierTerm.SUBMITTING);
booleanQuery.add(query, BooleanClauseOccur.MUST);
}
if (Validator.isNotNull(agency)) {
MultiMatchQuery query = new MultiMatchQuery(agency);
query.addFields(DossierTerm.GOV_AGENCY_CODE);
booleanQuery.add(query, BooleanClauseOccur.MUST);
}
if (Validator.isNotNull(service)) {
MultiMatchQuery query = new MultiMatchQuery(service);
query.addFields(ServiceInfoTerm.SERVICE_CODE_SEARCH);
booleanQuery.add(query, BooleanClauseOccur.MUST);
}
if (DossierTerm.STATISTIC.equals(top.toLowerCase())) {
if (month > 0 && year > 0) {
int minDayOfMonth = DossierMgtUtils.minDay(month, year);
//_log.debug("minDayOfMonth: "+minDayOfMonth);
if (minDayOfMonth > 0) {
String strMonth;
String strMonthEnd;
String strMinDay;
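// Note: the statistic window runs from (year, month, minDay) to (year, month + 1, minDay);
// monthEnd is not rolled over into the next year when month == 12.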
int monthEnd = month + 1;
if (month < 10) {
strMonth = "0" + month;
} else {
strMonth = String.valueOf(month);
}
if (monthEnd < 10) {
strMonthEnd = "0" + monthEnd;
} else {
strMonthEnd = String.valueOf(monthEnd);
}
if (minDayOfMonth < 10) {
strMinDay = "0" + minDayOfMonth;
} else {
strMinDay = String.valueOf(minDayOfMonth);
}
BooleanQuery subQueryOne = new BooleanQueryImpl();
BooleanQuery subQueryTwo = new BooleanQueryImpl();
BooleanQuery subQueryThree = new BooleanQueryImpl();
String fromStatisDateFilter = year + strMonth + strMinDay + ConstantsTerm.HOUR_START;
String toStatisDateFilter = year + strMonthEnd + strMinDay + ConstantsTerm.HOUR_START;
//Check startDate <= receiveDate < endDate
TermRangeQueryImpl termRangeQueryOne = new TermRangeQueryImpl(DossierTerm.RECEIVE_DATE,
fromStatisDateFilter, toStatisDateFilter, true, false);
subQueryOne.add(termRangeQueryOne, BooleanClauseOccur.SHOULD);
/** Check receiveDate < startDate and (startDate <= releaseDate or releaseDate = null) - START **/
// Check receiveDate < startDate
TermRangeQueryImpl termRangeQueryTwo = new TermRangeQueryImpl(DossierTerm.RECEIVE_DATE,
null, fromStatisDateFilter, false, false);
subQueryTwo.add(termRangeQueryTwo, BooleanClauseOccur.MUST);
// Check startDate <= releaseDate
TermRangeQueryImpl termRangeQueryThree = new TermRangeQueryImpl(DossierTerm.RELEASE_DATE_LUCENE,
fromStatisDateFilter, null, true, true);
subQueryThree.add(termRangeQueryThree, BooleanClauseOccur.SHOULD);
// Check releaseDate = null
MultiMatchQuery queryRelease = new MultiMatchQuery(String.valueOf(0));
queryRelease.addField(DossierTerm.RELEASE_DATE_TIMESTAMP);
subQueryThree.add(queryRelease, BooleanClauseOccur.SHOULD);
//
subQueryTwo.add(subQueryThree, BooleanClauseOccur.MUST);
/** Check receiveDate < startDate and (startDate <= releaseDate or releaseDate = null) - END **/
subQueryOne.add(subQueryTwo, BooleanClauseOccur.SHOULD);
//
booleanQuery.add(subQueryOne, BooleanClauseOccur.MUST);
}
} else if (Validator.isNotNull(fromStatisticDate) && Validator.isNotNull(toStatisticDate)) {
BooleanQuery subQueryOne = new BooleanQueryImpl();
BooleanQuery subQueryTwo = new BooleanQueryImpl();
BooleanQuery subQueryThree = new BooleanQueryImpl();
String fromStatisDateFilter = fromStatisticDate + ConstantsTerm.HOUR_START;
String toStatisDateFilter = toStatisticDate + ConstantsTerm.HOUR_END;
_log.debug("fromStatisDateFilter: "+fromStatisDateFilter);
_log.debug("toStatisDateFilter: "+toStatisDateFilter);
//Check startDate <= receiveDate <= endDate
TermRangeQueryImpl termRangeQueryOne = new TermRangeQueryImpl(DossierTerm.RECEIVE_DATE,
fromStatisDateFilter, toStatisDateFilter, true, true);
subQueryOne.add(termRangeQueryOne, BooleanClauseOccur.SHOULD);
/** Check receiveDate < startDate and (startDate <= releaseDate or releaseDate = null) - START **/
// Check receiveDate < startDate
TermRangeQueryImpl termRangeQueryTwo = new TermRangeQueryImpl(DossierTerm.RECEIVE_DATE,
null, fromStatisDateFilter, false, false);
subQueryTwo.add(termRangeQueryTwo, BooleanClauseOccur.MUST);
// Check startDate <= releaseDate
TermRangeQueryImpl termRangeQueryThree = new TermRangeQueryImpl(DossierTerm.RELEASE_DATE_LUCENE,
fromStatisDateFilter, null, true, true);
subQueryThree.add(termRangeQueryThree, BooleanClauseOccur.SHOULD);
// Check startDate <= finishDate <= endDate
TermRangeQueryImpl termRangeQueryFinish = new TermRangeQueryImpl(DossierTerm.FINISH_DATE_LUCENE,
fromStatisDateFilter, toStatisDateFilter, true, true);
subQueryThree.add(termRangeQueryFinish, BooleanClauseOccur.SHOULD);
// Check releaseDate = null
MultiMatchQuery queryRelease = new MultiMatchQuery(String.valueOf(0));
queryRelease.addField(DossierTerm.RELEASE_DATE_TIMESTAMP);
subQueryThree.add(queryRelease, BooleanClauseOccur.SHOULD);
//
subQueryTwo.add(subQueryThree, BooleanClauseOccur.MUST);
/** Check receiveDate < startDate and (startDate <= releaseDate or releaseDate = null) - END **/
subQueryOne.add(subQueryTwo, BooleanClauseOccur.SHOULD);
//
booleanQuery.add(subQueryOne, BooleanClauseOccur.MUST);
}
} else {
if (year > 0 || month > 0) {
if (year > 0) {
// _log.debug("year: "+year);
MultiMatchQuery query = new MultiMatchQuery(String.valueOf(year));
//MultiMatchQuery queryYearTwo = new MultiMatchQuery(String.valueOf(year));
// if (Validator.isNotNull(top) && DossierTerm.STATISTIC.equals(top.toLowerCase())) {
// MultiMatchQuery queryReceive = new MultiMatchQuery(String.valueOf(0));
// MultiMatchQuery queryRelease = new MultiMatchQuery(String.valueOf(0));
// BooleanQuery subQueryOne = new BooleanQueryImpl();
// BooleanQuery subQueryTwo = new BooleanQueryImpl();
// BooleanQuery subQueryThree = new BooleanQueryImpl();
//
// //Check receiveDate != null
// queryReceive.addField(DossierTerm.YEAR_DOSSIER);
// subQueryOne.add(queryReceive, BooleanClauseOccur.MUST_NOT);
// //Check receiveDate
// queryYearTwo.addFields(DossierTerm.YEAR_DOSSIER);
// subQueryOne.add(queryYearTwo, BooleanClauseOccur.SHOULD);
// /**Check receiveDate < now && releaseDate = null or releaseDate = now**/
// TermRangeQueryImpl termRangeQuery = new TermRangeQueryImpl(DossierTerm.YEAR_DOSSIER,
// String.valueOf(0), String.valueOf(month), false, false);
// subQueryTwo.add(termRangeQuery, BooleanClauseOccur.MUST);
//
// queryRelease.addField(DossierTerm.YEAR_RELEASE);
// subQueryTwo.add(queryRelease, BooleanClauseOccur.SHOULD);
//
// subQueryThree.add(queryYearTwo, BooleanClauseOccur.SHOULD);
//
// queryYearTwo.addFields(DossierTerm.YEAR_RELEASE);
// subQueryTwo.add(subQueryThree, BooleanClauseOccur.MUST);
// //
// subQueryOne.add(subQueryTwo, BooleanClauseOccur.SHOULD);
// //
// booleanQuery.add(subQueryOne, BooleanClauseOccur.MUST);
// } else {
query.addFields(DossierTerm.YEAR_DOSSIER);
booleanQuery.add(query, BooleanClauseOccur.MUST);
// }
}
if (month > 0) {
// _log.debug("month: "+month);
MultiMatchQuery query = new MultiMatchQuery(String.valueOf(month));
//MultiMatchQuery queryMonthTwo = new MultiMatchQuery(String.valueOf(month));
// if (Validator.isNotNull(top) && DossierTerm.STATISTIC.equals(top.toLowerCase())) {
// MultiMatchQuery queryReceive = new MultiMatchQuery(String.valueOf(0));
// MultiMatchQuery queryRelease = new MultiMatchQuery(String.valueOf(0));
// BooleanQuery subQueryOne = new BooleanQueryImpl();
// BooleanQuery subQueryTwo = new BooleanQueryImpl();
// BooleanQuery subQueryThree = new BooleanQueryImpl();
//
// //Check receiveDate != null
// queryReceive.addField(DossierTerm.MONTH_DOSSIER);
// subQueryOne.add(queryReceive, BooleanClauseOccur.MUST_NOT);
// //Check receiveDate
// queryMonthTwo.addFields(DossierTerm.MONTH_DOSSIER);
// subQueryOne.add(queryMonthTwo, BooleanClauseOccur.SHOULD);
// /**Check receiveDate < now && releaseDate = null or releaseDate = now**/
// // Check receiveDate < now
//// Calendar calDate = Calendar.getInstance();
//// calDate.setTime(new Date());
//// int monthCurrent = calDate.get(Calendar.MONTH) + 1;
// TermRangeQueryImpl termRangeQuery = new TermRangeQueryImpl(DossierTerm.MONTH_DOSSIER,
// String.valueOf(0), String.valueOf(month), false, false);
// subQueryTwo.add(termRangeQuery, BooleanClauseOccur.MUST);
//
// queryRelease.addField(DossierTerm.MONTH_RELEASE);
// subQueryTwo.add(queryRelease, BooleanClauseOccur.SHOULD);
//
// subQueryThree.add(queryMonthTwo, BooleanClauseOccur.SHOULD);
//
// queryMonthTwo.addFields(DossierTerm.MONTH_RELEASE);
// subQueryTwo.add(subQueryThree, BooleanClauseOccur.MUST);
//// //
// subQueryOne.add(subQueryTwo, BooleanClauseOccur.SHOULD);
// //
// booleanQuery.add(subQueryOne, BooleanClauseOccur.MUST);
// } else {
query.addFields(DossierTerm.MONTH_DOSSIER);
booleanQuery.add(query, BooleanClauseOccur.MUST);
// }
}
}
//Temporarily commented out: filter for dossiers that have not been received yet
// else {
// MultiMatchQuery query = new MultiMatchQuery(String.valueOf(0));
// query.addField(DossierTerm.RECEIVE_DATE_TIMESTAMP);
// booleanQuery.add(query, BooleanClauseOccur.MUST_NOT);
// }
}
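// Filter by the "top" view: "passed" matches dossiers the current user has acted on;
// "delay", "overdue" and "coming" (and other non-statistic values) are restricted to dossiers not yet released.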
if (Validator.isNotNull(top)) {
if (DossierTerm.PASSED.equals(top.toLowerCase())) {
// _log.debug("top: "+top);
MultiMatchQuery queryAction = new MultiMatchQuery(String.valueOf(userId));
queryAction.addField(DossierTerm.USER_DOSSIER_ACTION_ID);
booleanQuery.add(queryAction, BooleanClauseOccur.MUST);
} else if (!DossierTerm.STATISTIC.equals(top.toLowerCase())) {
BooleanQuery subQuery = new BooleanQueryImpl();
MultiMatchQuery queryRelease = new MultiMatchQuery(String.valueOf(0));
queryRelease.addField(DossierTerm.RELEASE_DATE_TIMESTAMP);
subQuery.add(queryRelease, BooleanClauseOccur.MUST);
// Dossier is delay
if (top.toLowerCase().equals(DossierTerm.DELAY)) {
/** Check condition dueDate != null **/
MultiMatchQuery querydueDate = new MultiMatchQuery(String.valueOf(0));
querydueDate.addField(DossierTerm.DUE_DATE_TIMESTAMP);
subQuery.add(querydueDate, BooleanClauseOccur.MUST_NOT);
/** Check condition status != waiting **/
MultiMatchQuery queryWaiting = new MultiMatchQuery(DossierTerm.DOSSIER_STATUS_WAITING);
queryWaiting.addField(DossierTerm.DOSSIER_STATUS);
subQuery.add(queryWaiting, BooleanClauseOccur.MUST_NOT);
MultiMatchQuery query = new MultiMatchQuery(String.valueOf(1));
query.addFields(DossierTerm.COMPARE_DELAY_DATE);
subQuery.add(query, BooleanClauseOccur.MUST);
// Dossier is overDue
} else if (top.toLowerCase().equals(DossierTerm.OVER_DUE)) {
BooleanQuery subQueryOne = new BooleanQueryImpl();
BooleanQuery subQueryTwo = new BooleanQueryImpl();
BooleanQuery subQueryThree = new BooleanQueryImpl();
/** Check condition dueDate != null **/
MultiMatchQuery querydueDate = new MultiMatchQuery(String.valueOf(0));
querydueDate.addField(DossierTerm.DUE_DATE_TIMESTAMP);
subQuery.add(querydueDate, BooleanClauseOccur.MUST_NOT);
/** Check condition status != waiting **/
MultiMatchQuery queryWaiting = new MultiMatchQuery(DossierTerm.DOSSIER_STATUS_WAITING);
queryWaiting.addField(DossierTerm.DOSSIER_STATUS);
subQuery.add(queryWaiting, BooleanClauseOccur.MUST_NOT);
/** Check condition status != receiving **/
MultiMatchQuery queryReceiving = new MultiMatchQuery(DossierTerm.DOSSIER_STATUS_RECEIVING);
queryReceiving.addField(DossierTerm.DOSSIER_STATUS);
subQuery.add(queryReceiving, BooleanClauseOccur.MUST_NOT);
/** Check condition releaseDate > dueDate **/
MultiMatchQuery queryCompareRelease = new MultiMatchQuery(String.valueOf(1));
queryCompareRelease.addField(DossierTerm.VALUE_COMPARE_RELEASE);
subQueryOne.add(queryCompareRelease, BooleanClauseOccur.MUST);
/** Check condition nowDate >= dueDate **/
Date date = new Date();
long nowTime = date.getTime();
TermRangeQueryImpl termRangeQuery = new TermRangeQueryImpl(DossierTerm.DUE_DATE_TIMESTAMP,
String.valueOf(0), String.valueOf(nowTime), false, false);
subQueryTwo.add(termRangeQuery, BooleanClauseOccur.MUST);
/** Check condition (releaseDate > dueDate || nowDate >= dueDate) **/
subQueryThree.add(subQueryTwo, BooleanClauseOccur.SHOULD);
subQueryThree.add(subQueryOne, BooleanClauseOccur.SHOULD);
/** Check condition dueDate!=null && (releaseDate>=dueDate || now>=dueDate) **/
subQuery.add(subQueryThree, BooleanClauseOccur.MUST);
// Dossier is coming
} else if (top.toLowerCase().equals(DossierTerm.COMING)) {
/** Check condition dueDate != null **/
MultiMatchQuery querydueDateNull = new MultiMatchQuery(String.valueOf(0));
querydueDateNull.addField(DossierTerm.DUE_DATE_TIMESTAMP);
subQuery.add(querydueDateNull, BooleanClauseOccur.MUST_NOT);
//Check dossier is not dueDate
MultiMatchQuery query = new MultiMatchQuery(String.valueOf(0));
query.addFields(DossierTerm.DUE_DATE_COMING);
subQuery.add(query, BooleanClauseOccur.MUST_NOT);
//Check dossier has dueDate
Date date = new Date();
long nowTime = date.getTime();
TermRangeQueryImpl termRangeQuery = new TermRangeQueryImpl(DossierTerm.DUE_DATE_COMING,
String.valueOf(0), String.valueOf(nowTime), false, true);
subQuery.add(termRangeQuery, BooleanClauseOccur.MUST);
//Check nowDate < dueDate
TermRangeQueryImpl termRangeQueryNow = new TermRangeQueryImpl(DossierTerm.DUE_DATE_TIMESTAMP,
String.valueOf(nowTime), null, true, true);
subQuery.add(termRangeQueryNow, BooleanClauseOccur.MUST);
}
//
booleanQuery.add(subQuery, BooleanClauseOccur.MUST);
}
}
if (Validator.isNotNull(dossierNo)) {
String[] keyDossier = dossierNo.split(StringPool.SPACE);
BooleanQuery query = new BooleanQueryImpl();
for (String key : keyDossier) {
WildcardQuery wildQuery = new WildcardQueryImpl(DossierTerm.DOSSIER_NO_SEARCH,
key.toLowerCase() + StringPool.STAR);
query.add(wildQuery, BooleanClauseOccur.MUST);
}
booleanQuery.add(query, BooleanClauseOccur.MUST);
}
if (Validator.isNotNull(certificateNo)) {
MultiMatchQuery query = new MultiMatchQuery(certificateNo);
query.addFields(DossierTerm.DOSSIER_ID_CTN);
booleanQuery.add(query, BooleanClauseOccur.MUST);
}
if (Validator.isNotNull(strDossierActionId)) {
String[] splitDossierActionId = StringUtil.split(strDossierActionId);
if (splitDossierActionId != null && splitDossierActionId.length > 0) {
BooleanQuery subQuery = new BooleanQueryImpl();
for (String dossierActionId : splitDossierActionId) {
if (Validator.isNotNull(dossierActionId)) {
MultiMatchQuery query = new MultiMatchQuery(dossierActionId);
query.addFields(DossierTerm.DOSSIER_ACTION_ID);
subQuery.add(query, BooleanClauseOccur.SHOULD);
}
}
booleanQuery.add(subQuery, BooleanClauseOccur.MUST);
} else {
MultiMatchQuery query = new MultiMatchQuery(strDossierActionId);
query.addFields(DossierTerm.DOSSIER_ACTION_ID);
booleanQuery.add(query, BooleanClauseOccur.MUST);
}
}
String fromReceiveDateFilter = fromReceiveDate + ConstantsTerm.HOUR_START;
String toReceiveDateFilter = toReceiveDate + ConstantsTerm.HOUR_END;
if (Validator.isNotNull(fromReceiveDate)) {
if (Validator.isNotNull(toReceiveDate)) {
TermRangeQueryImpl termRangeQuery = new TermRangeQueryImpl(DossierTerm.RECEIVE_DATE,
fromReceiveDateFilter, toReceiveDateFilter, true, true);
booleanQuery.add(termRangeQuery, BooleanClauseOccur.MUST);
} else {
TermRangeQueryImpl termRangeQuery = new TermRangeQueryImpl(DossierTerm.RECEIVE_DATE,
fromReceiveDateFilter, toReceiveDateFilter, true, false);
booleanQuery.add(termRangeQuery, BooleanClauseOccur.MUST);
}
} else {
if (Validator.isNotNull(toReceiveDate)) {
TermRangeQueryImpl termRangeQuery = new TermRangeQueryImpl(DossierTerm.RECEIVE_DATE,
fromReceiveDateFilter, toReceiveDateFilter, false, true);
booleanQuery.add(termRangeQuery, BooleanClauseOccur.MUST);
}
}
if (Validator.isNotNull(certNo)) {
MultiMatchQuery query = new MultiMatchQuery(String.valueOf(certNo));
query.addField(DossierTerm.CERT_NO_SEARCH);
booleanQuery.add(query, BooleanClauseOccur.MUST);
}
String fromCertDateFilter = fromCertDate + ConstantsTerm.HOUR_START;
String toCertDateFilter = toCertDate + ConstantsTerm.HOUR_END;
if (Validator.isNotNull(fromCertDate)) {
if (Validator.isNotNull(toCertDate)) {
TermRangeQueryImpl termRangeQuery = new TermRangeQueryImpl(DossierTerm.CERT_DATE, fromCertDateFilter,
toCertDateFilter, true, true);
booleanQuery.add(termRangeQuery, BooleanClauseOccur.MUST);
} else {
TermRangeQueryImpl termRangeQuery = new TermRangeQueryImpl(DossierTerm.CERT_DATE, fromCertDateFilter,
toCertDateFilter, true, false);
booleanQuery.add(termRangeQuery, BooleanClauseOccur.MUST);
}
} else {
if (Validator.isNotNull(toCertDate)) {
TermRangeQueryImpl termRangeQuery = new TermRangeQueryImpl(DossierTerm.CERT_DATE, fromCertDateFilter,
toCertDateFilter, false, true);
booleanQuery.add(termRangeQuery, BooleanClauseOccur.MUST);
}
}
String fromSubmitDateFilter = fromSubmitDate + ConstantsTerm.HOUR_START;
String toSubmitDateFilter = toSubmitDate + ConstantsTerm.HOUR_END;
if (Validator.isNotNull(fromSubmitDate)) {
if (Validator.isNotNull(toSubmitDate)) {
TermRangeQueryImpl termRangeQuery = new TermRangeQueryImpl(DossierTerm.SUBMIT_DATE,
fromSubmitDateFilter, toSubmitDateFilter, true, true);
booleanQuery.add(termRangeQuery, BooleanClauseOccur.MUST);
} else {
TermRangeQueryImpl termRangeQuery = new TermRangeQueryImpl(DossierTerm.SUBMIT_DATE,
fromSubmitDateFilter, toSubmitDateFilter, true, false);
booleanQuery.add(termRangeQuery, BooleanClauseOccur.MUST);
}
} else {
if (Validator.isNotNull(toSubmitDate)) {
TermRangeQueryImpl termRangeQuery = new TermRangeQueryImpl(DossierTerm.SUBMIT_DATE,
fromSubmitDateFilter, toSubmitDateFilter, false, true);
booleanQuery.add(termRangeQuery, BooleanClauseOccur.MUST);
}
}
if (Validator.isNotNull(notState)) {
// LamTV: Case where the cancelling flag is not set
if (notState.equals(ConstantsTerm.CANCELLING)) {
MultiMatchQuery query = new MultiMatchQuery(String.valueOf(0));
query.addField(DossierTerm.CANCELLING_DATE_TIMESTAMP);
booleanQuery.add(query, BooleanClauseOccur.MUST);
}
// LamTV: Cases where the correcting / endorsement flags are not set
if (notState.contains(ConstantsTerm.CORRECTING)) {
MultiMatchQuery query = new MultiMatchQuery(String.valueOf(0));
query.addField(DossierTerm.CORRECTING_DATE_TIMESTAMP);
booleanQuery.add(query, BooleanClauseOccur.MUST);
}
if (notState.contains(ConstantsTerm.ENDORSEMENT)) {
MultiMatchQuery query = new MultiMatchQuery(String.valueOf(0));
query.addField(DossierTerm.ENDORSEMENT_DATE_TIMESTAMP);
booleanQuery.add(query, BooleanClauseOccur.MUST);
}
}
// LamTV: Handle abnormal registration status (statusReg) cases
if (Validator.isNotNull(statusReg)) {
MultiMatchQuery query = new MultiMatchQuery(String.valueOf(statusReg));
query.addField(DossierTerm.STATUS_REG);
booleanQuery.add(query, BooleanClauseOccur.MUST);
}
if (Validator.isNotNull(notStatusReg)) {
MultiMatchQuery query = new MultiMatchQuery(String.valueOf(notStatusReg));
query.addField(DossierTerm.STATUS_REG);
booleanQuery.add(query, BooleanClauseOccur.MUST_NOT);
}
// LamTV: Process originality and assigned
if (Validator.isNotNull(assigned)) {
MultiMatchQuery query = new MultiMatchQuery(String.valueOf(assigned));
query.addField(DossierTerm.ASSIGNED);
booleanQuery.add(query, BooleanClauseOccur.MUST);
}
// LamTV: Process assigned user filter
if (Validator.isNotNull(assignedUserId)) {
String[] assignedArr = StringUtil.split(assignedUserId);
if (assignedArr != null && assignedArr.length > 0) {
BooleanQuery subQuery = new BooleanQueryImpl();
for (int i = 0; i < assignedArr.length; i++) {
MultiMatchQuery query = new MultiMatchQuery(assignedArr[i]);
query.addField(DossierTerm.ASSIGNED_USER_ID);
subQuery.add(query, BooleanClauseOccur.SHOULD);
}
booleanQuery.add(subQuery, BooleanClauseOccur.MUST);
} else {
MultiMatchQuery query = new MultiMatchQuery(assignedUserId);
query.addFields(DossierTerm.ASSIGNED_USER_ID);
booleanQuery.add(query, BooleanClauseOccur.MUST);
}
}
//_log.debug("originality: "+originality);
if (Validator.isNotNull(originality)) {
if (originality.contains(StringPool.COMMA)) {
String[] originalArr = StringUtil.split(originality);
if (originalArr != null && originalArr.length > 0) {
BooleanQuery subQuery = new BooleanQueryImpl();
for (int i = 0; i < originalArr.length; i++) {
int orginalInt = GetterUtil.getInteger(originalArr[i]);
if (orginalInt >= 0) {
MultiMatchQuery query = new MultiMatchQuery(originalArr[i]);
query.addField(DossierTerm.ORIGINALLITY);
subQuery.add(query, BooleanClauseOccur.SHOULD);
} else {
String originalSearch = String.valueOf(DossierTerm.CONSTANT_INDEX_ORIGINALITY + orginalInt);
MultiMatchQuery query = new MultiMatchQuery(originalSearch);
query.addField(DossierTerm.ORIGINALLITY);
subQuery.add(query, BooleanClauseOccur.SHOULD);
}
}
booleanQuery.add(subQuery, BooleanClauseOccur.MUST);
}
} else {
Integer originalityInt = GetterUtil.getInteger(originality);
if (originalityInt == -1) {
TermRangeQueryImpl termRangeQuery = new TermRangeQueryImpl(DossierTerm.ORIGINALLITY,
String.valueOf(DossierTerm.ORIGINALITY_MOTCUA),
String.valueOf(DossierTerm.CONSTANT_INDEX_ORIGINALITY), false, true);
booleanQuery.add(termRangeQuery, BooleanClauseOccur.MUST);
} else if (originalityInt >= 0) {
//_log.debug("originalityxxxx: "+originality);
MultiMatchQuery query = new MultiMatchQuery(originality);
query.addFields(DossierTerm.ORIGINALLITY);
booleanQuery.add(query, BooleanClauseOccur.MUST);
}
}
} else {
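// No originality filter supplied: default to the inclusive range [ORIGINALITY_PUBLISH, ORIGINALITY_HSLT].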
// _log.debug("START originality: "+originality);
TermRangeQueryImpl termRangeQuery = new TermRangeQueryImpl(DossierTerm.ORIGINALLITY,
String.valueOf(DossierTerm.ORIGINALITY_PUBLISH), String.valueOf(DossierTerm.ORIGINALITY_HSLT), true,
true);
booleanQuery.add(termRangeQuery, BooleanClauseOccur.MUST);
}
//Check original by action
if (Validator.isNotNull(originDossierId) && originDossierId > 0) {
MultiMatchQuery queryOrigin = new MultiMatchQuery(String.valueOf(originDossierId));
queryOrigin.addField(DossierTerm.ORIGIN_DOSSIER_ID);
booleanQuery.add(queryOrigin, BooleanClauseOccur.MUST);
} else {
if (Validator.isNotNull(originality)) {
Integer originalityInt = GetterUtil.getInteger(originality);
if (Validator.isNotNull(follow) && Boolean.valueOf(follow)
&& originalityInt == DossierTerm.ORIGINALITY_PUBLISH) {
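// Intentionally empty: when following published dossiers, no dossierActionId/originDossierId constraints are added.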
} else if (originalityInt != 9) {
MultiMatchQuery queryDossierAction = new MultiMatchQuery(String.valueOf(0));
queryDossierAction.addField(DossierTerm.DOSSIER_ACTION_ID);
booleanQuery.add(queryDossierAction, BooleanClauseOccur.MUST_NOT);
//
// MultiMatchQuery queryOrigin = new MultiMatchQuery(String.valueOf(0));
// queryOrigin.addField(DossierTerm.ORIGIN_DOSSIER_ID);
// booleanQuery.add(queryOrigin, BooleanClauseOccur.MUST);
}
} else {
MultiMatchQuery queryDossierAction = new MultiMatchQuery(String.valueOf(0));
queryDossierAction.addField(DossierTerm.DOSSIER_ACTION_ID);
booleanQuery.add(queryDossierAction, BooleanClauseOccur.MUST_NOT);
//
MultiMatchQuery queryOrigin = new MultiMatchQuery(String.valueOf(0));
queryOrigin.addField(DossierTerm.ORIGIN_DOSSIER_ID);
booleanQuery.add(queryOrigin, BooleanClauseOccur.MUST);
}
}
//LamTV_Test
// if (Validator.isNotNull(statusStep)) {
// String[] statusStepArr = StringUtil.split(statusStep);
//
// if (statusStepArr != null && statusStepArr.length > 0) {
// BooleanQuery subQuery = new BooleanQueryImpl();
// for (int i = 0; i < statusStepArr.length; i++) {
// MultiMatchQuery query = new MultiMatchQuery(statusStepArr[i]);
// query.addField(DossierTerm.DOSSIER_STATUS);
// subQuery.add(query, BooleanClauseOccur.SHOULD);
// }
// booleanQuery.add(subQuery, BooleanClauseOccur.MUST);
// } else {
// MultiMatchQuery query = new MultiMatchQuery(statusStep);
// query.addFields(DossierTerm.DOSSIER_STATUS);
// booleanQuery.add(query, BooleanClauseOccur.MUST);
// }
// }
// Set<String> addedSubStatuses = new HashSet<>();
// if (Validator.isNotNull(subStatusStep)) {
// String[] subStatusStepArr = StringUtil.split(subStatusStep);
// if (subStatusStepArr != null && subStatusStepArr.length > 0) {
// BooleanQuery subQuery = new BooleanQueryImpl();
// for (int i = 0; i < subStatusStepArr.length; i++) {
// String subStatusStepDetail = subStatusStepArr[i];
// if (!"empty".equals(subStatusStepDetail) && !addedSubStatuses.contains(subStatusStepDetail)) {
// MultiMatchQuery query = new MultiMatchQuery(subStatusStepArr[i]);
// query.addField(DossierTerm.DOSSIER_SUB_STATUS);
// subQuery.add(query, BooleanClauseOccur.SHOULD);
// addedSubStatuses.add(subStatusStepArr[i]);
//
// }
// }
// booleanQuery.add(subQuery, BooleanClauseOccur.MUST);
// } else {
// if (!"empty".equals(subStatusStep)) {
// MultiMatchQuery query = new MultiMatchQuery(subStatusStep);
// query.addFields(DossierTerm.DOSSIER_SUB_STATUS);
// booleanQuery.add(query, BooleanClauseOccur.MUST);
// }
// }
// }
if (Validator.isNotNull(statusStep)
&& Validator.isNotNull(subStatusStep)) {
String[] statusStepArr = StringUtil.split(statusStep);
String[] subStatusStepArr = StringUtil.split(subStatusStep);
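// Assumption: statusStep and subStatusStep are parallel comma-separated lists of equal length;
// each (status, subStatus) pair is matched together, and "empty" skips the sub-status constraint.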
if (statusStepArr != null && statusStepArr.length > 0) {
BooleanQuery subQuery = new BooleanQueryImpl();
for (int i = 0; i < statusStepArr.length; i++) {
BooleanQuery matchedQuery = new BooleanQueryImpl();
MultiMatchQuery query = new MultiMatchQuery(statusStepArr[i]);
query.addField(DossierTerm.DOSSIER_STATUS);
matchedQuery.add(query, BooleanClauseOccur.MUST);
if (!"empty".equals(subStatusStepArr[i])) {
MultiMatchQuery querySub = new MultiMatchQuery(subStatusStepArr[i]);
querySub.addField(DossierTerm.DOSSIER_SUB_STATUS);
matchedQuery.add(querySub, BooleanClauseOccur.MUST);
}
subQuery.add(matchedQuery, BooleanClauseOccur.SHOULD);
}
booleanQuery.add(subQuery, BooleanClauseOccur.MUST);
} else {
MultiMatchQuery query = new MultiMatchQuery(statusStep);
query.addFields(DossierTerm.DOSSIER_STATUS);
booleanQuery.add(query, BooleanClauseOccur.MUST);
MultiMatchQuery querySub = new MultiMatchQuery(subStatusStep);
querySub.addFields(DossierTerm.DOSSIER_SUB_STATUS);
booleanQuery.add(querySub, BooleanClauseOccur.MUST);
}
}
// _log.debug("Permission: " + permission);
if (Validator.isNotNull(permission)) {
String[] permissionArr = StringUtil.split(permission);
if (permissionArr != null && permissionArr.length > 0) {
BooleanQuery subQuery = new BooleanQueryImpl();
for (int i = 0; i < permissionArr.length; i++) {
MultiMatchQuery query = new MultiMatchQuery(permissionArr[i]);
query.addField(DossierTerm.MAPPING_PERMISSION);
subQuery.add(query, BooleanClauseOccur.SHOULD);
}
booleanQuery.add(subQuery, BooleanClauseOccur.MUST);
} else {
MultiMatchQuery query = new MultiMatchQuery(permission);
query.addFields(DossierTerm.MAPPING_PERMISSION);
booleanQuery.add(query, BooleanClauseOccur.MUST);
}
}
if (Validator.isNotNull(domain)) {
MultiMatchQuery query = new MultiMatchQuery(domain);
query.addFields(DossierTerm.DOMAIN_CODE);
booleanQuery.add(query, BooleanClauseOccur.MUST);
}
// LamTV: Process search LIKE
if (Validator.isNotNull(domainName)) {
String[] domainArr = domainName.split(StringPool.SPACE);
BooleanQuery query = new BooleanQueryImpl();
for (String key : domainArr) {
WildcardQuery wildQuery = new WildcardQueryImpl(DossierTerm.DOMAIN_NAME,
key.toLowerCase() + StringPool.STAR);
query.add(wildQuery, BooleanClauseOccur.MUST);
}
booleanQuery.add(query, BooleanClauseOccur.MUST);
}
if (Validator.isNotNull(applicantName)) {
String[] applicantArr = applicantName.split(StringPool.SPACE);
BooleanQuery query = new BooleanQueryImpl();
for (String key : applicantArr) {
WildcardQuery wildQuery = new WildcardQueryImpl(DossierTerm.APPLICANT_NAME,
StringPool.STAR + key.toLowerCase() + StringPool.STAR);
query.add(wildQuery, BooleanClauseOccur.MUST);
}
booleanQuery.add(query, BooleanClauseOccur.MUST);
}
// LamTV: Process search LIKE
if (Validator.isNotNull(applicantIdNo)) {
String[] keywordArr = applicantIdNo.split(StringPool.SPACE);
BooleanQuery query = new BooleanQueryImpl();
for (String key : keywordArr) {
WildcardQuery wildQuery = new WildcardQueryImpl(DossierTerm.APPLICANT_ID_NO,
key.toLowerCase() + StringPool.STAR);
query.add(wildQuery, BooleanClauseOccur.MUST);
}
booleanQuery.add(query, BooleanClauseOccur.MUST);
}
// LamTV: Process search LIKE
if (Validator.isNotNull(serviceName)) {
String[] serviceArr = serviceName.split(StringPool.SPACE);
BooleanQuery query = new BooleanQueryImpl();
for (String key : serviceArr) {
WildcardQuery wildQuery = new WildcardQueryImpl(DossierTerm.SERVICE_NAME,
key.toLowerCase() + StringPool.STAR);
query.add(wildQuery, BooleanClauseOccur.MUST);
}
booleanQuery.add(query, BooleanClauseOccur.MUST);
}
String fromReleaseDateFilter = fromReleaseDate + ConstantsTerm.HOUR_START;
String toReleaseDateFilter = toReleaseDate + ConstantsTerm.HOUR_END;
if (Validator.isNotNull(fromReleaseDate)) {
if (Validator.isNotNull(toReleaseDate)) {
TermRangeQueryImpl termRangeQuery = new TermRangeQueryImpl(DossierTerm.RELEASE_DATE_LUCENE,
fromReleaseDateFilter, toReleaseDateFilter, true, true);
booleanQuery.add(termRangeQuery, BooleanClauseOccur.MUST);
} else {
TermRangeQueryImpl termRangeQuery = new TermRangeQueryImpl(DossierTerm.RELEASE_DATE_LUCENE,
fromReleaseDateFilter, null, true, false);
booleanQuery.add(termRangeQuery, BooleanClauseOccur.MUST);
}
} else {
if (Validator.isNotNull(toReleaseDate)) {
TermRangeQueryImpl termRangeQuery = new TermRangeQueryImpl(DossierTerm.RELEASE_DATE_LUCENE,
null, toReleaseDateFilter, false, true);
booleanQuery.add(termRangeQuery, BooleanClauseOccur.MUST);
}
}
//Process Statistic
//TODO
if (Validator.isNotNull(fromFinishDate) || Validator.isNotNull(toFinishDate)) {
String fromFinishDateFilter = fromFinishDate + ConstantsTerm.HOUR_START;
String toFinishDateFilter = toFinishDate + ConstantsTerm.HOUR_END;
if (Validator.isNotNull(fromFinishDate)) {
if (Validator.isNotNull(toFinishDate)) {
TermRangeQueryImpl termRangeQuery = new TermRangeQueryImpl(DossierTerm.FINISH_DATE_LUCENE,
fromFinishDateFilter, toFinishDateFilter, true, true);
booleanQuery.add(termRangeQuery, BooleanClauseOccur.MUST);
} else {
TermRangeQueryImpl termRangeQuery = new TermRangeQueryImpl(DossierTerm.FINISH_DATE_LUCENE,
fromFinishDateFilter, toFinishDateFilter, true, false);
booleanQuery.add(termRangeQuery, BooleanClauseOccur.MUST);
}
} else {
if (Validator.isNotNull(toFinishDate)) {
TermRangeQueryImpl termRangeQuery = new TermRangeQueryImpl(DossierTerm.FINISH_DATE_LUCENE,
fromFinishDateFilter, toFinishDateFilter, false, true);
booleanQuery.add(termRangeQuery, BooleanClauseOccur.MUST);
}
}
//
MultiMatchQuery query = new MultiMatchQuery(DossierTerm.DOSSIER_STATUS_DONE);
query.addField(DossierTerm.DOSSIER_STATUS);
booleanQuery.add(query, BooleanClauseOccur.MUST);
}
if (Validator.isNotNull(fromReceiveNotDoneDate) || Validator.isNotNull(toReceiveNotDoneDate)) {
//Check Release is null
MultiMatchQuery queryRelease = new MultiMatchQuery(String.valueOf(0));
queryRelease.addField(DossierTerm.RELEASE_DATE_TIMESTAMP);
booleanQuery.add(queryRelease, BooleanClauseOccur.MUST);
//
String fromReceiveNotDoneDateFilter = fromReceiveNotDoneDate + ConstantsTerm.HOUR_START;
String toReceiveNotDoneDateFilter = toReceiveNotDoneDate + ConstantsTerm.HOUR_END;
if (Validator.isNotNull(fromReceiveNotDoneDate)) {
if (Validator.isNotNull(toReceiveNotDoneDate)) {
TermRangeQueryImpl termRangeQuery = new TermRangeQueryImpl(DossierTerm.RECEIVE_DATE,
fromReceiveNotDoneDateFilter, toReceiveNotDoneDateFilter, true, true);
booleanQuery.add(termRangeQuery, BooleanClauseOccur.MUST);
} else {
TermRangeQueryImpl termRangeQuery = new TermRangeQueryImpl(DossierTerm.RECEIVE_DATE,
fromReceiveNotDoneDateFilter, toReceiveNotDoneDateFilter, true, false);
booleanQuery.add(termRangeQuery, BooleanClauseOccur.MUST);
}
} else {
if (Validator.isNotNull(toReceiveNotDoneDate)) {
TermRangeQueryImpl termRangeQuery = new TermRangeQueryImpl(DossierTerm.RECEIVE_DATE,
fromReceiveNotDoneDateFilter, toReceiveNotDoneDateFilter, false, true);
booleanQuery.add(termRangeQuery, BooleanClauseOccur.MUST);
}
}
}
if (Validator.isNotNull(paymentStatus)) {
MultiMatchQuery query = new MultiMatchQuery(paymentStatus);
query.addFields(PaymentFileTerm.PAYMENT_STATUS);
booleanQuery.add(query, BooleanClauseOccur.MUST);
}
if (Validator.isNotNull(origin)) {
MultiMatchQuery query = new MultiMatchQuery(origin);
query.addFields(DossierTerm.ORIGIN);
booleanQuery.add(query, BooleanClauseOccur.MUST);
}
// Check statistic with key "time"
if (Validator.isNotNull(time)) {
String[] lstTimes = StringUtil.split(time);
if (lstTimes != null && lstTimes.length > 1) {
BooleanQuery subQuery = new BooleanQueryImpl();
for (int i = 0; i < lstTimes.length; i++) {
BooleanQuery query = processStatisticDossier(lstTimes[i]);
subQuery.add(query, BooleanClauseOccur.SHOULD);
}
booleanQuery.add(subQuery, BooleanClauseOccur.MUST);
} else {
booleanQuery.add(processStatisticDossier(time), BooleanClauseOccur.MUST);
}
}
if (Validator.isNotNull(register)) {
MultiMatchQuery query = new MultiMatchQuery(register);
query.addFields(DossierTerm.REGISTER);
booleanQuery.add(query, BooleanClauseOccur.MUST);
}
if (day > 0) {
MultiMatchQuery query = new MultiMatchQuery(String.valueOf(day));
query.addFields(DossierTerm.DAY_DOSSIER);
booleanQuery.add(query, BooleanClauseOccur.MUST);
}
if (Validator.isNotNull(groupDossierId)) {
MultiMatchQuery query = new MultiMatchQuery(String.valueOf(groupDossierId));
query.addField(DossierTerm.GROUP_DOSSIER_ID);
booleanQuery.add(query, BooleanClauseOccur.MUST);
}
return booleanQuery;
}
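/**
 * Builds the Lucene sub-query for a single statistic bucket identified by
 * {@code subTime} (BE_TIME, OVER_TIME, ON_TIME, OVER_DUE, UN_DUE, COMING, DELAY).
 * The buckets are expressed through the pre-computed comparison fields
 * VALUE_COMPARE_RELEASE / VALUE_COMPARE_FINISH and the *_TIMESTAMP fields,
 * where a timestamp value of 0 is treated as "date not set".
 */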
private BooleanQuery processStatisticDossier(String subTime) throws ParseException {
BooleanQuery booleanQuery = new BooleanQueryImpl();
// Dossiers completed ahead of the due date (BE_TIME)
if (subTime.equals(DossierTerm.BE_TIME)) {
BooleanQuery subQueryOne = new BooleanQueryImpl();
BooleanQuery subQueryTwo = new BooleanQueryImpl();
BooleanQuery subQueryThree = new BooleanQueryImpl();
BooleanQuery subQueryFour = new BooleanQueryImpl();
/** Check condition dueDate != null **/
MultiMatchQuery queryDueDate = new MultiMatchQuery(String.valueOf(0));
queryDueDate.addField(DossierTerm.DUE_DATE_TIMESTAMP);
subQueryOne.add(queryDueDate, BooleanClauseOccur.MUST_NOT);
/** Check condition extendDate != null and releaseDate < dueDate **/
//Check extendDate != null
MultiMatchQuery queryExtend = new MultiMatchQuery(String.valueOf(0));
queryExtend.addField(DossierTerm.EXTEND_DATE_TIMESTAMP);
subQueryTwo.add(queryExtend, BooleanClauseOccur.MUST_NOT);
// Check releaseDate < dueDate
//TermRangeQueryImpl termRangeRelease = new TermRangeQueryImpl(DossierTerm.VALUE_COMPARE_RELEASE,
// null, String.valueOf(0), true, false);
//subQueryTwo.add(termRangeRelease, BooleanClauseOccur.MUST);
MultiMatchQuery termRangeRelease = new MultiMatchQuery("3");
termRangeRelease.addField(DossierTerm.VALUE_COMPARE_RELEASE);
subQueryTwo.add(termRangeRelease, BooleanClauseOccur.MUST);
/** Check condition finishDate < dueDate **/
//TermRangeQueryImpl termRangeFinish = new TermRangeQueryImpl(DossierTerm.VALUE_COMPARE_FINISH,
// null, String.valueOf(0), true, false);
MultiMatchQuery termRangeFinish = new MultiMatchQuery(String.valueOf(3));
termRangeFinish.addField(DossierTerm.VALUE_COMPARE_FINISH);
subQueryThree.add(termRangeFinish, BooleanClauseOccur.MUST);
/** Check condition (extendDate != null && releaseDate < dueDate) || (finishDate < dueDate) **/
subQueryFour.add(subQueryThree, BooleanClauseOccur.SHOULD);
subQueryFour.add(subQueryTwo, BooleanClauseOccur.SHOULD);
/** Check condition dueDate != null && subQueryTwo **/
subQueryOne.add(subQueryFour, BooleanClauseOccur.MUST);
/** Add search all **/
booleanQuery.add(subQueryOne, BooleanClauseOccur.MUST);
} else if (subTime.equals(DossierTerm.OVER_TIME)) { // Dossiers released after the due date (OVER_TIME)
BooleanQuery subQueryOne = new BooleanQueryImpl();
BooleanQuery subQueryTwo = new BooleanQueryImpl();
/** Check condition releaseDate != null **/
MultiMatchQuery queryRelease = new MultiMatchQuery(String.valueOf(0));
queryRelease.addField(DossierTerm.RELEASE_DATE_TIMESTAMP);
subQueryOne.add(queryRelease, BooleanClauseOccur.MUST_NOT);
/** Check condition dueDate != null **/
MultiMatchQuery querydueDate = new MultiMatchQuery(String.valueOf(0));
querydueDate.addField(DossierTerm.DUE_DATE_TIMESTAMP);
subQueryOne.add(querydueDate, BooleanClauseOccur.MUST_NOT);
/** Check condition releaseDate > dueDate **/
MultiMatchQuery termRangeRelease = new MultiMatchQuery(String.valueOf(1));
termRangeRelease.addField(DossierTerm.VALUE_COMPARE_RELEASE);
subQueryTwo.add(termRangeRelease, BooleanClauseOccur.MUST);
//TermRangeQueryImpl termRangeRelease = new TermRangeQueryImpl(DossierTerm.VALUE_COMPARE_RELEASE,
// String.valueOf(0), null, false, false);
//subQueryTwo.add(termRangeRelease, BooleanClauseOccur.MUST);
/** Check condition releaseDate != null && dueDate != null && subQueryTwo **/
subQueryOne.add(subQueryTwo, BooleanClauseOccur.MUST);
/** Add search all **/
booleanQuery.add(subQueryOne, BooleanClauseOccur.MUST);
} else if (subTime.equals(DossierTerm.ON_TIME)) { // Dossiers released on time (ON_TIME)
BooleanQuery subQueryOne = new BooleanQueryImpl();
BooleanQuery subQueryTwo = new BooleanQueryImpl();
BooleanQuery subQueryThree = new BooleanQueryImpl();
BooleanQuery subQueryFour = new BooleanQueryImpl();
BooleanQuery subQueryFive = new BooleanQueryImpl();
BooleanQuery subQuerySix = new BooleanQueryImpl();
BooleanQuery subQuerySeven = new BooleanQueryImpl();
/** Check condition releaseDate!=null && (dueDate==null || (releaseDate<dueDate && extendDate==null && (finishDate==null||finishDate>=dueDate))) - START **/
/** Check condition releaseDate != null **/
MultiMatchQuery queryReleaseEmpty = new MultiMatchQuery(String.valueOf(0));
queryReleaseEmpty.addField(DossierTerm.RELEASE_DATE_TIMESTAMP);
subQueryOne.add(queryReleaseEmpty, BooleanClauseOccur.MUST_NOT);
/** Check condition (dueDate==null || (releaseDate<dueDate && extendDate==null && (finishDate==null||finishDate>=dueDate)) - START **/
/** Check condition dueDate == null **/
MultiMatchQuery queryDueDateEmpty = new MultiMatchQuery(String.valueOf(0));
queryDueDateEmpty.addField(DossierTerm.DUE_DATE_TIMESTAMP);
subQueryTwo.add(queryDueDateEmpty, BooleanClauseOccur.MUST);
/** Check condition (extendDate == null and releaseDate < dueDate && (finishDate==null||finishDate>=dueDate))- START **/
/** Check condition extendDate == null and releaseDate < dueDate **/
//Check extendDate == null
MultiMatchQuery queryExtend = new MultiMatchQuery(String.valueOf(0));
queryExtend.addField(DossierTerm.EXTEND_DATE_TIMESTAMP);
subQueryThree.add(queryExtend, BooleanClauseOccur.MUST);
//Check dueDate != null
MultiMatchQuery queryDueDate = new MultiMatchQuery(String.valueOf(0));
queryDueDate.addField(DossierTerm.DUE_DATE_TIMESTAMP);
subQueryThree.add(queryDueDate, BooleanClauseOccur.MUST_NOT);
// Check releaseDate < dueDate
// TermRangeQueryImpl queryCompareRelease = new TermRangeQueryImpl(DossierTerm.VALUE_COMPARE_RELEASE,
// String.valueOf(2), String.valueOf(2), true, true);
MultiMatchQuery queryCompareRelease = new MultiMatchQuery(String.valueOf(2));
queryCompareRelease.addField(DossierTerm.VALUE_COMPARE_RELEASE);
subQueryThree.add(queryCompareRelease, BooleanClauseOccur.MUST);
/** Check condition (finishDate == null) || (finishDate != null && finishDate >= dueDate) - START **/
/** Check condition (finishDate == null) **/
MultiMatchQuery queryFinishDateEmpty = new MultiMatchQuery(String.valueOf(0));
queryFinishDateEmpty.addField(DossierTerm.FINISH_DATE_TIMESTAMP);
subQueryFour.add(queryFinishDateEmpty, BooleanClauseOccur.MUST);
/** Check condition (finishDate != null && finishDate >= dueDate) **/
//Check finishDate != null
MultiMatchQuery queryFinishDate = new MultiMatchQuery(String.valueOf(0));
queryFinishDate.addField(DossierTerm.FINISH_DATE_TIMESTAMP);
subQueryFive.add(queryFinishDate, BooleanClauseOccur.MUST_NOT);
//Check finishDate >= dueDate
// TermRangeQueryImpl queryCompareFinish = new TermRangeQueryImpl(DossierTerm.VALUE_COMPARE_FINISH,
// String.valueOf(1), String.valueOf(2), true, true);
MultiMatchQuery queryCompareFinish = new MultiMatchQuery(String.valueOf(2));
queryCompareFinish.addField(DossierTerm.VALUE_COMPARE_FINISH);
subQueryFive.add(queryCompareFinish, BooleanClauseOccur.MUST);
/** Check condition (finishDate == null) || (finishDate != null && finishDate >= dueDate) - END **/
// subQuerySix.add(subQueryFive, BooleanClauseOccur.SHOULD);
// subQuerySix.add(subQueryFour, BooleanClauseOccur.SHOULD);
/** Check condition (releaseDate < dueDate && extendDate==null && (finishDate==null||finishDate>=dueDate))- END **/
subQueryThree.add(subQuerySix, BooleanClauseOccur.MUST);
/** Check condition (dueDate==null || (releaseDate<dueDate && extendDate==null && (finishDate==null||finishDate>=dueDate)) - END **/
subQuerySeven.add(subQueryThree, BooleanClauseOccur.SHOULD);
subQuerySeven.add(subQueryTwo, BooleanClauseOccur.SHOULD);
/** Check condition releaseDate!=null && (dueDate==null || (releaseDate<dueDate && extendDate==null && (finishDate==null||finishDate>=dueDate))) - END **/
subQueryOne.add(subQuerySeven, BooleanClauseOccur.MUST);
/** Add search all **/
booleanQuery.add(subQueryOne, BooleanClauseOccur.MUST);
} else if (subTime.equals(DossierTerm.OVER_DUE)) { // Dossiers still being processed and already overdue (OVER_DUE)
/** Check condition releaseDate == null **/
MultiMatchQuery queryRelease = new MultiMatchQuery(String.valueOf(0));
queryRelease.addField(DossierTerm.RELEASE_DATE_TIMESTAMP);
booleanQuery.add(queryRelease, BooleanClauseOccur.MUST);
/** Check condition dueDate != null **/
MultiMatchQuery querydueDate = new MultiMatchQuery(String.valueOf(0));
querydueDate.addField(DossierTerm.DUE_DATE_TIMESTAMP);
booleanQuery.add(querydueDate, BooleanClauseOccur.MUST_NOT);
/** Check condition status != waiting **/
MultiMatchQuery queryWaiting = new MultiMatchQuery(DossierTerm.DOSSIER_STATUS_WAITING);
queryWaiting.addField(DossierTerm.DOSSIER_STATUS);
booleanQuery.add(queryWaiting, BooleanClauseOccur.MUST_NOT);
/** Check condition status != receiving **/
MultiMatchQuery queryReceiving = new MultiMatchQuery(DossierTerm.DOSSIER_STATUS_RECEIVING);
queryReceiving.addField(DossierTerm.DOSSIER_STATUS);
booleanQuery.add(queryReceiving, BooleanClauseOccur.MUST_NOT);
/** Check condition lockState != PAUSE **/
//MultiMatchQuery queryLockState = new MultiMatchQuery(DossierTerm.DOSSIER_STATUS_WAITING);
//queryWaiting.addField(DossierTerm.DOSSIER_STATUS);
//booleanQuery.add(queryWaiting, BooleanClauseOccur.MUST_NOT);
/** Check condition dueDate < now **/
Date date = new Date();
long nowTime = date.getTime();
TermRangeQueryImpl termRangeQuery = new TermRangeQueryImpl(DossierTerm.DUE_DATE_TIMESTAMP,
String.valueOf(0), String.valueOf(nowTime), false, false);
booleanQuery.add(termRangeQuery, BooleanClauseOccur.MUST);
} else if (subTime.equals(DossierTerm.UN_DUE)) { // Dossiers still being processed and not yet due (UN_DUE)
BooleanQuery subQueryOne = new BooleanQueryImpl();
BooleanQuery subQueryTwo = new BooleanQueryImpl();
BooleanQuery subQueryThree = new BooleanQueryImpl();
BooleanQuery subQueryFour = new BooleanQueryImpl();
/** Check condition releaseDate == null **/
MultiMatchQuery queryRelease = new MultiMatchQuery(String.valueOf(0));
queryRelease.addField(DossierTerm.RELEASE_DATE_TIMESTAMP);
subQueryOne.add(queryRelease, BooleanClauseOccur.MUST);
/** Check condition dueDate == null **/
MultiMatchQuery querydueDateNull = new MultiMatchQuery(String.valueOf(0));
querydueDateNull.addField(DossierTerm.DUE_DATE_TIMESTAMP);
subQueryTwo.add(querydueDateNull, BooleanClauseOccur.MUST);
/** Check condition (dueDate != null && now < dueDate) - START **/
// Check dueDate != null
MultiMatchQuery querydueDate = new MultiMatchQuery(String.valueOf(0));
querydueDate.addField(DossierTerm.DUE_DATE_TIMESTAMP);
subQueryThree.add(querydueDate, BooleanClauseOccur.MUST_NOT);
// Check condition now < dueDate (not yet due)
Date date = new Date();
long nowTime = date.getTime();
TermRangeQueryImpl termRangeQuery = new TermRangeQueryImpl(DossierTerm.DUE_DATE_TIMESTAMP,
String.valueOf(nowTime), null, true, false);
subQueryThree.add(termRangeQuery, BooleanClauseOccur.MUST);
/** Check condition (dueDate != null && now < dueDate) - END **/
/** Check condition (dueDate==null || (dueDate!=null && now<dueDate)) **/
subQueryFour.add(subQueryThree, BooleanClauseOccur.SHOULD);
subQueryFour.add(subQueryTwo, BooleanClauseOccur.SHOULD);
/** Check condition releaseDate==null && (dueDate==null || (dueDate!=null && now<dueDate)) **/
subQueryOne.add(subQueryFour, BooleanClauseOccur.MUST);
//
booleanQuery.add(subQueryOne, BooleanClauseOccur.MUST);
} else if (subTime.equals(DossierTerm.COMING)) { // Dossiers still being processed and approaching the due date (COMING)
/** Check condition releaseDate == null **/
MultiMatchQuery queryRelease = new MultiMatchQuery(String.valueOf(0));
queryRelease.addField(DossierTerm.RELEASE_DATE_TIMESTAMP);
booleanQuery.add(queryRelease, BooleanClauseOccur.MUST);
/** Check condition dueDate != null **/
MultiMatchQuery querydueDateNull = new MultiMatchQuery(String.valueOf(0));
querydueDateNull.addField(DossierTerm.DUE_DATE_TIMESTAMP);
booleanQuery.add(querydueDateNull, BooleanClauseOccur.MUST_NOT);
/** Check condition (dueDate-duration/5) < now **/
Date date = new Date();
long nowTime = date.getTime();
TermRangeQueryImpl termRangeQuery = new TermRangeQueryImpl(DossierTerm.DUE_DATE_COMING,
String.valueOf(0), String.valueOf(nowTime), false, true);
booleanQuery.add(termRangeQuery, BooleanClauseOccur.MUST);
/** Check condition nowDate < dueDate **/
TermRangeQueryImpl termRangeQueryNow = new TermRangeQueryImpl(DossierTerm.DUE_DATE_TIMESTAMP,
String.valueOf(nowTime), null, true, true);
booleanQuery.add(termRangeQueryNow, BooleanClauseOccur.MUST);
} else if (subTime.equals(DossierTerm.DELAY)) { // Dossiers still being processed with an extension past the due date (DELAY)
/** Check condition releaseDate == null **/
MultiMatchQuery queryRelease = new MultiMatchQuery(String.valueOf(0));
queryRelease.addField(DossierTerm.RELEASE_DATE_TIMESTAMP);
booleanQuery.add(queryRelease, BooleanClauseOccur.MUST);
/** Check condition dueDate != null **/
MultiMatchQuery querydueDate = new MultiMatchQuery(String.valueOf(0));
querydueDate.addField(DossierTerm.DUE_DATE_TIMESTAMP);
booleanQuery.add(querydueDate, BooleanClauseOccur.MUST_NOT);
/** Check condition extendDate > dueDate **/
MultiMatchQuery query = new MultiMatchQuery(String.valueOf(1));
query.addFields(DossierTerm.COMPARE_DELAY_DATE);
booleanQuery.add(query, BooleanClauseOccur.MUST);
}
return booleanQuery;
}
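/**
 * Looks up the display name of a dossier template by its code within a group;
 * returns an empty string when no template is found.
 */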
private String getDossierTemplateName(long groupId, String dossierTemplateCode) {
String name = StringPool.BLANK;
DossierTemplate template = dossierTemplatePersistence.fetchByG_DT_TPLNO(groupId, dossierTemplateCode);
if (Validator.isNotNull(template)) {
name = template.getTemplateName();
}
return name;
}
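/**
 * Resolves the dossier instruction note: first from the ProcessOption matched by
 * service config and template number, then falls back to the ServiceInfo process
 * text when no option note is available.
 */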
private String getDossierNote(String serviceInfoCode, String govAgencyCode, String dossierTemplateNo,
long groupId) {
String dossierNote = StringPool.BLANK;
ServiceInfo serviceInfo = serviceInfoPersistence.fetchBySC_GI(serviceInfoCode, groupId);
try {
ServiceConfig config = ServiceConfigLocalServiceUtil.getBySICodeAndGAC(groupId, serviceInfoCode,
govAgencyCode);
ProcessOption option = ProcessOptionLocalServiceUtil.getByDTPLNoAndServiceCF(groupId, dossierTemplateNo,
config.getServiceConfigId());
dossierNote = option.getInstructionNote();
if (Validator.isNull(dossierNote)) {
throw new Exception();
}
} catch (Exception e) {
_log.debug(e);
if (Validator.isNotNull(serviceInfo)) {
dossierNote = serviceInfo.getProcessText();
}
}
return dossierNote;
}
private String getDossierNote(ServiceInfo serviceInfo, ProcessOption option) {
if (option != null) {
return option.getInstructionNote();
} else if(Validator.isNotNull(serviceInfo)){
return serviceInfo.getProcessText();
}
return StringPool.BLANK;
}
public long countDossierByG_C_GAC_SC_DTNO_NOTDS(long groupId, long companyId, String govAgencyCode,
String serviceCode, String dossierTemplateNo, String dossierStatus) {
return dossierPersistence.countByG_C_GAC_SC_DTNO_NOTDS(groupId, companyId, govAgencyCode, serviceCode,
dossierTemplateNo, dossierStatus);
}
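/**
 * Returns the server number of the first ServerConfig registered for the group,
 * or an empty string if none can be read.
 */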
private String getServerNo(long groupId) {
try {
List<ServerConfig> sc = ServerConfigLocalServiceUtil.getGroupId(groupId);
// _log.debug("sc.get(0).getServerNo():" + sc.get(0).getServerNo());
return sc.get(0).getServerNo();
} catch (Exception e) {
_log.error(e);
return StringPool.BLANK;
}
}
// TrungDK: Process
public List<Dossier> getDossierByG_NOTO_DS(int originality, String dossierStatus) {
return dossierPersistence.findByNOTO_DS(originality, dossierStatus);
}
public List<Dossier> getDossierByG_NOTO_DS(int[] originalityArr, String dossierStatus) {
return dossierPersistence.findByNOTO_DS(originalityArr, dossierStatus);
}
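/**
 * Garbage collector: removes dossiers with the given originalities and status
 * that are older than GARBAGE_COLLECTOR_TIME (in minutes).
 */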
public void removeDossierByG_NOTO_DS(int[] originalityArr, String dossierStatus) {
List<Dossier> lstDossiers = dossierPersistence.findByNOTO_DS(originalityArr, dossierStatus);
Date now = new Date();
if (lstDossiers != null && lstDossiers.size() > 0) {
for (Dossier dossier : lstDossiers) {
long diffInMillies = Math.abs(now.getTime() - dossier.getCreateDate().getTime());
long diff = TimeUnit.MINUTES.convert(diffInMillies, TimeUnit.MILLISECONDS);
try {
if (diff > DossierTerm.GARBAGE_COLLECTOR_TIME)
dossierPersistence.remove(dossier.getDossierId());
} catch (NoSuchDossierException e) {
_log.error(e);
}
}
}
}
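/**
 * Garbage collector for group dossiers: removes dossiers with the given
 * originality and status older than GARBAGE_COLLECTOR_GROUP_DOSSIER (in hours).
 */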
public void removeDossierByF_OG_DS(int originality, String dossierStatus) {
List<Dossier> lstDossiers = dossierPersistence.findByF_OG_DS(originality, dossierStatus);
Date now = new Date();
if (lstDossiers != null && lstDossiers.size() > 0) {
for (Dossier dossier : lstDossiers) {
long diffInMillies = Math.abs(now.getTime() - dossier.getCreateDate().getTime());
long diff = TimeUnit.HOURS.convert(diffInMillies, TimeUnit.MILLISECONDS);
try {
if (diff > DossierTerm.GARBAGE_COLLECTOR_GROUP_DOSSIER)
dossierPersistence.remove(dossier.getDossierId());
} catch (NoSuchDossierException e) {
_log.error(e);
}
}
}
}
public static final String CLASS_NAME = Dossier.class.getName();
//LamTV: Process get Dossier by dossierId, govAgency, serviceProcess
public Dossier getByIdAndGovService(long groupId, String serviceCode, String govAgencyCode, long dossierId) {
return dossierPersistence.fetchByF_GID_GOV_DID(groupId, govAgencyCode, serviceCode, dossierId);
}
public List<Dossier> getByNotO_DS_SC_GC(long groupId, int originality, String dossierStatus, String serviceCode, String govAgencyCode) {
return dossierPersistence.findByG_NOTO_DS_SC_GC(groupId, originality, dossierStatus, serviceCode, govAgencyCode);
}
//LamTV_Process update dossier
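/**
 * Updates applicant, contact, postal and delegate information of an existing
 * dossier; only non-null parameters overwrite the stored values. When
 * isSameAsApplicant is set, the delegate fields are copied from the applicant.
 */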
@Indexable(type = IndexableType.REINDEX)
public Dossier initUpdateDossier(long groupId, long id, String applicantName, String applicantIdType,
String applicantIdNo, String applicantIdDate, String address, String cityCode, String cityName,
String districtCode, String districtName, String wardCode, String wardName, String contactName,
String contactTelNo, String contactEmail, String dossierTemplateNo, Integer viaPostal, String postalAddress,
String postalCityCode, String postalCityName, String postalTelNo, String applicantNote,
boolean isSameAsApplicant, String delegateName, String delegateIdNo, String delegateTelNo,
String delegateEmail, String delegateAddress, String delegateCityCode, String delegateDistrictCode,
String delegateWardCode, Long sampleCount, ServiceContext serviceContext) {
Date now = new Date();
long userId = serviceContext.getUserId();
User auditUser = userPersistence.fetchByPrimaryKey(userId);
Dossier dossier = dossierPersistence.fetchByPrimaryKey(id);
dossier.setModifiedDate(now);
dossier.setUserId(userId);
dossier.setUserName(auditUser.getFullName());
//
if (Validator.isNotNull(applicantName))
dossier.setApplicantName(applicantName);
if (Validator.isNotNull(applicantIdType))
dossier.setApplicantIdType(applicantIdType);
if (Validator.isNotNull(applicantIdNo))
dossier.setApplicantIdNo(applicantIdNo);
if (Validator.isNotNull(applicantIdDate))
dossier.setApplicantIdDate(
APIDateTimeUtils.convertStringToDate(applicantIdDate, APIDateTimeUtils._NORMAL_PARTTERN));
if (Validator.isNotNull(address))
dossier.setAddress(address);
if (Validator.isNotNull(cityCode))
dossier.setCityCode(cityCode);
if (Validator.isNotNull(cityName))
dossier.setCityName(cityName);
if (Validator.isNotNull(districtCode))
dossier.setDistrictCode(districtCode);
if (Validator.isNotNull(districtName))
dossier.setDistrictName(districtName);
if (Validator.isNotNull(wardCode))
dossier.setWardCode(wardCode);
if (Validator.isNotNull(wardName))
dossier.setWardName(wardName);
if (Validator.isNotNull(contactName))
dossier.setContactName(contactName);
if (Validator.isNotNull(contactEmail))
dossier.setContactEmail(contactEmail);
if (Validator.isNotNull(contactTelNo))
dossier.setContactTelNo(contactTelNo);
if (Validator.isNotNull(sampleCount))
dossier.setSampleCount(sampleCount);
if (Validator.isNotNull(viaPostal)) {
dossier.setViaPostal(viaPostal);
if (viaPostal == 1) {
dossier.setPostalAddress(StringPool.BLANK);
dossier.setPostalCityCode(StringPool.BLANK);
dossier.setPostalTelNo(StringPool.BLANK);
} else if (viaPostal == 2) {
if (Validator.isNotNull(postalAddress))
dossier.setPostalAddress(postalAddress);
if (Validator.isNotNull(postalCityCode))
dossier.setPostalCityCode(postalCityCode);
if (Validator.isNotNull(postalTelNo))
dossier.setPostalTelNo(postalTelNo);
if (Validator.isNotNull(postalCityName))
dossier.setPostalCityName(postalCityName);
} else {
dossier.setPostalAddress(StringPool.BLANK);
dossier.setPostalCityCode(StringPool.BLANK);
dossier.setPostalTelNo(StringPool.BLANK);
}
}
if (isSameAsApplicant) {
dossier.setDelegateName(applicantName);
dossier.setDelegateIdNo(applicantIdNo);
dossier.setDelegateTelNo(contactTelNo);
dossier.setDelegateAddress(address);
dossier.setDelegateEmail(contactEmail);
if (Validator.isNotNull(cityCode)) {
dossier.setDelegateCityCode(cityCode);
dossier.setDelegateCityName(getDictItemName(dossier.getGroupId(), ADMINISTRATIVE_REGION, cityCode));
}
if (Validator.isNotNull(districtCode)) {
dossier.setDelegateDistrictCode(districtCode);
dossier.setDelegateDistrictName(
getDictItemName(dossier.getGroupId(), ADMINISTRATIVE_REGION, districtCode));
}
if (Validator.isNotNull(wardCode)) {
dossier.setDelegateWardCode(wardCode);
dossier.setDelegateWardName(getDictItemName(dossier.getGroupId(), ADMINISTRATIVE_REGION, wardCode));
}
} else {
dossier.setDelegateName(delegateName);
dossier.setDelegateIdNo(delegateIdNo);
dossier.setDelegateTelNo(delegateTelNo);
dossier.setDelegateAddress(delegateAddress);
dossier.setDelegateEmail(delegateEmail);
if (Validator.isNotNull(delegateCityCode)) {
dossier.setDelegateCityCode(delegateCityCode);
dossier.setDelegateCityName(
getDictItemName(dossier.getGroupId(), ADMINISTRATIVE_REGION, delegateCityCode));
}
if (Validator.isNotNull(delegateDistrictCode)) {
dossier.setDelegateDistrictCode(delegateDistrictCode);
dossier.setDelegateDistrictName(
getDictItemName(dossier.getGroupId(), ADMINISTRATIVE_REGION, delegateDistrictCode));
}
if (Validator.isNotNull(delegateWardCode)) {
dossier.setDelegateWardCode(delegateWardCode);
dossier.setDelegateWardName(
getDictItemName(dossier.getGroupId(), ADMINISTRATIVE_REGION, delegateWardCode));
}
}
dossier.setApplicantNote(applicantNote);
return dossierPersistence.update(dossier);
}
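/**
 * Variant of initUpdateDossier that additionally updates the dossier name.
 */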
@Indexable(type = IndexableType.REINDEX)
public Dossier initUpdateDossier(long groupId, long id, String applicantName, String applicantIdType,
String applicantIdNo, String applicantIdDate, String address, String cityCode, String cityName,
String districtCode, String districtName, String wardCode, String wardName, String contactName,
String contactTelNo, String contactEmail, String dossierTemplateNo, Integer viaPostal, String postalAddress,
String postalCityCode, String postalCityName, String postalTelNo, String applicantNote,
boolean isSameAsApplicant, String delegateName, String delegateIdNo, String delegateTelNo,
String delegateEmail, String delegateAddress, String delegateCityCode, String delegateDistrictCode,
String delegateWardCode, Long sampleCount, String dossierName, ServiceContext serviceContext) {
Date now = new Date();
long userId = serviceContext.getUserId();
User auditUser = userPersistence.fetchByPrimaryKey(userId);
Dossier dossier = dossierPersistence.fetchByPrimaryKey(id);
dossier.setModifiedDate(now);
dossier.setUserId(userId);
dossier.setUserName(auditUser.getFullName());
//
if (Validator.isNotNull(applicantName))
dossier.setApplicantName(applicantName);
if (Validator.isNotNull(applicantIdType))
dossier.setApplicantIdType(applicantIdType);
if (Validator.isNotNull(applicantIdNo))
dossier.setApplicantIdNo(applicantIdNo);
if (Validator.isNotNull(applicantIdDate))
dossier.setApplicantIdDate(
APIDateTimeUtils.convertStringToDate(applicantIdDate, APIDateTimeUtils._NORMAL_PARTTERN));
if (Validator.isNotNull(address))
dossier.setAddress(address);
if (Validator.isNotNull(cityCode))
dossier.setCityCode(cityCode);
if (Validator.isNotNull(cityName))
dossier.setCityName(cityName);
if (Validator.isNotNull(districtCode))
dossier.setDistrictCode(districtCode);
if (Validator.isNotNull(districtName))
dossier.setDistrictName(districtName);
if (Validator.isNotNull(wardCode))
dossier.setWardCode(wardCode);
if (Validator.isNotNull(wardName))
dossier.setWardName(wardName);
if (Validator.isNotNull(contactName))
dossier.setContactName(contactName);
if (Validator.isNotNull(contactEmail))
dossier.setContactEmail(contactEmail);
if (Validator.isNotNull(contactTelNo))
dossier.setContactTelNo(contactTelNo);
if (Validator.isNotNull(sampleCount))
dossier.setSampleCount(sampleCount);
if (Validator.isNotNull(dossierName)) {
dossier.setDossierName(dossierName);
}
if (Validator.isNotNull(viaPostal)) {
dossier.setViaPostal(viaPostal);
if (viaPostal == 1) {
dossier.setPostalAddress(StringPool.BLANK);
dossier.setPostalCityCode(StringPool.BLANK);
dossier.setPostalTelNo(StringPool.BLANK);
} else if (viaPostal == 2) {
if (Validator.isNotNull(postalAddress))
dossier.setPostalAddress(postalAddress);
if (Validator.isNotNull(postalCityCode))
dossier.setPostalCityCode(postalCityCode);
if (Validator.isNotNull(postalTelNo))
dossier.setPostalTelNo(postalTelNo);
if (Validator.isNotNull(postalCityName))
dossier.setPostalCityName(postalCityName);
} else {
dossier.setPostalAddress(StringPool.BLANK);
dossier.setPostalCityCode(StringPool.BLANK);
dossier.setPostalTelNo(StringPool.BLANK);
}
}
if (isSameAsApplicant) {
dossier.setDelegateName(applicantName);
dossier.setDelegateIdNo(applicantIdNo);
dossier.setDelegateTelNo(contactTelNo);
dossier.setDelegateAddress(address);
dossier.setDelegateEmail(contactEmail);
if (Validator.isNotNull(cityCode)) {
dossier.setDelegateCityCode(cityCode);
dossier.setDelegateCityName(getDictItemName(dossier.getGroupId(), ADMINISTRATIVE_REGION, cityCode));
}
if (Validator.isNotNull(districtCode)) {
dossier.setDelegateDistrictCode(districtCode);
dossier.setDelegateDistrictName(
getDictItemName(dossier.getGroupId(), ADMINISTRATIVE_REGION, districtCode));
}
if (Validator.isNotNull(wardCode)) {
dossier.setDelegateWardCode(wardCode);
dossier.setDelegateWardName(getDictItemName(dossier.getGroupId(), ADMINISTRATIVE_REGION, wardCode));
}
} else {
dossier.setDelegateName(delegateName);
dossier.setDelegateIdNo(delegateIdNo);
dossier.setDelegateTelNo(delegateTelNo);
dossier.setDelegateAddress(delegateAddress);
dossier.setDelegateEmail(delegateEmail);
if (Validator.isNotNull(delegateCityCode)) {
dossier.setDelegateCityCode(delegateCityCode);
dossier.setDelegateCityName(
getDictItemName(dossier.getGroupId(), ADMINISTRATIVE_REGION, delegateCityCode));
}
if (Validator.isNotNull(delegateDistrictCode)) {
dossier.setDelegateDistrictCode(delegateDistrictCode);
dossier.setDelegateDistrictName(
getDictItemName(dossier.getGroupId(), ADMINISTRATIVE_REGION, delegateDistrictCode));
}
if (Validator.isNotNull(delegateWardCode)) {
dossier.setDelegateWardCode(delegateWardCode);
dossier.setDelegateWardName(
getDictItemName(dossier.getGroupId(), ADMINISTRATIVE_REGION, delegateWardCode));
}
}
dossier.setApplicantNote(applicantNote);
return dossierPersistence.update(dossier);
}
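/**
 * Overwrites the applicant block of a dossier and mirrors the same values into
 * the delegate fields.
 */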
@Indexable(type = IndexableType.REINDEX)
public Dossier updateApplicantInfo(long dossierId,
Date applicantIdDate,
String applicantIdNo,
String applicantIdType,
String applicantName,
String address,
String cityCode,
String cityName,
String districtCode,
String districtName,
String wardCode,
String wardName,
String contactEmail,
String contactTelNo) throws NoSuchDossierException {
Dossier dossier = dossierPersistence.findByPrimaryKey(dossierId);
dossier.setApplicantIdDate(applicantIdDate);
dossier.setApplicantIdNo(applicantIdNo);
dossier.setApplicantIdType(applicantIdType);
dossier.setApplicantName(applicantName);
dossier.setAddress(address);
dossier.setCityCode(cityCode);
dossier.setCityName(cityName);
dossier.setDistrictCode(districtCode);
dossier.setDistrictName(districtName);
dossier.setWardCode(wardCode);
dossier.setWardName(wardName);
dossier.setContactEmail(contactEmail);
dossier.setContactTelNo(contactTelNo);
dossier.setDelegateAddress(address);
dossier.setDelegateCityCode(cityCode);
dossier.setDelegateCityName(cityName);
dossier.setDelegateDistrictCode(districtCode);
dossier.setDelegateDistrictName(districtName);
dossier.setDelegateEmail(contactEmail);
dossier.setDelegateIdNo(applicantIdNo);
dossier.setDelegateName(applicantName);
dossier.setDelegateTelNo(contactTelNo);
dossier.setDelegateWardCode(wardCode);
dossier.setDelegateWardName(wardName);
return dossierPersistence.update(dossier);
}
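/**
 * Applies a partial update from a JSON payload: each field is changed only when
 * it is present in the payload and differs from the stored value.
 */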
@Indexable(type = IndexableType.REINDEX)
public Dossier updateDossier(long dossierId, JSONObject obj) throws NoSuchDossierException {
// _log.debug("Object dossier update: " + obj.toJSONString());
Dossier dossier = dossierPersistence.findByPrimaryKey(dossierId);
if (obj.has(DossierTerm.DOSSIER_NOTE)) {
if (!obj.getString(DossierTerm.DOSSIER_NOTE).equals(dossier.getDossierNote())) {
dossier.setDossierNote(obj.getString(DossierTerm.DOSSIER_NOTE));
}
}
if (obj.has(DossierTerm.EXTEND_DATE) && Validator.isNotNull(obj.get(DossierTerm.EXTEND_DATE))
&& GetterUtil.getLong(obj.get(DossierTerm.EXTEND_DATE)) > 0) {
if (dossier.getExtendDate() == null || obj.getLong(DossierTerm.EXTEND_DATE) != dossier.getExtendDate().getTime()) {
dossier.setExtendDate(new Date(obj.getLong(DossierTerm.EXTEND_DATE)));
}
}
if (obj.has(DossierTerm.DOSSIER_NO)) {
//_log.debug("Sync dossier no");
if (Validator.isNotNull(obj.getString(DossierTerm.DOSSIER_NO)) && !obj.getString(DossierTerm.DOSSIER_NO).equals(dossier.getDossierNo())) {
//_log.debug("Sync set dossier no");
dossier.setDossierNo(obj.getString(DossierTerm.DOSSIER_NO));
}
}
if (obj.has(DossierTerm.DUE_DATE) && Validator.isNotNull(obj.get(DossierTerm.DUE_DATE))
&& GetterUtil.getLong(obj.get(DossierTerm.DUE_DATE)) > 0) {
if (dossier.getDueDate() == null || obj.getLong(DossierTerm.DUE_DATE) != dossier.getDueDate().getTime()) {
dossier.setDueDate(new Date(obj.getLong(DossierTerm.DUE_DATE)));
}
}
if (obj.has(DossierTerm.FINISH_DATE) && Validator.isNotNull(obj.get(DossierTerm.FINISH_DATE))
&& GetterUtil.getLong(obj.get(DossierTerm.FINISH_DATE)) > 0) {
if (dossier.getFinishDate() == null || obj.getLong(DossierTerm.FINISH_DATE) != dossier.getFinishDate().getTime()) {
dossier.setFinishDate(new Date(obj.getLong(DossierTerm.FINISH_DATE)));
}
}
if (obj.has(DossierTerm.RECEIVE_DATE) && Validator.isNotNull(obj.get(DossierTerm.RECEIVE_DATE))
&& GetterUtil.getLong(obj.get(DossierTerm.RECEIVE_DATE)) > 0) {
if (dossier.getReceiveDate() == null || obj.getLong(DossierTerm.RECEIVE_DATE) != dossier.getReceiveDate().getTime()) {
dossier.setReceiveDate(new Date(obj.getLong(DossierTerm.RECEIVE_DATE)));
}
}
if (obj.has(DossierTerm.SUBMIT_DATE) && Validator.isNotNull(obj.get(DossierTerm.SUBMIT_DATE))
&& GetterUtil.getLong(obj.get(DossierTerm.SUBMIT_DATE)) > 0) {
if (dossier.getSubmitDate() == null || (dossier.getSubmitDate() != null && obj.getLong(DossierTerm.SUBMIT_DATE) != dossier.getSubmitDate().getTime())) {
dossier.setSubmitDate(new Date(obj.getLong(DossierTerm.SUBMIT_DATE)));
}
}
if (obj.has(DossierTerm.DOSSIER_NOTE)) {
if (dossier.getDossierNote() == null || !obj.getString(DossierTerm.DOSSIER_NOTE).equals(dossier.getDossierNote())) {
dossier.setDossierNote(obj.getString(DossierTerm.DOSSIER_NOTE));
}
}
if (obj.has(DossierTerm.SUBMISSION_NOTE)) {
if (!obj.getString(DossierTerm.SUBMISSION_NOTE).equals(dossier.getSubmissionNote())) {
dossier.setSubmissionNote(obj.getString(DossierTerm.SUBMISSION_NOTE));
}
}
if (obj.has(DossierTerm.RELEASE_DATE) && Validator.isNotNull(obj.get(DossierTerm.RELEASE_DATE))
&& GetterUtil.getLong(obj.get(DossierTerm.RELEASE_DATE)) > 0) {
if (dossier.getReleaseDate() == null || obj.getLong(DossierTerm.RELEASE_DATE) != dossier.getReleaseDate().getTime()) {
dossier.setReleaseDate(new Date(obj.getLong(DossierTerm.RELEASE_DATE)));
}
}
if (obj.has(DossierTerm.LOCK_STATE)) {
if (!obj.getString(DossierTerm.LOCK_STATE).equals(dossier.getLockState())) {
dossier.setLockState(obj.getString(DossierTerm.LOCK_STATE));
}
}
if (obj.has(DossierTerm.BRIEF_NOTE)) {
if (!obj.getString(DossierTerm.BRIEF_NOTE).equals(dossier.getBriefNote())) {
dossier.setBriefNote(obj.getString(DossierTerm.BRIEF_NOTE));
}
}
return dossierPersistence.update(dossier);
}
public Dossier getByOrigin(long groupId, long originDossierId) {
return dossierPersistence.fetchByG_O_DID_First(groupId, originDossierId, null);
}
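/**
 * Creates or updates a dossier published (synchronized) into this group, matched
 * by its reference UID. New dossiers are created with the supplied dates and
 * numbers; existing ones are patched field by field.
 */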
@Indexable(type = IndexableType.REINDEX)
public Dossier publishDossier(long groupId, long dossierId, String referenceUid, int counter, String serviceCode,
String serviceName, String govAgencyCode, String govAgencyName, String applicantName,
String applicantIdType, String applicantIdNo, Date applicantIdDate, String address, String cityCode,
String cityName, String districtCode, String districtName, String wardCode, String wardName,
String contactName, String contactTelNo, String contactEmail, String dossierTemplateNo, String password,
int viaPostal, String postalAddress, String postalCityCode, String postalCityName, String postalTelNo,
boolean online, boolean notification, String applicantNote, int originality, Date createDate,
Date modifiedDate, Date submitDate, Date receiveDate, Date dueDate, Date releaseDate, Date finishDate,
Date cancellingDate, Date correctingDate, Date endorsementDate, Date extendDate, Date processDate,
String dossierNo, String dossierStatus, String dossierStatusText, String dossierSubStatus,
String dossierSubStatusText, long dossierActionId, String submissionNote, String lockState,
String delegateName, String delegateIdNo, String delegateTelNo, String delegateEmail,
String delegateAddress, String delegateCityCode, String delegateCityName, String delegateDistrictCode,
String delegateDistrictName, String delegateWardCode, String delegateWardName, double durationCount,
int durationUnit, String dossierName, String processNo, String metaData, ServiceContext context) throws PortalException {
long userId = context.getUserId();
User auditUser = userPersistence.fetchByPrimaryKey(userId);
validateInit(groupId, dossierId, referenceUid, serviceCode, govAgencyCode, address, cityCode, districtCode,
wardCode, contactName, contactTelNo, contactEmail, dossierTemplateNo);
Dossier dossier = null;
dossier = getByRef(groupId, referenceUid);
if (dossier == null) {
String dossierTemplateName = getDossierTemplateName(groupId, dossierTemplateNo);
dossierId = counterLocalService.increment(Dossier.class.getName());
String dossierNote = getDossierNote(serviceCode, govAgencyCode, dossierTemplateNo, groupId);
dossier = dossierPersistence.create(dossierId);
dossier.setCreateDate(createDate);
dossier.setModifiedDate(modifiedDate);
dossier.setSubmitDate(submitDate);
dossier.setReceiveDate(receiveDate);
dossier.setDueDate(dueDate);
dossier.setReleaseDate(releaseDate);
dossier.setFinishDate(finishDate);
dossier.setCancellingDate(cancellingDate);
dossier.setCorrecttingDate(correctingDate);
dossier.setEndorsementDate(endorsementDate);
dossier.setExtendDate(extendDate);
dossier.setProcessDate(processDate);
dossier.setCompanyId(context.getCompanyId());
dossier.setGroupId(groupId);
dossier.setUserId(userId);
dossier.setUserName(auditUser.getFullName());
// Add extent fields
dossier.setReferenceUid(referenceUid);
dossier.setCounter(counter);
dossier.setServiceCode(serviceCode);
dossier.setServiceName(serviceName);
dossier.setGovAgencyCode(govAgencyCode);
dossier.setGovAgencyName(govAgencyName);
dossier.setDossierTemplateNo(dossierTemplateNo);
dossier.setDossierTemplateName(dossierTemplateName);
dossier.setApplicantName(applicantName);
dossier.setApplicantIdType(applicantIdType);
dossier.setApplicantIdNo(applicantIdNo);
dossier.setApplicantIdDate(applicantIdDate);
dossier.setPassword(password);
dossier.setOnline(online);
dossier.setDossierNote(dossierNote);
dossier.setAddress(address);
dossier.setCityCode(cityCode);
dossier.setCityName(cityName);
dossier.setDistrictCode(districtCode);
dossier.setDistrictName(districtName);
dossier.setWardCode(wardCode);
dossier.setWardName(wardName);
dossier.setContactName(contactName);
dossier.setContactEmail(contactEmail);
dossier.setContactTelNo(contactTelNo);
dossier.setViaPostal(viaPostal);
dossier.setPostalAddress(postalAddress);
dossier.setPostalCityCode(postalCityCode);
dossier.setPostalCityName(postalCityName);
dossier.setPostalTelNo(postalTelNo);
dossier.setApplicantNote(applicantNote);
// dossier.setServerNo(getServerNo(groupId));
dossier.setOriginality(originality);
//
dossier.setDossierNo(dossierNo);
dossier.setDossierStatus(dossierStatus);
dossier.setDossierStatusText(dossierStatusText);
dossier.setDossierSubStatus(dossierSubStatus);
dossier.setDossierSubStatusText(dossierSubStatusText);
dossier.setDossierActionId(dossierActionId);
dossier.setSubmissionNote(submissionNote);
dossier.setLockState(lockState);
dossier.setDelegateName(delegateName);
dossier.setDelegateIdNo(delegateIdNo);
dossier.setDelegateTelNo(delegateTelNo);
dossier.setDelegateEmail(delegateEmail);
dossier.setDelegateAddress(delegateAddress);
dossier.setDelegateCityCode(delegateCityCode);
dossier.setDelegateCityName(delegateCityName);
dossier.setDelegateDistrictCode(delegateDistrictCode);
dossier.setDelegateDistrictName(delegateDistrictName);
dossier.setDelegateWardCode(delegateWardCode);
dossier.setDelegateWardName(delegateWardName);
dossier.setDurationCount(durationCount);
dossier.setDurationUnit(durationUnit);
dossier.setDossierName(dossierName);
dossier.setProcessNo(processNo);
dossier.setMetaData(metaData);
dossier = dossierPersistence.update(dossier);
} else {
dossier.setModifiedDate(modifiedDate);
dossier.setSubmitDate(submitDate);
dossier.setReceiveDate(receiveDate);
dossier.setDueDate(dueDate);
dossier.setReleaseDate(releaseDate);
dossier.setFinishDate(finishDate);
dossier.setCancellingDate(cancellingDate);
dossier.setCorrecttingDate(correctingDate);
dossier.setEndorsementDate(endorsementDate);
dossier.setExtendDate(extendDate);
dossier.setProcessDate(processDate);
if (Validator.isNotNull(address))
dossier.setAddress(address);
if (Validator.isNotNull(cityCode))
dossier.setCityCode(cityCode);
if (Validator.isNotNull(cityName))
dossier.setCityName(cityName);
if (Validator.isNotNull(districtCode))
dossier.setDistrictCode(districtCode);
if (Validator.isNotNull(districtName))
dossier.setDistrictName(districtName);
if (Validator.isNotNull(wardCode))
dossier.setWardCode(wardCode);
if (Validator.isNotNull(wardName))
dossier.setWardName(wardName);
if (Validator.isNotNull(contactName))
dossier.setContactName(contactName);
if (Validator.isNotNull(contactEmail))
dossier.setContactEmail(contactEmail);
if (Validator.isNotNull(contactTelNo))
dossier.setContactTelNo(contactTelNo);
if (Validator.isNotNull(dossierNo))
dossier.setDossierNo(dossierNo);
if (Validator.isNotNull(dossierStatus))
dossier.setDossierStatus(dossierStatus);
if (Validator.isNotNull(dossierStatusText))
dossier.setDossierStatusText(dossierStatusText);
if (Validator.isNotNull(dossierSubStatus))
dossier.setDossierSubStatus(dossierSubStatus);
if (Validator.isNotNull(dossierSubStatusText))
dossier.setDossierSubStatusText(dossierSubStatusText);
if (Validator.isNotNull(dossierActionId))
dossier.setDossierActionId(dossierActionId);
if (Validator.isNotNull(submissionNote))
dossier.setSubmissionNote(submissionNote);
if (Validator.isNotNull(lockState))
dossier.setLockState(lockState);
if (Validator.isNotNull(delegateName))
dossier.setDelegateName(delegateName);
if (Validator.isNotNull(delegateIdNo))
dossier.setDelegateIdNo(delegateIdNo);
if (Validator.isNotNull(delegateTelNo))
dossier.setDelegateTelNo(delegateTelNo);
if (Validator.isNotNull(delegateEmail))
dossier.setDelegateEmail(delegateEmail);
if (Validator.isNotNull(delegateAddress))
dossier.setDelegateAddress(delegateAddress);
if (Validator.isNotNull(delegateCityCode))
dossier.setDelegateCityCode(delegateCityCode);
if (Validator.isNotNull(delegateCityName))
dossier.setDelegateCityName(delegateCityName);
if (Validator.isNotNull(delegateDistrictCode))
dossier.setDelegateDistrictCode(delegateDistrictCode);
if (Validator.isNotNull(delegateDistrictName))
dossier.setDelegateDistrictName(delegateDistrictName);
if (Validator.isNotNull(delegateWardCode))
dossier.setDelegateWardCode(delegateWardCode);
if (Validator.isNotNull(delegateWardName))
dossier.setDelegateWardName(delegateWardName);
if (Validator.isNotNull(durationCount))
dossier.setDurationCount(durationCount);
if (Validator.isNotNull(durationUnit))
dossier.setDurationUnit(durationUnit);
if (Validator.isNotNull(dossierName))
dossier.setDossierName(dossierName);
if (Validator.isNotNull(processNo))
dossier.setProcessNo(processNo);
if (Validator.isNotNull(metaData))
dossier.setMetaData(metaData);
dossier.setViaPostal(viaPostal);
if (viaPostal == 1) {
dossier.setPostalAddress(StringPool.BLANK);
dossier.setPostalCityCode(StringPool.BLANK);
dossier.setPostalTelNo(StringPool.BLANK);
} else if (viaPostal == 2) {
if (Validator.isNotNull(postalAddress))
dossier.setPostalAddress(postalAddress);
if (Validator.isNotNull(postalCityCode))
dossier.setPostalCityCode(postalCityCode);
if (Validator.isNotNull(postalTelNo))
dossier.setPostalTelNo(postalTelNo);
if (Validator.isNotNull(postalCityName))
dossier.setPostalCityName(postalCityName);
} else {
dossier.setPostalAddress(StringPool.BLANK);
dossier.setPostalCityCode(StringPool.BLANK);
dossier.setPostalTelNo(StringPool.BLANK);
}
// if (Validator.isNotNull(applicantNote))
dossier.setApplicantNote(applicantNote);
dossier = dossierPersistence.update(dossier);
}
return dossier;
}
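/**
 * Rolls a dossier back to the step of the given action: the action is reset to
 * the waiting state and the dossier status/sub-status are taken from the
 * corresponding process step.
 */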
@Indexable(type = IndexableType.REINDEX)
public Dossier rollback(Dossier dossier, DossierAction dossierAction) {
ProcessStep processStep = ProcessStepLocalServiceUtil.fetchBySC_GID(dossierAction.getStepCode(), dossier.getGroupId(), dossierAction.getServiceProcessId());
if (processStep != null) {
dossierAction.setState(DossierActionTerm.STATE_WAITING_PROCESSING);
dossierAction = DossierActionLocalServiceUtil.updateState(dossierAction.getDossierActionId(), DossierActionTerm.STATE_WAITING_PROCESSING);
JSONObject jsonDataStatusText = getStatusText(dossier.getGroupId(), DossierTerm.DOSSIER_SATUS_DC_CODE, processStep.getDossierStatus(), processStep.getDossierSubStatus());
dossier.setDossierActionId(dossierAction.getDossierActionId());
dossier.setDossierStatus(processStep.getDossierStatus());
dossier.setDossierStatusText(jsonDataStatusText != null ? jsonDataStatusText.getString(processStep.getDossierStatus()) : StringPool.BLANK);
dossier.setDossierSubStatus(processStep.getDossierSubStatus());
dossier.setDossierSubStatusText(jsonDataStatusText != null ? jsonDataStatusText.getString(processStep.getDossierSubStatus()) : StringPool.BLANK);
}
return dossierPersistence.update(dossier);
}
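/**
 * Builds a JSON object mapping the given status (and optional sub-status) codes
 * to their display names from the dictionary collection identified by
 * collectionCode; returns null when the collection or status is missing.
 */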
private JSONObject getStatusText(long groupId, String collectionCode, String curStatus, String curSubStatus) {
JSONObject jsonData = null;
DictCollection dc = DictCollectionLocalServiceUtil.fetchByF_dictCollectionCode(collectionCode, groupId);
if (Validator.isNotNull(dc) && Validator.isNotNull(curStatus)) {
jsonData = JSONFactoryUtil.createJSONObject();
DictItem it = DictItemLocalServiceUtil.fetchByF_dictItemCode(curStatus, dc.getPrimaryKey(), groupId);
if (Validator.isNotNull(it)) {
jsonData.put(curStatus, it.getItemName());
if (Validator.isNotNull(curSubStatus)) {
DictItem dItem = DictItemLocalServiceUtil.fetchByF_dictItemCode(curSubStatus, dc.getPrimaryKey(),
groupId);
if (Validator.isNotNull(dItem)) {
jsonData.put(curSubStatus, dItem.getItemName());
}
}
}
}
return jsonData;
}
public long countDossierByGroup(long groupId) {
return dossierPersistence.countByG(groupId);
}
public List<Dossier> findDossierByGroup(long groupId) {
return dossierPersistence.findByG(groupId);
}
public List<Dossier> findByDN_AN(String dossierNo, String applicantIdNo) {
return dossierPersistence.findByDN_AN(dossierNo, applicantIdNo);
}
public List<Dossier> getByU_G_C_DS_SC_GC_O(long userId, long groupId, String serviceCode, String govAgencyCode, long dossierActionId, int originality) {
return dossierPersistence.findByU_G_GAC_SC_DTNO_DAI_O(userId, groupId, govAgencyCode, serviceCode, dossierActionId, originality);
}
public List<Dossier> findByVIAPOSTAL(int viaPostal) {
return dossierPersistence.findByVIAPOSTAL(viaPostal);
}
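/**
 * Creates a copy of the given dossier with a new id, counter and reference UID.
 * Status, dossier number and action fields are intentionally not copied; due and
 * receive dates are carried over from the source.
 */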
@Indexable(type = IndexableType.REINDEX)
public Dossier cloneDossier(Dossier srcDossier) throws PortalException {
long desDossierId = counterLocalService.increment(Dossier.class.getName());
Dossier desDossier = dossierPersistence.create(desDossierId);
int counter = DossierNumberGenerator.counterDossier(srcDossier.getUserId(), desDossierId);
String referenceUid = DossierNumberGenerator.generateReferenceUID(srcDossier.getGroupId());
desDossier.setCounter(counter);
desDossier.setReferenceUid(referenceUid);
LinkedHashMap<String, Object> params = new LinkedHashMap<String, Object>();
params.put(DossierTerm.GOV_AGENCY_CODE, srcDossier.getGovAgencyCode());
params.put(DossierTerm.SERVICE_CODE, srcDossier.getServiceCode());
params.put(DossierTerm.DOSSIER_TEMPLATE_NO, srcDossier.getDossierTemplateNo());
params.put(DossierTerm.DOSSIER_STATUS, StringPool.BLANK);
Date now = new Date();
desDossier.setCreateDate(now);
desDossier.setModifiedDate(now);
desDossier.setCompanyId(srcDossier.getCompanyId());
desDossier.setGroupId(srcDossier.getGroupId());
desDossier.setUserId(srcDossier.getUserId());
desDossier.setUserName(srcDossier.getUserName());
// Add extent fields
desDossier.setServiceCode(srcDossier.getServiceCode());
desDossier.setServiceName(srcDossier.getServiceName());
desDossier.setGovAgencyCode(srcDossier.getGovAgencyCode());
desDossier.setGovAgencyName(srcDossier.getGovAgencyName());
desDossier.setDossierTemplateNo(srcDossier.getDossierTemplateNo());
desDossier.setDossierTemplateName(srcDossier.getDossierTemplateName());
desDossier.setApplicantName(srcDossier.getApplicantName());
desDossier.setApplicantIdType(srcDossier.getApplicantIdType());
desDossier.setApplicantIdNo(srcDossier.getApplicantIdNo());
desDossier.setApplicantIdDate(srcDossier.getApplicantIdDate());
desDossier.setPassword(srcDossier.getPassword());
desDossier.setOnline(srcDossier.getOnline());
desDossier.setDossierNote(srcDossier.getDossierNote());
desDossier.setAddress(srcDossier.getAddress());
desDossier.setCityCode(srcDossier.getCityCode());
desDossier.setCityName(srcDossier.getCityName());
desDossier.setDistrictCode(srcDossier.getDistrictCode());
desDossier.setDistrictName(srcDossier.getDistrictName());
desDossier.setWardCode(srcDossier.getWardCode());
desDossier.setWardName(srcDossier.getWardName());
desDossier.setContactName(srcDossier.getContactName());
desDossier.setContactEmail(srcDossier.getContactEmail());
desDossier.setContactTelNo(srcDossier.getContactTelNo());
desDossier.setViaPostal(srcDossier.getViaPostal());
desDossier.setPostalAddress(srcDossier.getPostalAddress());
desDossier.setPostalCityCode(srcDossier.getPostalCityCode());
desDossier.setPostalCityName(srcDossier.getPostalCityName());
desDossier.setPostalTelNo(srcDossier.getPostalTelNo());
desDossier.setApplicantNote(srcDossier.getApplicantNote());
desDossier.setServerNo(srcDossier.getServerNo());
desDossier.setOriginality(srcDossier.getOriginality());
desDossier.setDurationCount(srcDossier.getDurationCount());
desDossier.setDurationUnit(srcDossier.getDurationUnit());
//desDossier.setDossierStatus(srcDossier.getDossierStatus());
//desDossier.setDossierStatusText(srcDossier.getDossierStatusText());
//desDossier.setDossierSubStatus(srcDossier.getDossierSubStatus());
//desDossier.setDossierSubStatusText(srcDossier.getDossierSubStatusText());
desDossier.setDelegateName(srcDossier.getDelegateName());
desDossier.setDelegateAddress(srcDossier.getDelegateAddress());
desDossier.setDelegateCityCode(srcDossier.getDelegateCityCode());
desDossier.setDelegateCityName(srcDossier.getDelegateCityName());
desDossier.setDelegateDistrictCode(srcDossier.getDelegateDistrictCode());
desDossier.setDelegateDistrictName(srcDossier.getDelegateDistrictName());
desDossier.setDelegateWardCode(srcDossier.getDelegateWardCode());
desDossier.setDelegateWardName(srcDossier.getDelegateWardName());
desDossier.setDelegateEmail(srcDossier.getDelegateEmail());
desDossier.setDelegateIdNo(srcDossier.getDelegateIdNo());
desDossier.setDelegateTelNo(srcDossier.getDelegateTelNo());
desDossier.setDossierName(srcDossier.getDossierName());
desDossier.setRegisterBookCode(srcDossier.getRegisterBookCode());
desDossier.setProcessNo(srcDossier.getProcessNo());
//dossierPersistence.update(desDossier);
//ServiceProcess serviceProcess = null;
//ProcessOption option = getProcessOption(srcDossier.getServiceCode(), srcDossier.getGovAgencyCode(), srcDossier.getDossierTemplateNo(), srcDossier.getGroupId());
//_log.debug("Process option: " + option);
// if (option != null) {
// long serviceProcessId = option.getServiceProcessId();
// serviceProcess = ServiceProcessLocalServiceUtil.fetchServiceProcess(serviceProcessId);
// String dossierRef = DossierNumberGenerator.generateDossierNumber(srcDossier.getGroupId(), srcDossier.getCompanyId(),
// desDossierId, option.getProcessOptionId(), serviceProcess.getDossierNoPattern(), params);
// _log.debug("Dossier no: " + dossierRef);
// desDossier.setDossierNo(dossierRef.trim());
// }
// set dueDate
desDossier.setDueDate(srcDossier.getDueDate());
// set receivedDate
desDossier.setReceiveDate(srcDossier.getReceiveDate());
return dossierPersistence.update(desDossier);
}
public Dossier getByDossierNo(long groupId, String dossierNo) {
return dossierPersistence.fetchByG_DN(groupId, dossierNo);
}
// super_admin Generators
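/**
 * Deletes a dossier by primary key for the super-admin API; returns null when
 * the dossier does not exist.
 */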
@Indexable(type = IndexableType.DELETE)
public Dossier adminProcessDelete(Long id) {
Dossier object = dossierPersistence.fetchByPrimaryKey(id);
if (Validator.isNull(object)) {
return null;
} else {
dossierPersistence.remove(object);
}
return object;
}
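/**
 * Creates or updates a dossier from a raw JSON payload used by the super-admin
 * API; dictionary item names (city/district/ward, government agency) are
 * resolved from the corresponding codes.
 */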
@Indexable(type = IndexableType.REINDEX)
public Dossier adminProcessData(JSONObject objectData) {
Dossier object = null;
if (objectData.getLong("dossierId") > 0) {
object = dossierPersistence.fetchByPrimaryKey(objectData.getLong("dossierId"));
object.setModifiedDate(new Date());
} else {
long id = CounterLocalServiceUtil.increment(Dossier.class.getName());
object = dossierPersistence.create(id);
object.setGroupId(objectData.getLong("groupId"));
object.setCompanyId(objectData.getLong("companyId"));
object.setCreateDate(new Date());
}
object.setUserId(objectData.getLong("userId"));
object.setUserName(objectData.getString("userName"));
object.setReferenceUid(objectData.getString("referenceUid"));
object.setCounter(objectData.getInt("counter"));
object.setRegisterBookCode(objectData.getString("registerBookCode"));
object.setRegisterBookName(objectData.getString("registerBookName"));
object.setDossierRegister(objectData.getString("dossierRegister"));
object.setProcessNo(objectData.getString("processNo"));
object.setServiceCode(objectData.getString("serviceCode"));
object.setServiceName(objectData.getString("serviceName"));
object.setGovAgencyCode(objectData.getString("govAgencyCode"));
object.setApplicantIdType(objectData.getString("applicantIdType"));
object.setApplicantIdNo(objectData.getString("applicantIdNo"));
object.setApplicantIdDate(new Date(objectData.getLong("applicantIdDate")));
object.setAddress(objectData.getString("address"));
object.setApplicantName(objectData.getString("applicantName"));
object.setPostalAddress(objectData.getString("postalAddress"));
DictItem govAgencyName = DictCollectionUtils.getDictItemByCode(DataMGTConstants.GOVERNMENT_AGENCY,
objectData.getString("govAgencyCode"), objectData.getLong("groupId"));
if (Validator.isNotNull(govAgencyName)) {
object.setGovAgencyName(govAgencyName.getItemName());
}
object.setCityCode(objectData.getString("cityCode"));
object.setDistrictCode(objectData.getString("districtCode"));
object.setWardCode(objectData.getString("wardCode"));
object.setDelegateCityCode(objectData.getString("delegateCityCode"));
object.setDelegateDistrictCode(objectData.getString("delegateDistrictCode"));
object.setDelegateWardCode(objectData.getString("delegateWardCode"));
object.setPostalCityCode(objectData.getString("postalCityCode"));
object.setPostalDistrictCode(objectData.getString("postalDistrictCode"));
object.setPostalWardCode(objectData.getString("postalWardCode"));
DictItem dictItem = DictCollectionUtils.getDictItemByCode(DataMGTConstants.ADMINISTRATIVE_REGION,
objectData.getString("cityCode"), objectData.getLong("groupId"));
if (Validator.isNotNull(dictItem)) {
object.setCityName(dictItem.getItemName());
}
dictItem = DictCollectionUtils.getDictItemByCode(DataMGTConstants.ADMINISTRATIVE_REGION,
objectData.getString("districtCode"), objectData.getLong("groupId"));
if (Validator.isNotNull(dictItem)) {
object.setDistrictName(dictItem.getItemName());
}
dictItem = DictCollectionUtils.getDictItemByCode(DataMGTConstants.ADMINISTRATIVE_REGION,
objectData.getString("wardCode"), objectData.getLong("groupId"));
if (Validator.isNotNull(dictItem)) {
object.setWardName(dictItem.getItemName());
}
dictItem = DictCollectionUtils.getDictItemByCode(DataMGTConstants.ADMINISTRATIVE_REGION,
objectData.getString("delegateCityCode"), objectData.getLong("groupId"));
if (Validator.isNotNull(dictItem)) {
object.setDelegateCityName(dictItem.getItemName());
}
dictItem = DictCollectionUtils.getDictItemByCode(DataMGTConstants.ADMINISTRATIVE_REGION,
objectData.getString("delegateDistrictCode"), objectData.getLong("groupId"));
if (Validator.isNotNull(dictItem)) {
object.setDelegateDistrictName(dictItem.getItemName());
}
dictItem = DictCollectionUtils.getDictItemByCode(DataMGTConstants.ADMINISTRATIVE_REGION,
objectData.getString("delegateWardCode"), objectData.getLong("groupId"));
if (Validator.isNotNull(dictItem)) {
object.setDelegateWardName(dictItem.getItemName());
}
dictItem = DictCollectionUtils.getDictItemByCode(DataMGTConstants.ADMINISTRATIVE_REGION,
objectData.getString("postalCityCode"), objectData.getLong("groupId"));
if (Validator.isNotNull(dictItem)) {
object.setPostalCityName(dictItem.getItemName());
}
dictItem = DictCollectionUtils.getDictItemByCode(DataMGTConstants.ADMINISTRATIVE_REGION,
objectData.getString("postalDistrictCode"), objectData.getLong("groupId"));
if (Validator.isNotNull(dictItem)) {
object.setPostalDistrictName(dictItem.getItemName());
}
dictItem = DictCollectionUtils.getDictItemByCode(DataMGTConstants.ADMINISTRATIVE_REGION,
objectData.getString("postalWardCode"), objectData.getLong("groupId"));
if (Validator.isNotNull(dictItem)) {
object.setPostalWardName(dictItem.getItemName());
}
object.setPostalServiceCode(objectData.getString("postalServiceCode"));
object.setPostalServiceName(objectData.getString("postalServiceName"));
object.setDossierTemplateNo(objectData.getString("dossierTemplateNo"));
object.setDossierTemplateName(objectData.getString("dossierTemplateName"));
object.setDossierStatus(objectData.getString("dossierStatus"));
object.setDossierStatusText(objectData.getString("dossierStatusText"));
object.setDossierSubStatus(objectData.getString("dossierSubStatus"));
object.setDossierSubStatusText(objectData.getString("dossierSubStatusText"));
object.setContactName(objectData.getString("contactName"));
object.setContactTelNo(objectData.getString("contactTelNo"));
object.setContactEmail(objectData.getString("contactEmail"));
object.setDelegateName(objectData.getString("delegateName"));
object.setDelegateIdNo(objectData.getString("delegateIdNo"));
object.setDelegateTelNo(objectData.getString("delegateTelNo"));
object.setDelegateEmail(objectData.getString("delegateEmail"));
object.setDelegateAddress(objectData.getString("delegateAddress"));
object.setDossierNote(objectData.getString("dossierNote"));
object.setSubmissionNote(objectData.getString("submissionNote"));
object.setApplicantNote(objectData.getString("applicantNote"));
object.setBriefNote(objectData.getString("briefNote"));
object.setDossierNo(objectData.getString("dossierNo"));
object.setSubmitting(objectData.getBoolean("submitting"));
object.setProcessDate(new Date(objectData.getLong("processDate")));
object.setSubmitDate(new Date(objectData.getLong("submitDate")));
object.setReceiveDate(new Date(objectData.getLong("receiveDate")));
object.setDueDate(new Date(objectData.getLong("dueDate")));
object.setExtendDate(new Date(objectData.getLong("extendDate")));
object.setReleaseDate(new Date(objectData.getLong("releaseDate")));
object.setFinishDate(new Date(objectData.getLong("finishDate")));
object.setCancellingDate(new Date(objectData.getLong("cancellingDate")));
object.setCorrecttingDate(new Date(objectData.getLong("correcttingDate")));
// object.setFolderId(objectData.getString("userName")folderId);
object.setDossierActionId(objectData.getLong("dossierActionId"));
object.setViaPostal(objectData.getInt("viaPostal"));
object.setPostalTelNo(objectData.getString("postalTelNo"));
object.setPassword(objectData.getString("password"));
object.setNotification(objectData.getBoolean("notification"));
object.setOnline(objectData.getBoolean("online"));
object.setOriginal(objectData.getBoolean("original"));
object.setServerNo(objectData.getString("serverNo"));
object.setEndorsementDate(new Date(objectData.getLong("endorsementDate")));
object.setLockState(objectData.getString("lockState"));
object.setOriginality(objectData.getInt("originality"));
object.setOriginDossierId(objectData.getLong("originDossierId"));
object.setSampleCount(objectData.getLong("sampleCount"));
object.setDurationUnit(objectData.getInt("durationUnit"));
object.setDurationCount(objectData.getDouble("durationCount"));
object.setDossierName(objectData.getString("dossierName"));
object.setOriginDossierNo(objectData.getString("originDossierNo"));
dossierPersistence.update(object);
return object;
}
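	/**
	 * Updates selected fields of a dossier (note, status and key dates) from a JSON payload,
	 * only overwriting a field when the incoming value is present and differs from the stored one.
	 */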
@Indexable(type = IndexableType.REINDEX)
public Dossier updateDossierSpecial(long dossierId, JSONObject obj) throws NoSuchDossierException {
// _log.debug("Object dossier update: " + obj.toJSONString());
Dossier dossier = dossierPersistence.findByPrimaryKey(dossierId);
if (obj.has(DossierTerm.DOSSIER_NOTE)) {
if (!obj.getString(DossierTerm.DOSSIER_NOTE).equals(dossier.getDossierNote())) {
dossier.setDossierNote(obj.getString(DossierTerm.DOSSIER_NOTE));
}
}
if (obj.has(DossierTerm.DOSSIER_STATUS)) {
if (!obj.getString(DossierTerm.DOSSIER_STATUS).equals(dossier.getDossierStatus())
&& Validator.isNotNull(obj.getString(DossierTerm.DOSSIER_STATUS))) {
dossier.setDossierStatus(obj.getString(DossierTerm.DOSSIER_STATUS));
}
}
if (obj.has(DossierTerm.RECEIVE_DATE) && Validator.isNotNull(obj.get(DossierTerm.RECEIVE_DATE))
&& GetterUtil.getLong(obj.get(DossierTerm.RECEIVE_DATE)) > 0) {
if (dossier.getReceiveDate() == null || obj.getLong(DossierTerm.RECEIVE_DATE) != dossier.getReceiveDate().getTime()) {
dossier.setReceiveDate(new Date(obj.getLong(DossierTerm.RECEIVE_DATE)));
}
}
if (obj.has(DossierTerm.EXTEND_DATE) && Validator.isNotNull(obj.get(DossierTerm.EXTEND_DATE))
&& GetterUtil.getLong(obj.get(DossierTerm.EXTEND_DATE)) > 0) {
if (dossier.getExtendDate() == null || obj.getLong(DossierTerm.EXTEND_DATE) != dossier.getExtendDate().getTime()) {
dossier.setExtendDate(new Date(obj.getLong(DossierTerm.EXTEND_DATE)));
}
}
if (obj.has(DossierTerm.DUE_DATE) && Validator.isNotNull(obj.get(DossierTerm.DUE_DATE))
&& GetterUtil.getLong(obj.get(DossierTerm.DUE_DATE)) > 0) {
if (dossier.getDueDate() == null || obj.getLong(DossierTerm.DUE_DATE) != dossier.getDueDate().getTime()) {
dossier.setDueDate(new Date(obj.getLong(DossierTerm.DUE_DATE)));
}
}
if (obj.has(DossierTerm.FINISH_DATE) && Validator.isNotNull(obj.get(DossierTerm.FINISH_DATE))
&& GetterUtil.getLong(obj.get(DossierTerm.FINISH_DATE)) > 0) {
if (dossier.getFinishDate() == null || obj.getLong(DossierTerm.FINISH_DATE) != dossier.getFinishDate().getTime()) {
dossier.setFinishDate(new Date(obj.getLong(DossierTerm.FINISH_DATE)));
}
}
if (obj.has(DossierTerm.RELEASE_DATE) && Validator.isNotNull(obj.get(DossierTerm.RELEASE_DATE))
&& GetterUtil.getLong(obj.get(DossierTerm.RELEASE_DATE)) > 0) {
if (dossier.getReleaseDate() == null || obj.getLong(DossierTerm.RELEASE_DATE) != dossier.getReleaseDate().getTime()) {
dossier.setReleaseDate(new Date(obj.getLong(DossierTerm.RELEASE_DATE)));
}
}
return dossierPersistence.update(dossier);
}
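	/**
	 * Finder wrappers below delegate directly to the persistence layer without extra logic.
	 */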
public List<Dossier> getByG_AN(long groupId, String applicantIdNo) {
return dossierPersistence.findByG_AN(groupId, applicantIdNo);
}
public Dossier getByG_AN_SC_GAC_DTNO_ODID(long groupId, String applicantIdNo, String serviceCode, String govAgencyCode, String dossierTemplateNo, long originDossierId) {
return dossierPersistence.fetchByG_AN_SC_GAC_DTNO_ODID(groupId, applicantIdNo, serviceCode, govAgencyCode, dossierTemplateNo, originDossierId);
}
public Dossier fetchOnePublicService() {
return dossierPersistence.fetchByO_First(0, null);
}
public List<Dossier> findByNOT_ST_GT_MD(String[] statuses, Date d, int start, int end) {
return dossierPersistence.findByNOT_ST_GT_MD(statuses, d, start, end);
}
public List<Dossier> findByGID(long groupId, int start, int end) {
return dossierPersistence.findByG(groupId, start, end);
}
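	/**
	 * Updates the workflow status fields of a dossier and stamps the release or finish date
	 * when the status moves to RELEASING or DONE.
	 */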
public Dossier updateStatus(Dossier dossier, String status, String statusText, String subStatus,
String subStatusText, String lockState, String stepInstruction, ServiceContext context)
throws PortalException {
Date now = new Date();
dossier.setModifiedDate(now);
dossier.setDossierStatus(status);
dossier.setDossierStatusText(statusText);
dossier.setDossierSubStatus(subStatus);
dossier.setDossierSubStatusText(subStatusText);
dossier.setLockState(lockState);
dossier.setDossierNote(stepInstruction);
if (status.equalsIgnoreCase(DossierStatusConstants.RELEASING)) {
dossier.setReleaseDate(now);
}
if (status.equalsIgnoreCase(DossierStatusConstants.DONE)) {
dossier.setFinishDate(now);
}
dossierPersistence.update(dossier);
return dossier;
}
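	/**
	 * Applies applicant, contact, postal and delegate information supplied by the caller to an
	 * existing dossier; when isSameAsApplicant is set, the delegate fields are copied from the
	 * applicant/contact fields and region names are resolved from the ADMINISTRATIVE_REGION dictionary.
	 */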
@Indexable(type = IndexableType.REINDEX)
public Dossier initUpdateDossier(long groupId, long id, String applicantName, String applicantIdType,
String applicantIdNo, String applicantIdDate, String address, String cityCode, String cityName,
String districtCode, String districtName, String wardCode, String wardName, String contactName,
String contactTelNo, String contactEmail, String dossierTemplateNo, Integer viaPostal, String postalAddress,
String postalCityCode, String postalCityName, String postalTelNo, String applicantNote,
boolean isSameAsApplicant, String delegateName, String delegateIdNo, String delegateTelNo,
String delegateEmail, String delegateAddress, String delegateCityCode, String delegateDistrictCode,
String delegateWardCode, Long sampleCount, String dossierName, String briefNote, ServiceContext serviceContext) {
Date now = new Date();
long userId = serviceContext.getUserId();
User auditUser = userPersistence.fetchByPrimaryKey(userId);
Dossier dossier = dossierPersistence.fetchByPrimaryKey(id);
dossier.setModifiedDate(now);
dossier.setUserId(userId);
dossier.setUserName(auditUser.getFullName());
//
if (Validator.isNotNull(applicantName))
dossier.setApplicantName(applicantName);
if (Validator.isNotNull(applicantIdType))
dossier.setApplicantIdType(applicantIdType);
if (Validator.isNotNull(applicantIdNo))
dossier.setApplicantIdNo(applicantIdNo);
if (Validator.isNotNull(applicantIdDate))
dossier.setApplicantIdDate(
APIDateTimeUtils.convertStringToDate(applicantIdDate, APIDateTimeUtils._NORMAL_PARTTERN));
if (Validator.isNotNull(address))
dossier.setAddress(address);
if (Validator.isNotNull(cityCode))
dossier.setCityCode(cityCode);
if (Validator.isNotNull(cityName))
dossier.setCityName(cityName);
if (Validator.isNotNull(districtCode))
dossier.setDistrictCode(districtCode);
if (Validator.isNotNull(districtName))
dossier.setDistrictName(districtName);
if (Validator.isNotNull(wardCode))
dossier.setWardCode(wardCode);
if (Validator.isNotNull(wardName))
dossier.setWardName(wardName);
if (Validator.isNotNull(contactName))
dossier.setContactName(contactName);
if (Validator.isNotNull(contactEmail))
dossier.setContactEmail(contactEmail);
if (Validator.isNotNull(contactTelNo))
dossier.setContactTelNo(contactTelNo);
if (Validator.isNotNull(sampleCount))
dossier.setSampleCount(sampleCount);
if (Validator.isNotNull(viaPostal)) {
dossier.setViaPostal(viaPostal);
if (viaPostal == 1) {
dossier.setPostalAddress(StringPool.BLANK);
dossier.setPostalCityCode(StringPool.BLANK);
dossier.setPostalTelNo(StringPool.BLANK);
} else if (viaPostal == 2) {
if (Validator.isNotNull(postalAddress))
dossier.setPostalAddress(postalAddress);
if (Validator.isNotNull(postalCityCode))
dossier.setPostalCityCode(postalCityCode);
if (Validator.isNotNull(postalTelNo))
dossier.setPostalTelNo(postalTelNo);
if (Validator.isNotNull(postalCityName))
dossier.setPostalCityName(postalCityName);
} else {
dossier.setPostalAddress(StringPool.BLANK);
dossier.setPostalCityCode(StringPool.BLANK);
dossier.setPostalTelNo(StringPool.BLANK);
}
}
if (isSameAsApplicant) {
dossier.setDelegateName(applicantName);
dossier.setDelegateIdNo(applicantIdNo);
dossier.setDelegateTelNo(contactTelNo);
dossier.setDelegateAddress(address);
dossier.setDelegateEmail(contactEmail);
if (Validator.isNotNull(cityCode)) {
dossier.setDelegateCityCode(cityCode);
dossier.setDelegateCityName(getDictItemName(dossier.getGroupId(), ADMINISTRATIVE_REGION, cityCode));
}
if (Validator.isNotNull(districtCode)) {
dossier.setDelegateDistrictCode(districtCode);
dossier.setDelegateDistrictName(
getDictItemName(dossier.getGroupId(), ADMINISTRATIVE_REGION, districtCode));
}
if (Validator.isNotNull(wardCode)) {
dossier.setDelegateWardCode(wardCode);
dossier.setDelegateWardName(getDictItemName(dossier.getGroupId(), ADMINISTRATIVE_REGION, wardCode));
}
} else {
dossier.setDelegateName(delegateName);
dossier.setDelegateIdNo(delegateIdNo);
dossier.setDelegateTelNo(delegateTelNo);
dossier.setDelegateAddress(delegateAddress);
dossier.setDelegateEmail(delegateEmail);
if (Validator.isNotNull(delegateCityCode)) {
dossier.setDelegateCityCode(delegateCityCode);
dossier.setDelegateCityName(
getDictItemName(dossier.getGroupId(), ADMINISTRATIVE_REGION, delegateCityCode));
}
if (Validator.isNotNull(delegateDistrictCode)) {
dossier.setDelegateDistrictCode(delegateDistrictCode);
dossier.setDelegateDistrictName(
getDictItemName(dossier.getGroupId(), ADMINISTRATIVE_REGION, delegateDistrictCode));
}
if (Validator.isNotNull(delegateWardCode)) {
dossier.setDelegateWardCode(delegateWardCode);
dossier.setDelegateWardName(
getDictItemName(dossier.getGroupId(), ADMINISTRATIVE_REGION, delegateWardCode));
}
}
dossier.setApplicantNote(applicantNote);
if (Validator.isNotNull(dossierName)) {
dossier.setDossierName(dossierName);
}
		_log.debug("Dossier name: " + dossierName);
dossier.setBriefNote(briefNote);
//Process add status of group dossier
if (dossier.getOriginality() == 9) {
dossier.setDossierStatus(DossierTerm.DOSSIER_STATUS_PROCESSING);
}
return dossierPersistence.update(dossier);
}
public List<Dossier> getByGroupAndOriginDossierNo(long groupId, String originDossierNo) {
return dossierPersistence.findByGID_ORI_NO(groupId, originDossierNo);
}
public int countByGroupAndOriginDossierNo(long groupId, String originDossierNo) {
return dossierPersistence.countByGID_ORI_NO(groupId, originDossierNo);
}
public int countByOriginDossierNo(String originDossierNo) {
return dossierPersistence.countByORIGIN_NO(originDossierNo);
}
public List<Dossier> getByU_G_GAC_SC_DTNO_DS_O(long userId, long groupId, String govAgencyCode, String serviceCode, String dossierTemplateNo, String dossierStatus, int originality) {
return dossierPersistence.findByU_G_GAC_SC_DTNO_DS_O(userId, groupId, govAgencyCode, serviceCode, dossierTemplateNo, dossierStatus, originality);
}
public int countByG_NOTS_O_SC(long groupId, String[] dossierStatuses, int originality, String serviceCode) {
return dossierPersistence.countByG_NOTS_O_SC(groupId, dossierStatuses, originality, serviceCode);
}
public int countByG_NOTS_O_DTN(long groupId, String[] dossierStatuses, int originality, String dossierTemplateNo) {
return dossierPersistence.countByG_NOTS_O_DTN(groupId, dossierStatuses, originality, dossierTemplateNo);
}
public int countByG_NOTS_O_PN(long groupId, String[] dossierStatuses, int originality, String processNo) {
return dossierPersistence.countByG_NOTS_O_PN(groupId, dossierStatuses, originality, processNo);
}
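	/**
	 * Creates or updates a dossier from imported data (looked up by id or by reference UID),
	 * mirroring the applicant data into the delegate fields.
	 */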
@Indexable(type = IndexableType.REINDEX)
public Dossier publishImportDossier(long groupId, long dossierId, String referenceUid, int counter,
String serviceCode, String serviceName, String govAgencyCode, String govAgencyName, String applicantName,
String applicantIdType, String applicantIdNo, Date applicantIdDate, String address, String contactName,
String contactTelNo, String contactEmail, Boolean online, int originality, String dossierNo,
String dossierStatus, String dossierStatusText, long dossierActionId, Double durationCount,
Integer durationUnit, Integer sampleCount, Date createDate, Date modifiedDate, Date submitDate,
Date receiveDate, Date dueDate, Date releaseDate, Date finishDate, String dossierTemplateNo,
String dossierTemplateName, ServiceContext serviceContext) {
long userId = serviceContext.getUserId();
User auditUser = userPersistence.fetchByPrimaryKey(userId);
Dossier dossier = null;
if (dossierId > 0) {
dossier = DossierLocalServiceUtil.fetchDossier(dossierId);
} else {
dossier = getByRef(groupId, referenceUid);
}
if (dossier == null) {
dossierId = counterLocalService.increment(Dossier.class.getName());
dossier = dossierPersistence.create(dossierId);
dossier.setCreateDate(createDate);
dossier.setModifiedDate(modifiedDate);
dossier.setSubmitDate(submitDate);
dossier.setReceiveDate(receiveDate);
dossier.setDueDate(dueDate);
dossier.setReleaseDate(releaseDate);
dossier.setFinishDate(finishDate);
dossier.setCompanyId(serviceContext.getCompanyId());
dossier.setGroupId(groupId);
dossier.setUserId(userId);
dossier.setUserName(auditUser.getFullName());
// Add extent fields
dossier.setReferenceUid(referenceUid);
dossier.setCounter(counter);
dossier.setServiceCode(serviceCode);
dossier.setServiceName(serviceName);
dossier.setGovAgencyCode(govAgencyCode);
dossier.setGovAgencyName(govAgencyName);
dossier.setDossierTemplateNo(dossierTemplateNo);
dossier.setDossierTemplateName(dossierTemplateName);
dossier.setApplicantName(applicantName);
dossier.setApplicantIdType(applicantIdType);
dossier.setApplicantIdNo(applicantIdNo);
dossier.setApplicantIdDate(applicantIdDate);
dossier.setOnline(online);
dossier.setAddress(address);
dossier.setContactName(contactName);
dossier.setContactEmail(contactEmail);
dossier.setContactTelNo(contactTelNo);
dossier.setViaPostal(0);
dossier.setOriginality(originality);
dossier.setDossierNo(dossierNo);
dossier.setDossierStatus(dossierStatus);
dossier.setDossierStatusText(dossierStatusText);
if ("releasing".equals(dossierStatus)) {
dossier.setDossierSubStatus("releasing_0");
dossier.setDossierSubStatusText("Chờ trả kết quả tại một cửa");
}
dossier.setDossierActionId(dossierActionId);
dossier.setCounter(counter);
dossier.setDelegateName(applicantName);
dossier.setDelegateAddress(address);
dossier.setDelegateIdNo(applicantIdNo);
dossier.setDelegateTelNo(contactTelNo);
dossier.setDelegateEmail(contactEmail);
dossier.setDurationCount(durationCount);
dossier.setDurationUnit(durationUnit);
dossier.setSampleCount(sampleCount);
dossier.setDossierName(serviceName);
dossier = dossierPersistence.update(dossier);
} else {
dossier.setModifiedDate(modifiedDate);
dossier.setSubmitDate(submitDate);
dossier.setReceiveDate(receiveDate);
dossier.setDueDate(dueDate);
dossier.setReleaseDate(releaseDate);
dossier.setFinishDate(finishDate);
if (Validator.isNotNull(address))
dossier.setAddress(address);
if (Validator.isNotNull(contactName))
dossier.setContactName(contactName);
if (Validator.isNotNull(contactEmail))
dossier.setContactEmail(contactEmail);
if (Validator.isNotNull(contactTelNo))
dossier.setContactTelNo(contactTelNo);
if (Validator.isNotNull(dossierTemplateNo))
dossier.setDossierTemplateNo(dossierTemplateNo);
if (Validator.isNotNull(dossierTemplateName))
dossier.setDossierTemplateName(dossierTemplateName);
dossier.setViaPostal(0);
dossier.setOriginality(originality);
dossier.setDossierNo(dossierNo);
dossier.setDossierStatus(dossierStatus);
dossier.setDossierStatusText(dossierStatusText);
if ("releasing".equals(dossierStatus)) {
dossier.setDossierSubStatus("releasing_0");
dossier.setDossierSubStatusText("Chờ trả kết quả tại một cửa");
}
dossier.setDossierActionId(dossierActionId);
dossier.setCounter(counter);
dossier.setDelegateName(applicantName);
dossier.setDelegateAddress(address);
dossier.setDelegateIdNo(applicantIdNo);
dossier.setDelegateTelNo(contactTelNo);
dossier.setDelegateEmail(contactEmail);
dossier.setDurationCount(durationCount);
dossier.setDurationUnit(durationUnit);
dossier.setSampleCount(sampleCount);
dossier.setDossierName(serviceName);
dossier = dossierPersistence.update(dossier);
}
return dossier;
}
public List<Dossier> getByF_GID_AN_DS(long groupId, String applicantIdNo, String dossierStatus) {
return dossierPersistence.findByF_GID_AN_DS(groupId, applicantIdNo, dossierStatus);
}
public List<Dossier> getByGID_GC_SC_DTN_DS_APP_ORI(long groupId, String govAgencyCode, String serviceCode,
String dossierTemplateNo, String[] statusArr, String applicantIdType, int originality) {
try {
return dossierPersistence.findByGID_GC_SC_DTN_DS_APP_ORI(groupId, govAgencyCode, serviceCode,
dossierTemplateNo, statusArr, applicantIdType, originality);
} catch (Exception e) {
_log.debug(e);
}
return null;
}
public List<Dossier> findByG_GDID(long groupId, long groupDossierId) {
return dossierPersistence.findByG_GDID(groupId, groupDossierId);
}
}
|
modules/backend-dossiermgt/backend-dossiermgt-service/src/main/java/org/opencps/dossiermgt/service/impl/DossierLocalServiceImpl.java
|
/**
* Copyright (c) 2000-present Liferay, Inc. All rights reserved.
*
* This library is free software; you can redistribute it and/or modify it under
* the terms of the GNU Lesser General Public License as published by the Free
* Software Foundation; either version 2.1 of the License, or (at your option)
* any later version.
*
* This library is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
* details.
*/
package org.opencps.dossiermgt.service.impl;
import com.liferay.counter.kernel.service.CounterLocalServiceUtil;
import com.liferay.petra.string.StringPool;
import com.liferay.portal.kernel.dao.orm.QueryUtil;
import com.liferay.portal.kernel.exception.PortalException;
import com.liferay.portal.kernel.json.JSONFactoryUtil;
import com.liferay.portal.kernel.json.JSONObject;
import com.liferay.portal.kernel.log.Log;
import com.liferay.portal.kernel.log.LogFactoryUtil;
import com.liferay.portal.kernel.model.User;
import com.liferay.portal.kernel.search.BaseBooleanQueryImpl;
import com.liferay.portal.kernel.search.BooleanClause;
import com.liferay.portal.kernel.search.BooleanClauseOccur;
import com.liferay.portal.kernel.search.BooleanQuery;
import com.liferay.portal.kernel.search.BooleanQueryFactoryUtil;
import com.liferay.portal.kernel.search.Document;
import com.liferay.portal.kernel.search.Field;
import com.liferay.portal.kernel.search.Hits;
import com.liferay.portal.kernel.search.IndexSearcherHelperUtil;
import com.liferay.portal.kernel.search.Indexable;
import com.liferay.portal.kernel.search.IndexableType;
import com.liferay.portal.kernel.search.Indexer;
import com.liferay.portal.kernel.search.IndexerRegistryUtil;
import com.liferay.portal.kernel.search.ParseException;
import com.liferay.portal.kernel.search.Query;
import com.liferay.portal.kernel.search.SearchContext;
import com.liferay.portal.kernel.search.SearchException;
import com.liferay.portal.kernel.search.Sort;
import com.liferay.portal.kernel.search.TermQuery;
import com.liferay.portal.kernel.search.TermRangeQuery;
import com.liferay.portal.kernel.search.WildcardQuery;
import com.liferay.portal.kernel.search.filter.Filter;
import com.liferay.portal.kernel.search.filter.FilterTranslator;
import com.liferay.portal.kernel.search.filter.RangeTermFilter;
import com.liferay.portal.kernel.search.filter.TermFilter;
import com.liferay.portal.kernel.search.filter.TermsFilter;
import com.liferay.portal.kernel.search.generic.BooleanQueryImpl;
import com.liferay.portal.kernel.search.generic.MultiMatchQuery;
import com.liferay.portal.kernel.search.generic.TermQueryImpl;
import com.liferay.portal.kernel.search.generic.TermRangeQueryImpl;
import com.liferay.portal.kernel.search.generic.WildcardQueryImpl;
import com.liferay.portal.kernel.service.ServiceContext;
import com.liferay.portal.kernel.util.GetterUtil;
import com.liferay.portal.kernel.util.PwdGenerator;
import com.liferay.portal.kernel.util.StringUtil;
import com.liferay.portal.kernel.util.Validator;
import com.liferay.portal.kernel.uuid.PortalUUIDUtil;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import org.opencps.auth.utils.APIDateTimeUtils;
import org.opencps.communication.model.ServerConfig;
import org.opencps.communication.service.ServerConfigLocalServiceUtil;
import org.opencps.datamgt.constants.DataMGTConstants;
import org.opencps.datamgt.model.DictCollection;
import org.opencps.datamgt.model.DictItem;
import org.opencps.datamgt.service.DictCollectionLocalServiceUtil;
import org.opencps.datamgt.service.DictItemLocalServiceUtil;
import org.opencps.datamgt.util.HolidayUtils;
import org.opencps.datamgt.utils.DictCollectionUtils;
import org.opencps.dossiermgt.action.util.DossierMgtUtils;
import org.opencps.dossiermgt.action.util.DossierNumberGenerator;
import org.opencps.dossiermgt.constants.ConstantsTerm;
import org.opencps.dossiermgt.constants.DossierActionTerm;
import org.opencps.dossiermgt.constants.DossierStatusConstants;
import org.opencps.dossiermgt.constants.DossierTerm;
import org.opencps.dossiermgt.constants.PaymentFileTerm;
import org.opencps.dossiermgt.constants.ServiceInfoTerm;
import org.opencps.dossiermgt.exception.NoSuchDossierException;
import org.opencps.dossiermgt.model.Dossier;
import org.opencps.dossiermgt.model.DossierAction;
import org.opencps.dossiermgt.model.DossierFile;
import org.opencps.dossiermgt.model.DossierPart;
import org.opencps.dossiermgt.model.DossierTemplate;
import org.opencps.dossiermgt.model.ProcessOption;
import org.opencps.dossiermgt.model.ProcessStep;
import org.opencps.dossiermgt.model.ServiceConfig;
import org.opencps.dossiermgt.model.ServiceInfo;
import org.opencps.dossiermgt.model.ServiceProcess;
import org.opencps.dossiermgt.service.DossierActionLocalServiceUtil;
import org.opencps.dossiermgt.service.DossierLocalServiceUtil;
import org.opencps.dossiermgt.service.ProcessOptionLocalServiceUtil;
import org.opencps.dossiermgt.service.ProcessStepLocalServiceUtil;
import org.opencps.dossiermgt.service.ServiceConfigLocalServiceUtil;
import org.opencps.dossiermgt.service.ServiceProcessLocalServiceUtil;
import org.opencps.dossiermgt.service.base.DossierLocalServiceBaseImpl;
import aQute.bnd.annotation.ProviderType;
/**
* The implementation of the dossier local service.
*
* <p>
* All custom service methods should be put in this class. Whenever methods are
* added, rerun ServiceBuilder to copy their definitions into the
* {@link org.opencps.dossiermgt.service.DossierLocalService} interface.
*
* <p>
* This is a local service. Methods of this service will not have security
* checks based on the propagated JAAS credentials because this service can only
* be accessed from within the same VM.
* </p>
*
* @author huymq
* @see DossierLocalServiceBaseImpl
* @see org.opencps.dossiermgt.service.DossierLocalServiceUtil
*/
@ProviderType
public class DossierLocalServiceImpl extends DossierLocalServiceBaseImpl {
/*
* NOTE FOR DEVELOPERS:
*
* Never reference this class directly. Always use {@link
* org.opencps.dossiermgt.service.DossierLocalServiceUtil} to access the
* dossier local service.
*/
protected Log _log = LogFactoryUtil.getLog(DossierLocalServiceImpl.class);
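	/**
	 * Persists a dossier received from a synchronization source and triggers reindexing.
	 */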
@Indexable(type = IndexableType.REINDEX)
public Dossier syncDossier(Dossier dossier) throws PortalException {
dossierPersistence.update(dossier);
return dossier;
}
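	/**
	 * Creates a new dossier (or updates contact/postal data of an existing one) for the given
	 * service and government agency, generating the dossier number from the service process
	 * pattern and copying duration settings from the service process.
	 */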
@Indexable(type = IndexableType.REINDEX)
public Dossier initDossier(long groupId, long dossierId, String referenceUid, long counter, String serviceCode,
String serviceName, String govAgencyCode, String govAgencyName, String applicantName,
String applicantIdType, String applicantIdNo, Date applicantIdDate, String address, String cityCode,
String cityName, String districtCode, String districtName, String wardCode, String wardName,
String contactName, String contactTelNo, String contactEmail, String dossierTemplateNo, String password,
int viaPostal, String postalAddress, String postalCityCode, String postalCityName, String postalTelNo,
boolean online, boolean notification, String applicantNote, int originality, ServiceContext context) throws PortalException {
Date now = new Date();
long userId = context.getUserId();
User auditUser = userPersistence.fetchByPrimaryKey(userId);
validateInit(groupId, dossierId, referenceUid, serviceCode, govAgencyCode, address, cityCode, districtCode,
wardCode, contactName, contactTelNo, contactEmail, dossierTemplateNo);
Dossier dossier = null;
if (dossierId == 0) {
String dossierTemplateName = getDossierTemplateName(groupId, dossierTemplateNo);
dossierId = counterLocalService.increment(Dossier.class.getName());
String dossierNote = getDossierNote(serviceCode, govAgencyCode, dossierTemplateNo, groupId);
dossier = dossierPersistence.create(dossierId);
dossier.setCreateDate(now);
dossier.setModifiedDate(now);
dossier.setCompanyId(context.getCompanyId());
dossier.setGroupId(groupId);
dossier.setUserId(userId);
dossier.setUserName(auditUser.getFullName());
// Add extent fields
dossier.setReferenceUid(referenceUid);
dossier.setCounter(counter);
dossier.setServiceCode(serviceCode);
dossier.setServiceName(serviceName);
dossier.setGovAgencyCode(govAgencyCode);
dossier.setGovAgencyName(govAgencyName);
dossier.setDossierTemplateNo(dossierTemplateNo);
dossier.setDossierTemplateName(dossierTemplateName);
dossier.setApplicantName(applicantName);
dossier.setApplicantIdType(applicantIdType);
dossier.setApplicantIdNo(applicantIdNo);
dossier.setApplicantIdDate(applicantIdDate);
dossier.setPassword(password);
dossier.setOnline(online);
dossier.setDossierNote(dossierNote);
dossier.setAddress(address);
dossier.setCityCode(cityCode);
dossier.setCityName(cityName);
dossier.setDistrictCode(districtCode);
dossier.setDistrictName(districtName);
dossier.setWardCode(wardCode);
dossier.setWardName(wardName);
dossier.setContactName(contactName);
dossier.setContactEmail(contactEmail);
dossier.setContactTelNo(contactTelNo);
dossier.setViaPostal(viaPostal);
dossier.setPostalAddress(postalAddress);
dossier.setPostalCityCode(postalCityCode);
dossier.setPostalCityName(postalCityName);
dossier.setPostalTelNo(postalTelNo);
dossier.setApplicantNote(applicantNote);
// dossier.setServerNo(getServerNo(groupId));
dossier.setOriginality(originality);
//Update sampleCount
ProcessOption option = getProcessOption(serviceCode, govAgencyCode, dossierTemplateNo, groupId);
if (option != null) {
dossier.setSampleCount(option.getSampleCount());
}
dossierPersistence.update(dossier);
// create DossierFile if it is eForm
// List<DossierPart> dossierParts = new ArrayList<DossierPart>();
//
// dossierParts = dossierPartPersistence.findByTP_NO(groupId, dossierTemplateNo);
// for (DossierPart part : dossierParts) {
// if (Validator.isNotNull(part.getFormScript()) && part.getPartType() != 2) {
// String dossierFileUUID = PortalUUIDUtil.generate();
// TODO HotFix
// if (groupId != 55301) {
// if (originality == DossierTerm.ORIGINALITY_DVCTT || originality == DossierTerm.ORIGINALITY_MOTCUA) {
// dossierFileLocalService.addDossierFile(groupId, dossierId, dossierFileUUID, dossierTemplateNo,
// part.getPartNo(), part.getFileTemplateNo(), part.getPartName(), StringPool.BLANK, 0l,
// null, StringPool.BLANK, StringPool.TRUE, context);
// }
// }
// }
// if (originality == DossierTerm.ORIGINALITY_MOTCUA) {
LinkedHashMap<String, Object> params = new LinkedHashMap<String, Object>();
params.put(DossierTerm.GOV_AGENCY_CODE, dossier.getGovAgencyCode());
params.put(DossierTerm.SERVICE_CODE, dossier.getServiceCode());
params.put(DossierTerm.DOSSIER_TEMPLATE_NO, dossier.getDossierTemplateNo());
params.put(DossierTerm.DOSSIER_STATUS, StringPool.BLANK);
ServiceProcess serviceProcess = null;
_log.debug("option: "+option);
if (option != null) {
			//Process submission note
_log.debug("option: "+option.getSubmissionNote());
dossier.setSubmissionNote(option.getSubmissionNote());
_log.debug("option: "+true);
long serviceProcessId = option.getServiceProcessId();
serviceProcess = serviceProcessPersistence.findByPrimaryKey(serviceProcessId);
String dossierRef = DossierNumberGenerator.generateDossierNumber(groupId, dossier.getCompanyId(),
dossierId, option.getProcessOptionId(), serviceProcess.getDossierNoPattern(), params);
dossier.setDossierNo(dossierRef.trim());
dossier.setServerNo(serviceProcess.getServerNo());
}
//Update submit date
// now = new Date();
// dossier.setSubmitDate(now);
Double durationCount;
Integer durationUnit = 0;
if (serviceProcess != null ) {
durationCount = serviceProcess.getDurationCount();
durationUnit = serviceProcess.getDurationUnit();
// _log.debug("durationCount: "+durationCount);
// _log.debug("durationUnit: "+durationUnit);
// int durationDays = 0;
//
// if (durationUnit == 0) {
// durationDays = durationCount;
// } else {
// durationDays = Math.round(durationCount / 8);
// }
// Date dueDate = null;
// if (Validator.isNotNull(durationCount) && durationCount > 0) {
// dueDate = HolidayUtils.getDueDate(now, durationCount, durationUnit, groupId);
// }
//
// _log.debug("dueDate: "+dueDate);
// if (durationDays > 0) {
// dueDate = DossierOverDueUtils.calculateEndDate(now, durationDays);
// }
// dossier.setDueDate(dueDate);
// dossier.setReceiveDate(now);
dossier.setDurationCount(durationCount);
dossier.setDurationUnit(durationUnit);
// }
}
dossier.setViaPostal(viaPostal);
if (viaPostal == 1) {
dossier.setPostalAddress(StringPool.BLANK);
dossier.setPostalCityCode(StringPool.BLANK);
dossier.setPostalTelNo(StringPool.BLANK);
} else if (viaPostal == 2) {
if (Validator.isNotNull(postalAddress))
dossier.setPostalAddress(postalAddress);
if (Validator.isNotNull(postalCityCode))
dossier.setPostalCityCode(postalCityCode);
if (Validator.isNotNull(postalTelNo))
dossier.setPostalTelNo(postalTelNo);
if (Validator.isNotNull(postalCityName))
dossier.setPostalCityName(postalCityName);
} else {
dossier.setPostalAddress(StringPool.BLANK);
dossier.setPostalCityCode(StringPool.BLANK);
dossier.setPostalTelNo(StringPool.BLANK);
}
dossier = dossierPersistence.update(dossier);
} else {
dossier = dossierPersistence.fetchByPrimaryKey(dossierId);
dossier.setModifiedDate(now);
String dossierNote = getDossierNote(serviceCode, govAgencyCode, dossierTemplateNo, groupId);
dossier.setDossierNote(dossierNote);
if (Validator.isNotNull(address))
dossier.setAddress(address);
if (Validator.isNotNull(cityCode))
dossier.setCityCode(cityCode);
if (Validator.isNotNull(cityName))
dossier.setCityName(cityName);
if (Validator.isNotNull(districtCode))
dossier.setDistrictCode(districtCode);
if (Validator.isNotNull(districtName))
dossier.setDistrictName(districtName);
if (Validator.isNotNull(wardCode))
dossier.setWardCode(wardCode);
if (Validator.isNotNull(wardName))
dossier.setWardName(wardName);
if (Validator.isNotNull(contactName))
dossier.setContactName(contactName);
if (Validator.isNotNull(contactEmail))
dossier.setContactEmail(contactEmail);
if (Validator.isNotNull(contactTelNo))
dossier.setContactTelNo(contactTelNo);
dossier.setViaPostal(viaPostal);
if (viaPostal == 1) {
dossier.setPostalAddress(StringPool.BLANK);
dossier.setPostalCityCode(StringPool.BLANK);
dossier.setPostalTelNo(StringPool.BLANK);
} else if (viaPostal == 2) {
if (Validator.isNotNull(postalAddress))
dossier.setPostalAddress(postalAddress);
if (Validator.isNotNull(postalCityCode))
dossier.setPostalCityCode(postalCityCode);
if (Validator.isNotNull(postalTelNo))
dossier.setPostalTelNo(postalTelNo);
if (Validator.isNotNull(postalCityName))
dossier.setPostalCityName(postalCityName);
} else {
dossier.setPostalAddress(StringPool.BLANK);
dossier.setPostalCityCode(StringPool.BLANK);
dossier.setPostalTelNo(StringPool.BLANK);
}
// if (Validator.isNotNull(applicantNote))
dossier.setApplicantNote(applicantNote);
dossier = dossierPersistence.update(dossier);
}
return dossier;
}
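	/**
	 * Overload of initDossier that receives the already resolved ServiceInfo, ServiceProcess and
	 * ProcessOption; originality == 9 is handled as a group dossier and skips the init validation.
	 */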
@Indexable(type = IndexableType.REINDEX)
public Dossier initDossier(long groupId, long dossierId, String referenceUid, int counter, String serviceCode,
String serviceName, String govAgencyCode, String govAgencyName, String applicantName,
String applicantIdType, String applicantIdNo, Date applicantIdDate, String address, String cityCode,
String cityName, String districtCode, String districtName, String wardCode, String wardName,
String contactName, String contactTelNo, String contactEmail, String dossierTemplateNo, String password,
int viaPostal, String postalAddress, String postalCityCode, String postalCityName, String postalTelNo,
boolean online, boolean notification, String applicantNote, int originality,
ServiceInfo service,
ServiceProcess serviceProcess,
ProcessOption processOption,
ServiceContext context) throws PortalException {
Date now = new Date();
long userId = context.getUserId();
User auditUser = userPersistence.fetchByPrimaryKey(userId);
Dossier dossier = null;
if (originality == 9) {
if (dossierId == 0) {
String dossierTemplateName = getDossierTemplateName(groupId, dossierTemplateNo);
dossierId = counterLocalService.increment(Dossier.class.getName());
dossier = dossierPersistence.create(dossierId);
//String dossierNote = getDossierNote(service, processOption);
dossier.setCreateDate(now);
dossier.setModifiedDate(now);
dossier.setCompanyId(context.getCompanyId());
dossier.setGroupId(groupId);
dossier.setUserId(userId);
dossier.setUserName(auditUser.getFullName());
// Add extent fields
dossier.setReferenceUid(referenceUid);
dossier.setCounter(counter);
dossier.setServiceCode(serviceCode);
dossier.setServiceName(serviceName);
dossier.setGovAgencyCode(govAgencyCode);
dossier.setGovAgencyName(govAgencyName);
dossier.setDossierTemplateNo(dossierTemplateNo);
dossier.setDossierTemplateName(dossierTemplateName);
dossier.setApplicantName(applicantName);
dossier.setApplicantIdType(applicantIdType);
dossier.setApplicantIdNo(applicantIdNo);
dossier.setApplicantIdDate(applicantIdDate);
dossier.setPassword(password);
dossier.setOnline(online);
//dossier.setDossierNote(dossierNote);
dossier.setAddress(address);
dossier.setCityCode(cityCode);
dossier.setCityName(cityName);
dossier.setDistrictCode(districtCode);
dossier.setDistrictName(districtName);
dossier.setWardCode(wardCode);
dossier.setWardName(wardName);
dossier.setContactName(contactName);
dossier.setContactEmail(contactEmail);
dossier.setContactTelNo(contactTelNo);
dossier.setViaPostal(viaPostal);
dossier.setPostalAddress(postalAddress);
dossier.setPostalCityCode(postalCityCode);
dossier.setPostalCityName(postalCityName);
dossier.setPostalTelNo(postalTelNo);
dossier.setApplicantNote(applicantNote);
dossier.setOriginality(originality);
dossier.setSampleCount(processOption != null ? processOption.getSampleCount(): 0);
String registerBookCode = processOption != null ? processOption.getRegisterBookCode() : StringPool.BLANK;
dossier.setRegisterBookCode(registerBookCode);
dossier.setRegisterBookName(Validator.isNotNull(registerBookCode) ? getDictItemName(groupId, "REGISTER_BOOK", registerBookCode) : StringPool.BLANK);
dossier.setProcessNo(serviceProcess != null ? serviceProcess.getProcessNo() : StringPool.BLANK);
dossierPersistence.update(dossier);
}
return dossier;
} else {
validateInit(groupId, dossierId, referenceUid, serviceCode, govAgencyCode, address, cityCode, districtCode,
wardCode, contactName, contactTelNo, contactEmail, dossierTemplateNo);
if (dossierId == 0) {
String dossierTemplateName = getDossierTemplateName(groupId, dossierTemplateNo);
dossierId = counterLocalService.increment(Dossier.class.getName());
String dossierNote = getDossierNote(service, processOption);
dossier = dossierPersistence.create(dossierId);
dossier.setCreateDate(now);
dossier.setModifiedDate(now);
dossier.setCompanyId(context.getCompanyId());
dossier.setGroupId(groupId);
dossier.setUserId(userId);
dossier.setUserName(auditUser.getFullName());
// Add extent fields
dossier.setReferenceUid(referenceUid);
dossier.setCounter(counter);
dossier.setServiceCode(serviceCode);
dossier.setServiceName(serviceName);
dossier.setGovAgencyCode(govAgencyCode);
dossier.setGovAgencyName(govAgencyName);
dossier.setDossierTemplateNo(dossierTemplateNo);
dossier.setDossierTemplateName(dossierTemplateName);
dossier.setApplicantName(applicantName);
dossier.setApplicantIdType(applicantIdType);
dossier.setApplicantIdNo(applicantIdNo);
dossier.setApplicantIdDate(applicantIdDate);
dossier.setPassword(password);
dossier.setOnline(online);
dossier.setDossierNote(dossierNote);
dossier.setAddress(address);
dossier.setCityCode(cityCode);
dossier.setCityName(cityName);
dossier.setDistrictCode(districtCode);
dossier.setDistrictName(districtName);
dossier.setWardCode(wardCode);
dossier.setWardName(wardName);
dossier.setContactName(contactName);
dossier.setContactEmail(contactEmail);
dossier.setContactTelNo(contactTelNo);
dossier.setViaPostal(viaPostal);
dossier.setPostalAddress(postalAddress);
dossier.setPostalCityCode(postalCityCode);
dossier.setPostalCityName(postalCityName);
dossier.setPostalTelNo(postalTelNo);
dossier.setApplicantNote(applicantNote);
// dossier.setServerNo(getServerNo(groupId));
dossier.setOriginality(originality);
String registerBookCode = processOption != null ? processOption.getRegisterBookCode() : StringPool.BLANK;
dossier.setRegisterBookCode(registerBookCode);
dossier.setRegisterBookName(Validator.isNotNull(registerBookCode) ? getDictItemName(groupId, "REGISTER_BOOK", registerBookCode) : StringPool.BLANK);
dossier.setProcessNo(serviceProcess != null ? serviceProcess.getProcessNo() : StringPool.BLANK);
//Update sampleCount
// ProcessOption option = getProcessOption(serviceCode, govAgencyCode, dossierTemplateNo, groupId);
ProcessOption option = processOption;
if (option != null) {
dossier.setSampleCount(option.getSampleCount());
dossier.setSubmissionNote(option.getSubmissionNote());
}
Double durationCount;
Integer durationUnit = 0;
if (serviceProcess != null ) {
durationCount = serviceProcess.getDurationCount();
durationUnit = serviceProcess.getDurationUnit();
dossier.setDurationCount(durationCount);
dossier.setDurationUnit(durationUnit);
dossier.setServerNo(serviceProcess.getServerNo());
}
dossier.setViaPostal(viaPostal);
if (viaPostal == 1) {
dossier.setPostalAddress(StringPool.BLANK);
dossier.setPostalCityCode(StringPool.BLANK);
dossier.setPostalTelNo(StringPool.BLANK);
} else if (viaPostal == 2) {
if (Validator.isNotNull(postalAddress))
dossier.setPostalAddress(postalAddress);
if (Validator.isNotNull(postalCityCode))
dossier.setPostalCityCode(postalCityCode);
if (Validator.isNotNull(postalTelNo))
dossier.setPostalTelNo(postalTelNo);
if (Validator.isNotNull(postalCityName))
dossier.setPostalCityName(postalCityName);
} else {
dossier.setPostalAddress(StringPool.BLANK);
dossier.setPostalCityCode(StringPool.BLANK);
dossier.setPostalTelNo(StringPool.BLANK);
}
dossierPersistence.update(dossier);
} else {
dossier = dossierPersistence.fetchByPrimaryKey(dossierId);
dossier.setModifiedDate(now);
// String dossierNote = getDossierNote(serviceCode, govAgencyCode, dossierTemplateNo, groupId);
String dossierNote = getDossierNote(service, processOption);
dossier.setDossierNote(dossierNote);
if (Validator.isNotNull(address))
dossier.setAddress(address);
if (Validator.isNotNull(cityCode))
dossier.setCityCode(cityCode);
if (Validator.isNotNull(cityName))
dossier.setCityName(cityName);
if (Validator.isNotNull(districtCode))
dossier.setDistrictCode(districtCode);
if (Validator.isNotNull(districtName))
dossier.setDistrictName(districtName);
if (Validator.isNotNull(wardCode))
dossier.setWardCode(wardCode);
if (Validator.isNotNull(wardName))
dossier.setWardName(wardName);
if (Validator.isNotNull(contactName))
dossier.setContactName(contactName);
if (Validator.isNotNull(contactEmail))
dossier.setContactEmail(contactEmail);
if (Validator.isNotNull(contactTelNo))
dossier.setContactTelNo(contactTelNo);
dossier.setViaPostal(viaPostal);
if (viaPostal == 1) {
dossier.setPostalAddress(StringPool.BLANK);
dossier.setPostalCityCode(StringPool.BLANK);
dossier.setPostalTelNo(StringPool.BLANK);
} else if (viaPostal == 2) {
if (Validator.isNotNull(postalAddress))
dossier.setPostalAddress(postalAddress);
if (Validator.isNotNull(postalCityCode))
dossier.setPostalCityCode(postalCityCode);
if (Validator.isNotNull(postalTelNo))
dossier.setPostalTelNo(postalTelNo);
if (Validator.isNotNull(postalCityName))
dossier.setPostalCityName(postalCityName);
} else {
dossier.setPostalAddress(StringPool.BLANK);
dossier.setPostalCityCode(StringPool.BLANK);
dossier.setPostalTelNo(StringPool.BLANK);
}
// if (Validator.isNotNull(applicantNote))
dossier.setApplicantNote(applicantNote);
dossierPersistence.update(dossier);
}
return dossier;
}
}
//initMultipleDossier(groupId, 0l, referenceUid, counter, input.getServiceCode(), serviceName,
//input.getGovAgencyCode(), govAgencyName, applicantName, applicantIdType,
//applicantIdNo, appIdDate, address,
//contactName, contactTelNo, contactEmail,
//input.getDossierTemplateNo(), password,
//viaPostal,postalServiceCode,postalServiceName, postalAddress, postalCityCode, postalCityName,
//postalDistrictCode,postalDistrictName,postalWardCode,postalWardName,
//postalTelNo,
//online, process.getDirectNotification(), applicantNote,
//input.getOriginality(),
//delegateIdNo, delegateName,delegateTelNo,delegateEmail,delegateEmail,delegateAddress,
//delegateCityCode,delegateCityName,delegateDistrictCode,delegateDistrictName,delegateWardCode,delegateWardName,
//registerBookCode,registerBookName,sampleCount,
//dossierName,
//service, process, option,
//serviceContext);
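	/**
	 * Creates a dossier as part of a multiple-dossier submission, including postal and delegate
	 * details; dossier number generation is left commented out in this path.
	 */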
@Indexable(type = IndexableType.REINDEX)
public Dossier initMultipleDossier(long groupId, long dossierId, String referenceUid, int counter,
String serviceCode, String serviceName, String govAgencyCode, String govAgencyName, String applicantName,
String applicantIdType, String applicantIdNo, Date applicantIdDate, String address, String contactName,
String contactTelNo, String contactEmail, String dossierTemplateNo, String password, int viaPostal,
String postalServiceCode, String postalServiceName, String postalAddress, String postalCityCode,
String postalCityName, String postalDistrictCode, String postalDistrictName, String postalWardCode,
String postalWardName, String postalTelNo, boolean online, boolean notification, String applicantNote,
int originality, String delegateIdNo, String delegateName, String delegateTelNo, String delegateEmail,
String delegateAddress, String delegateCityCode, String delegateCityName, String delegateDistrictCode,
String delegateDistrictName, String delegateWardCode, String delegateWardName, String registerBookCode,
String registerBookName, int sampleCount, String dossierName, ServiceInfo service,
ServiceProcess process, ProcessOption option, ServiceContext context) throws PortalException {
Date now = new Date();
long userId = context.getUserId();
User auditUser = userPersistence.fetchByPrimaryKey(userId);
Dossier dossier = null;
if (dossierId == 0) {
dossierId = counterLocalService.increment(Dossier.class.getName());
dossier = dossierPersistence.create(dossierId);
String dossierTemplateName = getDossierTemplateName(groupId, dossierTemplateNo);
String dossierNote = getDossierNote(service, option);
dossier.setCreateDate(now);
dossier.setModifiedDate(now);
dossier.setCompanyId(context.getCompanyId());
dossier.setGroupId(groupId);
dossier.setUserId(userId);
dossier.setUserName(auditUser.getFullName());
// Add extent fields
dossier.setReferenceUid(referenceUid);
dossier.setCounter(counter);
dossier.setServiceCode(serviceCode);
dossier.setServiceName(serviceName);
dossier.setGovAgencyCode(govAgencyCode);
dossier.setGovAgencyName(govAgencyName);
dossier.setDossierTemplateNo(dossierTemplateNo);
dossier.setDossierTemplateName(dossierTemplateName);
dossier.setApplicantName(applicantName);
dossier.setApplicantIdType(applicantIdType);
dossier.setApplicantIdNo(applicantIdNo);
dossier.setApplicantIdDate(applicantIdDate);
dossier.setAddress(address);
dossier.setContactName(contactName);
dossier.setContactEmail(contactEmail);
dossier.setContactTelNo(contactTelNo);
dossier.setPassword(password);
dossier.setOnline(online);
dossier.setDossierNote(dossierNote);
dossier.setViaPostal(viaPostal);
if (viaPostal == 1) {
dossier.setPostalAddress(StringPool.BLANK);
dossier.setPostalCityCode(StringPool.BLANK);
dossier.setPostalTelNo(StringPool.BLANK);
} else if (viaPostal == 2) {
if (Validator.isNotNull(postalAddress))
dossier.setPostalAddress(postalAddress);
if (Validator.isNotNull(postalCityCode))
dossier.setPostalCityCode(postalCityCode);
if (Validator.isNotNull(postalTelNo))
dossier.setPostalTelNo(postalTelNo);
if (Validator.isNotNull(postalCityName))
dossier.setPostalCityName(postalCityName);
} else {
dossier.setPostalAddress(StringPool.BLANK);
dossier.setPostalCityCode(StringPool.BLANK);
dossier.setPostalTelNo(StringPool.BLANK);
}
dossier.setPostalServiceCode(postalServiceCode);
dossier.setPostalServiceName(postalServiceName);
dossier.setPostalAddress(postalAddress);
dossier.setPostalCityCode(postalCityCode);
dossier.setPostalCityName(postalCityName);
dossier.setPostalDistrictCode(postalDistrictCode);
dossier.setPostalDistrictName(postalDistrictName);
dossier.setPostalWardCode(postalWardCode);
dossier.setPostalWardName(postalWardName);
dossier.setPostalTelNo(postalTelNo);
dossier.setApplicantNote(applicantNote);
dossier.setSampleCount(sampleCount);
dossier.setOriginality(originality);
dossier.setDelegateIdNo(delegateIdNo);
dossier.setDelegateName(delegateName);
dossier.setDelegateTelNo(delegateTelNo);
dossier.setDelegateEmail(delegateEmail);
dossier.setDelegateAddress(delegateAddress);
dossier.setDelegateCityCode(delegateCityCode);
dossier.setDelegateCityName(delegateCityName);
dossier.setDelegateDistrictCode(delegateDistrictCode);
dossier.setDelegateDistrictName(delegateDistrictName);
dossier.setDelegateWardCode(delegateWardCode);
dossier.setDelegateWardName(delegateWardName);
dossier.setNotification(notification);
dossier.setRegisterBookCode(registerBookCode);
dossier.setRegisterBookName(registerBookName);
dossier.setDossierName(dossierName);
			//Copy process info (process number, server, duration) only when the process is available, to avoid a NullPointerException
			if (process != null) {
				dossier.setProcessNo(process.getProcessNo());
				dossier.setServerNo(process.getServerNo());
				dossier.setDurationCount(process.getDurationCount());
				dossier.setDurationUnit(Validator.isNotNull(process.getDurationUnit()) ? process.getDurationUnit() : 0);
			}
dossierPersistence.update(dossier);
//LinkedHashMap<String, Object> params = new LinkedHashMap<String, Object>();
//params.put(DossierTerm.GOV_AGENCY_CODE, dossier.getGovAgencyCode());
//params.put(DossierTerm.SERVICE_CODE, dossier.getServiceCode());
//params.put(DossierTerm.DOSSIER_TEMPLATE_NO, dossier.getDossierTemplateNo());
//params.put(DossierTerm.DOSSIER_STATUS, StringPool.BLANK);
// if (option != null) {
// String dossierRef = DossierNumberGenerator.generateDossierNumber(groupId, dossier.getCompanyId(),
// dossierId, option.getProcessOptionId(), process.getDossierNoPattern(), params);
// dossier.setDossierNo(dossierRef.trim());
// dossier.setSubmissionNote(option.getSubmissionNote());
//
// }
//dossierPersistence.update(dossier);
}
return dossier;
}
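	/**
	 * Creates a dossier with the full set of applicant, postal and delegate fields in a single
	 * call; largely mirrors initMultipleDossier.
	 */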
@Indexable(type = IndexableType.REINDEX)
public Dossier initFullDossier(long groupId, long dossierId, String referenceUid, int counter,
String serviceCode, String serviceName, String govAgencyCode, String govAgencyName, String applicantName,
String applicantIdType, String applicantIdNo, Date applicantIdDate, String address, String contactName,
String contactTelNo, String contactEmail, String dossierTemplateNo, String password, int viaPostal,
String postalServiceCode, String postalServiceName, String postalAddress, String postalCityCode,
String postalCityName, String postalDistrictCode, String postalDistrictName, String postalWardCode,
String postalWardName, String postalTelNo, boolean online, boolean notification, String applicantNote,
int originality, String delegateIdNo, String delegateName, String delegateTelNo, String delegateEmail,
String delegateAddress, String delegateCityCode, String delegateCityName, String delegateDistrictCode,
String delegateDistrictName, String delegateWardCode, String delegateWardName, String registerBookCode,
String registerBookName, int sampleCount, String dossierName, ServiceInfo service,
ServiceProcess process, ProcessOption option, ServiceContext context) throws PortalException {
Date now = new Date();
long userId = context.getUserId();
User auditUser = userPersistence.fetchByPrimaryKey(userId);
Dossier dossier = null;
if (dossierId == 0) {
dossierId = counterLocalService.increment(Dossier.class.getName());
dossier = dossierPersistence.create(dossierId);
String dossierTemplateName = getDossierTemplateName(groupId, dossierTemplateNo);
String dossierNote = getDossierNote(service, option);
dossier.setCreateDate(now);
dossier.setModifiedDate(now);
dossier.setCompanyId(context.getCompanyId());
dossier.setGroupId(groupId);
dossier.setUserId(userId);
dossier.setUserName(auditUser.getFullName());
// Add extent fields
dossier.setReferenceUid(referenceUid);
dossier.setCounter(counter);
dossier.setServiceCode(serviceCode);
dossier.setServiceName(serviceName);
dossier.setGovAgencyCode(govAgencyCode);
dossier.setGovAgencyName(govAgencyName);
dossier.setDossierTemplateNo(dossierTemplateNo);
dossier.setDossierTemplateName(dossierTemplateName);
dossier.setApplicantName(applicantName);
dossier.setApplicantIdType(applicantIdType);
dossier.setApplicantIdNo(applicantIdNo);
dossier.setApplicantIdDate(applicantIdDate);
dossier.setAddress(address);
dossier.setContactName(contactName);
dossier.setContactEmail(contactEmail);
dossier.setContactTelNo(contactTelNo);
dossier.setPassword(password);
dossier.setOnline(online);
dossier.setDossierNote(dossierNote);
dossier.setViaPostal(viaPostal);
if (viaPostal == 1) {
dossier.setPostalAddress(StringPool.BLANK);
dossier.setPostalCityCode(StringPool.BLANK);
dossier.setPostalTelNo(StringPool.BLANK);
} else if (viaPostal == 2) {
if (Validator.isNotNull(postalAddress))
dossier.setPostalAddress(postalAddress);
if (Validator.isNotNull(postalCityCode))
dossier.setPostalCityCode(postalCityCode);
if (Validator.isNotNull(postalTelNo))
dossier.setPostalTelNo(postalTelNo);
if (Validator.isNotNull(postalCityName))
dossier.setPostalCityName(postalCityName);
} else {
dossier.setPostalAddress(StringPool.BLANK);
dossier.setPostalCityCode(StringPool.BLANK);
dossier.setPostalTelNo(StringPool.BLANK);
}
dossier.setPostalServiceCode(postalServiceCode);
dossier.setPostalServiceName(postalServiceName);
dossier.setPostalAddress(postalAddress);
dossier.setPostalCityCode(postalCityCode);
dossier.setPostalCityName(postalCityName);
dossier.setPostalDistrictCode(postalDistrictCode);
dossier.setPostalDistrictName(postalDistrictName);
dossier.setPostalWardCode(postalWardCode);
dossier.setPostalWardName(postalWardName);
dossier.setPostalTelNo(postalTelNo);
dossier.setApplicantNote(applicantNote);
dossier.setSampleCount(sampleCount);
dossier.setOriginality(originality);
dossier.setDelegateIdNo(delegateIdNo);
dossier.setDelegateName(delegateName);
dossier.setDelegateTelNo(delegateTelNo);
dossier.setDelegateEmail(delegateEmail);
dossier.setDelegateAddress(delegateAddress);
dossier.setDelegateCityCode(delegateCityCode);
dossier.setDelegateCityName(delegateCityName);
dossier.setDelegateDistrictCode(delegateDistrictCode);
dossier.setDelegateDistrictName(delegateDistrictName);
dossier.setDelegateWardCode(delegateWardCode);
dossier.setDelegateWardName(delegateWardName);
dossier.setNotification(notification);
dossier.setRegisterBookCode(registerBookCode);
dossier.setRegisterBookName(registerBookName);
dossier.setDossierName(dossierName);
			//Copy process info (process number, server, duration) only when the process is available, to avoid a NullPointerException
			if (process != null) {
				dossier.setProcessNo(process.getProcessNo());
				dossier.setServerNo(process.getServerNo());
				dossier.setDurationCount(process.getDurationCount());
				dossier.setDurationUnit(Validator.isNotNull(process.getDurationUnit()) ? process.getDurationUnit() : 0);
			}
dossierPersistence.update(dossier);
//LinkedHashMap<String, Object> params = new LinkedHashMap<String, Object>();
//params.put(DossierTerm.GOV_AGENCY_CODE, dossier.getGovAgencyCode());
//params.put(DossierTerm.SERVICE_CODE, dossier.getServiceCode());
//params.put(DossierTerm.DOSSIER_TEMPLATE_NO, dossier.getDossierTemplateNo());
//params.put(DossierTerm.DOSSIER_STATUS, StringPool.BLANK);
//if (option != null) {
// String dossierRef = DossierNumberGenerator.generateDossierNumber(groupId, dossier.getCompanyId(),
// dossierId, option.getProcessOptionId(), process.getDossierNoPattern(), params);
// dossier.setDossierNo(dossierRef.trim());
// dossier.setSubmissionNote(option.getSubmissionNote());
//}
//dossierPersistence.update(dossier);
}
return dossier;
}
private final String ADMINISTRATIVE_REGION = "ADMINISTRATIVE_REGION";
// private final String POSTAL_ADMINISTRATIVE_REGION = "VNPOST_CODE";
private final String GOVERNMENT_AGENCY = "GOVERNMENT_AGENCY";
// private final int DUE_DATE_DEFAULT = 5;
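	/**
	 * Resolves the display name of a dictionary item, returning an empty string when either the
	 * collection or the item cannot be found.
	 */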
	private String getDictItemName(long groupId, String collectionCode, String itemCode) {
		DictCollection dc = DictCollectionLocalServiceUtil.fetchByF_dictCollectionCode(collectionCode, groupId);
		if (dc == null) {
			return StringPool.BLANK;
		}
		_log.debug("COLLECTION UPDATE DOSSIER: " + dc + "," + collectionCode);
		_log.debug("COLLECTION UPDATE DOSSIER: " + dc.getCollectionCode() + "," + dc.getDictCollectionId() + "," + dc.getPrimaryKey());
		DictItem it = DictItemLocalServiceUtil.fetchByF_dictItemCode(itemCode, dc.getPrimaryKey(), groupId);
		if (it == null) {
			return StringPool.BLANK;
		}
		_log.debug("ITEM: " + itemCode + "," + it);
		return it.getItemName();
	}
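	/**
	 * Updates a dossier submitted over the counter (one gate): regenerates the dossier register
	 * and password, resolves administrative region names from dictionary items and fills the
	 * postal block when delivery via postal service is selected.
	 */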
@Indexable(type = IndexableType.REINDEX)
public Dossier updateDossierOneGate(long dossierId, String applicantName, String applicantIdType,
String applicantIdNo, Date applicantIdDate, String address, String cityCode, String districtCode,
String wardCode, String contactName, String contactTelNo, String contactEmail, boolean isSameAsApplicant,
String delegateName, String delegateIdNo, String delegateTelNo, String delegateEmail,
String delegateAddress, String delegateCityCode, String delegateDistrictCode, String delegateWardCode,
String applicantNote, String briefNote, String dossierNo, int viaPostal, String postalServiceCode,
String postalServiceName, String postalAddress, String postalCityCode, String postalDistrictCode,
String postalWardCode, String postalTelNo, long dossierActionId, String paymentFee, String paymentFeeNote,
ServiceContext context) throws PortalException {
Date now = new Date();
Dossier dossier = dossierLocalService.fetchDossier(dossierId);
dossier.setModifiedDate(now);
// create dossierRegister
String dossierRegister = PwdGenerator.getPassword(10).toUpperCase();
dossier.setDossierRegister(dossierRegister);
dossier.setApplicantName(applicantName);
dossier.setApplicantIdType(applicantIdType);
dossier.setApplicantIdNo(applicantIdNo);
dossier.setApplicantIdDate(applicantIdDate);
dossier.setAddress(address);
if (Validator.isNotNull(cityCode)) {
dossier.setCityCode(cityCode);
dossier.setCityName(getDictItemName(dossier.getGroupId(), ADMINISTRATIVE_REGION, cityCode));
}
if (Validator.isNotNull(districtCode)) {
dossier.setDistrictCode(districtCode);
dossier.setDistrictName(getDictItemName(dossier.getGroupId(), ADMINISTRATIVE_REGION, districtCode));
}
if (Validator.isNotNull(wardCode)) {
dossier.setWardCode(wardCode);
dossier.setWardName(getDictItemName(dossier.getGroupId(), ADMINISTRATIVE_REGION, wardCode));
}
dossier.setContactEmail(contactEmail);
dossier.setContactName(contactName);
dossier.setContactTelNo(contactTelNo);
if (isSameAsApplicant) {
dossier.setDelegateName(applicantName);
dossier.setDelegateIdNo(applicantIdNo);
dossier.setDelegateTelNo(contactTelNo);
dossier.setDelegateAddress(address);
if (Validator.isNotNull(cityCode)) {
dossier.setDelegateCityCode(cityCode);
dossier.setDelegateCityName(getDictItemName(dossier.getGroupId(), ADMINISTRATIVE_REGION, cityCode));
}
if (Validator.isNotNull(districtCode)) {
dossier.setDelegateDistrictCode(districtCode);
dossier.setDelegateDistrictName(
getDictItemName(dossier.getGroupId(), ADMINISTRATIVE_REGION, districtCode));
}
if (Validator.isNotNull(wardCode)) {
dossier.setDelegateWardCode(wardCode);
dossier.setDelegateWardName(getDictItemName(dossier.getGroupId(), ADMINISTRATIVE_REGION, wardCode));
}
} else {
dossier.setDelegateName(delegateName);
dossier.setDelegateIdNo(delegateIdNo);
dossier.setDelegateTelNo(delegateTelNo);
dossier.setDelegateAddress(delegateAddress);
if (Validator.isNotNull(delegateCityCode)) {
dossier.setDelegateCityCode(delegateCityCode);
dossier.setDelegateCityName(
getDictItemName(dossier.getGroupId(), ADMINISTRATIVE_REGION, delegateCityCode));
}
if (Validator.isNotNull(delegateDistrictCode)) {
dossier.setDelegateDistrictCode(delegateDistrictCode);
dossier.setDelegateDistrictName(
getDictItemName(dossier.getGroupId(), ADMINISTRATIVE_REGION, delegateDistrictCode));
}
if (Validator.isNotNull(delegateWardCode)) {
dossier.setDelegateWardCode(delegateWardCode);
dossier.setDelegateWardName(
getDictItemName(dossier.getGroupId(), ADMINISTRATIVE_REGION, delegateWardCode));
}
}
dossier.setApplicantNote(applicantNote);
dossier.setBriefNote(briefNote);
dossier.setDossierNo(dossierNo);
// viaPostal: 0: disabled, 1: unselected, 2: selected
if (viaPostal == 1) {
dossier.setViaPostal(viaPostal);
dossier.setPostalServiceCode(postalServiceCode);
dossier.setPostalServiceName(postalServiceName);
dossier.setPostalAddress(postalAddress);
dossier.setPostalCityCode(postalCityCode);
dossier.setPostalCityName(
getDictItemName(dossier.getGroupId(), ADMINISTRATIVE_REGION, postalCityCode));
dossier.setPostalDistrictCode(postalDistrictCode);
dossier.setPostalDistrictName(
getDictItemName(dossier.getGroupId(), ADMINISTRATIVE_REGION, postalDistrictCode));
dossier.setPostalWardCode(postalWardCode);
dossier.setPostalWardName(
getDictItemName(dossier.getGroupId(), ADMINISTRATIVE_REGION, postalWardCode));
dossier.setPostalTelNo(postalTelNo);
}
String password = PwdGenerator.getPassword(8).toUpperCase();
dossier.setPassword(password);
dossier.setOnline(false);
//LamTV_Process
// if (dossierActionId > 0) {
// DossierAction dAction = DossierActionLocalServiceUtil.fetchDossierAction(dossierActionId);
// ProcessAction process = ProcessActionLocalServiceUtil.getByServiceProcess(dAction.getServiceProcessId(),
// dAction.getActionCode());
// if (process != null) {
// process.setPaymentFee(paymentFee);
// ProcessActionLocalServiceUtil.updateProcessAction(process);
// }
// } else {
// ServiceProcess serProcess = ServiceProcessLocalServiceUtil.getServiceByCode(dossier.getGroupId(), dossier.getServiceCode(), dossier.getGovAgencyCode(),
// dossier.getDossierTemplateNo());
// if (serProcess != null) {
// ProcessAction process = ProcessActionLocalServiceUtil.getByServiceProcess(serProcess.getServiceProcessId(),
// String.valueOf(10000));
// if (process != null) {
// process.setPaymentFee(paymentFee);
// ProcessActionLocalServiceUtil.updateProcessAction(process);
// }
// }
// }
//LamTV_ Process Post payment
// long userId = context.getUserId();
// long groupId = dossier.getGroupId();
// String referenceUid = StringPool.BLANK;
// if (Validator.isNull(referenceUid)) {
// referenceUid = PortalUUIDUtil.generate();
// }
// String govAgencyCode = dossier.getGovAgencyCode();
// String govAgencyName = dossier.getGovAgencyName();
// long paymentAmount = 0;
// String epaymentProfile = StringPool.BLANK;
// String bankInfo = StringPool.BLANK;
// PaymentFileLocalServiceUtil.createPaymentFiles(userId, groupId, dossierId,
// referenceUid, govAgencyCode, govAgencyName, applicantName, applicantIdNo, paymentFee, paymentAmount,
// paymentFeeNote, epaymentProfile, bankInfo, context);
dossierPersistence.update(dossier);
return dossier;
}
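/**
 * Creates a dossier in the NEW status for the given service, agency and dossier template:
 * resolves agency and address names from the dictionaries, copies the applicant into the
 * delegate fields when isSameAsApplicant is set, computes the due date from the service
 * process duration, initializes the e-form dossier files of the template and finally
 * generates the dossier number from the service process pattern.
 */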
@Indexable(type = IndexableType.REINDEX)
public Dossier createDossier(long groupId, String serviceCode, String govAgencyCode, String applicantName,
String applicantIdType, String applicantIdNo, Date applicantIdDate, String address, String cityCode,
String districtCode, String wardCode, String contactName, String contactTelNo, String contactEmail,
boolean isSameAsApplicant, String delegateName, String delegateIdNo, String delegateTelNo,
String delegateEmail, String delegateAddress, String delegateCityCode, String delegateDistrictCode,
String delegateWardCode, String applicantNote, String briefNote, String dossierNo, String dossierTemplateNo,
int viaPostal, String postalServiceCode, String postalServiceName, String postalAddress,
String postalCityCode, String postalDistrictCode, String postalWardCode, String postalTelNo,
int originality,
ServiceContext context) throws PortalException {
Date now = new Date();
long dossierId = counterLocalService.increment(Dossier.class.getName());
long userId = context.getUserId();
// create referenceUid
String referenceUid = PortalUUIDUtil.generate();
// create counterId
int counter = DossierNumberGenerator.counterDossier(userId, groupId);
Dossier dossier = dossierLocalService.createDossier(dossierId);
// set dossier status to NEW
dossier.setDossierStatus(DossierStatusConstants.NEW);
dossier.setCreateDate(now);
dossier.setModifiedDate(now);
dossier.setCompanyId(context.getCompanyId());
dossier.setGroupId(groupId);
dossier.setReferenceUid(referenceUid);
dossier.setCounter(counter);
String dossierTemplateName = getDossierTemplateName(groupId, dossierTemplateNo);
// create dossierRegister
String dossierRegister = PwdGenerator.getPassword(10).toUpperCase();
dossier.setDossierRegister(dossierRegister);
ServiceInfo serviceInfo = serviceInfoLocalService.getByCode(groupId, serviceCode);
dossier.setServiceCode(serviceCode);
dossier.setServiceName(serviceInfo.getServiceName());
dossier.setGovAgencyCode(govAgencyCode);
dossier.setGovAgencyName(getDictItemName(groupId, GOVERNMENT_AGENCY, govAgencyCode));
dossier.setApplicantName(applicantName);
dossier.setApplicantIdType(applicantIdType);
dossier.setApplicantIdNo(applicantIdNo);
dossier.setApplicantIdDate(applicantIdDate);
dossier.setAddress(address);
dossier.setDossierTemplateNo(dossierTemplateNo);
dossier.setDossierTemplateName(dossierTemplateName);
dossier.setOriginality(originality);
if (Validator.isNotNull(cityCode)) {
dossier.setCityCode(cityCode);
dossier.setCityName(getDictItemName(groupId, ADMINISTRATIVE_REGION, cityCode));
}
if (Validator.isNotNull(districtCode)) {
dossier.setDistrictCode(districtCode);
dossier.setDistrictName(getDictItemName(groupId, ADMINISTRATIVE_REGION, districtCode));
}
if (Validator.isNotNull(wardCode)) {
dossier.setWardCode(wardCode);
dossier.setWardName(getDictItemName(groupId, ADMINISTRATIVE_REGION, wardCode));
}
dossier.setContactEmail(contactEmail);
dossier.setContactName(contactName);
dossier.setContactTelNo(contactTelNo);
if (isSameAsApplicant) {
dossier.setDelegateName(applicantName);
dossier.setDelegateIdNo(applicantIdNo);
dossier.setDelegateTelNo(contactTelNo);
dossier.setDelegateAddress(address);
if (Validator.isNotNull(cityCode)) {
dossier.setDelegateCityCode(cityCode);
dossier.setDelegateCityName(getDictItemName(groupId, ADMINISTRATIVE_REGION, cityCode));
}
if (Validator.isNotNull(districtCode)) {
dossier.setDelegateDistrictCode(districtCode);
dossier.setDelegateDistrictName(getDictItemName(groupId, ADMINISTRATIVE_REGION, districtCode));
}
if (Validator.isNotNull(wardCode)) {
dossier.setDelegateWardCode(wardCode);
dossier.setDelegateWardName(getDictItemName(groupId, ADMINISTRATIVE_REGION, wardCode));
}
} else {
dossier.setDelegateName(delegateName);
dossier.setDelegateIdNo(delegateIdNo);
dossier.setDelegateTelNo(delegateTelNo);
dossier.setDelegateAddress(delegateAddress);
if (Validator.isNotNull(delegateCityCode)) {
dossier.setDelegateCityCode(delegateCityCode);
dossier.setDelegateCityName(getDictItemName(groupId, ADMINISTRATIVE_REGION, delegateCityCode));
}
if (Validator.isNotNull(delegateDistrictCode)) {
dossier.setDelegateDistrictCode(delegateDistrictCode);
dossier.setDelegateDistrictName(getDictItemName(groupId, ADMINISTRATIVE_REGION, delegateDistrictCode));
}
if (Validator.isNotNull(delegateWardCode)) {
dossier.setDelegateWardCode(delegateWardCode);
dossier.setDelegateWardName(getDictItemName(groupId, ADMINISTRATIVE_REGION, delegateWardCode));
}
}
ProcessOption option = getProcessOption(serviceCode, govAgencyCode, dossierTemplateNo, groupId);
long serviceProcessId = option.getServiceProcessId();
ServiceProcess serviceProcess = serviceProcessPersistence.findByPrimaryKey(serviceProcessId);
double durationCount = 0;
int durationUnit = 0;
if (serviceProcess != null ) {
durationCount = serviceProcess.getDurationCount();
durationUnit = serviceProcess.getDurationUnit();
}
// _log.debug("durationCount: "+durationCount);
// _log.debug("durationUnit: "+durationUnit);
Date dueDate = HolidayUtils.getDueDate(now, durationCount, durationUnit, groupId);
// set dueDate
dossier.setDueDate(dueDate);
// set receivedDate
dossier.setReceiveDate(now);
dossier.setDossierNote(option.getInstructionNote());
dossier.setSubmissionNote(option.getSubmissionNote());
dossier.setApplicantNote(applicantNote);
dossier.setBriefNote(briefNote);
//dossier.setDossierNo(dossierNo);
// viaPostal: 0: disabled, 1: unselected, 2: selected
// if (viaPostal == 2) {
//LamTV_Hot fix
if (viaPostal == 1) {
dossier.setViaPostal(viaPostal);
dossier.setPostalServiceCode(postalServiceCode);
dossier.setPostalServiceName(postalServiceName);
dossier.setPostalAddress(postalAddress);
dossier.setPostalCityCode(postalCityCode);
dossier.setPostalCityName(getDictItemName(groupId, ADMINISTRATIVE_REGION, postalCityCode));
dossier.setPostalDistrictCode(postalDistrictCode);
dossier.setPostalDistrictName(getDictItemName(groupId, ADMINISTRATIVE_REGION, postalDistrictCode));
dossier.setPostalWardCode(postalWardCode);
dossier.setPostalWardName(getDictItemName(groupId, ADMINISTRATIVE_REGION, postalWardCode));
dossier.setPostalTelNo(postalTelNo);
}
String password = PwdGenerator.getPassword(8).toUpperCase();
dossier.setPassword(password);
dossier.setOnline(false);
dossierPersistence.update(dossier);
// init DossierFile
List<DossierPart> dossierParts;
dossierParts = dossierPartPersistence.findByTP_NO(groupId, dossierTemplateNo);
for (DossierPart part : dossierParts) {
if (Validator.isNotNull(part.getFormScript()) && part.getPartType() != 2) {
String dossierFileUUID = PortalUUIDUtil.generate();
dossierFileLocalService.addDossierFile(groupId, dossierId, dossierFileUUID, dossierTemplateNo,
part.getPartNo(), part.getFileTemplateNo(), part.getPartName(), StringPool.BLANK, 0l, null,
StringPool.BLANK, StringPool.TRUE, context);
}
}
LinkedHashMap<String, Object> params = new LinkedHashMap<String, Object>();
params.put(DossierTerm.GOV_AGENCY_CODE, dossier.getGovAgencyCode());
params.put(DossierTerm.SERVICE_CODE, dossier.getServiceCode());
params.put(DossierTerm.DOSSIER_TEMPLATE_NO, dossier.getDossierTemplateNo());
params.put(DossierTerm.DOSSIER_STATUS, StringPool.BLANK);
String dossierRef = DossierNumberGenerator.generateDossierNumber(groupId, dossier.getCompanyId(),
dossierId, option.getProcessOptionId(), serviceProcess != null ? serviceProcess.getDossierNoPattern() : StringPool.BLANK, params);
//LamTV_ Process Post payment
// String referenceUid = StringPool.BLANK;
// if (Validator.isNull(referenceUid)) {
// referenceUid = PortalUUIDUtil.generate();
// }
// String govAgencyCode = dossier.getGovAgencyCode();
// String govAgencyName = dossier.getGovAgencyName();
// String paymentNote = StringPool.BLANK;
// String epaymentProfile = StringPool.BLANK;
// String bankInfo = StringPool.BLANK;
// String paymentFee;
// long paymentAmount = 0;
if (serviceProcess != null) {
// paymentFee = serviceProcess.getPaymentFee();
// _log.debug("paymentFee: "+paymentFee);
}
// PaymentFileLocalServiceUtil.createPaymentFiles(userId, groupId, dossierId, referenceUid, govAgencyCode,
// govAgencyName, applicantName, applicantIdNo, paymentFee, paymentAmount, paymentNote, epaymentProfile,
// bankInfo, context);
// _log.debug("SERVICEPROCESS"+ serviceProcess.getDossierNoPattern());
//
// _log.debug("DOSSIER_NO_"+ dossierRef);
dossier.setDossierNo(dossierRef.trim());
dossierPersistence.update(dossier);
return dossier;
}
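/**
 * Resolves the process option for a service / agency / dossier template combination via the
 * matching service configuration.
 */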
private ProcessOption getProcessOption(String serviceInfoCode, String govAgencyCode, String dossierTemplateNo,
long groupId) throws PortalException {
ServiceConfig config = ServiceConfigLocalServiceUtil.getBySICodeAndGAC(groupId, serviceInfoCode, govAgencyCode);
return ProcessOptionLocalServiceUtil.getByDTPLNoAndServiceCF(groupId, dossierTemplateNo,
config.getServiceConfigId());
}
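/**
 * Creates a new dossier (dossierId == 0) from the given applicant, contact and postal data:
 * the dossier number and server number are taken from the service process of the resolved
 * process option and the e-form dossier files of the template are initialized. When
 * dossierId != 0 only the note, address, contact and postal fields of the existing dossier
 * are updated.
 */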
@Indexable(type = IndexableType.REINDEX)
public Dossier postDossier(long groupId, long dossierId, String referenceUid, int counter, String serviceCode,
String serviceName, String govAgencyCode, String govAgencyName, String applicantName,
String applicantIdType, String applicantIdNo, Date applicantIdDate, String address, String cityCode,
String cityName, String districtCode, String districtName, String wardCode, String wardName,
String contactName, String contactTelNo, String contactEmail, String dossierTemplateNo, String password,
int viaPostal, String postalAddress, String postalCityCode, String postalCityName, String postalTelNo,
boolean online, boolean notification, String applicantNote, int originality, ServiceContext context) throws PortalException {
Date now = new Date();
long userId = context.getUserId();
User auditUser = userPersistence.fetchByPrimaryKey(userId);
validateInit(groupId, dossierId, referenceUid, serviceCode, govAgencyCode, address, cityCode, districtCode,
wardCode, contactName, contactTelNo, contactEmail, dossierTemplateNo);
Dossier dossier = null;
if (dossierId == 0) {
String dossierTemplateName = getDossierTemplateName(groupId, dossierTemplateNo);
dossierId = counterLocalService.increment(Dossier.class.getName());
String dossierNote = getDossierNote(serviceCode, govAgencyCode, dossierTemplateNo, groupId);
dossier = dossierPersistence.create(dossierId);
dossier.setCreateDate(now);
dossier.setModifiedDate(now);
dossier.setCompanyId(context.getCompanyId());
dossier.setGroupId(groupId);
dossier.setUserId(userId);
dossier.setUserName(auditUser.getFullName());
// Add extended fields
dossier.setReferenceUid(referenceUid);
dossier.setCounter(counter);
dossier.setServiceCode(serviceCode);
dossier.setServiceName(serviceName);
dossier.setGovAgencyCode(govAgencyCode);
dossier.setGovAgencyName(govAgencyName);
dossier.setDossierTemplateNo(dossierTemplateNo);
dossier.setDossierTemplateName(dossierTemplateName);
dossier.setApplicantName(applicantName);
dossier.setApplicantIdType(applicantIdType);
dossier.setApplicantIdNo(applicantIdNo);
dossier.setApplicantIdDate(applicantIdDate);
dossier.setPassword(password);
dossier.setOnline(online);
dossier.setDossierNote(dossierNote);
dossier.setAddress(address);
dossier.setCityCode(cityCode);
dossier.setCityName(cityName);
dossier.setDistrictCode(districtCode);
dossier.setDistrictName(districtName);
dossier.setWardCode(wardCode);
dossier.setWardName(wardName);
dossier.setContactName(contactName);
dossier.setContactEmail(contactEmail);
dossier.setContactTelNo(contactTelNo);
dossier.setViaPostal(viaPostal);
dossier.setPostalAddress(postalAddress);
dossier.setPostalCityCode(postalCityCode);
dossier.setPostalCityName(postalCityName);
dossier.setPostalTelNo(postalTelNo);
dossier.setApplicantNote(applicantNote);
ProcessOption option = getProcessOption(serviceCode, govAgencyCode, dossierTemplateNo, groupId);
// if (originality == DossierTerm.ORIGINALITY_MOTCUA) {
LinkedHashMap<String, Object> params = new LinkedHashMap<String, Object>();
params.put(DossierTerm.GOV_AGENCY_CODE, dossier.getGovAgencyCode());
params.put(DossierTerm.SERVICE_CODE, dossier.getServiceCode());
params.put(DossierTerm.DOSSIER_TEMPLATE_NO, dossier.getDossierTemplateNo());
params.put(DossierTerm.DOSSIER_STATUS, StringPool.BLANK);
ServiceProcess serviceProcess = null;
_log.debug("option: "+option);
if (option != null) {
// Process submission note
_log.debug("option: "+option.getSubmissionNote());
dossier.setSubmissionNote(option.getSubmissionNote());
_log.debug("option: "+true);
long serviceProcessId = option.getServiceProcessId();
serviceProcess = serviceProcessPersistence.findByPrimaryKey(serviceProcessId);
String dossierRef = DossierNumberGenerator.generateDossierNumber(groupId, dossier.getCompanyId(),
dossierId, option.getProcessOptionId(), serviceProcess.getDossierNoPattern(), params);
dossier.setDossierNo(dossierRef.trim());
dossier.setServerNo(serviceProcess.getServerNo());
}
// dossier.setServerNo(getServerNo(groupId));
dossier.setOriginality(originality);
dossierPersistence.update(dossier);
// create DossierFile if it is eForm
List<DossierPart> dossierParts;
dossierParts = dossierPartPersistence.findByTP_NO(groupId, dossierTemplateNo);
for (DossierPart part : dossierParts) {
if (Validator.isNotNull(part.getFormScript()) && part.getPartType() != 2) {
String dossierFileUUID = PortalUUIDUtil.generate();
// TODO HotFix
if (groupId != 55301) {
dossierFileLocalService.addDossierFile(groupId, dossierId, dossierFileUUID, dossierTemplateNo,
part.getPartNo(), part.getFileTemplateNo(), part.getPartName(), StringPool.BLANK, 0l,
null, StringPool.BLANK, StringPool.TRUE, context);
}
}
}
} else {
dossier = dossierPersistence.fetchByPrimaryKey(dossierId);
dossier.setModifiedDate(now);
String dossierNote = getDossierNote(serviceCode, govAgencyCode, dossierTemplateNo, groupId);
dossier.setDossierNote(dossierNote);
if (Validator.isNotNull(address))
dossier.setAddress(address);
if (Validator.isNotNull(cityCode))
dossier.setCityCode(cityCode);
if (Validator.isNotNull(cityName))
dossier.setCityName(cityName);
if (Validator.isNotNull(districtCode))
dossier.setDistrictCode(districtCode);
if (Validator.isNotNull(districtName))
dossier.setDistrictName(districtName);
if (Validator.isNotNull(wardCode))
dossier.setWardCode(wardCode);
if (Validator.isNotNull(wardName))
dossier.setWardName(wardName);
if (Validator.isNotNull(contactName))
dossier.setContactName(contactName);
if (Validator.isNotNull(contactEmail))
dossier.setContactEmail(contactEmail);
if (Validator.isNotNull(contactTelNo))
dossier.setContactTelNo(contactTelNo);
dossier.setViaPostal(viaPostal);
if (viaPostal == 1) {
dossier.setPostalAddress(StringPool.BLANK);
dossier.setPostalCityCode(StringPool.BLANK);
dossier.setPostalTelNo(StringPool.BLANK);
} else if (viaPostal == 2) {
if (Validator.isNotNull(postalAddress))
dossier.setPostalAddress(postalAddress);
if (Validator.isNotNull(postalCityCode))
dossier.setPostalCityCode(postalCityCode);
if (Validator.isNotNull(postalTelNo))
dossier.setPostalTelNo(postalTelNo);
if (Validator.isNotNull(postalCityName))
dossier.setPostalCityName(postalCityName);
} else {
dossier.setPostalAddress(StringPool.BLANK);
dossier.setPostalCityCode(StringPool.BLANK);
dossier.setPostalTelNo(StringPool.BLANK);
}
// if (Validator.isNotNull(applicantNote))
dossier.setApplicantNote(applicantNote);
dossierPersistence.update(dossier);
}
return dossier;
}
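/**
 * Assigns an existing dossier to a process by updating its notes, dossier number, folder,
 * current dossier action and server number.
 */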
@Indexable(type = IndexableType.REINDEX)
public Dossier assignToProcess(long dossierId, String dossierNote, String submissionNote, String briefNote,
String dossierNo, long folderId, long dossierActionId, String serverNo, ServiceContext context) {
Dossier dossier = dossierPersistence.fetchByPrimaryKey(dossierId);
dossier.setDossierNote(dossierNote);
dossier.setSubmissionNote(submissionNote);
dossier.setBriefNote(briefNote);
dossier.setDossierNo(dossierNo);
dossier.setFolderId(folderId);
dossier.setDossierActionId(dossierActionId);
dossier.setServerNo(serverNo);
dossierPersistence.update(dossier);
return dossier;
}
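/**
 * Creates a dossier with the full set of applicant, status, postal and routing fields when
 * dossierId == 0; otherwise updates the applicant, address, contact, postal and notification
 * fields of the existing dossier.
 */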
@Indexable(type = IndexableType.REINDEX)
public Dossier updateDossier(long groupId, long dossierId, String referenceUid, int counter, String serviceCode,
String serviceName, String govAgencyCode, String govAgencyName, String applicantName,
String applicantIdType, String applicantIdNo, Date applicantIdDate, String address, String cityCode,
String cityName, String districtCode, String districtName, String wardCode, String wardName,
String contactName, String contactTelNo, String contactEmail, String dossierTemplateNo, String dossierNote,
String submissionNote, String applicantNote, String briefNote, String dossierNo, boolean submitting,
Date correctingDate, String dossierStatus, String dossierStatusText, String dossierSubStatus,
String dossierSubStatusText, long folderId, long dossierActionId, int viaPostal, String postalAddress,
String postalCityCode, String postalCityName, String postalTelNo, String password, boolean notification,
boolean online, String serverNo, ServiceContext context) throws PortalException {
Date now = new Date();
long userId = context.getUserId();
User auditUser = userPersistence.fetchByPrimaryKey(userId);
validateUpdateDossier(groupId, dossierId, referenceUid, serviceCode, govAgencyCode, address, cityCode,
districtCode, wardCode, contactName, contactTelNo, contactEmail, dossierTemplateNo, dossierNote,
submissionNote, dossierNo, submitting, dossierStatusText, dossierSubStatusText, postalAddress,
postalCityCode, postalTelNo, serverNo);
Dossier dossier = null;
if (dossierId == 0) {
dossierId = counterLocalService.increment(Dossier.class.getName());
dossier = dossierPersistence.create(dossierId);
dossier.setCreateDate(now);
dossier.setModifiedDate(now);
dossier.setCompanyId(context.getCompanyId());
dossier.setGroupId(groupId);
dossier.setUserId(userId);
if (Validator.isNotNull(auditUser))
dossier.setUserName(auditUser.getFullName());
// Add extended fields
dossier.setReferenceUid(referenceUid);
dossier.setCounter(counter);
dossier.setServiceCode(serviceCode);
dossier.setServiceName(serviceName);
dossier.setGovAgencyCode(govAgencyCode);
dossier.setGovAgencyName(govAgencyName);
dossier.setDossierTemplateNo(dossierTemplateNo);
DossierTemplate dt = dossierTemplatePersistence.findByG_DT_TPLNO(groupId, dossierTemplateNo);
if (Validator.isNotNull(dt)) {
dossier.setDossierTemplateName(dt.getTemplateName());
}
dossier.setApplicantName(applicantName);
dossier.setApplicantIdType(applicantIdType);
dossier.setApplicantIdNo(applicantIdNo);
dossier.setApplicantIdDate(applicantIdDate);
dossier.setDossierNo(dossierNo);
dossier.setApplicantNote(applicantNote);
dossier.setBriefNote(briefNote);
dossier.setDossierStatus(dossierStatus);
dossier.setDossierStatusText(dossierStatusText);
dossier.setDossierSubStatus(dossierSubStatus);
dossier.setDossierSubStatusText(dossierSubStatusText);
dossier.setAddress(address);
dossier.setCityCode(cityCode);
dossier.setCityName(cityName);
dossier.setDistrictCode(districtCode);
dossier.setDistrictName(districtName);
dossier.setWardCode(wardCode);
dossier.setWardName(wardName);
dossier.setContactName(contactName);
dossier.setContactEmail(contactEmail);
dossier.setFolderId(folderId);
dossier.setDossierActionId(dossierActionId);
dossier.setViaPostal(viaPostal);
dossier.setPostalAddress(postalAddress);
dossier.setPostalCityCode(postalCityCode);
dossier.setPostalCityName(postalCityName);
dossier.setPostalTelNo(postalTelNo);
dossier.setPassword(password);
dossier.setNotification(notification);
dossier.setOnline(online);
dossier.setServerNo(serverNo);
} else {
dossier = dossierPersistence.fetchByPrimaryKey(dossierId);
dossier.setModifiedDate(now);
dossier.setApplicantName(applicantName);
dossier.setApplicantIdType(applicantIdType);
dossier.setApplicantIdNo(applicantIdNo);
dossier.setApplicantIdDate(applicantIdDate);
dossier.setAddress(address);
dossier.setCityCode(cityCode);
dossier.setCityName(cityName);
dossier.setDistrictCode(districtCode);
dossier.setDistrictName(districtName);
dossier.setWardCode(wardCode);
dossier.setWardName(wardName);
dossier.setContactName(contactName);
dossier.setContactEmail(contactEmail);
dossier.setViaPostal(viaPostal);
dossier.setPostalAddress(postalAddress);
dossier.setPostalCityCode(postalCityCode);
dossier.setPostalCityName(postalCityName);
dossier.setPostalTelNo(postalTelNo);
dossier.setApplicantNote(applicantNote);
dossier.setNotification(notification);
}
dossierPersistence.update(dossier);
return dossier;
}
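/**
 * Same as the overload above, but additionally stores the submit date when a new dossier is
 * created.
 */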
@Indexable(type = IndexableType.REINDEX)
public Dossier updateDossier(long groupId, long dossierId, String referenceUid, int counter, String serviceCode,
String serviceName, String govAgencyCode, String govAgencyName, String applicantName,
String applicantIdType, String applicantIdNo, Date applicantIdDate, String address, String cityCode,
String cityName, String districtCode, String districtName, String wardCode, String wardName,
String contactName, String contactTelNo, String contactEmail, String dossierTemplateNo, String dossierNote,
String submissionNote, String applicantNote, String briefNote, String dossierNo, boolean submitting,
Date correctingDate, String dossierStatus, String dossierStatusText, String dossierSubStatus,
String dossierSubStatusText, long folderId, long dossierActionId, int viaPostal, String postalAddress,
String postalCityCode, String postalCityName, String postalTelNo, String password, boolean notification,
boolean online, String serverNo, Date submitDate, ServiceContext context) throws PortalException {
Date now = new Date();
long userId = context.getUserId();
User auditUser = userPersistence.fetchByPrimaryKey(userId);
validateUpdateDossier(groupId, dossierId, referenceUid, serviceCode, govAgencyCode, address, cityCode,
districtCode, wardCode, contactName, contactTelNo, contactEmail, dossierTemplateNo, dossierNote,
submissionNote, dossierNo, submitting, dossierStatusText, dossierSubStatusText, postalAddress,
postalCityCode, postalTelNo, serverNo);
Dossier dossier = null;
if (dossierId == 0) {
dossierId = counterLocalService.increment(Dossier.class.getName());
dossier = dossierPersistence.create(dossierId);
dossier.setCreateDate(now);
dossier.setModifiedDate(now);
dossier.setCompanyId(context.getCompanyId());
dossier.setGroupId(groupId);
dossier.setUserId(userId);
if (Validator.isNotNull(auditUser))
dossier.setUserName(auditUser.getFullName());
// Add extended fields
dossier.setReferenceUid(referenceUid);
dossier.setCounter(counter);
dossier.setServiceCode(serviceCode);
dossier.setServiceName(serviceName);
dossier.setGovAgencyCode(govAgencyCode);
dossier.setGovAgencyName(govAgencyName);
dossier.setDossierTemplateNo(dossierTemplateNo);
DossierTemplate dt = dossierTemplatePersistence.findByG_DT_TPLNO(groupId, dossierTemplateNo);
if (Validator.isNotNull(dt)) {
dossier.setDossierTemplateName(dt.getTemplateName());
}
dossier.setApplicantName(applicantName);
dossier.setApplicantIdType(applicantIdType);
dossier.setApplicantIdNo(applicantIdNo);
dossier.setApplicantIdDate(applicantIdDate);
dossier.setDossierNo(dossierNo);
dossier.setApplicantNote(applicantNote);
dossier.setBriefNote(briefNote);
dossier.setDossierStatus(dossierStatus);
dossier.setDossierStatusText(dossierStatusText);
dossier.setDossierSubStatus(dossierSubStatus);
dossier.setDossierSubStatusText(dossierSubStatusText);
dossier.setAddress(address);
dossier.setCityCode(cityCode);
dossier.setCityName(cityName);
dossier.setDistrictCode(districtCode);
dossier.setDistrictName(districtName);
dossier.setWardCode(wardCode);
dossier.setWardName(wardName);
dossier.setContactName(contactName);
dossier.setContactEmail(contactEmail);
dossier.setFolderId(folderId);
dossier.setDossierActionId(dossierActionId);
dossier.setViaPostal(viaPostal);
dossier.setPostalAddress(postalAddress);
dossier.setPostalCityCode(postalCityCode);
dossier.setPostalCityName(postalCityName);
dossier.setPostalTelNo(postalTelNo);
dossier.setPassword(password);
dossier.setNotification(notification);
dossier.setOnline(online);
dossier.setServerNo(serverNo);
dossier.setSubmitDate(submitDate);
} else {
dossier = dossierPersistence.fetchByPrimaryKey(dossierId);
dossier.setModifiedDate(now);
dossier.setApplicantName(applicantName);
dossier.setApplicantIdType(applicantIdType);
dossier.setApplicantIdNo(applicantIdNo);
dossier.setApplicantIdDate(applicantIdDate);
dossier.setAddress(address);
dossier.setCityCode(cityCode);
dossier.setCityName(cityName);
dossier.setDistrictCode(districtCode);
dossier.setDistrictName(districtName);
dossier.setWardCode(wardCode);
dossier.setWardName(wardName);
dossier.setContactName(contactName);
dossier.setContactEmail(contactEmail);
dossier.setViaPostal(viaPostal);
dossier.setPostalAddress(postalAddress);
dossier.setPostalCityCode(postalCityCode);
dossier.setPostalCityName(postalCityName);
dossier.setPostalTelNo(postalTelNo);
dossier.setApplicantNote(applicantNote);
dossier.setNotification(notification);
}
dossierPersistence.update(dossier);
return dossier;
}
private static final String LOCK_ALL = "LOCK ALL";
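/**
 * Marks a dossier as being submitted: sets the submitting flag, records the submit date for
 * dossiers still in the NEW status and locks the dossier (LOCK ALL).
 */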
@Indexable(type = IndexableType.REINDEX)
public Dossier submitting(long groupId, long id, String refId, ServiceContext context) throws PortalException {
validateSubmitting(groupId, id, refId);
Date now = new Date();
Dossier dossier = null;
if (id != 0) {
dossier = dossierPersistence.fetchByPrimaryKey(id);
} else {
dossier = dossierPersistence.fetchByG_REF(groupId, refId);
}
dossier.setModifiedDate(now);
dossier.setSubmitting(true);
/*
* if (Validator.isNull(dossier.getSubmitDate())) {
* dossier.setSubmitDate(now); }
*/
if (dossier.getDossierStatus().contentEquals(DossierStatusConstants.NEW)) {
dossier.setSubmitDate(now);
}
// long dActionId = 0;
// String stepCode = StringPool.BLANK;
// long serviceProcessId = 0;
// String lockState = StringPool.BLANK;
// if (dossier != null) {
// dActionId = dossier.getDossierActionId();
// }
// if (dActionId > 0) {
// DossierAction dAction =
// DossierActionLocalServiceUtil.fetchDossierAction(dActionId);
// if (dAction != null) {
// stepCode = dAction.getStepCode();
// serviceProcessId = dAction.getServiceProcessId();
// }
// }
// if (Validator.isNotNull(stepCode) && serviceProcessId > 0) {
// ProcessStep proStep =
// ProcessStepLocalServiceUtil.fetchBySC_GID(stepCode, groupId,
// serviceProcessId);
// if (proStep != null) {
// lockState = proStep.getLockState();
// }
// }
dossier.setLockState(LOCK_ALL);
dossierPersistence.update(dossier);
return dossier;
}
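/**
 * Resets the submitting flag of a dossier and clears the isNew flag of its dossier files.
 */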
@Indexable(type = IndexableType.REINDEX)
public Dossier reset(long groupId, long id, String refId, ServiceContext context) throws PortalException {
validateReset(groupId, id, refId);
Date now = new Date();
Dossier dossier = null;
if (id != 0) {
dossier = dossierPersistence.fetchByPrimaryKey(id);
} else {
dossier = dossierPersistence.fetchByG_REF(groupId, refId);
}
dossier.setModifiedDate(now);
dossier.setSubmitting(false);
// dossier.setSubmitDate(null);
dossierPersistence.update(dossier);
// TODO add reset for DossierFile and PaymentFile (isNew => false)
// TODO add remove DossierFile out system
List<DossierFile> lsDF = dossierFileLocalService.getDossierFilesByDossierId(id);
for (DossierFile df : lsDF) {
if (df.getIsNew()) {
df.setIsNew(false);
dossierFileLocalService.updateDossierFile(df);
}
}
// List<PaymentFile> lsPF = paymentFileLocalService.getByDossierId(id);
// for (PaymentFile pf : lsPF) {
// if (pf.getIsNew()) {
// pf.setIsNew(false);
//
// paymentFileLocalService.updatePaymentFile(pf);
// }
// }
return dossier;
}
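/**
 * Updates the dossier status, sub status, lock state and step instruction. The release date
 * is stamped when the status becomes RELEASING and the finish date when it becomes DONE.
 */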
// @Indexable(type = IndexableType.REINDEX)
public Dossier updateStatus(long groupId, long id, String refId, String status, String statusText, String subStatus,
String subStatusText, String lockState, String stepInstruction, ServiceContext context)
throws PortalException {
validateUpdateStatus(groupId, id, refId, status, statusText, subStatus, subStatusText);
Date now = new Date();
Dossier dossier = null;
if (id != 0) {
dossier = dossierPersistence.findByPrimaryKey(id);
} else {
dossier = dossierPersistence.fetchByG_REF(groupId, refId);
}
dossier.setModifiedDate(now);
dossier.setDossierStatus(status);
dossier.setDossierStatusText(statusText);
dossier.setDossierSubStatus(subStatus);
dossier.setDossierSubStatusText(subStatusText);
dossier.setLockState(lockState);
dossier.setDossierNote(stepInstruction);
/*
* if (status.equalsIgnoreCase(DossierStatusConstants.RECEIVING)) {
* dossier.setReceiveDate(now); }
*/
if (status.equalsIgnoreCase(DossierStatusConstants.RELEASING)) {
dossier.setReleaseDate(now);
}
if (status.equalsIgnoreCase(DossierStatusConstants.DONE)) {
dossier.setFinishDate(now);
}
dossierPersistence.update(dossier);
return dossier;
}
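/*
 * The update*Date methods below look the dossier up either by primary key (id != 0) or by
 * group and reference uid, stamp the modified date and store the corresponding milestone
 * date. Cancelling, endorsement and correcting also raise the submitting flag.
 */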
@Indexable(type = IndexableType.REINDEX)
public Dossier updateProcessDate(long groupId, long id, String refId, Date date, ServiceContext context)
throws PortalException {
Date now = new Date();
Dossier dossier = null;
if (id != 0) {
dossier = dossierPersistence.fetchByPrimaryKey(id);
} else {
dossier = dossierPersistence.fetchByG_REF(groupId, refId);
}
dossier.setModifiedDate(now);
dossier.setProcessDate(date);
return dossierPersistence.update(dossier);
}
@Indexable(type = IndexableType.REINDEX)
public Dossier updateSubmittingDate(long groupId, long id, String refId, Date date, ServiceContext context)
throws PortalException {
validateSubmittingDate(groupId, id, refId, date);
Date now = new Date();
Dossier dossier = null;
if (id != 0) {
dossier = dossierPersistence.fetchByPrimaryKey(id);
} else {
dossier = dossierPersistence.fetchByG_REF(groupId, refId);
}
dossier.setModifiedDate(now);
dossier.setSubmitDate(date);
return dossierPersistence.update(dossier);
}
@Indexable(type = IndexableType.REINDEX)
public Dossier updateReceivingDate(long groupId, long id, String refId, Date date, ServiceContext context)
throws PortalException {
validateReceivingDate(groupId, id, refId, date);
Date now = new Date();
Dossier dossier = null;
if (id != 0) {
dossier = dossierPersistence.fetchByPrimaryKey(id);
} else {
dossier = dossierPersistence.fetchByG_REF(groupId, refId);
}
dossier.setModifiedDate(now);
dossier.setReceiveDate(date);
return dossierPersistence.update(dossier);
}
@Indexable(type = IndexableType.REINDEX)
public Dossier updateDueDate(long groupId, long id, String refId, Date date, ServiceContext context)
throws PortalException {
validateDueDate(groupId, id, refId, date);
Date now = new Date();
Dossier dossier = null;
if (id != 0) {
dossier = dossierPersistence.fetchByPrimaryKey(id);
} else {
dossier = dossierPersistence.fetchByG_REF(groupId, refId);
}
dossier.setModifiedDate(now);
dossier.setDueDate(date);
return dossierPersistence.update(dossier);
}
@Indexable(type = IndexableType.REINDEX)
public Dossier updateReleaseDate(long groupId, long id, String refId, Date date, ServiceContext context)
throws PortalException {
validateReleaseDate(groupId, id, refId, date);
Date now = new Date();
Dossier dossier = null;
if (id != 0) {
dossier = dossierPersistence.fetchByPrimaryKey(id);
} else {
dossier = dossierPersistence.fetchByG_REF(groupId, refId);
}
dossier.setModifiedDate(now);
dossier.setReleaseDate(date);
return dossierPersistence.update(dossier);
}
@Indexable(type = IndexableType.REINDEX)
public Dossier updateCancellingDate(long groupId, long id, String refId, Date date, ServiceContext context)
throws PortalException {
validateCancellingDate(groupId, id, refId, date);
Date now = new Date();
Dossier dossier = null;
if (id != 0) {
dossier = dossierPersistence.fetchByPrimaryKey(id);
} else {
dossier = dossierPersistence.fetchByG_REF(groupId, refId);
}
dossier.setModifiedDate(now);
dossier.setCancellingDate(date);
dossier.setSubmitting(true);
return dossierPersistence.update(dossier);
}
@Indexable(type = IndexableType.REINDEX)
public Dossier updateEndosementDate(long groupId, long id, String refId, Date date, ServiceContext context)
throws PortalException {
validateCancellingDate(groupId, id, refId, date);
Date now = new Date();
Dossier dossier = null;
if (id != 0) {
dossier = dossierPersistence.fetchByPrimaryKey(id);
} else {
dossier = dossierPersistence.fetchByG_REF(groupId, refId);
}
dossier.setModifiedDate(now);
// dossier.setCancellingDate(date);
dossier.setEndorsementDate(date);
dossier.setSubmitting(true);
return dossierPersistence.update(dossier);
}
@Indexable(type = IndexableType.REINDEX)
public Dossier updateFinishDate(long groupId, long id, String refId, Date date, ServiceContext context)
throws PortalException {
validateFinishDate(groupId, id, refId, date);
Date now = new Date();
Dossier dossier = null;
if (id != 0) {
dossier = dossierPersistence.fetchByPrimaryKey(id);
} else {
dossier = dossierPersistence.fetchByG_REF(groupId, refId);
}
dossier.setModifiedDate(now);
dossier.setFinishDate(date);
return dossierPersistence.update(dossier);
}
@Indexable(type = IndexableType.REINDEX)
public Dossier updateCorrectingDate(long groupId, long id, String refId, Date date, ServiceContext context)
throws PortalException {
validateCorrectingDate(groupId, id, refId, date);
Date now = new Date();
Dossier dossier = null;
if (id != 0) {
dossier = dossierPersistence.fetchByPrimaryKey(id);
} else {
dossier = dossierPersistence.fetchByG_REF(groupId, refId);
}
dossier.setModifiedDate(now);
dossier.setCorrecttingDate(date);
dossier.setSubmitting(true);
return dossierPersistence.update(dossier);
}
@Indexable(type = IndexableType.REINDEX)
public Dossier updateDossierAction(long groupId, long id, String refId, long dossierActionId,
ServiceContext context) throws PortalException {
validateDossierAction(groupId, id, refId, dossierActionId);
Date now = new Date();
Dossier dossier = null;
if (id != 0) {
dossier = dossierPersistence.fetchByPrimaryKey(id);
} else {
dossier = dossierPersistence.fetchByG_REF(groupId, refId);
}
dossier.setModifiedDate(now);
dossier.setDossierActionId(dossierActionId);
dossierPersistence.update(dossier);
return dossier;
}
//sondt start
@Indexable(type = IndexableType.REINDEX)
public Dossier updateViaPostal(long groupId, long id, String refId, int viaPostal,
ServiceContext context) throws PortalException {
validateViaPostal(groupId, id, refId, viaPostal);
Date now = new Date();
Dossier dossier = null;
if (id != 0) {
dossier = dossierPersistence.fetchByPrimaryKey(id);
} else {
dossier = dossierPersistence.fetchByG_REF(groupId, refId);
}
dossier.setModifiedDate(now);
dossier.setViaPostal(viaPostal);
dossierPersistence.update(dossier);
return dossier;
}
//sondt end
public Dossier getByRef(long groupId, String refId) {
return dossierPersistence.fetchByG_REF(groupId, refId);
}
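/**
 * Removes a dossier found by primary key or by group and reference uid and deletes its index
 * entry. Related logs, files, actions and payment files are not cleaned up here yet (see the
 * TODOs below).
 */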
@Indexable(type = IndexableType.DELETE)
public Dossier removeDossier(long groupId, long dossierId, String refId) throws PortalException {
// TODO remove dossierLog
// TODO remove dossierFile
// TODO remove dossierAction
// TODO remove PaymentFile
validateRemoveDossier(groupId, dossierId, refId);
Dossier dossier = null;
if (dossierId != 0) {
dossier = dossierPersistence.fetchByPrimaryKey(dossierId);
} else {
dossier = dossierPersistence.findByG_REF(groupId, refId);
}
return dossierPersistence.remove(dossier);
}
@Indexable(type = IndexableType.REINDEX)
public Dossier updateDossierBriefNote(long dossierId, String dossierBriefNote) throws PortalException {
Dossier dossier = dossierPersistence.findByPrimaryKey(dossierId);
dossier.setBriefNote(dossierBriefNote);
return dossierPersistence.update(dossier);
}
public int countByUserId(long userId, long groupId) {
return dossierPersistence.countByG_UID(groupId, userId);
}
private void validateViaPostal(long groupId, long id, String refId, int viaPostal)
throws PortalException {
// TODO add validate for submitting
}
private void validateRemoveDossier(long groupId, long dossierId, String refId) throws PortalException {
// TODO add validate for remove Dossier
}
private void validateDossierAction(long groupId, long id, String refId, long dossierActionId)
throws PortalException {
// TODO add validate for submitting
}
private void validateSubmittingDate(long groupId, long id, String refId, Date date) throws PortalException {
// TODO add validate
}
private void validateReceivingDate(long groupId, long id, String refId, Date date) throws PortalException {
// TODO add validate
}
private void validateReleaseDate(long groupId, long id, String refId, Date date) throws PortalException {
// TODO add validate
}
private void validateFinishDate(long groupId, long id, String refId, Date date) throws PortalException {
// TODO add validate
}
private void validateCancellingDate(long groupId, long id, String refId, Date date) throws PortalException {
// TODO add validate
}
private void validateDueDate(long groupId, long id, String refId, Date date) throws PortalException {
// TODO add validate
}
private void validateCorrectingDate(long groupId, long id, String refId, Date date) throws PortalException {
// TODO add validate
}
private void validateUpdateStatus(long groupId, long id, String refId, String status, String statusText,
String subStatus, String subStatusText) throws PortalException {
// TODO add validate
}
private void validateSubmitting(long groupId, long id, String refId) throws PortalException {
// TODO add validate for submitting
// Check dossier status
// Check DossierFile, PaymentFile
}
private void validateReset(long groupId, long id, String refId) throws PortalException {
// TODO add validate for submitting
}
private void validateInit(long groupId, long dossierId, String referenceUid, String serviceCode,
String govAgencyCode, String address, String cityCode, String districtCode, String wardCode,
String contactName, String contactTelNo, String contactEmail, String dossierTemplateNo)
throws PortalException {
}
private void validateUpdateDossier(long groupId, long dossierId, String referenceUid, String serviceCode,
String govAgencyCode, String address, String cityCode, String districtCode, String wardCode,
String contactName, String contactTelNo, String contactEmail, String dossierTemplateNo, String dossierNote,
String submissionNote, String dossierNo, boolean submitting, String dossierStatus, String dossierSubStatus,
String postalAddress, String postalCityCode, String postalTelNo, String serverNo) throws PortalException {
}
public Document getDossierById(long dossierId, long companyId) throws PortalException {
// Document document = null;
Indexer<Dossier> indexer = IndexerRegistryUtil.nullSafeGetIndexer(Dossier.class);
SearchContext searchContext = new SearchContext();
searchContext.setCompanyId(companyId);
// SearchContext searchContext =
// SearchContextFactory.getInstance(request);
searchContext.setEnd(QueryUtil.ALL_POS);
searchContext.setKeywords(StringPool.BLANK);
searchContext.setStart(QueryUtil.ALL_POS);
// searchContext.set
BooleanQuery booleanQuery = null;
booleanQuery = indexer.getFullQuery(searchContext);
if (dossierId != 0) {
MultiMatchQuery query = new MultiMatchQuery(String.valueOf(dossierId));
query.addField(DossierTerm.DOSSIER_ID);
booleanQuery.add(query, BooleanClauseOccur.MUST);
}
booleanQuery.addRequiredTerm(Field.ENTRY_CLASS_NAME, CLASS_NAME);
Hits hits = IndexSearcherHelperUtil.search(searchContext, booleanQuery);
List<Document> documents = hits.toList();
if (documents.size() > 0) {
return documents.get(0);
} else {
return null;
}
}
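/**
 * Full-text search for dossiers. The params map is translated into a boolean search query:
 * processSearchCommon adds the common conditions (keywords, group, owner, follow, step,
 * template, ...) and processSearchInput adds the caller supplied filters.
 *
 * A minimal call might look like this (illustrative only):
 *
 * LinkedHashMap<String, Object> params = new LinkedHashMap<>();
 * params.put(Field.GROUP_ID, String.valueOf(groupId));
 * params.put(DossierTerm.STATUS, "releasing");
 * Hits hits = searchLucene(params, new Sort[0], 0, 20, searchContext);
 */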
@SuppressWarnings("deprecation")
public Hits searchLucene(LinkedHashMap<String, Object> params, Sort[] sorts, int start, int end,
SearchContext searchContext) throws ParseException, SearchException {
String keywords = (String) params.get(Field.KEYWORD_SEARCH);
String groupId = (String) params.get(Field.GROUP_ID);
String secetKey = GetterUtil.getString(params.get("secetKey"));
String status = GetterUtil.getString(params.get(DossierTerm.STATUS));
String subStatus = GetterUtil.getString(params.get(DossierTerm.SUBSTATUS));
String agency = GetterUtil.getString(params.get(DossierTerm.AGENCY));
String service = GetterUtil.getString(params.get(DossierTerm.SERVICE));
String template = GetterUtil.getString(params.get(DossierTerm.TEMPLATE));
String step = GetterUtil.getString(params.get(DossierTerm.STEP));
String state = GetterUtil.getString(params.get(DossierTerm.STATE));
String follow = GetterUtil.getString(params.get(DossierTerm.FOLLOW));
String dossierNo = GetterUtil.getString(params.get(DossierTerm.DOSSIER_NO));
// Get by certificate number
String certificateNo = (String) params.get(DossierTerm.DOSSIER_ID_CTN);
String top = GetterUtil.getString(params.get(DossierTerm.TOP));
String owner = GetterUtil.getString(params.get(DossierTerm.OWNER));
String submitting = GetterUtil.getString(params.get(DossierTerm.SUBMITTING));
int year = GetterUtil.getInteger(params.get(DossierTerm.YEAR));
int month = GetterUtil.getInteger(params.get(DossierTerm.MONTH));
int day = GetterUtil.getInteger(params.get(DossierTerm.DAY));
long userId = GetterUtil.getLong(params.get(DossierTerm.USER_ID));
String strDossierActionId = GetterUtil.getString(params.get(DossierTerm.DOSSIER_ACTION_ID));
String fromReceiveDate = GetterUtil.getString(params.get(DossierTerm.FROM_RECEIVEDATE));
String toReceiveDate = GetterUtil.getString(params.get(DossierTerm.TO_RECEIVEDATE));
String certNo = GetterUtil.getString(params.get(DossierTerm.CERT_NO));
String fromCertDate = GetterUtil.getString(params.get(DossierTerm.FROM_CERT_DATE));
String toCertDate = GetterUtil.getString(params.get(DossierTerm.TO_CERT_DATE));
String fromSubmitDate = GetterUtil.getString(params.get(DossierTerm.FROM_SUBMIT_DATE));
String toSubmitDate = GetterUtil.getString(params.get(DossierTerm.TO_SUBMIT_DATE));
String notState = GetterUtil.getString(params.get(DossierTerm.NOT_STATE));
Long statusReg = GetterUtil.getLong(params.get(DossierTerm.STATUS_REG));
Long notStatusReg = GetterUtil.getLong(params.get(DossierTerm.NOT_STATUS_REG));
String online = GetterUtil.getString(params.get(DossierTerm.ONLINE));
String originality = GetterUtil.getString(params.get(DossierTerm.ORIGINALLITY));
String assigned = GetterUtil.getString(params.get(DossierTerm.ASSIGNED));
//LamTV_ADD
String statusStep = GetterUtil.getString(params.get(DossierTerm.DOSSIER_STATUS_STEP));
String subStatusStep = GetterUtil.getString(params.get(DossierTerm.DOSSIER_SUBSTATUS_STEP));
String permission = GetterUtil.getString(params.get(DossierTerm.MAPPING_PERMISSION));
String domain = GetterUtil.getString(params.get(DossierTerm.DOMAIN_CODE));
String domainName = GetterUtil.getString(params.get(DossierTerm.DOMAIN_NAME));
String applicantName = GetterUtil.getString(params.get(DossierTerm.APPLICANT_NAME));
String applicantIdNo = GetterUtil.getString(params.get(DossierTerm.APPLICANT_ID_NO));
String serviceName = GetterUtil.getString(params.get(DossierTerm.SERVICE_NAME));
String emailLogin = GetterUtil.getString(params.get(DossierTerm.EMAIL_USER_LOGIN));
String fromReleaseDate = GetterUtil.getString(params.get(DossierTerm.FROM_RELEASE_DATE));
String toReleaseDate = GetterUtil.getString(params.get(DossierTerm.TO_RELEASE_DATE));
//
String fromFinishDate = GetterUtil.getString(params.get(DossierTerm.FROM_FINISH_DATE));
String toFinishDate = GetterUtil.getString(params.get(DossierTerm.TO_FINISH_DATE));
String fromReceiveNotDoneDate = GetterUtil.getString(params.get(DossierTerm.FROM_RECEIVE_NOTDONE_DATE));
String toReceiveNotDoneDate = GetterUtil.getString(params.get(DossierTerm.TO_RECEIVE_NOTDONE_DATE));
String paymentStatus = GetterUtil.getString(params.get(PaymentFileTerm.PAYMENT_STATUS));
String origin = GetterUtil.getString(params.get(DossierTerm.ORIGIN));
String fromStatisticDate = GetterUtil.getString(params.get(DossierTerm.FROM_STATISTIC_DATE));
String toStatisticDate = GetterUtil.getString(params.get(DossierTerm.TO_STATISTIC_DATE));
Integer originDossierId = (params.get(DossierTerm.ORIGIN_DOSSIER_ID) != null)
? GetterUtil.getInteger(params.get(DossierTerm.ORIGIN_DOSSIER_ID))
: null;
String time = GetterUtil.getString(params.get(DossierTerm.TIME));
String register = GetterUtil.getString(params.get(DossierTerm.REGISTER));
Long groupDossierId = GetterUtil.getLong(params.get(DossierTerm.GROUP_DOSSIER_ID));
String applicantFollowIdNo = GetterUtil.getString(params.get(DossierTerm.APPLICANT_FOLLOW_ID_NO));
String assignedUserId = GetterUtil.getString(params.get(DossierTerm.ASSIGNED_USER_ID));
Indexer<Dossier> indexer = IndexerRegistryUtil.nullSafeGetIndexer(Dossier.class);
searchContext.addFullQueryEntryClassName(CLASS_NAME);
searchContext.setEntryClassNames(new String[] { CLASS_NAME });
searchContext.setAttribute("paginationType", "regular");
searchContext.setLike(true);
searchContext.setStart(start);
searchContext.setEnd(end);
searchContext.setAndSearch(true);
searchContext.setSorts(sorts);
BooleanQuery booleanQuery = null;
if (Validator.isNotNull(keywords)) {
booleanQuery = BooleanQueryFactoryUtil.create(searchContext);
} else {
booleanQuery = indexer.getFullQuery(searchContext);
}
// Add the common/default search conditions
BooleanQuery booleanCommon = processSearchCommon(keywords, secetKey, groupId, owner, userId, follow, step,
template, top, emailLogin, originality, applicantFollowIdNo, booleanQuery);
// Add the caller supplied search conditions
BooleanQuery booleanInput = processSearchInput(status, subStatus, state, online, submitting, agency, service,
userId, top, year, month, dossierNo, certificateNo, strDossierActionId, fromReceiveDate, toReceiveDate,
certNo, fromCertDate, toCertDate, fromSubmitDate, toSubmitDate, notState, statusReg, notStatusReg,
follow, originality, assigned, statusStep, subStatusStep, permission, domain, domainName, applicantName,
applicantIdNo, serviceName, fromReleaseDate, toReleaseDate, fromFinishDate, toFinishDate,
fromReceiveNotDoneDate, toReceiveNotDoneDate, paymentStatus, origin, fromStatisticDate, toStatisticDate,
originDossierId, time, register, day, groupDossierId, assignedUserId, booleanCommon);
booleanQuery.addRequiredTerm(Field.ENTRY_CLASS_NAME, CLASS_NAME);
return IndexSearcherHelperUtil.search(searchContext, booleanInput);
}
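/**
 * Counterpart of searchLucene that only returns the number of matching dossiers.
 */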
@SuppressWarnings("deprecation")
public long countLucene(LinkedHashMap<String, Object> params, SearchContext searchContext)
throws ParseException, SearchException {
String keywords = (String) params.get(Field.KEYWORD_SEARCH);
String groupId = (String) params.get(Field.GROUP_ID);
String secetKey = GetterUtil.getString(params.get("secetKey"));
String status = GetterUtil.getString(params.get(DossierTerm.STATUS));
String subStatus = GetterUtil.getString(params.get(DossierTerm.SUBSTATUS));
String agency = GetterUtil.getString(params.get(DossierTerm.AGENCY));
String service = GetterUtil.getString(params.get(DossierTerm.SERVICE));
String template = GetterUtil.getString(params.get(DossierTerm.TEMPLATE));
String state = GetterUtil.getString(params.get(DossierTerm.STATE));
String step = GetterUtil.getString(params.get(DossierTerm.STEP));
String dossierNo = GetterUtil.getString(params.get(DossierTerm.DOSSIER_NO));
// Get by certificate number
String certificateNo = (String) params.get(DossierTerm.DOSSIER_ID_CTN);
String online = GetterUtil.getString(params.get(DossierTerm.ONLINE));
String follow = GetterUtil.getString(params.get(DossierTerm.FOLLOW));
String top = GetterUtil.getString(params.get(DossierTerm.TOP));
String owner = GetterUtil.getString(params.get(DossierTerm.OWNER));
String submitting = GetterUtil.getString(params.get(DossierTerm.SUBMITTING));
long userId = GetterUtil.getLong(params.get(DossierTerm.USER_ID));
int year = GetterUtil.getInteger(params.get(DossierTerm.YEAR));
int month = GetterUtil.getInteger(params.get(DossierTerm.MONTH));
int day = GetterUtil.getInteger(params.get(DossierTerm.DAY));
String strDossierActionId = GetterUtil.getString(params.get(DossierTerm.DOSSIER_ACTION_ID));
String fromReceiveDate = GetterUtil.getString(params.get(DossierTerm.FROM_RECEIVEDATE));
String toReceiveDate = GetterUtil.getString(params.get(DossierTerm.TO_RECEIVEDATE));
String certNo = GetterUtil.getString(params.get(DossierTerm.CERT_NO));
String fromCertDate = GetterUtil.getString(params.get(DossierTerm.FROM_CERT_DATE));
String toCertDate = GetterUtil.getString(params.get(DossierTerm.TO_CERT_DATE));
String fromSubmitDate = GetterUtil.getString(params.get(DossierTerm.FROM_SUBMIT_DATE));
String toSubmitDate = GetterUtil.getString(params.get(DossierTerm.TO_SUBMIT_DATE));
String notState = GetterUtil.getString(params.get(DossierTerm.NOT_STATE));
Long statusReg = GetterUtil.getLong(params.get(DossierTerm.STATUS_REG));
Long notStatusReg = GetterUtil.getLong(params.get(DossierTerm.NOT_STATUS_REG));
String originality = GetterUtil.getString(params.get(DossierTerm.ORIGINALLITY));
String assigned = GetterUtil.getString(params.get(DossierTerm.ASSIGNED));
//LamTV_ADD
String statusStep = GetterUtil.getString(params.get(DossierTerm.DOSSIER_STATUS_STEP));
String subStatusStep = GetterUtil.getString(params.get(DossierTerm.DOSSIER_SUBSTATUS_STEP));
String permission = GetterUtil.getString(params.get(DossierTerm.MAPPING_PERMISSION));
String domain = GetterUtil.getString(params.get(DossierTerm.DOMAIN_CODE));
String domainName = GetterUtil.getString(params.get(DossierTerm.DOMAIN_NAME));
String applicantName = GetterUtil.getString(params.get(DossierTerm.APPLICANT_NAME));
String applicantIdNo = GetterUtil.getString(params.get(DossierTerm.APPLICANT_ID_NO));
String serviceName = GetterUtil.getString(params.get(DossierTerm.SERVICE_NAME));
String emailLogin = GetterUtil.getString(params.get(DossierTerm.EMAIL_USER_LOGIN));
String fromReleaseDate = GetterUtil.getString(params.get(DossierTerm.FROM_RELEASE_DATE));
String toReleaseDate = GetterUtil.getString(params.get(DossierTerm.TO_RELEASE_DATE));
//
String fromFinishDate = GetterUtil.getString(params.get(DossierTerm.FROM_FINISH_DATE));
String toFinishDate = GetterUtil.getString(params.get(DossierTerm.TO_FINISH_DATE));
String fromReceiveNotDoneDate = GetterUtil.getString(params.get(DossierTerm.FROM_RECEIVE_NOTDONE_DATE));
String toReceiveNotDoneDate = GetterUtil.getString(params.get(DossierTerm.TO_RECEIVE_NOTDONE_DATE));
String paymentStatus = GetterUtil.getString(params.get(PaymentFileTerm.PAYMENT_STATUS));
//
String fromStatisticDate = GetterUtil.getString(params.get(DossierTerm.FROM_STATISTIC_DATE));
String toStatisticDate = GetterUtil.getString(params.get(DossierTerm.TO_STATISTIC_DATE));
String origin = GetterUtil.getString(params.get(DossierTerm.ORIGIN));
Integer originDossierId = (params.get(DossierTerm.ORIGIN_DOSSIER_ID) != null
? GetterUtil.getInteger(params.get(DossierTerm.ORIGIN_DOSSIER_ID))
: null);
String time = GetterUtil.getString(params.get(DossierTerm.TIME));
String register = GetterUtil.getString(params.get(DossierTerm.REGISTER));
Long groupDossierId = GetterUtil.getLong(params.get(DossierTerm.GROUP_DOSSIER_ID));
String applicantFollowIdNo = GetterUtil.getString(params.get(DossierTerm.APPLICANT_FOLLOW_ID_NO));
String assignedUserId = GetterUtil.getString(params.get(DossierTerm.ASSIGNED_USER_ID));
Indexer<Dossier> indexer = IndexerRegistryUtil.nullSafeGetIndexer(Dossier.class);
searchContext.addFullQueryEntryClassName(CLASS_NAME);
searchContext.setEntryClassNames(new String[] { CLASS_NAME });
searchContext.setAttribute("paginationType", "regular");
searchContext.setLike(true);
searchContext.setAndSearch(true);
BooleanQuery booleanQuery = null;
if (Validator.isNotNull(keywords)) {
booleanQuery = BooleanQueryFactoryUtil.create(searchContext);
} else {
booleanQuery = indexer.getFullQuery(searchContext);
}
// Add the common/default search conditions
BooleanQuery booleanCommon = processSearchCommon(keywords, secetKey, groupId, owner, userId, follow, step,
template, top, emailLogin, originality, applicantFollowIdNo, booleanQuery);
// Add the caller supplied search conditions
BooleanQuery booleanInput = processSearchInput(status, subStatus, state, online, submitting, agency, service,
userId, top, year, month, dossierNo, certificateNo, strDossierActionId, fromReceiveDate, toReceiveDate,
certNo, fromCertDate, toCertDate, fromSubmitDate, toSubmitDate, notState, statusReg, notStatusReg,
follow, originality, assigned, statusStep, subStatusStep, permission, domain, domainName, applicantName,
applicantIdNo, serviceName, fromReleaseDate, toReleaseDate, fromFinishDate, toFinishDate,
fromReceiveNotDoneDate, toReceiveNotDoneDate, paymentStatus, origin, fromStatisticDate, toStatisticDate,
originDossierId, time, register, day, groupDossierId, assignedUserId, booleanCommon);
booleanQuery.addRequiredTerm(Field.ENTRY_CLASS_NAME, CLASS_NAME);
return IndexSearcherHelperUtil.searchCount(searchContext, booleanInput);
}
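/**
 * Builds the common part of the dossier search query: wildcard matching of the free-text
 * keywords over a fixed set of fields plus the group, owner, follow, step and template
 * conditions.
 */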
private BooleanQuery processSearchCommon(String keywords, String secetKey, String groupId, String owner,
long userId, String follow, String step, String template, String top, String emailLogin, String originality,
String applicantFollowIdNo, BooleanQuery booleanQuery) throws ParseException {
// LamTV: Process search LIKE
if (Validator.isNotNull(keywords)) {
BooleanQuery queryBool = new BooleanQueryImpl();
String[] subQuerieArr = new String[] { DossierTerm.SERVICE_NAME_SEARCH, DossierTerm.APPLICANT_NAME,
DossierTerm.DOSSIER_NO_SEARCH, DossierTerm.DOSSIER_ID_CTN, DossierTerm.BRIEF_NOTE,
DossierTerm.DOSSIER_NAME_SEARCH, DossierTerm.CURRENT_ACTION_USER,
DossierTerm.ORIGIN_DOSSIER_NO_SEARCH, ServiceInfoTerm.SERVICE_CODE_SEARCH,
DossierTerm.DELEGATE_NAME_SEARCH};
String[] keywordArr = keywords.split(StringPool.SPACE);
for (String fieldSearch : subQuerieArr) {
BooleanQuery query = new BooleanQueryImpl();
for (String key : keywordArr) {
WildcardQuery wildQuery = new WildcardQueryImpl(fieldSearch,
StringPool.STAR + key.toLowerCase() + StringPool.STAR);
query.add(wildQuery, BooleanClauseOccur.MUST);
}
queryBool.add(query, BooleanClauseOccur.SHOULD);
}
booleanQuery.add(queryBool, BooleanClauseOccur.MUST);
}
if (!(Validator.isNotNull(secetKey) && secetKey.contentEquals("OPENCPSV2"))) {
if (Validator.isNotNull(groupId)) {
MultiMatchQuery query = new MultiMatchQuery(groupId);
query.addFields(Field.GROUP_ID);
booleanQuery.add(query, BooleanClauseOccur.MUST);
}
}
if (Validator.isNotNull(owner) && Boolean.parseBoolean(owner) && userId > 0) {
MultiMatchQuery query = new MultiMatchQuery(String.valueOf(userId));
query.addField(DossierTerm.USER_ID);
booleanQuery.add(query, BooleanClauseOccur.MUST);
}
if (Validator.isNotNull(follow) && Boolean.parseBoolean(follow) && userId > 0) {
if (Validator.isNotNull(originality) && Long.valueOf(originality) == DossierTerm.ORIGINALITY_PUBLISH) {
//_log.info("applicantFollowIdNo: "+applicantFollowIdNo);
MultiMatchQuery query = new MultiMatchQuery(applicantFollowIdNo);
query.addField(DossierTerm.APPLICANT_ID_NO);
booleanQuery.add(query, BooleanClauseOccur.MUST);
} else {
MultiMatchQuery query = new MultiMatchQuery(String.valueOf(userId));
query.addField(DossierTerm.ACTION_MAPPING_USERID);
booleanQuery.add(query, BooleanClauseOccur.MUST);
}
}
if (Validator.isNotNull(step)) {
String[] stepArr = StringUtil.split(step);
if (stepArr != null && stepArr.length > 0) {
BooleanQuery subQuery = new BooleanQueryImpl();
for (int i = 0; i < stepArr.length; i++) {
MultiMatchQuery query = new MultiMatchQuery(stepArr[i]);
query.addField(DossierTerm.STEP_CODE);
subQuery.add(query, BooleanClauseOccur.SHOULD);
}
booleanQuery.add(subQuery, BooleanClauseOccur.MUST);
} else {
MultiMatchQuery query = new MultiMatchQuery(step);
query.addFields(DossierTerm.STEP_CODE);
booleanQuery.add(query, BooleanClauseOccur.MUST);
}
}
if (Validator.isNotNull(template)) {
MultiMatchQuery query = new MultiMatchQuery(template);
query.addFields(DossierTerm.TEMPLATE);
booleanQuery.add(query, BooleanClauseOccur.MUST);
}
// //OriginDossierId = 0
// MultiMatchQuery queryOrigin = new MultiMatchQuery(String.valueOf(0));
// queryOrigin.addField(DossierTerm.ORIGIN_DOSSIER_ID);
// booleanQuery.add(queryOrigin, BooleanClauseOccur.MUST);
return booleanQuery;
}
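/**
 * Adds the caller-supplied filters on top of the common query: dossier status/substatus,
 * state flags, agency, service, year/month/day, dossier and certificate numbers, dossier
 * action ids, the various date ranges (receive, cert, submit, release, finish, ...),
 * registration status, originality, assignment, step status, permission, domain, applicant
 * and service name LIKE search, payment status, statistic "time" buckets and group dossier.
 * Comma-separated values are expanded into SHOULD sub-queries; date ranges are translated
 * into TermRangeQuery clauses.
 */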
private BooleanQuery processSearchInput(String status, String subStatus, String state, String online,
String submitting, String agency, String service, long userId, String top, int year, int month,
String dossierNo, String certificateNo, String strDossierActionId, String fromReceiveDate,
String toReceiveDate, String certNo, String fromCertDate, String toCertDate, String fromSubmitDate,
String toSubmitDate, String notState, Long statusReg, Long notStatusReg, String follow, String originality,
String assigned, String statusStep, String subStatusStep, String permission, String domain,
String domainName, String applicantName, String applicantIdNo, String serviceName, String fromReleaseDate,
String toReleaseDate, String fromFinishDate, String toFinishDate, String fromReceiveNotDoneDate,
String toReceiveNotDoneDate, String paymentStatus, String origin, String fromStatisticDate,
String toStatisticDate, Integer originDossierId, String time, String register, int day, Long groupDossierId,
String assignedUserId, BooleanQuery booleanQuery) throws ParseException {
if (Validator.isNotNull(status)) {
String[] lstStatus = StringUtil.split(status);
if (lstStatus != null && lstStatus.length > 0) {
BooleanQuery subQuery = new BooleanQueryImpl();
for (int i = 0; i < lstStatus.length; i++) {
MultiMatchQuery query = new MultiMatchQuery(lstStatus[i]);
query.addField(DossierTerm.DOSSIER_STATUS);
subQuery.add(query, BooleanClauseOccur.SHOULD);
}
booleanQuery.add(subQuery, BooleanClauseOccur.MUST);
} else {
MultiMatchQuery query = new MultiMatchQuery(status);
query.addFields(DossierTerm.DOSSIER_STATUS);
booleanQuery.add(query, BooleanClauseOccur.MUST);
}
}
if (Validator.isNotNull(subStatus)) {
String[] lstSubStatus = StringUtil.split(subStatus);
if (lstSubStatus != null && lstSubStatus.length > 0) {
BooleanQuery subQuery = new BooleanQueryImpl();
for (int i = 0; i < lstSubStatus.length; i++) {
MultiMatchQuery query = new MultiMatchQuery(lstSubStatus[i]);
query.addField(DossierTerm.DOSSIER_SUB_STATUS);
subQuery.add(query, BooleanClauseOccur.SHOULD);
}
booleanQuery.add(subQuery, BooleanClauseOccur.MUST);
} else {
MultiMatchQuery query = new MultiMatchQuery(subStatus);
query.addFields(DossierTerm.DOSSIER_SUB_STATUS);
booleanQuery.add(query, BooleanClauseOccur.MUST);
}
}
if (Validator.isNotNull(state)) {
if (state.equals(ConstantsTerm.CANCELLING)) {
BooleanQuery subQuery = new BooleanQueryImpl();
MultiMatchQuery query1 = new MultiMatchQuery(String.valueOf(0));
query1.addField(DossierTerm.CANCELLING_DATE_TIMESTAMP);
MultiMatchQuery query2 = new MultiMatchQuery(ConstantsTerm.CANCELLED);
query2.addField(DossierTerm.DOSSIER_STATUS);
subQuery.add(query1, BooleanClauseOccur.MUST_NOT);
subQuery.add(query2, BooleanClauseOccur.MUST_NOT);
booleanQuery.add(subQuery, BooleanClauseOccur.MUST);
}
if (state.equals(ConstantsTerm.CORRECTING)) {
MultiMatchQuery query = new MultiMatchQuery(String.valueOf(0));
query.addField(DossierTerm.CORRECTING_DATE_TIMESTAMP);
booleanQuery.add(query, BooleanClauseOccur.MUST_NOT);
}
if (state.equals(ConstantsTerm.ENDORSEMENT)) {
MultiMatchQuery query = new MultiMatchQuery(String.valueOf(0));
query.addField(DossierTerm.ENDORSEMENT_DATE_TIMESTAMP);
booleanQuery.add(query, BooleanClauseOccur.MUST_NOT);
}
}
if (Validator.isNotNull(online)) {
MultiMatchQuery query = new MultiMatchQuery(String.valueOf(online));
query.addField(DossierTerm.ONLINE);
booleanQuery.add(query, BooleanClauseOccur.MUST);
}
if (Validator.isNotNull(submitting) && Boolean.parseBoolean(submitting)) {
MultiMatchQuery query = new MultiMatchQuery(String.valueOf(submitting));
query.addField(DossierTerm.SUBMITTING);
booleanQuery.add(query, BooleanClauseOccur.MUST);
}
if (Validator.isNotNull(agency)) {
MultiMatchQuery query = new MultiMatchQuery(agency);
query.addFields(DossierTerm.GOV_AGENCY_CODE);
booleanQuery.add(query, BooleanClauseOccur.MUST);
}
if (Validator.isNotNull(service)) {
MultiMatchQuery query = new MultiMatchQuery(service);
query.addFields(ServiceInfoTerm.SERVICE_CODE_SEARCH);
booleanQuery.add(query, BooleanClauseOccur.MUST);
}
if (DossierTerm.STATISTIC.equals(top.toLowerCase())) {
if (month > 0 && year > 0) {
int minDayOfMonth = DossierMgtUtils.minDay(month, year);
//_log.debug("minDayOfMonth: "+minDayOfMonth);
if (minDayOfMonth > 0) {
String strMonth;
String strMonthEnd;
String strMinDay;
int monthEnd = month + 1;
if (month < 10) {
strMonth = "0" + month;
} else {
strMonth = String.valueOf(month);
}
if (monthEnd < 10) {
strMonthEnd = "0" + monthEnd;
} else {
strMonthEnd = String.valueOf(monthEnd);
}
if (minDayOfMonth < 10) {
strMinDay = "0" + minDayOfMonth;
} else {
strMinDay = String.valueOf(minDayOfMonth);
}
BooleanQuery subQueryOne = new BooleanQueryImpl();
BooleanQuery subQueryTwo = new BooleanQueryImpl();
BooleanQuery subQueryThree = new BooleanQueryImpl();
String fromStatisDateFilter = year + strMonth + strMinDay + ConstantsTerm.HOUR_START;
String toStatisDateFilter = year + strMonthEnd + strMinDay + ConstantsTerm.HOUR_START;
//Check startDate <= receiveDate < endDate
TermRangeQueryImpl termRangeQueryOne = new TermRangeQueryImpl(DossierTerm.RECEIVE_DATE,
fromStatisDateFilter, toStatisDateFilter, true, false);
subQueryOne.add(termRangeQueryOne, BooleanClauseOccur.SHOULD);
/** Check receiveDate < startDate and (startDate <= releaseDate or releaseDate = null) - START **/
// Check receiveDate < startDate
TermRangeQueryImpl termRangeQueryTwo = new TermRangeQueryImpl(DossierTerm.RECEIVE_DATE,
null, fromStatisDateFilter, false, false);
subQueryTwo.add(termRangeQueryTwo, BooleanClauseOccur.MUST);
// Check startDate <= releaseDate
TermRangeQueryImpl termRangeQueryThree = new TermRangeQueryImpl(DossierTerm.RELEASE_DATE_LUCENE,
fromStatisDateFilter, null, true, true);
subQueryThree.add(termRangeQueryThree, BooleanClauseOccur.SHOULD);
// Check releaseDate = null
MultiMatchQuery queryRelease = new MultiMatchQuery(String.valueOf(0));
queryRelease.addField(DossierTerm.RELEASE_DATE_TIMESTAMP);
subQueryThree.add(queryRelease, BooleanClauseOccur.SHOULD);
//
subQueryTwo.add(subQueryThree, BooleanClauseOccur.MUST);
/** Check receiveDate < startDate and (startDate <= releaseDate or releaseDate = null) - END **/
subQueryOne.add(subQueryTwo, BooleanClauseOccur.SHOULD);
//
booleanQuery.add(subQueryOne, BooleanClauseOccur.MUST);
}
} else if (Validator.isNotNull(fromStatisticDate) && Validator.isNotNull(toStatisticDate)) {
BooleanQuery subQueryOne = new BooleanQueryImpl();
BooleanQuery subQueryTwo = new BooleanQueryImpl();
BooleanQuery subQueryThree = new BooleanQueryImpl();
String fromStatisDateFilter = fromStatisticDate + ConstantsTerm.HOUR_START;
String toStatisDateFilter = toStatisticDate + ConstantsTerm.HOUR_END;
_log.debug("fromStatisDateFilter: "+fromStatisDateFilter);
_log.debug("toStatisDateFilter: "+toStatisDateFilter);
//Check startDate <= receiveDate < endDate
TermRangeQueryImpl termRangeQueryOne = new TermRangeQueryImpl(DossierTerm.RECEIVE_DATE,
fromStatisDateFilter, toStatisDateFilter, true, true);
subQueryOne.add(termRangeQueryOne, BooleanClauseOccur.SHOULD);
/** Check receiveDate < startDate and (startDate <= releaseDate or releaseDate = null) - START **/
// Check receiveDate < startDate
TermRangeQueryImpl termRangeQueryTwo = new TermRangeQueryImpl(DossierTerm.RECEIVE_DATE,
null, fromStatisDateFilter, false, false);
subQueryTwo.add(termRangeQueryTwo, BooleanClauseOccur.MUST);
// Check startDate <= releaseDate
TermRangeQueryImpl termRangeQueryThree = new TermRangeQueryImpl(DossierTerm.RELEASE_DATE_LUCENE,
fromStatisDateFilter, null, true, true);
subQueryThree.add(termRangeQueryThree, BooleanClauseOccur.SHOULD);
// Check startDate <= finishDate
TermRangeQueryImpl termRangeQueryFinish = new TermRangeQueryImpl(DossierTerm.FINISH_DATE_LUCENE,
fromStatisDateFilter, toStatisDateFilter, true, true);
subQueryThree.add(termRangeQueryFinish, BooleanClauseOccur.SHOULD);
// Check releaseDate = null
MultiMatchQuery queryRelease = new MultiMatchQuery(String.valueOf(0));
queryRelease.addField(DossierTerm.RELEASE_DATE_TIMESTAMP);
subQueryThree.add(queryRelease, BooleanClauseOccur.SHOULD);
//
subQueryTwo.add(subQueryThree, BooleanClauseOccur.MUST);
/** Check receiveDate < startDate and (startDate <= releaseDate or releaseDate = null) - END **/
subQueryOne.add(subQueryTwo, BooleanClauseOccur.SHOULD);
//
booleanQuery.add(subQueryOne, BooleanClauseOccur.MUST);
}
} else {
if (year > 0 || month > 0) {
if (year > 0) {
// _log.debug("year: "+year);
MultiMatchQuery query = new MultiMatchQuery(String.valueOf(year));
//MultiMatchQuery queryYearTwo = new MultiMatchQuery(String.valueOf(year));
// if (Validator.isNotNull(top) && DossierTerm.STATISTIC.equals(top.toLowerCase())) {
// MultiMatchQuery queryReceive = new MultiMatchQuery(String.valueOf(0));
// MultiMatchQuery queryRelease = new MultiMatchQuery(String.valueOf(0));
// BooleanQuery subQueryOne = new BooleanQueryImpl();
// BooleanQuery subQueryTwo = new BooleanQueryImpl();
// BooleanQuery subQueryThree = new BooleanQueryImpl();
//
// //Check receiveDate != null
// queryReceive.addField(DossierTerm.YEAR_DOSSIER);
// subQueryOne.add(queryReceive, BooleanClauseOccur.MUST_NOT);
// //Check receiveDate
// queryYearTwo.addFields(DossierTerm.YEAR_DOSSIER);
// subQueryOne.add(queryYearTwo, BooleanClauseOccur.SHOULD);
// /**Check receiveDate < now && releaseDate = null or releaseDate = now**/
// TermRangeQueryImpl termRangeQuery = new TermRangeQueryImpl(DossierTerm.YEAR_DOSSIER,
// String.valueOf(0), String.valueOf(month), false, false);
// subQueryTwo.add(termRangeQuery, BooleanClauseOccur.MUST);
//
// queryRelease.addField(DossierTerm.YEAR_RELEASE);
// subQueryTwo.add(queryRelease, BooleanClauseOccur.SHOULD);
//
// subQueryThree.add(queryYearTwo, BooleanClauseOccur.SHOULD);
//
// queryYearTwo.addFields(DossierTerm.YEAR_RELEASE);
// subQueryTwo.add(subQueryThree, BooleanClauseOccur.MUST);
// //
// subQueryOne.add(subQueryTwo, BooleanClauseOccur.SHOULD);
// //
// booleanQuery.add(subQueryOne, BooleanClauseOccur.MUST);
// } else {
query.addFields(DossierTerm.YEAR_DOSSIER);
booleanQuery.add(query, BooleanClauseOccur.MUST);
// }
}
if (month > 0) {
// _log.debug("month: "+month);
MultiMatchQuery query = new MultiMatchQuery(String.valueOf(month));
//MultiMatchQuery queryMonthTwo = new MultiMatchQuery(String.valueOf(month));
// if (Validator.isNotNull(top) && DossierTerm.STATISTIC.equals(top.toLowerCase())) {
// MultiMatchQuery queryReceive = new MultiMatchQuery(String.valueOf(0));
// MultiMatchQuery queryRelease = new MultiMatchQuery(String.valueOf(0));
// BooleanQuery subQueryOne = new BooleanQueryImpl();
// BooleanQuery subQueryTwo = new BooleanQueryImpl();
// BooleanQuery subQueryThree = new BooleanQueryImpl();
//
// //Check receiveDate != null
// queryReceive.addField(DossierTerm.MONTH_DOSSIER);
// subQueryOne.add(queryReceive, BooleanClauseOccur.MUST_NOT);
// //Check receiveDate
// queryMonthTwo.addFields(DossierTerm.MONTH_DOSSIER);
// subQueryOne.add(queryMonthTwo, BooleanClauseOccur.SHOULD);
// /**Check receiveDate < now && releaseDate = null or releaseDate = now**/
// // Check receiveDate < now
//// Calendar calDate = Calendar.getInstance();
//// calDate.setTime(new Date());
//// int monthCurrent = calDate.get(Calendar.MONTH) + 1;
// TermRangeQueryImpl termRangeQuery = new TermRangeQueryImpl(DossierTerm.MONTH_DOSSIER,
// String.valueOf(0), String.valueOf(month), false, false);
// subQueryTwo.add(termRangeQuery, BooleanClauseOccur.MUST);
//
// queryRelease.addField(DossierTerm.MONTH_RELEASE);
// subQueryTwo.add(queryRelease, BooleanClauseOccur.SHOULD);
//
// subQueryThree.add(queryMonthTwo, BooleanClauseOccur.SHOULD);
//
// queryMonthTwo.addFields(DossierTerm.MONTH_RELEASE);
// subQueryTwo.add(subQueryThree, BooleanClauseOccur.MUST);
//// //
// subQueryOne.add(subQueryTwo, BooleanClauseOccur.SHOULD);
// //
// booleanQuery.add(subQueryOne, BooleanClauseOccur.MUST);
// } else {
query.addFields(DossierTerm.MONTH_DOSSIER);
booleanQuery.add(query, BooleanClauseOccur.MUST);
// }
}
}
//Temporarily commented out so dossiers that have not been received yet are still returned
// else {
// MultiMatchQuery query = new MultiMatchQuery(String.valueOf(0));
// query.addField(DossierTerm.RECEIVE_DATE_TIMESTAMP);
// booleanQuery.add(query, BooleanClauseOccur.MUST_NOT);
// }
}
if (Validator.isNotNull(top)) {
if (DossierTerm.PASSED.equals(top.toLowerCase())) {
// _log.debug("top: "+top);
MultiMatchQuery queryAction = new MultiMatchQuery(String.valueOf(userId));
queryAction.addField(DossierTerm.USER_DOSSIER_ACTION_ID);
booleanQuery.add(queryAction, BooleanClauseOccur.MUST);
} else if (!DossierTerm.STATISTIC.equals(top.toLowerCase())) {
BooleanQuery subQuery = new BooleanQueryImpl();
MultiMatchQuery queryRelease = new MultiMatchQuery(String.valueOf(0));
queryRelease.addField(DossierTerm.RELEASE_DATE_TIMESTAMP);
subQuery.add(queryRelease, BooleanClauseOccur.MUST);
// Dossier is delayed
if (top.toLowerCase().equals(DossierTerm.DELAY)) {
/** Check condition dueDate != null **/
MultiMatchQuery querydueDate = new MultiMatchQuery(String.valueOf(0));
querydueDate.addField(DossierTerm.DUE_DATE_TIMESTAMP);
subQuery.add(querydueDate, BooleanClauseOccur.MUST_NOT);
/** Check condition status != waiting **/
MultiMatchQuery queryWaiting = new MultiMatchQuery(DossierTerm.DOSSIER_STATUS_WAITING);
queryWaiting.addField(DossierTerm.DOSSIER_STATUS);
subQuery.add(queryWaiting, BooleanClauseOccur.MUST_NOT);
MultiMatchQuery query = new MultiMatchQuery(String.valueOf(1));
query.addFields(DossierTerm.COMPARE_DELAY_DATE);
subQuery.add(query, BooleanClauseOccur.MUST);
// Dossier is overDue
} else if (top.toLowerCase().equals(DossierTerm.OVER_DUE)) {
BooleanQuery subQueryOne = new BooleanQueryImpl();
BooleanQuery subQueryTwo = new BooleanQueryImpl();
BooleanQuery subQueryThree = new BooleanQueryImpl();
/** Check condition dueDate != null **/
MultiMatchQuery querydueDate = new MultiMatchQuery(String.valueOf(0));
querydueDate.addField(DossierTerm.DUE_DATE_TIMESTAMP);
subQuery.add(querydueDate, BooleanClauseOccur.MUST_NOT);
/** Check condition status != waiting **/
MultiMatchQuery queryWaiting = new MultiMatchQuery(DossierTerm.DOSSIER_STATUS_WAITING);
queryWaiting.addField(DossierTerm.DOSSIER_STATUS);
subQuery.add(queryWaiting, BooleanClauseOccur.MUST_NOT);
/** Check condition status != receiving **/
MultiMatchQuery queryReceiving = new MultiMatchQuery(DossierTerm.DOSSIER_STATUS_RECEIVING);
queryReceiving.addField(DossierTerm.DOSSIER_STATUS);
subQuery.add(queryReceiving, BooleanClauseOccur.MUST_NOT);
/** Check condition releaseDate > dueDate **/
MultiMatchQuery queryCompareRelease = new MultiMatchQuery(String.valueOf(1));
queryCompareRelease.addField(DossierTerm.VALUE_COMPARE_RELEASE);
subQueryOne.add(queryCompareRelease, BooleanClauseOccur.MUST);
/** Check condition nowDate >= dueDate **/
Date date = new Date();
long nowTime = date.getTime();
TermRangeQueryImpl termRangeQuery = new TermRangeQueryImpl(DossierTerm.DUE_DATE_TIMESTAMP,
String.valueOf(0), String.valueOf(nowTime), false, false);
subQueryTwo.add(termRangeQuery, BooleanClauseOccur.MUST);
/** Check condition (releaseDate > dueDate || nowDate >= dueDate) **/
subQueryThree.add(subQueryTwo, BooleanClauseOccur.SHOULD);
subQueryThree.add(subQueryOne, BooleanClauseOccur.SHOULD);
/** Check condition dueDate!=null && (releaseDate>=dueDate || now>=dueDate) **/
subQuery.add(subQueryThree, BooleanClauseOccur.MUST);
// Dossier is coming
} else if (top.toLowerCase().equals(DossierTerm.COMING)) {
/** Check condition dueDate != null **/
MultiMatchQuery querydueDateNull = new MultiMatchQuery(String.valueOf(0));
querydueDateNull.addField(DossierTerm.DUE_DATE_TIMESTAMP);
subQuery.add(querydueDateNull, BooleanClauseOccur.MUST_NOT);
//Check dueDateComing is set (not zero)
MultiMatchQuery query = new MultiMatchQuery(String.valueOf(0));
query.addFields(DossierTerm.DUE_DATE_COMING);
subQuery.add(query, BooleanClauseOccur.MUST_NOT);
//Check dueDateComing <= now (the coming window has started)
Date date = new Date();
long nowTime = date.getTime();
TermRangeQueryImpl termRangeQuery = new TermRangeQueryImpl(DossierTerm.DUE_DATE_COMING,
String.valueOf(0), String.valueOf(nowTime), false, true);
subQuery.add(termRangeQuery, BooleanClauseOccur.MUST);
//Check nowDate < dueDate
TermRangeQueryImpl termRangeQueryNow = new TermRangeQueryImpl(DossierTerm.DUE_DATE_TIMESTAMP,
String.valueOf(nowTime), null, true, true);
subQuery.add(termRangeQueryNow, BooleanClauseOccur.MUST);
}
//
booleanQuery.add(subQuery, BooleanClauseOccur.MUST);
}
}
if (Validator.isNotNull(dossierNo)) {
String[] keyDossier = dossierNo.split(StringPool.SPACE);
BooleanQuery query = new BooleanQueryImpl();
for (String key : keyDossier) {
WildcardQuery wildQuery = new WildcardQueryImpl(DossierTerm.DOSSIER_NO_SEARCH,
key.toLowerCase() + StringPool.STAR);
query.add(wildQuery, BooleanClauseOccur.MUST);
}
booleanQuery.add(query, BooleanClauseOccur.MUST);
}
if (Validator.isNotNull(certificateNo)) {
MultiMatchQuery query = new MultiMatchQuery(certificateNo);
query.addFields(DossierTerm.DOSSIER_ID_CTN);
booleanQuery.add(query, BooleanClauseOccur.MUST);
}
if (Validator.isNotNull(strDossierActionId)) {
String[] splitDossierActionId = StringUtil.split(strDossierActionId);
if (splitDossierActionId != null && splitDossierActionId.length > 0) {
BooleanQuery subQuery = new BooleanQueryImpl();
for (String dossierActionId : splitDossierActionId) {
if (Validator.isNotNull(dossierActionId)) {
MultiMatchQuery query = new MultiMatchQuery(dossierActionId);
query.addFields(DossierTerm.DOSSIER_ACTION_ID);
subQuery.add(query, BooleanClauseOccur.SHOULD);
}
}
booleanQuery.add(subQuery, BooleanClauseOccur.MUST);
} else {
MultiMatchQuery query = new MultiMatchQuery(strDossierActionId);
query.addFields(DossierTerm.DOSSIER_ACTION_ID);
booleanQuery.add(query, BooleanClauseOccur.MUST);
}
}
String fromReceiveDateFilter = fromReceiveDate + ConstantsTerm.HOUR_START;
String toReceiveDateFilter = toReceiveDate + ConstantsTerm.HOUR_END;
if (Validator.isNotNull(fromReceiveDate)) {
if (Validator.isNotNull(toReceiveDate)) {
TermRangeQueryImpl termRangeQuery = new TermRangeQueryImpl(DossierTerm.RECEIVE_DATE,
fromReceiveDateFilter, toReceiveDateFilter, true, true);
booleanQuery.add(termRangeQuery, BooleanClauseOccur.MUST);
} else {
TermRangeQueryImpl termRangeQuery = new TermRangeQueryImpl(DossierTerm.RECEIVE_DATE,
fromReceiveDateFilter, toReceiveDateFilter, true, false);
booleanQuery.add(termRangeQuery, BooleanClauseOccur.MUST);
}
} else {
if (Validator.isNotNull(toReceiveDate)) {
TermRangeQueryImpl termRangeQuery = new TermRangeQueryImpl(DossierTerm.RECEIVE_DATE,
fromReceiveDateFilter, toReceiveDateFilter, false, true);
booleanQuery.add(termRangeQuery, BooleanClauseOccur.MUST);
}
}
if (Validator.isNotNull(certNo)) {
MultiMatchQuery query = new MultiMatchQuery(String.valueOf(certNo));
query.addField(DossierTerm.CERT_NO_SEARCH);
booleanQuery.add(query, BooleanClauseOccur.MUST);
}
String fromCertDateFilter = fromCertDate + ConstantsTerm.HOUR_START;
String toCertDateFilter = toCertDate + ConstantsTerm.HOUR_END;
if (Validator.isNotNull(fromCertDate)) {
if (Validator.isNotNull(toCertDate)) {
TermRangeQueryImpl termRangeQuery = new TermRangeQueryImpl(DossierTerm.CERT_DATE, fromCertDateFilter,
toCertDateFilter, true, true);
booleanQuery.add(termRangeQuery, BooleanClauseOccur.MUST);
} else {
TermRangeQueryImpl termRangeQuery = new TermRangeQueryImpl(DossierTerm.CERT_DATE, fromCertDateFilter,
toCertDateFilter, true, false);
booleanQuery.add(termRangeQuery, BooleanClauseOccur.MUST);
}
} else {
if (Validator.isNotNull(toCertDate)) {
TermRangeQueryImpl termRangeQuery = new TermRangeQueryImpl(DossierTerm.CERT_DATE, fromCertDateFilter,
toCertDateFilter, false, true);
booleanQuery.add(termRangeQuery, BooleanClauseOccur.MUST);
}
}
String fromSubmitDateFilter = fromSubmitDate + ConstantsTerm.HOUR_START;
String toSubmitDateFilter = toSubmitDate + ConstantsTerm.HOUR_END;
if (Validator.isNotNull(fromSubmitDate)) {
if (Validator.isNotNull(toSubmitDate)) {
TermRangeQueryImpl termRangeQuery = new TermRangeQueryImpl(DossierTerm.SUBMIT_DATE,
fromSubmitDateFilter, toSubmitDateFilter, true, true);
booleanQuery.add(termRangeQuery, BooleanClauseOccur.MUST);
} else {
TermRangeQueryImpl termRangeQuery = new TermRangeQueryImpl(DossierTerm.SUBMIT_DATE,
fromSubmitDateFilter, toSubmitDateFilter, true, false);
booleanQuery.add(termRangeQuery, BooleanClauseOccur.MUST);
}
} else {
if (Validator.isNotNull(toSubmitDate)) {
TermRangeQueryImpl termRangeQuery = new TermRangeQueryImpl(DossierTerm.SUBMIT_DATE,
fromSubmitDateFilter, toSubmitDateFilter, false, true);
booleanQuery.add(termRangeQuery, BooleanClauseOccur.MUST);
}
}
if (Validator.isNotNull(notState)) {
// LamTV: Case without the cancelling flag
if (notState.equals(ConstantsTerm.CANCELLING)) {
MultiMatchQuery query = new MultiMatchQuery(String.valueOf(0));
query.addField(DossierTerm.CANCELLING_DATE_TIMESTAMP);
booleanQuery.add(query, BooleanClauseOccur.MUST);
}
// LamTV: Case without the correcting and endorsement flags
if (notState.contains(ConstantsTerm.CORRECTING)) {
MultiMatchQuery query = new MultiMatchQuery(String.valueOf(0));
query.addField(DossierTerm.CORRECTING_DATE_TIMESTAMP);
booleanQuery.add(query, BooleanClauseOccur.MUST);
}
if (notState.contains(ConstantsTerm.ENDORSEMENT)) {
MultiMatchQuery query = new MultiMatchQuery(String.valueOf(0));
query.addField(DossierTerm.ENDORSEMENT_DATE_TIMESTAMP);
booleanQuery.add(query, BooleanClauseOccur.MUST);
}
}
// LamTV: Handle abnormal registration status cases
if (Validator.isNotNull(statusReg)) {
MultiMatchQuery query = new MultiMatchQuery(String.valueOf(statusReg));
query.addField(DossierTerm.STATUS_REG);
booleanQuery.add(query, BooleanClauseOccur.MUST);
}
if (Validator.isNotNull(notStatusReg)) {
MultiMatchQuery query = new MultiMatchQuery(String.valueOf(notStatusReg));
query.addField(DossierTerm.STATUS_REG);
booleanQuery.add(query, BooleanClauseOccur.MUST_NOT);
}
// LamTV: Process originality and assigned
if (Validator.isNotNull(assigned)) {
MultiMatchQuery query = new MultiMatchQuery(String.valueOf(assigned));
query.addField(DossierTerm.ASSIGNED);
booleanQuery.add(query, BooleanClauseOccur.MUST);
}
// LamTV: Process assigned user ids
if (Validator.isNotNull(assignedUserId)) {
String[] assignedArr = StringUtil.split(assignedUserId);
if (assignedArr != null && assignedArr.length > 0) {
BooleanQuery subQuery = new BooleanQueryImpl();
for (int i = 0; i < assignedArr.length; i++) {
MultiMatchQuery query = new MultiMatchQuery(assignedArr[i]);
query.addField(DossierTerm.ASSIGNED_USER_ID);
subQuery.add(query, BooleanClauseOccur.SHOULD);
}
booleanQuery.add(subQuery, BooleanClauseOccur.MUST);
} else {
MultiMatchQuery query = new MultiMatchQuery(assignedUserId);
query.addFields(DossierTerm.ASSIGNED_USER_ID);
booleanQuery.add(query, BooleanClauseOccur.MUST);
}
}
//_log.debug("originality: "+originality);
if (Validator.isNotNull(originality)) {
if (originality.contains(StringPool.COMMA)) {
String[] originalArr = StringUtil.split(originality);
if (originalArr != null && originalArr.length > 0) {
BooleanQuery subQuery = new BooleanQueryImpl();
for (int i = 0; i < originalArr.length; i++) {
int originalInt = GetterUtil.getInteger(originalArr[i]);
if (originalInt >= 0) {
MultiMatchQuery query = new MultiMatchQuery(originalArr[i]);
query.addField(DossierTerm.ORIGINALLITY);
subQuery.add(query, BooleanClauseOccur.SHOULD);
} else {
String originalSearch = String.valueOf(DossierTerm.CONSTANT_INDEX_ORIGINALITY + originalInt);
MultiMatchQuery query = new MultiMatchQuery(originalSearch);
query.addField(DossierTerm.ORIGINALLITY);
subQuery.add(query, BooleanClauseOccur.SHOULD);
}
}
booleanQuery.add(subQuery, BooleanClauseOccur.MUST);
}
} else {
Integer originalityInt = GetterUtil.getInteger(originality);
if (originalityInt == -1) {
TermRangeQueryImpl termRangeQuery = new TermRangeQueryImpl(DossierTerm.ORIGINALLITY,
String.valueOf(DossierTerm.ORIGINALITY_MOTCUA),
String.valueOf(DossierTerm.CONSTANT_INDEX_ORIGINALITY), false, true);
booleanQuery.add(termRangeQuery, BooleanClauseOccur.MUST);
} else if (originalityInt >= 0) {
//_log.debug("originalityxxxx: "+originality);
MultiMatchQuery query = new MultiMatchQuery(originality);
query.addFields(DossierTerm.ORIGINALLITY);
booleanQuery.add(query, BooleanClauseOccur.MUST);
}
}
} else {
// _log.debug("START originality: "+originality);
TermRangeQueryImpl termRangeQuery = new TermRangeQueryImpl(DossierTerm.ORIGINALLITY,
String.valueOf(DossierTerm.ORIGINALITY_PUBLISH), String.valueOf(DossierTerm.ORIGINALITY_HSLT), true,
true);
booleanQuery.add(termRangeQuery, BooleanClauseOccur.MUST);
}
//Filter by origin dossier / dossier action depending on originality
if (Validator.isNotNull(originDossierId) && originDossierId > 0) {
MultiMatchQuery queryOrigin = new MultiMatchQuery(String.valueOf(originDossierId));
queryOrigin.addField(DossierTerm.ORIGIN_DOSSIER_ID);
booleanQuery.add(queryOrigin, BooleanClauseOccur.MUST);
} else {
if (Validator.isNotNull(originality)) {
Integer originalityInt = GetterUtil.getInteger(originality);
if (Validator.isNotNull(follow) && Boolean.valueOf(follow)
&& originalityInt == DossierTerm.ORIGINALITY_PUBLISH) {
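// Intentionally empty: when following as the applicant of a published dossier,
// no additional dossierActionId / originDossierId restriction is applied.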
} else if (originalityInt != 9) {
MultiMatchQuery queryDossierAction = new MultiMatchQuery(String.valueOf(0));
queryDossierAction.addField(DossierTerm.DOSSIER_ACTION_ID);
booleanQuery.add(queryDossierAction, BooleanClauseOccur.MUST_NOT);
//
// MultiMatchQuery queryOrigin = new MultiMatchQuery(String.valueOf(0));
// queryOrigin.addField(DossierTerm.ORIGIN_DOSSIER_ID);
// booleanQuery.add(queryOrigin, BooleanClauseOccur.MUST);
}
} else {
MultiMatchQuery queryDossierAction = new MultiMatchQuery(String.valueOf(0));
queryDossierAction.addField(DossierTerm.DOSSIER_ACTION_ID);
booleanQuery.add(queryDossierAction, BooleanClauseOccur.MUST_NOT);
//
MultiMatchQuery queryOrigin = new MultiMatchQuery(String.valueOf(0));
queryOrigin.addField(DossierTerm.ORIGIN_DOSSIER_ID);
booleanQuery.add(queryOrigin, BooleanClauseOccur.MUST);
}
}
//LamTV_Test
// if (Validator.isNotNull(statusStep)) {
// String[] statusStepArr = StringUtil.split(statusStep);
//
// if (statusStepArr != null && statusStepArr.length > 0) {
// BooleanQuery subQuery = new BooleanQueryImpl();
// for (int i = 0; i < statusStepArr.length; i++) {
// MultiMatchQuery query = new MultiMatchQuery(statusStepArr[i]);
// query.addField(DossierTerm.DOSSIER_STATUS);
// subQuery.add(query, BooleanClauseOccur.SHOULD);
// }
// booleanQuery.add(subQuery, BooleanClauseOccur.MUST);
// } else {
// MultiMatchQuery query = new MultiMatchQuery(statusStep);
// query.addFields(DossierTerm.DOSSIER_STATUS);
// booleanQuery.add(query, BooleanClauseOccur.MUST);
// }
// }
// Set<String> addedSubStatuses = new HashSet<>();
// if (Validator.isNotNull(subStatusStep)) {
// String[] subStatusStepArr = StringUtil.split(subStatusStep);
// if (subStatusStepArr != null && subStatusStepArr.length > 0) {
// BooleanQuery subQuery = new BooleanQueryImpl();
// for (int i = 0; i < subStatusStepArr.length; i++) {
// String subStatusStepDetail = subStatusStepArr[i];
// if (!"empty".equals(subStatusStepDetail) && !addedSubStatuses.contains(subStatusStepDetail)) {
// MultiMatchQuery query = new MultiMatchQuery(subStatusStepArr[i]);
// query.addField(DossierTerm.DOSSIER_SUB_STATUS);
// subQuery.add(query, BooleanClauseOccur.SHOULD);
// addedSubStatuses.add(subStatusStepArr[i]);
//
// }
// }
// booleanQuery.add(subQuery, BooleanClauseOccur.MUST);
// } else {
// if (!"empty".equals(subStatusStep)) {
// MultiMatchQuery query = new MultiMatchQuery(subStatusStep);
// query.addFields(DossierTerm.DOSSIER_SUB_STATUS);
// booleanQuery.add(query, BooleanClauseOccur.MUST);
// }
// }
// }
if (Validator.isNotNull(statusStep)
&& Validator.isNotNull(subStatusStep)) {
String[] statusStepArr = StringUtil.split(statusStep);
String[] subStatusStepArr = StringUtil.split(subStatusStep);
if (statusStepArr != null && statusStepArr.length > 0) {
BooleanQuery subQuery = new BooleanQueryImpl();
for (int i = 0; i < statusStepArr.length; i++) {
BooleanQuery matchedQuery = new BooleanQueryImpl();
MultiMatchQuery query = new MultiMatchQuery(statusStepArr[i]);
query.addField(DossierTerm.DOSSIER_STATUS);
matchedQuery.add(query, BooleanClauseOccur.MUST);
if (!"empty".equals(subStatusStepArr[i])) {
MultiMatchQuery querySub = new MultiMatchQuery(subStatusStepArr[i]);
querySub.addField(DossierTerm.DOSSIER_SUB_STATUS);
matchedQuery.add(querySub, BooleanClauseOccur.MUST);
}
subQuery.add(matchedQuery, BooleanClauseOccur.SHOULD);
}
booleanQuery.add(subQuery, BooleanClauseOccur.MUST);
} else {
MultiMatchQuery query = new MultiMatchQuery(statusStep);
query.addFields(DossierTerm.DOSSIER_STATUS);
booleanQuery.add(query, BooleanClauseOccur.MUST);
MultiMatchQuery querySub = new MultiMatchQuery(subStatusStep);
querySub.addFields(DossierTerm.DOSSIER_SUB_STATUS);
booleanQuery.add(querySub, BooleanClauseOccur.MUST);
}
}
// _log.debug("Permission: " + permission);
if (Validator.isNotNull(permission)) {
String[] permissionArr = StringUtil.split(permission);
if (permissionArr != null && permissionArr.length > 0) {
BooleanQuery subQuery = new BooleanQueryImpl();
for (int i = 0; i < permissionArr.length; i++) {
MultiMatchQuery query = new MultiMatchQuery(permissionArr[i]);
query.addField(DossierTerm.MAPPING_PERMISSION);
subQuery.add(query, BooleanClauseOccur.SHOULD);
}
booleanQuery.add(subQuery, BooleanClauseOccur.MUST);
} else {
MultiMatchQuery query = new MultiMatchQuery(permission);
query.addFields(DossierTerm.MAPPING_PERMISSION);
booleanQuery.add(query, BooleanClauseOccur.MUST);
}
}
if (Validator.isNotNull(domain)) {
MultiMatchQuery query = new MultiMatchQuery(domain);
query.addFields(DossierTerm.DOMAIN_CODE);
booleanQuery.add(query, BooleanClauseOccur.MUST);
}
// LamTV: Process search LIKE
if (Validator.isNotNull(domainName)) {
String[] domainArr = domainName.split(StringPool.SPACE);
BooleanQuery query = new BooleanQueryImpl();
for (String key : domainArr) {
WildcardQuery wildQuery = new WildcardQueryImpl(DossierTerm.DOMAIN_NAME,
key.toLowerCase() + StringPool.STAR);
query.add(wildQuery, BooleanClauseOccur.MUST);
}
booleanQuery.add(query, BooleanClauseOccur.MUST);
}
if (Validator.isNotNull(applicantName)) {
String[] applicantArr = applicantName.split(StringPool.SPACE);
BooleanQuery query = new BooleanQueryImpl();
for (String key : applicantArr) {
WildcardQuery wildQuery = new WildcardQueryImpl(DossierTerm.APPLICANT_NAME,
StringPool.STAR + key.toLowerCase() + StringPool.STAR);
query.add(wildQuery, BooleanClauseOccur.MUST);
}
booleanQuery.add(query, BooleanClauseOccur.MUST);
}
// LamTV: Process search LIKE
if (Validator.isNotNull(applicantIdNo)) {
String[] keywordArr = applicantIdNo.split(StringPool.SPACE);
BooleanQuery query = new BooleanQueryImpl();
for (String key : keywordArr) {
WildcardQuery wildQuery = new WildcardQueryImpl(DossierTerm.APPLICANT_ID_NO,
key.toLowerCase() + StringPool.STAR);
query.add(wildQuery, BooleanClauseOccur.MUST);
}
booleanQuery.add(query, BooleanClauseOccur.MUST);
}
// LamTV: Process search LIKE
if (Validator.isNotNull(serviceName)) {
String[] serviceArr = serviceName.split(StringPool.SPACE);
BooleanQuery query = new BooleanQueryImpl();
for (String key : serviceArr) {
WildcardQuery wildQuery = new WildcardQueryImpl(DossierTerm.SERVICE_NAME,
key.toLowerCase() + StringPool.STAR);
query.add(wildQuery, BooleanClauseOccur.MUST);
}
booleanQuery.add(query, BooleanClauseOccur.MUST);
}
String fromReleaseDateFilter = fromReleaseDate + ConstantsTerm.HOUR_START;
String toReleaseDateFilter = toReleaseDate + ConstantsTerm.HOUR_END;
if (Validator.isNotNull(fromReleaseDate)) {
if (Validator.isNotNull(toReleaseDate)) {
TermRangeQueryImpl termRangeQuery = new TermRangeQueryImpl(DossierTerm.RELEASE_DATE_LUCENE,
fromReleaseDateFilter, toReleaseDateFilter, true, true);
booleanQuery.add(termRangeQuery, BooleanClauseOccur.MUST);
} else {
TermRangeQueryImpl termRangeQuery = new TermRangeQueryImpl(DossierTerm.RELEASE_DATE_LUCENE,
fromReleaseDateFilter, null, true, false);
booleanQuery.add(termRangeQuery, BooleanClauseOccur.MUST);
}
} else {
if (Validator.isNotNull(toReleaseDate)) {
TermRangeQueryImpl termRangeQuery = new TermRangeQueryImpl(DossierTerm.RELEASE_DATE_LUCENE,
null, toReleaseDateFilter, false, true);
booleanQuery.add(termRangeQuery, BooleanClauseOccur.MUST);
}
}
//Process Statistic
//TODO
if (Validator.isNotNull(fromFinishDate) || Validator.isNotNull(toFinishDate)) {
String fromFinishDateFilter = fromFinishDate + ConstantsTerm.HOUR_START;
String toFinishDateFilter = toFinishDate + ConstantsTerm.HOUR_END;
if (Validator.isNotNull(fromFinishDate)) {
if (Validator.isNotNull(toFinishDate)) {
TermRangeQueryImpl termRangeQuery = new TermRangeQueryImpl(DossierTerm.FINISH_DATE_LUCENE,
fromFinishDateFilter, toFinishDateFilter, true, true);
booleanQuery.add(termRangeQuery, BooleanClauseOccur.MUST);
} else {
TermRangeQueryImpl termRangeQuery = new TermRangeQueryImpl(DossierTerm.FINISH_DATE_LUCENE,
fromFinishDateFilter, toFinishDateFilter, true, false);
booleanQuery.add(termRangeQuery, BooleanClauseOccur.MUST);
}
} else {
if (Validator.isNotNull(toFinishDate)) {
TermRangeQueryImpl termRangeQuery = new TermRangeQueryImpl(DossierTerm.FINISH_DATE_LUCENE,
fromFinishDateFilter, toFinishDateFilter, false, true);
booleanQuery.add(termRangeQuery, BooleanClauseOccur.MUST);
}
}
//
MultiMatchQuery query = new MultiMatchQuery(DossierTerm.DOSSIER_STATUS_DONE);
query.addField(DossierTerm.DOSSIER_STATUS);
booleanQuery.add(query, BooleanClauseOccur.MUST);
}
if (Validator.isNotNull(fromReceiveNotDoneDate) || Validator.isNotNull(toReceiveNotDoneDate)) {
//Check Release is null
MultiMatchQuery queryRelease = new MultiMatchQuery(String.valueOf(0));
queryRelease.addField(DossierTerm.RELEASE_DATE_TIMESTAMP);
booleanQuery.add(queryRelease, BooleanClauseOccur.MUST);
//
String fromReceiveNotDoneDateFilter = fromReceiveNotDoneDate + ConstantsTerm.HOUR_START;
String toReceiveNotDoneDateFilter = toReceiveNotDoneDate + ConstantsTerm.HOUR_END;
if (Validator.isNotNull(fromReceiveNotDoneDate)) {
if (Validator.isNotNull(toReceiveNotDoneDate)) {
TermRangeQueryImpl termRangeQuery = new TermRangeQueryImpl(DossierTerm.RECEIVE_DATE,
fromReceiveNotDoneDateFilter, toReceiveNotDoneDateFilter, true, true);
booleanQuery.add(termRangeQuery, BooleanClauseOccur.MUST);
} else {
TermRangeQueryImpl termRangeQuery = new TermRangeQueryImpl(DossierTerm.RECEIVE_DATE,
fromReceiveNotDoneDateFilter, toReceiveNotDoneDateFilter, true, false);
booleanQuery.add(termRangeQuery, BooleanClauseOccur.MUST);
}
} else {
if (Validator.isNotNull(toReceiveNotDoneDate)) {
TermRangeQueryImpl termRangeQuery = new TermRangeQueryImpl(DossierTerm.RECEIVE_DATE,
fromReceiveNotDoneDateFilter, toReceiveNotDoneDateFilter, false, true);
booleanQuery.add(termRangeQuery, BooleanClauseOccur.MUST);
}
}
}
if (Validator.isNotNull(paymentStatus)) {
MultiMatchQuery query = new MultiMatchQuery(paymentStatus);
query.addFields(PaymentFileTerm.PAYMENT_STATUS);
booleanQuery.add(query, BooleanClauseOccur.MUST);
}
if (Validator.isNotNull(origin)) {
MultiMatchQuery query = new MultiMatchQuery(origin);
query.addFields(DossierTerm.ORIGIN);
booleanQuery.add(query, BooleanClauseOccur.MUST);
}
// Check statistic with key "time"
if (Validator.isNotNull(time)) {
String[] lstTimes = StringUtil.split(time);
if (lstTimes != null && lstTimes.length > 1) {
BooleanQuery subQuery = new BooleanQueryImpl();
for (int i = 0; i < lstTimes.length; i++) {
BooleanQuery query = processStatisticDossier(lstTimes[i]);
subQuery.add(query, BooleanClauseOccur.SHOULD);
}
booleanQuery.add(subQuery, BooleanClauseOccur.MUST);
} else {
booleanQuery.add(processStatisticDossier(time), BooleanClauseOccur.MUST);
}
}
if (Validator.isNotNull(register)) {
MultiMatchQuery query = new MultiMatchQuery(register);
query.addFields(DossierTerm.REGISTER);
booleanQuery.add(query, BooleanClauseOccur.MUST);
}
if (day > 0) {
MultiMatchQuery query = new MultiMatchQuery(String.valueOf(day));
query.addFields(DossierTerm.DAY_DOSSIER);
booleanQuery.add(query, BooleanClauseOccur.MUST);
}
if (Validator.isNotNull(groupDossierId)) {
MultiMatchQuery query = new MultiMatchQuery(String.valueOf(groupDossierId));
query.addField(DossierTerm.GROUP_DOSSIER_ID);
booleanQuery.add(query, BooleanClauseOccur.MUST);
}
return booleanQuery;
}
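/**
 * Builds the statistic sub-query for a single "time" bucket. Supported values are the
 * DossierTerm constants BE_TIME, OVER_TIME and ON_TIME (released dossiers compared against
 * their due date) and OVER_DUE, UN_DUE, COMING and DELAY (dossiers still in process).
 * Illustrative call, mirroring how the search methods above use it (a sketch, not new behaviour):
 * <pre>{@code
 * BooleanQuery onTime = processStatisticDossier(DossierTerm.ON_TIME);
 * booleanQuery.add(onTime, BooleanClauseOccur.MUST);
 * }</pre>
 */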
private BooleanQuery processStatisticDossier(String subTime) throws ParseException {
BooleanQuery booleanQuery = new BooleanQueryImpl();
// Dossiers finished ahead of schedule (BE_TIME)
if (subTime.equals(DossierTerm.BE_TIME)) {
BooleanQuery subQueryOne = new BooleanQueryImpl();
BooleanQuery subQueryTwo = new BooleanQueryImpl();
BooleanQuery subQueryThree = new BooleanQueryImpl();
BooleanQuery subQueryFour = new BooleanQueryImpl();
/** Check condition dueDate != null **/
MultiMatchQuery queryDueDate = new MultiMatchQuery(String.valueOf(0));
queryDueDate.addField(DossierTerm.DUE_DATE_TIMESTAMP);
subQueryOne.add(queryDueDate, BooleanClauseOccur.MUST_NOT);
/** Check condition extendDate != null and releaseDate < dueDate **/
//Check extendDate != null
MultiMatchQuery queryExtend = new MultiMatchQuery(String.valueOf(0));
queryExtend.addField(DossierTerm.EXTEND_DATE_TIMESTAMP);
subQueryTwo.add(queryExtend, BooleanClauseOccur.MUST_NOT);
// Check releaseDate < dueDate
//TermRangeQueryImpl termRangeRelease = new TermRangeQueryImpl(DossierTerm.VALUE_COMPARE_RELEASE,
// null, String.valueOf(0), true, false);
//subQueryTwo.add(termRangeRelease, BooleanClauseOccur.MUST);
MultiMatchQuery termRangeRelease = new MultiMatchQuery("3");
termRangeRelease.addField(DossierTerm.VALUE_COMPARE_RELEASE);
subQueryTwo.add(termRangeRelease, BooleanClauseOccur.MUST);
/** Check condition finishDate < dueDate **/
//TermRangeQueryImpl termRangeFinish = new TermRangeQueryImpl(DossierTerm.VALUE_COMPARE_FINISH,
// null, String.valueOf(0), true, false);
MultiMatchQuery termRangeFinish = new MultiMatchQuery(String.valueOf(3));
termRangeFinish.addField(DossierTerm.VALUE_COMPARE_FINISH);
subQueryThree.add(termRangeFinish, BooleanClauseOccur.MUST);
/** Check condition (extendDate != null && releaseDate < dueDate) || (finishDate < dueDate) **/
subQueryFour.add(subQueryThree, BooleanClauseOccur.SHOULD);
subQueryFour.add(subQueryTwo, BooleanClauseOccur.SHOULD);
/** Check condition dueDate != null && subQueryTwo **/
subQueryOne.add(subQueryFour, BooleanClauseOccur.MUST);
/** Add search all **/
booleanQuery.add(subQueryOne, BooleanClauseOccur.MUST);
} else if (subTime.equals(DossierTerm.OVER_TIME)) { // Dossiers released after their due date (OVER_TIME)
BooleanQuery subQueryOne = new BooleanQueryImpl();
BooleanQuery subQueryTwo = new BooleanQueryImpl();
/** Check condition releaseDate != null **/
MultiMatchQuery queryRelease = new MultiMatchQuery(String.valueOf(0));
queryRelease.addField(DossierTerm.RELEASE_DATE_TIMESTAMP);
subQueryOne.add(queryRelease, BooleanClauseOccur.MUST_NOT);
/** Check condition dueDate != null **/
MultiMatchQuery querydueDate = new MultiMatchQuery(String.valueOf(0));
querydueDate.addField(DossierTerm.DUE_DATE_TIMESTAMP);
subQueryOne.add(querydueDate, BooleanClauseOccur.MUST_NOT);
/** Check condition releaseDate > dueDate **/
MultiMatchQuery termRangeRelease = new MultiMatchQuery(String.valueOf(1));
termRangeRelease.addField(DossierTerm.VALUE_COMPARE_RELEASE);
subQueryTwo.add(termRangeRelease, BooleanClauseOccur.MUST);
//TermRangeQueryImpl termRangeRelease = new TermRangeQueryImpl(DossierTerm.VALUE_COMPARE_RELEASE,
// String.valueOf(0), null, false, false);
//subQueryTwo.add(termRangeRelease, BooleanClauseOccur.MUST);
/** Check condition releaseDate != null && dueDate != null && subQueryTwo **/
subQueryOne.add(subQueryTwo, BooleanClauseOccur.MUST);
/** Add search all **/
booleanQuery.add(subQueryOne, BooleanClauseOccur.MUST);
} else if (subTime.equals(DossierTerm.ON_TIME)) { // Dossiers released on time (ON_TIME)
BooleanQuery subQueryOne = new BooleanQueryImpl();
BooleanQuery subQueryTwo = new BooleanQueryImpl();
BooleanQuery subQueryThree = new BooleanQueryImpl();
BooleanQuery subQueryFour = new BooleanQueryImpl();
BooleanQuery subQueryFive = new BooleanQueryImpl();
BooleanQuery subQuerySix = new BooleanQueryImpl();
BooleanQuery subQuerySeven = new BooleanQueryImpl();
/** Check condition releaseDate!=null && (dueDate==null || (releaseDate<dueDate && extendDate==null && (finishDate==null||finishDate>=dueDate))) - START **/
/** Check condition releaseDate != null **/
MultiMatchQuery queryReleaseEmpty = new MultiMatchQuery(String.valueOf(0));
queryReleaseEmpty.addField(DossierTerm.RELEASE_DATE_TIMESTAMP);
subQueryOne.add(queryReleaseEmpty, BooleanClauseOccur.MUST_NOT);
/** Check condition (dueDate==null || (releaseDate<dueDate && extendDate==null && (finishDate==null||finishDate>=dueDate)) - START **/
/** Check condition dueDate == null **/
MultiMatchQuery queryDueDateEmpty = new MultiMatchQuery(String.valueOf(0));
queryDueDateEmpty.addField(DossierTerm.DUE_DATE_TIMESTAMP);
subQueryTwo.add(queryDueDateEmpty, BooleanClauseOccur.MUST);
/** Check condition (extendDate == null and releaseDate < dueDate && (finishDate==null||finishDate>=dueDate))- START **/
/** Check condition extendDate == null and releaseDate < dueDate **/
//Check extendDate == null
MultiMatchQuery queryExtend = new MultiMatchQuery(String.valueOf(0));
queryExtend.addField(DossierTerm.EXTEND_DATE_TIMESTAMP);
subQueryThree.add(queryExtend, BooleanClauseOccur.MUST);
//Check dueDate != null
MultiMatchQuery queryDueDate = new MultiMatchQuery(String.valueOf(0));
queryDueDate.addField(DossierTerm.DUE_DATE_TIMESTAMP);
subQueryThree.add(queryDueDate, BooleanClauseOccur.MUST_NOT);
// Check releaseDate < dueDate
// TermRangeQueryImpl queryCompareRelease = new TermRangeQueryImpl(DossierTerm.VALUE_COMPARE_RELEASE,
// String.valueOf(2), String.valueOf(2), true, true);
MultiMatchQuery queryCompareRelease = new MultiMatchQuery(String.valueOf(2));
queryCompareRelease.addField(DossierTerm.VALUE_COMPARE_RELEASE);
subQueryThree.add(queryCompareRelease, BooleanClauseOccur.MUST);
/** Check condition (finishDate == null) || (finishDate != null && finishDate >= dueDate) - START **/
/** Check condition (finishDate == null) **/
MultiMatchQuery queryFinishDateEmpty = new MultiMatchQuery(String.valueOf(0));
queryFinishDateEmpty.addField(DossierTerm.FINISH_DATE_TIMESTAMP);
subQueryFour.add(queryFinishDateEmpty, BooleanClauseOccur.MUST);
/** Check condition (finishDate != null && finishDate >= dueDate) **/
//Check finishDate != null
MultiMatchQuery queryFinishDate = new MultiMatchQuery(String.valueOf(0));
queryFinishDate.addField(DossierTerm.FINISH_DATE_TIMESTAMP);
subQueryFive.add(queryFinishDate, BooleanClauseOccur.MUST_NOT);
//Check finishDate >= dueDate
// TermRangeQueryImpl queryCompareFinish = new TermRangeQueryImpl(DossierTerm.VALUE_COMPARE_FINISH,
// String.valueOf(1), String.valueOf(2), true, true);
MultiMatchQuery queryCompareFinish = new MultiMatchQuery(String.valueOf(2));
queryCompareFinish.addField(DossierTerm.VALUE_COMPARE_FINISH);
subQueryFive.add(queryCompareFinish, BooleanClauseOccur.MUST);
/** Check condition (finishDate == null) || (finishDate != null && finishDate >= dueDate) - END **/
subQuerySix.add(subQueryFive, BooleanClauseOccur.SHOULD);
subQuerySix.add(subQueryFour, BooleanClauseOccur.SHOULD);
/** Check condition (releaseDate < dueDate && extendDate==null && (finishDate==null||finishDate>=dueDate))- END **/
subQueryThree.add(subQuerySix, BooleanClauseOccur.MUST);
/** Check condition (dueDate==null || (releaseDate<dueDate && extendDate==null && (finishDate==null||finishDate>=dueDate)) - END **/
subQuerySeven.add(subQueryThree, BooleanClauseOccur.SHOULD);
subQuerySeven.add(subQueryTwo, BooleanClauseOccur.SHOULD);
/** Check condition releaseDate!=null && (dueDate==null || (releaseDate<dueDate && extendDate==null && (finishDate==null||finishDate>=dueDate))) - END **/
subQueryOne.add(subQuerySeven, BooleanClauseOccur.MUST);
/** Add search all **/
booleanQuery.add(subQueryOne, BooleanClauseOccur.MUST);
} else if (subTime.equals(DossierTerm.OVER_DUE)) { // Dossiers still in process and already overdue (OVER_DUE)
/** Check condition releaseDate == null **/
MultiMatchQuery queryRelease = new MultiMatchQuery(String.valueOf(0));
queryRelease.addField(DossierTerm.RELEASE_DATE_TIMESTAMP);
booleanQuery.add(queryRelease, BooleanClauseOccur.MUST);
/** Check condition dueDate != null **/
MultiMatchQuery querydueDate = new MultiMatchQuery(String.valueOf(0));
querydueDate.addField(DossierTerm.DUE_DATE_TIMESTAMP);
booleanQuery.add(querydueDate, BooleanClauseOccur.MUST_NOT);
/** Check condition status != waiting **/
MultiMatchQuery queryWaiting = new MultiMatchQuery(DossierTerm.DOSSIER_STATUS_WAITING);
queryWaiting.addField(DossierTerm.DOSSIER_STATUS);
booleanQuery.add(queryWaiting, BooleanClauseOccur.MUST_NOT);
/** Check condition status != receiving **/
MultiMatchQuery queryReceiving = new MultiMatchQuery(DossierTerm.DOSSIER_STATUS_RECEIVING);
queryReceiving.addField(DossierTerm.DOSSIER_STATUS);
booleanQuery.add(queryReceiving, BooleanClauseOccur.MUST_NOT);
/** Check condition lockState != PAUSE **/
//MultiMatchQuery queryLockState = new MultiMatchQuery(DossierTerm.DOSSIER_STATUS_WAITING);
//queryWaiting.addField(DossierTerm.DOSSIER_STATUS);
//booleanQuery.add(queryWaiting, BooleanClauseOccur.MUST_NOT);
/** Check condition dueDate < now **/
Date date = new Date();
long nowTime = date.getTime();
TermRangeQueryImpl termRangeQuery = new TermRangeQueryImpl(DossierTerm.DUE_DATE_TIMESTAMP,
String.valueOf(0), String.valueOf(nowTime), false, false);
booleanQuery.add(termRangeQuery, BooleanClauseOccur.MUST);
} else if (subTime.equals(DossierTerm.UN_DUE)) { // Dossiers still in process and not yet due (UN_DUE)
BooleanQuery subQueryOne = new BooleanQueryImpl();
BooleanQuery subQueryTwo = new BooleanQueryImpl();
BooleanQuery subQueryThree = new BooleanQueryImpl();
BooleanQuery subQueryFour = new BooleanQueryImpl();
/** Check condition releaseDate == null **/
MultiMatchQuery queryRelease = new MultiMatchQuery(String.valueOf(0));
queryRelease.addField(DossierTerm.RELEASE_DATE_TIMESTAMP);
subQueryOne.add(queryRelease, BooleanClauseOccur.MUST);
/** Check condition dueDate == null **/
MultiMatchQuery querydueDateNull = new MultiMatchQuery(String.valueOf(0));
querydueDateNull.addField(DossierTerm.DUE_DATE_TIMESTAMP);
subQueryTwo.add(querydueDateNull, BooleanClauseOccur.MUST);
/** Check condition (dueDate != null && now < dueDate) - START **/
// Check dueDate != null
MultiMatchQuery querydueDate = new MultiMatchQuery(String.valueOf(0));
querydueDate.addField(DossierTerm.DUE_DATE_TIMESTAMP);
subQueryThree.add(querydueDate, BooleanClauseOccur.MUST_NOT);
// Check condition dueDate < now
Date date = new Date();
long nowTime = date.getTime();
TermRangeQueryImpl termRangeQuery = new TermRangeQueryImpl(DossierTerm.DUE_DATE_TIMESTAMP,
String.valueOf(nowTime), null, true, false);
subQueryThree.add(termRangeQuery, BooleanClauseOccur.MUST);
/** Check condition (dueDate != null && now < dueDate) - END **/
/** Check condition (dueDate==null || (dueDate!=null && now<dueDate)) **/
subQueryFour.add(subQueryThree, BooleanClauseOccur.SHOULD);
subQueryFour.add(subQueryTwo, BooleanClauseOccur.SHOULD);
/** Check condition releaseDate==null && (dueDate==null || (dueDate!=null && now<dueDate)) **/
subQueryOne.add(subQueryFour, BooleanClauseOccur.MUST);
//
booleanQuery.add(subQueryOne, BooleanClauseOccur.MUST);
} else if (subTime.equals(DossierTerm.COMING)) { // Dossiers still in process and approaching their due date (COMING)
/** Check condition releaseDate == null **/
MultiMatchQuery queryRelease = new MultiMatchQuery(String.valueOf(0));
queryRelease.addField(DossierTerm.RELEASE_DATE_TIMESTAMP);
booleanQuery.add(queryRelease, BooleanClauseOccur.MUST);
/** Check condition dueDate != null **/
MultiMatchQuery querydueDateNull = new MultiMatchQuery(String.valueOf(0));
querydueDateNull.addField(DossierTerm.DUE_DATE_TIMESTAMP);
booleanQuery.add(querydueDateNull, BooleanClauseOccur.MUST_NOT);
/** Check condition (dueDate-duration/5) < now **/
Date date = new Date();
long nowTime = date.getTime();
TermRangeQueryImpl termRangeQuery = new TermRangeQueryImpl(DossierTerm.DUE_DATE_COMING,
String.valueOf(0), String.valueOf(nowTime), false, true);
booleanQuery.add(termRangeQuery, BooleanClauseOccur.MUST);
/** Check condition nowDate < dueDate **/
TermRangeQueryImpl termRangeQueryNow = new TermRangeQueryImpl(DossierTerm.DUE_DATE_TIMESTAMP,
String.valueOf(nowTime), null, true, true);
booleanQuery.add(termRangeQueryNow, BooleanClauseOccur.MUST);
} else if (subTime.equals(DossierTerm.DELAY)) { // Dossiers still in process whose due date has been extended (DELAY)
/** Check condition releaseDate == null **/
MultiMatchQuery queryRelease = new MultiMatchQuery(String.valueOf(0));
queryRelease.addField(DossierTerm.RELEASE_DATE_TIMESTAMP);
booleanQuery.add(queryRelease, BooleanClauseOccur.MUST);
/** Check condition dueDate != null **/
MultiMatchQuery querydueDate = new MultiMatchQuery(String.valueOf(0));
querydueDate.addField(DossierTerm.DUE_DATE_TIMESTAMP);
booleanQuery.add(querydueDate, BooleanClauseOccur.MUST_NOT);
/** Check condition extendDate > dueDate **/
MultiMatchQuery query = new MultiMatchQuery(String.valueOf(1));
query.addFields(DossierTerm.COMPARE_DELAY_DATE);
booleanQuery.add(query, BooleanClauseOccur.MUST);
}
return booleanQuery;
}
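/**
 * Returns the template name of the dossier template identified by its code within the group,
 * or an empty string when no such template exists.
 */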
private String getDossierTemplateName(long groupId, String dossierTemplateCode) {
String name = StringPool.BLANK;
DossierTemplate template = dossierTemplatePersistence.fetchByG_DT_TPLNO(groupId, dossierTemplateCode);
if (Validator.isNotNull(template)) {
name = template.getTemplateName();
}
return name;
}
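/**
 * Resolves the instruction note to show on a dossier: the instruction note of the ProcessOption
 * matching the service config and dossier template is preferred; when no option or note is
 * found, the method falls back to the process text of the ServiceInfo.
 */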
private String getDossierNote(String serviceInfoCode, String govAgencyCode, String dossierTemplateNo,
long groupId) {
String dossierNote = StringPool.BLANK;
ServiceInfo serviceInfo = serviceInfoPersistence.fetchBySC_GI(serviceInfoCode, groupId);
try {
ServiceConfig config = ServiceConfigLocalServiceUtil.getBySICodeAndGAC(groupId, serviceInfoCode,
govAgencyCode);
ProcessOption option = ProcessOptionLocalServiceUtil.getByDTPLNoAndServiceCF(groupId, dossierTemplateNo,
config.getServiceConfigId());
dossierNote = option.getInstructionNote();
if (Validator.isNull(dossierNote)) {
throw new Exception();
}
} catch (Exception e) {
_log.debug(e);
if (Validator.isNotNull(serviceInfo)) {
dossierNote = serviceInfo.getProcessText();
}
}
return dossierNote;
}
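/**
 * Overload used when the ProcessOption and ServiceInfo have already been looked up by the caller.
 */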
private String getDossierNote(ServiceInfo serviceInfo, ProcessOption option) {
if (option != null) {
return option.getInstructionNote();
} else if(Validator.isNotNull(serviceInfo)){
return serviceInfo.getProcessText();
}
return StringPool.BLANK;
}
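/**
 * Counts dossiers of the given group/company/agency/service/template whose dossierStatus is
 * different from the supplied status (the NOTDS part of the finder); delegates straight to
 * the persistence finder. Illustrative use (hypothetical argument values, not taken from
 * calling code):
 * <pre>{@code
 * long notDone = countDossierByG_C_GAC_SC_DTNO_NOTDS(groupId, companyId, "GOV01", "SV001", "TPL01", "done");
 * }</pre>
 */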
public long countDossierByG_C_GAC_SC_DTNO_NOTDS(long groupId, long companyId, String govAgencyCode,
String serviceCode, String dossierTemplateNo, String dossierStatus) {
return dossierPersistence.countByG_C_GAC_SC_DTNO_NOTDS(groupId, companyId, govAgencyCode, serviceCode,
dossierTemplateNo, dossierStatus);
}
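/**
 * Returns the server number of the first ServerConfig registered for the group; lookup
 * failures are logged and an empty string is returned instead.
 */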
private String getServerNo(long groupId) {
try {
List<ServerConfig> sc = ServerConfigLocalServiceUtil.getGroupId(groupId);
// _log.debug("sc.get(0).getServerNo():" + sc.get(0).getServerNo());
return sc.get(0).getServerNo();
} catch (Exception e) {
_log.error(e);
return StringPool.BLANK;
}
}
// TrungDK: Process
public List<Dossier> getDossierByG_NOTO_DS(int originality, String dossierStatus) {
return dossierPersistence.findByNOTO_DS(originality, dossierStatus);
}
public List<Dossier> getDossierByG_NOTO_DS(int[] originalityArr, String dossierStatus) {
return dossierPersistence.findByNOTO_DS(originalityArr, dossierStatus);
}
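/**
 * Garbage collector: removes dossiers matching the given originalities and status that are
 * older than DossierTerm.GARBAGE_COLLECTOR_TIME minutes, measured from their create date.
 */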
public void removeDossierByG_NOTO_DS(int[] originalityArr, String dossierStatus) {
List<Dossier> lstDossiers = dossierPersistence.findByNOTO_DS(originalityArr, dossierStatus);
Date now = new Date();
if (lstDossiers != null && lstDossiers.size() > 0) {
for (Dossier dossier : lstDossiers) {
long diffInMillies = Math.abs(now.getTime() - dossier.getCreateDate().getTime());
long diff = TimeUnit.MINUTES.convert(diffInMillies, TimeUnit.MILLISECONDS);
try {
if (diff > DossierTerm.GARBAGE_COLLECTOR_TIME)
dossierPersistence.remove(dossier.getDossierId());
} catch (NoSuchDossierException e) {
_log.error(e);
}
}
}
}
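/**
 * Same garbage collection for a single originality, with a threshold of
 * DossierTerm.GARBAGE_COLLECTOR_GROUP_DOSSIER hours instead of minutes.
 */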
public void removeDossierByF_OG_DS(int originality, String dossierStatus) {
List<Dossier> lstDossiers = dossierPersistence.findByF_OG_DS(originality, dossierStatus);
Date now = new Date();
if (lstDossiers != null && lstDossiers.size() > 0) {
for (Dossier dossier : lstDossiers) {
long diffInMillies = Math.abs(now.getTime() - dossier.getCreateDate().getTime());
long diff = TimeUnit.HOURS.convert(diffInMillies, TimeUnit.MILLISECONDS);
try {
if (diff > DossierTerm.GARBAGE_COLLECTOR_GROUP_DOSSIER)
dossierPersistence.remove(dossier.getDossierId());
} catch (NoSuchDossierException e) {
_log.error(e);
}
}
}
}
public static final String CLASS_NAME = Dossier.class.getName();
//LamTV: Process get Dossier by dossierId, govAgency, serviceProcess
public Dossier getByIdAndGovService(long groupId, String serviceCode, String govAgencyCode, long dossierId) {
return dossierPersistence.fetchByF_GID_GOV_DID(groupId, govAgencyCode, serviceCode, dossierId);
}
public List<Dossier> getByNotO_DS_SC_GC(long groupId, int originality, String dossierStatus, String serviceCode, String govAgencyCode) {
return dossierPersistence.findByG_NOTO_DS_SC_GC(groupId, originality, dossierStatus, serviceCode, govAgencyCode);
}
//LamTV_Process update dossier
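/**
 * Updates the applicant, contact, postal and delegate information of an existing dossier.
 * Only non-null parameters overwrite the stored values. When viaPostal is 2 the postal
 * address fields are kept, any other value clears them. When isSameAsApplicant is true the
 * delegate fields are copied from the applicant/contact values instead of the delegate*
 * parameters, and the city/district/ward names are resolved from the ADMINISTRATIVE_REGION
 * dictionary.
 */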
@Indexable(type = IndexableType.REINDEX)
public Dossier initUpdateDossier(long groupId, long id, String applicantName, String applicantIdType,
String applicantIdNo, String applicantIdDate, String address, String cityCode, String cityName,
String districtCode, String districtName, String wardCode, String wardName, String contactName,
String contactTelNo, String contactEmail, String dossierTemplateNo, Integer viaPostal, String postalAddress,
String postalCityCode, String postalCityName, String postalTelNo, String applicantNote,
boolean isSameAsApplicant, String delegateName, String delegateIdNo, String delegateTelNo,
String delegateEmail, String delegateAddress, String delegateCityCode, String delegateDistrictCode,
String delegateWardCode, Long sampleCount, ServiceContext serviceContext) {
Date now = new Date();
long userId = serviceContext.getUserId();
User auditUser = userPersistence.fetchByPrimaryKey(userId);
Dossier dossier = dossierPersistence.fetchByPrimaryKey(id);
dossier.setModifiedDate(now);
dossier.setUserId(userId);
dossier.setUserName(auditUser.getFullName());
//
if (Validator.isNotNull(applicantName))
dossier.setApplicantName(applicantName);
if (Validator.isNotNull(applicantIdType))
dossier.setApplicantIdType(applicantIdType);
if (Validator.isNotNull(applicantIdNo))
dossier.setApplicantIdNo(applicantIdNo);
if (Validator.isNotNull(applicantIdDate))
dossier.setApplicantIdDate(
APIDateTimeUtils.convertStringToDate(applicantIdDate, APIDateTimeUtils._NORMAL_PARTTERN));
if (Validator.isNotNull(address))
dossier.setAddress(address);
if (Validator.isNotNull(cityCode))
dossier.setCityCode(cityCode);
if (Validator.isNotNull(cityName))
dossier.setCityName(cityName);
if (Validator.isNotNull(districtCode))
dossier.setDistrictCode(districtCode);
if (Validator.isNotNull(districtName))
dossier.setDistrictName(districtName);
if (Validator.isNotNull(wardCode))
dossier.setWardCode(wardCode);
if (Validator.isNotNull(wardName))
dossier.setWardName(wardName);
if (Validator.isNotNull(contactName))
dossier.setContactName(contactName);
if (Validator.isNotNull(contactEmail))
dossier.setContactEmail(contactEmail);
if (Validator.isNotNull(contactTelNo))
dossier.setContactTelNo(contactTelNo);
if (Validator.isNotNull(sampleCount))
dossier.setSampleCount(sampleCount);
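// viaPostal == 2 keeps the postal delivery details; any other value clears them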
if (Validator.isNotNull(viaPostal)) {
dossier.setViaPostal(viaPostal);
if (viaPostal == 1) {
dossier.setPostalAddress(StringPool.BLANK);
dossier.setPostalCityCode(StringPool.BLANK);
dossier.setPostalTelNo(StringPool.BLANK);
} else if (viaPostal == 2) {
if (Validator.isNotNull(postalAddress))
dossier.setPostalAddress(postalAddress);
if (Validator.isNotNull(postalCityCode))
dossier.setPostalCityCode(postalCityCode);
if (Validator.isNotNull(postalTelNo))
dossier.setPostalTelNo(postalTelNo);
if (Validator.isNotNull(postalCityName))
dossier.setPostalCityName(postalCityName);
} else {
dossier.setPostalAddress(StringPool.BLANK);
dossier.setPostalCityCode(StringPool.BLANK);
dossier.setPostalTelNo(StringPool.BLANK);
}
}
if (isSameAsApplicant) {
dossier.setDelegateName(applicantName);
dossier.setDelegateIdNo(applicantIdNo);
dossier.setDelegateTelNo(contactTelNo);
dossier.setDelegateAddress(address);
dossier.setDelegateEmail(contactEmail);
if (Validator.isNotNull(cityCode)) {
dossier.setDelegateCityCode(cityCode);
dossier.setDelegateCityName(getDictItemName(dossier.getGroupId(), ADMINISTRATIVE_REGION, cityCode));
}
if (Validator.isNotNull(districtCode)) {
dossier.setDelegateDistrictCode(districtCode);
dossier.setDelegateDistrictName(
getDictItemName(dossier.getGroupId(), ADMINISTRATIVE_REGION, districtCode));
}
if (Validator.isNotNull(wardCode)) {
dossier.setDelegateWardCode(wardCode);
dossier.setDelegateWardName(getDictItemName(dossier.getGroupId(), ADMINISTRATIVE_REGION, wardCode));
}
} else {
dossier.setDelegateName(delegateName);
dossier.setDelegateIdNo(delegateIdNo);
dossier.setDelegateTelNo(delegateTelNo);
dossier.setDelegateAddress(delegateAddress);
dossier.setDelegateEmail(delegateEmail);
if (Validator.isNotNull(delegateCityCode)) {
dossier.setDelegateCityCode(delegateCityCode);
dossier.setDelegateCityName(
getDictItemName(dossier.getGroupId(), ADMINISTRATIVE_REGION, delegateCityCode));
}
if (Validator.isNotNull(delegateDistrictCode)) {
dossier.setDelegateDistrictCode(delegateDistrictCode);
dossier.setDelegateDistrictName(
getDictItemName(dossier.getGroupId(), ADMINISTRATIVE_REGION, delegateDistrictCode));
}
if (Validator.isNotNull(delegateWardCode)) {
dossier.setDelegateWardCode(delegateWardCode);
dossier.setDelegateWardName(
getDictItemName(dossier.getGroupId(), ADMINISTRATIVE_REGION, delegateWardCode));
}
}
dossier.setApplicantNote(applicantNote);
return dossierPersistence.update(dossier);
}
@Indexable(type = IndexableType.REINDEX)
public Dossier initUpdateDossier(long groupId, long id, String applicantName, String applicantIdType,
String applicantIdNo, String applicantIdDate, String address, String cityCode, String cityName,
String districtCode, String districtName, String wardCode, String wardName, String contactName,
String contactTelNo, String contactEmail, String dossierTemplateNo, Integer viaPostal, String postalAddress,
String postalCityCode, String postalCityName, String postalTelNo, String applicantNote,
boolean isSameAsApplicant, String delegateName, String delegateIdNo, String delegateTelNo,
String delegateEmail, String delegateAddress, String delegateCityCode, String delegateDistrictCode,
String delegateWardCode, Long sampleCount, String dossierName, ServiceContext serviceContext) {
Date now = new Date();
long userId = serviceContext.getUserId();
User auditUser = userPersistence.fetchByPrimaryKey(userId);
Dossier dossier = dossierPersistence.fetchByPrimaryKey(id);
dossier.setModifiedDate(now);
dossier.setUserId(userId);
dossier.setUserName(auditUser.getFullName());
//
if (Validator.isNotNull(applicantName))
dossier.setApplicantName(applicantName);
if (Validator.isNotNull(applicantIdType))
dossier.setApplicantIdType(applicantIdType);
if (Validator.isNotNull(applicantIdNo))
dossier.setApplicantIdNo(applicantIdNo);
if (Validator.isNotNull(applicantIdDate))
dossier.setApplicantIdDate(
APIDateTimeUtils.convertStringToDate(applicantIdDate, APIDateTimeUtils._NORMAL_PARTTERN));
if (Validator.isNotNull(address))
dossier.setAddress(address);
if (Validator.isNotNull(cityCode))
dossier.setCityCode(cityCode);
if (Validator.isNotNull(cityName))
dossier.setCityName(cityName);
if (Validator.isNotNull(districtCode))
dossier.setDistrictCode(districtCode);
if (Validator.isNotNull(districtName))
dossier.setDistrictName(districtName);
if (Validator.isNotNull(wardCode))
dossier.setWardCode(wardCode);
if (Validator.isNotNull(wardName))
dossier.setWardName(wardName);
if (Validator.isNotNull(contactName))
dossier.setContactName(contactName);
if (Validator.isNotNull(contactEmail))
dossier.setContactEmail(contactEmail);
if (Validator.isNotNull(contactTelNo))
dossier.setContactTelNo(contactTelNo);
if (Validator.isNotNull(sampleCount))
dossier.setSampleCount(sampleCount);
if (Validator.isNotNull(dossierName)) {
dossier.setDossierName(dossierName);
}
if (Validator.isNotNull(viaPostal)) {
dossier.setViaPostal(viaPostal);
if (viaPostal == 1) {
dossier.setPostalAddress(StringPool.BLANK);
dossier.setPostalCityCode(StringPool.BLANK);
dossier.setPostalTelNo(StringPool.BLANK);
} else if (viaPostal == 2) {
if (Validator.isNotNull(postalAddress))
dossier.setPostalAddress(postalAddress);
if (Validator.isNotNull(postalCityCode))
dossier.setPostalCityCode(postalCityCode);
if (Validator.isNotNull(postalTelNo))
dossier.setPostalTelNo(postalTelNo);
if (Validator.isNotNull(postalCityName))
dossier.setPostalCityName(postalCityName);
} else {
dossier.setPostalAddress(StringPool.BLANK);
dossier.setPostalCityCode(StringPool.BLANK);
dossier.setPostalTelNo(StringPool.BLANK);
}
}
if (isSameAsApplicant) {
dossier.setDelegateName(applicantName);
dossier.setDelegateIdNo(applicantIdNo);
dossier.setDelegateTelNo(contactTelNo);
dossier.setDelegateAddress(address);
dossier.setDelegateEmail(contactEmail);
if (Validator.isNotNull(cityCode)) {
dossier.setDelegateCityCode(cityCode);
dossier.setDelegateCityName(getDictItemName(dossier.getGroupId(), ADMINISTRATIVE_REGION, cityCode));
}
if (Validator.isNotNull(districtCode)) {
dossier.setDelegateDistrictCode(districtCode);
dossier.setDelegateDistrictName(
getDictItemName(dossier.getGroupId(), ADMINISTRATIVE_REGION, districtCode));
}
if (Validator.isNotNull(wardCode)) {
dossier.setDelegateWardCode(wardCode);
dossier.setDelegateWardName(getDictItemName(dossier.getGroupId(), ADMINISTRATIVE_REGION, wardCode));
}
} else {
dossier.setDelegateName(delegateName);
dossier.setDelegateIdNo(delegateIdNo);
dossier.setDelegateTelNo(delegateTelNo);
dossier.setDelegateAddress(delegateAddress);
dossier.setDelegateEmail(delegateEmail);
if (Validator.isNotNull(delegateCityCode)) {
dossier.setDelegateCityCode(delegateCityCode);
dossier.setDelegateCityName(
getDictItemName(dossier.getGroupId(), ADMINISTRATIVE_REGION, delegateCityCode));
}
if (Validator.isNotNull(delegateDistrictCode)) {
dossier.setDelegateDistrictCode(delegateDistrictCode);
dossier.setDelegateDistrictName(
getDictItemName(dossier.getGroupId(), ADMINISTRATIVE_REGION, delegateDistrictCode));
}
if (Validator.isNotNull(delegateWardCode)) {
dossier.setDelegateWardCode(delegateWardCode);
dossier.setDelegateWardName(
getDictItemName(dossier.getGroupId(), ADMINISTRATIVE_REGION, delegateWardCode));
}
}
dossier.setApplicantNote(applicantNote);
return dossierPersistence.update(dossier);
}
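// Overwrite the applicant contact details and mirror them onto the delegate fields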
@Indexable(type = IndexableType.REINDEX)
public Dossier updateApplicantInfo(long dossierId,
Date applicantIdDate,
String applicantIdNo,
String applicantIdType,
String applicantName,
String address,
String cityCode,
String cityName,
String districtCode,
String districtName,
String wardCode,
String wardName,
String contactEmail,
String contactTelNo) throws NoSuchDossierException {
Dossier dossier = dossierPersistence.findByPrimaryKey(dossierId);
dossier.setApplicantIdDate(applicantIdDate);
dossier.setApplicantIdNo(applicantIdNo);
dossier.setApplicantIdType(applicantIdType);
dossier.setApplicantName(applicantName);
dossier.setAddress(address);
dossier.setCityCode(cityCode);
dossier.setCityName(cityName);
dossier.setDistrictCode(districtCode);
dossier.setDistrictName(districtName);
dossier.setWardCode(wardCode);
dossier.setWardName(wardName);
dossier.setContactEmail(contactEmail);
dossier.setContactTelNo(contactTelNo);
dossier.setDelegateAddress(address);
dossier.setDelegateCityCode(cityCode);
dossier.setDelegateCityName(cityName);
dossier.setDelegateDistrictCode(districtCode);
dossier.setDelegateDistrictName(districtName);
dossier.setDelegateEmail(contactEmail);
dossier.setDelegateIdNo(applicantIdNo);
dossier.setDelegateName(applicantName);
dossier.setDelegateTelNo(contactTelNo);
dossier.setDelegateWardCode(wardCode);
dossier.setDelegateWardName(wardName);
return dossierPersistence.update(dossier);
}
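// Sync date, number and note fields from the JSON payload onto the dossier, updating only values that differ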
@Indexable(type = IndexableType.REINDEX)
public Dossier updateDossier(long dossierId, JSONObject obj) throws NoSuchDossierException {
// _log.debug("Object dossier update: " + obj.toJSONString());
Dossier dossier = dossierPersistence.findByPrimaryKey(dossierId);
if (obj.has(DossierTerm.DOSSIER_NOTE)) {
if (!obj.getString(DossierTerm.DOSSIER_NOTE).equals(dossier.getDossierNote())) {
dossier.setDossierNote(obj.getString(DossierTerm.DOSSIER_NOTE));
}
}
if (obj.has(DossierTerm.EXTEND_DATE) && Validator.isNotNull(obj.get(DossierTerm.EXTEND_DATE))
&& GetterUtil.getLong(obj.get(DossierTerm.EXTEND_DATE)) > 0) {
if (dossier.getExtendDate() == null || obj.getLong(DossierTerm.EXTEND_DATE) != dossier.getExtendDate().getTime()) {
dossier.setExtendDate(new Date(obj.getLong(DossierTerm.EXTEND_DATE)));
}
}
if (obj.has(DossierTerm.DOSSIER_NO)) {
//_log.debug("Sync dossier no");
if (Validator.isNotNull(obj.getString(DossierTerm.DOSSIER_NO)) && !obj.getString(DossierTerm.DOSSIER_NO).equals(dossier.getDossierNo())) {
//_log.debug("Sync set dossier no");
dossier.setDossierNo(obj.getString(DossierTerm.DOSSIER_NO));
}
}
if (obj.has(DossierTerm.DUE_DATE) && Validator.isNotNull(obj.get(DossierTerm.DUE_DATE))
&& GetterUtil.getLong(obj.get(DossierTerm.DUE_DATE)) > 0) {
if (dossier.getDueDate() == null || obj.getLong(DossierTerm.DUE_DATE) != dossier.getDueDate().getTime()) {
dossier.setDueDate(new Date(obj.getLong(DossierTerm.DUE_DATE)));
}
}
if (obj.has(DossierTerm.FINISH_DATE) && Validator.isNotNull(obj.get(DossierTerm.FINISH_DATE))
&& GetterUtil.getLong(obj.get(DossierTerm.FINISH_DATE)) > 0) {
if (dossier.getFinishDate() == null || obj.getLong(DossierTerm.FINISH_DATE) != dossier.getFinishDate().getTime()) {
dossier.setFinishDate(new Date(obj.getLong(DossierTerm.FINISH_DATE)));
}
}
if (obj.has(DossierTerm.RECEIVE_DATE) && Validator.isNotNull(obj.get(DossierTerm.RECEIVE_DATE))
&& GetterUtil.getLong(obj.get(DossierTerm.RECEIVE_DATE)) > 0) {
if (dossier.getReceiveDate() == null || obj.getLong(DossierTerm.RECEIVE_DATE) != dossier.getReceiveDate().getTime()) {
dossier.setReceiveDate(new Date(obj.getLong(DossierTerm.RECEIVE_DATE)));
}
}
if (obj.has(DossierTerm.SUBMIT_DATE) && Validator.isNotNull(obj.get(DossierTerm.SUBMIT_DATE))
&& GetterUtil.getLong(obj.get(DossierTerm.SUBMIT_DATE)) > 0) {
if (dossier.getSubmitDate() == null || (dossier.getSubmitDate() != null && obj.getLong(DossierTerm.SUBMIT_DATE) != dossier.getSubmitDate().getTime())) {
dossier.setSubmitDate(new Date(obj.getLong(DossierTerm.SUBMIT_DATE)));
}
}
if (obj.has(DossierTerm.DOSSIER_NOTE)) {
if (dossier.getDossierNote() == null || !obj.getString(DossierTerm.DOSSIER_NOTE).equals(dossier.getDossierNote())) {
dossier.setDossierNote(obj.getString(DossierTerm.DOSSIER_NOTE));
}
}
if (obj.has(DossierTerm.SUBMISSION_NOTE)) {
if (!obj.getString(DossierTerm.SUBMISSION_NOTE).equals(dossier.getSubmissionNote())) {
dossier.setSubmissionNote(obj.getString(DossierTerm.SUBMISSION_NOTE));
}
}
if (obj.has(DossierTerm.RELEASE_DATE) && Validator.isNotNull(obj.get(DossierTerm.RELEASE_DATE))
&& GetterUtil.getLong(obj.get(DossierTerm.RELEASE_DATE)) > 0) {
if (dossier.getReleaseDate() == null || obj.getLong(DossierTerm.RELEASE_DATE) != dossier.getReleaseDate().getTime()) {
dossier.setReleaseDate(new Date(obj.getLong(DossierTerm.RELEASE_DATE)));
}
}
if (obj.has(DossierTerm.LOCK_STATE)) {
if (!obj.getString(DossierTerm.LOCK_STATE).equals(dossier.getLockState())) {
dossier.setLockState(obj.getString(DossierTerm.LOCK_STATE));
}
}
if (obj.has(DossierTerm.BRIEF_NOTE)) {
if (!obj.getString(DossierTerm.BRIEF_NOTE).equals(dossier.getBriefNote())) {
dossier.setBriefNote(obj.getString(DossierTerm.BRIEF_NOTE));
}
}
return dossierPersistence.update(dossier);
}
public Dossier getByOrigin(long groupId, long originDossierId) {
return dossierPersistence.fetchByG_O_DID_First(groupId, originDossierId, null);
}
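// Publish a dossier: create it when no dossier with the reference UID exists in the group, otherwise update the existing record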
@Indexable(type = IndexableType.REINDEX)
public Dossier publishDossier(long groupId, long dossierId, String referenceUid, int counter, String serviceCode,
String serviceName, String govAgencyCode, String govAgencyName, String applicantName,
String applicantIdType, String applicantIdNo, Date applicantIdDate, String address, String cityCode,
String cityName, String districtCode, String districtName, String wardCode, String wardName,
String contactName, String contactTelNo, String contactEmail, String dossierTemplateNo, String password,
int viaPostal, String postalAddress, String postalCityCode, String postalCityName, String postalTelNo,
boolean online, boolean notification, String applicantNote, int originality, Date createDate,
Date modifiedDate, Date submitDate, Date receiveDate, Date dueDate, Date releaseDate, Date finishDate,
Date cancellingDate, Date correctingDate, Date endorsementDate, Date extendDate, Date processDate,
String dossierNo, String dossierStatus, String dossierStatusText, String dossierSubStatus,
String dossierSubStatusText, long dossierActionId, String submissionNote, String lockState,
String delegateName, String delegateIdNo, String delegateTelNo, String delegateEmail,
String delegateAddress, String delegateCityCode, String delegateCityName, String delegateDistrictCode,
String delegateDistrictName, String delegateWardCode, String delegateWardName, double durationCount,
int durationUnit, String dossierName, String processNo, String metaData, ServiceContext context) throws PortalException {
long userId = context.getUserId();
User auditUser = userPersistence.fetchByPrimaryKey(userId);
validateInit(groupId, dossierId, referenceUid, serviceCode, govAgencyCode, address, cityCode, districtCode,
wardCode, contactName, contactTelNo, contactEmail, dossierTemplateNo);
Dossier dossier = null;
dossier = getByRef(groupId, referenceUid);
if (dossier == null) {
String dossierTemplateName = getDossierTemplateName(groupId, dossierTemplateNo);
dossierId = counterLocalService.increment(Dossier.class.getName());
String dossierNote = getDossierNote(serviceCode, govAgencyCode, dossierTemplateNo, groupId);
dossier = dossierPersistence.create(dossierId);
dossier.setCreateDate(createDate);
dossier.setModifiedDate(modifiedDate);
dossier.setSubmitDate(submitDate);
dossier.setReceiveDate(receiveDate);
dossier.setDueDate(dueDate);
dossier.setReleaseDate(releaseDate);
dossier.setFinishDate(finishDate);
dossier.setCancellingDate(cancellingDate);
dossier.setCorrecttingDate(correctingDate);
dossier.setEndorsementDate(endorsementDate);
dossier.setExtendDate(extendDate);
dossier.setProcessDate(processDate);
dossier.setCompanyId(context.getCompanyId());
dossier.setGroupId(groupId);
dossier.setUserId(userId);
dossier.setUserName(auditUser.getFullName());
// Add extent fields
dossier.setReferenceUid(referenceUid);
dossier.setCounter(counter);
dossier.setServiceCode(serviceCode);
dossier.setServiceName(serviceName);
dossier.setGovAgencyCode(govAgencyCode);
dossier.setGovAgencyName(govAgencyName);
dossier.setDossierTemplateNo(dossierTemplateNo);
dossier.setDossierTemplateName(dossierTemplateName);
dossier.setApplicantName(applicantName);
dossier.setApplicantIdType(applicantIdType);
dossier.setApplicantIdNo(applicantIdNo);
dossier.setApplicantIdDate(applicantIdDate);
dossier.setPassword(password);
dossier.setOnline(online);
dossier.setDossierNote(dossierNote);
dossier.setAddress(address);
dossier.setCityCode(cityCode);
dossier.setCityName(cityName);
dossier.setDistrictCode(districtCode);
dossier.setDistrictName(districtName);
dossier.setWardCode(wardCode);
dossier.setWardName(wardName);
dossier.setContactName(contactName);
dossier.setContactEmail(contactEmail);
dossier.setContactTelNo(contactTelNo);
dossier.setViaPostal(viaPostal);
dossier.setPostalAddress(postalAddress);
dossier.setPostalCityCode(postalCityCode);
dossier.setPostalCityName(postalCityName);
dossier.setPostalTelNo(postalTelNo);
dossier.setApplicantNote(applicantNote);
// dossier.setServerNo(getServerNo(groupId));
dossier.setOriginality(originality);
//
dossier.setDossierNo(dossierNo);
dossier.setDossierStatus(dossierStatus);
dossier.setDossierStatusText(dossierStatusText);
dossier.setDossierSubStatus(dossierSubStatus);
dossier.setDossierSubStatusText(dossierSubStatusText);
dossier.setDossierActionId(dossierActionId);
dossier.setSubmissionNote(submissionNote);
dossier.setLockState(lockState);
dossier.setDelegateName(delegateName);
dossier.setDelegateIdNo(delegateIdNo);
dossier.setDelegateTelNo(delegateTelNo);
dossier.setDelegateEmail(delegateEmail);
dossier.setDelegateAddress(delegateAddress);
dossier.setDelegateCityCode(delegateCityCode);
dossier.setDelegateCityName(delegateCityName);
dossier.setDelegateDistrictCode(delegateDistrictCode);
dossier.setDelegateDistrictName(delegateDistrictName);
dossier.setDelegateWardCode(delegateWardCode);
dossier.setDelegateWardName(delegateWardName);
dossier.setDurationCount(durationCount);
dossier.setDurationUnit(durationUnit);
dossier.setDossierName(dossierName);
dossier.setProcessNo(processNo);
dossier.setMetaData(metaData);
dossier = dossierPersistence.update(dossier);
} else {
dossier.setModifiedDate(modifiedDate);
dossier.setSubmitDate(submitDate);
dossier.setReceiveDate(receiveDate);
dossier.setDueDate(dueDate);
dossier.setReleaseDate(releaseDate);
dossier.setFinishDate(finishDate);
dossier.setCancellingDate(cancellingDate);
dossier.setCorrecttingDate(correctingDate);
dossier.setEndorsementDate(endorsementDate);
dossier.setExtendDate(extendDate);
dossier.setProcessDate(processDate);
if (Validator.isNotNull(address))
dossier.setAddress(address);
if (Validator.isNotNull(cityCode))
dossier.setCityCode(cityCode);
if (Validator.isNotNull(cityName))
dossier.setCityName(cityName);
if (Validator.isNotNull(districtCode))
dossier.setDistrictCode(districtCode);
if (Validator.isNotNull(districtName))
dossier.setDistrictName(districtName);
if (Validator.isNotNull(wardCode))
dossier.setWardCode(wardCode);
if (Validator.isNotNull(wardName))
dossier.setWardName(wardName);
if (Validator.isNotNull(contactName))
dossier.setContactName(contactName);
if (Validator.isNotNull(contactEmail))
dossier.setContactEmail(contactEmail);
if (Validator.isNotNull(contactTelNo))
dossier.setContactTelNo(contactTelNo);
if (Validator.isNotNull(dossierNo))
dossier.setDossierNo(dossierNo);
if (Validator.isNotNull(dossierStatus))
dossier.setDossierStatus(dossierStatus);
if (Validator.isNotNull(dossierStatusText))
dossier.setDossierStatusText(dossierStatusText);
if (Validator.isNotNull(dossierSubStatus))
dossier.setDossierSubStatus(dossierSubStatus);
if (Validator.isNotNull(dossierSubStatusText))
dossier.setDossierSubStatusText(dossierSubStatusText);
if (Validator.isNotNull(dossierActionId))
dossier.setDossierActionId(dossierActionId);
if (Validator.isNotNull(submissionNote))
dossier.setSubmissionNote(submissionNote);
if (Validator.isNotNull(lockState))
dossier.setLockState(lockState);
if (Validator.isNotNull(delegateName))
dossier.setDelegateName(delegateName);
if (Validator.isNotNull(delegateIdNo))
dossier.setDelegateIdNo(delegateIdNo);
if (Validator.isNotNull(delegateTelNo))
dossier.setDelegateTelNo(delegateTelNo);
if (Validator.isNotNull(delegateEmail))
dossier.setDelegateEmail(delegateEmail);
if (Validator.isNotNull(delegateAddress))
dossier.setDelegateAddress(delegateAddress);
if (Validator.isNotNull(delegateCityCode))
dossier.setDelegateCityCode(delegateCityCode);
if (Validator.isNotNull(delegateCityName))
dossier.setDelegateCityName(delegateCityName);
if (Validator.isNotNull(delegateDistrictCode))
dossier.setDelegateDistrictCode(delegateDistrictCode);
if (Validator.isNotNull(delegateDistrictName))
dossier.setDelegateDistrictName(delegateDistrictName);
if (Validator.isNotNull(delegateWardCode))
dossier.setDelegateWardCode(delegateWardCode);
if (Validator.isNotNull(delegateWardName))
dossier.setDelegateWardName(delegateWardName);
if (Validator.isNotNull(durationCount))
dossier.setDurationCount(durationCount);
if (Validator.isNotNull(durationUnit))
dossier.setDurationUnit(durationUnit);
if (Validator.isNotNull(dossierName))
dossier.setDossierName(dossierName);
if (Validator.isNotNull(processNo))
dossier.setProcessNo(processNo);
if (Validator.isNotNull(metaData))
dossier.setMetaData(metaData);
dossier.setViaPostal(viaPostal);
if (viaPostal == 1) {
dossier.setPostalAddress(StringPool.BLANK);
dossier.setPostalCityCode(StringPool.BLANK);
dossier.setPostalTelNo(StringPool.BLANK);
} else if (viaPostal == 2) {
if (Validator.isNotNull(postalAddress))
dossier.setPostalAddress(postalAddress);
if (Validator.isNotNull(postalCityCode))
dossier.setPostalCityCode(postalCityCode);
if (Validator.isNotNull(postalTelNo))
dossier.setPostalTelNo(postalTelNo);
if (Validator.isNotNull(postalCityName))
dossier.setPostalCityName(postalCityName);
} else {
dossier.setPostalAddress(StringPool.BLANK);
dossier.setPostalCityCode(StringPool.BLANK);
dossier.setPostalTelNo(StringPool.BLANK);
}
// if (Validator.isNotNull(applicantNote))
dossier.setApplicantNote(applicantNote);
dossier = dossierPersistence.update(dossier);
}
return dossier;
}
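// Roll the dossier back to the status defined by the process step of the given action and mark the action as waiting for processing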
@Indexable(type = IndexableType.REINDEX)
public Dossier rollback(Dossier dossier, DossierAction dossierAction) {
ProcessStep processStep = ProcessStepLocalServiceUtil.fetchBySC_GID(dossierAction.getStepCode(), dossier.getGroupId(), dossierAction.getServiceProcessId());
if (processStep != null) {
dossierAction.setState(DossierActionTerm.STATE_WAITING_PROCESSING);
dossierAction = DossierActionLocalServiceUtil.updateState(dossierAction.getDossierActionId(), DossierActionTerm.STATE_WAITING_PROCESSING);
JSONObject jsonDataStatusText = getStatusText(dossier.getGroupId(), DossierTerm.DOSSIER_SATUS_DC_CODE, processStep.getDossierStatus(), processStep.getDossierSubStatus());
dossier.setDossierActionId(dossierAction.getDossierActionId());
dossier.setDossierStatus(processStep.getDossierStatus());
dossier.setDossierStatusText(jsonDataStatusText != null ? jsonDataStatusText.getString(processStep.getDossierStatus()) : StringPool.BLANK);
dossier.setDossierSubStatus(processStep.getDossierSubStatus());
dossier.setDossierSubStatusText(jsonDataStatusText != null ? jsonDataStatusText.getString(processStep.getDossierSubStatus()) : StringPool.BLANK);
}
return dossierPersistence.update(dossier);
}
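// Resolve display names for the given status / sub-status codes from the dictionary collection; returns null when the collection or status is missing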
private JSONObject getStatusText(long groupId, String collectionCode, String curStatus, String curSubStatus) {
JSONObject jsonData = null;
DictCollection dc = DictCollectionLocalServiceUtil.fetchByF_dictCollectionCode(collectionCode, groupId);
if (Validator.isNotNull(dc) && Validator.isNotNull(curStatus)) {
jsonData = JSONFactoryUtil.createJSONObject();
DictItem it = DictItemLocalServiceUtil.fetchByF_dictItemCode(curStatus, dc.getPrimaryKey(), groupId);
if (Validator.isNotNull(it)) {
jsonData.put(curStatus, it.getItemName());
if (Validator.isNotNull(curSubStatus)) {
DictItem dItem = DictItemLocalServiceUtil.fetchByF_dictItemCode(curSubStatus, dc.getPrimaryKey(),
groupId);
if (Validator.isNotNull(dItem)) {
jsonData.put(curSubStatus, dItem.getItemName());
}
}
}
}
return jsonData;
}
public long countDossierByGroup(long groupId) {
return dossierPersistence.countByG(groupId);
}
public List<Dossier> findDossierByGroup(long groupId) {
return dossierPersistence.findByG(groupId);
}
public List<Dossier> findByDN_AN(String dossierNo, String applicantIdNo) {
return dossierPersistence.findByDN_AN(dossierNo, applicantIdNo);
}
public List<Dossier> getByU_G_C_DS_SC_GC_O(long userId, long groupId, String serviceCode, String govAgencyCode, long dossierActionId, int originality) {
return dossierPersistence.findByU_G_GAC_SC_DTNO_DAI_O(userId, groupId, govAgencyCode, serviceCode, dossierActionId, originality);
}
public List<Dossier> findByVIAPOSTAL(int viaPostal) {
return dossierPersistence.findByVIAPOSTAL(viaPostal);
}
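// Clone a dossier into a new record with a fresh id, counter and reference UID; the status fields are intentionally left blank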
@Indexable(type = IndexableType.REINDEX)
public Dossier cloneDossier(Dossier srcDossier) throws PortalException {
long desDossierId = counterLocalService.increment(Dossier.class.getName());
Dossier desDossier = dossierPersistence.create(desDossierId);
int counter = DossierNumberGenerator.counterDossier(srcDossier.getUserId(), desDossierId);
String referenceUid = DossierNumberGenerator.generateReferenceUID(desDossier.getGroupId());
desDossier.setCounter(counter);
desDossier.setReferenceUid(referenceUid);
LinkedHashMap<String, Object> params = new LinkedHashMap<String, Object>();
params.put(DossierTerm.GOV_AGENCY_CODE, srcDossier.getGovAgencyCode());
params.put(DossierTerm.SERVICE_CODE, srcDossier.getServiceCode());
params.put(DossierTerm.DOSSIER_TEMPLATE_NO, srcDossier.getDossierTemplateNo());
params.put(DossierTerm.DOSSIER_STATUS, StringPool.BLANK);
Date now = new Date();
desDossier.setCreateDate(now);
desDossier.setModifiedDate(now);
desDossier.setCompanyId(srcDossier.getCompanyId());
desDossier.setGroupId(srcDossier.getGroupId());
desDossier.setUserId(srcDossier.getUserId());
desDossier.setUserName(srcDossier.getUserName());
// Add extent fields
desDossier.setServiceCode(srcDossier.getServiceCode());
desDossier.setServiceName(srcDossier.getServiceName());
desDossier.setGovAgencyCode(srcDossier.getGovAgencyCode());
desDossier.setGovAgencyName(srcDossier.getGovAgencyName());
desDossier.setDossierTemplateNo(srcDossier.getDossierTemplateNo());
desDossier.setDossierTemplateName(srcDossier.getDossierTemplateName());
desDossier.setApplicantName(srcDossier.getApplicantName());
desDossier.setApplicantIdType(srcDossier.getApplicantIdType());
desDossier.setApplicantIdNo(srcDossier.getApplicantIdNo());
desDossier.setApplicantIdDate(srcDossier.getApplicantIdDate());
desDossier.setPassword(srcDossier.getPassword());
desDossier.setOnline(srcDossier.getOnline());
desDossier.setDossierNote(srcDossier.getDossierNote());
desDossier.setAddress(srcDossier.getAddress());
desDossier.setCityCode(srcDossier.getCityCode());
desDossier.setCityName(srcDossier.getCityName());
desDossier.setDistrictCode(srcDossier.getDistrictCode());
desDossier.setDistrictName(srcDossier.getDistrictName());
desDossier.setWardCode(srcDossier.getWardCode());
desDossier.setWardName(srcDossier.getWardName());
desDossier.setContactName(srcDossier.getContactName());
desDossier.setContactEmail(srcDossier.getContactEmail());
desDossier.setContactTelNo(srcDossier.getContactTelNo());
desDossier.setViaPostal(srcDossier.getViaPostal());
desDossier.setPostalAddress(srcDossier.getPostalAddress());
desDossier.setPostalCityCode(srcDossier.getPostalCityCode());
desDossier.setPostalCityName(srcDossier.getPostalCityName());
desDossier.setPostalTelNo(srcDossier.getPostalTelNo());
desDossier.setApplicantNote(srcDossier.getApplicantNote());
desDossier.setServerNo(srcDossier.getServerNo());
desDossier.setOriginality(srcDossier.getOriginality());
desDossier.setDurationCount(srcDossier.getDurationCount());
desDossier.setDurationUnit(srcDossier.getDurationUnit());
//desDossier.setDossierStatus(srcDossier.getDossierStatus());
//desDossier.setDossierStatusText(srcDossier.getDossierStatusText());
//desDossier.setDossierSubStatus(srcDossier.getDossierSubStatus());
//desDossier.setDossierSubStatusText(srcDossier.getDossierSubStatusText());
desDossier.setDelegateName(srcDossier.getDelegateName());
desDossier.setDelegateAddress(srcDossier.getDelegateAddress());
desDossier.setDelegateCityCode(srcDossier.getDelegateCityCode());
desDossier.setDelegateCityName(srcDossier.getDelegateCityName());
desDossier.setDelegateDistrictCode(srcDossier.getDelegateDistrictCode());
desDossier.setDelegateDistrictName(srcDossier.getDelegateDistrictName());
desDossier.setDelegateWardCode(srcDossier.getDelegateWardCode());
desDossier.setDelegateWardName(srcDossier.getDelegateWardName());
desDossier.setDelegateEmail(srcDossier.getDelegateEmail());
desDossier.setDelegateIdNo(srcDossier.getDelegateIdNo());
desDossier.setDelegateTelNo(srcDossier.getDelegateTelNo());
desDossier.setDossierName(srcDossier.getDossierName());
desDossier.setRegisterBookCode(srcDossier.getRegisterBookCode());
desDossier.setProcessNo(srcDossier.getProcessNo());
//dossierPersistence.update(desDossier);
//ServiceProcess serviceProcess = null;
//ProcessOption option = getProcessOption(srcDossier.getServiceCode(), srcDossier.getGovAgencyCode(), srcDossier.getDossierTemplateNo(), srcDossier.getGroupId());
//_log.debug("Process option: " + option);
// if (option != null) {
// long serviceProcessId = option.getServiceProcessId();
// serviceProcess = ServiceProcessLocalServiceUtil.fetchServiceProcess(serviceProcessId);
// String dossierRef = DossierNumberGenerator.generateDossierNumber(srcDossier.getGroupId(), srcDossier.getCompanyId(),
// desDossierId, option.getProcessOptionId(), serviceProcess.getDossierNoPattern(), params);
// _log.debug("Dossier no: " + dossierRef);
// desDossier.setDossierNo(dossierRef.trim());
// }
// set dueDate
desDossier.setDueDate(srcDossier.getDueDate());
// set receivedDate
desDossier.setReceiveDate(srcDossier.getReceiveDate());
return dossierPersistence.update(desDossier);
}
public Dossier getByDossierNo(long groupId, String dossierNo) {
return dossierPersistence.fetchByG_DN(groupId, dossierNo);
}
// super_admin Generators
@Indexable(type = IndexableType.DELETE)
public Dossier adminProcessDelete(Long id) {
Dossier object = dossierPersistence.fetchByPrimaryKey(id);
if (Validator.isNull(object)) {
return null;
} else {
dossierPersistence.remove(object);
}
return object;
}
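// Create or update a dossier from the admin JSON payload; dictionary lookups fill in the display names for the location codes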
@Indexable(type = IndexableType.REINDEX)
public Dossier adminProcessData(JSONObject objectData) {
Dossier object = null;
if (objectData.getLong("dossierId") > 0) {
object = dossierPersistence.fetchByPrimaryKey(objectData.getLong("dossierId"));
object.setModifiedDate(new Date());
} else {
long id = CounterLocalServiceUtil.increment(ServiceProcess.class.getName());
object = dossierPersistence.create(id);
object.setGroupId(objectData.getLong("groupId"));
object.setCompanyId(objectData.getLong("companyId"));
object.setCreateDate(new Date());
}
object.setUserId(objectData.getLong("userId"));
object.setUserName(objectData.getString("userName"));
object.setReferenceUid(objectData.getString("referenceUid"));
object.setCounter(objectData.getInt("counter"));
object.setRegisterBookCode(objectData.getString("registerBookCode"));
object.setRegisterBookName(objectData.getString("registerBookName"));
object.setDossierRegister(objectData.getString("dossierRegister"));
object.setProcessNo(objectData.getString("processNo"));
object.setServiceCode(objectData.getString("serviceCode"));
object.setServiceName(objectData.getString("serviceName"));
object.setGovAgencyCode(objectData.getString("govAgencyCode"));
object.setApplicantIdType(objectData.getString("applicantIdType"));
object.setApplicantIdNo(objectData.getString("applicantIdNo"));
object.setApplicantIdDate(new Date(objectData.getLong("applicantIdDate")));
object.setAddress(objectData.getString("address"));
object.setApplicantName(objectData.getString("applicantName"));
object.setPostalAddress(objectData.getString("postalAddress"));
DictItem govAgencyName = DictCollectionUtils.getDictItemByCode(DataMGTConstants.GOVERNMENT_AGENCY,
objectData.getString("govAgencyCode"), objectData.getLong("groupId"));
if (Validator.isNotNull(govAgencyName)) {
object.setGovAgencyName(govAgencyName.getItemName());
}
object.setCityCode(objectData.getString("cityCode"));
object.setDistrictCode(objectData.getString("districtCode"));
object.setWardCode(objectData.getString("wardCode"));
object.setDelegateCityCode(objectData.getString("delegateCityCode"));
object.setDelegateDistrictCode(objectData.getString("delegateDistrictCode"));
object.setDelegateWardCode(objectData.getString("delegateWardCode"));
object.setPostalCityCode(objectData.getString("postalCityCode"));
object.setPostalDistrictCode(objectData.getString("postalDistrictCode"));
object.setPostalWardCode(objectData.getString("postalWardCode"));
DictItem dictItem = DictCollectionUtils.getDictItemByCode(DataMGTConstants.ADMINISTRATIVE_REGION,
objectData.getString("cityCode"), objectData.getLong("groupId"));
if (Validator.isNotNull(dictItem)) {
object.setCityName(dictItem.getItemName());
}
dictItem = DictCollectionUtils.getDictItemByCode(DataMGTConstants.ADMINISTRATIVE_REGION,
objectData.getString("districtCode"), objectData.getLong("groupId"));
if (Validator.isNotNull(dictItem)) {
object.setDistrictName(dictItem.getItemName());
}
dictItem = DictCollectionUtils.getDictItemByCode(DataMGTConstants.ADMINISTRATIVE_REGION,
objectData.getString("wardCode"), objectData.getLong("groupId"));
if (Validator.isNotNull(dictItem)) {
object.setWardName(dictItem.getItemName());
}
dictItem = DictCollectionUtils.getDictItemByCode(DataMGTConstants.ADMINISTRATIVE_REGION,
objectData.getString("delegateCityCode"), objectData.getLong("groupId"));
if (Validator.isNotNull(dictItem)) {
object.setDelegateCityName(dictItem.getItemName());
}
dictItem = DictCollectionUtils.getDictItemByCode(DataMGTConstants.ADMINISTRATIVE_REGION,
objectData.getString("delegateDistrictCode"), objectData.getLong("groupId"));
if (Validator.isNotNull(dictItem)) {
object.setDelegateDistrictName(dictItem.getItemName());
}
dictItem = DictCollectionUtils.getDictItemByCode(DataMGTConstants.ADMINISTRATIVE_REGION,
objectData.getString("delegateWardCode"), objectData.getLong("groupId"));
if (Validator.isNotNull(dictItem)) {
object.setDelegateWardName(dictItem.getItemName());
}
dictItem = DictCollectionUtils.getDictItemByCode(DataMGTConstants.ADMINISTRATIVE_REGION,
objectData.getString("postalCityCode"), objectData.getLong("groupId"));
if (Validator.isNotNull(dictItem)) {
object.setPostalCityName(dictItem.getItemName());
}
dictItem = DictCollectionUtils.getDictItemByCode(DataMGTConstants.ADMINISTRATIVE_REGION,
objectData.getString("postalDistrictCode"), objectData.getLong("groupId"));
if (Validator.isNotNull(dictItem)) {
object.setPostalDistrictName(dictItem.getItemName());
}
dictItem = DictCollectionUtils.getDictItemByCode(DataMGTConstants.ADMINISTRATIVE_REGION,
objectData.getString("postalWardCode"), objectData.getLong("groupId"));
if (Validator.isNotNull(dictItem)) {
object.setPostalWardName(dictItem.getItemName());
}
object.setPostalServiceCode(objectData.getString("postalServiceCode"));
object.setPostalServiceName(objectData.getString("postalServiceName"));
object.setDossierTemplateNo(objectData.getString("dossierTemplateNo"));
object.setDossierTemplateName(objectData.getString("dossierTemplateName"));
object.setDossierStatus(objectData.getString("dossierStatus"));
object.setDossierStatusText(objectData.getString("dossierStatusText"));
object.setDossierSubStatus(objectData.getString("dossierSubStatus"));
object.setDossierSubStatusText(objectData.getString("dossierSubStatusText"));
object.setContactName(objectData.getString("contactName"));
object.setContactTelNo(objectData.getString("contactTelNo"));
object.setContactEmail(objectData.getString("contactEmail"));
object.setDelegateName(objectData.getString("delegateName"));
object.setDelegateIdNo(objectData.getString("delegateIdNo"));
object.setDelegateTelNo(objectData.getString("delegateTelNo"));
object.setDelegateEmail(objectData.getString("delegateEmail"));
object.setDelegateAddress(objectData.getString("delegateAddress"));
object.setDossierNote(objectData.getString("dossierNote"));
object.setSubmissionNote(objectData.getString("submissionNote"));
object.setApplicantNote(objectData.getString("applicantNote"));
object.setBriefNote(objectData.getString("briefNote"));
object.setDossierNo(objectData.getString("dossierNo"));
object.setSubmitting(objectData.getBoolean("submitting"));
object.setProcessDate(new Date(objectData.getLong("processDate")));
object.setSubmitDate(new Date(objectData.getLong("submitDate")));
object.setReceiveDate(new Date(objectData.getLong("receiveDate")));
object.setDueDate(new Date(objectData.getLong("dueDate")));
object.setExtendDate(new Date(objectData.getLong("extendDate")));
object.setReleaseDate(new Date(objectData.getLong("releaseDate")));
object.setFinishDate(new Date(objectData.getLong("finishDate")));
object.setCancellingDate(new Date(objectData.getLong("cancellingDate")));
object.setCorrecttingDate(new Date(objectData.getLong("correcttingDate")));
// object.setFolderId(objectData.getString("userName")folderId);
object.setDossierActionId(objectData.getLong("dossierActionId"));
object.setViaPostal(objectData.getInt("viaPostal"));
object.setPostalTelNo(objectData.getString("postalTelNo"));
object.setPassword(objectData.getString("password"));
object.setNotification(objectData.getBoolean("notification"));
object.setOnline(objectData.getBoolean("online"));
object.setOriginal(objectData.getBoolean("original"));
object.setServerNo(objectData.getString("serverNo"));
object.setEndorsementDate(new Date(objectData.getLong("endorsementDate")));
object.setLockState(objectData.getString("lockState"));
object.setOriginality(objectData.getInt("originality"));
object.setOriginDossierId(objectData.getLong("originDossierId"));
object.setSampleCount(objectData.getLong("sampleCount"));
object.setDurationUnit(objectData.getInt("durationUnit"));
object.setDurationCount(objectData.getDouble("durationCount"));
object.setDossierName(objectData.getString("dossierName"));
object.setOriginDossierNo(objectData.getString("originDossierNo"));
dossierPersistence.update(object);
return object;
}
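// Partial update of status and date fields from the JSON payload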
@Indexable(type = IndexableType.REINDEX)
public Dossier updateDossierSpecial(long dossierId, JSONObject obj) throws NoSuchDossierException {
// _log.debug("Object dossier update: " + obj.toJSONString());
Dossier dossier = dossierPersistence.findByPrimaryKey(dossierId);
if (obj.has(DossierTerm.DOSSIER_NOTE)) {
if (!obj.getString(DossierTerm.DOSSIER_NOTE).equals(dossier.getDossierNote())) {
dossier.setDossierNote(obj.getString(DossierTerm.DOSSIER_NOTE));
}
}
if (obj.has(DossierTerm.DOSSIER_STATUS)) {
if (!obj.getString(DossierTerm.DOSSIER_STATUS).equals(dossier.getDossierStatus())
&& Validator.isNotNull(obj.getString(DossierTerm.DOSSIER_STATUS))) {
dossier.setDossierStatus(obj.getString(DossierTerm.DOSSIER_STATUS));
}
}
if (obj.has(DossierTerm.RECEIVE_DATE) && Validator.isNotNull(obj.get(DossierTerm.RECEIVE_DATE))
&& GetterUtil.getLong(obj.get(DossierTerm.RECEIVE_DATE)) > 0) {
if (dossier.getReceiveDate() == null || obj.getLong(DossierTerm.RECEIVE_DATE) != dossier.getReceiveDate().getTime()) {
dossier.setReceiveDate(new Date(obj.getLong(DossierTerm.RECEIVE_DATE)));
}
}
if (obj.has(DossierTerm.EXTEND_DATE) && Validator.isNotNull(obj.get(DossierTerm.EXTEND_DATE))
&& GetterUtil.getLong(obj.get(DossierTerm.EXTEND_DATE)) > 0) {
if (dossier.getExtendDate() == null || obj.getLong(DossierTerm.EXTEND_DATE) != dossier.getExtendDate().getTime()) {
dossier.setExtendDate(new Date(obj.getLong(DossierTerm.EXTEND_DATE)));
}
}
if (obj.has(DossierTerm.DUE_DATE) && Validator.isNotNull(obj.get(DossierTerm.DUE_DATE))
&& GetterUtil.getLong(obj.get(DossierTerm.DUE_DATE)) > 0) {
if (dossier.getDueDate() == null || obj.getLong(DossierTerm.DUE_DATE) != dossier.getDueDate().getTime()) {
dossier.setDueDate(new Date(obj.getLong(DossierTerm.DUE_DATE)));
}
}
if (obj.has(DossierTerm.FINISH_DATE) && Validator.isNotNull(obj.get(DossierTerm.FINISH_DATE))
&& GetterUtil.getLong(obj.get(DossierTerm.FINISH_DATE)) > 0) {
if (dossier.getFinishDate() == null || obj.getLong(DossierTerm.FINISH_DATE) != dossier.getFinishDate().getTime()) {
dossier.setFinishDate(new Date(obj.getLong(DossierTerm.FINISH_DATE)));
}
}
if (obj.has(DossierTerm.RELEASE_DATE) && Validator.isNotNull(obj.get(DossierTerm.RELEASE_DATE))
&& GetterUtil.getLong(obj.get(DossierTerm.RELEASE_DATE)) > 0) {
if (dossier.getReleaseDate() == null || obj.getLong(DossierTerm.RELEASE_DATE) != dossier.getReleaseDate().getTime()) {
dossier.setReleaseDate(new Date(obj.getLong(DossierTerm.RELEASE_DATE)));
}
}
return dossierPersistence.update(dossier);
}
public List<Dossier> getByG_AN(long groupId, String applicantIdNo) {
return dossierPersistence.findByG_AN(groupId, applicantIdNo);
}
public Dossier getByG_AN_SC_GAC_DTNO_ODID(long groupId, String applicantIdNo, String serviceCode, String govAgencyCode, String dossierTemplateNo, long originDossierId) {
return dossierPersistence.fetchByG_AN_SC_GAC_DTNO_ODID(groupId, applicantIdNo, serviceCode, govAgencyCode, dossierTemplateNo, originDossierId);
}
public Dossier fetchOnePublicService() {
return dossierPersistence.fetchByO_First(0, null);
}
public List<Dossier> findByNOT_ST_GT_MD(String[] statuses, Date d, int start, int end) {
return dossierPersistence.findByNOT_ST_GT_MD(statuses, d, start, end);
}
public List<Dossier> findByGID(long groupId, int start, int end) {
return dossierPersistence.findByG(groupId, start, end);
}
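// Update the dossier status fields; releaseDate is stamped when the status is RELEASING and finishDate when it is DONE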
public Dossier updateStatus(Dossier dossier, String status, String statusText, String subStatus,
String subStatusText, String lockState, String stepInstruction, ServiceContext context)
throws PortalException {
Date now = new Date();
dossier.setModifiedDate(now);
dossier.setDossierStatus(status);
dossier.setDossierStatusText(statusText);
dossier.setDossierSubStatus(subStatus);
dossier.setDossierSubStatusText(subStatusText);
dossier.setLockState(lockState);
dossier.setDossierNote(stepInstruction);
if (status.equalsIgnoreCase(DossierStatusConstants.RELEASING)) {
dossier.setReleaseDate(now);
}
if (status.equalsIgnoreCase(DossierStatusConstants.DONE)) {
dossier.setFinishDate(now);
}
dossierPersistence.update(dossier);
return dossier;
}
@Indexable(type = IndexableType.REINDEX)
public Dossier initUpdateDossier(long groupId, long id, String applicantName, String applicantIdType,
String applicantIdNo, String applicantIdDate, String address, String cityCode, String cityName,
String districtCode, String districtName, String wardCode, String wardName, String contactName,
String contactTelNo, String contactEmail, String dossierTemplateNo, Integer viaPostal, String postalAddress,
String postalCityCode, String postalCityName, String postalTelNo, String applicantNote,
boolean isSameAsApplicant, String delegateName, String delegateIdNo, String delegateTelNo,
String delegateEmail, String delegateAddress, String delegateCityCode, String delegateDistrictCode,
String delegateWardCode, Long sampleCount, String dossierName, String briefNote, ServiceContext serviceContext) {
Date now = new Date();
long userId = serviceContext.getUserId();
User auditUser = userPersistence.fetchByPrimaryKey(userId);
Dossier dossier = dossierPersistence.fetchByPrimaryKey(id);
dossier.setModifiedDate(now);
dossier.setUserId(userId);
dossier.setUserName(auditUser.getFullName());
//
if (Validator.isNotNull(applicantName))
dossier.setApplicantName(applicantName);
if (Validator.isNotNull(applicantIdType))
dossier.setApplicantIdType(applicantIdType);
if (Validator.isNotNull(applicantIdNo))
dossier.setApplicantIdNo(applicantIdNo);
if (Validator.isNotNull(applicantIdDate))
dossier.setApplicantIdDate(
APIDateTimeUtils.convertStringToDate(applicantIdDate, APIDateTimeUtils._NORMAL_PARTTERN));
if (Validator.isNotNull(address))
dossier.setAddress(address);
if (Validator.isNotNull(cityCode))
dossier.setCityCode(cityCode);
if (Validator.isNotNull(cityName))
dossier.setCityName(cityName);
if (Validator.isNotNull(districtCode))
dossier.setDistrictCode(districtCode);
if (Validator.isNotNull(districtName))
dossier.setDistrictName(districtName);
if (Validator.isNotNull(wardCode))
dossier.setWardCode(wardCode);
if (Validator.isNotNull(wardName))
dossier.setWardName(wardName);
if (Validator.isNotNull(contactName))
dossier.setContactName(contactName);
if (Validator.isNotNull(contactEmail))
dossier.setContactEmail(contactEmail);
if (Validator.isNotNull(contactTelNo))
dossier.setContactTelNo(contactTelNo);
if (Validator.isNotNull(sampleCount))
dossier.setSampleCount(sampleCount);
if (Validator.isNotNull(viaPostal)) {
dossier.setViaPostal(viaPostal);
if (viaPostal == 1) {
dossier.setPostalAddress(StringPool.BLANK);
dossier.setPostalCityCode(StringPool.BLANK);
dossier.setPostalTelNo(StringPool.BLANK);
} else if (viaPostal == 2) {
if (Validator.isNotNull(postalAddress))
dossier.setPostalAddress(postalAddress);
if (Validator.isNotNull(postalCityCode))
dossier.setPostalCityCode(postalCityCode);
if (Validator.isNotNull(postalTelNo))
dossier.setPostalTelNo(postalTelNo);
if (Validator.isNotNull(postalCityName))
dossier.setPostalCityName(postalCityName);
} else {
dossier.setPostalAddress(StringPool.BLANK);
dossier.setPostalCityCode(StringPool.BLANK);
dossier.setPostalTelNo(StringPool.BLANK);
}
}
if (isSameAsApplicant) {
dossier.setDelegateName(applicantName);
dossier.setDelegateIdNo(applicantIdNo);
dossier.setDelegateTelNo(contactTelNo);
dossier.setDelegateAddress(address);
dossier.setDelegateEmail(contactEmail);
if (Validator.isNotNull(cityCode)) {
dossier.setDelegateCityCode(cityCode);
dossier.setDelegateCityName(getDictItemName(dossier.getGroupId(), ADMINISTRATIVE_REGION, cityCode));
}
if (Validator.isNotNull(districtCode)) {
dossier.setDelegateDistrictCode(districtCode);
dossier.setDelegateDistrictName(
getDictItemName(dossier.getGroupId(), ADMINISTRATIVE_REGION, districtCode));
}
if (Validator.isNotNull(wardCode)) {
dossier.setDelegateWardCode(wardCode);
dossier.setDelegateWardName(getDictItemName(dossier.getGroupId(), ADMINISTRATIVE_REGION, wardCode));
}
} else {
dossier.setDelegateName(delegateName);
dossier.setDelegateIdNo(delegateIdNo);
dossier.setDelegateTelNo(delegateTelNo);
dossier.setDelegateAddress(delegateAddress);
dossier.setDelegateEmail(delegateEmail);
if (Validator.isNotNull(delegateCityCode)) {
dossier.setDelegateCityCode(delegateCityCode);
dossier.setDelegateCityName(
getDictItemName(dossier.getGroupId(), ADMINISTRATIVE_REGION, delegateCityCode));
}
if (Validator.isNotNull(delegateDistrictCode)) {
dossier.setDelegateDistrictCode(delegateDistrictCode);
dossier.setDelegateDistrictName(
getDictItemName(dossier.getGroupId(), ADMINISTRATIVE_REGION, delegateDistrictCode));
}
if (Validator.isNotNull(delegateWardCode)) {
dossier.setDelegateWardCode(delegateWardCode);
dossier.setDelegateWardName(
getDictItemName(dossier.getGroupId(), ADMINISTRATIVE_REGION, delegateWardCode));
}
}
dossier.setApplicantNote(applicantNote);
if (Validator.isNotNull(dossierName)) {
dossier.setDossierName(dossierName);
}
_log.debug("Dossier name: " + dossierName);
dossier.setBriefNote(briefNote);
//Process add status of group dossier
if (dossier.getOriginality() == 9) {
dossier.setDossierStatus(DossierTerm.DOSSIER_STATUS_PROCESSING);
}
return dossierPersistence.update(dossier);
}
public List<Dossier> getByGroupAndOriginDossierNo(long groupId, String originDossierNo) {
return dossierPersistence.findByGID_ORI_NO(groupId, originDossierNo);
}
public int countByGroupAndOriginDossierNo(long groupId, String originDossierNo) {
return dossierPersistence.countByGID_ORI_NO(groupId, originDossierNo);
}
public int countByOriginDossierNo(String originDossierNo) {
return dossierPersistence.countByORIGIN_NO(originDossierNo);
}
public List<Dossier> getByU_G_GAC_SC_DTNO_DS_O(long userId, long groupId, String govAgencyCode, String serviceCode, String dossierTemplateNo, String dossierStatus, int originality) {
return dossierPersistence.findByU_G_GAC_SC_DTNO_DS_O(userId, groupId, govAgencyCode, serviceCode, dossierTemplateNo, dossierStatus, originality);
}
public int countByG_NOTS_O_SC(long groupId, String[] dossierStatuses, int originality, String serviceCode) {
return dossierPersistence.countByG_NOTS_O_SC(groupId, dossierStatuses, originality, serviceCode);
}
public int countByG_NOTS_O_DTN(long groupId, String[] dossierStatuses, int originality, String dossierTemplateNo) {
return dossierPersistence.countByG_NOTS_O_DTN(groupId, dossierStatuses, originality, dossierTemplateNo);
}
public int countByG_NOTS_O_PN(long groupId, String[] dossierStatuses, int originality, String processNo) {
return dossierPersistence.countByG_NOTS_O_PN(groupId, dossierStatuses, originality, processNo);
}
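// Create or update a dossier from imported data, looked up by dossierId or by reference UID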
@Indexable(type = IndexableType.REINDEX)
public Dossier publishImportDossier(long groupId, long dossierId, String referenceUid, int counter,
String serviceCode, String serviceName, String govAgencyCode, String govAgencyName, String applicantName,
String applicantIdType, String applicantIdNo, Date applicantIdDate, String address, String contactName,
String contactTelNo, String contactEmail, Boolean online, int originality, String dossierNo,
String dossierStatus, String dossierStatusText, long dossierActionId, Double durationCount,
Integer durationUnit, Integer sampleCount, Date createDate, Date modifiedDate, Date submitDate,
Date receiveDate, Date dueDate, Date releaseDate, Date finishDate, String dossierTemplateNo,
String dossierTemplateName, ServiceContext serviceContext) {
long userId = serviceContext.getUserId();
User auditUser = userPersistence.fetchByPrimaryKey(userId);
Dossier dossier = null;
if (dossierId > 0) {
dossier = DossierLocalServiceUtil.fetchDossier(dossierId);
} else {
dossier = getByRef(groupId, referenceUid);
}
if (dossier == null) {
dossierId = counterLocalService.increment(Dossier.class.getName());
dossier = dossierPersistence.create(dossierId);
dossier.setCreateDate(createDate);
dossier.setModifiedDate(modifiedDate);
dossier.setSubmitDate(submitDate);
dossier.setReceiveDate(receiveDate);
dossier.setDueDate(dueDate);
dossier.setReleaseDate(releaseDate);
dossier.setFinishDate(finishDate);
dossier.setCompanyId(serviceContext.getCompanyId());
dossier.setGroupId(groupId);
dossier.setUserId(userId);
dossier.setUserName(auditUser.getFullName());
// Add extent fields
dossier.setReferenceUid(referenceUid);
dossier.setCounter(counter);
dossier.setServiceCode(serviceCode);
dossier.setServiceName(serviceName);
dossier.setGovAgencyCode(govAgencyCode);
dossier.setGovAgencyName(govAgencyName);
dossier.setDossierTemplateNo(dossierTemplateNo);
dossier.setDossierTemplateName(dossierTemplateName);
dossier.setApplicantName(applicantName);
dossier.setApplicantIdType(applicantIdType);
dossier.setApplicantIdNo(applicantIdNo);
dossier.setApplicantIdDate(applicantIdDate);
dossier.setOnline(online);
dossier.setAddress(address);
dossier.setContactName(contactName);
dossier.setContactEmail(contactEmail);
dossier.setContactTelNo(contactTelNo);
dossier.setViaPostal(0);
dossier.setOriginality(originality);
dossier.setDossierNo(dossierNo);
dossier.setDossierStatus(dossierStatus);
dossier.setDossierStatusText(dossierStatusText);
if ("releasing".equals(dossierStatus)) {
dossier.setDossierSubStatus("releasing_0");
dossier.setDossierSubStatusText("Chờ trả kết quả tại một cửa"); // "Waiting to return the result at the one-stop unit"
}
dossier.setDossierActionId(dossierActionId);
dossier.setCounter(counter);
dossier.setDelegateName(applicantName);
dossier.setDelegateAddress(address);
dossier.setDelegateIdNo(applicantIdNo);
dossier.setDelegateTelNo(contactTelNo);
dossier.setDelegateEmail(contactEmail);
dossier.setDurationCount(durationCount);
dossier.setDurationUnit(durationUnit);
dossier.setSampleCount(sampleCount);
dossier.setDossierName(serviceName);
dossier = dossierPersistence.update(dossier);
} else {
dossier.setModifiedDate(modifiedDate);
dossier.setSubmitDate(submitDate);
dossier.setReceiveDate(receiveDate);
dossier.setDueDate(dueDate);
dossier.setReleaseDate(releaseDate);
dossier.setFinishDate(finishDate);
if (Validator.isNotNull(address))
dossier.setAddress(address);
if (Validator.isNotNull(contactName))
dossier.setContactName(contactName);
if (Validator.isNotNull(contactEmail))
dossier.setContactEmail(contactEmail);
if (Validator.isNotNull(contactTelNo))
dossier.setContactTelNo(contactTelNo);
if (Validator.isNotNull(dossierTemplateNo))
dossier.setDossierTemplateNo(dossierTemplateNo);
if (Validator.isNotNull(dossierTemplateName))
dossier.setDossierTemplateName(dossierTemplateName);
dossier.setViaPostal(0);
dossier.setOriginality(originality);
dossier.setDossierNo(dossierNo);
dossier.setDossierStatus(dossierStatus);
dossier.setDossierStatusText(dossierStatusText);
if ("releasing".equals(dossierStatus)) {
dossier.setDossierSubStatus("releasing_0");
dossier.setDossierSubStatusText("Chờ trả kết quả tại một cửa"); // "Waiting to return the result at the one-stop unit"
}
dossier.setDossierActionId(dossierActionId);
dossier.setCounter(counter);
dossier.setDelegateName(applicantName);
dossier.setDelegateAddress(address);
dossier.setDelegateIdNo(applicantIdNo);
dossier.setDelegateTelNo(contactTelNo);
dossier.setDelegateEmail(contactEmail);
dossier.setDurationCount(durationCount);
dossier.setDurationUnit(durationUnit);
dossier.setSampleCount(sampleCount);
dossier.setDossierName(serviceName);
dossier = dossierPersistence.update(dossier);
}
return dossier;
}
public List<Dossier> getByF_GID_AN_DS(long groupId, String applicantIdNo, String dossierStatus) {
return dossierPersistence.findByF_GID_AN_DS(groupId, applicantIdNo, dossierStatus);
}
public List<Dossier> getByGID_GC_SC_DTN_DS_APP_ORI(long groupId, String govAgencyCode, String serviceCode,
String dossierTemplateNo, String[] statusArr, String applicantIdType, int originality) {
try {
return dossierPersistence.findByGID_GC_SC_DTN_DS_APP_ORI(groupId, govAgencyCode, serviceCode,
dossierTemplateNo, statusArr, applicantIdType, originality);
} catch (Exception e) {
_log.debug(e);
}
return null;
}
public List<Dossier> findByG_GDID(long groupId, long groupDossierId) {
return dossierPersistence.findByG_GDID(groupId, groupDossierId);
}
}
|
Update find on time
|
modules/backend-dossiermgt/backend-dossiermgt-service/src/main/java/org/opencps/dossiermgt/service/impl/DossierLocalServiceImpl.java
|
Update find on time
|
<ide><path>odules/backend-dossiermgt/backend-dossiermgt-service/src/main/java/org/opencps/dossiermgt/service/impl/DossierLocalServiceImpl.java
<ide>
<ide> subQueryFive.add(queryCompareFinish, BooleanClauseOccur.MUST);
<ide> /** Check condition (finishDate == null) || (finishDate != null && finishDate >= dueDate) - END **/
<del> subQuerySix.add(subQueryFive, BooleanClauseOccur.SHOULD);
<del> subQuerySix.add(subQueryFour, BooleanClauseOccur.SHOULD);
<add>// subQuerySix.add(subQueryFive, BooleanClauseOccur.SHOULD);
<add>// subQuerySix.add(subQueryFour, BooleanClauseOccur.SHOULD);
<ide>
<ide> /** Check condition (releaseDate < dueDate && extendDate==null && (finishDate==null||finishDate>=dueDate))- END **/
<ide> subQueryThree.add(subQuerySix, BooleanClauseOccur.MUST);
|
|
Java
|
apache-2.0
|
48bb65418b036815b8a077674d3b1124367d083e
| 0 |
nagyistoce/kanbanik,kanbanik/kanbanik,mortenpoulsen/kanbanik,aymenhs/kanbanik,kanbanik/kanbanik,gudtago/kanbanik,mortenpoulsen/kanbanik,gudtago/kanbanik,aymenhs/kanbanik,nagyistoce/kanbanik
|
package com.googlecode.kanbanik.client.modules.editworkflow.workflow.v2;
import com.allen_sauer.gwt.dnd.client.DragContext;
import com.allen_sauer.gwt.dnd.client.VetoDragException;
import com.allen_sauer.gwt.dnd.client.drop.FlowPanelDropController;
import com.google.gwt.user.client.ui.FlowPanel;
import com.google.gwt.user.client.ui.Widget;
import com.googlecode.kanbanik.client.KanbanikAsyncCallback;
import com.googlecode.kanbanik.client.ServerCommandInvokerManager;
import com.googlecode.kanbanik.client.messaging.MessageBus;
import com.googlecode.kanbanik.client.modules.editworkflow.workflow.v2.WorkflowEditingComponent.Position;
import com.googlecode.kanbanik.dto.WorkflowitemDto;
import com.googlecode.kanbanik.dto.shell.EditWorkflowParams;
import com.googlecode.kanbanik.dto.shell.SimpleParams;
import com.googlecode.kanbanik.shared.ServerCommand;
public class WorkflowEditingDropController extends FlowPanelDropController {
private final WorkflowitemDto contextItem;
private final WorkflowitemDto currentItem;
private final Position position;
public WorkflowEditingDropController(FlowPanel dropTarget,
WorkflowitemDto contextItem, WorkflowitemDto currentItem,
Position position) {
super(dropTarget);
this.contextItem = contextItem;
this.currentItem = currentItem;
this.position = position;
}
@Override
public void onPreviewDrop(DragContext context) throws VetoDragException {
// veto if dropped before or after himself
Widget w = context.selectedWidgets.iterator().next();
if (!(w instanceof WorkflowitemWidget)) {
return;
}
WorkflowitemDto droppedItem = ((WorkflowitemWidget) w)
.getWorkflowitem();
if (droppedItem.getId() != null && currentItem.getId() != null) {
if (droppedItem.getId().equals(currentItem.getId())) {
throw new VetoDragException();
}
WorkflowitemDto nextItem = findNextItem();
if (nextItem != null && nextItem.getId() != null
&& droppedItem.getId().equals(nextItem.getId())) {
throw new VetoDragException();
}
}
super.onPreviewDrop(context);
}
@Override
public void onDrop(DragContext context) {
super.onDrop(context);
if (context.selectedWidgets.size() > 1) {
throw new UnsupportedOperationException(
"Only one workflowitem can be dragged at a time");
}
Widget w = context.selectedWidgets.iterator().next();
if (!(w instanceof WorkflowitemWidget)) {
return;
}
WorkflowitemDto droppedItem = ((WorkflowitemWidget) w)
.getWorkflowitem();
WorkflowitemDto nextItem = findNextItem();
droppedItem.setNextItem(nextItem);
ServerCommandInvokerManager
.getInvoker()
.<EditWorkflowParams, SimpleParams<WorkflowitemDto>> invokeCommand(
ServerCommand.EDIT_WORKFLOW,
new EditWorkflowParams(droppedItem, contextItem),
new KanbanikAsyncCallback<SimpleParams<WorkflowitemDto>>() {
@Override
public void success(
SimpleParams<WorkflowitemDto> result) {
MessageBus
.sendMessage(new RefreshBoardsRequestMessage(
"", this));
}
});
}
private WorkflowitemDto findNextItem() {
if (position == Position.BEFORE) {
return currentItem;
} else if (position == Position.AFTER) {
return currentItem.getNextItem();
} else {
// this can happen only if it has no children => has no next item
return null;
}
}
}
|
kanbanik-web/src/main/java/com/googlecode/kanbanik/client/modules/editworkflow/workflow/v2/WorkflowEditingDropController.java
|
package com.googlecode.kanbanik.client.modules.editworkflow.workflow.v2;
import com.allen_sauer.gwt.dnd.client.DragContext;
import com.allen_sauer.gwt.dnd.client.VetoDragException;
import com.allen_sauer.gwt.dnd.client.drop.FlowPanelDropController;
import com.google.gwt.user.client.ui.FlowPanel;
import com.google.gwt.user.client.ui.Widget;
import com.googlecode.kanbanik.client.KanbanikAsyncCallback;
import com.googlecode.kanbanik.client.ServerCommandInvokerManager;
import com.googlecode.kanbanik.client.messaging.MessageBus;
import com.googlecode.kanbanik.client.modules.editworkflow.workflow.v2.WorkflowEditingComponent.Position;
import com.googlecode.kanbanik.dto.WorkflowitemDto;
import com.googlecode.kanbanik.dto.shell.EditWorkflowParams;
import com.googlecode.kanbanik.dto.shell.SimpleParams;
import com.googlecode.kanbanik.shared.ServerCommand;
public class WorkflowEditingDropController extends FlowPanelDropController {
private final WorkflowitemDto contextItem;
private final WorkflowitemDto currentItem;
private final Position position;
public WorkflowEditingDropController(
FlowPanel dropTarget,
WorkflowitemDto contextItem,
WorkflowitemDto currentItem,
Position position) {
super(dropTarget);
this.contextItem = contextItem;
this.currentItem = currentItem;
this.position = position;
}
@Override
public void onPreviewDrop(DragContext context) throws VetoDragException {
Widget w = context.selectedWidgets.iterator().next();
if (!(w instanceof WorkflowitemWidget)) {
return;
}
WorkflowitemDto droppedItem = ((WorkflowitemWidget) w).getWorkflowitem();
WorkflowitemDto nextItem = findNextItem();
if (droppedItem.getId() != null &&
nextItem.getId() != null &&
droppedItem.getId().equals(nextItem.getId())) {
// dropped before himself
throw new VetoDragException();
}
super.onPreviewDrop(context);
}
@Override
public void onDrop(DragContext context) {
super.onDrop(context);
if (context.selectedWidgets.size() > 1) {
throw new UnsupportedOperationException("Only one workflowitem can be dragged at a time");
}
Widget w = context.selectedWidgets.iterator().next();
if (!(w instanceof WorkflowitemWidget)) {
return;
}
WorkflowitemDto droppedItem = ((WorkflowitemWidget) w).getWorkflowitem();
WorkflowitemDto nextItem = findNextItem();
droppedItem.setNextItem(nextItem);
ServerCommandInvokerManager.getInvoker().<EditWorkflowParams, SimpleParams<WorkflowitemDto>> invokeCommand(
ServerCommand.EDIT_WORKFLOW,
new EditWorkflowParams(droppedItem, contextItem),
new KanbanikAsyncCallback<SimpleParams<WorkflowitemDto>>() {
@Override
public void success(SimpleParams<WorkflowitemDto> result) {
MessageBus.sendMessage(new RefreshBoardsRequestMessage("", this));
}
});
}
private WorkflowitemDto findNextItem() {
if (position == Position.BEFORE) {
return currentItem;
} else if (position == Position.AFTER) {
return currentItem.getNextItem();
} else {
// this can happen only if it has no children => has no next item
return null;
}
}
}
|
Fixed infinite loop when item moved before itself
|
kanbanik-web/src/main/java/com/googlecode/kanbanik/client/modules/editworkflow/workflow/v2/WorkflowEditingDropController.java
|
Fixed infinite loop when item moved before itself
|
<ide><path>anbanik-web/src/main/java/com/googlecode/kanbanik/client/modules/editworkflow/workflow/v2/WorkflowEditingDropController.java
<ide>
<ide> public class WorkflowEditingDropController extends FlowPanelDropController {
<ide> private final WorkflowitemDto contextItem;
<del>
<add>
<ide> private final WorkflowitemDto currentItem;
<del>
<add>
<ide> private final Position position;
<ide>
<del> public WorkflowEditingDropController(
<del> FlowPanel dropTarget,
<del> WorkflowitemDto contextItem,
<del> WorkflowitemDto currentItem,
<add> public WorkflowEditingDropController(FlowPanel dropTarget,
<add> WorkflowitemDto contextItem, WorkflowitemDto currentItem,
<ide> Position position) {
<ide> super(dropTarget);
<ide> this.contextItem = contextItem;
<ide> this.currentItem = currentItem;
<ide> this.position = position;
<ide> }
<del>
<add>
<ide> @Override
<ide> public void onPreviewDrop(DragContext context) throws VetoDragException {
<add> // veto if dropped before or after himself
<add>
<ide> Widget w = context.selectedWidgets.iterator().next();
<ide> if (!(w instanceof WorkflowitemWidget)) {
<ide> return;
<ide> }
<del> WorkflowitemDto droppedItem = ((WorkflowitemWidget) w).getWorkflowitem();
<del> WorkflowitemDto nextItem = findNextItem();
<del> if (droppedItem.getId() != null &&
<del> nextItem.getId() != null &&
<del> droppedItem.getId().equals(nextItem.getId())) {
<del> // dropped before himself
<del> throw new VetoDragException();
<add> WorkflowitemDto droppedItem = ((WorkflowitemWidget) w)
<add> .getWorkflowitem();
<add> if (droppedItem.getId() != null && currentItem.getId() != null) {
<add> if (droppedItem.getId().equals(currentItem.getId())) {
<add> throw new VetoDragException();
<add> }
<add>
<add> WorkflowitemDto nextItem = findNextItem();
<add> if (nextItem != null && nextItem.getId() != null
<add> && droppedItem.getId().equals(nextItem.getId())) {
<add> throw new VetoDragException();
<add> }
<ide> }
<add>
<ide> super.onPreviewDrop(context);
<ide> }
<del>
<add>
<ide> @Override
<ide> public void onDrop(DragContext context) {
<ide> super.onDrop(context);
<del>
<add>
<ide> if (context.selectedWidgets.size() > 1) {
<del> throw new UnsupportedOperationException("Only one workflowitem can be dragged at a time");
<add> throw new UnsupportedOperationException(
<add> "Only one workflowitem can be dragged at a time");
<ide> }
<del>
<add>
<ide> Widget w = context.selectedWidgets.iterator().next();
<ide> if (!(w instanceof WorkflowitemWidget)) {
<ide> return;
<ide> }
<del>
<del> WorkflowitemDto droppedItem = ((WorkflowitemWidget) w).getWorkflowitem();
<add>
<add> WorkflowitemDto droppedItem = ((WorkflowitemWidget) w)
<add> .getWorkflowitem();
<ide> WorkflowitemDto nextItem = findNextItem();
<del>
<add>
<ide> droppedItem.setNextItem(nextItem);
<del>
<del> ServerCommandInvokerManager.getInvoker().<EditWorkflowParams, SimpleParams<WorkflowitemDto>> invokeCommand(
<del> ServerCommand.EDIT_WORKFLOW,
<del> new EditWorkflowParams(droppedItem, contextItem),
<del> new KanbanikAsyncCallback<SimpleParams<WorkflowitemDto>>() {
<ide>
<del> @Override
<del> public void success(SimpleParams<WorkflowitemDto> result) {
<del> MessageBus.sendMessage(new RefreshBoardsRequestMessage("", this));
<del> }
<del> });
<add> ServerCommandInvokerManager
<add> .getInvoker()
<add> .<EditWorkflowParams, SimpleParams<WorkflowitemDto>> invokeCommand(
<add> ServerCommand.EDIT_WORKFLOW,
<add> new EditWorkflowParams(droppedItem, contextItem),
<add> new KanbanikAsyncCallback<SimpleParams<WorkflowitemDto>>() {
<add>
<add> @Override
<add> public void success(
<add> SimpleParams<WorkflowitemDto> result) {
<add> MessageBus
<add> .sendMessage(new RefreshBoardsRequestMessage(
<add> "", this));
<add> }
<add> });
<ide> }
<del>
<add>
<ide> private WorkflowitemDto findNextItem() {
<ide> if (position == Position.BEFORE) {
<ide> return currentItem;
<ide> // this can happen only if it has no children => has no next item
<ide> return null;
<ide> }
<del>
<add>
<ide> }
<ide> }
|
|
Java
|
apache-2.0
|
d19af9857596f3b808c3d153b2b942e34a85d96c
| 0 |
louishust/incubator-ignite,ilantukh/ignite,apache/ignite,alexzaitzev/ignite,avinogradovgg/ignite,SomeFire/ignite,mcherkasov/ignite,voipp/ignite,ilantukh/ignite,shroman/ignite,endian675/ignite,andrey-kuznetsov/ignite,apache/ignite,f7753/ignite,xtern/ignite,apacheignite/ignite,dlnufox/ignite,vldpyatkov/ignite,xtern/ignite,tkpanther/ignite,alexzaitzev/ignite,ilantukh/ignite,agura/incubator-ignite,voipp/ignite,DoudTechData/ignite,SomeFire/ignite,leveyj/ignite,SharplEr/ignite,sylentprayer/ignite,thuTom/ignite,StalkXT/ignite,dlnufox/ignite,adeelmahmood/ignite,nizhikov/ignite,gridgain/apache-ignite,WilliamDo/ignite,gargvish/ignite,louishust/incubator-ignite,SharplEr/ignite,SharplEr/ignite,kidaa/incubator-ignite,tkpanther/ignite,amirakhmedov/ignite,avinogradovgg/ignite,avinogradovgg/ignite,vladisav/ignite,vldpyatkov/ignite,rfqu/ignite,apache/ignite,adeelmahmood/ignite,irudyak/ignite,sk0x50/ignite,apacheignite/ignite,dream-x/ignite,nizhikov/ignite,SharplEr/ignite,pperalta/ignite,dream-x/ignite,vsisko/incubator-ignite,dream-x/ignite,agura/incubator-ignite,andrey-kuznetsov/ignite,vsisko/incubator-ignite,voipp/ignite,xtern/ignite,VladimirErshov/ignite,zzcclp/ignite,nivanov/ignite,kidaa/incubator-ignite,ryanzz/ignite,SomeFire/ignite,vadopolski/ignite,ntikhonov/ignite,apacheignite/ignite,ashutakGG/incubator-ignite,wmz7year/ignite,rfqu/ignite,dream-x/ignite,apache/ignite,zzcclp/ignite,vsuslov/incubator-ignite,pperalta/ignite,vsuslov/incubator-ignite,chandresh-pancholi/ignite,kromulan/ignite,ashutakGG/incubator-ignite,nizhikov/ignite,a1vanov/ignite,avinogradovgg/ignite,a1vanov/ignite,zzcclp/ignite,dream-x/ignite,ntikhonov/ignite,gridgain/apache-ignite,nivanov/ignite,agura/incubator-ignite,alexzaitzev/ignite,chandresh-pancholi/ignite,mcherkasov/ignite,shurun19851206/ignite,ptupitsyn/ignite,a1vanov/ignite,irudyak/ignite,ptupitsyn/ignite,murador/ignite,NSAmelchev/ignite,iveselovskiy/ignite,daradurvs/ignite,leveyj/ignite,ntikhonov/ignite,WilliamDo/ignite,apache/ignite,zzcclp/ignite,sk0x50/ignite,voipp/ignite,apache/ignite,gargvish/ignite,vldpyatkov/ignite,SharplEr/ignite,akuznetsov-gridgain/ignite,daradurvs/ignite,a1vanov/ignite,thuTom/ignite,iveselovskiy/ignite,vsisko/incubator-ignite,a1vanov/ignite,vsuslov/incubator-ignite,vsuslov/incubator-ignite,VladimirErshov/ignite,ptupitsyn/ignite,irudyak/ignite,agoncharuk/ignite,xtern/ignite,avinogradovgg/ignite,vsisko/incubator-ignite,andrey-kuznetsov/ignite,apache/ignite,BiryukovVA/ignite,agoncharuk/ignite,louishust/incubator-ignite,vldpyatkov/ignite,psadusumilli/ignite,vsisko/incubator-ignite,shurun19851206/ignite,chandresh-pancholi/ignite,wmz7year/ignite,chandresh-pancholi/ignite,NSAmelchev/ignite,dream-x/ignite,agoncharuk/ignite,daradurvs/ignite,tkpanther/ignite,tkpanther/ignite,abhishek-ch/incubator-ignite,kidaa/incubator-ignite,pperalta/ignite,thuTom/ignite,sylentprayer/ignite,WilliamDo/ignite,irudyak/ignite,amirakhmedov/ignite,SomeFire/ignite,alexzaitzev/ignite,adeelmahmood/ignite,svladykin/ignite,xtern/ignite,StalkXT/ignite,voipp/ignite,pperalta/ignite,endian675/ignite,dmagda/incubator-ignite,daradurvs/ignite,amirakhmedov/ignite,StalkXT/ignite,zzcclp/ignite,dream-x/ignite,thuTom/ignite,andrey-kuznetsov/ignite,ashutakGG/incubator-ignite,shroman/ignite,vadopolski/ignite,murador/ignite,ilantukh/ignite,tkpanther/ignite,leveyj/ignite,dlnufox/ignite,daradurvs/ignite,nivanov/ignite,iveselovskiy/ignite,kromulan/ignite,kromulan/ignite,ascherbakoff/ignite,xtern/ignite,agoncharuk/ignite,VladimirErshov/ignite,ilantukh/ignite,nivanov/ignite,agoncharuk/ignite,NSAmelchev/ign
ite,murador/ignite,VladimirErshov/ignite,BiryukovVA/ignite,BiryukovVA/ignite,a1vanov/ignite,gridgain/apache-ignite,dmagda/incubator-ignite,ntikhonov/ignite,wmz7year/ignite,SomeFire/ignite,nivanov/ignite,iveselovskiy/ignite,arijitt/incubator-ignite,kidaa/incubator-ignite,SharplEr/ignite,vsuslov/incubator-ignite,shurun19851206/ignite,DoudTechData/ignite,rfqu/ignite,louishust/incubator-ignite,ilantukh/ignite,endian675/ignite,abhishek-ch/incubator-ignite,vadopolski/ignite,pperalta/ignite,mcherkasov/ignite,NSAmelchev/ignite,psadusumilli/ignite,ashutakGG/incubator-ignite,BiryukovVA/ignite,samaitra/ignite,ascherbakoff/ignite,alexzaitzev/ignite,kidaa/incubator-ignite,wmz7year/ignite,agura/incubator-ignite,ascherbakoff/ignite,voipp/ignite,shroman/ignite,adeelmahmood/ignite,arijitt/incubator-ignite,agura/incubator-ignite,leveyj/ignite,f7753/ignite,dmagda/incubator-ignite,nizhikov/ignite,sylentprayer/ignite,f7753/ignite,SomeFire/ignite,WilliamDo/ignite,samaitra/ignite,StalkXT/ignite,sylentprayer/ignite,vldpyatkov/ignite,vldpyatkov/ignite,BiryukovVA/ignite,voipp/ignite,agoncharuk/ignite,ilantukh/ignite,sk0x50/ignite,ryanzz/ignite,vladisav/ignite,VladimirErshov/ignite,a1vanov/ignite,vladisav/ignite,vadopolski/ignite,BiryukovVA/ignite,apache/ignite,svladykin/ignite,endian675/ignite,andrey-kuznetsov/ignite,shroman/ignite,shroman/ignite,apacheignite/ignite,SomeFire/ignite,amirakhmedov/ignite,daradurvs/ignite,samaitra/ignite,rfqu/ignite,rfqu/ignite,rfqu/ignite,iveselovskiy/ignite,abhishek-ch/incubator-ignite,gargvish/ignite,murador/ignite,ascherbakoff/ignite,amirakhmedov/ignite,ptupitsyn/ignite,dmagda/incubator-ignite,amirakhmedov/ignite,afinka77/ignite,nizhikov/ignite,ptupitsyn/ignite,agoncharuk/ignite,BiryukovVA/ignite,dlnufox/ignite,vsisko/incubator-ignite,WilliamDo/ignite,akuznetsov-gridgain/ignite,dream-x/ignite,BiryukovVA/ignite,ryanzz/ignite,ptupitsyn/ignite,zzcclp/ignite,abhishek-ch/incubator-ignite,dlnufox/ignite,afinka77/ignite,andrey-kuznetsov/ignite,irudyak/ignite,daradurvs/ignite,mcherkasov/ignite,NSAmelchev/ignite,sk0x50/ignite,StalkXT/ignite,nizhikov/ignite,sk0x50/ignite,pperalta/ignite,nivanov/ignite,shroman/ignite,ascherbakoff/ignite,agura/incubator-ignite,svladykin/ignite,a1vanov/ignite,ptupitsyn/ignite,vadopolski/ignite,VladimirErshov/ignite,psadusumilli/ignite,endian675/ignite,ntikhonov/ignite,NSAmelchev/ignite,dlnufox/ignite,adeelmahmood/ignite,shroman/ignite,ptupitsyn/ignite,SomeFire/ignite,vsuslov/incubator-ignite,shroman/ignite,irudyak/ignite,shroman/ignite,ntikhonov/ignite,tkpanther/ignite,murador/ignite,xtern/ignite,amirakhmedov/ignite,vldpyatkov/ignite,chandresh-pancholi/ignite,wmz7year/ignite,gridgain/apache-ignite,shurun19851206/ignite,shurun19851206/ignite,mcherkasov/ignite,tkpanther/ignite,f7753/ignite,f7753/ignite,afinka77/ignite,rfqu/ignite,samaitra/ignite,nizhikov/ignite,DoudTechData/ignite,svladykin/ignite,ryanzz/ignite,endian675/ignite,vladisav/ignite,thuTom/ignite,ashutakGG/incubator-ignite,adeelmahmood/ignite,leveyj/ignite,gargvish/ignite,ptupitsyn/ignite,amirakhmedov/ignite,NSAmelchev/ignite,thuTom/ignite,psadusumilli/ignite,wmz7year/ignite,kidaa/incubator-ignite,ascherbakoff/ignite,samaitra/ignite,vsisko/incubator-ignite,dmagda/incubator-ignite,dlnufox/ignite,abhishek-ch/incubator-ignite,leveyj/ignite,StalkXT/ignite,wmz7year/ignite,ptupitsyn/ignite,psadusumilli/ignite,psadusumilli/ignite,mcherkasov/ignite,nivanov/ignite,apacheignite/ignite,f7753/ignite,afinka77/ignite,arijitt/incubator-ignite,shurun19851206/ignite,kromulan/ignite,andrey-kuznetsov/ignite,akuznetsov-gri
dgain/ignite,VladimirErshov/ignite,andrey-kuznetsov/ignite,vladisav/ignite,daradurvs/ignite,kromulan/ignite,ntikhonov/ignite,vadopolski/ignite,gargvish/ignite,psadusumilli/ignite,pperalta/ignite,mcherkasov/ignite,ascherbakoff/ignite,StalkXT/ignite,irudyak/ignite,wmz7year/ignite,ryanzz/ignite,thuTom/ignite,gargvish/ignite,nivanov/ignite,arijitt/incubator-ignite,WilliamDo/ignite,ascherbakoff/ignite,gargvish/ignite,svladykin/ignite,sylentprayer/ignite,apacheignite/ignite,DoudTechData/ignite,endian675/ignite,shurun19851206/ignite,alexzaitzev/ignite,DoudTechData/ignite,StalkXT/ignite,sylentprayer/ignite,ntikhonov/ignite,StalkXT/ignite,afinka77/ignite,xtern/ignite,DoudTechData/ignite,SomeFire/ignite,abhishek-ch/incubator-ignite,vadopolski/ignite,sylentprayer/ignite,sk0x50/ignite,samaitra/ignite,DoudTechData/ignite,vladisav/ignite,sylentprayer/ignite,psadusumilli/ignite,voipp/ignite,alexzaitzev/ignite,murador/ignite,leveyj/ignite,avinogradovgg/ignite,avinogradovgg/ignite,f7753/ignite,afinka77/ignite,tkpanther/ignite,alexzaitzev/ignite,SomeFire/ignite,rfqu/ignite,NSAmelchev/ignite,chandresh-pancholi/ignite,afinka77/ignite,gridgain/apache-ignite,murador/ignite,gridgain/apache-ignite,irudyak/ignite,akuznetsov-gridgain/ignite,apache/ignite,mcherkasov/ignite,zzcclp/ignite,ryanzz/ignite,vldpyatkov/ignite,apacheignite/ignite,thuTom/ignite,irudyak/ignite,kromulan/ignite,endian675/ignite,sk0x50/ignite,DoudTechData/ignite,samaitra/ignite,arijitt/incubator-ignite,ryanzz/ignite,iveselovskiy/ignite,chandresh-pancholi/ignite,NSAmelchev/ignite,chandresh-pancholi/ignite,ascherbakoff/ignite,ilantukh/ignite,agura/incubator-ignite,sk0x50/ignite,afinka77/ignite,andrey-kuznetsov/ignite,louishust/incubator-ignite,svladykin/ignite,SharplEr/ignite,ilantukh/ignite,xtern/ignite,daradurvs/ignite,alexzaitzev/ignite,ilantukh/ignite,f7753/ignite,zzcclp/ignite,gargvish/ignite,kromulan/ignite,samaitra/ignite,akuznetsov-gridgain/ignite,dmagda/incubator-ignite,samaitra/ignite,ryanzz/ignite,BiryukovVA/ignite,arijitt/incubator-ignite,samaitra/ignite,WilliamDo/ignite,agura/incubator-ignite,pperalta/ignite,sk0x50/ignite,shurun19851206/ignite,nizhikov/ignite,shroman/ignite,dmagda/incubator-ignite,vsisko/incubator-ignite,BiryukovVA/ignite,akuznetsov-gridgain/ignite,ashutakGG/incubator-ignite,agoncharuk/ignite,voipp/ignite,kromulan/ignite,nizhikov/ignite,chandresh-pancholi/ignite,gridgain/apache-ignite,VladimirErshov/ignite,adeelmahmood/ignite,vladisav/ignite,SharplEr/ignite,vladisav/ignite,svladykin/ignite,andrey-kuznetsov/ignite,adeelmahmood/ignite,daradurvs/ignite,apacheignite/ignite,amirakhmedov/ignite,SharplEr/ignite,leveyj/ignite,louishust/incubator-ignite,dlnufox/ignite,dmagda/incubator-ignite,vadopolski/ignite,WilliamDo/ignite,murador/ignite
|
/* @java.file.header */
/* _________ _____ __________________ _____
* __ ____/___________(_)______ /__ ____/______ ____(_)_______
* _ / __ __ ___/__ / _ __ / _ / __ _ __ `/__ / __ __ \
* / /_/ / _ / _ / / /_/ / / /_/ / / /_/ / _ / _ / / /
* \____/ /_/ /_/ \_,__/ \____/ \__,_/ /_/ /_/ /_/
*/
package org.gridgain.grid.kernal.processors.cache.distributed.dht.atomic;
import org.gridgain.grid.*;
import org.gridgain.grid.cache.*;
import org.gridgain.grid.kernal.processors.cache.*;
import org.gridgain.grid.kernal.processors.cache.distributed.dht.*;
import org.gridgain.grid.kernal.processors.cache.distributed.dht.preloader.*;
import org.gridgain.grid.kernal.processors.cache.distributed.near.*;
import org.gridgain.grid.kernal.processors.cache.dr.*;
import org.gridgain.grid.kernal.processors.dr.*;
import org.gridgain.grid.kernal.processors.timeout.*;
import org.gridgain.grid.lang.*;
import org.gridgain.grid.util.*;
import org.gridgain.grid.util.future.*;
import org.gridgain.grid.util.lang.*;
import org.gridgain.grid.util.tostring.*;
import org.gridgain.grid.util.typedef.*;
import org.gridgain.grid.util.typedef.internal.*;
import org.jdk8.backport.*;
import org.jetbrains.annotations.*;
import sun.misc.*;
import java.io.*;
import java.util.*;
import java.util.concurrent.*;
import java.util.concurrent.atomic.*;
import java.util.concurrent.locks.*;
import static org.gridgain.grid.GridSystemProperties.*;
import static org.gridgain.grid.cache.GridCacheAtomicWriteOrderMode.*;
import static org.gridgain.grid.cache.GridCachePeekMode.*;
import static org.gridgain.grid.cache.GridCacheWriteSynchronizationMode.*;
import static org.gridgain.grid.kernal.processors.cache.GridCacheOperation.*;
import static org.gridgain.grid.kernal.processors.cache.GridCacheUtils.*;
import static org.gridgain.grid.kernal.processors.dr.GridDrType.*;
/**
* Non-transactional partitioned cache.
*/
@GridToStringExclude
public class GridDhtAtomicCache<K, V> extends GridDhtCacheAdapter<K, V> {
/** Deferred update response buffer size. */
private static final int DEFERRED_UPDATE_RESPONSE_BUFFER_SIZE =
Integer.getInteger(GG_ATOMIC_DEFERRED_ACK_BUFFER_SIZE, 256);
/** Deferred update response timeout. */
private static final int DEFERRED_UPDATE_RESPONSE_TIMEOUT =
Integer.getInteger(GG_ATOMIC_DEFERRED_ACK_TIMEOUT, 500);
/** Unsafe instance. */
private static final Unsafe UNSAFE = GridUnsafe.unsafe();
/** Will be {@code true} if affinity has backups. */
private boolean hasBackups;
/** Update reply closure. */
private CI2<GridNearAtomicUpdateRequest<K, V>, GridNearAtomicUpdateResponse<K, V>> updateReplyClos;
/** Pending deferred update responses, per node ID. */
private ConcurrentMap<UUID, DeferredResponseBuffer> pendingResponses = new ConcurrentHashMap8<>();
/** */
private GridNearAtomicCache<K, V> near;
/**
* Empty constructor required by {@link Externalizable}.
*/
public GridDhtAtomicCache() {
// No-op.
}
/**
* @param ctx Cache context.
*/
public GridDhtAtomicCache(GridCacheContext<K, V> ctx) {
super(ctx);
}
/**
* @param ctx Cache context.
* @param map Cache concurrent map.
*/
public GridDhtAtomicCache(GridCacheContext<K, V> ctx, GridCacheConcurrentMap<K, V> map) {
super(ctx, map);
}
/** {@inheritDoc} */
@Override public boolean isDhtAtomic() {
return true;
}
/** {@inheritDoc} */
@Override protected void init() {
map.setEntryFactory(new GridCacheMapEntryFactory<K, V>() {
/** {@inheritDoc} */
@Override public GridCacheMapEntry<K, V> create(GridCacheContext<K, V> ctx, long topVer, K key, int hash,
V val, GridCacheMapEntry<K, V> next, long ttl, int hdrId) {
return new GridDhtAtomicCacheEntry<>(ctx, topVer, key, hash, val, next, ttl, hdrId);
}
});
updateReplyClos = new CI2<GridNearAtomicUpdateRequest<K, V>, GridNearAtomicUpdateResponse<K, V>>() {
@Override public void apply(GridNearAtomicUpdateRequest<K, V> req, GridNearAtomicUpdateResponse<K, V> res) {
if (ctx.config().getAtomicWriteOrderMode() == CLOCK) {
// Always send reply in CLOCK ordering mode.
sendNearUpdateReply(res.nodeId(), res);
return;
}
// Request should be for primary keys only in PRIMARY ordering mode.
assert req.hasPrimary();
if (req.writeSynchronizationMode() != FULL_ASYNC)
sendNearUpdateReply(res.nodeId(), res);
else {
if (!F.isEmpty(res.remapKeys()))
// Remap keys on primary node in FULL_ASYNC mode.
remapToNewPrimary(req);
else if (res.error() != null) {
U.error(log, "Failed to process write update request in FULL_ASYNC mode for keys: " +
res.failedKeys(), res.error());
}
}
}
};
}
/** {@inheritDoc} */
@SuppressWarnings({"IfMayBeConditional", "SimplifiableIfStatement"})
@Override public void start() throws GridException {
hasBackups = ctx.config().getBackups() > 0;
preldr = new GridDhtPreloader<>(ctx);
preldr.start();
ctx.io().addHandler(GridNearGetRequest.class, new CI2<UUID, GridNearGetRequest<K, V>>() {
@Override public void apply(UUID nodeId, GridNearGetRequest<K, V> req) {
processNearGetRequest(nodeId, req);
}
});
ctx.io().addHandler(GridNearAtomicUpdateRequest.class, new CI2<UUID, GridNearAtomicUpdateRequest<K, V>>() {
@Override public void apply(UUID nodeId, GridNearAtomicUpdateRequest<K, V> req) {
processNearAtomicUpdateRequest(nodeId, req);
}
});
ctx.io().addHandler(GridNearAtomicUpdateResponse.class, new CI2<UUID, GridNearAtomicUpdateResponse<K, V>>() {
@Override public void apply(UUID nodeId, GridNearAtomicUpdateResponse<K, V> res) {
processNearAtomicUpdateResponse(nodeId, res);
}
});
ctx.io().addHandler(GridDhtAtomicUpdateRequest.class, new CI2<UUID, GridDhtAtomicUpdateRequest<K, V>>() {
@Override public void apply(UUID nodeId, GridDhtAtomicUpdateRequest<K, V> req) {
processDhtAtomicUpdateRequest(nodeId, req);
}
});
ctx.io().addHandler(GridDhtAtomicUpdateResponse.class, new CI2<UUID, GridDhtAtomicUpdateResponse<K, V>>() {
@Override public void apply(UUID nodeId, GridDhtAtomicUpdateResponse<K, V> res) {
processDhtAtomicUpdateResponse(nodeId, res);
}
});
ctx.io().addHandler(GridDhtAtomicDeferredUpdateResponse.class,
new CI2<UUID, GridDhtAtomicDeferredUpdateResponse<K, V>>() {
@Override public void apply(UUID nodeId, GridDhtAtomicDeferredUpdateResponse<K, V> res) {
processDhtAtomicDeferredUpdateResponse(nodeId, res);
}
});
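// When a near cache is configured it presumably registers its own handler for near get responses, so the handler below is only needed in the plain DHT case.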
if (near == null) {
ctx.io().addHandler(GridNearGetResponse.class, new CI2<UUID, GridNearGetResponse<K, V>>() {
@Override public void apply(UUID nodeId, GridNearGetResponse<K, V> res) {
processNearGetResponse(nodeId, res);
}
});
}
}
/**
* @param near Near cache.
*/
public void near(GridNearAtomicCache<K, V> near) {
this.near = near;
}
/** {@inheritDoc} */
@Override public GridNearCacheAdapter<K, V> near() {
return near;
}
/**
* @return Whether backups are configured for this cache.
*/
public boolean hasBackups() {
return hasBackups;
}
/** {@inheritDoc} */
@Override public GridCacheEntry<K, V> entry(K key) {
return new GridDhtCacheEntryImpl<>(ctx.projectionPerCall(), ctx, key, null);
}
/** {@inheritDoc} */
@Override public V peek(K key, @Nullable Collection<GridCachePeekMode> modes) throws GridException {
GridTuple<V> val = null;
if (ctx.isReplicated() || !modes.contains(NEAR_ONLY)) {
try {
val = peek0(true, key, modes, ctx.tm().txx());
}
catch (GridCacheFilterFailedException ignored) {
if (log.isDebugEnabled())
log.debug("Filter validation failed for key: " + key);
return null;
}
}
return val != null ? val.get() : null;
}
/** {@inheritDoc} */
@Override public GridCacheTxLocalAdapter<K, V> newTx(
boolean implicit,
boolean implicitSingle,
GridCacheTxConcurrency concurrency,
GridCacheTxIsolation isolation,
long timeout,
boolean invalidate,
boolean syncCommit,
boolean syncRollback,
boolean swapEnabled,
boolean storeEnabled,
int txSize,
@Nullable Object grpLockKey,
boolean partLock
) {
throw new UnsupportedOperationException("Transactions are not supported for " +
"GridCacheAtomicityMode.ATOMIC mode (use GridCacheAtomicityMode.TRANSACTIONAL instead)");
}
/** {@inheritDoc} */
@Override public GridFuture<Map<K, V>> getAllAsync(
@Nullable final Collection<? extends K> keys,
final boolean forcePrimary,
boolean skipTx,
@Nullable final GridCacheEntryEx<K, V> entry,
@Nullable final GridPredicate<GridCacheEntry<K, V>>[] filter
) {
return asyncOp(new CO<GridFuture<Map<K, V>>>() {
@Override public GridFuture<Map<K, V>> apply() {
return getAllAsync0(keys, false, forcePrimary, filter);
}
});
}
/** {@inheritDoc} */
@Override public V put(K key, V val, @Nullable GridCacheEntryEx<K, V> cached, long ttl,
@Nullable GridPredicate<GridCacheEntry<K, V>>[] filter) throws GridException {
return putAsync(key, val, cached, ttl, filter).get();
}
/** {@inheritDoc} */
@Override public boolean putx(K key, V val, @Nullable GridCacheEntryEx<K, V> cached,
long ttl, @Nullable GridPredicate<GridCacheEntry<K, V>>... filter) throws GridException {
return putxAsync(key, val, cached, ttl, filter).get();
}
/** {@inheritDoc} */
@Override public boolean putx(K key, V val,
GridPredicate<GridCacheEntry<K, V>>[] filter) throws GridException {
return putxAsync(key, val, filter).get();
}
/** {@inheritDoc} */
@SuppressWarnings("unchecked")
@Override public GridFuture<V> putAsync(K key, V val, @Nullable GridCacheEntryEx<K, V> entry,
long ttl, @Nullable GridPredicate<GridCacheEntry<K, V>>... filter) {
return updateAllAsync0(F0.asMap(key, val), null, null, null, true, false, entry, ttl, filter);
}
/** {@inheritDoc} */
@SuppressWarnings("unchecked")
@Override public GridFuture<Boolean> putxAsync(K key, V val, @Nullable GridCacheEntryEx<K, V> entry, long ttl,
@Nullable GridPredicate<GridCacheEntry<K, V>>... filter) {
return updateAllAsync0(F0.asMap(key, val), null, null, null, false, false, entry, ttl, filter);
}
/** {@inheritDoc} */
@Override public V putIfAbsent(K key, V val) throws GridException {
return putIfAbsentAsync(key, val).get();
}
/** {@inheritDoc} */
@Override public GridFuture<V> putIfAbsentAsync(K key, V val) {
return putAsync(key, val, ctx.noPeekArray());
}
/** {@inheritDoc} */
@Override public boolean putxIfAbsent(K key, V val) throws GridException {
return putxIfAbsentAsync(key, val).get();
}
/** {@inheritDoc} */
@Override public GridFuture<Boolean> putxIfAbsentAsync(K key, V val) {
return putxAsync(key, val, ctx.noPeekArray());
}
/** {@inheritDoc} */
@Override public V replace(K key, V val) throws GridException {
return replaceAsync(key, val).get();
}
/** {@inheritDoc} */
@Override public GridFuture<V> replaceAsync(K key, V val) {
return putAsync(key, val, ctx.hasPeekArray());
}
/** {@inheritDoc} */
@Override public boolean replacex(K key, V val) throws GridException {
return replacexAsync(key, val).get();
}
/** {@inheritDoc} */
@Override public GridFuture<Boolean> replacexAsync(K key, V val) {
return putxAsync(key, val, ctx.hasPeekArray());
}
/** {@inheritDoc} */
@Override public boolean replace(K key, V oldVal, V newVal) throws GridException {
return replaceAsync(key, oldVal, newVal).get();
}
/** {@inheritDoc} */
@Override public GridFuture<Boolean> replaceAsync(K key, V oldVal, V newVal) {
return putxAsync(key, newVal, ctx.equalsPeekArray(oldVal));
}
/** {@inheritDoc} */
@Override public GridCacheReturn<V> removex(K key, V val) throws GridException {
return removexAsync(key, val).get();
}
/** {@inheritDoc} */
@Override public GridCacheReturn<V> replacex(K key, V oldVal, V newVal) throws GridException {
return replacexAsync(key, oldVal, newVal).get();
}
/** {@inheritDoc} */
@SuppressWarnings("unchecked")
@Override public GridFuture<GridCacheReturn<V>> removexAsync(K key, V val) {
return removeAllAsync0(F.asList(key), null, null, true, true, ctx.equalsPeekArray(val));
}
/** {@inheritDoc} */
@SuppressWarnings("unchecked")
@Override public GridFuture<GridCacheReturn<V>> replacexAsync(K key, V oldVal, V newVal) {
return updateAllAsync0(F.asMap(key, newVal), null, null, null, true, true, null, 0,
ctx.equalsPeekArray(oldVal));
}
/** {@inheritDoc} */
@Override public void putAll(Map<? extends K, ? extends V> m,
GridPredicate<GridCacheEntry<K, V>>[] filter) throws GridException {
putAllAsync(m, filter).get();
}
/** {@inheritDoc} */
@Override public GridFuture<?> putAllAsync(Map<? extends K, ? extends V> m,
@Nullable GridPredicate<GridCacheEntry<K, V>>[] filter) {
return updateAllAsync0(m, null, null, null, false, false, null, 0, filter);
}
/** {@inheritDoc} */
@Override public void putAllDr(Map<? extends K, GridCacheDrInfo<V>> drMap) throws GridException {
putAllDrAsync(drMap).get();
}
/** {@inheritDoc} */
@Override public GridFuture<?> putAllDrAsync(Map<? extends K, GridCacheDrInfo<V>> drMap) {
metrics.onReceiveCacheEntriesReceived(drMap.size());
return updateAllAsync0(null, null, drMap, null, false, false, null, 0, null);
}
/** {@inheritDoc} */
@Override public void transform(K key, GridClosure<V, V> transformer) throws GridException {
transformAsync(key, transformer).get();
}
/** {@inheritDoc} */
@Override public GridFuture<?> transformAsync(K key, GridClosure<V, V> transformer,
@Nullable GridCacheEntryEx<K, V> entry, long ttl) {
return updateAllAsync0(null, Collections.singletonMap(key, transformer), null, null, false, false, entry, ttl,
null);
}
/** {@inheritDoc} */
@Override public void transformAll(@Nullable Map<? extends K, ? extends GridClosure<V, V>> m) throws GridException {
transformAllAsync(m).get();
}
/** {@inheritDoc} */
@Override public GridFuture<?> transformAllAsync(@Nullable Map<? extends K, ? extends GridClosure<V, V>> m) {
if (F.isEmpty(m))
return new GridFinishedFuture<Object>(ctx.kernalContext());
return updateAllAsync0(null, m, null, null, false, false, null, 0, null);
}
/** {@inheritDoc} */
@Override public V remove(K key, @Nullable GridCacheEntryEx<K, V> entry,
@Nullable GridPredicate<GridCacheEntry<K, V>>... filter) throws GridException {
return removeAsync(key, entry, filter).get();
}
/** {@inheritDoc} */
@SuppressWarnings("unchecked")
@Override public GridFuture<V> removeAsync(K key, @Nullable GridCacheEntryEx<K, V> entry,
@Nullable GridPredicate<GridCacheEntry<K, V>>... filter) {
return removeAllAsync0(Collections.singletonList(key), null, entry, true, false, filter);
}
/** {@inheritDoc} */
@Override public void removeAll(Collection<? extends K> keys,
GridPredicate<GridCacheEntry<K, V>>... filter) throws GridException {
removeAllAsync(keys, filter).get();
}
/** {@inheritDoc} */
@Override public GridFuture<?> removeAllAsync(Collection<? extends K> keys,
GridPredicate<GridCacheEntry<K, V>>[] filter) {
return removeAllAsync0(keys, null, null, false, false, filter);
}
/** {@inheritDoc} */
@Override public boolean removex(K key, @Nullable GridCacheEntryEx<K, V> entry,
@Nullable GridPredicate<GridCacheEntry<K, V>>... filter) throws GridException {
return removexAsync(key, entry, filter).get();
}
/** {@inheritDoc} */
@SuppressWarnings("unchecked")
@Override public GridFuture<Boolean> removexAsync(K key, @Nullable GridCacheEntryEx<K, V> entry,
@Nullable GridPredicate<GridCacheEntry<K, V>>... filter) {
return removeAllAsync0(Collections.singletonList(key), null, entry, false, false, filter);
}
/** {@inheritDoc} */
@Override public boolean remove(K key, V val) throws GridException {
return removeAsync(key, val).get();
}
/** {@inheritDoc} */
@Override public GridFuture<Boolean> removeAsync(K key, V val) {
return removexAsync(key, ctx.equalsPeekArray(val));
}
/** {@inheritDoc} */
@Override public void removeAll(GridPredicate<GridCacheEntry<K, V>>[] filter) throws GridException {
removeAllAsync(filter).get();
}
/** {@inheritDoc} */
@Override public GridFuture<?> removeAllAsync(GridPredicate<GridCacheEntry<K, V>>[] filter) {
return removeAllAsync(keySet(filter), filter);
}
/** {@inheritDoc} */
@Override public void removeAllDr(Map<? extends K, GridCacheVersion> drMap) throws GridException {
removeAllDrAsync(drMap).get();
}
/** {@inheritDoc} */
@Override public GridFuture<?> removeAllDrAsync(Map<? extends K, GridCacheVersion> drMap) {
metrics.onReceiveCacheEntriesReceived(drMap.size());
return removeAllAsync0(null, drMap, null, false, false, null);
}
/**
* @return {@code True} if store enabled.
*/
private boolean storeEnabled() {
return ctx.isStoreEnabled() && ctx.config().getStore() != null;
}
/**
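* Performs an asynchronous cache operation. If the future kept in the per-thread holder (assumed to be
* thread-local) has not completed yet, the new operation is chained after it, so asynchronous updates
* issued from the same thread are applied in order.
*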
* @param op Operation closure.
* @return Future.
*/
@SuppressWarnings("unchecked")
protected <T> GridFuture<T> asyncOp(final CO<GridFuture<T>> op) {
GridFuture<T> fail = asyncOpAcquire();
if (fail != null)
return fail;
FutureHolder holder = lastFut.get();
holder.lock();
try {
GridFuture fut = holder.future();
if (fut != null && !fut.isDone()) {
GridFuture<T> f = new GridEmbeddedFuture<>(fut,
new C2<T, Exception, GridFuture<T>>() {
@Override public GridFuture<T> apply(T t, Exception e) {
return op.apply();
}
}, ctx.kernalContext());
saveFuture(holder, f);
return f;
}
GridFuture<T> f = op.apply();
saveFuture(holder, f);
return f;
}
finally {
holder.unlock();
}
}
/** {@inheritDoc} */
@Override protected GridFuture<Boolean> lockAllAsync(Collection<? extends K> keys,
long timeout,
@Nullable GridCacheTxLocalEx<K, V> tx,
boolean isInvalidate,
boolean isRead,
boolean retval,
@Nullable GridCacheTxIsolation isolation,
GridPredicate<GridCacheEntry<K, V>>[] filter) {
return new FinishedLockFuture(new UnsupportedOperationException("Locks are not supported for " +
"GridCacheAtomicityMode.ATOMIC mode (use GridCacheAtomicityMode.TRANSACTIONAL instead)"));
}
/**
* Entry point for all public API put/transform methods.
*
* @param map Put map. Either {@code map}, {@code transformMap} or {@code drMap} should be passed.
* @param transformMap Transform map. Either {@code map}, {@code transformMap} or {@code drMap} should be passed.
* @param drPutMap DR put map.
* @param drRmvMap DR remove map.
* @param retval Return value required flag.
* @param rawRetval Return {@code GridCacheReturn} instance.
* @param cached Cached cache entry for key. May be passed if and only if map size is {@code 1}.
* @param ttl Entry time-to-live.
* @param filter Cache entry filter for atomic updates.
* @return Completion future.
*/
private GridFuture updateAllAsync0(
@Nullable final Map<? extends K, ? extends V> map,
@Nullable final Map<? extends K, ? extends GridClosure<V, V>> transformMap,
@Nullable final Map<? extends K, GridCacheDrInfo<V>> drPutMap,
@Nullable final Map<? extends K, GridCacheVersion> drRmvMap,
final boolean retval,
final boolean rawRetval,
@Nullable GridCacheEntryEx<K, V> cached,
long ttl,
@Nullable final GridPredicate<GridCacheEntry<K, V>>[] filter
) {
final GridNearAtomicUpdateFuture<K, V> updateFut = new GridNearAtomicUpdateFuture<>(
ctx,
this,
ctx.config().getWriteSynchronizationMode(),
transformMap != null ? TRANSFORM : UPDATE,
map != null ? map.keySet() : transformMap != null ? transformMap.keySet() : drPutMap != null ?
drPutMap.keySet() : drRmvMap.keySet(),
map != null ? map.values() : transformMap != null ? transformMap.values() : null,
drPutMap != null ? drPutMap.values() : null,
drRmvMap != null ? drRmvMap.values() : null,
retval,
rawRetval,
cached,
ttl,
filter);
return asyncOp(new CO<GridFuture<Object>>() {
@Override public GridFuture<Object> apply() {
updateFut.map();
return updateFut;
}
});
}
/**
* Entry point for all public API remove methods.
*
* @param keys Keys to remove.
* @param drMap DR map.
* @param cached Cached cache entry for key. May be passed if and only if keys size is {@code 1}.
* @param retval Return value required flag.
* @param rawRetval Return {@code GridCacheReturn} instance.
* @param filter Cache entry filter for atomic removes.
* @return Completion future.
*/
private GridFuture removeAllAsync0(
@Nullable final Collection<? extends K> keys,
@Nullable final Map<? extends K, GridCacheVersion> drMap,
@Nullable GridCacheEntryEx<K, V> cached,
final boolean retval,
boolean rawRetval,
@Nullable final GridPredicate<GridCacheEntry<K, V>>[] filter
) {
assert keys != null || drMap != null;
final GridNearAtomicUpdateFuture<K, V> updateFut = new GridNearAtomicUpdateFuture<>(
ctx,
this,
ctx.config().getWriteSynchronizationMode(),
DELETE,
keys != null ? keys : drMap.keySet(),
null,
null,
keys != null ? null : drMap.values(),
retval,
rawRetval,
cached,
0,
filter);
return asyncOp(new CO<GridFuture<Object>>() {
@Override public GridFuture<Object> apply() {
updateFut.map();
return updateFut;
}
});
}
/**
* Entry point for all public API get methods.
*
* @param keys Keys to get.
* @param reload Reload flag.
* @param forcePrimary Force primary flag.
* @param filter Filter.
* @return Get future.
*/
private GridFuture<Map<K, V>> getAllAsync0(@Nullable Collection<? extends K> keys, boolean reload,
boolean forcePrimary, @Nullable GridPredicate<GridCacheEntry<K, V>>[] filter) {
if (F.isEmpty(keys))
return new GridFinishedFuture<>(ctx.kernalContext(), Collections.<K, V>emptyMap());
// Optimisation: try to resolve values locally and avoid 'get future' creation.
if (!reload && !forcePrimary) {
Map<K, V> locVals = new HashMap<>(keys.size(), 1.0f);
GridCacheVersion obsoleteVer = null;
boolean success = true;
long topVer = ctx.affinity().affinityTopologyVersion();
// Optimistically expect that all keys are available locally (avoid creation of get future).
for (K key : keys) {
GridCacheEntryEx<K, V> entry = null;
while (true) {
try {
entry = ctx.isSwapOrOffheapEnabled() ? entryEx(key) : peekEx(key);
// If our DHT cache does have the value, then we peek it.
if (entry != null) {
boolean isNew = entry.isNewLocked();
V v = entry.innerGet(null, /*swap*/true, /*read-through*/false, /*fail-fast*/true,
/*unmarshal*/true, /*update-metrics*/true, true, filter);
// Entry was not in memory or in swap, so we remove it from cache.
if (v == null) {
if (obsoleteVer == null)
obsoleteVer = context().versions().next();
if (isNew && entry.markObsoleteIfEmpty(obsoleteVer))
removeIfObsolete(key);
success = false;
}
else
locVals.put(key, v);
}
else
success = false;
break; // While.
}
catch (GridCacheEntryRemovedException ignored) {
// No-op, retry.
}
catch (GridCacheFilterFailedException ignored) {
// No-op, skip the key.
break;
}
catch (GridDhtInvalidPartitionException ignored) {
success = false;
break; // While.
}
catch (GridException e) {
return new GridFinishedFuture<>(ctx.kernalContext(), e);
}
finally {
if (entry != null)
ctx.evicts().touch(entry, topVer);
}
}
if (!success)
break;
}
if (success)
return ctx.wrapCloneMap(new GridFinishedFuture<>(ctx.kernalContext(), locVals));
}
// Either reload or not all values are available locally.
GridPartitionedGetFuture<K, V> fut = new GridPartitionedGetFuture<>(ctx, keys, reload, forcePrimary, null,
filter);
fut.init();
return ctx.wrapCloneMap(fut);
}
/**
* Executes local update.
*
* @param nodeId Node ID.
* @param req Update request.
* @param cached Cached entry if updating single local entry.
* @param completionCb Completion callback.
*/
public void updateAllAsyncInternal(
final UUID nodeId,
final GridNearAtomicUpdateRequest<K, V> req,
@Nullable final GridCacheEntryEx<K, V> cached,
final CI2<GridNearAtomicUpdateRequest<K, V>, GridNearAtomicUpdateResponse<K, V>> completionCb
) {
GridFuture<Object> forceFut = preldr.request(req.keys(), req.topologyVersion());
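// The preloader force-fetches the requested keys (see updateAllAsyncInternal0 javadoc), so the update is applied only once the required values are available locally.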
if (forceFut.isDone())
updateAllAsyncInternal0(nodeId, req, cached, completionCb);
else {
forceFut.listenAsync(new CI1<GridFuture<Object>>() {
@Override public void apply(GridFuture<Object> t) {
updateAllAsyncInternal0(nodeId, req, cached, completionCb);
}
});
}
}
/**
* Executes local update after preloader fetched values.
*
* @param nodeId Node ID.
* @param req Update request.
* @param cached Cached entry if updating single local entry.
* @param completionCb Completion callback.
*/
public void updateAllAsyncInternal0(
UUID nodeId,
GridNearAtomicUpdateRequest<K, V> req,
@Nullable GridCacheEntryEx<K, V> cached,
CI2<GridNearAtomicUpdateRequest<K, V>, GridNearAtomicUpdateResponse<K, V>> completionCb
) {
GridNearAtomicUpdateResponse<K, V> res = new GridNearAtomicUpdateResponse<>(nodeId, req.futureVersion());
List<K> keys = req.keys();
assert !req.returnValue() || keys.size() == 1;
GridDhtAtomicUpdateFuture<K, V> dhtFut = null;
boolean remap = false;
try {
// If batch store update is enabled, we need to lock all entries.
// First, we need to acquire locks on cache entries, then check the filter.
List<GridDhtCacheEntry<K, V>> locked = lockEntries(keys, req.topologyVersion());
Collection<GridBiTuple<GridDhtCacheEntry<K, V>, GridCacheVersion>> deleted = null;
try {
topology().readLock();
try {
// Do not check topology version for CLOCK versioning since
// partition exchange will wait for near update future.
if (topology().topologyVersion() == req.topologyVersion() ||
ctx.config().getAtomicWriteOrderMode() == CLOCK) {
GridNode node = ctx.discovery().node(nodeId);
if (node == null) {
U.warn(log, "Node originated update request left grid: " + nodeId);
return;
}
boolean hasNear = U.hasNearCache(node, name());
GridCacheVersion ver = req.updateVersion();
if (ver == null) {
// Assign next version for update inside entries lock.
ver = ctx.versions().next(req.topologyVersion());
if (hasNear)
res.nearVersion(ver);
}
assert ver != null : "Got null version for update request: " + req;
if (log.isDebugEnabled())
log.debug("Using cache version for update request on primary node [ver=" + ver +
", req=" + req + ']');
dhtFut = createDhtFuture(ver, req, res, completionCb, false);
GridCacheReturn<V> retVal = null;
boolean replicate = ctx.isDrEnabled();
if (storeEnabled() && keys.size() > 1 && cacheCfg.getDrReceiverConfiguration() == null) {
// This method can only be used when there are no replicated entries in the batch.
UpdateBatchResult<K, V> updRes = updateWithBatch(nodeId, hasNear, req, res, locked, ver,
dhtFut, completionCb, replicate);
deleted = updRes.deleted();
dhtFut = updRes.dhtFuture();
}
else {
UpdateSingleResult<K, V> updRes = updateSingle(nodeId, hasNear, req, res, locked, ver,
dhtFut, completionCb, replicate);
retVal = updRes.returnValue();
deleted = updRes.deleted();
dhtFut = updRes.dhtFuture();
}
if (retVal == null)
retVal = new GridCacheReturn<>(null, true);
res.returnValue(retVal);
}
else
// Should remap all keys.
remap = true;
}
finally {
topology().readUnlock();
}
}
catch (GridCacheEntryRemovedException e) {
assert false : "Entry should not become obsolete while holding lock.";
e.printStackTrace();
}
finally {
unlockEntries(locked, req.topologyVersion());
// Enqueue if necessary after locks release.
if (deleted != null) {
assert !deleted.isEmpty();
assert ctx.deferredDelete();
for (GridBiTuple<GridDhtCacheEntry<K, V>, GridCacheVersion> e : deleted)
ctx.onDeferredDelete(e.get1(), e.get2());
}
}
}
catch (GridDhtInvalidPartitionException ignore) {
assert ctx.config().getAtomicWriteOrderMode() == PRIMARY;
if (log.isDebugEnabled())
log.debug("Caught invalid partition exception for cache entry (will remap update request): " + req);
remap = true;
}
if (remap) {
assert dhtFut == null;
res.remapKeys(req.keys());
completionCb.apply(req, res);
}
else {
// If there are backups, map backup update future.
if (dhtFut != null)
dhtFut.map();
// Otherwise, complete the call.
else
completionCb.apply(req, res);
}
}
/**
* Updates locked entries using batched write-through.
*
* @param nodeId Sender node ID.
* @param hasNear {@code True} if originating node has near cache.
* @param req Update request.
* @param res Update response.
* @param locked Locked entries.
* @param ver Assigned version.
* @param dhtFut Optional DHT future.
* @param completionCb Completion callback to invoke when DHT future is completed.
* @param replicate Whether replication is enabled.
* @return Update batch result.
* @throws GridCacheEntryRemovedException Should not be thrown.
*/
private UpdateBatchResult<K, V> updateWithBatch(
UUID nodeId,
boolean hasNear,
GridNearAtomicUpdateRequest<K, V> req,
GridNearAtomicUpdateResponse<K, V> res,
List<GridDhtCacheEntry<K, V>> locked,
GridCacheVersion ver,
@Nullable GridDhtAtomicUpdateFuture<K, V> dhtFut,
CI2<GridNearAtomicUpdateRequest<K, V>, GridNearAtomicUpdateResponse<K, V>> completionCb,
boolean replicate
) throws GridCacheEntryRemovedException {
// Cannot update in batches during DR due to possible conflicts.
assert !req.returnValue(); // Should not request return values for putAll.
int size = req.keys().size();
Map<K, V> putMap = null;
Collection<K> rmvKeys = null;
UpdateBatchResult<K, V> updRes = new UpdateBatchResult<>();
List<GridDhtCacheEntry<K, V>> filtered = new ArrayList<>(size);
GridCacheOperation op = req.operation();
int firstEntryIdx = 0;
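// Entries are processed in request order; whenever a transform switches between put and remove semantics, the batch collected so far is flushed via updatePartialBatch(), presumably to keep write-through ordering intact.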
for (int i = 0; i < locked.size(); i++) {
GridDhtCacheEntry<K, V> entry = locked.get(i);
if (entry == null)
continue;
try {
if (!checkFilter(entry, req, res)) {
if (log.isDebugEnabled())
log.debug("Entry did not pass the filter (will skip write) [entry=" + entry +
", filter=" + Arrays.toString(req.filter()) + ", res=" + res + ']');
if (hasNear)
res.addSkippedIndex(i);
firstEntryIdx++;
continue;
}
filtered.add(entry);
if (op == TRANSFORM) {
V old = entry.innerGet(
null,
/*read swap*/true,
/*read through*/true,
/*fail fast*/false,
/*unmarshal*/true,
/*metrics*/true,
/*event*/true,
CU.<K, V>empty());
GridClosure<V, V> transform = req.transformClosure(i);
V updated = transform.apply(old);
if (updated == null) {
// Update previous batch.
if (putMap != null) {
dhtFut = updatePartialBatch(
hasNear,
firstEntryIdx,
filtered,
ver,
nodeId,
putMap,
null,
dhtFut,
completionCb,
req,
res,
replicate,
updRes);
firstEntryIdx = i + 1;
putMap = null;
filtered = new ArrayList<>();
}
// Start collecting new batch.
if (rmvKeys == null)
rmvKeys = new ArrayList<>(size);
rmvKeys.add(entry.key());
}
else {
// Update previous batch.
if (rmvKeys != null) {
dhtFut = updatePartialBatch(
hasNear,
firstEntryIdx,
filtered,
ver,
nodeId,
null,
rmvKeys,
dhtFut,
completionCb,
req,
res,
replicate,
updRes);
firstEntryIdx = i + 1;
rmvKeys = null;
filtered = new ArrayList<>();
}
if (putMap == null)
putMap = new LinkedHashMap<>(size, 1.0f);
putMap.put(entry.key(), updated);
}
}
else if (op == UPDATE) {
if (putMap == null)
putMap = new LinkedHashMap<>(size, 1.0f);
V updated = req.value(i);
assert updated != null;
putMap.put(entry.key(), updated);
}
else {
assert op == DELETE;
if (rmvKeys == null)
rmvKeys = new ArrayList<>(size);
rmvKeys.add(entry.key());
}
}
catch (GridException e) {
res.addFailedKey(entry.key(), e);
}
catch (GridCacheFilterFailedException ignore) {
assert false : "Filter should never fail with failFast=false and empty filter.";
}
}
// Store final batch.
if (putMap != null || rmvKeys != null) {
dhtFut = updatePartialBatch(
hasNear,
firstEntryIdx,
filtered,
ver,
nodeId,
putMap,
rmvKeys,
dhtFut,
completionCb,
req,
res,
replicate,
updRes);
}
else
assert filtered.isEmpty();
updRes.dhtFuture(dhtFut);
return updRes;
}
/**
* Updates locked entries one-by-one.
*
* @param nodeId Originating node ID.
* @param hasNear {@code True} if originating node has near cache.
* @param req Update request.
* @param res Update response.
* @param locked Locked entries.
* @param ver Assigned update version.
* @param dhtFut Optional DHT future.
* @param completionCb Completion callback to invoke when DHT future is completed.
* @param replicate Whether DR is enabled for that cache.
* @return Update result.
* @throws GridCacheEntryRemovedException Should never be thrown.
*/
private UpdateSingleResult<K, V> updateSingle(
UUID nodeId,
boolean hasNear,
GridNearAtomicUpdateRequest<K, V> req,
GridNearAtomicUpdateResponse<K, V> res,
List<GridDhtCacheEntry<K, V>> locked,
GridCacheVersion ver,
@Nullable GridDhtAtomicUpdateFuture<K, V> dhtFut,
CI2<GridNearAtomicUpdateRequest<K, V>, GridNearAtomicUpdateResponse<K, V>> completionCb,
boolean replicate
) throws GridCacheEntryRemovedException {
GridCacheReturn<V> retVal = null;
Collection<GridBiTuple<GridDhtCacheEntry<K, V>, GridCacheVersion>> deleted = null;
List<K> keys = req.keys();
long topVer = req.topologyVersion();
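// Reader tracking is needed when the originating node, or any other node in the topology, keeps a near cache for this cache.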
boolean checkReaders = hasNear || ctx.discovery().hasNearCache(name(), topVer);
boolean readersOnly = false;
// Avoid iterator creation.
for (int i = 0; i < keys.size(); i++) {
K k = keys.get(i);
GridCacheOperation op = req.operation();
// We are holding java-level locks on entries at this point.
// No GridCacheEntryRemovedException can be thrown.
try {
GridDhtCacheEntry<K, V> entry = locked.get(i);
if (entry == null)
continue;
GridCacheVersion newDrVer = req.drVersion(i);
long newDrTtl = req.drTtl(i);
long newDrExpireTime = req.drExpireTime(i);
assert !(newDrVer instanceof GridCacheVersionEx) : newDrVer; // Plain version is expected here.
if (newDrVer == null)
newDrVer = ver;
boolean primary = !req.fastMap() || ctx.affinity().primary(ctx.localNode(), entry.key(),
req.topologyVersion());
byte[] newValBytes = req.valueBytes(i);
Object writeVal = req.writeValue(i);
Collection<UUID> readers = null;
Collection<UUID> filteredReaders = null;
if (checkReaders) {
readers = entry.readers();
filteredReaders = F.view(entry.readers(), F.notEqualTo(nodeId));
}
GridCacheUpdateAtomicResult<K, V> updRes = entry.innerUpdate(
ver,
nodeId,
locNodeId,
op,
writeVal,
newValBytes,
primary && storeEnabled(),
req.returnValue(),
req.ttl(),
true,
true,
primary,
ctx.config().getAtomicWriteOrderMode() == CLOCK, // Check version in CLOCK mode on primary node.
req.filter(),
replicate ? primary ? DR_PRIMARY : DR_BACKUP : DR_NONE,
newDrTtl,
newDrExpireTime,
newDrVer,
true);
if (dhtFut == null && !F.isEmpty(filteredReaders)) {
dhtFut = createDhtFuture(ver, req, res, completionCb, true);
readersOnly = true;
}
if (dhtFut != null) {
if (updRes.sendToDht()) { // Send to backups even in case of remove-remove scenarios.
GridDrReceiverConflictContextImpl ctx = updRes.drConflictContext();
long ttl = updRes.newTtl();
long drExpireTime = updRes.drExpireTime();
if (ctx == null)
newDrVer = null;
else if (ctx.isMerge()) {
newDrVer = null; // DR version is discarded in case of merge.
newValBytes = null; // Value has been changed.
}
if (!readersOnly)
dhtFut.addWriteEntry(entry, updRes.newValue(), newValBytes, drExpireTime >= 0L ? ttl : -1L,
drExpireTime, newDrVer, drExpireTime < 0L ? ttl : 0L);
if (!F.isEmpty(filteredReaders))
dhtFut.addNearWriteEntries(filteredReaders, entry, updRes.newValue(), newValBytes,
drExpireTime < 0L ? ttl : 0L);
}
else {
if (log.isDebugEnabled())
log.debug("Entry did not pass the filter or conflict resolution (will skip write) " +
"[entry=" + entry + ", filter=" + Arrays.toString(req.filter()) + ']');
}
}
if (hasNear) {
if (primary && updRes.sendToDht()) {
if (!U.nodeIds(context().affinity().nodes(entry.partition(), topVer)).contains(nodeId)) {
GridDrReceiverConflictContextImpl ctx = updRes.drConflictContext();
res.nearTtl(updRes.newTtl());
if (ctx != null && ctx.isMerge())
newValBytes = null;
// If we put the same value as in the request, then we do not need to send it back.
if (op == TRANSFORM || writeVal != updRes.newValue())
res.addNearValue(i, updRes.newValue(), newValBytes);
if (updRes.newValue() != null || newValBytes != null) {
GridFuture<Boolean> f = entry.addReader(nodeId, req.messageId(), topVer);
assert f == null : f;
}
}
else if (F.contains(readers, nodeId)) // Reader became primary or backup.
entry.removeReader(nodeId, req.messageId());
else
res.addSkippedIndex(i);
}
else
res.addSkippedIndex(i);
}
if (updRes.removeVersion() != null) {
if (deleted == null)
deleted = new ArrayList<>(keys.size());
deleted.add(F.t(entry, updRes.removeVersion()));
}
// Create only once.
if (retVal == null)
retVal = new GridCacheReturn<>(updRes.oldValue(), updRes.success());
}
catch (GridException e) {
res.addFailedKey(k, e);
}
}
return new UpdateSingleResult<>(retVal, deleted, dhtFut);
}
/**
* @param hasNear {@code True} if originating node has near cache.
* @param firstEntryIdx Index of the first entry in the request keys collection.
* @param entries Entries to update.
* @param ver Version to set.
* @param nodeId Originating node ID.
* @param putMap Values to put.
* @param rmvKeys Keys to remove.
* @param dhtFut DHT update future if has backups.
* @param completionCb Completion callback to invoke when DHT future is completed.
* @param req Request.
* @param res Response.
* @param replicate Whether replication is enabled.
* @param batchRes Batch update result.
* @return DHT update future.
*/
@SuppressWarnings("ForLoopReplaceableByForEach")
@Nullable private GridDhtAtomicUpdateFuture<K, V> updatePartialBatch(
boolean hasNear,
int firstEntryIdx,
List<GridDhtCacheEntry<K, V>> entries,
final GridCacheVersion ver,
UUID nodeId,
@Nullable Map<K, V> putMap,
@Nullable Collection<K> rmvKeys,
@Nullable GridDhtAtomicUpdateFuture<K, V> dhtFut,
CI2<GridNearAtomicUpdateRequest<K, V>, GridNearAtomicUpdateResponse<K, V>> completionCb,
final GridNearAtomicUpdateRequest<K, V> req,
final GridNearAtomicUpdateResponse<K, V> res,
boolean replicate,
UpdateBatchResult<K, V> batchRes
) {
assert putMap == null ^ rmvKeys == null;
assert req.drVersions() == null : "updatePartialBatch cannot be called when there are DR entries in the batch.";
long topVer = req.topologyVersion();
boolean checkReaders = hasNear || ctx.discovery().hasNearCache(name(), topVer);
try {
GridCacheOperation op;
if (putMap != null) {
// If fast mapping, filter primary keys for write to store.
Map<K, V> storeMap = req.fastMap() ?
F.view(putMap, new P1<K>() {
@Override public boolean apply(K key) {
return ctx.affinity().primary(ctx.localNode(), key, req.topologyVersion());
}
}) :
putMap;
ctx.store().putAllToStore(null, F.viewReadOnly(storeMap, new C1<V, GridBiTuple<V, GridCacheVersion>>() {
@Override public GridBiTuple<V, GridCacheVersion> apply(V v) {
return F.t(v, ver);
}
}));
op = UPDATE;
}
else {
// If fast mapping, filter primary keys for write to store.
Collection<K> storeKeys = req.fastMap() ?
F.view(rmvKeys, new P1<K>() {
@Override public boolean apply(K key) {
return ctx.affinity().primary(ctx.localNode(), key, req.topologyVersion());
}
}) :
rmvKeys;
ctx.store().removeAllFromStore(null, storeKeys);
op = DELETE;
}
// Avoid iterator creation.
for (int i = 0; i < entries.size(); i++) {
GridDhtCacheEntry<K, V> entry = entries.get(i);
assert Thread.holdsLock(entry);
if (entry.obsolete()) {
assert req.operation() == DELETE : "Entry can become obsolete only after remove: " + entry;
continue;
}
try {
// We are holding java-level locks on entries at this point.
V writeVal = op == UPDATE ? putMap.get(entry.key()) : null;
assert writeVal != null || op == DELETE : "null write value found.";
boolean primary = !req.fastMap() || ctx.affinity().primary(ctx.localNode(), entry.key(),
req.topologyVersion());
Collection<UUID> readers = null;
Collection<UUID> filteredReaders = null;
if (checkReaders) {
readers = entry.readers();
filteredReaders = F.view(entry.readers(), F.notEqualTo(nodeId));
}
GridCacheUpdateAtomicResult<K, V> updRes = entry.innerUpdate(
ver,
nodeId,
locNodeId,
op,
writeVal,
null,
false,
false,
req.ttl(),
true,
true,
primary,
ctx.config().getAtomicWriteOrderMode() == CLOCK, // Check version in CLOCK mode on primary node.
req.filter(),
replicate ? primary ? DR_PRIMARY : DR_BACKUP : DR_NONE,
-1L,
-1L,
null,
false);
batchRes.addDeleted(entry, updRes, entries);
if (dhtFut == null && !F.isEmpty(filteredReaders)) {
dhtFut = createDhtFuture(ver, req, res, completionCb, true);
batchRes.readersOnly(true);
}
if (dhtFut != null) {
GridCacheValueBytes valBytesTuple = op == DELETE ? GridCacheValueBytes.nil():
entry.valueBytes();
byte[] valBytes = valBytesTuple.getIfMarshaled();
if (!batchRes.readersOnly())
dhtFut.addWriteEntry(entry, writeVal, valBytes, -1, -1, null, req.ttl());
if (!F.isEmpty(filteredReaders))
dhtFut.addNearWriteEntries(filteredReaders, entry, writeVal, valBytes, req.ttl());
}
if (hasNear) {
if (primary) {
if (!U.nodeIds(context().affinity().nodes(entry.partition(), topVer)).contains(nodeId)) {
if (req.operation() == TRANSFORM) {
int idx = firstEntryIdx + i;
GridCacheValueBytes valBytesTuple = entry.valueBytes();
byte[] valBytes = valBytesTuple.getIfMarshaled();
res.addNearValue(idx, writeVal, valBytes);
}
res.nearTtl(req.ttl());
if (writeVal != null || !entry.valueBytes().isNull()) {
GridFuture<Boolean> f = entry.addReader(nodeId, req.messageId(), topVer);
assert f == null : f;
}
}
else if (readers.contains(nodeId)) // Reader became primary or backup.
entry.removeReader(nodeId, req.messageId());
else
res.addSkippedIndex(firstEntryIdx + i);
}
else
res.addSkippedIndex(firstEntryIdx + i);
}
}
catch (GridCacheEntryRemovedException e) {
assert false : "Entry cannot become obsolete while holding lock.";
e.printStackTrace();
}
}
}
catch (GridException e) {
res.addFailedKeys(putMap != null ? putMap.keySet() : rmvKeys, e);
}
return dhtFut;
}
/**
* Acquires java-level locks on cache entries. Returns collection of locked entries.
*
* @param keys Keys to lock.
* @param topVer Topology version to lock on.
* @return Collection of locked entries.
* @throws GridDhtInvalidPartitionException If entry does not belong to local node. If exception is thrown,
* locks are released.
*/
@SuppressWarnings("ForLoopReplaceableByForEach")
private List<GridDhtCacheEntry<K, V>> lockEntries(List<K> keys, long topVer)
throws GridDhtInvalidPartitionException {
if (keys.size() == 1) {
K key = keys.get(0);
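// Spin until a non-obsolete entry is locked: if the entry turned obsolete after its monitor
// was acquired, release the monitor and look the key up again.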
while (true) {
try {
GridDhtCacheEntry<K, V> entry = entryExx(key, topVer);
UNSAFE.monitorEnter(entry);
if (entry.obsolete())
UNSAFE.monitorExit(entry);
else
return Collections.singletonList(entry);
}
catch (GridDhtInvalidPartitionException e) {
// Ignore invalid partition exception in CLOCK ordering mode.
if (ctx.config().getAtomicWriteOrderMode() == CLOCK)
return Collections.singletonList(null);
else
throw e;
}
}
}
else {
List<GridDhtCacheEntry<K, V>> locked = new ArrayList<>(keys.size());
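// Resolve all entries first, then acquire their monitors in order; if any locked entry
// turns out to be obsolete, release everything and retry the whole pass.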
while (true) {
for (K key : keys) {
try {
GridDhtCacheEntry<K, V> entry = entryExx(key, topVer);
locked.add(entry);
}
catch (GridDhtInvalidPartitionException e) {
// Ignore invalid partition exception in CLOCK ordering mode.
if (ctx.config().getAtomicWriteOrderMode() == CLOCK)
locked.add(null);
else
throw e;
}
}
boolean retry = false;
for (int i = 0; i < locked.size(); i++) {
GridCacheMapEntry<K, V> entry = locked.get(i);
if (entry == null)
continue;
UNSAFE.monitorEnter(entry);
if (entry.obsolete()) {
// Unlock all locked.
for (int j = 0; j <= i; j++) {
if (locked.get(j) != null)
UNSAFE.monitorExit(locked.get(j));
}
// Clear entries.
locked.clear();
// Retry.
retry = true;
break;
}
}
if (!retry)
return locked;
}
}
}
/**
* Releases java-level locks on cache entries.
*
* @param locked Locked entries.
*/
private void unlockEntries(Collection<GridDhtCacheEntry<K, V>> locked, long topVer) {
// Process deleted entries before locks release.
assert ctx.deferredDelete();
// Entries to skip eviction manager notification for.
// Enqueue entries while holding locks.
Collection<K> skip = null;
for (GridCacheMapEntry<K, V> entry : locked) {
if (entry != null && entry.deleted()) {
if (skip == null)
skip = new HashSet<>(locked.size(), 1.0f);
skip.add(entry.key());
}
}
// Release locks.
for (GridCacheMapEntry<K, V> entry : locked) {
if (entry != null)
UNSAFE.monitorExit(entry);
}
if (skip != null && skip.size() == locked.size())
// Optimization.
return;
// Must touch all entries since update may have deleted entries.
// Eviction manager will remove empty entries.
for (GridCacheMapEntry<K, V> entry : locked) {
if (entry != null && (skip == null || !skip.contains(entry.key())))
ctx.evicts().touch(entry, topVer);
}
}
/**
* @param entry Entry to check.
* @param req Update request.
* @param res Update response. If filter evaluation failed, key will be added to failed keys and method
* will return false.
* @return {@code True} if filter evaluation succeeded.
*/
private boolean checkFilter(GridCacheEntryEx<K, V> entry, GridNearAtomicUpdateRequest<K, V> req,
GridNearAtomicUpdateResponse<K, V> res) {
try {
return ctx.isAll(entry.wrapFilterLocked(), req.filter());
}
catch (GridException e) {
res.addFailedKey(entry.key(), e);
return false;
}
}
/**
* @param req Request to remap.
*/
private void remapToNewPrimary(GridNearAtomicUpdateRequest<K, V> req) {
if (log.isDebugEnabled())
log.debug("Remapping near update request locally: " + req);
Collection<?> vals;
Collection<GridCacheDrInfo<V>> drPutVals;
Collection<GridCacheVersion> drRmvVals;
if (req.drVersions() == null) {
vals = req.values();
drPutVals = null;
drRmvVals = null;
}
else if (req.operation() == UPDATE) {
int size = req.keys().size();
drPutVals = new ArrayList<>(size);
for (int i = 0; i < size; i++) {
Long ttl = req.drTtl(i);
if (ttl == null)
drPutVals.add(new GridCacheDrInfo<>(req.value(i), req.drVersion(i)));
else
drPutVals.add(new GridCacheDrExpirationInfo<>(req.value(i), req.drVersion(i), ttl,
req.drExpireTime(i)));
}
vals = null;
drRmvVals = null;
}
else {
assert req.operation() == DELETE;
drRmvVals = req.drVersions();
vals = null;
drPutVals = null;
}
final GridNearAtomicUpdateFuture<K, V> updateFut = new GridNearAtomicUpdateFuture<>(
ctx,
this,
ctx.config().getWriteSynchronizationMode(),
req.operation(),
req.keys(),
vals,
drPutVals,
drRmvVals,
req.returnValue(),
false,
null,
req.ttl(),
req.filter());
updateFut.map();
}
/**
* Creates backup update future if necessary.
*
* @param writeVer Write version.
* @param updateReq Update request.
* @param updateRes Update response.
* @param completionCb Completion callback to invoke when future is completed.
* @param force If {@code true} then creates future without optimizations checks.
* @return Backup update future or {@code null} if there are no backups.
*/
@Nullable private GridDhtAtomicUpdateFuture<K, V> createDhtFuture(
GridCacheVersion writeVer,
GridNearAtomicUpdateRequest<K, V> updateReq,
GridNearAtomicUpdateResponse<K, V> updateRes,
CI2<GridNearAtomicUpdateRequest<K, V>, GridNearAtomicUpdateResponse<K, V>> completionCb,
boolean force
) {
if (!force) {
if (!hasBackups || updateReq.fastMap())
return null;
long topVer = updateReq.topologyVersion();
Collection<GridNode> nodes = ctx.kernalContext().discovery().cacheAffinityNodes(name(), topVer);
// We are on primary node for some key.
assert !nodes.isEmpty();
if (nodes.size() == 1) {
if (log.isDebugEnabled())
log.debug("Partitioned cache topology has only one node, will not create DHT atomic update future " +
"[topVer=" + topVer + ", updateReq=" + updateReq + ']');
return null;
}
}
GridDhtAtomicUpdateFuture<K, V> fut = new GridDhtAtomicUpdateFuture<>(ctx, completionCb, writeVer, updateReq,
updateRes);
ctx.mvcc().addAtomicFuture(fut.version(), fut);
return fut;
}
/**
* @param nodeId Sender node ID.
* @param res Near get response.
*/
private void processNearGetResponse(UUID nodeId, GridNearGetResponse<K, V> res) {
if (log.isDebugEnabled())
log.debug("Processing near get response [nodeId=" + nodeId + ", res=" + res + ']');
GridPartitionedGetFuture<K, V> fut = (GridPartitionedGetFuture<K, V>)ctx.mvcc().<Map<K, V>>future(
res.version(), res.futureId());
if (fut == null) {
if (log.isDebugEnabled())
log.debug("Failed to find future for get response [sender=" + nodeId + ", res=" + res + ']');
return;
}
fut.onResult(nodeId, res);
}
/**
* @param nodeId Sender node ID.
* @param req Near atomic update request.
*/
private void processNearAtomicUpdateRequest(UUID nodeId, GridNearAtomicUpdateRequest<K, V> req) {
if (log.isDebugEnabled())
log.debug("Processing near atomic update request [nodeId=" + nodeId + ", req=" + req + ']');
req.nodeId(ctx.localNodeId());
updateAllAsyncInternal(nodeId, req, null, updateReplyClos);
}
/**
* @param nodeId Sender node ID.
* @param res Near atomic update response.
*/
@SuppressWarnings("unchecked")
private void processNearAtomicUpdateResponse(UUID nodeId, GridNearAtomicUpdateResponse<K, V> res) {
if (log.isDebugEnabled())
log.debug("Processing near atomic update response [nodeId=" + nodeId + ", res=" + res + ']');
res.nodeId(ctx.localNodeId());
GridNearAtomicUpdateFuture<K, V> fut = (GridNearAtomicUpdateFuture)ctx.mvcc().atomicFuture(res.futureVersion());
if (fut != null)
fut.onResult(nodeId, res);
else
U.warn(log, "Failed to find near update future for update response (will ignore) " +
"[nodeId=" + nodeId + ", res=" + res + ']');
}
/**
* @param nodeId Sender node ID.
* @param req Dht atomic update request.
*/
private void processDhtAtomicUpdateRequest(UUID nodeId, GridDhtAtomicUpdateRequest<K, V> req) {
if (log.isDebugEnabled())
log.debug("Processing dht atomic update request [nodeId=" + nodeId + ", req=" + req + ']');
GridCacheVersion ver = req.writeVersion();
// Always send update reply.
GridDhtAtomicUpdateResponse<K, V> res = new GridDhtAtomicUpdateResponse<>(req.futureVersion());
Boolean replicate = ctx.isDrEnabled();
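// Apply each entry from the primary node's request to the local backup copy, retrying
// individual keys whose entries are concurrently removed.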
for (int i = 0; i < req.size(); i++) {
K key = req.key(i);
try {
while (true) {
GridCacheEntryEx<K, V> entry = null;
try {
entry = entryEx(key);
V val = req.value(i);
byte[] valBytes = req.valueBytes(i);
GridCacheOperation op = (val != null || valBytes != null) ? UPDATE : DELETE;
GridCacheUpdateAtomicResult<K, V> updRes = entry.innerUpdate(
ver,
nodeId,
nodeId,
op,
val,
valBytes,
/*write-through*/false,
/*retval*/false,
req.ttl(),
/*event*/true,
/*metrics*/true,
/*primary*/false,
/*check version*/true,
CU.<K, V>empty(),
replicate ? DR_BACKUP : DR_NONE,
req.drTtl(i),
req.drExpireTime(i),
req.drVersion(i),
false);
if (updRes.removeVersion() != null)
ctx.onDeferredDelete(entry, updRes.removeVersion());
break; // While.
}
catch (GridCacheEntryRemovedException ignored) {
if (log.isDebugEnabled())
log.debug("Got removed entry while updating backup value (will retry): " + key);
entry = null;
}
finally {
if (entry != null)
ctx.evicts().touch(entry, req.topologyVersion());
}
}
}
catch (GridDhtInvalidPartitionException ignored) {
// Ignore.
}
catch (GridException e) {
res.addFailedKey(key, new GridException("Failed to update key on backup node: " + key, e));
}
}
if (isNearEnabled(cacheCfg))
((GridNearAtomicCache<K, V>)near()).processDhtAtomicUpdateRequest(nodeId, req, res);
try {
if (res.failedKeys() != null || res.nearEvicted() != null || req.writeSynchronizationMode() == FULL_SYNC)
ctx.io().send(nodeId, res);
else {
// No failed keys and sync mode is not FULL_SYNC, thus sending deferred response.
sendDeferredUpdateResponse(nodeId, req.futureVersion());
}
}
catch (GridTopologyException ignored) {
U.warn(log, "Failed to send DHT atomic update response to node because it left grid: " +
req.nodeId());
}
catch (GridException e) {
U.error(log, "Failed to send DHT atomic update response (did node leave grid?) [nodeId=" + nodeId +
", req=" + req + ']', e);
}
}
/**
* @param nodeId Node ID to send message to.
* @param ver Version to ack.
*/
private void sendDeferredUpdateResponse(UUID nodeId, GridCacheVersion ver) {
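// Get or create the per-node buffer and try to append the version; if the buffer is already
// being flushed by another thread, remove it from the map and retry with a fresh one.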
while (true) {
DeferredResponseBuffer buf = pendingResponses.get(nodeId);
if (buf == null) {
buf = new DeferredResponseBuffer(nodeId);
DeferredResponseBuffer old = pendingResponses.putIfAbsent(nodeId, buf);
if (old == null) {
// We have successfully added buffer to map.
ctx.time().addTimeoutObject(buf);
}
else
buf = old;
}
if (!buf.addResponse(ver))
// Some thread is sending filled up buffer, we can remove it.
pendingResponses.remove(nodeId, buf);
else
break;
}
}
/**
* @param nodeId Sender node ID.
* @param res Dht atomic update response.
*/
private void processDhtAtomicUpdateResponse(UUID nodeId, GridDhtAtomicUpdateResponse<K, V> res) {
if (log.isDebugEnabled())
log.debug("Processing dht atomic update response [nodeId=" + nodeId + ", res=" + res + ']');
GridDhtAtomicUpdateFuture<K, V> updateFut = (GridDhtAtomicUpdateFuture<K, V>)ctx.mvcc().
atomicFuture(res.futureVersion());
if (updateFut != null)
updateFut.onResult(nodeId, res);
else
U.warn(log, "Failed to find DHT update future for update response [nodeId=" + nodeId +
", res=" + res + ']');
}
/**
* @param nodeId Sender node ID.
* @param res Deferred atomic update response.
*/
private void processDhtAtomicDeferredUpdateResponse(UUID nodeId, GridDhtAtomicDeferredUpdateResponse<K, V> res) {
if (log.isDebugEnabled())
log.debug("Processing deferred dht atomic update response [nodeId=" + nodeId + ", res=" + res + ']');
for (GridCacheVersion ver : res.futureVersions()) {
GridDhtAtomicUpdateFuture<K, V> updateFut = (GridDhtAtomicUpdateFuture<K, V>)ctx.mvcc().atomicFuture(ver);
if (updateFut != null)
updateFut.onResult(nodeId);
else
U.warn(log, "Failed to find DHT update future for deferred update response [nodeId=" +
nodeId + ", res=" + res + ']');
}
}
/**
* @param nodeId Originating node ID.
* @param res Near update response.
*/
private void sendNearUpdateReply(UUID nodeId, GridNearAtomicUpdateResponse<K, V> res) {
try {
ctx.io().send(nodeId, res);
}
catch (GridTopologyException ignored) {
U.warn(log, "Failed to send near update reply to node because it left grid: " +
nodeId);
}
catch (GridException e) {
U.error(log, "Failed to send near update reply (did node leave grid?) [nodeId=" + nodeId +
", res=" + res + ']', e);
}
}
/** {@inheritDoc} */
@Override public String toString() {
return S.toString(GridDhtAtomicCache.class, this, super.toString());
}
/**
* Result of {@link GridDhtAtomicCache#updateSingle} execution.
*/
private static class UpdateSingleResult<K, V> {
/** */
private final GridCacheReturn<V> retVal;
/** */
private final Collection<GridBiTuple<GridDhtCacheEntry<K, V>, GridCacheVersion>> deleted;
/** */
private final GridDhtAtomicUpdateFuture<K, V> dhtFut;
/**
* @param retVal Return value.
* @param deleted Deleted entries.
* @param dhtFut DHT future.
*/
private UpdateSingleResult(GridCacheReturn<V> retVal,
Collection<GridBiTuple<GridDhtCacheEntry<K, V>, GridCacheVersion>> deleted,
GridDhtAtomicUpdateFuture<K, V> dhtFut) {
this.retVal = retVal;
this.deleted = deleted;
this.dhtFut = dhtFut;
}
/**
* @return Return value.
*/
private GridCacheReturn<V> returnValue() {
return retVal;
}
/**
* @return Deleted entries.
*/
private Collection<GridBiTuple<GridDhtCacheEntry<K, V>, GridCacheVersion>> deleted() {
return deleted;
}
/**
* @return DHT future.
*/
public GridDhtAtomicUpdateFuture<K, V> dhtFuture() {
return dhtFut;
}
}
/**
* Result of {@link GridDhtAtomicCache#updateWithBatch} execution.
*/
private static class UpdateBatchResult<K, V> {
/** */
private Collection<GridBiTuple<GridDhtCacheEntry<K, V>, GridCacheVersion>> deleted;
/** */
private GridDhtAtomicUpdateFuture<K, V> dhtFut;
/** */
private boolean readersOnly;
/**
* @param entry Entry.
* @param updRes Entry update result.
* @param entries All entries.
*/
private void addDeleted(GridDhtCacheEntry<K, V> entry, GridCacheUpdateAtomicResult<K, V> updRes,
Collection<GridDhtCacheEntry<K, V>> entries) {
if (updRes.removeVersion() != null) {
if (deleted == null)
deleted = new ArrayList<>(entries.size());
deleted.add(F.t(entry, updRes.removeVersion()));
}
}
/**
* @return Deleted entries.
*/
private Collection<GridBiTuple<GridDhtCacheEntry<K, V>, GridCacheVersion>> deleted() {
return deleted;
}
/**
* @return DHT future.
*/
public GridDhtAtomicUpdateFuture<K, V> dhtFuture() {
return dhtFut;
}
/**
* @param dhtFut DHT future.
*/
private void dhtFuture(@Nullable GridDhtAtomicUpdateFuture<K, V> dhtFut) {
this.dhtFut = dhtFut;
}
/**
* @return {@code True} if only readers (not backups) should be updated.
*/
private boolean readersOnly() {
return readersOnly;
}
/**
* @param readersOnly {@code True} if only readers (not backups) should be updated.
*/
private void readersOnly(boolean readersOnly) {
this.readersOnly = readersOnly;
}
}
/**
*
*/
private static class FinishedLockFuture extends GridFinishedFutureEx<Boolean> implements GridDhtFuture<Boolean> {
/**
* Empty constructor required by {@link Externalizable}.
*/
public FinishedLockFuture() {
// No-op.
}
/**
* @param err Error.
*/
private FinishedLockFuture(Throwable err) {
super(err);
}
/** {@inheritDoc} */
@Override public Collection<Integer> invalidPartitions() {
return Collections.emptyList();
}
}
/**
* Per-node buffer of deferred update acknowledgments. It is flushed either when the buffer
* fills up or when the response timeout expires.
*/
private class DeferredResponseBuffer extends ReentrantReadWriteLock implements GridTimeoutObject {
/** Filled atomic flag. */
private AtomicBoolean guard = new AtomicBoolean(false);
/** Response versions. */
private Collection<GridCacheVersion> respVers = new ConcurrentLinkedDeque8<>();
/** Node ID. */
private final UUID nodeId;
/** Timeout ID. */
private final GridUuid timeoutId;
/** End time. */
private final long endTime;
/**
* @param nodeId Node ID to send message to.
*/
private DeferredResponseBuffer(UUID nodeId) {
this.nodeId = nodeId;
timeoutId = GridUuid.fromUuid(nodeId);
endTime = U.currentTimeMillis() + DEFERRED_UPDATE_RESPONSE_TIMEOUT;
}
/** {@inheritDoc} */
@Override public GridUuid timeoutId() {
return timeoutId;
}
/** {@inheritDoc} */
@Override public long endTime() {
return endTime;
}
/** {@inheritDoc} */
@Override public void onTimeout() {
if (guard.compareAndSet(false, true)) {
writeLock().lock();
try {
finish();
}
finally {
writeLock().unlock();
}
}
}
/**
* Adds deferred response to buffer.
*
* @param ver Version to send.
* @return {@code True} if response was handled, {@code false} if this buffer is filled and cannot be used.
*/
public boolean addResponse(GridCacheVersion ver) {
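// Responses are added under the read lock so that threads can append concurrently; the
// thread that fills the buffer flips the guard and flushes under the write lock.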
readLock().lock();
boolean snd = false;
try {
if (guard.get())
return false;
respVers.add(ver);
if (respVers.size() > DEFERRED_UPDATE_RESPONSE_BUFFER_SIZE && guard.compareAndSet(false, true))
snd = true;
}
finally {
readLock().unlock();
}
if (snd) {
// Wait all threads in read lock to finish.
writeLock().lock();
try {
finish();
ctx.time().removeTimeoutObject(this);
}
finally {
writeLock().unlock();
}
}
return true;
}
/**
* Sends deferred notification message and removes this buffer from pending responses map.
*/
private void finish() {
GridDhtAtomicDeferredUpdateResponse<K, V> msg = new GridDhtAtomicDeferredUpdateResponse<>(respVers);
try {
ctx.io().send(nodeId, msg);
}
catch (GridTopologyException ignored) {
if (log.isDebugEnabled())
log.debug("Failed to send deferred dht update response to remote node (did node leave grid?) " +
"[nodeId=" + nodeId + ", msg=" + msg + ']');
}
catch (GridException e) {
U.error(log, "Failed to send deferred dht update response to remote node [nodeId="
+ nodeId + ", msg=" + msg + ']', e);
}
pendingResponses.remove(nodeId, this);
}
}
}
|
modules/core/java/org/gridgain/grid/kernal/processors/cache/distributed/dht/atomic/GridDhtAtomicCache.java
|
/* @java.file.header */
/* _________ _____ __________________ _____
* __ ____/___________(_)______ /__ ____/______ ____(_)_______
* _ / __ __ ___/__ / _ __ / _ / __ _ __ `/__ / __ __ \
* / /_/ / _ / _ / / /_/ / / /_/ / / /_/ / _ / _ / / /
* \____/ /_/ /_/ \_,__/ \____/ \__,_/ /_/ /_/ /_/
*/
package org.gridgain.grid.kernal.processors.cache.distributed.dht.atomic;
import org.gridgain.grid.*;
import org.gridgain.grid.cache.*;
import org.gridgain.grid.kernal.processors.cache.*;
import org.gridgain.grid.kernal.processors.cache.distributed.dht.*;
import org.gridgain.grid.kernal.processors.cache.distributed.dht.preloader.*;
import org.gridgain.grid.kernal.processors.cache.distributed.near.*;
import org.gridgain.grid.kernal.processors.cache.dr.*;
import org.gridgain.grid.kernal.processors.dr.*;
import org.gridgain.grid.kernal.processors.timeout.*;
import org.gridgain.grid.lang.*;
import org.gridgain.grid.util.*;
import org.gridgain.grid.util.future.*;
import org.gridgain.grid.util.lang.*;
import org.gridgain.grid.util.tostring.*;
import org.gridgain.grid.util.typedef.*;
import org.gridgain.grid.util.typedef.internal.*;
import org.jdk8.backport.*;
import org.jetbrains.annotations.*;
import sun.misc.*;
import java.io.*;
import java.util.*;
import java.util.concurrent.*;
import java.util.concurrent.atomic.*;
import java.util.concurrent.locks.*;
import static org.gridgain.grid.GridSystemProperties.*;
import static org.gridgain.grid.cache.GridCacheAtomicWriteOrderMode.*;
import static org.gridgain.grid.cache.GridCachePeekMode.*;
import static org.gridgain.grid.cache.GridCacheWriteSynchronizationMode.*;
import static org.gridgain.grid.kernal.processors.cache.GridCacheOperation.*;
import static org.gridgain.grid.kernal.processors.cache.GridCacheUtils.*;
import static org.gridgain.grid.kernal.processors.dr.GridDrType.*;
/**
* Non-transactional partitioned cache.
*/
@GridToStringExclude
public class GridDhtAtomicCache<K, V> extends GridDhtCacheAdapter<K, V> {
/** Deferred update response buffer size. */
private static final int DEFERRED_UPDATE_RESPONSE_BUFFER_SIZE =
Integer.getInteger(GG_ATOMIC_DEFERRED_ACK_BUFFER_SIZE, 256);
/** Deferred update response timeout. */
private static final int DEFERRED_UPDATE_RESPONSE_TIMEOUT =
Integer.getInteger(GG_ATOMIC_DEFERRED_ACK_TIMEOUT, 500);
/** Unsafe instance. */
private static final Unsafe UNSAFE = GridUnsafe.unsafe();
/** Will be {@code true} if affinity has backups. */
private boolean hasBackups;
/** Update reply closure. */
private CI2<GridNearAtomicUpdateRequest<K, V>, GridNearAtomicUpdateResponse<K, V>> updateReplyClos;
/** Pending deferred update response buffers keyed by node ID. */
private ConcurrentMap<UUID, DeferredResponseBuffer> pendingResponses = new ConcurrentHashMap8<>();
/** */
private GridNearAtomicCache<K, V> near;
/**
* Empty constructor required by {@link Externalizable}.
*/
public GridDhtAtomicCache() {
// No-op.
}
/**
* @param ctx Cache context.
*/
public GridDhtAtomicCache(GridCacheContext<K, V> ctx) {
super(ctx);
}
/**
* @param ctx Cache context.
* @param map Cache concurrent map.
*/
public GridDhtAtomicCache(GridCacheContext<K, V> ctx, GridCacheConcurrentMap<K, V> map) {
super(ctx, map);
}
/** {@inheritDoc} */
@Override public boolean isDhtAtomic() {
return true;
}
/** {@inheritDoc} */
@Override protected void init() {
map.setEntryFactory(new GridCacheMapEntryFactory<K, V>() {
/** {@inheritDoc} */
@Override public GridCacheMapEntry<K, V> create(GridCacheContext<K, V> ctx, long topVer, K key, int hash,
V val, GridCacheMapEntry<K, V> next, long ttl, int hdrId) {
return new GridDhtAtomicCacheEntry<>(ctx, topVer, key, hash, val, next, ttl, hdrId);
}
});
updateReplyClos = new CI2<GridNearAtomicUpdateRequest<K, V>, GridNearAtomicUpdateResponse<K, V>>() {
@Override public void apply(GridNearAtomicUpdateRequest<K, V> req, GridNearAtomicUpdateResponse<K, V> res) {
if (ctx.config().getAtomicWriteOrderMode() == CLOCK) {
// Always send reply in CLOCK ordering mode.
sendNearUpdateReply(res.nodeId(), res);
return;
}
// Request should be for primary keys only in PRIMARY ordering mode.
assert req.hasPrimary();
if (req.writeSynchronizationMode() != FULL_ASYNC)
sendNearUpdateReply(res.nodeId(), res);
else {
if (!F.isEmpty(res.remapKeys()))
// Remap keys on primary node in FULL_ASYNC mode.
remapToNewPrimary(req);
else if (res.error() != null) {
U.error(log, "Failed to process write update request in FULL_ASYNC mode for keys: " +
res.failedKeys(), res.error());
}
}
}
};
}
/** {@inheritDoc} */
@SuppressWarnings({"IfMayBeConditional", "SimplifiableIfStatement"})
@Override public void start() throws GridException {
hasBackups = ctx.config().getBackups() > 0;
preldr = new GridDhtPreloader<>(ctx);
preldr.start();
ctx.io().addHandler(GridNearGetRequest.class, new CI2<UUID, GridNearGetRequest<K, V>>() {
@Override public void apply(UUID nodeId, GridNearGetRequest<K, V> req) {
processNearGetRequest(nodeId, req);
}
});
ctx.io().addHandler(GridNearAtomicUpdateRequest.class, new CI2<UUID, GridNearAtomicUpdateRequest<K, V>>() {
@Override public void apply(UUID nodeId, GridNearAtomicUpdateRequest<K, V> req) {
processNearAtomicUpdateRequest(nodeId, req);
}
});
ctx.io().addHandler(GridNearAtomicUpdateResponse.class, new CI2<UUID, GridNearAtomicUpdateResponse<K, V>>() {
@Override public void apply(UUID nodeId, GridNearAtomicUpdateResponse<K, V> res) {
processNearAtomicUpdateResponse(nodeId, res);
}
});
ctx.io().addHandler(GridDhtAtomicUpdateRequest.class, new CI2<UUID, GridDhtAtomicUpdateRequest<K, V>>() {
@Override public void apply(UUID nodeId, GridDhtAtomicUpdateRequest<K, V> req) {
processDhtAtomicUpdateRequest(nodeId, req);
}
});
ctx.io().addHandler(GridDhtAtomicUpdateResponse.class, new CI2<UUID, GridDhtAtomicUpdateResponse<K, V>>() {
@Override public void apply(UUID nodeId, GridDhtAtomicUpdateResponse<K, V> res) {
processDhtAtomicUpdateResponse(nodeId, res);
}
});
ctx.io().addHandler(GridDhtAtomicDeferredUpdateResponse.class,
new CI2<UUID, GridDhtAtomicDeferredUpdateResponse<K, V>>() {
@Override public void apply(UUID nodeId, GridDhtAtomicDeferredUpdateResponse<K, V> res) {
processDhtAtomicDeferredUpdateResponse(nodeId, res);
}
});
if (near == null) {
ctx.io().addHandler(GridNearGetResponse.class, new CI2<UUID, GridNearGetResponse<K, V>>() {
@Override public void apply(UUID nodeId, GridNearGetResponse<K, V> res) {
processNearGetResponse(nodeId, res);
}
});
}
}
/**
* @param near Near cache.
*/
public void near(GridNearAtomicCache<K, V> near) {
this.near = near;
}
/** {@inheritDoc} */
@Override public GridNearCacheAdapter<K, V> near() {
return near;
}
/**
* @return Whether backups are configured for this cache.
*/
public boolean hasBackups() {
return hasBackups;
}
/** {@inheritDoc} */
@Override public GridCacheEntry<K, V> entry(K key) {
return new GridDhtCacheEntryImpl<>(ctx.projectionPerCall(), ctx, key, null);
}
/** {@inheritDoc} */
@Override public V peek(K key, @Nullable Collection<GridCachePeekMode> modes) throws GridException {
GridTuple<V> val = null;
if (ctx.isReplicated() || !modes.contains(NEAR_ONLY)) {
try {
val = peek0(true, key, modes, ctx.tm().txx());
}
catch (GridCacheFilterFailedException ignored) {
if (log.isDebugEnabled())
log.debug("Filter validation failed for key: " + key);
return null;
}
}
return val != null ? val.get() : null;
}
/** {@inheritDoc} */
@Override public GridCacheTxLocalAdapter<K, V> newTx(
boolean implicit,
boolean implicitSingle,
GridCacheTxConcurrency concurrency,
GridCacheTxIsolation isolation,
long timeout,
boolean invalidate,
boolean syncCommit,
boolean syncRollback,
boolean swapEnabled,
boolean storeEnabled,
int txSize,
@Nullable Object grpLockKey,
boolean partLock
) {
throw new UnsupportedOperationException("Transactions are not supported for " +
"GridCacheAtomicityMode.ATOMIC mode (use GridCacheAtomicityMode.TRANSACTIONAL instead)");
}
/** {@inheritDoc} */
@Override public GridFuture<Map<K, V>> getAllAsync(
@Nullable final Collection<? extends K> keys,
final boolean forcePrimary,
boolean skipTx,
@Nullable final GridCacheEntryEx<K, V> entry,
@Nullable final GridPredicate<GridCacheEntry<K, V>>[] filter
) {
return asyncOp(new CO<GridFuture<Map<K, V>>>() {
@Override public GridFuture<Map<K, V>> apply() {
return getAllAsync0(keys, false, forcePrimary, filter);
}
});
}
/** {@inheritDoc} */
@Override public V put(K key, V val, @Nullable GridCacheEntryEx<K, V> cached, long ttl,
@Nullable GridPredicate<GridCacheEntry<K, V>>[] filter) throws GridException {
return putAsync(key, val, cached, ttl, filter).get();
}
/** {@inheritDoc} */
@Override public boolean putx(K key, V val, @Nullable GridCacheEntryEx<K, V> cached,
long ttl, @Nullable GridPredicate<GridCacheEntry<K, V>>... filter) throws GridException {
return putxAsync(key, val, cached, ttl, filter).get();
}
/** {@inheritDoc} */
@Override public boolean putx(K key, V val,
GridPredicate<GridCacheEntry<K, V>>[] filter) throws GridException {
return putxAsync(key, val, filter).get();
}
/** {@inheritDoc} */
@SuppressWarnings("unchecked")
@Override public GridFuture<V> putAsync(K key, V val, @Nullable GridCacheEntryEx<K, V> entry,
long ttl, @Nullable GridPredicate<GridCacheEntry<K, V>>... filter) {
return updateAllAsync0(F0.asMap(key, val), null, null, null, true, false, entry, ttl, filter);
}
/** {@inheritDoc} */
@SuppressWarnings("unchecked")
@Override public GridFuture<Boolean> putxAsync(K key, V val, @Nullable GridCacheEntryEx<K, V> entry, long ttl,
@Nullable GridPredicate<GridCacheEntry<K, V>>... filter) {
return updateAllAsync0(F0.asMap(key, val), null, null, null, false, false, entry, ttl, filter);
}
/** {@inheritDoc} */
@Override public V putIfAbsent(K key, V val) throws GridException {
return putIfAbsentAsync(key, val).get();
}
/** {@inheritDoc} */
@Override public GridFuture<V> putIfAbsentAsync(K key, V val) {
return putAsync(key, val, ctx.noPeekArray());
}
/** {@inheritDoc} */
@Override public boolean putxIfAbsent(K key, V val) throws GridException {
return putxIfAbsentAsync(key, val).get();
}
/** {@inheritDoc} */
@Override public GridFuture<Boolean> putxIfAbsentAsync(K key, V val) {
return putxAsync(key, val, ctx.noPeekArray());
}
/** {@inheritDoc} */
@Override public V replace(K key, V val) throws GridException {
return replaceAsync(key, val).get();
}
/** {@inheritDoc} */
@Override public GridFuture<V> replaceAsync(K key, V val) {
return putAsync(key, val, ctx.hasPeekArray());
}
/** {@inheritDoc} */
@Override public boolean replacex(K key, V val) throws GridException {
return replacexAsync(key, val).get();
}
/** {@inheritDoc} */
@Override public GridFuture<Boolean> replacexAsync(K key, V val) {
return putxAsync(key, val, ctx.hasPeekArray());
}
/** {@inheritDoc} */
@Override public boolean replace(K key, V oldVal, V newVal) throws GridException {
return replaceAsync(key, oldVal, newVal).get();
}
/** {@inheritDoc} */
@Override public GridFuture<Boolean> replaceAsync(K key, V oldVal, V newVal) {
return putxAsync(key, newVal, ctx.equalsPeekArray(oldVal));
}
/** {@inheritDoc} */
@Override public GridCacheReturn<V> removex(K key, V val) throws GridException {
return removexAsync(key, val).get();
}
/** {@inheritDoc} */
@Override public GridCacheReturn<V> replacex(K key, V oldVal, V newVal) throws GridException {
return replacexAsync(key, oldVal, newVal).get();
}
/** {@inheritDoc} */
@SuppressWarnings("unchecked")
@Override public GridFuture<GridCacheReturn<V>> removexAsync(K key, V val) {
return removeAllAsync0(F.asList(key), null, null, true, true, ctx.equalsPeekArray(val));
}
/** {@inheritDoc} */
@SuppressWarnings("unchecked")
@Override public GridFuture<GridCacheReturn<V>> replacexAsync(K key, V oldVal, V newVal) {
return updateAllAsync0(F.asMap(key, newVal), null, null, null, true, true, null, 0,
ctx.equalsPeekArray(oldVal));
}
/** {@inheritDoc} */
@Override public void putAll(Map<? extends K, ? extends V> m,
GridPredicate<GridCacheEntry<K, V>>[] filter) throws GridException {
putAllAsync(m, filter).get();
}
/** {@inheritDoc} */
@Override public GridFuture<?> putAllAsync(Map<? extends K, ? extends V> m,
@Nullable GridPredicate<GridCacheEntry<K, V>>[] filter) {
return updateAllAsync0(m, null, null, null, false, false, null, 0, filter);
}
/** {@inheritDoc} */
@Override public void putAllDr(Map<? extends K, GridCacheDrInfo<V>> drMap) throws GridException {
putAllDrAsync(drMap).get();
}
/** {@inheritDoc} */
@Override public GridFuture<?> putAllDrAsync(Map<? extends K, GridCacheDrInfo<V>> drMap) {
metrics.onReceiveCacheEntriesReceived(drMap.size());
return updateAllAsync0(null, null, drMap, null, false, false, null, 0, null);
}
/** {@inheritDoc} */
@Override public void transform(K key, GridClosure<V, V> transformer) throws GridException {
transformAsync(key, transformer).get();
}
/** {@inheritDoc} */
@Override public GridFuture<?> transformAsync(K key, GridClosure<V, V> transformer,
@Nullable GridCacheEntryEx<K, V> entry, long ttl) {
return updateAllAsync0(null, Collections.singletonMap(key, transformer), null, null, false, false, entry, ttl,
null);
}
/** {@inheritDoc} */
@Override public void transformAll(@Nullable Map<? extends K, ? extends GridClosure<V, V>> m) throws GridException {
transformAllAsync(m).get();
}
/** {@inheritDoc} */
@Override public GridFuture<?> transformAllAsync(@Nullable Map<? extends K, ? extends GridClosure<V, V>> m) {
if (F.isEmpty(m))
return new GridFinishedFuture<Object>(ctx.kernalContext());
return updateAllAsync0(null, m, null, null, false, false, null, 0, null);
}
/** {@inheritDoc} */
@Override public V remove(K key, @Nullable GridCacheEntryEx<K, V> entry,
@Nullable GridPredicate<GridCacheEntry<K, V>>... filter) throws GridException {
return removeAsync(key, entry, filter).get();
}
/** {@inheritDoc} */
@SuppressWarnings("unchecked")
@Override public GridFuture<V> removeAsync(K key, @Nullable GridCacheEntryEx<K, V> entry,
@Nullable GridPredicate<GridCacheEntry<K, V>>... filter) {
return removeAllAsync0(Collections.singletonList(key), null, entry, true, false, filter);
}
/** {@inheritDoc} */
@Override public void removeAll(Collection<? extends K> keys,
GridPredicate<GridCacheEntry<K, V>>... filter) throws GridException {
removeAllAsync(keys, filter).get();
}
/** {@inheritDoc} */
@Override public GridFuture<?> removeAllAsync(Collection<? extends K> keys,
GridPredicate<GridCacheEntry<K, V>>[] filter) {
return removeAllAsync0(keys, null, null, false, false, filter);
}
/** {@inheritDoc} */
@Override public boolean removex(K key, @Nullable GridCacheEntryEx<K, V> entry,
@Nullable GridPredicate<GridCacheEntry<K, V>>... filter) throws GridException {
return removexAsync(key, entry, filter).get();
}
/** {@inheritDoc} */
@SuppressWarnings("unchecked")
@Override public GridFuture<Boolean> removexAsync(K key, @Nullable GridCacheEntryEx<K, V> entry,
@Nullable GridPredicate<GridCacheEntry<K, V>>... filter) {
return removeAllAsync0(Collections.singletonList(key), null, entry, false, false, filter);
}
/** {@inheritDoc} */
@Override public boolean remove(K key, V val) throws GridException {
return removeAsync(key, val).get();
}
/** {@inheritDoc} */
@Override public GridFuture<Boolean> removeAsync(K key, V val) {
return removexAsync(key, ctx.equalsPeekArray(val));
}
/** {@inheritDoc} */
@Override public void removeAll(GridPredicate<GridCacheEntry<K, V>>[] filter) throws GridException {
removeAllAsync(filter).get();
}
/** {@inheritDoc} */
@Override public GridFuture<?> removeAllAsync(GridPredicate<GridCacheEntry<K, V>>[] filter) {
return removeAllAsync(keySet(filter), filter);
}
/** {@inheritDoc} */
@Override public void removeAllDr(Map<? extends K, GridCacheVersion> drMap) throws GridException {
removeAllDrAsync(drMap).get();
}
/** {@inheritDoc} */
@Override public GridFuture<?> removeAllDrAsync(Map<? extends K, GridCacheVersion> drMap) {
metrics.onReceiveCacheEntriesReceived(drMap.size());
return removeAllAsync0(null, drMap, null, false, false, null);
}
/**
* @return {@code True} if store enabled.
*/
private boolean storeEnabled() {
return ctx.isStoreEnabled() && ctx.config().getStore() != null;
}
/**
* @param op Operation closure.
* @return Future.
*/
@SuppressWarnings("unchecked")
protected <T> GridFuture<T> asyncOp(final CO<GridFuture<T>> op) {
GridFuture<T> fail = asyncOpAcquire();
if (fail != null)
return fail;
FutureHolder holder = lastFut.get();
holder.lock();
try {
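// If the previous async operation tracked by this holder has not completed yet, chain the
// new operation after it so that operations complete in submission order.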
GridFuture fut = holder.future();
if (fut != null && !fut.isDone()) {
GridFuture<T> f = new GridEmbeddedFuture<>(fut,
new C2<T, Exception, GridFuture<T>>() {
@Override public GridFuture<T> apply(T t, Exception e) {
return op.apply();
}
}, ctx.kernalContext());
saveFuture(holder, f);
return f;
}
GridFuture<T> f = op.apply();
saveFuture(holder, f);
return f;
}
finally {
holder.unlock();
}
}
/** {@inheritDoc} */
@Override protected GridFuture<Boolean> lockAllAsync(Collection<? extends K> keys,
long timeout,
@Nullable GridCacheTxLocalEx<K, V> tx,
boolean isInvalidate,
boolean isRead,
boolean retval,
@Nullable GridCacheTxIsolation isolation,
GridPredicate<GridCacheEntry<K, V>>[] filter) {
return new FinishedLockFuture(new UnsupportedOperationException("Locks are not supported for " +
"GridCacheAtomicityMode.ATOMIC mode (use GridCacheAtomicityMode.TRANSACTIONAL instead)"));
}
/**
* Entry point for all public API put/transform methods.
*
* @param map Put map. Either {@code map}, {@code transformMap}, {@code drPutMap} or {@code drRmvMap} should be passed.
* @param transformMap Transform map. Either {@code map}, {@code transformMap}, {@code drPutMap} or {@code drRmvMap} should be passed.
* @param drPutMap DR put map.
* @param drRmvMap DR remove map.
* @param retval Return value required flag.
* @param rawRetval Return {@code GridCacheReturn} instance.
* @param cached Cached cache entry for key. May be passed if and only if map size is {@code 1}.
* @param ttl Entry time-to-live.
* @param filter Cache entry filter for atomic updates.
* @return Completion future.
*/
private GridFuture updateAllAsync0(
@Nullable final Map<? extends K, ? extends V> map,
@Nullable final Map<? extends K, ? extends GridClosure<V, V>> transformMap,
@Nullable final Map<? extends K, GridCacheDrInfo<V>> drPutMap,
@Nullable final Map<? extends K, GridCacheVersion> drRmvMap,
final boolean retval,
final boolean rawRetval,
@Nullable GridCacheEntryEx<K, V> cached,
long ttl,
@Nullable final GridPredicate<GridCacheEntry<K, V>>[] filter
) {
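// Exactly one of map / transformMap / drPutMap / drRmvMap is expected to be non-null;
// the near update future takes its keys and values from whichever map was passed.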
final GridNearAtomicUpdateFuture<K, V> updateFut = new GridNearAtomicUpdateFuture<>(
ctx,
this,
ctx.config().getWriteSynchronizationMode(),
transformMap != null ? TRANSFORM : UPDATE,
map != null ? map.keySet() : transformMap != null ? transformMap.keySet() : drPutMap != null ?
drPutMap.keySet() : drRmvMap.keySet(),
map != null ? map.values() : transformMap != null ? transformMap.values() : null,
drPutMap != null ? drPutMap.values() : null,
drRmvMap != null ? drRmvMap.values() : null,
retval,
rawRetval,
cached,
ttl,
filter);
return asyncOp(new CO<GridFuture<Object>>() {
@Override public GridFuture<Object> apply() {
updateFut.map();
return updateFut;
}
});
}
/**
* Entry point for all public API remove methods.
*
* @param keys Keys to remove.
* @param drMap DR map.
* @param cached Cached cache entry for key. May be passed if and only if keys size is {@code 1}.
* @param retval Return value required flag.
* @param rawRetval Return {@code GridCacheReturn} instance.
* @param filter Cache entry filter for atomic removes.
* @return Completion future.
*/
private GridFuture removeAllAsync0(
@Nullable final Collection<? extends K> keys,
@Nullable final Map<? extends K, GridCacheVersion> drMap,
@Nullable GridCacheEntryEx<K, V> cached,
final boolean retval,
boolean rawRetval,
@Nullable final GridPredicate<GridCacheEntry<K, V>>[] filter
) {
assert keys != null || drMap != null;
final GridNearAtomicUpdateFuture<K, V> updateFut = new GridNearAtomicUpdateFuture<>(
ctx,
this,
ctx.config().getWriteSynchronizationMode(),
DELETE,
keys != null ? keys : drMap.keySet(),
null,
null,
keys != null ? null : drMap.values(),
retval,
rawRetval,
cached,
0,
filter);
return asyncOp(new CO<GridFuture<Object>>() {
@Override public GridFuture<Object> apply() {
updateFut.map();
return updateFut;
}
});
}
/**
* Entry point to all public API get methods.
*
* @param keys Keys to get.
* @param reload Reload flag.
* @param forcePrimary Force primary flag.
* @param filter Filter.
* @return Get future.
*/
private GridFuture<Map<K, V>> getAllAsync0(@Nullable Collection<? extends K> keys, boolean reload,
boolean forcePrimary, @Nullable GridPredicate<GridCacheEntry<K, V>>[] filter) {
if (F.isEmpty(keys))
return new GridFinishedFuture<>(ctx.kernalContext(), Collections.<K, V>emptyMap());
// Optimization: try to resolve values locally and avoid 'get future' creation.
if (!reload && !forcePrimary) {
Map<K, V> locVals = new HashMap<>(keys.size(), 1.0f);
GridCacheVersion obsoleteVer = null;
boolean success = true;
long topVer = ctx.affinity().affinityTopologyVersion();
// Optimistically expect that all keys are available locally (avoid creation of get future).
for (K key : keys) {
GridCacheEntryEx<K, V> entry = null;
while (true) {
try {
entry = ctx.isSwapOrOffheapEnabled() ? entryEx(key) : peekEx(key);
// If our DHT cache has the value, then we peek it.
if (entry != null) {
boolean isNew = entry.isNewLocked();
V v = entry.innerGet(null, /*swap*/true, /*read-through*/false, /*fail-fast*/true,
/*unmarshal*/true, /*update-metrics*/true, true, filter);
// Entry was not in memory or in swap, so we remove it from cache.
if (v == null) {
if (obsoleteVer == null)
obsoleteVer = context().versions().next();
if (isNew && entry.markObsoleteIfEmpty(obsoleteVer))
removeIfObsolete(key);
success = false;
}
else
locVals.put(key, v);
}
else
success = false;
break; // While.
}
catch (GridCacheEntryRemovedException ignored) {
// No-op, retry.
}
catch (GridCacheFilterFailedException ignored) {
// No-op, skip the key.
break;
}
catch (GridDhtInvalidPartitionException ignored) {
success = false;
break; // While.
}
catch (GridException e) {
return new GridFinishedFuture<>(ctx.kernalContext(), e);
}
finally {
if (entry != null)
ctx.evicts().touch(entry, topVer);
}
}
if (!success)
break;
}
if (success)
return ctx.wrapCloneMap(new GridFinishedFuture<>(ctx.kernalContext(), locVals));
}
// Either reload or not all values are available locally.
GridPartitionedGetFuture<K, V> fut = new GridPartitionedGetFuture<>(ctx, keys, reload, forcePrimary, null,
filter);
fut.init();
return ctx.wrapCloneMap(fut);
}
/**
* Executes local update.
*
* @param nodeId Node ID.
* @param req Update request.
* @param cached Cached entry if updating single local entry.
* @param completionCb Completion callback.
*/
public void updateAllAsyncInternal(
final UUID nodeId,
final GridNearAtomicUpdateRequest<K, V> req,
@Nullable final GridCacheEntryEx<K, V> cached,
final CI2<GridNearAtomicUpdateRequest<K, V>, GridNearAtomicUpdateResponse<K, V>> completionCb
) {
GridFuture<Object> forceFut = preldr.request(req.keys(), req.topologyVersion());
if (forceFut.isDone())
updateAllAsyncInternal0(nodeId, req, cached, completionCb);
else {
forceFut.listenAsync(new CI1<GridFuture<Object>>() {
@Override public void apply(GridFuture<Object> t) {
updateAllAsyncInternal0(nodeId, req, cached, completionCb);
}
});
}
}
/**
* Executes local update after preloader fetched values.
*
* @param nodeId Node ID.
* @param req Update request.
* @param cached Cached entry if updating single local entry.
* @param completionCb Completion callback.
*/
public void updateAllAsyncInternal0(
UUID nodeId,
GridNearAtomicUpdateRequest<K, V> req,
@Nullable GridCacheEntryEx<K, V> cached,
CI2<GridNearAtomicUpdateRequest<K, V>, GridNearAtomicUpdateResponse<K, V>> completionCb
) {
GridNearAtomicUpdateResponse<K, V> res = new GridNearAtomicUpdateResponse<>(nodeId, req.futureVersion());
List<K> keys = req.keys();
assert !req.returnValue() || keys.size() == 1;
GridDhtAtomicUpdateFuture<K, V> dhtFut = null;
boolean remap = false;
try {
// If batch store update is enabled, we need to lock all entries.
// First, need to acquire locks on cache entries, then check filter.
List<GridDhtCacheEntry<K, V>> locked = lockEntries(keys, req.topologyVersion());
Collection<GridBiTuple<GridDhtCacheEntry<K, V>, GridCacheVersion>> deleted = null;
try {
topology().readLock();
try {
// Do not check topology version for CLOCK versioning since
// partition exchange will wait for near update future.
if (topology().topologyVersion() == req.topologyVersion() ||
ctx.config().getAtomicWriteOrderMode() == CLOCK) {
GridNode node = ctx.discovery().node(nodeId);
if (node == null) {
U.warn(log, "Node originated update request left grid: " + nodeId);
return;
}
boolean hasNear = U.hasNearCache(node, name());
GridCacheVersion ver = req.updateVersion();
if (ver == null) {
// Assign next version for update inside entries lock.
ver = ctx.versions().next(req.topologyVersion());
if (hasNear)
res.nearVersion(ver);
}
assert ver != null : "Got null version for update request: " + req;
if (log.isDebugEnabled())
log.debug("Using cache version for update request on primary node [ver=" + ver +
", req=" + req + ']');
dhtFut = createDhtFuture(ver, req, res, completionCb, false);
GridCacheReturn<V> retVal = null;
boolean replicate = ctx.isDrEnabled();
if (storeEnabled() && keys.size() > 1 && cacheCfg.getDrReceiverConfiguration() == null) {
// This method can only be used when there are no replicated entries in the batch.
UpdateBatchResult<K, V> updRes = updateWithBatch(nodeId, hasNear, req, res, locked, ver,
dhtFut, completionCb, replicate);
deleted = updRes.deleted();
dhtFut = updRes.dhtFuture();
}
else {
UpdateSingleResult<K, V> updRes = updateSingle(nodeId, hasNear, req, res, locked, ver,
dhtFut, completionCb, replicate);
retVal = updRes.returnValue();
deleted = updRes.deleted();
dhtFut = updRes.dhtFuture();
}
if (retVal == null)
retVal = new GridCacheReturn<>(null, true);
res.returnValue(retVal);
}
else
// Should remap all keys.
remap = true;
}
finally {
topology().readUnlock();
}
}
catch (GridCacheEntryRemovedException e) {
assert false : "Entry should not become obsolete while holding lock.";
e.printStackTrace();
}
finally {
unlockEntries(locked, req.topologyVersion());
// Enqueue if necessary after locks release.
if (deleted != null) {
assert !deleted.isEmpty();
assert ctx.deferredDelete();
for (GridBiTuple<GridDhtCacheEntry<K, V>, GridCacheVersion> e : deleted)
ctx.onDeferredDelete(e.get1(), e.get2());
}
}
}
catch (GridDhtInvalidPartitionException ignore) {
assert ctx.config().getAtomicWriteOrderMode() == PRIMARY;
if (log.isDebugEnabled())
log.debug("Caught invalid partition exception for cache entry (will remap update request): " + req);
remap = true;
}
if (remap) {
assert dhtFut == null;
res.remapKeys(req.keys());
completionCb.apply(req, res);
}
else {
// If there are backups, map backup update future.
if (dhtFut != null)
dhtFut.map();
// Otherwise, complete the call.
else
completionCb.apply(req, res);
}
}
/**
* Updates locked entries using batched write-through.
*
* @param nodeId Sender node ID.
* @param hasNear {@code True} if originating node has near cache.
* @param req Update request.
* @param res Update response.
* @param locked Locked entries.
* @param ver Assigned version.
* @param dhtFut Optional DHT future.
* @param completionCb Completion callback to invoke when DHT future is completed.
* @param replicate Whether replication is enabled.
* @return Update batch result.
* @throws GridCacheEntryRemovedException Should not be thrown.
*/
private UpdateBatchResult<K, V> updateWithBatch(
UUID nodeId,
boolean hasNear,
GridNearAtomicUpdateRequest<K, V> req,
GridNearAtomicUpdateResponse<K, V> res,
List<GridDhtCacheEntry<K, V>> locked,
GridCacheVersion ver,
@Nullable GridDhtAtomicUpdateFuture<K, V> dhtFut,
CI2<GridNearAtomicUpdateRequest<K, V>, GridNearAtomicUpdateResponse<K, V>> completionCb,
boolean replicate
) throws GridCacheEntryRemovedException {
// Cannot update in batches during DR due to possible conflicts.
assert !req.returnValue(); // Should not request return values for putAll.
int size = req.keys().size();
Map<K, V> putMap = null;
Collection<K> rmvKeys = null;
UpdateBatchResult<K, V> updRes = new UpdateBatchResult<>();
List<GridDhtCacheEntry<K, V>> filtered = new ArrayList<>(size);
GridCacheOperation op = req.operation();
int firstEntryIdx = 0;
for (int i = 0; i < locked.size(); i++) {
GridDhtCacheEntry<K, V> entry = locked.get(i);
try {
if (!checkFilter(entry, req, res)) {
if (log.isDebugEnabled())
log.debug("Entry did not pass the filter (will skip write) [entry=" + entry +
", filter=" + Arrays.toString(req.filter()) + ", res=" + res + ']');
if (hasNear)
res.addSkippedIndex(i);
firstEntryIdx++;
continue;
}
filtered.add(entry);
if (op == TRANSFORM) {
V old = entry.innerGet(
null,
/*read swap*/true,
/*read through*/true,
/*fail fast*/false,
/*unmarshal*/true,
/*metrics*/true,
/*event*/true,
CU.<K, V>empty());
GridClosure<V, V> transform = req.transformClosure(i);
V updated = transform.apply(old);
if (updated == null) {
// Update previous batch.
if (putMap != null) {
dhtFut = updatePartialBatch(
hasNear,
firstEntryIdx,
filtered,
ver,
nodeId,
putMap,
null,
dhtFut,
completionCb,
req,
res,
replicate,
updRes);
firstEntryIdx = i + 1;
putMap = null;
filtered = new ArrayList<>();
}
// Start collecting new batch.
if (rmvKeys == null)
rmvKeys = new ArrayList<>(size);
rmvKeys.add(entry.key());
}
else {
// Update previous batch.
if (rmvKeys != null) {
dhtFut = updatePartialBatch(
hasNear,
firstEntryIdx,
filtered,
ver,
nodeId,
null,
rmvKeys,
dhtFut,
completionCb,
req,
res,
replicate,
updRes);
firstEntryIdx = i + 1;
rmvKeys = null;
filtered = new ArrayList<>();
}
if (putMap == null)
putMap = new LinkedHashMap<>(size, 1.0f);
putMap.put(entry.key(), updated);
}
}
else if (op == UPDATE) {
if (putMap == null)
putMap = new LinkedHashMap<>(size, 1.0f);
V updated = req.value(i);
assert updated != null;
putMap.put(entry.key(), updated);
}
else {
assert op == DELETE;
if (rmvKeys == null)
rmvKeys = new ArrayList<>(size);
rmvKeys.add(entry.key());
}
}
catch (GridException e) {
res.addFailedKey(entry.key(), e);
}
catch (GridCacheFilterFailedException ignore) {
assert false : "Filter should never fail with failFast=false and empty filter.";
}
}
// Store final batch.
if (putMap != null || rmvKeys != null) {
dhtFut = updatePartialBatch(
hasNear,
firstEntryIdx,
filtered,
ver,
nodeId,
putMap,
rmvKeys,
dhtFut,
completionCb,
req,
res,
replicate,
updRes);
}
else
assert filtered.isEmpty();
updRes.dhtFuture(dhtFut);
return updRes;
}
/**
* Updates locked entries one-by-one.
*
* @param nodeId Originating node ID.
* @param hasNear {@code True} if originating node has near cache.
* @param req Update request.
* @param res Update response.
* @param locked Locked entries.
* @param ver Assigned update version.
* @param dhtFut Optional DHT future.
* @param completionCb Completion callback to invoke when DHT future is completed.
* @param replicate Whether DR is enabled for that cache.
* @return Update result.
* @throws GridCacheEntryRemovedException Should never be thrown.
*/
private UpdateSingleResult<K, V> updateSingle(
UUID nodeId,
boolean hasNear,
GridNearAtomicUpdateRequest<K, V> req,
GridNearAtomicUpdateResponse<K, V> res,
List<GridDhtCacheEntry<K, V>> locked,
GridCacheVersion ver,
@Nullable GridDhtAtomicUpdateFuture<K, V> dhtFut,
CI2<GridNearAtomicUpdateRequest<K, V>, GridNearAtomicUpdateResponse<K, V>> completionCb,
boolean replicate
) throws GridCacheEntryRemovedException {
GridCacheReturn<V> retVal = null;
Collection<GridBiTuple<GridDhtCacheEntry<K, V>, GridCacheVersion>> deleted = null;
List<K> keys = req.keys();
long topVer = req.topologyVersion();
boolean checkReaders = hasNear || ctx.discovery().hasNearCache(name(), topVer);
boolean readersOnly = false;
// Avoid iterator creation.
for (int i = 0; i < keys.size(); i++) {
K k = keys.get(i);
GridCacheOperation op = req.operation();
// We are holding java-level locks on entries at this point.
// No GridCacheEntryRemovedException can be thrown.
try {
GridDhtCacheEntry<K, V> entry = locked.get(i);
GridCacheVersion newDrVer = req.drVersion(i);
long newDrTtl = req.drTtl(i);
long newDrExpireTime = req.drExpireTime(i);
assert !(newDrVer instanceof GridCacheVersionEx) : newDrVer; // Plain version is expected here.
if (newDrVer == null)
newDrVer = ver;
boolean primary = !req.fastMap() || ctx.affinity().primary(ctx.localNode(), entry.key(),
req.topologyVersion());
byte[] newValBytes = req.valueBytes(i);
Object writeVal = req.writeValue(i);
Collection<UUID> readers = null;
Collection<UUID> filteredReaders = null;
if (checkReaders) {
readers = entry.readers();
filteredReaders = F.view(entry.readers(), F.notEqualTo(nodeId));
}
GridCacheUpdateAtomicResult<K, V> updRes = entry.innerUpdate(
ver,
nodeId,
locNodeId,
op,
writeVal,
newValBytes,
primary && storeEnabled(),
req.returnValue(),
req.ttl(),
true,
true,
primary,
ctx.config().getAtomicWriteOrderMode() == CLOCK, // Check version in CLOCK mode on primary node.
req.filter(),
replicate ? primary ? DR_PRIMARY : DR_BACKUP : DR_NONE,
newDrTtl,
newDrExpireTime,
newDrVer,
true);
if (dhtFut == null && !F.isEmpty(filteredReaders)) {
dhtFut = createDhtFuture(ver, req, res, completionCb, true);
readersOnly = true;
}
if (dhtFut != null) {
if (updRes.sendToDht()) { // Send to backups even in case of remove-remove scenarios.
GridDrReceiverConflictContextImpl ctx = updRes.drConflictContext();
long ttl = updRes.newTtl();
long drExpireTime = updRes.drExpireTime();
if (ctx == null)
newDrVer = null;
else if (ctx.isMerge()) {
newDrVer = null; // DR version is discarded in case of merge.
newValBytes = null; // Value has been changed.
}
if (!readersOnly)
dhtFut.addWriteEntry(entry, updRes.newValue(), newValBytes, drExpireTime >= 0L ? ttl : -1L,
drExpireTime, newDrVer, drExpireTime < 0L ? ttl : 0L);
if (!F.isEmpty(filteredReaders))
dhtFut.addNearWriteEntries(filteredReaders, entry, updRes.newValue(), newValBytes,
drExpireTime < 0L ? ttl : 0L);
}
else {
if (log.isDebugEnabled())
log.debug("Entry did not pass the filter or conflict resolution (will skip write) " +
"[entry=" + entry + ", filter=" + Arrays.toString(req.filter()) + ']');
}
}
if (hasNear) {
if (primary && updRes.sendToDht()) {
if (!U.nodeIds(context().affinity().nodes(entry.partition(), topVer)).contains(nodeId)) {
GridDrReceiverConflictContextImpl ctx = updRes.drConflictContext();
res.nearTtl(updRes.newTtl());
if (ctx != null && ctx.isMerge())
newValBytes = null;
// If put the same value as in request then do not need to send it back.
if (op == TRANSFORM || writeVal != updRes.newValue())
res.addNearValue(i, updRes.newValue(), newValBytes);
if (updRes.newValue() != null || newValBytes != null) {
GridFuture<Boolean> f = entry.addReader(nodeId, req.messageId(), topVer);
assert f == null : f;
}
}
else if (F.contains(readers, nodeId)) // Reader became primary or backup.
entry.removeReader(nodeId, req.messageId());
else
res.addSkippedIndex(i);
}
else
res.addSkippedIndex(i);
}
if (updRes.removeVersion() != null) {
if (deleted == null)
deleted = new ArrayList<>(keys.size());
deleted.add(F.t(entry, updRes.removeVersion()));
}
// Create only once.
if (retVal == null)
retVal = new GridCacheReturn<>(updRes.oldValue(), updRes.success());
}
catch (GridException e) {
res.addFailedKey(k, e);
}
}
return new UpdateSingleResult<>(retVal, deleted, dhtFut);
}
/**
* @param hasNear {@code True} if originating node has near cache.
* @param firstEntryIdx Index of the first entry in the request keys collection.
* @param entries Entries to update.
* @param ver Version to set.
* @param nodeId Originating node ID.
* @param putMap Values to put.
* @param rmvKeys Keys to remove.
* @param dhtFut DHT update future if has backups.
* @param completionCb Completion callback to invoke when DHT future is completed.
* @param req Request.
* @param res Response.
* @param replicate Whether replication is enabled.
* @param batchRes Batch update result.
* @return DHT update future.
*/
@SuppressWarnings("ForLoopReplaceableByForEach")
@Nullable private GridDhtAtomicUpdateFuture<K, V> updatePartialBatch(
boolean hasNear,
int firstEntryIdx,
List<GridDhtCacheEntry<K, V>> entries,
final GridCacheVersion ver,
UUID nodeId,
@Nullable Map<K, V> putMap,
@Nullable Collection<K> rmvKeys,
@Nullable GridDhtAtomicUpdateFuture<K, V> dhtFut,
CI2<GridNearAtomicUpdateRequest<K, V>, GridNearAtomicUpdateResponse<K, V>> completionCb,
final GridNearAtomicUpdateRequest<K, V> req,
final GridNearAtomicUpdateResponse<K, V> res,
boolean replicate,
UpdateBatchResult<K, V> batchRes
) {
assert putMap == null ^ rmvKeys == null;
assert req.drVersions() == null : "updatePartialBatch cannot be called when there are DR entries in the batch.";
long topVer = req.topologyVersion();
boolean checkReaders = hasNear || ctx.discovery().hasNearCache(name(), topVer);
try {
GridCacheOperation op;
if (putMap != null) {
// If fast mapping, filter primary keys for write to store.
Map<K, V> storeMap = req.fastMap() ?
F.view(putMap, new P1<K>() {
@Override public boolean apply(K key) {
return ctx.affinity().primary(ctx.localNode(), key, req.topologyVersion());
}
}) :
putMap;
ctx.store().putAllToStore(null, F.viewReadOnly(storeMap, new C1<V, GridBiTuple<V, GridCacheVersion>>() {
@Override public GridBiTuple<V, GridCacheVersion> apply(V v) {
return F.t(v, ver);
}
}));
op = UPDATE;
}
else {
// If fast mapping, filter primary keys for write to store.
Collection<K> storeKeys = req.fastMap() ?
F.view(rmvKeys, new P1<K>() {
@Override public boolean apply(K key) {
return ctx.affinity().primary(ctx.localNode(), key, req.topologyVersion());
}
}) :
rmvKeys;
ctx.store().removeAllFromStore(null, storeKeys);
op = DELETE;
}
// Avoid iterator creation.
for (int i = 0; i < entries.size(); i++) {
GridDhtCacheEntry<K, V> entry = entries.get(i);
assert Thread.holdsLock(entry);
if (entry.obsolete()) {
assert req.operation() == DELETE : "Entry can become obsolete only after remove: " + entry;
continue;
}
try {
// We are holding java-level locks on entries at this point.
V writeVal = op == UPDATE ? putMap.get(entry.key()) : null;
assert writeVal != null || op == DELETE : "null write value found.";
boolean primary = !req.fastMap() || ctx.affinity().primary(ctx.localNode(), entry.key(),
req.topologyVersion());
Collection<UUID> readers = null;
Collection<UUID> filteredReaders = null;
if (checkReaders) {
readers = entry.readers();
filteredReaders = F.view(entry.readers(), F.notEqualTo(nodeId));
}
GridCacheUpdateAtomicResult<K, V> updRes = entry.innerUpdate(
ver,
nodeId,
locNodeId,
op,
writeVal,
null,
false,
false,
req.ttl(),
true,
true,
primary,
ctx.config().getAtomicWriteOrderMode() == CLOCK, // Check version in CLOCK mode on primary node.
req.filter(),
replicate ? primary ? DR_PRIMARY : DR_BACKUP : DR_NONE,
-1L,
-1L,
null,
false);
batchRes.addDeleted(entry, updRes, entries);
if (dhtFut == null && !F.isEmpty(filteredReaders)) {
dhtFut = createDhtFuture(ver, req, res, completionCb, true);
batchRes.readersOnly(true);
}
if (dhtFut != null) {
GridCacheValueBytes valBytesTuple = op == DELETE ? GridCacheValueBytes.nil() :
entry.valueBytes();
byte[] valBytes = valBytesTuple.getIfMarshaled();
if (!batchRes.readersOnly())
dhtFut.addWriteEntry(entry, writeVal, valBytes, -1, -1, null, req.ttl());
if (!F.isEmpty(filteredReaders))
dhtFut.addNearWriteEntries(filteredReaders, entry, writeVal, valBytes, req.ttl());
}
if (hasNear) {
if (primary) {
if (!U.nodeIds(context().affinity().nodes(entry.partition(), topVer)).contains(nodeId)) {
if (req.operation() == TRANSFORM) {
int idx = firstEntryIdx + i;
GridCacheValueBytes valBytesTuple = entry.valueBytes();
byte[] valBytes = valBytesTuple.getIfMarshaled();
res.addNearValue(idx, writeVal, valBytes);
}
res.nearTtl(req.ttl());
if (writeVal != null || !entry.valueBytes().isNull()) {
GridFuture<Boolean> f = entry.addReader(nodeId, req.messageId(), topVer);
assert f == null : f;
}
}
else if (readers.contains(nodeId)) // Reader became primary or backup.
entry.removeReader(nodeId, req.messageId());
else
res.addSkippedIndex(firstEntryIdx + i);
}
else
res.addSkippedIndex(firstEntryIdx + i);
}
}
catch (GridCacheEntryRemovedException e) {
assert false : "Entry cannot become obsolete while holding lock.";
e.printStackTrace();
}
}
}
catch (GridException e) {
res.addFailedKeys(putMap != null ? putMap.keySet() : rmvKeys, e);
}
return dhtFut;
}
/**
* Acquires java-level locks on cache entries. Returns collection of locked entries.
*
* @param keys Keys to lock.
* @param topVer Topology version to lock on.
* @return Collection of locked entries.
* @throws GridDhtInvalidPartitionException If entry does not belong to local node. If exception is thrown,
* locks are released.
*/
@SuppressWarnings("ForLoopReplaceableByForEach")
private List<GridDhtCacheEntry<K, V>> lockEntries(List<K> keys, long topVer)
throws GridDhtInvalidPartitionException {
if (keys.size() == 1) {
K key = keys.get(0);
while (true) {
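// Acquire the JVM-level monitor on the entry; if it became obsolete between lookup and lock, release it and retry with a freshly resolved entry.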
GridDhtCacheEntry<K, V> entry = entryExx(key, topVer);
UNSAFE.monitorEnter(entry);
if (entry.obsolete())
UNSAFE.monitorExit(entry);
else
return Collections.singletonList(entry);
}
}
else {
List<GridDhtCacheEntry<K, V>> locked = new ArrayList<>(keys.size());
while (true) {
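// Resolve all entries first, then acquire their monitors in list order; if any entry turns out to be obsolete, release everything acquired so far and retry the whole batch.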
for (K key : keys) {
GridDhtCacheEntry<K, V> entry = entryExx(key, topVer);
locked.add(entry);
}
for (int i = 0; i < locked.size(); i++) {
GridCacheMapEntry<K, V> entry = locked.get(i);
UNSAFE.monitorEnter(entry);
if (entry.obsolete()) {
// Unlock all locked.
for (int j = 0; j <= i; j++)
UNSAFE.monitorExit(locked.get(j));
// Clear entries.
locked.clear();
// Retry.
break;
}
}
if (!locked.isEmpty())
return locked;
}
}
}
/**
* Releases java-level locks on cache entries.
*
* @param locked Locked entries.
*/
private void unlockEntries(Collection<GridDhtCacheEntry<K, V>> locked, long topVer) {
// Process deleted entries before locks release.
assert ctx.deferredDelete();
// Entries to skip eviction manager notification for.
// Enqueue entries while holding locks.
Collection<K> skip = null;
for (GridCacheMapEntry<K, V> entry : locked) {
if (entry.deleted()) {
if (skip == null)
skip = new HashSet<>(locked.size(), 1.0f);
skip.add(entry.key());
}
}
// Release locks.
for (GridCacheMapEntry<K, V> entry : locked)
UNSAFE.monitorExit(entry);
if (skip != null && skip.size() == locked.size())
// Optimization.
return;
// Must touch all entries since update may have deleted entries.
// Eviction manager will remove empty entries.
for (GridCacheMapEntry<K, V> entry : locked) {
if (skip == null || !skip.contains(entry.key()))
ctx.evicts().touch(entry, topVer);
}
}
/**
* @param entry Entry to check.
* @param req Update request.
* @param res Update response. If filter evaluation failed, key will be added to failed keys and method
* will return false.
* @return {@code True} if filter evaluation succeeded.
*/
private boolean checkFilter(GridCacheEntryEx<K, V> entry, GridNearAtomicUpdateRequest<K, V> req,
GridNearAtomicUpdateResponse<K, V> res) {
try {
return ctx.isAll(entry.wrapFilterLocked(), req.filter());
}
catch (GridException e) {
res.addFailedKey(entry.key(), e);
return false;
}
}
/**
* @param req Request to remap.
*/
private void remapToNewPrimary(GridNearAtomicUpdateRequest<K, V> req) {
if (log.isDebugEnabled())
log.debug("Remapping near update request locally: " + req);
Collection<?> vals;
Collection<GridCacheDrInfo<V>> drPutVals;
Collection<GridCacheVersion> drRmvVals;
if (req.drVersions() == null) {
vals = req.values();
drPutVals = null;
drRmvVals = null;
}
else if (req.operation() == UPDATE) {
int size = req.keys().size();
drPutVals = new ArrayList<>(size);
for (int i = 0; i < size; i++) {
Long ttl = req.drTtl(i);
if (ttl == null)
drPutVals.add(new GridCacheDrInfo<>(req.value(i), req.drVersion(i)));
else
drPutVals.add(new GridCacheDrExpirationInfo<>(req.value(i), req.drVersion(i), ttl,
req.drExpireTime(i)));
}
vals = null;
drRmvVals = null;
}
else {
assert req.operation() == DELETE;
drRmvVals = req.drVersions();
vals = null;
drPutVals = null;
}
final GridNearAtomicUpdateFuture<K, V> updateFut = new GridNearAtomicUpdateFuture<>(
ctx,
this,
ctx.config().getWriteSynchronizationMode(),
req.operation(),
req.keys(),
vals,
drPutVals,
drRmvVals,
req.returnValue(),
false,
null,
req.ttl(),
req.filter());
updateFut.map();
}
/**
* Creates backup update future if necessary.
*
* @param writeVer Write version.
* @param updateReq Update request.
* @param updateRes Update response.
* @param completionCb Completion callback to invoke when future is completed.
* @param force If {@code true} then creates future without optimization checks.
* @return Backup update future or {@code null} if there are no backups.
*/
@Nullable private GridDhtAtomicUpdateFuture<K, V> createDhtFuture(
GridCacheVersion writeVer,
GridNearAtomicUpdateRequest<K, V> updateReq,
GridNearAtomicUpdateResponse<K, V> updateRes,
CI2<GridNearAtomicUpdateRequest<K, V>, GridNearAtomicUpdateResponse<K, V>> completionCb,
boolean force
) {
if (!force) {
if (!hasBackups || updateReq.fastMap())
return null;
long topVer = updateReq.topologyVersion();
Collection<GridNode> nodes = ctx.kernalContext().discovery().cacheAffinityNodes(name(), topVer);
// We are on primary node for some key.
assert !nodes.isEmpty();
if (nodes.size() == 1) {
if (log.isDebugEnabled())
log.debug("Partitioned cache topology has only one node, will not create DHT atomic update future " +
"[topVer=" + topVer + ", updateReq=" + updateReq + ']');
return null;
}
}
GridDhtAtomicUpdateFuture<K, V> fut = new GridDhtAtomicUpdateFuture<>(ctx, completionCb, writeVer, updateReq,
updateRes);
ctx.mvcc().addAtomicFuture(fut.version(), fut);
return fut;
}
/**
* @param nodeId Sender node ID.
* @param res Near get response.
*/
private void processNearGetResponse(UUID nodeId, GridNearGetResponse<K, V> res) {
if (log.isDebugEnabled())
log.debug("Processing near get response [nodeId=" + nodeId + ", res=" + res + ']');
GridPartitionedGetFuture<K, V> fut = (GridPartitionedGetFuture<K, V>)ctx.mvcc().<Map<K, V>>future(
res.version(), res.futureId());
if (fut == null) {
if (log.isDebugEnabled())
log.debug("Failed to find future for get response [sender=" + nodeId + ", res=" + res + ']');
return;
}
fut.onResult(nodeId, res);
}
/**
* @param nodeId Sender node ID.
* @param req Near atomic update request.
*/
private void processNearAtomicUpdateRequest(UUID nodeId, GridNearAtomicUpdateRequest<K, V> req) {
if (log.isDebugEnabled())
log.debug("Processing near atomic update request [nodeId=" + nodeId + ", req=" + req + ']');
req.nodeId(ctx.localNodeId());
updateAllAsyncInternal(nodeId, req, null, updateReplyClos);
}
/**
* @param nodeId Sender node ID.
* @param res Near atomic update response.
*/
@SuppressWarnings("unchecked")
private void processNearAtomicUpdateResponse(UUID nodeId, GridNearAtomicUpdateResponse<K, V> res) {
if (log.isDebugEnabled())
log.debug("Processing near atomic update response [nodeId=" + nodeId + ", res=" + res + ']');
res.nodeId(ctx.localNodeId());
GridNearAtomicUpdateFuture<K, V> fut = (GridNearAtomicUpdateFuture)ctx.mvcc().atomicFuture(res.futureVersion());
if (fut != null)
fut.onResult(nodeId, res);
else
U.warn(log, "Failed to find near update future for update response (will ignore) " +
"[nodeId=" + nodeId + ", res=" + res + ']');
}
/**
* @param nodeId Sender node ID.
* @param req Dht atomic update request.
*/
private void processDhtAtomicUpdateRequest(UUID nodeId, GridDhtAtomicUpdateRequest<K, V> req) {
if (log.isDebugEnabled())
log.debug("Processing dht atomic update request [nodeId=" + nodeId + ", req=" + req + ']');
GridCacheVersion ver = req.writeVersion();
// Always send update reply.
GridDhtAtomicUpdateResponse<K, V> res = new GridDhtAtomicUpdateResponse<>(req.futureVersion());
Boolean replicate = ctx.isDrEnabled();
for (int i = 0; i < req.size(); i++) {
K key = req.key(i);
try {
while (true) {
GridCacheEntryEx<K, V> entry = null;
try {
entry = entryEx(key);
V val = req.value(i);
byte[] valBytes = req.valueBytes(i);
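// A request entry without a value means the primary removed it, so a DELETE is replayed on this backup.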
GridCacheOperation op = (val != null || valBytes != null) ? UPDATE : DELETE;
GridCacheUpdateAtomicResult<K, V> updRes = entry.innerUpdate(
ver,
nodeId,
nodeId,
op,
val,
valBytes,
/*write-through*/false,
/*retval*/false,
req.ttl(),
/*event*/true,
/*metrics*/true,
/*primary*/false,
/*check version*/true,
CU.<K, V>empty(),
replicate ? DR_BACKUP : DR_NONE,
req.drTtl(i),
req.drExpireTime(i),
req.drVersion(i),
false);
if (updRes.removeVersion() != null)
ctx.onDeferredDelete(entry, updRes.removeVersion());
break; // While.
}
catch (GridCacheEntryRemovedException ignored) {
if (log.isDebugEnabled())
log.debug("Got removed entry while updating backup value (will retry): " + key);
entry = null;
}
finally {
if (entry != null)
ctx.evicts().touch(entry, req.topologyVersion());
}
}
}
catch (GridDhtInvalidPartitionException ignored) {
// Ignore.
}
catch (GridException e) {
res.addFailedKey(key, new GridException("Failed to update key on backup node: " + key, e));
}
}
if (isNearEnabled(cacheCfg))
((GridNearAtomicCache<K, V>)near()).processDhtAtomicUpdateRequest(nodeId, req, res);
try {
if (res.failedKeys() != null || res.nearEvicted() != null || req.writeSynchronizationMode() == FULL_SYNC)
ctx.io().send(nodeId, res);
else {
// No failed keys and sync mode is not FULL_SYNC, thus sending deferred response.
sendDeferredUpdateResponse(nodeId, req.futureVersion());
}
}
catch (GridTopologyException ignored) {
U.warn(log, "Failed to send DHT atomic update response to node because it left grid: " +
req.nodeId());
}
catch (GridException e) {
U.error(log, "Failed to send DHT atomic update response (did node leave grid?) [nodeId=" + nodeId +
", req=" + req + ']', e);
}
}
/**
* @param nodeId Node ID to send message to.
* @param ver Version to ack.
*/
private void sendDeferredUpdateResponse(UUID nodeId, GridCacheVersion ver) {
while (true) {
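// Retry loop: a concurrent sender may be flushing and removing the buffer for this node, in which case addResponse() fails and a new buffer is installed on the next iteration.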
DeferredResponseBuffer buf = pendingResponses.get(nodeId);
if (buf == null) {
buf = new DeferredResponseBuffer(nodeId);
DeferredResponseBuffer old = pendingResponses.putIfAbsent(nodeId, buf);
if (old == null) {
// We have successfully added buffer to map.
ctx.time().addTimeoutObject(buf);
}
else
buf = old;
}
if (!buf.addResponse(ver))
// Some thread is sending filled up buffer, we can remove it.
pendingResponses.remove(nodeId, buf);
else
break;
}
}
/**
* @param nodeId Sender node ID.
* @param res Dht atomic update response.
*/
private void processDhtAtomicUpdateResponse(UUID nodeId, GridDhtAtomicUpdateResponse<K, V> res) {
if (log.isDebugEnabled())
log.debug("Processing dht atomic update response [nodeId=" + nodeId + ", res=" + res + ']');
GridDhtAtomicUpdateFuture<K, V> updateFut = (GridDhtAtomicUpdateFuture<K, V>)ctx.mvcc().
atomicFuture(res.futureVersion());
if (updateFut != null)
updateFut.onResult(nodeId, res);
else
U.warn(log, "Failed to find DHT update future for update response [nodeId=" + nodeId +
", res=" + res + ']');
}
/**
* @param nodeId Sender node ID.
* @param res Deferred atomic update response.
*/
private void processDhtAtomicDeferredUpdateResponse(UUID nodeId, GridDhtAtomicDeferredUpdateResponse<K, V> res) {
if (log.isDebugEnabled())
log.debug("Processing deferred dht atomic update response [nodeId=" + nodeId + ", res=" + res + ']');
for (GridCacheVersion ver : res.futureVersions()) {
GridDhtAtomicUpdateFuture<K, V> updateFut = (GridDhtAtomicUpdateFuture<K, V>)ctx.mvcc().atomicFuture(ver);
if (updateFut != null)
updateFut.onResult(nodeId);
else
U.warn(log, "Failed to find DHT update future for deferred update response [nodeId=" +
nodeId + ", res=" + res + ']');
}
}
/**
* @param nodeId Originating node ID.
* @param res Near update response.
*/
private void sendNearUpdateReply(UUID nodeId, GridNearAtomicUpdateResponse<K, V> res) {
try {
ctx.io().send(nodeId, res);
}
catch (GridTopologyException ignored) {
U.warn(log, "Failed to send near update reply to node because it left grid: " +
nodeId);
}
catch (GridException e) {
U.error(log, "Failed to send near update reply (did node leave grid?) [nodeId=" + nodeId +
", res=" + res + ']', e);
}
}
/** {@inheritDoc} */
@Override public String toString() {
return S.toString(GridDhtAtomicCache.class, this, super.toString());
}
/**
* Result of {@link GridDhtAtomicCache#updateSingle} execution.
*/
private static class UpdateSingleResult<K, V> {
/** */
private final GridCacheReturn<V> retVal;
/** */
private final Collection<GridBiTuple<GridDhtCacheEntry<K, V>, GridCacheVersion>> deleted;
/** */
private final GridDhtAtomicUpdateFuture<K, V> dhtFut;
/**
* @param retVal Return value.
* @param deleted Deleted entries.
* @param dhtFut DHT future.
*/
private UpdateSingleResult(GridCacheReturn<V> retVal,
Collection<GridBiTuple<GridDhtCacheEntry<K, V>, GridCacheVersion>> deleted,
GridDhtAtomicUpdateFuture<K, V> dhtFut) {
this.retVal = retVal;
this.deleted = deleted;
this.dhtFut = dhtFut;
}
/**
* @return Return value.
*/
private GridCacheReturn<V> returnValue() {
return retVal;
}
/**
* @return Deleted entries.
*/
private Collection<GridBiTuple<GridDhtCacheEntry<K, V>, GridCacheVersion>> deleted() {
return deleted;
}
/**
* @return DHT future.
*/
public GridDhtAtomicUpdateFuture<K, V> dhtFuture() {
return dhtFut;
}
}
/**
* Result of {@link GridDhtAtomicCache#updateWithBatch} execution.
*/
private static class UpdateBatchResult<K, V> {
/** */
private Collection<GridBiTuple<GridDhtCacheEntry<K, V>, GridCacheVersion>> deleted;
/** */
private GridDhtAtomicUpdateFuture<K, V> dhtFut;
/** */
private boolean readersOnly;
/**
* @param entry Entry.
* @param updRes Entry update result.
* @param entries All entries.
*/
private void addDeleted(GridDhtCacheEntry<K, V> entry, GridCacheUpdateAtomicResult<K, V> updRes,
Collection<GridDhtCacheEntry<K, V>> entries) {
if (updRes.removeVersion() != null) {
if (deleted == null)
deleted = new ArrayList<>(entries.size());
deleted.add(F.t(entry, updRes.removeVersion()));
}
}
/**
* @return Deleted entries.
*/
private Collection<GridBiTuple<GridDhtCacheEntry<K, V>, GridCacheVersion>> deleted() {
return deleted;
}
/**
* @return DHT future.
*/
public GridDhtAtomicUpdateFuture<K, V> dhtFuture() {
return dhtFut;
}
/**
* @param dhtFut DHT future.
*/
private void dhtFuture(@Nullable GridDhtAtomicUpdateFuture<K, V> dhtFut) {
this.dhtFut = dhtFut;
}
/**
* @return {@code True} if only readers (not backups) should be updated.
*/
private boolean readersOnly() {
return readersOnly;
}
/**
* @param readersOnly {@code True} if only readers (not backups) should be updated.
*/
private void readersOnly(boolean readersOnly) {
this.readersOnly = readersOnly;
}
}
/**
 * Finished DHT lock future that reports no invalid partitions.
 */
private static class FinishedLockFuture extends GridFinishedFutureEx<Boolean> implements GridDhtFuture<Boolean> {
/**
* Empty constructor required by {@link Externalizable}.
*/
public FinishedLockFuture() {
// No-op.
}
/**
* @param err Error.
*/
private FinishedLockFuture(Throwable err) {
super(err);
}
/** {@inheritDoc} */
@Override public Collection<Integer> invalidPartitions() {
return Collections.emptyList();
}
}
/**
* Deferred response buffer.
*/
private class DeferredResponseBuffer extends ReentrantReadWriteLock implements GridTimeoutObject {
/** Filled atomic flag. */
private AtomicBoolean guard = new AtomicBoolean(false);
/** Response versions. */
private Collection<GridCacheVersion> respVers = new ConcurrentLinkedDeque8<>();
/** Node ID. */
private final UUID nodeId;
/** Timeout ID. */
private final GridUuid timeoutId;
/** End time. */
private final long endTime;
/**
* @param nodeId Node ID to send message to.
*/
private DeferredResponseBuffer(UUID nodeId) {
this.nodeId = nodeId;
timeoutId = GridUuid.fromUuid(nodeId);
endTime = U.currentTimeMillis() + DEFERRED_UPDATE_RESPONSE_TIMEOUT;
}
/** {@inheritDoc} */
@Override public GridUuid timeoutId() {
return timeoutId;
}
/** {@inheritDoc} */
@Override public long endTime() {
return endTime;
}
/** {@inheritDoc} */
@Override public void onTimeout() {
if (guard.compareAndSet(false, true)) {
writeLock().lock();
try {
finish();
}
finally {
writeLock().unlock();
}
}
}
/**
* Adds deferred response to buffer.
*
* @param ver Version to send.
* @return {@code True} if response was handled, {@code false} if this buffer is filled and cannot be used.
*/
public boolean addResponse(GridCacheVersion ver) {
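// Versions are appended under the read lock; the thread that fills the buffer (or the timeout handler) flips the guard and drains it under the write lock, so nothing is added once finish() runs.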
readLock().lock();
boolean snd = false;
try {
if (guard.get())
return false;
respVers.add(ver);
if (respVers.size() > DEFERRED_UPDATE_RESPONSE_BUFFER_SIZE && guard.compareAndSet(false, true))
snd = true;
}
finally {
readLock().unlock();
}
if (snd) {
// Wait all threads in read lock to finish.
writeLock().lock();
try {
finish();
ctx.time().removeTimeoutObject(this);
}
finally {
writeLock().unlock();
}
}
return true;
}
/**
* Sends deferred notification message and removes this buffer from pending responses map.
*/
private void finish() {
GridDhtAtomicDeferredUpdateResponse<K, V> msg = new GridDhtAtomicDeferredUpdateResponse<>(respVers);
try {
ctx.io().send(nodeId, msg);
}
catch (GridTopologyException ignored) {
if (log.isDebugEnabled())
log.debug("Failed to send deferred dht update response to remote node (did node leave grid?) " +
"[nodeId=" + nodeId + ", msg=" + msg + ']');
}
catch (GridException e) {
U.error(log, "Failed to send deferred dht update response to remote node [nodeId="
+ nodeId + ", msg=" + msg + ']', e);
}
pendingResponses.remove(nodeId, this);
}
}
}
|
GG-7663 - Fixing remapping for CLOCK versioning mode.
|
modules/core/java/org/gridgain/grid/kernal/processors/cache/distributed/dht/atomic/GridDhtAtomicCache.java
|
GG-7663 - Fixing remapping for CLOCK versioning mode.
|
<ide><path>odules/core/java/org/gridgain/grid/kernal/processors/cache/distributed/dht/atomic/GridDhtAtomicCache.java
<ide> for (int i = 0; i < locked.size(); i++) {
<ide> GridDhtCacheEntry<K, V> entry = locked.get(i);
<ide>
<add> if (entry == null)
<add> continue;
<add>
<ide> try {
<ide> if (!checkFilter(entry, req, res)) {
<ide> if (log.isDebugEnabled())
<ide> // No GridCacheEntryRemovedException can be thrown.
<ide> try {
<ide> GridDhtCacheEntry<K, V> entry = locked.get(i);
<add>
<add> if (entry == null)
<add> continue;
<ide>
<ide> GridCacheVersion newDrVer = req.drVersion(i);
<ide> long newDrTtl = req.drTtl(i);
<ide> K key = keys.get(0);
<ide>
<ide> while (true) {
<del> GridDhtCacheEntry<K, V> entry = entryExx(key, topVer);
<del>
<del> UNSAFE.monitorEnter(entry);
<del>
<del> if (entry.obsolete())
<del> UNSAFE.monitorExit(entry);
<del> else
<del> return Collections.singletonList(entry);
<add> try {
<add> GridDhtCacheEntry<K, V> entry = entryExx(key, topVer);
<add>
<add> UNSAFE.monitorEnter(entry);
<add>
<add> if (entry.obsolete())
<add> UNSAFE.monitorExit(entry);
<add> else
<add> return Collections.singletonList(entry);
<add> }
<add> catch (GridDhtInvalidPartitionException e) {
<add> // Ignore invalid partition exception in CLOCK ordering mode.
<add> if (ctx.config().getAtomicWriteOrderMode() == CLOCK)
<add> return Collections.singletonList(null);
<add> else
<add> throw e;
<add> }
<ide> }
<ide> }
<ide> else {
<ide>
<ide> while (true) {
<ide> for (K key : keys) {
<del> GridDhtCacheEntry<K, V> entry = entryExx(key, topVer);
<del>
<del> locked.add(entry);
<del> }
<add> try {
<add> GridDhtCacheEntry<K, V> entry = entryExx(key, topVer);
<add>
<add> locked.add(entry);
<add> }
<add> catch (GridDhtInvalidPartitionException e) {
<add> // Ignore invalid partition exception in CLOCK ordering mode.
<add> if (ctx.config().getAtomicWriteOrderMode() == CLOCK)
<add> locked.add(null);
<add> else
<add> throw e;
<add> }
<add> }
<add>
<add> boolean retry = false;
<ide>
<ide> for (int i = 0; i < locked.size(); i++) {
<ide> GridCacheMapEntry<K, V> entry = locked.get(i);
<ide>
<add> if (entry == null)
<add> continue;
<add>
<ide> UNSAFE.monitorEnter(entry);
<ide>
<ide> if (entry.obsolete()) {
<ide> // Unlock all locked.
<del> for (int j = 0; j <= i; j++)
<del> UNSAFE.monitorExit(locked.get(j));
<add> for (int j = 0; j <= i; j++) {
<add> if (locked.get(j) != null)
<add> UNSAFE.monitorExit(locked.get(j));
<add> }
<ide>
<ide> // Clear entries.
<ide> locked.clear();
<ide>
<ide> // Retry.
<add> retry = true;
<add>
<ide> break;
<ide> }
<ide> }
<ide>
<del> if (!locked.isEmpty())
<add> if (!retry)
<ide> return locked;
<ide> }
<ide> }
<ide> Collection<K> skip = null;
<ide>
<ide> for (GridCacheMapEntry<K, V> entry : locked) {
<del> if (entry.deleted()) {
<add> if (entry != null && entry.deleted()) {
<ide> if (skip == null)
<ide> skip = new HashSet<>(locked.size(), 1.0f);
<ide>
<ide> }
<ide>
<ide> // Release locks.
<del> for (GridCacheMapEntry<K, V> entry : locked)
<del> UNSAFE.monitorExit(entry);
<add> for (GridCacheMapEntry<K, V> entry : locked) {
<add> if (entry != null)
<add> UNSAFE.monitorExit(entry);
<add> }
<ide>
<ide> if (skip != null && skip.size() == locked.size())
<ide> // Optimization.
<ide> // Must touch all entries since update may have deleted entries.
<ide> // Eviction manager will remove empty entries.
<ide> for (GridCacheMapEntry<K, V> entry : locked) {
<del> if (skip == null || !skip.contains(entry.key()))
<add> if (entry != null && (skip == null || !skip.contains(entry.key())))
<ide> ctx.evicts().touch(entry, topVer);
<ide> }
<ide> }
|
|
Java
|
apache-2.0
|
7c601f59396ce6d425fc078127389cdfa73db37b
| 0 |
stephenc/redmine-java-api,taskadapter/redmine-java-api,Wurmy/My_Redmine-Java-API_XML,sleroy/redmine-java-api,taskadapter/redmine-java-api,redminenb/redmine-java-api,redminenb/redmine-java-api,sleroy/redmine-java-api
|
package org.redmine.ta;
import org.junit.*;
import org.redmine.ta.RedmineManager.INCLUDE;
import org.redmine.ta.beans.*;
import org.redmine.ta.internal.logging.Logger;
import org.redmine.ta.internal.logging.LoggerFactory;
import java.io.IOException;
import java.net.URISyntaxException;
import java.util.*;
import static org.junit.Assert.*;
/**
* This class and its dependencies are located in org.redmine.ta.api project.
*/
public class RedmineManagerTest {
// TODO We don't know activities IDs!
// see feature request http://www.redmine.org/issues/7506
private static final Integer ACTIVITY_ID = 8;
private static Logger logger = LoggerFactory.getLogger(RedmineManagerTest.class);
private static RedmineManager mgr;
private static String projectKey;
private static TestConfig testConfig;
@BeforeClass
public static void oneTimeSetUp() {
testConfig = new TestConfig();
logger.info("Running redmine tests using: " + testConfig.getURI());
// mgr = new RedmineManager(TestConfig.getURI(), TestConfig.getApiKey());
mgr = new RedmineManager(testConfig.getURI());
mgr.setLogin(testConfig.getLogin());
mgr.setPassword(testConfig.getPassword());
Project junitTestProject = new Project();
junitTestProject.setName("test project");
junitTestProject.setIdentifier("test"
+ Calendar.getInstance().getTimeInMillis());
try {
Project createdProject = mgr.createProject(junitTestProject);
projectKey = createdProject.getIdentifier();
} catch (Exception e) {
logger.error(e, "Exception while creating test project");
Assert.fail("can't create a test project. " + e.getMessage());
}
}
@AfterClass
public static void oneTimeTearDown() {
try {
if (mgr != null && projectKey != null) {
mgr.deleteProject(projectKey);
}
} catch (Exception e) {
logger.error(e, "Exception while deleting test project");
Assert.fail("can't delete the test project '" + projectKey + ". reason: "
+ e.getMessage());
}
}
@Test
public void testCreateIssue() {
try {
Issue issueToCreate = new Issue();
issueToCreate.setSubject("test zzx");
Calendar startCal = Calendar.getInstance();
// have to clear them because they are ignored by Redmine and would break the comparison later
startCal.clear(Calendar.HOUR_OF_DAY);
startCal.clear(Calendar.MINUTE);
startCal.clear(Calendar.SECOND);
startCal.clear(Calendar.MILLISECOND);
startCal.add(Calendar.DATE, 5);
issueToCreate.setStartDate(startCal.getTime());
Calendar due = Calendar.getInstance();
due.add(Calendar.MONTH, 1);
issueToCreate.setDueDate(due.getTime());
User assignee = getOurUser();
issueToCreate.setAssignee(assignee);
String description = "This is the description for the new task." +
"\nIt has several lines." +
"\nThis is the last line.";
issueToCreate.setDescription(description);
float estimatedHours = 44;
issueToCreate.setEstimatedHours(estimatedHours);
Issue newIssue = mgr.createIssue(projectKey, issueToCreate);
// System.out.println("created: " + newIssue);
Assert.assertNotNull("Checking returned result", newIssue);
Assert.assertNotNull("New issue must have some ID", newIssue.getId());
// check startDate
Calendar returnedStartCal = Calendar.getInstance();
returnedStartCal.setTime(newIssue.getStartDate());
Assert.assertEquals(startCal.get(Calendar.YEAR), returnedStartCal.get(Calendar.YEAR));
Assert.assertEquals(startCal.get(Calendar.MONTH), returnedStartCal.get(Calendar.MONTH));
Assert.assertEquals(startCal.get(Calendar.DAY_OF_MONTH), returnedStartCal.get(Calendar.DAY_OF_MONTH));
// check dueDate
Calendar returnedDueCal = Calendar.getInstance();
returnedDueCal.setTime(newIssue.getDueDate());
Assert.assertEquals(due.get(Calendar.YEAR), returnedDueCal.get(Calendar.YEAR));
Assert.assertEquals(due.get(Calendar.MONTH), returnedDueCal.get(Calendar.MONTH));
Assert.assertEquals(due.get(Calendar.DAY_OF_MONTH), returnedDueCal.get(Calendar.DAY_OF_MONTH));
// check ASSIGNEE
User actualAssignee = newIssue.getAssignee();
Assert.assertNotNull("Checking assignee not null", actualAssignee);
Assert.assertEquals("Checking assignee id", assignee.getId(),
actualAssignee.getId());
// check AUTHOR
Integer EXPECTED_AUTHOR_ID = getOurUser().getId();
Assert.assertEquals(EXPECTED_AUTHOR_ID, newIssue.getAuthor().getId());
// check ESTIMATED TIME
Assert.assertEquals((Float) estimatedHours, newIssue.getEstimatedHours());
// check multi-line DESCRIPTION
String regexpStripExtra = "\\r|\\n|\\s";
description = description.replaceAll(regexpStripExtra, "");
String actualDescription = newIssue.getDescription();
actualDescription = actualDescription.replaceAll(regexpStripExtra, "");
Assert.assertEquals(description, actualDescription);
// PRIORITY
Assert.assertNotNull(newIssue.getPriorityId());
Assert.assertTrue(newIssue.getPriorityId() > 0);
} catch (Exception e) {
e.printStackTrace();
Assert.fail();
}
}
@Test
public void testCreateIssueWithParent() {
try {
Issue parentIssue = new Issue();
parentIssue.setSubject("parent 1");
Issue newParentIssue = mgr.createIssue(projectKey, parentIssue);
logger.debug("created parent: " + newParentIssue);
Assert.assertNotNull("Checking parent was created", newParentIssue);
Assert.assertNotNull("Checking ID of parent issue is not null",
newParentIssue.getId());
// Integer parentId = 46;
Integer parentId = newParentIssue.getId();
Issue childIssue = new Issue();
childIssue.setSubject("child 1");
childIssue.setParentId(parentId);
Issue newChildIssue = mgr.createIssue(projectKey, childIssue);
logger.debug("created child: " + newChildIssue);
Assert.assertEquals("Checking parent ID of the child issue", parentId,
newChildIssue.getParentId());
} catch (Exception e) {
e.printStackTrace();
Assert.fail();
}
}
@Test
public void testStartDateNull() {
try {
Issue issue = new Issue();
issue.setSubject("test start date");
issue.setStartDate(null);
Issue newIssue = mgr.createIssue(projectKey, issue);
Issue loadedIssue = mgr.getIssueById(newIssue.getId());
Assert.assertNull(loadedIssue.getStartDate());
} catch (Exception e) {
e.printStackTrace();
Assert.fail();
}
}
@Test
public void testGetIssuesBySummary() {
String summary = "issue with subject ABC";
try {
Issue issue = new Issue();
issue.setSubject(summary);
User assignee = getOurUser();
issue.setAssignee(assignee);
Issue newIssue = mgr.createIssue(projectKey, issue);
logger.debug("created: " + newIssue);
Assert.assertNotNull("Checking returned result", newIssue);
Assert.assertNotNull("New issue must have some ID", newIssue.getId());
// try to find the issue
List<Issue> foundIssues = mgr.getIssuesBySummary(projectKey,
summary);
Assert.assertNotNull("Checking if search results is not NULL", foundIssues);
Assert.assertTrue("Search results must be not empty",
!(foundIssues.isEmpty()));
Issue loadedIssue1 = RedmineTestUtils.findIssueInList(foundIssues, newIssue.getId());
Assert.assertNotNull(loadedIssue1);
Assert.assertEquals(summary, loadedIssue1.getSubject());
// User actualAssignee = newIssue.getAssignee();
// assertNotNull("Checking assignee not null", actualAssignee);
// assertEquals("Checking assignee Name", assignee.getName(),
// actualAssignee.getName());
// assertEquals("Checking assignee Id", assignee.getId(),
// actualAssignee.getId());
} catch (Exception e) {
e.printStackTrace();
Assert.fail();
}
}
@Test
public void testTryFindNonExistingIssue() {
String summary = "some summary here for issue which does not exist";
try {
// try to find the issue
List<Issue> foundIssues = mgr.getIssuesBySummary(projectKey,
summary);
Assert.assertNotNull("Search result must be not null", foundIssues);
Assert.assertTrue("Search result list must be empty",
foundIssues.isEmpty());
} catch (Exception e) {
e.printStackTrace();
Assert.fail();
}
}
private static User getOurUser() {
Integer userId = Integer
.parseInt(testConfig.getParam("createissue.userid"));
String login = testConfig.getLogin();
String fName = testConfig.getParam("userFName");
String lName = testConfig.getParam("userLName");
User user = new User();
user.setId(userId);
user.setLogin(login);
user.setFirstName(fName);
user.setLastName(lName);
return user;
}
@Test(expected = IllegalArgumentException.class)
public void testNULLHostParameter() {
new RedmineManager(null);
}
@Test(expected = IllegalArgumentException.class)
public void testEmptyHostParameter() throws RuntimeException {
new RedmineManager("");
}
@Test(expected = AuthenticationException.class)
public void noAPIKeyOnCreateIssueThrowsAE() throws Exception {
RedmineManager redmineMgrEmpty = new RedmineManager(testConfig.getURI());
Issue issue = new Issue();
issue.setSubject("test zzx");
redmineMgrEmpty.createIssue(projectKey, issue);
}
@Test(expected = AuthenticationException.class)
public void wrongAPIKeyOnCreateIssueThrowsAE() throws Exception {
RedmineManager redmineMgrInvalidKey = new RedmineManager(testConfig.getURI(), "wrong_key");
Issue issue = new Issue();
issue.setSubject("test zzx");
redmineMgrInvalidKey.createIssue(projectKey, issue);
}
@Test
public void testUpdateIssue() {
try {
Issue issue = new Issue();
String originalSubject = "Issue " + new Date();
issue.setSubject(originalSubject);
Issue newIssue = mgr.createIssue(projectKey, issue);
String changedSubject = "changed subject";
newIssue.setSubject(changedSubject);
mgr.update(newIssue);
Issue reloadedFromRedmineIssue = mgr.getIssueById(newIssue.getId());
Assert.assertEquals(
"Checking if 'update issue' operation changed the 'subject' field",
changedSubject, reloadedFromRedmineIssue.getSubject());
} catch (Exception e) {
e.printStackTrace();
Assert.fail();
}
}
/**
* Tests the retrieval of an {@link Issue} by its ID.
*
* @throws RedmineException thrown in case something went wrong in Redmine
* @throws IOException thrown in case something went wrong while performing I/O
* operations
* @throws AuthenticationException thrown in case something went wrong while trying to login
* @throws NotFoundException thrown in case the objects requested for could not be found
*/
@Test
public void testGetIssueById() throws RedmineException, IOException, AuthenticationException, NotFoundException {
Issue issue = new Issue();
String originalSubject = "Issue " + new Date();
issue.setSubject(originalSubject);
Issue newIssue = mgr.createIssue(projectKey, issue);
Issue reloadedFromRedmineIssue = mgr.getIssueById(newIssue.getId());
Assert.assertEquals(
"Checking if 'get issue by ID' operation returned issue with same 'subject' field",
originalSubject, reloadedFromRedmineIssue.getSubject());
Tracker tracker = reloadedFromRedmineIssue.getTracker();
Assert.assertNotNull("Tracker of issue should not be null", tracker);
Assert.assertNotNull("ID of tracker of issue should not be null", tracker.getId());
Assert.assertNotNull("Name of tracker of issue should not be null", tracker.getName());
}
/**
* Tests the retrieval of {@link Project}s.
*
* @throws RedmineException thrown in case something went wrong in Redmine
* @throws IOException thrown in case something went wrong while performing I/O
* operations
* @throws AuthenticationException thrown in case something went wrong while trying to login
* @throws NotFoundException thrown in case the objects requested for could not be found
*/
@Test
public void testGetProjects() throws RedmineException, IOException, AuthenticationException, NotFoundException {
// retrieve projects
List<Project> projects = mgr.getProjects();
// asserts
Assert.assertTrue(projects.size() > 0);
boolean found = false;
for (Project project : projects) {
if (project.getIdentifier().equals(projectKey)) {
found = true;
break;
}
}
if (!found) {
Assert.fail("Our project with key '" + projectKey + "' is not found on the server");
}
}
@Test
public void testGetIssues() {
try {
// create at least 1 issue
Issue issueToCreate = new Issue();
issueToCreate.setSubject("testGetIssues: " + new Date());
Issue newIssue = mgr.createIssue(projectKey, issueToCreate);
List<Issue> issues = mgr.getIssues(projectKey, null);
logger.debug("getIssues() loaded " + issues.size() + " issues");//using query #" + queryIdIssuesCreatedLast2Days);
Assert.assertTrue(issues.size() > 0);
boolean found = false;
for (Issue issue : issues) {
if (issue.getId().equals(newIssue.getId())) {
found = true;
break;
}
}
if (!found) {
Assert.fail("getIssues() didn't return the issue we just created. The query "
+ " must have returned all issues created during the last 2 days");
}
} catch (Exception e) {
e.printStackTrace();
Assert.fail(e.getMessage());
}
}
@Test(expected = NotFoundException.class)
public void testGetIssuesInvalidQueryId() throws IOException, AuthenticationException, RedmineException, NotFoundException {
Integer invalidQueryId = 9999999;
mgr.getIssues(projectKey, invalidQueryId);
}
@Test
public void testCreateProject() throws IOException, AuthenticationException, NotFoundException, RedmineException {
Project projectToCreate = generateRandomProject();
String key = null;
try {
Project createdProject = mgr.createProject(projectToCreate);
key = createdProject.getIdentifier();
Assert.assertNotNull("checking that a non-null project is returned", createdProject);
Assert.assertEquals(projectToCreate.getIdentifier(), createdProject.getIdentifier());
Assert.assertEquals(projectToCreate.getName(), createdProject.getName());
Assert.assertEquals(projectToCreate.getDescription(), createdProject.getDescription());
Assert.assertEquals(projectToCreate.getHomepage(), createdProject.getHomepage());
List<Tracker> trackers = createdProject.getTrackers();
Assert.assertNotNull("checking that project has some trackers", trackers);
Assert.assertTrue("checking that project has some trackers", !(trackers.isEmpty()));
} catch (Exception e) {
Assert.fail(e.getMessage());
} finally {
if (key != null) {
mgr.deleteProject(key);
}
}
}
@Test
public void testCreateGetUpdateDeleteProject() throws IOException, AuthenticationException, NotFoundException, RedmineException {
Project projectToCreate = generateRandomProject();
String key = null;
try {
projectToCreate.setIdentifier("id" + new Date().getTime());
logger.debug("trying to create a project with id " + projectToCreate.getIdentifier());
Project createdProject = mgr.createProject(projectToCreate);
key = createdProject.getIdentifier();
String newDescr = "NEW123";
String newName = "new name here";
createdProject.setName(newName);
createdProject.setDescription(newDescr);
mgr.update(createdProject);
Project updatedProject = mgr.getProjectByKey(key);
Assert.assertNotNull(updatedProject);
Assert.assertEquals(createdProject.getIdentifier(), updatedProject.getIdentifier());
Assert.assertEquals(newName, updatedProject.getName());
Assert.assertEquals(newDescr, updatedProject.getDescription());
List<Tracker> trackers = updatedProject.getTrackers();
Assert.assertNotNull("checking that project has some trackers", trackers);
Assert.assertTrue("checking that project has some trackers", !(trackers.isEmpty()));
} catch (Exception e) {
e.printStackTrace();
Assert.fail(e.getMessage());
} finally {
if (key != null) {
mgr.deleteProject(key);
}
}
}
@Test
public void testCreateProjectFailsWithReservedIdentifier() throws Exception {
Project projectToCreate = new Project();
projectToCreate.setName("new");
projectToCreate.setIdentifier("new");
String key = null;
try {
Project createdProject = mgr.createProject(projectToCreate);
// in case if the creation haven't failed (although it should have had!),
// need to cleanup - delete this project
key = createdProject.getIdentifier();
} catch (RedmineException e) {
Assert.assertNotNull(e.getErrors());
Assert.assertEquals(1, e.getErrors().size());
Assert.assertEquals("Identifier is reserved", e.getErrors().get(0));
} finally {
if (key != null) {
mgr.deleteProject(key);
}
}
}
private static Project generateRandomProject() {
Project project = new Project();
Long timeStamp = Calendar.getInstance().getTimeInMillis();
String key = "projkey" + timeStamp;
String name = "project number " + timeStamp;
String description = "some description for the project";
project.setIdentifier(key);
project.setName(name);
project.setDescription(description);
project.setHomepage("www.randompage" + timeStamp + ".com");
return project;
}
@Test
public void testCreateIssueNonUnicodeSymbols() {
try {
String nonLatinSymbols = "Example with accents Ação";
Issue toCreate = new Issue();
toCreate.setSubject(nonLatinSymbols);
Issue created = mgr.createIssue(projectKey, toCreate);
Assert.assertEquals(nonLatinSymbols, created.getSubject());
} catch (Exception e) {
Assert.fail(e.getMessage());
}
}
@Test
public void testCreateIssueSummaryOnly() {
try {
Issue issueToCreate = new Issue();
issueToCreate.setSubject("This is the summary line 123");
Issue newIssue = mgr.createIssue(projectKey, issueToCreate);
Assert.assertNotNull("Checking returned result", newIssue);
Assert.assertNotNull("New issue must have some ID", newIssue.getId());
// check AUTHOR
Integer EXPECTED_AUTHOR_ID = getOurUser().getId();
Assert.assertEquals(EXPECTED_AUTHOR_ID, newIssue.getAuthor().getId());
} catch (Exception e) {
e.printStackTrace();
Assert.fail();
}
}
@Test(expected = NotFoundException.class)
public void testCreateIssueInvalidProjectKey() throws IOException, AuthenticationException, RedmineException, NotFoundException {
Issue issueToCreate = new Issue();
issueToCreate.setSubject("Summary line 100");
mgr.createIssue("someNotExistingProjectKey", issueToCreate);
}
@Test(expected = NotFoundException.class)
public void testGetProjectNonExistingId() throws IOException, AuthenticationException, RedmineException, NotFoundException {
mgr.getProjectByKey("some-non-existing-key");
}
@Test(expected = NotFoundException.class)
public void testDeleteNonExistingProject() throws IOException, AuthenticationException, RedmineException, NotFoundException {
mgr.deleteProject("some-non-existing-key");
}
@Test(expected = NotFoundException.class)
public void testGetIssueNonExistingId() throws IOException, AuthenticationException, RedmineException, NotFoundException {
int someNonExistingID = 999999;
mgr.getIssueById(someNonExistingID);
}
@Test(expected = NotFoundException.class)
public void testUpdateIssueNonExistingId() throws IOException, AuthenticationException, RedmineException, NotFoundException {
int nonExistingId = 999999;
Issue issue = new Issue();
issue.setId(nonExistingId);
mgr.update(issue);
}
@Test
public void testGetUsers() {
try {
List<User> users = mgr.getUsers();
Assert.assertTrue(users.size() > 0);
// boolean found = false;
// for (Project project : projects) {
// if (project.getIdentifier().equals(projectKey)) {
// found = true;
// break;
// }
// }
// if (!found) {
// fail("Our project with key '" + projectKey+"' is not found on the server");
// }
} catch (Exception e) {
e.printStackTrace();
Assert.fail(e.getMessage());
}
}
@Test
public void testGetCurrentUser() throws IOException, AuthenticationException, RedmineException, NotFoundException {
User currentUser = mgr.getCurrentUser();
Assert.assertEquals(getOurUser().getId(), currentUser.getId());
Assert.assertEquals(getOurUser().getLogin(), currentUser.getLogin());
}
@Test
public void testGetUserById() throws IOException, AuthenticationException, NotFoundException, RedmineException {
User loadedUser = mgr.getUserById(getOurUser().getId());
Assert.assertEquals(getOurUser().getId(), loadedUser.getId());
Assert.assertEquals(getOurUser().getLogin(), loadedUser.getLogin());
}
@Test(expected = NotFoundException.class)
public void testGetUserNonExistingId() throws IOException, AuthenticationException, RedmineException, NotFoundException {
mgr.getUserById(999999);
}
@Test(expected = NotFoundException.class)
public void testInvalidGetCurrentUser() throws IOException, AuthenticationException, RedmineException, NotFoundException {
RedmineManager invalidManager = new RedmineManager(testConfig.getURI() + "/INVALID");
invalidManager.setLogin("Invalid");
invalidManager.setPassword("Invalid");
invalidManager.getCurrentUser();
}
@Test
public void testCreateUser() throws IOException, AuthenticationException, NotFoundException, RedmineException {
User createdUser = null;
try {
User userToCreate = generateRandomUser();
createdUser = mgr.createUser(userToCreate);
Assert.assertNotNull("checking that a non-null project is returned", createdUser);
Assert.assertEquals(userToCreate.getLogin(), createdUser.getLogin());
Assert.assertEquals(userToCreate.getFirstName(), createdUser.getFirstName());
Assert.assertEquals(userToCreate.getLastName(), createdUser.getLastName());
Integer id = createdUser.getId();
Assert.assertNotNull(id);
} catch (Exception e) {
Assert.fail(e.getMessage());
} finally {
if (createdUser != null) {
mgr.deleteUser(createdUser.getId());
}
}
}
private static User generateRandomUser() {
User user = new User();
user.setFirstName("fname");
user.setLastName("lname");
long randomNumber = new Date().getTime();
user.setLogin("login" + randomNumber);
user.setMail("somemail" + randomNumber + "@somedomain.com");
user.setPassword("zzzz");
return user;
}
@Test
public void testUpdateUser() throws IOException, AuthenticationException, NotFoundException {
User userToCreate = new User();
userToCreate.setFirstName("fname2");
userToCreate.setLastName("lname2");
long randomNumber = new Date().getTime();
userToCreate.setLogin("login33" + randomNumber);
userToCreate.setMail("email" + randomNumber + "@somedomain.com");
userToCreate.setPassword("1234");
try {
User createdUser = mgr.createUser(userToCreate);
Integer userId = createdUser.getId();
Assert.assertNotNull("checking that a non-null project is returned", createdUser);
String newFirstName = "fnameNEW";
String newLastName = "lnameNEW";
String newMail = "newmail" + randomNumber + "@asd.com";
createdUser.setFirstName(newFirstName);
createdUser.setLastName(newLastName);
createdUser.setMail(newMail);
mgr.update(createdUser);
User updatedUser = mgr.getUserById(userId);
Assert.assertEquals(newFirstName, updatedUser.getFirstName());
Assert.assertEquals(newLastName, updatedUser.getLastName());
Assert.assertEquals(newMail, updatedUser.getMail());
Assert.assertEquals(userId, updatedUser.getId());
} catch (Exception e) {
Assert.fail(e.getMessage());
}
}
@Test
public void userCanBeDeleted() throws IOException, AuthenticationException, RedmineException, NotFoundException {
User user = generateRandomUser();
User createdUser = mgr.createUser(user);
Integer newUserId = createdUser.getId();
try {
mgr.deleteUser(newUserId);
} catch (Exception e) {
Assert.fail(e.getMessage());
}
try {
mgr.getUserById(newUserId);
fail("Must have failed with NotFoundException because we tried to delete the user");
} catch (NotFoundException e) {
// ignore: the user should not be found
}
}
@Test(expected = NotFoundException.class)
public void deletingNonExistingUserThrowsNFE() throws IOException, AuthenticationException, RedmineException, NotFoundException {
mgr.deleteUser(999999);
}
@Test
public void testGetIssuesPaging() {
try {
// create 27 issues. default page size is 25.
createIssues(27);
// mgr.setObjectsPerPage(5); <-- does not work now
List<Issue> issues = mgr.getIssues(projectKey, null);
logger.debug("testGetIssuesPaging() loaded " + issues.size() + " issues");//using query #" + queryIdIssuesCreatedLast2Days);
Assert.assertTrue(issues.size() > 26);
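// Putting the issues into a set verifies that paging did not return the same issue twice.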
Set<Issue> issueSet = new HashSet<Issue>(issues);
Assert.assertEquals(issues.size(), issueSet.size());
} catch (Exception e) {
e.printStackTrace();
Assert.fail(e.getMessage());
}
}
private List<Issue> createIssues(int issuesNumber) throws IOException, AuthenticationException, NotFoundException, RedmineException {
List<Issue> issues = new ArrayList<Issue>(issuesNumber);
for (int i = 0; i < issuesNumber; i++) {
Issue issueToCreate = new Issue();
issueToCreate.setSubject("some issue " + i + " " + new Date());
Issue issue = mgr.createIssue(projectKey, issueToCreate);
issues.add(issue);
}
return issues;
}
private Issue generateRandomIssue() {
Random r = new Random();
Issue issue = new Issue();
issue.setSubject("some issue " + r.nextInt() + " " + new Date());
return issue;
}
@Test
public void testProjectsAllPagesLoaded() throws IOException, AuthenticationException, NotFoundException, URISyntaxException, RedmineException {
int NUM = 27; // must be larger than 25, which is a default page size in Redmine
List<Project> projects = createProjects(NUM);
List<Project> loadedProjects = mgr.getProjects();
Assert.assertTrue(
"Number of projects loaded from the server must be bigger than "
+ NUM + ", but it's " + loadedProjects.size(),
loadedProjects.size() > NUM);
deleteProjects(projects);
}
private List<Project> createProjects(int num) throws IOException, AuthenticationException, RedmineException, NotFoundException {
List<Project> projects = new ArrayList<Project>(num);
for (int i = 0; i < num; i++) {
Project projectToCreate = generateRandomProject();
Project p = mgr.createProject(projectToCreate);
projects.add(p);
}
return projects;
}
private void deleteProjects(List<Project> projects) throws IOException, AuthenticationException, NotFoundException, RedmineException {
for (Project p : projects) {
mgr.deleteProject(p.getIdentifier());
}
}
@Test
public void testGetTimeEntries() throws IOException, AuthenticationException, NotFoundException, RedmineException {
List<TimeEntry> list = mgr.getTimeEntries();
Assert.assertNotNull(list);
}
@Test
public void testCreateGetTimeEntry() throws IOException, AuthenticationException, NotFoundException, RedmineException {
Issue issue = createIssues(1).get(0);
Integer issueId = issue.getId();
TimeEntry entry = new TimeEntry();
Float hours = 11f;
entry.setHours(hours);
entry.setIssueId(issueId);
// TODO We don't know activities IDs!
// see feature request http://www.redmine.org/issues/7506
entry.setActivityId(ACTIVITY_ID);
TimeEntry createdEntry = mgr.createTimeEntry(entry);
Assert.assertNotNull(createdEntry);
logger.debug("Created time entry " + createdEntry);
Assert.assertEquals(hours, createdEntry.getHours());
Float newHours = 22f;
createdEntry.setHours(newHours);
mgr.update(createdEntry);
TimeEntry updatedEntry = mgr.getTimeEntry(createdEntry.getId());
Assert.assertEquals(newHours, updatedEntry.getHours());
}
@Test(expected = NotFoundException.class)
public void testCreateDeleteTimeEntry() throws IOException, AuthenticationException, NotFoundException, RedmineException {
Issue issue = createIssues(1).get(0);
Integer issueId = issue.getId();
TimeEntry entry = new TimeEntry();
Float hours = 4f;
entry.setHours(hours);
entry.setIssueId(issueId);
entry.setActivityId(ACTIVITY_ID);
TimeEntry createdEntry = mgr.createTimeEntry(entry);
Assert.assertNotNull(createdEntry);
mgr.deleteTimeEntry(createdEntry.getId());
mgr.getTimeEntry(createdEntry.getId());
}
@Test
public void testGetTimeEntriesForIssue() throws IOException, AuthenticationException, NotFoundException, RedmineException {
Issue issue = createIssues(1).get(0);
Integer issueId = issue.getId();
Float hours1 = 2f;
Float hours2 = 7f;
Float totalHoursExpected = hours1 + hours2;
TimeEntry createdEntry1 = createTimeEntry(issueId, hours1);
TimeEntry createdEntry2 = createTimeEntry(issueId, hours2);
Assert.assertNotNull(createdEntry1);
Assert.assertNotNull(createdEntry2);
List<TimeEntry> entries = mgr.getTimeEntriesForIssue(issueId);
Assert.assertEquals(2, entries.size());
Float totalTime = 0f;
for (TimeEntry timeEntry : entries) {
totalTime += timeEntry.getHours();
}
Assert.assertEquals(totalHoursExpected, totalTime);
}
private TimeEntry createTimeEntry(Integer issueId, float hours) throws IOException,
AuthenticationException, NotFoundException, RedmineException {
TimeEntry entry = new TimeEntry();
entry.setHours(hours);
entry.setIssueId(issueId);
entry.setActivityId(ACTIVITY_ID);
return mgr.createTimeEntry(entry);
}
@Test(expected = NotFoundException.class)
public void testDeleteIssue() throws IOException, AuthenticationException,
NotFoundException, RedmineException {
Issue issue = createIssues(1).get(0);
Issue retrievedIssue = mgr.getIssueById(issue.getId());
Assert.assertEquals(issue, retrievedIssue);
mgr.deleteIssue(issue.getId());
mgr.getIssueById(issue.getId());
}
@Test
public void testUpdateIssueSpecialXMLtags() throws Exception {
Issue issue = createIssues(1).get(0);
String newSubject = "\"text in quotes\" and <xml> tags";
String newDescription = "<taghere>\"abc\"</here>";
issue.setSubject(newSubject);
issue.setDescription(newDescription);
mgr.update(issue);
Issue updatedIssue = mgr.getIssueById(issue.getId());
Assert.assertEquals(newSubject, updatedIssue.getSubject());
Assert.assertEquals(newDescription, updatedIssue.getDescription());
}
/**
* The custom fields used here MUST ALREADY EXIST on the server and be
* associated with the required task type (bug/feature/task/..).
* <p/>
* See feature request http://www.redmine.org/issues/9664
*/
@Test
public void testCustomFields() throws Exception {
Issue issue = createIssues(1).get(0);
// default empty values
Assert.assertEquals(2, issue.getCustomFields().size());
// TODO update this!
int id1 = 1; // TODO this is pretty much a hack, we don't generally know these ids!
String custom1FieldName = "my_custom_1";
String custom1Value = "some value 123";
int id2 = 2;
String custom2FieldName = "custom_boolean_1";
String custom2Value = "true";
issue.setCustomFields(new ArrayList<CustomField>());
issue.getCustomFields().add(new CustomField(id1, custom1FieldName, custom1Value));
issue.getCustomFields().add(new CustomField(id2, custom2FieldName, custom2Value));
mgr.update(issue);
Issue updatedIssue = mgr.getIssueById(issue.getId());
Assert.assertEquals(2, updatedIssue.getCustomFields().size());
Assert.assertEquals(custom1Value, updatedIssue.getCustomField(custom1FieldName));
Assert.assertEquals(custom2Value, updatedIssue.getCustomField(custom2FieldName));
}
@Test
public void testUpdateIssueDoesNotChangeEstimatedTime() {
try {
Issue issue = new Issue();
String originalSubject = "Issue " + new Date();
issue.setSubject(originalSubject);
Issue newIssue = mgr.createIssue(projectKey, issue);
Assert.assertEquals("Estimated hours must be NULL", null, newIssue.getEstimatedHours());
mgr.update(newIssue);
Issue reloadedFromRedmineIssue = mgr.getIssueById(newIssue.getId());
Assert.assertEquals("Estimated hours must be NULL", null, reloadedFromRedmineIssue.getEstimatedHours());
} catch (Exception e) {
Assert.fail();
}
}
/**
* Tests the correct retrieval of the parent id of sub {@link Project}.
*
* @throws RedmineException thrown in case something went wrong in Redmine
* @throws IOException thrown in case something went wrong while performing I/O
* operations
* @throws AuthenticationException thrown in case something went wrong while trying to login
* @throws NotFoundException thrown in case the objects requested for could not be found
*/
@Test
public void testSubProjectIsCreatedWithCorrectParentId() throws IOException, AuthenticationException, RedmineException, NotFoundException {
Project createdMainProject = null;
try {
createdMainProject = createProject();
Project subProject = createSubProject(createdMainProject);
Assert.assertEquals("Must have correct parent ID",
createdMainProject.getId(), subProject.getParentId());
} finally {
if (createdMainProject != null) {
mgr.deleteProject(createdMainProject.getIdentifier());
}
}
}
private Project createProject() throws IOException, AuthenticationException, RedmineException, NotFoundException {
Project mainProject = new Project();
long id = new Date().getTime();
mainProject.setName("project" + id);
mainProject.setIdentifier("project" + id);
return mgr.createProject(mainProject);
}
private Project createSubProject(Project parent) throws IOException, AuthenticationException, RedmineException, NotFoundException {
Project project = new Project();
long id = new Date().getTime();
project.setName("sub_pr" + id);
project.setIdentifier("subpr" + id);
project.setParentId(parent.getId());
return mgr.createProject(project);
}
@Test
public void testIssueDoneRatio() {
try {
Issue issue = new Issue();
String subject = "Issue " + new Date();
issue.setSubject(subject);
Issue createdIssue = mgr.createIssue(projectKey, issue);
Assert.assertEquals("Initial 'done ratio' must be 0", (Integer) 0, createdIssue.getDoneRatio());
Integer doneRatio = 50;
createdIssue.setDoneRatio(doneRatio);
mgr.update(createdIssue);
Integer issueId = createdIssue.getId();
Issue reloadedFromRedmineIssue = mgr.getIssueById(issueId);
Assert.assertEquals(
"Checking if 'update issue' operation changed 'done ratio' field",
doneRatio, reloadedFromRedmineIssue.getDoneRatio());
Integer invalidDoneRatio = 130;
reloadedFromRedmineIssue.setDoneRatio(invalidDoneRatio);
try {
mgr.update(reloadedFromRedmineIssue);
} catch (RedmineException e) {
Assert.assertEquals("Must be 1 error", 1, e.getErrors().size());
Assert.assertEquals("Checking error text", "% Done is not included in the list", e.getErrors().get(0));
}
Issue reloadedFromRedmineIssueUnchanged = mgr.getIssueById(issueId);
Assert.assertEquals(
"'done ratio' must have remained unchanged after invalid value",
doneRatio, reloadedFromRedmineIssueUnchanged.getDoneRatio());
} catch (Exception e) {
fail(e.toString());
}
}
@Test
public void testIssueNullDescriptionDoesNotEraseIt() {
try {
Issue issue = new Issue();
String subject = "Issue " + new Date();
String descr = "Some description";
issue.setSubject(subject);
issue.setDescription(descr);
Issue createdIssue = mgr.createIssue(projectKey, issue);
Assert.assertEquals("Checking description", descr, createdIssue.getDescription());
createdIssue.setDescription(null);
mgr.update(createdIssue);
Integer issueId = createdIssue.getId();
Issue reloadedFromRedmineIssue = mgr.getIssueById(issueId);
Assert.assertEquals(
"Description must not be erased",
descr, reloadedFromRedmineIssue.getDescription());
reloadedFromRedmineIssue.setDescription("");
mgr.update(reloadedFromRedmineIssue);
Issue reloadedFromRedmineIssueUnchanged = mgr.getIssueById(issueId);
Assert.assertEquals(
"Description must be erased",
"", reloadedFromRedmineIssueUnchanged.getDescription());
} catch (Exception e) {
Assert.fail();
}
}
@Test
public void testIssueJournals() {
try {
// create at least 1 issue
Issue issueToCreate = new Issue();
issueToCreate.setSubject("testGetIssues: " + new Date());
Issue newIssue = mgr.createIssue(projectKey, issueToCreate);
Issue loadedIssueWithJournals = mgr.getIssueById(newIssue.getId(), INCLUDE.journals);
Assert.assertTrue(loadedIssueWithJournals.getJournals().isEmpty());
String commentDescribingTheUpdate = "some comment describing the issue update";
loadedIssueWithJournals.setSubject("new subject");
loadedIssueWithJournals.setNotes(commentDescribingTheUpdate);
mgr.update(loadedIssueWithJournals);
Issue loadedIssueWithJournals2 = mgr.getIssueById(newIssue.getId(), INCLUDE.journals);
Assert.assertEquals(1, loadedIssueWithJournals2.getJournals().size());
Journal journalItem = loadedIssueWithJournals2.getJournals().get(0);
Assert.assertEquals(commentDescribingTheUpdate, journalItem.getNotes());
User ourUser = getOurUser();
// can't compare the User objects directly because they may not be completely filled in
Assert.assertEquals(ourUser.getId(), journalItem.getUser().getId());
Assert.assertEquals(ourUser.getFirstName(), journalItem.getUser().getFirstName());
Assert.assertEquals(ourUser.getLastName(), journalItem.getUser().getLastName());
Issue loadedIssueWithoutJournals = mgr.getIssueById(newIssue.getId());
Assert.assertTrue(loadedIssueWithoutJournals.getJournals().isEmpty());
} catch (Exception e) {
e.printStackTrace();
Assert.fail(e.getMessage());
}
}
@Test
public void testCreateRelation() {
try {
List<Issue> issues = createIssues(2);
Issue src = issues.get(0);
Issue target = issues.get(1);
String relationText = IssueRelation.TYPE.precedes.toString();
IssueRelation r = mgr.createRelation(src.getId(), target.getId(), relationText);
assertEquals(src.getId(), r.getIssueId());
Assert.assertEquals(target.getId(), r.getIssueToId());
Assert.assertEquals(relationText, r.getType());
} catch (Exception e) {
Assert.fail(e.toString());
}
}
private IssueRelation createTwoRelatedIssues() throws IOException, AuthenticationException, NotFoundException, RedmineException {
List<Issue> issues = createIssues(2);
Issue src = issues.get(0);
Issue target = issues.get(1);
String relationText = IssueRelation.TYPE.precedes.toString();
return mgr.createRelation(src.getId(), target.getId(), relationText);
}
@Test
public void issueRelationsAreCreatedAndLoadedOK() {
try {
IssueRelation relation = createTwoRelatedIssues();
Issue issue = mgr.getIssueById(relation.getIssueId(), INCLUDE.relations);
Issue issueTarget = mgr.getIssueById(relation.getIssueToId(), INCLUDE.relations);
Assert.assertEquals(1, issue.getRelations().size());
Assert.assertEquals(1, issueTarget.getRelations().size());
IssueRelation relation1 = issue.getRelations().get(0);
assertEquals(issue.getId(), relation1.getIssueId());
assertEquals(issueTarget.getId(), relation1.getIssueToId());
assertEquals("precedes", relation1.getType());
assertEquals((Integer) 0, relation1.getDelay());
IssueRelation reverseRelation = issueTarget.getRelations().get(0);
// both forward and reverse relations are the same!
Assert.assertEquals(relation1, reverseRelation);
} catch (Exception e) {
Assert.fail(e.toString());
}
}
@Test
public void testIssueRelationDelete() throws IOException, AuthenticationException, RedmineException, NotFoundException {
IssueRelation relation = createTwoRelatedIssues();
mgr.deleteRelation(relation.getId());
Issue issue = mgr.getIssueById(relation.getIssueId(), INCLUDE.relations);
Assert.assertEquals(0, issue.getRelations().size());
}
@Test
public void testIssueRelationsDelete() throws IOException, AuthenticationException, RedmineException, NotFoundException {
List<Issue> issues = createIssues(3);
Issue src = issues.get(0);
Issue target = issues.get(1);
String relationText = IssueRelation.TYPE.precedes.toString();
mgr.createRelation(src.getId(), target.getId(), relationText);
target = issues.get(2);
mgr.createRelation(src.getId(), target.getId(), relationText);
src = mgr.getIssueById(src.getId(), INCLUDE.relations);
mgr.deleteIssueRelations(src);
Issue issue = mgr.getIssueById(src.getId(), INCLUDE.relations);
Assert.assertEquals(0, issue.getRelations().size());
}
/**
* this test is ignored because:
* 1) we can't create Versions. see http://www.redmine.org/issues/9088
* 2) we don't currently set versions when creating issues.
*/
@Ignore
@Test
public void issueFixVersionIsSet() throws Exception {
String existingProjectKey = "test";
Issue toCreate = generateRandomIssue();
Version v = new Version();
String versionName = "1.0";
v.setName("1.0");
v.setId(1);
toCreate.setTargetVersion(v);
Issue createdIssue = mgr.createIssue(existingProjectKey, toCreate);
Assert.assertNotNull(createdIssue.getTargetVersion());
Assert.assertEquals(createdIssue.getTargetVersion().getName(), versionName);
}
// Redmine ignores this parameter for "get projects" request. see bug http://www.redmine.org/issues/8545
@Ignore
@Test
public void testGetProjectsIncludesTrackers() {
try {
List<Project> projects = mgr.getProjects();
Assert.assertTrue(projects.size() > 0);
Project p1 = projects.get(0);
Assert.assertNotNull(p1.getTrackers());
// XXX there could be a case when a project does not have any trackers
// need to create a project with some trackers to make this test deterministic
Assert.assertTrue(!p1.getTrackers().isEmpty());
logger.debug("Created trackers " + p1.getTrackers());
} catch (Exception e) {
e.printStackTrace();
Assert.fail(e.getMessage());
}
}
@Ignore
@Test
public void testSpentTimeFieldLoaded() {
try {
Issue issue = new Issue();
String subject = "Issue " + new Date();
issue.setSubject(subject);
float spentHours = 2;
issue.setSpentHours(spentHours);
Issue createdIssue = mgr.createIssue(projectKey, issue);
Issue newIssue = mgr.getIssueById(createdIssue.getId());
Assert.assertEquals((Float)spentHours, newIssue.getSpentHours());
} catch (Exception e) {
Assert.fail();
}
}
@Ignore
@Test
public void testSpentTime() {
// TODO need to use "Time Entries"
// float spentHours = 12.5f;
// issueToCreate.setSpentHours(spentHours);
// check SPENT TIME
// assertEquals((Float) spentHours, newIssue.getSpentHours());
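// A possible sketch, assuming the TimeEntry API already exercised elsewhere in this class
// (testCreateGetTimeEntry): create a TimeEntry with setIssueId()/setHours()/setActivityId(),
// submit it via mgr.createTimeEntry(), then reload the issue and assert on getSpentHours().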
}
@Test(expected = IllegalArgumentException.class)
public void invalidTimeEntryFailsWithIAEOnCreate() throws IOException, AuthenticationException, RedmineException, NotFoundException {
mgr.createTimeEntry(createIncompleteTimeEntry());
}
@Test(expected = IllegalArgumentException.class)
public void invalidTimeEntryFailsWithIAEOnUpdate() throws IOException, AuthenticationException, RedmineException, NotFoundException {
mgr.update(createIncompleteTimeEntry());
}
private TimeEntry createIncompleteTimeEntry() {
TimeEntry timeEntry = new TimeEntry();
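// intentionally no issue ID and no project ID: a time entry needs one of them to be valid,
// which is what makes this entry "incomplete" (see testViolateTimeEntryConstraint_ProjectOrIssueID_issue66)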
timeEntry.setActivityId(ACTIVITY_ID);
timeEntry.setSpentOn(new Date());
timeEntry.setHours(1.5f);
return timeEntry;
}
@Test
public void testViolateTimeEntryConstraint_ProjectOrIssueID_issue66() throws IOException, AuthenticationException, RedmineException {
TimeEntry timeEntry = createIncompleteTimeEntry();
// Now try to verify with a project ID (the case with an issue ID seems to be covered already)
int projectId = mgr.getProjects().get(0).getId();
timeEntry.setProjectId(projectId);
try {
TimeEntry created = mgr.createTimeEntry(timeEntry);
logger.debug("Created time entry " + created);
} catch (Exception e) {
e.printStackTrace();
fail("Unexpected " + e.getClass().getSimpleName() + ": " + e.getMessage());
}
}
/**
* tests the retrieval of statuses.
*
* @throws RedmineException thrown in case something went wrong in Redmine
* @throws IOException thrown in case something went wrong while performing I/O
* operations
* @throws AuthenticationException thrown in case something went wrong while trying to login
* @throws NotFoundException thrown in case the objects requested for could not be found
*/
@Test
public void testGetStatuses() throws RedmineException, IOException, AuthenticationException, NotFoundException {
// TODO we should create some statuses first, but the Redmine Java API does not support this presently
List<IssueStatus> statuses = mgr.getStatuses();
Assert.assertFalse("Expected list of statuses not to be empty", statuses.isEmpty());
for (IssueStatus issueStatus : statuses) {
// asserts on status
assertNotNull("ID of status must not be null", issueStatus.getId());
assertNotNull("Name of status must not be null", issueStatus.getName());
}
}
/**
* tests the creation of an invalid {@link Version}.
*
* @throws RedmineException thrown in case something went wrong in Redmine
* @throws IOException thrown in case something went wrong while performing I/O
* operations
* @throws AuthenticationException thrown in case something went wrong while trying to login
* @throws NotFoundException thrown in case the objects requested for could not be found
*/
@Test(expected = IllegalArgumentException.class)
public void testCreateInvalidVersion() throws RedmineException, IOException, AuthenticationException, NotFoundException {
Version version = new Version(null, "Invalid test version " + UUID.randomUUID().toString());
mgr.createVersion(version);
}
/**
* tests the deletion of an invalid {@link Version}. Expects a
* {@link NotFoundException} to be thrown.
*
* @throws RedmineException thrown in case something went wrong in Redmine
* @throws IOException thrown in case something went wrong while performing I/O
* operations
* @throws AuthenticationException thrown in case something went wrong while trying to login
* @throws NotFoundException thrown in case the objects requested for could not be found
*/
@Test(expected = NotFoundException.class)
public void testDeleteInvalidVersion() throws RedmineException, IOException, AuthenticationException, NotFoundException {
// create new test version
Version version = new Version(null, "Invalid test version " + UUID.randomUUID().toString());
version.setDescription("An invalid test version created by " + this.getClass());
// set invalid id
version.setId(-1);
// now try to delete version
mgr.deleteVersion(version);
}
/**
* tests the deletion of a {@link Version}.
*
* @throws RedmineException thrown in case something went wrong in Redmine
* @throws IOException thrown in case something went wrong while performing I/O
* operations
* @throws AuthenticationException thrown in case something went wrong while trying to login
* @throws NotFoundException thrown in case the objects requested for could not be found
*/
@Test
public void testDeleteVersion() throws RedmineException, IOException, AuthenticationException, NotFoundException {
Project project = mgr.getProjectByKey(projectKey);
// create new test version
Version version = new Version(project, "Test version " + UUID.randomUUID().toString());
version.setDescription("A test version created by " + this.getClass());
version.setStatus("open");
Version newVersion = mgr.createVersion(version);
// assert new test version
Assert.assertNotNull("Expected new version not to be null", newVersion);
// now delete version
mgr.deleteVersion(newVersion);
// assert that the version is gone
List<Version> versions = mgr.getVersions(project.getId());
Assert.assertTrue("List of versions of test project must be empty now but is " + versions, versions.isEmpty());
}
/**
* tests the retrieval of {@link Version}s.
*
* @throws RedmineException thrown in case something went wrong in Redmine
* @throws IOException thrown in case something went wrong while performing I/O
* operations
* @throws AuthenticationException thrown in case something went wrong while trying to login
* @throws NotFoundException thrown in case the objects requested for could not be found
*/
@Test
public void testGetVersions() throws RedmineException, IOException, AuthenticationException, NotFoundException {
Project project = mgr.getProjectByKey(projectKey);
// create some versions
Version testVersion1 = mgr.createVersion(new Version(project, "Version" + UUID.randomUUID()));
Version testVersion2 = mgr.createVersion(new Version(project, "Version" + UUID.randomUUID()));
try {
List<Version> versions = mgr.getVersions(project.getId());
Assert.assertEquals("Wrong number of versions for project " + project.getName() + " delivered by Redmine Java API", 2, versions.size());
for (Version version : versions) {
// assert version
Assert.assertNotNull("ID of version must not be null", version.getId());
Assert.assertNotNull("Name of version must not be null", version.getName());
Assert.assertNotNull("Project of version must not be null", version.getProject());
}
} finally {
if (testVersion1 != null) {
mgr.deleteVersion(testVersion1);
}
if (testVersion2 != null) {
mgr.deleteVersion(testVersion2);
}
}
}
@Ignore // see Redmine bug http://www.redmine.org/issues/10241
@Test
public void versionIsRetrievedById() throws IOException, AuthenticationException, RedmineException, NotFoundException {
Project project = mgr.getProjectByKey(projectKey);
Version createdVersion = mgr.createVersion(new Version(project, "Version_1_" + UUID.randomUUID()));
Version versionById = mgr.getVersionById(createdVersion.getId());
assertEquals(createdVersion, versionById);
}
@Ignore // see Redmine bug http://www.redmine.org/issues/10241
@Test
public void versionIsUpdated() throws IOException, AuthenticationException, RedmineException, NotFoundException {
Project project = mgr.getProjectByKey(projectKey);
Version createdVersion = mgr.createVersion(new Version(project, "Version_1_" + UUID.randomUUID()));
String description = "new description";
createdVersion.setDescription(description);
mgr.update(createdVersion);
Version versionById = mgr.getVersionById(createdVersion.getId());
assertEquals(description, versionById.getDescription());
}
/**
* tests the creation and deletion of a {@link IssueCategory}.
*
* @throws RedmineException thrown in case something went wrong in Redmine
* @throws IOException thrown in case something went wrong while performing I/O
* operations
* @throws AuthenticationException thrown in case something went wrong while trying to login
* @throws NotFoundException thrown in case the objects requested for could not be found
*/
@Test
public void testCreateAndDeleteIssueCategory() throws RedmineException, IOException, AuthenticationException, NotFoundException {
Project project = mgr.getProjectByKey(projectKey);
// create new test category
IssueCategory category = new IssueCategory(project, "Category" + new Date().getTime());
category.setAssignee(getOurUser());
IssueCategory newIssueCategory = mgr.createCategory(category);
// assert new test category
Assert.assertNotNull("Expected new category not to be null", newIssueCategory);
Assert.assertNotNull("Expected project of new category not to be null", newIssueCategory.getProject());
Assert.assertNotNull("Expected assignee of new category not to be null", newIssueCategory.getAssignee());
// now delete category
mgr.deleteCategory(newIssueCategory);
// assert that the category is gone
List<IssueCategory> categories = mgr.getCategories(project.getId());
Assert.assertTrue("List of categories of test project must be empty now but is " + categories, categories.isEmpty());
}
/**
* tests the retrieval of {@link IssueCategory}s.
*
* @throws RedmineException thrown in case something went wrong in Redmine
* @throws IOException thrown in case something went wrong while performing I/O
* operations
* @throws AuthenticationException thrown in case something went wrong while trying to login
* @throws NotFoundException thrown in case the objects requested for could not be found
*/
@Test
public void testGetIssueCategories() throws RedmineException, IOException, AuthenticationException, NotFoundException {
Project project = mgr.getProjectByKey(projectKey);
// create some categories
IssueCategory testIssueCategory1 = new IssueCategory(project, "Category" + new Date().getTime());
testIssueCategory1.setAssignee(getOurUser());
IssueCategory newIssueCategory1 = mgr.createCategory(testIssueCategory1);
IssueCategory testIssueCategory2 = new IssueCategory(project, "Category" + new Date().getTime());
testIssueCategory2.setAssignee(getOurUser());
IssueCategory newIssueCategory2 = mgr.createCategory(testIssueCategory2);
try {
List<IssueCategory> categories = mgr.getCategories(project.getId());
Assert.assertEquals("Wrong number of categories for project " + project.getName() + " delivered by Redmine Java API", 2, categories.size());
for (IssueCategory category : categories) {
// assert category
Assert.assertNotNull("ID of category must not be null", category.getId());
Assert.assertNotNull("Name of category must not be null", category.getName());
Assert.assertNotNull("Project of category must not be null", category.getProject());
Assert.assertNotNull("Assignee of category must not be null", category.getAssignee());
}
} finally {
// scrub test categories
if (newIssueCategory1 != null) {
mgr.deleteCategory(newIssueCategory1);
}
if (newIssueCategory2 != null) {
mgr.deleteCategory(newIssueCategory2);
}
}
}
/**
* tests the creation of an invalid {@link IssueCategory}.
*
* @throws RedmineException thrown in case something went wrong in Redmine
* @throws IOException thrown in case something went wrong while performing I/O
* operations
* @throws AuthenticationException thrown in case something went wrong while trying to login
* @throws NotFoundException thrown in case the objects requested for could not be found
*/
@Test(expected = IllegalArgumentException.class)
public void testCreateInvalidIssueCategory() throws RedmineException, IOException, AuthenticationException, NotFoundException {
IssueCategory category = new IssueCategory(null, "InvalidCategory" + new Date().getTime());
mgr.createCategory(category);
}
/**
* tests the deletion of an invalid {@link IssueCategory}. Expects a
* {@link NotFoundException} to be thrown.
*
* @throws RedmineException thrown in case something went wrong in Redmine
* @throws IOException thrown in case something went wrong while performing I/O
* operations
* @throws AuthenticationException thrown in case something went wrong while trying to login
* @throws NotFoundException thrown in case the objects requested for could not be found
*/
@Test(expected = NotFoundException.class)
public void testDeleteInvalidIssueCategory() throws RedmineException, IOException, AuthenticationException, NotFoundException {
// create new test category
IssueCategory category = new IssueCategory(null, "InvalidCategory" + new Date().getTime());
// set invalid id
category.setId(-1);
// now try to delete category
mgr.deleteCategory(category);
}
/**
* Tests the retrieval of {@link Tracker}s.
*
* @throws RedmineException thrown in case something went wrong in Redmine
* @throws IOException thrown in case something went wrong while performing I/O
* operations
* @throws AuthenticationException thrown in case something went wrong while trying to login
* @throws NotFoundException thrown in case the objects requested for could not be found
*/
@Test
public void testGetTrackers() throws RedmineException, IOException, AuthenticationException, NotFoundException {
List<Tracker> trackers = mgr.getTrackers();
assertNotNull("List of trackers returned should not be null", trackers);
assertFalse("List of trackers returned should not be empty", trackers.isEmpty());
for (Tracker tracker : trackers) {
assertNotNull("Tracker returned should not be null", tracker);
assertNotNull("ID of tracker returned should not be null", tracker.getId());
assertNotNull("Name of tracker returned should not be null", tracker.getName());
}
}
/**
* Tests the retrieval of an {@link Issue}, including the {@link org.redmine.ta.beans.Attachment}s.
*
* @throws RedmineException thrown in case something went wrong in Redmine
* @throws IOException thrown in case something went wrong while performing I/O
* operations
* @throws AuthenticationException thrown in case something went wrong while trying to login
* @throws NotFoundException thrown in case the objects requested for could not be found
*/
@Test
public void testGetIssueWithAttachments() throws RedmineException, IOException, AuthenticationException, NotFoundException {
Issue newIssue = null;
try {
// create at least 1 issue
Issue issueToCreate = new Issue();
issueToCreate.setSubject("testGetIssueAttachment_" + UUID.randomUUID());
newIssue = mgr.createIssue(projectKey, issueToCreate);
// TODO create test attachments for the issue once the Redmine REST API allows for it
// retrieve issue attachments
Issue retrievedIssue = mgr.getIssueById(newIssue.getId(), INCLUDE.attachments);
Assert.assertNotNull("List of attachments retrieved for issue " + newIssue.getId() + " delivered by Redmine Java API should not be null", retrievedIssue.getAttachments());
// TODO assert attachments once we actually receive ones for our test issue
} finally {
// scrub test issue
if (newIssue != null) {
mgr.deleteIssue(newIssue.getId());
}
}
}
/**
* Tests the retrieval of an {@link org.redmine.ta.beans.Attachment} by its ID.
* TODO reactivate once the Redmine REST API allows for creating attachments
*
* @throws RedmineException thrown in case something went wrong in Redmine
* @throws IOException thrown in case something went wrong while performing I/O
* operations
* @throws AuthenticationException thrown in case something went wrong while trying to login
* @throws NotFoundException thrown in case the objects requested for could not be found
*/
// @Test
public void testGetAttachmentById() throws RedmineException, IOException, AuthenticationException, NotFoundException {
// TODO where do we get a valid attachment number from? We can't create an attachment on our own for the test because the Redmine REST API does not support that.
int attachmentID = 1;
Attachment attachment = mgr.getAttachmentById(attachmentID);
Assert.assertNotNull("Attachment retrieved by ID " + attachmentID + " should not be null", attachment);
Assert.assertNotNull("Content URL of attachment retrieved by ID " + attachmentID + " should not be null", attachment.getContentURL());
// TODO more asserts on the attachment once this delivers an attachment
}
/**
* Tests the download of the content of an {@link org.redmine.ta.beans.Attachment}.
* TODO reactivate once the Redmine REST API allows for creating attachments
*
* @throws RedmineException thrown in case something went wrong in Redmine
* @throws IOException thrown in case something went wrong while performing I/O
* operations
* @throws AuthenticationException thrown in case something went wrong while trying to login
* @throws NotFoundException thrown in case the objects requested for could not be found
*/
// @Test
public void testDownloadAttachmentContent() throws RedmineException, IOException, AuthenticationException, NotFoundException {
// TODO where do we get a valid attachment number from? We can't create an attachment on our own for the test because the Redmine REST API does not support that.
int attachmentID = 1;
// retrieve issue attachment
Attachment attachment = mgr.getAttachmentById(attachmentID);
// download attachment content
byte[] attachmentContent = mgr.downloadAttachmentContent(attachment);
Assert.assertNotNull("Download of content of attachment with content URL " + attachment.getContentURL() + " should not be null", attachmentContent);
}
/**
* Tests the creation and retrieval of an {@link org.redmine.ta.beans.Issue} with a {@link IssueCategory}.
*
* @throws RedmineException thrown in case something went wrong in Redmine
* @throws IOException thrown in case something went wrong while performing I/O
* operations
* @throws AuthenticationException thrown in case something went wrong while trying to login
* @throws NotFoundException thrown in case the objects requested for could not be found
*/
@Test
public void testCreateAndGetIssueWithCategory() throws RedmineException, IOException, AuthenticationException, NotFoundException {
IssueCategory newIssueCategory = null;
Issue newIssue = null;
try {
Project project = mgr.getProjectByKey(projectKey);
// create an issue category
IssueCategory category = new IssueCategory(project, "Category_" + new Date().getTime());
category.setAssignee(getOurUser());
newIssueCategory = mgr.createCategory(category);
// create an issue
Issue issueToCreate = new Issue();
issueToCreate.setSubject("testGetIssueWithCategory_" + UUID.randomUUID());
issueToCreate.setCategory(newIssueCategory);
newIssue = mgr.createIssue(projectKey, issueToCreate);
// retrieve issue
Issue retrievedIssue = mgr.getIssueById(newIssue.getId());
// assert retrieved category of issue
IssueCategory retrievedCategory = retrievedIssue.getCategory();
Assert.assertNotNull("Category retrieved for issue " + newIssue.getId() + " should not be null", retrievedCategory);
Assert.assertEquals("ID of category retrieved for issue " + newIssue.getId() + " is wrong", newIssueCategory.getId(), retrievedCategory.getId());
Assert.assertEquals("Name of category retrieved for issue " + newIssue.getId() + " is wrong", newIssueCategory.getName(), retrievedCategory.getName());
} finally {
if (newIssue != null) {
mgr.deleteIssue(newIssue.getId());
}
if (newIssueCategory != null) {
mgr.deleteCategory(newIssueCategory);
}
}
}
@Test
public void getNewsDoesNotFailForNULLProject() throws IOException, AuthenticationException, RedmineException, NotFoundException {
mgr.getNews(null);
}
@Test
public void getNewsDoesNotFailForTempProject() throws IOException, AuthenticationException, RedmineException, NotFoundException {
mgr.getNews(projectKey);
}
@Test
public void getSavedQueriesDoesNotFailForTempProject() throws IOException, AuthenticationException, RedmineException, NotFoundException {
mgr.getSavedQueries(projectKey);
}
@Test
public void getSavedQueriesDoesNotFailForNULLProject() throws IOException, AuthenticationException, RedmineException, NotFoundException {
mgr.getSavedQueries(null);
}
}
|
src/test/java/org/redmine/ta/RedmineManagerTest.java
|
package org.redmine.ta;
import org.junit.*;
import org.redmine.ta.RedmineManager.INCLUDE;
import org.redmine.ta.beans.*;
import org.redmine.ta.internal.logging.Logger;
import org.redmine.ta.internal.logging.LoggerFactory;
import java.io.IOException;
import java.net.URISyntaxException;
import java.util.*;
import static org.junit.Assert.*;
/**
* This class and its dependencies are located in the org.redmine.ta.api project.
*/
public class RedmineManagerTest {
// TODO We don't know activities IDs!
// see feature request http://www.redmine.org/issues/7506
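// NOTE: this value is assumed to match an existing time entry activity on the test Redmine server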
private static final Integer ACTIVITY_ID = 8;
private static Logger logger = LoggerFactory.getLogger(RedmineManagerTest.class);
private static RedmineManager mgr;
private static String projectKey;
private static TestConfig testConfig;
@BeforeClass
public static void oneTimeSetUp() {
testConfig = new TestConfig();
logger.info("Running redmine tests using: " + testConfig.getURI());
// mgr = new RedmineManager(TestConfig.getURI(), TestConfig.getApiKey());
mgr = new RedmineManager(testConfig.getURI());
mgr.setLogin(testConfig.getLogin());
mgr.setPassword(testConfig.getPassword());
Project junitTestProject = new Project();
junitTestProject.setName("test project");
junitTestProject.setIdentifier("test"
+ Calendar.getInstance().getTimeInMillis());
try {
Project createdProject = mgr.createProject(junitTestProject);
projectKey = createdProject.getIdentifier();
} catch (Exception e) {
logger.error(e, "Exception while creating test project");
Assert.fail("can't create a test project. " + e.getMessage());
}
}
@AfterClass
public static void oneTimeTearDown() {
try {
if (mgr != null && projectKey != null) {
mgr.deleteProject(projectKey);
}
} catch (Exception e) {
logger.error(e, "Exception while deleting test project");
Assert.fail("can't delete the test project '" + projectKey + ". reason: "
+ e.getMessage());
}
}
@Test
public void testCreateIssue() {
try {
Issue issueToCreate = new Issue();
issueToCreate.setSubject("test zzx");
Calendar startCal = Calendar.getInstance();
// have to clear these time fields because Redmine ignores them, and they would otherwise break the comparison later
startCal.clear(Calendar.HOUR_OF_DAY);
startCal.clear(Calendar.MINUTE);
startCal.clear(Calendar.SECOND);
startCal.clear(Calendar.MILLISECOND);
startCal.add(Calendar.DATE, 5);
issueToCreate.setStartDate(startCal.getTime());
Calendar due = Calendar.getInstance();
due.add(Calendar.MONTH, 1);
issueToCreate.setDueDate(due.getTime());
User assignee = getOurUser();
issueToCreate.setAssignee(assignee);
String description = "This is the description for the new task." +
"\nIt has several lines." +
"\nThis is the last line.";
issueToCreate.setDescription(description);
float estimatedHours = 44;
issueToCreate.setEstimatedHours(estimatedHours);
Issue newIssue = mgr.createIssue(projectKey, issueToCreate);
// System.out.println("created: " + newIssue);
Assert.assertNotNull("Checking returned result", newIssue);
Assert.assertNotNull("New issue must have some ID", newIssue.getId());
// check startDate
Calendar returnedStartCal = Calendar.getInstance();
returnedStartCal.setTime(newIssue.getStartDate());
Assert.assertEquals(startCal.get(Calendar.YEAR), returnedStartCal.get(Calendar.YEAR));
Assert.assertEquals(startCal.get(Calendar.MONTH), returnedStartCal.get(Calendar.MONTH));
Assert.assertEquals(startCal.get(Calendar.DAY_OF_MONTH), returnedStartCal.get(Calendar.DAY_OF_MONTH));
// check dueDate
Calendar returnedDueCal = Calendar.getInstance();
returnedDueCal.setTime(newIssue.getDueDate());
Assert.assertEquals(due.get(Calendar.YEAR), returnedDueCal.get(Calendar.YEAR));
Assert.assertEquals(due.get(Calendar.MONTH), returnedDueCal.get(Calendar.MONTH));
Assert.assertEquals(due.get(Calendar.DAY_OF_MONTH), returnedDueCal.get(Calendar.DAY_OF_MONTH));
// check ASSIGNEE
User actualAssignee = newIssue.getAssignee();
Assert.assertNotNull("Checking assignee not null", actualAssignee);
Assert.assertEquals("Checking assignee id", assignee.getId(),
actualAssignee.getId());
// check AUTHOR
Integer EXPECTED_AUTHOR_ID = getOurUser().getId();
Assert.assertEquals(EXPECTED_AUTHOR_ID, newIssue.getAuthor().getId());
// check ESTIMATED TIME
Assert.assertEquals((Float) estimatedHours, newIssue.getEstimatedHours());
// check multi-line DESCRIPTION
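// (strip all whitespace and line breaks before comparing, since the server may normalize them)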
String regexpStripExtra = "\\r|\\n|\\s";
description = description.replaceAll(regexpStripExtra, "");
String actualDescription = newIssue.getDescription();
actualDescription = actualDescription.replaceAll(regexpStripExtra, "");
Assert.assertEquals(description, actualDescription);
// PRIORITY
Assert.assertNotNull(newIssue.getPriorityId());
Assert.assertTrue(newIssue.getPriorityId() > 0);
} catch (Exception e) {
e.printStackTrace();
Assert.fail();
}
}
@Test
public void testCreateIssueWithParent() {
try {
Issue parentIssue = new Issue();
parentIssue.setSubject("parent 1");
Issue newParentIssue = mgr.createIssue(projectKey, parentIssue);
logger.debug("created parent: " + newParentIssue);
Assert.assertNotNull("Checking parent was created", newParentIssue);
Assert.assertNotNull("Checking ID of parent issue is not null",
newParentIssue.getId());
// Integer parentId = 46;
Integer parentId = newParentIssue.getId();
Issue childIssue = new Issue();
childIssue.setSubject("child 1");
childIssue.setParentId(parentId);
Issue newChildIssue = mgr.createIssue(projectKey, childIssue);
logger.debug("created child: " + newChildIssue);
Assert.assertEquals("Checking parent ID of the child issue", parentId,
newChildIssue.getParentId());
} catch (Exception e) {
e.printStackTrace();
Assert.fail();
}
}
@Test
public void testStartDateNull() {
try {
Issue issue = new Issue();
issue.setSubject("test start date");
issue.setStartDate(null);
Issue newIssue = mgr.createIssue(projectKey, issue);
Issue loadedIssue = mgr.getIssueById(newIssue.getId());
Assert.assertNull(loadedIssue.getStartDate());
} catch (Exception e) {
e.printStackTrace();
Assert.fail();
}
}
@Test
public void testGetIssuesBySummary() {
String summary = "issue with subject ABC";
try {
Issue issue = new Issue();
issue.setSubject(summary);
User assignee = getOurUser();
issue.setAssignee(assignee);
Issue newIssue = mgr.createIssue(projectKey, issue);
logger.debug("created: " + newIssue);
Assert.assertNotNull("Checking returned result", newIssue);
Assert.assertNotNull("New issue must have some ID", newIssue.getId());
// try to find the issue
List<Issue> foundIssues = mgr.getIssuesBySummary(projectKey,
summary);
Assert.assertNotNull("Checking if search results is not NULL", foundIssues);
Assert.assertTrue("Search results must be not empty",
!(foundIssues.isEmpty()));
Issue loadedIssue1 = RedmineTestUtils.findIssueInList(foundIssues, newIssue.getId());
Assert.assertNotNull(loadedIssue1);
Assert.assertEquals(summary, loadedIssue1.getSubject());
// User actualAssignee = newIssue.getAssignee();
// assertNotNull("Checking assignee not null", actualAssignee);
// assertEquals("Checking assignee Name", assignee.getName(),
// actualAssignee.getName());
// assertEquals("Checking assignee Id", assignee.getId(),
// actualAssignee.getId());
} catch (Exception e) {
e.printStackTrace();
Assert.fail();
}
}
@Test
public void testTryFindNonExistingIssue() {
String summary = "some summary here for issue which does not exist";
try {
// try to find the issue
List<Issue> foundIssues = mgr.getIssuesBySummary(projectKey,
summary);
Assert.assertNotNull("Search result must be not null", foundIssues);
Assert.assertTrue("Search result list must be empty",
foundIssues.isEmpty());
} catch (Exception e) {
e.printStackTrace();
Assert.fail();
}
}
private static User getOurUser() {
Integer userId = Integer
.parseInt(testConfig.getParam("createissue.userid"));
String login = testConfig.getLogin();
String fName = testConfig.getParam("userFName");
String lName = testConfig.getParam("userLName");
User user = new User();
user.setId(userId);
user.setLogin(login);
user.setFirstName(fName);
user.setLastName(lName);
return user;
}
@Test(expected = IllegalArgumentException.class)
public void testNULLHostParameter() {
new RedmineManager(null);
}
@Test(expected = IllegalArgumentException.class)
public void testEmptyHostParameter() throws RuntimeException {
new RedmineManager("");
}
@Test(expected = AuthenticationException.class)
public void noAPIKeyOnCreateIssueThrowsAE() throws Exception {
RedmineManager redmineMgrEmpty = new RedmineManager(testConfig.getURI());
Issue issue = new Issue();
issue.setSubject("test zzx");
redmineMgrEmpty.createIssue(projectKey, issue);
}
@Test(expected = AuthenticationException.class)
public void wrongAPIKeyOnCreateIssueThrowsAE() throws Exception {
RedmineManager redmineMgrInvalidKey = new RedmineManager(testConfig.getURI(), "wrong_key");
Issue issue = new Issue();
issue.setSubject("test zzx");
redmineMgrInvalidKey.createIssue(projectKey, issue);
}
@Test
public void testUpdateIssue() {
try {
Issue issue = new Issue();
String originalSubject = "Issue " + new Date();
issue.setSubject(originalSubject);
Issue newIssue = mgr.createIssue(projectKey, issue);
String changedSubject = "changed subject";
newIssue.setSubject(changedSubject);
mgr.update(newIssue);
Issue reloadedFromRedmineIssue = mgr.getIssueById(newIssue.getId());
Assert.assertEquals(
"Checking if 'update issue' operation changed the 'subject' field",
changedSubject, reloadedFromRedmineIssue.getSubject());
} catch (Exception e) {
e.printStackTrace();
Assert.fail();
}
}
/**
* Tests the retrieval of an {@link Issue} by its ID.
*
* @throws RedmineException thrown in case something went wrong in Redmine
* @throws IOException thrown in case something went wrong while performing I/O
* operations
* @throws AuthenticationException thrown in case something went wrong while trying to login
* @throws NotFoundException thrown in case the objects requested for could not be found
*/
@Test
public void testGetIssueById() throws RedmineException, IOException, AuthenticationException, NotFoundException {
Issue issue = new Issue();
String originalSubject = "Issue " + new Date();
issue.setSubject(originalSubject);
Issue newIssue = mgr.createIssue(projectKey, issue);
Issue reloadedFromRedmineIssue = mgr.getIssueById(newIssue.getId());
Assert.assertEquals(
"Checking if 'get issue by ID' operation returned issue with same 'subject' field",
originalSubject, reloadedFromRedmineIssue.getSubject());
Tracker tracker = reloadedFromRedmineIssue.getTracker();
Assert.assertNotNull("Tracker of issue should not be null", tracker);
Assert.assertNotNull("ID of tracker of issue should not be null", tracker.getId());
Assert.assertNotNull("Name of tracker of issue should not be null", tracker.getName());
}
/**
* Tests the retrieval of {@link Project}s.
*
* @throws RedmineException thrown in case something went wrong in Redmine
* @throws IOException thrown in case something went wrong while performing I/O
* operations
* @throws AuthenticationException thrown in case something went wrong while trying to login
* @throws NotFoundException thrown in case the objects requested for could not be found
*/
@Test
public void testGetProjects() throws RedmineException, IOException, AuthenticationException, NotFoundException {
// retrieve projects
List<Project> projects = mgr.getProjects();
// asserts
Assert.assertTrue(projects.size() > 0);
boolean found = false;
for (Project project : projects) {
if (project.getIdentifier().equals(projectKey)) {
found = true;
break;
}
}
if (!found) {
Assert.fail("Our project with key '" + projectKey + "' is not found on the server");
}
}
@Test
public void testGetIssues() {
try {
// create at least 1 issue
Issue issueToCreate = new Issue();
issueToCreate.setSubject("testGetIssues: " + new Date());
Issue newIssue = mgr.createIssue(projectKey, issueToCreate);
List<Issue> issues = mgr.getIssues(projectKey, null);
logger.debug("getIssues() loaded " + issues.size() + " issues");//using query #" + queryIdIssuesCreatedLast2Days);
Assert.assertTrue(issues.size() > 0);
boolean found = false;
for (Issue issue : issues) {
if (issue.getId().equals(newIssue.getId())) {
found = true;
break;
}
}
if (!found) {
Assert.fail("getIssues() didn't return the issue we just created. The query "
+ " must have returned all issues created during the last 2 days");
}
} catch (Exception e) {
e.printStackTrace();
Assert.fail(e.getMessage());
}
}
@Test(expected = NotFoundException.class)
public void testGetIssuesInvalidQueryId() throws IOException, AuthenticationException, RedmineException, NotFoundException {
Integer invalidQueryId = 9999999;
mgr.getIssues(projectKey, invalidQueryId);
}
@Test
public void testCreateProject() throws IOException, AuthenticationException, NotFoundException, RedmineException {
Project projectToCreate = generateRandomProject();
String key = null;
try {
Project createdProject = mgr.createProject(projectToCreate);
key = createdProject.getIdentifier();
Assert.assertNotNull("checking that a non-null project is returned", createdProject);
Assert.assertEquals(projectToCreate.getIdentifier(), createdProject.getIdentifier());
Assert.assertEquals(projectToCreate.getName(), createdProject.getName());
Assert.assertEquals(projectToCreate.getDescription(), createdProject.getDescription());
Assert.assertEquals(projectToCreate.getHomepage(), createdProject.getHomepage());
List<Tracker> trackers = createdProject.getTrackers();
Assert.assertNotNull("checking that project has some trackers", trackers);
Assert.assertTrue("checking that project has some trackers", !(trackers.isEmpty()));
} catch (Exception e) {
Assert.fail(e.getMessage());
} finally {
if (key != null) {
mgr.deleteProject(key);
}
}
}
@Test
public void testCreateGetUpdateDeleteProject() throws IOException, AuthenticationException, NotFoundException, RedmineException {
Project projectToCreate = generateRandomProject();
String key = null;
try {
projectToCreate.setIdentifier("id" + new Date().getTime());
logger.debug("trying to create a project with id " + projectToCreate.getIdentifier());
Project createdProject = mgr.createProject(projectToCreate);
key = createdProject.getIdentifier();
String newDescr = "NEW123";
String newName = "new name here";
createdProject.setName(newName);
createdProject.setDescription(newDescr);
mgr.update(createdProject);
Project updatedProject = mgr.getProjectByKey(key);
Assert.assertNotNull(updatedProject);
Assert.assertEquals(createdProject.getIdentifier(), updatedProject.getIdentifier());
Assert.assertEquals(newName, updatedProject.getName());
Assert.assertEquals(newDescr, updatedProject.getDescription());
List<Tracker> trackers = updatedProject.getTrackers();
Assert.assertNotNull("checking that project has some trackers", trackers);
Assert.assertTrue("checking that project has some trackers", !(trackers.isEmpty()));
} catch (Exception e) {
e.printStackTrace();
Assert.fail(e.getMessage());
} finally {
if (key != null) {
mgr.deleteProject(key);
}
}
}
@Test
public void testCreateProjectFailsWithReservedIdentifier() throws Exception {
Project projectToCreate = new Project();
projectToCreate.setName("new");
projectToCreate.setIdentifier("new");
String key = null;
try {
Project createdProject = mgr.createProject(projectToCreate);
// if the creation did not fail (although it should have!),
// we need to clean up and delete this project
key = createdProject.getIdentifier();
} catch (RedmineException e) {
Assert.assertNotNull(e.getErrors());
Assert.assertEquals(1, e.getErrors().size());
Assert.assertEquals("Identifier is reserved", e.getErrors().get(0));
} finally {
if (key != null) {
mgr.deleteProject(key);
}
}
}
private static Project generateRandomProject() {
Project project = new Project();
Long timeStamp = Calendar.getInstance().getTimeInMillis();
String key = "projkey" + timeStamp;
String name = "project number " + timeStamp;
String description = "some description for the project";
project.setIdentifier(key);
project.setName(name);
project.setDescription(description);
project.setHomepage("www.randompage" + timeStamp + ".com");
return project;
}
@Test
public void testCreateIssueNonUnicodeSymbols() {
try {
String nonLatinSymbols = "Example with accents A��o";
Issue toCreate = new Issue();
toCreate.setSubject(nonLatinSymbols);
Issue created = mgr.createIssue(projectKey, toCreate);
Assert.assertEquals(nonLatinSymbols, created.getSubject());
} catch (Exception e) {
Assert.fail(e.getMessage());
}
}
@Test
public void testCreateIssueSummaryOnly() {
try {
Issue issueToCreate = new Issue();
issueToCreate.setSubject("This is the summary line 123");
Issue newIssue = mgr.createIssue(projectKey, issueToCreate);
Assert.assertNotNull("Checking returned result", newIssue);
Assert.assertNotNull("New issue must have some ID", newIssue.getId());
// check AUTHOR
Integer EXPECTED_AUTHOR_ID = getOurUser().getId();
Assert.assertEquals(EXPECTED_AUTHOR_ID, newIssue.getAuthor().getId());
} catch (Exception e) {
e.printStackTrace();
Assert.fail();
}
}
@Test(expected = NotFoundException.class)
public void testCreateIssueInvalidProjectKey() throws IOException, AuthenticationException, RedmineException, NotFoundException {
Issue issueToCreate = new Issue();
issueToCreate.setSubject("Summary line 100");
mgr.createIssue("someNotExistingProjectKey", issueToCreate);
}
@Test(expected = NotFoundException.class)
public void testGetProjectNonExistingId() throws IOException, AuthenticationException, RedmineException, NotFoundException {
mgr.getProjectByKey("some-non-existing-key");
}
@Test(expected = NotFoundException.class)
public void testDeleteNonExistingProject() throws IOException, AuthenticationException, RedmineException, NotFoundException {
mgr.deleteProject("some-non-existing-key");
}
@Test(expected = NotFoundException.class)
public void testGetIssueNonExistingId() throws IOException, AuthenticationException, RedmineException, NotFoundException {
int someNonExistingID = 999999;
mgr.getIssueById(someNonExistingID);
}
@Test(expected = NotFoundException.class)
public void testUpdateIssueNonExistingId() throws IOException, AuthenticationException, RedmineException, NotFoundException {
int nonExistingId = 999999;
Issue issue = new Issue();
issue.setId(nonExistingId);
mgr.update(issue);
}
@Test
public void testGetUsers() {
try {
List<User> users = mgr.getUsers();
Assert.assertTrue(users.size() > 0);
// boolean found = false;
// for (Project project : projects) {
// if (project.getIdentifier().equals(projectKey)) {
// found = true;
// break;
// }
// }
// if (!found) {
// fail("Our project with key '" + projectKey+"' is not found on the server");
// }
} catch (Exception e) {
e.printStackTrace();
Assert.fail(e.getMessage());
}
}
@Test
public void testGetCurrentUser() throws IOException, AuthenticationException, RedmineException, NotFoundException {
User currentUser = mgr.getCurrentUser();
Assert.assertEquals(getOurUser().getId(), currentUser.getId());
Assert.assertEquals(getOurUser().getLogin(), currentUser.getLogin());
}
@Test
public void testGetUserById() throws IOException, AuthenticationException, NotFoundException, RedmineException {
User loadedUser = mgr.getUserById(getOurUser().getId());
Assert.assertEquals(getOurUser().getId(), loadedUser.getId());
Assert.assertEquals(getOurUser().getLogin(), loadedUser.getLogin());
}
@Test(expected = NotFoundException.class)
public void testGetUserNonExistingId() throws IOException, AuthenticationException, RedmineException, NotFoundException {
mgr.getUserById(999999);
}
@Test(expected = NotFoundException.class)
public void testInvalidGetCurrentUser() throws IOException, AuthenticationException, RedmineException, NotFoundException {
RedmineManager invalidManager = new RedmineManager(testConfig.getURI() + "/INVALID");
invalidManager.setLogin("Invalid");
invalidManager.setPassword("Invalid");
invalidManager.getCurrentUser();
}
@Test
public void testCreateUser() throws IOException, AuthenticationException, NotFoundException, RedmineException {
User createdUser = null;
try {
User userToCreate = generateRandomUser();
createdUser = mgr.createUser(userToCreate);
Assert.assertNotNull("checking that a non-null project is returned", createdUser);
Assert.assertEquals(userToCreate.getLogin(), createdUser.getLogin());
Assert.assertEquals(userToCreate.getFirstName(), createdUser.getFirstName());
Assert.assertEquals(userToCreate.getLastName(), createdUser.getLastName());
Integer id = createdUser.getId();
Assert.assertNotNull(id);
} catch (Exception e) {
Assert.fail(e.getMessage());
} finally {
if (createdUser != null) {
mgr.deleteUser(createdUser.getId());
}
}
}
private static User generateRandomUser() {
User user = new User();
user.setFirstName("fname");
user.setLastName("lname");
long randomNumber = new Date().getTime();
user.setLogin("login" + randomNumber);
user.setMail("somemail" + randomNumber + "@somedomain.com");
user.setPassword("zzzz");
return user;
}
@Test
public void testUpdateUser() throws IOException, AuthenticationException, NotFoundException {
User userToCreate = new User();
userToCreate.setFirstName("fname2");
userToCreate.setLastName("lname2");
long randomNumber = new Date().getTime();
userToCreate.setLogin("login33" + randomNumber);
userToCreate.setMail("email" + randomNumber + "@somedomain.com");
userToCreate.setPassword("1234");
try {
User createdUser = mgr.createUser(userToCreate);
Integer userId = createdUser.getId();
Assert.assertNotNull("checking that a non-null project is returned", createdUser);
String newFirstName = "fnameNEW";
String newLastName = "lnameNEW";
String newMail = "newmail" + randomNumber + "@asd.com";
createdUser.setFirstName(newFirstName);
createdUser.setLastName(newLastName);
createdUser.setMail(newMail);
mgr.update(createdUser);
User updatedUser = mgr.getUserById(userId);
Assert.assertEquals(newFirstName, updatedUser.getFirstName());
Assert.assertEquals(newLastName, updatedUser.getLastName());
Assert.assertEquals(newMail, updatedUser.getMail());
Assert.assertEquals(userId, updatedUser.getId());
} catch (Exception e) {
Assert.fail(e.getMessage());
}
}
@Test
public void userCanBeDeleted() throws IOException, AuthenticationException, RedmineException, NotFoundException {
User user = generateRandomUser();
User createdUser = mgr.createUser(user);
Integer newUserId = createdUser.getId();
try {
mgr.deleteUser(newUserId);
} catch (Exception e) {
Assert.fail(e.getMessage());
}
try {
mgr.getUserById(newUserId);
fail("Must have failed with NotFoundException because we tried to delete the user");
} catch (NotFoundException e) {
// ignore: the user should not be found
}
}
@Test(expected = NotFoundException.class)
public void deletingNonExistingUserThrowsNFE() throws IOException, AuthenticationException, RedmineException, NotFoundException {
mgr.deleteUser(999999);
}
@Test
public void testGetIssuesPaging() {
try {
// create 27 issues. default page size is 25.
createIssues(27);
// mgr.setObjectsPerPage(5); <-- does not work now
List<Issue> issues = mgr.getIssues(projectKey, null);
logger.debug("testGetIssuesPaging() loaded " + issues.size() + " issues");//using query #" + queryIdIssuesCreatedLast2Days);
Assert.assertTrue(issues.size() > 26);
Set<Issue> issueSet = new HashSet<Issue>(issues);
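// comparing the list size with the Set size verifies that paging did not return duplicate issues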
Assert.assertEquals(issues.size(), issueSet.size());
} catch (Exception e) {
e.printStackTrace();
Assert.fail(e.getMessage());
}
}
private List<Issue> createIssues(int issuesNumber) throws IOException, AuthenticationException, NotFoundException, RedmineException {
List<Issue> issues = new ArrayList<Issue>(issuesNumber);
for (int i = 0; i < issuesNumber; i++) {
Issue issueToCreate = new Issue();
issueToCreate.setSubject("some issue " + i + " " + new Date());
Issue issue = mgr.createIssue(projectKey, issueToCreate);
issues.add(issue);
}
return issues;
}
private Issue generateRandomIssue() {
Random r = new Random();
Issue issue = new Issue();
issue.setSubject("some issue " + r.nextInt() + " " + new Date());
return issue;
}
@Test
public void testProjectsAllPagesLoaded() throws IOException, AuthenticationException, NotFoundException, URISyntaxException, RedmineException {
int NUM = 27; // must be larger than 25, which is a default page size in Redmine
List<Project> projects = createProjects(NUM);
List<Project> loadedProjects = mgr.getProjects();
Assert.assertTrue(
"Number of projects loaded from the server must be bigger than "
+ NUM + ", but it's " + loadedProjects.size(),
loadedProjects.size() > NUM);
deleteProjects(projects);
}
private List<Project> createProjects(int num) throws IOException, AuthenticationException, RedmineException, NotFoundException {
List<Project> projects = new ArrayList<Project>(num);
for (int i = 0; i < num; i++) {
Project projectToCreate = generateRandomProject();
Project p = mgr.createProject(projectToCreate);
projects.add(p);
}
return projects;
}
private void deleteProjects(List<Project> projects) throws IOException, AuthenticationException, NotFoundException, RedmineException {
for (Project p : projects) {
mgr.deleteProject(p.getIdentifier());
}
}
@Test
public void testGetTimeEntries() throws IOException, AuthenticationException, NotFoundException, RedmineException {
List<TimeEntry> list = mgr.getTimeEntries();
Assert.assertNotNull(list);
}
@Test
public void testCreateGetTimeEntry() throws IOException, AuthenticationException, NotFoundException, RedmineException {
Issue issue = createIssues(1).get(0);
Integer issueId = issue.getId();
TimeEntry entry = new TimeEntry();
Float hours = 11f;
entry.setHours(hours);
entry.setIssueId(issueId);
// TODO We don't know activities IDs!
// see feature request http://www.redmine.org/issues/7506
entry.setActivityId(ACTIVITY_ID);
TimeEntry createdEntry = mgr.createTimeEntry(entry);
Assert.assertNotNull(createdEntry);
logger.debug("Created time entry " + createdEntry);
Assert.assertEquals(hours, createdEntry.getHours());
Float newHours = 22f;
createdEntry.setHours(newHours);
mgr.update(createdEntry);
TimeEntry updatedEntry = mgr.getTimeEntry(createdEntry.getId());
Assert.assertEquals(newHours, updatedEntry.getHours());
}
@Test(expected = NotFoundException.class)
public void testCreateDeleteTimeEntry() throws IOException, AuthenticationException, NotFoundException, RedmineException {
Issue issue = createIssues(1).get(0);
Integer issueId = issue.getId();
TimeEntry entry = new TimeEntry();
Float hours = 4f;
entry.setHours(hours);
entry.setIssueId(issueId);
entry.setActivityId(ACTIVITY_ID);
TimeEntry createdEntry = mgr.createTimeEntry(entry);
Assert.assertNotNull(createdEntry);
mgr.deleteTimeEntry(createdEntry.getId());
mgr.getTimeEntry(createdEntry.getId());
}
@Test
public void testGetTimeEntriesForIssue() throws IOException, AuthenticationException, NotFoundException, RedmineException {
Issue issue = createIssues(1).get(0);
Integer issueId = issue.getId();
Float hours1 = 2f;
Float hours2 = 7f;
Float totalHoursExpected = hours1 + hours2;
TimeEntry createdEntry1 = createTimeEntry(issueId, hours1);
TimeEntry createdEntry2 = createTimeEntry(issueId, hours2);
Assert.assertNotNull(createdEntry1);
Assert.assertNotNull(createdEntry2);
List<TimeEntry> entries = mgr.getTimeEntriesForIssue(issueId);
Assert.assertEquals(2, entries.size());
Float totalTime = 0f;
for (TimeEntry timeEntry : entries) {
totalTime += timeEntry.getHours();
}
Assert.assertEquals(totalHoursExpected, totalTime);
}
private TimeEntry createTimeEntry(Integer issueId, float hours) throws IOException,
AuthenticationException, NotFoundException, RedmineException {
TimeEntry entry = new TimeEntry();
entry.setHours(hours);
entry.setIssueId(issueId);
entry.setActivityId(ACTIVITY_ID);
return mgr.createTimeEntry(entry);
}
@Test(expected = NotFoundException.class)
public void testDeleteIssue() throws IOException, AuthenticationException,
NotFoundException, RedmineException {
Issue issue = createIssues(1).get(0);
Issue retrievedIssue = mgr.getIssueById(issue.getId());
Assert.assertEquals(issue, retrievedIssue);
mgr.deleteIssue(issue.getId());
mgr.getIssueById(issue.getId());
}
@Test
public void testUpdateIssueSpecialXMLtags() throws Exception {
Issue issue = createIssues(1).get(0);
String newSubject = "\"text in quotes\" and <xml> tags";
String newDescription = "<taghere>\"abc\"</here>";
issue.setSubject(newSubject);
issue.setDescription(newDescription);
mgr.update(issue);
Issue updatedIssue = mgr.getIssueById(issue.getId());
Assert.assertEquals(newSubject, updatedIssue.getSubject());
Assert.assertEquals(newDescription, updatedIssue.getDescription());
}
/**
* The custom fields used here MUST ALREADY EXIST on the server and be
* associated with the required task type (bug/feature/task/..).
* <p/>
* See feature request http://www.redmine.org/issues/9664
*/
@Test
public void testCustomFields() throws Exception {
Issue issue = createIssues(1).get(0);
// default empty values
Assert.assertEquals(2, issue.getCustomFields().size());
// TODO update this!
int id1 = 1; // TODO this is pretty much a hack, we don't generally know these ids!
String custom1FieldName = "my_custom_1";
String custom1Value = "some value 123";
int id2 = 2;
String custom2FieldName = "custom_boolean_1";
String custom2Value = "true";
issue.setCustomFields(new ArrayList<CustomField>());
issue.getCustomFields().add(new CustomField(id1, custom1FieldName, custom1Value));
issue.getCustomFields().add(new CustomField(id2, custom2FieldName, custom2Value));
mgr.update(issue);
Issue updatedIssue = mgr.getIssueById(issue.getId());
Assert.assertEquals(2, updatedIssue.getCustomFields().size());
Assert.assertEquals(custom1Value, updatedIssue.getCustomField(custom1FieldName));
Assert.assertEquals(custom2Value, updatedIssue.getCustomField(custom2FieldName));
}
@Test
public void testUpdateIssueDoesNotChangeEstimatedTime() {
try {
Issue issue = new Issue();
String originalSubject = "Issue " + new Date();
issue.setSubject(originalSubject);
Issue newIssue = mgr.createIssue(projectKey, issue);
Assert.assertEquals("Estimated hours must be NULL", null, newIssue.getEstimatedHours());
mgr.update(newIssue);
Issue reloadedFromRedmineIssue = mgr.getIssueById(newIssue.getId());
Assert.assertEquals("Estimated hours must be NULL", null, reloadedFromRedmineIssue.getEstimatedHours());
} catch (Exception e) {
Assert.fail();
}
}
/**
* Tests the correct retrieval of the parent id of sub {@link Project}.
*
* @throws RedmineException thrown in case something went wrong in Redmine
* @throws IOException thrown in case something went wrong while performing I/O
* operations
* @throws AuthenticationException thrown in case something went wrong while trying to login
* @throws NotFoundException thrown in case the objects requested for could not be found
*/
@Test
public void testSubProjectIsCreatedWithCorrectParentId() throws IOException, AuthenticationException, RedmineException, NotFoundException {
Project createdMainProject = null;
try {
createdMainProject = createProject();
Project subProject = createSubProject(createdMainProject);
Assert.assertEquals("Must have correct parent ID",
createdMainProject.getId(), subProject.getParentId());
} finally {
if (createdMainProject != null) {
mgr.deleteProject(createdMainProject.getIdentifier());
}
}
}
private Project createProject() throws IOException, AuthenticationException, RedmineException, NotFoundException {
Project mainProject = new Project();
long id = new Date().getTime();
mainProject.setName("project" + id);
mainProject.setIdentifier("project" + id);
return mgr.createProject(mainProject);
}
private Project createSubProject(Project parent) throws IOException, AuthenticationException, RedmineException, NotFoundException {
Project project = new Project();
long id = new Date().getTime();
project.setName("sub_pr" + id);
project.setIdentifier("subpr" + id);
project.setParentId(parent.getId());
return mgr.createProject(project);
}
@Test
public void testIssueDoneRatio() {
try {
Issue issue = new Issue();
String subject = "Issue " + new Date();
issue.setSubject(subject);
Issue createdIssue = mgr.createIssue(projectKey, issue);
Assert.assertEquals("Initial 'done ratio' must be 0", (Integer) 0, createdIssue.getDoneRatio());
Integer doneRatio = 50;
createdIssue.setDoneRatio(doneRatio);
mgr.update(createdIssue);
Integer issueId = createdIssue.getId();
Issue reloadedFromRedmineIssue = mgr.getIssueById(issueId);
Assert.assertEquals(
"Checking if 'update issue' operation changed 'done ratio' field",
doneRatio, reloadedFromRedmineIssue.getDoneRatio());
Integer invalidDoneRatio = 130;
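// Redmine validates the done ratio against a fixed list of allowed values (0..100, typically in 10% steps),
// so 130 is expected to be rejected with the validation error asserted below.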
reloadedFromRedmineIssue.setDoneRatio(invalidDoneRatio);
try {
mgr.update(reloadedFromRedmineIssue);
} catch (RedmineException e) {
Assert.assertEquals("Must be 1 error", 1, e.getErrors().size());
Assert.assertEquals("Checking error text", "% Done is not included in the list", e.getErrors().get(0));
}
Issue reloadedFromRedmineIssueUnchanged = mgr.getIssueById(issueId);
Assert.assertEquals(
"'done ratio' must have remained unchanged after invalid value",
doneRatio, reloadedFromRedmineIssueUnchanged.getDoneRatio());
} catch (Exception e) {
fail(e.toString());
}
}
@Test
public void testIssueNullDescriptionDoesNotEraseIt() {
try {
Issue issue = new Issue();
String subject = "Issue " + new Date();
String descr = "Some description";
issue.setSubject(subject);
issue.setDescription(descr);
Issue createdIssue = mgr.createIssue(projectKey, issue);
Assert.assertEquals("Checking description", descr, createdIssue.getDescription());
createdIssue.setDescription(null);
mgr.update(createdIssue);
Integer issueId = createdIssue.getId();
Issue reloadedFromRedmineIssue = mgr.getIssueById(issueId);
Assert.assertEquals(
"Description must not be erased",
descr, reloadedFromRedmineIssue.getDescription());
reloadedFromRedmineIssue.setDescription("");
mgr.update(reloadedFromRedmineIssue);
Issue reloadedFromRedmineIssueUnchanged = mgr.getIssueById(issueId);
Assert.assertEquals(
"Description must be erased",
"", reloadedFromRedmineIssueUnchanged.getDescription());
} catch (Exception e) {
Assert.fail();
}
}
@Test
public void testIssueJournals() {
try {
// create at least 1 issue
Issue issueToCreate = new Issue();
issueToCreate.setSubject("testGetIssues: " + new Date());
Issue newIssue = mgr.createIssue(projectKey, issueToCreate);
Issue loadedIssueWithJournals = mgr.getIssueById(newIssue.getId(), INCLUDE.journals);
Assert.assertTrue(loadedIssueWithJournals.getJournals().isEmpty());
String commentDescribingTheUpdate = "some comment describing the issue update";
loadedIssueWithJournals.setSubject("new subject");
loadedIssueWithJournals.setNotes(commentDescribingTheUpdate);
mgr.update(loadedIssueWithJournals);
Issue loadedIssueWithJournals2 = mgr.getIssueById(newIssue.getId(), INCLUDE.journals);
Assert.assertEquals(1, loadedIssueWithJournals2.getJournals().size());
Journal journalItem = loadedIssueWithJournals2.getJournals().get(0);
Assert.assertEquals(commentDescribingTheUpdate, journalItem.getNotes());
User ourUser = getOurUser();
// can't compare the User objects directly because neither of them is completely filled in
Assert.assertEquals(ourUser.getId(), journalItem.getUser().getId());
Assert.assertEquals(ourUser.getFirstName(), journalItem.getUser().getFirstName());
Assert.assertEquals(ourUser.getLastName(), journalItem.getUser().getLastName());
Issue loadedIssueWithoutJournals = mgr.getIssueById(newIssue.getId());
Assert.assertTrue(loadedIssueWithoutJournals.getJournals().isEmpty());
} catch (Exception e) {
e.printStackTrace();
Assert.fail(e.getMessage());
}
}
@Test
public void testCreateRelation() {
try {
List<Issue> issues = createIssues(2);
Issue src = issues.get(0);
Issue target = issues.get(1);
String relationText = IssueRelation.TYPE.precedes.toString();
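// "precedes" is one of Redmine's built-in relation types: the source issue must finish before the target issue can start.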
IssueRelation r = mgr.createRelation(src.getId(), target.getId(), relationText);
assertEquals(src.getId(), r.getIssueId());
Assert.assertEquals(target.getId(), r.getIssueToId());
Assert.assertEquals(relationText, r.getType());
} catch (Exception e) {
Assert.fail(e.toString());
}
}
private IssueRelation createTwoRelatedIssues() throws IOException, AuthenticationException, NotFoundException, RedmineException {
List<Issue> issues = createIssues(2);
Issue src = issues.get(0);
Issue target = issues.get(1);
String relationText = IssueRelation.TYPE.precedes.toString();
return mgr.createRelation(src.getId(), target.getId(), relationText);
}
@Test
public void issueRelationsAreCreatedAndLoadedOK() {
try {
IssueRelation relation = createTwoRelatedIssues();
Issue issue = mgr.getIssueById(relation.getIssueId(), INCLUDE.relations);
Issue issueTarget = mgr.getIssueById(relation.getIssueToId(), INCLUDE.relations);
Assert.assertEquals(1, issue.getRelations().size());
Assert.assertEquals(1, issueTarget.getRelations().size());
IssueRelation relation1 = issue.getRelations().get(0);
assertEquals(issue.getId(), relation1.getIssueId());
assertEquals(issueTarget.getId(), relation1.getIssueToId());
assertEquals("precedes", relation1.getType());
assertEquals((Integer) 0, relation1.getDelay());
IssueRelation reverseRelation = issueTarget.getRelations().get(0);
// both forward and reverse relations are the same!
Assert.assertEquals(relation1, reverseRelation);
} catch (Exception e) {
Assert.fail(e.toString());
}
}
@Test
public void testIssureRelationDelete() throws IOException, AuthenticationException, RedmineException, NotFoundException {
IssueRelation relation = createTwoRelatedIssues();
mgr.deleteRelation(relation.getId());
Issue issue = mgr.getIssueById(relation.getIssueId(), INCLUDE.relations);
Assert.assertEquals(0, issue.getRelations().size());
}
@Test
public void testIssueRelationsDelete() throws IOException, AuthenticationException, RedmineException, NotFoundException {
List<Issue> issues = createIssues(3);
Issue src = issues.get(0);
Issue target = issues.get(1);
String relationText = IssueRelation.TYPE.precedes.toString();
mgr.createRelation(src.getId(), target.getId(), relationText);
target = issues.get(2);
mgr.createRelation(src.getId(), target.getId(), relationText);
src = mgr.getIssueById(src.getId(), INCLUDE.relations);
mgr.deleteIssueRelations(src);
Issue issue = mgr.getIssueById(src.getId(), INCLUDE.relations);
Assert.assertEquals(0, issue.getRelations().size());
}
/**
* this test is ignored because:
* 1) we can't create Versions. see http://www.redmine.org/issues/9088
* 2) we don't currently set versions when creating issues.
*/
@Ignore
@Test
public void issueFixVersionIsSet() throws Exception {
String existingProjectKey = "test";
Issue toCreate = generateRandomIssue();
Version v = new Version();
String versionName = "1.0";
v.setName("1.0");
v.setId(1);
toCreate.setTargetVersion(v);
Issue createdIssue = mgr.createIssue(existingProjectKey, toCreate);
Assert.assertNotNull(createdIssue.getTargetVersion());
Assert.assertEquals(createdIssue.getTargetVersion().getName(), versionName);
}
// Redmine ignores this parameter for "get projects" request. see bug http://www.redmine.org/issues/8545
@Ignore
@Test
public void testGetProjectsIncludesTrackers() {
try {
List<Project> projects = mgr.getProjects();
Assert.assertTrue(projects.size() > 0);
Project p1 = projects.get(0);
Assert.assertNotNull(p1.getTrackers());
// XXX there could be a case when a project does not have any trackers
// need to create a project with some trackers to make this test deterministic
Assert.assertTrue(!p1.getTrackers().isEmpty());
logger.debug("Created trackers " + p1.getTrackers());
} catch (Exception e) {
e.printStackTrace();
Assert.fail(e.getMessage());
}
}
@Test
public void testSpentTimeFieldLoaded() {
try {
Issue issue = new Issue();
String subject = "Issue " + new Date();
issue.setSubject(subject);
float spentHours = 2;
issue.setSpentHours(spentHours);
Issue createdIssue = mgr.createIssue(projectKey, issue);
Issue newIssue = mgr.getIssueById(createdIssue.getId());
Assert.assertEquals((Float)spentHours, newIssue.getSpentHours());
} catch (Exception e) {
Assert.fail();
}
}
@Ignore
@Test
public void testSpentTime() {
// TODO need to use "Time Entries"
// float spentHours = 12.5f;
// issueToCreate.setSpentHours(spentHours);
// check SPENT TIME
// assertEquals((Float) spentHours, newIssue.getSpentHours());
}
@Test(expected = IllegalArgumentException.class)
public void invalidTimeEntryFailsWithIAEOnCreate() throws IOException, AuthenticationException, RedmineException, NotFoundException {
mgr.createTimeEntry(createIncompleteTimeEntry());
}
@Test(expected = IllegalArgumentException.class)
public void invalidTimeEntryFailsWithIAEOnUpdate() throws IOException, AuthenticationException, RedmineException, NotFoundException {
mgr.update(createIncompleteTimeEntry());
}
private TimeEntry createIncompleteTimeEntry() {
TimeEntry timeEntry = new TimeEntry();
timeEntry.setActivityId(ACTIVITY_ID);
timeEntry.setSpentOn(new Date());
timeEntry.setHours(1.5f);
return timeEntry;
}
@Test
public void testViolateTimeEntryConstraint_ProjectOrIssueID_issue66() throws IOException, AuthenticationException, RedmineException {
TimeEntry timeEntry = createIncompleteTimeEntry();
// Now verify with a project ID (the test with an issue ID seems to be covered already)
int projectId = mgr.getProjects().get(0).getId();
timeEntry.setProjectId(projectId);
try {
TimeEntry created = mgr.createTimeEntry(timeEntry);
logger.debug("Created time entry " + created);
} catch (Exception e) {
e.printStackTrace();
fail("Unexpected " + e.getClass().getSimpleName() + ": " + e.getMessage());
}
}
/**
* tests the retrieval of statuses.
*
* @throws RedmineException thrown in case something went wrong in Redmine
* @throws IOException thrown in case something went wrong while performing I/O
* operations
* @throws AuthenticationException thrown in case something went wrong while trying to login
* @throws NotFoundException thrown in case the objects requested for could not be found
*/
@Test
public void testGetStatuses() throws RedmineException, IOException, AuthenticationException, NotFoundException {
// TODO we should create some statuses first, but the Redmine Java API does not support this presently
List<IssueStatus> statuses = mgr.getStatuses();
Assert.assertFalse("Expected list of statuses not to be empty", statuses.isEmpty());
for (IssueStatus issueStatus : statuses) {
// asserts on status
assertNotNull("ID of status must not be null", issueStatus.getId());
assertNotNull("Name of status must not be null", issueStatus.getName());
}
}
/**
* tests the creation of an invalid {@link Version}.
*
* @throws RedmineException thrown in case something went wrong in Redmine
* @throws IOException thrown in case something went wrong while performing I/O
* operations
* @throws AuthenticationException thrown in case something went wrong while trying to login
* @throws NotFoundException thrown in case the objects requested for could not be found
*/
@Test(expected = IllegalArgumentException.class)
public void testCreateInvalidVersion() throws RedmineException, IOException, AuthenticationException, NotFoundException {
Version version = new Version(null, "Invalid test version " + UUID.randomUUID().toString());
mgr.createVersion(version);
}
/**
* tests the deletion of an invalid {@link Version}. Expects a
* {@link NotFoundException} to be thrown.
*
* @throws RedmineException thrown in case something went wrong in Redmine
* @throws IOException thrown in case something went wrong while performing I/O
* operations
* @throws AuthenticationException thrown in case something went wrong while trying to login
* @throws NotFoundException thrown in case the objects requested for could not be found
*/
@Test(expected = NotFoundException.class)
public void testDeleteInvalidVersion() throws RedmineException, IOException, AuthenticationException, NotFoundException {
// create new test version
Version version = new Version(null, "Invalid test version " + UUID.randomUUID().toString());
version.setDescription("An invalid test version created by " + this.getClass());
// set invalid id
version.setId(-1);
// now try to delete version
mgr.deleteVersion(version);
}
/**
* tests the deletion of a {@link Version}.
*
* @throws RedmineException thrown in case something went wrong in Redmine
* @throws IOException thrown in case something went wrong while performing I/O
* operations
* @throws AuthenticationException thrown in case something went wrong while trying to login
* @throws NotFoundException thrown in case the objects requested for could not be found
*/
@Test
public void testDeleteVersion() throws RedmineException, IOException, AuthenticationException, NotFoundException {
Project project = mgr.getProjectByKey(projectKey);
// create new test version
Version version = new Version(project, "Test version " + UUID.randomUUID().toString());
version.setDescription("A test version created by " + this.getClass());
version.setStatus("open");
Version newVersion = mgr.createVersion(version);
// assert new test version
Assert.assertNotNull("Expected new version not to be null", newVersion);
// now delete version
mgr.deleteVersion(newVersion);
// assert that the version is gone
List<Version> versions = mgr.getVersions(project.getId());
Assert.assertTrue("List of versions of test project must be empty now but is " + versions, versions.isEmpty());
}
/**
* tests the retrieval of {@link Version}s.
*
* @throws RedmineException thrown in case something went wrong in Redmine
* @throws IOException thrown in case something went wrong while performing I/O
* operations
* @throws AuthenticationException thrown in case something went wrong while trying to login
* @throws NotFoundException thrown in case the objects requested for could not be found
*/
@Test
public void testGetVersions() throws RedmineException, IOException, AuthenticationException, NotFoundException {
Project project = mgr.getProjectByKey(projectKey);
// create some versions
Version testVersion1 = mgr.createVersion(new Version(project, "Version" + UUID.randomUUID()));
Version testVersion2 = mgr.createVersion(new Version(project, "Version" + UUID.randomUUID()));
try {
List<Version> versions = mgr.getVersions(project.getId());
Assert.assertEquals("Wrong number of versions for project " + project.getName() + " delivered by Redmine Java API", 2, versions.size());
for (Version version : versions) {
// assert version
Assert.assertNotNull("ID of version must not be null", version.getId());
Assert.assertNotNull("Name of version must not be null", version.getName());
Assert.assertNotNull("Project of version must not be null", version.getProject());
}
} finally {
if (testVersion1 != null) {
mgr.deleteVersion(testVersion1);
}
if (testVersion2 != null) {
mgr.deleteVersion(testVersion2);
}
}
}
@Ignore // see Redmine bug http://www.redmine.org/issues/10241
@Test
public void versionIsRetrievedById() throws IOException, AuthenticationException, RedmineException, NotFoundException {
Project project = mgr.getProjectByKey(projectKey);
Version createdVersion = mgr.createVersion(new Version(project, "Version_1_" + UUID.randomUUID()));
Version versionById = mgr.getVersionById(createdVersion.getId());
assertEquals(createdVersion, versionById);
}
@Ignore // see Redmine bug http://www.redmine.org/issues/10241
@Test
public void versionIsUpdated() throws IOException, AuthenticationException, RedmineException, NotFoundException {
Project project = mgr.getProjectByKey(projectKey);
Version createdVersion = mgr.createVersion(new Version(project, "Version_1_" + UUID.randomUUID()));
String description = "new description";
createdVersion.setDescription(description);
mgr.update(createdVersion);
Version versionById = mgr.getVersionById(createdVersion.getId());
assertEquals(description, versionById.getDescription());
}
/**
* tests the creation and deletion of a {@link IssueCategory}.
*
* @throws RedmineException thrown in case something went wrong in Redmine
* @throws IOException thrown in case something went wrong while performing I/O
* operations
* @throws AuthenticationException thrown in case something went wrong while trying to login
* @throws NotFoundException thrown in case the objects requested for could not be found
*/
@Test
public void testCreateAndDeleteIssueCategory() throws RedmineException, IOException, AuthenticationException, NotFoundException {
Project project = mgr.getProjectByKey(projectKey);
// create new test category
IssueCategory category = new IssueCategory(project, "Category" + new Date().getTime());
category.setAssignee(getOurUser());
IssueCategory newIssueCategory = mgr.createCategory(category);
// assert new test category
Assert.assertNotNull("Expected new category not to be null", newIssueCategory);
Assert.assertNotNull("Expected project of new category not to be null", newIssueCategory.getProject());
Assert.assertNotNull("Expected assignee of new category not to be null", newIssueCategory.getAssignee());
// now delete category
mgr.deleteCategory(newIssueCategory);
// assert that the category is gone
List<IssueCategory> categories = mgr.getCategories(project.getId());
Assert.assertTrue("List of categories of test project must be empty now but is " + categories, categories.isEmpty());
}
/**
* tests the retrieval of {@link IssueCategory}s.
*
* @throws RedmineException thrown in case something went wrong in Redmine
* @throws IOException thrown in case something went wrong while performing I/O
* operations
* @throws AuthenticationException thrown in case something went wrong while trying to login
* @throws NotFoundException thrown in case the objects requested for could not be found
*/
@Test
public void testGetIssueCategories() throws RedmineException, IOException, AuthenticationException, NotFoundException {
Project project = mgr.getProjectByKey(projectKey);
// create some categories
IssueCategory testIssueCategory1 = new IssueCategory(project, "Category" + new Date().getTime());
testIssueCategory1.setAssignee(getOurUser());
IssueCategory newIssueCategory1 = mgr.createCategory(testIssueCategory1);
IssueCategory testIssueCategory2 = new IssueCategory(project, "Category" + new Date().getTime());
testIssueCategory2.setAssignee(getOurUser());
IssueCategory newIssueCategory2 = mgr.createCategory(testIssueCategory2);
try {
List<IssueCategory> categories = mgr.getCategories(project.getId());
Assert.assertEquals("Wrong number of categories for project " + project.getName() + " delivered by Redmine Java API", 2, categories.size());
for (IssueCategory category : categories) {
// assert category
Assert.assertNotNull("ID of category must not be null", category.getId());
Assert.assertNotNull("Name of category must not be null", category.getName());
Assert.assertNotNull("Project of category must not be null", category.getProject());
Assert.assertNotNull("Assignee of category must not be null", category.getAssignee());
}
} finally {
// scrub test categories
if (newIssueCategory1 != null) {
mgr.deleteCategory(newIssueCategory1);
}
if (newIssueCategory2 != null) {
mgr.deleteCategory(newIssueCategory2);
}
}
}
/**
* tests the creation of an invalid {@link IssueCategory}.
*
* @throws RedmineException thrown in case something went wrong in Redmine
* @throws IOException thrown in case something went wrong while performing I/O
* operations
* @throws AuthenticationException thrown in case something went wrong while trying to login
* @throws NotFoundException thrown in case the objects requested for could not be found
*/
@Test(expected = IllegalArgumentException.class)
public void testCreateInvalidIssueCategory() throws RedmineException, IOException, AuthenticationException, NotFoundException {
IssueCategory category = new IssueCategory(null, "InvalidCategory" + new Date().getTime());
mgr.createCategory(category);
}
/**
* tests the deletion of an invalid {@link IssueCategory}. Expects a
* {@link NotFoundException} to be thrown.
*
* @throws RedmineException thrown in case something went wrong in Redmine
* @throws IOException thrown in case something went wrong while performing I/O
* operations
* @throws AuthenticationException thrown in case something went wrong while trying to login
* @throws NotFoundException thrown in case the objects requested for could not be found
*/
@Test(expected = NotFoundException.class)
public void testDeleteInvalidIssueCategory() throws RedmineException, IOException, AuthenticationException, NotFoundException {
// create new test category
IssueCategory category = new IssueCategory(null, "InvalidCategory" + new Date().getTime());
// set invalid id
category.setId(-1);
// now try to delete category
mgr.deleteCategory(category);
}
/**
* Tests the retrieval of {@link Tracker}s.
*
* @throws RedmineException thrown in case something went wrong in Redmine
* @throws IOException thrown in case something went wrong while performing I/O
* operations
* @throws AuthenticationException thrown in case something went wrong while trying to login
* @throws NotFoundException thrown in case the objects requested for could not be found
*/
@Test
public void testGetTrackers() throws RedmineException, IOException, AuthenticationException, NotFoundException {
List<Tracker> trackers = mgr.getTrackers();
assertNotNull("List of trackers returned should not be null", trackers);
assertFalse("List of trackers returned should not be empty", trackers.isEmpty());
for (Tracker tracker : trackers) {
assertNotNull("Tracker returned should not be null", tracker);
assertNotNull("ID of tracker returned should not be null", tracker.getId());
assertNotNull("Name of tracker returned should not be null", tracker.getName());
}
}
/**
 * Tests the retrieval of an {@link Issue}, including the {@link org.redmine.ta.beans.Attachment}s.
*
* @throws RedmineException thrown in case something went wrong in Redmine
* @throws IOException thrown in case something went wrong while performing I/O
* operations
* @throws AuthenticationException thrown in case something went wrong while trying to login
* @throws NotFoundException thrown in case the objects requested for could not be found
*/
@Test
public void testGetIssueWithAttachments() throws RedmineException, IOException, AuthenticationException, NotFoundException {
Issue newIssue = null;
try {
// create at least 1 issue
Issue issueToCreate = new Issue();
issueToCreate.setSubject("testGetIssueAttachment_" + UUID.randomUUID());
newIssue = mgr.createIssue(projectKey, issueToCreate);
// TODO create test attachments for the issue once the Redmine REST API allows for it
// retrieve issue attachments
Issue retrievedIssue = mgr.getIssueById(newIssue.getId(), INCLUDE.attachments);
Assert.assertNotNull("List of attachments retrieved for issue " + newIssue.getId() + " delivered by Redmine Java API should not be null", retrievedIssue.getAttachments());
// TODO assert attachments once we actually receive ones for our test issue
} finally {
// scrub test issue
if (newIssue != null) {
mgr.deleteIssue(newIssue.getId());
}
}
}
/**
* Tests the retrieval of an {@link org.redmine.ta.beans.Attachment} by its ID.
* TODO reactivate once the Redmine REST API allows for creating attachments
*
* @throws RedmineException thrown in case something went wrong in Redmine
* @throws IOException thrown in case something went wrong while performing I/O
* operations
* @throws AuthenticationException thrown in case something went wrong while trying to login
* @throws NotFoundException thrown in case the objects requested for could not be found
*/
// @Test
public void testGetAttachmentById() throws RedmineException, IOException, AuthenticationException, NotFoundException {
// TODO where do we get a valid attachment number from? We can't create an attachment on our own for the test as the Redmine REST API does not support that.
int attachmentID = 1;
Attachment attachment = mgr.getAttachmentById(attachmentID);
Assert.assertNotNull("Attachment retrieved by ID " + attachmentID + " should not be null", attachment);
Assert.assertNotNull("Content URL of attachment retrieved by ID " + attachmentID + " should not be null", attachment.getContentURL());
// TODO more asserts on the attachment once this delivers an attachment
}
/**
* Tests the download of the content of an {@link org.redmine.ta.beans.Attachment}.
* TODO reactivate once the Redmine REST API allows for creating attachments
*
* @throws RedmineException thrown in case something went wrong in Redmine
* @throws IOException thrown in case something went wrong while performing I/O
* operations
* @throws AuthenticationException thrown in case something went wrong while trying to login
* @throws NotFoundException thrown in case the objects requested for could not be found
*/
// @Test
public void testDownloadAttachmentContent() throws RedmineException, IOException, AuthenticationException, NotFoundException {
// TODO where do we get a valid attachment number from? We can't create an attachment on our own for the test as the Redmine REST API does not support that.
int attachmentID = 1;
// retrieve issue attachment
Attachment attachment = mgr.getAttachmentById(attachmentID);
// download attachment content
byte[] attachmentContent = mgr.downloadAttachmentContent(attachment);
Assert.assertNotNull("Download of content of attachment with content URL " + attachment.getContentURL() + " should not be null", attachmentContent);
}
/**
* Tests the creation and retrieval of an {@link org.redmine.ta.beans.Issue} with a {@link IssueCategory}.
*
* @throws RedmineException thrown in case something went wrong in Redmine
* @throws IOException thrown in case something went wrong while performing I/O
* operations
* @throws AuthenticationException thrown in case something went wrong while trying to login
* @throws NotFoundException thrown in case the objects requested for could not be found
*/
@Test
public void testCreateAndGetIssueWithCategory() throws RedmineException, IOException, AuthenticationException, NotFoundException {
IssueCategory newIssueCategory = null;
Issue newIssue = null;
try {
Project project = mgr.getProjectByKey(projectKey);
// create an issue category
IssueCategory category = new IssueCategory(project, "Category_" + new Date().getTime());
category.setAssignee(getOurUser());
newIssueCategory = mgr.createCategory(category);
// create an issue
Issue issueToCreate = new Issue();
issueToCreate.setSubject("testGetIssueWithCategory_" + UUID.randomUUID());
issueToCreate.setCategory(newIssueCategory);
newIssue = mgr.createIssue(projectKey, issueToCreate);
// retrieve issue
Issue retrievedIssue = mgr.getIssueById(newIssue.getId());
// assert retrieved category of issue
IssueCategory retrievedCategory = retrievedIssue.getCategory();
Assert.assertNotNull("Category retrieved for issue " + newIssue.getId() + " should not be null", retrievedCategory);
Assert.assertEquals("ID of category retrieved for issue " + newIssue.getId() + " is wrong", newIssueCategory.getId(), retrievedCategory.getId());
Assert.assertEquals("Name of category retrieved for issue " + newIssue.getId() + " is wrong", newIssueCategory.getName(), retrievedCategory.getName());
} finally {
if (newIssue != null) {
mgr.deleteIssue(newIssue.getId());
}
if (newIssueCategory != null) {
mgr.deleteCategory(newIssueCategory);
}
}
}
@Test
public void getNewsDoesNotFailForNULLProject() throws IOException, AuthenticationException, RedmineException, NotFoundException {
mgr.getNews(null);
}
@Test
public void getNewsDoesNotFailForTempProject() throws IOException, AuthenticationException, RedmineException, NotFoundException {
mgr.getNews(projectKey);
}
@Test
public void getSavedQueriesDoesNotFailForTempProject() throws IOException, AuthenticationException, RedmineException, NotFoundException {
mgr.getSavedQueries(projectKey);
}
@Test
public void getSavedQueriesDoesNotFailForNULLProject() throws IOException, AuthenticationException, RedmineException, NotFoundException {
mgr.getSavedQueries(null);
}
}
|
Disabled the test for spentTime field. REST API does not support this feature.
|
src/test/java/org/redmine/ta/RedmineManagerTest.java
|
Disabled the test for spentTime field. REST API does not support this feature.
|
<ide><path>rc/test/java/org/redmine/ta/RedmineManagerTest.java
<ide> }
<ide> }
<ide>
<add> @Ignore
<ide> @Test
<ide> public void testSpentTimeFieldLoaded() {
<ide> try {
|
|
Java
|
apache-2.0
|
51dd89b9983908f98f25dd2f093d1ce101188e2a
| 0 |
Spikhalskiy/netty,doom369/netty,doom369/netty,tbrooks8/netty,doom369/netty,netty/netty,tbrooks8/netty,johnou/netty,Spikhalskiy/netty,netty/netty,netty/netty,Spikhalskiy/netty,tbrooks8/netty,johnou/netty,doom369/netty,johnou/netty,tbrooks8/netty,netty/netty,johnou/netty,netty/netty,Spikhalskiy/netty,tbrooks8/netty,doom369/netty,johnou/netty,Spikhalskiy/netty
|
/*
* Copyright 2014 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.netty.util.internal;
import io.netty.util.CharsetUtil;
import io.netty.util.internal.logging.InternalLogger;
import io.netty.util.internal.logging.InternalLoggerFactory;
import java.io.ByteArrayOutputStream;
import java.io.Closeable;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.lang.reflect.Method;
import java.net.URL;
import java.security.AccessController;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.security.PrivilegedAction;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.EnumSet;
import java.util.Enumeration;
import java.util.List;
import java.util.Set;
/**
* Helper class to load JNI resources.
*
*/
public final class NativeLibraryLoader {
private static final InternalLogger logger = InternalLoggerFactory.getInstance(NativeLibraryLoader.class);
private static final String NATIVE_RESOURCE_HOME = "META-INF/native/";
private static final File WORKDIR;
private static final boolean DELETE_NATIVE_LIB_AFTER_LOADING;
private static final boolean TRY_TO_PATCH_SHADED_ID;
private static final boolean DETECT_NATIVE_LIBRARY_DUPLICATES;
// Just use a-z, A-Z and digits as valid ID bytes.
private static final byte[] UNIQUE_ID_BYTES =
"0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ".getBytes(CharsetUtil.US_ASCII);
static {
String workdir = SystemPropertyUtil.get("io.netty.native.workdir");
if (workdir != null) {
File f = new File(workdir);
f.mkdirs();
try {
f = f.getAbsoluteFile();
} catch (Exception ignored) {
// Good to have an absolute path, but it's OK.
}
WORKDIR = f;
logger.debug("-Dio.netty.native.workdir: " + WORKDIR);
} else {
WORKDIR = PlatformDependent.tmpdir();
logger.debug("-Dio.netty.native.workdir: " + WORKDIR + " (io.netty.tmpdir)");
}
DELETE_NATIVE_LIB_AFTER_LOADING = SystemPropertyUtil.getBoolean(
"io.netty.native.deleteLibAfterLoading", true);
logger.debug("-Dio.netty.native.deleteLibAfterLoading: {}", DELETE_NATIVE_LIB_AFTER_LOADING);
TRY_TO_PATCH_SHADED_ID = SystemPropertyUtil.getBoolean(
"io.netty.native.tryPatchShadedId", true);
logger.debug("-Dio.netty.native.tryPatchShadedId: {}", TRY_TO_PATCH_SHADED_ID);
DETECT_NATIVE_LIBRARY_DUPLICATES = SystemPropertyUtil.getBoolean(
"io.netty.native.detectNativeLibraryDuplicates", true);
logger.debug("-Dio.netty.native.detectNativeLibraryDuplicates: {}", DETECT_NATIVE_LIBRARY_DUPLICATES);
}
/**
* Loads the first available library in the collection with the specified
* {@link ClassLoader}.
*
* @throws IllegalArgumentException
* if none of the given libraries load successfully.
*/
public static void loadFirstAvailable(ClassLoader loader, String... names) {
List<Throwable> suppressed = new ArrayList<Throwable>();
for (String name : names) {
try {
load(name, loader);
logger.debug("Loaded library with name '{}'", name);
return;
} catch (Throwable t) {
suppressed.add(t);
}
}
IllegalArgumentException iae =
new IllegalArgumentException("Failed to load any of the given libraries: " + Arrays.toString(names));
ThrowableUtil.addSuppressedAndClear(iae, suppressed);
throw iae;
}
/**
* The shading prefix added to this class's full name.
*
* @throws UnsatisfiedLinkError if the shader used something other than a prefix
*/
private static String calculatePackagePrefix() {
String maybeShaded = NativeLibraryLoader.class.getName();
// Use ! instead of . to avoid shading utilities from modifying the string
String expected = "io!netty!util!internal!NativeLibraryLoader".replace('!', '.');
if (!maybeShaded.endsWith(expected)) {
throw new UnsatisfiedLinkError(String.format(
"Could not find prefix added to %s to get %s. When shading, only adding a "
+ "package prefix is supported", expected, maybeShaded));
}
return maybeShaded.substring(0, maybeShaded.length() - expected.length());
}
/**
* Load the given library with the specified {@link ClassLoader}
*/
public static void load(String originalName, ClassLoader loader) {
// Adjust expected name to support shading of native libraries.
String packagePrefix = calculatePackagePrefix().replace('.', '_');
String name = packagePrefix + originalName;
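// (Illustrative) With a shading prefix of "org_example_shaded_" and originalName "netty_transport_native_epoll",
// the library looked up below is "org_example_shaded_netty_transport_native_epoll".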
List<Throwable> suppressed = new ArrayList<Throwable>();
try {
// first try to load from java.library.path
loadLibrary(loader, name, false);
return;
} catch (Throwable ex) {
suppressed.add(ex);
}
String libname = System.mapLibraryName(name);
String path = NATIVE_RESOURCE_HOME + libname;
InputStream in = null;
OutputStream out = null;
File tmpFile = null;
URL url = getResource(path, loader);
try {
if (url == null) {
if (PlatformDependent.isOsx()) {
String fileName = path.endsWith(".jnilib") ? NATIVE_RESOURCE_HOME + "lib" + name + ".dynlib" :
NATIVE_RESOURCE_HOME + "lib" + name + ".jnilib";
url = getResource(fileName, loader);
if (url == null) {
FileNotFoundException fnf = new FileNotFoundException(fileName);
ThrowableUtil.addSuppressedAndClear(fnf, suppressed);
throw fnf;
}
} else {
FileNotFoundException fnf = new FileNotFoundException(path);
ThrowableUtil.addSuppressedAndClear(fnf, suppressed);
throw fnf;
}
}
int index = libname.lastIndexOf('.');
String prefix = libname.substring(0, index);
String suffix = libname.substring(index);
tmpFile = PlatformDependent.createTempFile(prefix, suffix, WORKDIR);
in = url.openStream();
out = new FileOutputStream(tmpFile);
byte[] buffer = new byte[8192];
int length;
while ((length = in.read(buffer)) > 0) {
out.write(buffer, 0, length);
}
out.flush();
if (shouldShadedLibraryIdBePatched(packagePrefix)) {
// Let's try to patch the id and re-sign it. This is a best-effort and might fail if a
// SecurityManager is setup or the right executables are not installed :/
tryPatchShadedLibraryIdAndSign(tmpFile, originalName);
}
// Close the output stream before loading the unpacked library,
// because otherwise Windows will refuse to load it while it's in use by another process.
closeQuietly(out);
out = null;
loadLibrary(loader, tmpFile.getPath(), true);
} catch (UnsatisfiedLinkError e) {
try {
if (tmpFile != null && tmpFile.isFile() && tmpFile.canRead() &&
!NoexecVolumeDetector.canExecuteExecutable(tmpFile)) {
// Pass "io.netty.native.workdir" as an argument to allow shading tools to see
// the string. Since this is printed out to users to tell them what to do next,
// we want the value to be correct even when shading.
logger.info("{} exists but cannot be executed even when execute permissions set; " +
"check volume for \"noexec\" flag; use -D{}=[path] " +
"to set native working directory separately.",
tmpFile.getPath(), "io.netty.native.workdir");
}
} catch (Throwable t) {
suppressed.add(t);
logger.debug("Error checking if {} is on a file store mounted with noexec", tmpFile, t);
}
// Re-throw to fail the load
ThrowableUtil.addSuppressedAndClear(e, suppressed);
throw e;
} catch (Exception e) {
UnsatisfiedLinkError ule = new UnsatisfiedLinkError("could not load a native library: " + name);
ule.initCause(e);
ThrowableUtil.addSuppressedAndClear(ule, suppressed);
throw ule;
} finally {
closeQuietly(in);
closeQuietly(out);
// After we load the library it is safe to delete the file.
// We delete the file immediately to free up resources as soon as possible,
// and if this fails fallback to deleting on JVM exit.
if (tmpFile != null && (!DELETE_NATIVE_LIB_AFTER_LOADING || !tmpFile.delete())) {
tmpFile.deleteOnExit();
}
}
}
private static URL getResource(String path, ClassLoader loader) {
final Enumeration<URL> urls;
try {
if (loader == null) {
urls = ClassLoader.getSystemResources(path);
} else {
urls = loader.getResources(path);
}
} catch (IOException iox) {
throw new RuntimeException("An error occurred while getting the resources for " + path, iox);
}
List<URL> urlsList = Collections.list(urls);
int size = urlsList.size();
switch (size) {
case 0:
return null;
case 1:
return urlsList.get(0);
default:
if (DETECT_NATIVE_LIBRARY_DUPLICATES) {
try {
MessageDigest md = MessageDigest.getInstance("SHA-256");
// We found more than 1 resource with the same name. Let's check if the content of the file is
// the same as in this case it will not have any bad effect.
URL url = urlsList.get(0);
byte[] digest = digest(md, url);
boolean allSame = true;
if (digest != null) {
for (int i = 1; i < size; i++) {
byte[] digest2 = digest(md, urlsList.get(i));
if (digest2 == null || !Arrays.equals(digest, digest2)) {
allSame = false;
break;
}
}
} else {
allSame = false;
}
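                        // All duplicates hash to the same SHA-256, i.e. they are byte-identical copies,
                        // so it is safe to load any one of them; return the first URL.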
if (allSame) {
return url;
}
} catch (NoSuchAlgorithmException e) {
logger.debug("Don't support SHA-256, can't check if resources have same content.", e);
}
throw new IllegalStateException(
"Multiple resources found for '" + path + "' with different content: " + urlsList);
} else {
logger.warn("Multiple resources found for '" + path + "' with different content: " +
urlsList + ". Please fix your dependency graph.");
return urlsList.get(0);
}
}
}
private static byte[] digest(MessageDigest digest, URL url) {
InputStream in = null;
try {
in = url.openStream();
byte[] bytes = new byte[8192];
int i;
while ((i = in.read(bytes)) != -1) {
digest.update(bytes, 0, i);
}
return digest.digest();
} catch (IOException e) {
logger.debug("Can't read resource.", e);
return null;
} finally {
closeQuietly(in);
}
}
static void tryPatchShadedLibraryIdAndSign(File libraryFile, String originalName) {
String newId = new String(generateUniqueId(originalName.length()), CharsetUtil.UTF_8);
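// The replacement id is drawn from the alphanumeric UNIQUE_ID_BYTES alphabet and has the same length as the
// original library name; install_name_tool rewrites the Mach-O install name (id) and codesign then applies an
// ad-hoc signature to the patched binary.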
if (!tryExec("install_name_tool -id " + newId + " " + libraryFile.getAbsolutePath())) {
return;
}
tryExec("codesign -s - " + libraryFile.getAbsolutePath());
}
private static boolean tryExec(String cmd) {
try {
int exitValue = Runtime.getRuntime().exec(cmd).waitFor();
if (exitValue != 0) {
logger.debug("Execution of '{}' failed: {}", cmd, exitValue);
return false;
}
logger.debug("Execution of '{}' succeed: {}", cmd, exitValue);
return true;
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
} catch (IOException e) {
logger.info("Execution of '{}' failed.", cmd, e);
} catch (SecurityException e) {
logger.error("Execution of '{}' failed.", cmd, e);
}
return false;
}
private static boolean shouldShadedLibraryIdBePatched(String packagePrefix) {
return TRY_TO_PATCH_SHADED_ID && PlatformDependent.isOsx() && !packagePrefix.isEmpty();
}
private static byte[] generateUniqueId(int length) {
byte[] idBytes = new byte[length];
for (int i = 0; i < idBytes.length; i++) {
// We should only use bytes as replacement that are in our UNIQUE_ID_BYTES array.
idBytes[i] = UNIQUE_ID_BYTES[PlatformDependent.threadLocalRandom()
.nextInt(UNIQUE_ID_BYTES.length)];
}
return idBytes;
}
/**
* Loading the native library into the specified {@link ClassLoader}.
* @param loader - The {@link ClassLoader} where the native library will be loaded into
* @param name - The native library path or name
* @param absolute - Whether the native library will be loaded by path or by name
*/
private static void loadLibrary(final ClassLoader loader, final String name, final boolean absolute) {
Throwable suppressed = null;
try {
try {
// Make sure the helper belongs to the target ClassLoader.
final Class<?> newHelper = tryToLoadClass(loader, NativeLibraryUtil.class);
loadLibraryByHelper(newHelper, name, absolute);
logger.debug("Successfully loaded the library {}", name);
return;
} catch (UnsatisfiedLinkError e) { // Should bypass the UnsatisfiedLinkError here!
suppressed = e;
} catch (Exception e) {
suppressed = e;
}
NativeLibraryUtil.loadLibrary(name, absolute); // Fallback to local helper class.
logger.debug("Successfully loaded the library {}", name);
} catch (NoSuchMethodError nsme) {
if (suppressed != null) {
ThrowableUtil.addSuppressed(nsme, suppressed);
}
rethrowWithMoreDetailsIfPossible(name, nsme);
} catch (UnsatisfiedLinkError ule) {
if (suppressed != null) {
ThrowableUtil.addSuppressed(ule, suppressed);
}
throw ule;
}
}
@SuppressJava6Requirement(reason = "Guarded by version check")
private static void rethrowWithMoreDetailsIfPossible(String name, NoSuchMethodError error) {
if (PlatformDependent.javaVersion() >= 7) {
throw new LinkageError(
"Possible multiple incompatible native libraries on the classpath for '" + name + "'?", error);
}
throw error;
}
private static void loadLibraryByHelper(final Class<?> helper, final String name, final boolean absolute)
throws UnsatisfiedLinkError {
Object ret = AccessController.doPrivileged(new PrivilegedAction<Object>() {
@Override
public Object run() {
try {
// Invoke the helper to load the native library; if it succeeds, the native
// library belongs to the specified ClassLoader.
Method method = helper.getMethod("loadLibrary", String.class, boolean.class);
method.setAccessible(true);
return method.invoke(null, name, absolute);
} catch (Exception e) {
return e;
}
}
});
if (ret instanceof Throwable) {
Throwable t = (Throwable) ret;
assert !(t instanceof UnsatisfiedLinkError) : t + " should be a wrapper throwable";
Throwable cause = t.getCause();
if (cause instanceof UnsatisfiedLinkError) {
throw (UnsatisfiedLinkError) cause;
}
UnsatisfiedLinkError ule = new UnsatisfiedLinkError(t.getMessage());
ule.initCause(t);
throw ule;
}
}
/**
 * Try to load the helper {@link Class} into the specified {@link ClassLoader}.
* @param loader - The {@link ClassLoader} where to load the helper {@link Class}
* @param helper - The helper {@link Class}
* @return A new helper Class defined in the specified ClassLoader.
* @throws ClassNotFoundException Helper class not found or loading failed
*/
private static Class<?> tryToLoadClass(final ClassLoader loader, final Class<?> helper)
throws ClassNotFoundException {
try {
return Class.forName(helper.getName(), false, loader);
} catch (ClassNotFoundException e1) {
if (loader == null) {
// cannot defineClass inside bootstrap class loader
throw e1;
}
try {
// The helper class is NOT found in target ClassLoader, we have to define the helper class.
final byte[] classBinary = classToByteArray(helper);
return AccessController.doPrivileged(new PrivilegedAction<Class<?>>() {
@Override
public Class<?> run() {
try {
// Define the helper class in the target ClassLoader,
// then we can call the helper to load the native library.
Method defineClass = ClassLoader.class.getDeclaredMethod("defineClass", String.class,
byte[].class, int.class, int.class);
defineClass.setAccessible(true);
return (Class<?>) defineClass.invoke(loader, helper.getName(), classBinary, 0,
classBinary.length);
} catch (Exception e) {
throw new IllegalStateException("Define class failed!", e);
}
}
});
} catch (ClassNotFoundException e2) {
ThrowableUtil.addSuppressed(e2, e1);
throw e2;
} catch (RuntimeException e2) {
ThrowableUtil.addSuppressed(e2, e1);
throw e2;
} catch (Error e2) {
ThrowableUtil.addSuppressed(e2, e1);
throw e2;
}
}
}
/**
 * Load the helper {@link Class} as a byte array, to be redefined in the specified {@link ClassLoader}.
* @param clazz - The helper {@link Class} provided by this bundle
* @return The binary content of helper {@link Class}.
* @throws ClassNotFoundException Helper class not found or loading failed
*/
private static byte[] classToByteArray(Class<?> clazz) throws ClassNotFoundException {
String fileName = clazz.getName();
int lastDot = fileName.lastIndexOf('.');
if (lastDot > 0) {
fileName = fileName.substring(lastDot + 1);
}
URL classUrl = clazz.getResource(fileName + ".class");
if (classUrl == null) {
throw new ClassNotFoundException(clazz.getName());
}
byte[] buf = new byte[1024];
ByteArrayOutputStream out = new ByteArrayOutputStream(4096);
InputStream in = null;
try {
in = classUrl.openStream();
for (int r; (r = in.read(buf)) != -1;) {
out.write(buf, 0, r);
}
return out.toByteArray();
} catch (IOException ex) {
throw new ClassNotFoundException(clazz.getName(), ex);
} finally {
closeQuietly(in);
closeQuietly(out);
}
}
private static void closeQuietly(Closeable c) {
if (c != null) {
try {
c.close();
} catch (IOException ignore) {
// ignore
}
}
}
private NativeLibraryLoader() {
// Utility
}
private static final class NoexecVolumeDetector {
@SuppressJava6Requirement(reason = "Usage guarded by java version check")
private static boolean canExecuteExecutable(File file) throws IOException {
if (PlatformDependent.javaVersion() < 7) {
// Pre-JDK7, the Java API did not directly support POSIX permissions; instead of implementing a custom
// work-around, assume true, which disables the check.
return true;
}
// If we can already execute, there is nothing to do.
if (file.canExecute()) {
return true;
}
// On volumes mounted with noexec, even files with executable POSIX permissions will fail to execute.
// The File#canExecute() method honors this behavior, probably by parsing the noexec flag when initializing
// the UnixFileStore, though the flag is not exposed via a public API. To find out if the library is being
// loaded off a volume with noexec, confirm or add executable permissions, then check File#canExecute().
// Note: We use FQCN to not break when netty is used in java6
Set<java.nio.file.attribute.PosixFilePermission> existingFilePermissions =
java.nio.file.Files.getPosixFilePermissions(file.toPath());
Set<java.nio.file.attribute.PosixFilePermission> executePermissions =
EnumSet.of(java.nio.file.attribute.PosixFilePermission.OWNER_EXECUTE,
java.nio.file.attribute.PosixFilePermission.GROUP_EXECUTE,
java.nio.file.attribute.PosixFilePermission.OTHERS_EXECUTE);
if (existingFilePermissions.containsAll(executePermissions)) {
return false;
}
Set<java.nio.file.attribute.PosixFilePermission> newPermissions = EnumSet.copyOf(existingFilePermissions);
newPermissions.addAll(executePermissions);
java.nio.file.Files.setPosixFilePermissions(file.toPath(), newPermissions);
return file.canExecute();
}
private NoexecVolumeDetector() {
// Utility
}
}
}
|
common/src/main/java/io/netty/util/internal/NativeLibraryLoader.java
|
/*
* Copyright 2014 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.netty.util.internal;
import io.netty.util.CharsetUtil;
import io.netty.util.internal.logging.InternalLogger;
import io.netty.util.internal.logging.InternalLoggerFactory;
import java.io.ByteArrayOutputStream;
import java.io.Closeable;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.lang.reflect.Method;
import java.net.URL;
import java.security.AccessController;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.security.PrivilegedAction;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.EnumSet;
import java.util.Enumeration;
import java.util.List;
import java.util.Set;
/**
* Helper class to load JNI resources.
*
*/
public final class NativeLibraryLoader {
private static final InternalLogger logger = InternalLoggerFactory.getInstance(NativeLibraryLoader.class);
private static final String NATIVE_RESOURCE_HOME = "META-INF/native/";
private static final File WORKDIR;
private static final boolean DELETE_NATIVE_LIB_AFTER_LOADING;
private static final boolean TRY_TO_PATCH_SHADED_ID;
// Just use a-z, A-Z and digits as valid ID bytes.
private static final byte[] UNIQUE_ID_BYTES =
"0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ".getBytes(CharsetUtil.US_ASCII);
static {
String workdir = SystemPropertyUtil.get("io.netty.native.workdir");
if (workdir != null) {
File f = new File(workdir);
f.mkdirs();
try {
f = f.getAbsoluteFile();
} catch (Exception ignored) {
// Good to have an absolute path, but it's OK.
}
WORKDIR = f;
logger.debug("-Dio.netty.native.workdir: " + WORKDIR);
} else {
WORKDIR = PlatformDependent.tmpdir();
logger.debug("-Dio.netty.native.workdir: " + WORKDIR + " (io.netty.tmpdir)");
}
DELETE_NATIVE_LIB_AFTER_LOADING = SystemPropertyUtil.getBoolean(
"io.netty.native.deleteLibAfterLoading", true);
logger.debug("-Dio.netty.native.deleteLibAfterLoading: {}", DELETE_NATIVE_LIB_AFTER_LOADING);
TRY_TO_PATCH_SHADED_ID = SystemPropertyUtil.getBoolean(
"io.netty.native.tryPatchShadedId", true);
logger.debug("-Dio.netty.native.tryPatchShadedId: {}", TRY_TO_PATCH_SHADED_ID);
}
/**
* Loads the first available library in the collection with the specified
* {@link ClassLoader}.
*
* @throws IllegalArgumentException
* if none of the given libraries load successfully.
*/
public static void loadFirstAvailable(ClassLoader loader, String... names) {
List<Throwable> suppressed = new ArrayList<Throwable>();
for (String name : names) {
try {
load(name, loader);
logger.debug("Loaded library with name '{}'", name);
return;
} catch (Throwable t) {
suppressed.add(t);
}
}
IllegalArgumentException iae =
new IllegalArgumentException("Failed to load any of the given libraries: " + Arrays.toString(names));
ThrowableUtil.addSuppressedAndClear(iae, suppressed);
throw iae;
}
/**
* The shading prefix added to this class's full name.
*
* @throws UnsatisfiedLinkError if the shader used something other than a prefix
*/
private static String calculatePackagePrefix() {
String maybeShaded = NativeLibraryLoader.class.getName();
// Use ! instead of . to avoid shading utilities from modifying the string
String expected = "io!netty!util!internal!NativeLibraryLoader".replace('!', '.');
if (!maybeShaded.endsWith(expected)) {
throw new UnsatisfiedLinkError(String.format(
"Could not find prefix added to %s to get %s. When shading, only adding a "
+ "package prefix is supported", expected, maybeShaded));
}
return maybeShaded.substring(0, maybeShaded.length() - expected.length());
}
/**
* Load the given library with the specified {@link ClassLoader}
*/
public static void load(String originalName, ClassLoader loader) {
// Adjust expected name to support shading of native libraries.
String packagePrefix = calculatePackagePrefix().replace('.', '_');
String name = packagePrefix + originalName;
List<Throwable> suppressed = new ArrayList<Throwable>();
try {
// first try to load from java.library.path
loadLibrary(loader, name, false);
return;
} catch (Throwable ex) {
suppressed.add(ex);
}
String libname = System.mapLibraryName(name);
String path = NATIVE_RESOURCE_HOME + libname;
InputStream in = null;
OutputStream out = null;
File tmpFile = null;
URL url = getResource(path, loader);
try {
if (url == null) {
if (PlatformDependent.isOsx()) {
String fileName = path.endsWith(".jnilib") ? NATIVE_RESOURCE_HOME + "lib" + name + ".dynlib" :
NATIVE_RESOURCE_HOME + "lib" + name + ".jnilib";
url = getResource(fileName, loader);
if (url == null) {
FileNotFoundException fnf = new FileNotFoundException(fileName);
ThrowableUtil.addSuppressedAndClear(fnf, suppressed);
throw fnf;
}
} else {
FileNotFoundException fnf = new FileNotFoundException(path);
ThrowableUtil.addSuppressedAndClear(fnf, suppressed);
throw fnf;
}
}
int index = libname.lastIndexOf('.');
String prefix = libname.substring(0, index);
String suffix = libname.substring(index);
tmpFile = PlatformDependent.createTempFile(prefix, suffix, WORKDIR);
in = url.openStream();
out = new FileOutputStream(tmpFile);
byte[] buffer = new byte[8192];
int length;
while ((length = in.read(buffer)) > 0) {
out.write(buffer, 0, length);
}
out.flush();
if (shouldShadedLibraryIdBePatched(packagePrefix)) {
// Let's try to patch the id and re-sign it. This is a best-effort and might fail if a
// SecurityManager is setup or the right executables are not installed :/
tryPatchShadedLibraryIdAndSign(tmpFile, originalName);
}
// Close the output stream before loading the unpacked library,
// because otherwise Windows will refuse to load it while it's in use by another process.
closeQuietly(out);
out = null;
loadLibrary(loader, tmpFile.getPath(), true);
} catch (UnsatisfiedLinkError e) {
try {
if (tmpFile != null && tmpFile.isFile() && tmpFile.canRead() &&
!NoexecVolumeDetector.canExecuteExecutable(tmpFile)) {
// Pass "io.netty.native.workdir" as an argument to allow shading tools to see
// the string. Since this is printed out to users to tell them what to do next,
// we want the value to be correct even when shading.
logger.info("{} exists but cannot be executed even when execute permissions set; " +
"check volume for \"noexec\" flag; use -D{}=[path] " +
"to set native working directory separately.",
tmpFile.getPath(), "io.netty.native.workdir");
}
} catch (Throwable t) {
suppressed.add(t);
logger.debug("Error checking if {} is on a file store mounted with noexec", tmpFile, t);
}
// Re-throw to fail the load
ThrowableUtil.addSuppressedAndClear(e, suppressed);
throw e;
} catch (Exception e) {
UnsatisfiedLinkError ule = new UnsatisfiedLinkError("could not load a native library: " + name);
ule.initCause(e);
ThrowableUtil.addSuppressedAndClear(ule, suppressed);
throw ule;
} finally {
closeQuietly(in);
closeQuietly(out);
// After we load the library it is safe to delete the file.
// We delete the file immediately to free up resources as soon as possible,
// and if this fails fallback to deleting on JVM exit.
if (tmpFile != null && (!DELETE_NATIVE_LIB_AFTER_LOADING || !tmpFile.delete())) {
tmpFile.deleteOnExit();
}
}
}
private static URL getResource(String path, ClassLoader loader) {
final Enumeration<URL> urls;
try {
if (loader == null) {
urls = ClassLoader.getSystemResources(path);
} else {
urls = loader.getResources(path);
}
} catch (IOException iox) {
throw new RuntimeException("An error occurred while getting the resources for " + path, iox);
}
List<URL> urlsList = Collections.list(urls);
int size = urlsList.size();
switch (size) {
case 0:
return null;
case 1:
return urlsList.get(0);
default:
try {
MessageDigest md = MessageDigest.getInstance("SHA-256");
// We found more than 1 resource with the same name. Let's check if the content of the file is the
// same as in this case it will not have any bad effect.
URL url = urlsList.get(0);
byte[] digest = digest(md, url);
boolean allSame = true;
if (digest != null) {
for (int i = 1; i < size; i++) {
byte[] digest2 = digest(md, urlsList.get(i));
if (digest2 == null || !Arrays.equals(digest, digest2)) {
allSame = false;
break;
}
}
} else {
allSame = false;
}
if (allSame) {
return url;
}
} catch (NoSuchAlgorithmException e) {
logger.debug("Don't support SHA-256, can't check if resources have same content.", e);
}
throw new IllegalStateException(
"Multiple resources found for '" + path + "' with different content: " + urlsList);
}
}
private static byte[] digest(MessageDigest digest, URL url) {
InputStream in = null;
try {
in = url.openStream();
byte[] bytes = new byte[8192];
int i;
while ((i = in.read(bytes)) != -1) {
digest.update(bytes, 0, i);
}
return digest.digest();
} catch (IOException e) {
logger.debug("Can't read resource.", e);
return null;
} finally {
closeQuietly(in);
}
}
static void tryPatchShadedLibraryIdAndSign(File libraryFile, String originalName) {
String newId = new String(generateUniqueId(originalName.length()), CharsetUtil.UTF_8);
if (!tryExec("install_name_tool -id " + newId + " " + libraryFile.getAbsolutePath())) {
return;
}
tryExec("codesign -s - " + libraryFile.getAbsolutePath());
}
private static boolean tryExec(String cmd) {
try {
int exitValue = Runtime.getRuntime().exec(cmd).waitFor();
if (exitValue != 0) {
logger.debug("Execution of '{}' failed: {}", cmd, exitValue);
return false;
}
logger.debug("Execution of '{}' succeed: {}", cmd, exitValue);
return true;
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
} catch (IOException e) {
logger.info("Execution of '{}' failed.", cmd, e);
} catch (SecurityException e) {
logger.error("Execution of '{}' failed.", cmd, e);
}
return false;
}
private static boolean shouldShadedLibraryIdBePatched(String packagePrefix) {
return TRY_TO_PATCH_SHADED_ID && PlatformDependent.isOsx() && !packagePrefix.isEmpty();
}
private static byte[] generateUniqueId(int length) {
byte[] idBytes = new byte[length];
for (int i = 0; i < idBytes.length; i++) {
// We should only use bytes as replacement that are in our UNIQUE_ID_BYTES array.
idBytes[i] = UNIQUE_ID_BYTES[PlatformDependent.threadLocalRandom()
.nextInt(UNIQUE_ID_BYTES.length)];
}
return idBytes;
}
/**
* Loading the native library into the specified {@link ClassLoader}.
* @param loader - The {@link ClassLoader} where the native library will be loaded into
* @param name - The native library path or name
* @param absolute - Whether the native library will be loaded by path or by name
*/
private static void loadLibrary(final ClassLoader loader, final String name, final boolean absolute) {
Throwable suppressed = null;
try {
try {
// Make sure the helper belongs to the target ClassLoader.
final Class<?> newHelper = tryToLoadClass(loader, NativeLibraryUtil.class);
loadLibraryByHelper(newHelper, name, absolute);
logger.debug("Successfully loaded the library {}", name);
return;
} catch (UnsatisfiedLinkError e) { // Should by pass the UnsatisfiedLinkError here!
suppressed = e;
} catch (Exception e) {
suppressed = e;
}
NativeLibraryUtil.loadLibrary(name, absolute); // Fallback to local helper class.
logger.debug("Successfully loaded the library {}", name);
} catch (NoSuchMethodError nsme) {
if (suppressed != null) {
ThrowableUtil.addSuppressed(nsme, suppressed);
}
rethrowWithMoreDetailsIfPossible(name, nsme);
} catch (UnsatisfiedLinkError ule) {
if (suppressed != null) {
ThrowableUtil.addSuppressed(ule, suppressed);
}
throw ule;
}
}
@SuppressJava6Requirement(reason = "Guarded by version check")
private static void rethrowWithMoreDetailsIfPossible(String name, NoSuchMethodError error) {
if (PlatformDependent.javaVersion() >= 7) {
throw new LinkageError(
"Possible multiple incompatible native libraries on the classpath for '" + name + "'?", error);
}
throw error;
}
private static void loadLibraryByHelper(final Class<?> helper, final String name, final boolean absolute)
throws UnsatisfiedLinkError {
Object ret = AccessController.doPrivileged(new PrivilegedAction<Object>() {
@Override
public Object run() {
try {
// Invoke the helper to load the native library, if succeed, then the native
// library belong to the specified ClassLoader.
Method method = helper.getMethod("loadLibrary", String.class, boolean.class);
method.setAccessible(true);
return method.invoke(null, name, absolute);
} catch (Exception e) {
return e;
}
}
});
if (ret instanceof Throwable) {
Throwable t = (Throwable) ret;
assert !(t instanceof UnsatisfiedLinkError) : t + " should be a wrapper throwable";
Throwable cause = t.getCause();
if (cause instanceof UnsatisfiedLinkError) {
throw (UnsatisfiedLinkError) cause;
}
UnsatisfiedLinkError ule = new UnsatisfiedLinkError(t.getMessage());
ule.initCause(t);
throw ule;
}
}
/**
* Try to load the helper {@link Class} into specified {@link ClassLoader}.
* @param loader - The {@link ClassLoader} where to load the helper {@link Class}
* @param helper - The helper {@link Class}
* @return A new helper Class defined in the specified ClassLoader.
* @throws ClassNotFoundException Helper class not found or loading failed
*/
private static Class<?> tryToLoadClass(final ClassLoader loader, final Class<?> helper)
throws ClassNotFoundException {
try {
return Class.forName(helper.getName(), false, loader);
} catch (ClassNotFoundException e1) {
if (loader == null) {
// cannot defineClass inside bootstrap class loader
throw e1;
}
try {
// The helper class is NOT found in target ClassLoader, we have to define the helper class.
final byte[] classBinary = classToByteArray(helper);
return AccessController.doPrivileged(new PrivilegedAction<Class<?>>() {
@Override
public Class<?> run() {
try {
// Define the helper class in the target ClassLoader,
// then we can call the helper to load the native library.
Method defineClass = ClassLoader.class.getDeclaredMethod("defineClass", String.class,
byte[].class, int.class, int.class);
defineClass.setAccessible(true);
return (Class<?>) defineClass.invoke(loader, helper.getName(), classBinary, 0,
classBinary.length);
} catch (Exception e) {
throw new IllegalStateException("Define class failed!", e);
}
}
});
} catch (ClassNotFoundException e2) {
ThrowableUtil.addSuppressed(e2, e1);
throw e2;
} catch (RuntimeException e2) {
ThrowableUtil.addSuppressed(e2, e1);
throw e2;
} catch (Error e2) {
ThrowableUtil.addSuppressed(e2, e1);
throw e2;
}
}
}
/**
* Load the helper {@link Class} as a byte array, to be redefined in specified {@link ClassLoader}.
* @param clazz - The helper {@link Class} provided by this bundle
* @return The binary content of helper {@link Class}.
* @throws ClassNotFoundException Helper class not found or loading failed
*/
private static byte[] classToByteArray(Class<?> clazz) throws ClassNotFoundException {
String fileName = clazz.getName();
int lastDot = fileName.lastIndexOf('.');
if (lastDot > 0) {
fileName = fileName.substring(lastDot + 1);
}
URL classUrl = clazz.getResource(fileName + ".class");
if (classUrl == null) {
throw new ClassNotFoundException(clazz.getName());
}
byte[] buf = new byte[1024];
ByteArrayOutputStream out = new ByteArrayOutputStream(4096);
InputStream in = null;
try {
in = classUrl.openStream();
for (int r; (r = in.read(buf)) != -1;) {
out.write(buf, 0, r);
}
return out.toByteArray();
} catch (IOException ex) {
throw new ClassNotFoundException(clazz.getName(), ex);
} finally {
closeQuietly(in);
closeQuietly(out);
}
}
private static void closeQuietly(Closeable c) {
if (c != null) {
try {
c.close();
} catch (IOException ignore) {
// ignore
}
}
}
private NativeLibraryLoader() {
// Utility
}
private static final class NoexecVolumeDetector {
@SuppressJava6Requirement(reason = "Usage guarded by java version check")
private static boolean canExecuteExecutable(File file) throws IOException {
if (PlatformDependent.javaVersion() < 7) {
// Pre-JDK7, the Java API did not directly support POSIX permissions; instead of implementing a custom
// work-around, assume true, which disables the check.
return true;
}
// If we can already execute, there is nothing to do.
if (file.canExecute()) {
return true;
}
// On volumes with noexec set, even files with the executable POSIX permissions will fail to execute.
// The File#canExecute() method honors this behavior, probably by parsing the noexec flag when
// initializing the UnixFileStore, though the flag is not exposed via a public API. To find out if the
// library is being loaded off a volume with noexec, confirm or add executable permissions, then
// check File#canExecute().
// Note: We use FQCN to not break when netty is used in java6
Set<java.nio.file.attribute.PosixFilePermission> existingFilePermissions =
java.nio.file.Files.getPosixFilePermissions(file.toPath());
Set<java.nio.file.attribute.PosixFilePermission> executePermissions =
EnumSet.of(java.nio.file.attribute.PosixFilePermission.OWNER_EXECUTE,
java.nio.file.attribute.PosixFilePermission.GROUP_EXECUTE,
java.nio.file.attribute.PosixFilePermission.OTHERS_EXECUTE);
if (existingFilePermissions.containsAll(executePermissions)) {
return false;
}
Set<java.nio.file.attribute.PosixFilePermission> newPermissions = EnumSet.copyOf(existingFilePermissions);
newPermissions.addAll(executePermissions);
java.nio.file.Files.setPosixFilePermissions(file.toPath(), newPermissions);
return file.canExecute();
}
private NoexecVolumeDetector() {
// Utility
}
}
}
|
Allow to disable duplicate native library check (#11928)
Motivation:
ea2742b4f628dfae9523c49b22472df921d1386c introduced a change that fails loading of the native lib if the lib is included multiple times in the classpath. While this makes a lot of sense, it is not always easy for people to fix quickly. We should allow users to opt out of it.
Modifications:
- Add an 'io.netty.native.detectNativeLibraryDuplicates' property that allows disabling the check. Default is enabled
- When the check is disabled, we log a warning to notify the user about the need to fix it
Result:
Easier for people to upgrade
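As a hedged illustration (not part of the original commit message): the flag is read from a system property, so an application that wants to opt out would presumably pass -Dio.netty.native.detectNativeLibraryDuplicates=false on the JVM command line, or set the property programmatically before NativeLibraryLoader is first used, roughly like this:
public final class DisableDuplicateNativeLibCheckExample {
    public static void main(String[] args) {
        // Assumption: this must run before Netty's NativeLibraryLoader class is initialized,
        // because the flag is read in its static initializer (see the diff below).
        System.setProperty("io.netty.native.detectNativeLibraryDuplicates", "false");
        // ... bootstrap the Netty-based application here ...
    }
}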
|
common/src/main/java/io/netty/util/internal/NativeLibraryLoader.java
|
Allow to disable duplicate native library check (#11928)
|
<ide><path>ommon/src/main/java/io/netty/util/internal/NativeLibraryLoader.java
<ide> private static final File WORKDIR;
<ide> private static final boolean DELETE_NATIVE_LIB_AFTER_LOADING;
<ide> private static final boolean TRY_TO_PATCH_SHADED_ID;
<add> private static final boolean DETECT_NATIVE_LIBRARY_DUPLICATES;
<ide>
<ide> // Just use a-Z and numbers as valid ID bytes.
<ide> private static final byte[] UNIQUE_ID_BYTES =
<ide> TRY_TO_PATCH_SHADED_ID = SystemPropertyUtil.getBoolean(
<ide> "io.netty.native.tryPatchShadedId", true);
<ide> logger.debug("-Dio.netty.native.tryPatchShadedId: {}", TRY_TO_PATCH_SHADED_ID);
<add>
<add> DETECT_NATIVE_LIBRARY_DUPLICATES = SystemPropertyUtil.getBoolean(
<add> "io.netty.native.detectNativeLibraryDuplicates", true);
<add> logger.debug("-Dio.netty.native.detectNativeLibraryDuplicates: {}", DETECT_NATIVE_LIBRARY_DUPLICATES);
<ide> }
<ide>
<ide> /**
<ide> case 1:
<ide> return urlsList.get(0);
<ide> default:
<del> try {
<del> MessageDigest md = MessageDigest.getInstance("SHA-256");
<del> // We found more than 1 resource with the same name. Let's check if the content of the file is the
<del> // same as in this case it will not have any bad effect.
<del> URL url = urlsList.get(0);
<del> byte[] digest = digest(md, url);
<del> boolean allSame = true;
<del> if (digest != null) {
<del> for (int i = 1; i < size; i++) {
<del> byte[] digest2 = digest(md, urlsList.get(i));
<del> if (digest2 == null || !Arrays.equals(digest, digest2)) {
<del> allSame = false;
<del> break;
<add> if (DETECT_NATIVE_LIBRARY_DUPLICATES) {
<add> try {
<add> MessageDigest md = MessageDigest.getInstance("SHA-256");
<add> // We found more than 1 resource with the same name. Let's check if the content of the file is
<add> // the same as in this case it will not have any bad effect.
<add> URL url = urlsList.get(0);
<add> byte[] digest = digest(md, url);
<add> boolean allSame = true;
<add> if (digest != null) {
<add> for (int i = 1; i < size; i++) {
<add> byte[] digest2 = digest(md, urlsList.get(i));
<add> if (digest2 == null || !Arrays.equals(digest, digest2)) {
<add> allSame = false;
<add> break;
<add> }
<ide> }
<add> } else {
<add> allSame = false;
<ide> }
<del> } else {
<del> allSame = false;
<add> if (allSame) {
<add> return url;
<add> }
<add> } catch (NoSuchAlgorithmException e) {
<add> logger.debug("Don't support SHA-256, can't check if resources have same content.", e);
<ide> }
<del> if (allSame) {
<del> return url;
<del> }
<del> } catch (NoSuchAlgorithmException e) {
<del> logger.debug("Don't support SHA-256, can't check if resources have same content.", e);
<add>
<add> throw new IllegalStateException(
<add> "Multiple resources found for '" + path + "' with different content: " + urlsList);
<add> } else {
<add> logger.warn("Multiple resources found for '" + path + "' with different content: " +
<add> urlsList + ". Please fix your dependency graph.");
<add> return urlsList.get(0);
<ide> }
<del>
<del> throw new IllegalStateException(
<del> "Multiple resources found for '" + path + "' with different content: " + urlsList);
<ide> }
<ide> }
<ide>
|
|
JavaScript
|
agpl-3.0
|
3c2449945198af3f5b8b9221360077f90d649252
| 0 |
duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test
|
577ce4a0-2e64-11e5-9284-b827eb9e62be
|
helloWorld.js
|
577756f2-2e64-11e5-9284-b827eb9e62be
|
577ce4a0-2e64-11e5-9284-b827eb9e62be
|
helloWorld.js
|
577ce4a0-2e64-11e5-9284-b827eb9e62be
|
<ide><path>elloWorld.js
<del>577756f2-2e64-11e5-9284-b827eb9e62be
<add>577ce4a0-2e64-11e5-9284-b827eb9e62be
|
|
Java
|
mit
|
12c0f0b0a427ae60a440220dfac855bc85a8e16c
| 0 |
weefbellington/screenplay
|
package com.davidstemmer.screenplay.sample.presenter;
import android.app.Activity;
import android.os.Bundle;
import android.os.Handler;
import com.davidstemmer.screenplay.sample.R;
import com.davidstemmer.screenplay.sample.scene.ModalScene;
import com.davidstemmer.screenplay.sample.scene.PagedScene1;
import com.davidstemmer.screenplay.sample.scene.SimpleScene;
import com.davidstemmer.screenplay.sample.view.NavigationMenuView;
import com.davidstemmer.screenplay.scene.Scene;
import javax.inject.Inject;
import butterknife.ButterKnife;
import butterknife.OnClick;
import flow.Flow;
import mortar.ViewPresenter;
/**
* Created by weefbellington on 10/25/14.
*/
public class NavigationMenuPresenter extends ViewPresenter<NavigationMenuView> {
private final DrawerPresenter drawer;
private final Flow flow;
private final SimpleScene simpleScene;
private final PagedScene1 pagedScene;
private final ModalScene modalScene;
private Scene nextScene;
@Inject
public NavigationMenuPresenter(DrawerPresenter drawerPresenter,
Flow flow,
SimpleScene simpleScene,
PagedScene1 pagedScene,
ModalScene modalScene) {
this.drawer = drawerPresenter;
this.flow = flow;
this.simpleScene = simpleScene;
this.pagedScene = pagedScene;
this.modalScene = modalScene;
}
@OnClick(R.id.nav_item_simple_scene)
void welcomeClicked() {
showNextSceneAfterDelay(simpleScene);
drawer.close();
}
@OnClick(R.id.nav_item_paged_scenes)
void pagedScenesClicked() {
showNextSceneAfterDelay(pagedScene);
drawer.close();
}
@OnClick(R.id.nav_item_modal_scenes)
void modalScenesClicked() {
showNextSceneAfterDelay(modalScene);
drawer.close();
}
@Override
protected void onLoad(Bundle savedInstanceState) {
super.onLoad(savedInstanceState);
ButterKnife.inject(this, (Activity) getView().getContext());
}
private final Handler mDrawerHandler = new Handler();
private void showNextSceneAfterDelay(final Scene nextScene) {
// Clears any previously posted runnables, for double clicks
mDrawerHandler.removeCallbacksAndMessages(null);
mDrawerHandler.postDelayed(new Runnable() {
@Override
public void run() {
flow.replaceTo(nextScene);
}
}, 250);
// The millisecond delay is arbitrary and was arrived at through trial and error
drawer.close();
}
}
|
sample/src/main/java/com/davidstemmer/screenplay/sample/presenter/NavigationMenuPresenter.java
|
package com.davidstemmer.screenplay.sample.presenter;
import android.app.Activity;
import android.os.Bundle;
import android.support.v4.widget.DrawerLayout;
import android.view.View;
import com.davidstemmer.screenplay.sample.R;
import com.davidstemmer.screenplay.sample.scene.ModalScene;
import com.davidstemmer.screenplay.sample.scene.PagedScene1;
import com.davidstemmer.screenplay.sample.scene.SimpleScene;
import com.davidstemmer.screenplay.sample.view.NavigationMenuView;
import com.davidstemmer.screenplay.scene.Scene;
import javax.inject.Inject;
import butterknife.ButterKnife;
import butterknife.OnClick;
import flow.Flow;
import mortar.ViewPresenter;
/**
* Created by weefbellington on 10/25/14.
*/
public class NavigationMenuPresenter extends ViewPresenter<NavigationMenuView> implements DrawerLayout.DrawerListener {
private final DrawerPresenter drawer;
private final Flow flow;
private final SimpleScene simpleScene;
private final PagedScene1 pagedScene;
private final ModalScene modalScene;
private Scene nextScene;
@Inject
public NavigationMenuPresenter(DrawerPresenter drawerPresenter,
Flow flow,
SimpleScene simpleScene,
PagedScene1 pagedScene,
ModalScene modalScene) {
this.drawer = drawerPresenter;
this.flow = flow;
this.simpleScene = simpleScene;
this.pagedScene = pagedScene;
this.modalScene = modalScene;
}
@OnClick(R.id.nav_item_simple_scene)
void welcomeClicked() {
nextScene = simpleScene;
drawer.getLayout().closeDrawer(getView());
}
@OnClick(R.id.nav_item_paged_scenes)
void pagedScenesClicked() {
nextScene = pagedScene;
drawer.getLayout().closeDrawer(getView());
}
@OnClick(R.id.nav_item_modal_scenes)
void modalScenesClicked() {
nextScene = modalScene;
drawer.getLayout().closeDrawer(getView());
}
@Override
protected void onLoad(Bundle savedInstanceState) {
super.onLoad(savedInstanceState);
ButterKnife.inject(this, (Activity) getView().getContext());
drawer.getLayout().setDrawerListener(this);
}
@Override
public void onDrawerSlide(View view, float v) {
}
@Override
public void onDrawerOpened(View view) {
}
@Override
public void onDrawerClosed(View view) {
if (nextScene == null) {
return;
}
if (flow.getBackstack().current().getScreen() != nextScene) {
flow.resetTo(nextScene);
nextScene = null;
}
}
@Override
public void onDrawerStateChanged(int i) {
}
}
|
Improve drawer navigation with delay
|
sample/src/main/java/com/davidstemmer/screenplay/sample/presenter/NavigationMenuPresenter.java
|
Improve drawer navigation with delay
|
<ide><path>ample/src/main/java/com/davidstemmer/screenplay/sample/presenter/NavigationMenuPresenter.java
<ide>
<ide> import android.app.Activity;
<ide> import android.os.Bundle;
<del>import android.support.v4.widget.DrawerLayout;
<del>import android.view.View;
<add>import android.os.Handler;
<ide>
<ide> import com.davidstemmer.screenplay.sample.R;
<ide> import com.davidstemmer.screenplay.sample.scene.ModalScene;
<ide> /**
<ide> * Created by weefbellington on 10/25/14.
<ide> */
<del>public class NavigationMenuPresenter extends ViewPresenter<NavigationMenuView> implements DrawerLayout.DrawerListener {
<add>public class NavigationMenuPresenter extends ViewPresenter<NavigationMenuView> {
<ide>
<ide> private final DrawerPresenter drawer;
<ide> private final Flow flow;
<ide>
<ide> @OnClick(R.id.nav_item_simple_scene)
<ide> void welcomeClicked() {
<del> nextScene = simpleScene;
<del> drawer.getLayout().closeDrawer(getView());
<add> showNextSceneAfterDelay(simpleScene);
<add> drawer.close();
<ide> }
<ide>
<ide> @OnClick(R.id.nav_item_paged_scenes)
<ide> void pagedScenesClicked() {
<del> nextScene = pagedScene;
<del> drawer.getLayout().closeDrawer(getView());
<add> showNextSceneAfterDelay(pagedScene);
<add> drawer.close();
<ide> }
<ide>
<ide> @OnClick(R.id.nav_item_modal_scenes)
<ide> void modalScenesClicked() {
<del> nextScene = modalScene;
<del> drawer.getLayout().closeDrawer(getView());
<add> showNextSceneAfterDelay(modalScene);
<add> drawer.close();
<ide> }
<ide>
<ide> @Override
<ide> protected void onLoad(Bundle savedInstanceState) {
<ide> super.onLoad(savedInstanceState);
<del>
<ide> ButterKnife.inject(this, (Activity) getView().getContext());
<del> drawer.getLayout().setDrawerListener(this);
<ide> }
<ide>
<del> @Override
<del> public void onDrawerSlide(View view, float v) {
<del>
<add> private final Handler mDrawerHandler = new Handler();
<add> private void showNextSceneAfterDelay(final Scene nextScene) {
<add> // Clears any previously posted runnables, for double clicks
<add> mDrawerHandler.removeCallbacksAndMessages(null);
<add> mDrawerHandler.postDelayed(new Runnable() {
<add> @Override
<add> public void run() {
<add> flow.replaceTo(nextScene);
<add> }
<add> }, 250);
<add> // The millisecond delay is arbitrary and was arrived at through trial and error
<add> drawer.close();
<ide> }
<del>
<del> @Override
<del> public void onDrawerOpened(View view) {
<del>
<del> }
<del>
<del> @Override
<del> public void onDrawerClosed(View view) {
<del> if (nextScene == null) {
<del> return;
<del> }
<del> if (flow.getBackstack().current().getScreen() != nextScene) {
<del> flow.resetTo(nextScene);
<del> nextScene = null;
<del> }
<del> }
<del>
<del> @Override
<del> public void onDrawerStateChanged(int i) {
<del>
<del> }
<del>
<ide> }
|
|
Java
|
bsd-3-clause
|
e0c639a35311eb75122cd6dd90d737d264be02dc
| 0 |
crosswalk-project/crosswalk,axinging/crosswalk,xzhan96/crosswalk,darktears/crosswalk,dreamsxin/crosswalk,minggangw/crosswalk,lincsoon/crosswalk,marcuspridham/crosswalk,zliang7/crosswalk,crosswalk-project/crosswalk,jondong/crosswalk,jondong/crosswalk,heke123/crosswalk,minggangw/crosswalk,lincsoon/crosswalk,rakuco/crosswalk,minggangw/crosswalk,hgl888/crosswalk,zliang7/crosswalk,zliang7/crosswalk,lincsoon/crosswalk,Bysmyyr/crosswalk,dreamsxin/crosswalk,darktears/crosswalk,lincsoon/crosswalk,marcuspridham/crosswalk,PeterWangIntel/crosswalk,crosswalk-project/crosswalk,baleboy/crosswalk,jondong/crosswalk,dreamsxin/crosswalk,Bysmyyr/crosswalk,baleboy/crosswalk,rakuco/crosswalk,rakuco/crosswalk,zliang7/crosswalk,xzhan96/crosswalk,xzhan96/crosswalk,Bysmyyr/crosswalk,heke123/crosswalk,minggangw/crosswalk,jondong/crosswalk,axinging/crosswalk,darktears/crosswalk,hgl888/crosswalk,darktears/crosswalk,crosswalk-project/crosswalk,marcuspridham/crosswalk,minggangw/crosswalk,baleboy/crosswalk,jondong/crosswalk,crosswalk-project/crosswalk,heke123/crosswalk,dreamsxin/crosswalk,axinging/crosswalk,dreamsxin/crosswalk,marcuspridham/crosswalk,hgl888/crosswalk,marcuspridham/crosswalk,dreamsxin/crosswalk,PeterWangIntel/crosswalk,heke123/crosswalk,rakuco/crosswalk,xzhan96/crosswalk,Bysmyyr/crosswalk,xzhan96/crosswalk,Bysmyyr/crosswalk,axinging/crosswalk,marcuspridham/crosswalk,PeterWangIntel/crosswalk,darktears/crosswalk,zliang7/crosswalk,marcuspridham/crosswalk,axinging/crosswalk,crosswalk-project/crosswalk,rakuco/crosswalk,minggangw/crosswalk,baleboy/crosswalk,hgl888/crosswalk,darktears/crosswalk,heke123/crosswalk,marcuspridham/crosswalk,hgl888/crosswalk,zliang7/crosswalk,darktears/crosswalk,Bysmyyr/crosswalk,jondong/crosswalk,baleboy/crosswalk,PeterWangIntel/crosswalk,PeterWangIntel/crosswalk,hgl888/crosswalk,zliang7/crosswalk,heke123/crosswalk,zliang7/crosswalk,lincsoon/crosswalk,baleboy/crosswalk,PeterWangIntel/crosswalk,axinging/crosswalk,baleboy/crosswalk,xzhan96/crosswalk,hgl888/crosswalk,minggangw/crosswalk,crosswalk-project/crosswalk,xzhan96/crosswalk,lincsoon/crosswalk,rakuco/crosswalk,Bysmyyr/crosswalk,dreamsxin/crosswalk,PeterWangIntel/crosswalk,axinging/crosswalk,jondong/crosswalk,heke123/crosswalk,baleboy/crosswalk,darktears/crosswalk,Bysmyyr/crosswalk,lincsoon/crosswalk,jondong/crosswalk,lincsoon/crosswalk,crosswalk-project/crosswalk,rakuco/crosswalk,minggangw/crosswalk,heke123/crosswalk,xzhan96/crosswalk,hgl888/crosswalk,rakuco/crosswalk
|
// Copyright (c) 2015 Intel Corporation. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.xwalk.core;
import android.content.Context;
import android.content.pm.ApplicationInfo;
import android.content.pm.PackageInfo;
import android.content.pm.PackageManager;
import android.content.pm.PackageManager.NameNotFoundException;
import android.content.pm.Signature;
import android.os.Build;
import android.util.Log;
import dalvik.system.DexClassLoader;
import java.io.File;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.Arrays;
import java.util.LinkedList;
import java.util.HashMap;
import junit.framework.Assert;
/**
* The appropriate invocation order is:
* handlePreInit() - attachXWalkCore() - dockXWalkCore() - handlePostInit() - over
*/
class XWalkCoreWrapper {
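// Editor's note -- illustrative sketch only, not part of the original source.
// Based on the invocation order documented above, a caller (e.g. an activity extending
// XWalkActivity) is expected to drive the wrapper roughly as follows; the STATUS_MATCH
// guard is an assumption, not taken from this file:
//
//   XWalkCoreWrapper.handlePreInit(tag);
//   int status = XWalkCoreWrapper.attachXWalkCore(context);
//   if (status == XWalkLibraryInterface.STATUS_MATCH) {
//       XWalkCoreWrapper.dockXWalkCore();
//   }
//   XWalkCoreWrapper.handlePostInit(tag);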
private static final String XWALK_APK_PACKAGE = "org.xwalk.core";
private static final String WRAPPER_PACKAGE = "org.xwalk.core";
private static final String BRIDGE_PACKAGE = "org.xwalk.core.internal";
private static final String TAG = "XWalkLib";
private static final String XWALK_CORE_EXTRACTED_DIR = "extracted_xwalkcore";
private static final String XWALK_CORE_CLASSES_DEX = "classes.dex";
private static final String OPTIMIZED_DEX_DIR = "dex";
private static final String META_XWALK_ENABLE_DOWNLOAD_MODE = "xwalk_enable_download_mode";
private static XWalkCoreWrapper sProvisionalInstance;
private static XWalkCoreWrapper sInstance;
private static LinkedList<String> sReservedActivities = new LinkedList<String>();
private static HashMap<String, LinkedList<ReservedAction> > sReservedActions =
new HashMap<String, LinkedList<ReservedAction> >();
private static class ReservedAction {
ReservedAction(Object object) {
mObject = object;
}
ReservedAction(Class<?> clazz) {
mClass = clazz;
}
ReservedAction(ReflectMethod method) {
mMethod = method;
if (method.getArguments() != null) {
mArguments = Arrays.copyOf(method.getArguments(), method.getArguments().length);
}
}
Object mObject;
Class<?> mClass;
ReflectMethod mMethod;
Object[] mArguments;
}
private int mApiVersion;
private int mMinApiVersion;
private int mCoreStatus;
private Context mWrapperContext;
private Context mBridgeContext;
private ClassLoader mBridgeLoader;
public static XWalkCoreWrapper getInstance() {
return sInstance;
}
public static int getCoreStatus() {
if (sInstance != null) return XWalkLibraryInterface.STATUS_MATCH;
if (sProvisionalInstance == null) return XWalkLibraryInterface.STATUS_PENDING;
return sProvisionalInstance.mCoreStatus;
}
/**
* This method must be invoked on the UI thread.
*/
public static void handlePreInit(String tag) {
if (sInstance != null) return;
Log.d(TAG, "Pre init xwalk core in " + tag);
if (sReservedActions.containsKey(tag)) {
sReservedActions.remove(tag);
} else {
sReservedActivities.add(tag);
}
sReservedActions.put(tag, new LinkedList<ReservedAction>());
}
public static void reserveReflectObject(Object object) {
String tag = sReservedActivities.getLast();
Log.d(TAG, "Reserve object " + object.getClass() + " to " + tag);
sReservedActions.get(tag).add(new ReservedAction(object));
}
public static void reserveReflectClass(Class<?> clazz) {
String tag = sReservedActivities.getLast();
Log.d(TAG, "Reserve class " + clazz.toString() + " to " + tag);
sReservedActions.get(tag).add(new ReservedAction(clazz));
}
public static void reserveReflectMethod(ReflectMethod method) {
String tag = sReservedActivities.getLast();
Log.d(TAG, "Reserve method " + method.toString() + " to " + tag);
sReservedActions.get(tag).add(new ReservedAction(method));
}
/**
* This method must be invoked on the UI thread.
*/
public static void handlePostInit(String tag) {
if (!sReservedActions.containsKey(tag)) return;
Log.d(TAG, "Post init xwalk core in " + tag);
LinkedList<ReservedAction> reservedActions = sReservedActions.get(tag);
for (ReservedAction action : reservedActions) {
if (action.mObject != null) {
Log.d(TAG, "Init reserved object: " + action.mObject.getClass());
new ReflectMethod(action.mObject, "reflectionInit").invoke();
} else if (action.mClass != null) {
Log.d(TAG, "Init reserved class: " + action.mClass.toString());
new ReflectMethod(action.mClass, "reflectionInit").invoke();
} else {
Log.d(TAG, "Call reserved method: " + action.mMethod.toString());
Object[] args = action.mArguments;
if (args != null) {
for (int i = 0; i < args.length; ++i) {
if (args[i] instanceof ReflectMethod) {
args[i] = ((ReflectMethod) args[i]).invokeWithArguments();
}
}
}
action.mMethod.invoke(args);
}
}
sReservedActivities.remove(tag);
sReservedActions.remove(tag);
}
public static int attachXWalkCore(Context context) {
Assert.assertFalse(sReservedActivities.isEmpty());
Assert.assertNull(sInstance);
Log.d(TAG, "Attach xwalk core");
sProvisionalInstance = new XWalkCoreWrapper(context, -1);
if (!sProvisionalInstance.findEmbeddedCore()) {
if (sProvisionalInstance.isDownloadMode()) {
sProvisionalInstance.findDownloadedCore();
} else {
sProvisionalInstance.findSharedCore();
}
}
return sProvisionalInstance.mCoreStatus;
}
/**
* This method must be invoked on the UI thread.
*/
public static void dockXWalkCore() {
Assert.assertNotNull(sProvisionalInstance);
Assert.assertNull(sInstance);
Log.d(TAG, "Dock xwalk core");
sInstance = sProvisionalInstance;
sProvisionalInstance = null;
sInstance.initCoreBridge();
sInstance.initXWalkView();
}
/**
* This method must be invoked on the UI thread.
*/
public static void initEmbeddedMode() {
if (sInstance != null || !sReservedActivities.isEmpty()) return;
Log.d(TAG, "Init embedded mode");
XWalkCoreWrapper provisionalInstance = new XWalkCoreWrapper(null, -1);
if (!provisionalInstance.findEmbeddedCore()) {
Assert.fail("Please have your activity extend XWalkActivity for shared mode");
}
sInstance = provisionalInstance;
sInstance.initCoreBridge();
}
private XWalkCoreWrapper(Context context, int minApiVersion) {
mApiVersion = XWalkAppVersion.API_VERSION;
mMinApiVersion = (minApiVersion > 0 && minApiVersion <= mApiVersion) ?
minApiVersion : mApiVersion;
mCoreStatus = XWalkLibraryInterface.STATUS_PENDING;
mWrapperContext = context;
}
private void initCoreBridge() {
Log.d(TAG, "Init core bridge");
Class<?> clazz = getBridgeClass("XWalkCoreBridge");
ReflectMethod method = new ReflectMethod(clazz, "init", Context.class, Object.class);
method.invoke(mBridgeContext, this);
}
private void initXWalkView() {
Log.d(TAG, "Init xwalk view");
Class<?> clazz = getBridgeClass("XWalkViewDelegate");
ReflectMethod method = new ReflectMethod(clazz, "init", Context.class, Context.class);
method.invoke(mBridgeContext, mWrapperContext);
}
private boolean findEmbeddedCore() {
mBridgeContext = null;
mBridgeLoader = XWalkCoreWrapper.class.getClassLoader();
if (!checkCoreVersion() || !checkCoreArchitecture()) {
mBridgeLoader = null;
return false;
}
Log.d(TAG, "Running in embedded mode");
mCoreStatus = XWalkLibraryInterface.STATUS_MATCH;
return true;
}
private boolean findSharedCore() {
if (!checkCorePackage()) return false;
mBridgeLoader = mBridgeContext.getClassLoader();
if (!checkCoreVersion() || !checkCoreArchitecture()) {
mBridgeContext = null;
mBridgeLoader = null;
return false;
}
Log.d(TAG, "Running in shared mode");
mCoreStatus = XWalkLibraryInterface.STATUS_MATCH;
return true;
}
private boolean findDownloadedCore() {
String libDir = mWrapperContext.getDir(XWALK_CORE_EXTRACTED_DIR, Context.MODE_PRIVATE).
getAbsolutePath();
String dexPath = libDir + File.separator + XWALK_CORE_CLASSES_DEX;
String dexOutputPath = mWrapperContext.getDir(OPTIMIZED_DEX_DIR, Context.MODE_PRIVATE).
getAbsolutePath();
ClassLoader localClassLoader = ClassLoader.getSystemClassLoader();
mBridgeLoader = new DexClassLoader(dexPath, dexOutputPath, libDir, localClassLoader);
if (!checkCoreVersion() || !checkCoreArchitecture()) {
mBridgeLoader = null;
return false;
}
Log.d(TAG, "Running in downloaded mode");
mCoreStatus = XWalkLibraryInterface.STATUS_MATCH;
return true;
}
private boolean isDownloadMode() {
try {
PackageManager packageManager = mWrapperContext.getPackageManager();
ApplicationInfo appInfo = packageManager.getApplicationInfo(
mWrapperContext.getPackageName(), PackageManager.GET_META_DATA);
String enableStr = appInfo.metaData.getString(META_XWALK_ENABLE_DOWNLOAD_MODE);
return enableStr.equalsIgnoreCase("enable");
} catch (NameNotFoundException | NullPointerException e) {
}
return false;
}
private boolean checkCoreVersion() {
try {
Class<?> clazz = getBridgeClass("XWalkCoreVersion");
int libVersion = (int) new ReflectField(clazz, "API_VERSION").get();
int minLibVersion = (int) new ReflectField(clazz, "MIN_API_VERSION").get();
Log.d(TAG, "lib version, api:" + libVersion + ", min api:" + minLibVersion);
Log.d(TAG, "app version, api:" + mApiVersion + ", min api:" + mMinApiVersion);
if (mMinApiVersion > libVersion) {
mCoreStatus = XWalkLibraryInterface.STATUS_OLDER_VERSION;
return false;
} else if (mApiVersion < minLibVersion) {
mCoreStatus = XWalkLibraryInterface.STATUS_NEWER_VERSION;
return false;
}
} catch (RuntimeException e) {
Log.d(TAG, "XWalk core not found");
mCoreStatus = XWalkLibraryInterface.STATUS_NOT_FOUND;
return false;
}
Log.d(TAG, "XWalk core version matched");
return true;
}
private boolean checkCoreArchitecture() {
try {
Class<?> clazz = getBridgeClass("XWalkViewDelegate");
ReflectMethod method = new ReflectMethod(clazz, "loadXWalkLibrary",
Context.class, String.class);
boolean architectureMatched = false;
String libDir = null;
if (mBridgeContext != null) {
// Only load the native library from /data/data if in shared mode and the Android
// version is lower than 4.2. Android enables a system path /data/app-lib to store
// native libraries starting from 4.2 and load them automatically.
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.JELLY_BEAN_MR1) {
libDir = "/data/data/" + mBridgeContext.getPackageName() + "/lib";
}
architectureMatched = (boolean) method.invoke(mBridgeContext, libDir);
} else {
try {
architectureMatched = (boolean) method.invoke(mBridgeContext, libDir);
} catch (RuntimeException ex) {
Log.d(TAG, ex.getLocalizedMessage());
}
if (!architectureMatched && mWrapperContext != null) {
libDir = mWrapperContext.getDir(
XWalkLibraryInterface.PRIVATE_DATA_DIRECTORY_SUFFIX,
Context.MODE_PRIVATE).toString();
architectureMatched = (boolean) method.invoke(mBridgeContext, libDir);
}
}
if (!architectureMatched) {
Log.d(TAG, "Mismatch of CPU architecture");
mCoreStatus = XWalkLibraryInterface.STATUS_ARCHITECTURE_MISMATCH;
return false;
}
} catch (RuntimeException e) {
Log.d(TAG, e.getLocalizedMessage());
mCoreStatus = XWalkLibraryInterface.STATUS_INCOMPLETE_LIBRARY;
return false;
}
Log.d(TAG, "XWalk core architecture matched");
return true;
}
private boolean checkCorePackage() {
if (!XWalkAppVersion.VERIFY_XWALK_APK) {
Log.d(TAG, "Not verifying the package integrity of Crosswalk runtime library");
} else {
try {
PackageInfo packageInfo = mWrapperContext.getPackageManager().getPackageInfo(
XWALK_APK_PACKAGE, PackageManager.GET_SIGNATURES);
if (!verifyPackageInfo(packageInfo,
XWalkAppVersion.XWALK_APK_HASH_ALGORITHM,
XWalkAppVersion.XWALK_APK_HASH_CODE)) {
mCoreStatus = XWalkLibraryInterface.STATUS_SIGNATURE_CHECK_ERROR;
return false;
}
} catch (NameNotFoundException e) {
Log.d(TAG, "Crosswalk package not found");
return false;
}
}
try {
mBridgeContext = mWrapperContext.createPackageContext(XWALK_APK_PACKAGE,
Context.CONTEXT_INCLUDE_CODE | Context.CONTEXT_IGNORE_SECURITY);
} catch (NameNotFoundException e) {
Log.d(TAG, "Crosswalk package not found");
return false;
}
Log.d(TAG, "Created package context for " + XWALK_APK_PACKAGE);
return true;
}
private boolean verifyPackageInfo(PackageInfo packageInfo, String hashAlgorithm,
String hashCode) {
if (packageInfo.signatures == null) {
Log.e(TAG, "No signature in package info");
return false;
}
MessageDigest md = null;
try {
md = MessageDigest.getInstance(hashAlgorithm);
} catch (NoSuchAlgorithmException | NullPointerException e) {
Assert.fail("Invalid hash algorithm");
}
byte[] hashArray = hexStringToByteArray(hashCode);
if (hashArray == null) {
Assert.fail("Invalid hash code");
}
for (int i = 0; i < packageInfo.signatures.length; ++i) {
Log.d(TAG, "Checking signature " + i);
byte[] binaryCert = packageInfo.signatures[i].toByteArray();
byte[] digest = md.digest(binaryCert);
if (!MessageDigest.isEqual(digest, hashArray)) {
Log.e(TAG, "Hash code does not match");
continue;
}
Log.d(TAG, "Signature passed verification");
return true;
}
return false;
}
private byte[] hexStringToByteArray(String str) {
if (str == null || str.isEmpty() || str.length()%2 != 0) return null;
byte[] result = new byte[str.length() / 2];
for (int i = 0; i < str.length(); i += 2) {
int digit = Character.digit(str.charAt(i), 16);
digit <<= 4;
digit += Character.digit(str.charAt(i+1), 16);
result[i/2] = (byte) digit;
}
return result;
}
public boolean isSharedMode() {
return mBridgeContext != null;
}
public Object getBridgeObject(Object object) {
try {
return new ReflectMethod(object, "getBridge").invoke();
} catch (RuntimeException e) {
}
return null;
}
public Object getWrapperObject(Object object) {
try {
return new ReflectMethod(object, "getWrapper").invoke();
} catch (RuntimeException e) {
}
return null;
}
public Class<?> getBridgeClass(String name) {
try {
return mBridgeLoader.loadClass(BRIDGE_PACKAGE + "." + name);
} catch (ClassNotFoundException e) {
}
return null;
}
}
|
runtime/android/core/src/org/xwalk/core/XWalkCoreWrapper.java
|
// Copyright (c) 2015 Intel Corporation. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.xwalk.core;
import android.content.Context;
import android.content.pm.PackageInfo;
import android.content.pm.PackageManager;
import android.content.pm.PackageManager.NameNotFoundException;
import android.content.pm.Signature;
import android.os.Build;
import android.util.Log;
import dalvik.system.DexClassLoader;
import java.io.File;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.Arrays;
import java.util.LinkedList;
import java.util.HashMap;
import junit.framework.Assert;
/**
* The appropriate invocation order is:
* handlePreInit() - attachXWalkCore() - dockXWalkCore() - handlePostInit() - over
*/
class XWalkCoreWrapper {
private static final String XWALK_APK_PACKAGE = "org.xwalk.core";
private static final String WRAPPER_PACKAGE = "org.xwalk.core";
private static final String BRIDGE_PACKAGE = "org.xwalk.core.internal";
private static final String TAG = "XWalkLib";
private static final String XWALK_CORE_EXTRACTED_DIR = "extracted_xwalkcore";
private static final String XWALK_CORE_CLASSES_DEX = "classes.dex";
private static final String OPTIMIZED_DEX_DIR = "dex";
private static XWalkCoreWrapper sProvisionalInstance;
private static XWalkCoreWrapper sInstance;
private static LinkedList<String> sReservedActivities = new LinkedList<String>();
private static HashMap<String, LinkedList<ReservedAction> > sReservedActions =
new HashMap<String, LinkedList<ReservedAction> >();
private static class ReservedAction {
ReservedAction(Object object) {
mObject = object;
}
ReservedAction(Class<?> clazz) {
mClass = clazz;
}
ReservedAction(ReflectMethod method) {
mMethod = method;
if (method.getArguments() != null) {
mArguments = Arrays.copyOf(method.getArguments(), method.getArguments().length);
}
}
Object mObject;
Class<?> mClass;
ReflectMethod mMethod;
Object[] mArguments;
}
private int mApiVersion;
private int mMinApiVersion;
private int mCoreStatus;
private Context mWrapperContext;
private Context mBridgeContext;
private ClassLoader mBridgeLoader;
public static XWalkCoreWrapper getInstance() {
return sInstance;
}
public static int getCoreStatus() {
if (sInstance != null) return XWalkLibraryInterface.STATUS_MATCH;
if (sProvisionalInstance == null) return XWalkLibraryInterface.STATUS_PENDING;
return sProvisionalInstance.mCoreStatus;
}
/**
* This method must be invoked on the UI thread.
*/
public static void handlePreInit(String tag) {
if (sInstance != null) return;
Log.d(TAG, "Pre init xwalk core in " + tag);
if (sReservedActions.containsKey(tag)) {
sReservedActions.remove(tag);
} else {
sReservedActivities.add(tag);
}
sReservedActions.put(tag, new LinkedList<ReservedAction>());
}
public static void reserveReflectObject(Object object) {
String tag = sReservedActivities.getLast();
Log.d(TAG, "Reserve object " + object.getClass() + " to " + tag);
sReservedActions.get(tag).add(new ReservedAction(object));
}
public static void reserveReflectClass(Class<?> clazz) {
String tag = sReservedActivities.getLast();
Log.d(TAG, "Reserve class " + clazz.toString() + " to " + tag);
sReservedActions.get(tag).add(new ReservedAction(clazz));
}
public static void reserveReflectMethod(ReflectMethod method) {
String tag = sReservedActivities.getLast();
Log.d(TAG, "Reserve method " + method.toString() + " to " + tag);
sReservedActions.get(tag).add(new ReservedAction(method));
}
/**
* This method must be invoked on the UI thread.
*/
public static void handlePostInit(String tag) {
if (!sReservedActions.containsKey(tag)) return;
Log.d(TAG, "Post init xwalk core in " + tag);
LinkedList<ReservedAction> reservedActions = sReservedActions.get(tag);
for (ReservedAction action : reservedActions) {
if (action.mObject != null) {
Log.d(TAG, "Init reserved object: " + action.mObject.getClass());
new ReflectMethod(action.mObject, "reflectionInit").invoke();
} else if (action.mClass != null) {
Log.d(TAG, "Init reserved class: " + action.mClass.toString());
new ReflectMethod(action.mClass, "reflectionInit").invoke();
} else {
Log.d(TAG, "Call reserved method: " + action.mMethod.toString());
Object[] args = action.mArguments;
if (args != null) {
for (int i = 0; i < args.length; ++i) {
if (args[i] instanceof ReflectMethod) {
args[i] = ((ReflectMethod) args[i]).invokeWithArguments();
}
}
}
action.mMethod.invoke(args);
}
}
sReservedActivities.remove(tag);
sReservedActions.remove(tag);
}
public static int attachXWalkCore(Context context) {
Assert.assertFalse(sReservedActivities.isEmpty());
Assert.assertNull(sInstance);
Log.d(TAG, "Attach xwalk core");
sProvisionalInstance = new XWalkCoreWrapper(context, -1);
if (!sProvisionalInstance.findEmbeddedCore()) {
if (!sProvisionalInstance.findDownloadedCore()) {
sProvisionalInstance.findSharedCore();
}
}
return sProvisionalInstance.mCoreStatus;
}
/**
* This method must be invoked on the UI thread.
*/
public static void dockXWalkCore() {
Assert.assertNotNull(sProvisionalInstance);
Assert.assertNull(sInstance);
Log.d(TAG, "Dock xwalk core");
sInstance = sProvisionalInstance;
sProvisionalInstance = null;
sInstance.initCoreBridge();
sInstance.initXWalkView();
}
/**
* This method must be invoked on the UI thread.
*/
public static void initEmbeddedMode() {
if (sInstance != null || !sReservedActivities.isEmpty()) return;
Log.d(TAG, "Init embedded mode");
XWalkCoreWrapper provisionalInstance = new XWalkCoreWrapper(null, -1);
if (!provisionalInstance.findEmbeddedCore()) {
Assert.fail("Please have your activity extend XWalkActivity for shared mode");
}
sInstance = provisionalInstance;
sInstance.initCoreBridge();
}
private XWalkCoreWrapper(Context context, int minApiVersion) {
mApiVersion = XWalkAppVersion.API_VERSION;
mMinApiVersion = (minApiVersion > 0 && minApiVersion <= mApiVersion) ?
minApiVersion : mApiVersion;
mCoreStatus = XWalkLibraryInterface.STATUS_PENDING;
mWrapperContext = context;
}
private void initCoreBridge() {
Log.d(TAG, "Init core bridge");
Class<?> clazz = getBridgeClass("XWalkCoreBridge");
ReflectMethod method = new ReflectMethod(clazz, "init", Context.class, Object.class);
method.invoke(mBridgeContext, this);
}
private void initXWalkView() {
Log.d(TAG, "Init xwalk view");
Class<?> clazz = getBridgeClass("XWalkViewDelegate");
ReflectMethod method = new ReflectMethod(clazz, "init", Context.class, Context.class);
method.invoke(mBridgeContext, mWrapperContext);
}
private boolean findEmbeddedCore() {
mBridgeContext = null;
mBridgeLoader = XWalkCoreWrapper.class.getClassLoader();
if (!checkCoreVersion() || !checkCoreArchitecture()) {
mBridgeLoader = null;
return false;
}
Log.d(TAG, "Running in embedded mode");
mCoreStatus = XWalkLibraryInterface.STATUS_MATCH;
return true;
}
private boolean findSharedCore() {
if (!checkCorePackage()) return false;
mBridgeLoader = mBridgeContext.getClassLoader();
if (!checkCoreVersion() || !checkCoreArchitecture()) {
mBridgeContext = null;
mBridgeLoader = null;
return false;
}
Log.d(TAG, "Running in shared mode");
mCoreStatus = XWalkLibraryInterface.STATUS_MATCH;
return true;
}
private boolean findDownloadedCore() {
String libDir = mWrapperContext.getDir(XWALK_CORE_EXTRACTED_DIR, Context.MODE_PRIVATE).
toString();
String dexPath = libDir + File.separator + XWALK_CORE_CLASSES_DEX;
String dexOutputPath = mWrapperContext.getDir(OPTIMIZED_DEX_DIR, Context.MODE_PRIVATE).
getAbsolutePath();
ClassLoader localClassLoader = ClassLoader.getSystemClassLoader();
mBridgeLoader = new DexClassLoader(dexPath, dexOutputPath, libDir, localClassLoader);
if (!checkCoreVersion() || !checkCoreArchitecture()) {
mBridgeLoader = null;
return false;
}
Log.d(TAG, "Running in downloaded mode");
mCoreStatus = XWalkLibraryInterface.STATUS_MATCH;
return true;
}
private boolean checkCoreVersion() {
try {
Class<?> clazz = getBridgeClass("XWalkCoreVersion");
int libVersion = (int) new ReflectField(clazz, "API_VERSION").get();
int minLibVersion = (int) new ReflectField(clazz, "MIN_API_VERSION").get();
Log.d(TAG, "lib version, api:" + libVersion + ", min api:" + minLibVersion);
Log.d(TAG, "app version, api:" + mApiVersion + ", min api:" + mMinApiVersion);
if (mMinApiVersion > libVersion) {
mCoreStatus = XWalkLibraryInterface.STATUS_OLDER_VERSION;
return false;
} else if (mApiVersion < minLibVersion) {
mCoreStatus = XWalkLibraryInterface.STATUS_NEWER_VERSION;
return false;
}
} catch (RuntimeException e) {
Log.d(TAG, "XWalk core not found");
mCoreStatus = XWalkLibraryInterface.STATUS_NOT_FOUND;
return false;
}
Log.d(TAG, "XWalk core version matched");
return true;
}
private boolean checkCoreArchitecture() {
try {
Class<?> clazz = getBridgeClass("XWalkViewDelegate");
ReflectMethod method = new ReflectMethod(clazz, "loadXWalkLibrary",
Context.class, String.class);
boolean architectureMatched = false;
String libDir = null;
if (mBridgeContext != null) {
// Only load the native library from /data/data if in shared mode and the Android
// version is lower than 4.2. Android enables a system path /data/app-lib to store
// native libraries starting from 4.2 and load them automatically.
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.JELLY_BEAN_MR1) {
libDir = "/data/data/" + mBridgeContext.getPackageName() + "/lib";
}
architectureMatched = (boolean) method.invoke(mBridgeContext, libDir);
} else {
try {
architectureMatched = (boolean) method.invoke(mBridgeContext, libDir);
} catch (RuntimeException ex) {
Log.d(TAG, ex.getLocalizedMessage());
}
if (!architectureMatched && mWrapperContext != null) {
libDir = mWrapperContext.getDir(
XWalkLibraryInterface.PRIVATE_DATA_DIRECTORY_SUFFIX,
Context.MODE_PRIVATE).toString();
architectureMatched = (boolean) method.invoke(mBridgeContext, libDir);
}
}
if (!architectureMatched) {
Log.d(TAG, "Mismatch of CPU architecture");
mCoreStatus = XWalkLibraryInterface.STATUS_ARCHITECTURE_MISMATCH;
return false;
}
} catch (RuntimeException e) {
Log.d(TAG, e.getLocalizedMessage());
mCoreStatus = XWalkLibraryInterface.STATUS_INCOMPLETE_LIBRARY;
return false;
}
Log.d(TAG, "XWalk core architecture matched");
return true;
}
private boolean checkCorePackage() {
if (!XWalkAppVersion.VERIFY_XWALK_APK) {
Log.d(TAG, "Not verifying the package integrity of Crosswalk runtime library");
} else {
try {
PackageInfo packageInfo = mWrapperContext.getPackageManager().getPackageInfo(
XWALK_APK_PACKAGE, PackageManager.GET_SIGNATURES);
if (!verifyPackageInfo(packageInfo,
XWalkAppVersion.XWALK_APK_HASH_ALGORITHM,
XWalkAppVersion.XWALK_APK_HASH_CODE)) {
mCoreStatus = XWalkLibraryInterface.STATUS_SIGNATURE_CHECK_ERROR;
return false;
}
} catch (NameNotFoundException e) {
Log.d(TAG, "Crosswalk package not found");
return false;
}
}
try {
mBridgeContext = mWrapperContext.createPackageContext(XWALK_APK_PACKAGE,
Context.CONTEXT_INCLUDE_CODE | Context.CONTEXT_IGNORE_SECURITY);
} catch (NameNotFoundException e) {
Log.d(TAG, "Crosswalk package not found");
return false;
}
Log.d(TAG, "Created package context for " + XWALK_APK_PACKAGE);
return true;
}
private boolean verifyPackageInfo(PackageInfo packageInfo, String hashAlgorithm,
String hashCode) {
if (packageInfo.signatures == null) {
Log.e(TAG, "No signature in package info");
return false;
}
MessageDigest md = null;
try {
md = MessageDigest.getInstance(hashAlgorithm);
} catch (NoSuchAlgorithmException | NullPointerException e) {
Assert.fail("Invalid hash algorithm");
}
byte[] hashArray = hexStringToByteArray(hashCode);
if (hashArray == null) {
Assert.fail("Invalid hash code");
}
for (int i = 0; i < packageInfo.signatures.length; ++i) {
Log.d(TAG, "Checking signature " + i);
byte[] binaryCert = packageInfo.signatures[i].toByteArray();
byte[] digest = md.digest(binaryCert);
if (!MessageDigest.isEqual(digest, hashArray)) {
Log.e(TAG, "Hash code does not match");
continue;
}
Log.d(TAG, "Signature passed verification");
return true;
}
return false;
}
private byte[] hexStringToByteArray(String str) {
if (str == null || str.isEmpty() || str.length()%2 != 0) return null;
byte[] result = new byte[str.length() / 2];
for (int i = 0; i < str.length(); i += 2) {
int digit = Character.digit(str.charAt(i), 16);
digit <<= 4;
digit += Character.digit(str.charAt(i+1), 16);
result[i/2] = (byte) digit;
}
return result;
}
public boolean isSharedMode() {
return mBridgeContext != null;
}
public Object getBridgeObject(Object object) {
try {
return new ReflectMethod(object, "getBridge").invoke();
} catch (RuntimeException e) {
}
return null;
}
public Object getWrapperObject(Object object) {
try {
return new ReflectMethod(object, "getWrapper").invoke();
} catch (RuntimeException e) {
}
return null;
}
public Class<?> getBridgeClass(String name) {
try {
return mBridgeLoader.loadClass(BRIDGE_PACKAGE + "." + name);
} catch (ClassNotFoundException e) {
}
return null;
}
}
|
[Android] Make download mode work exclusively
An app working under silent download mode should always use its downloaded
XWalkRuntimeLib, and it shall not fall back to the shared mode, as the developer
may want to force the app to use the specified XWalkRuntimeLib.
|
runtime/android/core/src/org/xwalk/core/XWalkCoreWrapper.java
|
[Android] Make download mode work exclusively
|
<ide><path>untime/android/core/src/org/xwalk/core/XWalkCoreWrapper.java
<ide> package org.xwalk.core;
<ide>
<ide> import android.content.Context;
<add>import android.content.pm.ApplicationInfo;
<ide> import android.content.pm.PackageInfo;
<ide> import android.content.pm.PackageManager;
<ide> import android.content.pm.PackageManager.NameNotFoundException;
<ide> private static final String XWALK_CORE_EXTRACTED_DIR = "extracted_xwalkcore";
<ide> private static final String XWALK_CORE_CLASSES_DEX = "classes.dex";
<ide> private static final String OPTIMIZED_DEX_DIR = "dex";
<add> private static final String META_XWALK_ENABLE_DOWNLOAD_MODE = "xwalk_enable_download_mode";
<ide>
<ide> private static XWalkCoreWrapper sProvisionalInstance;
<ide> private static XWalkCoreWrapper sInstance;
<ide> Log.d(TAG, "Attach xwalk core");
<ide> sProvisionalInstance = new XWalkCoreWrapper(context, -1);
<ide> if (!sProvisionalInstance.findEmbeddedCore()) {
<del> if (!sProvisionalInstance.findDownloadedCore()) {
<add> if (sProvisionalInstance.isDownloadMode()) {
<add> sProvisionalInstance.findDownloadedCore();
<add> } else {
<ide> sProvisionalInstance.findSharedCore();
<ide> }
<ide> }
<ide>
<ide> private boolean findDownloadedCore() {
<ide> String libDir = mWrapperContext.getDir(XWALK_CORE_EXTRACTED_DIR, Context.MODE_PRIVATE).
<del> toString();
<add> getAbsolutePath();
<ide> String dexPath = libDir + File.separator + XWALK_CORE_CLASSES_DEX;
<ide> String dexOutputPath = mWrapperContext.getDir(OPTIMIZED_DEX_DIR, Context.MODE_PRIVATE).
<ide> getAbsolutePath();
<ide> Log.d(TAG, "Running in downloaded mode");
<ide> mCoreStatus = XWalkLibraryInterface.STATUS_MATCH;
<ide> return true;
<add> }
<add>
<add> private boolean isDownloadMode() {
<add> try {
<add> PackageManager packageManager = mWrapperContext.getPackageManager();
<add> ApplicationInfo appInfo = packageManager.getApplicationInfo(
<add> mWrapperContext.getPackageName(), PackageManager.GET_META_DATA);
<add> String enableStr = appInfo.metaData.getString(META_XWALK_ENABLE_DOWNLOAD_MODE);
<add> return enableStr.equalsIgnoreCase("enable");
<add> } catch (NameNotFoundException | NullPointerException e) {
<add> }
<add> return false;
<ide> }
<ide>
<ide> private boolean checkCoreVersion() {
|
|
Java
|
apache-2.0
|
1adce90daeaafb5765daf3663ac5bad89c35cfd8
| 0 |
lifey/ob1k,outbrain/ob1k,lifey/ob1k,outbrain/ob1k,outbrain/ob1k
|
package com.outbrain.ob1k.cache;
import com.google.common.collect.Lists;
import com.outbrain.ob1k.concurrent.*;
import com.outbrain.ob1k.concurrent.eager.ComposablePromise;
import com.outbrain.swinfra.metrics.api.Counter;
import com.outbrain.swinfra.metrics.api.Gauge;
import com.outbrain.swinfra.metrics.api.MetricFactory;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import static com.outbrain.ob1k.concurrent.ComposableFutures.newPromise;
/**
 * A wrapper for a TypedCache implementation that delegates missing entries to a loader.
 * The loader is used in such a way that concurrent activations on the same key are prevented.
* <p/>
* Created by aronen on 10/26/14.
*/
public class LoadingCacheDelegate<K, V> implements TypedCache<K, V> {
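// Editor's note -- illustrative usage sketch only, not part of the original source.
// The String/User types and the "users" cache name are made-up placeholders:
//
//   TypedCache<String, User> backingCache = ...;  // any TypedCache implementation
//   CacheLoader<String, User> loader = ...;       // loads entries that miss the cache
//   TypedCache<String, User> cache =
//       new LoadingCacheDelegate<>(backingCache, loader, "users", metricFactory);
//   ComposableFuture<User> value = cache.getAsync("someKey");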
private final TypedCache<K, V> cache;
private final CacheLoader<K, V> loader;
private final String cacheName;
private final ConcurrentMap<K, ComposablePromise<V>> futureValues;
private final Counter cacheHits;
private final Counter cacheMiss;
private final Counter cacheErrors;
private final Counter loaderErrors;
public LoadingCacheDelegate(final TypedCache<K, V> cache, final CacheLoader<K, V> loader, final String cacheName) {
this(cache, loader, cacheName, null);
}
public LoadingCacheDelegate(final TypedCache<K, V> cache, final CacheLoader<K, V> loader, final String cacheName, final MetricFactory metricFactory) {
this.cache = cache;
this.loader = loader;
this.cacheName = cacheName;
this.futureValues = new ConcurrentHashMap<>();
if (metricFactory != null) {
metricFactory.registerGauge("LoadingCacheDelegate." + cacheName, "mapSize", new Gauge<Integer>() {
@Override
public Integer getValue() {
return futureValues.size();
}
});
cacheHits = metricFactory.createCounter("LoadingCacheDelegate." + cacheName, "hits");
cacheMiss = metricFactory.createCounter("LoadingCacheDelegate." + cacheName, "miss");
cacheErrors = metricFactory.createCounter("LoadingCacheDelegate." + cacheName, "cacheErrors");
loaderErrors = metricFactory.createCounter("LoadingCacheDelegate." + cacheName, "loaderErrors");
} else {
cacheHits = null;
cacheMiss = null;
cacheErrors = null;
loaderErrors = null;
}
}
@Override
public ComposableFuture<V> getAsync(final K key) {
return ComposableFutures.build(new Producer<V>() {
@Override
public void produce(final Consumer<V> consumer) {
final ComposablePromise<V> promise = newPromise();
final ComposablePromise<V> prev = futureValues.putIfAbsent(key, promise);
if (prev != null) {
consumeFrom(prev.future(), consumer);
return;
}
final ComposableFuture<V> cachedResult = cache.getAsync(key);
cachedResult.consume(new Consumer<V>() {
@Override
public void consume(final Try<V> res) {
if (res.isSuccess()) {
final V result = res.getValue();
if (result == null) {
if (cacheMiss != null) {
cacheMiss.inc();
}
final ComposableFuture<V> loadedResult = loader.load(cacheName, key);
loadedResult.consume(new Consumer<V>() {
@Override
public void consume(final Try<V> loadedRes) {
if (loadedRes.isSuccess()) {
promise.set(loadedRes.getValue());
cache.setAsync(key, loadedRes.getValue()).consume(new Consumer<Boolean>() {
@Override
public void consume(final Try<Boolean> result) {
futureValues.remove(key);
}
});
} else {
if (loaderErrors != null) {
loaderErrors.inc();
}
promise.setException(loadedRes.getError());
futureValues.remove(key);
}
}
});
} else {
if (cacheHits != null) {
cacheHits.inc();
}
promise.set(result);
futureValues.remove(key);
}
} else {
if (cacheErrors != null) {
cacheErrors.inc();
}
promise.setException(res.getError());
futureValues.remove(key);
}
}
});
consumeFrom(promise.future(), consumer);
}
});
}
private static <T> void consumeFrom(final ComposableFuture<T> source, final Consumer<T> consumer) {
source.consume(new Consumer<T>() {
@Override
public void consume(final Try<T> result) {
consumer.consume(result);
}
});
}
@Override
public ComposableFuture<Map<K, V>> getBulkAsync(final Iterable<? extends K> keys) {
return ComposableFutures.build(new Producer<Map<K, V>>() {
@Override
public void produce(final Consumer<Map<K, V>> consumer) {
final List<? extends K> listKeys = Lists.newArrayList(keys);
final List<K> processedKeys = new ArrayList<>();
final Map<K, ComposablePromise<V>> res = new HashMap<>();
for (final K key : listKeys) {
final ComposablePromise<V> promise = newPromise();
final ComposablePromise<V> prev = futureValues.putIfAbsent(key, promise);
if (prev == null) {
res.put(key, promise);
processedKeys.add(key);
} else {
res.put(key, prev);
}
}
final ComposableFuture<Map<K, V>> cachedResults = cache.getBulkAsync(processedKeys);
cachedResults.consume(new Consumer<Map<K, V>>() {
@Override
public void consume(final Try<Map<K, V>> res) {
if (res.isSuccess()) {
final Map<K, V> result = res.getValue();
final List<K> missingFromCacheKeys = new ArrayList<>();
for (final K key : processedKeys) {
if (result.containsKey(key)) {
final ComposablePromise<V> promise = futureValues.get(key);
promise.set(result.get(key));
futureValues.remove(key);
} else {
missingFromCacheKeys.add(key);
}
}
if (!missingFromCacheKeys.isEmpty()) {
final ComposableFuture<Map<K, V>> loadedResults = loader.load(cacheName, missingFromCacheKeys);
loadedResults.consume(new Consumer<Map<K, V>>() {
@Override
public void consume(final Try<Map<K, V>> loadedRes) {
if (loadedRes.isSuccess()) {
final Map<K, V> elements = loadedRes.getValue();
for (final K key : missingFromCacheKeys) {
futureValues.get(key).set(elements.get(key));
}
cache.setBulkAsync(elements).consume(new Consumer<Map<K, Boolean>>() {
@Override
public void consume(final Try<Map<K, Boolean>> setResults) {
for (final K key : missingFromCacheKeys) {
futureValues.remove(key);
}
}
});
} else {
for (final K key : missingFromCacheKeys) {
final ComposablePromise<V> promise = futureValues.get(key);
promise.setException(loadedRes.getError());
futureValues.remove(key);
}
}
}
});
}
} else {
for (final K key : processedKeys) {
final ComposablePromise<V> promise = futureValues.get(key);
promise.setException(res.getError());
futureValues.remove(key);
}
}
}
});
consumeFrom(ComposableFutures.all(false, mapToFutures(res)), consumer);
}
});
}
private static <K, V> Map<K, ComposableFuture<V>> mapToFutures(final Map<K, ComposablePromise<V>> promises) {
final HashMap<K, ComposableFuture<V>> result = new HashMap<>(promises.size());
for (final Map.Entry<K, ComposablePromise<V>> promiseEntry : promises.entrySet()) {
result.put(promiseEntry.getKey(), promiseEntry.getValue().future());
}
return result;
}
@Override
public ComposableFuture<Boolean> setAsync(final K key, final V value) {
return cache.setAsync(key, value);
}
@Override
public ComposableFuture<Boolean> setAsync(final K key, final EntryMapper<K, V> mapper, final int maxIterations) {
return cache.setAsync(key, mapper, maxIterations);
}
@Override
public ComposableFuture<Map<K, Boolean>> setBulkAsync(final Map<? extends K, ? extends V> entries) {
return cache.setBulkAsync(entries);
}
@Override
public ComposableFuture<Boolean> deleteAsync(final K key) {
return cache.deleteAsync(key);
}
}
|
ob1k-cache/src/main/java/com/outbrain/ob1k/cache/LoadingCacheDelegate.java
|
package com.outbrain.ob1k.cache;
import com.google.common.collect.Lists;
import com.outbrain.ob1k.concurrent.*;
import com.outbrain.ob1k.concurrent.eager.ComposablePromise;
import com.outbrain.swinfra.metrics.api.Counter;
import com.outbrain.swinfra.metrics.api.Gauge;
import com.outbrain.swinfra.metrics.api.MetricFactory;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import static com.outbrain.ob1k.concurrent.ComposableFutures.newPromise;
/**
 * a wrapper for a TypedCache implementation that delegates missing entries to a loader.
 * the loader is used in such a way that prevents concurrent activations on the same key.
* <p/>
* Created by aronen on 10/26/14.
*/
public class LoadingCacheDelegate<K, V> implements TypedCache<K, V> {
private final TypedCache<K, V> cache;
private final CacheLoader<K, V> loader;
private final String cacheName;
private final ConcurrentMap<K, ComposablePromise<V>> futureValues;
private final Counter cacheHits;
private final Counter cacheMiss;
private final Counter cacheErrors;
private final Counter loaderErrors;
public LoadingCacheDelegate(final TypedCache<K, V> cache, final CacheLoader<K, V> loader, final String cacheName) {
this(cache, loader, cacheName, null);
}
public LoadingCacheDelegate(final TypedCache<K, V> cache, final CacheLoader<K, V> loader, final String cacheName, final MetricFactory metricFactory) {
this.cache = cache;
this.loader = loader;
this.cacheName = cacheName;
this.futureValues = new ConcurrentHashMap<>();
if (metricFactory != null) {
metricFactory.registerGauge("LoadingCacheDelegate." + cacheName, "mapSize", new Gauge<Integer>() {
@Override
public Integer getValue() {
return futureValues.size();
}
});
cacheHits = metricFactory.createCounter("LoadingCacheDelegate." + cacheName, "hits");
cacheMiss = metricFactory.createCounter("LoadingCacheDelegate." + cacheName, "miss");
cacheErrors = metricFactory.createCounter("LoadingCacheDelegate." + cacheName, "cacheErrors");
loaderErrors = metricFactory.createCounter("LoadingCacheDelegate." + cacheName, "loaderErrors");
} else {
cacheHits = null;
cacheMiss = null;
cacheErrors = null;
loaderErrors = null;
}
}
@Override
public ComposableFuture<V> getAsync(final K key) {
return ComposableFutures.build(new Producer<V>() {
@Override
public void produce(final Consumer<V> consumer) {
final ComposablePromise<V> promise = newPromise();
final ComposablePromise<V> prev = futureValues.putIfAbsent(key, promise);
if (prev != null) {
consumeFrom(prev.future(), consumer);
return;
}
final ComposableFuture<V> cachedResult = cache.getAsync(key);
cachedResult.consume(new Consumer<V>() {
@Override
public void consume(final Try<V> res) {
if (res.isSuccess()) {
final V result = res.getValue();
if (result == null) {
if (cacheMiss != null) {
cacheMiss.inc();
}
final ComposableFuture<V> loadedResult = loader.load(cacheName, key);
loadedResult.consume(new Consumer<V>() {
@Override
public void consume(final Try<V> loadedRes) {
if (loadedRes.isSuccess()) {
promise.set(loadedRes.getValue());
cache.setAsync(key, loadedRes.getValue()).consume(new Consumer<Boolean>() {
@Override
public void consume(final Try<Boolean> result) {
futureValues.remove(key);
}
});
} else {
if (loaderErrors != null) {
loaderErrors.inc();
}
promise.setException(loadedRes.getError());
futureValues.remove(key);
}
}
});
} else {
if (cacheHits != null) {
cacheHits.inc();
}
promise.set(result);
}
} else {
if (cacheErrors != null) {
cacheErrors.inc();
}
promise.setException(res.getError());
futureValues.remove(key);
}
}
});
consumeFrom(promise.future(), consumer);
}
});
}
private static <T> void consumeFrom(final ComposableFuture<T> source, final Consumer<T> consumer) {
source.consume(new Consumer<T>() {
@Override
public void consume(final Try<T> result) {
consumer.consume(result);
}
});
}
@Override
public ComposableFuture<Map<K, V>> getBulkAsync(final Iterable<? extends K> keys) {
return ComposableFutures.build(new Producer<Map<K, V>>() {
@Override
public void produce(final Consumer<Map<K, V>> consumer) {
final List<? extends K> listKeys = Lists.newArrayList(keys);
final List<K> processedKeys = new ArrayList<>();
final Map<K, ComposablePromise<V>> res = new HashMap<>();
for (final K key : listKeys) {
final ComposablePromise<V> promise = newPromise();
final ComposablePromise<V> prev = futureValues.putIfAbsent(key, promise);
if (prev == null) {
res.put(key, promise);
processedKeys.add(key);
} else {
res.put(key, prev);
}
}
final ComposableFuture<Map<K, V>> cachedResults = cache.getBulkAsync(processedKeys);
cachedResults.consume(new Consumer<Map<K, V>>() {
@Override
public void consume(final Try<Map<K, V>> res) {
if (res.isSuccess()) {
final Map<K, V> result = res.getValue();
final List<K> missingFromCacheKeys = new ArrayList<>();
for (final K key : processedKeys) {
if (result.containsKey(key)) {
final ComposablePromise<V> promise = futureValues.get(key);
promise.set(result.get(key));
futureValues.remove(key, promise);
} else {
missingFromCacheKeys.add(key);
}
}
if (!missingFromCacheKeys.isEmpty()) {
final ComposableFuture<Map<K, V>> loadedResults = loader.load(cacheName, missingFromCacheKeys);
loadedResults.consume(new Consumer<Map<K, V>>() {
@Override
public void consume(final Try<Map<K, V>> loadedRes) {
if (loadedRes.isSuccess()) {
final Map<K, V> elements = loadedRes.getValue();
for (final K key : missingFromCacheKeys) {
futureValues.get(key).set(elements.get(key));
}
cache.setBulkAsync(elements).consume(new Consumer<Map<K, Boolean>>() {
@Override
public void consume(final Try<Map<K, Boolean>> setResults) {
for (final K key : missingFromCacheKeys) {
futureValues.remove(key);
}
}
});
} else {
for (final K key : missingFromCacheKeys) {
final ComposablePromise<V> promise = futureValues.get(key);
promise.setException(loadedRes.getError());
futureValues.remove(key);
}
}
}
});
}
} else {
for (final K key : processedKeys) {
final ComposablePromise<V> promise = futureValues.get(key);
promise.setException(res.getError());
futureValues.remove(key);
}
}
}
});
consumeFrom(ComposableFutures.all(false, mapToFutures(res)), consumer);
}
});
}
private static <K, V> Map<K, ComposableFuture<V>> mapToFutures(final Map<K, ComposablePromise<V>> promises) {
final HashMap<K, ComposableFuture<V>> result = new HashMap<>(promises.size());
for (final Map.Entry<K, ComposablePromise<V>> promiseEntry : promises.entrySet()) {
result.put(promiseEntry.getKey(), promiseEntry.getValue().future());
}
return result;
}
@Override
public ComposableFuture<Boolean> setAsync(final K key, final V value) {
return cache.setAsync(key, value);
}
@Override
public ComposableFuture<Boolean> setAsync(final K key, final EntryMapper<K, V> mapper, final int maxIterations) {
return cache.setAsync(key, mapper, maxIterations);
}
@Override
public ComposableFuture<Map<K, Boolean>> setBulkAsync(final Map<? extends K, ? extends V> entries) {
return cache.setBulkAsync(entries);
}
@Override
public ComposableFuture<Boolean> deleteAsync(final K key) {
return cache.deleteAsync(key);
}
}
|
fix leak in local storage.
|
ob1k-cache/src/main/java/com/outbrain/ob1k/cache/LoadingCacheDelegate.java
|
fix leak in local storage.
|
<ide><path>b1k-cache/src/main/java/com/outbrain/ob1k/cache/LoadingCacheDelegate.java
<ide> cacheHits.inc();
<ide> }
<ide> promise.set(result);
<add> futureValues.remove(key);
<ide> }
<ide> } else {
<ide> if (cacheErrors != null) {
<ide> if (result.containsKey(key)) {
<ide> final ComposablePromise<V> promise = futureValues.get(key);
<ide> promise.set(result.get(key));
<del> futureValues.remove(key, promise);
<add> futureValues.remove(key);
<ide> } else {
<ide> missingFromCacheKeys.add(key);
<ide> }
|
|
Java
|
apache-2.0
|
1f46d6e9a3dd8914fe321a72bced3cf37bde2808
| 0 |
trombka/blc-tmp,caosg/BroadleafCommerce,caosg/BroadleafCommerce,trombka/blc-tmp,trombka/blc-tmp,TouK/BroadleafCommerce,TouK/BroadleafCommerce,caosg/BroadleafCommerce,TouK/BroadleafCommerce
|
/*
* #%L
* BroadleafCommerce Framework
* %%
* Copyright (C) 2009 - 2013 Broadleaf Commerce
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package org.broadleafcommerce.core.order.domain;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.broadleafcommerce.common.admin.domain.AdminMainEntity;
import org.broadleafcommerce.common.copy.CreateResponse;
import org.broadleafcommerce.common.copy.MultiTenantCloneable;
import org.broadleafcommerce.common.copy.MultiTenantCopyContext;
import org.broadleafcommerce.common.currency.util.BroadleafCurrencyUtils;
import org.broadleafcommerce.common.currency.util.CurrencyCodeIdentifiable;
import org.broadleafcommerce.common.extensibility.jpa.copy.DirectCopyTransform;
import org.broadleafcommerce.common.extensibility.jpa.copy.DirectCopyTransformMember;
import org.broadleafcommerce.common.extensibility.jpa.copy.DirectCopyTransformTypes;
import org.broadleafcommerce.common.money.Money;
import org.broadleafcommerce.common.presentation.*;
import org.broadleafcommerce.common.presentation.client.SupportedFieldType;
import org.broadleafcommerce.common.presentation.client.VisibilityEnum;
import org.broadleafcommerce.common.presentation.override.AdminPresentationMergeEntry;
import org.broadleafcommerce.common.presentation.override.AdminPresentationMergeOverride;
import org.broadleafcommerce.common.presentation.override.AdminPresentationMergeOverrides;
import org.broadleafcommerce.common.presentation.override.PropertyType;
import org.broadleafcommerce.common.util.HibernateUtils;
import org.broadleafcommerce.core.catalog.domain.Category;
import org.broadleafcommerce.core.catalog.domain.CategoryImpl;
import org.broadleafcommerce.core.offer.domain.CandidateItemOffer;
import org.broadleafcommerce.core.offer.domain.CandidateItemOfferImpl;
import org.broadleafcommerce.core.offer.domain.OrderItemAdjustment;
import org.broadleafcommerce.core.offer.domain.OrderItemAdjustmentImpl;
import org.broadleafcommerce.core.order.service.type.OrderItemType;
import org.hibernate.annotations.Cache;
import org.hibernate.annotations.*;
import org.hibernate.annotations.Parameter;
import javax.persistence.CascadeType;
import javax.persistence.*;
import javax.persistence.Entity;
import javax.persistence.Table;
import java.lang.reflect.Method;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@Entity
@Inheritance(strategy = InheritanceType.JOINED)
@Table(name = "BLC_ORDER_ITEM")
@Cache(usage=CacheConcurrencyStrategy.NONSTRICT_READ_WRITE, region="blOrderElements")
@AdminPresentationMergeOverrides(
{
@AdminPresentationMergeOverride(name = "", mergeEntries =
@AdminPresentationMergeEntry(propertyType = PropertyType.AdminPresentation.READONLY,
booleanOverrideValue = true))
}
)
@AdminPresentationClass(populateToOneFields = PopulateToOneFieldsEnum.TRUE, friendlyName = "OrderItemImpl_baseOrderItem")
@DirectCopyTransform({
@DirectCopyTransformMember(templateTokens = DirectCopyTransformTypes.MULTITENANT_SITE)
})
public class OrderItemImpl implements OrderItem, Cloneable, AdminMainEntity, CurrencyCodeIdentifiable, MultiTenantCloneable<OrderItemImpl> {
private static final Log LOG = LogFactory.getLog(OrderItemImpl.class);
private static final long serialVersionUID = 1L;
@Id
@GeneratedValue(generator = "OrderItemId")
@GenericGenerator(
name="OrderItemId",
strategy="org.broadleafcommerce.common.persistence.IdOverrideTableGenerator",
parameters = {
@Parameter(name="segment_value", value="OrderItemImpl"),
@Parameter(name="entity_name", value="org.broadleafcommerce.core.order.domain.OrderItemImpl")
}
)
@Column(name = "ORDER_ITEM_ID")
@AdminPresentation(visibility = VisibilityEnum.HIDDEN_ALL)
protected Long id;
@ManyToOne(fetch = FetchType.LAZY, targetEntity = CategoryImpl.class)
@JoinColumn(name = "CATEGORY_ID")
@Index(name="ORDERITEM_CATEGORY_INDEX", columnNames={"CATEGORY_ID"})
@NotFound(action = NotFoundAction.IGNORE)
@AdminPresentation(friendlyName = "OrderItemImpl_Category", order=Presentation.FieldOrder.CATEGORY,
group = Presentation.Group.Name.Catalog, groupOrder = Presentation.Group.Order.Catalog)
@AdminPresentationToOneLookup()
protected Category category;
@ManyToOne(targetEntity = OrderImpl.class)
@JoinColumn(name = "ORDER_ID")
@Index(name="ORDERITEM_ORDER_INDEX", columnNames={"ORDER_ID"})
@AdminPresentation(excluded = true)
protected Order order;
@Column(name = "PRICE", precision = 19, scale = 5)
@AdminPresentation(friendlyName = "OrderItemImpl_Item_Price", order = Presentation.FieldOrder.PRICE,
group = Presentation.Group.Name.Pricing, groupOrder = Presentation.Group.Order.Pricing,
fieldType = SupportedFieldType.MONEY, prominent = true, gridOrder = 3000)
protected BigDecimal price;
@Column(name = "QUANTITY", nullable = false)
@AdminPresentation(friendlyName = "OrderItemImpl_Item_Quantity", order = Presentation.FieldOrder.QUANTITY,
group = Presentation.Group.Name.Pricing, groupOrder = Presentation.Group.Order.Pricing,
prominent = true, gridOrder = 2000)
protected int quantity;
@Column(name = "RETAIL_PRICE", precision=19, scale=5)
@AdminPresentation(friendlyName = "OrderItemImpl_Item_Retail_Price", order = Presentation.FieldOrder.RETAILPRICE,
group = Presentation.Group.Name.Pricing, groupOrder = Presentation.Group.Order.Pricing,
fieldType = SupportedFieldType.MONEY, prominent = true, gridOrder = 4000)
protected BigDecimal retailPrice;
@Column(name = "SALE_PRICE", precision=19, scale=5)
@AdminPresentation(friendlyName = "OrderItemImpl_Item_Sale_Price", order = Presentation.FieldOrder.SALEPRICE,
group = Presentation.Group.Name.Pricing, groupOrder = Presentation.Group.Order.Pricing,
fieldType = SupportedFieldType.MONEY)
protected BigDecimal salePrice;
@Column(name = "NAME")
@AdminPresentation(friendlyName = "OrderItemImpl_Item_Name", order=Presentation.FieldOrder.NAME,
group = Presentation.Group.Name.Description, prominent=true, gridOrder = 1000,
groupOrder = Presentation.Group.Order.Description)
protected String name;
@ManyToOne(targetEntity = PersonalMessageImpl.class, cascade = { CascadeType.ALL })
@JoinColumn(name = "PERSONAL_MESSAGE_ID")
@Cache(usage=CacheConcurrencyStrategy.NONSTRICT_READ_WRITE, region="blOrderElements")
@Index(name="ORDERITEM_MESSAGE_INDEX", columnNames={"PERSONAL_MESSAGE_ID"})
protected PersonalMessage personalMessage;
@ManyToOne(fetch = FetchType.LAZY, targetEntity = GiftWrapOrderItemImpl.class, cascade = { CascadeType.MERGE, CascadeType.PERSIST })
@JoinColumn(name = "GIFT_WRAP_ITEM_ID", nullable = true)
@Cache(usage=CacheConcurrencyStrategy.NONSTRICT_READ_WRITE, region="blOrderElements")
@Index(name="ORDERITEM_GIFT_INDEX", columnNames={"GIFT_WRAP_ITEM_ID"})
@AdminPresentation(excluded = true)
protected GiftWrapOrderItem giftWrapOrderItem;
@OneToMany(mappedBy = "orderItem", targetEntity = OrderItemAdjustmentImpl.class, cascade = { CascadeType.ALL },
orphanRemoval = true)
@Cache(usage = CacheConcurrencyStrategy.NONSTRICT_READ_WRITE, region = "blOrderElements")
@AdminPresentationCollection(friendlyName="OrderItemImpl_Adjustments", order = Presentation.FieldOrder.ADJUSTMENTS,
tab = Presentation.Tab.Name.Advanced, tabOrder = Presentation.Tab.Order.Advanced)
protected List<OrderItemAdjustment> orderItemAdjustments = new ArrayList<OrderItemAdjustment>();
@OneToMany(mappedBy = "orderItem", targetEntity = OrderItemQualifierImpl.class, cascade = { CascadeType.ALL },
orphanRemoval = true)
@Cache(usage = CacheConcurrencyStrategy.NONSTRICT_READ_WRITE, region = "blOrderElements")
protected List<OrderItemQualifier> orderItemQualifiers = new ArrayList<OrderItemQualifier>();
@OneToMany(mappedBy = "orderItem", targetEntity = CandidateItemOfferImpl.class, cascade = { CascadeType.ALL },
orphanRemoval = true)
@Cache(usage = CacheConcurrencyStrategy.NONSTRICT_READ_WRITE, region = "blOrderElements")
protected List<CandidateItemOffer> candidateItemOffers = new ArrayList<CandidateItemOffer>();
@OneToMany(mappedBy = "orderItem", targetEntity = OrderItemPriceDetailImpl.class, cascade = { CascadeType.ALL },
orphanRemoval = true)
@Cache(usage=CacheConcurrencyStrategy.NONSTRICT_READ_WRITE, region="blOrderElements")
@AdminPresentationCollection(friendlyName="OrderItemImpl_Price_Details", order = Presentation.FieldOrder.PRICEDETAILS,
tab = Presentation.Tab.Name.Advanced, tabOrder = Presentation.Tab.Order.Advanced)
protected List<OrderItemPriceDetail> orderItemPriceDetails = new ArrayList<OrderItemPriceDetail>();
@Column(name = "ORDER_ITEM_TYPE")
@Index(name="ORDERITEM_TYPE_INDEX", columnNames={"ORDER_ITEM_TYPE"})
protected String orderItemType;
@Column(name = "ITEM_TAXABLE_FLAG")
protected Boolean itemTaxable;
@Column(name = "RETAIL_PRICE_OVERRIDE")
protected Boolean retailPriceOverride;
@Column(name = "SALE_PRICE_OVERRIDE")
protected Boolean salePriceOverride;
@Column(name = "DISCOUNTS_ALLOWED")
@AdminPresentation(friendlyName = "OrderItemImpl_Discounts_Allowed", order=Presentation.FieldOrder.DISCOUNTALLOWED,
tab = Presentation.Tab.Name.Advanced, tabOrder = Presentation.Tab.Order.Advanced)
protected Boolean discountsAllowed;
@OneToMany(mappedBy = "orderItem", targetEntity = OrderItemAttributeImpl.class, cascade = { CascadeType.ALL }, orphanRemoval = true)
@Cache(usage=CacheConcurrencyStrategy.NONSTRICT_READ_WRITE, region="blOrderElements")
@MapKey(name="name")
@AdminPresentationMap(friendlyName = "OrderItemImpl_Attributes",
tab = Presentation.Tab.Name.Advanced, tabOrder = Presentation.Tab.Order.Advanced,
deleteEntityUponRemove = true, forceFreeFormKeys = true, keyPropertyFriendlyName = "OrderItemAttributeImpl_Attribute_Name"
)
protected Map<String, OrderItemAttribute> orderItemAttributeMap = new HashMap<String, OrderItemAttribute>();
/**
 * @deprecated use {@link FulfillmentGroupItem#getTaxes()} or {@link FulfillmentGroupItem#getTotalTax()} instead
*/
@Column(name = "TOTAL_TAX")
@Deprecated
protected BigDecimal totalTax;
@OneToMany(mappedBy = "parentOrderItem", targetEntity = OrderItemImpl.class)
@Cache(usage = CacheConcurrencyStrategy.NONSTRICT_READ_WRITE, region = "blOrderElements")
protected List<OrderItem> childOrderItems = new ArrayList<OrderItem>();
@ManyToOne(targetEntity = OrderItemImpl.class)
@JoinColumn(name = "PARENT_ORDER_ITEM_ID")
@Index(name="ORDERITEM_PARENT_INDEX", columnNames={"PARENT_ORDER_ITEM_ID"})
protected OrderItem parentOrderItem;
@Override
public Money getRetailPrice() {
if (retailPrice == null) {
updateSaleAndRetailPrices();
}
return convertToMoney(retailPrice);
}
@Override
public void setRetailPrice(Money retailPrice) {
this.retailPrice = Money.toAmount(retailPrice);
}
@Override
public Money getSalePrice() {
// Added retailPrice check since a null salePrice is not a reliable way to determine that prices have
// been initialized.
if (salePrice == null && retailPrice == null) {
updateSaleAndRetailPrices();
}
if (salePrice != null) {
Money returnPrice = convertToMoney(salePrice);
if (retailPrice != null && returnPrice.greaterThan(getRetailPrice())) {
return getRetailPrice();
} else {
return returnPrice;
}
} else {
return getRetailPrice();
}
}
@Override
public void setSalePrice(Money salePrice) {
this.salePrice = Money.toAmount(salePrice);
}
@Override
public Money getPrice() {
return getAveragePrice();
}
@Override
public void setPrice(Money finalPrice) {
setRetailPrice(finalPrice);
setSalePrice(finalPrice);
setRetailPriceOverride(true);
setSalePriceOverride(true);
setDiscountingAllowed(false);
this.price = Money.toAmount(finalPrice);
}
@Override
public Money getTaxablePrice() {
Money taxablePrice = BroadleafCurrencyUtils.getMoney(BigDecimal.ZERO, getOrder().getCurrency());
if (isTaxable() == null || isTaxable()) {
taxablePrice = getAveragePrice();
}
return taxablePrice;
}
@Override
public int getQuantity() {
return quantity;
}
@Override
public void setQuantity(int quantity) {
this.quantity = quantity;
}
@Override
public Category getCategory() {
return HibernateUtils.deproxy(category);
}
@Override
public void setCategory(Category category) {
this.category = category;
}
@Override
public List<CandidateItemOffer> getCandidateItemOffers() {
return candidateItemOffers;
}
@Override
public void setCandidateItemOffers(List<CandidateItemOffer> candidateItemOffers) {
this.candidateItemOffers = candidateItemOffers;
}
@Override
public PersonalMessage getPersonalMessage() {
return personalMessage;
}
@Override
public void setPersonalMessage(PersonalMessage personalMessage) {
this.personalMessage = personalMessage;
}
@Override
public Order getOrder() {
return order;
}
@Override
public void setOrder(Order order) {
this.order = order;
}
@Override
public Long getId() {
return id;
}
@Override
public void setId(Long id) {
this.id = id;
}
@Override
public String getName() {
return name;
}
@Override
public void setName(String name) {
this.name = name;
}
@Override
public boolean isInCategory(String categoryName) {
Category currentCategory = category;
if (currentCategory != null) {
if (currentCategory.getName().equals(categoryName)) {
return true;
}
while ((currentCategory = currentCategory.getDefaultParentCategory()) != null) {
if (currentCategory.getName().equals(categoryName)) {
return true;
}
}
}
return false;
}
@Override
public List<OrderItemQualifier> getOrderItemQualifiers() {
return this.orderItemQualifiers;
}
@Override
public void setOrderItemQualifiers(List<OrderItemQualifier> orderItemQualifiers) {
this.orderItemQualifiers = orderItemQualifiers;
}
@Override
public List<OrderItemAdjustment> getOrderItemAdjustments() {
return this.orderItemAdjustments;
}
@Override
public void setOrderItemAdjustments(List<OrderItemAdjustment> orderItemAdjustments) {
this.orderItemAdjustments = orderItemAdjustments;
}
@Override
public Money getAdjustmentValue() {
return getAverageAdjustmentValue();
}
@Override
public GiftWrapOrderItem getGiftWrapOrderItem() {
return HibernateUtils.deproxy(giftWrapOrderItem);
}
@Override
public void setGiftWrapOrderItem(GiftWrapOrderItem giftWrapOrderItem) {
this.giftWrapOrderItem = giftWrapOrderItem;
}
@Override
public OrderItemType getOrderItemType() {
return convertOrderItemType(orderItemType);
}
@Override
public void setOrderItemType(OrderItemType orderItemType) {
this.orderItemType = orderItemType.getType();
}
@Override
public boolean getIsOnSale() {
if (getSalePrice() != null) {
return !getSalePrice().equals(getRetailPrice());
} else {
return false;
}
}
@Override
public boolean getIsDiscounted() {
if (getPrice() != null) {
return !getPrice().equals(getRetailPrice());
} else {
return false;
}
}
@Override
public boolean updateSaleAndRetailPrices() {
if (salePrice == null) {
salePrice = retailPrice;
}
return false;
}
@Override
public void finalizePrice() {
price = getAveragePrice().getAmount();
}
@Override
public void assignFinalPrice() {
Money finalPrice = getTotalPrice().divide(quantity);
price = finalPrice.getAmount();
}
@Override
public Money getPriceBeforeAdjustments(boolean allowSalesPrice) {
boolean retailPriceOverride = false;
for (OrderItemPriceDetail oipd : getOrderItemPriceDetails()) {
if (oipd.getUseSalePrice() == false) {
retailPriceOverride = true;
break;
}
}
if (allowSalesPrice && !retailPriceOverride) {
return getSalePrice();
} else {
return getRetailPrice();
}
}
@Override
public void addCandidateItemOffer(CandidateItemOffer candidateItemOffer) {
getCandidateItemOffers().add(candidateItemOffer);
}
@Override
public void removeAllCandidateItemOffers() {
if (getCandidateItemOffers() != null) {
for (CandidateItemOffer candidate : getCandidateItemOffers()) {
candidate.setOrderItem(null);
}
getCandidateItemOffers().clear();
}
}
@Override
public int removeAllAdjustments() {
int removedAdjustmentCount = 0;
if (getOrderItemAdjustments() != null) {
for (OrderItemAdjustment adjustment : getOrderItemAdjustments()) {
adjustment.setOrderItem(null);
}
removedAdjustmentCount = getOrderItemAdjustments().size();
getOrderItemAdjustments().clear();
}
assignFinalPrice();
return removedAdjustmentCount;
}
/**
 * A map of arbitrary attributes added to this item.
*/
@Override
public Map<String,OrderItemAttribute> getOrderItemAttributes() {
return orderItemAttributeMap;
}
/**
* Sets the map of order item attributes.
*
* @param orderItemAttributes
*/
@Override
public void setOrderItemAttributes(Map<String,OrderItemAttribute> orderItemAttributes) {
this.orderItemAttributeMap = orderItemAttributes;
}
@Override
public Boolean isTaxable() {
return itemTaxable == null ? true : itemTaxable;
}
@Override
public void setTaxable(Boolean taxable) {
this.itemTaxable = taxable;
}
@Override
public void setOrderItemPriceDetails(List<OrderItemPriceDetail> orderItemPriceDetails) {
this.orderItemPriceDetails = orderItemPriceDetails;
}
@Override
public boolean isDiscountingAllowed() {
if (discountsAllowed == null) {
return true;
} else {
return discountsAllowed.booleanValue();
}
}
@Override
public void setDiscountingAllowed(boolean discountsAllowed) {
this.discountsAllowed = discountsAllowed;
}
@Override
public Money getAveragePrice() {
if (quantity == 0) {
return price == null ? null : BroadleafCurrencyUtils.getMoney(price, getOrder().getCurrency());
}
return getTotalPrice().divide(quantity);
}
@Override
public Money getAverageAdjustmentValue() {
if (quantity == 0) {
return null;
}
return getTotalAdjustmentValue().divide(quantity);
}
@Override
public Money getTotalAdjustmentValue() {
Money totalAdjustmentValue = BroadleafCurrencyUtils.getMoney(getOrder().getCurrency());
List<OrderItemPriceDetail> priceDetails = getOrderItemPriceDetails();
if (priceDetails != null) {
for (OrderItemPriceDetail priceDetail : getOrderItemPriceDetails()) {
totalAdjustmentValue = totalAdjustmentValue.add(priceDetail.getTotalAdjustmentValue());
}
}
return totalAdjustmentValue;
}
@Override
public Money getTotalPrice() {
Money returnValue = convertToMoney(BigDecimal.ZERO);
if (orderItemPriceDetails != null && orderItemPriceDetails.size() > 0) {
for (OrderItemPriceDetail oipd : orderItemPriceDetails) {
returnValue = returnValue.add(oipd.getTotalAdjustedPrice());
}
} else {
if (price != null) {
returnValue = convertToMoney(price).multiply(quantity);
} else {
return getSalePrice().multiply(quantity);
}
}
return returnValue;
}
@Override
public Money getTotalPriceBeforeAdjustments(boolean allowSalesPrice) {
return getPriceBeforeAdjustments(allowSalesPrice).multiply(getQuantity());
}
@Override
public void setRetailPriceOverride(boolean override) {
this.retailPriceOverride = Boolean.valueOf(override);
}
@Override
public boolean isRetailPriceOverride() {
if (retailPriceOverride == null) {
return false;
} else {
return retailPriceOverride.booleanValue();
}
}
@Override
public void setSalePriceOverride(boolean override) {
this.salePriceOverride = Boolean.valueOf(override);
}
@Override
public boolean isSalePriceOverride() {
if (salePriceOverride == null) {
return false;
} else {
return salePriceOverride.booleanValue();
}
}
@Override
public List<OrderItemPriceDetail> getOrderItemPriceDetails() {
return orderItemPriceDetails;
}
@Override
public List<OrderItem> getChildOrderItems() {
return childOrderItems;
}
@Override
public void setChildOrderItems(List<OrderItem> childOrderItems) {
this.childOrderItems = childOrderItems;
}
@Override
public OrderItem getParentOrderItem() {
return parentOrderItem;
}
@Override
public void setParentOrderItem(OrderItem parentOrderItem) {
this.parentOrderItem = parentOrderItem;
}
@Override
public boolean isAParentOf(OrderItem candidateChild) {
if (CollectionUtils.isNotEmpty(this.getChildOrderItems())) {
for (OrderItem child : this.getChildOrderItems()) {
if (child.equals(candidateChild)) {
return true;
}
}
// Item wasn't a direct child. Let's check the hierarchy
for (OrderItem child : this.getChildOrderItems()) {
if (child.isAParentOf(candidateChild)) {
return true;
}
}
}
return false;
}
@Override
public String getMainEntityName() {
return getName();
}
@Override
public String getCurrencyCode() {
if (getOrder().getCurrency() != null) {
return getOrder().getCurrency().getCurrencyCode();
}
return null;
}
public void checkCloneable(OrderItem orderItem) throws CloneNotSupportedException, SecurityException, NoSuchMethodException {
Method cloneMethod = orderItem.getClass().getMethod("clone", new Class[]{});
if (cloneMethod.getDeclaringClass().getName().startsWith("org.broadleafcommerce") &&
!orderItem.getClass().getName().startsWith("org.broadleafcommerce")) {
//subclass is not implementing the clone method
throw new CloneNotSupportedException("Custom extensions and implementations should implement clone in " +
"order to guarantee split and merge operations are performed accurately");
}
}
protected Money convertToMoney(BigDecimal amount) {
return amount == null ? null : BroadleafCurrencyUtils.getMoney(amount, getOrder().getCurrency());
}
protected OrderItemType convertOrderItemType(String type) {
return OrderItemType.getInstance(type);
}
@Override
public OrderItem clone() {
//this is likely an extended class - instantiate from the fully qualified name via reflection
OrderItemImpl clonedOrderItem;
try {
clonedOrderItem = (OrderItemImpl) Class.forName(this.getClass().getName()).newInstance();
try {
checkCloneable(clonedOrderItem);
} catch (CloneNotSupportedException e) {
LOG.warn("Clone implementation missing in inheritance hierarchy outside of Broadleaf: " +
clonedOrderItem.getClass().getName(), e);
}
if (candidateItemOffers != null) {
for (CandidateItemOffer candidate : candidateItemOffers) {
CandidateItemOffer clone = candidate.clone();
clone.setOrderItem(clonedOrderItem);
clonedOrderItem.getCandidateItemOffers().add(clone);
}
}
if (orderItemAttributeMap != null && !orderItemAttributeMap.isEmpty()) {
for (OrderItemAttribute attribute : orderItemAttributeMap.values()) {
OrderItemAttribute clone = attribute.clone();
clone.setOrderItem(clonedOrderItem);
clonedOrderItem.getOrderItemAttributes().put(clone.getName(), clone);
}
}
if (CollectionUtils.isNotEmpty(childOrderItems)) {
for (OrderItem childOrderItem : childOrderItems) {
OrderItem clone = childOrderItem.clone();
clone.setParentOrderItem(clonedOrderItem);
clonedOrderItem.getChildOrderItems().add(clone);
}
}
clonedOrderItem.setCategory(category);
clonedOrderItem.setGiftWrapOrderItem(giftWrapOrderItem);
clonedOrderItem.setName(name);
clonedOrderItem.setOrder(order);
clonedOrderItem.setOrderItemType(convertOrderItemType(orderItemType));
clonedOrderItem.setPersonalMessage(personalMessage);
clonedOrderItem.setQuantity(quantity);
clonedOrderItem.retailPrice = retailPrice;
clonedOrderItem.salePrice = salePrice;
clonedOrderItem.discountsAllowed = discountsAllowed;
clonedOrderItem.salePriceOverride = salePriceOverride;
clonedOrderItem.retailPriceOverride = retailPriceOverride;
clonedOrderItem.setParentOrderItem(parentOrderItem);
} catch (Exception e) {
throw new RuntimeException(e);
}
return clonedOrderItem;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((category == null) ? 0 : category.hashCode());
result = prime * result + ((giftWrapOrderItem == null) ? 0 : giftWrapOrderItem.hashCode());
result = prime * result + ((order == null) ? 0 : order.hashCode());
result = prime * result + ((orderItemType == null) ? 0 : orderItemType.hashCode());
result = prime * result + ((personalMessage == null) ? 0 : personalMessage.hashCode());
result = prime * result + ((price == null) ? 0 : price.hashCode());
result = prime * result + quantity;
result = prime * result + ((retailPrice == null) ? 0 : retailPrice.hashCode());
result = prime * result + ((salePrice == null) ? 0 : salePrice.hashCode());
result = prime * result + ((parentOrderItem == null) ? 0 : parentOrderItem.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (!getClass().isAssignableFrom(obj.getClass())) {
return false;
}
OrderItemImpl other = (OrderItemImpl) obj;
if (id != null && other.id != null) {
return id.equals(other.id);
}
if (category == null) {
if (other.category != null) {
return false;
}
} else if (!category.equals(other.category)) {
return false;
}
if (giftWrapOrderItem == null) {
if (other.giftWrapOrderItem != null) {
return false;
}
} else if (!giftWrapOrderItem.equals(other.giftWrapOrderItem)) {
return false;
}
if (order == null) {
if (other.order != null) {
return false;
}
} else if (!order.equals(other.order)) {
return false;
}
if (orderItemType == null) {
if (other.orderItemType != null) {
return false;
}
} else if (!orderItemType.equals(other.orderItemType)) {
return false;
}
if (personalMessage == null) {
if (other.personalMessage != null) {
return false;
}
} else if (!personalMessage.equals(other.personalMessage)) {
return false;
}
if (price == null) {
if (other.price != null) {
return false;
}
} else if (!price.equals(other.price)) {
return false;
}
if (quantity != other.quantity) {
return false;
}
if (retailPrice == null) {
if (other.retailPrice != null) {
return false;
}
} else if (!retailPrice.equals(other.retailPrice)) {
return false;
}
if (salePrice == null) {
if (other.salePrice != null) {
return false;
}
} else if (!salePrice.equals(other.salePrice)) {
return false;
}
if (parentOrderItem == null) {
if (other.parentOrderItem != null) {
return false;
}
} else if (!parentOrderItem.equals(other.parentOrderItem)) {
return false;
}
return true;
}
@Override
public <G extends OrderItemImpl> CreateResponse<G> createOrRetrieveCopyInstance(MultiTenantCopyContext context) throws CloneNotSupportedException {
CreateResponse<G> createResponse = context.createOrRetrieveCopyInstance(this);
if (createResponse.isAlreadyPopulated()) {
return createResponse;
}
OrderItem cloned = createResponse.getClone();
cloned.setCategory(category.createOrRetrieveCopyInstance(context).getClone());
cloned.setName(name);
cloned.setOrderItemType(getOrderItemType());
cloned.setDiscountingAllowed(discountsAllowed);
cloned.setTaxable(isTaxable());
cloned.setSalePriceOverride(salePriceOverride);
cloned.setSalePrice(getSalePrice());
cloned.setRetailPrice(getRetailPrice());
cloned.setRetailPriceOverride(retailPriceOverride);
cloned.setQuantity(quantity);
cloned.setPersonalMessage(personalMessage);
        // don't clone; reuse the existing parent order item reference
cloned.setParentOrderItem(parentOrderItem);
for(OrderItem entry : childOrderItems){
OrderItem clonedEntry = ((OrderItemImpl)entry).createOrRetrieveCopyInstance(context).getClone();
clonedEntry.setParentOrderItem(clonedEntry);
cloned.getChildOrderItems().add(clonedEntry);
}
for(CandidateItemOffer entry : candidateItemOffers){
CandidateItemOffer clonedEntry = entry.createOrRetrieveCopyInstance(context).getClone();
clonedEntry.setOrderItem(cloned);
cloned.getCandidateItemOffers().add(clonedEntry);
}
for(Map.Entry<String,OrderItemAttribute> entry : orderItemAttributeMap.entrySet()){
OrderItemAttribute clonedEntry = entry.getValue().createOrRetrieveCopyInstance(context).getClone();
clonedEntry.setOrderItem(cloned);
cloned.getOrderItemAttributes().put(entry.getKey(),clonedEntry);
}
        // don't clone; reuse the existing gift wrap order item reference
cloned.setGiftWrapOrderItem(giftWrapOrderItem);
for(OrderItemPriceDetail entry : orderItemPriceDetails){
OrderItemPriceDetail clonedEntry = entry.createOrRetrieveCopyInstance(context).getClone();
clonedEntry.setOrderItem(cloned);
cloned.getOrderItemPriceDetails().add(clonedEntry);
}
return createResponse;
}
public static class Presentation {
public static class Tab {
public static class Name {
public static final String Advanced = "OrderImpl_Advanced";
}
public static class Order {
public static final int Advanced = 2000;
}
}
public static class Group {
public static class Name {
public static final String Description = "OrderItemImpl_Description";
public static final String Pricing = "OrderItemImpl_Pricing";
public static final String Catalog = "OrderItemImpl_Catalog";
}
public static class Order {
public static final int Description = 1000;
public static final int Pricing = 2000;
public static final int Catalog = 3000;
}
}
public static class FieldOrder {
public static final int NAME = 1000;
public static final int PRICE = 2000;
public static final int QUANTITY = 3000;
public static final int RETAILPRICE = 4000;
public static final int SALEPRICE = 5000;
public static final int TOTALTAX = 6000;
public static final int CATEGORY = 1000;
public static final int PRICEDETAILS = 1000;
public static final int ADJUSTMENTS = 2000;
public static final int DISCOUNTALLOWED = 3000;
}
}
@Override
public boolean isSkuActive() {
        // default implementation returns true; subclasses may override
return true;
}
}
|
core/broadleaf-framework/src/main/java/org/broadleafcommerce/core/order/domain/OrderItemImpl.java
|
/*
* #%L
* BroadleafCommerce Framework
* %%
* Copyright (C) 2009 - 2013 Broadleaf Commerce
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package org.broadleafcommerce.core.order.domain;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.broadleafcommerce.common.admin.domain.AdminMainEntity;
import org.broadleafcommerce.common.copy.CreateResponse;
import org.broadleafcommerce.common.copy.MultiTenantCloneable;
import org.broadleafcommerce.common.copy.MultiTenantCopyContext;
import org.broadleafcommerce.common.currency.util.BroadleafCurrencyUtils;
import org.broadleafcommerce.common.currency.util.CurrencyCodeIdentifiable;
import org.broadleafcommerce.common.extensibility.jpa.copy.DirectCopyTransform;
import org.broadleafcommerce.common.extensibility.jpa.copy.DirectCopyTransformMember;
import org.broadleafcommerce.common.extensibility.jpa.copy.DirectCopyTransformTypes;
import org.broadleafcommerce.common.money.Money;
import org.broadleafcommerce.common.presentation.*;
import org.broadleafcommerce.common.presentation.client.SupportedFieldType;
import org.broadleafcommerce.common.presentation.client.VisibilityEnum;
import org.broadleafcommerce.common.presentation.override.AdminPresentationMergeEntry;
import org.broadleafcommerce.common.presentation.override.AdminPresentationMergeOverride;
import org.broadleafcommerce.common.presentation.override.AdminPresentationMergeOverrides;
import org.broadleafcommerce.common.presentation.override.PropertyType;
import org.broadleafcommerce.common.util.HibernateUtils;
import org.broadleafcommerce.core.catalog.domain.Category;
import org.broadleafcommerce.core.catalog.domain.CategoryImpl;
import org.broadleafcommerce.core.offer.domain.CandidateItemOffer;
import org.broadleafcommerce.core.offer.domain.CandidateItemOfferImpl;
import org.broadleafcommerce.core.offer.domain.OrderItemAdjustment;
import org.broadleafcommerce.core.offer.domain.OrderItemAdjustmentImpl;
import org.broadleafcommerce.core.order.service.type.OrderItemType;
import org.hibernate.annotations.Cache;
import org.hibernate.annotations.*;
import org.hibernate.annotations.Parameter;
import javax.persistence.CascadeType;
import javax.persistence.*;
import javax.persistence.Entity;
import javax.persistence.Table;
import java.lang.reflect.Method;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@Entity
@Inheritance(strategy = InheritanceType.JOINED)
@Table(name = "BLC_ORDER_ITEM")
@Cache(usage=CacheConcurrencyStrategy.NONSTRICT_READ_WRITE, region="blOrderElements")
@AdminPresentationMergeOverrides(
{
@AdminPresentationMergeOverride(name = "", mergeEntries =
@AdminPresentationMergeEntry(propertyType = PropertyType.AdminPresentation.READONLY,
booleanOverrideValue = true))
}
)
@AdminPresentationClass(populateToOneFields = PopulateToOneFieldsEnum.TRUE, friendlyName = "OrderItemImpl_baseOrderItem")
@DirectCopyTransform({
@DirectCopyTransformMember(templateTokens = DirectCopyTransformTypes.MULTITENANT_SITE)
})
public class OrderItemImpl implements OrderItem, Cloneable, AdminMainEntity, CurrencyCodeIdentifiable, MultiTenantCloneable<OrderItemImpl> {
private static final Log LOG = LogFactory.getLog(OrderItemImpl.class);
private static final long serialVersionUID = 1L;
@Id
@GeneratedValue(generator = "OrderItemId")
@GenericGenerator(
name="OrderItemId",
strategy="org.broadleafcommerce.common.persistence.IdOverrideTableGenerator",
parameters = {
@Parameter(name="segment_value", value="OrderItemImpl"),
@Parameter(name="entity_name", value="org.broadleafcommerce.core.order.domain.OrderItemImpl")
}
)
@Column(name = "ORDER_ITEM_ID")
@AdminPresentation(visibility = VisibilityEnum.HIDDEN_ALL)
protected Long id;
@ManyToOne(fetch = FetchType.LAZY, targetEntity = CategoryImpl.class)
@JoinColumn(name = "CATEGORY_ID")
@Index(name="ORDERITEM_CATEGORY_INDEX", columnNames={"CATEGORY_ID"})
@NotFound(action = NotFoundAction.IGNORE)
@AdminPresentation(friendlyName = "OrderItemImpl_Category", order=Presentation.FieldOrder.CATEGORY,
group = Presentation.Group.Name.Catalog, groupOrder = Presentation.Group.Order.Catalog)
@AdminPresentationToOneLookup()
protected Category category;
@ManyToOne(targetEntity = OrderImpl.class)
@JoinColumn(name = "ORDER_ID")
@Index(name="ORDERITEM_ORDER_INDEX", columnNames={"ORDER_ID"})
@AdminPresentation(excluded = true)
protected Order order;
@Column(name = "PRICE", precision = 19, scale = 5)
@AdminPresentation(friendlyName = "OrderItemImpl_Item_Price", order = Presentation.FieldOrder.PRICE,
group = Presentation.Group.Name.Pricing, groupOrder = Presentation.Group.Order.Pricing,
fieldType = SupportedFieldType.MONEY, prominent = true, gridOrder = 3000)
protected BigDecimal price;
@Column(name = "QUANTITY", nullable = false)
@AdminPresentation(friendlyName = "OrderItemImpl_Item_Quantity", order = Presentation.FieldOrder.QUANTITY,
group = Presentation.Group.Name.Pricing, groupOrder = Presentation.Group.Order.Pricing,
prominent = true, gridOrder = 2000)
protected int quantity;
@Column(name = "RETAIL_PRICE", precision=19, scale=5)
@AdminPresentation(friendlyName = "OrderItemImpl_Item_Retail_Price", order = Presentation.FieldOrder.RETAILPRICE,
group = Presentation.Group.Name.Pricing, groupOrder = Presentation.Group.Order.Pricing,
fieldType = SupportedFieldType.MONEY, prominent = true, gridOrder = 4000)
protected BigDecimal retailPrice;
@Column(name = "SALE_PRICE", precision=19, scale=5)
@AdminPresentation(friendlyName = "OrderItemImpl_Item_Sale_Price", order = Presentation.FieldOrder.SALEPRICE,
group = Presentation.Group.Name.Pricing, groupOrder = Presentation.Group.Order.Pricing,
fieldType = SupportedFieldType.MONEY)
protected BigDecimal salePrice;
@Column(name = "NAME")
@AdminPresentation(friendlyName = "OrderItemImpl_Item_Name", order=Presentation.FieldOrder.NAME,
group = Presentation.Group.Name.Description, prominent=true, gridOrder = 1000,
groupOrder = Presentation.Group.Order.Description)
protected String name;
@ManyToOne(targetEntity = PersonalMessageImpl.class, cascade = { CascadeType.ALL })
@JoinColumn(name = "PERSONAL_MESSAGE_ID")
@Cache(usage=CacheConcurrencyStrategy.NONSTRICT_READ_WRITE, region="blOrderElements")
@Index(name="ORDERITEM_MESSAGE_INDEX", columnNames={"PERSONAL_MESSAGE_ID"})
protected PersonalMessage personalMessage;
@ManyToOne(fetch = FetchType.LAZY, targetEntity = GiftWrapOrderItemImpl.class, cascade = { CascadeType.MERGE, CascadeType.PERSIST })
@JoinColumn(name = "GIFT_WRAP_ITEM_ID", nullable = true)
@Cache(usage=CacheConcurrencyStrategy.NONSTRICT_READ_WRITE, region="blOrderElements")
@Index(name="ORDERITEM_GIFT_INDEX", columnNames={"GIFT_WRAP_ITEM_ID"})
@AdminPresentation(excluded = true)
protected GiftWrapOrderItem giftWrapOrderItem;
@OneToMany(mappedBy = "orderItem", targetEntity = OrderItemAdjustmentImpl.class, cascade = { CascadeType.ALL },
orphanRemoval = true)
@Cache(usage = CacheConcurrencyStrategy.NONSTRICT_READ_WRITE, region = "blOrderElements")
@AdminPresentationCollection(friendlyName="OrderItemImpl_Adjustments", order = Presentation.FieldOrder.ADJUSTMENTS,
tab = Presentation.Tab.Name.Advanced, tabOrder = Presentation.Tab.Order.Advanced)
protected List<OrderItemAdjustment> orderItemAdjustments = new ArrayList<OrderItemAdjustment>();
@OneToMany(mappedBy = "orderItem", targetEntity = OrderItemQualifierImpl.class, cascade = { CascadeType.ALL },
orphanRemoval = true)
@Cache(usage = CacheConcurrencyStrategy.NONSTRICT_READ_WRITE, region = "blOrderElements")
protected List<OrderItemQualifier> orderItemQualifiers = new ArrayList<OrderItemQualifier>();
@OneToMany(mappedBy = "orderItem", targetEntity = CandidateItemOfferImpl.class, cascade = { CascadeType.ALL },
orphanRemoval = true)
@Cache(usage = CacheConcurrencyStrategy.NONSTRICT_READ_WRITE, region = "blOrderElements")
protected List<CandidateItemOffer> candidateItemOffers = new ArrayList<CandidateItemOffer>();
@OneToMany(mappedBy = "orderItem", targetEntity = OrderItemPriceDetailImpl.class, cascade = { CascadeType.ALL },
orphanRemoval = true)
@Cache(usage=CacheConcurrencyStrategy.NONSTRICT_READ_WRITE, region="blOrderElements")
@AdminPresentationCollection(friendlyName="OrderItemImpl_Price_Details", order = Presentation.FieldOrder.PRICEDETAILS,
tab = Presentation.Tab.Name.Advanced, tabOrder = Presentation.Tab.Order.Advanced)
protected List<OrderItemPriceDetail> orderItemPriceDetails = new ArrayList<OrderItemPriceDetail>();
@Column(name = "ORDER_ITEM_TYPE")
@Index(name="ORDERITEM_TYPE_INDEX", columnNames={"ORDER_ITEM_TYPE"})
protected String orderItemType;
@Column(name = "ITEM_TAXABLE_FLAG")
protected Boolean itemTaxable;
@Column(name = "RETAIL_PRICE_OVERRIDE")
protected Boolean retailPriceOverride;
@Column(name = "SALE_PRICE_OVERRIDE")
protected Boolean salePriceOverride;
@Column(name = "DISCOUNTS_ALLOWED")
@AdminPresentation(friendlyName = "OrderItemImpl_Discounts_Allowed", order=Presentation.FieldOrder.DISCOUNTALLOWED,
tab = Presentation.Tab.Name.Advanced, tabOrder = Presentation.Tab.Order.Advanced)
protected Boolean discountsAllowed;
@OneToMany(mappedBy = "orderItem", targetEntity = OrderItemAttributeImpl.class, cascade = { CascadeType.ALL }, orphanRemoval = true)
@Cache(usage=CacheConcurrencyStrategy.NONSTRICT_READ_WRITE, region="blOrderElements")
@MapKey(name="name")
@AdminPresentationMap(friendlyName = "OrderItemImpl_Attributes",
tab = Presentation.Tab.Name.Advanced, tabOrder = Presentation.Tab.Order.Advanced,
deleteEntityUponRemove = true, forceFreeFormKeys = true, keyPropertyFriendlyName = "OrderItemAttributeImpl_Attribute_Name"
)
protected Map<String, OrderItemAttribute> orderItemAttributeMap = new HashMap<String, OrderItemAttribute>();
@Column(name = "TOTAL_TAX")
@AdminPresentation(friendlyName = "OrderItemImpl_Total_Tax", order = Presentation.FieldOrder.TOTALTAX,
group = Presentation.Group.Name.Pricing, groupOrder = Presentation.Group.Order.Pricing,
fieldType = SupportedFieldType.MONEY)
protected BigDecimal totalTax;
@OneToMany(mappedBy = "parentOrderItem", targetEntity = OrderItemImpl.class)
@Cache(usage = CacheConcurrencyStrategy.NONSTRICT_READ_WRITE, region = "blOrderElements")
protected List<OrderItem> childOrderItems = new ArrayList<OrderItem>();
@ManyToOne(targetEntity = OrderItemImpl.class)
@JoinColumn(name = "PARENT_ORDER_ITEM_ID")
@Index(name="ORDERITEM_PARENT_INDEX", columnNames={"PARENT_ORDER_ITEM_ID"})
protected OrderItem parentOrderItem;
@Override
public Money getRetailPrice() {
if (retailPrice == null) {
updateSaleAndRetailPrices();
}
return convertToMoney(retailPrice);
}
@Override
public void setRetailPrice(Money retailPrice) {
this.retailPrice = Money.toAmount(retailPrice);
}
@Override
public Money getSalePrice() {
// Added retailPrice check since a null salePrice is not a reliable way to determine that prices have
// been initialized.
if (salePrice == null && retailPrice == null) {
updateSaleAndRetailPrices();
}
if (salePrice != null) {
Money returnPrice = convertToMoney(salePrice);
if (retailPrice != null && returnPrice.greaterThan(getRetailPrice())) {
return getRetailPrice();
} else {
return returnPrice;
}
} else {
return getRetailPrice();
}
}
@Override
public void setSalePrice(Money salePrice) {
this.salePrice = Money.toAmount(salePrice);
}
@Override
public Money getPrice() {
return getAveragePrice();
}
@Override
public void setPrice(Money finalPrice) {
setRetailPrice(finalPrice);
setSalePrice(finalPrice);
setRetailPriceOverride(true);
setSalePriceOverride(true);
setDiscountingAllowed(false);
this.price = Money.toAmount(finalPrice);
}
@Override
public Money getTaxablePrice() {
Money taxablePrice = BroadleafCurrencyUtils.getMoney(BigDecimal.ZERO, getOrder().getCurrency());
if (isTaxable() == null || isTaxable()) {
taxablePrice = getAveragePrice();
}
return taxablePrice;
}
@Override
public int getQuantity() {
return quantity;
}
@Override
public void setQuantity(int quantity) {
this.quantity = quantity;
}
@Override
public Category getCategory() {
return HibernateUtils.deproxy(category);
}
@Override
public void setCategory(Category category) {
this.category = category;
}
@Override
public List<CandidateItemOffer> getCandidateItemOffers() {
return candidateItemOffers;
}
@Override
public void setCandidateItemOffers(List<CandidateItemOffer> candidateItemOffers) {
this.candidateItemOffers = candidateItemOffers;
}
@Override
public PersonalMessage getPersonalMessage() {
return personalMessage;
}
@Override
public void setPersonalMessage(PersonalMessage personalMessage) {
this.personalMessage = personalMessage;
}
@Override
public Order getOrder() {
return order;
}
@Override
public void setOrder(Order order) {
this.order = order;
}
@Override
public Long getId() {
return id;
}
@Override
public void setId(Long id) {
this.id = id;
}
@Override
public String getName() {
return name;
}
@Override
public void setName(String name) {
this.name = name;
}
@Override
public boolean isInCategory(String categoryName) {
Category currentCategory = category;
if (currentCategory != null) {
if (currentCategory.getName().equals(categoryName)) {
return true;
}
while ((currentCategory = currentCategory.getDefaultParentCategory()) != null) {
if (currentCategory.getName().equals(categoryName)) {
return true;
}
}
}
return false;
}
@Override
public List<OrderItemQualifier> getOrderItemQualifiers() {
return this.orderItemQualifiers;
}
@Override
public void setOrderItemQualifiers(List<OrderItemQualifier> orderItemQualifiers) {
this.orderItemQualifiers = orderItemQualifiers;
}
@Override
public List<OrderItemAdjustment> getOrderItemAdjustments() {
return this.orderItemAdjustments;
}
@Override
public void setOrderItemAdjustments(List<OrderItemAdjustment> orderItemAdjustments) {
this.orderItemAdjustments = orderItemAdjustments;
}
@Override
public Money getAdjustmentValue() {
return getAverageAdjustmentValue();
}
@Override
public GiftWrapOrderItem getGiftWrapOrderItem() {
return HibernateUtils.deproxy(giftWrapOrderItem);
}
@Override
public void setGiftWrapOrderItem(GiftWrapOrderItem giftWrapOrderItem) {
this.giftWrapOrderItem = giftWrapOrderItem;
}
@Override
public OrderItemType getOrderItemType() {
return convertOrderItemType(orderItemType);
}
@Override
public void setOrderItemType(OrderItemType orderItemType) {
this.orderItemType = orderItemType.getType();
}
@Override
public boolean getIsOnSale() {
if (getSalePrice() != null) {
return !getSalePrice().equals(getRetailPrice());
} else {
return false;
}
}
@Override
public boolean getIsDiscounted() {
if (getPrice() != null) {
return !getPrice().equals(getRetailPrice());
} else {
return false;
}
}
@Override
public boolean updateSaleAndRetailPrices() {
if (salePrice == null) {
salePrice = retailPrice;
}
return false;
}
@Override
public void finalizePrice() {
price = getAveragePrice().getAmount();
}
@Override
public void assignFinalPrice() {
Money finalPrice = getTotalPrice().divide(quantity);
price = finalPrice.getAmount();
}
@Override
public Money getPriceBeforeAdjustments(boolean allowSalesPrice) {
boolean retailPriceOverride = false;
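        // If any price detail was not priced from the sale price, return the retail price
        // even when the caller allows the sale price.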
for (OrderItemPriceDetail oipd : getOrderItemPriceDetails()) {
if (oipd.getUseSalePrice() == false) {
retailPriceOverride = true;
break;
}
}
if (allowSalesPrice && !retailPriceOverride) {
return getSalePrice();
} else {
return getRetailPrice();
}
}
@Override
public void addCandidateItemOffer(CandidateItemOffer candidateItemOffer) {
getCandidateItemOffers().add(candidateItemOffer);
}
@Override
public void removeAllCandidateItemOffers() {
if (getCandidateItemOffers() != null) {
for (CandidateItemOffer candidate : getCandidateItemOffers()) {
candidate.setOrderItem(null);
}
getCandidateItemOffers().clear();
}
}
@Override
public int removeAllAdjustments() {
int removedAdjustmentCount = 0;
if (getOrderItemAdjustments() != null) {
for (OrderItemAdjustment adjustment : getOrderItemAdjustments()) {
adjustment.setOrderItem(null);
}
removedAdjustmentCount = getOrderItemAdjustments().size();
getOrderItemAdjustments().clear();
}
assignFinalPrice();
return removedAdjustmentCount;
}
/**
* A list of arbitrary attributes added to this item.
*/
@Override
public Map<String,OrderItemAttribute> getOrderItemAttributes() {
return orderItemAttributeMap;
}
/**
* Sets the map of order item attributes.
*
* @param orderItemAttributes
*/
@Override
public void setOrderItemAttributes(Map<String,OrderItemAttribute> orderItemAttributes) {
this.orderItemAttributeMap = orderItemAttributes;
}
@Override
public Boolean isTaxable() {
return itemTaxable == null ? true : itemTaxable;
}
@Override
public void setTaxable(Boolean taxable) {
this.itemTaxable = taxable;
}
@Override
public void setOrderItemPriceDetails(List<OrderItemPriceDetail> orderItemPriceDetails) {
this.orderItemPriceDetails = orderItemPriceDetails;
}
@Override
public boolean isDiscountingAllowed() {
if (discountsAllowed == null) {
return true;
} else {
return discountsAllowed.booleanValue();
}
}
@Override
public void setDiscountingAllowed(boolean discountsAllowed) {
this.discountsAllowed = discountsAllowed;
}
@Override
public Money getAveragePrice() {
if (quantity == 0) {
return price == null ? null : BroadleafCurrencyUtils.getMoney(price, getOrder().getCurrency());
}
return getTotalPrice().divide(quantity);
}
@Override
public Money getAverageAdjustmentValue() {
if (quantity == 0) {
return null;
}
return getTotalAdjustmentValue().divide(quantity);
}
@Override
public Money getTotalAdjustmentValue() {
Money totalAdjustmentValue = BroadleafCurrencyUtils.getMoney(getOrder().getCurrency());
List<OrderItemPriceDetail> priceDetails = getOrderItemPriceDetails();
if (priceDetails != null) {
for (OrderItemPriceDetail priceDetail : getOrderItemPriceDetails()) {
totalAdjustmentValue = totalAdjustmentValue.add(priceDetail.getTotalAdjustmentValue());
}
}
return totalAdjustmentValue;
}
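    // The total is the sum of the adjusted prices of all price details when any exist;
    // otherwise it falls back to price * quantity, or to salePrice * quantity when the
    // price has not been finalized yet (e.g. a 5.00 unit price with quantity 2 totals 10.00).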
@Override
public Money getTotalPrice() {
Money returnValue = convertToMoney(BigDecimal.ZERO);
if (orderItemPriceDetails != null && orderItemPriceDetails.size() > 0) {
for (OrderItemPriceDetail oipd : orderItemPriceDetails) {
returnValue = returnValue.add(oipd.getTotalAdjustedPrice());
}
} else {
if (price != null) {
returnValue = convertToMoney(price).multiply(quantity);
} else {
return getSalePrice().multiply(quantity);
}
}
return returnValue;
}
@Override
public Money getTotalPriceBeforeAdjustments(boolean allowSalesPrice) {
return getPriceBeforeAdjustments(allowSalesPrice).multiply(getQuantity());
}
@Override
public void setRetailPriceOverride(boolean override) {
this.retailPriceOverride = Boolean.valueOf(override);
}
@Override
public boolean isRetailPriceOverride() {
if (retailPriceOverride == null) {
return false;
} else {
return retailPriceOverride.booleanValue();
}
}
@Override
public void setSalePriceOverride(boolean override) {
this.salePriceOverride = Boolean.valueOf(override);
}
@Override
public boolean isSalePriceOverride() {
if (salePriceOverride == null) {
return false;
} else {
return salePriceOverride.booleanValue();
}
}
@Override
public List<OrderItemPriceDetail> getOrderItemPriceDetails() {
return orderItemPriceDetails;
}
@Override
public List<OrderItem> getChildOrderItems() {
return childOrderItems;
}
@Override
public void setChildOrderItems(List<OrderItem> childOrderItems) {
this.childOrderItems = childOrderItems;
}
@Override
public OrderItem getParentOrderItem() {
return parentOrderItem;
}
@Override
public void setParentOrderItem(OrderItem parentOrderItem) {
this.parentOrderItem = parentOrderItem;
}
@Override
public boolean isAParentOf(OrderItem candidateChild) {
if (CollectionUtils.isNotEmpty(this.getChildOrderItems())) {
for (OrderItem child : this.getChildOrderItems()) {
if (child.equals(candidateChild)) {
return true;
}
}
// Item wasn't a direct child. Let's check the hierarchy
for (OrderItem child : this.getChildOrderItems()) {
if (child.isAParentOf(candidateChild)) {
return true;
}
}
}
return false;
}
@Override
public String getMainEntityName() {
return getName();
}
@Override
public String getCurrencyCode() {
if (getOrder().getCurrency() != null) {
return getOrder().getCurrency().getCurrencyCode();
}
return null;
}
public void checkCloneable(OrderItem orderItem) throws CloneNotSupportedException, SecurityException, NoSuchMethodException {
Method cloneMethod = orderItem.getClass().getMethod("clone", new Class[]{});
if (cloneMethod.getDeclaringClass().getName().startsWith("org.broadleafcommerce") &&
!orderItem.getClass().getName().startsWith("org.broadleafcommerce")) {
//subclass is not implementing the clone method
throw new CloneNotSupportedException("Custom extensions and implementations should implement clone in " +
"order to guarantee split and merge operations are performed accurately");
}
}
protected Money convertToMoney(BigDecimal amount) {
return amount == null ? null : BroadleafCurrencyUtils.getMoney(amount, getOrder().getCurrency());
}
protected OrderItemType convertOrderItemType(String type) {
return OrderItemType.getInstance(type);
}
@Override
public OrderItem clone() {
//this is likely an extended class - instantiate from the fully qualified name via reflection
OrderItemImpl clonedOrderItem;
try {
clonedOrderItem = (OrderItemImpl) Class.forName(this.getClass().getName()).newInstance();
try {
checkCloneable(clonedOrderItem);
} catch (CloneNotSupportedException e) {
LOG.warn("Clone implementation missing in inheritance hierarchy outside of Broadleaf: " +
clonedOrderItem.getClass().getName(), e);
}
if (candidateItemOffers != null) {
for (CandidateItemOffer candidate : candidateItemOffers) {
CandidateItemOffer clone = candidate.clone();
clone.setOrderItem(clonedOrderItem);
clonedOrderItem.getCandidateItemOffers().add(clone);
}
}
if (orderItemAttributeMap != null && !orderItemAttributeMap.isEmpty()) {
for (OrderItemAttribute attribute : orderItemAttributeMap.values()) {
OrderItemAttribute clone = attribute.clone();
clone.setOrderItem(clonedOrderItem);
clonedOrderItem.getOrderItemAttributes().put(clone.getName(), clone);
}
}
if (CollectionUtils.isNotEmpty(childOrderItems)) {
for (OrderItem childOrderItem : childOrderItems) {
OrderItem clone = childOrderItem.clone();
clone.setParentOrderItem(clonedOrderItem);
clonedOrderItem.getChildOrderItems().add(clone);
}
}
clonedOrderItem.setCategory(category);
clonedOrderItem.setGiftWrapOrderItem(giftWrapOrderItem);
clonedOrderItem.setName(name);
clonedOrderItem.setOrder(order);
clonedOrderItem.setOrderItemType(convertOrderItemType(orderItemType));
clonedOrderItem.setPersonalMessage(personalMessage);
clonedOrderItem.setQuantity(quantity);
clonedOrderItem.retailPrice = retailPrice;
clonedOrderItem.salePrice = salePrice;
clonedOrderItem.discountsAllowed = discountsAllowed;
clonedOrderItem.salePriceOverride = salePriceOverride;
clonedOrderItem.retailPriceOverride = retailPriceOverride;
clonedOrderItem.setParentOrderItem(parentOrderItem);
} catch (Exception e) {
throw new RuntimeException(e);
}
return clonedOrderItem;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((category == null) ? 0 : category.hashCode());
result = prime * result + ((giftWrapOrderItem == null) ? 0 : giftWrapOrderItem.hashCode());
result = prime * result + ((order == null) ? 0 : order.hashCode());
result = prime * result + ((orderItemType == null) ? 0 : orderItemType.hashCode());
result = prime * result + ((personalMessage == null) ? 0 : personalMessage.hashCode());
result = prime * result + ((price == null) ? 0 : price.hashCode());
result = prime * result + quantity;
result = prime * result + ((retailPrice == null) ? 0 : retailPrice.hashCode());
result = prime * result + ((salePrice == null) ? 0 : salePrice.hashCode());
result = prime * result + ((parentOrderItem == null) ? 0 : parentOrderItem.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (!getClass().isAssignableFrom(obj.getClass())) {
return false;
}
OrderItemImpl other = (OrderItemImpl) obj;
if (id != null && other.id != null) {
return id.equals(other.id);
}
if (category == null) {
if (other.category != null) {
return false;
}
} else if (!category.equals(other.category)) {
return false;
}
if (giftWrapOrderItem == null) {
if (other.giftWrapOrderItem != null) {
return false;
}
} else if (!giftWrapOrderItem.equals(other.giftWrapOrderItem)) {
return false;
}
if (order == null) {
if (other.order != null) {
return false;
}
} else if (!order.equals(other.order)) {
return false;
}
if (orderItemType == null) {
if (other.orderItemType != null) {
return false;
}
} else if (!orderItemType.equals(other.orderItemType)) {
return false;
}
if (personalMessage == null) {
if (other.personalMessage != null) {
return false;
}
} else if (!personalMessage.equals(other.personalMessage)) {
return false;
}
if (price == null) {
if (other.price != null) {
return false;
}
} else if (!price.equals(other.price)) {
return false;
}
if (quantity != other.quantity) {
return false;
}
if (retailPrice == null) {
if (other.retailPrice != null) {
return false;
}
} else if (!retailPrice.equals(other.retailPrice)) {
return false;
}
if (salePrice == null) {
if (other.salePrice != null) {
return false;
}
} else if (!salePrice.equals(other.salePrice)) {
return false;
}
if (parentOrderItem == null) {
if (other.parentOrderItem != null) {
return false;
}
} else if (!parentOrderItem.equals(other.parentOrderItem)) {
return false;
}
return true;
}
@Override
public <G extends OrderItemImpl> CreateResponse<G> createOrRetrieveCopyInstance(MultiTenantCopyContext context) throws CloneNotSupportedException {
CreateResponse<G> createResponse = context.createOrRetrieveCopyInstance(this);
if (createResponse.isAlreadyPopulated()) {
return createResponse;
}
OrderItem cloned = createResponse.getClone();
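        // Note: the category is dereferenced unconditionally below, so this copy path
        // assumes the item has a non-null category.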
cloned.setCategory(category.createOrRetrieveCopyInstance(context).getClone());
cloned.setName(name);
cloned.setOrderItemType(getOrderItemType());
cloned.setDiscountingAllowed(discountsAllowed);
cloned.setTaxable(isTaxable());
cloned.setSalePriceOverride(salePriceOverride);
cloned.setSalePrice(getSalePrice());
cloned.setRetailPrice(getRetailPrice());
cloned.setRetailPriceOverride(retailPriceOverride);
cloned.setQuantity(quantity);
cloned.setPersonalMessage(personalMessage);
// dont clone
cloned.setParentOrderItem(parentOrderItem);
for(OrderItem entry : childOrderItems){
OrderItem clonedEntry = ((OrderItemImpl)entry).createOrRetrieveCopyInstance(context).getClone();
            clonedEntry.setParentOrderItem(cloned);
cloned.getChildOrderItems().add(clonedEntry);
}
for(CandidateItemOffer entry : candidateItemOffers){
CandidateItemOffer clonedEntry = entry.createOrRetrieveCopyInstance(context).getClone();
clonedEntry.setOrderItem(cloned);
cloned.getCandidateItemOffers().add(clonedEntry);
}
for(Map.Entry<String,OrderItemAttribute> entry : orderItemAttributeMap.entrySet()){
OrderItemAttribute clonedEntry = entry.getValue().createOrRetrieveCopyInstance(context).getClone();
clonedEntry.setOrderItem(cloned);
cloned.getOrderItemAttributes().put(entry.getKey(),clonedEntry);
}
// dont clone
cloned.setGiftWrapOrderItem(giftWrapOrderItem);
for(OrderItemPriceDetail entry : orderItemPriceDetails){
OrderItemPriceDetail clonedEntry = entry.createOrRetrieveCopyInstance(context).getClone();
clonedEntry.setOrderItem(cloned);
cloned.getOrderItemPriceDetails().add(clonedEntry);
}
return createResponse;
}
public static class Presentation {
public static class Tab {
public static class Name {
public static final String Advanced = "OrderImpl_Advanced";
}
public static class Order {
public static final int Advanced = 2000;
}
}
public static class Group {
public static class Name {
public static final String Description = "OrderItemImpl_Description";
public static final String Pricing = "OrderItemImpl_Pricing";
public static final String Catalog = "OrderItemImpl_Catalog";
}
public static class Order {
public static final int Description = 1000;
public static final int Pricing = 2000;
public static final int Catalog = 3000;
}
}
public static class FieldOrder {
public static final int NAME = 1000;
public static final int PRICE = 2000;
public static final int QUANTITY = 3000;
public static final int RETAILPRICE = 4000;
public static final int SALEPRICE = 5000;
public static final int TOTALTAX = 6000;
public static final int CATEGORY = 1000;
public static final int PRICEDETAILS = 1000;
public static final int ADJUSTMENTS = 2000;
public static final int DISCOUNTALLOWED = 3000;
}
}
@Override
public boolean isSkuActive() {
//abstract method, by default return true
return true;
}
}
|
Remove totalTax from AdminPresentation to avoid confusion
- Address BroadleafCommerce/QA#1139
|
core/broadleaf-framework/src/main/java/org/broadleafcommerce/core/order/domain/OrderItemImpl.java
|
Remove totalTax from AdminPresentation to avoid confusion
|
<ide><path>ore/broadleaf-framework/src/main/java/org/broadleafcommerce/core/order/domain/OrderItemImpl.java
<ide> )
<ide> protected Map<String, OrderItemAttribute> orderItemAttributeMap = new HashMap<String, OrderItemAttribute>();
<ide>
<add> /**
 <add>      * @deprecated use {@link FulfillmentGroupItem#getTaxes()} or {@link FulfillmentGroupItem#getTotalTax()} instead
<add> */
<ide> @Column(name = "TOTAL_TAX")
<del> @AdminPresentation(friendlyName = "OrderItemImpl_Total_Tax", order = Presentation.FieldOrder.TOTALTAX,
<del> group = Presentation.Group.Name.Pricing, groupOrder = Presentation.Group.Order.Pricing,
<del> fieldType = SupportedFieldType.MONEY)
<add> @Deprecated
<ide> protected BigDecimal totalTax;
<ide>
<ide> @OneToMany(mappedBy = "parentOrderItem", targetEntity = OrderItemImpl.class)
|
|
Java
|
agpl-3.0
|
6ab6f82b10388ed1f60ae5734cbbf3689e820c4b
| 0 |
jotomo/AndroidAPS,Heiner1/AndroidAPS,jotomo/AndroidAPS,MilosKozak/AndroidAPS,winni67/AndroidAPS,MilosKozak/AndroidAPS,RoumenGeorgiev/AndroidAPS,Heiner1/AndroidAPS,jotomo/AndroidAPS,Heiner1/AndroidAPS,winni67/AndroidAPS,PoweRGbg/AndroidAPS,PoweRGbg/AndroidAPS,AdrianLxM/AndroidAPS,RoumenGeorgiev/AndroidAPS,PoweRGbg/AndroidAPS,Heiner1/AndroidAPS,MilosKozak/AndroidAPS,AdrianLxM/AndroidAPS
|
package info.nightscout.androidaps.plugins.PumpDanaR;
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.content.ServiceConnection;
import android.os.IBinder;
import android.support.annotation.Nullable;
import com.squareup.otto.Subscribe;
import org.json.JSONException;
import org.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Date;
import java.util.Objects;
import info.nightscout.androidaps.BuildConfig;
import info.nightscout.androidaps.Config;
import info.nightscout.androidaps.Constants;
import info.nightscout.androidaps.MainApp;
import info.nightscout.androidaps.R;
import info.nightscout.androidaps.data.DetailedBolusInfo;
import info.nightscout.androidaps.data.PumpEnactResult;
import info.nightscout.androidaps.db.ExtendedBolus;
import info.nightscout.androidaps.db.TemporaryBasal;
import info.nightscout.androidaps.db.Treatment;
import info.nightscout.androidaps.events.EventAppExit;
import info.nightscout.androidaps.events.EventPreferenceChange;
import info.nightscout.androidaps.interfaces.ConstraintsInterface;
import info.nightscout.androidaps.interfaces.DanaRInterface;
import info.nightscout.androidaps.interfaces.PluginBase;
import info.nightscout.androidaps.interfaces.ProfileInterface;
import info.nightscout.androidaps.interfaces.PumpDescription;
import info.nightscout.androidaps.interfaces.PumpInterface;
import info.nightscout.androidaps.plugins.ConfigBuilder.ConfigBuilderPlugin;
import info.nightscout.androidaps.data.Profile;
import info.nightscout.androidaps.data.ProfileStore;
import info.nightscout.androidaps.plugins.Overview.Notification;
import info.nightscout.androidaps.plugins.Overview.events.EventDismissNotification;
import info.nightscout.androidaps.plugins.Overview.events.EventNewNotification;
import info.nightscout.androidaps.plugins.ProfileNS.NSProfilePlugin;
import info.nightscout.androidaps.plugins.PumpDanaR.services.DanaRExecutionService;
import info.nightscout.utils.DateUtil;
import info.nightscout.utils.DecimalFormatter;
import info.nightscout.utils.Round;
import info.nightscout.utils.SP;
/**
* Created by mike on 05.08.2016.
*/
public class DanaRPlugin implements PluginBase, PumpInterface, DanaRInterface, ConstraintsInterface, ProfileInterface {
private static Logger log = LoggerFactory.getLogger(DanaRPlugin.class);
@Override
public String getFragmentClass() {
return DanaRFragment.class.getName();
}
static boolean fragmentPumpEnabled = false;
static boolean fragmentProfileEnabled = false;
static boolean fragmentPumpVisible = true;
public static DanaRExecutionService sExecutionService;
private static DanaRPump pump = DanaRPump.getInstance();
private static boolean useExtendedBoluses = false;
public static PumpDescription pumpDescription = new PumpDescription();
public DanaRPlugin() {
useExtendedBoluses = SP.getBoolean("danar_useextended", false);
Context context = MainApp.instance().getApplicationContext();
Intent intent = new Intent(context, DanaRExecutionService.class);
context.bindService(intent, mConnection, Context.BIND_AUTO_CREATE);
MainApp.bus().register(this);
pumpDescription.isBolusCapable = true;
pumpDescription.bolusStep = 0.1d;
pumpDescription.isExtendedBolusCapable = true;
pumpDescription.extendedBolusStep = 0.05d;
pumpDescription.extendedBolusDurationStep = 30;
pumpDescription.extendedBolusMaxDuration = 8 * 60;
pumpDescription.isTempBasalCapable = true;
pumpDescription.tempBasalStyle = PumpDescription.PERCENT;
pumpDescription.maxTempPercent = 200;
pumpDescription.tempPercentStep = 10;
pumpDescription.tempDurationStep = 60;
pumpDescription.tempMaxDuration = 24 * 60;
pumpDescription.isSetBasalProfileCapable = true;
pumpDescription.basalStep = 0.01d;
pumpDescription.basalMinimumRate = 0.04d;
pumpDescription.isRefillingCapable = true;
}
ServiceConnection mConnection = new ServiceConnection() {
public void onServiceDisconnected(ComponentName name) {
log.debug("Service is disconnected");
sExecutionService = null;
}
public void onServiceConnected(ComponentName name, IBinder service) {
log.debug("Service is connected");
DanaRExecutionService.LocalBinder mLocalBinder = (DanaRExecutionService.LocalBinder) service;
sExecutionService = mLocalBinder.getServiceInstance();
}
};
@SuppressWarnings("UnusedParameters")
@Subscribe
public void onStatusEvent(final EventAppExit e) {
MainApp.instance().getApplicationContext().unbindService(mConnection);
}
@Subscribe
public void onStatusEvent(final EventPreferenceChange s) {
if (isEnabled(PUMP)) {
boolean previousValue = useExtendedBoluses;
useExtendedBoluses = SP.getBoolean("danar_useextended", false);
if (useExtendedBoluses != previousValue && MainApp.getConfigBuilder().isInHistoryExtendedBoluslInProgress()) {
sExecutionService.extendedBolusStop();
}
}
}
// Plugin base interface
@Override
public int getType() {
return PluginBase.PUMP;
}
@Override
public String getName() {
return MainApp.instance().getString(R.string.danarpump);
}
@Override
public String getNameShort() {
String name = MainApp.sResources.getString(R.string.danarpump_shortname);
if (!name.trim().isEmpty()) {
//only if translation exists
return name;
}
// use long name as fallback
return getName();
}
@Override
public boolean isEnabled(int type) {
if (type == PluginBase.PROFILE) return fragmentProfileEnabled && fragmentPumpEnabled;
else if (type == PluginBase.PUMP) return fragmentPumpEnabled;
else if (type == PluginBase.CONSTRAINTS) return fragmentPumpEnabled;
return false;
}
@Override
public boolean isVisibleInTabs(int type) {
if (type == PluginBase.PROFILE || type == PluginBase.CONSTRAINTS) return false;
else if (type == PluginBase.PUMP) return fragmentPumpVisible;
return false;
}
@Override
public boolean canBeHidden(int type) {
return true;
}
@Override
public boolean hasFragment() {
return true;
}
@Override
public boolean showInList(int type) {
return type == PUMP;
}
@Override
public void setFragmentEnabled(int type, boolean fragmentEnabled) {
if (type == PluginBase.PROFILE)
this.fragmentProfileEnabled = fragmentEnabled;
else if (type == PluginBase.PUMP)
this.fragmentPumpEnabled = fragmentEnabled;
        // if the pump profile source was enabled, we need to switch to another profile source too
if (type == PluginBase.PUMP && !fragmentEnabled && this.fragmentProfileEnabled) {
setFragmentEnabled(PluginBase.PROFILE, false);
setFragmentVisible(PluginBase.PROFILE, false);
MainApp.getSpecificPlugin(NSProfilePlugin.class).setFragmentEnabled(PluginBase.PROFILE, true);
MainApp.getSpecificPlugin(NSProfilePlugin.class).setFragmentVisible(PluginBase.PROFILE, true);
}
}
@Override
public void setFragmentVisible(int type, boolean fragmentVisible) {
if (type == PluginBase.PUMP)
this.fragmentPumpVisible = fragmentVisible;
}
@Override
public boolean isFakingTempsByExtendedBoluses() {
return useExtendedBoluses;
}
@Override
public boolean isInitialized() {
return pump.lastConnection.getTime() > 0 && pump.isExtendedBolusEnabled;
}
@Override
public boolean isSuspended() {
return pump.pumpSuspended;
}
@Override
public boolean isBusy() {
if (sExecutionService == null) return false;
return sExecutionService.isConnected() || sExecutionService.isConnecting();
}
// Pump interface
@Override
public int setNewBasalProfile(Profile profile) {
if (sExecutionService == null) {
log.error("setNewBasalProfile sExecutionService is null");
return FAILED;
}
if (!isInitialized()) {
log.error("setNewBasalProfile not initialized");
Notification notification = new Notification(Notification.PROFILE_NOT_SET_NOT_INITIALIZED, MainApp.sResources.getString(R.string.pumpNotInitializedProfileNotSet), Notification.URGENT);
MainApp.bus().post(new EventNewNotification(notification));
return FAILED;
} else {
MainApp.bus().post(new EventDismissNotification(Notification.PROFILE_NOT_SET_NOT_INITIALIZED));
}
if (!sExecutionService.updateBasalsInPump(profile)) {
Notification notification = new Notification(Notification.FAILED_UDPATE_PROFILE, MainApp.sResources.getString(R.string.failedupdatebasalprofile), Notification.URGENT);
MainApp.bus().post(new EventNewNotification(notification));
return FAILED;
} else {
MainApp.bus().post(new EventDismissNotification(Notification.PROFILE_NOT_SET_NOT_INITIALIZED));
MainApp.bus().post(new EventDismissNotification(Notification.FAILED_UDPATE_PROFILE));
return SUCCESS;
}
}
@Override
public boolean isThisProfileSet(Profile profile) {
if (!isInitialized())
return true; // TODO: not sure what's better. so far TRUE to prevent too many SMS
if (pump.pumpProfiles == null)
return true; // TODO: not sure what's better. so far TRUE to prevent too many SMS
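        // The pump reports either 48 half-hour basal slots or 24 hourly slots; compare each
        // slot with the profile and report a mismatch when the difference exceeds one basal step.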
int basalValues = pump.basal48Enable ? 48 : 24;
int basalIncrement = pump.basal48Enable ? 30 * 60 : 60 * 60;
for (int h = 0; h < basalValues; h++) {
Double pumpValue = pump.pumpProfiles[pump.activeProfile][h];
Double profileValue = profile.getBasal((Integer) (h * basalIncrement));
if (profileValue == null) return true;
if (Math.abs(pumpValue - profileValue) > getPumpDescription().basalStep) {
log.debug("Diff found. Hour: " + h + " Pump: " + pumpValue + " Profile: " + profileValue);
return false;
}
}
return true;
}
@Override
public Date lastDataTime() {
return pump.lastConnection;
}
@Override
public void refreshDataFromPump(String reason) {
if (!isConnected() && !isConnecting()) {
doConnect(reason);
}
}
@Override
public double getBaseBasalRate() {
return pump.currentBasal;
}
@Override
public PumpEnactResult deliverTreatment(DetailedBolusInfo detailedBolusInfo) {
ConfigBuilderPlugin configBuilderPlugin = MainApp.getConfigBuilder();
detailedBolusInfo.insulin = configBuilderPlugin.applyBolusConstraints(detailedBolusInfo.insulin);
if (detailedBolusInfo.insulin > 0 || detailedBolusInfo.carbs > 0) {
Treatment t = new Treatment(detailedBolusInfo.insulinInterface);
boolean connectionOK = false;
if (detailedBolusInfo.insulin > 0 || detailedBolusInfo.carbs > 0) connectionOK = sExecutionService.bolus(detailedBolusInfo.insulin, (int) detailedBolusInfo.carbs, t);
PumpEnactResult result = new PumpEnactResult();
result.success = connectionOK;
result.bolusDelivered = t.insulin;
result.carbsDelivered = detailedBolusInfo.carbs;
result.comment = MainApp.instance().getString(R.string.virtualpump_resultok);
if (Config.logPumpActions)
log.debug("deliverTreatment: OK. Asked: " + detailedBolusInfo.insulin + " Delivered: " + result.bolusDelivered);
detailedBolusInfo.insulin = t.insulin;
detailedBolusInfo.date = System.currentTimeMillis();
MainApp.getConfigBuilder().addToHistoryTreatment(detailedBolusInfo);
return result;
} else {
PumpEnactResult result = new PumpEnactResult();
result.success = false;
result.bolusDelivered = 0d;
result.carbsDelivered = 0d;
result.comment = MainApp.instance().getString(R.string.danar_invalidinput);
log.error("deliverTreatment: Invalid input");
return result;
}
}
@Override
public void stopBolusDelivering() {
if (sExecutionService == null) {
log.error("stopBolusDelivering sExecutionService is null");
return;
}
sExecutionService.bolusStop();
}
// This is called from APS
@Override
public PumpEnactResult setTempBasalAbsolute(Double absoluteRate, Integer durationInMinutes) {
// Recheck pump status if older than 30 min
if (pump.lastConnection.getTime() + 30 * 60 * 1000L < System.currentTimeMillis()) {
doConnect("setTempBasalAbsolute old data");
}
PumpEnactResult result = new PumpEnactResult();
ConfigBuilderPlugin configBuilderPlugin = MainApp.getConfigBuilder();
absoluteRate = configBuilderPlugin.applyBasalConstraints(absoluteRate);
final boolean doTempOff = getBaseBasalRate() - absoluteRate == 0d;
final boolean doLowTemp = absoluteRate < getBaseBasalRate();
final boolean doHighTemp = absoluteRate > getBaseBasalRate() && !useExtendedBoluses;
final boolean doExtendedTemp = absoluteRate > getBaseBasalRate() && useExtendedBoluses;
if (doTempOff) {
// If extended in progress
if (MainApp.getConfigBuilder().isInHistoryExtendedBoluslInProgress() && useExtendedBoluses) {
if (Config.logPumpActions)
log.debug("setTempBasalAbsolute: Stopping extended bolus (doTempOff)");
return cancelExtendedBolus();
}
// If temp in progress
if (MainApp.getConfigBuilder().isInHistoryRealTempBasalInProgress()) {
if (Config.logPumpActions)
log.debug("setTempBasalAbsolute: Stopping temp basal (doTempOff)");
return cancelRealTempBasal();
}
result.success = true;
result.enacted = false;
result.percent = 100;
result.isPercent = true;
result.isTempCancel = true;
if (Config.logPumpActions)
log.debug("setTempBasalAbsolute: doTempOff OK");
return result;
}
if (doLowTemp || doHighTemp) {
Integer percentRate = Double.valueOf(absoluteRate / getBaseBasalRate() * 100).intValue();
if (percentRate < 100) percentRate = Round.ceilTo((double) percentRate, 10d).intValue();
else percentRate = Round.floorTo((double) percentRate, 10d).intValue();
if (percentRate > getPumpDescription().maxTempPercent) {
percentRate = getPumpDescription().maxTempPercent;
}
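            // Example: with a 1.00 U/h base rate, a requested 0.75 U/h becomes 75% and is rounded
            // up to 80%; 1.34 U/h becomes 134% and is rounded down to 130%; anything above
            // maxTempPercent is capped at 200%.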
if (Config.logPumpActions)
log.debug("setTempBasalAbsolute: Calculated percent rate: " + percentRate);
// If extended in progress
if (MainApp.getConfigBuilder().isInHistoryExtendedBoluslInProgress() && useExtendedBoluses) {
if (Config.logPumpActions)
log.debug("setTempBasalAbsolute: Stopping extended bolus (doLowTemp || doHighTemp)");
result = cancelExtendedBolus();
if (!result.success) {
log.error("setTempBasalAbsolute: Failed to stop previous extended bolus (doLowTemp || doHighTemp)");
return result;
}
}
// Check if some temp is already in progress
if (MainApp.getConfigBuilder().isInHistoryRealTempBasalInProgress()) {
// Correct basal already set ?
TemporaryBasal running = MainApp.getConfigBuilder().getRealTempBasalFromHistory(System.currentTimeMillis());
if (Config.logPumpActions)
log.debug("setTempBasalAbsolute: currently running: " + running.toString());
if (running.percentRate == percentRate) {
result.success = true;
result.percent = percentRate;
result.absolute = MainApp.getConfigBuilder().getTempBasalAbsoluteRateHistory();
result.enacted = false;
result.duration = ((Double) MainApp.getConfigBuilder().getTempBasalRemainingMinutesFromHistory()).intValue();
result.isPercent = true;
result.isTempCancel = false;
if (Config.logPumpActions)
log.debug("setTempBasalAbsolute: Correct temp basal already set (doLowTemp || doHighTemp)");
return result;
}
}
// Convert duration from minutes to hours
if (Config.logPumpActions)
log.debug("setTempBasalAbsolute: Setting temp basal " + percentRate + "% for " + durationInMinutes + " mins (doLowTemp || doHighTemp)");
return setTempBasalPercent(percentRate, durationInMinutes);
}
if (doExtendedTemp) {
// Check if some temp is already in progress
if (MainApp.getConfigBuilder().isInHistoryRealTempBasalInProgress()) {
if (Config.logPumpActions)
log.debug("setTempBasalAbsolute: Stopping temp basal (doExtendedTemp)");
result = cancelRealTempBasal();
// Check for proper result
if (!result.success) {
log.error("setTempBasalAbsolute: Failed to stop previous temp basal (doExtendedTemp)");
return result;
}
}
// Calculate # of halfHours from minutes
Integer durationInHalfHours = Math.max(durationInMinutes / 30, 1);
// We keep current basal running so need to sub current basal
Double extendedRateToSet = absoluteRate - getBaseBasalRate();
extendedRateToSet = configBuilderPlugin.applyBasalConstraints(extendedRateToSet);
// needs to be rounded to 0.1
extendedRateToSet = Round.roundTo(extendedRateToSet, pumpDescription.extendedBolusStep * 2); // *2 because of halfhours
// What is current rate of extended bolusing in u/h?
if (Config.logPumpActions) {
log.debug("setTempBasalAbsolute: Extended bolus in progress: " + MainApp.getConfigBuilder().isInHistoryExtendedBoluslInProgress() + " rate: " + pump.extendedBolusAbsoluteRate + "U/h duration remaining: " + pump.extendedBolusRemainingMinutes + "min");
log.debug("setTempBasalAbsolute: Rate to set: " + extendedRateToSet + "U/h");
}
// Compare with extended rate in progress
if (MainApp.getConfigBuilder().isInHistoryExtendedBoluslInProgress() && Math.abs(pump.extendedBolusAbsoluteRate - extendedRateToSet) < getPumpDescription().extendedBolusStep) {
// correct extended already set
result.success = true;
result.absolute = pump.extendedBolusAbsoluteRate;
result.enacted = false;
result.duration = pump.extendedBolusRemainingMinutes;
result.isPercent = false;
result.isTempCancel = false;
if (Config.logPumpActions)
log.debug("setTempBasalAbsolute: Correct extended already set");
return result;
}
            // Now set new extended, no need to stop previous (if running) because it's replaced
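            // The execution service takes a total amount: the U/h rate divided by 2 gives units
            // per half hour, multiplied by the number of half-hour slots.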
Double extendedAmount = extendedRateToSet / 2 * durationInHalfHours;
if (Config.logPumpActions)
log.debug("setTempBasalAbsolute: Setting extended: " + extendedAmount + "U halfhours: " + durationInHalfHours);
result = setExtendedBolus(extendedAmount, durationInMinutes);
if (!result.success) {
log.error("setTempBasalAbsolute: Failed to set extended bolus");
return result;
}
if (Config.logPumpActions)
log.debug("setTempBasalAbsolute: Extended bolus set ok");
result.absolute = result.absolute + getBaseBasalRate();
return result;
}
// We should never end here
log.error("setTempBasalAbsolute: Internal error");
result.success = false;
result.comment = "Internal error";
return result;
}
@Override
public PumpEnactResult setTempBasalPercent(Integer percent, Integer durationInMinutes) {
PumpEnactResult result = new PumpEnactResult();
ConfigBuilderPlugin configBuilderPlugin = MainApp.getConfigBuilder();
percent = configBuilderPlugin.applyBasalConstraints(percent);
if (percent < 0) {
result.isTempCancel = false;
result.enacted = false;
result.success = false;
result.comment = MainApp.instance().getString(R.string.danar_invalidinput);
log.error("setTempBasalPercent: Invalid input");
return result;
}
if (percent > getPumpDescription().maxTempPercent)
percent = getPumpDescription().maxTempPercent;
if (pump.isTempBasalInProgress && pump.tempBasalPercent == percent) {
result.enacted = false;
result.success = true;
result.isTempCancel = false;
result.comment = MainApp.instance().getString(R.string.virtualpump_resultok);
result.duration = pump.tempBasalRemainingMin;
result.percent = pump.tempBasalPercent;
result.absolute = MainApp.getConfigBuilder().getTempBasalAbsoluteRateHistory();
result.isPercent = true;
if (Config.logPumpActions)
log.debug("setTempBasalPercent: Correct value already set");
return result;
}
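        // The pump only accepts whole-hour temp basal durations (tempDurationStep = 60),
        // so minutes are truncated to hours with a minimum of one hour.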
int durationInHours = Math.max(durationInMinutes / 60, 1);
boolean connectionOK = sExecutionService.tempBasal(percent, durationInHours);
if (connectionOK && pump.isTempBasalInProgress && pump.tempBasalPercent == percent) {
result.enacted = true;
result.success = true;
result.comment = MainApp.instance().getString(R.string.virtualpump_resultok);
result.isTempCancel = false;
result.duration = pump.tempBasalRemainingMin;
result.percent = pump.tempBasalPercent;
result.absolute = MainApp.getConfigBuilder().getTempBasalAbsoluteRateHistory();
result.isPercent = true;
if (Config.logPumpActions)
log.debug("setTempBasalPercent: OK");
return result;
}
result.enacted = false;
result.success = false;
result.comment = MainApp.instance().getString(R.string.danar_valuenotsetproperly);
log.error("setTempBasalPercent: Failed to set temp basal");
return result;
}
@Override
public PumpEnactResult setExtendedBolus(Double insulin, Integer durationInMinutes) {
ConfigBuilderPlugin configBuilderPlugin = MainApp.getConfigBuilder();
insulin = configBuilderPlugin.applyBolusConstraints(insulin);
// needs to be rounded
int durationInHalfHours = Math.max(durationInMinutes / 30, 1);
insulin = Round.roundTo(insulin, getPumpDescription().extendedBolusStep * (1 + durationInHalfHours % 1));
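        // Note: durationInHalfHours is an int, so (durationInHalfHours % 1) is always 0 and the
        // effective rounding step is simply extendedBolusStep.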
PumpEnactResult result = new PumpEnactResult();
if (pump.isExtendedInProgress && Math.abs(pump.extendedBolusAmount - insulin) < getPumpDescription().extendedBolusStep) {
result.enacted = false;
result.success = true;
result.comment = MainApp.instance().getString(R.string.virtualpump_resultok);
result.duration = pump.extendedBolusRemainingMinutes;
result.absolute = pump.extendedBolusAbsoluteRate;
result.isPercent = false;
result.isTempCancel = false;
if (Config.logPumpActions)
log.debug("setExtendedBolus: Correct extended bolus already set. Current: " + pump.extendedBolusAmount + " Asked: " + insulin);
return result;
}
boolean connectionOK = sExecutionService.extendedBolus(insulin, durationInHalfHours);
if (connectionOK && pump.isExtendedInProgress && Math.abs(pump.extendedBolusAmount - insulin) < getPumpDescription().extendedBolusStep) {
result.enacted = true;
result.success = true;
result.comment = MainApp.instance().getString(R.string.virtualpump_resultok);
result.isTempCancel = false;
result.duration = pump.extendedBolusRemainingMinutes;
result.absolute = pump.extendedBolusAbsoluteRate;
result.bolusDelivered = pump.extendedBolusAmount;
result.isPercent = false;
if (Config.logPumpActions)
log.debug("setExtendedBolus: OK");
return result;
}
result.enacted = false;
result.success = false;
result.comment = MainApp.instance().getString(R.string.danar_valuenotsetproperly);
log.error("setExtendedBolus: Failed to extended bolus");
return result;
}
@Override
public PumpEnactResult cancelTempBasal(boolean userRequested) {
if (MainApp.getConfigBuilder().isInHistoryRealTempBasalInProgress())
return cancelRealTempBasal();
if (MainApp.getConfigBuilder().isInHistoryExtendedBoluslInProgress() && useExtendedBoluses) {
PumpEnactResult cancelEx = cancelExtendedBolus();
return cancelEx;
}
PumpEnactResult result = new PumpEnactResult();
result.success = true;
result.enacted = false;
result.comment = MainApp.instance().getString(R.string.virtualpump_resultok);
result.isTempCancel = true;
return result;
}
public PumpEnactResult cancelRealTempBasal() {
PumpEnactResult result = new PumpEnactResult();
if (pump.isTempBasalInProgress) {
sExecutionService.tempBasalStop();
result.enacted = true;
result.isTempCancel = true;
}
if (!pump.isTempBasalInProgress) {
result.success = true;
result.isTempCancel = true;
result.comment = MainApp.instance().getString(R.string.virtualpump_resultok);
if (Config.logPumpActions)
log.debug("cancelRealTempBasal: OK");
return result;
} else {
result.success = false;
result.comment = MainApp.instance().getString(R.string.danar_valuenotsetproperly);
result.isTempCancel = true;
log.error("cancelRealTempBasal: Failed to cancel temp basal");
return result;
}
}
@Override
public PumpEnactResult cancelExtendedBolus() {
PumpEnactResult result = new PumpEnactResult();
if (pump.isExtendedInProgress) {
sExecutionService.extendedBolusStop();
result.enacted = true;
result.isTempCancel = true;
}
if (!pump.isExtendedInProgress) {
result.success = true;
result.comment = MainApp.instance().getString(R.string.virtualpump_resultok);
if (Config.logPumpActions)
log.debug("cancelExtendedBolus: OK");
return result;
} else {
result.success = false;
result.comment = MainApp.instance().getString(R.string.danar_valuenotsetproperly);
log.error("cancelExtendedBolus: Failed to cancel extended bolus");
return result;
}
}
public static void doConnect(String from) {
if (sExecutionService != null) sExecutionService.connect(from);
}
public static boolean isConnected() {
return sExecutionService != null && sExecutionService.isConnected();
}
public static boolean isConnecting() {
return sExecutionService != null && sExecutionService.isConnecting();
}
public static void doDisconnect(String from) {
if (sExecutionService != null) sExecutionService.disconnect(from);
}
@Override
public JSONObject getJSONStatus() {
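        // Skip building the status when the last pump connection is more than 5 minutes old.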
if (pump.lastConnection.getTime() + 5 * 60 * 1000L < System.currentTimeMillis()) {
return null;
}
JSONObject pumpjson = new JSONObject();
JSONObject battery = new JSONObject();
JSONObject status = new JSONObject();
JSONObject extended = new JSONObject();
try {
battery.put("percent", pump.batteryRemaining);
status.put("status", pump.pumpSuspended ? "suspended" : "normal");
status.put("timestamp", DateUtil.toISOString(pump.lastConnection));
extended.put("Version", BuildConfig.VERSION_NAME + "-" + BuildConfig.BUILDVERSION);
extended.put("PumpIOB", pump.iob);
if (pump.lastBolusTime.getTime() != 0) {
extended.put("LastBolus", pump.lastBolusTime.toLocaleString());
extended.put("LastBolusAmount", pump.lastBolusAmount);
}
TemporaryBasal tb = MainApp.getConfigBuilder().getRealTempBasalFromHistory(System.currentTimeMillis());
if (tb != null) {
extended.put("TempBasalAbsoluteRate", tb.tempBasalConvertedToAbsolute(System.currentTimeMillis()));
extended.put("TempBasalStart", DateUtil.dateAndTimeString(tb.date));
extended.put("TempBasalRemaining", tb.getPlannedRemainingMinutes());
}
ExtendedBolus eb = MainApp.getConfigBuilder().getExtendedBolusFromHistory(System.currentTimeMillis());
if (eb != null) {
extended.put("ExtendedBolusAbsoluteRate", eb.absoluteRate());
extended.put("ExtendedBolusStart", DateUtil.dateAndTimeString(eb.date));
extended.put("ExtendedBolusRemaining", eb.getPlannedRemainingMinutes());
}
extended.put("BaseBasalRate", getBaseBasalRate());
try {
extended.put("ActiveProfile", MainApp.getConfigBuilder().getProfileName());
} catch (Exception e) {
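                // Ignored so that a failure to resolve the active profile name does not abort
                // building the status JSON.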
}
pumpjson.put("battery", battery);
pumpjson.put("status", status);
pumpjson.put("extended", extended);
pumpjson.put("reservoir", (int) pump.reservoirRemainingUnits);
pumpjson.put("clock", DateUtil.toISOString(new Date()));
} catch (JSONException e) {
e.printStackTrace();
}
return pumpjson;
}
@Override
public String deviceID() {
return pump.serialNumber;
}
@Override
public PumpDescription getPumpDescription() {
return pumpDescription;
}
/**
* DanaR interface
*/
@Override
public boolean loadHistory(byte type) {
return sExecutionService.loadHistory(type);
}
/**
* Constraint interface
*/
@Override
public boolean isLoopEnabled() {
return true;
}
@Override
public boolean isClosedModeEnabled() {
return true;
}
@Override
public boolean isAutosensModeEnabled() {
return true;
}
@Override
public boolean isAMAModeEnabled() {
return true;
}
@SuppressWarnings("PointlessBooleanExpression")
@Override
public Double applyBasalConstraints(Double absoluteRate) {
double origAbsoluteRate = absoluteRate;
if (pump != null) {
if (absoluteRate > pump.maxBasal) {
absoluteRate = pump.maxBasal;
if (Config.logConstraintsChanges && origAbsoluteRate != Constants.basalAbsoluteOnlyForCheckLimit)
log.debug("Limiting rate " + origAbsoluteRate + "U/h by pump constraint to " + absoluteRate + "U/h");
}
}
return absoluteRate;
}
@SuppressWarnings("PointlessBooleanExpression")
@Override
public Integer applyBasalConstraints(Integer percentRate) {
Integer origPercentRate = percentRate;
if (percentRate < 0) percentRate = 0;
if (percentRate > getPumpDescription().maxTempPercent)
percentRate = getPumpDescription().maxTempPercent;
if (!Objects.equals(percentRate, origPercentRate) && Config.logConstraintsChanges && !Objects.equals(origPercentRate, Constants.basalPercentOnlyForCheckLimit))
log.debug("Limiting percent rate " + origPercentRate + "% to " + percentRate + "%");
return percentRate;
}
@SuppressWarnings("PointlessBooleanExpression")
@Override
public Double applyBolusConstraints(Double insulin) {
double origInsulin = insulin;
if (pump != null) {
if (insulin > pump.maxBolus) {
insulin = pump.maxBolus;
if (Config.logConstraintsChanges && origInsulin != Constants.bolusOnlyForCheckLimit)
log.debug("Limiting bolus " + origInsulin + "U by pump constraint to " + insulin + "U");
}
}
return insulin;
}
@Override
public Integer applyCarbsConstraints(Integer carbs) {
return carbs;
}
@Override
public Double applyMaxIOBConstraints(Double maxIob) {
return maxIob;
}
@Nullable
@Override
public ProfileStore getProfile() {
if (pump.lastSettingsRead.getTime() == 0)
return null; // no info now
return pump.createConvertedProfile();
}
@Override
public String getUnits() {
return pump.getUnits();
}
@Override
public String getProfileName() {
return pump.createConvertedProfileName();
}
// Reply for sms communicator
public String shortStatus(boolean veryShort) {
String ret = "";
if (pump.lastConnection.getTime() != 0) {
Long agoMsec = System.currentTimeMillis() - pump.lastConnection.getTime();
int agoMin = (int) (agoMsec / 60d / 1000d);
ret += "LastConn: " + agoMin + " minago\n";
}
if (pump.lastBolusTime.getTime() != 0) {
ret += "LastBolus: " + DecimalFormatter.to2Decimal(pump.lastBolusAmount) + "U @" + android.text.format.DateFormat.format("HH:mm", pump.lastBolusTime) + "\n";
}
if (MainApp.getConfigBuilder().isInHistoryRealTempBasalInProgress()) {
ret += "Temp: " + MainApp.getConfigBuilder().getRealTempBasalFromHistory(System.currentTimeMillis()).toStringFull() + "\n";
}
if (MainApp.getConfigBuilder().isInHistoryExtendedBoluslInProgress()) {
ret += "Extended: " + MainApp.getConfigBuilder().getExtendedBolusFromHistory(System.currentTimeMillis()).toString() + "\n";
}
if (!veryShort) {
ret += "TDD: " + DecimalFormatter.to0Decimal(pump.dailyTotalUnits) + " / " + pump.maxDailyTotalUnits + " U\n";
}
ret += "IOB: " + pump.iob + "U\n";
ret += "Reserv: " + DecimalFormatter.to0Decimal(pump.reservoirRemainingUnits) + "U\n";
ret += "Batt: " + pump.batteryRemaining + "\n";
return ret;
}
// TODO: daily total constraint
}
|
app/src/main/java/info/nightscout/androidaps/plugins/PumpDanaR/DanaRPlugin.java
|
package info.nightscout.androidaps.plugins.PumpDanaR;
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.content.ServiceConnection;
import android.os.IBinder;
import android.support.annotation.Nullable;
import com.squareup.otto.Subscribe;
import org.json.JSONException;
import org.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Date;
import java.util.Objects;
import info.nightscout.androidaps.BuildConfig;
import info.nightscout.androidaps.Config;
import info.nightscout.androidaps.Constants;
import info.nightscout.androidaps.MainApp;
import info.nightscout.androidaps.R;
import info.nightscout.androidaps.data.DetailedBolusInfo;
import info.nightscout.androidaps.data.PumpEnactResult;
import info.nightscout.androidaps.db.ExtendedBolus;
import info.nightscout.androidaps.db.TemporaryBasal;
import info.nightscout.androidaps.db.Treatment;
import info.nightscout.androidaps.events.EventAppExit;
import info.nightscout.androidaps.events.EventPreferenceChange;
import info.nightscout.androidaps.interfaces.ConstraintsInterface;
import info.nightscout.androidaps.interfaces.DanaRInterface;
import info.nightscout.androidaps.interfaces.PluginBase;
import info.nightscout.androidaps.interfaces.ProfileInterface;
import info.nightscout.androidaps.interfaces.PumpDescription;
import info.nightscout.androidaps.interfaces.PumpInterface;
import info.nightscout.androidaps.plugins.ConfigBuilder.ConfigBuilderPlugin;
import info.nightscout.androidaps.data.Profile;
import info.nightscout.androidaps.data.ProfileStore;
import info.nightscout.androidaps.plugins.Overview.Notification;
import info.nightscout.androidaps.plugins.Overview.events.EventDismissNotification;
import info.nightscout.androidaps.plugins.Overview.events.EventNewNotification;
import info.nightscout.androidaps.plugins.ProfileNS.NSProfilePlugin;
import info.nightscout.androidaps.plugins.PumpDanaR.services.DanaRExecutionService;
import info.nightscout.utils.DateUtil;
import info.nightscout.utils.DecimalFormatter;
import info.nightscout.utils.Round;
import info.nightscout.utils.SP;
/**
* Created by mike on 05.08.2016.
*/
public class DanaRPlugin implements PluginBase, PumpInterface, DanaRInterface, ConstraintsInterface, ProfileInterface {
private static Logger log = LoggerFactory.getLogger(DanaRPlugin.class);
@Override
public String getFragmentClass() {
return DanaRFragment.class.getName();
}
static boolean fragmentPumpEnabled = false;
static boolean fragmentProfileEnabled = false;
static boolean fragmentPumpVisible = true;
public static DanaRExecutionService sExecutionService;
private static DanaRPump pump = DanaRPump.getInstance();
private static boolean useExtendedBoluses = false;
public static PumpDescription pumpDescription = new PumpDescription();
public DanaRPlugin() {
useExtendedBoluses = SP.getBoolean("danar_useextended", false);
Context context = MainApp.instance().getApplicationContext();
Intent intent = new Intent(context, DanaRExecutionService.class);
context.bindService(intent, mConnection, Context.BIND_AUTO_CREATE);
MainApp.bus().register(this);
pumpDescription.isBolusCapable = true;
pumpDescription.bolusStep = 0.1d;
pumpDescription.isExtendedBolusCapable = true;
pumpDescription.extendedBolusStep = 0.05d;
pumpDescription.extendedBolusDurationStep = 30;
pumpDescription.extendedBolusMaxDuration = 8 * 60;
pumpDescription.isTempBasalCapable = true;
pumpDescription.tempBasalStyle = PumpDescription.PERCENT;
pumpDescription.maxTempPercent = 200;
pumpDescription.tempPercentStep = 10;
pumpDescription.tempDurationStep = 60;
pumpDescription.tempMaxDuration = 24 * 60;
pumpDescription.isSetBasalProfileCapable = true;
pumpDescription.basalStep = 0.01d;
pumpDescription.basalMinimumRate = 0.04d;
pumpDescription.isRefillingCapable = true;
}
ServiceConnection mConnection = new ServiceConnection() {
public void onServiceDisconnected(ComponentName name) {
log.debug("Service is disconnected");
sExecutionService = null;
}
public void onServiceConnected(ComponentName name, IBinder service) {
log.debug("Service is connected");
DanaRExecutionService.LocalBinder mLocalBinder = (DanaRExecutionService.LocalBinder) service;
sExecutionService = mLocalBinder.getServiceInstance();
}
};
@SuppressWarnings("UnusedParameters")
@Subscribe
public void onStatusEvent(final EventAppExit e) {
MainApp.instance().getApplicationContext().unbindService(mConnection);
}
@Subscribe
public void onStatusEvent(final EventPreferenceChange s) {
if (isEnabled(PUMP)) {
boolean previousValue = useExtendedBoluses;
useExtendedBoluses = SP.getBoolean("danar_useextended", false);
if (useExtendedBoluses != previousValue && MainApp.getConfigBuilder().isInHistoryExtendedBoluslInProgress()) {
sExecutionService.extendedBolusStop();
}
}
}
// Plugin base interface
@Override
public int getType() {
return PluginBase.PUMP;
}
@Override
public String getName() {
return MainApp.instance().getString(R.string.danarpump);
}
@Override
public String getNameShort() {
String name = MainApp.sResources.getString(R.string.danarpump_shortname);
if (!name.trim().isEmpty()) {
//only if translation exists
return name;
}
// use long name as fallback
return getName();
}
@Override
public boolean isEnabled(int type) {
if (type == PluginBase.PROFILE) return fragmentProfileEnabled && fragmentPumpEnabled;
else if (type == PluginBase.PUMP) return fragmentPumpEnabled;
else if (type == PluginBase.CONSTRAINTS) return fragmentPumpEnabled;
return false;
}
@Override
public boolean isVisibleInTabs(int type) {
if (type == PluginBase.PROFILE || type == PluginBase.CONSTRAINTS) return false;
else if (type == PluginBase.PUMP) return fragmentPumpVisible;
return false;
}
@Override
public boolean canBeHidden(int type) {
return true;
}
@Override
public boolean hasFragment() {
return true;
}
@Override
public boolean showInList(int type) {
return type == PUMP;
}
@Override
public void setFragmentEnabled(int type, boolean fragmentEnabled) {
if (type == PluginBase.PROFILE)
this.fragmentProfileEnabled = fragmentEnabled;
else if (type == PluginBase.PUMP)
this.fragmentPumpEnabled = fragmentEnabled;
        // if the pump profile source was enabled, we need to switch to another profile source too
if (type == PluginBase.PUMP && !fragmentEnabled && this.fragmentProfileEnabled) {
setFragmentEnabled(PluginBase.PROFILE, false);
setFragmentVisible(PluginBase.PROFILE, false);
MainApp.getSpecificPlugin(NSProfilePlugin.class).setFragmentEnabled(PluginBase.PROFILE, true);
MainApp.getSpecificPlugin(NSProfilePlugin.class).setFragmentVisible(PluginBase.PROFILE, true);
}
}
@Override
public void setFragmentVisible(int type, boolean fragmentVisible) {
if (type == PluginBase.PUMP)
this.fragmentPumpVisible = fragmentVisible;
}
@Override
public boolean isFakingTempsByExtendedBoluses() {
return useExtendedBoluses;
}
@Override
public boolean isInitialized() {
return pump.lastConnection.getTime() > 0 && pump.isExtendedBolusEnabled;
}
@Override
public boolean isSuspended() {
return pump.pumpSuspended;
}
@Override
public boolean isBusy() {
if (sExecutionService == null) return false;
return sExecutionService.isConnected() || sExecutionService.isConnecting();
}
// Pump interface
@Override
public int setNewBasalProfile(Profile profile) {
if (sExecutionService == null) {
log.error("setNewBasalProfile sExecutionService is null");
return FAILED;
}
if (!isInitialized()) {
log.error("setNewBasalProfile not initialized");
Notification notification = new Notification(Notification.PROFILE_NOT_SET_NOT_INITIALIZED, MainApp.sResources.getString(R.string.pumpNotInitializedProfileNotSet), Notification.URGENT);
MainApp.bus().post(new EventNewNotification(notification));
return FAILED;
} else {
MainApp.bus().post(new EventDismissNotification(Notification.PROFILE_NOT_SET_NOT_INITIALIZED));
}
if (!sExecutionService.updateBasalsInPump(profile)) {
Notification notification = new Notification(Notification.FAILED_UDPATE_PROFILE, MainApp.sResources.getString(R.string.failedupdatebasalprofile), Notification.URGENT);
MainApp.bus().post(new EventNewNotification(notification));
return FAILED;
} else {
MainApp.bus().post(new EventDismissNotification(Notification.PROFILE_NOT_SET_NOT_INITIALIZED));
MainApp.bus().post(new EventDismissNotification(Notification.FAILED_UDPATE_PROFILE));
return SUCCESS;
}
}
@Override
public boolean isThisProfileSet(Profile profile) {
if (!isInitialized())
return true; // TODO: not sure what's better. so far TRUE to prevent too many SMS
if (pump.pumpProfiles == null)
return true; // TODO: not sure what's better. so far TRUE to prevent too many SMS
int basalValues = pump.basal48Enable ? 48 : 24;
int basalIncrement = pump.basal48Enable ? 30 * 60 : 60 * 60;
for (int h = 0; h < basalValues; h++) {
Double pumpValue = pump.pumpProfiles[pump.activeProfile][h];
Double profileValue = profile.getBasal((Integer) (h * basalIncrement));
if (profileValue == null) return true;
if (Math.abs(pumpValue - profileValue) > getPumpDescription().basalStep) {
log.debug("Diff found. Hour: " + h + " Pump: " + pumpValue + " Profile: " + profileValue);
return false;
}
}
return true;
}
@Override
public Date lastDataTime() {
return pump.lastConnection;
}
@Override
public void refreshDataFromPump(String reason) {
if (!isConnected() && !isConnecting()) {
doConnect(reason);
}
}
@Override
public double getBaseBasalRate() {
return pump.currentBasal;
}
@Override
public PumpEnactResult deliverTreatment(DetailedBolusInfo detailedBolusInfo) {
ConfigBuilderPlugin configBuilderPlugin = MainApp.getConfigBuilder();
detailedBolusInfo.insulin = configBuilderPlugin.applyBolusConstraints(detailedBolusInfo.insulin);
if (detailedBolusInfo.insulin > 0 || detailedBolusInfo.carbs > 0) {
Treatment t = new Treatment(detailedBolusInfo.insulinInterface);
boolean connectionOK = false;
if (detailedBolusInfo.insulin > 0 || detailedBolusInfo.carbs > 0) connectionOK = sExecutionService.bolus(detailedBolusInfo.insulin, (int) detailedBolusInfo.carbs, t);
PumpEnactResult result = new PumpEnactResult();
result.success = connectionOK;
result.bolusDelivered = t.insulin;
result.carbsDelivered = detailedBolusInfo.carbs;
result.comment = MainApp.instance().getString(R.string.virtualpump_resultok);
if (Config.logPumpActions)
log.debug("deliverTreatment: OK. Asked: " + detailedBolusInfo.insulin + " Delivered: " + result.bolusDelivered);
detailedBolusInfo.insulin = t.insulin;
detailedBolusInfo.date = System.currentTimeMillis();
MainApp.getConfigBuilder().addToHistoryTreatment(detailedBolusInfo);
return result;
} else {
PumpEnactResult result = new PumpEnactResult();
result.success = false;
result.bolusDelivered = 0d;
result.carbsDelivered = 0d;
result.comment = MainApp.instance().getString(R.string.danar_invalidinput);
log.error("deliverTreatment: Invalid input");
return result;
}
}
@Override
public void stopBolusDelivering() {
if (sExecutionService == null) {
log.error("stopBolusDelivering sExecutionService is null");
return;
}
sExecutionService.bolusStop();
}
// This is called from APS
@Override
public PumpEnactResult setTempBasalAbsolute(Double absoluteRate, Integer durationInMinutes) {
// Recheck pump status if older than 30 min
if (pump.lastConnection.getTime() + 30 * 60 * 1000L < System.currentTimeMillis()) {
doConnect("setTempBasalAbsolute old data");
}
PumpEnactResult result = new PumpEnactResult();
ConfigBuilderPlugin configBuilderPlugin = MainApp.getConfigBuilder();
absoluteRate = configBuilderPlugin.applyBasalConstraints(absoluteRate);
final boolean doTempOff = getBaseBasalRate() - absoluteRate == 0d;
final boolean doLowTemp = absoluteRate < getBaseBasalRate();
final boolean doHighTemp = absoluteRate > getBaseBasalRate() && !useExtendedBoluses;
final boolean doExtendedTemp = absoluteRate > getBaseBasalRate() && useExtendedBoluses;
if (doTempOff) {
// If extended in progress
if (MainApp.getConfigBuilder().isInHistoryExtendedBoluslInProgress() && useExtendedBoluses) {
if (Config.logPumpActions)
log.debug("setTempBasalAbsolute: Stopping extended bolus (doTempOff)");
return cancelExtendedBolus();
}
// If temp in progress
if (MainApp.getConfigBuilder().isInHistoryRealTempBasalInProgress()) {
if (Config.logPumpActions)
log.debug("setTempBasalAbsolute: Stopping temp basal (doTempOff)");
return cancelRealTempBasal();
}
result.success = true;
result.enacted = false;
result.percent = 100;
result.isPercent = true;
result.isTempCancel = true;
if (Config.logPumpActions)
log.debug("setTempBasalAbsolute: doTempOff OK");
return result;
}
if (doLowTemp || doHighTemp) {
Integer percentRate = Double.valueOf(absoluteRate / getBaseBasalRate() * 100).intValue();
if (percentRate < 100) percentRate = Round.ceilTo((double) percentRate, 10d).intValue();
else percentRate = Round.floorTo((double) percentRate, 10d).intValue();
if (percentRate > getPumpDescription().maxTempPercent) {
percentRate = getPumpDescription().maxTempPercent;
}
// If extended in progress
if (MainApp.getConfigBuilder().isInHistoryExtendedBoluslInProgress() && useExtendedBoluses) {
if (Config.logPumpActions)
log.debug("setTempBasalAbsolute: Stopping extended bolus (doLowTemp || doHighTemp)");
result = cancelExtendedBolus();
if (!result.success) {
log.error("setTempBasalAbsolute: Failed to stop previous extended bolus (doLowTemp || doHighTemp)");
return result;
}
}
// Check if some temp is already in progress
if (MainApp.getConfigBuilder().isInHistoryRealTempBasalInProgress()) {
// Correct basal already set ?
if (MainApp.getConfigBuilder().getRealTempBasalFromHistory(System.currentTimeMillis()).percentRate == percentRate) {
result.success = true;
result.percent = percentRate;
result.absolute = MainApp.getConfigBuilder().getTempBasalAbsoluteRateHistory();
result.enacted = false;
result.duration = ((Double) MainApp.getConfigBuilder().getTempBasalRemainingMinutesFromHistory()).intValue();
result.isPercent = true;
result.isTempCancel = false;
if (Config.logPumpActions)
log.debug("setTempBasalAbsolute: Correct temp basal already set (doLowTemp || doHighTemp)");
return result;
}
}
// Duration stays in minutes here; setTempBasalPercent converts it to hours internally
if (Config.logPumpActions)
log.debug("setTempBasalAbsolute: Setting temp basal " + percentRate + "% for " + durationInMinutes + " mins (doLowTemp || doHighTemp)");
return setTempBasalPercent(percentRate, durationInMinutes);
}
if (doExtendedTemp) {
// Check if some temp is already in progress
if (MainApp.getConfigBuilder().isInHistoryRealTempBasalInProgress()) {
if (Config.logPumpActions)
log.debug("setTempBasalAbsolute: Stopping temp basal (doExtendedTemp)");
result = cancelRealTempBasal();
// Check for proper result
if (!result.success) {
log.error("setTempBasalAbsolute: Failed to stop previous temp basal (doExtendedTemp)");
return result;
}
}
// Calculate # of halfHours from minutes
Integer durationInHalfHours = Math.max(durationInMinutes / 30, 1);
// We keep the current basal running, so subtract it from the requested rate
Double extendedRateToSet = absoluteRate - getBaseBasalRate();
extendedRateToSet = configBuilderPlugin.applyBasalConstraints(extendedRateToSet);
// needs to be rounded to 0.1
extendedRateToSet = Round.roundTo(extendedRateToSet, pumpDescription.extendedBolusStep * 2); // *2 because of halfhours
// What is current rate of extended bolusing in u/h?
if (Config.logPumpActions) {
log.debug("setTempBasalAbsolute: Extended bolus in progress: " + MainApp.getConfigBuilder().isInHistoryExtendedBoluslInProgress() + " rate: " + pump.extendedBolusAbsoluteRate + "U/h duration remaining: " + pump.extendedBolusRemainingMinutes + "min");
log.debug("setTempBasalAbsolute: Rate to set: " + extendedRateToSet + "U/h");
}
// Compare with extended rate in progress
if (MainApp.getConfigBuilder().isInHistoryExtendedBoluslInProgress() && Math.abs(pump.extendedBolusAbsoluteRate - extendedRateToSet) < getPumpDescription().extendedBolusStep) {
// correct extended already set
result.success = true;
result.absolute = pump.extendedBolusAbsoluteRate;
result.enacted = false;
result.duration = pump.extendedBolusRemainingMinutes;
result.isPercent = false;
result.isTempCancel = false;
if (Config.logPumpActions)
log.debug("setTempBasalAbsolute: Correct extended already set");
return result;
}
// Now set new extended, no need to stop previous (if running) because it's replaced
Double extendedAmount = extendedRateToSet / 2 * durationInHalfHours;
if (Config.logPumpActions)
log.debug("setTempBasalAbsolute: Setting extended: " + extendedAmount + "U halfhours: " + durationInHalfHours);
result = setExtendedBolus(extendedAmount, durationInMinutes);
if (!result.success) {
log.error("setTempBasalAbsolute: Failed to set extended bolus");
return result;
}
if (Config.logPumpActions)
log.debug("setTempBasalAbsolute: Extended bolus set ok");
result.absolute = result.absolute + getBaseBasalRate();
return result;
}
// We should never end here
log.error("setTempBasalAbsolute: Internal error");
result.success = false;
result.comment = "Internal error";
return result;
}
@Override
public PumpEnactResult setTempBasalPercent(Integer percent, Integer durationInMinutes) {
PumpEnactResult result = new PumpEnactResult();
ConfigBuilderPlugin configBuilderPlugin = MainApp.getConfigBuilder();
percent = configBuilderPlugin.applyBasalConstraints(percent);
if (percent < 0) {
result.isTempCancel = false;
result.enacted = false;
result.success = false;
result.comment = MainApp.instance().getString(R.string.danar_invalidinput);
log.error("setTempBasalPercent: Invalid input");
return result;
}
if (percent > getPumpDescription().maxTempPercent)
percent = getPumpDescription().maxTempPercent;
if (pump.isTempBasalInProgress && pump.tempBasalPercent == percent) {
result.enacted = false;
result.success = true;
result.isTempCancel = false;
result.comment = MainApp.instance().getString(R.string.virtualpump_resultok);
result.duration = pump.tempBasalRemainingMin;
result.percent = pump.tempBasalPercent;
result.absolute = MainApp.getConfigBuilder().getTempBasalAbsoluteRateHistory();
result.isPercent = true;
if (Config.logPumpActions)
log.debug("setTempBasalPercent: Correct value already set");
return result;
}
int durationInHours = Math.max(durationInMinutes / 60, 1);
boolean connectionOK = sExecutionService.tempBasal(percent, durationInHours);
if (connectionOK && pump.isTempBasalInProgress && pump.tempBasalPercent == percent) {
result.enacted = true;
result.success = true;
result.comment = MainApp.instance().getString(R.string.virtualpump_resultok);
result.isTempCancel = false;
result.duration = pump.tempBasalRemainingMin;
result.percent = pump.tempBasalPercent;
result.absolute = MainApp.getConfigBuilder().getTempBasalAbsoluteRateHistory();
result.isPercent = true;
if (Config.logPumpActions)
log.debug("setTempBasalPercent: OK");
return result;
}
result.enacted = false;
result.success = false;
result.comment = MainApp.instance().getString(R.string.danar_valuenotsetproperly);
log.error("setTempBasalPercent: Failed to set temp basal");
return result;
}
@Override
public PumpEnactResult setExtendedBolus(Double insulin, Integer durationInMinutes) {
ConfigBuilderPlugin configBuilderPlugin = MainApp.getConfigBuilder();
insulin = configBuilderPlugin.applyBolusConstraints(insulin);
// needs to be rounded
int durationInHalfHours = Math.max(durationInMinutes / 30, 1);
insulin = Round.roundTo(insulin, getPumpDescription().extendedBolusStep * (1 + durationInHalfHours % 1));
PumpEnactResult result = new PumpEnactResult();
if (pump.isExtendedInProgress && Math.abs(pump.extendedBolusAmount - insulin) < getPumpDescription().extendedBolusStep) {
result.enacted = false;
result.success = true;
result.comment = MainApp.instance().getString(R.string.virtualpump_resultok);
result.duration = pump.extendedBolusRemainingMinutes;
result.absolute = pump.extendedBolusAbsoluteRate;
result.isPercent = false;
result.isTempCancel = false;
if (Config.logPumpActions)
log.debug("setExtendedBolus: Correct extended bolus already set. Current: " + pump.extendedBolusAmount + " Asked: " + insulin);
return result;
}
boolean connectionOK = sExecutionService.extendedBolus(insulin, durationInHalfHours);
if (connectionOK && pump.isExtendedInProgress && Math.abs(pump.extendedBolusAmount - insulin) < getPumpDescription().extendedBolusStep) {
result.enacted = true;
result.success = true;
result.comment = MainApp.instance().getString(R.string.virtualpump_resultok);
result.isTempCancel = false;
result.duration = pump.extendedBolusRemainingMinutes;
result.absolute = pump.extendedBolusAbsoluteRate;
result.bolusDelivered = pump.extendedBolusAmount;
result.isPercent = false;
if (Config.logPumpActions)
log.debug("setExtendedBolus: OK");
return result;
}
result.enacted = false;
result.success = false;
result.comment = MainApp.instance().getString(R.string.danar_valuenotsetproperly);
log.error("setExtendedBolus: Failed to extended bolus");
return result;
}
@Override
public PumpEnactResult cancelTempBasal(boolean userRequested) {
if (MainApp.getConfigBuilder().isInHistoryRealTempBasalInProgress())
return cancelRealTempBasal();
if (MainApp.getConfigBuilder().isInHistoryExtendedBoluslInProgress() && useExtendedBoluses) {
PumpEnactResult cancelEx = cancelExtendedBolus();
return cancelEx;
}
PumpEnactResult result = new PumpEnactResult();
result.success = true;
result.enacted = false;
result.comment = MainApp.instance().getString(R.string.virtualpump_resultok);
result.isTempCancel = true;
return result;
}
public PumpEnactResult cancelRealTempBasal() {
PumpEnactResult result = new PumpEnactResult();
if (pump.isTempBasalInProgress) {
sExecutionService.tempBasalStop();
result.enacted = true;
result.isTempCancel = true;
}
if (!pump.isTempBasalInProgress) {
result.success = true;
result.isTempCancel = true;
result.comment = MainApp.instance().getString(R.string.virtualpump_resultok);
if (Config.logPumpActions)
log.debug("cancelRealTempBasal: OK");
return result;
} else {
result.success = false;
result.comment = MainApp.instance().getString(R.string.danar_valuenotsetproperly);
result.isTempCancel = true;
log.error("cancelRealTempBasal: Failed to cancel temp basal");
return result;
}
}
@Override
public PumpEnactResult cancelExtendedBolus() {
PumpEnactResult result = new PumpEnactResult();
if (pump.isExtendedInProgress) {
sExecutionService.extendedBolusStop();
result.enacted = true;
result.isTempCancel = true;
}
if (!pump.isExtendedInProgress) {
result.success = true;
result.comment = MainApp.instance().getString(R.string.virtualpump_resultok);
if (Config.logPumpActions)
log.debug("cancelExtendedBolus: OK");
return result;
} else {
result.success = false;
result.comment = MainApp.instance().getString(R.string.danar_valuenotsetproperly);
log.error("cancelExtendedBolus: Failed to cancel extended bolus");
return result;
}
}
public static void doConnect(String from) {
if (sExecutionService != null) sExecutionService.connect(from);
}
public static boolean isConnected() {
return sExecutionService != null && sExecutionService.isConnected();
}
public static boolean isConnecting() {
return sExecutionService != null && sExecutionService.isConnecting();
}
public static void doDisconnect(String from) {
if (sExecutionService != null) sExecutionService.disconnect(from);
}
@Override
public JSONObject getJSONStatus() {
if (pump.lastConnection.getTime() + 5 * 60 * 1000L < System.currentTimeMillis()) {
return null;
}
JSONObject pumpjson = new JSONObject();
JSONObject battery = new JSONObject();
JSONObject status = new JSONObject();
JSONObject extended = new JSONObject();
try {
battery.put("percent", pump.batteryRemaining);
status.put("status", pump.pumpSuspended ? "suspended" : "normal");
status.put("timestamp", DateUtil.toISOString(pump.lastConnection));
extended.put("Version", BuildConfig.VERSION_NAME + "-" + BuildConfig.BUILDVERSION);
extended.put("PumpIOB", pump.iob);
if (pump.lastBolusTime.getTime() != 0) {
extended.put("LastBolus", pump.lastBolusTime.toLocaleString());
extended.put("LastBolusAmount", pump.lastBolusAmount);
}
TemporaryBasal tb = MainApp.getConfigBuilder().getRealTempBasalFromHistory(System.currentTimeMillis());
if (tb != null) {
extended.put("TempBasalAbsoluteRate", tb.tempBasalConvertedToAbsolute(System.currentTimeMillis()));
extended.put("TempBasalStart", DateUtil.dateAndTimeString(tb.date));
extended.put("TempBasalRemaining", tb.getPlannedRemainingMinutes());
}
ExtendedBolus eb = MainApp.getConfigBuilder().getExtendedBolusFromHistory(System.currentTimeMillis());
if (eb != null) {
extended.put("ExtendedBolusAbsoluteRate", eb.absoluteRate());
extended.put("ExtendedBolusStart", DateUtil.dateAndTimeString(eb.date));
extended.put("ExtendedBolusRemaining", eb.getPlannedRemainingMinutes());
}
extended.put("BaseBasalRate", getBaseBasalRate());
try {
extended.put("ActiveProfile", MainApp.getConfigBuilder().getProfileName());
} catch (Exception e) {
}
pumpjson.put("battery", battery);
pumpjson.put("status", status);
pumpjson.put("extended", extended);
pumpjson.put("reservoir", (int) pump.reservoirRemainingUnits);
pumpjson.put("clock", DateUtil.toISOString(new Date()));
} catch (JSONException e) {
e.printStackTrace();
}
return pumpjson;
}
@Override
public String deviceID() {
return pump.serialNumber;
}
@Override
public PumpDescription getPumpDescription() {
return pumpDescription;
}
/**
* DanaR interface
*/
@Override
public boolean loadHistory(byte type) {
return sExecutionService.loadHistory(type);
}
/**
* Constraint interface
*/
@Override
public boolean isLoopEnabled() {
return true;
}
@Override
public boolean isClosedModeEnabled() {
return true;
}
@Override
public boolean isAutosensModeEnabled() {
return true;
}
@Override
public boolean isAMAModeEnabled() {
return true;
}
@SuppressWarnings("PointlessBooleanExpression")
@Override
public Double applyBasalConstraints(Double absoluteRate) {
double origAbsoluteRate = absoluteRate;
if (pump != null) {
if (absoluteRate > pump.maxBasal) {
absoluteRate = pump.maxBasal;
if (Config.logConstraintsChanges && origAbsoluteRate != Constants.basalAbsoluteOnlyForCheckLimit)
log.debug("Limiting rate " + origAbsoluteRate + "U/h by pump constraint to " + absoluteRate + "U/h");
}
}
return absoluteRate;
}
@SuppressWarnings("PointlessBooleanExpression")
@Override
public Integer applyBasalConstraints(Integer percentRate) {
Integer origPercentRate = percentRate;
if (percentRate < 0) percentRate = 0;
if (percentRate > getPumpDescription().maxTempPercent)
percentRate = getPumpDescription().maxTempPercent;
if (!Objects.equals(percentRate, origPercentRate) && Config.logConstraintsChanges && !Objects.equals(origPercentRate, Constants.basalPercentOnlyForCheckLimit))
log.debug("Limiting percent rate " + origPercentRate + "% to " + percentRate + "%");
return percentRate;
}
@SuppressWarnings("PointlessBooleanExpression")
@Override
public Double applyBolusConstraints(Double insulin) {
double origInsulin = insulin;
if (pump != null) {
if (insulin > pump.maxBolus) {
insulin = pump.maxBolus;
if (Config.logConstraintsChanges && origInsulin != Constants.bolusOnlyForCheckLimit)
log.debug("Limiting bolus " + origInsulin + "U by pump constraint to " + insulin + "U");
}
}
return insulin;
}
@Override
public Integer applyCarbsConstraints(Integer carbs) {
return carbs;
}
@Override
public Double applyMaxIOBConstraints(Double maxIob) {
return maxIob;
}
@Nullable
@Override
public ProfileStore getProfile() {
if (pump.lastSettingsRead.getTime() == 0)
return null; // no info now
return pump.createConvertedProfile();
}
@Override
public String getUnits() {
return pump.getUnits();
}
@Override
public String getProfileName() {
return pump.createConvertedProfileName();
}
// Reply for sms communicator
public String shortStatus(boolean veryShort) {
String ret = "";
if (pump.lastConnection.getTime() != 0) {
Long agoMsec = System.currentTimeMillis() - pump.lastConnection.getTime();
int agoMin = (int) (agoMsec / 60d / 1000d);
ret += "LastConn: " + agoMin + " minago\n";
}
if (pump.lastBolusTime.getTime() != 0) {
ret += "LastBolus: " + DecimalFormatter.to2Decimal(pump.lastBolusAmount) + "U @" + android.text.format.DateFormat.format("HH:mm", pump.lastBolusTime) + "\n";
}
if (MainApp.getConfigBuilder().isInHistoryRealTempBasalInProgress()) {
ret += "Temp: " + MainApp.getConfigBuilder().getRealTempBasalFromHistory(System.currentTimeMillis()).toStringFull() + "\n";
}
if (MainApp.getConfigBuilder().isInHistoryExtendedBoluslInProgress()) {
ret += "Extended: " + MainApp.getConfigBuilder().getExtendedBolusFromHistory(System.currentTimeMillis()).toString() + "\n";
}
if (!veryShort) {
ret += "TDD: " + DecimalFormatter.to0Decimal(pump.dailyTotalUnits) + " / " + pump.maxDailyTotalUnits + " U\n";
}
ret += "IOB: " + pump.iob + "U\n";
ret += "Reserv: " + DecimalFormatter.to0Decimal(pump.reservoirRemainingUnits) + "U\n";
ret += "Batt: " + pump.batteryRemaining + "\n";
return ret;
}
// TODO: daily total constraint
}
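The high-temp-via-extended-bolus branch above boils down to a small piece of arithmetic: keep the programmed basal running, deliver only the difference as an extended bolus, round that difference to twice the pump's extended-bolus step (the pump programs the amount in half-hour slices), and convert the rate into a total amount. A minimal, self-contained sketch of that calculation follows; the class name, the roundTo helper and the sample numbers are invented for illustration, and the plugin's Round/constraint handling is omitted.
public class ExtendedBolusMathSketch {
    // Simple stand-in for the plugin's Round.roundTo helper (assumption, not the real implementation).
    static double roundTo(double value, double step) {
        return Math.round(value / step) * step;
    }
    public static void main(String[] args) {
        double absoluteRate = 2.3;       // requested rate in U/h (illustrative)
        double baseBasalRate = 0.8;      // currently programmed basal in U/h (illustrative)
        double extendedBolusStep = 0.05; // pump step in U (illustrative)
        int durationInMinutes = 90;
        int halfHours = Math.max(durationInMinutes / 30, 1);
        // The normal basal keeps running, so only the difference is delivered as an extended bolus.
        double extendedRate = absoluteRate - baseBasalRate;
        // Rounded to step * 2 because the amount is programmed per half hour.
        extendedRate = roundTo(extendedRate, extendedBolusStep * 2);
        double extendedAmount = extendedRate / 2 * halfHours;
        System.out.printf("%.2f U/h -> %.2f U over %d half-hours%n", extendedRate, extendedAmount, halfHours);
    }
}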
|
more debug on setting temp basal
|
app/src/main/java/info/nightscout/androidaps/plugins/PumpDanaR/DanaRPlugin.java
|
more debug on setting temp basal
|
<ide><path>pp/src/main/java/info/nightscout/androidaps/plugins/PumpDanaR/DanaRPlugin.java
<ide> if (percentRate > getPumpDescription().maxTempPercent) {
<ide> percentRate = getPumpDescription().maxTempPercent;
<ide> }
<add> if (Config.logPumpActions)
<add> log.debug("setTempBasalAbsolute: Calculated percent rate: " + percentRate);
<add>
<ide> // If extended in progress
<ide> if (MainApp.getConfigBuilder().isInHistoryExtendedBoluslInProgress() && useExtendedBoluses) {
<ide> if (Config.logPumpActions)
<ide> // Check if some temp is already in progress
<ide> if (MainApp.getConfigBuilder().isInHistoryRealTempBasalInProgress()) {
<ide> // Correct basal already set ?
<del> if (MainApp.getConfigBuilder().getRealTempBasalFromHistory(System.currentTimeMillis()).percentRate == percentRate) {
<add> TemporaryBasal running = MainApp.getConfigBuilder().getRealTempBasalFromHistory(System.currentTimeMillis());
<add> if (Config.logPumpActions)
<add> log.debug("setTempBasalAbsolute: currently running: " + running.toString());
<add> if (running.percentRate == percentRate) {
<ide> result.success = true;
<ide> result.percent = percentRate;
<ide> result.absolute = MainApp.getConfigBuilder().getTempBasalAbsoluteRateHistory();
|
|
JavaScript
|
bsd-3-clause
|
dae78fb018c2f953a20173db5a416fa8a506f53d
| 0 |
johan--/cartodb.js,splashblot/cartodb.js,limasol/cartodb.js,Stonelinks/cartodb-lite,greyhwndz/cartodb.js,Stonelinks/cartodb-lite,limasol/cartodb.js,splashblot/cartodb.js,johan--/cartodb.js,CartoDB/cartodb.js,knownasilya/cartodb.js,CartoDB/cartodb.js,greyhwndz/cartodb.js,MappingKat/cartodb.js,johan--/cartodb.js,splashblot/cartodb.js,CartoDB/cartodb.js,splashblot/cartodb.js,greyhwndz/cartodb.js,Stonelinks/cartodb-lite,MappingKat/cartodb.js,Stonelinks/cartodb-lite,MappingKat/cartodb.js,CartoDB/cartodb.js,limasol/cartodb.js,MappingKat/cartodb.js,knownasilya/cartodb.js,johan--/cartodb.js,knownasilya/cartodb.js,greyhwndz/cartodb.js
|
cdb.geo.ui.LegendItemModel = cdb.core.Model.extend({
defaults: {
name: "Untitled",
value: ""
}
});
cdb.geo.ui.LegendItems = Backbone.Collection.extend({
model: cdb.geo.ui.LegendItemModel
});
cdb.geo.ui.LegendItem = cdb.core.View.extend({
tagName: "li",
initialize: function() {
_.bindAll(this, "render");
this.template = this.options.template ? _.template(this.options.template) : cdb.templates.getTemplate('geo/legend');
},
render: function() {
this.$el.html(this.template(this.model.toJSON()));
return this.$el;
}
});
/*
* ChoroplethLegend
*
* */
cdb.geo.ui.ChoroplethLegend = cdb.core.View.extend({
className: "choropleth-legend",
initialize: function() {
this.title = this.options.title;
this.show_title = this.options.show_title;
this.items = this.options.items;
this.template = _.template('<% if (title && show_title) { %><div class="legend-title"><%= title %></div><% } %><ul><li class="min"><%= leftLabel %></li><li class="max"><%= rightLabel %></li><li class="graph count_<%= buckets_count %>"><div class="colors"><%= colors %></div></li></ul>');
this.model = new cdb.core.Model();
},
render: function() {
if (this.items.length >= 2) {
this.leftLabel = this.items.at(0);
this.rightLabel = this.items.at(1);
var leftLabel = this.leftLabel.get("value");
var rightLabel = this.rightLabel.get("value");
var colors = "";
for (var i = 2; i < this.items.length; i++) {
var color = this.items.at(i).get("value");
colors += '<div class="quartile" style="background-color:'+color+'"></div>';
}
this.model.set({ title: this.title, show_title: this.show_title, leftLabel: leftLabel, rightLabel: rightLabel, colors: colors, buckets_count: this.items.length - 2 });
this.$el.html(this.template(this.model.toJSON()));
}
return this;
}
});
/*
* DensityLegend
*
* */
cdb.geo.ui.DensityLegend = cdb.core.View.extend({
className: "density-legend",
initialize: function() {
this.title = this.options.title;
this.show_title = this.options.show_title;
this.items = this.options.items;
this.template = _.template('<% if (title && show_title) { %><div class="legend-title"><%= title %></div><% } %><ul><li class="min"><%= leftLabel %></li><li class="max"><%= rightLabel %></li><li class="graph count_<%= buckets_count %>"><div class="colors"><%= colors %></div></li></ul>');
this.model = new cdb.core.Model();
},
render: function() {
if (this.items.length >= 2) {
this.leftLabel = this.items.at(0);
this.rightLabel = this.items.at(1);
var leftLabel = this.leftLabel.get("value");
var rightLabel = this.rightLabel.get("value");
var colors = "";
for (var i = 2; i < this.items.length; i++) {
var color = this.items.at(i).get("value");
colors += '<div class="quartile" style="background-color:'+color+'"></div>';
}
this.model.set({ title: this.title, show_title: this.show_title, leftLabel: leftLabel, rightLabel: rightLabel, colors: colors, buckets_count: this.items.length - 2 });
this.$el.html(this.template(this.model.toJSON()));
}
return this;
}
});
/*
* IntensityLegend
*
* */
cdb.geo.ui.IntensityLegend = cdb.core.View.extend({
className: "intensity-legend",
initialize: function() {
this.title = this.options.title;
this.show_title = this.options.show_title;
this.items = this.options.items;
this.template = _.template('<% if (title && show_title) { %><div class="legend-title"><%= title %></div><% } %><ul><li class="min"><%= leftLabel %></li><li class="max"><%= rightLabel %></li><li class="graph"></li></ul>');
this.model = new cdb.core.Model();
},
_hexToRGB: function(hex) {
var result = /^#?([a-f\d]{2})([a-f\d]{2})([a-f\d]{2})$/i.exec(hex);
return result ? {
r: parseInt(result[1], 16),
g: parseInt(result[2], 16),
b: parseInt(result[3], 16)
} : null;
},
_rgbToHex: function(r, g, b) {
function componentToHex(c) {
var hex = c.toString(16);
return hex.length == 1 ? "0" + hex : hex;
}
return "#" + componentToHex(r) + componentToHex(g) + componentToHex(b);
},
_calculateMultiply: function(color, steps) {
var colorHex = this._hexToRGB(color);
if (colorHex) {
var r = colorHex.r;
var g = colorHex.g;
var b = colorHex.b;
for (var i = 0; i <= steps; i++) {
r = Math.round(r * colorHex.r/255);
g = Math.round(g * colorHex.g/255);
b = Math.round(b * colorHex.b/255);
}
return this._rgbToHex(r,g,b);
}
return "#ffffff";
},
_renderGraph: function() {
var s = "";
s+= "background: <%= color %>;";
s+= "background: -moz-linear-gradient(left, <%= color %> 0%, <%= right %> 100%);";
s+= "background: -webkit-gradient(linear, left top, right top, color-stop(0%,<%= color %>), color-stop(100%,<%= right %>));";
s+= "background: -webkit-linear-gradient(left, <%= color %> 0%,<%= right %> 100%);";
s+= "background: -o-linear-gradient(left, <%= color %> 0%,<%= right %> 100%);";
s+= "background: -ms-linear-gradient(left, <%= color %> 0%,<%= right %> 100%)";
s+= "background: linear-gradient(to right, <%= color %> 0%,<%= right %> 100%);";
s+= "filter: progid:DXImageTransform.Microsoft.gradient( startColorstr='<%= color %>', endColorstr='<%= right %>',GradientType=1 );";
s+= "background-image: -ms-linear-gradient(left, <%= color %> 0%,<%= right %> 100%)";
var backgroundStyle = _.template(s);
var baseColor = this.color.get("value");
var multipliedColor = this._calculateMultiply(baseColor, 4);
this.$el.find(".graph").attr("style", backgroundStyle({ color: baseColor, right: multipliedColor }));
},
render: function() {
if (this.items.length >= 3) {
this.leftLabel = this.items.at(0);
this.rightLabel = this.items.at(1);
this.color = this.items.at(2);
var leftLabel = this.leftLabel.get("value");
var rightLabel = this.rightLabel.get("value");
this.model.set({ title: this.title, show_title: this.show_title, leftLabel: leftLabel, rightLabel: rightLabel });
this.$el.html(this.template(this.model.toJSON()));
this._renderGraph();
}
return this;
}
});
cdb.geo.ui.DebugLegend = cdb.core.View.extend({
});
/*
* BubbleLegend
*
* */
cdb.geo.ui.BubbleLegend = cdb.core.View.extend({
className: "bubble-legend",
initialize: function() {
this.title = this.options.title;
this.show_title = this.options.show_title;
this.items = this.options.items;
this.template = _.template('<% if (title && show_title) { %><div class="legend-title"><%= title %></div><% } %><ul><li><%= min %></li><li class="graph"><div class="bubbles"></div></li><li><%= max %></li></ul>');
this.model = new cdb.core.Model();
this.add_related_model(this.model);
},
_renderGraph: function() {
if (this.items.length >= 3) {
this.$el.find(".graph").css("background", this.items.at(2).get("value"));
}
},
render: function() {
if (this.items.length >= 3) {
var min = this.items.at(0);
var max = this.items.at(1);
this.model.set({ title: this.title, show_title: this.show_title, min: min.get("value"), max: max.get("value") });
this.$el.html(this.template(this.model.toJSON()));
}
this._renderGraph();
return this;
}
});
/*
* CategoryLegend
*
* */
cdb.geo.ui.CategoryLegend = cdb.core.View.extend({
className: "category-legend",
initialize: function() {
this.title = this.options.title;
this.show_title = this.options.show_title;
this.items = this.options.items;
this.template = _.template('<% if (title && show_title) { %><div class="legend-title"><%= title %></div><% } %><ul></ul>');
this.model = new cdb.core.Model({
type: "custom",
title: this.title,
show_title: this.show_title
});
},
_renderItems: function() {
this.items.each(this._renderItem, this);
},
_renderItem: function(item) {
view = new cdb.geo.ui.LegendItem({
model: item,
template: '<div class="bullet" style="background:<%= value %>"></div><%= name || "null" %>'
});
this.$el.find("ul").append(view.render());
},
render: function() {
this.$el.html(this.template(this.model.toJSON()));
if (this.items.length > 0) {
this._renderItems();
} else {
this.$el.html('<div class="warning">The category legend is empty</div>');
}
return this;
}
});
/*
* ColorLegend
*
* */
cdb.geo.ui.ColorLegend = cdb.core.View.extend({
className: "color-legend",
initialize: function() {
this.title = this.options.title;
this.show_title = this.options.show_title;
this.items = this.options.items;
this.template = _.template('<% if (title && show_title) { %><div class="legend-title"><%= title %></div><% } %><ul></ul>');
this.model = new cdb.core.Model({
type: "custom",
title: this.title,
show_title: this.show_title
});
},
_renderItems: function() {
this.items.each(this._renderItem, this);
},
_renderItem: function(item) {
view = new cdb.geo.ui.LegendItem({
model: item,
template: '<div class="bullet" style="background:<%= value %>"></div><%= name || ((name === false) ? "false": "null") %>'
});
this.$el.find("ul").append(view.render());
},
render: function() {
this.$el.html(this.template(this.model.toJSON()));
if (this.items.length > 0) {
this._renderItems();
} else {
this.$el.html('<div class="warning">The color legend is empty</div>');
}
return this;
}
});
/*
* CustomLegend
*
* */
cdb.geo.ui.CustomLegend = cdb.core.View.extend({
className: "custom-legend",
initialize: function() {
this.title = this.options.title;
this.show_title = this.options.show_title;
this.items = this.options.items;
this.template = _.template('<% if (title && show_title) { %><div class="legend-title"><%= title %></div><% } %><ul></ul>');
this.model = new cdb.core.Model({
type: "custom",
title: this.title,
show_title: this.show_title
});
},
_renderItems: function() {
this.items.each(this._renderItem, this);
},
_renderItem: function(item) {
view = new cdb.geo.ui.LegendItem({
model: item,
template: '<div class="bullet" style="background:<%= value %>"></div><%= name || "null" %>'
});
this.$el.find("ul").append(view.render());
},
render: function() {
this.$el.html(this.template(this.model.toJSON()));
if (this.items.length > 0) {
this._renderItems();
} else {
this.$el.html('<div class="warning">The legend is empty</div>');
}
return this;
}
});
/*
* var legendA = new cdb.geo.ui.Legend({
* type: "custom",
* data: [
* { name: "Category 1", value: "#FFC926" },
* { name: "Category 2", value: "#76EC00" },
* { name: "Category 3", value: "#00BAF8" },
* { name: "Category 4", value: "#D04CFD" }
* ]
* });
*
* var legendB = new cdb.geo.ui.Legend({
* type: "bubble",
* data: [
* { name: "21,585", value: "#FFC926" },
* { name: "91,585", value: "#D04CFD" }
* ]
* });
*
* var stackedLegend = new cdb.geo.ui.StackedLegend({
* legends: [legendA, legendB, …]
* });
*
* $("#overlay").append(stackedLegend.render().$el);
*
*
* */
cdb.geo.ui.StackedLegend = cdb.core.View.extend({
events: {
"dragstart": "_stopPropagation",
"mousedown": "_stopPropagation",
"touchstart": "_stopPropagation",
"MSPointerDown": "_stopPropagation",
"dblclick": "_stopPropagation",
"mousewheel": "_stopPropagation",
"DOMMouseScroll": "_stopPropagation",
"dbclick": "_stopPropagation",
"click": "_stopPropagation"
},
className: "cartodb-legend-stack",
initialize: function() {
_.each(this.options.legends, this._setupBinding, this);
},
_stopPropagation: function(ev) {
ev.stopPropagation();
},
getLayerByIndex: function(index) {
if (!this._layerByIndex) {
this._layerByIndex = {};
var legends = this.options.legends;
for (var i = 0; i < legends.length; ++i) {
var legend = legends[i];
this._layerByIndex[legend.options.index] = legend;
}
}
return this._layerByIndex[index];
},
_setupBinding: function(legend) {
legend.model.bind("change:type", this._checkVisibility, this);
this.add_related_model(legend.model);
},
_checkVisibility: function() {
var visible = _.some(this.options.legends, function(legend) {
return legend.model.get("type")
}, this);
if (visible) {
this.show();
} else {
this.hide();
}
_.each(this.options.legends, function(item) {
if (item.model.get("type")) {
item.show();
} else {
item.hide();
}
}, this);
},
_renderItems: function() {
_.each(this.options.legends, function(item) {
this.$el.append(item.render().$el);
}, this);
},
show: function() {
this.$el.show();
},
hide: function() {
this.$el.hide();
},
render: function() {
this._renderItems();
this._checkVisibility();
return this;
}
});
cdb.geo.ui.LegendModel = cdb.core.Model.extend({
defaults: {
type: null,
show_title: false,
title: ""
},
initialize: function() {
this.items = new cdb.geo.ui.LegendItems(this.get("items"));
this.items.bind("add remove reset change", function() {
this.set("items", this.items.toJSON());
}, this);
this.bind("change:items", this._onUpdateItems, this);
this.bind("change:title change:show_title", this._onUpdateTitle, this);
},
_onUpdateTitle: function() {
this.title = this.get("title");
this.show_title = this.get("show_title");
},
_onUpdateItems: function() {
var items = this.get("items");
this.items.reset(items);
}
});
/*
* Legend
*
*/
cdb.geo.ui.Legend = cdb.core.View.extend({
className: "cartodb-legend",
events: {
"dragstart": "_stopPropagation",
"mousedown": "_stopPropagation",
"touchstart": "_stopPropagation",
"MSPointerDown": "_stopPropagation",
"dblclick": "_stopPropagation",
"mousewheel": "_stopPropagation",
"DOMMouseScroll": "_stopPropagation",
"dbclick": "_stopPropagation",
"click": "_stopPropagation"
},
initialize: function() {
_.bindAll(this, "render", "show", "hide");
_.defaults(this.options, this.default_options);
this.map = this.options.map;
this._setupModel();
this._setupItems();
this._updateLegendType();
},
_stopPropagation: function(ev) {
ev.stopPropagation();
},
_setupModel: function() {
if (!this.model) {
this.model = new cdb.geo.ui.LegendModel({
type: this.options.type || cdb.geo.ui.LegendModel.prototype.defaults.type,
title: this.options.title || cdb.geo.ui.LegendModel.prototype.defaults.title,
show_title: this.options.show_title || cdb.geo.ui.LegendModel.prototype.defaults.show_title
});
}
this.add_related_model(this.model);
this.model.bind("change:type change:items change:title change:show_title", this._updateLegendType, this);
},
_updateLegendType: function() {
var type = this.model.get("type");
this.legend_name = this._capitalize(type) + "Legend";
if (type == 'none' || type == null) {
this.legend_name = null;
this.model.set({ type: null}, { silent: true });
} else if (!cdb.geo.ui[this.legend_name]) {
// set the previous type
this.legend_name = null;
this.model.set({ type: this.model.previous("type") }, { silent: true });
return;
}
this._refresh();
},
_refresh: function() {
var self = this;
if (this.view) this.view.clean();
var type = this.model.get("type");
var title = this.model.get("title");
var show_title = this.model.get("show_title");
if (type) {
this.view = new cdb.geo.ui[this.legend_name] ({
title: title,
show_title: show_title,
items: self.items
});
// Set the type as the element class for styling
this.$el.removeClass();
this.$el.addClass(this.className + " " + this.model.get("type"));
this.show();
} else {
this.hide();
this.$el.removeClass();
this.$el.addClass(this.className + " none");
}
this.render();
},
_setupItems: function() {
var self = this;
this.items = this.model.items;
if (this.options.data) {
this.items.reset(this.options.data);
}
this.items.bind("add remove change:value change:name", this.render, this);
},
show: function(callback) {
if (this.model.get("type")) this.$el.show();
},
hide: function(callback) {
if (this.model.get("type")) this.$el.hide();
},
_capitalize: function(string) {
if (string) {
return string.charAt(0).toUpperCase() + string.slice(1);
}
},
render: function() {
if (this.view) {
this.$el.append(this.view.render().$el);
}
return this;
}
});
|
src/geo/ui/legend.js
|
cdb.geo.ui.LegendItemModel = cdb.core.Model.extend({
defaults: {
name: "Untitled",
value: ""
}
});
cdb.geo.ui.LegendItems = Backbone.Collection.extend({
model: cdb.geo.ui.LegendItemModel
});
cdb.geo.ui.LegendItem = cdb.core.View.extend({
tagName: "li",
initialize: function() {
_.bindAll(this, "render");
this.template = this.options.template ? _.template(this.options.template) : cdb.templates.getTemplate('geo/legend');
},
render: function() {
this.$el.html(this.template(this.model.toJSON()));
return this.$el;
}
});
/*
* ChoroplethLegend
*
* */
cdb.geo.ui.ChoroplethLegend = cdb.core.View.extend({
className: "choropleth-legend",
initialize: function() {
this.title = this.options.title;
this.show_title = this.options.show_title;
this.items = this.options.items;
this.template = _.template('<% if (title && show_title) { %><div class="legend-title"><%= title %></div><% } %><ul><li class="min"><%= leftLabel %></li><li class="max"><%= rightLabel %></li><li class="graph count_<%= buckets_count %>"><div class="colors"><%= colors %></div></li></ul>');
this.model = new cdb.core.Model();
},
render: function() {
if (this.items.length >= 2) {
this.leftLabel = this.items.at(0);
this.rightLabel = this.items.at(1);
var leftLabel = this.leftLabel.get("value");
var rightLabel = this.rightLabel.get("value");
var colors = "";
for (var i = 2; i < this.items.length; i++) {
var color = this.items.at(i).get("value");
colors += '<div class="quartile" style="background-color:'+color+'"></div>';
}
this.model.set({ title: this.title, show_title: this.show_title, leftLabel: leftLabel, rightLabel: rightLabel, colors: colors, buckets_count: this.items.length - 2 });
this.$el.html(this.template(this.model.toJSON()));
}
return this;
}
});
/*
* DensityLegend
*
* */
cdb.geo.ui.DensityLegend = cdb.core.View.extend({
className: "density-legend",
initialize: function() {
this.title = this.options.title;
this.show_title = this.options.show_title;
this.items = this.options.items;
this.template = _.template('<% if (title && show_title) { %><div class="legend-title"><%= title %></div><% } %><ul><li class="min"><%= leftLabel %></li><li class="max"><%= rightLabel %></li><li class="graph count_<%= buckets_count %>"><div class="colors"><%= colors %></div></li></ul>');
this.model = new cdb.core.Model();
},
render: function() {
if (this.items.length >= 2) {
this.leftLabel = this.items.at(0);
this.rightLabel = this.items.at(1);
var leftLabel = this.leftLabel.get("value");
var rightLabel = this.rightLabel.get("value");
var colors = "";
for (var i = 2; i < this.items.length; i++) {
var color = this.items.at(i).get("value");
colors += '<div class="quartile" style="background-color:'+color+'"></div>';
}
this.model.set({ title: this.title, show_title: this.show_title, leftLabel: leftLabel, rightLabel: rightLabel, colors: colors, buckets_count: this.items.length - 2 });
this.$el.html(this.template(this.model.toJSON()));
}
return this;
}
});
/*
* IntensityLegend
*
* */
cdb.geo.ui.IntensityLegend = cdb.core.View.extend({
className: "intensity-legend",
initialize: function() {
this.title = this.options.title;
this.show_title = this.options.show_title;
this.items = this.options.items;
this.template = _.template('<% if (title && show_title) { %><div class="legend-title"><%= title %></div><% } %><ul><li class="min"><%= leftLabel %></li><li class="max"><%= rightLabel %></li><li class="graph"></li></ul>');
this.model = new cdb.core.Model();
},
_hexToRGB: function(hex) {
var result = /^#?([a-f\d]{2})([a-f\d]{2})([a-f\d]{2})$/i.exec(hex);
return result ? {
r: parseInt(result[1], 16),
g: parseInt(result[2], 16),
b: parseInt(result[3], 16)
} : null;
},
_rgbToHex: function(r, g, b) {
function componentToHex(c) {
var hex = c.toString(16);
return hex.length == 1 ? "0" + hex : hex;
}
return "#" + componentToHex(r) + componentToHex(g) + componentToHex(b);
},
_calculateMultiply: function(color, steps) {
var colorHex = this._hexToRGB(color);
if (colorHex) {
var r = colorHex.r;
var g = colorHex.g;
var b = colorHex.b;
for (var i = 0; i <= steps; i++) {
r = Math.round(r * colorHex.r/255);
g = Math.round(g * colorHex.g/255);
b = Math.round(b * colorHex.b/255);
}
return this._rgbToHex(r,g,b);
}
return "#ffffff";
},
_renderGraph: function() {
var s = "";
s+= "background: <%= color %>;";
s+= "background: -moz-linear-gradient(left, <%= color %> 0%, <%= right %> 100%);";
s+= "background: -webkit-gradient(linear, left top, right top, color-stop(0%,<%= color %>), color-stop(100%,<%= right %>));";
s+= "background: -webkit-linear-gradient(left, <%= color %> 0%,<%= right %> 100%);";
s+= "background: -o-linear-gradient(left, <%= color %> 0%,<%= right %> 100%);";
s+= "background: -ms-linear-gradient(left, <%= color %> 0%,<%= right %> 100%)";
s+= "background: linear-gradient(to right, <%= color %> 0%,<%= right %> 100%);";
s+= "filter: progid:DXImageTransform.Microsoft.gradient( startColorstr='<%= color %>', endColorstr='<%= right %>',GradientType=1 );";
s+= "background-image: -ms-linear-gradient(left, <%= color %> 0%,<%= right %> 100%)";
var backgroundStyle = _.template(s);
var baseColor = this.color.get("value");
var multipliedColor = this._calculateMultiply(baseColor, 4);
this.$el.find(".graph").attr("style", backgroundStyle({ color: baseColor, right: multipliedColor }));
},
render: function() {
if (this.items.length >= 3) {
this.leftLabel = this.items.at(0);
this.rightLabel = this.items.at(1);
this.color = this.items.at(2);
var leftLabel = this.leftLabel.get("value");
var rightLabel = this.rightLabel.get("value");
this.model.set({ title: this.title, show_title: this.show_title, leftLabel: leftLabel, rightLabel: rightLabel });
this.$el.html(this.template(this.model.toJSON()));
this._renderGraph();
}
return this;
}
});
cdb.geo.ui.DebugLegend = cdb.core.View.extend({
});
/*
* BubbleLegend
*
* */
cdb.geo.ui.BubbleLegend = cdb.core.View.extend({
className: "bubble-legend",
initialize: function() {
this.title = this.options.title;
this.show_title = this.options.show_title;
this.items = this.options.items;
this.template = _.template('<% if (title && show_title) { %><div class="legend-title"><%= title %></div><% } %><ul><li><%= min %></li><li class="graph"><div class="bubbles"></div></li><li><%= max %></li></ul>');
this.model = new cdb.core.Model();
this.add_related_model(this.model);
},
_renderGraph: function() {
if (this.items.length >= 3) {
this.$el.find(".graph").css("background", this.items.at(2).get("value"));
}
},
render: function() {
if (this.items.length >= 3) {
var min = this.items.at(0);
var max = this.items.at(1);
this.model.set({ title: this.title, show_title: this.show_title, min: min.get("value"), max: max.get("value") });
this.$el.html(this.template(this.model.toJSON()));
}
this._renderGraph();
return this;
}
});
/*
* CategoryLegend
*
* */
cdb.geo.ui.CategoryLegend = cdb.core.View.extend({
className: "category-legend",
initialize: function() {
this.title = this.options.title;
this.show_title = this.options.show_title;
this.items = this.options.items;
this.template = _.template('<% if (title && show_title) { %><div class="legend-title"><%= title %></div><% } %><ul></ul>');
this.model = new cdb.core.Model({
type: "custom",
title: this.title,
show_title: this.show_title
});
},
_renderItems: function() {
this.items.each(this._renderItem, this);
},
_renderItem: function(item) {
view = new cdb.geo.ui.LegendItem({
model: item,
template: '<div class="bullet" style="background:<%= value %>"></div><%= name || "null" %>'
});
this.$el.find("ul").append(view.render());
},
render: function() {
this.$el.html(this.template(this.model.toJSON()));
if (this.items.length > 0) {
this._renderItems();
} else {
this.$el.html('<div class="warning">The category legend is empty</div>');
}
return this;
}
});
/*
* ColorLegend
*
* */
cdb.geo.ui.ColorLegend = cdb.core.View.extend({
className: "color-legend",
initialize: function() {
this.title = this.options.title;
this.show_title = this.options.show_title;
this.items = this.options.items;
this.template = _.template('<% if (title && show_title) { %><div class="legend-title"><%= title %></div><% } %><ul></ul>');
this.model = new cdb.core.Model({
type: "custom",
title: this.title,
show_title: this.show_title
});
},
_renderItems: function() {
this.items.each(this._renderItem, this);
},
_renderItem: function(item) {
view = new cdb.geo.ui.LegendItem({
model: item,
template: '<div class="bullet" style="background:<%= value %>"></div><%= name || ((name === false) ? "false": "null") %>'
});
this.$el.find("ul").append(view.render());
},
render: function() {
this.$el.html(this.template(this.model.toJSON()));
if (this.items.length > 0) {
this._renderItems();
} else {
this.$el.html('<div class="warning">The color legend is empty</div>');
}
return this;
}
});
/*
* CustomLegend
*
* */
cdb.geo.ui.CustomLegend = cdb.core.View.extend({
className: "custom-legend",
initialize: function() {
this.title = this.options.title;
this.show_title = this.options.show_title;
this.items = this.options.items;
this.template = _.template('<% if (title && show_title) { %><div class="legend-title"><%= title %></div><% } %><ul></ul>');
this.model = new cdb.core.Model({
type: "custom",
title: this.title,
show_title: this.show_title
});
},
_renderItems: function() {
this.items.each(this._renderItem, this);
},
_renderItem: function(item) {
view = new cdb.geo.ui.LegendItem({
model: item,
template: '<div class="bullet" style="background:<%= value %>"></div><%= name || "null" %>'
});
this.$el.find("ul").append(view.render());
},
render: function() {
this.$el.html(this.template(this.model.toJSON()));
if (this.items.length > 0) {
this._renderItems();
} else {
this.$el.html('<div class="warning">The legend is empty</div>');
}
return this;
}
});
/*
* var legendA = new cdb.geo.ui.Legend({
* type: "custom",
* data: [
* { name: "Category 1", value: "#FFC926" },
* { name: "Category 2", value: "#76EC00" },
* { name: "Category 3", value: "#00BAF8" },
* { name: "Category 4", value: "#D04CFD" }
* ]
* });
*
* var legendB = new cdb.geo.ui.Legend({
* type: "bubble",
* data: [
* { name: "21,585", value: "#FFC926" },
* { name: "91,585", value: "#D04CFD" }
* ]
* });
*
* var stackedLegend = new cdb.geo.ui.StackedLegend({
* legends: [legendA, legendB, …]
* });
*
* $("#overlay").append(stackedLegend.render().$el);
*
*
* */
cdb.geo.ui.StackedLegend = cdb.core.View.extend({
className: "cartodb-legend-stack",
initialize: function() {
_.each(this.options.legends, this._setupBinding, this);
},
getLayerByIndex: function(index) {
if (!this._layerByIndex) {
this._layerByIndex = {};
var legends = this.options.legends;
for (var i = 0; i < legends.length; ++i) {
var legend = legends[i];
this._layerByIndex[legend.options.index] = legend;
}
}
return this._layerByIndex[index];
},
_setupBinding: function(legend) {
legend.model.bind("change:type", this._checkVisibility, this);
this.add_related_model(legend.model);
},
_checkVisibility: function() {
var visible = _.some(this.options.legends, function(legend) {
return legend.model.get("type")
}, this);
if (visible) {
this.show();
} else {
this.hide();
}
_.each(this.options.legends, function(item) {
if (item.model.get("type")) {
item.show();
} else {
item.hide();
}
}, this);
},
_renderItems: function() {
_.each(this.options.legends, function(item) {
this.$el.append(item.render().$el);
}, this);
},
show: function() {
this.$el.show();
},
hide: function() {
this.$el.hide();
},
render: function() {
this._renderItems();
this._checkVisibility();
return this;
}
});
cdb.geo.ui.LegendModel = cdb.core.Model.extend({
defaults: {
type: null,
show_title: false,
title: ""
},
initialize: function() {
this.items = new cdb.geo.ui.LegendItems(this.get("items"));
this.items.bind("add remove reset change", function() {
this.set("items", this.items.toJSON());
}, this);
this.bind("change:items", this._onUpdateItems, this);
this.bind("change:title change:show_title", this._onUpdateTitle, this);
},
_onUpdateTitle: function() {
this.title = this.get("title");
this.show_title = this.get("show_title");
},
_onUpdateItems: function() {
var items = this.get("items");
this.items.reset(items);
}
});
/*
* Legend
*
*/
cdb.geo.ui.Legend = cdb.core.View.extend({
className: "cartodb-legend",
initialize: function() {
_.bindAll(this, "render", "show", "hide");
_.defaults(this.options, this.default_options);
this.map = this.options.map;
this._setupModel();
this._setupItems();
this._updateLegendType();
},
_setupModel: function() {
if (!this.model) {
this.model = new cdb.geo.ui.LegendModel({
type: this.options.type || cdb.geo.ui.LegendModel.prototype.defaults.type,
title: this.options.title || cdb.geo.ui.LegendModel.prototype.defaults.title,
show_title: this.options.show_title || cdb.geo.ui.LegendModel.prototype.defaults.show_title
});
}
this.add_related_model(this.model);
this.model.bind("change:type change:items change:title change:show_title", this._updateLegendType, this);
},
_updateLegendType: function() {
var type = this.model.get("type");
this.legend_name = this._capitalize(type) + "Legend";
if (type == 'none' || type == null) {
this.legend_name = null;
this.model.set({ type: null}, { silent: true });
} else if (!cdb.geo.ui[this.legend_name]) {
// set the previous type
this.legend_name = null;
this.model.set({ type: this.model.previous("type") }, { silent: true });
return;
}
this._refresh();
},
_refresh: function() {
var self = this;
if (this.view) this.view.clean();
var type = this.model.get("type");
var title = this.model.get("title");
var show_title = this.model.get("show_title");
if (type) {
this.view = new cdb.geo.ui[this.legend_name] ({
title: title,
show_title: show_title,
items: self.items
});
// Set the type as the element class for styling
this.$el.removeClass();
this.$el.addClass(this.className + " " + this.model.get("type"));
this.show();
} else {
this.hide();
this.$el.removeClass();
this.$el.addClass(this.className + " none");
}
this.render();
},
_setupItems: function() {
var self = this;
this.items = this.model.items;
if (this.options.data) {
this.items.reset(this.options.data);
}
this.items.bind("add remove change:value change:name", this.render, this);
},
show: function(callback) {
if (this.model.get("type")) this.$el.show();
},
hide: function(callback) {
if (this.model.get("type")) this.$el.hide();
},
_capitalize: function(string) {
if (string) {
return string.charAt(0).toUpperCase() + string.slice(1);
}
},
render: function() {
if (this.view) {
this.$el.append(this.view.render().$el);
}
return this;
}
});
|
CDB-481 #close Stops propagation of clicks in legends
|
src/geo/ui/legend.js
|
CDB-481 #close Stops propagation of clicks in legends
|
<ide><path>rc/geo/ui/legend.js
<ide>
<ide> cdb.geo.ui.StackedLegend = cdb.core.View.extend({
<ide>
<add> events: {
<add>
<add> "dragstart": "_stopPropagation",
<add> "mousedown": "_stopPropagation",
<add> "touchstart": "_stopPropagation",
<add> "MSPointerDown": "_stopPropagation",
<add> "dblclick": "_stopPropagation",
<add> "mousewheel": "_stopPropagation",
<add> "DOMMouseScroll": "_stopPropagation",
<add> "dbclick": "_stopPropagation",
<add> "click": "_stopPropagation"
<add>
<add> },
<add>
<ide> className: "cartodb-legend-stack",
<ide>
<ide> initialize: function() {
<ide>
<ide> _.each(this.options.legends, this._setupBinding, this);
<add>
<add> },
<add>
<add> _stopPropagation: function(ev) {
<add>
<add> ev.stopPropagation();
<ide>
<ide> },
<ide>
<ide>
<ide> className: "cartodb-legend",
<ide>
<add> events: {
<add>
<add> "dragstart": "_stopPropagation",
<add> "mousedown": "_stopPropagation",
<add> "touchstart": "_stopPropagation",
<add> "MSPointerDown": "_stopPropagation",
<add> "dblclick": "_stopPropagation",
<add> "mousewheel": "_stopPropagation",
<add> "DOMMouseScroll": "_stopPropagation",
<add> "dbclick": "_stopPropagation",
<add> "click": "_stopPropagation"
<add>
<add> },
<add>
<ide> initialize: function() {
<ide>
<ide> _.bindAll(this, "render", "show", "hide");
<ide> this._setupItems();
<ide>
<ide> this._updateLegendType();
<add>
<add> },
<add>
<add> _stopPropagation: function(ev) {
<add>
<add> ev.stopPropagation();
<ide>
<ide> },
<ide>
|
|
Java
|
mit
|
ad871e835b2b61de050c872da81b3091e2c8063a
| 0 |
Col-E/Recaf,Col-E/Recaf
|
package me.coley.recaf.ui;
import java.util.List;
import java.util.stream.Collectors;
import com.sun.tools.attach.VirtualMachine;
import com.sun.tools.attach.VirtualMachineDescriptor;
import javafx.scene.control.Button;
import javafx.scene.control.ListCell;
import javafx.scene.control.ListView;
import javafx.scene.layout.BorderPane;
import javafx.stage.Stage;
import me.coley.recaf.Logging;
import me.coley.recaf.Recaf;
import me.coley.recaf.ui.component.ActionButton;
import me.coley.recaf.util.Icons;
import me.coley.recaf.util.JavaFX;
import me.coley.recaf.util.Lang;
import me.coley.recaf.util.SelfReference;
/**
* Window for handling attaching to external processes.
*
* @author Matt
*/
public class FxAttach extends Stage {
private final static FxAttach INSTANCE = new FxAttach();
private final ListView<VirtualMachineDescriptor> list = new ListView<>();
private VirtualMachineDescriptor selected;
private FxAttach() {
setTitle(Lang.get("ui.attach"));
getIcons().add(Icons.ATTACH);
Button btn = new ActionButton(Lang.get("ui.attach.prompt"), () -> {
attach(selected);
});
btn.setDisable(true);
list.setCellFactory(param -> new ListCell<VirtualMachineDescriptor>() {
@Override
public void updateItem(VirtualMachineDescriptor item, boolean empty) {
super.updateItem(item, empty);
if (empty || item == null) {
setGraphic(null);
setText(null);
} else {
setGraphic(null);
setText(item.displayName());
}
}
});
list.getSelectionModel().selectedItemProperty().addListener((observable, oldValue, newValue) -> {
selected = newValue;
boolean set = selected != null;
btn.setDisable(!set);
if (set) {
btn.setText(selected.displayName());
} else {
btn.setText(Lang.get("ui.attach.prompt"));
}
});
BorderPane bp = new BorderPane();
bp.setCenter(list);
bp.setBottom(btn);
refresh();
setScene(JavaFX.scene(bp, 700, 200));
}
private void refresh() {
List<VirtualMachineDescriptor> vms = VirtualMachine.list();
//@formatter:off
// Skip self, show normal VMs.
vms = vms.stream()
.filter(vm ->
vm.displayName().length() > 1 &&
!vm.displayName().startsWith("me.coley.recaf")
).filter(vm ->
!vm.displayName().startsWith("recaf-" + Recaf.VERSION))
.collect(Collectors.toList());
//@formatter:on
list.getItems().clear();
list.getItems().addAll(vms);
}
private static void attach(VirtualMachineDescriptor vmDesc) {
// The attach process is threaded so that the target vm's agent does not
// lock up the current instance of Recaf.
// Without threading, the client locks until the agent is terminated.
new Thread(() -> {
try {
VirtualMachine vm = VirtualMachine.attach(vmDesc);
vm.loadAgent(SelfReference.get().getPath(), "-agent");
vm.detach();
} catch (Exception e) {
Logging.error(e);
}
}).start();
}
/**
* Display config window.
*/
public static void open() {
if (INSTANCE.isShowing()) {
INSTANCE.toFront();
} else {
INSTANCE.show();
}
// refresh VM list
INSTANCE.refresh();
}
}
|
src/me/coley/recaf/ui/FxAttach.java
|
package me.coley.recaf.ui;
import java.util.List;
import java.util.stream.Collectors;
import com.sun.tools.attach.VirtualMachine;
import com.sun.tools.attach.VirtualMachineDescriptor;
import javafx.scene.control.Button;
import javafx.scene.control.ListCell;
import javafx.scene.control.ListView;
import javafx.scene.layout.BorderPane;
import javafx.stage.Stage;
import me.coley.recaf.Logging;
import me.coley.recaf.ui.component.ActionButton;
import me.coley.recaf.util.Icons;
import me.coley.recaf.util.JavaFX;
import me.coley.recaf.util.Lang;
import me.coley.recaf.util.SelfReference;
/**
* Window for handling attaching to external processes.
*
* @author Matt
*/
public class FxAttach extends Stage {
private final static FxAttach INSTANCE = new FxAttach();
private final ListView<VirtualMachineDescriptor> list = new ListView<>();
private VirtualMachineDescriptor selected;
private FxAttach() {
setTitle(Lang.get("ui.attach"));
getIcons().add(Icons.ATTACH);
Button btn = new ActionButton(Lang.get("ui.attach.prompt"), () -> {
attach(selected);
});
btn.setDisable(true);
list.setCellFactory(param -> new ListCell<VirtualMachineDescriptor>() {
@Override
public void updateItem(VirtualMachineDescriptor item, boolean empty) {
super.updateItem(item, empty);
if (empty || item == null) {
setGraphic(null);
setText(null);
} else {
setGraphic(null);
setText(item.displayName());
}
}
});
list.getSelectionModel().selectedItemProperty().addListener((observable, oldValue, newValue) -> {
selected = newValue;
boolean set = selected != null;
btn.setDisable(!set);
if (set) {
btn.setText(selected.displayName());
} else {
btn.setText(Lang.get("ui.attach.prompt"));
}
});
BorderPane bp = new BorderPane();
bp.setCenter(list);
bp.setBottom(btn);
refresh();
setScene(JavaFX.scene(bp, 700, 200));
}
private void refresh() {
List<VirtualMachineDescriptor> vms = VirtualMachine.list();
//@formatter:off
// Skip self, show normal VMs.
vms = vms.stream()
.filter(vm ->
vm.displayName().length() > 1 &&
!vm.displayName().startsWith("me.coley.recaf")
).collect(Collectors.toList());
//@formatter:on
list.getItems().clear();
list.getItems().addAll(vms);
}
private static void attach(VirtualMachineDescriptor vmDesc) {
// The attach process is threaded so that the target vm's agent does not
// lock up the current instance of Recaf.
// Without threading, the client locks until the agent is terminated.
new Thread(() -> {
try {
VirtualMachine vm = VirtualMachine.attach(vmDesc);
vm.loadAgent(SelfReference.get().getPath(), "-agent");
vm.detach();
} catch (Exception e) {
Logging.error(e);
}
}).start();
}
/**
* Display config window.
*/
public static void open() {
if (INSTANCE.isShowing()) {
INSTANCE.toFront();
} else {
INSTANCE.show();
}
// refresh VM list
INSTANCE.refresh();
}
}
|
Fix self (Recaf) showing up in attach menu when running via 'java -jar' vs 'java -cp recaf.jar'
|
src/me/coley/recaf/ui/FxAttach.java
|
Fix self (Recaf) showing up in attach menu when running via 'java -jar' vs 'java -cp recaf.jar'
|
<ide><path>rc/me/coley/recaf/ui/FxAttach.java
<ide> import javafx.scene.layout.BorderPane;
<ide> import javafx.stage.Stage;
<ide> import me.coley.recaf.Logging;
<add>import me.coley.recaf.Recaf;
<ide> import me.coley.recaf.ui.component.ActionButton;
<ide> import me.coley.recaf.util.Icons;
<ide> import me.coley.recaf.util.JavaFX;
<ide> .filter(vm ->
<ide> vm.displayName().length() > 1 &&
<ide> !vm.displayName().startsWith("me.coley.recaf")
<del> ).collect(Collectors.toList());
<add> ).filter(vm ->
<add> !vm.displayName().startsWith("recaf-" + Recaf.VERSION))
<add> .collect(Collectors.toList());
<ide> //@formatter:on
<ide> list.getItems().clear();
<ide> list.getItems().addAll(vms);
|
|
Java
|
apache-2.0
|
e69a06648167709c4771f15a4d953e80cc97c91e
| 0 |
Malamut54/dbobrov,Malamut54/dbobrov,Malamut54/dbobrov
|
package ru.job4j.array;
/**
*Inverting an array. Task 5.0.
*@author Dmitriy Bobrov (mailto:[email protected])
*@since 0.1
*/
public class Turn {
/**
*Class Turn. inverting array.
*/
/**
*inverting array.
*@param array - input array.
*@return array - return inverted array.
*/
public int[] back(int[] array) {
for (int i = 0; i < array.length / 2; i++) {
int tmp = array[i];
array[i] = array[array.length - i - 1];
array[array.length - i - 1] = tmp;
}
return array;
}
}
|
chapter_001/src/main/java/ru/job4j/array/Turn.java
|
package ru.job4j.array;
/**
*Inverting an array. Task 5.0.
*@author Dmitriy Bobrov (mailto:[email protected])
*@since 0.1
*/
public class Turn {
/**
*Class Turn. inverting array.
*/
/**
*inverting array.
*@param array - input array.
*@return array - return inverted array.
*/
public int[] back(int[] array) {
if (array.length % 2 == 0) {
for (int i = 0; i < array.length / 2; i++) {
int tmp = array[i];
array[i] = array[array.length - i - 1];
array[array.length - i - 1] = tmp;
}
return array;
} else {
for (int i = 0; i <= array.length / 2; i++) {
int tmp = array[i];
array[i] = array[array.length - i - 1];
array[array.length - i - 1] = tmp;
}
}
return array;
}
}
|
Task 5.0 inverting array
|
chapter_001/src/main/java/ru/job4j/array/Turn.java
|
Task 5.0 inverting array
|
<ide><path>hapter_001/src/main/java/ru/job4j/array/Turn.java
<ide> */
<ide>
<ide> public int[] back(int[] array) {
<del> if (array.length % 2 == 0) {
<del> for (int i = 0; i < array.length / 2; i++) {
<add> for (int i = 0; i < array.length / 2; i++) {
<ide> int tmp = array[i];
<ide> array[i] = array[array.length - i - 1];
<ide> array[array.length - i - 1] = tmp;
<ide> }
<ide> return array;
<del> } else {
<del> for (int i = 0; i <= array.length / 2; i++) {
<del> int tmp = array[i];
<del> array[i] = array[array.length - i - 1];
<del> array[array.length - i - 1] = tmp;
<del> }
<del> }
<del> return array;
<ide> }
<ide> }
|
|
Java
|
apache-2.0
|
7031f9779e61f55fd5c4c4378f033ef5c286a5a6
| 0 |
cunningt/camel,pax95/camel,adessaigne/camel,tadayosi/camel,adessaigne/camel,pax95/camel,christophd/camel,cunningt/camel,tadayosi/camel,cunningt/camel,christophd/camel,pax95/camel,apache/camel,tadayosi/camel,apache/camel,tadayosi/camel,pax95/camel,cunningt/camel,pax95/camel,christophd/camel,apache/camel,tadayosi/camel,adessaigne/camel,cunningt/camel,adessaigne/camel,christophd/camel,tadayosi/camel,apache/camel,apache/camel,cunningt/camel,christophd/camel,christophd/camel,apache/camel,adessaigne/camel,adessaigne/camel,pax95/camel
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.salesforce.internal.streaming;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
import java.util.stream.Stream;
import org.apache.camel.CamelException;
import org.apache.camel.component.salesforce.SalesforceComponent;
import org.apache.camel.component.salesforce.SalesforceConsumer;
import org.apache.camel.component.salesforce.SalesforceEndpoint;
import org.apache.camel.component.salesforce.SalesforceEndpointConfig;
import org.apache.camel.component.salesforce.SalesforceHttpClient;
import org.apache.camel.component.salesforce.api.SalesforceException;
import org.apache.camel.component.salesforce.internal.SalesforceSession;
import org.apache.camel.support.service.ServiceSupport;
import org.cometd.bayeux.Message;
import org.cometd.bayeux.client.ClientSessionChannel;
import org.cometd.bayeux.client.ClientSessionChannel.MessageListener;
import org.cometd.client.BayeuxClient;
import org.cometd.client.BayeuxClient.State;
import org.cometd.client.transport.ClientTransport;
import org.cometd.client.transport.LongPollingTransport;
import org.eclipse.jetty.client.api.Request;
import org.eclipse.jetty.http.HttpHeader;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import static java.util.concurrent.TimeUnit.SECONDS;
import static org.cometd.bayeux.Channel.META_CONNECT;
import static org.cometd.bayeux.Channel.META_HANDSHAKE;
import static org.cometd.bayeux.Channel.META_SUBSCRIBE;
import static org.cometd.bayeux.Message.ERROR_FIELD;
import static org.cometd.bayeux.Message.SUBSCRIPTION_FIELD;
public class SubscriptionHelper extends ServiceSupport {
static final ReplayExtension REPLAY_EXTENSION = new ReplayExtension();
private static final Logger LOG = LoggerFactory.getLogger(SubscriptionHelper.class);
private static final int CONNECT_TIMEOUT = 110;
private static final String FAILURE_FIELD = "failure";
private static final String EXCEPTION_FIELD = "exception";
private static final String SFDC_FIELD = "sfdc";
private static final String FAILURE_REASON_FIELD = "failureReason";
private static final int DISCONNECT_INTERVAL = 5000;
private static final String SERVER_TOO_BUSY_ERROR = "503::";
private static final String AUTHENTICATION_INVALID = "401::Authentication invalid";
private static final String INVALID_REPLAY_ID_PATTERN = "400::The replayId \\{.*} you provided was invalid.*";
BayeuxClient client;
private final SalesforceComponent component;
private SalesforceSession session;
private final long timeout = 60 * 1000L;
private final Map<SalesforceConsumer, ClientSessionChannel.MessageListener> listenerMap;
private final long maxBackoff;
private final long backoffIncrement;
private ClientSessionChannel.MessageListener handshakeListener;
private ClientSessionChannel.MessageListener connectListener;
private volatile String handshakeError;
private volatile Exception handshakeException;
private volatile String connectError;
private volatile Exception connectException;
private volatile boolean reconnecting;
private final AtomicLong handshakeBackoff;
private final AtomicBoolean handshaking = new AtomicBoolean();
private final AtomicBoolean loggingIn = new AtomicBoolean();
public SubscriptionHelper(final SalesforceComponent component) {
this.component = component;
listenerMap = new ConcurrentHashMap<>();
handshakeBackoff = new AtomicLong();
backoffIncrement = component.getConfig().getBackoffIncrement();
maxBackoff = component.getConfig().getMaxBackoff();
}
@Override
protected void doStart() throws Exception {
session = component.getSession();
// create CometD client
client = createClient(component, session);
initMessageListeners();
connect();
}
private void initMessageListeners() {
// listener for handshake error or exception
if (handshakeListener == null) {
// first start
handshakeListener = new ClientSessionChannel.MessageListener() {
public void onMessage(ClientSessionChannel channel, Message message) {
component.getHttpClient().getWorkerPool().execute(() -> {
LOG.debug("[CHANNEL:META_HANDSHAKE]: {}", message);
if (!message.isSuccessful()) {
LOG.warn("Handshake failure: {}", message);
handshakeError = (String) message.get(ERROR_FIELD);
handshakeException = getFailure(message);
if (handshakeError != null) {
if (handshakeError.startsWith("403::")) {
String failureReason = getFailureReason(message);
if (failureReason.equals(AUTHENTICATION_INVALID)) {
LOG.debug(
"attempting login due to handshake error: 403 -> 401::Authentication invalid");
attemptLoginUntilSuccessful();
}
}
}
// failed, so keep trying
LOG.debug("Handshake failed, so try again.");
handshake();
} else if (!listenerMap.isEmpty()) {
reconnecting = true;
}
});
}
};
}
client.getChannel(META_HANDSHAKE).addListener(handshakeListener);
// listener for connect error
if (connectListener == null) {
connectListener = new ClientSessionChannel.MessageListener() {
public void onMessage(ClientSessionChannel channel, Message message) {
component.getHttpClient().getWorkerPool().execute(() -> {
LOG.debug("[CHANNEL:META_CONNECT]: {}", message);
if (!message.isSuccessful()) {
LOG.warn("Connect failure: {}", message);
connectError = (String) message.get(ERROR_FIELD);
connectException = getFailure(message);
if (connectError != null && connectError.equals(AUTHENTICATION_INVALID)) {
LOG.debug("connectError: " + connectError);
LOG.debug("Attempting login...");
attemptLoginUntilSuccessful();
}
// Server says don't retry to connect, so we'll handshake instead
// Otherwise, Bayeux client automatically re-attempts connection
if (message.getAdvice() != null &&
!message.getAdvice().get("reconnect").equals("retry")) {
LOG.debug("Advice != retry, so handshaking");
handshake();
}
} else if (reconnecting) {
LOG.debug("Refreshing subscriptions to {} channels on reconnect", listenerMap.size());
// reconnected to Salesforce, subscribe to existing
// channels
final Map<SalesforceConsumer, MessageListener> map = new HashMap<>(listenerMap);
listenerMap.clear();
for (Map.Entry<SalesforceConsumer, ClientSessionChannel.MessageListener> entry : map.entrySet()) {
final SalesforceConsumer consumer = entry.getKey();
final String topicName = consumer.getTopicName();
subscribe(topicName, consumer);
}
reconnecting = false;
}
});
}
};
}
client.getChannel(META_CONNECT).addListener(connectListener);
}
private void connect() throws CamelException {
// connect to Salesforce cometd endpoint
client.handshake();
final long waitMs = MILLISECONDS.convert(CONNECT_TIMEOUT, SECONDS);
if (!client.waitFor(waitMs, BayeuxClient.State.CONNECTED)) {
if (handshakeException != null) {
throw new CamelException(
String.format("Exception during HANDSHAKE: %s", handshakeException.getMessage()), handshakeException);
} else if (handshakeError != null) {
throw new CamelException(String.format("Error during HANDSHAKE: %s", handshakeError));
} else if (connectException != null) {
throw new CamelException(
String.format("Exception during CONNECT: %s", connectException.getMessage()), connectException);
} else if (connectError != null) {
throw new CamelException(String.format("Error during CONNECT: %s", connectError));
} else {
throw new CamelException(String.format("Handshake request timeout after %s seconds", CONNECT_TIMEOUT));
}
}
}
private void handshake() {
LOG.debug("Begin handshake if not already in progress.");
if (!handshaking.compareAndSet(false, true)) {
return;
}
LOG.debug("Continuing with handshake.");
try {
doHandshake();
} finally {
handshaking.set(false);
}
}
private void doHandshake() {
if (isStoppingOrStopped()) {
return;
}
LOG.info("Handshaking after unexpected disconnect from Salesforce...");
boolean abort = false;
// wait for disconnect
LOG.debug("Waiting to disconnect...");
while (!abort && !client.isDisconnected()) {
try {
Thread.sleep(DISCONNECT_INTERVAL);
} catch (InterruptedException e) {
LOG.error("Aborting handshake on interrupt!");
abort = true;
}
abort = abort || isStoppingOrStopped();
}
if (!abort) {
// update handshake attempt backoff
final long backoff = handshakeBackoff.getAndAdd(backoffIncrement);
if (backoff > maxBackoff) {
LOG.error("Handshake aborted after exceeding {} msecs backoff", maxBackoff);
abort = true;
} else {
// pause before handshake attempt
LOG.debug("Pausing for {} msecs before handshake attempt", backoff);
try {
Thread.sleep(backoff);
} catch (InterruptedException e) {
LOG.error("Aborting handshake on interrupt!");
abort = true;
}
}
if (!abort) {
Exception lastError = new SalesforceException("Unknown error", null);
try {
// reset client. If we fail to stop and logout, catch the exception
// so we can still continue to doStart()
if (client != null) {
client.disconnect();
boolean disconnected = client.waitFor(timeout, State.DISCONNECTED);
if (!disconnected) {
LOG.warn("Could not disconnect client connected to: {} after: {} msec.", getEndpointUrl(component),
timeout);
client.abort();
}
client.handshake();
final long waitMs = MILLISECONDS.convert(CONNECT_TIMEOUT, SECONDS);
client.waitFor(waitMs, BayeuxClient.State.CONNECTED);
}
} catch (Exception e) {
LOG.error("Error handshaking: " + e.getMessage(), e);
lastError = e;
}
if (client != null && client.isHandshook()) {
LOG.debug("Successful handshake!");
// reset backoff interval
handshakeBackoff.set(client.getBackoffIncrement());
} else {
LOG.error("Failed to handshake after pausing for {} msecs", backoff);
if ((backoff + backoffIncrement) > maxBackoff) {
// notify all consumers
String abortMsg = "Aborting handshake attempt due to: " + lastError.getMessage();
SalesforceException ex = new SalesforceException(abortMsg, lastError);
for (SalesforceConsumer consumer : listenerMap.keySet()) {
consumer.handleException(abortMsg, ex);
}
}
}
}
}
}
@SuppressWarnings("unchecked")
private static Exception getFailure(Message message) {
Exception exception = null;
if (message.get(EXCEPTION_FIELD) != null) {
exception = (Exception) message.get(EXCEPTION_FIELD);
} else if (message.get(FAILURE_FIELD) != null) {
exception = (Exception) ((Map<String, Object>) message.get(FAILURE_FIELD)).get("exception");
} else {
String failureReason = getFailureReason(message);
if (failureReason != null) {
exception = new SalesforceException(failureReason, null);
}
}
return exception;
}
private void closeChannel(final String name, MessageListener listener) {
if (client == null) {
return;
}
final ClientSessionChannel channel = client.getChannel(name);
channel.removeListener(listener);
channel.release();
}
@Override
protected void doStop() throws Exception {
closeChannel(META_CONNECT, connectListener);
closeChannel(META_HANDSHAKE, handshakeListener);
for (Map.Entry<SalesforceConsumer, MessageListener> entry : listenerMap.entrySet()) {
final SalesforceConsumer consumer = entry.getKey();
final String topic = consumer.getTopicName();
final MessageListener listener = entry.getValue();
closeChannel(getChannelName(topic), listener);
}
if (client == null) {
return;
}
client.disconnect();
boolean disconnected = client.waitFor(timeout, State.DISCONNECTED);
if (!disconnected) {
LOG.warn("Could not disconnect client connected to: {} after: {} msec.", getEndpointUrl(component), timeout);
client.abort();
}
client = null;
if (session != null) {
session.logout();
}
LOG.debug("Stopped the helper and destroyed the client");
}
static BayeuxClient createClient(final SalesforceComponent component, final SalesforceSession session)
throws SalesforceException {
// use default Jetty client from SalesforceComponent, it's shared by all consumers
final SalesforceHttpClient httpClient = component.getConfig().getHttpClient();
Map<String, Object> options = new HashMap<>();
options.put(ClientTransport.MAX_NETWORK_DELAY_OPTION, httpClient.getTimeout());
if (component.getLongPollingTransportProperties() != null) {
options = component.getLongPollingTransportProperties();
}
// check login access token
if (session.getAccessToken() == null && !component.getLoginConfig().isLazyLogin()) {
session.login(null);
}
LongPollingTransport transport = new LongPollingTransport(options, httpClient) {
@Override
protected void customize(Request request) {
super.customize(request);
//accessToken might be null due to lazy login
String accessToken = session.getAccessToken();
if (accessToken == null) {
try {
accessToken = session.login(null);
} catch (SalesforceException e) {
throw new RuntimeException(e);
}
}
request.getHeaders().put(HttpHeader.AUTHORIZATION, "OAuth " + accessToken);
}
};
BayeuxClient client = new BayeuxClient(getEndpointUrl(component), transport);
// added eagerly to check for support during handshake
client.addExtension(REPLAY_EXTENSION);
return client;
}
public void subscribe(final String topicName, final SalesforceConsumer consumer) {
subscribe(topicName, consumer, false);
}
public void subscribe(
final String topicName, final SalesforceConsumer consumer,
final boolean skipReplayId) {
// create subscription for consumer
final String channelName = getChannelName(topicName);
if (!reconnecting && !skipReplayId) {
setupReplay((SalesforceEndpoint) consumer.getEndpoint());
}
// channel message listener
LOG.info("Subscribing to channel {}...", channelName);
final ClientSessionChannel.MessageListener listener = new ClientSessionChannel.MessageListener() {
@Override
public void onMessage(ClientSessionChannel channel, Message message) {
LOG.debug("Received Message: {}", message);
// convert CometD message to Camel Message
consumer.processMessage(channel, message);
}
};
// listener for subscription
final ClientSessionChannel.MessageListener subscriptionListener = new ClientSessionChannel.MessageListener() {
public void onMessage(ClientSessionChannel channel, Message message) {
LOG.debug("[CHANNEL:META_SUBSCRIBE]: {}", message);
final String subscribedChannelName = message.get(SUBSCRIPTION_FIELD).toString();
if (channelName.equals(subscribedChannelName)) {
if (!message.isSuccessful()) {
String error = (String) message.get(ERROR_FIELD);
if (error == null) {
error = "Missing error message";
}
Exception failure = getFailure(message);
String msg = String.format("Error subscribing to %s: %s", topicName,
failure != null ? failure.getMessage() : error);
boolean abort = true;
if (isTemporaryError(message)) {
LOG.warn(msg);
// retry after delay
final long backoff = handshakeBackoff.getAndAdd(backoffIncrement);
if (backoff > maxBackoff) {
LOG.error("Subscribe aborted after exceeding {} msecs backoff", maxBackoff);
} else {
abort = false;
try {
LOG.debug("Pausing for {} msecs before subscribe attempt", backoff);
Thread.sleep(backoff);
component.getHttpClient().getWorkerPool().execute(() -> subscribe(topicName, consumer));
} catch (InterruptedException e) {
LOG.warn("Aborting subscribe on interrupt!", e);
}
}
} else if (error.matches(INVALID_REPLAY_ID_PATTERN)) {
abort = false;
final Long fallBackReplayId
= ((SalesforceEndpoint) consumer.getEndpoint()).getConfiguration().getFallBackReplayId();
LOG.warn(error);
LOG.warn("Falling back to replayId {} for channel {}", fallBackReplayId, channelName);
REPLAY_EXTENSION.addChannelReplayId(channelName, fallBackReplayId);
subscribe(topicName, consumer, true);
}
if (abort && client != null) {
consumer.handleException(msg, new SalesforceException(msg, failure));
}
} else {
// remember subscription
LOG.info("Subscribed to channel {}", subscribedChannelName);
listenerMap.put(consumer, listener);
// reset backoff interval
handshakeBackoff.set(0);
}
// remove this subscription listener
if (client != null) {
client.getChannel(META_SUBSCRIBE).removeListener(this);
} else {
LOG.warn("Trying to handle a subscription message but the client is already destroyed");
}
}
}
};
client.getChannel(META_SUBSCRIBE).addListener(subscriptionListener);
// subscribe asynchronously
final ClientSessionChannel clientChannel = client.getChannel(channelName);
clientChannel.subscribe(listener);
}
private static boolean isTemporaryError(Message message) {
String failureReason = getFailureReason(message);
return failureReason != null && failureReason.startsWith(SERVER_TOO_BUSY_ERROR);
}
private static String getFailureReason(Message message) {
String failureReason = null;
if (message.getExt() != null) {
@SuppressWarnings("unchecked")
Map<String, Object> sfdcFields = (Map<String, Object>) message.getExt().get(SFDC_FIELD);
if (sfdcFields != null) {
failureReason = (String) sfdcFields.get(FAILURE_REASON_FIELD);
}
}
return failureReason;
}
void setupReplay(final SalesforceEndpoint endpoint) {
final String topicName = endpoint.getTopicName();
final Optional<Long> replayId = determineReplayIdFor(endpoint, topicName);
if (replayId.isPresent()) {
final String channelName = getChannelName(topicName);
final Long replayIdValue = replayId.get();
LOG.info("Set Replay extension to replay from `{}` for channel `{}`", replayIdValue, channelName);
REPLAY_EXTENSION.addChannelReplayId(channelName, replayIdValue);
}
}
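// Re-attempt Salesforce login with incremental backoff until it succeeds;
// the loggingIn flag ensures only one login attempt runs at a time.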
private void attemptLoginUntilSuccessful() {
if (!loggingIn.compareAndSet(false, true)) {
LOG.debug("already logging in");
return;
}
long backoff = 0;
try {
for (;;) {
try {
if (isStoppingOrStopped()) {
return;
}
session.login(session.getAccessToken());
break;
} catch (SalesforceException e) {
backoff = backoff + backoffIncrement;
if (backoff > maxBackoff) {
backoff = maxBackoff;
}
LOG.warn(String.format("Salesforce login failed. Pausing for %d seconds", backoff), e);
try {
Thread.sleep(backoff);
} catch (InterruptedException ex) {
throw new RuntimeException("Failed to login.", ex);
}
}
}
} finally {
loggingIn.set(false);
}
}
static Optional<Long> determineReplayIdFor(final SalesforceEndpoint endpoint, final String topicName) {
final String channelName = getChannelName(topicName);
final Long replayId = endpoint.getReplayId();
final SalesforceComponent component = endpoint.getComponent();
final SalesforceEndpointConfig endpointConfiguration = endpoint.getConfiguration();
final Map<String, Long> endpointInitialReplayIdMap = endpointConfiguration.getInitialReplayIdMap();
final Long endpointReplayId
= endpointInitialReplayIdMap.getOrDefault(topicName, endpointInitialReplayIdMap.get(channelName));
final Long endpointDefaultReplayId = endpointConfiguration.getDefaultReplayId();
final SalesforceEndpointConfig componentConfiguration = component.getConfig();
final Map<String, Long> componentInitialReplayIdMap = componentConfiguration.getInitialReplayIdMap();
final Long componentReplayId
= componentInitialReplayIdMap.getOrDefault(topicName, componentInitialReplayIdMap.get(channelName));
final Long componentDefaultReplayId = componentConfiguration.getDefaultReplayId();
// endpoint values take priority over component values, and explicit
// per-topic values take priority over the default replay id values
return Stream.of(replayId, endpointReplayId, componentReplayId, endpointDefaultReplayId, componentDefaultReplayId)
.filter(Objects::nonNull).findFirst();
}
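// Convert a topic name to a CometD channel name, adding a leading '/' and
// a "topic/" prefix for plain topic names.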
static String getChannelName(final String topicName) {
final StringBuilder channelName = new StringBuilder();
if (topicName.charAt(0) != '/') {
channelName.append('/');
}
if (topicName.indexOf('/', 1) > 0) {
channelName.append(topicName);
} else {
channelName.append("topic/");
channelName.append(topicName);
}
return channelName.toString();
}
public void unsubscribe(String topicName, SalesforceConsumer consumer) {
// channel name
final String channelName = getChannelName(topicName);
// unsubscribe from channel
final ClientSessionChannel.MessageListener listener = listenerMap.remove(consumer);
if (listener != null) {
LOG.debug("Unsubscribing from channel {}...", channelName);
final ClientSessionChannel clientChannel = client.getChannel(channelName);
// if there are other listeners on this channel, an unsubscribe message will not be sent,
// so we're not going to listen for and expect an unsub response. Just unsub and move on.
clientChannel.unsubscribe(listener);
}
}
static String getEndpointUrl(final SalesforceComponent component) {
// In version 36.0 replay is only enabled on a separate endpoint
if (Double.parseDouble(component.getConfig().getApiVersion()) == 36.0) {
boolean replayOptionsPresent = component.getConfig().getDefaultReplayId() != null
|| !component.getConfig().getInitialReplayIdMap().isEmpty();
if (replayOptionsPresent) {
return component.getSession().getInstanceUrl() + "/cometd/replay/" + component.getConfig().getApiVersion();
}
}
return component.getSession().getInstanceUrl() + "/cometd/" + component.getConfig().getApiVersion();
}
}
|
components/camel-salesforce/camel-salesforce-component/src/main/java/org/apache/camel/component/salesforce/internal/streaming/SubscriptionHelper.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.salesforce.internal.streaming;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
import java.util.stream.Stream;
import org.apache.camel.CamelException;
import org.apache.camel.component.salesforce.SalesforceComponent;
import org.apache.camel.component.salesforce.SalesforceConsumer;
import org.apache.camel.component.salesforce.SalesforceEndpoint;
import org.apache.camel.component.salesforce.SalesforceEndpointConfig;
import org.apache.camel.component.salesforce.SalesforceHttpClient;
import org.apache.camel.component.salesforce.api.SalesforceException;
import org.apache.camel.component.salesforce.internal.SalesforceSession;
import org.apache.camel.support.service.ServiceSupport;
import org.cometd.bayeux.Message;
import org.cometd.bayeux.client.ClientSessionChannel;
import org.cometd.bayeux.client.ClientSessionChannel.MessageListener;
import org.cometd.client.BayeuxClient;
import org.cometd.client.BayeuxClient.State;
import org.cometd.client.transport.ClientTransport;
import org.cometd.client.transport.LongPollingTransport;
import org.eclipse.jetty.client.api.Request;
import org.eclipse.jetty.http.HttpHeader;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import static java.util.concurrent.TimeUnit.SECONDS;
import static org.cometd.bayeux.Channel.META_CONNECT;
import static org.cometd.bayeux.Channel.META_HANDSHAKE;
import static org.cometd.bayeux.Channel.META_SUBSCRIBE;
import static org.cometd.bayeux.Message.ERROR_FIELD;
import static org.cometd.bayeux.Message.SUBSCRIPTION_FIELD;
public class SubscriptionHelper extends ServiceSupport {
static final ReplayExtension REPLAY_EXTENSION = new ReplayExtension();
private static final Logger LOG = LoggerFactory.getLogger(SubscriptionHelper.class);
private static final int CONNECT_TIMEOUT = 110;
private static final String FAILURE_FIELD = "failure";
private static final String EXCEPTION_FIELD = "exception";
private static final String SFDC_FIELD = "sfdc";
private static final String FAILURE_REASON_FIELD = "failureReason";
private static final int DISCONNECT_INTERVAL = 5000;
private static final String SERVER_TOO_BUSY_ERROR = "503::";
private static final String AUTHENTICATION_INVALID = "401::Authentication invalid";
private static final String INVALID_REPLAY_ID_PATTERN = "400::The replayId \\{.*} you provided was invalid.*";
BayeuxClient client;
private final SalesforceComponent component;
private SalesforceSession session;
private final long timeout = 60 * 1000L;
private final Map<SalesforceConsumer, ClientSessionChannel.MessageListener> listenerMap;
private final long maxBackoff;
private final long backoffIncrement;
private ClientSessionChannel.MessageListener handshakeListener;
private ClientSessionChannel.MessageListener connectListener;
private volatile String handshakeError;
private volatile Exception handshakeException;
private volatile String connectError;
private volatile Exception connectException;
private volatile boolean reconnecting;
private final AtomicLong handshakeBackoff;
private final AtomicBoolean handshaking = new AtomicBoolean();
private final AtomicBoolean loggingIn = new AtomicBoolean();
public SubscriptionHelper(final SalesforceComponent component) {
this.component = component;
listenerMap = new ConcurrentHashMap<>();
handshakeBackoff = new AtomicLong();
backoffIncrement = component.getConfig().getBackoffIncrement();
maxBackoff = component.getConfig().getMaxBackoff();
}
@Override
protected void doStart() throws Exception {
session = component.getSession();
// create CometD client
client = createClient(component, session);
initMessageListeners();
connect();
}
private void initMessageListeners() {
// listener for handshake error or exception
if (handshakeListener == null) {
// first start
handshakeListener = new ClientSessionChannel.MessageListener() {
public void onMessage(ClientSessionChannel channel, Message message) {
component.getHttpClient().getWorkerPool().execute(() -> {
LOG.debug("[CHANNEL:META_HANDSHAKE]: {}", message);
if (!message.isSuccessful()) {
LOG.warn("Handshake failure: {}", message);
handshakeError = (String) message.get(ERROR_FIELD);
handshakeException = getFailure(message);
if (handshakeError != null) {
if (handshakeError.startsWith("403::")) {
String failureReason = getFailureReason(message);
if (failureReason.equals(AUTHENTICATION_INVALID)) {
LOG.debug(
"attempting login due to handshake error: 403 -> 401::Authentication invalid");
attemptLoginUntilSuccessful();
}
}
}
// failed, so keep trying
LOG.debug("Handshake failed, so try again.");
handshake();
} else if (!listenerMap.isEmpty()) {
reconnecting = true;
}
});
}
};
}
client.getChannel(META_HANDSHAKE).addListener(handshakeListener);
// listener for connect error
if (connectListener == null) {
connectListener = new ClientSessionChannel.MessageListener() {
public void onMessage(ClientSessionChannel channel, Message message) {
component.getHttpClient().getWorkerPool().execute(() -> {
LOG.debug("[CHANNEL:META_CONNECT]: {}", message);
if (!message.isSuccessful()) {
LOG.warn("Connect failure: {}", message);
connectError = (String) message.get(ERROR_FIELD);
connectException = getFailure(message);
if (connectError != null && connectError.equals(AUTHENTICATION_INVALID)) {
LOG.debug("connectError: " + connectError);
LOG.debug("Attempting login...");
attemptLoginUntilSuccessful();
}
// Server says don't retry to connect, so we'll handshake instead
// Otherwise, Bayeux client automatically re-attempts connection
if (message.getAdvice() != null &&
!message.getAdvice().get("reconnect").equals("retry")) {
LOG.debug("Advice != retry, so handshaking");
handshake();
}
} else if (reconnecting) {
LOG.debug("Refreshing subscriptions to {} channels on reconnect", listenerMap.size());
// reconnected to Salesforce, subscribe to existing
// channels
final Map<SalesforceConsumer, MessageListener> map = new HashMap<>(listenerMap);
listenerMap.clear();
for (Map.Entry<SalesforceConsumer, ClientSessionChannel.MessageListener> entry : map.entrySet()) {
final SalesforceConsumer consumer = entry.getKey();
final String topicName = consumer.getTopicName();
subscribe(topicName, consumer);
}
reconnecting = false;
}
});
}
};
}
client.getChannel(META_CONNECT).addListener(connectListener);
}
private void connect() throws CamelException {
// connect to Salesforce cometd endpoint
client.handshake();
final long waitMs = MILLISECONDS.convert(CONNECT_TIMEOUT, SECONDS);
if (!client.waitFor(waitMs, BayeuxClient.State.CONNECTED)) {
if (handshakeException != null) {
throw new CamelException(
String.format("Exception during HANDSHAKE: %s", handshakeException.getMessage()), handshakeException);
} else if (handshakeError != null) {
throw new CamelException(String.format("Error during HANDSHAKE: %s", handshakeError));
} else if (connectException != null) {
throw new CamelException(
String.format("Exception during CONNECT: %s", connectException.getMessage()), connectException);
} else if (connectError != null) {
throw new CamelException(String.format("Error during CONNECT: %s", connectError));
} else {
throw new CamelException(String.format("Handshake request timeout after %s seconds", CONNECT_TIMEOUT));
}
}
}
private void handshake() {
LOG.debug("Begin handshake if not already in progress.");
if (!handshaking.compareAndSet(false, true)) {
return;
}
LOG.debug("Continuing with handshake.");
try {
doHandshake();
} finally {
handshaking.set(false);
}
}
private void doHandshake() {
if (isStoppingOrStopped()) {
return;
}
LOG.info("Handshaking after unexpected disconnect from Salesforce...");
boolean abort = false;
// wait for disconnect
LOG.debug("Waiting to disconnect...");
while (!abort && !client.isDisconnected()) {
try {
Thread.sleep(DISCONNECT_INTERVAL);
} catch (InterruptedException e) {
LOG.error("Aborting handshake on interrupt!");
abort = true;
}
abort = abort || isStoppingOrStopped();
}
if (!abort) {
// update handshake attempt backoff
final long backoff = handshakeBackoff.getAndAdd(backoffIncrement);
if (backoff > maxBackoff) {
LOG.error("Handshake aborted after exceeding {} msecs backoff", maxBackoff);
abort = true;
} else {
// pause before handshake attempt
LOG.debug("Pausing for {} msecs before handshake attempt", backoff);
try {
Thread.sleep(backoff);
} catch (InterruptedException e) {
LOG.error("Aborting handshake on interrupt!");
abort = true;
}
}
if (!abort) {
Exception lastError = new SalesforceException("Unknown error", null);
try {
// reset client. If we fail to stop and logout, catch the exception
// so we can still continue to doStart()
if (client != null) {
client.disconnect();
boolean disconnected = client.waitFor(timeout, State.DISCONNECTED);
if (!disconnected) {
LOG.warn("Could not disconnect client connected to: {} after: {} msec.", getEndpointUrl(component),
timeout);
client.abort();
}
}
client.handshake();
final long waitMs = MILLISECONDS.convert(CONNECT_TIMEOUT, SECONDS);
client.waitFor(waitMs, BayeuxClient.State.CONNECTED);
} catch (Exception e) {
LOG.error("Error handshaking: " + e.getMessage(), e);
lastError = e;
}
if (client != null && client.isHandshook()) {
LOG.debug("Successful handshake!");
// reset backoff interval
handshakeBackoff.set(client.getBackoffIncrement());
} else {
LOG.error("Failed to handshake after pausing for {} msecs", backoff);
if ((backoff + backoffIncrement) > maxBackoff) {
// notify all consumers
String abortMsg = "Aborting handshake attempt due to: " + lastError.getMessage();
SalesforceException ex = new SalesforceException(abortMsg, lastError);
for (SalesforceConsumer consumer : listenerMap.keySet()) {
consumer.handleException(abortMsg, ex);
}
}
}
}
}
}
@SuppressWarnings("unchecked")
private static Exception getFailure(Message message) {
Exception exception = null;
if (message.get(EXCEPTION_FIELD) != null) {
exception = (Exception) message.get(EXCEPTION_FIELD);
} else if (message.get(FAILURE_FIELD) != null) {
exception = (Exception) ((Map<String, Object>) message.get(FAILURE_FIELD)).get("exception");
} else {
String failureReason = getFailureReason(message);
if (failureReason != null) {
exception = new SalesforceException(failureReason, null);
}
}
return exception;
}
private void closeChannel(final String name, MessageListener listener) {
if (client == null) {
return;
}
final ClientSessionChannel channel = client.getChannel(name);
channel.removeListener(listener);
channel.release();
}
@Override
protected void doStop() throws Exception {
closeChannel(META_CONNECT, connectListener);
closeChannel(META_HANDSHAKE, handshakeListener);
for (Map.Entry<SalesforceConsumer, MessageListener> entry : listenerMap.entrySet()) {
final SalesforceConsumer consumer = entry.getKey();
final String topic = consumer.getTopicName();
final MessageListener listener = entry.getValue();
closeChannel(getChannelName(topic), listener);
}
if (client == null) {
return;
}
client.disconnect();
boolean disconnected = client.waitFor(timeout, State.DISCONNECTED);
if (!disconnected) {
LOG.warn("Could not disconnect client connected to: {} after: {} msec.", getEndpointUrl(component), timeout);
client.abort();
}
client = null;
if (session != null) {
session.logout();
}
LOG.debug("Stopped the helper and destroyed the client");
}
static BayeuxClient createClient(final SalesforceComponent component, final SalesforceSession session)
throws SalesforceException {
// use default Jetty client from SalesforceComponent, it's shared by all consumers
final SalesforceHttpClient httpClient = component.getConfig().getHttpClient();
Map<String, Object> options = new HashMap<>();
options.put(ClientTransport.MAX_NETWORK_DELAY_OPTION, httpClient.getTimeout());
if (component.getLongPollingTransportProperties() != null) {
options = component.getLongPollingTransportProperties();
}
// check login access token
if (session.getAccessToken() == null && !component.getLoginConfig().isLazyLogin()) {
session.login(null);
}
LongPollingTransport transport = new LongPollingTransport(options, httpClient) {
@Override
protected void customize(Request request) {
super.customize(request);
//accessToken might be null due to lazy login
String accessToken = session.getAccessToken();
if (accessToken == null) {
try {
accessToken = session.login(null);
} catch (SalesforceException e) {
throw new RuntimeException(e);
}
}
request.getHeaders().put(HttpHeader.AUTHORIZATION, "OAuth " + accessToken);
}
};
BayeuxClient client = new BayeuxClient(getEndpointUrl(component), transport);
// added eagerly to check for support during handshake
client.addExtension(REPLAY_EXTENSION);
return client;
}
public void subscribe(final String topicName, final SalesforceConsumer consumer) {
subscribe(topicName, consumer, false);
}
public void subscribe(
final String topicName, final SalesforceConsumer consumer,
final boolean skipReplayId) {
// create subscription for consumer
final String channelName = getChannelName(topicName);
if (!reconnecting && !skipReplayId) {
setupReplay((SalesforceEndpoint) consumer.getEndpoint());
}
// channel message listener
LOG.info("Subscribing to channel {}...", channelName);
final ClientSessionChannel.MessageListener listener = new ClientSessionChannel.MessageListener() {
@Override
public void onMessage(ClientSessionChannel channel, Message message) {
LOG.debug("Received Message: {}", message);
// convert CometD message to Camel Message
consumer.processMessage(channel, message);
}
};
// listener for subscription
final ClientSessionChannel.MessageListener subscriptionListener = new ClientSessionChannel.MessageListener() {
public void onMessage(ClientSessionChannel channel, Message message) {
LOG.debug("[CHANNEL:META_SUBSCRIBE]: {}", message);
final String subscribedChannelName = message.get(SUBSCRIPTION_FIELD).toString();
if (channelName.equals(subscribedChannelName)) {
if (!message.isSuccessful()) {
String error = (String) message.get(ERROR_FIELD);
if (error == null) {
error = "Missing error message";
}
Exception failure = getFailure(message);
String msg = String.format("Error subscribing to %s: %s", topicName,
failure != null ? failure.getMessage() : error);
boolean abort = true;
if (isTemporaryError(message)) {
LOG.warn(msg);
// retry after delay
final long backoff = handshakeBackoff.getAndAdd(backoffIncrement);
if (backoff > maxBackoff) {
LOG.error("Subscribe aborted after exceeding {} msecs backoff", maxBackoff);
} else {
abort = false;
try {
LOG.debug("Pausing for {} msecs before subscribe attempt", backoff);
Thread.sleep(backoff);
component.getHttpClient().getWorkerPool().execute(() -> subscribe(topicName, consumer));
} catch (InterruptedException e) {
LOG.warn("Aborting subscribe on interrupt!", e);
}
}
} else if (error.matches(INVALID_REPLAY_ID_PATTERN)) {
abort = false;
final Long fallBackReplayId
= ((SalesforceEndpoint) consumer.getEndpoint()).getConfiguration().getFallBackReplayId();
LOG.warn(error);
LOG.warn("Falling back to replayId {} for channel {}", fallBackReplayId, channelName);
REPLAY_EXTENSION.addChannelReplayId(channelName, fallBackReplayId);
subscribe(topicName, consumer, true);
}
if (abort && client != null) {
consumer.handleException(msg, new SalesforceException(msg, failure));
}
} else {
// remember subscription
LOG.info("Subscribed to channel {}", subscribedChannelName);
listenerMap.put(consumer, listener);
// reset backoff interval
handshakeBackoff.set(0);
}
// remove this subscription listener
if (client != null) {
client.getChannel(META_SUBSCRIBE).removeListener(this);
} else {
LOG.warn("Trying to handle a subscription message but the client is already destroyed");
}
}
}
};
client.getChannel(META_SUBSCRIBE).addListener(subscriptionListener);
// subscribe asynchronously
final ClientSessionChannel clientChannel = client.getChannel(channelName);
clientChannel.subscribe(listener);
}
private static boolean isTemporaryError(Message message) {
String failureReason = getFailureReason(message);
return failureReason != null && failureReason.startsWith(SERVER_TOO_BUSY_ERROR);
}
private static String getFailureReason(Message message) {
String failureReason = null;
if (message.getExt() != null) {
@SuppressWarnings("unchecked")
Map<String, Object> sfdcFields = (Map<String, Object>) message.getExt().get(SFDC_FIELD);
if (sfdcFields != null) {
failureReason = (String) sfdcFields.get(FAILURE_REASON_FIELD);
}
}
return failureReason;
}
void setupReplay(final SalesforceEndpoint endpoint) {
final String topicName = endpoint.getTopicName();
final Optional<Long> replayId = determineReplayIdFor(endpoint, topicName);
if (replayId.isPresent()) {
final String channelName = getChannelName(topicName);
final Long replayIdValue = replayId.get();
LOG.info("Set Replay extension to replay from `{}` for channel `{}`", replayIdValue, channelName);
REPLAY_EXTENSION.addChannelReplayId(channelName, replayIdValue);
}
}
private void attemptLoginUntilSuccessful() {
if (!loggingIn.compareAndSet(false, true)) {
LOG.debug("already logging in");
return;
}
long backoff = 0;
try {
for (;;) {
try {
if (isStoppingOrStopped()) {
return;
}
session.login(session.getAccessToken());
break;
} catch (SalesforceException e) {
backoff = backoff + backoffIncrement;
if (backoff > maxBackoff) {
backoff = maxBackoff;
}
LOG.warn(String.format("Salesforce login failed. Pausing for %d seconds", backoff), e);
try {
Thread.sleep(backoff);
} catch (InterruptedException ex) {
throw new RuntimeException("Failed to login.", ex);
}
}
}
} finally {
loggingIn.set(false);
}
}
static Optional<Long> determineReplayIdFor(final SalesforceEndpoint endpoint, final String topicName) {
final String channelName = getChannelName(topicName);
final Long replayId = endpoint.getReplayId();
final SalesforceComponent component = endpoint.getComponent();
final SalesforceEndpointConfig endpointConfiguration = endpoint.getConfiguration();
final Map<String, Long> endpointInitialReplayIdMap = endpointConfiguration.getInitialReplayIdMap();
final Long endpointReplayId
= endpointInitialReplayIdMap.getOrDefault(topicName, endpointInitialReplayIdMap.get(channelName));
final Long endpointDefaultReplayId = endpointConfiguration.getDefaultReplayId();
final SalesforceEndpointConfig componentConfiguration = component.getConfig();
final Map<String, Long> componentInitialReplayIdMap = componentConfiguration.getInitialReplayIdMap();
final Long componentReplayId
= componentInitialReplayIdMap.getOrDefault(topicName, componentInitialReplayIdMap.get(channelName));
final Long componentDefaultReplayId = componentConfiguration.getDefaultReplayId();
// endpoint values take priority over component values, and explicit
// per-topic values take priority over the default replay id values
return Stream.of(replayId, endpointReplayId, componentReplayId, endpointDefaultReplayId, componentDefaultReplayId)
.filter(Objects::nonNull).findFirst();
}
static String getChannelName(final String topicName) {
final StringBuilder channelName = new StringBuilder();
if (topicName.charAt(0) != '/') {
channelName.append('/');
}
if (topicName.indexOf('/', 1) > 0) {
channelName.append(topicName);
} else {
channelName.append("topic/");
channelName.append(topicName);
}
return channelName.toString();
}
public void unsubscribe(String topicName, SalesforceConsumer consumer) {
// channel name
final String channelName = getChannelName(topicName);
// unsubscribe from channel
final ClientSessionChannel.MessageListener listener = listenerMap.remove(consumer);
if (listener != null) {
LOG.debug("Unsubscribing from channel {}...", channelName);
final ClientSessionChannel clientChannel = client.getChannel(channelName);
// if there are other listeners on this channel, an unsubscribe message will not be sent,
// so we're not going to listen for and expect an unsub response. Just unsub and move on.
clientChannel.unsubscribe(listener);
}
}
static String getEndpointUrl(final SalesforceComponent component) {
// In version 36.0 replay is only enabled on a separate endpoint
if (Double.parseDouble(component.getConfig().getApiVersion()) == 36.0) {
boolean replayOptionsPresent = component.getConfig().getDefaultReplayId() != null
|| !component.getConfig().getInitialReplayIdMap().isEmpty();
if (replayOptionsPresent) {
return component.getSession().getInstanceUrl() + "/cometd/replay/" + component.getConfig().getApiVersion();
}
}
return component.getSession().getInstanceUrl() + "/cometd/" + component.getConfig().getApiVersion();
}
}
|
(chores) camel-salesforce: fix a potential NPE (#6466)
|
components/camel-salesforce/camel-salesforce-component/src/main/java/org/apache/camel/component/salesforce/internal/streaming/SubscriptionHelper.java
|
(chores) camel-salesforce: fix a potential NPE (#6466)
|
<ide><path>omponents/camel-salesforce/camel-salesforce-component/src/main/java/org/apache/camel/component/salesforce/internal/streaming/SubscriptionHelper.java
<ide> timeout);
<ide> client.abort();
<ide> }
<del> }
<del>
<del> client.handshake();
<del> final long waitMs = MILLISECONDS.convert(CONNECT_TIMEOUT, SECONDS);
<del> client.waitFor(waitMs, BayeuxClient.State.CONNECTED);
<del>
<add>
<add> client.handshake();
<add> final long waitMs = MILLISECONDS.convert(CONNECT_TIMEOUT, SECONDS);
<add> client.waitFor(waitMs, BayeuxClient.State.CONNECTED);
<add> }
<ide> } catch (Exception e) {
<ide> LOG.error("Error handshaking: " + e.getMessage(), e);
<ide> lastError = e;
|
|
JavaScript
|
apache-2.0
|
cf5a19796d0210d2ff75f21ffbb586ccbb3fea5b
| 0 |
edewit/fh-js-sdk,feedhenry/fh-js-sdk,feedhenry/fh-js-sdk,edewit/fh-js-sdk
|
var JSON = require("JSON");
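// Normalize an XHR failure into an error payload and status details,
// then invoke the fail callback when one is provided.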
module.exports = function(fail, req, resStatus, error){
var errraw;
var statusCode = 0;
if(req){
try{
statusCode = req.status;
var res = JSON.parse(req.responseText);
errraw = res.error || res.msg || res;
if (errraw instanceof Array) {
errraw = errraw.join('\n');
}
} catch(e){
errraw = req.responseText;
}
}
if(fail){
fail(errraw, {
status: statusCode,
message: resStatus,
error: error
});
}
};
|
src/modules/handleError.js
|
var JSON = require("JSON");
module.exports = function(fail, req, resStatus, error){
var errraw;
var statusCode = 0;
if(req){
try{
statusCode = req.status;
var res = JSON.parse(req.responseText);
errraw = res.error || res.msg;
if (errraw instanceof Array) {
errraw = errraw.join('\n');
}
} catch(e){
errraw = req.responseText;
}
}
if(fail){
fail(errraw, {
status: statusCode,
message: resStatus,
error: error
});
}
};
|
cherry pick evan's fix
|
src/modules/handleError.js
|
cherry pick evan's fix
|
<ide><path>rc/modules/handleError.js
<ide> try{
<ide> statusCode = req.status;
<ide> var res = JSON.parse(req.responseText);
<del> errraw = res.error || res.msg;
<add> errraw = res.error || res.msg || res;
<ide> if (errraw instanceof Array) {
<ide> errraw = errraw.join('\n');
<ide> }
|
|
Java
|
lgpl-2.1
|
f2fe1a63a77162b92f9ff94e89483c08619ac094
| 0 |
ThrawnCA/fb-contrib,ThrawnCA/fb-contrib,ThrawnCA/fb-contrib,rblasch/fb-contrib,ThrawnCA/fb-contrib,rblasch/fb-contrib,rblasch/fb-contrib,rblasch/fb-contrib
|
/*
* fb-contrib - Auxiliary detectors for Java programs
* Copyright (C) 2005-2016 Dave Brosius
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package com.mebigfatguy.fbcontrib.detect;
import java.util.ArrayList;
import java.util.BitSet;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.apache.bcel.classfile.Code;
import org.apache.bcel.classfile.ConstantFieldref;
import org.apache.bcel.classfile.ConstantNameAndType;
import com.mebigfatguy.fbcontrib.utils.OpcodeUtils;
import com.mebigfatguy.fbcontrib.utils.RegisterUtils;
import com.mebigfatguy.fbcontrib.utils.UnmodifiableSet;
import com.mebigfatguy.fbcontrib.utils.Values;
import edu.umd.cs.findbugs.BugInstance;
import edu.umd.cs.findbugs.BugReporter;
import edu.umd.cs.findbugs.BytecodeScanningDetector;
import edu.umd.cs.findbugs.OpcodeStack;
import edu.umd.cs.findbugs.ba.ClassContext;
import edu.umd.cs.findbugs.ba.XField;
/**
* Looks for use of iterators on synchronized collections built from the Collections class. As the collection in question was built thru
* Collections.synchronizedXXX, an assumption is made that this collection must be multithreaded safe. However, iterator access is used, which is explicitly
* unsafe. When iterators are to be used, synchronization should be done manually.
*/
public class SyncCollectionIterators extends BytecodeScanningDetector {
private final BugReporter bugReporter;
private static final Set<String> synchCollectionNames = UnmodifiableSet.create("synchronizedSet", "synchronizedMap", "synchronizedList",
"synchronizedSortedSet", "synchronizedSortedMap");
private static final Set<String> mapToSetMethods = UnmodifiableSet.create("keySet", "entrySet", "values");
enum State {
SEEN_NOTHING, SEEN_SYNC, SEEN_LOAD
}
private State state;
private Set<String> memberCollections;
private BitSet localCollections;
private List<Object> monitorObjects;
private OpcodeStack stack;
private Object collectionInfo = null;
/**
* constructs a SCI detector given the reporter to report bugs on
*
* @param bugReporter
* the sync of bug reports
*/
public SyncCollectionIterators(final BugReporter bugReporter) {
this.bugReporter = bugReporter;
}
@Override
public void visitClassContext(final ClassContext classContext) {
try {
memberCollections = new HashSet<>();
localCollections = new BitSet();
monitorObjects = new ArrayList<>();
stack = new OpcodeStack();
super.visitClassContext(classContext);
} finally {
memberCollections = null;
localCollections = null;
monitorObjects = null;
stack = null;
}
}
@Override
public void visitCode(final Code obj) {
if (obj.getCode() != null) {
state = State.SEEN_NOTHING;
localCollections.clear();
monitorObjects.clear();
stack.resetForMethodEntry(this);
super.visitCode(obj);
}
}
@Override
public void sawOpcode(final int seen) {
try {
stack.precomputation(this);
switch (state) {
case SEEN_NOTHING:
sawOpcodeAfterNothing(seen);
break;
case SEEN_SYNC:
sawOpcodeAfterSync(seen);
break;
case SEEN_LOAD:
sawOpcodeAfterLoad(seen);
break;
}
if (seen == MONITORENTER) {
if (stack.getStackDepth() > 0) {
OpcodeStack.Item item = stack.getStackItem(0);
int reg = item.getRegisterNumber();
if (reg >= 0) {
monitorObjects.add(Integer.valueOf(reg));
} else {
XField field = item.getXField();
if (field != null) {
monitorObjects.add(field.getName());
}
}
}
} else if ((seen == MONITOREXIT) && !monitorObjects.isEmpty()) {
monitorObjects.remove(monitorObjects.size() - 1);
}
} finally {
stack.sawOpcode(this, seen);
}
}
private void sawOpcodeAfterNothing(int seen) {
if ((seen == INVOKESTATIC) && "java/util/Collections".equals(getClassConstantOperand())) {
if (synchCollectionNames.contains(getNameConstantOperand())) {
state = State.SEEN_SYNC;
}
} else if (OpcodeUtils.isALoad(seen)) {
int reg = RegisterUtils.getALoadReg(this, seen);
if (localCollections.get(reg)) {
collectionInfo = Integer.valueOf(reg);
state = State.SEEN_LOAD;
}
} else if (seen == GETFIELD) {
ConstantFieldref ref = (ConstantFieldref) getConstantRefOperand();
ConstantNameAndType nandt = (ConstantNameAndType) getConstantPool().getConstant(ref.getNameAndTypeIndex());
String fieldName = nandt.getName(getConstantPool());
if (memberCollections.contains(fieldName)) {
collectionInfo = fieldName;
state = State.SEEN_LOAD;
}
}
}
private void sawOpcodeAfterSync(int seen) {
if (OpcodeUtils.isAStore(seen)) {
localCollections.set(RegisterUtils.getAStoreReg(this, seen));
} else if (seen == PUTFIELD) {
ConstantFieldref ref = (ConstantFieldref) getConstantRefOperand();
ConstantNameAndType nandt = (ConstantNameAndType) getConstantPool().getConstant(ref.getNameAndTypeIndex());
memberCollections.add(nandt.getName(getConstantPool()));
}
state = State.SEEN_NOTHING;
}
private void sawOpcodeAfterLoad(int seen) {
if (seen != INVOKEINTERFACE) {
state = State.SEEN_NOTHING;
return;
}
String calledClass = getClassConstantOperand();
if ((Values.SLASHED_JAVA_UTIL_MAP.equals(calledClass) || ("java/util/SortedMap".equals(calledClass)))) {
if (mapToSetMethods.contains(getNameConstantOperand())) {
state = State.SEEN_LOAD;
} else {
state = State.SEEN_NOTHING;
}
} else if (calledClass.startsWith("java/util/")) {
if ("iterator".equals(getNameConstantOperand())) {
state = State.SEEN_NOTHING;
if (monitorObjects.isEmpty() || !syncIsMap(monitorObjects.get(monitorObjects.size() - 1), collectionInfo)) {
bugReporter.reportBug(
new BugInstance(this, "SCI_SYNCHRONIZED_COLLECTION_ITERATORS", NORMAL_PRIORITY).addClass(this).addMethod(this).addSourceLine(this));
}
}
/* don't change state at this point */
} else {
state = State.SEEN_NOTHING;
}
}
private static boolean syncIsMap(Object syncObject, Object colInfo) {
if ((syncObject != null) && (colInfo != null) && syncObject.getClass().equals(colInfo.getClass())) {
return syncObject.equals(colInfo);
}
// Something went wrong... don't report
return true;
}
}
|
src/main/java/com/mebigfatguy/fbcontrib/detect/SyncCollectionIterators.java
|
/*
* fb-contrib - Auxiliary detectors for Java programs
* Copyright (C) 2005-2016 Dave Brosius
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package com.mebigfatguy.fbcontrib.detect;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.apache.bcel.classfile.Code;
import org.apache.bcel.classfile.ConstantFieldref;
import org.apache.bcel.classfile.ConstantNameAndType;
import com.mebigfatguy.fbcontrib.utils.UnmodifiableSet;
import com.mebigfatguy.fbcontrib.utils.Values;
import edu.umd.cs.findbugs.BugInstance;
import edu.umd.cs.findbugs.BugReporter;
import edu.umd.cs.findbugs.BytecodeScanningDetector;
import edu.umd.cs.findbugs.OpcodeStack;
import edu.umd.cs.findbugs.ba.ClassContext;
import edu.umd.cs.findbugs.ba.XField;
/**
* Looks for use of iterators on synchronized collections built from the Collections class. As the collection in question was built thru
* Collections.synchronizedXXX, an assumption is made that this collection must be multithreaded safe. However, iterator access is used, which is explicitly
* unsafe. When iterators are to be used, synchronization should be done manually.
*/
public class SyncCollectionIterators extends BytecodeScanningDetector {
private final BugReporter bugReporter;
private static final Set<String> synchCollectionNames = UnmodifiableSet.create("synchronizedSet", "synchronizedMap", "synchronizedList",
"synchronizedSortedSet", "synchronizedSortedMap");
private static final Set<String> mapToSetMethods = UnmodifiableSet.create("keySet", "entrySet", "values");
enum State {
SEEN_NOTHING, SEEN_SYNC, SEEN_LOAD
}
private State state;
private Set<String> memberCollections;
private Set<Integer> localCollections;
private List<Object> monitorObjects;
private OpcodeStack stack;
private Object collectionInfo = null;
/**
* constructs a SCI detector given the reporter to report bugs on
*
* @param bugReporter
* the sink of bug reports
*/
public SyncCollectionIterators(final BugReporter bugReporter) {
this.bugReporter = bugReporter;
}
@Override
public void visitClassContext(final ClassContext classContext) {
try {
memberCollections = new HashSet<>();
localCollections = new HashSet<>();
monitorObjects = new ArrayList<>();
stack = new OpcodeStack();
super.visitClassContext(classContext);
} finally {
memberCollections = null;
localCollections = null;
monitorObjects = null;
stack = null;
}
}
@Override
public void visitCode(final Code obj) {
if (obj.getCode() != null) {
state = State.SEEN_NOTHING;
localCollections.clear();
monitorObjects.clear();
stack.resetForMethodEntry(this);
super.visitCode(obj);
}
}
@Override
public void sawOpcode(final int seen) {
try {
stack.precomputation(this);
switch (state) {
case SEEN_NOTHING:
sawOpcodeAfterNothing(seen);
break;
case SEEN_SYNC:
sawOpcodeAfterSync(seen);
break;
case SEEN_LOAD:
sawOpcodeAfterLoad(seen);
break;
}
if (seen == MONITORENTER) {
if (stack.getStackDepth() > 0) {
OpcodeStack.Item item = stack.getStackItem(0);
int reg = item.getRegisterNumber();
if (reg >= 0) {
monitorObjects.add(Integer.valueOf(reg));
} else {
XField field = item.getXField();
if (field != null) {
monitorObjects.add(field.getName());
}
}
}
} else if ((seen == MONITOREXIT) && !monitorObjects.isEmpty()) {
monitorObjects.remove(monitorObjects.size() - 1);
}
} finally {
stack.sawOpcode(this, seen);
}
}
private void sawOpcodeAfterNothing(int seen) {
if ((seen == INVOKESTATIC) && "java/util/Collections".equals(getClassConstantOperand())) {
if (synchCollectionNames.contains(getNameConstantOperand())) {
state = State.SEEN_SYNC;
}
} else if (seen == ALOAD) {
Integer reg = Integer.valueOf(getRegisterOperand());
if (localCollections.contains(reg)) {
collectionInfo = reg;
state = State.SEEN_LOAD;
}
} else if ((seen >= ALOAD_0) && (seen <= ALOAD_3)) {
Integer reg = Integer.valueOf(seen - ALOAD_0);
if (localCollections.contains(reg)) {
collectionInfo = reg;
state = State.SEEN_LOAD;
}
} else if (seen == GETFIELD) {
ConstantFieldref ref = (ConstantFieldref) getConstantRefOperand();
ConstantNameAndType nandt = (ConstantNameAndType) getConstantPool().getConstant(ref.getNameAndTypeIndex());
String fieldName = nandt.getName(getConstantPool());
if (memberCollections.contains(fieldName)) {
collectionInfo = fieldName;
state = State.SEEN_LOAD;
}
}
}
private void sawOpcodeAfterSync(int seen) {
if (seen == ASTORE) {
localCollections.add(getRegisterOperand());
} else if ((seen >= ASTORE_0) && (seen <= ASTORE_3)) {
localCollections.add(seen - ASTORE_0);
} else if (seen == PUTFIELD) {
ConstantFieldref ref = (ConstantFieldref) getConstantRefOperand();
ConstantNameAndType nandt = (ConstantNameAndType) getConstantPool().getConstant(ref.getNameAndTypeIndex());
memberCollections.add(nandt.getName(getConstantPool()));
}
state = State.SEEN_NOTHING;
}
private void sawOpcodeAfterLoad(int seen) {
if (seen != INVOKEINTERFACE) {
state = State.SEEN_NOTHING;
return;
}
String calledClass = getClassConstantOperand();
if ((Values.SLASHED_JAVA_UTIL_MAP.equals(calledClass) || ("java/util/SortedMap".equals(calledClass)))) {
if (mapToSetMethods.contains(getNameConstantOperand())) {
state = State.SEEN_LOAD;
} else {
state = State.SEEN_NOTHING;
}
} else if (calledClass.startsWith("java/util/")) {
if ("iterator".equals(getNameConstantOperand())) {
state = State.SEEN_NOTHING;
if (monitorObjects.isEmpty() || !syncIsMap(monitorObjects.get(monitorObjects.size() - 1), collectionInfo)) {
bugReporter.reportBug(
new BugInstance(this, "SCI_SYNCHRONIZED_COLLECTION_ITERATORS", NORMAL_PRIORITY).addClass(this).addMethod(this).addSourceLine(this));
}
}
/* don't change state at this point */
} else {
state = State.SEEN_NOTHING;
}
}
private static boolean syncIsMap(Object syncObject, Object colInfo) {
if ((syncObject != null) && (colInfo != null) && syncObject.getClass().equals(colInfo.getClass())) {
return syncObject.equals(colInfo);
}
// Something went wrong... don't report
return true;
}
}
|
use BitSet for Collection<Integer> for small collections
|
src/main/java/com/mebigfatguy/fbcontrib/detect/SyncCollectionIterators.java
|
use BitSet for Collection<Integer> for small collections
|
<ide><path>rc/main/java/com/mebigfatguy/fbcontrib/detect/SyncCollectionIterators.java
<ide> package com.mebigfatguy.fbcontrib.detect;
<ide>
<ide> import java.util.ArrayList;
<add>import java.util.BitSet;
<ide> import java.util.HashSet;
<ide> import java.util.List;
<ide> import java.util.Set;
<ide> import org.apache.bcel.classfile.ConstantFieldref;
<ide> import org.apache.bcel.classfile.ConstantNameAndType;
<ide>
<add>import com.mebigfatguy.fbcontrib.utils.OpcodeUtils;
<add>import com.mebigfatguy.fbcontrib.utils.RegisterUtils;
<ide> import com.mebigfatguy.fbcontrib.utils.UnmodifiableSet;
<ide> import com.mebigfatguy.fbcontrib.utils.Values;
<ide>
<ide>
<ide> private State state;
<ide> private Set<String> memberCollections;
<del> private Set<Integer> localCollections;
<add> private BitSet localCollections;
<ide> private List<Object> monitorObjects;
<ide> private OpcodeStack stack;
<ide> private Object collectionInfo = null;
<ide> public void visitClassContext(final ClassContext classContext) {
<ide> try {
<ide> memberCollections = new HashSet<>();
<del> localCollections = new HashSet<>();
<add> localCollections = new BitSet();
<ide> monitorObjects = new ArrayList<>();
<ide> stack = new OpcodeStack();
<ide> super.visitClassContext(classContext);
<ide> if (synchCollectionNames.contains(getNameConstantOperand())) {
<ide> state = State.SEEN_SYNC;
<ide> }
<del> } else if (seen == ALOAD) {
<del> Integer reg = Integer.valueOf(getRegisterOperand());
<del> if (localCollections.contains(reg)) {
<del> collectionInfo = reg;
<del> state = State.SEEN_LOAD;
<del> }
<del> } else if ((seen >= ALOAD_0) && (seen <= ALOAD_3)) {
<del> Integer reg = Integer.valueOf(seen - ALOAD_0);
<del> if (localCollections.contains(reg)) {
<del> collectionInfo = reg;
<add> } else if (OpcodeUtils.isALoad(seen)) {
<add> int reg = RegisterUtils.getALoadReg(this, seen);
<add> if (localCollections.get(reg)) {
<add> collectionInfo = Integer.valueOf(reg);
<ide> state = State.SEEN_LOAD;
<ide> }
<ide> } else if (seen == GETFIELD) {
<ide> }
<ide>
<ide> private void sawOpcodeAfterSync(int seen) {
<del> if (seen == ASTORE) {
<del> localCollections.add(getRegisterOperand());
<del> } else if ((seen >= ASTORE_0) && (seen <= ASTORE_3)) {
<del> localCollections.add(seen - ASTORE_0);
<add> if (OpcodeUtils.isAStore(seen)) {
<add> localCollections.set(RegisterUtils.getAStoreReg(this, seen));
<ide> } else if (seen == PUTFIELD) {
<ide> ConstantFieldref ref = (ConstantFieldref) getConstantRefOperand();
<ide> ConstantNameAndType nandt = (ConstantNameAndType) getConstantPool().getConstant(ref.getNameAndTypeIndex());
|
|
Java
|
bsd-3-clause
|
6dcb10add6abc0067a739da66d0f26de0c1f97cc
| 0 |
farmboy0/k-9,cliniome/pki,tsunli/k-9,dgger/k-9,KitAway/k-9,suzp1984/k-9,Eagles2F/k-9,dgger/k-9,crr0004/k-9,sonork/k-9,thuanpq/k-9,k9mail/k-9,sonork/k-9,ndew623/k-9,rishabhbitsg/k-9,nilsbraden/k-9,denim2x/k-9,indus1/k-9,github201407/k-9,rishabhbitsg/k-9,huhu/k-9,moparisthebest/k-9,philipwhiuk/k-9,sanderbaas/k-9,gilbertw1/k-9,gilbertw1/k-9,vasyl-khomko/k-9,crr0004/k-9,dhootha/k-9,cketti/k-9,k9mail/k-9,XiveZ/k-9,KitAway/k-9,huhu/k-9,gilbertw1/k-9,bashrc/k-9,mawiegand/k-9,cliniome/pki,cooperpellaton/k-9,tonytamsf/k-9,rollbrettler/k-9,sanderbaas/k-9,G00fY2/k-9_material_design,deepworks/k-9,cooperpellaton/k-9,rollbrettler/k-9,konfer/k-9,sebkur/k-9,bashrc/k-9,dpereira411/k-9,439teamwork/k-9,torte71/k-9,gaionim/k-9,msdgwzhy6/k-9,439teamwork/k-9,gnebsy/k-9,crr0004/k-9,imaeses/k-9,rollbrettler/k-9,leixinstar/k-9,rtreffer/openpgp-k-9,rtreffer/openpgp-k-9,vasyl-khomko/k-9,GuillaumeSmaha/k-9,XiveZ/k-9,deepworks/k-9,sedrubal/k-9,vasyl-khomko/k-9,tonytamsf/k-9,CodingRmy/k-9,konfer/k-9,thuanpq/k-9,dgger/k-9,torte71/k-9,moparisthebest/k-9,XiveZ/k-9,Valodim/k-9,github201407/k-9,ndew623/k-9,sanderbaas/k-9,dhootha/k-9,herpiko/k-9,Eagles2F/k-9,farmboy0/k-9,philipwhiuk/k-9,bashrc/k-9,G00fY2/k-9_material_design,cketti/k-9,farmboy0/k-9,msdgwzhy6/k-9,ndew623/k-9,cliniome/pki,WenduanMou1/k-9,philipwhiuk/q-mail,huhu/k-9,GuillaumeSmaha/k-9,cooperpellaton/k-9,Eagles2F/k-9,gnebsy/k-9,mawiegand/k-9,vt0r/k-9,suzp1984/k-9,tsunli/k-9,jberkel/k-9,github201407/k-9,philipwhiuk/q-mail,jca02266/k-9,suzp1984/k-9,KitAway/k-9,icedman21/k-9,439teamwork/k-9,WenduanMou1/k-9,gaionim/k-9,imaeses/k-9,nilsbraden/k-9,gaionim/k-9,philipwhiuk/q-mail,jca02266/k-9,msdgwzhy6/k-9,roscrazy/k-9,cketti/k-9,moparisthebest/k-9,icedman21/k-9,vatsalsura/k-9,imaeses/k-9,sebkur/k-9,dpereira411/k-9,dpereira411/k-9,gnebsy/k-9,leixinstar/k-9,denim2x/k-9,jca02266/k-9,denim2x/k-9,nilsbraden/k-9,GuillaumeSmaha/k-9,deepworks/k-9,icedman21/k-9,sedrubal/k-9,tonytamsf/k-9,vt0r/k-9,sebkur/k-9,herpiko/k-9,indus1/k-9,roscrazy/k-9,herpiko/k-9,WenduanMou1/k-9,jberkel/k-9,vatsalsura/k-9,leixinstar/k-9,dhootha/k-9,cketti/k-9,k9mail/k-9,torte71/k-9,sonork/k-9,CodingRmy/k-9,tsunli/k-9,mawiegand/k-9,thuanpq/k-9,konfer/k-9
|
package com.fsck.k9.mail.store;
import android.app.Application;
import android.content.ContentValues;
import android.content.SharedPreferences;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.net.Uri;
import android.text.util.Regex;
import android.util.Log;
import com.fsck.k9.K9;
import com.fsck.k9.Preferences;
import com.fsck.k9.Utility;
import com.fsck.k9.codec.binary.Base64OutputStream;
import com.fsck.k9.mail.*;
import com.fsck.k9.mail.Message.RecipientType;
import com.fsck.k9.mail.internet.*;
import com.fsck.k9.provider.AttachmentProvider;
import org.apache.commons.io.IOUtils;
import java.io.*;
import java.net.URI;
import java.net.URLEncoder;
import java.util.*;
import java.util.regex.Matcher;
/**
* <pre>
* Implements a SQLite database backed local store for Messages.
* </pre>
*/
public class LocalStore extends Store implements Serializable
{
private static final int DB_VERSION = 32;
private static final Flag[] PERMANENT_FLAGS = { Flag.DELETED, Flag.X_DESTROYED, Flag.SEEN };
private String mPath;
private SQLiteDatabase mDb;
private File mAttachmentsDir;
private Application mApplication;
private String uUid = null;
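// Only this small set of headers is persisted to the local headers table when a message is saved.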
private static Set<String> HEADERS_TO_SAVE = new HashSet<String>();
static
{
HEADERS_TO_SAVE.add(K9.K9MAIL_IDENTITY);
HEADERS_TO_SAVE.add("In-Reply-To");
HEADERS_TO_SAVE.add("References");
HEADERS_TO_SAVE.add("X-User-Agent");
}
/*
* a String containing the columns getMessages expects to work with
* in the correct order.
*/
static private String GET_MESSAGES_COLS =
"subject, sender_list, date, uid, flags, id, to_list, cc_list, "
+ "bcc_list, reply_to_list, attachment_count, internal_date, message_id, folder_id ";
/**
* @param uri local://localhost/path/to/database/uuid.db
*/
public LocalStore(String _uri, Application application) throws MessagingException
{
mApplication = application;
URI uri = null;
try
{
uri = new URI(_uri);
}
catch (Exception e)
{
throw new MessagingException("Invalid uri for LocalStore");
}
if (!uri.getScheme().equals("local"))
{
throw new MessagingException("Invalid scheme");
}
mPath = uri.getPath();
// We need to associate the localstore with the account. Since we don't have the account
// handy here, we'll take the filename from the DB and use the basename of the filename
// Folders probably should have references to their containing accounts
File dbFile = new File(mPath);
String[] tokens = dbFile.getName().split("\\.");
uUid = tokens[0];
File parentDir = new File(mPath).getParentFile();
if (!parentDir.exists())
{
parentDir.mkdirs();
}
mAttachmentsDir = new File(mPath + "_att");
if (!mAttachmentsDir.exists())
{
mAttachmentsDir.mkdirs();
}
mDb = SQLiteDatabase.openOrCreateDatabase(mPath, null);
if (mDb.getVersion() != DB_VERSION)
{
doDbUpgrade(mDb, application);
}
}
private void doDbUpgrade(SQLiteDatabase mDb, Application application)
{
Log.i(K9.LOG_TAG, String.format("Upgrading database from version %d to version %d",
mDb.getVersion(), DB_VERSION));
AttachmentProvider.clear(application);
// schema version 29 was when we moved to incremental updates
// in the case of a new db or a < v29 db, we blow away and start from scratch
if (mDb.getVersion() < 29)
{
mDb.execSQL("DROP TABLE IF EXISTS folders");
mDb.execSQL("CREATE TABLE folders (id INTEGER PRIMARY KEY, name TEXT, "
+ "last_updated INTEGER, unread_count INTEGER, visible_limit INTEGER, status TEXT, push_state TEXT, last_pushed INTEGER)");
mDb.execSQL("CREATE INDEX IF NOT EXISTS folder_name ON folders (name)");
mDb.execSQL("DROP TABLE IF EXISTS messages");
mDb.execSQL("CREATE TABLE messages (id INTEGER PRIMARY KEY, deleted INTEGER default 0, folder_id INTEGER, uid TEXT, subject TEXT, "
+ "date INTEGER, flags TEXT, sender_list TEXT, to_list TEXT, cc_list TEXT, bcc_list TEXT, reply_to_list TEXT, "
+ "html_content TEXT, text_content TEXT, attachment_count INTEGER, internal_date INTEGER, message_id TEXT)");
mDb.execSQL("DROP TABLE IF EXISTS headers");
mDb.execSQL("CREATE TABLE headers (id INTEGER PRIMARY KEY, message_id INTEGER, name TEXT, value TEXT)");
mDb.execSQL("CREATE INDEX IF NOT EXISTS header_folder ON headers (message_id)");
mDb.execSQL("CREATE INDEX IF NOT EXISTS msg_uid ON messages (uid, folder_id)");
mDb.execSQL("DROP INDEX IF EXISTS msg_folder_id");
mDb.execSQL("DROP INDEX IF EXISTS msg_folder_id_date");
mDb.execSQL("CREATE INDEX IF NOT EXISTS msg_folder_id_deleted_date ON messages (folder_id,deleted,internal_date)");
mDb.execSQL("DROP TABLE IF EXISTS attachments");
mDb.execSQL("CREATE TABLE attachments (id INTEGER PRIMARY KEY, message_id INTEGER,"
+ "store_data TEXT, content_uri TEXT, size INTEGER, name TEXT,"
+ "mime_type TEXT)");
mDb.execSQL("DROP TABLE IF EXISTS pending_commands");
mDb.execSQL("CREATE TABLE pending_commands " +
"(id INTEGER PRIMARY KEY, command TEXT, arguments TEXT)");
mDb.execSQL("DROP TRIGGER IF EXISTS delete_folder");
mDb.execSQL("CREATE TRIGGER delete_folder BEFORE DELETE ON folders BEGIN DELETE FROM messages WHERE old.id = folder_id; END;");
mDb.execSQL("DROP TRIGGER IF EXISTS delete_message");
mDb.execSQL("CREATE TRIGGER delete_message BEFORE DELETE ON messages BEGIN DELETE FROM attachments WHERE old.id = message_id; "
+ "DELETE FROM headers where old.id = message_id; END;");
}
else
{ // in the case that we're starting out at 29 or newer, run all the needed updates
if (mDb.getVersion() < 30)
{
mDb.execSQL("ALTER TABLE messages ADD deleted INTEGER default 0");
}
if (mDb.getVersion() < 31)
{
mDb.execSQL("DROP INDEX IF EXISTS msg_folder_id_date");
mDb.execSQL("CREATE INDEX IF NOT EXISTS msg_folder_id_deleted_date ON messages (folder_id,deleted,internal_date)");
}
if (mDb.getVersion() < 32)
{
mDb.execSQL("UPDATE messages SET deleted = 1 WHERE flags LIKE '%DELETED%'");
}
}
mDb.setVersion(DB_VERSION);
if (mDb.getVersion() != DB_VERSION)
{
throw new Error("Database upgrade failed!");
}
try
{
pruneCachedAttachments(true);
}
catch (Exception me)
{
Log.e(K9.LOG_TAG, "Exception while force pruning attachments during DB update", me);
}
}
public long getSize()
{
long attachmentLength = 0;
File[] files = mAttachmentsDir.listFiles();
for (File file : files)
{
if (file.exists())
{
attachmentLength += file.length();
}
}
File dbFile = new File(mPath);
return dbFile.length() + attachmentLength;
}
public void compact() throws MessagingException
{
Log.i(K9.LOG_TAG, "Before prune size = " + getSize());
pruneCachedAttachments();
Log.i(K9.LOG_TAG, "After prune / before compaction size = " + getSize());
mDb.execSQL("VACUUM");
Log.i(K9.LOG_TAG, "After compaction size = " + getSize());
}
public void clear() throws MessagingException
{
Log.i(K9.LOG_TAG, "Before prune size = " + getSize());
pruneCachedAttachments(true);
Log.i(K9.LOG_TAG, "After prune / before compaction size = " + getSize());
Log.i(K9.LOG_TAG, "Before clear folder count = " + getFolderCount());
Log.i(K9.LOG_TAG, "Before clear message count = " + getMessageCount());
Log.i(K9.LOG_TAG, "After prune / before clear size = " + getSize());
// don't delete messages that are Local, since there is no copy on the server.
// Don't delete deleted messages. They are essentially placeholders for UIDs of messages that have
// been deleted locally. They take up insignificant space
mDb.execSQL("DELETE FROM messages WHERE deleted = 0 and uid not like 'Local%'");
compact();
Log.i(K9.LOG_TAG, "After clear message count = " + getMessageCount());
Log.i(K9.LOG_TAG, "After clear size = " + getSize());
}
public int getMessageCount() throws MessagingException
{
Cursor cursor = null;
try
{
cursor = mDb.rawQuery("SELECT COUNT(*) FROM messages", null);
cursor.moveToFirst();
int messageCount = cursor.getInt(0);
return messageCount;
}
finally
{
if (cursor != null)
{
cursor.close();
}
}
}
public int getFolderCount() throws MessagingException
{
Cursor cursor = null;
try
{
cursor = mDb.rawQuery("SELECT COUNT(*) FROM folders", null);
cursor.moveToFirst();
int messageCount = cursor.getInt(0);
return messageCount;
}
finally
{
if (cursor != null)
{
cursor.close();
}
}
}
@Override
public LocalFolder getFolder(String name) throws MessagingException
{
return new LocalFolder(name);
}
// TODO this takes about 260-300ms, seems slow.
@Override
public LocalFolder[] getPersonalNamespaces() throws MessagingException
{
ArrayList<LocalFolder> folders = new ArrayList<LocalFolder>();
Cursor cursor = null;
try
{
cursor = mDb.rawQuery("SELECT id, name, unread_count, visible_limit, last_updated, status, push_state, last_pushed FROM folders", null);
while (cursor.moveToNext())
{
LocalFolder folder = new LocalFolder(cursor.getString(1));
folder.open(cursor.getInt(0), cursor.getString(1), cursor.getInt(2), cursor.getInt(3), cursor.getLong(4), cursor.getString(5), cursor.getString(6), cursor.getLong(7));
folders.add(folder);
}
}
finally
{
if (cursor != null)
{
cursor.close();
}
}
return folders.toArray(new LocalFolder[] {});
}
@Override
public void checkSettings() throws MessagingException
{
}
/**
* Delete the entire Store and its backing database.
*/
public void delete()
{
try
{
mDb.close();
}
catch (Exception e)
{
}
try
{
File[] attachments = mAttachmentsDir.listFiles();
for (File attachment : attachments)
{
if (attachment.exists())
{
attachment.delete();
}
}
if (mAttachmentsDir.exists())
{
mAttachmentsDir.delete();
}
}
catch (Exception e)
{
}
try
{
new File(mPath).delete();
}
catch (Exception e)
{
}
}
public void pruneCachedAttachments() throws MessagingException
{
pruneCachedAttachments(false);
}
/**
* Deletes all cached attachments for the entire store.
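* @param force when true, clear every attachment's content_uri and delete all cached files, even those whose content cannot be re-downloaded from the server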
*/
public void pruneCachedAttachments(boolean force) throws MessagingException
{
if (force)
{
ContentValues cv = new ContentValues();
cv.putNull("content_uri");
mDb.update("attachments", cv, null, null);
}
File[] files = mAttachmentsDir.listFiles();
for (File file : files)
{
if (file.exists())
{
if (!force)
{
Cursor cursor = null;
try
{
cursor = mDb.query(
"attachments",
new String[] { "store_data" },
"id = ?",
new String[] { file.getName() },
null,
null,
null);
if (cursor.moveToNext())
{
if (cursor.getString(0) == null)
{
Log.d(K9.LOG_TAG, "Attachment " + file.getAbsolutePath() + " has no store data, not deleting");
/*
* If the attachment has no store data it is not recoverable, so
* we won't delete it.
*/
continue;
}
}
}
finally
{
if (cursor != null)
{
cursor.close();
}
}
}
if (!force)
{
try
{
ContentValues cv = new ContentValues();
cv.putNull("content_uri");
mDb.update("attachments", cv, "id = ?", new String[] { file.getName() });
}
catch (Exception e)
{
/*
* If the row has gone away before we got to mark it not-downloaded that's
* okay.
*/
}
}
if (!file.delete())
{
file.deleteOnExit();
}
}
}
}
public void resetVisibleLimits()
{
resetVisibleLimits(K9.DEFAULT_VISIBLE_LIMIT);
}
public void resetVisibleLimits(int visibleLimit)
{
ContentValues cv = new ContentValues();
cv.put("visible_limit", Integer.toString(visibleLimit));
mDb.update("folders", cv, null, null);
}
public ArrayList<PendingCommand> getPendingCommands()
{
Cursor cursor = null;
try
{
cursor = mDb.query("pending_commands",
new String[] { "id", "command", "arguments" },
null,
null,
null,
null,
"id ASC");
ArrayList<PendingCommand> commands = new ArrayList<PendingCommand>();
while (cursor.moveToNext())
{
PendingCommand command = new PendingCommand();
command.mId = cursor.getLong(0);
command.command = cursor.getString(1);
String arguments = cursor.getString(2);
command.arguments = arguments.split(",");
for (int i = 0; i < command.arguments.length; i++)
{
command.arguments[i] = Utility.fastUrlDecode(command.arguments[i]);
}
commands.add(command);
}
return commands;
}
finally
{
if (cursor != null)
{
cursor.close();
}
}
}
public void addPendingCommand(PendingCommand command)
{
try
{
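// URL-encode each argument so embedded commas survive the comma-joined storage format used below.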
for (int i = 0; i < command.arguments.length; i++)
{
command.arguments[i] = URLEncoder.encode(command.arguments[i], "UTF-8");
}
ContentValues cv = new ContentValues();
cv.put("command", command.command);
cv.put("arguments", Utility.combine(command.arguments, ','));
mDb.insert("pending_commands", "command", cv);
}
catch (UnsupportedEncodingException usee)
{
throw new Error("Apparently UTF-8 has been lost to the annals of history.");
}
}
public void removePendingCommand(PendingCommand command)
{
mDb.delete("pending_commands", "id = ?", new String[] { Long.toString(command.mId) });
}
public void removePendingCommands()
{
mDb.delete("pending_commands", null, null);
}
public static class PendingCommand
{
private long mId;
public String command;
public String[] arguments;
@Override
public String toString()
{
StringBuffer sb = new StringBuffer();
sb.append(command);
sb.append(": ");
for (String argument : arguments)
{
sb.append(", ");
sb.append(argument);
//sb.append("\n");
}
return sb.toString();
}
}
public boolean isMoveCapable()
{
return true;
}
public boolean isCopyCapable()
{
return true;
}
public class LocalFolder extends Folder implements Serializable
{
private String mName;
private long mFolderId = -1;
private int mUnreadMessageCount = -1;
private int mVisibleLimit = -1;
private FolderClass displayClass = FolderClass.NO_CLASS;
private FolderClass syncClass = FolderClass.INHERITED;
private FolderClass pushClass = FolderClass.SECOND_CLASS;
private String prefId = null;
private String mPushState = null;
public LocalFolder(String name)
{
this.mName = name;
if (K9.INBOX.equals(getName()))
{
syncClass = FolderClass.FIRST_CLASS;
pushClass = FolderClass.FIRST_CLASS;
}
}
public LocalFolder(long id)
{
this.mFolderId = id;
}
public long getId()
{
return mFolderId;
}
@Override
public void open(OpenMode mode) throws MessagingException
{
if (isOpen())
{
return;
}
Cursor cursor = null;
try
{
String baseQuery =
"SELECT id, name,unread_count, visible_limit, last_updated, status, push_state, last_pushed FROM folders ";
if (mName != null)
{
cursor = mDb.rawQuery(baseQuery + "where folders.name = ?", new String[] { mName });
}
else
{
cursor = mDb.rawQuery(baseQuery + "where folders.id = ?", new String[] { Long.toString(mFolderId) });
}
if (cursor.moveToFirst())
{
int folderId = cursor.getInt(0);
if (folderId > 0)
{
open(folderId, cursor.getString(1), cursor.getInt(2), cursor.getInt(3), cursor.getLong(4), cursor.getString(5), cursor.getString(6), cursor.getLong(7));
}
}
else
{
create(FolderType.HOLDS_MESSAGES);
open(mode);
}
}
finally
{
if (cursor != null)
{
cursor.close();
}
}
}
private void open(int id, String name, int unreadCount, int visibleLimit, long lastChecked, String status, String pushState, long lastPushed) throws MessagingException
{
mFolderId = id;
mName = name;
mUnreadMessageCount = unreadCount;
mVisibleLimit = visibleLimit;
mPushState = pushState;
super.setStatus(status);
// Only want to set the local variable stored in the super class. This class
// does a DB update on setLastChecked
super.setLastChecked(lastChecked);
super.setLastPush(lastPushed);
}
@Override
public boolean isOpen()
{
return mFolderId != -1;
}
@Override
public OpenMode getMode() throws MessagingException
{
return OpenMode.READ_WRITE;
}
@Override
public String getName()
{
return mName;
}
@Override
public boolean exists() throws MessagingException
{
Cursor cursor = null;
try
{
cursor = mDb.rawQuery("SELECT id FROM folders "
+ "where folders.name = ?", new String[] { this
.getName()
});
if (cursor.moveToFirst())
{
int folderId = cursor.getInt(0);
return (folderId > 0) ? true : false;
}
else
{
return false;
}
}
finally
{
if (cursor != null)
{
cursor.close();
}
}
}
@Override
public boolean create(FolderType type) throws MessagingException
{
if (exists())
{
throw new MessagingException("Folder " + mName + " already exists.");
}
mDb.execSQL("INSERT INTO folders (name, visible_limit) VALUES (?, ?)", new Object[]
{
mName,
K9.DEFAULT_VISIBLE_LIMIT
});
return true;
}
public boolean create(FolderType type, int visibleLimit) throws MessagingException
{
if (exists())
{
throw new MessagingException("Folder " + mName + " already exists.");
}
mDb.execSQL("INSERT INTO folders (name, visible_limit) VALUES (?, ?)", new Object[]
{
mName,
visibleLimit
});
return true;
}
@Override
public void close()
{
mFolderId = -1;
}
@Override
public int getMessageCount() throws MessagingException
{
open(OpenMode.READ_WRITE);
Cursor cursor = null;
try
{
cursor = mDb.rawQuery("SELECT COUNT(*) FROM messages WHERE messages.folder_id = ?",
new String[]
{
Long.toString(mFolderId)
});
cursor.moveToFirst();
int messageCount = cursor.getInt(0);
return messageCount;
}
finally
{
if (cursor != null)
{
cursor.close();
}
}
}
@Override
public int getUnreadMessageCount() throws MessagingException
{
open(OpenMode.READ_WRITE);
return mUnreadMessageCount;
}
public void setUnreadMessageCount(int unreadMessageCount) throws MessagingException
{
open(OpenMode.READ_WRITE);
mUnreadMessageCount = Math.max(0, unreadMessageCount);
mDb.execSQL("UPDATE folders SET unread_count = ? WHERE id = ?",
new Object[] { mUnreadMessageCount, mFolderId });
}
public void setLastChecked(long lastChecked) throws MessagingException
{
open(OpenMode.READ_WRITE);
super.setLastChecked(lastChecked);
mDb.execSQL("UPDATE folders SET last_updated = ? WHERE id = ?",
new Object[] { lastChecked, mFolderId });
}
public void setLastPush(long lastChecked) throws MessagingException
{
open(OpenMode.READ_WRITE);
super.setLastPush(lastChecked);
mDb.execSQL("UPDATE folders SET last_pushed = ? WHERE id = ?",
new Object[] { lastChecked, mFolderId });
}
public int getVisibleLimit() throws MessagingException
{
open(OpenMode.READ_WRITE);
return mVisibleLimit;
}
public void purgeToVisibleLimit(MessageRemovalListener listener) throws MessagingException
{
open(OpenMode.READ_WRITE);
Message[] messages = getMessages(null, false);
for (int i = mVisibleLimit; i < messages.length; i++)
{
if (listener != null)
{
listener.messageRemoved(messages[i]);
}
messages[i].setFlag(Flag.X_DESTROYED, true);
}
}
public void setVisibleLimit(int visibleLimit) throws MessagingException
{
open(OpenMode.READ_WRITE);
mVisibleLimit = visibleLimit;
mDb.execSQL("UPDATE folders SET visible_limit = ? WHERE id = ?",
new Object[] { mVisibleLimit, mFolderId });
}
public void setStatus(String status) throws MessagingException
{
open(OpenMode.READ_WRITE);
super.setStatus(status);
mDb.execSQL("UPDATE folders SET status = ? WHERE id = ?",
new Object[] { status, mFolderId });
}
public void setPushState(String pushState) throws MessagingException
{
open(OpenMode.READ_WRITE);
mPushState = pushState;
mDb.execSQL("UPDATE folders SET push_state = ? WHERE id = ?",
new Object[] { pushState, mFolderId });
}
public String getPushState()
{
return mPushState;
}
@Override
public FolderClass getDisplayClass()
{
return displayClass;
}
@Override
public FolderClass getSyncClass()
{
if (FolderClass.INHERITED == syncClass)
{
return getDisplayClass();
}
else
{
return syncClass;
}
}
public FolderClass getRawSyncClass()
{
return syncClass;
}
public FolderClass getPushClass()
{
if (FolderClass.INHERITED == pushClass)
{
return getSyncClass();
}
else
{
return pushClass;
}
}
public FolderClass getRawPushClass()
{
return pushClass;
}
public void setDisplayClass(FolderClass displayClass)
{
this.displayClass = displayClass;
}
public void setSyncClass(FolderClass syncClass)
{
this.syncClass = syncClass;
}
public void setPushClass(FolderClass pushClass)
{
this.pushClass = pushClass;
}
private String getPrefId() throws MessagingException
{
open(OpenMode.READ_WRITE);
if (prefId == null)
{
prefId = uUid + "." + mName;
}
return prefId;
}
public void delete(Preferences preferences) throws MessagingException
{
String id = getPrefId();
SharedPreferences.Editor editor = preferences.getPreferences().edit();
editor.remove(id + ".displayMode");
editor.remove(id + ".syncMode");
editor.commit();
}
public void save(Preferences preferences) throws MessagingException
{
String id = getPrefId();
SharedPreferences.Editor editor = preferences.getPreferences().edit();
// there can be a lot of folders. For the defaults, let's not save prefs, saving space, except for INBOX
if (displayClass == FolderClass.NO_CLASS && !K9.INBOX.equals(getName()))
{
editor.remove(id + ".displayMode");
}
else
{
editor.putString(id + ".displayMode", displayClass.name());
}
if (syncClass == FolderClass.INHERITED && !K9.INBOX.equals(getName()))
{
editor.remove(id + ".syncMode");
}
else
{
editor.putString(id + ".syncMode", syncClass.name());
}
if (pushClass == FolderClass.SECOND_CLASS && !K9.INBOX.equals(getName()))
{
editor.remove(id + ".pushMode");
}
else
{
editor.putString(id + ".pushMode", pushClass.name());
}
editor.commit();
}
public void refresh(Preferences preferences) throws MessagingException
{
String id = getPrefId();
try
{
displayClass = FolderClass.valueOf(preferences.getPreferences().getString(id + ".displayMode",
FolderClass.NO_CLASS.name()));
}
catch (Exception e)
{
Log.e(K9.LOG_TAG, "Unable to load displayMode for " + getName(), e);
displayClass = FolderClass.NO_CLASS;
}
if (displayClass == FolderClass.NONE)
{
displayClass = FolderClass.NO_CLASS;
}
FolderClass defSyncClass = FolderClass.INHERITED;
if (K9.INBOX.equals(getName()))
{
defSyncClass = FolderClass.FIRST_CLASS;
}
try
{
syncClass = FolderClass.valueOf(preferences.getPreferences().getString(id + ".syncMode",
defSyncClass.name()));
}
catch (Exception e)
{
Log.e(K9.LOG_TAG, "Unable to load syncMode for " + getName(), e);
syncClass = defSyncClass;
}
if (syncClass == FolderClass.NONE)
{
syncClass = FolderClass.INHERITED;
}
FolderClass defPushClass = FolderClass.SECOND_CLASS;
if (K9.INBOX.equals(getName()))
{
defPushClass = FolderClass.FIRST_CLASS;
}
try
{
pushClass = FolderClass.valueOf(preferences.getPreferences().getString(id + ".pushMode",
defPushClass.name()));
}
catch (Exception e)
{
Log.e(K9.LOG_TAG, "Unable to load pushMode for " + getName(), e);
pushClass = defPushClass;
}
if (pushClass == FolderClass.NONE)
{
pushClass = FolderClass.INHERITED;
}
}
@Override
public void fetch(Message[] messages, FetchProfile fp, MessageRetrievalListener listener)
throws MessagingException
{
open(OpenMode.READ_WRITE);
if (fp.contains(FetchProfile.Item.BODY))
{
for (Message message : messages)
{
LocalMessage localMessage = (LocalMessage)message;
Cursor cursor = null;
localMessage.setHeader(MimeHeader.HEADER_CONTENT_TYPE, "multipart/mixed");
MimeMultipart mp = new MimeMultipart();
mp.setSubType("mixed");
localMessage.setBody(mp);
try
{
cursor = mDb.rawQuery("SELECT html_content, text_content FROM messages "
+ "WHERE id = ?",
new String[] { Long.toString(localMessage.mId) });
cursor.moveToNext();
String htmlContent = cursor.getString(0);
String textContent = cursor.getString(1);
if (textContent != null)
{
LocalTextBody body = new LocalTextBody(textContent, htmlContent);
MimeBodyPart bp = new MimeBodyPart(body, "text/plain");
mp.addBodyPart(bp);
}
else
{
TextBody body = new TextBody(htmlContent);
MimeBodyPart bp = new MimeBodyPart(body, "text/html");
mp.addBodyPart(bp);
}
}
finally
{
if (cursor != null)
{
cursor.close();
}
}
try
{
cursor = mDb.query(
"attachments",
new String[]
{
"id",
"size",
"name",
"mime_type",
"store_data",
"content_uri"
},
"message_id = ?",
new String[] { Long.toString(localMessage.mId) },
null,
null,
null);
while (cursor.moveToNext())
{
long id = cursor.getLong(0);
int size = cursor.getInt(1);
String name = cursor.getString(2);
String type = cursor.getString(3);
String storeData = cursor.getString(4);
String contentUri = cursor.getString(5);
Body body = null;
if (contentUri != null)
{
body = new LocalAttachmentBody(Uri.parse(contentUri), mApplication);
}
MimeBodyPart bp = new LocalAttachmentBodyPart(body, id);
bp.setHeader(MimeHeader.HEADER_CONTENT_TYPE,
String.format("%s;\n name=\"%s\"",
type,
name));
bp.setHeader(MimeHeader.HEADER_CONTENT_TRANSFER_ENCODING, "base64");
bp.setHeader(MimeHeader.HEADER_CONTENT_DISPOSITION,
String.format("attachment;\n filename=\"%s\";\n size=%d",
name,
size));
/*
* HEADER_ANDROID_ATTACHMENT_STORE_DATA is a custom header we add so that
* we can later pull the attachment from the remote store if necessary.
*/
bp.setHeader(MimeHeader.HEADER_ANDROID_ATTACHMENT_STORE_DATA, storeData);
mp.addBodyPart(bp);
}
}
finally
{
if (cursor != null)
{
cursor.close();
}
}
}
}
}
@Override
public Message[] getMessages(int start, int end, MessageRetrievalListener listener)
throws MessagingException
{
open(OpenMode.READ_WRITE);
throw new MessagingException(
"LocalStore.getMessages(int, int, MessageRetrievalListener) not yet implemented");
}
private void populateHeaders(List<LocalMessage> messages)
{
Cursor cursor = null;
if (messages.size() == 0)
{
return;
}
try
{
Map<Long, LocalMessage> popMessages = new HashMap<Long, LocalMessage>();
List<String> ids = new ArrayList<String>();
StringBuffer questions = new StringBuffer();
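// Build one '?' placeholder per message and map each id back to its message so all headers can be fetched with a single IN query.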
for (int i = 0; i < messages.size(); i++)
{
if (i != 0)
{
questions.append(", ");
}
questions.append("?");
LocalMessage message = messages.get(i);
Long id = message.getId();
ids.add(Long.toString(id));
popMessages.put(id, message);
}
cursor = mDb.rawQuery(
"SELECT message_id, name, value "
+ "FROM headers " + "WHERE message_id in ( " + questions + ") ",
ids.toArray(new String[] {}));
while (cursor.moveToNext())
{
Long id = cursor.getLong(0);
String name = cursor.getString(1);
String value = cursor.getString(2);
//Log.i(K9.LOG_TAG, "Retrieved header name= " + name + ", value = " + value + " for message " + id);
popMessages.get(id).addHeader(name, value);
}
}
finally
{
if (cursor != null)
{
cursor.close();
}
}
}
@Override
public Message getMessage(String uid) throws MessagingException
{
open(OpenMode.READ_WRITE);
LocalMessage message = new LocalMessage(uid, this);
Cursor cursor = null;
try
{
cursor = mDb.rawQuery(
"SELECT "
+ GET_MESSAGES_COLS
+ "FROM messages WHERE uid = ? AND folder_id = ?",
new String[]
{
message.getUid(), Long.toString(mFolderId)
});
if (!cursor.moveToNext())
{
return null;
}
message.populateFromGetMessageCursor(cursor);
}
finally
{
if (cursor != null)
{
cursor.close();
}
}
return message;
}
@Override
public Message[] getMessages(MessageRetrievalListener listener) throws MessagingException
{
return getMessages(listener, true);
}
@Override
public Message[] getMessages(MessageRetrievalListener listener, boolean includeDeleted) throws MessagingException
{
return getMessages(
listener,
"SELECT " + GET_MESSAGES_COLS
+ "FROM messages WHERE "
+ (includeDeleted ? "" : "deleted = 0 AND ")
+ " folder_id = ? ORDER BY date DESC"
, new String[]
{
Long.toString(mFolderId)
}
);
}
public Message[] searchForMessages(MessageRetrievalListener listener, String queryString) throws MessagingException {
return getMessages(
listener,
"SELECT "
+ GET_MESSAGES_COLS
+ "FROM messages WHERE html_content LIKE ? OR subject LIKE ? OR sender_list LIKE ? ORDER BY date DESC"
, new String[]
{
queryString,
queryString,
queryString
}
);
}
/*
* Given a query string, actually do the query for the messages and
* call the MessageRetrievalListener for each one
*/
public Message[] getMessages(
MessageRetrievalListener listener,
String queryString, String[] placeHolders
) throws MessagingException
{
open(OpenMode.READ_WRITE);
ArrayList<LocalMessage> messages = new ArrayList<LocalMessage>();
Cursor cursor = null;
try
{
// pull out messages most recent first, since that's what the default sort is
cursor = mDb.rawQuery(queryString, placeHolders);
int i = 0;
ArrayList<LocalMessage> messagesForHeaders = new ArrayList<LocalMessage>();
while (cursor.moveToNext())
{
LocalMessage message = new LocalMessage(null, this);
message.populateFromGetMessageCursor(cursor);
messages.add(message);
if (listener != null)
{
listener.messageFinished(message, i, -1);
}
i++;
}
if (listener != null)
{
listener.messagesFinished(i);
}
}
finally
{
if (cursor != null)
{
cursor.close();
}
}
return messages.toArray(new Message[] {});
}
@Override
public Message[] getMessages(String[] uids, MessageRetrievalListener listener)
throws MessagingException
{
open(OpenMode.READ_WRITE);
if (uids == null)
{
return getMessages(listener);
}
ArrayList<Message> messages = new ArrayList<Message>();
for (String uid : uids)
{
messages.add(getMessage(uid));
}
return messages.toArray(new Message[] {});
}
@Override
public void copyMessages(Message[] msgs, Folder folder) throws MessagingException
{
if (!(folder instanceof LocalFolder))
{
throw new MessagingException("copyMessages called with incorrect Folder");
}
((LocalFolder) folder).appendMessages(msgs, true);
}
@Override
public void moveMessages(Message[] msgs, Folder destFolder) throws MessagingException
{
if (!(destFolder instanceof LocalFolder))
{
throw new MessagingException("copyMessages called with non-LocalFolder");
}
LocalFolder lDestFolder = (LocalFolder)destFolder;
lDestFolder.open(OpenMode.READ_WRITE);
for (Message message : msgs)
{
LocalMessage lMessage = (LocalMessage)message;
if (!message.isSet(Flag.SEEN))
{
setUnreadMessageCount(getUnreadMessageCount() - 1);
lDestFolder.setUnreadMessageCount(lDestFolder.getUnreadMessageCount() + 1);
}
String oldUID = message.getUid();
message.setUid(K9.LOCAL_UID_PREFIX + UUID.randomUUID().toString());
mDb.execSQL("UPDATE messages " + "SET folder_id = ?, uid = ? " + "WHERE id = ?", new Object[]
{
lDestFolder.getId(),
message.getUid(),
lMessage.getId()
});
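// Keep a deleted, already-read placeholder under the old UID in this folder so synchronization still has a local record for that UID.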
LocalMessage placeHolder = new LocalMessage(oldUID, this);
placeHolder.setFlagInternal(Flag.DELETED, true);
placeHolder.setFlagInternal(Flag.SEEN, true);
appendMessages(new Message[] { placeHolder });
}
}
/**
* The method differs slightly from the contract; if an incoming message already has a uid
* assigned and it matches the uid of an existing message then this message will replace the
* old message. It is implemented as a delete/insert. This functionality is used in saving
* of drafts and re-synchronization of updated server messages.
*/
@Override
public void appendMessages(Message[] messages) throws MessagingException
{
appendMessages(messages, false);
}
/**
* The method differs slightly from the contract; if an incoming message already has a uid
* assigned and it matches the uid of an existing message then this message will replace the
* old message. It is implemented as a delete/insert. This functionality is used in saving
* of drafts and re-synchronization of updated server messages.
*/
public void appendMessages(Message[] messages, boolean copy) throws MessagingException
{
open(OpenMode.READ_WRITE);
for (Message message : messages)
{
if (!(message instanceof MimeMessage))
{
throw new Error("LocalStore can only store Messages that extend MimeMessage");
}
String uid = message.getUid();
if (uid == null)
{
uid = K9.LOCAL_UID_PREFIX + UUID.randomUUID().toString();
message.setUid(uid);
}
else
{
Message oldMessage = getMessage(uid);
if (oldMessage != null && oldMessage.isSet(Flag.SEEN) == false)
{
setUnreadMessageCount(getUnreadMessageCount() - 1);
}
/*
* The message may already exist in this Folder, so delete it first.
*/
deleteAttachments(message.getUid());
mDb.execSQL("DELETE FROM messages WHERE folder_id = ? AND uid = ?",
new Object[] { mFolderId, message.getUid() });
}
ArrayList<Part> viewables = new ArrayList<Part>();
ArrayList<Part> attachments = new ArrayList<Part>();
MimeUtility.collectParts(message, viewables, attachments);
StringBuffer sbHtml = new StringBuffer();
StringBuffer sbText = new StringBuffer();
for (Part viewable : viewables)
{
try
{
String text = MimeUtility.getTextFromPart(viewable);
/*
* Anything with MIME type text/html will be stored as such. Anything
* else will be stored as text/plain.
*/
if (viewable.getMimeType().equalsIgnoreCase("text/html"))
{
sbHtml.append(text);
}
else
{
sbText.append(text);
}
}
catch (Exception e)
{
throw new MessagingException("Unable to get text for message part", e);
}
}
String text = sbText.toString();
String html = markupContent(text, sbHtml.toString());
try
{
ContentValues cv = new ContentValues();
cv.put("uid", uid);
cv.put("subject", message.getSubject());
cv.put("sender_list", Address.pack(message.getFrom()));
cv.put("date", message.getSentDate() == null
? System.currentTimeMillis() : message.getSentDate().getTime());
cv.put("flags", Utility.combine(message.getFlags(), ',').toUpperCase());
cv.put("deleted", message.isSet(Flag.DELETED) ? 1 : 0);
cv.put("folder_id", mFolderId);
cv.put("to_list", Address.pack(message.getRecipients(RecipientType.TO)));
cv.put("cc_list", Address.pack(message.getRecipients(RecipientType.CC)));
cv.put("bcc_list", Address.pack(message.getRecipients(RecipientType.BCC)));
cv.put("html_content", html.length() > 0 ? html : null);
cv.put("text_content", text.length() > 0 ? text : null);
cv.put("reply_to_list", Address.pack(message.getReplyTo()));
cv.put("attachment_count", attachments.size());
cv.put("internal_date", message.getInternalDate() == null
? System.currentTimeMillis() : message.getInternalDate().getTime());
String messageId = message.getMessageId();
if (messageId != null)
{
cv.put("message_id", messageId);
}
long messageUid = mDb.insert("messages", "uid", cv);
for (Part attachment : attachments)
{
saveAttachment(messageUid, attachment, copy);
}
saveHeaders(messageUid, (MimeMessage)message);
if (message.isSet(Flag.SEEN) == false)
{
setUnreadMessageCount(getUnreadMessageCount() + 1);
}
}
catch (Exception e)
{
throw new MessagingException("Error appending message", e);
}
}
}
/**
* Update the given message in the LocalStore without first deleting the existing
* message (contrast with appendMessages). This method is used to store changes
* to the given message while updating attachments and not removing existing
* attachment data.
* TODO In the future this method should be combined with appendMessages since the Message
* contains enough data to decide what to do.
* @param message
* @throws MessagingException
*/
public void updateMessage(LocalMessage message) throws MessagingException
{
open(OpenMode.READ_WRITE);
ArrayList<Part> viewables = new ArrayList<Part>();
ArrayList<Part> attachments = new ArrayList<Part>();
message.buildMimeRepresentation();
MimeUtility.collectParts(message, viewables, attachments);
StringBuffer sbHtml = new StringBuffer();
StringBuffer sbText = new StringBuffer();
for (int i = 0, count = viewables.size(); i < count; i++)
{
Part viewable = viewables.get(i);
try
{
String text = MimeUtility.getTextFromPart(viewable);
/*
* Anything with MIME type text/html will be stored as such. Anything
* else will be stored as text/plain.
*/
if (viewable.getMimeType().equalsIgnoreCase("text/html"))
{
sbHtml.append(text);
}
else
{
sbText.append(text);
}
}
catch (Exception e)
{
throw new MessagingException("Unable to get text for message part", e);
}
}
String text = sbText.toString();
String html = markupContent(text, sbHtml.toString());
try
{
mDb.execSQL("UPDATE messages SET "
+ "uid = ?, subject = ?, sender_list = ?, date = ?, flags = ?, "
+ "folder_id = ?, to_list = ?, cc_list = ?, bcc_list = ?, "
+ "html_content = ?, text_content = ?, reply_to_list = ?, "
+ "attachment_count = ? WHERE id = ?",
new Object[]
{
message.getUid(),
message.getSubject(),
Address.pack(message.getFrom()),
message.getSentDate() == null ? System
.currentTimeMillis() : message.getSentDate()
.getTime(),
Utility.combine(message.getFlags(), ',').toUpperCase(),
mFolderId,
Address.pack(message
.getRecipients(RecipientType.TO)),
Address.pack(message
.getRecipients(RecipientType.CC)),
Address.pack(message
.getRecipients(RecipientType.BCC)),
html.length() > 0 ? html : null,
text.length() > 0 ? text : null,
Address.pack(message.getReplyTo()),
attachments.size(),
message.mId
});
for (int i = 0, count = attachments.size(); i < count; i++)
{
Part attachment = attachments.get(i);
saveAttachment(message.mId, attachment, false);
}
saveHeaders(message.getId(), message);
}
catch (Exception e)
{
throw new MessagingException("Error appending message", e);
}
}
private void saveHeaders(long id, MimeMessage message)
{
deleteHeaders(id);
for (String name : message.getHeaderNames())
{
if (HEADERS_TO_SAVE.contains(name))
{
String[] values = message.getHeader(name);
for (String value : values)
{
ContentValues cv = new ContentValues();
cv.put("message_id", id);
cv.put("name", name);
cv.put("value", value);
mDb.insert("headers", "name", cv);
}
}
}
}
private void deleteHeaders(long id)
{
mDb.execSQL("DELETE FROM headers WHERE id = ?",
new Object[]
{
id
});
}
/**
* @param messageId
* @param attachment
* @param saveAsNew true to always store the attachment as a new record rather than updating an existing one
* @throws IOException
* @throws MessagingException
*/
private void saveAttachment(long messageId, Part attachment, boolean saveAsNew)
throws IOException, MessagingException
{
long attachmentId = -1;
Uri contentUri = null;
int size = -1;
File tempAttachmentFile = null;
if ((!saveAsNew) && (attachment instanceof LocalAttachmentBodyPart))
{
attachmentId = ((LocalAttachmentBodyPart) attachment).getAttachmentId();
}
if (attachment.getBody() != null)
{
Body body = attachment.getBody();
if (body instanceof LocalAttachmentBody)
{
contentUri = ((LocalAttachmentBody) body).getContentUri();
}
else
{
/*
* If the attachment has a body we're expected to save it into the local store
* so we copy the data into a cached attachment file.
*/
InputStream in = attachment.getBody().getInputStream();
tempAttachmentFile = File.createTempFile("att", null, mAttachmentsDir);
FileOutputStream out = new FileOutputStream(tempAttachmentFile);
size = IOUtils.copy(in, out);
in.close();
out.close();
}
}
if (size == -1)
{
/*
* If the attachment is not yet downloaded see if we can pull a size
* off the Content-Disposition.
*/
String disposition = attachment.getDisposition();
if (disposition != null)
{
String s = MimeUtility.getHeaderParameter(disposition, "size");
if (s != null)
{
size = Integer.parseInt(s);
}
}
}
if (size == -1)
{
size = 0;
}
String storeData =
Utility.combine(attachment.getHeader(
MimeHeader.HEADER_ANDROID_ATTACHMENT_STORE_DATA), ',');
String name = MimeUtility.getHeaderParameter(attachment.getContentType(), "name");
String contentDisposition = MimeUtility.unfoldAndDecode(attachment.getDisposition());
if (name == null && contentDisposition != null)
{
name = MimeUtility.getHeaderParameter(contentDisposition, "filename");
}
if (attachmentId == -1)
{
ContentValues cv = new ContentValues();
cv.put("message_id", messageId);
cv.put("content_uri", contentUri != null ? contentUri.toString() : null);
cv.put("store_data", storeData);
cv.put("size", size);
cv.put("name", name);
cv.put("mime_type", attachment.getMimeType());
attachmentId = mDb.insert("attachments", "message_id", cv);
}
else
{
ContentValues cv = new ContentValues();
cv.put("content_uri", contentUri != null ? contentUri.toString() : null);
cv.put("size", size);
mDb.update(
"attachments",
cv,
"id = ?",
new String[] { Long.toString(attachmentId) });
}
if (tempAttachmentFile != null)
{
File attachmentFile = new File(mAttachmentsDir, Long.toString(attachmentId));
tempAttachmentFile.renameTo(attachmentFile);
contentUri = AttachmentProvider.getAttachmentUri(
new File(mPath).getName(),
attachmentId);
attachment.setBody(new LocalAttachmentBody(contentUri, mApplication));
ContentValues cv = new ContentValues();
cv.put("content_uri", contentUri != null ? contentUri.toString() : null);
mDb.update(
"attachments",
cv,
"id = ?",
new String[] { Long.toString(attachmentId) });
}
if (attachment instanceof LocalAttachmentBodyPart)
{
((LocalAttachmentBodyPart) attachment).setAttachmentId(attachmentId);
}
}
/**
* Changes the stored uid of the given message (using its internal id as a key) to
* the uid in the message.
* @param message
*/
public void changeUid(LocalMessage message) throws MessagingException
{
open(OpenMode.READ_WRITE);
ContentValues cv = new ContentValues();
cv.put("uid", message.getUid());
mDb.update("messages", cv, "id = ?", new String[] { Long.toString(message.mId) });
}
@Override
public void setFlags(Message[] messages, Flag[] flags, boolean value)
throws MessagingException
{
open(OpenMode.READ_WRITE);
for (Message message : messages)
{
message.setFlags(flags, value);
}
}
@Override
public void setFlags(Flag[] flags, boolean value)
throws MessagingException
{
open(OpenMode.READ_WRITE);
for (Message message : getMessages(null))
{
message.setFlags(flags, value);
}
}
@Override
public String getUidFromMessageId(Message message) throws MessagingException
{
throw new MessagingException("Cannot call getUidFromMessageId on LocalFolder");
}
public void deleteMessagesOlderThan(long cutoff) throws MessagingException
{
open(OpenMode.READ_ONLY);
mDb.execSQL("DELETE FROM messages WHERE folder_id = ? and date < ?", new Object[]
{
Long.toString(mFolderId), new Long(cutoff)
});
resetUnreadCount();
}
private void resetUnreadCount()
{
try
{
int newUnread = 0;
Message[] messages = getMessages(null);
for (Message message : messages)
{
if (message.isSet(Flag.SEEN) == false)
{
newUnread++;
}
}
setUnreadMessageCount(newUnread);
}
catch (Exception e)
{
Log.e(K9.LOG_TAG, "Unable to fetch all messages from LocalStore", e);
}
}
@Override
public void delete(boolean recurse) throws MessagingException
{
// We need to open the folder first to make sure we've got its id
open(OpenMode.READ_ONLY);
Message[] messages = getMessages(null);
for (Message message : messages)
{
deleteAttachments(message.getUid());
}
mDb.execSQL("DELETE FROM folders WHERE id = ?", new Object[]
{
Long.toString(mFolderId),
});
}
@Override
public boolean equals(Object o)
{
if (o instanceof LocalFolder)
{
return ((LocalFolder)o).mName.equals(mName);
}
return super.equals(o);
}
@Override
public Flag[] getPermanentFlags() throws MessagingException
{
return PERMANENT_FLAGS;
}
private void deleteAttachments(String uid) throws MessagingException
{
open(OpenMode.READ_WRITE);
Cursor messagesCursor = null;
try
{
messagesCursor = mDb.query(
"messages",
new String[] { "id" },
"folder_id = ? AND uid = ?",
new String[] { Long.toString(mFolderId), uid },
null,
null,
null);
while (messagesCursor.moveToNext())
{
long messageId = messagesCursor.getLong(0);
Cursor attachmentsCursor = null;
try
{
attachmentsCursor = mDb.query(
"attachments",
new String[] { "id" },
"message_id = ?",
new String[] { Long.toString(messageId) },
null,
null,
null);
while (attachmentsCursor.moveToNext())
{
long attachmentId = attachmentsCursor.getLong(0);
try
{
File file = new File(mAttachmentsDir, Long.toString(attachmentId));
if (file.exists())
{
file.delete();
}
}
catch (Exception e)
{
}
}
}
finally
{
if (attachmentsCursor != null)
{
attachmentsCursor.close();
}
}
}
}
finally
{
if (messagesCursor != null)
{
messagesCursor.close();
}
}
}
public String markupContent(String text, String html)
{
if (text.length() > 0 && html.length() == 0)
{
html = htmlifyString(text);
}
if (html.indexOf("cid:") != -1)
{
return html.replaceAll("cid:", "http://cid/");
}
else
{
return html;
}
}
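/**
* Best-effort plain text to HTML conversion: escapes HTML special characters, turns long separator runs
* into horizontal rules, re-joins hard-wrapped lines, collapses runs of blank lines, wraps the result in
* a pre block and links bare URLs.
*/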
public String htmlifyString(String text)
{
StringReader reader = new StringReader(text);
StringBuilder buff = new StringBuilder(text.length() + 512);
int c = 0;
try
{
while ((c = reader.read()) != -1)
{
switch (c)
{
case '&':
buff.append("&amp;");
break;
case '<':
buff.append("&lt;");
break;
case '>':
buff.append("&gt;");
break;
case '\r':
break;
default:
buff.append((char)c);
}//switch
}
}
catch (IOException e)
{
//Should never happen
Log.e(K9.LOG_TAG, null, e);
}
text = buff.toString();
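// Tidy up the escaped text: long runs of -, = or _ become a horizontal rule, hard-wrapped lines that
// clearly continue on the next line are re-joined, and runs of four or more line breaks collapse to a
// single blank line.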
text = text.replaceAll("\\s*([-=_]{30,}+)\\s*","<hr />");
text = text.replaceAll("(?m)^([^\r\n]{4,}[\\s\\w,:;+/])(?:\r\n|\n|\r)(?=[a-z]\\S{0,10}[\\s\\n\\r])","$1 ");
text = text.replaceAll("(?m)(\r\n|\n|\r){4,}","\n\n");
Matcher m = Regex.WEB_URL_PATTERN.matcher(text);
StringBuffer sb = new StringBuffer(text.length() + 512);
sb.append("<html><body><pre style=\"white-space: pre-wrap; word-wrap:break-word; \">");
while (m.find())
{
int start = m.start();
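// Skip linking when the match is immediately preceded by '@', i.e. the domain part of an email address.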
if (start == 0 || (start != 0 && text.charAt(start - 1) != '@'))
{
m.appendReplacement(sb, "<a href=\"$0\">$0</a>");
}
else
{
m.appendReplacement(sb, "$0");
}
}
m.appendTail(sb);
sb.append("</pre></body></html>");
text = sb.toString();
return text;
}
}
public class LocalTextBody extends TextBody
{
private String mBodyForDisplay;
public LocalTextBody(String body)
{
super(body);
}
public LocalTextBody(String body, String bodyForDisplay) throws MessagingException
{
super(body);
this.mBodyForDisplay = bodyForDisplay;
}
public String getBodyForDisplay()
{
return mBodyForDisplay;
}
public void setBodyForDisplay(String mBodyForDisplay)
{
this.mBodyForDisplay = mBodyForDisplay;
}
}//LocalTextBody
public class LocalMessage extends MimeMessage
{
private long mId;
private int mAttachmentCount;
private String mSubject;
private boolean mHeadersLoaded = false;
private boolean mMessageDirty = false;
public LocalMessage()
{
}
LocalMessage(String uid, Folder folder) throws MessagingException
{
this.mUid = uid;
this.mFolder = folder;
}
private void populateFromGetMessageCursor(Cursor cursor)
throws MessagingException
{
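// Field positions below must stay in sync with the column order defined by GET_MESSAGES_COLS.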
this.setSubject(cursor.getString(0) == null ? "" : cursor.getString(0));
Address[] from = Address.unpack(cursor.getString(1));
if (from.length > 0)
{
this.setFrom(from[0]);
}
this.setInternalSentDate(new Date(cursor.getLong(2)));
this.setUid(cursor.getString(3));
String flagList = cursor.getString(4);
if (flagList != null && flagList.length() > 0)
{
String[] flags = flagList.split(",");
try
{
for (String flag : flags)
{
this.setFlagInternal(Flag.valueOf(flag), true);
}
}
catch (Exception e)
{
}
}
this.mId = cursor.getLong(5);
this.setRecipients(RecipientType.TO, Address.unpack(cursor.getString(6)));
this.setRecipients(RecipientType.CC, Address.unpack(cursor.getString(7)));
this.setRecipients(RecipientType.BCC, Address.unpack(cursor.getString(8)));
this.setReplyTo(Address.unpack(cursor.getString(9)));
this.mAttachmentCount = cursor.getInt(10);
this.setInternalDate(new Date(cursor.getLong(11)));
this.setMessageId(cursor.getString(12));
}
/* Custom version of writeTo that updates the MIME message based on localMessage
* changes.
*/
public void writeTo(OutputStream out) throws IOException, MessagingException
{
if (mMessageDirty) buildMimeRepresentation();
super.writeTo(out);
}
private void buildMimeRepresentation() throws MessagingException
{
if (!mMessageDirty)
{
return;
}
super.setSubject(mSubject);
if (this.mFrom != null && this.mFrom.length > 0)
{
super.setFrom(this.mFrom[0]);
}
super.setReplyTo(mReplyTo);
super.setSentDate(this.getSentDate());
super.setRecipients(RecipientType.TO, mTo);
super.setRecipients(RecipientType.CC, mCc);
super.setRecipients(RecipientType.BCC, mBcc);
if (mMessageId != null) super.setMessageId(mMessageId);
mMessageDirty = false;
return;
}
@Override
public String getSubject() throws MessagingException
{
return mSubject;
}
@Override
public void setSubject(String subject) throws MessagingException
{
mSubject = subject;
mMessageDirty = true;
}
public void setMessageId(String messageId)
{
mMessageId = messageId;
mMessageDirty = true;
}
public int getAttachmentCount()
{
return mAttachmentCount;
}
public void setFrom(Address from) throws MessagingException
{
this.mFrom = new Address[] { from };
mMessageDirty = true;
}
public void setReplyTo(Address[] replyTo) throws MessagingException
{
if (replyTo == null || replyTo.length == 0)
{
mReplyTo = null;
}
else
{
mReplyTo = replyTo;
}
mMessageDirty = true;
}
/*
* For performance reasons, we add headers instead of setting them (see super implementation)
* which first removes them (an expensive operation) before adding them
*/
@Override
public void setRecipients(RecipientType type, Address[] addresses) throws MessagingException
{
if (type == RecipientType.TO)
{
if (addresses == null || addresses.length == 0)
{
this.mTo = null;
}
else
{
this.mTo = addresses;
}
}
else if (type == RecipientType.CC)
{
if (addresses == null || addresses.length == 0)
{
this.mCc = null;
}
else
{
this.mCc = addresses;
}
}
else if (type == RecipientType.BCC)
{
if (addresses == null || addresses.length == 0)
{
this.mBcc = null;
}
else
{
this.mBcc = addresses;
}
}
else
{
throw new MessagingException("Unrecognized recipient type.");
}
mMessageDirty = true;
}
public void setFlagInternal(Flag flag, boolean set) throws MessagingException
{
super.setFlag(flag, set);
}
public long getId()
{
return mId;
}
public void setFlag(Flag flag, boolean set) throws MessagingException
{
if (flag == Flag.DELETED && set)
{
/*
* If a message is being marked as deleted we want to clear out its content
* and attachments as well. Delete will not actually remove the row since we need
* to retain the uid for synchronization purposes.
*/
/*
* Delete all of the messages' content to save space.
*/
((LocalFolder) mFolder).deleteAttachments(getUid());
mDb.execSQL(
"UPDATE messages SET " +
"deleted = 1," +
"subject = NULL, " +
"sender_list = NULL, " +
"date = NULL, " +
"to_list = NULL, " +
"cc_list = NULL, " +
"bcc_list = NULL, " +
"html_content = NULL, " +
"text_content = NULL, " +
"reply_to_list = NULL " +
"WHERE id = ?",
new Object[]
{
mId
});
/*
* Delete all of the messages' attachments to save space.
*/
mDb.execSQL("DELETE FROM attachments WHERE id = ?",
new Object[]
{
mId
});
((LocalFolder)mFolder).deleteHeaders(mId);
}
else if (flag == Flag.X_DESTROYED && set)
{
((LocalFolder) mFolder).deleteAttachments(getUid());
mDb.execSQL("DELETE FROM messages WHERE id = ?",
new Object[] { mId });
((LocalFolder)mFolder).deleteHeaders(mId);
}
/*
* Update the unread count on the folder.
*/
try
{
if (flag == Flag.DELETED || flag == Flag.X_DESTROYED
|| (flag == Flag.SEEN && !isSet(Flag.DELETED)))
{
LocalFolder folder = (LocalFolder)mFolder;
if (set && !isSet(Flag.SEEN))
{
folder.setUnreadMessageCount(folder.getUnreadMessageCount() - 1);
}
else if (!set && isSet(Flag.SEEN))
{
folder.setUnreadMessageCount(folder.getUnreadMessageCount() + 1);
}
}
}
catch (MessagingException me)
{
Log.e(K9.LOG_TAG, "Unable to update LocalStore unread message count",
me);
throw new RuntimeException(me);
}
super.setFlag(flag, set);
/*
* Set the flags on the message.
*/
mDb.execSQL("UPDATE messages " + "SET flags = ? " + " WHERE id = ?", new Object[]
{
Utility.combine(getFlags(), ',').toUpperCase(), mId
});
}
private void loadHeaders()
{
ArrayList<LocalMessage> messages = new ArrayList<LocalMessage>();
messages.add(this);
mHeadersLoaded = true; // set true before calling populate headers to stop recursion
((LocalFolder) mFolder).populateHeaders(messages);
}
public void addHeader(String name, String value)
{
if (!mHeadersLoaded)
{
loadHeaders();
}
super.addHeader(name, value);
}
public void setHeader(String name, String value)
{
if (!mHeadersLoaded)
loadHeaders();
super.setHeader(name, value);
}
public String[] getHeader(String name)
{
if (!mHeadersLoaded)
loadHeaders();
return super.getHeader(name);
}
public void removeHeader(String name)
{
if (!mHeadersLoaded)
loadHeaders();
super.removeHeader(name);
}
}
public class LocalAttachmentBodyPart extends MimeBodyPart
{
private long mAttachmentId = -1;
public LocalAttachmentBodyPart(Body body, long attachmentId) throws MessagingException
{
super(body);
mAttachmentId = attachmentId;
}
/**
* Returns the local attachment id of this body, or -1 if it is not stored.
* @return
*/
public long getAttachmentId()
{
return mAttachmentId;
}
public void setAttachmentId(long attachmentId)
{
mAttachmentId = attachmentId;
}
public String toString()
{
return "" + mAttachmentId;
}
}
public static class LocalAttachmentBody implements Body
{
private Application mApplication;
private Uri mUri;
public LocalAttachmentBody(Uri uri, Application application)
{
mApplication = application;
mUri = uri;
}
public InputStream getInputStream() throws MessagingException
{
try
{
return mApplication.getContentResolver().openInputStream(mUri);
}
catch (FileNotFoundException fnfe)
{
/*
* Since it's completely normal for us to try to serve up attachments that
* have been blown away, we just return an empty stream.
*/
return new ByteArrayInputStream(new byte[0]);
}
catch (IOException ioe)
{
throw new MessagingException("Invalid attachment.", ioe);
}
}
public void writeTo(OutputStream out) throws IOException, MessagingException
{
InputStream in = getInputStream();
Base64OutputStream base64Out = new Base64OutputStream(out);
IOUtils.copy(in, base64Out);
base64Out.close();
}
public Uri getContentUri()
{
return mUri;
}
}
}
|
src/com/fsck/k9/mail/store/LocalStore.java
|
package com.fsck.k9.mail.store;
import android.app.Application;
import android.content.ContentValues;
import android.content.SharedPreferences;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.net.Uri;
import android.text.util.Regex;
import android.util.Log;
import com.fsck.k9.K9;
import com.fsck.k9.Preferences;
import com.fsck.k9.Utility;
import com.fsck.k9.codec.binary.Base64OutputStream;
import com.fsck.k9.mail.*;
import com.fsck.k9.mail.Message.RecipientType;
import com.fsck.k9.mail.internet.*;
import com.fsck.k9.provider.AttachmentProvider;
import org.apache.commons.io.IOUtils;
import java.io.*;
import java.net.URI;
import java.net.URLEncoder;
import java.util.*;
import java.util.regex.Matcher;
/**
* <pre>
* Implements a SQLite database backed local store for Messages.
* </pre>
*/
public class LocalStore extends Store implements Serializable
{
private static final int DB_VERSION = 32;
private static final Flag[] PERMANENT_FLAGS = { Flag.DELETED, Flag.X_DESTROYED, Flag.SEEN };
private String mPath;
private SQLiteDatabase mDb;
private File mAttachmentsDir;
private Application mApplication;
private String uUid = null;
private static Set<String> HEADERS_TO_SAVE = new HashSet<String>();
static
{
HEADERS_TO_SAVE.add(K9.K9MAIL_IDENTITY);
HEADERS_TO_SAVE.add("In-Reply-To");
HEADERS_TO_SAVE.add("References");
HEADERS_TO_SAVE.add("X-User-Agent");
}
/**
* @param uri local://localhost/path/to/database/uuid.db
*/
public LocalStore(String _uri, Application application) throws MessagingException
{
mApplication = application;
URI uri = null;
try
{
uri = new URI(_uri);
}
catch (Exception e)
{
throw new MessagingException("Invalid uri for LocalStore");
}
if (!uri.getScheme().equals("local"))
{
throw new MessagingException("Invalid scheme");
}
mPath = uri.getPath();
// We need to associate the localstore with the account. Since we don't have the account
// handy here, we'll take the filename from the DB and use the basename of the filename
// Folders probably should have references to their containing accounts
File dbFile = new File(mPath);
String[] tokens = dbFile.getName().split("\\.");
uUid = tokens[0];
File parentDir = new File(mPath).getParentFile();
if (!parentDir.exists())
{
parentDir.mkdirs();
}
mAttachmentsDir = new File(mPath + "_att");
if (!mAttachmentsDir.exists())
{
mAttachmentsDir.mkdirs();
}
mDb = SQLiteDatabase.openOrCreateDatabase(mPath, null);
if (mDb.getVersion() != DB_VERSION)
{
doDbUpgrade(mDb, application);
}
}
private void doDbUpgrade(SQLiteDatabase mDb, Application application)
{
Log.i(K9.LOG_TAG, String.format("Upgrading database from version %d to version %d",
mDb.getVersion(), DB_VERSION));
AttachmentProvider.clear(application);
// schema version 29 was when we moved to incremental updates
// in the case of a new db or a < v29 db, we blow away and start from scratch
if (mDb.getVersion() < 29)
{
mDb.execSQL("DROP TABLE IF EXISTS folders");
mDb.execSQL("CREATE TABLE folders (id INTEGER PRIMARY KEY, name TEXT, "
+ "last_updated INTEGER, unread_count INTEGER, visible_limit INTEGER, status TEXT, push_state TEXT, last_pushed INTEGER)");
mDb.execSQL("CREATE INDEX IF NOT EXISTS folder_name ON folders (name)");
mDb.execSQL("DROP TABLE IF EXISTS messages");
mDb.execSQL("CREATE TABLE messages (id INTEGER PRIMARY KEY, deleted INTEGER default 0, folder_id INTEGER, uid TEXT, subject TEXT, "
+ "date INTEGER, flags TEXT, sender_list TEXT, to_list TEXT, cc_list TEXT, bcc_list TEXT, reply_to_list TEXT, "
+ "html_content TEXT, text_content TEXT, attachment_count INTEGER, internal_date INTEGER, message_id TEXT)");
mDb.execSQL("DROP TABLE IF EXISTS headers");
mDb.execSQL("CREATE TABLE headers (id INTEGER PRIMARY KEY, message_id INTEGER, name TEXT, value TEXT)");
mDb.execSQL("CREATE INDEX IF NOT EXISTS header_folder ON headers (message_id)");
mDb.execSQL("CREATE INDEX IF NOT EXISTS msg_uid ON messages (uid, folder_id)");
mDb.execSQL("DROP INDEX IF EXISTS msg_folder_id");
mDb.execSQL("DROP INDEX IF EXISTS msg_folder_id_date");
mDb.execSQL("CREATE INDEX IF NOT EXISTS msg_folder_id_deleted_date ON messages (folder_id,deleted,internal_date)");
mDb.execSQL("DROP TABLE IF EXISTS attachments");
mDb.execSQL("CREATE TABLE attachments (id INTEGER PRIMARY KEY, message_id INTEGER,"
+ "store_data TEXT, content_uri TEXT, size INTEGER, name TEXT,"
+ "mime_type TEXT)");
mDb.execSQL("DROP TABLE IF EXISTS pending_commands");
mDb.execSQL("CREATE TABLE pending_commands " +
"(id INTEGER PRIMARY KEY, command TEXT, arguments TEXT)");
mDb.execSQL("DROP TRIGGER IF EXISTS delete_folder");
mDb.execSQL("CREATE TRIGGER delete_folder BEFORE DELETE ON folders BEGIN DELETE FROM messages WHERE old.id = folder_id; END;");
mDb.execSQL("DROP TRIGGER IF EXISTS delete_message");
mDb.execSQL("CREATE TRIGGER delete_message BEFORE DELETE ON messages BEGIN DELETE FROM attachments WHERE old.id = message_id; "
+ "DELETE FROM headers where old.id = message_id; END;");
}
else
{ // in the case that we're starting out at 29 or newer, run all the needed updates
if (mDb.getVersion() < 30)
{
mDb.execSQL("ALTER TABLE messages ADD deleted INTEGER default 0");
}
if (mDb.getVersion() < 31)
{
mDb.execSQL("DROP INDEX IF EXISTS msg_folder_id_date");
mDb.execSQL("CREATE INDEX IF NOT EXISTS msg_folder_id_deleted_date ON messages (folder_id,deleted,internal_date)");
}
if (mDb.getVersion() < 32)
{
mDb.execSQL("UPDATE messages SET deleted = 1 WHERE flags LIKE '%DELETED%'");
}
}
mDb.setVersion(DB_VERSION);
if (mDb.getVersion() != DB_VERSION)
{
throw new Error("Database upgrade failed!");
}
try
{
pruneCachedAttachments(true);
}
catch (Exception me)
{
Log.e(K9.LOG_TAG, "Exception while force pruning attachments during DB update", me);
}
}
public long getSize()
{
long attachmentLength = 0;
File[] files = mAttachmentsDir.listFiles();
for (File file : files)
{
if (file.exists())
{
attachmentLength += file.length();
}
}
File dbFile = new File(mPath);
return dbFile.length() + attachmentLength;
}
public void compact() throws MessagingException
{
Log.i(K9.LOG_TAG, "Before prune size = " + getSize());
pruneCachedAttachments();
Log.i(K9.LOG_TAG, "After prune / before compaction size = " + getSize());
mDb.execSQL("VACUUM");
Log.i(K9.LOG_TAG, "After compaction size = " + getSize());
}
public void clear() throws MessagingException
{
Log.i(K9.LOG_TAG, "Before prune size = " + getSize());
pruneCachedAttachments(true);
Log.i(K9.LOG_TAG, "After prune / before compaction size = " + getSize());
Log.i(K9.LOG_TAG, "Before clear folder count = " + getFolderCount());
Log.i(K9.LOG_TAG, "Before clear message count = " + getMessageCount());
Log.i(K9.LOG_TAG, "After prune / before clear size = " + getSize());
// don't delete messages that are Local, since there is no copy on the server.
// Don't delete deleted messages. They are essentially placeholders for UIDs of messages that have
// been deleted locally. They take up insignificant space
mDb.execSQL("DELETE FROM messages WHERE deleted = 0 and uid not like 'Local%'");
compact();
Log.i(K9.LOG_TAG, "After clear message count = " + getMessageCount());
Log.i(K9.LOG_TAG, "After clear size = " + getSize());
}
public int getMessageCount() throws MessagingException
{
Cursor cursor = null;
try
{
cursor = mDb.rawQuery("SELECT COUNT(*) FROM messages", null);
cursor.moveToFirst();
int messageCount = cursor.getInt(0);
return messageCount;
}
finally
{
if (cursor != null)
{
cursor.close();
}
}
}
public int getFolderCount() throws MessagingException
{
Cursor cursor = null;
try
{
cursor = mDb.rawQuery("SELECT COUNT(*) FROM folders", null);
cursor.moveToFirst();
int messageCount = cursor.getInt(0);
return messageCount;
}
finally
{
if (cursor != null)
{
cursor.close();
}
}
}
@Override
public LocalFolder getFolder(String name) throws MessagingException
{
return new LocalFolder(name);
}
// TODO this takes about 260-300ms, seems slow.
@Override
public LocalFolder[] getPersonalNamespaces() throws MessagingException
{
ArrayList<LocalFolder> folders = new ArrayList<LocalFolder>();
Cursor cursor = null;
try
{
cursor = mDb.rawQuery("SELECT id, name, unread_count, visible_limit, last_updated, status, push_state, last_pushed FROM folders", null);
while (cursor.moveToNext())
{
LocalFolder folder = new LocalFolder(cursor.getString(1));
folder.open(cursor.getInt(0), cursor.getString(1), cursor.getInt(2), cursor.getInt(3), cursor.getLong(4), cursor.getString(5), cursor.getString(6), cursor.getLong(7));
folders.add(folder);
}
}
finally
{
if (cursor != null)
{
cursor.close();
}
}
return folders.toArray(new LocalFolder[] {});
}
@Override
public void checkSettings() throws MessagingException
{
}
/**
* Delete the entire Store and it's backing database.
*/
public void delete()
{
try
{
mDb.close();
}
catch (Exception e)
{
}
try
{
File[] attachments = mAttachmentsDir.listFiles();
for (File attachment : attachments)
{
if (attachment.exists())
{
attachment.delete();
}
}
if (mAttachmentsDir.exists())
{
mAttachmentsDir.delete();
}
}
catch (Exception e)
{
}
try
{
new File(mPath).delete();
}
catch (Exception e)
{
}
}
public void pruneCachedAttachments() throws MessagingException
{
pruneCachedAttachments(false);
}
/**
* Deletes all cached attachments for the entire store.
*/
public void pruneCachedAttachments(boolean force) throws MessagingException
{
if (force)
{
ContentValues cv = new ContentValues();
cv.putNull("content_uri");
mDb.update("attachments", cv, null, null);
}
File[] files = mAttachmentsDir.listFiles();
for (File file : files)
{
if (file.exists())
{
if (!force)
{
Cursor cursor = null;
try
{
cursor = mDb.query(
"attachments",
new String[] { "store_data" },
"id = ?",
new String[] { file.getName() },
null,
null,
null);
if (cursor.moveToNext())
{
if (cursor.getString(0) == null)
{
Log.d(K9.LOG_TAG, "Attachment " + file.getAbsolutePath() + " has no store data, not deleting");
/*
* If the attachment has no store data it is not recoverable, so
* we won't delete it.
*/
continue;
}
}
}
finally
{
if (cursor != null)
{
cursor.close();
}
}
}
if (!force)
{
try
{
ContentValues cv = new ContentValues();
cv.putNull("content_uri");
mDb.update("attachments", cv, "id = ?", new String[] { file.getName() });
}
catch (Exception e)
{
/*
* If the row has gone away before we got to mark it not-downloaded that's
* okay.
*/
}
}
if (!file.delete())
{
file.deleteOnExit();
}
}
}
}
public void resetVisibleLimits()
{
resetVisibleLimits(K9.DEFAULT_VISIBLE_LIMIT);
}
public void resetVisibleLimits(int visibleLimit)
{
ContentValues cv = new ContentValues();
cv.put("visible_limit", Integer.toString(visibleLimit));
mDb.update("folders", cv, null, null);
}
public ArrayList<PendingCommand> getPendingCommands()
{
Cursor cursor = null;
try
{
cursor = mDb.query("pending_commands",
new String[] { "id", "command", "arguments" },
null,
null,
null,
null,
"id ASC");
ArrayList<PendingCommand> commands = new ArrayList<PendingCommand>();
while (cursor.moveToNext())
{
PendingCommand command = new PendingCommand();
command.mId = cursor.getLong(0);
command.command = cursor.getString(1);
String arguments = cursor.getString(2);
command.arguments = arguments.split(",");
for (int i = 0; i < command.arguments.length; i++)
{
command.arguments[i] = Utility.fastUrlDecode(command.arguments[i]);
}
commands.add(command);
}
return commands;
}
finally
{
if (cursor != null)
{
cursor.close();
}
}
}
public void addPendingCommand(PendingCommand command)
{
try
{
for (int i = 0; i < command.arguments.length; i++)
{
command.arguments[i] = URLEncoder.encode(command.arguments[i], "UTF-8");
}
ContentValues cv = new ContentValues();
cv.put("command", command.command);
cv.put("arguments", Utility.combine(command.arguments, ','));
mDb.insert("pending_commands", "command", cv);
}
catch (UnsupportedEncodingException usee)
{
throw new Error("Aparently UTF-8 has been lost to the annals of history.");
}
}
public void removePendingCommand(PendingCommand command)
{
mDb.delete("pending_commands", "id = ?", new String[] { Long.toString(command.mId) });
}
public void removePendingCommands()
{
mDb.delete("pending_commands", null, null);
}
public static class PendingCommand
{
private long mId;
public String command;
public String[] arguments;
@Override
public String toString()
{
StringBuffer sb = new StringBuffer();
sb.append(command);
sb.append(": ");
for (String argument : arguments)
{
sb.append(", ");
sb.append(argument);
//sb.append("\n");
}
return sb.toString();
}
}
public boolean isMoveCapable()
{
return true;
}
public boolean isCopyCapable()
{
return true;
}
public class LocalFolder extends Folder implements Serializable
{
private String mName;
private long mFolderId = -1;
private int mUnreadMessageCount = -1;
private int mVisibleLimit = -1;
private FolderClass displayClass = FolderClass.NO_CLASS;
private FolderClass syncClass = FolderClass.INHERITED;
private FolderClass pushClass = FolderClass.SECOND_CLASS;
private String prefId = null;
private String mPushState = null;
/*
* a String containing the columns getMessages expects to work with
* in the correct order.
*/
private String GET_MESSAGES_COLS =
"subject, sender_list, date, uid, flags, id, to_list, cc_list, "
+ "bcc_list, reply_to_list, attachment_count, internal_date, message_id ";
public LocalFolder(String name)
{
this.mName = name;
if (K9.INBOX.equals(getName()))
{
syncClass = FolderClass.FIRST_CLASS;
pushClass = FolderClass.FIRST_CLASS;
}
}
public LocalFolder(long id)
{
this.mFolderId = id;
}
public long getId()
{
return mFolderId;
}
@Override
public void open(OpenMode mode) throws MessagingException
{
if (isOpen())
{
return;
}
Cursor cursor = null;
try
{
String baseQuery =
"SELECT id, name,unread_count, visible_limit, last_updated, status, push_state, last_pushed FROM folders ";
if (mName != null)
{
cursor = mDb.rawQuery(baseQuery + "where folders.name = ?", new String[] { mName });
}
else
{
cursor = mDb.rawQuery(baseQuery + "where folders.id = ?", new String[] { Long.toString(mFolderId) });
}
if (cursor.moveToFirst())
{
int folderId = cursor.getInt(0);
if (folderId > 0)
{
open(folderId, cursor.getString(1), cursor.getInt(2), cursor.getInt(3), cursor.getLong(4), cursor.getString(5), cursor.getString(6), cursor.getLong(7));
}
}
else
{
create(FolderType.HOLDS_MESSAGES);
open(mode);
}
}
finally
{
if (cursor != null)
{
cursor.close();
}
}
}
private void open(int id, String name, int unreadCount, int visibleLimit, long lastChecked, String status, String pushState, long lastPushed) throws MessagingException
{
mFolderId = id;
mName = name;
mUnreadMessageCount = unreadCount;
mVisibleLimit = visibleLimit;
mPushState = pushState;
super.setStatus(status);
// Only want to set the local variable stored in the super class. This class
// does a DB update on setLastChecked
super.setLastChecked(lastChecked);
super.setLastPush(lastPushed);
}
@Override
public boolean isOpen()
{
return mFolderId != -1;
}
@Override
public OpenMode getMode() throws MessagingException
{
return OpenMode.READ_WRITE;
}
@Override
public String getName()
{
return mName;
}
@Override
public boolean exists() throws MessagingException
{
Cursor cursor = null;
try
{
cursor = mDb.rawQuery("SELECT id FROM folders "
+ "where folders.name = ?", new String[] { this
.getName()
});
if (cursor.moveToFirst())
{
int folderId = cursor.getInt(0);
return (folderId > 0) ? true : false;
}
else
{
return false;
}
}
finally
{
if (cursor != null)
{
cursor.close();
}
}
}
@Override
public boolean create(FolderType type) throws MessagingException
{
if (exists())
{
throw new MessagingException("Folder " + mName + " already exists.");
}
mDb.execSQL("INSERT INTO folders (name, visible_limit) VALUES (?, ?)", new Object[]
{
mName,
K9.DEFAULT_VISIBLE_LIMIT
});
return true;
}
public boolean create(FolderType type, int visibleLimit) throws MessagingException
{
if (exists())
{
throw new MessagingException("Folder " + mName + " already exists.");
}
mDb.execSQL("INSERT INTO folders (name, visible_limit) VALUES (?, ?)", new Object[]
{
mName,
visibleLimit
});
return true;
}
@Override
public void close()
{
mFolderId = -1;
}
@Override
public int getMessageCount() throws MessagingException
{
open(OpenMode.READ_WRITE);
Cursor cursor = null;
try
{
cursor = mDb.rawQuery("SELECT COUNT(*) FROM messages WHERE messages.folder_id = ?",
new String[]
{
Long.toString(mFolderId)
});
cursor.moveToFirst();
int messageCount = cursor.getInt(0);
return messageCount;
}
finally
{
if (cursor != null)
{
cursor.close();
}
}
}
@Override
public int getUnreadMessageCount() throws MessagingException
{
open(OpenMode.READ_WRITE);
return mUnreadMessageCount;
}
public void setUnreadMessageCount(int unreadMessageCount) throws MessagingException
{
open(OpenMode.READ_WRITE);
mUnreadMessageCount = Math.max(0, unreadMessageCount);
mDb.execSQL("UPDATE folders SET unread_count = ? WHERE id = ?",
new Object[] { mUnreadMessageCount, mFolderId });
}
public void setLastChecked(long lastChecked) throws MessagingException
{
open(OpenMode.READ_WRITE);
super.setLastChecked(lastChecked);
mDb.execSQL("UPDATE folders SET last_updated = ? WHERE id = ?",
new Object[] { lastChecked, mFolderId });
}
public void setLastPush(long lastChecked) throws MessagingException
{
open(OpenMode.READ_WRITE);
super.setLastPush(lastChecked);
mDb.execSQL("UPDATE folders SET last_pushed = ? WHERE id = ?",
new Object[] { lastChecked, mFolderId });
}
public int getVisibleLimit() throws MessagingException
{
open(OpenMode.READ_WRITE);
return mVisibleLimit;
}
public void purgeToVisibleLimit(MessageRemovalListener listener) throws MessagingException
{
open(OpenMode.READ_WRITE);
Message[] messages = getMessages(null, false);
for (int i = mVisibleLimit; i < messages.length; i++)
{
if (listener != null)
{
listener.messageRemoved(messages[i]);
}
messages[i].setFlag(Flag.X_DESTROYED, true);
}
}
public void setVisibleLimit(int visibleLimit) throws MessagingException
{
open(OpenMode.READ_WRITE);
mVisibleLimit = visibleLimit;
mDb.execSQL("UPDATE folders SET visible_limit = ? WHERE id = ?",
new Object[] { mVisibleLimit, mFolderId });
}
public void setStatus(String status) throws MessagingException
{
open(OpenMode.READ_WRITE);
super.setStatus(status);
mDb.execSQL("UPDATE folders SET status = ? WHERE id = ?",
new Object[] { status, mFolderId });
}
public void setPushState(String pushState) throws MessagingException
{
open(OpenMode.READ_WRITE);
mPushState = pushState;
mDb.execSQL("UPDATE folders SET push_state = ? WHERE id = ?",
new Object[] { pushState, mFolderId });
}
public String getPushState()
{
return mPushState;
}
@Override
public FolderClass getDisplayClass()
{
return displayClass;
}
@Override
public FolderClass getSyncClass()
{
if (FolderClass.INHERITED == syncClass)
{
return getDisplayClass();
}
else
{
return syncClass;
}
}
public FolderClass getRawSyncClass()
{
return syncClass;
}
public FolderClass getPushClass()
{
if (FolderClass.INHERITED == pushClass)
{
return getSyncClass();
}
else
{
return pushClass;
}
}
public FolderClass getRawPushClass()
{
return pushClass;
}
public void setDisplayClass(FolderClass displayClass)
{
this.displayClass = displayClass;
}
public void setSyncClass(FolderClass syncClass)
{
this.syncClass = syncClass;
}
public void setPushClass(FolderClass pushClass)
{
this.pushClass = pushClass;
}
private String getPrefId() throws MessagingException
{
open(OpenMode.READ_WRITE);
if (prefId == null)
{
prefId = uUid + "." + mName;
}
return prefId;
}
public void delete(Preferences preferences) throws MessagingException
{
String id = getPrefId();
SharedPreferences.Editor editor = preferences.getPreferences().edit();
editor.remove(id + ".displayMode");
editor.remove(id + ".syncMode");
editor.commit();
}
public void save(Preferences preferences) throws MessagingException
{
String id = getPrefId();
SharedPreferences.Editor editor = preferences.getPreferences().edit();
// there can be a lot of folders. For the defaults, let's not save prefs, saving space, except for INBOX
if (displayClass == FolderClass.NO_CLASS && !K9.INBOX.equals(getName()))
{
editor.remove(id + ".displayMode");
}
else
{
editor.putString(id + ".displayMode", displayClass.name());
}
if (syncClass == FolderClass.INHERITED && !K9.INBOX.equals(getName()))
{
editor.remove(id + ".syncMode");
}
else
{
editor.putString(id + ".syncMode", syncClass.name());
}
if (pushClass == FolderClass.SECOND_CLASS && !K9.INBOX.equals(getName()))
{
editor.remove(id + ".pushMode");
}
else
{
editor.putString(id + ".pushMode", pushClass.name());
}
editor.commit();
}
public void refresh(Preferences preferences) throws MessagingException
{
String id = getPrefId();
try
{
displayClass = FolderClass.valueOf(preferences.getPreferences().getString(id + ".displayMode",
FolderClass.NO_CLASS.name()));
}
catch (Exception e)
{
Log.e(K9.LOG_TAG, "Unable to load displayMode for " + getName(), e);
displayClass = FolderClass.NO_CLASS;
}
if (displayClass == FolderClass.NONE)
{
displayClass = FolderClass.NO_CLASS;
}
FolderClass defSyncClass = FolderClass.INHERITED;
if (K9.INBOX.equals(getName()))
{
defSyncClass = FolderClass.FIRST_CLASS;
}
try
{
syncClass = FolderClass.valueOf(preferences.getPreferences().getString(id + ".syncMode",
defSyncClass.name()));
}
catch (Exception e)
{
Log.e(K9.LOG_TAG, "Unable to load syncMode for " + getName(), e);
syncClass = defSyncClass;
}
if (syncClass == FolderClass.NONE)
{
syncClass = FolderClass.INHERITED;
}
FolderClass defPushClass = FolderClass.SECOND_CLASS;
if (K9.INBOX.equals(getName()))
{
defPushClass = FolderClass.FIRST_CLASS;
}
try
{
pushClass = FolderClass.valueOf(preferences.getPreferences().getString(id + ".pushMode",
defPushClass.name()));
}
catch (Exception e)
{
Log.e(K9.LOG_TAG, "Unable to load pushMode for " + getName(), e);
pushClass = defPushClass;
}
if (pushClass == FolderClass.NONE)
{
pushClass = FolderClass.INHERITED;
}
}
@Override
public void fetch(Message[] messages, FetchProfile fp, MessageRetrievalListener listener)
throws MessagingException
{
open(OpenMode.READ_WRITE);
if (fp.contains(FetchProfile.Item.BODY))
{
for (Message message : messages)
{
LocalMessage localMessage = (LocalMessage)message;
Cursor cursor = null;
localMessage.setHeader(MimeHeader.HEADER_CONTENT_TYPE, "multipart/mixed");
MimeMultipart mp = new MimeMultipart();
mp.setSubType("mixed");
localMessage.setBody(mp);
try
{
cursor = mDb.rawQuery("SELECT html_content, text_content FROM messages "
+ "WHERE id = ?",
new String[] { Long.toString(localMessage.mId) });
cursor.moveToNext();
String htmlContent = cursor.getString(0);
String textContent = cursor.getString(1);
if (textContent != null)
{
LocalTextBody body = new LocalTextBody(textContent, htmlContent);
MimeBodyPart bp = new MimeBodyPart(body, "text/plain");
mp.addBodyPart(bp);
}
else
{
TextBody body = new TextBody(htmlContent);
MimeBodyPart bp = new MimeBodyPart(body, "text/html");
mp.addBodyPart(bp);
}
}
finally
{
if (cursor != null)
{
cursor.close();
}
}
try
{
cursor = mDb.query(
"attachments",
new String[]
{
"id",
"size",
"name",
"mime_type",
"store_data",
"content_uri"
},
"message_id = ?",
new String[] { Long.toString(localMessage.mId) },
null,
null,
null);
while (cursor.moveToNext())
{
long id = cursor.getLong(0);
int size = cursor.getInt(1);
String name = cursor.getString(2);
String type = cursor.getString(3);
String storeData = cursor.getString(4);
String contentUri = cursor.getString(5);
Body body = null;
if (contentUri != null)
{
body = new LocalAttachmentBody(Uri.parse(contentUri), mApplication);
}
MimeBodyPart bp = new LocalAttachmentBodyPart(body, id);
bp.setHeader(MimeHeader.HEADER_CONTENT_TYPE,
String.format("%s;\n name=\"%s\"",
type,
name));
bp.setHeader(MimeHeader.HEADER_CONTENT_TRANSFER_ENCODING, "base64");
bp.setHeader(MimeHeader.HEADER_CONTENT_DISPOSITION,
String.format("attachment;\n filename=\"%s\";\n size=%d",
name,
size));
/*
* HEADER_ANDROID_ATTACHMENT_STORE_DATA is a custom header we add to that
* we can later pull the attachment from the remote store if neccesary.
*/
bp.setHeader(MimeHeader.HEADER_ANDROID_ATTACHMENT_STORE_DATA, storeData);
mp.addBodyPart(bp);
}
}
finally
{
if (cursor != null)
{
cursor.close();
}
}
}
}
}
@Override
public Message[] getMessages(int start, int end, MessageRetrievalListener listener)
throws MessagingException
{
open(OpenMode.READ_WRITE);
throw new MessagingException(
"LocalStore.getMessages(int, int, MessageRetrievalListener) not yet implemented");
}
private void populateHeaders(List<LocalMessage> messages)
{
Cursor cursor = null;
if (messages.size() == 0)
{
return;
}
try
{
Map<Long, LocalMessage> popMessages = new HashMap<Long, LocalMessage>();
List<String> ids = new ArrayList<String>();
StringBuffer questions = new StringBuffer();
for (int i = 0; i < messages.size(); i++)
{
if (i != 0)
{
questions.append(", ");
}
questions.append("?");
LocalMessage message = messages.get(i);
Long id = message.getId();
ids.add(Long.toString(id));
popMessages.put(id, message);
}
cursor = mDb.rawQuery(
"SELECT message_id, name, value "
+ "FROM headers " + "WHERE message_id in ( " + questions + ") ",
ids.toArray(new String[] {}));
while (cursor.moveToNext())
{
Long id = cursor.getLong(0);
String name = cursor.getString(1);
String value = cursor.getString(2);
//Log.i(K9.LOG_TAG, "Retrieved header name= " + name + ", value = " + value + " for message " + id);
popMessages.get(id).addHeader(name, value);
}
}
finally
{
if (cursor != null)
{
cursor.close();
}
}
}
@Override
public Message getMessage(String uid) throws MessagingException
{
open(OpenMode.READ_WRITE);
LocalMessage message = new LocalMessage(uid, this);
Cursor cursor = null;
try
{
cursor = mDb.rawQuery(
"SELECT "
+ GET_MESSAGES_COLS
+ "FROM messages WHERE uid = ? AND folder_id = ?",
new String[]
{
message.getUid(), Long.toString(mFolderId)
});
if (!cursor.moveToNext())
{
return null;
}
message.populateFromGetMessageCursor(cursor);
}
finally
{
if (cursor != null)
{
cursor.close();
}
}
return message;
}
@Override
public Message[] getMessages(MessageRetrievalListener listener) throws MessagingException
{
return getMessages(listener, true);
}
@Override
public Message[] getMessages(MessageRetrievalListener listener, boolean includeDeleted) throws MessagingException
{
return getMessages(
listener,
"SELECT " + GET_MESSAGES_COLS
+ "FROM messages WHERE "
+ (includeDeleted ? "" : "deleted = 0 AND ")
+ " folder_id = ? ORDER BY date DESC"
, new String[]
{
Long.toString(mFolderId)
}
);
}
public Message[] searchForMessages(MessageRetrievalListener listener, String queryString) throws MessagingException {
return getMessages(
listener,
"SELECT "
+ GET_MESSAGES_COLS
+ "FROM messages WHERE html_content LIKE ? OR subject LIKE ? OR sender_list LIKE ? ORDER BY date DESC"
, new String[]
{
queryString,
queryString,
queryString
}
);
}
/*
* Given a query string, actually do the query for the messages and
* call the MessageRetrievalListener for each one
*/
public Message[] getMessages(
MessageRetrievalListener listener,
String queryString, String[] placeHolders
) throws MessagingException
{
open(OpenMode.READ_WRITE);
ArrayList<LocalMessage> messages = new ArrayList<LocalMessage>();
Cursor cursor = null;
try
{
// pull out messages most recent first, since that's what the default sort is
cursor = mDb.rawQuery(queryString, placeHolders);
int i = 0;
ArrayList<LocalMessage> messagesForHeaders = new ArrayList<LocalMessage>();
while (cursor.moveToNext())
{
LocalMessage message = new LocalMessage(null, this);
message.populateFromGetMessageCursor(cursor);
messages.add(message);
if (listener != null)
{
listener.messageFinished(message, i, -1);
}
i++;
}
if (listener != null)
{
listener.messagesFinished(i);
}
}
finally
{
if (cursor != null)
{
cursor.close();
}
}
return messages.toArray(new Message[] {});
}
@Override
public Message[] getMessages(String[] uids, MessageRetrievalListener listener)
throws MessagingException
{
open(OpenMode.READ_WRITE);
if (uids == null)
{
return getMessages(listener);
}
ArrayList<Message> messages = new ArrayList<Message>();
for (String uid : uids)
{
messages.add(getMessage(uid));
}
return messages.toArray(new Message[] {});
}
@Override
public void copyMessages(Message[] msgs, Folder folder) throws MessagingException
{
if (!(folder instanceof LocalFolder))
{
throw new MessagingException("copyMessages called with incorrect Folder");
}
((LocalFolder) folder).appendMessages(msgs, true);
}
@Override
public void moveMessages(Message[] msgs, Folder destFolder) throws MessagingException
{
if (!(destFolder instanceof LocalFolder))
{
throw new MessagingException("copyMessages called with non-LocalFolder");
}
LocalFolder lDestFolder = (LocalFolder)destFolder;
lDestFolder.open(OpenMode.READ_WRITE);
for (Message message : msgs)
{
LocalMessage lMessage = (LocalMessage)message;
if (!message.isSet(Flag.SEEN))
{
setUnreadMessageCount(getUnreadMessageCount() - 1);
lDestFolder.setUnreadMessageCount(lDestFolder.getUnreadMessageCount() + 1);
}
String oldUID = message.getUid();
message.setUid(K9.LOCAL_UID_PREFIX + UUID.randomUUID().toString());
mDb.execSQL("UPDATE messages " + "SET folder_id = ?, uid = ? " + "WHERE id = ?", new Object[]
{
lDestFolder.getId(),
message.getUid(),
lMessage.getId()
});
LocalMessage placeHolder = new LocalMessage(oldUID, this);
placeHolder.setFlagInternal(Flag.DELETED, true);
placeHolder.setFlagInternal(Flag.SEEN, true);
appendMessages(new Message[] { placeHolder });
}
}
/**
* The method differs slightly from the contract; If an incoming message already has a uid
* assigned and it matches the uid of an existing message then this message will replace the
* old message. It is implemented as a delete/insert. This functionality is used in saving
* of drafts and re-synchronization of updated server messages.
*/
@Override
public void appendMessages(Message[] messages) throws MessagingException
{
appendMessages(messages, false);
}
/**
* The method differs slightly from the contract; If an incoming message already has a uid
* assigned and it matches the uid of an existing message then this message will replace the
* old message. It is implemented as a delete/insert. This functionality is used in saving
* of drafts and re-synchronization of updated server messages.
*/
public void appendMessages(Message[] messages, boolean copy) throws MessagingException
{
open(OpenMode.READ_WRITE);
for (Message message : messages)
{
if (!(message instanceof MimeMessage))
{
throw new Error("LocalStore can only store Messages that extend MimeMessage");
}
String uid = message.getUid();
if (uid == null)
{
uid = K9.LOCAL_UID_PREFIX + UUID.randomUUID().toString();
message.setUid(uid);
}
else
{
Message oldMessage = getMessage(uid);
if (oldMessage != null && oldMessage.isSet(Flag.SEEN) == false)
{
setUnreadMessageCount(getUnreadMessageCount() - 1);
}
/*
* The message may already exist in this Folder, so delete it first.
*/
deleteAttachments(message.getUid());
mDb.execSQL("DELETE FROM messages WHERE folder_id = ? AND uid = ?",
new Object[] { mFolderId, message.getUid() });
}
ArrayList<Part> viewables = new ArrayList<Part>();
ArrayList<Part> attachments = new ArrayList<Part>();
MimeUtility.collectParts(message, viewables, attachments);
StringBuffer sbHtml = new StringBuffer();
StringBuffer sbText = new StringBuffer();
for (Part viewable : viewables)
{
try
{
String text = MimeUtility.getTextFromPart(viewable);
/*
* Anything with MIME type text/html will be stored as such. Anything
* else will be stored as text/plain.
*/
if (viewable.getMimeType().equalsIgnoreCase("text/html"))
{
sbHtml.append(text);
}
else
{
sbText.append(text);
}
}
catch (Exception e)
{
throw new MessagingException("Unable to get text for message part", e);
}
}
String text = sbText.toString();
String html = markupContent(text, sbHtml.toString());
try
{
ContentValues cv = new ContentValues();
cv.put("uid", uid);
cv.put("subject", message.getSubject());
cv.put("sender_list", Address.pack(message.getFrom()));
cv.put("date", message.getSentDate() == null
? System.currentTimeMillis() : message.getSentDate().getTime());
cv.put("flags", Utility.combine(message.getFlags(), ',').toUpperCase());
cv.put("deleted", message.isSet(Flag.DELETED) ? 1 : 0);
cv.put("folder_id", mFolderId);
cv.put("to_list", Address.pack(message.getRecipients(RecipientType.TO)));
cv.put("cc_list", Address.pack(message.getRecipients(RecipientType.CC)));
cv.put("bcc_list", Address.pack(message.getRecipients(RecipientType.BCC)));
cv.put("html_content", html.length() > 0 ? html : null);
cv.put("text_content", text.length() > 0 ? text : null);
cv.put("reply_to_list", Address.pack(message.getReplyTo()));
cv.put("attachment_count", attachments.size());
cv.put("internal_date", message.getInternalDate() == null
? System.currentTimeMillis() : message.getInternalDate().getTime());
String messageId = message.getMessageId();
if (messageId != null)
{
cv.put("message_id", messageId);
}
long messageUid = mDb.insert("messages", "uid", cv);
for (Part attachment : attachments)
{
saveAttachment(messageUid, attachment, copy);
}
saveHeaders(messageUid, (MimeMessage)message);
if (message.isSet(Flag.SEEN) == false)
{
setUnreadMessageCount(getUnreadMessageCount() + 1);
}
}
catch (Exception e)
{
throw new MessagingException("Error appending message", e);
}
}
}
/**
* Update the given message in the LocalStore without first deleting the existing
* message (contrast with appendMessages). This method is used to store changes
* to the given message while updating attachments and not removing existing
* attachment data.
* TODO In the future this method should be combined with appendMessages since the Message
* contains enough data to decide what to do.
* @param message
* @throws MessagingException
*/
public void updateMessage(LocalMessage message) throws MessagingException
{
open(OpenMode.READ_WRITE);
ArrayList<Part> viewables = new ArrayList<Part>();
ArrayList<Part> attachments = new ArrayList<Part>();
message.buildMimeRepresentation();
MimeUtility.collectParts(message, viewables, attachments);
StringBuffer sbHtml = new StringBuffer();
StringBuffer sbText = new StringBuffer();
for (int i = 0, count = viewables.size(); i < count; i++)
{
Part viewable = viewables.get(i);
try
{
String text = MimeUtility.getTextFromPart(viewable);
/*
* Anything with MIME type text/html will be stored as such. Anything
* else will be stored as text/plain.
*/
if (viewable.getMimeType().equalsIgnoreCase("text/html"))
{
sbHtml.append(text);
}
else
{
sbText.append(text);
}
}
catch (Exception e)
{
throw new MessagingException("Unable to get text for message part", e);
}
}
String text = sbText.toString();
String html = markupContent(text, sbHtml.toString());
try
{
mDb.execSQL("UPDATE messages SET "
+ "uid = ?, subject = ?, sender_list = ?, date = ?, flags = ?, "
+ "folder_id = ?, to_list = ?, cc_list = ?, bcc_list = ?, "
+ "html_content = ?, text_content = ?, reply_to_list = ?, "
+ "attachment_count = ? WHERE id = ?",
new Object[]
{
message.getUid(),
message.getSubject(),
Address.pack(message.getFrom()),
message.getSentDate() == null ? System
.currentTimeMillis() : message.getSentDate()
.getTime(),
Utility.combine(message.getFlags(), ',').toUpperCase(),
mFolderId,
Address.pack(message
.getRecipients(RecipientType.TO)),
Address.pack(message
.getRecipients(RecipientType.CC)),
Address.pack(message
.getRecipients(RecipientType.BCC)),
html.length() > 0 ? html : null,
text.length() > 0 ? text : null,
Address.pack(message.getReplyTo()),
attachments.size(),
message.mId
});
for (int i = 0, count = attachments.size(); i < count; i++)
{
Part attachment = attachments.get(i);
saveAttachment(message.mId, attachment, false);
}
saveHeaders(message.getId(), message);
}
catch (Exception e)
{
throw new MessagingException("Error appending message", e);
}
}
private void saveHeaders(long id, MimeMessage message)
{
deleteHeaders(id);
for (String name : message.getHeaderNames())
{
if (HEADERS_TO_SAVE.contains(name))
{
String[] values = message.getHeader(name);
for (String value : values)
{
ContentValues cv = new ContentValues();
cv.put("message_id", id);
cv.put("name", name);
cv.put("value", value);
mDb.insert("headers", "name", cv);
}
}
}
}
private void deleteHeaders(long id)
{
mDb.execSQL("DELETE FROM headers WHERE id = ?",
new Object[]
{
id
});
}
/**
* @param messageId
* @param attachment
* @param attachmentId -1 to create a new attachment or >= 0 to update an existing
* @throws IOException
* @throws MessagingException
*/
private void saveAttachment(long messageId, Part attachment, boolean saveAsNew)
throws IOException, MessagingException
{
long attachmentId = -1;
Uri contentUri = null;
int size = -1;
File tempAttachmentFile = null;
if ((!saveAsNew) && (attachment instanceof LocalAttachmentBodyPart))
{
attachmentId = ((LocalAttachmentBodyPart) attachment).getAttachmentId();
}
if (attachment.getBody() != null)
{
Body body = attachment.getBody();
if (body instanceof LocalAttachmentBody)
{
contentUri = ((LocalAttachmentBody) body).getContentUri();
}
else
{
/*
* If the attachment has a body we're expected to save it into the local store
* so we copy the data into a cached attachment file.
*/
InputStream in = attachment.getBody().getInputStream();
tempAttachmentFile = File.createTempFile("att", null, mAttachmentsDir);
FileOutputStream out = new FileOutputStream(tempAttachmentFile);
size = IOUtils.copy(in, out);
in.close();
out.close();
}
}
if (size == -1)
{
/*
* If the attachment is not yet downloaded see if we can pull a size
* off the Content-Disposition.
*/
String disposition = attachment.getDisposition();
if (disposition != null)
{
String s = MimeUtility.getHeaderParameter(disposition, "size");
if (s != null)
{
size = Integer.parseInt(s);
}
}
}
if (size == -1)
{
size = 0;
}
String storeData =
Utility.combine(attachment.getHeader(
MimeHeader.HEADER_ANDROID_ATTACHMENT_STORE_DATA), ',');
String name = MimeUtility.getHeaderParameter(attachment.getContentType(), "name");
String contentDisposition = MimeUtility.unfoldAndDecode(attachment.getDisposition());
if (name == null && contentDisposition != null)
{
name = MimeUtility.getHeaderParameter(contentDisposition, "filename");
}
if (attachmentId == -1)
{
ContentValues cv = new ContentValues();
cv.put("message_id", messageId);
cv.put("content_uri", contentUri != null ? contentUri.toString() : null);
cv.put("store_data", storeData);
cv.put("size", size);
cv.put("name", name);
cv.put("mime_type", attachment.getMimeType());
attachmentId = mDb.insert("attachments", "message_id", cv);
}
else
{
ContentValues cv = new ContentValues();
cv.put("content_uri", contentUri != null ? contentUri.toString() : null);
cv.put("size", size);
mDb.update(
"attachments",
cv,
"id = ?",
new String[] { Long.toString(attachmentId) });
}
if (tempAttachmentFile != null)
{
File attachmentFile = new File(mAttachmentsDir, Long.toString(attachmentId));
tempAttachmentFile.renameTo(attachmentFile);
contentUri = AttachmentProvider.getAttachmentUri(
new File(mPath).getName(),
attachmentId);
attachment.setBody(new LocalAttachmentBody(contentUri, mApplication));
ContentValues cv = new ContentValues();
cv.put("content_uri", contentUri != null ? contentUri.toString() : null);
mDb.update(
"attachments",
cv,
"id = ?",
new String[] { Long.toString(attachmentId) });
}
if (attachment instanceof LocalAttachmentBodyPart)
{
((LocalAttachmentBodyPart) attachment).setAttachmentId(attachmentId);
}
}
/**
* Changes the stored uid of the given message (using it's internal id as a key) to
* the uid in the message.
* @param message
*/
public void changeUid(LocalMessage message) throws MessagingException
{
open(OpenMode.READ_WRITE);
ContentValues cv = new ContentValues();
cv.put("uid", message.getUid());
mDb.update("messages", cv, "id = ?", new String[] { Long.toString(message.mId) });
}
@Override
public void setFlags(Message[] messages, Flag[] flags, boolean value)
throws MessagingException
{
open(OpenMode.READ_WRITE);
for (Message message : messages)
{
message.setFlags(flags, value);
}
}
@Override
public void setFlags(Flag[] flags, boolean value)
throws MessagingException
{
open(OpenMode.READ_WRITE);
for (Message message : getMessages(null))
{
message.setFlags(flags, value);
}
}
@Override
public String getUidFromMessageId(Message message) throws MessagingException
{
throw new MessagingException("Cannot call getUidFromMessageId on LocalFolder");
}
public void deleteMessagesOlderThan(long cutoff) throws MessagingException
{
open(OpenMode.READ_ONLY);
mDb.execSQL("DELETE FROM messages WHERE folder_id = ? and date < ?", new Object[]
{
Long.toString(mFolderId), new Long(cutoff)
});
resetUnreadCount();
}
private void resetUnreadCount()
{
try
{
int newUnread = 0;
Message[] messages = getMessages(null);
for (Message message : messages)
{
if (message.isSet(Flag.SEEN) == false)
{
newUnread++;
}
}
setUnreadMessageCount(newUnread);
}
catch (Exception e)
{
Log.e(K9.LOG_TAG, "Unable to fetch all messages from LocalStore", e);
}
}
@Override
public void delete(boolean recurse) throws MessagingException
{
// We need to open the folder first to make sure we've got it's id
open(OpenMode.READ_ONLY);
Message[] messages = getMessages(null);
for (Message message : messages)
{
deleteAttachments(message.getUid());
}
mDb.execSQL("DELETE FROM folders WHERE id = ?", new Object[]
{
Long.toString(mFolderId),
});
}
@Override
public boolean equals(Object o)
{
if (o instanceof LocalFolder)
{
return ((LocalFolder)o).mName.equals(mName);
}
return super.equals(o);
}
@Override
public Flag[] getPermanentFlags() throws MessagingException
{
return PERMANENT_FLAGS;
}
private void deleteAttachments(String uid) throws MessagingException
{
open(OpenMode.READ_WRITE);
Cursor messagesCursor = null;
try
{
messagesCursor = mDb.query(
"messages",
new String[] { "id" },
"folder_id = ? AND uid = ?",
new String[] { Long.toString(mFolderId), uid },
null,
null,
null);
while (messagesCursor.moveToNext())
{
long messageId = messagesCursor.getLong(0);
Cursor attachmentsCursor = null;
try
{
attachmentsCursor = mDb.query(
"attachments",
new String[] { "id" },
"message_id = ?",
new String[] { Long.toString(messageId) },
null,
null,
null);
while (attachmentsCursor.moveToNext())
{
long attachmentId = attachmentsCursor.getLong(0);
try
{
File file = new File(mAttachmentsDir, Long.toString(attachmentId));
if (file.exists())
{
file.delete();
}
}
catch (Exception e)
{
}
}
}
finally
{
if (attachmentsCursor != null)
{
attachmentsCursor.close();
}
}
}
}
finally
{
if (messagesCursor != null)
{
messagesCursor.close();
}
}
}
public String markupContent(String text, String html)
{
if (text.length() > 0 && html.length() == 0)
{
html = htmlifyString(text);
}
if (html.indexOf("cid:") != -1)
{
return html.replaceAll("cid:", "http://cid/");
}
else
{
return html;
}
}
public String htmlifyString(String text)
{
StringReader reader = new StringReader(text);
StringBuilder buff = new StringBuilder(text.length() + 512);
int c = 0;
try
{
while ((c = reader.read()) != -1)
{
switch (c)
{
case '&':
buff.append("&amp;");
break;
case '<':
buff.append("&lt;");
break;
case '>':
buff.append("&gt;");
break;
case '\r':
break;
default:
buff.append((char)c);
}//switch
}
}
catch (IOException e)
{
//Should never happen
Log.e(K9.LOG_TAG, null, e);
}
text = buff.toString();
text = text.replaceAll("\\s*([-=_]{30,}+)\\s*","<hr />");
text = text.replaceAll("(?m)^([^\r\n]{4,}[\\s\\w,:;+/])(?:\r\n|\n|\r)(?=[a-z]\\S{0,10}[\\s\\n\\r])","$1 ");
text = text.replaceAll("(?m)(\r\n|\n|\r){4,}","\n\n");
Matcher m = Regex.WEB_URL_PATTERN.matcher(text);
StringBuffer sb = new StringBuffer(text.length() + 512);
sb.append("<html><body><pre style=\"white-space: pre-wrap; word-wrap:break-word; \">");
while (m.find())
{
int start = m.start();
if (start == 0 || (start != 0 && text.charAt(start - 1) != '@'))
{
m.appendReplacement(sb, "<a href=\"$0\">$0</a>");
}
else
{
m.appendReplacement(sb, "$0");
}
}
m.appendTail(sb);
sb.append("</pre></body></html>");
text = sb.toString();
return text;
}
}
public class LocalTextBody extends TextBody
{
private String mBodyForDisplay;
public LocalTextBody(String body)
{
super(body);
}
public LocalTextBody(String body, String bodyForDisplay) throws MessagingException
{
super(body);
this.mBodyForDisplay = bodyForDisplay;
}
public String getBodyForDisplay()
{
return mBodyForDisplay;
}
public void setBodyForDisplay(String mBodyForDisplay)
{
this.mBodyForDisplay = mBodyForDisplay;
}
}//LocalTextBody
public class LocalMessage extends MimeMessage
{
private long mId;
private int mAttachmentCount;
private String mSubject;
private boolean mHeadersLoaded = false;
private boolean mMessageDirty = false;
public LocalMessage()
{
}
LocalMessage(String uid, Folder folder) throws MessagingException
{
this.mUid = uid;
this.mFolder = folder;
}
private void populateFromGetMessageCursor(Cursor cursor)
throws MessagingException
{
this.setSubject(cursor.getString(0) == null ? "" : cursor.getString(0));
Address[] from = Address.unpack(cursor.getString(1));
if (from.length > 0)
{
this.setFrom(from[0]);
}
this.setInternalSentDate(new Date(cursor.getLong(2)));
this.setUid(cursor.getString(3));
String flagList = cursor.getString(4);
if (flagList != null && flagList.length() > 0)
{
String[] flags = flagList.split(",");
try
{
for (String flag : flags)
{
this.setFlagInternal(Flag.valueOf(flag), true);
}
}
catch (Exception e)
{
}
}
this.mId = cursor.getLong(5);
this.setRecipients(RecipientType.TO, Address.unpack(cursor.getString(6)));
this.setRecipients(RecipientType.CC, Address.unpack(cursor.getString(7)));
this.setRecipients(RecipientType.BCC, Address.unpack(cursor.getString(8)));
this.setReplyTo(Address.unpack(cursor.getString(9)));
this.mAttachmentCount = cursor.getInt(10);
this.setInternalDate(new Date(cursor.getLong(11)));
this.setMessageId(cursor.getString(12));
}
/* Custom version of writeTo that updates the MIME message based on localMessage
* changes.
*/
public void writeTo(OutputStream out) throws IOException, MessagingException
{
if (mMessageDirty) buildMimeRepresentation();
super.writeTo(out);
}
private void buildMimeRepresentation() throws MessagingException
{
if (!mMessageDirty)
{
return;
}
super.setSubject(mSubject);
if (this.mFrom != null && this.mFrom.length > 0)
{
super.setFrom(this.mFrom[0]);
}
super.setReplyTo(mReplyTo);
super.setSentDate(this.getSentDate());
super.setRecipients(RecipientType.TO, mTo);
super.setRecipients(RecipientType.CC, mCc);
super.setRecipients(RecipientType.BCC, mBcc);
if (mMessageId != null) super.setMessageId(mMessageId);
mMessageDirty = false;
return;
}
@Override
public String getSubject() throws MessagingException
{
return mSubject;
}
@Override
public void setSubject(String subject) throws MessagingException
{
mSubject = subject;
mMessageDirty = true;
}
public void setMessageId(String messageId)
{
mMessageId = messageId;
mMessageDirty = true;
}
public int getAttachmentCount()
{
return mAttachmentCount;
}
public void setFrom(Address from) throws MessagingException
{
this.mFrom = new Address[] { from };
mMessageDirty = true;
}
public void setReplyTo(Address[] replyTo) throws MessagingException
{
if (replyTo == null || replyTo.length == 0)
{
mReplyTo = null;
}
else
{
mReplyTo = replyTo;
}
mMessageDirty = true;
}
/*
* For performance reasons, we add headers instead of setting them (see super implementation)
* which removes (expensive) them before adding them
*/
@Override
public void setRecipients(RecipientType type, Address[] addresses) throws MessagingException
{
if (type == RecipientType.TO)
{
if (addresses == null || addresses.length == 0)
{
this.mTo = null;
}
else
{
this.mTo = addresses;
}
}
else if (type == RecipientType.CC)
{
if (addresses == null || addresses.length == 0)
{
this.mCc = null;
}
else
{
this.mCc = addresses;
}
}
else if (type == RecipientType.BCC)
{
if (addresses == null || addresses.length == 0)
{
this.mBcc = null;
}
else
{
this.mBcc = addresses;
}
}
else
{
throw new MessagingException("Unrecognized recipient type.");
}
mMessageDirty = true;
}
public void setFlagInternal(Flag flag, boolean set) throws MessagingException
{
super.setFlag(flag, set);
}
public long getId()
{
return mId;
}
public void setFlag(Flag flag, boolean set) throws MessagingException
{
if (flag == Flag.DELETED && set)
{
/*
* If a message is being marked as deleted we want to clear out it's content
* and attachments as well. Delete will not actually remove the row since we need
* to retain the uid for synchronization purposes.
*/
/*
* Delete all of the messages' content to save space.
*/
((LocalFolder) mFolder).deleteAttachments(getUid());
mDb.execSQL(
"UPDATE messages SET " +
"deleted = 1," +
"subject = NULL, " +
"sender_list = NULL, " +
"date = NULL, " +
"to_list = NULL, " +
"cc_list = NULL, " +
"bcc_list = NULL, " +
"html_content = NULL, " +
"text_content = NULL, " +
"reply_to_list = NULL " +
"WHERE id = ?",
new Object[]
{
mId
});
/*
* Delete all of the messages' attachments to save space.
*/
mDb.execSQL("DELETE FROM attachments WHERE id = ?",
new Object[]
{
mId
});
((LocalFolder)mFolder).deleteHeaders(mId);
}
else if (flag == Flag.X_DESTROYED && set)
{
((LocalFolder) mFolder).deleteAttachments(getUid());
mDb.execSQL("DELETE FROM messages WHERE id = ?",
new Object[] { mId });
((LocalFolder)mFolder).deleteHeaders(mId);
}
/*
* Update the unread count on the folder.
*/
try
{
if (flag == Flag.DELETED || flag == Flag.X_DESTROYED
|| (flag == Flag.SEEN && !isSet(Flag.DELETED)))
{
LocalFolder folder = (LocalFolder)mFolder;
if (set && !isSet(Flag.SEEN))
{
folder.setUnreadMessageCount(folder.getUnreadMessageCount() - 1);
}
else if (!set && isSet(Flag.SEEN))
{
folder.setUnreadMessageCount(folder.getUnreadMessageCount() + 1);
}
}
}
catch (MessagingException me)
{
Log.e(K9.LOG_TAG, "Unable to update LocalStore unread message count",
me);
throw new RuntimeException(me);
}
super.setFlag(flag, set);
/*
* Set the flags on the message.
*/
mDb.execSQL("UPDATE messages " + "SET flags = ? " + " WHERE id = ?", new Object[]
{
Utility.combine(getFlags(), ',').toUpperCase(), mId
});
}
private void loadHeaders()
{
ArrayList<LocalMessage> messages = new ArrayList<LocalMessage>();
messages.add(this);
mHeadersLoaded = true; // set true before calling populate headers to stop recursion
((LocalFolder) mFolder).populateHeaders(messages);
}
public void addHeader(String name, String value)
{
if (!mHeadersLoaded)
{
loadHeaders();
}
super.addHeader(name, value);
}
public void setHeader(String name, String value)
{
if (!mHeadersLoaded)
loadHeaders();
super.setHeader(name, value);
}
public String[] getHeader(String name)
{
if (!mHeadersLoaded)
loadHeaders();
return super.getHeader(name);
}
public void removeHeader(String name)
{
if (!mHeadersLoaded)
loadHeaders();
super.removeHeader(name);
}
}
public class LocalAttachmentBodyPart extends MimeBodyPart
{
private long mAttachmentId = -1;
public LocalAttachmentBodyPart(Body body, long attachmentId) throws MessagingException
{
super(body);
mAttachmentId = attachmentId;
}
/**
* Returns the local attachment id of this body, or -1 if it is not stored.
* @return
*/
public long getAttachmentId()
{
return mAttachmentId;
}
public void setAttachmentId(long attachmentId)
{
mAttachmentId = attachmentId;
}
public String toString()
{
return "" + mAttachmentId;
}
}
public static class LocalAttachmentBody implements Body
{
private Application mApplication;
private Uri mUri;
public LocalAttachmentBody(Uri uri, Application application)
{
mApplication = application;
mUri = uri;
}
public InputStream getInputStream() throws MessagingException
{
try
{
return mApplication.getContentResolver().openInputStream(mUri);
}
catch (FileNotFoundException fnfe)
{
/*
* Since it's completely normal for us to try to serve up attachments that
* have been blown away, we just return an empty stream.
*/
return new ByteArrayInputStream(new byte[0]);
}
catch (IOException ioe)
{
throw new MessagingException("Invalid attachment.", ioe);
}
}
public void writeTo(OutputStream out) throws IOException, MessagingException
{
InputStream in = getInputStream();
Base64OutputStream base64Out = new Base64OutputStream(out);
IOUtils.copy(in, base64Out);
base64Out.close();
}
public Uri getContentUri()
{
return mUri;
}
}
}
|
Lift our column list for "load messages" out to the LocalStore, as the
first part of lifting "get messages" out of "folder" for search
|
src/com/fsck/k9/mail/store/LocalStore.java
|
Lift our column list for "load messages" out to the LocalStore, as the first part of lifting "get messages" out of "folder" for search
|
<ide><path>rc/com/fsck/k9/mail/store/LocalStore.java
<ide> HEADERS_TO_SAVE.add("References");
<ide> HEADERS_TO_SAVE.add("X-User-Agent");
<ide> }
<add> /*
<add> * a String containing the columns getMessages expects to work with
<add> * in the correct order.
<add> */
<add> static private String GET_MESSAGES_COLS =
<add> "subject, sender_list, date, uid, flags, id, to_list, cc_list, "
<add> + "bcc_list, reply_to_list, attachment_count, internal_date, message_id, folder_id ";
<add>
<ide>
<ide> /**
<ide> * @param uri local://localhost/path/to/database/uuid.db
<ide> private FolderClass pushClass = FolderClass.SECOND_CLASS;
<ide> private String prefId = null;
<ide> private String mPushState = null;
<del>
<del> /*
<del> * a String containing the columns getMessages expects to work with
<del> * in the correct order.
<del> */
<del> private String GET_MESSAGES_COLS =
<del>
<del> "subject, sender_list, date, uid, flags, id, to_list, cc_list, "
<del> + "bcc_list, reply_to_list, attachment_count, internal_date, message_id ";
<ide>
<ide>
<ide> public LocalFolder(String name)
|
|
Java
|
epl-1.0
|
359c773d8bffa2caba25cdaf2561ede71fe90736
| 0 |
opendaylight/yangtools,opendaylight/yangtools
|
/*
* Copyright (c) 2015 Cisco Systems, Inc. and others. All rights reserved.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License v1.0 which accompanies this distribution,
* and is available at http://www.eclipse.org/legal/epl-v10.html
*/
package org.opendaylight.yangtools.yang.parser.stmt.rfc6020;
import static org.opendaylight.yangtools.yang.common.YangConstants.RFC6020_YANG_NAMESPACE;
import static org.opendaylight.yangtools.yang.common.YangConstants.YANG_XPATH_FUNCTIONS_PREFIX;
import com.google.common.base.CharMatcher;
import com.google.common.base.Splitter;
import com.google.common.collect.ImmutableBiMap;
import com.google.common.collect.ImmutableSet;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import javax.annotation.RegEx;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;
import org.antlr.v4.runtime.tree.TerminalNode;
import org.opendaylight.yangtools.antlrv4.code.gen.YangStatementParser;
import org.opendaylight.yangtools.yang.common.QName;
import org.opendaylight.yangtools.yang.common.YangVersion;
import org.opendaylight.yangtools.yang.model.api.RevisionAwareXPath;
import org.opendaylight.yangtools.yang.model.api.stmt.SchemaNodeIdentifier;
import org.opendaylight.yangtools.yang.model.api.stmt.SchemaNodeIdentifier.Relative;
import org.opendaylight.yangtools.yang.model.util.RevisionAwareXPathImpl;
import org.opendaylight.yangtools.yang.parser.spi.meta.StmtContext;
import org.opendaylight.yangtools.yang.parser.spi.meta.StmtContextUtils;
import org.opendaylight.yangtools.yang.parser.spi.source.SourceException;
import org.opendaylight.yangtools.yang.parser.spi.source.StatementSourceReference;
import org.opendaylight.yangtools.yang.parser.stmt.reactor.StatementContextBase;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public final class Utils {
private static final Logger LOG = LoggerFactory.getLogger(Utils.class);
private static final CharMatcher LEFT_PARENTHESIS_MATCHER = CharMatcher.is('(');
private static final CharMatcher RIGHT_PARENTHESIS_MATCHER = CharMatcher.is(')');
private static final CharMatcher AMPERSAND_MATCHER = CharMatcher.is('&');
private static final CharMatcher QUESTION_MARK_MATCHER = CharMatcher.is('?');
private static final CharMatcher ANYQUOTE_MATCHER = CharMatcher.anyOf("'\"");
private static final Splitter SLASH_SPLITTER = Splitter.on('/').omitEmptyStrings().trimResults();
private static final Splitter SPACE_SPLITTER = Splitter.on(' ').omitEmptyStrings().trimResults();
private static final Splitter COLON_SPLITTER = Splitter.on(":").omitEmptyStrings().trimResults();
private static final Pattern PATH_ABS = Pattern.compile("/[^/].*");
@RegEx
private static final String YANG_XPATH_FUNCTIONS_STRING =
"(re-match|deref|derived-from(-or-self)?|enum-value|bit-is-set)(\\()";
private static final Pattern YANG_XPATH_FUNCTIONS_PATTERN = Pattern.compile(YANG_XPATH_FUNCTIONS_STRING);
private static final Pattern ESCAPED_DQUOT = Pattern.compile("\\\"", Pattern.LITERAL);
private static final Pattern ESCAPED_BACKSLASH = Pattern.compile("\\\\", Pattern.LITERAL);
private static final Pattern ESCAPED_LF = Pattern.compile("\\n", Pattern.LITERAL);
private static final Pattern ESCAPED_TAB = Pattern.compile("\\t", Pattern.LITERAL);
private static final ThreadLocal<XPathFactory> XPATH_FACTORY = new ThreadLocal<XPathFactory>() {
@Override
protected XPathFactory initialValue() {
return XPathFactory.newInstance();
}
};
private Utils() {
throw new UnsupportedOperationException();
}
/**
* Clean up any resources attached to the current thread. Threads interacting with this class can cause thread-local
* caches to be attached to them. Invoke this method if you want to detach those resources.
*/
public static void detachFromCurrentThread() {
XPATH_FACTORY.remove();
}
public static Collection<SchemaNodeIdentifier.Relative> transformKeysStringToKeyNodes(
final StmtContext<?, ?, ?> ctx, final String value) {
final List<String> keyTokens = SPACE_SPLITTER.splitToList(value);
// to detect if key contains duplicates
if (new HashSet<>(keyTokens).size() < keyTokens.size()) {
// FIXME: report all duplicate keys
throw new SourceException(ctx.getStatementSourceReference(), "Duplicate value in list key: %s", value);
}
final Set<SchemaNodeIdentifier.Relative> keyNodes = new HashSet<>();
for (final String keyToken : keyTokens) {
final SchemaNodeIdentifier.Relative keyNode = (Relative) SchemaNodeIdentifier.Relative.create(false,
StmtContextUtils.qnameFromArgument(ctx, keyToken));
keyNodes.add(keyNode);
}
return keyNodes;
}
static Collection<SchemaNodeIdentifier.Relative> parseUniqueConstraintArgument(final StmtContext<?, ?, ?> ctx,
final String argumentValue) {
final Set<SchemaNodeIdentifier.Relative> uniqueConstraintNodes = new HashSet<>();
for (final String uniqueArgToken : SPACE_SPLITTER.split(argumentValue)) {
final SchemaNodeIdentifier nodeIdentifier = Utils.nodeIdentifierFromPath(ctx, uniqueArgToken);
SourceException.throwIf(nodeIdentifier.isAbsolute(), ctx.getStatementSourceReference(),
"Unique statement argument '%s' contains schema node identifier '%s' "
+ "which is not in the descendant node identifier form.", argumentValue, uniqueArgToken);
uniqueConstraintNodes.add((SchemaNodeIdentifier.Relative) nodeIdentifier);
}
return ImmutableSet.copyOf(uniqueConstraintNodes);
}
private static String trimSingleLastSlashFromXPath(final String path) {
return path.endsWith("/") ? path.substring(0, path.length() - 1) : path;
}
static RevisionAwareXPath parseXPath(final StmtContext<?, ?, ?> ctx, final String path) {
final XPath xPath = XPATH_FACTORY.get().newXPath();
xPath.setNamespaceContext(StmtNamespaceContext.create(ctx,
ImmutableBiMap.of(RFC6020_YANG_NAMESPACE.toString(), YANG_XPATH_FUNCTIONS_PREFIX)));
final String trimmed = trimSingleLastSlashFromXPath(path);
try {
// XPath extension functions have to be prefixed
// yang-specific XPath functions are in fact extended functions, therefore we have to add
// "yang" prefix to them so that they can be properly validated with the XPath.compile() method
// the "yang" prefix is bound to RFC6020 YANG namespace
final String prefixedXPath = addPrefixToYangXPathFunctions(trimmed, ctx);
// TODO: we could capture the result and expose its 'evaluate' method
xPath.compile(prefixedXPath);
} catch (final XPathExpressionException e) {
LOG.warn("Argument \"{}\" is not valid XPath string at \"{}\"", path, ctx.getStatementSourceReference(), e);
}
return new RevisionAwareXPathImpl(path, PATH_ABS.matcher(path).matches());
}
private static String addPrefixToYangXPathFunctions(final String path, final StmtContext<?, ?, ?> ctx) {
if (ctx.getRootVersion() == YangVersion.VERSION_1_1) {
// FIXME once Java 9 is available, change this to StringBuilder as Matcher.appendReplacement() and
// Matcher.appendTail() will accept StringBuilder parameter in Java 9
final StringBuffer result = new StringBuffer();
final String prefix = YANG_XPATH_FUNCTIONS_PREFIX + ":";
final Matcher matcher = YANG_XPATH_FUNCTIONS_PATTERN.matcher(path);
while (matcher.find()) {
matcher.appendReplacement(result, prefix + matcher.group());
}
matcher.appendTail(result);
return result.toString();
}
return path;
}
public static QName trimPrefix(final QName identifier) {
final String prefixedLocalName = identifier.getLocalName();
final String[] namesParts = prefixedLocalName.split(":");
if (namesParts.length == 2) {
final String localName = namesParts[1];
return QName.create(identifier.getModule(), localName);
}
return identifier;
}
public static String trimPrefix(final String identifier) {
final List<String> namesParts = COLON_SPLITTER.splitToList(identifier);
if (namesParts.size() == 2) {
return namesParts.get(1);
}
return identifier;
}
@SuppressWarnings("checkstyle:illegalCatch")
static SchemaNodeIdentifier nodeIdentifierFromPath(final StmtContext<?, ?, ?> ctx, final String path) {
// FIXME: is the path trimming really necessary??
final List<QName> qNames = new ArrayList<>();
for (final String nodeName : SLASH_SPLITTER.split(trimSingleLastSlashFromXPath(path))) {
try {
final QName qName = StmtContextUtils.qnameFromArgument(ctx, nodeName);
qNames.add(qName);
} catch (final RuntimeException e) {
throw new SourceException(ctx.getStatementSourceReference(), e,
"Failed to parse node '%s' in path '%s'", nodeName, path);
}
}
return SchemaNodeIdentifier.create(qNames, PATH_ABS.matcher(path).matches());
}
public static String stringFromStringContext(final YangStatementParser.ArgumentContext context,
final StatementSourceReference ref) {
return stringFromStringContext(context, YangVersion.VERSION_1, ref);
}
public static String stringFromStringContext(final YangStatementParser.ArgumentContext context,
final YangVersion yangVersion, final StatementSourceReference ref) {
final StringBuilder sb = new StringBuilder();
List<TerminalNode> strings = context.STRING();
if (strings.isEmpty()) {
strings = Collections.singletonList(context.IDENTIFIER());
}
for (final TerminalNode stringNode : strings) {
final String str = stringNode.getText();
final char firstChar = str.charAt(0);
final char lastChar = str.charAt(str.length() - 1);
if (firstChar == '"' && lastChar == '"') {
final String innerStr = str.substring(1, str.length() - 1);
/*
* Unescape escaped double quotes, tabs, new line and backslash
* in the inner string and trim the result.
*/
checkDoubleQuotedString(innerStr, yangVersion, ref);
sb.append(ESCAPED_TAB.matcher(
ESCAPED_LF.matcher(
ESCAPED_BACKSLASH.matcher(
ESCAPED_DQUOT.matcher(innerStr).replaceAll("\\\""))
.replaceAll("\\\\"))
.replaceAll("\\\n"))
.replaceAll("\\\t"));
} else if (firstChar == '\'' && lastChar == '\'') {
/*
* According to RFC6020 a single quote character cannot occur in
* a single-quoted string, even when preceded by a backslash.
*/
sb.append(str.substring(1, str.length() - 1));
} else {
checkUnquotedString(str, yangVersion, ref);
sb.append(str);
}
}
return sb.toString();
}
private static void checkUnquotedString(final String str, final YangVersion yangVersion,
final StatementSourceReference ref) {
if (yangVersion == YangVersion.VERSION_1_1) {
SourceException.throwIf(ANYQUOTE_MATCHER.matchesAnyOf(str), ref,
"YANG 1.1: unquoted string (%s) contains illegal characters", str);
}
}
private static void checkDoubleQuotedString(final String str, final YangVersion yangVersion,
final StatementSourceReference ref) {
if (yangVersion == YangVersion.VERSION_1_1) {
for (int i = 0; i < str.length() - 1; i++) {
if (str.charAt(i) == '\\') {
switch (str.charAt(i + 1)) {
case 'n':
case 't':
case '\\':
case '\"':
i++;
break;
default:
throw new SourceException(ref, "YANG 1.1: illegal double quoted string (%s). In double "
+ "quoted string the backslash must be followed by one of the following character "
+ "[n,t,\",\\], but was '%s'.", str, str.charAt(i + 1));
}
}
}
}
}
@Nullable
public static StatementContextBase<?, ?, ?> findNode(final StmtContext<?, ?, ?> rootStmtCtx,
final SchemaNodeIdentifier node) {
return (StatementContextBase<?, ?, ?>) rootStmtCtx.getFromNamespace(SchemaNodeIdentifierBuildNamespace.class,
node);
}
static @Nonnull Boolean parseBoolean(final StmtContext<?, ?, ?> ctx, final String input) {
if ("true".equals(input)) {
return Boolean.TRUE;
} else if ("false".equals(input)) {
return Boolean.FALSE;
} else {
throw new SourceException(ctx.getStatementSourceReference(),
"Invalid '%s' statement %s '%s', it can be either 'true' or 'false'",
ctx.getPublicDefinition().getStatementName(), ctx.getPublicDefinition().getArgumentName(), input);
}
}
static String internBoolean(final String input) {
if ("true".equals(input)) {
return "true";
} else if ("false".equals(input)) {
return "false";
} else {
return input;
}
}
/**
* Replaces illegal characters of QName by the name of the character (e.g. '?' is replaced by "QuestionMark" etc.).
*
* @param string
* input String
* @return result String
*/
public static String replaceIllegalCharsForQName(String string) {
string = LEFT_PARENTHESIS_MATCHER.replaceFrom(string, "LeftParenthesis");
string = RIGHT_PARENTHESIS_MATCHER.replaceFrom(string, "RightParenthesis");
string = AMPERSAND_MATCHER.replaceFrom(string, "Ampersand");
string = QUESTION_MARK_MATCHER.replaceFrom(string, "QuestionMark");
return string;
}
public static boolean belongsToTheSameModule(final QName targetStmtQName, final QName sourceStmtQName) {
return targetStmtQName.getModule().equals(sourceStmtQName.getModule());
}
}
|
yang/yang-parser-impl/src/main/java/org/opendaylight/yangtools/yang/parser/stmt/rfc6020/Utils.java
|
/*
* Copyright (c) 2015 Cisco Systems, Inc. and others. All rights reserved.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License v1.0 which accompanies this distribution,
* and is available at http://www.eclipse.org/legal/epl-v10.html
*/
package org.opendaylight.yangtools.yang.parser.stmt.rfc6020;
import static org.opendaylight.yangtools.yang.common.YangConstants.RFC6020_YANG_NAMESPACE;
import static org.opendaylight.yangtools.yang.common.YangConstants.YANG_XPATH_FUNCTIONS_PREFIX;
import com.google.common.base.CharMatcher;
import com.google.common.base.Splitter;
import com.google.common.collect.ImmutableBiMap;
import com.google.common.collect.ImmutableSet;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import javax.annotation.RegEx;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;
import org.antlr.v4.runtime.tree.TerminalNode;
import org.opendaylight.yangtools.antlrv4.code.gen.YangStatementParser;
import org.opendaylight.yangtools.yang.common.QName;
import org.opendaylight.yangtools.yang.common.YangVersion;
import org.opendaylight.yangtools.yang.model.api.RevisionAwareXPath;
import org.opendaylight.yangtools.yang.model.api.stmt.SchemaNodeIdentifier;
import org.opendaylight.yangtools.yang.model.api.stmt.SchemaNodeIdentifier.Relative;
import org.opendaylight.yangtools.yang.model.util.RevisionAwareXPathImpl;
import org.opendaylight.yangtools.yang.parser.spi.meta.StmtContext;
import org.opendaylight.yangtools.yang.parser.spi.meta.StmtContextUtils;
import org.opendaylight.yangtools.yang.parser.spi.source.SourceException;
import org.opendaylight.yangtools.yang.parser.spi.source.StatementSourceReference;
import org.opendaylight.yangtools.yang.parser.stmt.reactor.StatementContextBase;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public final class Utils {
private static final Logger LOG = LoggerFactory.getLogger(Utils.class);
private static final CharMatcher LEFT_PARENTHESIS_MATCHER = CharMatcher.is('(');
private static final CharMatcher RIGHT_PARENTHESIS_MATCHER = CharMatcher.is(')');
private static final CharMatcher AMPERSAND_MATCHER = CharMatcher.is('&');
private static final CharMatcher QUESTION_MARK_MATCHER = CharMatcher.is('?');
private static final CharMatcher ANYQUOTE_MATCHER = CharMatcher.anyOf("'\"");
private static final Splitter SLASH_SPLITTER = Splitter.on('/').omitEmptyStrings().trimResults();
private static final Splitter SPACE_SPLITTER = Splitter.on(' ').omitEmptyStrings().trimResults();
private static final Splitter COLON_SPLITTER = Splitter.on(":").omitEmptyStrings().trimResults();
private static final Pattern PATH_ABS = Pattern.compile("/[^/].*");
@RegEx
private static final String YANG_XPATH_FUNCTIONS_STRING =
"(re-match|deref|derived-from(-or-self)?|enum-value|bit-is-set)(\\()";
private static final Pattern YANG_XPATH_FUNCTIONS_PATTERN = Pattern.compile(YANG_XPATH_FUNCTIONS_STRING);
private static final ThreadLocal<XPathFactory> XPATH_FACTORY = new ThreadLocal<XPathFactory>() {
@Override
protected XPathFactory initialValue() {
return XPathFactory.newInstance();
}
};
private Utils() {
throw new UnsupportedOperationException();
}
/**
* Clean up any resources attached to the current thread. Threads interacting with this class can cause thread-local
* caches to be attached to them. Invoke this method if you want to detach those resources.
*/
public static void detachFromCurrentThread() {
XPATH_FACTORY.remove();
}
public static Collection<SchemaNodeIdentifier.Relative> transformKeysStringToKeyNodes(
final StmtContext<?, ?, ?> ctx, final String value) {
final List<String> keyTokens = SPACE_SPLITTER.splitToList(value);
// to detect if key contains duplicates
if (new HashSet<>(keyTokens).size() < keyTokens.size()) {
// FIXME: report all duplicate keys
throw new SourceException(ctx.getStatementSourceReference(), "Duplicate value in list key: %s", value);
}
final Set<SchemaNodeIdentifier.Relative> keyNodes = new HashSet<>();
for (final String keyToken : keyTokens) {
final SchemaNodeIdentifier.Relative keyNode = (Relative) SchemaNodeIdentifier.Relative.create(false,
StmtContextUtils.qnameFromArgument(ctx, keyToken));
keyNodes.add(keyNode);
}
return keyNodes;
}
static Collection<SchemaNodeIdentifier.Relative> parseUniqueConstraintArgument(final StmtContext<?, ?, ?> ctx,
final String argumentValue) {
final Set<SchemaNodeIdentifier.Relative> uniqueConstraintNodes = new HashSet<>();
for (final String uniqueArgToken : SPACE_SPLITTER.split(argumentValue)) {
final SchemaNodeIdentifier nodeIdentifier = Utils.nodeIdentifierFromPath(ctx, uniqueArgToken);
SourceException.throwIf(nodeIdentifier.isAbsolute(), ctx.getStatementSourceReference(),
"Unique statement argument '%s' contains schema node identifier '%s' "
+ "which is not in the descendant node identifier form.", argumentValue, uniqueArgToken);
uniqueConstraintNodes.add((SchemaNodeIdentifier.Relative) nodeIdentifier);
}
return ImmutableSet.copyOf(uniqueConstraintNodes);
}
private static String trimSingleLastSlashFromXPath(final String path) {
return path.endsWith("/") ? path.substring(0, path.length() - 1) : path;
}
static RevisionAwareXPath parseXPath(final StmtContext<?, ?, ?> ctx, final String path) {
final XPath xPath = XPATH_FACTORY.get().newXPath();
xPath.setNamespaceContext(StmtNamespaceContext.create(ctx,
ImmutableBiMap.of(RFC6020_YANG_NAMESPACE.toString(), YANG_XPATH_FUNCTIONS_PREFIX)));
final String trimmed = trimSingleLastSlashFromXPath(path);
try {
// XPath extension functions have to be prefixed
// yang-specific XPath functions are in fact extended functions, therefore we have to add
// "yang" prefix to them so that they can be properly validated with the XPath.compile() method
// the "yang" prefix is bound to RFC6020 YANG namespace
final String prefixedXPath = addPrefixToYangXPathFunctions(trimmed, ctx);
// TODO: we could capture the result and expose its 'evaluate' method
xPath.compile(prefixedXPath);
} catch (final XPathExpressionException e) {
LOG.warn("Argument \"{}\" is not valid XPath string at \"{}\"", path, ctx.getStatementSourceReference(), e);
}
return new RevisionAwareXPathImpl(path, PATH_ABS.matcher(path).matches());
}
private static String addPrefixToYangXPathFunctions(final String path, final StmtContext<?, ?, ?> ctx) {
if (ctx.getRootVersion() == YangVersion.VERSION_1_1) {
// FIXME once Java 9 is available, change this to StringBuilder as Matcher.appendReplacement() and
// Matcher.appendTail() will accept StringBuilder parameter in Java 9
final StringBuffer result = new StringBuffer();
final String prefix = YANG_XPATH_FUNCTIONS_PREFIX + ":";
final Matcher matcher = YANG_XPATH_FUNCTIONS_PATTERN.matcher(path);
while (matcher.find()) {
matcher.appendReplacement(result, prefix + matcher.group());
}
matcher.appendTail(result);
return result.toString();
}
return path;
}
public static QName trimPrefix(final QName identifier) {
final String prefixedLocalName = identifier.getLocalName();
final String[] namesParts = prefixedLocalName.split(":");
if (namesParts.length == 2) {
final String localName = namesParts[1];
return QName.create(identifier.getModule(), localName);
}
return identifier;
}
public static String trimPrefix(final String identifier) {
final List<String> namesParts = COLON_SPLITTER.splitToList(identifier);
if (namesParts.size() == 2) {
return namesParts.get(1);
}
return identifier;
}
@SuppressWarnings("checkstyle:illegalCatch")
static SchemaNodeIdentifier nodeIdentifierFromPath(final StmtContext<?, ?, ?> ctx, final String path) {
// FIXME: is the path trimming really necessary??
final List<QName> qNames = new ArrayList<>();
for (final String nodeName : SLASH_SPLITTER.split(trimSingleLastSlashFromXPath(path))) {
try {
final QName qName = StmtContextUtils.qnameFromArgument(ctx, nodeName);
qNames.add(qName);
} catch (final RuntimeException e) {
throw new SourceException(ctx.getStatementSourceReference(), e,
"Failed to parse node '%s' in path '%s'", nodeName, path);
}
}
return SchemaNodeIdentifier.create(qNames, PATH_ABS.matcher(path).matches());
}
public static String stringFromStringContext(final YangStatementParser.ArgumentContext context,
final StatementSourceReference ref) {
return stringFromStringContext(context, YangVersion.VERSION_1, ref);
}
public static String stringFromStringContext(final YangStatementParser.ArgumentContext context,
final YangVersion yangVersion, final StatementSourceReference ref) {
final StringBuilder sb = new StringBuilder();
List<TerminalNode> strings = context.STRING();
if (strings.isEmpty()) {
strings = Collections.singletonList(context.IDENTIFIER());
}
for (final TerminalNode stringNode : strings) {
final String str = stringNode.getText();
final char firstChar = str.charAt(0);
final char lastChar = str.charAt(str.length() - 1);
if (firstChar == '"' && lastChar == '"') {
final String innerStr = str.substring(1, str.length() - 1);
/*
* Unescape escaped double quotes, tabs, new line and backslash
* in the inner string and trim the result.
*/
checkDoubleQuotedString(innerStr, yangVersion, ref);
sb.append(innerStr.replace("\\\"", "\"").replace("\\\\", "\\").replace("\\n", "\n")
.replace("\\t", "\t"));
} else if (firstChar == '\'' && lastChar == '\'') {
/*
* According to RFC6020 a single quote character cannot occur in
* a single-quoted string, even when preceded by a backslash.
*/
sb.append(str.substring(1, str.length() - 1));
} else {
checkUnquotedString(str, yangVersion, ref);
sb.append(str);
}
}
return sb.toString();
}
private static void checkUnquotedString(final String str, final YangVersion yangVersion,
final StatementSourceReference ref) {
if (yangVersion == YangVersion.VERSION_1_1) {
SourceException.throwIf(ANYQUOTE_MATCHER.matchesAnyOf(str), ref,
"YANG 1.1: unquoted string (%s) contains illegal characters", str);
}
}
private static void checkDoubleQuotedString(final String str, final YangVersion yangVersion,
final StatementSourceReference ref) {
if (yangVersion == YangVersion.VERSION_1_1) {
for (int i = 0; i < str.length() - 1; i++) {
if (str.charAt(i) == '\\') {
switch (str.charAt(i + 1)) {
case 'n':
case 't':
case '\\':
case '\"':
i++;
break;
default:
throw new SourceException(ref, "YANG 1.1: illegal double quoted string (%s). In double "
+ "quoted string the backslash must be followed by one of the following character "
+ "[n,t,\",\\], but was '%s'.", str, str.charAt(i + 1));
}
}
}
}
}
@Nullable
public static StatementContextBase<?, ?, ?> findNode(final StmtContext<?, ?, ?> rootStmtCtx,
final SchemaNodeIdentifier node) {
return (StatementContextBase<?, ?, ?>) rootStmtCtx.getFromNamespace(SchemaNodeIdentifierBuildNamespace.class,
node);
}
static @Nonnull Boolean parseBoolean(final StmtContext<?, ?, ?> ctx, final String input) {
if ("true".equals(input)) {
return Boolean.TRUE;
} else if ("false".equals(input)) {
return Boolean.FALSE;
} else {
throw new SourceException(ctx.getStatementSourceReference(),
"Invalid '%s' statement %s '%s', it can be either 'true' or 'false'",
ctx.getPublicDefinition().getStatementName(), ctx.getPublicDefinition().getArgumentName(), input);
}
}
static String internBoolean(final String input) {
if ("true".equals(input)) {
return "true";
} else if ("false".equals(input)) {
return "false";
} else {
return input;
}
}
/**
* Replaces illegal characters of QName by the name of the character (e.g. '?' is replaced by "QuestionMark" etc.).
*
* @param string
* input String
* @return result String
*/
public static String replaceIllegalCharsForQName(String string) {
string = LEFT_PARENTHESIS_MATCHER.replaceFrom(string, "LeftParenthesis");
string = RIGHT_PARENTHESIS_MATCHER.replaceFrom(string, "RightParenthesis");
string = AMPERSAND_MATCHER.replaceFrom(string, "Ampersand");
string = QUESTION_MARK_MATCHER.replaceFrom(string, "QuestionMark");
return string;
}
public static boolean belongsToTheSameModule(final QName targetStmtQName, final QName sourceStmtQName) {
return targetStmtQName.getModule().equals(sourceStmtQName.getModule());
}
}
|
Do not use String.replace()
This forces a Pattern compilation, which we do not really want.
Pre-compile the patterns instead.
Change-Id: Ibcb9f911f4a4089f375264b75ce2f5d9c5f46447
Signed-off-by: Robert Varga <[email protected]>
|
yang/yang-parser-impl/src/main/java/org/opendaylight/yangtools/yang/parser/stmt/rfc6020/Utils.java
|
Do not use String.replace()
|
<ide><path>ang/yang-parser-impl/src/main/java/org/opendaylight/yangtools/yang/parser/stmt/rfc6020/Utils.java
<ide> private static final String YANG_XPATH_FUNCTIONS_STRING =
<ide> "(re-match|deref|derived-from(-or-self)?|enum-value|bit-is-set)(\\()";
<ide> private static final Pattern YANG_XPATH_FUNCTIONS_PATTERN = Pattern.compile(YANG_XPATH_FUNCTIONS_STRING);
<add> private static final Pattern ESCAPED_DQUOT = Pattern.compile("\\\"", Pattern.LITERAL);
<add> private static final Pattern ESCAPED_BACKSLASH = Pattern.compile("\\\\", Pattern.LITERAL);
<add> private static final Pattern ESCAPED_LF = Pattern.compile("\\n", Pattern.LITERAL);
<add> private static final Pattern ESCAPED_TAB = Pattern.compile("\\t", Pattern.LITERAL);
<ide>
<ide> private static final ThreadLocal<XPathFactory> XPATH_FACTORY = new ThreadLocal<XPathFactory>() {
<ide> @Override
<ide> * in the inner string and trim the result.
<ide> */
<ide> checkDoubleQuotedString(innerStr, yangVersion, ref);
<del> sb.append(innerStr.replace("\\\"", "\"").replace("\\\\", "\\").replace("\\n", "\n")
<del> .replace("\\t", "\t"));
<add> sb.append(ESCAPED_TAB.matcher(
<add> ESCAPED_LF.matcher(
<add> ESCAPED_BACKSLASH.matcher(
<add> ESCAPED_DQUOT.matcher(innerStr).replaceAll("\\\""))
<add> .replaceAll("\\\\"))
<add> .replaceAll("\\\n"))
<add> .replaceAll("\\\t"));
<ide> } else if (firstChar == '\'' && lastChar == '\'') {
<ide> /*
<ide> * According to RFC6020 a single quote character cannot occur in
|
|
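The commit message above describes the change: on the JDKs of that era, String.replace() compiled a throwaway LITERAL Pattern on every call, so hoisting the patterns into static final fields trades per-call compilation for one-time setup. The fragment below is a standalone sketch of the before/after, not the project's code; Matcher.quoteReplacement() is used here to keep the replacement strings literal.
import java.util.regex.Matcher;
import java.util.regex.Pattern;
final class UnescapeSketch {
    private static final Pattern ESCAPED_DQUOT = Pattern.compile("\\\"", Pattern.LITERAL);
    private static final Pattern ESCAPED_LF = Pattern.compile("\\n", Pattern.LITERAL);
    // Before: each call pays for two throwaway Pattern compilations.
    static String withReplace(final String innerStr) {
        return innerStr.replace("\\\"", "\"").replace("\\n", "\n");
    }
    // After: the Patterns are compiled once; only the Matchers are per-call.
    static String withPrecompiledPatterns(final String innerStr) {
        final String noQuotes = ESCAPED_DQUOT.matcher(innerStr)
                .replaceAll(Matcher.quoteReplacement("\""));
        return ESCAPED_LF.matcher(noQuotes)
                .replaceAll(Matcher.quoteReplacement("\n"));
    }
    private UnescapeSketch() {
    }
}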
JavaScript
|
isc
|
19601bedb8c9758ae51876e6b2f468823a0f983d
| 0 |
xdv/ripple-client-desktop,yongsoo/ripple-client-desktop,xdv/ripple-client,wangbibo/ripple-client,yxxyun/ripple-client,vhpoet/ripple-client-desktop,Madsn/ripple-client,ripple/ripple-client,xdv/giving-client,yongsoo/ripple-client,ripple/ripple-client-desktop,vhpoet/ripple-client,mrajvanshy/ripple-client-desktop,dncohen/ripple-client-desktop,darkdarkdragon/ripple-client,MatthewPhinney/ripple-client,darkdarkdragon/ripple-client,arturomc/ripple-client,h0vhannes/ripple-client,mrajvanshy/ripple-client-desktop,Madsn/ripple-client,MatthewPhinney/ripple-client,yongsoo/ripple-client,bsteinlo/ripple-client,vhpoet/ripple-client,wangbibo/ripple-client,xdv/giving-client,dncohen/ripple-client-desktop,MatthewPhinney/ripple-client,Madsn/ripple-client,bankonme/ripple-client-desktop,yxxyun/ripple-client,vhpoet/ripple-client-desktop,yxxyun/ripple-client-desktop,mrajvanshy/ripple-client,darkdarkdragon/ripple-client,mrajvanshy/ripple-client,vhpoet/ripple-client,xdv/ripple-client,wangbibo/ripple-client,yongsoo/ripple-client-desktop,wangbibo/ripple-client,xdv/ripple-client-desktop,yxxyun/ripple-client,mrajvanshy/ripple-client,yongsoo/ripple-client,ripple/giving-client,ripple/ripple-client,mrajvanshy/ripple-client,bsteinlo/ripple-client,ripple/ripple-client,xdv/ripple-client,MatthewPhinney/ripple-client-desktop,yxxyun/ripple-client,resilience-me/DEPRICATED_ripple-client,arturomc/ripple-client,ripple/giving-client,h0vhannes/ripple-client,Madsn/ripple-client,xdv/giving-client,bankonme/ripple-client-desktop,darkdarkdragon/ripple-client-desktop,ripple/ripple-client-desktop,thics/ripple-client-desktop,ripple/ripple-client,darkdarkdragon/ripple-client,MatthewPhinney/ripple-client-desktop,yxxyun/ripple-client-desktop,h0vhannes/ripple-client,resilience-me/DEPRICATED_ripple-client,thics/ripple-client-desktop,darkdarkdragon/ripple-client-desktop,h0vhannes/ripple-client,xdv/ripple-client,MatthewPhinney/ripple-client,yongsoo/ripple-client,arturomc/ripple-client,arturomc/ripple-client,xdv/ripple-client-desktop,vhpoet/ripple-client
|
var util = require('util'),
events = require('events'),
rewriter = require('./jsonrewriter');
/**
* Class listening to Ripple network state and updating models.
*
* This class handles all incoming events from the network and updates
* the appropriate local models.
*/
var Model = function ()
{
events.EventEmitter.call(this);
};
util.inherits(Model, events.EventEmitter);
Model.prototype.init = function ()
{
var $scope = this.app.$scope;
this.reset();
$scope.currencies_all = require('../data/currencies');
$scope.currencies = $scope.currencies_all.slice(1);
$scope.pairs = require('../data/pairs');
this.app.id.on('accountload', this.handleAccountLoad.bind(this));
this.app.net.remote.on('net_account', this.handleAccountEvent.bind(this));
};
Model.prototype.reset = function ()
{
var $scope = this.app.$scope;
$scope.balance = "0";
$scope.lines = [];
$scope.offers = {};
$scope.events = [];
$scope.history = [];
$scope.balances = {};
}
Model.prototype.setApp = function (app)
{
this.app = app;
};
/**
* Setup listeners for identity state.
*
* Causes the initialization of account model data.
*/
Model.prototype.listenId = function (id)
{
};
Model.prototype.handleAccountLoad = function (e)
{
var $scope = this.app.$scope;
var remote = this.app.net.remote;
this.reset();
remote.request_account_lines(e.account)
.on('success', this.handleRippleLines.bind(this))
.on('error', this.handleRippleLinesError.bind(this)).request();
remote.request_wallet_accounts(e.secret)
.on('success', this.handleAccounts.bind(this))
.on('error', this.handleAccountsError.bind(this)).request();
remote.request_account_offers(e.account)
.on('success', this.handleOffers.bind(this))
.on('error', this.handleOffersError.bind(this)).request();
$scope.address = e.account;
if(!$scope.$$phase) {
$scope.$digest();
}
};
Model.prototype.handleRippleLines = function (data)
{
var self = this,
app = this.app,
$scope = app.$scope;
$scope.$apply(function () {
$scope.lines = {};
for (var n=0, l=data.lines.length; n<l; n++) {
var line = data.lines[n];
// XXX: This reinterpretation of the server response should be in the
// library upstream.
line = $.extend({}, line, {
limit: ripple.Amount.from_json({value: line.limit, currency: line.currency, issuer: line.account}),
limit_peer: ripple.Amount.from_json({value: line.limit_peer, currency: line.currency, issuer: app.id.account}),
balance: ripple.Amount.from_json({value: line.balance, currency: line.currency, issuer: app.id.account})
});
$scope.lines[line.account+line.currency] = line;
self._updateRippleBalance(line.currency, line.account, line.balance);
}
console.log('lines updated:', $scope.lines);
});
};
Model.prototype.handleRippleLinesError = function (data)
{
}
Model.prototype.handleOffers = function (data)
{
var self = this;
var $scope = this.app.$scope;
$scope.$apply(function ()
{
data.offers.forEach(function (offerData) {
var offer = {
seq: +offerData.seq,
gets: ripple.Amount.from_json(offerData.taker_gets),
pays: ripple.Amount.from_json(offerData.taker_pays)
};
self._updateOffer(offer);
});
console.log('offers updated:', $scope.offers);
});
};
Model.prototype.handleOffersError = function (data)
{
}
Model.prototype.handleAccounts = function (data)
{
var self = this;
var remote = this.app.net.remote;
var $scope = this.app.$scope;
$scope.$apply(function () {
$scope.balance = data.accounts[0].Balance;
remote.request_account_tx(data.accounts[0].Account, "0", "999999")
.on('success', self.handleAccountTx.bind(self, data.accounts[0].Account)).request();
});
};
Model.prototype.handleAccountsError = function (data)
{
}
Model.prototype.handleAccountTx = function (account, data)
{
var self = this;
var $scope = this.app.$scope;
$scope.$apply(function () {
if (data.transactions) {
data.transactions.forEach(function (e) {
self._processTxn(e.tx, e.meta, true);
});
}
});
};
Model.prototype.handleAccountEvent = function (e)
{
this._processTxn(e.transaction, e.meta);
var $scope = this.app.$scope;
$scope.$digest();
};
/**
* Process a transaction and add it to the history table.
*/
Model.prototype._processTxn = function (tx, meta, is_historic)
{
var self = this;
var $scope = this.app.$scope;
var account = this.app.id.account;
var processedTxn = rewriter.processTxn(tx, meta, account);
if (processedTxn) {
// Add to recent notifications
if (processedTxn.tx_result === "tesSUCCESS") {
$scope.events.unshift(processedTxn);
}
// Add to payments history
if (processedTxn.tx_type === "Payment" &&
processedTxn.tx_result === "tesSUCCESS") {
$scope.history.unshift(processedTxn);
}
// Update XRP balance
if (processedTxn.xrp_balance && !is_historic) {
$scope.balance = processedTxn.xrp_balance;
}
// Update Ripple lines
if (processedTxn.rippleState && !is_historic) {
this._updateLines(processedTxn);
}
// Update my offers
if (processedTxn.offers && !is_historic) {
processedTxn.offers.forEach(function (offer) {
self._updateOffer(offer);
});
}
}
};
/*
account: "rHMq44aXmd9wEYHK84VyiZyx8SP6VbpzNV"
balance: "0"
currency: "USD"
limit: "2000"
limit_peer: "0"
quality_in: 0
quality_out: 0
*/
Model.prototype._updateLines = function(txn)
{
console.log('update lines', txn);
var $scope = this.app.$scope;
var index = txn.counterparty + txn.currency,
line = {};
line.currency = txn.currency;
line.account = txn.counterparty;
if (txn.tx_type === "Payment") {
this._updateRippleBalance(txn.currency, txn.counterparty, txn.balance);
} else if (txn.tx_type === "TrustSet") {
line.limit = txn.limit;
line.limit_peer = txn.limit_peer;
} else return;
$scope.lines[index] = $.extend($scope.lines[index], line);
}
Model.prototype._updateRippleBalance = function(currency, new_account, new_balance)
{
var $scope = this.app.$scope;
// Ensure the balances entry exists first
if (!$scope.balances[currency]) {
$scope.balances[currency] = {components: {}};
}
var balance = $scope.balances[currency];
if (new_account) {
balance.components[new_account] = new_balance;
}
balance.total = 0;
for (var counterparty in balance.components) {
var amount = balance.components[counterparty];
// XXX: Do proper BigInteger addition through ripple.Amount
balance.total += +amount.to_text();
}
balance.total = ripple.Amount.from_human(""+balance.total+" "+currency);
};
Model.prototype._updateOffer = function (offer)
{
var $scope = this.app.$scope;
var reverseOrder = null;
var pairs = $scope.pairs;
for (var i = 0, l = pairs.length; i < l; i++) {
var pair = pairs[i].name;
if (pair.slice(0,3) == offer.gets.currency().to_json() &&
pair.slice(4,7) == offer.pays.currency().to_json()) {
reverseOrder = false;
break;
} else if (pair.slice(0,3) == offer.pays.currency().to_json() &&
pair.slice(4,7) == offer.gets.currency().to_json()) {
reverseOrder = true;
break;
}
}
// TODO: Sensible default for undefined pairs
if (reverseOrder === null) {
reverseOrder = false;
}
if (reverseOrder) {
offer.type = 'buy';
offer.first = offer.pays;
offer.second = offer.gets;
} else {
offer.type = 'sell';
offer.first = offer.gets;
offer.second = offer.pays;
}
if (!offer.deleted) {
$scope.offers[""+offer.seq] = offer;
} else {
delete $scope.offers[""+offer.seq];
}
};
exports.Model = Model;
|
src/js/client/model.js
|
var util = require('util'),
events = require('events'),
rewriter = require('./jsonrewriter');
/**
* Class listening to Ripple network state and updating models.
*
* This class handles all incoming events from the network and updates
* the appropriate local models.
*/
var Model = function ()
{
events.EventEmitter.call(this);
};
util.inherits(Model, events.EventEmitter);
Model.prototype.init = function ()
{
var $scope = this.app.$scope;
this.reset();
$scope.currencies_all = require('../data/currencies');
$scope.currencies = $scope.currencies_all.slice(1);
$scope.pairs = require('../data/pairs');
this.app.id.on('accountload', this.handleAccountLoad.bind(this));
this.app.net.remote.on('net_account', this.handleAccountEvent.bind(this));
};
Model.prototype.reset = function ()
{
var $scope = this.app.$scope;
$scope.balance = "0";
$scope.lines = [];
$scope.offers = {};
$scope.events = [];
$scope.history = [];
$scope.balances = {};
}
Model.prototype.setApp = function (app)
{
this.app = app;
};
/**
* Setup listeners for identity state.
*
* Causes the initialization of account model data.
*/
Model.prototype.listenId = function (id)
{
};
Model.prototype.handleAccountLoad = function (e)
{
var $scope = this.app.$scope;
var remote = this.app.net.remote;
this.reset();
remote.request_account_lines(e.account)
.on('success', this.handleRippleLines.bind(this))
.on('error', this.handleRippleLinesError.bind(this)).request();
remote.request_wallet_accounts(e.secret)
.on('success', this.handleAccounts.bind(this))
.on('error', this.handleAccountsError.bind(this)).request();
remote.request_account_offers(e.account)
.on('success', this.handleOffers.bind(this))
.on('error', this.handleOffersError.bind(this)).request();
$scope.address = e.account;
if(!$scope.$$phase) {
$scope.$digest();
}
};
Model.prototype.handleRippleLines = function (data)
{
var self = this,
app = this.app,
$scope = app.$scope;
$scope.$apply(function () {
$scope.lines = {};
for (var n=0, l=data.lines.length; n<l; n++) {
var line = data.lines[n];
// XXX: This reinterpretation of the server response should be in the
// library upstream.
line = $.extend({}, line, {
limit: ripple.Amount.from_json({value: line.limit, currency: line.currency, issuer: line.account}),
limit_peer: ripple.Amount.from_json({value: line.limit_peer, currency: line.currency, issuer: app.id.account}),
balance: ripple.Amount.from_json({value: line.balance, currency: line.currency, issuer: app.id.account})
});
$scope.lines[line.account+line.currency] = line;
self._updateRippleBalance(line.currency, line.account, line.balance);
}
console.log('lines updated:', $scope.lines);
});
};
Model.prototype.handleRippleLinesError = function (data)
{
}
Model.prototype.handleOffers = function (data)
{
var self = this;
var $scope = this.app.$scope;
$scope.$apply(function ()
{
data.offers.forEach(function (offerData) {
var offer = {
seq: +offerData.seq,
gets: ripple.Amount.from_json(offerData.taker_gets),
pays: ripple.Amount.from_json(offerData.taker_pays)
};
self._updateOffer(offer);
});
console.log('offers updated:', $scope.offers);
});
};
Model.prototype.handleOffersError = function (data)
{
}
Model.prototype.handleAccounts = function (data)
{
var self = this;
var remote = this.app.net.remote;
var $scope = this.app.$scope;
$scope.$apply(function () {
$scope.balance = data.accounts[0].Balance;
remote.request_account_tx(data.accounts[0].Account, "0", "999999")
.on('success', self.handleAccountTx.bind(self, data.accounts[0].Account)).request();
});
};
Model.prototype.handleAccountsError = function (data)
{
}
Model.prototype.handleAccountTx = function (account, data)
{
var self = this;
var $scope = this.app.$scope;
$scope.$apply(function () {
if (data.transactions) {
data.transactions.forEach(function (e) {
self._processTxn(e.tx, e.meta, true);
});
}
});
};
Model.prototype.handleAccountEvent = function (e)
{
this._processTxn(e.transaction, e.meta);
var $scope = this.app.$scope;
$scope.$digest();
};
/**
* Process a transaction and add it to the history table.
*/
Model.prototype._processTxn = function (tx, meta, is_historic)
{
var self = this;
var $scope = this.app.$scope;
var account = this.app.id.account;
var processedTxn = rewriter.processTxn(tx, meta, account);
if (processedTxn) {
// Add to recent notifications
if (processedTxn.tx_result === "tesSUCCESS") {
$scope.events.unshift(processedTxn);
}
// Add to payments history
if (processedTxn.tx_type === "Payment" &&
processedTxn.tx_result === "tesSUCCESS") {
$scope.history.unshift(processedTxn);
}
// Update XRP balance
if (processedTxn.xrp_balance && !is_historic) {
$scope.balance = processedTxn.xrp_balance;
}
// Update Ripple lines
if (processedTxn.rippleState && !is_historic) {
this._updateLines(processedTxn);
}
// Update my offers
if (processedTxn.offers && !is_historic) {
processedTxn.offers.forEach(function (offer) {
self._updateOffer(offer);
});
}
}
};
/*
account: "rHMq44aXmd9wEYHK84VyiZyx8SP6VbpzNV"
balance: "0"
currency: "USD"
limit: "2000"
limit_peer: "0"
quality_in: 0
quality_out: 0
*/
Model.prototype._updateLines = function(txn)
{
console.log('update lines', txn);
var $scope = this.app.$scope;
var index = txn.counterparty + txn.currency,
line = {};
line.currency = txn.currency;
line.account = txn.counterparty;
line.balance = ripple.Amount.from_json({value: line.balance, currency: line.currency});
if (txn.tx_type === "Payment") {
this._updateRippleBalance(txn.currency, txn.counterparty, txn.balance);
} else if (txn.tx_type === "TrustSet") {
line.limit = txn.limit;
line.limit_peer = txn.limit_peer;
} else return;
$scope.lines[index] = $.extend($scope.lines[index], line);
}
Model.prototype._updateRippleBalance = function(currency, new_account, new_balance)
{
var $scope = this.app.$scope;
// Ensure the balances entry exists first
if (!$scope.balances[currency]) {
$scope.balances[currency] = {components: {}};
}
var balance = $scope.balances[currency];
if (new_account) {
balance.components[new_account] = new_balance;
}
balance.total = 0;
for (var counterparty in balance.components) {
var amount = balance.components[counterparty];
// XXX: Do proper BigInteger addition through ripple.Amount
balance.total += +amount.to_text();
}
balance.total = ripple.Amount.from_human(""+balance.total+" "+currency);
};
Model.prototype._updateOffer = function (offer)
{
var $scope = this.app.$scope;
var reverseOrder = null;
var pairs = $scope.pairs;
for (var i = 0, l = pairs.length; i < l; i++) {
var pair = pairs[i].name;
if (pair.slice(0,3) == offer.gets.currency().to_json() &&
pair.slice(4,7) == offer.pays.currency().to_json()) {
reverseOrder = false;
break;
} else if (pair.slice(0,3) == offer.pays.currency().to_json() &&
pair.slice(4,7) == offer.gets.currency().to_json()) {
reverseOrder = true;
break;
}
}
// TODO: Sensible default for undefined pairs
if (reverseOrder === null) {
reverseOrder = false;
}
if (reverseOrder) {
offer.type = 'buy';
offer.first = offer.pays;
offer.second = offer.gets;
} else {
offer.type = 'sell';
offer.first = offer.gets;
offer.second = offer.pays;
}
if (!offer.deleted) {
$scope.offers[""+offer.seq] = offer;
} else {
delete $scope.offers[""+offer.seq];
}
};
exports.Model = Model;
|
Update lines correctly after granting trust fix #84
|
src/js/client/model.js
|
Update lines correctly after granting trust fix #84
|
<ide><path>rc/js/client/model.js
<ide>
<ide> line.currency = txn.currency;
<ide> line.account = txn.counterparty;
<del>
<del> line.balance = ripple.Amount.from_json({value: line.balance, currency: line.currency});
<ide>
<ide> if (txn.tx_type === "Payment") {
<ide> this._updateRippleBalance(txn.currency, txn.counterparty, txn.balance);
|
|
Java
|
apache-2.0
|
ee6d104e31a4f374b4b3d06f7d8551ba5a6db3f5
| 0 |
apache/zest-qi4j,Qi4j/qi4j-sdk,Qi4j/qi4j-sdk,apache/zest-qi4j,Qi4j/qi4j-sdk,Qi4j/qi4j-sdk,apache/zest-qi4j,apache/zest-qi4j,apache/zest-qi4j,Qi4j/qi4j-sdk
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*
*/
package org.apache.polygene.cache.memcache;
import org.apache.polygene.api.common.Optional;
import org.apache.polygene.api.property.Property;
/**
* Memcache CachePool Configuration.
*/
// START SNIPPET: config
public interface MemcacheConfiguration
{
/**
* Cached items expiration in seconds.
* Defaulted to 3600 seconds, one hour.
* @return Cached items expiration configuration property
*/
@Optional
Property<Integer> expiration();
/**
* Memcached server addresses space separated.
* Eg. {@literal "server1:11211 server2:11211"}.
* Defaulted to {@literal "127.0.0.1:11211"}.
* @return Memcached server addresses configuration property
*/
@Optional
Property<String> addresses();
/**
* Memcache Protocol.
* Can be {@literal text} or {@literal binary}
* Defaulted to {@literal text}.
* @return Memcache Protocol configuration property
*/
@Optional
Property<String> protocol();
/**
* Username.
* Authentication happens only if set.
* @return Username configuration property
*/
@Optional
Property<String> username();
/**
* Password.
* @return Password configuration property
*/
@Optional
Property<String> password();
/**
* SASL authentication mechanism.
* Defaulted to PLAIN.
* @return Authentication mechanism configuration property
*/
@Optional
Property<String> authMechanism();
}
// END SNIPPET: config
|
extensions/cache-memcache/src/main/java/org/apache/polygene/cache/memcache/MemcacheConfiguration.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*
*/
package org.apache.polygene.cache.memcache;
import org.apache.polygene.api.common.Optional;
import org.apache.polygene.api.configuration.Configuration;
import org.apache.polygene.api.property.Property;
/**
* Memcache CachePool Configuration.
*/
// START SNIPPET: config
public interface MemcacheConfiguration
extends Configuration<MemcachePoolService>
{
/**
* Cached items expiration in seconds.
* Defaulted to 3600 seconds, one hour.
* @return Cached items expiration configuration property
*/
@Optional
Property<Integer> expiration();
/**
* Memcached server addresses space separated.
* Eg. {@literal "server1:11211 server2:11211"}.
* Defaulted to {@literal "127.0.0.1:11211"}.
* @return Memcached server addresses configuration property
*/
@Optional
Property<String> addresses();
/**
* Memcache Protocol.
* Can be {@literal text} or {@literal binary}
* Defaulted to {@literal text}.
* @return Memcache Protocol configuration property
*/
@Optional
Property<String> protocol();
/**
* Username.
* Authentication happens only if set.
* @return Username configuration property
*/
@Optional
Property<String> username();
/**
* Password.
* @return Password configuration property
*/
@Optional
Property<String> password();
/**
* SASL authentication mechanism.
* Defaulted to PLAIN.
* @return Authentication mechanism configuration property
*/
@Optional
Property<String> authMechanism();
}
// END SNIPPET: config
|
:extensions:cache-memcache refine
|
extensions/cache-memcache/src/main/java/org/apache/polygene/cache/memcache/MemcacheConfiguration.java
|
:extensions:cache-memcache refine
|
<ide><path>xtensions/cache-memcache/src/main/java/org/apache/polygene/cache/memcache/MemcacheConfiguration.java
<ide> package org.apache.polygene.cache.memcache;
<ide>
<ide> import org.apache.polygene.api.common.Optional;
<del>import org.apache.polygene.api.configuration.Configuration;
<ide> import org.apache.polygene.api.property.Property;
<ide>
<ide> /**
<ide> */
<ide> // START SNIPPET: config
<ide> public interface MemcacheConfiguration
<del> extends Configuration<MemcachePoolService>
<ide> {
<ide> /**
<ide> * Cached items expiration in seconds.
|
|
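The MemcacheConfiguration javadoc above documents defaults (3600 s expiration, "127.0.0.1:11211", text protocol, PLAIN SASL) while leaving every property optional. The fragment below is a plain-Java sketch, not Polygene API, of how a consuming service might resolve those documented defaults when the optional properties are unset; all names here are assumptions.
final class MemcacheDefaults {
    static int expirationOrDefault(final Integer configured) {
        return configured != null ? configured : 3600;            // one hour
    }
    static String addressesOrDefault(final String configured) {
        return configured != null ? configured : "127.0.0.1:11211";
    }
    static String protocolOrDefault(final String configured) {
        return configured != null ? configured : "text";          // or "binary"
    }
    static String authMechanismOrDefault(final String configured) {
        return configured != null ? configured : "PLAIN";
    }
    private MemcacheDefaults() {
    }
}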
Java
|
mit
|
75469c080cd0e9b75251664954850dbbce751220
| 0 |
quitschibo/chrome-extension-selenium-example
|
package com.manmoe.example.test;
import com.google.common.base.Predicate;
import com.manmoe.example.model.IssuesPage;
import com.manmoe.example.model.PopupPage;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.remote.RemoteWebDriver;
import org.openqa.selenium.support.ui.WebDriverWait;
import org.testng.ITestResult;
import org.testng.annotations.AfterClass;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import us.monoid.web.Resty;
import java.io.IOException;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNotNull;
import static org.testng.Assert.assertTrue;
import static us.monoid.web.Resty.content;
import static us.monoid.web.Resty.put;
/**
* Just an example test.
*
* @author Manuel Möhlmann <[email protected]>
*/
public class FirespottingIT extends AbstractChromeExtensionTest {
private static final String EXTENSION_NAME_FROM_MANIFEST = "Firespotting! Interesting Ideas, Every Day!";
public static final long TIME_TO_WAIT_FOR_REFRESH = 3L;
/**
* sum of entries to be shown on popup page
*/
public static final int ENTRY_LIST_LENGTH = 15;
/**
* This is our test model, so we don't get lost in the details of how to get specific elements.
*/
protected PopupPage popupPage;
/**
* Issues page testmodel
*/
protected IssuesPage issuesPage;
/**
* We set it initially == true, so we can &= each test method result.
*/
protected boolean testResult = true;
/**
* Our testClient to send the results to sauceLabs
*/
protected Resty restClient = new Resty();
// -------------------- Setting up and down the test environment
/**
* Method for setting up the test environment.
*/
@BeforeClass
public void setUp() {
RemoteWebDriver testDriver = getWebDriver();
this.popupPage = new PopupPage(testDriver, EXTENSION_NAME_FROM_MANIFEST);
this.issuesPage = new IssuesPage(testDriver, EXTENSION_NAME_FROM_MANIFEST);
}
/**
* We tell the remote selenium server here that we have finished.
*/
@AfterClass
public void tearDown() throws IOException {
// send saucelabs the result of the tests
// @TODO we should use @AfterSuite in the future
String sauceUsername = getSystemVariable("SAUCE_USERNAME");
String sauceAccessKey = getSystemVariable("SAUCE_ACCESS_KEY");
String platformString = getSystemVariable("PLATFORM");
String buildNr = getSystemVariable("TRAVIS_BUILD_NUMBER");
// if sauceLabs is used, the results should be transmitted
if (sauceUsername != null && sauceAccessKey != null && platformString != null) {
String jobId = popupPage.getDriver().getSessionId().toString();
// build sauceLabs result
String url = "https://saucelabs.com/rest/v1/" + sauceUsername + "/jobs/" + jobId;
restClient.authenticate("https://saucelabs.com", sauceUsername, sauceAccessKey.toCharArray());
restClient.withHeader("Content-Type", "application/json");
restClient.json(url, put(content("{\"passed\": " + testResult + ", \"name\": \"Firespotting! " + platformString + " Test\", \"build\": \"" + buildNr + "\"}")));
}
this.popupPage.tearDown();
}
/**
* This method isolates the getting of the env variable for better testability.
*
* @param envVariableName the name of the env variable we want to get
*/
protected String getSystemVariable(String envVariableName) {
return System.getenv(envVariableName);
}
/**
* We want to know if every method has succeeded. If one method fails, testResult == false.
*
* @param result Results of all testMethods.
*/
@AfterMethod(alwaysRun = true)
public void report(ITestResult result) {
testResult &= result.isSuccess();
}
// -------------------- Tests for the extension
/**
* This test checks, if the chrome extension is installed on the remote system. We want to get the extension id
* by the remote host (every chrome browser will generate another id). If the id is present, we assume, the the
* extension is installed correctly.
*/
@Test
public void isInstalled() {
assertTrue(popupPage.getId() != null, "We got null back. The extension is not installed properly");
}
/**
* Test for checking the popup window.
*/
@Test
public void testPopup() {
popupPage.open();
// check title
assertEquals(popupPage.getTitle(), "Firespotting!");
}
/**
* Clicks on every item and checks if it loads.
*/
//@Test
public void testEntry() {
popupPage.open();
// check if all entries are there
for (int i = 1; i <= ENTRY_LIST_LENGTH; i++) {
String linkText = popupPage.getEntryTitle(i);
assertNotNull(linkText);
popupPage.clickOnEntryLink(linkText);
popupPage.switchToNewTab();
if (popupPage.getDriver().getWindowHandles().size() > 1) {
popupPage.getDriver().close();
}
popupPage.switchToFirstTab();
popupPage.open();
}
}
/**
* Clicks on the issues link and checks if it loads.
*/
@Test
public void testIssues() {
popupPage.open();
popupPage.getIssues().click();
popupPage.switchToNewTab();
issuesPage.waitUntilLoaded();
}
/**
* Clicks on the refresh link.
*/
@Test
public void testRefresh() {
popupPage.open();
popupPage.getRefreshLink().click();
WebDriverWait driverWait = createWebDriverWait(popupPage.getDriver(), TIME_TO_WAIT_FOR_REFRESH);
driverWait.until(new Predicate<WebDriver>() {
@Override
public boolean apply(org.openqa.selenium.WebDriver webDriver) {
return popupPage.getTitle().equals("Firespotting!");
}
});
assertEquals(popupPage.getTitle(), "Firespotting!");
}
/**
* Just a helper method to create a WebDriverWait
*
* @param driver The driver we want to configure
* @param timeToWaitForRefresh The time the driver should wait for a refresh in seconds
*
* @return a newly created WebDriverWait
*/
protected WebDriverWait createWebDriverWait(WebDriver driver, long timeToWaitForRefresh) {
return new WebDriverWait(driver, timeToWaitForRefresh);
}
/**
* Clicks on Options link.
*/
@Test
public void testOpenOptions() {
popupPage.open();
popupPage.getOptionsLink().click();
popupPage.switchToNewTab();
assertEquals(popupPage.getTitle(), "Options");
popupPage.getDriver().close();
popupPage.switchToFirstTab();
}
}
|
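In the FirespottingIT class above, testRefresh() waits for the popup title with a WebDriverWait driven by a Guava Predicate. The sketch below shows the same explicit-wait technique expressed with ExpectedConditions from Selenium's support package; it mirrors the long-seconds WebDriverWait constructor used above (Selenium 4 would take a Duration), and the class name is illustrative only.
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.support.ui.ExpectedConditions;
import org.openqa.selenium.support.ui.WebDriverWait;
final class RefreshWaitSketch {
    static final long TIME_TO_WAIT_FOR_REFRESH = 3L; // seconds
    // Polls until the page title matches, or throws TimeoutException when
    // TIME_TO_WAIT_FOR_REFRESH elapses.
    static void waitForTitle(final WebDriver driver, final String expectedTitle) {
        new WebDriverWait(driver, TIME_TO_WAIT_FOR_REFRESH)
                .until(ExpectedConditions.titleIs(expectedTitle));
    }
    private RefreshWaitSketch() {
    }
}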
src/main/java/com/manmoe/example/test/FirespottingIT.java
|
package com.manmoe.example.test;
import com.google.common.base.Predicate;
import com.manmoe.example.model.IssuesPage;
import com.manmoe.example.model.PopupPage;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.remote.RemoteWebDriver;
import org.openqa.selenium.support.ui.WebDriverWait;
import org.testng.ITestResult;
import org.testng.annotations.AfterClass;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import us.monoid.web.Resty;
import java.io.IOException;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNotNull;
import static org.testng.Assert.assertTrue;
import static us.monoid.web.Resty.content;
import static us.monoid.web.Resty.put;
/**
* Just an example test.
*
* @author Manuel Möhlmann <[email protected]>
*/
public class FirespottingIT extends AbstractChromeExtensionTest {
private static final String EXTENSION_NAME_FROM_MANIFEST = "Firespotting! Interesting Ideas, Every Day!";
public static final long TIME_TO_WAIT_FOR_REFRESH = 3L;
/**
* sum of entries to be shown on popup page
*/
public static final int ENTRY_LIST_LENGTH = 15;
/**
* This is our test model, so we don't get lost in the details of how to get specific elements.
*/
protected PopupPage popupPage;
/**
* Issues page testmodel
*/
protected IssuesPage issuesPage;
/**
* We set it initially == true, so we can &= each test method result.
*/
protected boolean testResult = true;
/**
* Our testClient to send the results to sauceLabs
*/
protected Resty restClient = new Resty();
// -------------------- Setting up and down the test environment
/**
* Method for setting up the test environment.
*/
@BeforeClass
public void setUp() {
RemoteWebDriver testDriver = getWebDriver();
this.popupPage = new PopupPage(testDriver, EXTENSION_NAME_FROM_MANIFEST);
this.issuesPage = new IssuesPage(testDriver, EXTENSION_NAME_FROM_MANIFEST);
}
/**
* We tell the remote selenium server here that we have finished.
*/
@AfterClass
public void tearDown() throws IOException {
// send saucelabs the result of the tests
// @TODO should use @AfterSuite in future
String sauceUsername = getSystemVariable("SAUCE_USERNAME");
String sauceAccessKey = getSystemVariable("SAUCE_ACCESS_KEY");
String platformString = getSystemVariable("PLATFORM");
String buildNr = getSystemVariable("TRAVIS_BUILD_NUMBER");
// if sauceLabs is used, the results should be transmitted
if (sauceUsername != null && sauceAccessKey != null && platformString != null) {
String jobId = popupPage.getDriver().getSessionId().toString();
// build sauceLabs result
String url = "https://saucelabs.com/rest/v1/" + sauceUsername + "/jobs/" + jobId;
restClient.authenticate("https://saucelabs.com", sauceUsername, sauceAccessKey.toCharArray());
restClient.withHeader("Content-Type", "application/json");
restClient.json(url, put(content("{\"passed\": " + testResult + ", \"name\": \"Firespotting! " + platformString + " Test\", \"build\": \"" + buildNr + "\"}")));
}
this.popupPage.tearDown();
}
/**
* This method isolates the getting of the env variable for better testability.
*
* @param envVariableName the name of the env variable we want to get
*/
protected String getSystemVariable(String envVariableName) {
return System.getenv(envVariableName);
}
/**
* We want to know if every method has succeeded. If one method fails, testResult == false.
*
* @param result Results of all testMethods.
*/
@AfterMethod(alwaysRun = true)
public void report(ITestResult result) {
testResult &= result.isSuccess();
}
// -------------------- Tests for the extension
/**
* This test checks if the chrome extension is installed on the remote system. We want to get the extension id
* from the remote host (every chrome browser will generate a different id). If the id is present, we assume that the
* extension is installed correctly.
*/
@Test
public void isInstalled() {
assertTrue(popupPage.getId() != null, "We got null back. The extension is not installed properly");
}
/**
* Test for checking the popup window.
*/
@Test
public void testPopup() {
popupPage.open();
// check title
assertEquals(popupPage.getTitle(), "Firespotting!");
}
/**
* Clicks on every item and checks if it loads.
*/
//@Test
public void testEntry() {
popupPage.open();
// check if all entries are there
for (int i = 1; i <= ENTRY_LIST_LENGTH; i++) {
String linkText = popupPage.getEntryTitle(i);
assertNotNull(linkText);
popupPage.clickOnEntryLink(linkText);
popupPage.switchToNewTab();
if (popupPage.getDriver().getWindowHandles().size() > 1) {
popupPage.getDriver().close();
}
popupPage.switchToFirstTab();
popupPage.open();
}
}
/**
* Clicks on the issues link and checks if it loads.
*/
@Test
public void testIssues() {
popupPage.open();
popupPage.getIssues().click();
issuesPage.waitUntilLoaded();
}
/**
* Clicks on refresh link.
*
* @throws InterruptedException
*/
@Test
public void testRefresh() {
popupPage.open();
popupPage.getRefreshLink().click();
WebDriverWait driverWait = createWebDriverWait(popupPage.getDriver(), TIME_TO_WAIT_FOR_REFRESH);
driverWait.until(new Predicate<WebDriver>() {
@Override
public boolean apply(org.openqa.selenium.WebDriver webDriver) {
return popupPage.getTitle().equals("Firespotting!");
}
});
assertEquals(popupPage.getTitle(), "Firespotting!");
}
/**
* Just a helper method to create a WebDriverWait
*
* @param driver The driver we want to configure
* @param timeToWaitForRefresh The time the driver should wait for a refresh in seconds
*
* @return a newly created WebDriverWait
*/
protected WebDriverWait createWebDriverWait(WebDriver driver, long timeToWaitForRefresh) {
return new WebDriverWait(driver, timeToWaitForRefresh);
}
/**
* Clicks on Options link.
*/
@Test
public void testOpenOptions() {
popupPage.open();
popupPage.getOptionsLink().click();
popupPage.switchToNewTab();
assertEquals(popupPage.getTitle(), "Options");
popupPage.getDriver().close();
popupPage.switchToFirstTab();
}
}
|
modified issues test
added step for getting to old page
|
src/main/java/com/manmoe/example/test/FirespottingIT.java
|
modified issues test
|
<ide><path>rc/main/java/com/manmoe/example/test/FirespottingIT.java
<ide>
<ide> popupPage.getIssues().click();
<ide>
<add> popupPage.switchToNewTab();
<ide>
<ide> issuesPage.waitUntilLoaded();
<ide> }
|
|
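Editor's note (illustrative sketch, not part of the dataset record above): the commit adds a
popupPage.switchToNewTab() call before waiting for the issues page. The PopupPage implementation is
not included in this record; a typical Selenium helper for this, using only the standard WebDriver
window-handle API, might look roughly like the following (class and method names are assumptions):

import java.util.ArrayList;
import java.util.List;
import org.openqa.selenium.WebDriver;

public class TabSwitchSketch {
    /** Switch the driver to the most recently opened tab/window. */
    public static void switchToNewTab(WebDriver driver) {
        // getWindowHandles() returns every open handle; the newest one is usually last
        List<String> handles = new ArrayList<>(driver.getWindowHandles());
        driver.switchTo().window(handles.get(handles.size() - 1));
    }

    /** Switch the driver back to the first tab/window. */
    public static void switchToFirstTab(WebDriver driver) {
        List<String> handles = new ArrayList<>(driver.getWindowHandles());
        driver.switchTo().window(handles.get(0));
    }
}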
Java
|
apache-2.0
|
23b7fd67a60cbbae03bd2a9bf4d350b40d44e2a0
| 0 |
leichunxin/spring,pboonphong/mybatis-spring,kazuki43zoo/spring,mybatis/spring,gigold/spring,mosoft521/spring,nguyenvanthan/spring,bojueWjt/spring,xt-coder/spring,hazendaz/spring,gigold/springMybatis,zhengyixin/spring,yinhe402/spring,20094720/JustMyStudy,zeal4u/spring,mosoft521/spring,adairtaosy/spring,kazuki43zoo/spring,hazendaz/spring,ssyue/spring,jiangchaoting/spring,zyong2004/mybatis-spring,gdarmont/mybatis-spring,pboonphong/mybatis-spring,mofeiyunfei/spring,mybatis/spring,forestqqqq/spring
|
/*
* Copyright 2010-2012 The MyBatis Team
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mybatis.spring.mapper;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.fail;
import java.util.Properties;
import org.apache.ibatis.session.ExecutorType;
import org.apache.ibatis.session.SqlSessionFactory;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.mybatis.spring.SqlSessionFactoryBean;
import org.mybatis.spring.SqlSessionTemplate;
import org.mybatis.spring.mapper.child.MapperChildInterface;
import org.springframework.beans.factory.NoSuchBeanDefinitionException;
import org.springframework.beans.factory.config.ConstructorArgumentValues;
import org.springframework.beans.factory.config.PropertyPlaceholderConfigurer;
import org.springframework.beans.factory.config.RuntimeBeanReference;
import org.springframework.beans.factory.support.GenericBeanDefinition;
import org.springframework.context.support.GenericApplicationContext;
import org.springframework.stereotype.Component;
import com.mockrunner.mock.jdbc.MockDataSource;
/**
* @version $Id$
*/
public final class MapperScannerConfigurerTest {
private GenericApplicationContext applicationContext;
@Before
public void setupContext() {
applicationContext = new GenericApplicationContext();
// add the mapper scanner as a bean definition rather than explicitly setting a
// postProcessor on the context so initialization follows the same code path as reading from
// an XML config file
GenericBeanDefinition definition = new GenericBeanDefinition();
definition.setBeanClass(MapperScannerConfigurer.class);
definition.getPropertyValues().add("basePackage", "org.mybatis.spring.mapper");
applicationContext.registerBeanDefinition("mapperScanner", definition);
setupSqlSessionFactory("sqlSessionFactory");
// assume support for autowiring fields is added by MapperScannerConfigurer via
// org.springframework.context.annotation.ClassPathBeanDefinitionScanner.includeAnnotationConfig
}
private void startContext() {
applicationContext.refresh();
applicationContext.start();
// this will throw an exception if the beans cannot be found
applicationContext.getBean("sqlSessionFactory");
}
@After
public void assertNoMapperClass() {
// concrete classes should always be ignored by MapperScannerPostProcessor
assertBeanNotLoaded("mapperClass");
// no method interfaces should be ignored too
assertBeanNotLoaded("package-info");
// assertBeanNotLoaded("annotatedMapperZeroMethods"); // as of 1.1.0 mappers with no methods are loaded
}
@After
public void destroyContext() {
applicationContext.destroy();
}
@Test
public void testInterfaceScan() {
startContext();
// all interfaces with methods should be loaded
applicationContext.getBean("mapperInterface");
applicationContext.getBean("mapperSubinterface");
applicationContext.getBean("mapperChildInterface");
applicationContext.getBean("annotatedMapper");
}
@Test
public void testNameGenerator() {
GenericBeanDefinition definition = new GenericBeanDefinition();
definition.setBeanClass(BeanNameGenerator.class);
applicationContext.registerBeanDefinition("beanNameGenerator", definition);
applicationContext.getBeanDefinition("mapperScanner").getPropertyValues().add(
"beanNameGenerator", new RuntimeBeanReference("beanNameGenerator"));
startContext();
// only child interfaces should be loaded and named with their class names
applicationContext.getBean(MapperInterface.class.getName());
applicationContext.getBean(MapperSubinterface.class.getName());
applicationContext.getBean(MapperChildInterface.class.getName());
applicationContext.getBean(AnnotatedMapper.class.getName());
}
@Test
public void testMarkerInterfaceScan() {
applicationContext.getBeanDefinition("mapperScanner").getPropertyValues().add(
"markerInterface", MapperInterface.class);
startContext();
// only child interfaces should be loaded
applicationContext.getBean("mapperSubinterface");
applicationContext.getBean("mapperChildInterface");
assertBeanNotLoaded("mapperInterface");
assertBeanNotLoaded("annotatedMapper");
}
@Test
public void testAnnotationScan() {
applicationContext.getBeanDefinition("mapperScanner").getPropertyValues().add(
"annotationClass", Component.class);
startContext();
// only annotated mappers should be loaded
applicationContext.getBean("annotatedMapper");
applicationContext.getBean("mapperChildInterface");
assertBeanNotLoaded("mapperInterface");
assertBeanNotLoaded("mapperSubinterface");
}
@Test
public void testMarkerInterfaceAndAnnotationScan() {
applicationContext.getBeanDefinition("mapperScanner").getPropertyValues().add(
"markerInterface", MapperInterface.class);
applicationContext.getBeanDefinition("mapperScanner").getPropertyValues().add(
"annotationClass", Component.class);
startContext();
// everything should be loaded but the marker interface
applicationContext.getBean("annotatedMapper");
applicationContext.getBean("mapperSubinterface");
applicationContext.getBean("mapperChildInterface");
assertBeanNotLoaded("mapperInterface");
}
@Test
public void testScanWithExplicitSqlSessionFactory() throws Exception {
setupSqlSessionFactory("sqlSessionFactory2");
applicationContext.getBeanDefinition("mapperScanner").getPropertyValues().add(
"sqlSessionFactoryBeanName", "sqlSessionFactory2");
testInterfaceScan();
}
@Test
public void testScanWithExplicitSqlSessionTemplate() throws Exception {
GenericBeanDefinition definition = new GenericBeanDefinition();
definition.setBeanClass(SqlSessionTemplate.class);
ConstructorArgumentValues constructorArgs = new ConstructorArgumentValues();
constructorArgs.addGenericArgumentValue(new RuntimeBeanReference("sqlSessionFactory"));
definition.setConstructorArgumentValues(constructorArgs);
applicationContext.registerBeanDefinition("sqlSessionTemplate", definition);
applicationContext.getBeanDefinition("mapperScanner").getPropertyValues().add(
"sqlSessionTemplateBeanName", "sqlSessionTemplate");
testInterfaceScan();
}
@Test
public void testScanWithExplicitSqlSessionFactoryViaPlaceholder() throws Exception {
setupSqlSessionFactory("sqlSessionFactory2");
// use a property placeholder for the session factory name
applicationContext.getBeanDefinition("mapperScanner").getPropertyValues().add(
"sqlSessionFactoryBeanName", "${sqlSessionFactoryBeanNameProperty}");
Properties props = new java.util.Properties();
props.put("sqlSessionFactoryBeanNameProperty", "sqlSessionFactory2");
GenericBeanDefinition propertyDefinition = new GenericBeanDefinition();
propertyDefinition.setBeanClass(PropertyPlaceholderConfigurer.class);
propertyDefinition.getPropertyValues().add("properties", props);
applicationContext.registerBeanDefinition("propertiesPlaceholder", propertyDefinition);
testInterfaceScan();
}
@Test
public void testScanWithNameConflict() {
GenericBeanDefinition definition = new GenericBeanDefinition();
definition.setBeanClass(Object.class);
applicationContext.registerBeanDefinition("mapperInterface", definition);
startContext();
assertSame("scanner should not overwite existing bean definition", applicationContext
.getBean("mapperInterface").getClass(), Object.class);
}
@Test
public void testScanWithPropertyPlaceholders() {
GenericBeanDefinition definition = (GenericBeanDefinition) applicationContext
.getBeanDefinition("mapperScanner");
// use a property placeholder for basePackage
definition.getPropertyValues().removePropertyValue("basePackage");
definition.getPropertyValues().add("basePackage", "${basePackageProperty}");
definition.getPropertyValues().add("processPropertyPlaceHolders", true);
// also use a property placeholder for an SqlSessionFactory property
// to make sure the configLocation was setup correctly and MapperScanner did not change
// regular property placeholder substitution
definition = (GenericBeanDefinition) applicationContext
.getBeanDefinition("sqlSessionFactory");
definition.getPropertyValues().removePropertyValue("configLocation");
definition.getPropertyValues().add("configLocation", "${configLocationProperty}");
Properties props = new java.util.Properties();
props.put("basePackageProperty", "org.mybatis.spring.mapper");
props.put("configLocationProperty", "classpath:org/mybatis/spring/mybatis-config.xml");
GenericBeanDefinition propertyDefinition = new GenericBeanDefinition();
propertyDefinition.setBeanClass(PropertyPlaceholderConfigurer.class);
propertyDefinition.getPropertyValues().add("properties", props);
applicationContext.registerBeanDefinition("propertiesPlaceholder", propertyDefinition);
testInterfaceScan();
// make sure the configLocation was setup correctly
// mybatis-config.xml changes the executor from the default SIMPLE type
SqlSessionFactory sessionFactory = (SqlSessionFactory) applicationContext
.getBean("sqlSessionFactory");
assertSame(ExecutorType.REUSE, sessionFactory.getConfiguration().getDefaultExecutorType());
}
private void setupSqlSessionFactory(String name) {
GenericBeanDefinition definition = new GenericBeanDefinition();
definition.setBeanClass(SqlSessionFactoryBean.class);
definition.getPropertyValues().add("dataSource", new MockDataSource());
applicationContext.registerBeanDefinition(name, definition);
}
private void assertBeanNotLoaded(String name) {
try {
applicationContext.getBean(name);
fail("Spring bean should not be defined for class " + name);
} catch (NoSuchBeanDefinitionException nsbde) {
// success
}
}
}
|
src/test/java/org/mybatis/spring/mapper/MapperScannerConfigurerTest.java
|
/*
* Copyright 2010-2012 The MyBatis Team
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mybatis.spring.mapper;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.fail;
import java.util.Properties;
import org.apache.ibatis.session.ExecutorType;
import org.apache.ibatis.session.SqlSessionFactory;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.mybatis.spring.SqlSessionFactoryBean;
import org.mybatis.spring.SqlSessionTemplate;
import org.springframework.beans.factory.NoSuchBeanDefinitionException;
import org.springframework.beans.factory.config.ConstructorArgumentValues;
import org.springframework.beans.factory.config.PropertyPlaceholderConfigurer;
import org.springframework.beans.factory.config.RuntimeBeanReference;
import org.springframework.beans.factory.support.GenericBeanDefinition;
import org.springframework.context.support.GenericApplicationContext;
import org.springframework.stereotype.Component;
import com.mockrunner.mock.jdbc.MockDataSource;
/**
* @version $Id$
*/
public final class MapperScannerConfigurerTest {
private GenericApplicationContext applicationContext;
@Before
public void setupContext() {
applicationContext = new GenericApplicationContext();
// add the mapper scanner as a bean definition rather than explicitly setting a
// postProcessor on the context so initialization follows the same code path as reading from
// an XML config file
GenericBeanDefinition definition = new GenericBeanDefinition();
definition.setBeanClass(MapperScannerConfigurer.class);
definition.getPropertyValues().add("basePackage", "org.mybatis.spring.mapper");
applicationContext.registerBeanDefinition("mapperScanner", definition);
setupSqlSessionFactory("sqlSessionFactory");
// assume support for autowiring fields is added by MapperScannerConfigurer via
// org.springframework.context.annotation.ClassPathBeanDefinitionScanner.includeAnnotationConfig
}
private void startContext() {
applicationContext.refresh();
applicationContext.start();
// this will throw an exception if the beans cannot be found
applicationContext.getBean("sqlSessionFactory");
}
@After
public void assertNoMapperClass() {
// concrete classes should always be ignored by MapperScannerPostProcessor
assertBeanNotLoaded("mapperClass");
// no method interfaces should be ignored too
assertBeanNotLoaded("package-info");
// assertBeanNotLoaded("annotatedMapperZeroMethods"); // as of 1.1.0 mappers with no methods are loaded
}
@After
public void destroyContext() {
applicationContext.destroy();
}
@Test
public void testInterfaceScan() {
startContext();
// all interfaces with methods should be loaded
applicationContext.getBean("mapperInterface");
applicationContext.getBean("mapperSubinterface");
applicationContext.getBean("mapperChildInterface");
applicationContext.getBean("annotatedMapper");
}
@Test
public void testNameGenerator() {
GenericBeanDefinition definition = new GenericBeanDefinition();
definition.setBeanClass(BeanNameGenerator.class);
applicationContext.registerBeanDefinition("beanNameGenerator", definition);
applicationContext.getBeanDefinition("mapperScanner").getPropertyValues().add(
"beanNameGenerator", new RuntimeBeanReference("beanNameGenerator"));
startContext();
// only child interfaces should be loaded
applicationContext.getBean("org.mybatis.spring.mapper.MapperInterface");
applicationContext.getBean("org.mybatis.spring.mapper.MapperSubinterface");
applicationContext.getBean("org.mybatis.spring.mapper.child.MapperChildInterface");
applicationContext.getBean("org.mybatis.spring.mapper.AnnotatedMapper");
}
@Test
public void testMarkerInterfaceScan() {
applicationContext.getBeanDefinition("mapperScanner").getPropertyValues().add(
"markerInterface", MapperInterface.class);
startContext();
// only child interfaces should be loaded
applicationContext.getBean("mapperSubinterface");
applicationContext.getBean("mapperChildInterface");
assertBeanNotLoaded("mapperInterface");
assertBeanNotLoaded("annotatedMapper");
}
@Test
public void testAnnotationScan() {
applicationContext.getBeanDefinition("mapperScanner").getPropertyValues().add(
"annotationClass", Component.class);
startContext();
// only annotated mappers should be loaded
applicationContext.getBean("annotatedMapper");
applicationContext.getBean("mapperChildInterface");
assertBeanNotLoaded("mapperInterface");
assertBeanNotLoaded("mapperSubinterface");
}
@Test
public void testMarkerInterfaceAndAnnotationScan() {
applicationContext.getBeanDefinition("mapperScanner").getPropertyValues().add(
"markerInterface", MapperInterface.class);
applicationContext.getBeanDefinition("mapperScanner").getPropertyValues().add(
"annotationClass", Component.class);
startContext();
// everything should be loaded but the marker interface
applicationContext.getBean("annotatedMapper");
applicationContext.getBean("mapperSubinterface");
applicationContext.getBean("mapperChildInterface");
assertBeanNotLoaded("mapperInterface");
}
@Test
public void testScanWithExplicitSqlSessionFactory() throws Exception {
setupSqlSessionFactory("sqlSessionFactory2");
applicationContext.getBeanDefinition("mapperScanner").getPropertyValues().add(
"sqlSessionFactoryBeanName", "sqlSessionFactory2");
testInterfaceScan();
}
@Test
public void testScanWithExplicitSqlSessionTemplate() throws Exception {
GenericBeanDefinition definition = new GenericBeanDefinition();
definition.setBeanClass(SqlSessionTemplate.class);
ConstructorArgumentValues constructorArgs = new ConstructorArgumentValues();
constructorArgs.addGenericArgumentValue(new RuntimeBeanReference("sqlSessionFactory"));
definition.setConstructorArgumentValues(constructorArgs);
applicationContext.registerBeanDefinition("sqlSessionTemplate", definition);
applicationContext.getBeanDefinition("mapperScanner").getPropertyValues().add(
"sqlSessionTemplateBeanName", "sqlSessionTemplate");
testInterfaceScan();
}
@Test
public void testScanWithExplicitSqlSessionFactoryViaPlaceholder() throws Exception {
setupSqlSessionFactory("sqlSessionFactory2");
// use a property placeholder for the session factory name
applicationContext.getBeanDefinition("mapperScanner").getPropertyValues().add(
"sqlSessionFactoryBeanName", "${sqlSessionFactoryBeanNameProperty}");
Properties props = new java.util.Properties();
props.put("sqlSessionFactoryBeanNameProperty", "sqlSessionFactory2");
GenericBeanDefinition propertyDefinition = new GenericBeanDefinition();
propertyDefinition.setBeanClass(PropertyPlaceholderConfigurer.class);
propertyDefinition.getPropertyValues().add("properties", props);
applicationContext.registerBeanDefinition("propertiesPlaceholder", propertyDefinition);
testInterfaceScan();
}
@Test
public void testScanWithNameConflict() {
GenericBeanDefinition definition = new GenericBeanDefinition();
definition.setBeanClass(Object.class);
applicationContext.registerBeanDefinition("mapperInterface", definition);
startContext();
assertSame("scanner should not overwite existing bean definition", applicationContext
.getBean("mapperInterface").getClass(), Object.class);
}
@Test
public void testScanWithPropertyPlaceholders() {
GenericBeanDefinition definition = (GenericBeanDefinition) applicationContext
.getBeanDefinition("mapperScanner");
// use a property placeholder for basePackage
definition.getPropertyValues().removePropertyValue("basePackage");
definition.getPropertyValues().add("basePackage", "${basePackageProperty}");
definition.getPropertyValues().add("processPropertyPlaceHolders", true);
// also use a property placeholder for an SqlSessionFactory property
// to make sure the configLocation was setup correctly and MapperScanner did not change
// regular property placeholder substitution
definition = (GenericBeanDefinition) applicationContext
.getBeanDefinition("sqlSessionFactory");
definition.getPropertyValues().removePropertyValue("configLocation");
definition.getPropertyValues().add("configLocation", "${configLocationProperty}");
Properties props = new java.util.Properties();
props.put("basePackageProperty", "org.mybatis.spring.mapper");
props.put("configLocationProperty", "classpath:org/mybatis/spring/mybatis-config.xml");
GenericBeanDefinition propertyDefinition = new GenericBeanDefinition();
propertyDefinition.setBeanClass(PropertyPlaceholderConfigurer.class);
propertyDefinition.getPropertyValues().add("properties", props);
applicationContext.registerBeanDefinition("propertiesPlaceholder", propertyDefinition);
testInterfaceScan();
// make sure the configLocation was setup correctly
// mybatis-config.xml changes the executor from the default SIMPLE type
SqlSessionFactory sessionFactory = (SqlSessionFactory) applicationContext
.getBean("sqlSessionFactory");
assertSame(ExecutorType.REUSE, sessionFactory.getConfiguration().getDefaultExecutorType());
}
private void setupSqlSessionFactory(String name) {
GenericBeanDefinition definition = new GenericBeanDefinition();
definition.setBeanClass(SqlSessionFactoryBean.class);
definition.getPropertyValues().add("dataSource", new MockDataSource());
applicationContext.registerBeanDefinition(name, definition);
}
private void assertBeanNotLoaded(String name) {
try {
applicationContext.getBean(name);
fail("Spring bean should not be defined for class " + name);
} catch (NoSuchBeanDefinitionException nsbde) {
// success
}
}
}
|
minor test change
|
src/test/java/org/mybatis/spring/mapper/MapperScannerConfigurerTest.java
|
minor test change
|
<ide><path>rc/test/java/org/mybatis/spring/mapper/MapperScannerConfigurerTest.java
<ide> import org.junit.Test;
<ide> import org.mybatis.spring.SqlSessionFactoryBean;
<ide> import org.mybatis.spring.SqlSessionTemplate;
<add>import org.mybatis.spring.mapper.child.MapperChildInterface;
<ide> import org.springframework.beans.factory.NoSuchBeanDefinitionException;
<ide> import org.springframework.beans.factory.config.ConstructorArgumentValues;
<ide> import org.springframework.beans.factory.config.PropertyPlaceholderConfigurer;
<ide>
<ide> startContext();
<ide>
<del> // only child interfaces should be loaded
<del> applicationContext.getBean("org.mybatis.spring.mapper.MapperInterface");
<del> applicationContext.getBean("org.mybatis.spring.mapper.MapperSubinterface");
<del> applicationContext.getBean("org.mybatis.spring.mapper.child.MapperChildInterface");
<del> applicationContext.getBean("org.mybatis.spring.mapper.AnnotatedMapper");
<add> // only child interfaces should be loaded and named with their class names
<add> applicationContext.getBean(MapperInterface.class.getName());
<add> applicationContext.getBean(MapperSubinterface.class.getName());
<add> applicationContext.getBean(MapperChildInterface.class.getName());
<add> applicationContext.getBean(AnnotatedMapper.class.getName());
<ide> }
<ide>
<ide> @Test
|
|
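Editor's note (illustrative sketch, not part of the dataset record above): the updated assertions
expect each mapper bean to be named by its fully-qualified class name. The BeanNameGenerator test
helper registered in testNameGenerator is not included in this record; one plausible implementation
of Spring's BeanNameGenerator contract that produces such names is sketched below (the class name
FullyQualifiedBeanNameGenerator is an assumption):

import org.springframework.beans.factory.config.BeanDefinition;
import org.springframework.beans.factory.support.BeanDefinitionRegistry;
import org.springframework.beans.factory.support.BeanNameGenerator;

public class FullyQualifiedBeanNameGenerator implements BeanNameGenerator {
    @Override
    public String generateBeanName(BeanDefinition definition, BeanDefinitionRegistry registry) {
        // name the bean after the scanned mapper's fully-qualified interface name
        return definition.getBeanClassName();
    }
}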
Java
|
apache-2.0
|
28b7f3c84a0df8c48d376d242011a838097e1c7c
| 0 |
Carabi/carabiserver,Kopilov/carabiserver,Carabi/carabiserver,Kopilov/carabiserver,Kopilov/carabiserver,Carabi/carabiserver,Carabi/carabiserver,Kopilov/carabiserver
|
package ru.carabi.server.kernel;
import java.io.File;
import java.io.IOException;
import java.io.StringReader;
import java.net.MalformedURLException;
import java.net.URL;
import java.security.GeneralSecurityException;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.ejb.EJB;
import javax.ejb.Singleton;
import javax.json.Json;
import javax.json.JsonArray;
import javax.json.JsonArrayBuilder;
import javax.json.JsonObject;
import javax.json.JsonObjectBuilder;
import javax.json.JsonReader;
import javax.json.JsonValue;
import javax.persistence.EntityManager;
import javax.persistence.NoResultException;
import javax.persistence.PersistenceContext;
import javax.persistence.Query;
import javax.persistence.TypedQuery;
import javax.ws.rs.core.Response;
import javax.xml.ws.BindingProvider;
import javax.xml.ws.Holder;
import javax.xml.ws.WebServiceException;
import me.lima.ThreadSafeDateParser;
import ru.carabi.libs.CarabiFunc;
import static ru.carabi.libs.CarabiFunc.*;
import ru.carabi.server.CarabiException;
import ru.carabi.server.Settings;
import ru.carabi.server.UserLogon;
import ru.carabi.server.Utls;
import ru.carabi.server.entities.CarabiAppServer;
import ru.carabi.server.entities.CarabiUser;
import ru.carabi.server.entities.ChatMessage;
import ru.carabi.server.entities.FileOnServer;
import ru.carabi.server.kernel.oracle.CarabiDate;
import ru.carabi.server.logging.CarabiLogging;
import ru.carabi.server.rest.RestException;
import ru.carabi.stub.CarabiException_Exception;
import ru.carabi.stub.ChatService;
import ru.carabi.stub.ChatService_Service;
/**
*
* @author sasha<[email protected]>
*/
@Singleton
public class ChatBean {
private static final Logger logger = CarabiLogging.getLogger(ChatBean.class);
@EJB AdminBean admin;
@EJB EventerBean eventer;
@PersistenceContext(unitName = "ru.carabi.server_carabiserver-chat")
EntityManager emChat;
@PersistenceContext(unitName = "ru.carabi.server_carabiserver-kernel")
EntityManager emKernel;
/**
* Sends a message. Delivers the message to the receiver and, on successful delivery,
* records it in the sender's sent messages.
* @param sender
* @param receiver
* @param messageText
* @return id of the message on the sender's side
* @throws ru.carabi.server.CarabiException
*/
public Long sendMessage(CarabiUser sender, CarabiUser receiver, String messageText, Long senderAttachmentId, Long receiverAttachmentId) throws CarabiException {
CarabiAppServer receiverServer = getTargetUserServer(receiver);
Long recievedMessageId;
//If the target server is the current one, call the Bean method directly.
if (Settings.getCurrentServer().equals(receiverServer)) {
recievedMessageId = forwardMessage(sender, receiver, messageText, receiverAttachmentId);
} else { //otherwise via SOAP
recievedMessageId = callForwardMessageSoap(receiverServer, sender, receiver, messageText, receiverAttachmentId);
}
if (recievedMessageId < 0) {
throw new CarabiException("could not forward message");
}
CarabiAppServer senderServer = getTargetUserServer(sender);
Long sentMessageId;
//Similarly for the sender, once the message has reached the receiver
if (Settings.getCurrentServer().equals(senderServer)) {
sentMessageId = putMessage(sender, sender, receiver, recievedMessageId, receiver.getMainServer().getId(), messageText, senderAttachmentId);
} else {
sentMessageId = callPutMessageSoap(senderServer, sender, sender, receiver, recievedMessageId, receiver.getMainServer().getId(), messageText, senderAttachmentId);
}
return sentMessageId;
}
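// Editor's note (illustrative usage sketch, not part of the original source; logins and values are
// hypothetical): a caller that has already resolved both users could send a plain text message
// without attachments roughly like this:
//   CarabiUser sender = admin.findUser("alice");
//   CarabiUser receiver = admin.findUser("bob");
//   Long sentMessageId = chatBean.sendMessage(sender, receiver, "Hello", null, null);
// The returned id identifies the copy stored in the sender's mailbox; the receiver's copy is written
// first, either locally or through the ChatService SOAP port of the receiver's server.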
/**
* Delivers a message to the receiver. It is assumed that the receiver's chat database is located
* on the current server (if not, {@link ChatService#forwardMessage(java.lang.String, java.lang.String, java.lang.String, java.lang.String) }
* of the corresponding server is called). The message is written to the database (the attachment must have been stored earlier), and the user
* is notified through Eventer.
* @param sender the sender
* @param receiver the receiver
* @param messageText the message text (for attachments -- a short comment, e.g. the file name)
* @param attachmentId id of an existing attachment
*/
public Long forwardMessage(CarabiUser sender, CarabiUser receiver, String messageText, Long attachmentId) throws CarabiException {
CarabiAppServer receiverServer = getTargetUserServer(receiver);
if (!Settings.getCurrentServer().equals(receiverServer)) {
//If we are not on the receiver's server -- call the method via SOAP
return callForwardMessageSoap(receiverServer, sender, receiver, messageText, attachmentId);
}
Long messageId = putMessage(receiver, sender, receiver, null, null, messageText, attachmentId);
return messageId;
}
private void messageToEventer(CarabiUser sender, CarabiUser receiver, boolean toReceiver, Long messageId) throws CarabiException {
try {
String eventText;
JsonObjectBuilder eventTextBuild = Json.createObjectBuilder();
eventTextBuild.add("sender", sender.getLogin());
eventTextBuild.add("receiver", receiver.getLogin());
eventTextBuild.add("id", messageId);
eventText = eventTextBuild.build().toString();
eventer.fireEvent("", toReceiver ? receiver.getLogin() : sender.getLogin(), (short) 12, eventText);
} catch (IOException ex) {
logger.log(Level.SEVERE, null, ex);
}
}
/**
* Writes a message to the chat database on the current server.
* @param owner owner of the mailbox
* @param sender the sender
* @param receiver the receiver
* @param receivedMessageId Id of the received incoming message, when storing its paired sent copy
* @param receivedMessageServerId
* @param messageText the message text
* @return Id of the new record
* @throws CarabiException
*/
public Long putMessage(CarabiUser owner, CarabiUser sender, CarabiUser receiver, Long receivedMessageId, Integer receivedMessageServerId, String messageText, Long attachmentId) throws CarabiException {
ChatMessage chatMessage = new ChatMessage();
chatMessage.setOwnerId(owner.getId());
chatMessage.setSenderId(sender.getId());
chatMessage.setSent(new Date());
chatMessage.setReceiverId(receiver.getId());
chatMessage.setReceivedMessageId(receivedMessageId);
chatMessage.setReceivedMessageServerId(receivedMessageServerId);
chatMessage.setMessageText(messageText);
if (attachmentId != null) {
chatMessage.setAttachment(emChat.find(FileOnServer.class, attachmentId));
}
chatMessage = emChat.merge(chatMessage);
emChat.flush();
messageToEventer(sender, receiver, owner.equals(receiver), chatMessage.getId());
return chatMessage.getId();
}
/**
* Returns the user's target server. If none is set, returns the master server and assigns it to the user.
* @param user
* @return
*/
private CarabiAppServer getTargetUserServer(CarabiUser user) {
CarabiAppServer userServer = user.getMainServer();
if (userServer == null) {
userServer = Settings.getMasterServer();
user.setMainServer(userServer);
user = emKernel.merge(user);
emKernel.flush();
}
return userServer;
}
/**
* Marks messages as read/unread. Along the way, sets the delivery notification for the sender.
* @param receiverLogon the user's session
* @param messagesList a string with a message ID or a JSON array of IDs
* @param read if true -- mark as read (and send a notification), otherwise -- clear the mark
* @throws ru.carabi.server.CarabiException
*/
public void markRead(UserLogon receiverLogon, String messagesList, boolean read) throws CarabiException {
//The method must run on the owner's server.
final CarabiUser receiver = receiverLogon.getUser();
CarabiAppServer targetServer = receiver.getMainServer();
final CarabiAppServer currentServer = Settings.getCurrentServer();
List<Long> messageIdList = parseMessagesIdList(messagesList);
if (!currentServer.equals(targetServer)) {
callMarkReadSoap(targetServer, receiverLogon.getToken(), messagesList, read);
return;
}
for (Long messageId: messageIdList) {//Validate the messages
//The message must belong to the current user, and the user must be its receiver
ChatMessage receivedMessage = emChat.find(ChatMessage.class, messageId);
if (receivedMessage == null) {
throw new CarabiException("message " + messageId + " not found");
}
if (!receiver.getId().equals(receivedMessage.getOwnerId())) {
throw new CarabiException("Message " + messageId + " does not belong to this user");
}
if (!receiver.getId().equals(receivedMessage.getReceiverId())) {
throw new CarabiException("Message " + messageId + " is not income");
}
}
//Mark the receiver's messages and group them by sender
Map<String, JsonArrayBuilder> messagesBySenders = new HashMap<>();
Map<String, CarabiUser> senders = new HashMap<>();
for (Long messageId: messageIdList) {
ChatMessage receivedMessage = emChat.find(ChatMessage.class, messageId);
if (read) {
receivedMessage.setReceived(new Date());
} else {
receivedMessage.setReceived(null);
}
receivedMessage = emChat.merge(receivedMessage);
Long senderId = receivedMessage.getSenderId();
CarabiUser sender = emKernel.find(CarabiUser.class, senderId);
String senderLogin;
if (sender == null) {
logger.warning("Unknown sender id " + senderId);
senderLogin = "";
} else {
senderLogin = sender.getLogin();
senders.put(senderLogin, sender);
}
JsonArrayBuilder sendersMessages = messagesBySenders.get(senderLogin);
if (sendersMessages == null) {
sendersMessages = Json.createArrayBuilder();
messagesBySenders.put(senderLogin, sendersMessages);
}
sendersMessages.add(messageId);
}
emChat.flush();
//fire events about the read messages
for (String senderLogin: messagesBySenders.keySet()) {
if (!read) {
//do not reset the mark on the sender's side
} else {
//Take the sender and mark the message among the sent ones
CarabiUser sender = senders.get(senderLogin);
targetServer = sender.getMainServer();
if (currentServer.equals(targetServer)) {
markSentReceived(sender, receiver, messagesBySenders.get(senderLogin).build().toString());
} else {
callMarkSentReceivedSoap(targetServer, senderLogin, receiver.getLogin(), messagesBySenders.get(senderLogin).build().toString());
}
}
try {
JsonObjectBuilder eventText = Json.createObjectBuilder();
eventText.add("sender", senderLogin);
eventText.add("receiver", receiver.getLogin());
eventText.add("read", read);
eventText.add("messagesList", messagesBySenders.get(senderLogin));
eventer.fireEvent("", receiver.getLogin(), (short)13, eventText.build().toString());
} catch (IOException ex) {
Logger.getLogger(ChatBean.class.getName()).log(Level.SEVERE, null, ex);
}
}
}
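// Editor's note (illustrative example, not part of the original source): for each sender the loop
// above fires event code 13 to the receiver with a JSON payload shaped roughly like
//   {"sender":"alice","receiver":"bob","read":true,"messagesList":[101,102]}
// where the logins and message ids are hypothetical placeholders.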
/**
* Sets the delivery notification on a sent message.
* @param sender
* @param receiver
* @param messagesList
*/
public void markSentReceived(CarabiUser sender, CarabiUser receiver, String messagesList) throws CarabiException {
//Switch to the sender's server if necessary
CarabiAppServer targetServer = sender.getMainServer();
if (!Settings.getCurrentServer().equals(targetServer)) {
callMarkSentReceivedSoap(targetServer, sender.getLogin(), receiver.getLogin(), messagesList);
return;
}
List<Long> messageIdList = parseMessagesIdList(messagesList);
JsonArrayBuilder sentMessages = Json.createArrayBuilder();
for (Long messageId: messageIdList) {
//look up the sent message by the ID and database of the received one
TypedQuery<ChatMessage> getSentByReceived = emChat.createNamedQuery("getSentByReceived", ChatMessage.class);
getSentByReceived.setParameter("received", messageId);
getSentByReceived.setParameter("server", receiver.getMainServer().getId());
try {
ChatMessage sentMessage = getSentByReceived.getSingleResult();
//check that the message belongs to the sender
if (!sender.getId().equals(sentMessage.getOwnerId())) {
throw new CarabiException("Message does not belong to this user");
}
if (!sender.getId().equals(sentMessage.getSenderId())) {
throw new CarabiException("Message is not outcome");
}
//mark it as received
sentMessage.setReceived(new Date());
emChat.merge(sentMessage);
sentMessages.add(sentMessage.getId());
} catch (NoResultException e) {
//the sender deleted the sent message before the receiver read it
}
}
emChat.flush();
try {
//fire an event that the message has been received
JsonObjectBuilder eventText = Json.createObjectBuilder();
eventText.add("sender", sender.getLogin());
eventText.add("receiver", receiver.getLogin());
eventText.add("read", true);
eventText.add("messagesList", sentMessages);
eventer.fireEvent("", sender.getLogin(), (short)13, eventText.build().toString());
} catch (IOException ex) {
Logger.getLogger(ChatBean.class.getName()).log(Level.SEVERE, null, ex);
}
}
public Long getUnreadMessagesCount(UserLogon client) throws CarabiException {
//Switch to the client's server if necessary
CarabiAppServer targetServer = client.getUser().getMainServer();
if (!Settings.getCurrentServer().equals(targetServer)) {
return callGetUnreadMessagesCountSoap(targetServer, client.getToken());
}
Query getUnreadMessagesCount = emChat.createNamedQuery("getUnreadMessagesCount");
getUnreadMessagesCount.setParameter("user", client.getUser().getId());
List count = getUnreadMessagesCount.getResultList();
return (Long) count.get(0);
}
public String getUnreadMessagesSenders(UserLogon client) throws CarabiException {
//Switch to the client's server if necessary
CarabiAppServer targetServer = client.getUser().getMainServer();
if (!Settings.getCurrentServer().equals(targetServer)) {
return callGetUnreadMessagesSendersSoap(targetServer, client.getToken());
}
Query getUnreadMessagesSenders = emChat.createNamedQuery("getUnreadMessagesSenders", Object[].class);
getUnreadMessagesSenders.setParameter("user", client.getUser().getId());
List<Object[]> sendersMessages = getUnreadMessagesSenders.getResultList();//list of sender IDs and counts of unread messages
JsonObjectBuilder result = Json.createObjectBuilder();
for (Object[] senderMessages: sendersMessages) {
Object senderId = senderMessages[0];
CarabiUser sender = emKernel.find(CarabiUser.class, senderId);
String login;
if (sender == null) {
login = senderId.toString();
} else {
login = sender.getLogin();
}
result.add(login, (Long)senderMessages[1]);
}
return result.build().toString();
}
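// Editor's note (illustrative example, not part of the original source): the JSON string built above
// maps each sender login to that sender's number of unread messages, e.g.
//   {"alice":3,"bob":1}
// (logins and counts are hypothetical placeholders).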
public String getMessage(UserLogon client, Long messageId, boolean read) throws CarabiException {
//Switch to the client's server if necessary
CarabiAppServer targetServer = client.getUser().getMainServer();
if (!Settings.getCurrentServer().equals(targetServer)) {
return callGetMessageSoap(targetServer, client.getToken(), messageId, read);
}
//The message must belong to the current user
ChatMessage message = emChat.find(ChatMessage.class, messageId);
if (message == null) {
throw new CarabiException("message " + messageId + " not found");
}
final CarabiUser user = client.getUser();
if (!user.getId().equals(message.getOwnerId())) {
throw new CarabiException("Message does not belong to this user");
}
//if the user is the receiver, the read mark can be set
if (read && user.getId().equals(message.getReceiverId())) {
markRead(client, messageId.toString(), true);
}
return message.getMessageText();
}
public String getMessageDetails(UserLogon client, Long messageId, boolean read) throws CarabiException {
//Switch to the client's server if necessary
CarabiAppServer targetServer = client.getUser().getMainServer();
if (!Settings.getCurrentServer().equals(targetServer)) {
return callGetMessageDetailsSoap(targetServer, client.getToken(), messageId, read);
}
//The message must belong to the current user
ChatMessage message = emChat.find(ChatMessage.class, messageId);
if (message == null) {
throw new CarabiException("message " + messageId + " not found");
}
final CarabiUser user = client.getUser();
if (!user.getId().equals(message.getOwnerId())) {
throw new CarabiException("Message does not belong to this user");
}
//if the user is the receiver, the read mark can be set
if (read && user.getId().equals(message.getReceiverId())) {
markRead(client, messageId.toString(), true);
}
JsonObjectBuilder result = Json.createObjectBuilder();
CarabiUser sender = emKernel.find(CarabiUser.class, message.getSenderId());
result.add("sender", sender.getLogin());
CarabiUser receiver = emKernel.find(CarabiUser.class, message.getReceiverId());
result.add("receiver", receiver.getLogin());
result.add("sent", ThreadSafeDateParser.format(message.getSent(), CarabiDate.pattern));
if (message.getReceived() == null) {
result.addNull("received");
} else {
result.add("received", ThreadSafeDateParser.format(message.getReceived(), CarabiDate.pattern));
}
result.add("message", message.getMessageText());
if (message.getAttachment() != null) {
result.add("attachment", true);
}
return result.build().toString();
}
public String getContactList(UserLogon client, String search) throws CarabiException {
//Select the users
TypedQuery<CarabiUser> getUsersList;
if (search != null && !search.isEmpty()) {
getUsersList = emKernel.createNamedQuery("getUsersListSearch", CarabiUser.class);
getUsersList.setParameter("search", "%" + search.toUpperCase() + "%");
} else {
getUsersList = emKernel.createNamedQuery("getAllUsersList", CarabiUser.class);
}
List<CarabiUser> usersList = getUsersList.getResultList();
return printUsersForOutput(client, usersList, null).toString();
}
public String getContact(UserLogon client, String login) throws CarabiException {
List<CarabiUser> usersList = new ArrayList<>(1);
usersList.add(admin.findUser(login));
JsonObject userForOutput = printUsersForOutput(client, usersList, null);
return Utls.redim(userForOutput).toString();
}
public String getLastInterlocutors(UserLogon client, int size, String afterDateStr, String search) throws CarabiException {
//Switch to the client's server if necessary
CarabiAppServer targetServer = client.getUser().getMainServer();
if (!Settings.getCurrentServer().equals(targetServer)) {
return callGetLastInterlocutorsSoap(targetServer, client.getToken(), size, afterDateStr, search);
}
CarabiDate afterDate = parceDate(afterDateStr, "01.01.1970");
//fetch recently received messages
Query getRecentlyMessagesData = emChat.createNamedQuery("getRecentlyMessagesData", Object[].class);
getRecentlyMessagesData.setParameter("user", client.getUser().getId());
getRecentlyMessagesData.setParameter("recently", afterDate);
List<Object[]> messagesMetadata = getRecentlyMessagesData.getResultList();//list of messages (sender, receiver, date) ordered from newest to oldest
List<Long> interlocutorsIdOrdered = new ArrayList();
Set<Long> interlocutorsIdSet = new HashSet();
Map<Long, Date> interlocutorsLastContact = new HashMap<>();
final Long ownerId = client.getUser().getId();
//build an ordered list of interlocutor ids and the date of the last message from each of them
for (Object[] messageMetadata: messagesMetadata) {
Long senderId = (Long) messageMetadata[0];
Long receiverId = (Long) messageMetadata[1];
Long interlocutorId = ownerId.equals(senderId) ? receiverId : senderId;
if (!interlocutorsIdSet.contains(interlocutorId)) {
interlocutorsIdOrdered.add(interlocutorId);
interlocutorsIdSet.add(interlocutorId);
interlocutorsLastContact.put(interlocutorId, (Date) messageMetadata[2]);
}
}
List<CarabiUser> interlocutorsOrdered;
if (!interlocutorsIdOrdered.isEmpty()) {
//fetch the interlocutors' data
List<CarabiUser> users;
if (search != null && !search.isEmpty()) {
TypedQuery<CarabiUser> getSelectedUsersListSearch = emKernel.createNamedQuery("getSelectedUsersListSearch", CarabiUser.class);
getSelectedUsersListSearch.setParameter("idlist", interlocutorsIdOrdered);
getSelectedUsersListSearch.setParameter("search", "%" + search.toUpperCase() + "%");
users = getSelectedUsersListSearch.getResultList();
} else {
TypedQuery<CarabiUser> getSelectedUsersList = emKernel.createNamedQuery("getSelectedUsersList", CarabiUser.class);
getSelectedUsersList.setParameter("idlist", interlocutorsIdOrdered);
users = getSelectedUsersList.getResultList();
}
//restore the recency ordering of the dialogs
Map<Long, CarabiUser> usersPerId = new HashMap<>();
for (CarabiUser sender: users) {
usersPerId.put(sender.getId(), sender);
}
interlocutorsOrdered = new ArrayList<>(interlocutorsIdOrdered.size());
int i = 0;
for (Long senderId: interlocutorsIdOrdered) {
CarabiUser user = usersPerId.get(senderId);
if (user != null) {
interlocutorsOrdered.add(user);
i++;
}
if (size > 0 && i == size) {
break;
}
}
} else {
interlocutorsOrdered = new ArrayList<>(0);
}
return printUsersForOutput(client, interlocutorsOrdered, interlocutorsLastContact).toString();
}
private CarabiDate parceDate(String dateStr, String defaultVal) throws CarabiException {
CarabiDate date;
if (dateStr != null && !dateStr.isEmpty()) {
try {//parse the date
date = new CarabiDate(dateStr);
} catch (IllegalArgumentException | ParseException e) {
throw new CarabiException("Illegal date: " + dateStr);
}
} else {
try {
date = new CarabiDate(defaultVal);
} catch (ParseException ex) {
Logger.getLogger(ChatBean.class.getName()).log(Level.SEVERE, null, ex);
throw new CarabiException("Illegal defailt date: " + defaultVal);
}
}
return date;
}
/**
Prints the list of users in the format used for stored queries.
Adds information about unread messages and online status.
* @param usersList
* @return
*/
private JsonObject printUsersForOutput(UserLogon client, List<CarabiUser> usersList, Map<Long, Date> userLastContact) throws CarabiException {
Set<String> onlineUsers;
JsonObject unreadMessagesSenders;
if (!usersList.isEmpty()) {//if the user list is empty -- do not collect the extra statistics
onlineUsers = getOnlineUsers();
final String unreadMessagesSendersJson = getUnreadMessagesSenders(client);
unreadMessagesSenders = Json.createReader(new StringReader(unreadMessagesSendersJson)).readObject();
} else {
onlineUsers = new HashSet<>();
unreadMessagesSenders = Json.createObjectBuilder().build();
}
//build the output
JsonArrayBuilder headerColumns = Json.createArrayBuilder();
headerColumns.add(Utls.parametersToJson("LOGIN", "VARCHAR2"));
headerColumns.add(Utls.parametersToJson("FIRSTNAME", "VARCHAR2"));
headerColumns.add(Utls.parametersToJson("MIDDLENAME", "VARCHAR2"));
headerColumns.add(Utls.parametersToJson("LASTNAME", "VARCHAR2"));
headerColumns.add(Utls.parametersToJson("DEPARTMENT", "VARCHAR2"));
headerColumns.add(Utls.parametersToJson("ROLE", "VARCHAR2"));
headerColumns.add(Utls.parametersToJson("SCHEMA_NAME", "VARCHAR2"));
headerColumns.add(Utls.parametersToJson("SCHEMA_DESCRIPTION", "VARCHAR2"));
headerColumns.add(Utls.parametersToJson("ONLINE", "NUMBER"));
headerColumns.add(Utls.parametersToJson("MESSAGES_UNREAD", "NUMBER"));
if (userLastContact != null) {
headerColumns.add(Utls.parametersToJson("LAST_CONTACT_DATE", "DATE"));
headerColumns.add(Utls.parametersToJson("LAST_CONTACT_DATE_STR", "VARCHAR2"));
}
JsonObjectBuilder result = Json.createObjectBuilder();
result.add("columns", headerColumns);
JsonArrayBuilder rows = Json.createArrayBuilder();
for (CarabiUser user: usersList) {
if (user.equals(client.getUser())) {
continue;
}
JsonArrayBuilder userJson = Json.createArrayBuilder();
final String login = user.getLogin();
Utls.addJsonObject(userJson, login);
Utls.addJsonObject(userJson, user.getFirstname());
Utls.addJsonObject(userJson, user.getMiddlename());
Utls.addJsonObject(userJson, user.getLastname());
Utls.addJsonObject(userJson, user.getDepartment());
Utls.addJsonObject(userJson, user.getRole());
if (user.getDefaultSchema() != null) {
Utls.addJsonObject(userJson, user.getDefaultSchema().getName());
Utls.addJsonObject(userJson, user.getDefaultSchema().getDescription());
} else {
userJson.addNull();
userJson.addNull();
}
if (onlineUsers.contains(user.getLogin())) {
userJson.add("1");
} else {
userJson.add("0");
}
JsonValue unreadMessages = unreadMessagesSenders.get(login);
if (unreadMessages == null) {
userJson.add("0");
} else {
userJson.add(unreadMessages.toString());
}
if (userLastContact != null) {
userJson.add(ThreadSafeDateParser.format(userLastContact.get(user.getId()), CarabiDate.pattern));
userJson.add(ThreadSafeDateParser.format(userLastContact.get(user.getId()), CarabiDate.patternShort));
}
rows.add(userJson);
}
result.add("list", rows);
return result.build();
}
private Set<String> getOnlineUsers() {
//emKernel.clear();
TypedQuery<CarabiAppServer> getSevers = emKernel.createNamedQuery("getAllServers", CarabiAppServer.class);
List<CarabiAppServer> servers = getSevers.getResultList();
//Try to obtain the list of connected users from every Eventer
Set<String> result = new HashSet<>();
for (CarabiAppServer server: servers) {
try {
String usersOnlineJson = eventer.eventerSingleRequestResponse(server, "[]", new Holder<>((short)15), true);
logger.info("online from " + server.getComputer() + ": " + usersOnlineJson);
JsonReader reader = Json.createReader(new StringReader(usersOnlineJson));
JsonObject usersOnline = reader.readObject();
result.addAll(usersOnline.keySet());
} catch (IOException ex) {
Logger.getLogger(ChatBean.class.getName()).log(Level.SEVERE, null, ex);
}
}
return result;
}
public String getDialog(UserLogon client, CarabiUser interlocutor, String afterDateStr, String search) throws CarabiException {
//Switch to the client's server if necessary
CarabiAppServer targetServer = client.getUser().getMainServer();
if (!Settings.getCurrentServer().equals(targetServer)) {
return callGetDialogSoap(targetServer, client.getToken(), interlocutor, afterDateStr, search);
}
//fetch the messages
CarabiDate afterDate = parceDate(afterDateStr, "01.01.1970");
final Long userId = client.getUser().getId();
final TypedQuery<ChatMessage> getDialog;
if (search != null && !search.equals("")) {
getDialog = emChat.createNamedQuery("searchInDialog", ChatMessage.class);
getDialog.setParameter("search", "%" + search.toUpperCase() + "%");
} else {
getDialog = emChat.createNamedQuery("getDialog", ChatMessage.class);
}
getDialog.setParameter("user", userId);
getDialog.setParameter("recently", afterDate);
getDialog.setParameter("interlocutor", interlocutor.getId());
List<ChatMessage> dialog = getDialog.getResultList();
//build the output
JsonArrayBuilder headerColumns = Json.createArrayBuilder();
headerColumns.add(Utls.parametersToJson("MESSAGE_ID", "NUMBER"));
headerColumns.add(Utls.parametersToJson("SENDER", "VARCHAR2"));
headerColumns.add(Utls.parametersToJson("RECEIVER", "VARCHAR2"));
headerColumns.add(Utls.parametersToJson("MESSAGE_TEXT", "VARCHAR2"));
headerColumns.add(Utls.parametersToJson("ATTACHMENT", "NUMBER"));
headerColumns.add(Utls.parametersToJson("SENT", "DATE"));
headerColumns.add(Utls.parametersToJson("RECEIVED", "DATE"));
JsonObjectBuilder result = Json.createObjectBuilder();
result.add("columns", headerColumns);
JsonArrayBuilder rows = Json.createArrayBuilder();
for (ChatMessage message: dialog) {
JsonArrayBuilder messageJson = Json.createArrayBuilder();
Utls.addJsonObject(messageJson, message.getId().toString());
assert message.getOwnerId().equals(userId);
if (userId.equals(message.getReceiverId())) {
Utls.addJsonObject(messageJson, interlocutor.getLogin());//sender
Utls.addJsonObject(messageJson, client.getUser().getLogin());//receiver
} else if (userId.equals(message.getSenderId())) {
Utls.addJsonObject(messageJson, client.getUser().getLogin());//sender
Utls.addJsonObject(messageJson, interlocutor.getLogin());//receiver
} else {
logger.warning("message " + message.getId() + "do not have current user (" + userId + ") as sender or receiver");
continue;
}
Utls.addJsonObject(messageJson, message.getMessageText());
Utls.addJsonObject(messageJson, message.getAttachment() == null ? "0" : "1");
Utls.addJsonObject(messageJson, CarabiDate.wrap(message.getSent()));
Utls.addJsonObject(messageJson, CarabiDate.wrap(message.getReceived()));
rows.add(messageJson);
}
result.add("list", rows);
return result.build().toString();
}
public int deleteMessages(UserLogon client, String messagesList) throws CarabiException {
//Switch to the client's server if necessary
CarabiAppServer targetServer = client.getUser().getMainServer();
if (!Settings.getCurrentServer().equals(targetServer)) {
return callDeleteMessagesSoap(targetServer, client.getToken(), messagesList);
}
List<Long> idList = parseMessagesIdList(messagesList);
//Fetch the attachments of the messages being deleted
TypedQuery<FileOnServer> getUserMessagesAttachments = emChat.createNamedQuery("getUserMessagesAttachments", FileOnServer.class);
getUserMessagesAttachments.setParameter("user", client.getUser().getId());
getUserMessagesAttachments.setParameter("idlist", idList);
List<FileOnServer> userMessagesAttachments = getUserMessagesAttachments.getResultList();
//Delete the messages
Query deleteMessagesList = emChat.createNamedQuery("deleteMessagesList");
deleteMessagesList.setParameter("user", client.getUser().getId());
deleteMessagesList.setParameter("idlist", idList);
int deletedSize = deleteMessagesList.executeUpdate();
//Check which of the attachments are no longer used
//(some may still be present in the interlocutor's message)
//and delete them together with their files
for(FileOnServer attachment: userMessagesAttachments) {
Query getMessagesWithAttachment = emChat.createNamedQuery("getMessagesWithAttachment");
getMessagesWithAttachment.setParameter("attachment_id", attachment.getId());
List messagesWithAttachment = getMessagesWithAttachment.getResultList();
if (messagesWithAttachment.size() > 0) {
continue;
}
new File(attachment.getContentAddress()).delete();
emChat.remove(attachment);
}
//Fire an event to the clients
try {
eventer.fireEvent("", client.getUser().getLogin(), (short) 16, messagesList);
} catch (IOException ex) {
Logger.getLogger(ChatBean.class.getName()).log(Level.SEVERE, null, ex);
}
return deletedSize;
}
/**
* Returns the attachment of a message, if present
* @param client the client session
* @param messageId id of the message with the attachment
* @return the attachment of the message, or null if there is none
* @throws ru.carabi.server.CarabiException if the message is not found or does not belong to the user
*/
public FileOnServer getMessageAttachement(UserLogon client, Long messageId) throws CarabiException {
//Switch to the client's server if necessary
CarabiAppServer targetServer = client.getUser().getMainServer();
if (!Settings.getCurrentServer().equals(targetServer)) {
return callGetMessageAttachementSoap(targetServer, client.getToken(), messageId);
}
ChatMessage messageWithAttachement = emChat.find(ChatMessage.class, messageId);
if (messageWithAttachement == null) {
RestException restException = new RestException("message " + messageId + " not found", Response.Status.NOT_FOUND);
throw new CarabiException(restException);
}
if (!client.getUser().getId().equals(messageWithAttachement.getOwnerId())) {
RestException restException = new RestException("message " + messageId + " does not belong to user " + client.getUser().getId(), Response.Status.CONFLICT);
throw new CarabiException(restException);
}
return messageWithAttachement.getAttachment();
}
	/**
	 * Creates a new {@link FileOnServer} object for an attachment and saves it to the database to generate the key.
	 * @param userFilename the file name supplied by the user
	 * @return the FileOnServer object with a generated ID
	 */
public FileOnServer createAttachment(String userFilename) {
FileOnServer newAttachment = new FileOnServer();
newAttachment.setName(userFilename);
newAttachment = emChat.merge(newAttachment);
emChat.flush();
newAttachment.setContentAddress(Settings.CHAT_ATTACHMENTS_LOCATION + "/" + newAttachment.getId() + "_" + userFilename);
return newAttachment;
}
	/**
	 * Re-saves the attachment ({@link FileOnServer}) object in the database.
	 */
public FileOnServer updateAttachment(FileOnServer attachment) {
return emChat.merge(attachment);
}
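	/**
	 * Broadcasts the user's online state through Eventer (event code 14). On disconnect
	 * the state is only broadcast after checking every Eventer that no other session
	 * of this user is still online.
	 */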
public void fireUserState(UserLogon logon, boolean online) throws IOException, CarabiException {
if (online) {
			//On connect, always fire the event.
eventer.fireEvent("", "", (short)14, "{\"login\":\"" + logon.getUser().getLogin() + "\",\"online\":true}");
} else {
			//On disconnect, check that the user has no other active sessions
TypedQuery<CarabiAppServer> getSevers = emKernel.createNamedQuery("getAllServers", CarabiAppServer.class);
List<CarabiAppServer> servers = getSevers.getResultList();
			//Check each Eventer for the presence of this user
boolean stillOnline = false;
for (CarabiAppServer server: servers) {
try {
String userOnlineJson = eventer.eventerSingleRequestResponse(server, "[\"" + logon.getUser().getLogin() + "\"]", new Holder<>((short)15), true);
logger.info("online from " + server.getComputer() + ": " + userOnlineJson);
JsonReader reader = Json.createReader(new StringReader(userOnlineJson));
JsonObject userOnline = reader.readObject();
if (userOnline.getBoolean(logon.getUser().getLogin())) {
stillOnline = true;
break;
}
} catch (IOException ex) {
Logger.getLogger(ChatBean.class.getName()).log(Level.SEVERE, null, ex);
}
}
if (!stillOnline) {
eventer.fireEvent("", "", (short)14, "{\"login\":\"" + logon.getUser().getLogin() + "\",\"online\":false}");
}
}
}
private Long callForwardMessageSoap(CarabiAppServer receiverServer, CarabiUser sender, CarabiUser receiver, String messageText, Long attachmentId) throws CarabiException {
try {
ChatService chatServicePort = getChatServicePort(receiverServer);
String token = chatServicePort.prepareToForward();
token = encrypt(token);
setCookie((BindingProvider)chatServicePort);
return chatServicePort.forwardMessage(token, sender.getLogin(), receiver.getLogin(), messageText, attachmentId);
} catch (MalformedURLException | GeneralSecurityException | WebServiceException ex) {
logger.log(Level.SEVERE, null, ex);
throw new CarabiException("Error on connecting to remote server: ", ex);
} catch (CarabiException_Exception ex) {
logger.log(Level.SEVERE, null, ex);
throw new CarabiException(ex);
}
}
private Long callPutMessageSoap(CarabiAppServer targetServer, CarabiUser owner, CarabiUser sender, CarabiUser receiver, Long receivedMessageId, Integer receivedMessageServerId, String messageText, Long attachmentId) throws CarabiException {
try {
ChatService chatServicePort = getChatServicePort(targetServer);
String token = chatServicePort.prepareToForward();
token = encrypt(token);
setCookie((BindingProvider)chatServicePort);
return chatServicePort.putMessage(token, owner.getLogin(), sender.getLogin(), receiver.getLogin(), receivedMessageId, receivedMessageServerId, messageText, attachmentId);
} catch (MalformedURLException | GeneralSecurityException | WebServiceException ex) {
logger.log(Level.SEVERE, null, ex);
throw new CarabiException("Error on connecting to remote server: " + ex.getMessage(), ex);
} catch (CarabiException_Exception ex) {
logger.log(Level.SEVERE, null, ex);
throw new CarabiException(ex);
}
}
private void callMarkReadSoap(CarabiAppServer targetServer, String clientToken, String messageList, boolean read) throws CarabiException {
try {
ChatService chatServicePort = getChatServicePort(targetServer);
chatServicePort.markRead(clientToken, messageList, read);
} catch (MalformedURLException ex) {
logger.log(Level.SEVERE, null, ex);
throw new CarabiException("Error on connecting to remote server: " + ex.getMessage(), ex);
} catch (CarabiException_Exception ex) {
logger.log(Level.SEVERE, null, ex);
throw new CarabiException(ex);
}
}
private void callMarkSentReceivedSoap(CarabiAppServer targetServer, String loginSender, String loginReceiver, String messagesList) throws CarabiException {
try {
ChatService chatServicePort = getChatServicePort(targetServer);
String token = chatServicePort.prepareToForward();
token = encrypt(token);
setCookie((BindingProvider)chatServicePort);
chatServicePort.markReceived(token, loginSender, loginReceiver, messagesList);
} catch (MalformedURLException | GeneralSecurityException ex) {
logger.log(Level.SEVERE, null, ex);
throw new CarabiException("Error on connecting to remote server: " + ex.getMessage(), ex);
} catch (CarabiException_Exception ex) {
logger.log(Level.SEVERE, null, ex);
throw new CarabiException(ex);
}
}
private Long callGetUnreadMessagesCountSoap(CarabiAppServer targetServer, String clientToken) throws CarabiException {
try {
ChatService chatServicePort = getChatServicePort(targetServer);
return chatServicePort.getUnreadMessagesCount(clientToken);
} catch (MalformedURLException ex) {
logger.log(Level.SEVERE, null, ex);
throw new CarabiException("Error on connecting to remote server: " + ex.getMessage(), ex);
} catch (CarabiException_Exception ex) {
logger.log(Level.SEVERE, null, ex);
throw new CarabiException(ex);
}
}
private String callGetUnreadMessagesSendersSoap(CarabiAppServer targetServer, String clientToken) throws CarabiException {
try {
ChatService chatServicePort = getChatServicePort(targetServer);
return chatServicePort.getUnreadMessagesSenders(clientToken);
} catch (MalformedURLException ex) {
logger.log(Level.SEVERE, null, ex);
throw new CarabiException("Error on connecting to remote server: " + ex.getMessage(), ex);
} catch (CarabiException_Exception ex) {
logger.log(Level.SEVERE, null, ex);
throw new CarabiException(ex);
}
}
private String callGetMessageSoap(CarabiAppServer targetServer, String clientToken, Long messageId, boolean read) throws CarabiException {
try {
ChatService chatServicePort = getChatServicePort(targetServer);
return chatServicePort.getMessage(clientToken, messageId, read);
} catch (MalformedURLException ex) {
logger.log(Level.SEVERE, null, ex);
throw new CarabiException("Error on connecting to remote server: " + ex.getMessage(), ex);
} catch (CarabiException_Exception ex) {
logger.log(Level.SEVERE, null, ex);
throw new CarabiException(ex);
}
}
private String callGetMessageDetailsSoap(CarabiAppServer targetServer, String clientToken, Long messageId, boolean read) throws CarabiException {
try {
ChatService chatServicePort = getChatServicePort(targetServer);
return chatServicePort.getMessageDetails(clientToken, messageId, read);
} catch (MalformedURLException ex) {
logger.log(Level.SEVERE, null, ex);
throw new CarabiException("Error on connecting to remote server: " + ex.getMessage(), ex);
} catch (CarabiException_Exception ex) {
logger.log(Level.SEVERE, null, ex);
throw new CarabiException(ex);
}
}
private String callGetLastInterlocutorsSoap(CarabiAppServer targetServer, String clientToken, int size, String afterDate, String search) throws CarabiException {
try {
ChatService chatServicePort = getChatServicePort(targetServer);
return chatServicePort.getLastInterlocutors(clientToken, size, afterDate, search);
} catch (MalformedURLException ex) {
logger.log(Level.SEVERE, null, ex);
throw new CarabiException("Error on connecting to remote server: " + ex.getMessage(), ex);
} catch (CarabiException_Exception ex) {
logger.log(Level.SEVERE, null, ex);
throw new CarabiException(ex);
}
}
private String callGetDialogSoap(CarabiAppServer targetServer, String clientToken, CarabiUser interlocutor, String afterDate, String search) throws CarabiException {
try {
ChatService chatServicePort = getChatServicePort(targetServer);
return chatServicePort.getDialog(clientToken, interlocutor.getLogin(), afterDate, search);
} catch (MalformedURLException ex) {
logger.log(Level.SEVERE, null, ex);
throw new CarabiException("Error on connecting to remote server: " + ex.getMessage(), ex);
} catch (CarabiException_Exception ex) {
logger.log(Level.SEVERE, null, ex);
throw new CarabiException(ex);
}
}
private int callDeleteMessagesSoap(CarabiAppServer targetServer, String clientToken, String messagesList) throws CarabiException {
try {
ChatService chatServicePort = getChatServicePort(targetServer);
return chatServicePort.deleteMessages(clientToken, messagesList);
} catch (MalformedURLException ex) {
logger.log(Level.SEVERE, null, ex);
throw new CarabiException("Error on connecting to remote server: " + ex.getMessage(), ex);
} catch (CarabiException_Exception ex) {
logger.log(Level.SEVERE, null, ex);
throw new CarabiException(ex);
}
}
private FileOnServer callGetMessageAttachementSoap(CarabiAppServer targetServer, String clientToken, Long messageId) throws CarabiException {
try {
ChatService chatServicePort = getChatServicePort(targetServer);
ru.carabi.stub.FileOnServer messageAttachementSoap = chatServicePort.getMessageAttachement(CarabiFunc.encrypt(clientToken), messageId);
FileOnServer messageAttachement = new FileOnServer();
messageAttachement.setContentAddress(messageAttachementSoap.getContentAddress());
messageAttachement.setContentLength(messageAttachementSoap.getContentLength());
messageAttachement.setId(messageAttachementSoap.getId());
messageAttachement.setMimeType(messageAttachementSoap.getMimeType());
messageAttachement.setName(messageAttachementSoap.getName());
return messageAttachement;
} catch (MalformedURLException ex) {
logger.log(Level.SEVERE, null, ex);
throw new CarabiException("Error on connecting to remote server: " + ex.getMessage(), ex);
} catch (CarabiException_Exception ex) {
logger.log(Level.SEVERE, null, ex);
throw new CarabiException(ex);
} catch (GeneralSecurityException ex) {
logger.log(Level.SEVERE, "Encrypting error", ex);
throw new CarabiException("Encrypting error");
}
}
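	/**
	 * Cached SOAP port to the ChatService; created lazily on first use and then reused
	 * for subsequent calls (the previously built port is returned regardless of the
	 * requested target server).
	 */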
private ChatService chatServicePort;
private ChatService getChatServicePort(CarabiAppServer targetServer) throws MalformedURLException {
if (chatServicePort != null) {
return chatServicePort;
}
StringBuilder url = new StringBuilder("http://");
url.append(targetServer.getComputer());
url.append(":");
url.append(targetServer.getGlassfishPort());
url.append("/");
url.append(targetServer.getContextroot());
url.append("/ChatService?wsdl");
ChatService_Service chatService = new ChatService_Service(new URL(url.toString()));
chatServicePort = chatService.getChatServicePort();
return chatServicePort;
}
	/**
	 * Sends a message to a list of receivers. Uses {@link #sendMessage(ru.carabi.server.entities.CarabiUser, ru.carabi.server.entities.CarabiUser, java.lang.String)}
	 * @param sender the sender
	 * @param receiversArray array of receiver logins
	 * @param messageText message text
	 * @return array of IDs of the sent messages
	 * @throws CarabiException
	 */
public Long[] sendToReceivers(CarabiUser sender, String[] receiversArray, String messageText) throws CarabiException {
Long[] sentMessagesId = new Long[receiversArray.length];
int i = 0;
for (String login: receiversArray) {
CarabiUser receiver = admin.findUser(login);
sentMessagesId[i] = sendMessage(sender, receiver, messageText, null, null);
i++;
}
return sentMessagesId;
}
private List<Long> parseMessagesIdList(String messagesIdList) throws CarabiException {
long id = -1L;
		//check whether the parameter contains a single number
try {
id = Long.parseLong(messagesIdList);
} catch (NumberFormatException e){}
List<Long> idList;
		if (id == -1L) {//if not, parse it as an array
try {
JsonArray idArrayJson = Json.createReader(new StringReader(messagesIdList)).readArray();
idList = new ArrayList<Long>(idArrayJson.size());
for (Iterator<JsonValue> iterator = idArrayJson.iterator(); iterator.hasNext();) {
JsonValue value = iterator.next();
id = Long.parseLong(value.toString());
idList.add(id);
}
} catch (Exception e) {
throw new CarabiException("input incorrect (not int, nor array of int found)");
}
} else {
idList = new ArrayList<>(0);
idList.add(id);
}
return idList;
}
}
|
src/main/java/ru/carabi/server/kernel/ChatBean.java
|
package ru.carabi.server.kernel;
import java.io.File;
import java.io.IOException;
import java.io.StringReader;
import java.net.MalformedURLException;
import java.net.URL;
import java.security.GeneralSecurityException;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.ejb.EJB;
import javax.ejb.Singleton;
import javax.json.Json;
import javax.json.JsonArray;
import javax.json.JsonArrayBuilder;
import javax.json.JsonObject;
import javax.json.JsonObjectBuilder;
import javax.json.JsonReader;
import javax.json.JsonValue;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import javax.persistence.Query;
import javax.persistence.TypedQuery;
import javax.ws.rs.core.Response;
import javax.xml.ws.BindingProvider;
import javax.xml.ws.Holder;
import javax.xml.ws.WebServiceException;
import me.lima.ThreadSafeDateParser;
import ru.carabi.libs.CarabiFunc;
import static ru.carabi.libs.CarabiFunc.*;
import ru.carabi.server.CarabiException;
import ru.carabi.server.Settings;
import ru.carabi.server.UserLogon;
import ru.carabi.server.Utls;
import ru.carabi.server.entities.CarabiAppServer;
import ru.carabi.server.entities.CarabiUser;
import ru.carabi.server.entities.ChatMessage;
import ru.carabi.server.entities.FileOnServer;
import ru.carabi.server.kernel.oracle.CarabiDate;
import ru.carabi.server.logging.CarabiLogging;
import ru.carabi.server.rest.RestException;
import ru.carabi.stub.CarabiException_Exception;
import ru.carabi.stub.ChatService;
import ru.carabi.stub.ChatService_Service;
/**
*
* @author sasha<[email protected]>
*/
@Singleton
public class ChatBean {
private static final Logger logger = CarabiLogging.getLogger(ChatBean.class);
@EJB AdminBean admin;
@EJB EventerBean eventer;
@PersistenceContext(unitName = "ru.carabi.server_carabiserver-chat")
EntityManager emChat;
@PersistenceContext(unitName = "ru.carabi.server_carabiserver-kernel")
EntityManager emKernel;
	/**
	 * Sends a message. Delivers the message to the receiver and, on successful delivery,
	 * stores it in the sender's sent messages.
	 * @param sender
	 * @param receiver
	 * @param messageText
	 * @return id of the message on the sender's side
	 * @throws ru.carabi.server.CarabiException
	 */
public Long sendMessage(CarabiUser sender, CarabiUser receiver, String messageText, Long senderAttachmentId, Long receiverAttachmentId) throws CarabiException {
CarabiAppServer receiverServer = getTargetUserServer(receiver);
Long recievedMessageId;
		//If the target server is the current one, call the bean method directly.
if (Settings.getCurrentServer().equals(receiverServer)) {
recievedMessageId = forwardMessage(sender, receiver, messageText, receiverAttachmentId);
		} else { //otherwise over SOAP
recievedMessageId = callForwardMessageSoap(receiverServer, sender, receiver, messageText, receiverAttachmentId);
}
if (recievedMessageId < 0) {
throw new CarabiException("could not forward message");
}
CarabiAppServer senderServer = getTargetUserServer(sender);
Long sentMessageId;
		//Do the same for the sender, provided the message has reached the receiver
if (Settings.getCurrentServer().equals(senderServer)) {
sentMessageId = putMessage(sender, sender, receiver, recievedMessageId, receiver.getMainServer().getId(), messageText, senderAttachmentId);
} else {
sentMessageId = callPutMessageSoap(senderServer, sender, sender, receiver, recievedMessageId, receiver.getMainServer().getId(), messageText, senderAttachmentId);
}
return sentMessageId;
}
	/**
	 * Delivers a message to the receiver. The receiver's chat database is assumed to reside
	 * on the current server (otherwise {@link ChatService#forwardMessage(java.lang.String, java.lang.String, java.lang.String, java.lang.String) }
	 * of the corresponding server is invoked). The message is written to the database (the attachment
	 * must have been stored beforehand) and the user is notified through Eventer.
	 * @param sender the sender
	 * @param receiver the receiver
	 * @param messageText message text (for attachments -- a short comment, e.g. the file name)
	 * @param attachmentId id of an existing attachment
	 */
public Long forwardMessage(CarabiUser sender, CarabiUser receiver, String messageText, Long attachmentId) throws CarabiException {
CarabiAppServer receiverServer = getTargetUserServer(receiver);
if (!Settings.getCurrentServer().equals(receiverServer)) {
			//If we are not on the receiver's server, call the method over SOAP
return callForwardMessageSoap(receiverServer, sender, receiver, messageText, attachmentId);
}
Long messageId = putMessage(receiver, sender, receiver, null, null, messageText, attachmentId);
return messageId;
}
private void messageToEventer(CarabiUser sender, CarabiUser receiver, boolean toReceiver, Long messageId) throws CarabiException {
try {
String eventText;
JsonObjectBuilder eventTextBuild = Json.createObjectBuilder();
eventTextBuild.add("sender", sender.getLogin());
eventTextBuild.add("receiver", receiver.getLogin());
eventTextBuild.add("id", messageId);
eventText = eventTextBuild.build().toString();
eventer.fireEvent("", toReceiver ? receiver.getLogin() : sender.getLogin(), (short) 12, eventText);
} catch (IOException ex) {
logger.log(Level.SEVERE, null, ex);
}
}
	/**
	 * Writes a message to the chat database on the current server.
	 * @param owner owner of the mailbox
	 * @param sender the sender
	 * @param receiver the receiver
	 * @param receivedMessageId id of the received incoming message, when storing its paired sent copy
	 * @param receivedMessageServerId
	 * @param messageText message text
	 * @return id of the new record
	 * @throws CarabiException
	 */
public Long putMessage(CarabiUser owner, CarabiUser sender, CarabiUser receiver, Long receivedMessageId, Integer receivedMessageServerId, String messageText, Long attachmentId) throws CarabiException {
ChatMessage chatMessage = new ChatMessage();
chatMessage.setOwnerId(owner.getId());
chatMessage.setSenderId(sender.getId());
chatMessage.setSent(new Date());
chatMessage.setReceiverId(receiver.getId());
chatMessage.setReceivedMessageId(receivedMessageId);
chatMessage.setReceivedMessageServerId(receivedMessageServerId);
chatMessage.setMessageText(messageText);
if (attachmentId != null) {
chatMessage.setAttachment(emChat.find(FileOnServer.class, attachmentId));
}
chatMessage = emChat.merge(chatMessage);
emChat.flush();
messageToEventer(sender, receiver, owner.equals(receiver), chatMessage.getId());
return chatMessage.getId();
}
	/**
	 * Returns the user's target server. If none is set, returns the master server and assigns it to the user.
	 * @param user
	 * @return
	 */
private CarabiAppServer getTargetUserServer(CarabiUser user) {
CarabiAppServer userServer = user.getMainServer();
if (userServer == null) {
userServer = Settings.getMasterServer();
user.setMainServer(userServer);
user = emKernel.merge(user);
emKernel.flush();
}
return userServer;
}
	/**
	 * Marks messages as read/unread. Along the way sets the delivery notification for the sender.
	 * @param receiverLogon the user's session
	 * @param messagesList a string with a message id or a JSON array of ids
	 * @param read if true -- mark as read (and send a notification), otherwise -- clear the mark
	 * @throws ru.carabi.server.CarabiException
	 */
public void markRead(UserLogon receiverLogon, String messagesList, boolean read) throws CarabiException {
		//This method must run on the owner's server.
final CarabiUser receiver = receiverLogon.getUser();
CarabiAppServer targetServer = receiver.getMainServer();
final CarabiAppServer currentServer = Settings.getCurrentServer();
List<Long> messageIdList = parseMessagesIdList(messagesList);
if (!currentServer.equals(targetServer)) {
callMarkReadSoap(targetServer, receiverLogon.getToken(), messagesList, read);
return;
}
		for (Long messageId: messageIdList) {//validate the messages
			//The message must belong to the current user, and the user must be the receiver
ChatMessage receivedMessage = emChat.find(ChatMessage.class, messageId);
if (receivedMessage == null) {
throw new CarabiException("message " + messageId + " not found");
}
if (!receiver.getId().equals(receivedMessage.getOwnerId())) {
throw new CarabiException("Message " + messageId + " does not belong to this user");
}
if (!receiver.getId().equals(receivedMessage.getReceiverId())) {
throw new CarabiException("Message " + messageId + " is not income");
}
}
		//Mark the receiver's messages and group them by sender
Map<String, JsonArrayBuilder> messagesBySenders = new HashMap<>();
Map<String, CarabiUser> senders = new HashMap<>();
for (Long messageId: messageIdList) {
ChatMessage receivedMessage = emChat.find(ChatMessage.class, messageId);
if (read) {
receivedMessage.setReceived(new Date());
} else {
receivedMessage.setReceived(null);
}
receivedMessage = emChat.merge(receivedMessage);
Long senderId = receivedMessage.getSenderId();
CarabiUser sender = emKernel.find(CarabiUser.class, senderId);
String senderLogin;
if (sender == null) {
logger.warning("Unknown sender id " + senderId);
senderLogin = "";
} else {
senderLogin = sender.getLogin();
senders.put(senderLogin, sender);
}
JsonArrayBuilder sendersMessages = messagesBySenders.get(senderLogin);
if (sendersMessages == null) {
sendersMessages = Json.createArrayBuilder();
messagesBySenders.put(senderLogin, sendersMessages);
}
sendersMessages.add(messageId);
}
emChat.flush();
		//fire events about the messages that were read
for (String senderLogin: messagesBySenders.keySet()) {
if (!read) {
				//do not reset the mark on the sender's side
			} else {
				//Take the sender and mark the message in their sent box
CarabiUser sender = senders.get(senderLogin);
targetServer = sender.getMainServer();
if (currentServer.equals(targetServer)) {
markSentReceived(sender, receiver, messagesBySenders.get(senderLogin).build().toString());
} else {
callMarkSentReceivedSoap(targetServer, senderLogin, receiver.getLogin(), messagesBySenders.get(senderLogin).build().toString());
}
}
try {
JsonObjectBuilder eventText = Json.createObjectBuilder();
eventText.add("sender", senderLogin);
eventText.add("receiver", receiver.getLogin());
eventText.add("read", read);
eventText.add("messagesList", messagesBySenders.get(senderLogin));
eventer.fireEvent("", receiver.getLogin(), (short)13, eventText.build().toString());
} catch (IOException ex) {
Logger.getLogger(ChatBean.class.getName()).log(Level.SEVERE, null, ex);
}
}
}
	/**
	 * Sets the delivery notification on a sent message.
	 * @param sender
	 * @param receiver
	 * @param messagesList
	 */
public void markSentReceived(CarabiUser sender, CarabiUser receiver, String messagesList) throws CarabiException {
		//Switch to the sender's server if necessary
CarabiAppServer targetServer = sender.getMainServer();
if (!Settings.getCurrentServer().equals(targetServer)) {
callMarkSentReceivedSoap(targetServer, sender.getLogin(), receiver.getLogin(), messagesList);
return;
}
List<Long> messageIdList = parseMessagesIdList(messagesList);
JsonArrayBuilder sentMessages = Json.createArrayBuilder();
for (Long messageId: messageIdList) {
			//find the sent message by the id and server of the received one
TypedQuery<ChatMessage> getSentByReceived = emChat.createNamedQuery("getSentByReceived", ChatMessage.class);
getSentByReceived.setParameter("received", messageId);
getSentByReceived.setParameter("server", receiver.getMainServer().getId());
ChatMessage sentMessage = getSentByReceived.getSingleResult();
			//check that the message belongs to the sender
if (!sender.getId().equals(sentMessage.getOwnerId())) {
throw new CarabiException("Message does not belong to this user");
}
if (!sender.getId().equals(sentMessage.getSenderId())) {
throw new CarabiException("Message is not outcome");
}
			//mark it as received
sentMessage.setReceived(new Date());
emChat.merge(sentMessage);
sentMessages.add(sentMessage.getId());
}
emChat.flush();
try {
			//fire an event that the message has been received
JsonObjectBuilder eventText = Json.createObjectBuilder();
eventText.add("sender", sender.getLogin());
eventText.add("receiver", receiver.getLogin());
eventText.add("read", true);
eventText.add("messagesList", sentMessages);
eventer.fireEvent("", sender.getLogin(), (short)13, eventText.build().toString());
} catch (IOException ex) {
Logger.getLogger(ChatBean.class.getName()).log(Level.SEVERE, null, ex);
}
}
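	/**
	 * Returns the number of unread messages for the client, delegating the call to the
	 * client's main server over SOAP when necessary.
	 */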
public Long getUnreadMessagesCount(UserLogon client) throws CarabiException {
		//Switch to the client's server if necessary
CarabiAppServer targetServer = client.getUser().getMainServer();
if (!Settings.getCurrentServer().equals(targetServer)) {
return callGetUnreadMessagesCountSoap(targetServer, client.getToken());
}
Query getUnreadMessagesCount = emChat.createNamedQuery("getUnreadMessagesCount");
getUnreadMessagesCount.setParameter("user", client.getUser().getId());
List count = getUnreadMessagesCount.getResultList();
return (Long) count.get(0);
}
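	/**
	 * Returns a JSON object mapping sender logins to the number of unread messages
	 * from each of them, delegating to the client's main server when necessary.
	 */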
public String getUnreadMessagesSenders(UserLogon client) throws CarabiException {
		//Switch to the client's server if necessary
CarabiAppServer targetServer = client.getUser().getMainServer();
if (!Settings.getCurrentServer().equals(targetServer)) {
return callGetUnreadMessagesSendersSoap(targetServer, client.getToken());
}
Query getUnreadMessagesSenders = emChat.createNamedQuery("getUnreadMessagesSenders", Object[].class);
getUnreadMessagesSenders.setParameter("user", client.getUser().getId());
		List<Object[]> sendersMessages = getUnreadMessagesSenders.getResultList();//list of sender IDs and unread message counts
JsonObjectBuilder result = Json.createObjectBuilder();
for (Object[] senderMessages: sendersMessages) {
Object senderId = senderMessages[0];
CarabiUser sender = emKernel.find(CarabiUser.class, senderId);
String login;
if (sender == null) {
login = senderId.toString();
} else {
login = sender.getLogin();
}
result.add(login, (Long)senderMessages[1]);
}
return result.build().toString();
}
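	/**
	 * Returns the text of the given message. The message must belong to the client;
	 * if the client is the receiver and read is true, the message is also marked as read.
	 */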
public String getMessage(UserLogon client, Long messageId, boolean read) throws CarabiException {
		//Switch to the client's server if necessary
CarabiAppServer targetServer = client.getUser().getMainServer();
if (!Settings.getCurrentServer().equals(targetServer)) {
return callGetMessageSoap(targetServer, client.getToken(), messageId, read);
}
		//The message must belong to the current user
ChatMessage message = emChat.find(ChatMessage.class, messageId);
if (message == null) {
throw new CarabiException("message " + messageId + " not found");
}
final CarabiUser user = client.getUser();
if (!user.getId().equals(message.getOwnerId())) {
throw new CarabiException("Message does not belong to this user");
}
		//if the user is the receiver, the message may be marked as read
if (read && user.getId().equals(message.getReceiverId())) {
markRead(client, messageId.toString(), true);
}
return message.getMessageText();
}
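	/**
	 * Returns a JSON object with the message details (sender, receiver, sent/received dates,
	 * text and attachment flag), optionally marking the message as read.
	 */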
public String getMessageDetails(UserLogon client, Long messageId, boolean read) throws CarabiException {
		//Switch to the client's server if necessary
CarabiAppServer targetServer = client.getUser().getMainServer();
if (!Settings.getCurrentServer().equals(targetServer)) {
return callGetMessageDetailsSoap(targetServer, client.getToken(), messageId, read);
}
		//The message must belong to the current user
ChatMessage message = emChat.find(ChatMessage.class, messageId);
if (message == null) {
throw new CarabiException("message " + messageId + " not found");
}
final CarabiUser user = client.getUser();
if (!user.getId().equals(message.getOwnerId())) {
throw new CarabiException("Message does not belong to this user");
}
		//if the user is the receiver, the message may be marked as read
if (read && user.getId().equals(message.getReceiverId())) {
markRead(client, messageId.toString(), true);
}
JsonObjectBuilder result = Json.createObjectBuilder();
CarabiUser sender = emKernel.find(CarabiUser.class, message.getSenderId());
result.add("sender", sender.getLogin());
CarabiUser receiver = emKernel.find(CarabiUser.class, message.getReceiverId());
result.add("receiver", receiver.getLogin());
result.add("sent", ThreadSafeDateParser.format(message.getSent(), CarabiDate.pattern));
if (message.getReceived() == null) {
result.addNull("received");
} else {
result.add("received", ThreadSafeDateParser.format(message.getReceived(), CarabiDate.pattern));
}
result.add("message", message.getMessageText());
if (message.getAttachment() != null) {
result.add("attachment", true);
}
return result.build().toString();
}
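	/**
	 * Returns the list of users (optionally filtered by the search string) in the stored
	 * query output format, with online status and unread message counters.
	 */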
public String getContactList(UserLogon client, String search) throws CarabiException {
		//Select the users
TypedQuery<CarabiUser> getUsersList;
if (search != null && !search.isEmpty()) {
getUsersList = emKernel.createNamedQuery("getUsersListSearch", CarabiUser.class);
getUsersList.setParameter("search", "%" + search.toUpperCase() + "%");
} else {
getUsersList = emKernel.createNamedQuery("getAllUsersList", CarabiUser.class);
}
List<CarabiUser> usersList = getUsersList.getResultList();
return printUsersForOutput(client, usersList, null).toString();
}
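	/**
	 * Returns the data of a single user by login, built with the same output routine as {@link #getContactList}.
	 */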
public String getContact(UserLogon client, String login) throws CarabiException {
List<CarabiUser> usersList = new ArrayList<>(1);
usersList.add(admin.findUser(login));
JsonObject userForOutput = printUsersForOutput(client, usersList, null);
return Utls.redim(userForOutput).toString();
}
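	/**
	 * Returns the client's recent interlocutors ordered by the date of the last message,
	 * optionally limited by size, date and search string. Delegates to the client's main
	 * server when necessary.
	 */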
public String getLastInterlocutors(UserLogon client, int size, String afterDateStr, String search) throws CarabiException {
		//Switch to the client's server if necessary
CarabiAppServer targetServer = client.getUser().getMainServer();
if (!Settings.getCurrentServer().equals(targetServer)) {
return callGetLastInterlocutorsSoap(targetServer, client.getToken(), size, afterDateStr, search);
}
CarabiDate afterDate = parceDate(afterDateStr, "01.01.1970");
		//take the recently received messages
Query getRecentlyMessagesData = emChat.createNamedQuery("getRecentlyMessagesData", Object[].class);
getRecentlyMessagesData.setParameter("user", client.getUser().getId());
getRecentlyMessagesData.setParameter("recently", afterDate);
		List<Object[]> messagesMetadata = getRecentlyMessagesData.getResultList();//list of messages (sender, receiver, date) ordered from newest to oldest
List<Long> interlocutorsIdOrdered = new ArrayList();
Set<Long> interlocutorsIdSet = new HashSet();
Map<Long, Date> interlocutorsLastContact = new HashMap<>();
final Long ownerId = client.getUser().getId();
		//build the ordered list of interlocutor ids and the date of the last message from each of them
for (Object[] messageMetadata: messagesMetadata) {
Long senderId = (Long) messageMetadata[0];
Long receiverId = (Long) messageMetadata[1];
Long interlocutorId = ownerId.equals(senderId) ? receiverId : senderId;
if (!interlocutorsIdSet.contains(interlocutorId)) {
interlocutorsIdOrdered.add(interlocutorId);
interlocutorsIdSet.add(interlocutorId);
interlocutorsLastContact.put(interlocutorId, (Date) messageMetadata[2]);
}
}
List<CarabiUser> interlocutorsOrdered;
if (!interlocutorsIdOrdered.isEmpty()) {
			//fetch the interlocutors' data
List<CarabiUser> users;
if (search != null && !search.isEmpty()) {
TypedQuery<CarabiUser> getSelectedUsersListSearch = emKernel.createNamedQuery("getSelectedUsersListSearch", CarabiUser.class);
getSelectedUsersListSearch.setParameter("idlist", interlocutorsIdOrdered);
getSelectedUsersListSearch.setParameter("search", "%" + search.toUpperCase() + "%");
users = getSelectedUsersListSearch.getResultList();
} else {
TypedQuery<CarabiUser> getSelectedUsersList = emKernel.createNamedQuery("getSelectedUsersList", CarabiUser.class);
getSelectedUsersList.setParameter("idlist", interlocutorsIdOrdered);
users = getSelectedUsersList.getResultList();
}
			//restore the recency ordering of the dialogs
Map<Long, CarabiUser> usersPerId = new HashMap<>();
for (CarabiUser sender: users) {
usersPerId.put(sender.getId(), sender);
}
interlocutorsOrdered = new ArrayList<>(interlocutorsIdOrdered.size());
int i = 0;
for (Long senderId: interlocutorsIdOrdered) {
CarabiUser user = usersPerId.get(senderId);
if (user != null) {
interlocutorsOrdered.add(user);
i++;
}
if (size > 0 && i == size) {
break;
}
}
} else {
interlocutorsOrdered = new ArrayList<>(0);
}
return printUsersForOutput(client, interlocutorsOrdered, interlocutorsLastContact).toString();
}
private CarabiDate parceDate(String dateStr, String defaultVal) throws CarabiException {
CarabiDate date;
if (dateStr != null && !dateStr.isEmpty()) {
			try {//parse the date
date = new CarabiDate(dateStr);
} catch (IllegalArgumentException | ParseException e) {
throw new CarabiException("Illegal date: " + dateStr);
}
} else {
try {
date = new CarabiDate(defaultVal);
} catch (ParseException ex) {
Logger.getLogger(ChatBean.class.getName()).log(Level.SEVERE, null, ex);
throw new CarabiException("Illegal defailt date: " + defaultVal);
}
}
return date;
}
	/**
	 * Prints the list of users in the format used for stored queries.
	 * Adds information about unread messages and online status.
	 * @param usersList
	 * @return
	 */
private JsonObject printUsersForOutput(UserLogon client, List<CarabiUser> usersList, Map<Long, Date> userLastContact) throws CarabiException {
Set<String> onlineUsers;
JsonObject unreadMessagesSenders;
		if (!usersList.isEmpty()) {//if the list of users is empty, do not gather the extra statistics
onlineUsers = getOnlineUsers();
final String unreadMessagesSendersJson = getUnreadMessagesSenders(client);
unreadMessagesSenders = Json.createReader(new StringReader(unreadMessagesSendersJson)).readObject();
} else {
onlineUsers = new HashSet<>();
unreadMessagesSenders = Json.createObjectBuilder().build();
}
		//build the output
JsonArrayBuilder headerColumns = Json.createArrayBuilder();
headerColumns.add(Utls.parametersToJson("LOGIN", "VARCHAR2"));
headerColumns.add(Utls.parametersToJson("FIRSTNAME", "VARCHAR2"));
headerColumns.add(Utls.parametersToJson("MIDDLENAME", "VARCHAR2"));
headerColumns.add(Utls.parametersToJson("LASTNAME", "VARCHAR2"));
headerColumns.add(Utls.parametersToJson("DEPARTMENT", "VARCHAR2"));
headerColumns.add(Utls.parametersToJson("ROLE", "VARCHAR2"));
headerColumns.add(Utls.parametersToJson("SCHEMA_NAME", "VARCHAR2"));
headerColumns.add(Utls.parametersToJson("SCHEMA_DESCRIPTION", "VARCHAR2"));
headerColumns.add(Utls.parametersToJson("ONLINE", "NUMBER"));
headerColumns.add(Utls.parametersToJson("MESSAGES_UNREAD", "NUMBER"));
if (userLastContact != null) {
headerColumns.add(Utls.parametersToJson("LAST_CONTACT_DATE", "DATE"));
headerColumns.add(Utls.parametersToJson("LAST_CONTACT_DATE_STR", "VARCHAR2"));
}
JsonObjectBuilder result = Json.createObjectBuilder();
result.add("columns", headerColumns);
JsonArrayBuilder rows = Json.createArrayBuilder();
for (CarabiUser user: usersList) {
if (user.equals(client.getUser())) {
continue;
}
JsonArrayBuilder userJson = Json.createArrayBuilder();
final String login = user.getLogin();
Utls.addJsonObject(userJson, login);
Utls.addJsonObject(userJson, user.getFirstname());
Utls.addJsonObject(userJson, user.getMiddlename());
Utls.addJsonObject(userJson, user.getLastname());
Utls.addJsonObject(userJson, user.getDepartment());
Utls.addJsonObject(userJson, user.getRole());
if (user.getDefaultSchema() != null) {
Utls.addJsonObject(userJson, user.getDefaultSchema().getName());
Utls.addJsonObject(userJson, user.getDefaultSchema().getDescription());
} else {
userJson.addNull();
userJson.addNull();
}
if (onlineUsers.contains(user.getLogin())) {
userJson.add("1");
} else {
userJson.add("0");
}
JsonValue unreadMessages = unreadMessagesSenders.get(login);
if (unreadMessages == null) {
userJson.add("0");
} else {
userJson.add(unreadMessages.toString());
}
if (userLastContact != null) {
userJson.add(ThreadSafeDateParser.format(userLastContact.get(user.getId()), CarabiDate.pattern));
userJson.add(ThreadSafeDateParser.format(userLastContact.get(user.getId()), CarabiDate.patternShort));
}
rows.add(userJson);
}
result.add("list", rows);
return result.build();
}
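	/**
	 * Collects the logins of users currently connected to the Eventer of any known application server.
	 */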
private Set<String> getOnlineUsers() {
//emKernel.clear();
TypedQuery<CarabiAppServer> getSevers = emKernel.createNamedQuery("getAllServers", CarabiAppServer.class);
List<CarabiAppServer> servers = getSevers.getResultList();
		//Try to get the list of connected users from each Eventer
Set<String> result = new HashSet<>();
for (CarabiAppServer server: servers) {
try {
String usersOnlineJson = eventer.eventerSingleRequestResponse(server, "[]", new Holder<>((short)15), true);
logger.info("online from " + server.getComputer() + ": " + usersOnlineJson);
JsonReader reader = Json.createReader(new StringReader(usersOnlineJson));
JsonObject usersOnline = reader.readObject();
result.addAll(usersOnline.keySet());
} catch (IOException ex) {
Logger.getLogger(ChatBean.class.getName()).log(Level.SEVERE, null, ex);
}
}
return result;
}
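	/**
	 * Returns the dialog between the client and the given interlocutor (optionally filtered
	 * by date and search string) in the stored query output format.
	 */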
public String getDialog(UserLogon client, CarabiUser interlocutor, String afterDateStr, String search) throws CarabiException {
		//Switch to the client's server if necessary
CarabiAppServer targetServer = client.getUser().getMainServer();
if (!Settings.getCurrentServer().equals(targetServer)) {
return callGetDialogSoap(targetServer, client.getToken(), interlocutor, afterDateStr, search);
}
		//fetch the messages
CarabiDate afterDate = parceDate(afterDateStr, "01.01.1970");
final Long userId = client.getUser().getId();
final TypedQuery<ChatMessage> getDialog;
if (search != null && !search.equals("")) {
getDialog = emChat.createNamedQuery("searchInDialog", ChatMessage.class);
getDialog.setParameter("search", "%" + search.toUpperCase() + "%");
} else {
getDialog = emChat.createNamedQuery("getDialog", ChatMessage.class);
}
getDialog.setParameter("user", userId);
getDialog.setParameter("recently", afterDate);
getDialog.setParameter("interlocutor", interlocutor.getId());
List<ChatMessage> dialog = getDialog.getResultList();
		//build the output
JsonArrayBuilder headerColumns = Json.createArrayBuilder();
headerColumns.add(Utls.parametersToJson("MESSAGE_ID", "NUMBER"));
headerColumns.add(Utls.parametersToJson("SENDER", "VARCHAR2"));
headerColumns.add(Utls.parametersToJson("RECEIVER", "VARCHAR2"));
headerColumns.add(Utls.parametersToJson("MESSAGE_TEXT", "VARCHAR2"));
headerColumns.add(Utls.parametersToJson("ATTACHMENT", "NUMBER"));
headerColumns.add(Utls.parametersToJson("SENT", "DATE"));
headerColumns.add(Utls.parametersToJson("RECEIVED", "DATE"));
JsonObjectBuilder result = Json.createObjectBuilder();
result.add("columns", headerColumns);
JsonArrayBuilder rows = Json.createArrayBuilder();
for (ChatMessage message: dialog) {
JsonArrayBuilder messageJson = Json.createArrayBuilder();
Utls.addJsonObject(messageJson, message.getId().toString());
assert message.getOwnerId().equals(userId);
if (userId.equals(message.getReceiverId())) {
Utls.addJsonObject(messageJson, interlocutor.getLogin());//sender
Utls.addJsonObject(messageJson, client.getUser().getLogin());//receiver
} else if (userId.equals(message.getSenderId())) {
Utls.addJsonObject(messageJson, client.getUser().getLogin());//sender
Utls.addJsonObject(messageJson, interlocutor.getLogin());//receiver
} else {
logger.warning("message " + message.getId() + "do not have current user (" + userId + ") as sender or receiver");
continue;
}
Utls.addJsonObject(messageJson, message.getMessageText());
Utls.addJsonObject(messageJson, message.getAttachment() == null ? "0" : "1");
Utls.addJsonObject(messageJson, CarabiDate.wrap(message.getSent()));
Utls.addJsonObject(messageJson, CarabiDate.wrap(message.getReceived()));
rows.add(messageJson);
}
result.add("list", rows);
return result.build().toString();
}
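	/**
	 * Deletes the listed messages owned by the client. Attachments that are no longer
	 * referenced by any remaining message are removed together with their files, and
	 * the clients are notified through Eventer (event code 16).
	 * @param client the client session
	 * @param messagesList a string with a message id or a JSON array of ids
	 * @return the number of deleted messages
	 */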
public int deleteMessages(UserLogon client, String messagesList) throws CarabiException {
		//Switch to the client's server if necessary
CarabiAppServer targetServer = client.getUser().getMainServer();
if (!Settings.getCurrentServer().equals(targetServer)) {
return callDeleteMessagesSoap(targetServer, client.getToken(), messagesList);
}
List<Long> idList = parseMessagesIdList(messagesList);
		//Fetch the attachments of the messages being deleted
TypedQuery<FileOnServer> getUserMessagesAttachments = emChat.createNamedQuery("getUserMessagesAttachments", FileOnServer.class);
getUserMessagesAttachments.setParameter("user", client.getUser().getId());
getUserMessagesAttachments.setParameter("idlist", idList);
List<FileOnServer> userMessagesAttachments = getUserMessagesAttachments.getResultList();
		//Delete the messages
Query deleteMessagesList = emChat.createNamedQuery("deleteMessagesList");
deleteMessagesList.setParameter("user", client.getUser().getId());
deleteMessagesList.setParameter("idlist", idList);
int deletedSize = deleteMessagesList.executeUpdate();
		//Find which of the attachments are no longer used
		//(some may still be referenced by the interlocutor's copy of the message)
		//and delete them together with their files
for(FileOnServer attachment: userMessagesAttachments) {
Query getMessagesWithAttachment = emChat.createNamedQuery("getMessagesWithAttachment");
getMessagesWithAttachment.setParameter("attachment_id", attachment.getId());
List messagesWithAttachment = getMessagesWithAttachment.getResultList();
if (messagesWithAttachment.size() > 0) {
continue;
}
new File(attachment.getContentAddress()).delete();
emChat.remove(attachment);
}
		//Notify the clients with an event
try {
eventer.fireEvent("", client.getUser().getLogin(), (short) 16, messagesList);
} catch (IOException ex) {
Logger.getLogger(ChatBean.class.getName()).log(Level.SEVERE, null, ex);
}
return deletedSize;
}
	/**
	 * Returns the attachment of a message, if present.
	 * @param client the client session
	 * @param messageId id of the message with the attachment
	 * @return the message attachment, or null if there is none
	 * @throws ru.carabi.server.CarabiException if the message is not found or does not belong to the user
	 */
public FileOnServer getMessageAttachement(UserLogon client, Long messageId) throws CarabiException {
		//Switch to the client's server if necessary
CarabiAppServer targetServer = client.getUser().getMainServer();
if (!Settings.getCurrentServer().equals(targetServer)) {
return callGetMessageAttachementSoap(targetServer, client.getToken(), messageId);
}
ChatMessage messageWithAttachement = emChat.find(ChatMessage.class, messageId);
if (messageWithAttachement == null) {
RestException restException = new RestException("message " + messageId + " not found", Response.Status.NOT_FOUND);
throw new CarabiException(restException);
}
if (!client.getUser().getId().equals(messageWithAttachement.getOwnerId())) {
RestException restException = new RestException("message " + messageId + " does not belong to user " + client.getUser().getId(), Response.Status.CONFLICT);
throw new CarabiException(restException);
}
return messageWithAttachement.getAttachment();
}
	/**
	 * Creates a new {@link FileOnServer} object for an attachment and saves it to the database to generate the key.
	 * @param userFilename the file name supplied by the user
	 * @return the FileOnServer object with a generated ID
	 */
public FileOnServer createAttachment(String userFilename) {
FileOnServer newAttachment = new FileOnServer();
newAttachment.setName(userFilename);
newAttachment = emChat.merge(newAttachment);
emChat.flush();
newAttachment.setContentAddress(Settings.CHAT_ATTACHMENTS_LOCATION + "/" + newAttachment.getId() + "_" + userFilename);
return newAttachment;
}
	/**
	 * Re-saves the attachment ({@link FileOnServer}) object in the database.
	 */
public FileOnServer updateAttachment(FileOnServer attachment) {
return emChat.merge(attachment);
}
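	/**
	 * Broadcasts the user's online state through Eventer (event code 14). On disconnect
	 * the state is only broadcast after checking every Eventer that no other session
	 * of this user is still online.
	 */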
public void fireUserState(UserLogon logon, boolean online) throws IOException, CarabiException {
if (online) {
			//On connect, always fire the event.
eventer.fireEvent("", "", (short)14, "{\"login\":\"" + logon.getUser().getLogin() + "\",\"online\":true}");
} else {
			//On disconnect, check that the user has no other active sessions
TypedQuery<CarabiAppServer> getSevers = emKernel.createNamedQuery("getAllServers", CarabiAppServer.class);
List<CarabiAppServer> servers = getSevers.getResultList();
			//Check each Eventer for the presence of this user
boolean stillOnline = false;
for (CarabiAppServer server: servers) {
try {
String userOnlineJson = eventer.eventerSingleRequestResponse(server, "[\"" + logon.getUser().getLogin() + "\"]", new Holder<>((short)15), true);
logger.info("online from " + server.getComputer() + ": " + userOnlineJson);
JsonReader reader = Json.createReader(new StringReader(userOnlineJson));
JsonObject userOnline = reader.readObject();
if (userOnline.getBoolean(logon.getUser().getLogin())) {
stillOnline = true;
break;
}
} catch (IOException ex) {
Logger.getLogger(ChatBean.class.getName()).log(Level.SEVERE, null, ex);
}
}
if (!stillOnline) {
eventer.fireEvent("", "", (short)14, "{\"login\":\"" + logon.getUser().getLogin() + "\",\"online\":false}");
}
}
}
private Long callForwardMessageSoap(CarabiAppServer receiverServer, CarabiUser sender, CarabiUser receiver, String messageText, Long attachmentId) throws CarabiException {
try {
ChatService chatServicePort = getChatServicePort(receiverServer);
String token = chatServicePort.prepareToForward();
token = encrypt(token);
setCookie((BindingProvider)chatServicePort);
return chatServicePort.forwardMessage(token, sender.getLogin(), receiver.getLogin(), messageText, attachmentId);
} catch (MalformedURLException | GeneralSecurityException | WebServiceException ex) {
logger.log(Level.SEVERE, null, ex);
throw new CarabiException("Error on connecting to remote server: ", ex);
} catch (CarabiException_Exception ex) {
logger.log(Level.SEVERE, null, ex);
throw new CarabiException(ex);
}
}
private Long callPutMessageSoap(CarabiAppServer targetServer, CarabiUser owner, CarabiUser sender, CarabiUser receiver, Long receivedMessageId, Integer receivedMessageServerId, String messageText, Long attachmentId) throws CarabiException {
try {
ChatService chatServicePort = getChatServicePort(targetServer);
String token = chatServicePort.prepareToForward();
token = encrypt(token);
setCookie((BindingProvider)chatServicePort);
return chatServicePort.putMessage(token, owner.getLogin(), sender.getLogin(), receiver.getLogin(), receivedMessageId, receivedMessageServerId, messageText, attachmentId);
} catch (MalformedURLException | GeneralSecurityException | WebServiceException ex) {
logger.log(Level.SEVERE, null, ex);
throw new CarabiException("Error on connecting to remote server: " + ex.getMessage(), ex);
} catch (CarabiException_Exception ex) {
logger.log(Level.SEVERE, null, ex);
throw new CarabiException(ex);
}
}
private void callMarkReadSoap(CarabiAppServer targetServer, String clientToken, String messageList, boolean read) throws CarabiException {
try {
ChatService chatServicePort = getChatServicePort(targetServer);
chatServicePort.markRead(clientToken, messageList, read);
} catch (MalformedURLException ex) {
logger.log(Level.SEVERE, null, ex);
throw new CarabiException("Error on connecting to remote server: " + ex.getMessage(), ex);
} catch (CarabiException_Exception ex) {
logger.log(Level.SEVERE, null, ex);
throw new CarabiException(ex);
}
}
private void callMarkSentReceivedSoap(CarabiAppServer targetServer, String loginSender, String loginReceiver, String messagesList) throws CarabiException {
try {
ChatService chatServicePort = getChatServicePort(targetServer);
String token = chatServicePort.prepareToForward();
token = encrypt(token);
setCookie((BindingProvider)chatServicePort);
chatServicePort.markReceived(token, loginSender, loginReceiver, messagesList);
} catch (MalformedURLException | GeneralSecurityException ex) {
logger.log(Level.SEVERE, null, ex);
throw new CarabiException("Error on connecting to remote server: " + ex.getMessage(), ex);
} catch (CarabiException_Exception ex) {
logger.log(Level.SEVERE, null, ex);
throw new CarabiException(ex);
}
}
private Long callGetUnreadMessagesCountSoap(CarabiAppServer targetServer, String clientToken) throws CarabiException {
try {
ChatService chatServicePort = getChatServicePort(targetServer);
return chatServicePort.getUnreadMessagesCount(clientToken);
} catch (MalformedURLException ex) {
logger.log(Level.SEVERE, null, ex);
throw new CarabiException("Error on connecting to remote server: " + ex.getMessage(), ex);
} catch (CarabiException_Exception ex) {
logger.log(Level.SEVERE, null, ex);
throw new CarabiException(ex);
}
}
private String callGetUnreadMessagesSendersSoap(CarabiAppServer targetServer, String clientToken) throws CarabiException {
try {
ChatService chatServicePort = getChatServicePort(targetServer);
return chatServicePort.getUnreadMessagesSenders(clientToken);
} catch (MalformedURLException ex) {
logger.log(Level.SEVERE, null, ex);
throw new CarabiException("Error on connecting to remote server: " + ex.getMessage(), ex);
} catch (CarabiException_Exception ex) {
logger.log(Level.SEVERE, null, ex);
throw new CarabiException(ex);
}
}
private String callGetMessageSoap(CarabiAppServer targetServer, String clientToken, Long messageId, boolean read) throws CarabiException {
try {
ChatService chatServicePort = getChatServicePort(targetServer);
return chatServicePort.getMessage(clientToken, messageId, read);
} catch (MalformedURLException ex) {
logger.log(Level.SEVERE, null, ex);
throw new CarabiException("Error on connecting to remote server: " + ex.getMessage(), ex);
} catch (CarabiException_Exception ex) {
logger.log(Level.SEVERE, null, ex);
throw new CarabiException(ex);
}
}
private String callGetMessageDetailsSoap(CarabiAppServer targetServer, String clientToken, Long messageId, boolean read) throws CarabiException {
try {
ChatService chatServicePort = getChatServicePort(targetServer);
return chatServicePort.getMessageDetails(clientToken, messageId, read);
} catch (MalformedURLException ex) {
logger.log(Level.SEVERE, null, ex);
throw new CarabiException("Error on connecting to remote server: " + ex.getMessage(), ex);
} catch (CarabiException_Exception ex) {
logger.log(Level.SEVERE, null, ex);
throw new CarabiException(ex);
}
}
private String callGetLastInterlocutorsSoap(CarabiAppServer targetServer, String clientToken, int size, String afterDate, String search) throws CarabiException {
try {
ChatService chatServicePort = getChatServicePort(targetServer);
return chatServicePort.getLastInterlocutors(clientToken, size, afterDate, search);
} catch (MalformedURLException ex) {
logger.log(Level.SEVERE, null, ex);
throw new CarabiException("Error on connecting to remote server: " + ex.getMessage(), ex);
} catch (CarabiException_Exception ex) {
logger.log(Level.SEVERE, null, ex);
throw new CarabiException(ex);
}
}
private String callGetDialogSoap(CarabiAppServer targetServer, String clientToken, CarabiUser interlocutor, String afterDate, String search) throws CarabiException {
try {
ChatService chatServicePort = getChatServicePort(targetServer);
return chatServicePort.getDialog(clientToken, interlocutor.getLogin(), afterDate, search);
} catch (MalformedURLException ex) {
logger.log(Level.SEVERE, null, ex);
throw new CarabiException("Error on connecting to remote server: " + ex.getMessage(), ex);
} catch (CarabiException_Exception ex) {
logger.log(Level.SEVERE, null, ex);
throw new CarabiException(ex);
}
}
private int callDeleteMessagesSoap(CarabiAppServer targetServer, String clientToken, String messagesList) throws CarabiException {
try {
ChatService chatServicePort = getChatServicePort(targetServer);
return chatServicePort.deleteMessages(clientToken, messagesList);
} catch (MalformedURLException ex) {
logger.log(Level.SEVERE, null, ex);
throw new CarabiException("Error on connecting to remote server: " + ex.getMessage(), ex);
} catch (CarabiException_Exception ex) {
logger.log(Level.SEVERE, null, ex);
throw new CarabiException(ex);
}
}
private FileOnServer callGetMessageAttachementSoap(CarabiAppServer targetServer, String clientToken, Long messageId) throws CarabiException {
try {
ChatService chatServicePort = getChatServicePort(targetServer);
ru.carabi.stub.FileOnServer messageAttachementSoap = chatServicePort.getMessageAttachement(CarabiFunc.encrypt(clientToken), messageId);
FileOnServer messageAttachement = new FileOnServer();
messageAttachement.setContentAddress(messageAttachementSoap.getContentAddress());
messageAttachement.setContentLength(messageAttachementSoap.getContentLength());
messageAttachement.setId(messageAttachementSoap.getId());
messageAttachement.setMimeType(messageAttachementSoap.getMimeType());
messageAttachement.setName(messageAttachementSoap.getName());
return messageAttachement;
} catch (MalformedURLException ex) {
logger.log(Level.SEVERE, null, ex);
throw new CarabiException("Error on connecting to remote server: " + ex.getMessage(), ex);
} catch (CarabiException_Exception ex) {
logger.log(Level.SEVERE, null, ex);
throw new CarabiException(ex);
} catch (GeneralSecurityException ex) {
logger.log(Level.SEVERE, "Encrypting error", ex);
throw new CarabiException("Encrypting error");
}
}
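	/**
	 * Cached SOAP port to the ChatService; created lazily on first use and then reused
	 * for subsequent calls (the previously built port is returned regardless of the
	 * requested target server).
	 */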
private ChatService chatServicePort;
private ChatService getChatServicePort(CarabiAppServer targetServer) throws MalformedURLException {
if (chatServicePort != null) {
return chatServicePort;
}
StringBuilder url = new StringBuilder("http://");
url.append(targetServer.getComputer());
url.append(":");
url.append(targetServer.getGlassfishPort());
url.append("/");
url.append(targetServer.getContextroot());
url.append("/ChatService?wsdl");
ChatService_Service chatService = new ChatService_Service(new URL(url.toString()));
chatServicePort = chatService.getChatServicePort();
return chatServicePort;
}
	/**
	 * Sends a message to a list of receivers. Uses {@link #sendMessage(ru.carabi.server.entities.CarabiUser, ru.carabi.server.entities.CarabiUser, java.lang.String)}
	 * @param sender the sender
	 * @param receiversArray array of receiver logins
	 * @param messageText message text
	 * @return array of IDs of the sent messages
	 * @throws CarabiException
	 */
public Long[] sendToReceivers(CarabiUser sender, String[] receiversArray, String messageText) throws CarabiException {
Long[] sentMessagesId = new Long[receiversArray.length];
int i = 0;
for (String login: receiversArray) {
CarabiUser receiver = admin.findUser(login);
sentMessagesId[i] = sendMessage(sender, receiver, messageText, null, null);
i++;
}
return sentMessagesId;
}
private List<Long> parseMessagesIdList(String messagesIdList) throws CarabiException {
long id = -1L;
		//check whether the parameter contains a single number
try {
id = Long.parseLong(messagesIdList);
} catch (NumberFormatException e){}
List<Long> idList;
		if (id == -1L) {//if not, parse it as an array
try {
JsonArray idArrayJson = Json.createReader(new StringReader(messagesIdList)).readArray();
idList = new ArrayList<Long>(idArrayJson.size());
for (Iterator<JsonValue> iterator = idArrayJson.iterator(); iterator.hasNext();) {
JsonValue value = iterator.next();
id = Long.parseLong(value.toString());
idList.add(id);
}
} catch (Exception e) {
throw new CarabiException("input incorrect (not int, nor array of int found)");
}
} else {
idList = new ArrayList<>(0);
idList.add(id);
}
return idList;
}
}
|
chat markSentReceived fix
|
src/main/java/ru/carabi/server/kernel/ChatBean.java
|
chat markSentReceived fix
|
<ide><path>rc/main/java/ru/carabi/server/kernel/ChatBean.java
<ide> import javax.json.JsonReader;
<ide> import javax.json.JsonValue;
<ide> import javax.persistence.EntityManager;
<add>import javax.persistence.NoResultException;
<ide> import javax.persistence.PersistenceContext;
<ide> import javax.persistence.Query;
<ide> import javax.persistence.TypedQuery;
<ide> TypedQuery<ChatMessage> getSentByReceived = emChat.createNamedQuery("getSentByReceived", ChatMessage.class);
<ide> getSentByReceived.setParameter("received", messageId);
<ide> getSentByReceived.setParameter("server", receiver.getMainServer().getId());
<del> ChatMessage sentMessage = getSentByReceived.getSingleResult();
<del> //проверяем, что письмо принадлежит отправителю
<del> if (!sender.getId().equals(sentMessage.getOwnerId())) {
<del> throw new CarabiException("Message does not belong to this user");
<del> }
<del> if (!sender.getId().equals(sentMessage.getSenderId())) {
<del> throw new CarabiException("Message is not outcome");
<del> }
<del> //помечаем
<del> sentMessage.setReceived(new Date());
<del> emChat.merge(sentMessage);
<del> sentMessages.add(sentMessage.getId());
<add> try {
<add> ChatMessage sentMessage = getSentByReceived.getSingleResult();
<add> //check that the message belongs to the sender
<add> if (!sender.getId().equals(sentMessage.getOwnerId())) {
<add> throw new CarabiException("Message does not belong to this user");
<add> }
<add> if (!sender.getId().equals(sentMessage.getSenderId())) {
<add> throw new CarabiException("Message is not outcome");
<add> }
<add> //mark it
<add> sentMessage.setReceived(new Date());
<add> emChat.merge(sentMessage);
<add> sentMessages.add(sentMessage.getId());
<add> } catch (NoResultException e) {
<add> //the sender deleted the sent message before the recipient read it
<add> }
<ide> }
<ide> emChat.flush();
<ide> try {
|
|
Java
|
apache-2.0
|
9525768ab2ea7172832d186452a4bb631020eb87
| 0 |
jingwei/krati,jingwei/krati,linkedin/krati,linkedin/krati,linkedin/krati,jingwei/krati
|
/*
* Copyright (c) 2010-2012 LinkedIn, Inc
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package krati.core.segment;
import java.io.File;
import java.io.FileFilter;
import java.io.IOException;
import java.util.Comparator;
import java.util.LinkedList;
import java.util.Map;
import java.util.HashMap;
import java.util.Arrays;
import java.util.List;
import java.util.ArrayList;
import krati.Mode;
import krati.io.Closeable;
import org.apache.log4j.Logger;
/**
* SegmentManager
*
* <pre>
* SegmentManager manager = new SegmentManager(...);
* Segment segment = manager.nextSegment();
*
* while(...) {
* try {
* segment.append(...);
* } catch(SegmentOverflowException e) {
* segment.force();
* manager.updateMeta();
* segment = manager.nextSegment();
* }
* }
* </pre>
*
* @author jwu
* @since 02/05, 2010
*
* <p>
* 05/24, 2010 - Always try to open the manager upon call to SegmentManager.getInstance(...) <br/>
* 02/14, 2012 - Remove the last segment file after being freed <br/>
*/
public final class SegmentManager implements Closeable {
private final static Logger _log = Logger.getLogger(SegmentManager.class);
private final static Map<String, SegmentManager> _segManagerMap = new HashMap<String, SegmentManager>();
/**
* The list of segments.
*/
private final List<Segment> _segList = new ArrayList<Segment>(100);
/**
* The list of segments that were recycled for reuse.
*/
private final LinkedList<Segment> _recycleList = new LinkedList<Segment>();
/**
* The segment factory.
*/
private final SegmentFactory _segFactory;
/**
* The home path where all the segment files are located.
*/
private final String _segHomePath;
/**
* The segment file size in MB.
*/
private final int _segFileSizeMB;
/**
* The limit on the number of recycled segments.
*/
private final int _recycleLimit;
/**
* The meta data for all the managed segments.
*/
private volatile SegmentMeta _segMeta = null;
/**
* The current segment.
*/
private volatile Segment _segCurrent = null;
/**
* The mode can only be <code>Mode.INIT</code>, <code>Mode.OPEN</code> and <code>Mode.CLOSED</code>.
*/
private volatile Mode _mode = Mode.INIT;
private SegmentManager(String segmentHomePath) throws IOException {
this(segmentHomePath, new MappedSegmentFactory());
}
private SegmentManager(String segmentHomePath, SegmentFactory segmentFactory) throws IOException {
this(segmentHomePath, segmentFactory, Segment.defaultSegmentFileSizeMB);
}
private SegmentManager(String segmentHomePath, SegmentFactory segmentFactory, int segmentFileSizeMB) throws IOException {
_log.info("init segHomePath=" + segmentHomePath + " segFileSizeMB=" + segmentFileSizeMB);
this._segFactory = segmentFactory;
this._segHomePath = segmentHomePath;
this._segFileSizeMB = segmentFileSizeMB;
this._recycleLimit = computeRecycleLimit(segmentFileSizeMB);
this.open();
}
private int computeRecycleLimit(int segmentFileSizeMB) {
// Should always return an integer greater than zero.
return (segmentFileSizeMB <= 64) ? 5 : ((segmentFileSizeMB <= 256) ? 3 : 2);
}
public int getSegmentFileSizeMB() {
return _segFileSizeMB;
}
public String getSegmentHomePath() {
return _segHomePath;
}
public SegmentFactory getSegmentFactory() {
return _segFactory;
}
public Segment getCurrentSegment() {
return _segCurrent;
}
public Segment getSegment(int index) {
return _segList.get(index);
}
public int getSegmentCount() {
return _segList.size();
}
public int getLiveSegmentCount() {
int num = 0;
for (int i = 0; i < _segList.size(); i++) {
if (_segList.get(i) != null)
num++;
}
return num;
}
public synchronized void clear() {
clearInternal(true /* CLEAR META */);
}
/**
* Frees a segment.
*/
public synchronized boolean freeSegment(Segment seg) throws IOException {
if (seg == null)
return false;
int segId = seg.getSegmentId();
if (segId < _segList.size() && _segList.get(segId) == seg) {
_segList.set(segId, null);
seg.close(false);
if(segId == (_segList.size() - 1)) {
try {
// Delete the last segment.
_segList.remove(segId);
seg.getSegmentFile().delete();
_log.info("Segment " + seg.getSegmentId() + " deleted");
} catch(Exception e) {
_log.warn("Segment " + seg.getSegmentId() + " not deleted", e);
}
} else {
if (seg.isRecyclable() && recycle(seg)) {
_log.info("Segment " + seg.getSegmentId() + " recycled");
} else {
_log.info("Segment " + seg.getSegmentId() + " freed");
}
}
return true;
}
return false;
}
/**
* Gets the next segment available for read and write.
*/
public synchronized Segment nextSegment() throws IOException {
_segCurrent = nextSegment(false);
return _segCurrent;
}
/**
* Gets the next segment available for read and write.
*
* @param newOnly
* If true, create a new segment from scratch. Otherwise, reuse
* the first free segment.
* @return
* @throws IOException
*/
private synchronized Segment nextSegment(boolean newOnly) throws IOException {
int index;
Segment seg;
if (newOnly) {
index = _segList.size();
} else {
if (_recycleList.size() > 0) {
seg = _recycleList.remove();
seg.reinit();
_segList.set(seg.getSegmentId(), seg);
_log.info("reinit Segment " + seg.getSegmentId());
return seg;
}
for (index = 0; index < _segList.size(); index++) {
if (_segList.get(index) == null)
break;
}
}
// Always create next segment as READ_WRITE
File segFile = new File(_segHomePath, index + ".seg");
seg = getSegmentFactory().createSegment(index, segFile, _segFileSizeMB, Segment.Mode.READ_WRITE);
if (index < _segList.size())
_segList.set(index, seg);
else
_segList.add(seg);
return seg;
}
private void initMeta() throws IOException {
_segMeta = new SegmentMeta(new File(_segHomePath, ".meta"));
}
private void initSegs() throws IOException {
int loaded = 0;
File[] segFiles = listSegmentFiles();
if (segFiles.length == 0) {
return;
}
try {
for (int i = 0; i < segFiles.length; i++) {
File segFile = segFiles[i];
int segId = Integer.parseInt(segFile.getName().substring(0, segFile.getName().indexOf('.')));
if (segId != i) {
throw new IOException("Segment file " + i + ".seg missing");
}
if (getMeta().hasSegmentInService(segId)) {
// Always load a live segment as READ_ONLY
Segment s = getSegmentFactory().createSegment(segId, segFile, _segFileSizeMB, Segment.Mode.READ_ONLY);
s.incrLoadSize(getMeta().getSegmentLoadSize(segId));
_segList.add(s);
loaded++;
} else {
// Segment is not live and is free for reuse
_segList.add(null);
}
}
} catch (IOException e) {
_log.error(e.getMessage());
clearInternal(false /* DO NOT CLEAR META */);
throw e;
}
_log.info("loaded: " + loaded + "/" + segFiles.length);
}
private void clearInternal(boolean clearMeta) {
// Close all known segments
for(int segId = 0, cnt = _segList.size(); segId < cnt; segId++) {
Segment seg = _segList.get(segId);
if(seg != null) {
try {
seg.close(false);
} catch (IOException e) {
_log.warn("failed to close segment " + seg.getSegmentId());
} finally {
_segList.set(segId, null);
}
}
}
if(clearMeta) {
try {
updateMeta();
} catch (IOException e) {
_log.warn("failed to clear segment meta");
}
}
_segList.clear();
_segCurrent = null;
_recycleList.clear();
}
/**
* Recycle a free segment into the <code>_recycleList</code>.
*
* @param seg - the free Segment
* @return <code>true</code> if the specified segment is added to the <code>_recycleList</code>.
*/
private boolean recycle(Segment seg) {
if(_recycleList.size() < _recycleLimit) {
return _recycleList.add(seg);
}
return false;
}
protected File[] listSegmentFiles() {
File segDir = new File(_segHomePath);
File[] segFiles = segDir.listFiles(new FileFilter() {
@Override
public boolean accept(File filePath) {
String fileName = filePath.getName();
if (fileName.matches("^[0-9]+\\.seg$")) {
return true;
}
return false;
}
});
if (segFiles == null) {
segFiles = new File[0];
} else if (segFiles.length > 0) {
Arrays.sort(segFiles, new Comparator<File>() {
@Override
public int compare(File f1, File f2) {
int segId1 = Integer.parseInt(f1.getName().substring(0, f1.getName().indexOf('.')));
int segId2 = Integer.parseInt(f2.getName().substring(0, f2.getName().indexOf('.')));
return (segId1 < segId2) ? -1 : ((segId1 == segId2) ? 0 : 1);
}
});
}
return segFiles;
}
public SegmentMeta getMeta() {
return _segMeta;
}
public synchronized void updateMeta() throws IOException {
_segMeta.wrap(this);
}
public synchronized static SegmentManager getInstance(String segmentHomePath, SegmentFactory segmentFactory, int segmentFileSizeMB) throws IOException {
if (segmentFileSizeMB < Segment.minSegmentFileSizeMB) {
throw new IllegalArgumentException("Invalid argument segmentFileSizeMB " + segmentFileSizeMB + ", smaller than " + Segment.minSegmentFileSizeMB);
}
if (segmentFileSizeMB > Segment.maxSegmentFileSizeMB) {
throw new IllegalArgumentException("Invalid argument segmentFileSizeMB " + segmentFileSizeMB + ", greater than " + Segment.maxSegmentFileSizeMB);
}
File segDir = new File(segmentHomePath);
if (!segDir.exists()) {
if (!segDir.mkdirs()) {
throw new IOException("Failed to create directory " + segmentHomePath);
}
}
if (segDir.isFile()) {
throw new IOException("File " + segmentHomePath + " is not a directory");
}
String key = segDir.getCanonicalPath();
SegmentManager mgr = _segManagerMap.get(key);
if (mgr == null) {
mgr = new SegmentManager(key, segmentFactory, segmentFileSizeMB);
_segManagerMap.put(key, mgr);
}
mgr.open();
return mgr;
}
@Override
public synchronized void close() throws IOException {
if(_mode == Mode.CLOSED) {
return;
}
try {
clearInternal(false /* DO NOT CLEAR META */);
if(_segMeta != null) {
_segMeta.close();
}
} catch(Exception e) {
_log.error("Failed to close", e);
} finally {
_segMeta = null;
}
// The manager is closed properly now.
_mode = Mode.CLOSED;
}
@Override
public synchronized void open() throws IOException {
if(_mode == Mode.OPEN) return;
// Initialize segment meta data.
initMeta();
// Initialize all known segments.
try {
initSegs();
} catch(Exception e) {
this.close();
// Throw original exception if possible
throw (e instanceof IOException) ? (IOException)e : new IOException(e);
}
// The manager is opened properly now.
_mode = Mode.OPEN;
}
@Override
public boolean isOpen() {
return _mode == Mode.OPEN;
}
}
|
krati-main/src/main/java/krati/core/segment/SegmentManager.java
|
/*
* Copyright (c) 2010-2012 LinkedIn, Inc
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package krati.core.segment;
import java.io.File;
import java.io.FileFilter;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.nio.channels.FileChannel;
import java.nio.channels.FileLock;
import java.util.Comparator;
import java.util.LinkedList;
import java.util.Map;
import java.util.HashMap;
import java.util.Arrays;
import java.util.List;
import java.util.ArrayList;
import krati.Mode;
import krati.io.Closeable;
import org.apache.log4j.Logger;
/**
* SegmentManager
*
* <pre>
* SegmentManager manager = new SegmentManager(...);
* Segment segment = manager.nextSegment();
*
* while(...) {
* try {
* segment.append(...);
* } catch(SegmentOverflowException e) {
* segment.force();
* manager.updateMeta();
* segment = manager.nextSegment();
* }
* }
* </pre>
*
* @author jwu
* @since 02/05, 2010
*
* <p>
* 05/24, 2010 - Always try to open the manager upon call to SegmentManager.getInstance(...) <br/>
* 02/14, 2012 - Remove the last segment file after being freed <br/>
*/
public final class SegmentManager implements Closeable {
private final static Logger _log = Logger.getLogger(SegmentManager.class);
private final static Map<String, SegmentManager> _segManagerMap = new HashMap<String, SegmentManager>();
/**
* The list of segments.
*/
private final List<Segment> _segList = new ArrayList<Segment>(100);
/**
* The list of segments that were recycled for reuse.
*/
private final LinkedList<Segment> _recycleList = new LinkedList<Segment>();
/**
* The segment factory.
*/
private final SegmentFactory _segFactory;
/**
* The home path where all the segment files are located.
*/
private final String _segHomePath;
/**
* The segment file size in MB.
*/
private final int _segFileSizeMB;
/**
* The limit on the number of recycled segments.
*/
private final int _recycleLimit;
/**
* The meta data for all the managed segments.
*/
private volatile SegmentMeta _segMeta = null;
/**
* The current segment.
*/
private volatile Segment _segCurrent = null;
/**
* The mode can only be <code>Mode.INIT</code>, <code>Mode.OPEN</code> and <code>Mode.CLOSED</code>.
*/
private volatile Mode _mode = Mode.INIT;
private SegmentManager(String segmentHomePath) throws IOException {
this(segmentHomePath, new MappedSegmentFactory());
}
private SegmentManager(String segmentHomePath, SegmentFactory segmentFactory) throws IOException {
this(segmentHomePath, segmentFactory, Segment.defaultSegmentFileSizeMB);
}
private SegmentManager(String segmentHomePath, SegmentFactory segmentFactory, int segmentFileSizeMB) throws IOException {
_log.info("init segHomePath=" + segmentHomePath + " segFileSizeMB=" + segmentFileSizeMB);
this._segFactory = segmentFactory;
this._segHomePath = segmentHomePath;
this._segFileSizeMB = segmentFileSizeMB;
this._recycleLimit = computeRecycleLimit(segmentFileSizeMB);
this.open();
}
private int computeRecycleLimit(int segmentFileSizeMB) {
// Should always return an integer greater than zero.
return (segmentFileSizeMB <= 64) ? 5 : ((segmentFileSizeMB <= 256) ? 3 : 2);
}
public int getSegmentFileSizeMB() {
return _segFileSizeMB;
}
public String getSegmentHomePath() {
return _segHomePath;
}
public SegmentFactory getSegmentFactory() {
return _segFactory;
}
public Segment getCurrentSegment() {
return _segCurrent;
}
public Segment getSegment(int index) {
return _segList.get(index);
}
public int getSegmentCount() {
return _segList.size();
}
public int getLiveSegmentCount() {
int num = 0;
for (int i = 0; i < _segList.size(); i++) {
if (_segList.get(i) != null)
num++;
}
return num;
}
public synchronized void clear() {
clearInternal(true /* CLEAR META */);
}
/**
* Frees a segment.
*/
public synchronized boolean freeSegment(Segment seg) throws IOException {
if (seg == null)
return false;
int segId = seg.getSegmentId();
if (segId < _segList.size() && _segList.get(segId) == seg) {
_segList.set(segId, null);
seg.close(false);
if(segId == (_segList.size() - 1)) {
try {
// Delete the last segment.
_segList.remove(segId);
seg.getSegmentFile().delete();
_log.info("Segment " + seg.getSegmentId() + " deleted");
} catch(Exception e) {
_log.warn("Segment " + seg.getSegmentId() + " not deleted", e);
}
} else {
if (seg.isRecyclable() && recycle(seg)) {
_log.info("Segment " + seg.getSegmentId() + " recycled");
} else {
_log.info("Segment " + seg.getSegmentId() + " freed");
}
}
return true;
}
return false;
}
/**
* Gets the next segment available for read and write.
*/
public synchronized Segment nextSegment() throws IOException {
_segCurrent = nextSegment(false);
return _segCurrent;
}
/**
* Gets the next segment available for read and write.
*
* @param newOnly
* If true, create a new segment from scratch. Otherwise, reuse
* the first free segment.
* @return
* @throws IOException
*/
private synchronized Segment nextSegment(boolean newOnly) throws IOException {
int index;
Segment seg;
if (newOnly) {
index = _segList.size();
} else {
if (_recycleList.size() > 0) {
seg = _recycleList.remove();
seg.reinit();
_segList.set(seg.getSegmentId(), seg);
_log.info("reinit Segment " + seg.getSegmentId());
return seg;
}
for (index = 0; index < _segList.size(); index++) {
if (_segList.get(index) == null)
break;
}
}
// Always create next segment as READ_WRITE
File segFile = new File(_segHomePath, index + ".seg");
seg = getSegmentFactory().createSegment(index, segFile, _segFileSizeMB, Segment.Mode.READ_WRITE);
if (index < _segList.size())
_segList.set(index, seg);
else
_segList.add(seg);
return seg;
}
private void initMeta() throws IOException {
_segMeta = new SegmentMeta(new File(_segHomePath, ".meta"));
}
private void initSegs() throws IOException {
int loaded = 0;
File[] segFiles = listSegmentFiles();
if (segFiles.length == 0) {
return;
}
try {
for (int i = 0; i < segFiles.length; i++) {
File segFile = segFiles[i];
int segId = Integer.parseInt(segFile.getName().substring(0, segFile.getName().indexOf('.')));
if (segId != i) {
throw new IOException("Segment file " + i + ".seg missing");
}
if (getMeta().hasSegmentInService(segId)) {
// Always load a live segment as READ_ONLY
Segment s = getSegmentFactory().createSegment(segId, segFile, _segFileSizeMB, Segment.Mode.READ_ONLY);
s.incrLoadSize(getMeta().getSegmentLoadSize(segId));
_segList.add(s);
loaded++;
} else {
// Segment is not live and is free for reuse
_segList.add(null);
}
}
} catch (IOException e) {
_log.error(e.getMessage());
clearInternal(false /* DO NOT CLEAR META */);
throw e;
}
_log.info("loaded: " + loaded + "/" + segFiles.length);
}
private void clearInternal(boolean clearMeta) {
// Close all known segments
for(int segId = 0, cnt = _segList.size(); segId < cnt; segId++) {
Segment seg = _segList.get(segId);
if(seg != null) {
try {
seg.close(false);
} catch (IOException e) {
_log.warn("failed to close segment " + seg.getSegmentId());
} finally {
_segList.set(segId, null);
}
}
}
if(clearMeta) {
try {
updateMeta();
} catch (IOException e) {
_log.warn("failed to clear segment meta");
}
}
_segList.clear();
_segCurrent = null;
_recycleList.clear();
}
/**
* Recycle a free segment into the <code>_recycleList</code>.
*
* @param seg - the free Segment
* @return <code>true</code> if the specified segment is added to the <code>_recycleList</code>.
*/
private boolean recycle(Segment seg) {
if(_recycleList.size() < _recycleLimit) {
return _recycleList.add(seg);
}
return false;
}
protected File[] listSegmentFiles() {
File segDir = new File(_segHomePath);
File[] segFiles = segDir.listFiles(new FileFilter() {
@Override
public boolean accept(File filePath) {
String fileName = filePath.getName();
if (fileName.matches("^[0-9]+\\.seg$")) {
return true;
}
return false;
}
});
if (segFiles == null) {
segFiles = new File[0];
} else if (segFiles.length > 0) {
Arrays.sort(segFiles, new Comparator<File>() {
@Override
public int compare(File f1, File f2) {
int segId1 = Integer.parseInt(f1.getName().substring(0, f1.getName().indexOf('.')));
int segId2 = Integer.parseInt(f2.getName().substring(0, f2.getName().indexOf('.')));
return (segId1 < segId2) ? -1 : ((segId1 == segId2) ? 0 : 1);
}
});
}
return segFiles;
}
public SegmentMeta getMeta() {
return _segMeta;
}
public synchronized void updateMeta() throws IOException {
FileLock lock = null;
FileChannel channel = null;
try {
channel = new RandomAccessFile(getMeta().getMetaFile(), "rw").getChannel();
lock = channel.lock(0, Long.MAX_VALUE, false); // get exclusive file lock
_segMeta.wrap(this);
} finally {
if (lock != null)
lock.release();
if (channel != null)
channel.close();
}
}
public synchronized static SegmentManager getInstance(String segmentHomePath, SegmentFactory segmentFactory, int segmentFileSizeMB) throws IOException {
if (segmentFileSizeMB < Segment.minSegmentFileSizeMB) {
throw new IllegalArgumentException("Invalid argument segmentFileSizeMB " + segmentFileSizeMB + ", smaller than " + Segment.minSegmentFileSizeMB);
}
if (segmentFileSizeMB > Segment.maxSegmentFileSizeMB) {
throw new IllegalArgumentException("Invalid argument segmentFileSizeMB " + segmentFileSizeMB + ", greater than " + Segment.maxSegmentFileSizeMB);
}
File segDir = new File(segmentHomePath);
if (!segDir.exists()) {
if (!segDir.mkdirs()) {
throw new IOException("Failed to create directory " + segmentHomePath);
}
}
if (segDir.isFile()) {
throw new IOException("File " + segmentHomePath + " is not a directory");
}
String key = segDir.getCanonicalPath();
SegmentManager mgr = _segManagerMap.get(key);
if (mgr == null) {
mgr = new SegmentManager(key, segmentFactory, segmentFileSizeMB);
_segManagerMap.put(key, mgr);
}
mgr.open();
return mgr;
}
@Override
public synchronized void close() throws IOException {
if(_mode == Mode.CLOSED) {
return;
}
try {
clearInternal(false /* DO NOT CLEAR META */);
if(_segMeta != null) {
_segMeta.close();
}
} catch(Exception e) {
_log.error("Failed to close", e);
} finally {
_segMeta = null;
}
// The manager is closed properly now.
_mode = Mode.CLOSED;
}
@Override
public synchronized void open() throws IOException {
if(_mode == Mode.OPEN) return;
// Initialize segment meta data.
initMeta();
// Initialize all known segments.
try {
initSegs();
} catch(Exception e) {
this.close();
// Throw original exception if possible
throw (e instanceof IOException) ? (IOException)e : new IOException(e);
}
// The manager is opened properly now.
_mode = Mode.OPEN;
}
@Override
public boolean isOpen() {
return _mode == Mode.OPEN;
}
}
|
removed unnecessary locking
|
krati-main/src/main/java/krati/core/segment/SegmentManager.java
|
removed unnecessary locking
|
<ide><path>rati-main/src/main/java/krati/core/segment/SegmentManager.java
<ide> import java.io.File;
<ide> import java.io.FileFilter;
<ide> import java.io.IOException;
<del>import java.io.RandomAccessFile;
<del>import java.nio.channels.FileChannel;
<del>import java.nio.channels.FileLock;
<ide> import java.util.Comparator;
<ide> import java.util.LinkedList;
<ide> import java.util.Map;
<ide> }
<ide>
<ide> public synchronized void updateMeta() throws IOException {
<del> FileLock lock = null;
<del> FileChannel channel = null;
<del>
<del> try {
<del> channel = new RandomAccessFile(getMeta().getMetaFile(), "rw").getChannel();
<del> lock = channel.lock(0, Long.MAX_VALUE, false); // get exclusive file lock
<del> _segMeta.wrap(this);
<del> } finally {
<del> if (lock != null)
<del> lock.release();
<del> if (channel != null)
<del> channel.close();
<del> }
<add> _segMeta.wrap(this);
<ide> }
<ide>
<ide> public synchronized static SegmentManager getInstance(String segmentHomePath, SegmentFactory segmentFactory, int segmentFileSizeMB) throws IOException {
|
|
Java
|
apache-2.0
|
d19c78bf3a1cc97d941932f31e51e3640ee6b755
| 0 |
subhrajyotim/camunda-bpm-platform,ingorichtsmeier/camunda-bpm-platform,ingorichtsmeier/camunda-bpm-platform,camunda/camunda-bpm-platform,filiphr/camunda-bpm-platform,subhrajyotim/camunda-bpm-platform,jangalinski/camunda-bpm-platform,filiphr/camunda-bpm-platform,jangalinski/camunda-bpm-platform,jangalinski/camunda-bpm-platform,subhrajyotim/camunda-bpm-platform,ingorichtsmeier/camunda-bpm-platform,bentrm/camunda-bpm-platform,AlexMinsk/camunda-bpm-platform,subhrajyotim/camunda-bpm-platform,AlexMinsk/camunda-bpm-platform,ingorichtsmeier/camunda-bpm-platform,falko/camunda-bpm-platform,langfr/camunda-bpm-platform,bentrm/camunda-bpm-platform,ingorichtsmeier/camunda-bpm-platform,ingorichtsmeier/camunda-bpm-platform,subhrajyotim/camunda-bpm-platform,AlexMinsk/camunda-bpm-platform,filiphr/camunda-bpm-platform,plexiti/camunda-bpm-platform,langfr/camunda-bpm-platform,subhrajyotim/camunda-bpm-platform,AlexMinsk/camunda-bpm-platform,xasx/camunda-bpm-platform,jangalinski/camunda-bpm-platform,falko/camunda-bpm-platform,camunda/camunda-bpm-platform,AlexMinsk/camunda-bpm-platform,filiphr/camunda-bpm-platform,xasx/camunda-bpm-platform,xasx/camunda-bpm-platform,bentrm/camunda-bpm-platform,camunda/camunda-bpm-platform,plexiti/camunda-bpm-platform,bentrm/camunda-bpm-platform,plexiti/camunda-bpm-platform,xasx/camunda-bpm-platform,AlexMinsk/camunda-bpm-platform,plexiti/camunda-bpm-platform,plexiti/camunda-bpm-platform,bentrm/camunda-bpm-platform,langfr/camunda-bpm-platform,camunda/camunda-bpm-platform,langfr/camunda-bpm-platform,bentrm/camunda-bpm-platform,langfr/camunda-bpm-platform,falko/camunda-bpm-platform,xasx/camunda-bpm-platform,filiphr/camunda-bpm-platform,camunda/camunda-bpm-platform,jangalinski/camunda-bpm-platform,xasx/camunda-bpm-platform,jangalinski/camunda-bpm-platform,filiphr/camunda-bpm-platform,plexiti/camunda-bpm-platform,langfr/camunda-bpm-platform,falko/camunda-bpm-platform,falko/camunda-bpm-platform,falko/camunda-bpm-platform,camunda/camunda-bpm-platform
|
/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.camunda.bpm.engine.rest.helper;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import org.camunda.bpm.dmn.engine.DmnDecisionRuleResult;
import org.camunda.bpm.dmn.engine.DmnDecisionTableResult;
import org.camunda.bpm.engine.variable.value.TypedValue;
/**
* @author Philipp Ossler
*/
public class MockDecisionTableResultBuilder {
protected List<DmnDecisionRuleResult> ruleResults = new ArrayList<DmnDecisionRuleResult>();
public MockDecisionRuleResultBuilder ruleResult() {
return new MockDecisionRuleResultBuilder(this);
}
public void addRuleResult(DmnDecisionRuleResult ruleResult) {
ruleResults.add(ruleResult);
}
public DmnDecisionTableResult build() {
SimpleDecisionTableResult decisionTableResult = new SimpleDecisionTableResult();
decisionTableResult.addAll(ruleResults);
return decisionTableResult;
}
protected class SimpleDecisionTableResult extends ArrayList<DmnDecisionRuleResult> implements DmnDecisionTableResult {
private static final long serialVersionUID = 1L;
@Override
public DmnDecisionRuleResult getFirstResult() {
throw new UnsupportedOperationException();
}
@Override
public DmnDecisionRuleResult getSingleResult() {
throw new UnsupportedOperationException();
}
@Override
public <T> List<T> collectEntries(String outputName) {
throw new UnsupportedOperationException();
}
@Override
public List<Map<String, Object>> getResultList() {
throw new UnsupportedOperationException();
}
@Override
public <T> T getSingleEntry() {
throw new UnsupportedOperationException();
}
@Override
public <T extends TypedValue> T getSingleEntryTyped() {
throw new UnsupportedOperationException();
}
}
}
|
engine-rest/engine-rest/src/test/java/org/camunda/bpm/engine/rest/helper/MockDecisionTableResultBuilder.java
|
/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.camunda.bpm.engine.rest.helper;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import org.camunda.bpm.dmn.engine.DmnDecisionRuleResult;
import org.camunda.bpm.dmn.engine.DmnDecisionTableResult;
/**
* @author Philipp Ossler
*/
public class MockDecisionTableResultBuilder {
protected List<DmnDecisionRuleResult> ruleResults = new ArrayList<DmnDecisionRuleResult>();
public MockDecisionRuleResultBuilder ruleResult() {
return new MockDecisionRuleResultBuilder(this);
}
public void addRuleResult(DmnDecisionRuleResult ruleResult) {
ruleResults.add(ruleResult);
}
public DmnDecisionTableResult build() {
SimpleDecisionTableResult decisionTableResult = new SimpleDecisionTableResult();
decisionTableResult.addAll(ruleResults);
return decisionTableResult;
}
protected class SimpleDecisionTableResult extends ArrayList<DmnDecisionRuleResult> implements DmnDecisionTableResult {
private static final long serialVersionUID = 1L;
@Override
public DmnDecisionRuleResult getFirstResult() {
throw new UnsupportedOperationException();
}
@Override
public DmnDecisionRuleResult getSingleResult() {
throw new UnsupportedOperationException();
}
@Override
public <T> List<T> collectEntries(String outputName) {
throw new UnsupportedOperationException();
}
@Override
public List<Map<String, Object>> getResultList() {
throw new UnsupportedOperationException();
}
}
}
|
feat(dmn): adjust the decision result mock in rest api
related to #CAM-6441
|
engine-rest/engine-rest/src/test/java/org/camunda/bpm/engine/rest/helper/MockDecisionTableResultBuilder.java
|
feat(dmn): adjust the decision result mock in rest api
|
<ide><path>ngine-rest/engine-rest/src/test/java/org/camunda/bpm/engine/rest/helper/MockDecisionTableResultBuilder.java
<ide>
<ide> import org.camunda.bpm.dmn.engine.DmnDecisionRuleResult;
<ide> import org.camunda.bpm.dmn.engine.DmnDecisionTableResult;
<add>import org.camunda.bpm.engine.variable.value.TypedValue;
<ide>
<ide> /**
<ide> * @author Philipp Ossler
<ide> throw new UnsupportedOperationException();
<ide> }
<ide>
<add> @Override
<add> public <T> T getSingleEntry() {
<add> throw new UnsupportedOperationException();
<add> }
<add>
<add> @Override
<add> public <T extends TypedValue> T getSingleEntryTyped() {
<add> throw new UnsupportedOperationException();
<add> }
<add>
<ide> }
<ide> }
|
|
Java
|
lgpl-2.1
|
ef6262f37b4f007a6fc6087c94f2ccac165cdda8
| 0 |
svn2github/beast-mcmc,armanbilge/BEAST_sandbox,svn2github/beast-mcmc,svn2github/beast-mcmc,svn2github/beast-mcmc,armanbilge/BEAST_sandbox,svn2github/beast-mcmc,armanbilge/BEAST_sandbox,armanbilge/BEAST_sandbox,armanbilge/BEAST_sandbox
|
/*
* AbstractTreeLikelihood.java
*
* Copyright (C) 2002-2009 Alexei Drummond and Andrew Rambaut
*
* This file is part of BEAST.
* See the NOTICE file distributed with this work for additional
* information regarding copyright ownership and licensing.
*
* BEAST is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* BEAST is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with BEAST; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301 USA
*/
package dr.evomodel.treelikelihood;
import dr.evolution.alignment.PatternList;
import dr.evolution.datatype.DataType;
import dr.evolution.tree.NodeRef;
import dr.evomodel.tree.TreeModel;
import dr.inference.model.AbstractModelLikelihood;
import dr.inference.model.Model;
import dr.inference.model.Parameter;
import dr.inference.model.Variable;
import dr.xml.Reportable;
/**
* AbstractTreeLikelihood - a base class for likelihood calculators of sites on a tree.
*
* @author Andrew Rambaut
* @version $Id: AbstractTreeLikelihood.java,v 1.16 2005/06/07 16:27:39 alexei Exp $
*/
public abstract class AbstractTreeLikelihood extends AbstractModelLikelihood implements Reportable {
protected static final boolean COUNT_TOTAL_OPERATIONS = true;
public AbstractTreeLikelihood(String name, PatternList patternList,
TreeModel treeModel) {
super(name);
this.patternList = patternList;
this.dataType = patternList.getDataType();
patternCount = patternList.getPatternCount();
stateCount = dataType.getStateCount();
patternWeights = patternList.getPatternWeights();
this.treeModel = treeModel;
addModel(treeModel);
nodeCount = treeModel.getNodeCount();
updateNode = new boolean[nodeCount];
for (int i = 0; i < nodeCount; i++) {
updateNode[i] = true;
}
likelihoodKnown = false;
}
/**
* Sets the partials from a sequence in an alignment.
*/
protected final void setStates(LikelihoodCore likelihoodCore, PatternList patternList,
int sequenceIndex, int nodeIndex) {
int i;
int[] states = new int[patternCount];
for (i = 0; i < patternCount; i++) {
states[i] = patternList.getPatternState(sequenceIndex, i);
}
likelihoodCore.setNodeStates(nodeIndex, states);
}
public TreeModel getTreeModel() {
return treeModel;
}
/**
* Sets the partials from a sequence in an alignment.
*/
protected final void setPartials(LikelihoodCore likelihoodCore, PatternList patternList,
int categoryCount,
int sequenceIndex, int nodeIndex) {
double[] partials = new double[patternCount * stateCount];
boolean[] stateSet;
int v = 0;
for (int i = 0; i < patternCount; i++) {
int state = patternList.getPatternState(sequenceIndex, i);
stateSet = dataType.getStateSet(state);
for (int j = 0; j < stateCount; j++) {
if (stateSet[j]) {
partials[v] = 1.0;
} else {
partials[v] = 0.0;
}
v++;
}
}
likelihoodCore.setNodePartials(nodeIndex, partials);
}
/**
* Sets the partials from a sequence in an alignment.
*/
protected final void setMissingStates(LikelihoodCore likelihoodCore, int nodeIndex) {
int[] states = new int[patternCount];
for (int i = 0; i < patternCount; i++) {
states[i] = dataType.getGapState();
}
likelihoodCore.setNodeStates(nodeIndex, states);
}
/**
* Sets the partials from a sequence in an alignment.
*/
protected final void setMissingPartials(LikelihoodCore likelihoodCore, int nodeIndex) {
double[] partials = new double[patternCount * stateCount];
int v = 0;
for (int i = 0; i < patternCount; i++) {
for (int j = 0; j < stateCount; j++) {
partials[v] = 1.0;
v++;
}
}
likelihoodCore.setNodePartials(nodeIndex, partials);
}
/**
* Set update flag for a node and its children
*/
protected void updateNode(NodeRef node) {
updateNode[node.getNumber()] = true;
likelihoodKnown = false;
}
/**
* Set update flag for a node and its direct children
*/
protected void updateNodeAndChildren(NodeRef node) {
updateNode[node.getNumber()] = true;
for (int i = 0; i < treeModel.getChildCount(node); i++) {
NodeRef child = treeModel.getChild(node, i);
updateNode[child.getNumber()] = true;
}
likelihoodKnown = false;
}
/**
* Set update flag for a node and all its descendants
*/
protected void updateNodeAndDescendents(NodeRef node) {
updateNode[node.getNumber()] = true;
for (int i = 0; i < treeModel.getChildCount(node); i++) {
NodeRef child = treeModel.getChild(node, i);
updateNodeAndDescendents(child);
}
likelihoodKnown = false;
}
/**
* Set update flag for all nodes
*/
protected void updateAllNodes() {
for (int i = 0; i < nodeCount; i++) {
updateNode[i] = true;
}
likelihoodKnown = false;
}
/**
* Set update flag for a pattern
*/
protected void updatePattern(int i) {
if (updatePattern != null) {
updatePattern[i] = true;
}
likelihoodKnown = false;
}
/**
* Set update flag for all patterns
*/
protected void updateAllPatterns() {
if (updatePattern != null) {
for (int i = 0; i < patternCount; i++) {
updatePattern[i] = true;
}
}
likelihoodKnown = false;
}
public final double[] getPatternWeights() {
return patternWeights;
}
public final int getPatternCount() {
return patternCount;
}
// **************************************************************
// VariableListener IMPLEMENTATION
// **************************************************************
protected void handleVariableChangedEvent(Variable variable, int index, Parameter.ChangeType type) {
// do nothing
}
// **************************************************************
// Model IMPLEMENTATION
// **************************************************************
protected void handleModelChangedEvent(Model model, Object object, int index) {
likelihoodKnown = false;
}
/**
* Stores the additional state other than model components
*/
protected void storeState() {
storedLikelihoodKnown = likelihoodKnown;
storedLogLikelihood = logLikelihood;
}
/**
* Restore the additional stored state
*/
protected void restoreState() {
likelihoodKnown = storedLikelihoodKnown;
logLikelihood = storedLogLikelihood;
}
protected void acceptState() {
} // nothing to do
// **************************************************************
// Likelihood IMPLEMENTATION
// **************************************************************
public final Model getModel() {
return this;
}
public final double getLogLikelihood() {
if (!likelihoodKnown) {
logLikelihood = calculateLogLikelihood();
likelihoodKnown = true;
}
return logLikelihood;
}
/**
* Forces a complete recalculation of the likelihood next time getLikelihood is called
*/
public void makeDirty() {
likelihoodKnown = false;
updateAllNodes();
updateAllPatterns();
}
protected abstract double calculateLogLikelihood();
public String getReport() {
getLogLikelihood();
return getClass().getName() + "(" + logLikelihood + ") total operations = " + totalOperationCount;
}
// **************************************************************
// INSTANCE VARIABLES
// **************************************************************
/**
* the tree
*/
protected TreeModel treeModel = null;
/**
* the patternList
*/
protected PatternList patternList = null;
protected DataType dataType = null;
/**
* the pattern weights
*/
protected double[] patternWeights;
/**
* the number of patterns
*/
protected int patternCount;
/**
* the number of states in the data
*/
protected int stateCount;
/**
* the number of nodes in the tree
*/
protected int nodeCount;
/**
* Flags to specify which patterns are to be updated
*/
protected boolean[] updatePattern = null;
/**
* Flags to specify which nodes are to be updated
*/
protected boolean[] updateNode;
private double logLikelihood;
private double storedLogLikelihood;
protected boolean likelihoodKnown = false;
private boolean storedLikelihoodKnown = false;
protected int totalOperationCount = 0;
}
|
src/dr/evomodel/treelikelihood/AbstractTreeLikelihood.java
|
/*
* AbstractTreeLikelihood.java
*
* Copyright (C) 2002-2009 Alexei Drummond and Andrew Rambaut
*
* This file is part of BEAST.
* See the NOTICE file distributed with this work for additional
* information regarding copyright ownership and licensing.
*
* BEAST is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* BEAST is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with BEAST; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301 USA
*/
package dr.evomodel.treelikelihood;
import dr.evolution.alignment.PatternList;
import dr.evolution.datatype.DataType;
import dr.evolution.tree.NodeRef;
import dr.evomodel.tree.TreeModel;
import dr.inference.model.AbstractModelLikelihood;
import dr.inference.model.Model;
import dr.inference.model.Parameter;
import dr.inference.model.Variable;
import dr.xml.Reportable;
/**
* AbstractTreeLikelihood - a base class for likelihood calculators of sites on a tree.
*
* @author Andrew Rambaut
* @version $Id: AbstractTreeLikelihood.java,v 1.16 2005/06/07 16:27:39 alexei Exp $
*/
public abstract class AbstractTreeLikelihood extends AbstractModelLikelihood implements Reportable {
protected static final boolean COUNT_TOTAL_OPERATIONS = true;
public AbstractTreeLikelihood(String name, PatternList patternList,
TreeModel treeModel) {
super(name);
this.patternList = patternList;
this.dataType = patternList.getDataType();
patternCount = patternList.getPatternCount();
stateCount = dataType.getStateCount();
patternWeights = patternList.getPatternWeights();
this.treeModel = treeModel;
addModel(treeModel);
nodeCount = treeModel.getNodeCount();
updateNode = new boolean[nodeCount];
for (int i = 0; i < nodeCount; i++) {
updateNode[i] = true;
}
likelihoodKnown = false;
}
/**
* Sets the partials from a sequence in an alignment.
*/
protected final void setStates(LikelihoodCore likelihoodCore, PatternList patternList,
int sequenceIndex, int nodeIndex) {
int i;
int[] states = new int[patternCount];
for (i = 0; i < patternCount; i++) {
states[i] = patternList.getPatternState(sequenceIndex, i);
}
likelihoodCore.setNodeStates(nodeIndex, states);
}
public TreeModel getTreeModel() {
return treeModel;
}
/**
* Sets the partials from a sequence in an alignment.
*/
protected final void setPartials(LikelihoodCore likelihoodCore, PatternList patternList,
int categoryCount,
int sequenceIndex, int nodeIndex) {
double[] partials = new double[patternCount * stateCount];
boolean[] stateSet;
int v = 0;
for (int i = 0; i < patternCount; i++) {
int state = patternList.getPatternState(sequenceIndex, i);
stateSet = dataType.getStateSet(state);
for (int j = 0; j < stateCount; j++) {
if (stateSet[j]) {
partials[v] = 1.0;
} else {
partials[v] = 0.0;
}
v++;
}
}
likelihoodCore.setNodePartials(nodeIndex, partials);
}
/**
* Sets the partials from a sequence in an alignment.
*/
protected final void setMissingStates(LikelihoodCore likelihoodCore, int nodeIndex) {
int[] states = new int[patternCount];
for (int i = 0; i < patternCount; i++) {
states[i] = dataType.getGapState();
}
likelihoodCore.setNodeStates(nodeIndex, states);
}
/**
* Sets the partials from a sequence in an alignment.
*/
protected final void setMissingPartials(LikelihoodCore likelihoodCore, int nodeIndex) {
double[] partials = new double[patternCount * stateCount];
int v = 0;
for (int i = 0; i < patternCount; i++) {
for (int j = 0; j < stateCount; j++) {
partials[v] = 1.0;
v++;
}
}
likelihoodCore.setNodePartials(nodeIndex, partials);
}
/**
* Set update flag for a node and its children
*/
protected void updateNode(NodeRef node) {
updateNode[node.getNumber()] = true;
likelihoodKnown = false;
}
/**
* Set update flag for a node and its direct children
*/
protected void updateNodeAndChildren(NodeRef node) {
updateNode[node.getNumber()] = true;
for (int i = 0; i < treeModel.getChildCount(node); i++) {
NodeRef child = treeModel.getChild(node, i);
updateNode[child.getNumber()] = true;
}
likelihoodKnown = false;
}
/**
* Set update flag for a node and all its descendants
*/
protected void updateNodeAndDescendents(NodeRef node) {
updateNode[node.getNumber()] = true;
for (int i = 0; i < treeModel.getChildCount(node); i++) {
NodeRef child = treeModel.getChild(node, i);
updateNodeAndDescendents(child);
}
likelihoodKnown = false;
}
/**
* Set update flag for all nodes
*/
protected void updateAllNodes() {
for (int i = 0; i < nodeCount; i++) {
updateNode[i] = true;
}
likelihoodKnown = false;
}
/**
* Set update flag for a pattern
*/
protected void updatePattern(int i) {
if (updatePattern != null) {
updatePattern[i] = true;
}
likelihoodKnown = false;
}
/**
* Set update flag for all patterns
*/
protected void updateAllPatterns() {
if (updatePattern != null) {
for (int i = 0; i < patternCount; i++) {
updatePattern[i] = true;
}
}
likelihoodKnown = false;
}
public final double[] getPatternWeights() {
return patternWeights;
}
public final int getPatternCount() {
return patternCount;
}
// **************************************************************
// VariableListener IMPLEMENTATION
// **************************************************************
protected void handleVariableChangedEvent(Variable variable, int index, Parameter.ChangeType type) {
// do nothing
}
// **************************************************************
// Model IMPLEMENTATION
// **************************************************************
protected void handleModelChangedEvent(Model model, Object object, int index) {
likelihoodKnown = false;
}
/**
* Stores the additional state other than model components
*/
protected void storeState() {
storedLikelihoodKnown = likelihoodKnown;
storedLogLikelihood = logLikelihood;
}
/**
* Restore the additional stored state
*/
protected void restoreState() {
likelihoodKnown = storedLikelihoodKnown;
logLikelihood = storedLogLikelihood;
}
protected void acceptState() {
} // nothing to do
// **************************************************************
// Likelihood IMPLEMENTATION
// **************************************************************
public final Model getModel() {
return this;
}
public final double getLogLikelihood() {
if (!likelihoodKnown) {
logLikelihood = calculateLogLikelihood();
likelihoodKnown = true;
}
return logLikelihood;
}
/**
* Forces a complete recalculation of the likelihood next time getLikelihood is called
*/
public void makeDirty() {
likelihoodKnown = false;
updateAllNodes();
updateAllPatterns();
}
protected abstract double calculateLogLikelihood();
public String getReport() {
getLogLikelihood();
return getClass().getName() + "(" + logLikelihood + ") total operations = " + totalOperationCount;
}
// **************************************************************
// INSTANCE VARIABLES
// **************************************************************
/**
* the tree
*/
protected TreeModel treeModel = null;
/**
* the patternList
*/
protected PatternList patternList = null;
protected DataType dataType = null;
/**
* the pattern weights
*/
protected double[] patternWeights;
/**
* the number of patterns
*/
protected int patternCount;
/**
* the number of states in the data
*/
protected int stateCount;
/**
* the number of nodes in the tree
*/
protected int nodeCount;
/**
* Flags to specify which patterns are to be updated
*/
protected boolean[] updatePattern = null;
/**
* Flags to specify which nodes are to be updated
*/
protected boolean[] updateNode;
private double logLikelihood;
private double storedLogLikelihood;
private boolean likelihoodKnown = false;
private boolean storedLikelihoodKnown = false;
protected int totalOperationCount = 0;
}
|
Changed the scope of variable likelihoodKnown from private to protected.
git-svn-id: 67bc77c75b8364e4e9cdff0eb6560f5818674cd8@2329 ca793f91-a31e-0410-b540-2769d408b6a1
|
src/dr/evomodel/treelikelihood/AbstractTreeLikelihood.java
|
Changed the scope of variable likelihoodKnown from private to protected.
|
<ide><path>rc/dr/evomodel/treelikelihood/AbstractTreeLikelihood.java
<ide>
<ide> private double logLikelihood;
<ide> private double storedLogLikelihood;
<del> private boolean likelihoodKnown = false;
<add> protected boolean likelihoodKnown = false;
<ide> private boolean storedLikelihoodKnown = false;
<ide>
<ide> protected int totalOperationCount = 0;
|
|
Java
|
apache-2.0
|
2dc338359d1a5a2f69d03a132118a6454f453aa3
| 0 |
mtransitapps/ca-kingston-transit-bus-parser
|
package org.mtransit.parser.ca_kingston_transit_bus;
import static org.mtransit.commons.RegexUtils.DIGITS;
import static org.mtransit.commons.StringUtils.EMPTY;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.mtransit.commons.CharUtils;
import org.mtransit.commons.CleanUtils;
import org.mtransit.parser.DefaultAgencyTools;
import org.mtransit.parser.MTLog;
import org.mtransit.parser.gtfs.data.GRoute;
import org.mtransit.parser.gtfs.data.GStop;
import org.mtransit.parser.gtfs.data.GTrip;
import org.mtransit.parser.mt.data.MAgency;
import java.util.List;
import java.util.Locale;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
// https://openkingston.cityofkingston.ca/explore/dataset/transit-gtfs-routes/
// https://opendatakingston.cityofkingston.ca/explore/dataset/transit-gtfs-stops/
// https://api.cityofkingston.ca/gtfs/gtfs.zip
public class KingstonTransitBusAgencyTools extends DefaultAgencyTools {
public static void main(@NotNull String[] args) {
new KingstonTransitBusAgencyTools().start(args);
}
@Nullable
@Override
public List<Locale> getSupportedLanguages() {
return LANG_EN;
}
@Override
public boolean defaultExcludeEnabled() {
return true;
}
@NotNull
@Override
public String getAgencyName() {
return "Kingston Transit";
}
@Override
public boolean excludeRoute(@NotNull GRoute gRoute) {
final String routeLongNameLC = gRoute.getRouteLongNameOrDefault().toLowerCase(Locale.ENGLISH);
if (routeLongNameLC.contains("out of service")) {
return EXCLUDE;
}
return super.excludeRoute(gRoute);
}
@Override
public boolean excludeTrip(@NotNull GTrip gTrip) {
final String tripHeadSignLC = gTrip.getTripHeadsignOrDefault().toLowerCase(Locale.ENGLISH);
if (tripHeadSignLC.contains("not in service")) {
return EXCLUDE;
}
return super.excludeTrip(gTrip);
}
@NotNull
@Override
public Integer getAgencyRouteType() {
return MAgency.ROUTE_TYPE_BUS;
}
@Override
public boolean defaultRouteIdEnabled() {
return true;
}
@Override
public boolean useRouteShortNameForRouteId() {
return true;
}
@Nullable
@Override
public Long convertRouteIdFromShortNameNotSupported(@NotNull String routeShortName) {
switch (routeShortName) {
case "COV":
return 99_001L;
case "XTRA":
return 99_002L;
}
return super.convertRouteIdFromShortNameNotSupported(routeShortName);
}
@Override
public boolean defaultRouteLongNameEnabled() {
return true;
}
@Override
public boolean defaultAgencyColorEnabled() {
return true;
}
private static final String AGENCY_COLOR = "009BC9";
@NotNull
@Override
public String getAgencyColor() {
return AGENCY_COLOR;
}
@Override
public boolean directionSplitterEnabled() {
return true;
}
@Override
public boolean directionSplitterEnabled(long routeId) {
if (routeId == 99_002L) { // XTRA
return false;
}
return super.directionSplitterEnabled(routeId);
}
@Override
public boolean directionFinderEnabled() {
return true;
}
private static final Pattern ENDS_WITH_PARENTHESIS_ = Pattern.compile("( \\(.*\\))", Pattern.CASE_INSENSITIVE);
private static final Pattern TRANSFER_POINT_ = Pattern.compile("( transfer (point|pt) (platform|p:)\\d+$)", Pattern.CASE_INSENSITIVE);
@NotNull
@Override
public String cleanDirectionHeadsign(boolean fromStopName, @NotNull String directionHeadSign) {
directionHeadSign = super.cleanDirectionHeadsign(fromStopName, directionHeadSign);
if (fromStopName) {
directionHeadSign = ENDS_WITH_PARENTHESIS_.matcher(directionHeadSign).replaceAll(EMPTY);
directionHeadSign = TRANSFER_POINT_.matcher(directionHeadSign).replaceAll(EMPTY);
}
return directionHeadSign;
}
private static final Pattern STARTS_WITH_EXPRESS = Pattern.compile("(^(express -) )*", Pattern.CASE_INSENSITIVE);
private static final Pattern STARTS_WITH_EXTRA_BUS = Pattern.compile("(^(extra bus -) )*", Pattern.CASE_INSENSITIVE);
private static final Pattern KGH_ = CleanUtils.cleanWords("kingston general hosp", "kingston general hospital");
private static final String KGH_REPLACEMENT = CleanUtils.cleanWordsReplacement("KGH");
@NotNull
@Override
public String cleanTripHeadsign(@NotNull String tripHeadsign) {
tripHeadsign = STARTS_WITH_EXPRESS.matcher(tripHeadsign).replaceAll(EMPTY);
tripHeadsign = STARTS_WITH_EXTRA_BUS.matcher(tripHeadsign).replaceAll(EMPTY);
tripHeadsign = KGH_.matcher(tripHeadsign).replaceAll(KGH_REPLACEMENT);
tripHeadsign = CleanUtils.keepToAndRemoveVia(tripHeadsign);
tripHeadsign = CleanUtils.SAINT.matcher(tripHeadsign).replaceAll(CleanUtils.SAINT_REPLACEMENT);
tripHeadsign = CleanUtils.cleanSlashes(tripHeadsign);
tripHeadsign = CleanUtils.cleanStreetTypes(tripHeadsign);
return CleanUtils.cleanLabel(tripHeadsign);
}
private static final Pattern SIDE_ = CleanUtils.cleanWord("side");
@NotNull
@Override
public String cleanStopName(@NotNull String gStopName) {
gStopName = SIDE_.matcher(gStopName).replaceAll(EMPTY);
gStopName = CleanUtils.cleanBounds(gStopName);
gStopName = CleanUtils.CLEAN_AT.matcher(gStopName).replaceAll(CleanUtils.CLEAN_AT_REPLACEMENT);
gStopName = CleanUtils.CLEAN_AND.matcher(gStopName).replaceAll(CleanUtils.CLEAN_AND_REPLACEMENT);
gStopName = CleanUtils.cleanStreetTypes(gStopName);
gStopName = CleanUtils.cleanNumbers(gStopName);
return CleanUtils.cleanLabel(gStopName);
}
private static final String PLACE_CATC = "place_catc";
private static final String PLACE_CHCA = "place_chca";
private static final String PLACE_DWNP = "place_dwnp";
private static final String PLACE_GRDC = "place_grdc";
private static final String PLACE_KNGC = "place_kngc";
private static final String PLACE_MSPR = "place_mspr";
private static final String PLACE_RAIL = "place_rail";
@NotNull
@Override
public String getStopCode(@NotNull GStop gStop) {
//noinspection deprecation
return gStop.getStopId(); // using stop ID as stop code (useful to match with GTFS real-time)
}
@Override
public int getStopId(@NotNull GStop gStop) {
//noinspection deprecation
final String stopId = gStop.getStopId();
if (stopId.length() > 0 && CharUtils.isDigitsOnly(stopId)) {
return Integer.parseInt(stopId); // using stop code as stop ID
}
switch (stopId) {
case PLACE_CATC:
return 900_000;
case PLACE_CHCA:
return 910_000;
case PLACE_DWNP:
return 920_000;
case PLACE_GRDC:
return 930_000;
case PLACE_KNGC:
return 940_000;
case PLACE_MSPR:
return 950_000;
case PLACE_RAIL:
return 960_000;
}
if ("Smspr1".equals(stopId)) {
return 970000;
}
try {
final Matcher matcher = DIGITS.matcher(stopId);
if (matcher.find()) {
final int digits = Integer.parseInt(matcher.group());
if (stopId.startsWith("S")) {
return 190_000 + digits;
}
throw new MTLog.Fatal("Unexpected stop ID for '%s'!", gStop);
}
} catch (Exception e) {
throw new MTLog.Fatal(e, "Error while finding stop ID for '%s'!", gStop);
}
throw new MTLog.Fatal("Unexpected stop ID for '%s'!", gStop);
}
}
|
src/main/java/org/mtransit/parser/ca_kingston_transit_bus/KingstonTransitBusAgencyTools.java
|
package org.mtransit.parser.ca_kingston_transit_bus;
import static org.mtransit.commons.RegexUtils.DIGITS;
import static org.mtransit.commons.StringUtils.EMPTY;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.mtransit.commons.CharUtils;
import org.mtransit.commons.CleanUtils;
import org.mtransit.parser.DefaultAgencyTools;
import org.mtransit.parser.MTLog;
import org.mtransit.parser.gtfs.data.GRoute;
import org.mtransit.parser.gtfs.data.GStop;
import org.mtransit.parser.gtfs.data.GTrip;
import org.mtransit.parser.mt.data.MAgency;
import java.util.List;
import java.util.Locale;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
// https://openkingston.cityofkingston.ca/explore/dataset/transit-gtfs-routes/
// https://opendatakingston.cityofkingston.ca/explore/dataset/transit-gtfs-stops/
// https://api.cityofkingston.ca/gtfs/gtfs.zip
public class KingstonTransitBusAgencyTools extends DefaultAgencyTools {
public static void main(@NotNull String[] args) {
new KingstonTransitBusAgencyTools().start(args);
}
@Nullable
@Override
public List<Locale> getSupportedLanguages() {
return LANG_EN;
}
@Override
public boolean defaultExcludeEnabled() {
return true;
}
@NotNull
@Override
public String getAgencyName() {
return "Kingston Transit";
}
@Override
public boolean excludeRoute(@NotNull GRoute gRoute) {
final String routeLongNameLC = gRoute.getRouteLongNameOrDefault().toLowerCase(Locale.ENGLISH);
if (routeLongNameLC.contains("out of service")) {
return EXCLUDE;
}
return super.excludeRoute(gRoute);
}
@Override
public boolean excludeTrip(@NotNull GTrip gTrip) {
final String tripHeadSignLC = gTrip.getTripHeadsignOrDefault().toLowerCase(Locale.ENGLISH);
if (tripHeadSignLC.contains("not in service")) {
return EXCLUDE;
}
return super.excludeTrip(gTrip);
}
@NotNull
@Override
public Integer getAgencyRouteType() {
return MAgency.ROUTE_TYPE_BUS;
}
@Override
public boolean defaultRouteIdEnabled() {
return true;
}
@Override
public boolean useRouteShortNameForRouteId() {
return true;
}
@Nullable
@Override
public Long convertRouteIdFromShortNameNotSupported(@NotNull String routeShortName) {
switch (routeShortName) {
case "COV":
return 99_001L;
case "XTRA":
return 99_002L;
}
return super.convertRouteIdFromShortNameNotSupported(routeShortName);
}
@Override
public boolean defaultRouteLongNameEnabled() {
return true;
}
@Override
public boolean defaultAgencyColorEnabled() {
return true;
}
private static final String AGENCY_COLOR = "009BC9";
@NotNull
@Override
public String getAgencyColor() {
return AGENCY_COLOR;
}
@Override
public boolean directionSplitterEnabled() {
return true;
}
@Override
public boolean directionSplitterEnabled(long routeId) {
if (routeId == 99_001L) {
return false;
}
return super.directionSplitterEnabled(routeId);
}
@Override
public boolean directionFinderEnabled() {
return true;
}
private static final Pattern ENDS_WITH_PARENTHESIS_ = Pattern.compile("( \\(.*\\))", Pattern.CASE_INSENSITIVE);
private static final Pattern TRANSFER_POINT_ = Pattern.compile("( transfer (point|pt) (platform|p:)\\d+$)", Pattern.CASE_INSENSITIVE);
@NotNull
@Override
public String cleanDirectionHeadsign(boolean fromStopName, @NotNull String directionHeadSign) {
directionHeadSign = super.cleanDirectionHeadsign(fromStopName, directionHeadSign);
if (fromStopName) {
directionHeadSign = ENDS_WITH_PARENTHESIS_.matcher(directionHeadSign).replaceAll(EMPTY);
directionHeadSign = TRANSFER_POINT_.matcher(directionHeadSign).replaceAll(EMPTY);
}
return directionHeadSign;
}
private static final Pattern STARTS_WITH_EXPRESS = Pattern.compile("(^(express -) )*", Pattern.CASE_INSENSITIVE);
private static final Pattern STARTS_WITH_EXTRA_BUS = Pattern.compile("(^(extra bus -) )*", Pattern.CASE_INSENSITIVE);
private static final Pattern KGH_ = CleanUtils.cleanWords("kingston general hosp", "kingston general hospital");
private static final String KGH_REPLACEMENT = CleanUtils.cleanWordsReplacement("KGH");
@NotNull
@Override
public String cleanTripHeadsign(@NotNull String tripHeadsign) {
tripHeadsign = STARTS_WITH_EXPRESS.matcher(tripHeadsign).replaceAll(EMPTY);
tripHeadsign = STARTS_WITH_EXTRA_BUS.matcher(tripHeadsign).replaceAll(EMPTY);
tripHeadsign = KGH_.matcher(tripHeadsign).replaceAll(KGH_REPLACEMENT);
tripHeadsign = CleanUtils.keepToAndRemoveVia(tripHeadsign);
tripHeadsign = CleanUtils.SAINT.matcher(tripHeadsign).replaceAll(CleanUtils.SAINT_REPLACEMENT);
tripHeadsign = CleanUtils.cleanSlashes(tripHeadsign);
tripHeadsign = CleanUtils.cleanStreetTypes(tripHeadsign);
return CleanUtils.cleanLabel(tripHeadsign);
}
private static final Pattern SIDE_ = CleanUtils.cleanWord("side");
@NotNull
@Override
public String cleanStopName(@NotNull String gStopName) {
gStopName = SIDE_.matcher(gStopName).replaceAll(EMPTY);
gStopName = CleanUtils.cleanBounds(gStopName);
gStopName = CleanUtils.CLEAN_AT.matcher(gStopName).replaceAll(CleanUtils.CLEAN_AT_REPLACEMENT);
gStopName = CleanUtils.CLEAN_AND.matcher(gStopName).replaceAll(CleanUtils.CLEAN_AND_REPLACEMENT);
gStopName = CleanUtils.cleanStreetTypes(gStopName);
gStopName = CleanUtils.cleanNumbers(gStopName);
return CleanUtils.cleanLabel(gStopName);
}
private static final String PLACE_CATC = "place_catc";
private static final String PLACE_CHCA = "place_chca";
private static final String PLACE_DWNP = "place_dwnp";
private static final String PLACE_GRDC = "place_grdc";
private static final String PLACE_KNGC = "place_kngc";
private static final String PLACE_MSPR = "place_mspr";
private static final String PLACE_RAIL = "place_rail";
@NotNull
@Override
public String getStopCode(@NotNull GStop gStop) {
//noinspection deprecation
return gStop.getStopId(); // using stop ID as stop code (useful to match with GTFS real-time)
}
@Override
public int getStopId(@NotNull GStop gStop) {
//noinspection deprecation
final String stopId = gStop.getStopId();
if (stopId.length() > 0 && CharUtils.isDigitsOnly(stopId)) {
return Integer.parseInt(stopId); // using stop code as stop ID
}
switch (stopId) {
case PLACE_CATC:
return 900_000;
case PLACE_CHCA:
return 910_000;
case PLACE_DWNP:
return 920_000;
case PLACE_GRDC:
return 930_000;
case PLACE_KNGC:
return 940_000;
case PLACE_MSPR:
return 950_000;
case PLACE_RAIL:
return 960_000;
}
if ("Smspr1".equals(stopId)) {
return 970000;
}
try {
final Matcher matcher = DIGITS.matcher(stopId);
if (matcher.find()) {
final int digits = Integer.parseInt(matcher.group());
if (stopId.startsWith("S")) {
return 190_000 + digits;
}
throw new MTLog.Fatal("Unexpected stop ID for '%s'!", gStop);
}
} catch (Exception e) {
throw new MTLog.Fatal(e, "Error while finding stop ID for '%s'!", gStop);
}
throw new MTLog.Fatal("Unexpected stop ID for '%s'!", gStop);
}
}
|
Cleanup
|
src/main/java/org/mtransit/parser/ca_kingston_transit_bus/KingstonTransitBusAgencyTools.java
|
Cleanup
|
<ide><path>rc/main/java/org/mtransit/parser/ca_kingston_transit_bus/KingstonTransitBusAgencyTools.java
<ide>
<ide> @Override
<ide> public boolean directionSplitterEnabled(long routeId) {
<del> if (routeId == 99_001L) {
<add> if (routeId == 99_002L) { // XTRA
<ide> return false;
<ide> }
<ide> return super.directionSplitterEnabled(routeId);
|
|
JavaScript
|
mit
|
d058ba00a7d68832906f9b95d6feec829b177ee4
| 0 |
worona/worona-app,worona/worona-app
|
/* eslint-disable no-undef */
import { takeEvery } from 'redux-saga';
import { select, take } from 'redux-saga/effects'
import * as deps from '../deps';
export function launchGTMEventsSaga({ type, ...props }) {
window.dataLayer.push({
event: type,
props,
});
}
export function* virtualPageView() {
const pathname = yield select(deps.selectors.getPathname);
const titleFromUrl = capitalize(/\/?([^/]+)/.exec(pathname)[1]).replace(/-/g, ' ');
const title = !service ? titleFromUrl : titleFromPkg;
const url = pathname.replace(/(\/?.+)(\/[a-zA-Z0-9]{17})/, '$1');
window.dataLayer.push({
event: 'virtualPageView',
virtualPage: {
title,
url,
},
});
}
export default function* gtmSagas() {
yield take(deps.types.SITE_ID_CHANGED);
const isPreview = yield select(deps.selectors.getPreview);
if (!isPreview) {
window.dataLayer = window.dataLayer || [];
yield takeEvery('*', launchGTMEventsSaga);
}
}
|
client/packages/core-app-worona/src/app/gtm-app-extension-worona/sagas/index.js
|
/* eslint-disable no-undef */
import { takeEvery } from 'redux-saga';
import { select, take } from 'redux-saga/effects'
import * as deps from '../deps';
export function launchGTMEventsSaga({ type, ...props }) {
window.dataLayer.push({
event: type,
props,
});
}
export default function* gtmSagas() {
yield take(deps.types.SITE_ID_CHANGED);
const isPreview = yield select(deps.selectors.getPreview);
if (!isPreview) {
window.dataLayer = window.dataLayer || [];
yield takeEvery('*', launchGTMEventsSaga);
}
}
|
Start work on virtualPageViews (WIP)
|
client/packages/core-app-worona/src/app/gtm-app-extension-worona/sagas/index.js
|
Start work on virtualPageViews (WIP)
|
<ide><path>lient/packages/core-app-worona/src/app/gtm-app-extension-worona/sagas/index.js
<ide> });
<ide> }
<ide>
<add>export function* virtualPageView() {
<add> const pathname = yield select(deps.selectors.getPathname);
<add> const titleFromUrl = capitalize(/\/?([^/]+)/.exec(pathname)[1]).replace(/-/g, ' ');
<add> const title = !service ? titleFromUrl : titleFromPkg;
<add> const url = pathname.replace(/(\/?.+)(\/[a-zA-Z0-9]{17})/, '$1');
<add> window.dataLayer.push({
<add> event: 'virtualPageView',
<add> virtualPage: {
<add> title,
<add> url,
<add> },
<add> });
<add>}
<add>
<ide> export default function* gtmSagas() {
<ide> yield take(deps.types.SITE_ID_CHANGED);
<ide> const isPreview = yield select(deps.selectors.getPreview);
|
|
Java
|
mit
|
c50135b60111e6c1b9ea6f5862e41b5acf7eb221
| 0 |
SpongePowered/Sponge,SpongePowered/Sponge,SpongePowered/Sponge
|
/*
* This file is part of Sponge, licensed under the MIT License (MIT).
*
* Copyright (c) SpongePowered <https://www.spongepowered.org>
* Copyright (c) contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package org.spongepowered.common.mixin.tracker.network.play;
import net.minecraft.entity.player.PlayerEntity;
import net.minecraft.entity.player.ServerPlayerEntity;
import net.minecraft.item.ItemStack;
import net.minecraft.network.play.ServerPlayNetHandler;
import net.minecraft.network.play.client.CPlayerDiggingPacket;
import net.minecraft.server.management.PlayerInteractionManager;
import net.minecraft.util.ActionResultType;
import net.minecraft.util.Hand;
import net.minecraft.util.math.BlockRayTraceResult;
import net.minecraft.util.text.ITextComponent;
import org.spongepowered.api.event.CauseStackManager;
import org.spongepowered.asm.mixin.Mixin;
import org.spongepowered.asm.mixin.Shadow;
import org.spongepowered.asm.mixin.injection.At;
import org.spongepowered.asm.mixin.injection.Inject;
import org.spongepowered.asm.mixin.injection.Redirect;
import org.spongepowered.asm.mixin.injection.callback.CallbackInfo;
import org.spongepowered.common.SpongeCommon;
import org.spongepowered.common.bridge.entity.PlatformEntityBridge;
import org.spongepowered.common.bridge.server.management.PlayerInteractionManagerBridge;
import org.spongepowered.common.bridge.world.WorldBridge;
import org.spongepowered.common.event.SpongeCommonEventFactory;
import org.spongepowered.common.event.tracking.PhaseTracker;
import org.spongepowered.common.event.tracking.phase.packet.PacketContext;
import org.spongepowered.common.event.tracking.phase.packet.PacketPhaseUtil;
import org.spongepowered.common.event.tracking.phase.tick.PlayerTickContext;
import org.spongepowered.common.event.tracking.phase.tick.TickPhase;
import org.spongepowered.common.item.util.ItemStackUtil;
@Mixin(ServerPlayNetHandler.class)
public abstract class ServerPlayNetHandlerMixin_Tracker {
@Shadow public ServerPlayerEntity player;
@Shadow public abstract void disconnect(ITextComponent textComponent);
@Redirect(method = "tick",
at = @At(value = "INVOKE", target = "Lnet/minecraft/entity/player/ServerPlayerEntity;doTick()V"))
private void tracker$wrapPlayerTickWithPhase(final ServerPlayerEntity player) {
if (((PlatformEntityBridge) player).bridge$isFakePlayer() || ((WorldBridge) player.level).bridge$isFake()) {
player.doTick();
return;
}
try (final CauseStackManager.StackFrame frame = PhaseTracker.getCauseStackManager().pushCauseFrame();
final PlayerTickContext context = TickPhase.Tick.PLAYER.createPhaseContext(PhaseTracker.SERVER).source(player)) {
context.buildAndSwitch();
frame.pushCause(player);
player.doTick();
}
}
@Redirect(method = "handleUseItemOn",
at = @At(value = "INVOKE",
target = "Lnet/minecraft/server/management/PlayerInteractionManager;useItemOn(Lnet/minecraft/entity/player/ServerPlayerEntity;Lnet/minecraft/world/World;Lnet/minecraft/item/ItemStack;Lnet/minecraft/util/Hand;Lnet/minecraft/util/math/BlockRayTraceResult;)Lnet/minecraft/util/ActionResultType;"))
private ActionResultType tracker$checkState(final PlayerInteractionManager interactionManager, final ServerPlayerEntity playerIn,
final net.minecraft.world.World worldIn, final ItemStack stack, final Hand hand, final BlockRayTraceResult rayTraceResult) {
final ActionResultType actionResult = interactionManager.useItemOn(this.player, worldIn, stack, hand, rayTraceResult);
if (PhaseTracker.getInstance().getPhaseContext().isEmpty()) {
return actionResult;
}
final PacketContext<?> context = ((PacketContext<?>) PhaseTracker.getInstance().getPhaseContext());
// If a plugin or mod has changed the item, avoid restoring
if (!context.getInteractItemChanged()) {
final ItemStack itemStack = ItemStackUtil.toNative(context.getItemUsed());
// Only do a restore if something actually changed. The client does an identity check ('==')
// to determine if it should continue using an itemstack. If we always resend the itemstack, we end up
// cancelling item usage (e.g. eating food) that occurs while targeting a block
final boolean isInteractionCancelled = ((PlayerInteractionManagerBridge) this.player.gameMode).bridge$isInteractBlockRightClickCancelled();
if (!ItemStack.matches(itemStack, this.player.getItemInHand(hand)) && isInteractionCancelled) {
PacketPhaseUtil.handlePlayerSlotRestore(this.player, itemStack, hand);
}
}
context.interactItemChanged(false);
((PlayerInteractionManagerBridge) this.player.gameMode).bridge$setInteractBlockRightClickCancelled(false);
return actionResult;
}
/**
* @author gabizou
* @reason We need to track the last primary packet being processed, and usually
* that's when the processPlayerDigging is called, so, we track that by means of
* suggesting that when the packet is about to be actually processed (before
* the switch statement), we keep track of the last primary packet ticking.
*/
@Inject(method = "handlePlayerAction",
at = @At(value = "INVOKE",
target = "Lnet/minecraft/network/play/client/CPlayerDiggingPacket;getPos()Lnet/minecraft/util/math/BlockPos;"))
private void tracker$updateLastPrimaryPacket(final CPlayerDiggingPacket packetIn, final CallbackInfo ci) {
if (PhaseTracker.getInstance().getPhaseContext().isEmpty()) {
return;
}
SpongeCommonEventFactory.lastPrimaryPacketTick = SpongeCommon.getServer().getTickCount();
}
}
|
src/mixins/java/org/spongepowered/common/mixin/tracker/network/play/ServerPlayNetHandlerMixin_Tracker.java
|
/*
* This file is part of Sponge, licensed under the MIT License (MIT).
*
* Copyright (c) SpongePowered <https://www.spongepowered.org>
* Copyright (c) contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package org.spongepowered.common.mixin.tracker.network.play;
import net.minecraft.entity.player.PlayerEntity;
import net.minecraft.entity.player.ServerPlayerEntity;
import net.minecraft.item.ItemStack;
import net.minecraft.network.play.ServerPlayNetHandler;
import net.minecraft.network.play.client.CPlayerDiggingPacket;
import net.minecraft.server.management.PlayerInteractionManager;
import net.minecraft.util.ActionResultType;
import net.minecraft.util.Hand;
import net.minecraft.util.math.BlockRayTraceResult;
import net.minecraft.util.text.ITextComponent;
import org.spongepowered.api.event.CauseStackManager;
import org.spongepowered.asm.mixin.Mixin;
import org.spongepowered.asm.mixin.Shadow;
import org.spongepowered.asm.mixin.injection.At;
import org.spongepowered.asm.mixin.injection.Inject;
import org.spongepowered.asm.mixin.injection.Redirect;
import org.spongepowered.asm.mixin.injection.callback.CallbackInfo;
import org.spongepowered.common.SpongeCommon;
import org.spongepowered.common.bridge.entity.PlatformEntityBridge;
import org.spongepowered.common.bridge.server.management.PlayerInteractionManagerBridge;
import org.spongepowered.common.bridge.world.WorldBridge;
import org.spongepowered.common.event.SpongeCommonEventFactory;
import org.spongepowered.common.event.tracking.PhaseTracker;
import org.spongepowered.common.event.tracking.phase.packet.PacketContext;
import org.spongepowered.common.event.tracking.phase.packet.PacketPhaseUtil;
import org.spongepowered.common.event.tracking.phase.tick.PlayerTickContext;
import org.spongepowered.common.event.tracking.phase.tick.TickPhase;
import org.spongepowered.common.item.util.ItemStackUtil;
@Mixin(ServerPlayNetHandler.class)
public abstract class ServerPlayNetHandlerMixin_Tracker {
@Shadow public ServerPlayerEntity player;
@Shadow public abstract void disconnect(ITextComponent textComponent);
@Redirect(method = "tick",
at = @At(value = "INVOKE", target = "Lnet/minecraft/entity/player/ServerPlayerEntity;doTick()V"))
private void tracker$wrapPlayerTickWithPhase(final ServerPlayerEntity player) {
if (((PlatformEntityBridge) player).bridge$isFakePlayer() || ((WorldBridge) player.level).bridge$isFake()) {
player.tick();
return;
}
try (final CauseStackManager.StackFrame frame = PhaseTracker.getCauseStackManager().pushCauseFrame();
final PlayerTickContext context = TickPhase.Tick.PLAYER.createPhaseContext(PhaseTracker.SERVER).source(player)) {
context.buildAndSwitch();
frame.pushCause(player);
player.tick();
}
}
@Redirect(method = "handleUseItemOn",
at = @At(value = "INVOKE",
target = "Lnet/minecraft/server/management/PlayerInteractionManager;useItemOn(Lnet/minecraft/entity/player/ServerPlayerEntity;Lnet/minecraft/world/World;Lnet/minecraft/item/ItemStack;Lnet/minecraft/util/Hand;Lnet/minecraft/util/math/BlockRayTraceResult;)Lnet/minecraft/util/ActionResultType;"))
private ActionResultType tracker$checkState(final PlayerInteractionManager interactionManager, final ServerPlayerEntity playerIn,
final net.minecraft.world.World worldIn, final ItemStack stack, final Hand hand, final BlockRayTraceResult rayTraceResult) {
final ActionResultType actionResult = interactionManager.useItemOn(this.player, worldIn, stack, hand, rayTraceResult);
if (PhaseTracker.getInstance().getPhaseContext().isEmpty()) {
return actionResult;
}
final PacketContext<?> context = ((PacketContext<?>) PhaseTracker.getInstance().getPhaseContext());
// If a plugin or mod has changed the item, avoid restoring
if (!context.getInteractItemChanged()) {
final ItemStack itemStack = ItemStackUtil.toNative(context.getItemUsed());
// Only do a restore if something actually changed. The client does an identity check ('==')
// to determine if it should continue using an itemstack. If we always resend the itemstack, we end up
// cancelling item usage (e.g. eating food) that occurs while targeting a block
final boolean isInteractionCancelled = ((PlayerInteractionManagerBridge) this.player.gameMode).bridge$isInteractBlockRightClickCancelled();
if (!ItemStack.matches(itemStack, this.player.getItemInHand(hand)) && isInteractionCancelled) {
PacketPhaseUtil.handlePlayerSlotRestore(this.player, itemStack, hand);
}
}
context.interactItemChanged(false);
((PlayerInteractionManagerBridge) this.player.gameMode).bridge$setInteractBlockRightClickCancelled(false);
return actionResult;
}
/**
* @author gabizou
* @reason We need to track the last primary packet being processed, and usually
* that's when the processPlayerDigging is called, so, we track that by means of
* suggesting that when the packet is about to be actually processed (before
* the switch statement), we keep track of the last primary packet ticking.
*/
@Inject(method = "handlePlayerAction",
at = @At(value = "INVOKE",
target = "Lnet/minecraft/network/play/client/CPlayerDiggingPacket;getPos()Lnet/minecraft/util/math/BlockPos;"))
private void tracker$updateLastPrimaryPacket(final CPlayerDiggingPacket packetIn, final CallbackInfo ci) {
if (PhaseTracker.getInstance().getPhaseContext().isEmpty()) {
return;
}
SpongeCommonEventFactory.lastPrimaryPacketTick = SpongeCommon.getServer().getTickCount();
}
}
|
Make sure we do the entire player tick loop
|
src/mixins/java/org/spongepowered/common/mixin/tracker/network/play/ServerPlayNetHandlerMixin_Tracker.java
|
Make sure we do the entire player tick loop
|
<ide><path>rc/mixins/java/org/spongepowered/common/mixin/tracker/network/play/ServerPlayNetHandlerMixin_Tracker.java
<ide> at = @At(value = "INVOKE", target = "Lnet/minecraft/entity/player/ServerPlayerEntity;doTick()V"))
<ide> private void tracker$wrapPlayerTickWithPhase(final ServerPlayerEntity player) {
<ide> if (((PlatformEntityBridge) player).bridge$isFakePlayer() || ((WorldBridge) player.level).bridge$isFake()) {
<del> player.tick();
<add> player.doTick();
<ide> return;
<ide> }
<ide> try (final CauseStackManager.StackFrame frame = PhaseTracker.getCauseStackManager().pushCauseFrame();
<ide> final PlayerTickContext context = TickPhase.Tick.PLAYER.createPhaseContext(PhaseTracker.SERVER).source(player)) {
<ide> context.buildAndSwitch();
<ide> frame.pushCause(player);
<del> player.tick();
<add> player.doTick();
<ide> }
<ide> }
<ide>
|
|
Java
|
apache-2.0
|
5bb97519fb780dbdf87e0983304e7f847ec20c12
| 0 |
raydac/netbeans-mmd-plugin,raydac/netbeans-mmd-plugin,raydac/netbeans-mmd-plugin,raydac/netbeans-mmd-plugin
|
/*
* Copyright (C) 2018 Igor Maznitsa.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
* MA 02110-1301 USA
*/
package com.igormaznitsa.sciareto.ui.editors;
import static com.igormaznitsa.meta.common.utils.Assertions.fail;
import javax.annotation.Nonnull;
import javax.swing.text.Segment;
import org.fife.ui.rsyntaxtextarea.AbstractTokenMaker;
import org.fife.ui.rsyntaxtextarea.RSyntaxUtilities;
import org.fife.ui.rsyntaxtextarea.Token;
import org.fife.ui.rsyntaxtextarea.TokenMap;
public class PlantUmlTokenMaker extends AbstractTokenMaker {
@Override
@Nonnull
public TokenMap getWordsToHighlight() {
final TokenMap tokenMap = new TokenMap();
tokenMap.put("@startsalt", Token.RESERVED_WORD);
tokenMap.put("@endsalt", Token.RESERVED_WORD);
tokenMap.put("@startgantt", Token.RESERVED_WORD);
tokenMap.put("@endgantt", Token.RESERVED_WORD);
tokenMap.put("@startlatex", Token.RESERVED_WORD);
tokenMap.put("@endlatex", Token.RESERVED_WORD);
tokenMap.put("@startmath", Token.RESERVED_WORD);
tokenMap.put("@endmath", Token.RESERVED_WORD);
tokenMap.put("@startdot", Token.RESERVED_WORD);
tokenMap.put("@enddot", Token.RESERVED_WORD);
tokenMap.put("@startuml", Token.RESERVED_WORD);
tokenMap.put("@enduml", Token.RESERVED_WORD);
tokenMap.put("@startmindmap", Token.RESERVED_WORD);
tokenMap.put("@endmindmap", Token.RESERVED_WORD);
tokenMap.put("header", Token.RESERVED_WORD);
tokenMap.put("endheader", Token.RESERVED_WORD);
tokenMap.put("legend", Token.RESERVED_WORD);
tokenMap.put("endlegend", Token.RESERVED_WORD);
tokenMap.put("scale", Token.RESERVED_WORD);
tokenMap.put("skinparam", Token.RESERVED_WORD);
tokenMap.put("title", Token.RESERVED_WORD);
tokenMap.put("usecase", Token.RESERVED_WORD);
tokenMap.put("boundary", Token.RESERVED_WORD);
tokenMap.put("caption", Token.RESERVED_WORD);
tokenMap.put("control", Token.RESERVED_WORD);
tokenMap.put("collections", Token.RESERVED_WORD);
tokenMap.put("entity", Token.RESERVED_WORD);
tokenMap.put("database", Token.RESERVED_WORD);
tokenMap.put("detach", Token.RESERVED_WORD);
tokenMap.put("participant", Token.RESERVED_WORD);
tokenMap.put("order", Token.RESERVED_WORD);
tokenMap.put("as", Token.RESERVED_WORD);
tokenMap.put("actor", Token.RESERVED_WORD);
tokenMap.put("autonumber", Token.RESERVED_WORD);
tokenMap.put("alt", Token.RESERVED_WORD);
tokenMap.put("resume", Token.RESERVED_WORD);
tokenMap.put("newpage", Token.RESERVED_WORD);
tokenMap.put("is", Token.RESERVED_WORD);
tokenMap.put("if", Token.RESERVED_WORD);
tokenMap.put("then", Token.RESERVED_WORD);
tokenMap.put("endif", Token.RESERVED_WORD);
tokenMap.put("elseif", Token.RESERVED_WORD);
tokenMap.put("repeat", Token.RESERVED_WORD);
tokenMap.put("while", Token.RESERVED_WORD);
tokenMap.put("endwhile", Token.RESERVED_WORD);
tokenMap.put("else", Token.RESERVED_WORD);
tokenMap.put("opt", Token.RESERVED_WORD);
tokenMap.put("loop", Token.RESERVED_WORD);
tokenMap.put("par", Token.RESERVED_WORD);
tokenMap.put("break", Token.RESERVED_WORD);
tokenMap.put("critical", Token.RESERVED_WORD);
tokenMap.put("group", Token.RESERVED_WORD);
tokenMap.put("note", Token.RESERVED_WORD);
tokenMap.put("end", Token.RESERVED_WORD);
tokenMap.put("over", Token.RESERVED_WORD);
tokenMap.put("top", Token.RESERVED_WORD);
tokenMap.put("bottom", Token.RESERVED_WORD);
tokenMap.put("right", Token.RESERVED_WORD);
tokenMap.put("left", Token.RESERVED_WORD);
tokenMap.put("of", Token.RESERVED_WORD);
tokenMap.put("rnote", Token.RESERVED_WORD);
tokenMap.put("hnote", Token.RESERVED_WORD);
tokenMap.put("ref", Token.RESERVED_WORD);
tokenMap.put("create", Token.RESERVED_WORD);
tokenMap.put("box", Token.RESERVED_WORD);
tokenMap.put("hide", Token.RESERVED_WORD);
tokenMap.put("footbox", Token.RESERVED_WORD);
tokenMap.put("skinparam", Token.RESERVED_WORD);
tokenMap.put("sequence", Token.RESERVED_WORD);
tokenMap.put("activate", Token.RESERVED_WORD);
tokenMap.put("deactivate", Token.RESERVED_WORD);
tokenMap.put("start", Token.RESERVED_WORD);
tokenMap.put("state", Token.RESERVED_WORD);
tokenMap.put("stop", Token.RESERVED_WORD);
tokenMap.put("file", Token.RESERVED_WORD);
tokenMap.put("folder", Token.RESERVED_WORD);
tokenMap.put("frame", Token.RESERVED_WORD);
tokenMap.put("fork", Token.RESERVED_WORD);
tokenMap.put("interface", Token.RESERVED_WORD);
tokenMap.put("class", Token.RESERVED_WORD);
tokenMap.put("namespace", Token.RESERVED_WORD);
tokenMap.put("page", Token.RESERVED_WORD);
tokenMap.put("node", Token.RESERVED_WORD);
tokenMap.put("package", Token.RESERVED_WORD);
tokenMap.put("queue", Token.RESERVED_WORD);
tokenMap.put("stack", Token.RESERVED_WORD);
tokenMap.put("rectangle", Token.RESERVED_WORD);
tokenMap.put("storage", Token.RESERVED_WORD);
tokenMap.put("card", Token.RESERVED_WORD);
tokenMap.put("cloud", Token.RESERVED_WORD);
tokenMap.put("component", Token.RESERVED_WORD);
tokenMap.put("agent", Token.RESERVED_WORD);
tokenMap.put("artifact", Token.RESERVED_WORD);
return tokenMap;
}
@Override
@Nonnull
public Token getTokenList(@Nonnull final Segment text, final int startTokenType, final int startOffset) {
resetTokenList();
final char[] array = text.array;
final int offset = text.offset;
final int count = text.count;
final int end = offset + count;
int newStartOffset = startOffset - offset;
int currentTokenStart = offset;
int currentTokenType = startTokenType;
for (int i = offset; i < end; i++) {
char c = array[i];
switch (currentTokenType) {
case Token.NULL: {
currentTokenStart = i;
switch (c) {
case '"': {
currentTokenType = Token.LITERAL_STRING_DOUBLE_QUOTE;
}
break;
case '#': {
currentTokenType = Token.LITERAL_NUMBER_HEXADECIMAL;
}
break;
case '\'': {
currentTokenType = Token.COMMENT_EOL;
}
break;
case '/': {
currentTokenType = Token.COMMENT_KEYWORD;
}
break;
default: {
if (RSyntaxUtilities.isWhitespace(c)) {
currentTokenType = Token.WHITESPACE;
} else if (RSyntaxUtilities.isDigit(c)) {
currentTokenType = Token.LITERAL_NUMBER_DECIMAL_INT;
break;
} else {
currentTokenType = Token.IDENTIFIER;
}
}
break;
}
}
break;
case Token.COMMENT_MULTILINE: {
if (c == '/' && i > offset && array[i - 1] == '\'') {
addToken(text, currentTokenStart, i, Token.COMMENT_MULTILINE, newStartOffset + currentTokenStart);
currentTokenType = Token.NULL;
}
}
break;
case Token.COMMENT_KEYWORD: {
switch (c) {
case '\'': {
currentTokenType = Token.COMMENT_MULTILINE;
}
break;
default: {
currentTokenType = Token.IDENTIFIER;
}
break;
}
}
break;
case Token.WHITESPACE: {
switch (c) {
case '"': {
addToken(text, currentTokenStart, i - 1, Token.WHITESPACE, newStartOffset + currentTokenStart);
currentTokenStart = i;
currentTokenType = Token.LITERAL_STRING_DOUBLE_QUOTE;
}
break;
case '\'': {
addToken(text, currentTokenStart, i - 1, Token.WHITESPACE, newStartOffset + currentTokenStart);
currentTokenStart = i;
currentTokenType = Token.COMMENT_EOL;
}
break;
default: {
if (!RSyntaxUtilities.isWhitespace(c)) {
addToken(text, currentTokenStart, i - 1, Token.WHITESPACE, newStartOffset + currentTokenStart);
currentTokenStart = i;
if (RSyntaxUtilities.isDigit(c)) {
currentTokenType = Token.LITERAL_NUMBER_DECIMAL_INT;
break;
} else {
currentTokenType = Token.IDENTIFIER;
}
}
}
break;
}
}
break;
case Token.LITERAL_NUMBER_HEXADECIMAL: {
switch (c) {
case '"': {
addToken(text, currentTokenStart, i - 1, Token.LITERAL_NUMBER_HEXADECIMAL, newStartOffset + currentTokenStart);
currentTokenStart = i;
currentTokenType = Token.LITERAL_STRING_DOUBLE_QUOTE;
}
break;
case '\'': {
addToken(text, currentTokenStart, i - 1, Token.LITERAL_NUMBER_HEXADECIMAL, newStartOffset + currentTokenStart);
currentTokenStart = i;
currentTokenType = Token.COMMENT_EOL;
}
break;
default: {
if (RSyntaxUtilities.isWhitespace(c)) {
addToken(text, currentTokenStart, i - 1, Token.LITERAL_NUMBER_HEXADECIMAL, newStartOffset + currentTokenStart);
currentTokenStart = i;
currentTokenType = Token.WHITESPACE;
} else if (RSyntaxUtilities.isDigit(c)) {
// Still a literal number.
} else {
// Otherwise, remember this was a number and start over.
addToken(text, currentTokenStart, i - 1, Token.LITERAL_NUMBER_HEXADECIMAL, newStartOffset + currentTokenStart);
i--;
currentTokenType = Token.NULL;
}
}
break;
}
}
break;
case Token.LITERAL_NUMBER_DECIMAL_INT: {
switch (c) {
case '"': {
addToken(text, currentTokenStart, i - 1, Token.LITERAL_NUMBER_DECIMAL_INT, newStartOffset + currentTokenStart);
currentTokenStart = i;
currentTokenType = Token.LITERAL_STRING_DOUBLE_QUOTE;
}
break;
case '\'': {
addToken(text, currentTokenStart, i - 1, Token.LITERAL_NUMBER_DECIMAL_INT, newStartOffset + currentTokenStart);
currentTokenStart = i;
currentTokenType = Token.COMMENT_EOL;
}
break;
default: {
if (RSyntaxUtilities.isWhitespace(c)) {
addToken(text, currentTokenStart, i - 1, Token.LITERAL_NUMBER_DECIMAL_INT, newStartOffset + currentTokenStart);
currentTokenStart = i;
currentTokenType = Token.WHITESPACE;
} else if (RSyntaxUtilities.isDigit(c)) {
// Still a literal number.
} else {
// Otherwise, remember this was a number and start over.
addToken(text, currentTokenStart, i - 1, Token.LITERAL_NUMBER_DECIMAL_INT, newStartOffset + currentTokenStart);
i--;
currentTokenType = Token.NULL;
}
}
break;
}
}
break;
case Token.IDENTIFIER:
case Token.RESERVED_WORD: {
if (RSyntaxUtilities.isWhitespace(c) || !RSyntaxUtilities.isLetterOrDigit(c)) {
final int value = wordsToHighlight.get(text, currentTokenStart, i - 1);
if (value < 0) {
addToken(text, currentTokenStart, i - 1, Token.IDENTIFIER, newStartOffset + currentTokenStart);
} else {
addToken(text, currentTokenStart, i - 1, value, newStartOffset + currentTokenStart);
}
currentTokenStart = i;
currentTokenType = RSyntaxUtilities.isWhitespace(c) ? Token.WHITESPACE : Token.IDENTIFIER;
}
}
break;
case Token.COMMENT_EOL: {
if (c == '\n') {
i = end - 1;
addToken(text, currentTokenStart, i, currentTokenType, newStartOffset + currentTokenStart);
// We need to set token type to null so at the bottom we don't add one more token.
currentTokenType = Token.NULL;
}
}
break;
case Token.LITERAL_STRING_DOUBLE_QUOTE: {
if (c == '"') {
addToken(text, currentTokenStart, i, Token.LITERAL_STRING_DOUBLE_QUOTE, newStartOffset + currentTokenStart);
currentTokenType = Token.NULL;
}
}
break;
default: {
throw fail("Should never hapen, state : " + currentTokenType);
}
}
}
switch (currentTokenType) {
// Remember what token type to begin the next line with.
case Token.COMMENT_MULTILINE:
case Token.LITERAL_STRING_DOUBLE_QUOTE: {
addToken(text, currentTokenStart, end - 1, currentTokenType, newStartOffset + currentTokenStart);
}
break;
case Token.NULL: {
addNullToken();
}
break;
case Token.RESERVED_WORD:
case Token.IDENTIFIER: {
final int value = wordsToHighlight.get(text, currentTokenStart, end - 1);
if (value < 0) {
addToken(text, currentTokenStart, end - 1, Token.IDENTIFIER, newStartOffset + currentTokenStart);
} else {
addToken(text, currentTokenStart, end - 1, value, newStartOffset + currentTokenStart);
}
addNullToken();
}
break;
// All other token types don't continue to the next line...
default: {
addToken(text, currentTokenStart, end - 1, currentTokenType, newStartOffset + currentTokenStart);
addNullToken();
}
break;
}
return this.firstToken;
}
private static boolean isAllowedCharReservedWord(final char c) {
return RSyntaxUtilities.isLetterOrDigit(c) || c == '<' || c == '>' || c == '/';
}
}
|
mind-map/scia-reto/src/main/java/com/igormaznitsa/sciareto/ui/editors/PlantUmlTokenMaker.java
|
/*
* Copyright (C) 2018 Igor Maznitsa.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
* MA 02110-1301 USA
*/
package com.igormaznitsa.sciareto.ui.editors;
import static com.igormaznitsa.meta.common.utils.Assertions.fail;
import javax.annotation.Nonnull;
import javax.swing.text.Segment;
import org.fife.ui.rsyntaxtextarea.AbstractTokenMaker;
import org.fife.ui.rsyntaxtextarea.RSyntaxUtilities;
import org.fife.ui.rsyntaxtextarea.Token;
import org.fife.ui.rsyntaxtextarea.TokenMap;
public class PlantUmlTokenMaker extends AbstractTokenMaker {
@Override
@Nonnull
public TokenMap getWordsToHighlight() {
final TokenMap tokenMap = new TokenMap();
tokenMap.put("@startsalt", Token.RESERVED_WORD);
tokenMap.put("@endsalt", Token.RESERVED_WORD);
tokenMap.put("@startgantt", Token.RESERVED_WORD);
tokenMap.put("@endgantt", Token.RESERVED_WORD);
tokenMap.put("@startlatex", Token.RESERVED_WORD);
tokenMap.put("@endlatex", Token.RESERVED_WORD);
tokenMap.put("@startmath", Token.RESERVED_WORD);
tokenMap.put("@endmath", Token.RESERVED_WORD);
tokenMap.put("@startdot", Token.RESERVED_WORD);
tokenMap.put("@enddot", Token.RESERVED_WORD);
tokenMap.put("@startuml", Token.RESERVED_WORD);
tokenMap.put("@startuml", Token.RESERVED_WORD);
tokenMap.put("@enduml", Token.RESERVED_WORD);
tokenMap.put("header", Token.RESERVED_WORD);
tokenMap.put("endheader", Token.RESERVED_WORD);
tokenMap.put("scale", Token.RESERVED_WORD);
tokenMap.put("skinparam", Token.RESERVED_WORD);
tokenMap.put("title", Token.RESERVED_WORD);
tokenMap.put("usecase", Token.RESERVED_WORD);
tokenMap.put("boundary", Token.RESERVED_WORD);
tokenMap.put("caption", Token.RESERVED_WORD);
tokenMap.put("control", Token.RESERVED_WORD);
tokenMap.put("collections", Token.RESERVED_WORD);
tokenMap.put("entity", Token.RESERVED_WORD);
tokenMap.put("database", Token.RESERVED_WORD);
tokenMap.put("detach", Token.RESERVED_WORD);
tokenMap.put("participant", Token.RESERVED_WORD);
tokenMap.put("order", Token.RESERVED_WORD);
tokenMap.put("as", Token.RESERVED_WORD);
tokenMap.put("actor", Token.RESERVED_WORD);
tokenMap.put("autonumber", Token.RESERVED_WORD);
tokenMap.put("alt", Token.RESERVED_WORD);
tokenMap.put("resume", Token.RESERVED_WORD);
tokenMap.put("newpage", Token.RESERVED_WORD);
tokenMap.put("is", Token.RESERVED_WORD);
tokenMap.put("if", Token.RESERVED_WORD);
tokenMap.put("then", Token.RESERVED_WORD);
tokenMap.put("endif", Token.RESERVED_WORD);
tokenMap.put("elseif", Token.RESERVED_WORD);
tokenMap.put("repeat", Token.RESERVED_WORD);
tokenMap.put("while", Token.RESERVED_WORD);
tokenMap.put("endwhile", Token.RESERVED_WORD);
tokenMap.put("else", Token.RESERVED_WORD);
tokenMap.put("opt", Token.RESERVED_WORD);
tokenMap.put("loop", Token.RESERVED_WORD);
tokenMap.put("par", Token.RESERVED_WORD);
tokenMap.put("break", Token.RESERVED_WORD);
tokenMap.put("critical", Token.RESERVED_WORD);
tokenMap.put("group", Token.RESERVED_WORD);
tokenMap.put("note", Token.RESERVED_WORD);
tokenMap.put("end", Token.RESERVED_WORD);
tokenMap.put("over", Token.RESERVED_WORD);
tokenMap.put("top", Token.RESERVED_WORD);
tokenMap.put("bottom", Token.RESERVED_WORD);
tokenMap.put("right", Token.RESERVED_WORD);
tokenMap.put("left", Token.RESERVED_WORD);
tokenMap.put("of", Token.RESERVED_WORD);
tokenMap.put("rnote", Token.RESERVED_WORD);
tokenMap.put("hnote", Token.RESERVED_WORD);
tokenMap.put("ref", Token.RESERVED_WORD);
tokenMap.put("create", Token.RESERVED_WORD);
tokenMap.put("box", Token.RESERVED_WORD);
tokenMap.put("hide", Token.RESERVED_WORD);
tokenMap.put("footbox", Token.RESERVED_WORD);
tokenMap.put("skinparam", Token.RESERVED_WORD);
tokenMap.put("sequence", Token.RESERVED_WORD);
tokenMap.put("activate", Token.RESERVED_WORD);
tokenMap.put("deactivate", Token.RESERVED_WORD);
tokenMap.put("start", Token.RESERVED_WORD);
tokenMap.put("state", Token.RESERVED_WORD);
tokenMap.put("stop", Token.RESERVED_WORD);
tokenMap.put("file", Token.RESERVED_WORD);
tokenMap.put("folder", Token.RESERVED_WORD);
tokenMap.put("frame", Token.RESERVED_WORD);
tokenMap.put("fork", Token.RESERVED_WORD);
tokenMap.put("interface", Token.RESERVED_WORD);
tokenMap.put("class", Token.RESERVED_WORD);
tokenMap.put("namespace", Token.RESERVED_WORD);
tokenMap.put("page", Token.RESERVED_WORD);
tokenMap.put("node", Token.RESERVED_WORD);
tokenMap.put("package", Token.RESERVED_WORD);
tokenMap.put("queue", Token.RESERVED_WORD);
tokenMap.put("stack", Token.RESERVED_WORD);
tokenMap.put("rectangle", Token.RESERVED_WORD);
tokenMap.put("storage", Token.RESERVED_WORD);
tokenMap.put("card", Token.RESERVED_WORD);
tokenMap.put("cloud", Token.RESERVED_WORD);
tokenMap.put("component", Token.RESERVED_WORD);
tokenMap.put("agent", Token.RESERVED_WORD);
tokenMap.put("artifact", Token.RESERVED_WORD);
return tokenMap;
}
@Override
@Nonnull
public Token getTokenList(@Nonnull final Segment text, final int startTokenType, final int startOffset) {
resetTokenList();
final char[] array = text.array;
final int offset = text.offset;
final int count = text.count;
final int end = offset + count;
int newStartOffset = startOffset - offset;
int currentTokenStart = offset;
int currentTokenType = startTokenType;
for (int i = offset; i < end; i++) {
char c = array[i];
switch (currentTokenType) {
case Token.NULL: {
currentTokenStart = i;
switch (c) {
case '"': {
currentTokenType = Token.LITERAL_STRING_DOUBLE_QUOTE;
}
break;
case '#': {
currentTokenType = Token.LITERAL_NUMBER_HEXADECIMAL;
}
break;
case '\'': {
currentTokenType = Token.COMMENT_EOL;
}
break;
case '/': {
currentTokenType = Token.COMMENT_KEYWORD;
}
break;
default: {
if (RSyntaxUtilities.isWhitespace(c)) {
currentTokenType = Token.WHITESPACE;
} else if (RSyntaxUtilities.isDigit(c)) {
currentTokenType = Token.LITERAL_NUMBER_DECIMAL_INT;
break;
} else {
currentTokenType = Token.IDENTIFIER;
}
}
break;
}
}
break;
case Token.COMMENT_MULTILINE: {
if (c == '/' && i > offset && array[i - 1] == '\'') {
addToken(text, currentTokenStart, i, Token.COMMENT_MULTILINE, newStartOffset + currentTokenStart);
currentTokenType = Token.NULL;
}
}
break;
case Token.COMMENT_KEYWORD: {
switch (c) {
case '\'': {
currentTokenType = Token.COMMENT_MULTILINE;
}
break;
default: {
currentTokenType = Token.IDENTIFIER;
}
break;
}
}
break;
case Token.WHITESPACE: {
switch (c) {
case '"': {
addToken(text, currentTokenStart, i - 1, Token.WHITESPACE, newStartOffset + currentTokenStart);
currentTokenStart = i;
currentTokenType = Token.LITERAL_STRING_DOUBLE_QUOTE;
}
break;
case '\'': {
addToken(text, currentTokenStart, i - 1, Token.WHITESPACE, newStartOffset + currentTokenStart);
currentTokenStart = i;
currentTokenType = Token.COMMENT_EOL;
}
break;
default: {
if (!RSyntaxUtilities.isWhitespace(c)) {
addToken(text, currentTokenStart, i - 1, Token.WHITESPACE, newStartOffset + currentTokenStart);
currentTokenStart = i;
if (RSyntaxUtilities.isDigit(c)) {
currentTokenType = Token.LITERAL_NUMBER_DECIMAL_INT;
break;
} else {
currentTokenType = Token.IDENTIFIER;
}
}
}
break;
}
}
break;
case Token.LITERAL_NUMBER_HEXADECIMAL: {
switch (c) {
case '"': {
addToken(text, currentTokenStart, i - 1, Token.LITERAL_NUMBER_HEXADECIMAL, newStartOffset + currentTokenStart);
currentTokenStart = i;
currentTokenType = Token.LITERAL_STRING_DOUBLE_QUOTE;
}
break;
case '\'': {
addToken(text, currentTokenStart, i - 1, Token.LITERAL_NUMBER_HEXADECIMAL, newStartOffset + currentTokenStart);
currentTokenStart = i;
currentTokenType = Token.COMMENT_EOL;
}
break;
default: {
if (RSyntaxUtilities.isWhitespace(c)) {
addToken(text, currentTokenStart, i - 1, Token.LITERAL_NUMBER_HEXADECIMAL, newStartOffset + currentTokenStart);
currentTokenStart = i;
currentTokenType = Token.WHITESPACE;
} else if (RSyntaxUtilities.isDigit(c)) {
// Still a literal number.
} else {
// Otherwise, remember this was a number and start over.
addToken(text, currentTokenStart, i - 1, Token.LITERAL_NUMBER_HEXADECIMAL, newStartOffset + currentTokenStart);
i--;
currentTokenType = Token.NULL;
}
}
break;
}
}
break;
case Token.LITERAL_NUMBER_DECIMAL_INT: {
switch (c) {
case '"': {
addToken(text, currentTokenStart, i - 1, Token.LITERAL_NUMBER_DECIMAL_INT, newStartOffset + currentTokenStart);
currentTokenStart = i;
currentTokenType = Token.LITERAL_STRING_DOUBLE_QUOTE;
}
break;
case '\'': {
addToken(text, currentTokenStart, i - 1, Token.LITERAL_NUMBER_DECIMAL_INT, newStartOffset + currentTokenStart);
currentTokenStart = i;
currentTokenType = Token.COMMENT_EOL;
}
break;
default: {
if (RSyntaxUtilities.isWhitespace(c)) {
addToken(text, currentTokenStart, i - 1, Token.LITERAL_NUMBER_DECIMAL_INT, newStartOffset + currentTokenStart);
currentTokenStart = i;
currentTokenType = Token.WHITESPACE;
} else if (RSyntaxUtilities.isDigit(c)) {
// Still a literal number.
} else {
// Otherwise, remember this was a number and start over.
addToken(text, currentTokenStart, i - 1, Token.LITERAL_NUMBER_DECIMAL_INT, newStartOffset + currentTokenStart);
i--;
currentTokenType = Token.NULL;
}
}
break;
}
}
break;
case Token.IDENTIFIER:
case Token.RESERVED_WORD: {
if (RSyntaxUtilities.isWhitespace(c) || !RSyntaxUtilities.isLetterOrDigit(c)) {
final int value = wordsToHighlight.get(text, currentTokenStart, i - 1);
if (value < 0) {
addToken(text, currentTokenStart, i - 1, Token.IDENTIFIER, newStartOffset + currentTokenStart);
} else {
addToken(text, currentTokenStart, i - 1, value, newStartOffset + currentTokenStart);
}
currentTokenStart = i;
currentTokenType = RSyntaxUtilities.isWhitespace(c) ? Token.WHITESPACE : Token.IDENTIFIER;
}
}
break;
case Token.COMMENT_EOL: {
if (c == '\n') {
i = end - 1;
addToken(text, currentTokenStart, i, currentTokenType, newStartOffset + currentTokenStart);
// We need to set token type to null so at the bottom we don't add one more token.
currentTokenType = Token.NULL;
}
}
break;
case Token.LITERAL_STRING_DOUBLE_QUOTE: {
if (c == '"') {
addToken(text, currentTokenStart, i, Token.LITERAL_STRING_DOUBLE_QUOTE, newStartOffset + currentTokenStart);
currentTokenType = Token.NULL;
}
}
break;
default: {
throw fail("Should never hapen, state : " + currentTokenType);
}
}
}
switch (currentTokenType) {
// Remember what token type to begin the next line with.
case Token.COMMENT_MULTILINE:
case Token.LITERAL_STRING_DOUBLE_QUOTE: {
addToken(text, currentTokenStart, end - 1, currentTokenType, newStartOffset + currentTokenStart);
}
break;
case Token.NULL: {
addNullToken();
}
break;
case Token.RESERVED_WORD:
case Token.IDENTIFIER: {
final int value = wordsToHighlight.get(text, currentTokenStart, end - 1);
if (value < 0) {
addToken(text, currentTokenStart, end - 1, Token.IDENTIFIER, newStartOffset + currentTokenStart);
} else {
addToken(text, currentTokenStart, end - 1, value, newStartOffset + currentTokenStart);
}
addNullToken();
}
break;
// All other token types don't continue to the next line...
default: {
addToken(text, currentTokenStart, end - 1, currentTokenType, newStartOffset + currentTokenStart);
addNullToken();
}
break;
}
return this.firstToken;
}
private static boolean isAllowedCharReservedWord(final char c) {
return RSyntaxUtilities.isLetterOrDigit(c) || c == '<' || c == '>' || c == '/';
}
}
|
added keywords for plantuml mindmap
|
mind-map/scia-reto/src/main/java/com/igormaznitsa/sciareto/ui/editors/PlantUmlTokenMaker.java
|
added keywords for plantuml mindmap
|
<ide><path>ind-map/scia-reto/src/main/java/com/igormaznitsa/sciareto/ui/editors/PlantUmlTokenMaker.java
<ide> tokenMap.put("@startdot", Token.RESERVED_WORD);
<ide> tokenMap.put("@enddot", Token.RESERVED_WORD);
<ide> tokenMap.put("@startuml", Token.RESERVED_WORD);
<del> tokenMap.put("@startuml", Token.RESERVED_WORD);
<ide> tokenMap.put("@enduml", Token.RESERVED_WORD);
<add> tokenMap.put("@startmindmap", Token.RESERVED_WORD);
<add> tokenMap.put("@endmindmap", Token.RESERVED_WORD);
<ide> tokenMap.put("header", Token.RESERVED_WORD);
<ide> tokenMap.put("endheader", Token.RESERVED_WORD);
<add> tokenMap.put("legend", Token.RESERVED_WORD);
<add> tokenMap.put("endlegend", Token.RESERVED_WORD);
<ide> tokenMap.put("scale", Token.RESERVED_WORD);
<ide> tokenMap.put("skinparam", Token.RESERVED_WORD);
<ide> tokenMap.put("title", Token.RESERVED_WORD);
|
|
Java
|
apache-2.0
|
d93ed44243e0c50844d79fc6186217593f9ca878
| 0 |
bozimmerman/CoffeeMud,Tycheo/coffeemud,oriontribunal/CoffeeMud,bozimmerman/CoffeeMud,oriontribunal/CoffeeMud,sfunk1x/CoffeeMud,Tycheo/coffeemud,MaxRau/CoffeeMud,MaxRau/CoffeeMud,oriontribunal/CoffeeMud,Tycheo/coffeemud,MaxRau/CoffeeMud,bozimmerman/CoffeeMud,sfunk1x/CoffeeMud,MaxRau/CoffeeMud,bozimmerman/CoffeeMud,oriontribunal/CoffeeMud,Tycheo/coffeemud,sfunk1x/CoffeeMud,sfunk1x/CoffeeMud
|
package com.planet_ink.coffee_mud.WebMacros.grinder;
import com.planet_ink.coffee_mud.WebMacros.RoomData;
import com.planet_ink.coffee_mud.core.interfaces.*;
import com.planet_ink.coffee_mud.core.*;
import com.planet_ink.coffee_mud.Abilities.interfaces.*;
import com.planet_ink.coffee_mud.Areas.interfaces.*;
import com.planet_ink.coffee_mud.Behaviors.interfaces.*;
import com.planet_ink.coffee_mud.CharClasses.interfaces.*;
import com.planet_ink.coffee_mud.Libraries.interfaces.*;
import com.planet_ink.coffee_mud.Common.interfaces.*;
import com.planet_ink.coffee_mud.Exits.interfaces.*;
import com.planet_ink.coffee_mud.Items.interfaces.*;
import com.planet_ink.coffee_mud.Locales.interfaces.*;
import com.planet_ink.coffee_mud.MOBS.interfaces.*;
import com.planet_ink.coffee_mud.Races.interfaces.*;
import java.util.*;
/*
Copyright 2000-2006 Bo Zimmerman
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
public class GrinderRooms
{
public static void happilyAddMob(MOB M, Room R)
{
M.setStartRoom(R);
M.setLocation(R);
M.envStats().setRejuv(5000);
M.recoverCharStats();
M.recoverEnvStats();
M.recoverMaxState();
M.resetToMaxState();
M.bringToLife(R,true);
R.recoverRoomStats();
}
public static void happilyAddItem(Item I, Room R)
{
if(I.subjectToWearAndTear())
I.setUsesRemaining(100);
I.recoverEnvStats();
R.addItem(I);
R.recoverRoomStats();
}
public static String editRoom(ExternalHTTPRequests httpReq, Hashtable parms, MOB whom, Room R)
{
if(R==null) return "Old Room not defined!";
boolean redoAllMyDamnRooms=false;
Room oldR=R;
// class!
String className=httpReq.getRequestParameter("CLASSES");
if((className==null)||(className.length()==0))
return "Please select a class type for this room.";
synchronized(("SYNC"+R.roomID()).intern())
{
R=CMLib.map().getRoom(R);
CMLib.map().resetRoom(R);
Room copyRoom=(Room)R.copyOf();
if(!className.equalsIgnoreCase(CMClass.className(R)))
{
R=CMClass.getLocale(className);
if(R==null)
return "The class you chose does not exist. Choose another.";
for(int a=oldR.numEffects()-1;a>=0;a--)
{
Ability A=oldR.fetchEffect(a);
if(A!=null)
{
A.unInvoke();
oldR.delEffect(A);
}
}
CMLib.threads().deleteTick(oldR,-1);
R.setRoomID(oldR.roomID());
R.setArea(oldR.getArea());
for(int d=0;d<R.rawDoors().length;d++)
R.rawDoors()[d]=oldR.rawDoors()[d];
for(int d=0;d<R.rawExits().length;d++)
R.rawExits()[d]=oldR.rawExits()[d];
redoAllMyDamnRooms=true;
}
// name
String name=httpReq.getRequestParameter("NAME");
if((name==null)||(name.length()==0))
return "Please enter a name for this room.";
R.setDisplayText(name);
// description
String desc=httpReq.getRequestParameter("DESCRIPTION");
if(desc==null)desc="";
R.setDescription(desc);
// image
String img=httpReq.getRequestParameter("IMAGE");
if(img==null)img="";
R.setImage(img);
if(R instanceof GridLocale)
{
String x=httpReq.getRequestParameter("XGRID");
if(x==null)x="";
((GridLocale)R).setXGridSize(CMath.s_int(x));
String y=httpReq.getRequestParameter("YGRID");
if(y==null)y="";
((GridLocale)R).setYGridSize(CMath.s_int(y));
((GridLocale)R).clearGrid(null);
}
String err=GrinderAreas.doAffectsNBehavs(R,httpReq,parms);
if(err.length()>0) return err;
// here's where you resolve items and mobs
Vector allmobs=new Vector();
int skip=0;
while(oldR.numInhabitants()>(skip))
{
MOB M=oldR.fetchInhabitant(skip);
if(M.savable())
{
if(!allmobs.contains(M))
allmobs.addElement(M);
oldR.delInhabitant(M);
}
else
if(oldR!=R)
{
oldR.delInhabitant(M);
R.bringMobHere(M,true);
}
else
skip++;
}
Vector allitems=new Vector();
while(oldR.numItems()>0)
{
Item I=oldR.fetchItem(0);
if(!allitems.contains(I))
allitems.addElement(I);
oldR.delItem(I);
}
if(httpReq.isRequestParameter("MOB1"))
{
for(int i=1;;i++)
{
String MATCHING=httpReq.getRequestParameter("MOB"+i);
if(MATCHING==null)
break;
else
if(RoomData.isAllNum(MATCHING))
{
MOB M=RoomData.getMOBFromCode(allmobs,MATCHING);
if(M!=null) happilyAddMob(M,R);
else
{
StringBuffer str=new StringBuffer("!!!No MOB?!!!!");
str.append(" Got: "+MATCHING);
}
}
else
if(MATCHING.indexOf("@")>0)
{
for(int m=0;m<RoomData.mobs.size();m++)
{
MOB M2=(MOB)RoomData.mobs.elementAt(m);
if(MATCHING.equals(""+M2))
{
happilyAddMob((MOB)M2.copyOf(),R);
break;
}
}
}
else
for(Enumeration m=CMClass.mobTypes();m.hasMoreElements();)
{
MOB M2=(MOB)m.nextElement();
if((CMClass.className(M2).equals(MATCHING)))
{
happilyAddMob((MOB)M2.copyOf(),R);
break;
}
}
}
}
else
return "No MOB Data!";
if(httpReq.isRequestParameter("ITEM1"))
{
for(int i=1;;i++)
{
String MATCHING=httpReq.getRequestParameter("ITEM"+i);
if(MATCHING==null)
break;
Item I2=RoomData.getItemFromAnywhere(allitems,MATCHING);
if(I2!=null)
{
if(RoomData.isAllNum(MATCHING))
happilyAddItem(I2,R);
else
happilyAddItem((Item)I2.copyOf(),R);
}
}
}
else
return "No Item Data!";
for(int i=0;i<allitems.size();i++)
{
Item I=(Item)allitems.elementAt(i);
if(!R.isContent(I))
I.destroy();
}
for(int i=0;i<R.numItems();i++)
{
Item I=R.fetchItem(i);
if((I.container()!=null)&&(!R.isContent(I.container())))
I.setContainer(null);
}
for(int m=0;m<allmobs.size();m++)
{
MOB M=(MOB)allmobs.elementAt(m);
if(!R.isInhabitant(M))
M.destroy();
}
if(redoAllMyDamnRooms)
{
try
{
for(Enumeration r=CMLib.map().rooms();r.hasMoreElements();)
{
Room R2=(Room)r.nextElement();
for(int d=0;d<R2.rawDoors().length;d++)
if(R2.rawDoors()[d]==oldR)
{
R2.rawDoors()[d]=R;
if(R2 instanceof GridLocale)
((GridLocale)R2).buildGrid();
}
}
}catch(NoSuchElementException e){}
try
{
for(Enumeration e=CMLib.map().players();e.hasMoreElements();)
{
MOB M=(MOB)e.nextElement();
if(M.getStartRoom()==oldR)
M.setStartRoom(R);
else
if(M.location()==oldR)
M.setLocation(R);
}
}catch(NoSuchElementException e){}
}
R.getArea().fillInAreaRoom(R);
CMLib.database().DBUpdateRoom(R);
CMLib.database().DBUpdateMOBs(R);
CMLib.database().DBUpdateItems(R);
R.startItemRejuv();
if(oldR!=R){ oldR.destroy(); R.getArea().addProperRoom(R);}
if(!copyRoom.sameAs(R))
Log.sysOut("Grinder",whom.Name()+" modified room "+R.roomID()+".");
}
return "";
}
public static String delRoom(Room R)
{
CMLib.map().obliterateRoom(R);
return "";
}
public static Room createLonelyRoom(Area A, Room linkTo, int dir, boolean copyThisOne)
{
Room newRoom=null;
String newRoomID=A.getNewRoomID(linkTo,dir);
if(newRoomID.length()==0) return null;
if((copyThisOne)&&(linkTo!=null))
{
CMLib.map().resetRoom(linkTo);
newRoom=(Room)linkTo.copyOf();
for(int d=0;d<Directions.NUM_DIRECTIONS;d++)
{
newRoom.rawDoors()[d]=null;
newRoom.rawExits()[d]=null;
}
}
else
{
newRoom=CMClass.getLocale("StdRoom");
newRoom.setDisplayText("Title of "+newRoomID);
newRoom.setDescription("Description of "+newRoomID);
}
newRoom.setRoomID(newRoomID);
newRoom.setArea(A);
if(linkTo!=null)
{
newRoom.rawDoors()[Directions.getOpDirectionCode(dir)]=linkTo;
newRoom.rawExits()[Directions.getOpDirectionCode(dir)]=CMClass.getExit("StdOpenDoorway");
}
CMLib.database().DBCreateRoom(newRoom,CMClass.className(newRoom));
CMLib.database().DBUpdateExits(newRoom);
if(newRoom.numInhabitants()>0)
CMLib.database().DBUpdateMOBs(newRoom);
if(newRoom.numItems()>0)
CMLib.database().DBUpdateItems(newRoom);
newRoom.getArea().fillInAreaRoom(newRoom);
return newRoom;
}
public static String createRoom(Room R, int dir, boolean copyThisOne)
{
R.clearSky();
if(R instanceof GridLocale)
((GridLocale)R).clearGrid(null);
Room newRoom=createLonelyRoom(R.getArea(),R,dir,copyThisOne);
R.rawDoors()[dir]=newRoom;
if(R.rawExits()[dir]==null)
R.rawExits()[dir]=CMClass.getExit("StdOpenDoorway");
CMLib.database().DBUpdateExits(R);
R.getArea().fillInAreaRoom(R);
return "";
}
public static Room createGridRoom(Area A, String roomID, Room copyThisOne, RoomnumberSet deferredExitSaves, boolean autoLink)
{
Room R=null;
if(copyThisOne!=null)
{
R=(Room)copyThisOne.copyOf();
R.setRoomID(roomID);
}
else
{
R=CMClass.getLocale("StdRoom");
R.setRoomID(roomID);
R.setDisplayText("Title of "+R.roomID());
R.setDescription("Description of "+R.roomID());
}
R.setArea(A);
CMLib.database().DBCreateRoom(R,CMClass.className(R));
if(R.numInhabitants()>0)
CMLib.database().DBUpdateMOBs(R);
if(R.numItems()>0)
CMLib.database().DBUpdateItems(R);
R.getArea().fillInAreaRoom(R);
if((autoLink)&&(R.getArea() instanceof GridZones))
{
GridZones GZ=(GridZones)R.getArea();
int x=GZ.getGridChildX(R);
int y=GZ.getGridChildY(R);
boolean resaveMyExits=false;
if((x>=0)&&(y>=0))
{
Room R2=null;
for(int d=0;d<Directions.NUM_DIRECTIONS;d++)
{
int[] xy=Directions.adjustXYByDirections(x,y,d);
R2=GZ.getGridChild(xy[0],xy[1]);
if((R2!=null)&&(R!=R2))
{
int opD=Directions.getOpDirectionCode(d);
if(R2.rawDoors()[opD]==null)
{
R2.rawDoors()[opD]=R;
if(R2.rawExits()[opD]==null)
R2.rawExits()[opD]=CMClass.getExit("StdOpenDoorway");
if(deferredExitSaves!=null)
{
if(!deferredExitSaves.contains(R2.roomID()))
deferredExitSaves.add(R2.roomID());
}
else
CMLib.database().DBUpdateExits(R2);
}
if(R.rawDoors()[d]==null)
{
R.rawDoors()[d]=R2;
if(R.rawExits()[d]==null)
R.rawExits()[d]=CMClass.getExit("StdOpenDoorway");
resaveMyExits=true;
}
}
}
}
if(resaveMyExits)
{
if(deferredExitSaves!=null)
{
if(!deferredExitSaves.contains(R.roomID()))
deferredExitSaves.add(R.roomID());
}
else
{
CMLib.database().DBUpdateExits(R);
R.getArea().fillInAreaRoom(R);
}
}
}
return R;
}
}
|
com/planet_ink/coffee_mud/WebMacros/grinder/GrinderRooms.java
|
package com.planet_ink.coffee_mud.WebMacros.grinder;
import com.planet_ink.coffee_mud.WebMacros.RoomData;
import com.planet_ink.coffee_mud.core.interfaces.*;
import com.planet_ink.coffee_mud.core.*;
import com.planet_ink.coffee_mud.Abilities.interfaces.*;
import com.planet_ink.coffee_mud.Areas.interfaces.*;
import com.planet_ink.coffee_mud.Behaviors.interfaces.*;
import com.planet_ink.coffee_mud.CharClasses.interfaces.*;
import com.planet_ink.coffee_mud.Libraries.interfaces.*;
import com.planet_ink.coffee_mud.Common.interfaces.*;
import com.planet_ink.coffee_mud.Exits.interfaces.*;
import com.planet_ink.coffee_mud.Items.interfaces.*;
import com.planet_ink.coffee_mud.Locales.interfaces.*;
import com.planet_ink.coffee_mud.MOBS.interfaces.*;
import com.planet_ink.coffee_mud.Races.interfaces.*;
import java.util.*;
/*
Copyright 2000-2006 Bo Zimmerman
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
public class GrinderRooms
{
public static void happilyAddMob(MOB M, Room R)
{
M.setStartRoom(R);
M.setLocation(R);
M.envStats().setRejuv(5000);
M.recoverCharStats();
M.recoverEnvStats();
M.recoverMaxState();
M.resetToMaxState();
M.bringToLife(R,true);
R.recoverRoomStats();
}
public static void happilyAddItem(Item I, Room R)
{
if(I.subjectToWearAndTear())
I.setUsesRemaining(100);
I.recoverEnvStats();
R.addItem(I);
R.recoverRoomStats();
}
public static String editRoom(ExternalHTTPRequests httpReq, Hashtable parms, MOB whom, Room R)
{
if(R==null) return "Old Room not defined!";
boolean redoAllMyDamnRooms=false;
Room oldR=R;
// class!
String className=httpReq.getRequestParameter("CLASSES");
if((className==null)||(className.length()==0))
return "Please select a class type for this room.";
synchronized(("SYNC"+R.roomID()).intern())
{
R=CMLib.map().getRoom(R);
CMLib.map().resetRoom(R);
Room copyRoom=(Room)R.copyOf();
if(!className.equalsIgnoreCase(CMClass.className(R)))
{
R=CMClass.getLocale(className);
if(R==null)
return "The class you chose does not exist. Choose another.";
for(int a=oldR.numEffects()-1;a>=0;a--)
{
Ability A=oldR.fetchEffect(a);
if(A!=null)
{
A.unInvoke();
oldR.delEffect(A);
}
}
CMLib.threads().deleteTick(oldR,-1);
R.setRoomID(oldR.roomID());
R.setArea(oldR.getArea());
for(int d=0;d<R.rawDoors().length;d++)
R.rawDoors()[d]=oldR.rawDoors()[d];
for(int d=0;d<R.rawExits().length;d++)
R.rawExits()[d]=oldR.rawExits()[d];
redoAllMyDamnRooms=true;
}
// name
String name=httpReq.getRequestParameter("NAME");
if((name==null)||(name.length()==0))
return "Please enter a name for this room.";
R.setDisplayText(name);
// description
String desc=httpReq.getRequestParameter("DESCRIPTION");
if(desc==null)desc="";
R.setDescription(desc);
// image
String img=httpReq.getRequestParameter("IMAGE");
if(img==null)img="";
R.setImage(img);
if(R instanceof GridLocale)
{
String x=httpReq.getRequestParameter("XGRID");
if(x==null)x="";
((GridLocale)R).setXGridSize(CMath.s_int(x));
String y=httpReq.getRequestParameter("YGRID");
if(y==null)y="";
((GridLocale)R).setYGridSize(CMath.s_int(y));
((GridLocale)R).clearGrid(null);
}
String err=GrinderAreas.doAffectsNBehavs(R,httpReq,parms);
if(err.length()>0) return err;
// here's where you resolve items and mobs
Vector allmobs=new Vector();
int skip=0;
while(oldR.numInhabitants()>(skip))
{
MOB M=oldR.fetchInhabitant(skip);
if(M.savable())
{
if(!allmobs.contains(M))
allmobs.addElement(M);
oldR.delInhabitant(M);
}
else
if(oldR!=R)
{
oldR.delInhabitant(M);
R.bringMobHere(M,true);
}
else
skip++;
}
Vector allitems=new Vector();
while(oldR.numItems()>0)
{
Item I=oldR.fetchItem(0);
if(!allitems.contains(I))
allitems.addElement(I);
oldR.delItem(I);
}
if(httpReq.isRequestParameter("MOB1"))
{
for(int i=1;;i++)
{
String MATCHING=httpReq.getRequestParameter("MOB"+i);
if(MATCHING==null)
break;
else
if(RoomData.isAllNum(MATCHING))
{
MOB M=RoomData.getMOBFromCode(allmobs,MATCHING);
if(M!=null) happilyAddMob(M,R);
else
{
StringBuffer str=new StringBuffer("!!!No MOB?!!!!");
str.append(" Got: "+MATCHING);
}
}
else
if(MATCHING.indexOf("@")>0)
{
for(int m=0;m<RoomData.mobs.size();m++)
{
MOB M2=(MOB)RoomData.mobs.elementAt(m);
if(MATCHING.equals(""+M2))
{
happilyAddMob((MOB)M2.copyOf(),R);
break;
}
}
}
else
for(Enumeration m=CMClass.mobTypes();m.hasMoreElements();)
{
MOB M2=(MOB)m.nextElement();
if((CMClass.className(M2).equals(MATCHING)))
{
happilyAddMob((MOB)M2.copyOf(),R);
break;
}
}
}
}
else
return "No MOB Data!";
if(httpReq.isRequestParameter("ITEM1"))
{
for(int i=1;;i++)
{
String MATCHING=httpReq.getRequestParameter("ITEM"+i);
if(MATCHING==null)
break;
Item I2=RoomData.getItemFromAnywhere(allitems,MATCHING);
if(I2!=null)
{
if(RoomData.isAllNum(MATCHING))
happilyAddItem(I2,R);
else
happilyAddItem((Item)I2.copyOf(),R);
}
}
}
else
return "No Item Data!";
for(int i=0;i<allitems.size();i++)
{
Item I=(Item)allitems.elementAt(i);
if(!R.isContent(I))
I.destroy();
}
for(int i=0;i<R.numItems();i++)
{
Item I=R.fetchItem(i);
if((I.container()!=null)&&(!R.isContent(I.container())))
I.setContainer(null);
}
for(int m=0;m<allmobs.size();m++)
{
MOB M=(MOB)allmobs.elementAt(m);
if(!R.isInhabitant(M))
M.destroy();
}
if(redoAllMyDamnRooms)
{
try
{
for(Enumeration r=CMLib.map().rooms();r.hasMoreElements();)
{
Room R2=(Room)r.nextElement();
for(int d=0;d<R2.rawDoors().length;d++)
if(R2.rawDoors()[d]==oldR)
{
R2.rawDoors()[d]=R;
if(R2 instanceof GridLocale)
((GridLocale)R2).buildGrid();
}
}
}catch(NoSuchElementException e){}
try
{
for(Enumeration e=CMLib.map().players();e.hasMoreElements();)
{
MOB M=(MOB)e.nextElement();
if(M.getStartRoom()==oldR)
M.setStartRoom(R);
else
if(M.location()==oldR)
M.setLocation(R);
}
}catch(NoSuchElementException e){}
}
R.getArea().fillInAreaRoom(R);
CMLib.database().DBUpdateRoom(R);
CMLib.database().DBUpdateMOBs(R);
CMLib.database().DBUpdateItems(R);
R.startItemRejuv();
if(oldR!=R) oldR.destroy();
R.getArea().addProperRoomnumber(R.roomID()); // oldR.destroy() would kill it otherwise
if(!copyRoom.sameAs(R))
Log.sysOut("Grinder",whom.Name()+" modified room "+R.roomID()+".");
}
return "";
}
public static String delRoom(Room R)
{
CMLib.map().obliterateRoom(R);
return "";
}
public static Room createLonelyRoom(Area A, Room linkTo, int dir, boolean copyThisOne)
{
Room newRoom=null;
String newRoomID=A.getNewRoomID(linkTo,dir);
if(newRoomID.length()==0) return null;
if((copyThisOne)&&(linkTo!=null))
{
CMLib.map().resetRoom(linkTo);
newRoom=(Room)linkTo.copyOf();
for(int d=0;d<Directions.NUM_DIRECTIONS;d++)
{
newRoom.rawDoors()[d]=null;
newRoom.rawExits()[d]=null;
}
}
else
{
newRoom=CMClass.getLocale("StdRoom");
newRoom.setDisplayText("Title of "+newRoomID);
newRoom.setDescription("Description of "+newRoomID);
}
newRoom.setRoomID(newRoomID);
newRoom.setArea(A);
if(linkTo!=null)
{
newRoom.rawDoors()[Directions.getOpDirectionCode(dir)]=linkTo;
newRoom.rawExits()[Directions.getOpDirectionCode(dir)]=CMClass.getExit("StdOpenDoorway");
}
CMLib.database().DBCreateRoom(newRoom,CMClass.className(newRoom));
CMLib.database().DBUpdateExits(newRoom);
if(newRoom.numInhabitants()>0)
CMLib.database().DBUpdateMOBs(newRoom);
if(newRoom.numItems()>0)
CMLib.database().DBUpdateItems(newRoom);
newRoom.getArea().fillInAreaRoom(newRoom);
return newRoom;
}
public static String createRoom(Room R, int dir, boolean copyThisOne)
{
R.clearSky();
if(R instanceof GridLocale)
((GridLocale)R).clearGrid(null);
Room newRoom=createLonelyRoom(R.getArea(),R,dir,copyThisOne);
R.rawDoors()[dir]=newRoom;
if(R.rawExits()[dir]==null)
R.rawExits()[dir]=CMClass.getExit("StdOpenDoorway");
CMLib.database().DBUpdateExits(R);
R.getArea().fillInAreaRoom(R);
return "";
}
public static Room createGridRoom(Area A, String roomID, Room copyThisOne, RoomnumberSet deferredExitSaves, boolean autoLink)
{
Room R=null;
if(copyThisOne!=null)
{
R=(Room)copyThisOne.copyOf();
R.setRoomID(roomID);
}
else
{
R=CMClass.getLocale("StdRoom");
R.setRoomID(roomID);
R.setDisplayText("Title of "+R.roomID());
R.setDescription("Description of "+R.roomID());
}
R.setArea(A);
CMLib.database().DBCreateRoom(R,CMClass.className(R));
if(R.numInhabitants()>0)
CMLib.database().DBUpdateMOBs(R);
if(R.numItems()>0)
CMLib.database().DBUpdateItems(R);
R.getArea().fillInAreaRoom(R);
if((autoLink)&&(R.getArea() instanceof GridZones))
{
GridZones GZ=(GridZones)R.getArea();
int x=GZ.getGridChildX(R);
int y=GZ.getGridChildY(R);
boolean resaveMyExits=false;
if((x>=0)&&(y>=0))
{
Room R2=null;
for(int d=0;d<Directions.NUM_DIRECTIONS;d++)
{
int[] xy=Directions.adjustXYByDirections(x,y,d);
R2=GZ.getGridChild(xy[0],xy[1]);
if((R2!=null)&&(R!=R2))
{
int opD=Directions.getOpDirectionCode(d);
if(R2.rawDoors()[opD]==null)
{
R2.rawDoors()[opD]=R;
if(R2.rawExits()[opD]==null)
R2.rawExits()[opD]=CMClass.getExit("StdOpenDoorway");
if(deferredExitSaves!=null)
{
if(!deferredExitSaves.contains(R2.roomID()))
deferredExitSaves.add(R2.roomID());
}
else
CMLib.database().DBUpdateExits(R2);
}
if(R.rawDoors()[d]==null)
{
R.rawDoors()[d]=R2;
if(R.rawExits()[d]==null)
R.rawExits()[d]=CMClass.getExit("StdOpenDoorway");
resaveMyExits=true;
}
}
}
}
if(resaveMyExits)
{
if(deferredExitSaves!=null)
{
if(!deferredExitSaves.contains(R.roomID()))
deferredExitSaves.add(R.roomID());
}
else
{
CMLib.database().DBUpdateExits(R);
R.getArea().fillInAreaRoom(R);
}
}
}
return R;
}
}
|
git-svn-id: svn://192.168.1.10/public/CoffeeMud@5491 0d6f1817-ed0e-0410-87c9-987e46238f29
|
com/planet_ink/coffee_mud/WebMacros/grinder/GrinderRooms.java
|
<ide><path>om/planet_ink/coffee_mud/WebMacros/grinder/GrinderRooms.java
<ide> CMLib.database().DBUpdateMOBs(R);
<ide> CMLib.database().DBUpdateItems(R);
<ide> R.startItemRejuv();
<del> if(oldR!=R) oldR.destroy();
<del> R.getArea().addProperRoomnumber(R.roomID()); // oldR.destroy() would kill it otherwise
<add> if(oldR!=R){ oldR.destroy(); R.getArea().addProperRoom(R);}
<ide> if(!copyRoom.sameAs(R))
<ide> Log.sysOut("Grinder",whom.Name()+" modified room "+R.roomID()+".");
<ide> }
|
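The change above appears to narrow the re-registration to the case where the room instance was actually swapped, and to register the replacement object itself (addProperRoom) immediately after destroying the old instance, since both instances share the same room ID and destroying the old one can drop that ID from the area. A minimal sketch of that ordering concern, using illustrative stand-in interfaces rather than the CoffeeMud API:
interface SketchArea { void addProperRoom(SketchRoom r); }
interface SketchRoom { String roomID(); SketchArea getArea(); void destroy(); }
final class RoomSwapSketch {
    // Destroying the old instance may deregister the shared room ID, so the
    // replacement must be (re)registered with its area afterwards, not before.
    static SketchRoom replace(SketchRoom oldRoom, SketchRoom newRoom) {
        if (oldRoom != newRoom) {
            oldRoom.destroy();
            newRoom.getArea().addProperRoom(newRoom);
        }
        return newRoom;
    }
}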
||
Java
|
bsd-3-clause
|
592aaa57e98dc367542f553aeaf5950e0f082f32
| 0 |
lockss/lockss-daemon,edina/lockss-daemon,edina/lockss-daemon,edina/lockss-daemon,edina/lockss-daemon,lockss/lockss-daemon,edina/lockss-daemon,edina/lockss-daemon,lockss/lockss-daemon,lockss/lockss-daemon,lockss/lockss-daemon,lockss/lockss-daemon,edina/lockss-daemon,lockss/lockss-daemon
|
/*
* $Id: V1NamePoll.java,v 1.13 2004-10-22 00:36:41 troberts Exp $
*/
/*
Copyright (c) 2000-2003 Board of Trustees of Leland Stanford Jr. University,
all rights reserved.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
STANFORD UNIVERSITY BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
Except as contained in this notice, the name of Stanford University shall not
be used in advertising or otherwise to promote the sale, use or other dealings
in this Software without prior written authorization from Stanford University.
*/
package org.lockss.poller;
import java.io.*;
import java.security.*;
import java.util.*;
import org.lockss.daemon.*;
import org.lockss.hasher.*;
import org.lockss.protocol.*;
import org.lockss.util.*;
import org.lockss.plugin.*;
/**
* <p>Implements a version one name poll.</p>
* @author Claire Griffin
* @version 1.0
*/
public class V1NamePoll extends V1Poll {
ArrayList m_entries;
public V1NamePoll(PollSpec pollspec,
PollManager pm,
PeerIdentity orig,
byte[] challenge,
long duration,
String hashAlg) {
super(pollspec, pm, orig, challenge, duration);
m_replyOpcode = LcapMessage.NAME_POLL_REP;
m_tally = new V1PollTally(this,
NAME_POLL,
m_createTime,
duration,
V1PollFactory.getQuorum(), // XXX AU-specific
hashAlg);
}
/**
* cast our vote for this poll
*/
void castOurVote() {
if (m_msg == null) {
log.error("No vote to cast for " + this);
return;
}
LcapMessage msg;
PeerIdentity local_id = idMgr.getLocalPeerIdentity(Poll.V1_POLL);
long remainingTime = m_deadline.getRemainingTime();
log.debug("castOurVote: " + local_id);
try {
msg = LcapMessage.makeReplyMsg(m_msg, m_hash, m_verifier,
getEntries(), m_replyOpcode,
remainingTime, local_id);
log.debug("vote:" + msg.toString());
m_pollmanager.sendMessage(msg, m_cus.getArchivalUnit());
}
catch (IOException ex) {
log.info("unable to cast our vote.", ex);
}
}
/**
* handle a message which may be an incoming vote
* @param msg the Message to handle
*/
void receiveMessage(LcapMessage msg) {
int opcode = msg.getOpcode();
if (m_msg == null) {
m_msg = msg;
log.debug("Setting message for " + this + " from " + msg);
}
if (opcode == LcapMessage.NAME_POLL_REP) {
startVoteCheck(msg);
}
}
/**
* schedule the hash for this poll.
* @param hasher the MessageDigest used to hash the content
* @param timer the Deadline by which we must complete
* @param key the Object which will be returned from the hasher. Always the
* message which triggered the hash
* @param callback the hashing callback to use on return
* @return true if hash successfully completed.
*/
boolean scheduleHash(MessageDigest hasher, Deadline timer, Object key,
HashService.Callback callback) {
HashService hs = m_pollmanager.getHashService();
return hs.hashNames(m_cus, hasher, timer, callback, key);
}
/**
* start the hash required for a vote cast in this poll
* @param msg the LcapMessage containing the vote we're going to check
*/
void startVoteCheck(LcapMessage msg) {
super.startVoteCheck();
if (shouldCheckVote(msg)) {
Vote vote = new NameVote(msg, false);
log.debug3("created a new NameVote instead of a Vote");
MessageDigest hasher = getInitedHasher(msg.getChallenge(),
msg.getVerifier());
if (!scheduleHash(hasher, m_hashDeadline, vote,
new VoteHashCallback())) {
log.info(m_key + " no time to hash vote by " + m_hashDeadline);
stopVoteCheck();
}
}
}
void clearEntryList() {
m_entries = null;
}
ArrayList generateEntries() {
Iterator it = m_cus.flatSetIterator();
ArrayList alist = new ArrayList();
CachedUrlSetSpec spec = m_cus.getSpec();
String baseUrl = spec.getUrl();
log.debug2("getting a list of entries for spec " + m_cus.getSpec());
while (it.hasNext()) {
CachedUrlSetNode cusn = (CachedUrlSetNode) it.next();
String name = cusn.getUrl();
if (spec.matches(name)) {
boolean hasContent = cusn.hasContent();
if (name.startsWith(baseUrl)) {
name = name.substring(baseUrl.length());
} //XXX add error message
log.debug3("adding file name " + name + " - hasContent=" + hasContent);
alist.add(new PollTally.NameListEntry(hasContent, name));
}
}
m_entries = alist;
return m_entries;
}
ArrayList getEntries() {
if (m_entries == null) {
generateEntries();
}
log.debug2("found " + m_entries.size() + " items in list");
return m_entries;
}
NameVote findWinningVote(Iterator voteIter) {
ArrayList winners = new ArrayList();
NameVoteCounter winningCounter = null;
// build a list of unique disagree votes
while (voteIter.hasNext()) {
Object obj = voteIter.next();
if (! (obj instanceof NameVote)) {
log.error("Expected class NameVote found class " +
obj.getClass().getName());
continue;
}
NameVote vote = (NameVote) obj;
if (!vote.agree) {
NameVoteCounter counter = new NameVoteCounter(vote);
if (winners.contains(counter)) {
counter = (NameVoteCounter) winners.get(winners.indexOf(counter));
counter.addVote();
}
else {
winners.add(counter);
}
}
}
// find the "winner" with the most votes
Iterator it = winners.iterator();
while (it.hasNext()) {
NameVoteCounter counter = (NameVoteCounter) it.next();
if (winningCounter != null) {
if (winningCounter.getNumVotes() < counter.getNumVotes()) {
winningCounter = counter;
}
}
else {
winningCounter = counter;
}
}
return winningCounter;
}
void buildPollLists(Iterator voteIter) {
NameVote winningVote = findWinningVote(voteIter);
if (winningVote != null) {
log.debug("found winning vote: " + winningVote);
m_tally.votedEntries = winningVote.getKnownEntries();
if(log.isDebug3()) {
for(int i=0; i< m_tally.votedEntries.size(); i++) {
log.debug3("winning entry " + i + ": " + m_tally.votedEntries.get(i));
}
}
String lwrRem = winningVote.getLwrRemaining();
String uprRem = winningVote.getUprRemaining();
log.debug3("remainder lwr : "+ lwrRem + " upr: " + uprRem);
if (lwrRem != null) {
callNameSubPoll(m_cus, lwrRem, uprRem);
// we make our list from whatever is in our
// master list that doesn't match the remainder;
ArrayList localSet = new ArrayList();
Iterator localIt = getEntries().iterator();
log.debug3("finding local entries which are below our lwr remainder:" +lwrRem);
while (localIt.hasNext()) {
PollTally.NameListEntry entry = (PollTally.NameListEntry) localIt.next();
String url = entry.name;
if((lwrRem != null) && url.compareTo(lwrRem) < 0) {
log.debug3("adding local entry " + entry);
localSet.add(entry);
}
}
m_tally.localEntries = localSet;
} else {
log.debug3("No entries remain to be sent, return all entries for spec: "
+ m_cus.getSpec());
m_tally.localEntries = getEntries();
}
}
}
/**
* Calls a name poll with the lower and upper bounds set.
* @param cus CachedUrlSet
* @param lwr lower bound
* @param upr upper bound
*/
private void callNameSubPoll(CachedUrlSet cus, String lwr, String upr) {
String base = cus.getUrl();
ArchivalUnit au = cus.getArchivalUnit();
CachedUrlSet newCus = au.makeCachedUrlSet(new RangeCachedUrlSetSpec(base, lwr, upr));
PollSpec spec = new PollSpec(newCus, lwr, upr, Poll.NAME_POLL, Poll.V1_POLL);
log.debug3("calling new name poll on: " + spec);
if (!m_pollmanager.callPoll(spec)) {
log.error("unable to call name poll for " + spec);
}
}
/**
* make a NameVote. NB - used only by TestPoll
* @param msg the message needed to make the vote
* @param agree a boolean set true if this is an agree vote, false otherwise.
* @return the newly created NameVote object
*/
NameVote makeNameVote(LcapMessage msg, boolean agree) {
return new NameVote(msg, agree);
}
Vote copyVote(Vote vote, boolean agree) {
NameVote v = new NameVote((NameVote)vote);
v.agree = agree;
return v;
}
static class NameVote extends Vote {
private ArrayList knownEntries;
private String lwrRemaining;
private String uprRemaining;
NameVote(NameVote vote) {
super(vote);
knownEntries = vote.getKnownEntries();
lwrRemaining = vote.getLwrRemaining();
uprRemaining = vote.getUprRemaining();
}
NameVote(LcapMessage msg, boolean agree) {
super(msg, agree);
knownEntries = msg.getEntries();
lwrRemaining = msg.getLwrRemain();
uprRemaining = msg.getUprRemain();
}
ArrayList getKnownEntries() {
return knownEntries;
}
String getLwrRemaining() {
return lwrRemaining;
}
String getUprRemaining() {
return uprRemaining;
}
public boolean equals(Object obj) {
if (obj instanceof NameVote) {
return (sameEntries( ( (NameVote) obj).knownEntries));
}
return false;
}
public int hashCode() {
throw new UnsupportedOperationException();
}
boolean sameEntries(ArrayList entries) {
return CollectionUtil.isIsomorphic(knownEntries,entries);
}
}
static class NameVoteCounter extends NameVote {
private int voteCount = 1;
NameVoteCounter(NameVote vote) {
super(vote);
}
void addVote() {
voteCount++;
}
int getNumVotes() {
return voteCount;
}
}
}
|
src/org/lockss/poller/V1NamePoll.java
|
/*
* $Id: V1NamePoll.java,v 1.12 2004-10-21 22:51:57 clairegriffin Exp $
*/
/*
Copyright (c) 2000-2003 Board of Trustees of Leland Stanford Jr. University,
all rights reserved.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
STANFORD UNIVERSITY BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
Except as contained in this notice, the name of Stanford University shall not
be used in advertising or otherwise to promote the sale, use or other dealings
in this Software without prior written authorization from Stanford University.
*/
package org.lockss.poller;
import java.io.*;
import java.security.*;
import java.util.*;
import org.lockss.daemon.*;
import org.lockss.hasher.*;
import org.lockss.protocol.*;
import org.lockss.util.*;
import org.lockss.plugin.*;
/**
* <p>Implements a version one name poll.</p>
* @author Claire Griffin
* @version 1.0
*/
public class V1NamePoll extends V1Poll {
ArrayList m_entries;
public V1NamePoll(PollSpec pollspec,
PollManager pm,
PeerIdentity orig,
byte[] challenge,
long duration,
String hashAlg) {
super(pollspec, pm, orig, challenge, duration);
m_replyOpcode = LcapMessage.NAME_POLL_REP;
m_tally = new V1PollTally(this,
NAME_POLL,
m_createTime,
duration,
V1PollFactory.getQuorum(), // XXX AU-specific
hashAlg);
}
/**
* cast our vote for this poll
*/
void castOurVote() {
if (m_msg == null) {
log.error("No vote to cast for " + this);
return;
}
LcapMessage msg;
PeerIdentity local_id = idMgr.getLocalPeerIdentity(Poll.V1_POLL);
long remainingTime = m_deadline.getRemainingTime();
log.debug("castOurVote: " + local_id);
try {
msg = LcapMessage.makeReplyMsg(m_msg, m_hash, m_verifier,
getEntries(), m_replyOpcode,
remainingTime, local_id);
log.debug("vote:" + msg.toString());
m_pollmanager.sendMessage(msg, m_cus.getArchivalUnit());
}
catch (IOException ex) {
log.info("unable to cast our vote.", ex);
}
}
/**
* handle a message which may be an incoming vote
* @param msg the Message to handle
*/
void receiveMessage(LcapMessage msg) {
int opcode = msg.getOpcode();
if (m_msg == null) {
m_msg = msg;
log.debug("Setting message for " + this + " from " + msg);
}
if (opcode == LcapMessage.NAME_POLL_REP) {
startVoteCheck(msg);
}
}
/**
* schedule the hash for this poll.
* @param hasher the MessageDigest used to hash the content
* @param timer the Deadline by which we must complete
* @param key the Object which will be returned from the hasher. Always the
* message which triggered the hash
* @param callback the hashing callback to use on return
* @return true if hash successfully completed.
*/
boolean scheduleHash(MessageDigest hasher, Deadline timer, Object key,
HashService.Callback callback) {
HashService hs = m_pollmanager.getHashService();
return hs.hashNames(m_cus, hasher, timer, callback, key);
}
/**
* start the hash required for a vote cast in this poll
* @param msg the LcapMessage containing the vote we're going to check
*/
void startVoteCheck(LcapMessage msg) {
super.startVoteCheck();
if (shouldCheckVote(msg)) {
Vote vote = new NameVote(msg, false);
log.debug3("created a new NameVote instead of a Vote");
MessageDigest hasher = getInitedHasher(msg.getChallenge(),
msg.getVerifier());
if (!scheduleHash(hasher, m_hashDeadline, vote,
new VoteHashCallback())) {
log.info(m_key + " no time to hash vote by " + m_hashDeadline);
stopVoteCheck();
}
}
}
void clearEntryList() {
m_entries = null;
}
ArrayList generateEntries() {
Iterator it = m_cus.flatSetIterator();
ArrayList alist = new ArrayList();
CachedUrlSetSpec spec = m_cus.getSpec();
String baseUrl = spec.getUrl();
log.debug2("getting a list of entries for spec " + m_cus.getSpec());
while (it.hasNext()) {
CachedUrlSetNode cusn = (CachedUrlSetNode) it.next();
String name = cusn.getUrl();
if (spec.matches(name)) {
boolean hasContent = cusn.hasContent();
if (name.startsWith(baseUrl)) {
name = name.substring(baseUrl.length());
}
log.debug3("adding file name " + name + " - hasContent=" + hasContent);
alist.add(new PollTally.NameListEntry(hasContent, name));
}
}
m_entries = alist;
return m_entries;
}
ArrayList getEntries() {
if (m_entries == null) {
generateEntries();
}
log.debug2("found " + m_entries.size() + " items in list");
return m_entries;
}
NameVote findWinningVote(Iterator voteIter) {
ArrayList winners = new ArrayList();
NameVoteCounter winningCounter = null;
// build a list of unique disagree votes
while (voteIter.hasNext()) {
Object obj = voteIter.next();
if (! (obj instanceof NameVote)) {
log.error("Expected class NameVote found class " +
obj.getClass().getName());
continue;
}
NameVote vote = (NameVote) obj;
if (!vote.agree) {
NameVoteCounter counter = new NameVoteCounter(vote);
if (winners.contains(counter)) {
counter = (NameVoteCounter) winners.get(winners.indexOf(counter));
counter.addVote();
}
else {
winners.add(counter);
}
}
}
// find the "winner" with the most votes
Iterator it = winners.iterator();
while (it.hasNext()) {
NameVoteCounter counter = (NameVoteCounter) it.next();
if (winningCounter != null) {
if (winningCounter.getNumVotes() < counter.getNumVotes()) {
winningCounter = counter;
}
}
else {
winningCounter = counter;
}
}
return winningCounter;
}
void buildPollLists(Iterator voteIter) {
NameVote winningVote = findWinningVote(voteIter);
if (winningVote != null) {
log.debug("found winning vote: " + winningVote);
m_tally.votedEntries = winningVote.getKnownEntries();
if(log.isDebug3()) {
for(int i=0; i< m_tally.votedEntries.size(); i++) {
log.debug3("winning entry " + i + ": " + m_tally.votedEntries.get(i));
}
}
String lwrRem = winningVote.getLwrRemaining();
String uprRem = winningVote.getUprRemaining();
log.debug3("remainder lwr : "+ lwrRem + " upr: " + uprRem);
if (lwrRem != null) {
callNameSubPoll(m_cus, lwrRem, uprRem);
// we make our list from whatever is in our
// master list that doesn't match the remainder;
ArrayList localSet = new ArrayList();
Iterator localIt = getEntries().iterator();
log.debug3("finding local entries which are below our lwr remainder:" +lwrRem);
while (localIt.hasNext()) {
PollTally.NameListEntry entry = (PollTally.NameListEntry) localIt.next();
String url = entry.name;
if((lwrRem != null) && url.compareTo(lwrRem) < 0) {
log.debug3("adding local entry " + entry);
localSet.add(entry);
}
}
m_tally.localEntries = localSet;
} else {
log.debug3("No entries remain to be sent, return all entries for spec: "
+ m_cus.getSpec());
m_tally.localEntries = getEntries();
}
}
}
/**
* Calls a name poll with the lower and upper bounds set.
* @param cus CachedUrlSet
* @param lwr lower bound
* @param upr upper bound
*/
private void callNameSubPoll(CachedUrlSet cus, String lwr, String upr) {
String base = cus.getUrl();
ArchivalUnit au = cus.getArchivalUnit();
CachedUrlSet newCus = au.makeCachedUrlSet(new RangeCachedUrlSetSpec(base, lwr, upr));
PollSpec spec = new PollSpec(newCus, lwr, upr, Poll.NAME_POLL, Poll.V1_POLL);
log.debug3("calling new name poll on: " + spec);
if (!m_pollmanager.callPoll(spec)) {
log.error("unable to call name poll for " + spec);
}
}
/**
* make a NameVote. NB - used only by TestPoll
* @param msg the message needed to make the vote
* @param agree a boolean set true if this is an agree vote, false otherwise.
* @return the newly created NameVote object
*/
NameVote makeNameVote(LcapMessage msg, boolean agree) {
return new NameVote(msg, agree);
}
Vote copyVote(Vote vote, boolean agree) {
NameVote v = new NameVote((NameVote)vote);
v.agree = agree;
return v;
}
static class NameVote extends Vote {
private ArrayList knownEntries;
private String lwrRemaining;
private String uprRemaining;
NameVote(NameVote vote) {
super(vote);
knownEntries = vote.getKnownEntries();
lwrRemaining = vote.getLwrRemaining();
uprRemaining = vote.getUprRemaining();
}
NameVote(LcapMessage msg, boolean agree) {
super(msg, agree);
knownEntries = msg.getEntries();
lwrRemaining = msg.getLwrRemain();
uprRemaining = msg.getUprRemain();
}
ArrayList getKnownEntries() {
return knownEntries;
}
String getLwrRemaining() {
return lwrRemaining;
}
String getUprRemaining() {
return uprRemaining;
}
public boolean equals(Object obj) {
if (obj instanceof NameVote) {
return (sameEntries( ( (NameVote) obj).knownEntries));
}
return false;
}
public int hashCode() {
throw new UnsupportedOperationException();
}
boolean sameEntries(ArrayList entries) {
return CollectionUtil.isIsomorphic(knownEntries,entries);
}
}
static class NameVoteCounter extends NameVote {
private int voteCount = 1;
NameVoteCounter(NameVote vote) {
super(vote);
}
void addVote() {
voteCount++;
}
int getNumVotes() {
return voteCount;
}
}
}
|
added comment reminding to add error message
git-svn-id: 293778eaa97c8c94097d610b1bd5133a8f478f36@3607 4f837ed2-42f5-46e7-a7a5-fa17313484d4
|
src/org/lockss/poller/V1NamePoll.java
|
added comment reminding to add error message
|
<ide><path>rc/org/lockss/poller/V1NamePoll.java
<ide> /*
<del> * $Id: V1NamePoll.java,v 1.12 2004-10-21 22:51:57 clairegriffin Exp $
<add> * $Id: V1NamePoll.java,v 1.13 2004-10-22 00:36:41 troberts Exp $
<ide> */
<ide>
<ide> /*
<ide> boolean hasContent = cusn.hasContent();
<ide> if (name.startsWith(baseUrl)) {
<ide> name = name.substring(baseUrl.length());
<del> }
<add> } //XXX add error message
<ide> log.debug3("adding file name " + name + " - hasContent=" + hasContent);
<ide> alist.add(new PollTally.NameListEntry(hasContent, name));
<ide> }
|
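For context on the tallying in findWinningVote() above: duplicate disagree votes are grouped by linear search over an ArrayList, which relies on NameVote.equals() comparing the entry lists while hashCode() throws UnsupportedOperationException, so those objects cannot serve as keys in hash-based collections. A rough equivalent of the count, keyed directly by the entry lists (a sketch under that reading, not the project's code):
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
final class NameTallySketch {
    // Returns the entry list reported by the most disagreeing voters, or null if none.
    static List<String> winningEntries(List<List<String>> disagreeVotes) {
        Map<List<String>, Integer> counts = new HashMap<>();
        for (List<String> entries : disagreeVotes) {
            counts.merge(new ArrayList<>(entries), 1, Integer::sum); // List equals/hashCode do the grouping
        }
        return counts.entrySet().stream()
                .max(Map.Entry.comparingByValue())
                .map(Map.Entry::getKey)
                .orElse(null);
    }
}
Keying by the list itself restores the equals/hashCode pairing that NameVote leaves unimplemented.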
|
Java
|
apache-2.0
|
fa52a80b420a2cfd5e47d981f5c8771f76c9a7a6
| 0 |
anHALytics/anHALytics-core,anHALytics/anHALytics-core
|
package fr.inria.anhalytics.commons.dao;
import com.mysql.jdbc.exceptions.jdbc4.MySQLIntegrityConstraintViolationException;
import fr.inria.anhalytics.commons.entities.Document_Organisation;
import fr.inria.anhalytics.commons.entities.Organisation;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Statement;
/**
*
* @author achraf
*/
public class Document_OrganisationDAO extends DAO<Document_Organisation, Long> {
private static final String SQL_INSERT
= "INSERT INTO DOCUMENT_ORGANISATION (docID, organisationID, type) VALUES (?, ?, ?)";
public Document_OrganisationDAO(Connection conn) {
super(conn);
}
@Override
public boolean create(Document_Organisation obj) throws SQLException {
boolean result = false;
if (obj.getDoc() == null || obj.getOrgs() == null) {
throw new IllegalArgumentException("No Document nor organisation is already created, the Affiliation ID is not null.");
}
PreparedStatement statement = connect.prepareStatement(SQL_INSERT, Statement.RETURN_GENERATED_KEYS);
for (Organisation org : obj.getOrgs()) {
try {
statement.setString(1, obj.getDoc().getDocID());
statement.setLong(2, org.getOrganisationId());
statement.setString(3, org.getType());
int code = statement.executeUpdate();
result = true;
} catch (MySQLIntegrityConstraintViolationException e) {
}
}
statement.close();
return result;
}
@Override
public boolean delete(Document_Organisation obj) {
throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
}
@Override
public boolean update(Document_Organisation obj) {
throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
}
@Override
public Document_Organisation find(Long id) {
throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
}
}
|
anhalytics-commons/src/main/java/fr/inria/anhalytics/commons/dao/Document_OrganisationDAO.java
|
package fr.inria.anhalytics.commons.dao;
import com.mysql.jdbc.exceptions.jdbc4.MySQLIntegrityConstraintViolationException;
import fr.inria.anhalytics.commons.entities.Document_Organisation;
import fr.inria.anhalytics.commons.entities.Organisation;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Statement;
/**
*
* @author achraf
*/
public class Document_OrganisationDAO extends DAO<Document_Organisation, Long> {
private static final String SQL_INSERT
= "INSERT INTO DOCUMENT_ORGANISATION (docID, organisationID, type) VALUES (?, ?, ?)";
public Document_OrganisationDAO(Connection conn) {
super(conn);
}
@Override
public boolean create(Document_Organisation obj) throws SQLException {
boolean result = false;
if (obj.getDoc() == null || obj.getOrgs() == null) {
throw new IllegalArgumentException("No Document nor organisation is already created, the Affiliation ID is not null.");
}
PreparedStatement statement;
try {
statement = connect.prepareStatement(SQL_INSERT, Statement.RETURN_GENERATED_KEYS);
for (Organisation org : obj.getOrgs()) {
statement.setString(1, obj.getDoc().getDocID());
statement.setLong(2, org.getOrganisationId());
statement.setString(3, org.getType());
int code = statement.executeUpdate();
result = true;
}
statement.close();
} catch (MySQLIntegrityConstraintViolationException e) {
}
return result;
}
@Override
public boolean delete(Document_Organisation obj) {
throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
}
@Override
public boolean update(Document_Organisation obj) {
throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
}
@Override
public Document_Organisation find(Long id) {
throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
}
}
|
Typo.
|
anhalytics-commons/src/main/java/fr/inria/anhalytics/commons/dao/Document_OrganisationDAO.java
|
Typo.
|
<ide><path>nhalytics-commons/src/main/java/fr/inria/anhalytics/commons/dao/Document_OrganisationDAO.java
<ide> throw new IllegalArgumentException("No Document nor organisation is already created, the Affiliation ID is not null.");
<ide> }
<ide>
<del> PreparedStatement statement;
<del> try {
<del> statement = connect.prepareStatement(SQL_INSERT, Statement.RETURN_GENERATED_KEYS);
<del> for (Organisation org : obj.getOrgs()) {
<add> PreparedStatement statement = connect.prepareStatement(SQL_INSERT, Statement.RETURN_GENERATED_KEYS);
<add>
<add> for (Organisation org : obj.getOrgs()) {
<add> try {
<ide> statement.setString(1, obj.getDoc().getDocID());
<ide> statement.setLong(2, org.getOrganisationId());
<ide> statement.setString(3, org.getType());
<ide> int code = statement.executeUpdate();
<add>
<ide> result = true;
<add> } catch (MySQLIntegrityConstraintViolationException e) {
<ide> }
<del> statement.close();
<del> } catch (MySQLIntegrityConstraintViolationException e) {
<ide> }
<add> statement.close();
<add>
<ide> return result;
<ide> }
<ide>
|
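The fix above moves the duplicate-key catch inside the loop, so an already existing DOCUMENT_ORGANISATION row no longer aborts the remaining inserts, and the statement is closed after the loop. A sketch of the same flow with try-with-resources and the vendor-neutral java.sql exception; the method and parameter names below are illustrative, not part of the DAO:
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.SQLIntegrityConstraintViolationException;
import java.util.Map;
final class DocumentOrganisationInsertSketch {
    private static final String SQL =
        "INSERT INTO DOCUMENT_ORGANISATION (docID, organisationID, type) VALUES (?, ?, ?)";
    /** @param organisations organisation ID mapped to its type */
    static boolean link(Connection conn, String docId, Map<Long, String> organisations)
            throws SQLException {
        boolean insertedAny = false;
        try (PreparedStatement ps = conn.prepareStatement(SQL)) {
            for (Map.Entry<Long, String> org : organisations.entrySet()) {
                try {
                    ps.setString(1, docId);
                    ps.setLong(2, org.getKey());
                    ps.setString(3, org.getValue());
                    insertedAny |= ps.executeUpdate() > 0;
                } catch (SQLIntegrityConstraintViolationException duplicate) {
                    // row already linked; skip it and continue with the next organisation
                }
            }
        }
        return insertedAny;
    }
}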
|
Java
|
apache-2.0
|
09239d8233c6da9ffaf36fae4ce14d0aa4106ca1
| 0 |
treasure-data/td-import-java,treasure-data/td-import-java
|
package com.treasure_data.file;
import java.text.ParsePosition;
import java.text.SimpleDateFormat;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import org.supercsv.cellprocessor.CellProcessorAdaptor;
import org.supercsv.exception.SuperCsvCellProcessorException;
import org.supercsv.util.CsvContext;
import com.treasure_data.commands.CommandException;
public class TimeFormatSuggestionProcessor extends CellProcessorAdaptor {
public static enum TimeFormat {
INT("int", 0),
LONG("long", 1),
FLOAT("float", 2),
RFC_822_1123_FORMAT("RFC_822_1123", 3),
RFC_850_1036_FORMAT("RFC_850_1036", 4),
APACHE_CLF_FORMAT("Apache_CLF", 5),
ANSI_C_ASCTIME_FORMAT("ANSI_C_Asctime", 6);
private String type;
private int index;
TimeFormat(String type, int index) {
this.type = type;
this.index = index;
}
public String type() {
return type;
}
public int index() {
return index;
}
public static TimeFormat fromString(String type) {
return StringToTimeFormat.get(type);
}
public static TimeFormat fromInt(int index) {
return IntToTimeFormat.get(index);
}
private static class StringToTimeFormat {
private static final Map<String, TimeFormat> REVERSE_DICTIONARY;
static {
Map<String, TimeFormat> map = new HashMap<String, TimeFormat>();
for (TimeFormat elem : TimeFormat.values()) {
map.put(elem.type, elem);
}
REVERSE_DICTIONARY = Collections.unmodifiableMap(map);
}
static TimeFormat get(String key) {
return REVERSE_DICTIONARY.get(key);
}
}
private static class IntToTimeFormat {
private static final Map<Integer, TimeFormat> REVERSE_DICTIONARY;
static {
Map<Integer, TimeFormat> map = new HashMap<Integer, TimeFormat>();
for (TimeFormat elem : TimeFormat.values()) {
map.put(elem.index, elem);
}
REVERSE_DICTIONARY = Collections.unmodifiableMap(map);
}
static TimeFormat get(Integer index) {
return REVERSE_DICTIONARY.get(index);
}
}
}
private int[] scores = new int[] { 0, 0, 0, 0, 0, 0, 0 };
private TimeFormatMatcher[] matchers;
private int rowSize;
TimeFormatSuggestionProcessor(int rowSize) {
this.rowSize = rowSize;
this.matchers = new TimeFormatMatcher[7];
matchers[0] = new IntegerTimeFormatMatcher();
matchers[1] = new LongTimeFormatMatcher();
matchers[2] = new FloatTimeFormatMatcher();
matchers[3] = new RFC_822_1123_FormatMatcher();
matchers[4] = new RFC_850_1036_FormatMatcher();
matchers[5] = new ApacheCLFFormatMatcher();
matchers[6] = new ANSICAscTimeFormatMatcher();
}
void addHint() throws CommandException { // TODO e.g. strf time
throw new UnsupportedOperationException();
}
TimeFormat getSuggestedTimeFormat() {
int max = -rowSize;
int maxIndex = 0;
for (int i = 0; i < scores.length; i++) {
if (max < scores[i]) {
max = scores[i];
maxIndex = i;
}
}
return TimeFormat.fromInt(maxIndex);
}
TimeFormatProcessor createTimeFormatProcessor(TimeFormat tf)
throws CommandException {
switch (tf) {
case INT:
return new IntegerTimeFormatProcessor();
case LONG:
return new LongTimeFormatProcessor();
case FLOAT:
return new FloatTimeFormatProcessor();
case RFC_822_1123_FORMAT:
return new RFC_822_1123_FormatProcessor();
case RFC_850_1036_FORMAT:
return new RFC_850_1036_FormatProcessor();
case APACHE_CLF_FORMAT:
return new ApacheCLFFormatProcessor();
case ANSI_C_ASCTIME_FORMAT:
return new ANSICAscTimeFormatProcessor();
default:
throw new CommandException("fatal error");
}
}
@Override
public Object execute(Object value, CsvContext context) {
if (value == null) {
// no scores are changed for a null value
return null;
}
for (int i = 0; i < matchers.length; i++) {
if (matchers[i].match(value)) {
scores[i] += 1;
}
}
// null object is returned TODO ??
return next.execute(null, context);
}
public static interface TimeFormatMatcher {
public boolean match(Object v);
}
public static class TimeFormatProcessor extends CellProcessorAdaptor {
@Override
public Object execute(Object value, CsvContext context) {
return null;
}
}
public static class IntegerTimeFormatMatcher implements TimeFormatMatcher {
public boolean match(Object v) {
if (v instanceof Integer) {
return true;
} else if (v instanceof String) {
try {
Integer.parseInt((String) v);
return true;
} catch (NumberFormatException e) {
return false;
}
} else {
return false;
}
}
}
public static class IntegerTimeFormatProcessor extends TimeFormatProcessor {
@Override
public Object execute(Object value, CsvContext context) {
if (value instanceof Integer) {
return (Integer) value;
} else if (value instanceof String) {
try {
return Integer.parseInt((String) value);
} catch (NumberFormatException e) {
throw new SuperCsvCellProcessorException(String.format(
"'%s' could not be parsed as an Integer", value),
context, this, e);
}
} else {
final String actualClassName = value.getClass().getName();
throw new SuperCsvCellProcessorException(
String.format(
"the input value should be of type Integer or String but is of type %s",
actualClassName), context, this);
}
}
}
public static class LongTimeFormatMatcher implements TimeFormatMatcher {
public boolean match(Object v) {
if (v instanceof Long) {
return true;
} else if (v instanceof String) {
try {
Long.parseLong((String) v);
return true;
} catch (NumberFormatException e) {
return false;
}
} else {
return false;
}
}
}
public static class LongTimeFormatProcessor extends TimeFormatProcessor {
@Override
public Object execute(Object value, CsvContext context) {
if (value instanceof Long) {
return (Long) value;
} else if (value instanceof String) {
try {
return Long.parseLong((String) value);
} catch (NumberFormatException e) {
throw new SuperCsvCellProcessorException(String.format(
"'%s' could not be parsed as an Long", value),
context, this, e);
}
} else {
final String actualClassName = value.getClass().getName();
throw new SuperCsvCellProcessorException(
String.format(
"the input value should be of type Long or String but is of type %s",
actualClassName), context, this);
}
}
}
public static class FloatTimeFormatMatcher implements TimeFormatMatcher {
public boolean match(Object v) {
if (v instanceof Float) {
return true;
} else if (v instanceof String) {
try {
Float.parseFloat((String) v);
return true;
} catch (NumberFormatException e) {
return false;
}
} else {
return false;
}
}
}
public static class FloatTimeFormatProcessor extends TimeFormatProcessor {
@Override
public Object execute(Object value, CsvContext context) {
if (value instanceof Float) {
return (long) ((float) ((Float) value));
} else if (value instanceof String) {
try {
return (long) Float.parseFloat((String) value);
} catch (NumberFormatException e) {
throw new SuperCsvCellProcessorException(String.format(
"'%s' could not be parsed as an Float", value),
context, this, e);
}
} else {
final String actualClassName = value.getClass().getName();
throw new SuperCsvCellProcessorException(
String.format(
"the input value should be of type Float or String but is of type %s",
actualClassName), context, this);
}
}
}
public abstract static class SimpleDateFormatMatcher implements
TimeFormatMatcher {
public boolean match(Object v) {
if (! (v instanceof String)) {
return false;
}
String text = (String) v;
ParsePosition pp = new ParsePosition(0);
Date d = getFormat().parse(text, pp);
return d != null && pp.getErrorIndex() == -1;
}
protected abstract SimpleDateFormat getFormat();
}
public abstract static class SimpleDateFormatProcessor extends TimeFormatProcessor {
@Override
public Object execute(Object value, CsvContext context) {
if (value == null) {
throw new SuperCsvCellProcessorException("value is null",
context, this);
}
if (!(value instanceof String)) {
final String actualClassName = value.getClass().getName();
throw new SuperCsvCellProcessorException(
String.format(
"the input value should be of type String but is of type %s",
actualClassName), context, this);
}
String text = (String) value;
ParsePosition pp = new ParsePosition(0);
Date d = getFormat().parse(text, pp);
if (d != null && pp.getErrorIndex() == -1) {
throw new SuperCsvCellProcessorException(String.format(
"the input value cannot be parsed by the format %s",
getFormat()), context, this);
} else {
return d.getTime() / 1000;
}
}
protected abstract SimpleDateFormat getFormat();
}
public static class RFC_822_1123_FormatMatcher extends
SimpleDateFormatMatcher {
private final SimpleDateFormat RFC_822_1123_FORMAT = new SimpleDateFormat(
"EEE, dd MMM yyyy HH:mm:ss z", Locale.ENGLISH);
@Override
public SimpleDateFormat getFormat() {
return RFC_822_1123_FORMAT;
}
}
public static class RFC_822_1123_FormatProcessor extends
SimpleDateFormatProcessor {
private final SimpleDateFormat RFC_822_1123_FORMAT = new SimpleDateFormat(
"EEE, dd MMM yyyy HH:mm:ss z", Locale.ENGLISH);
@Override
public SimpleDateFormat getFormat() {
return RFC_822_1123_FORMAT;
}
}
public static class RFC_850_1036_FormatMatcher extends
SimpleDateFormatMatcher {
private final SimpleDateFormat RFC_850_1036_FORMAT = new SimpleDateFormat(
"EEEE, dd-MMM-yy HH:mm:ss z", Locale.ENGLISH);
@Override
public SimpleDateFormat getFormat() {
return RFC_850_1036_FORMAT;
}
}
public static class RFC_850_1036_FormatProcessor extends
SimpleDateFormatProcessor {
private final SimpleDateFormat RFC_850_1036_FORMAT = new SimpleDateFormat(
"EEEE, dd-MMM-yy HH:mm:ss z", Locale.ENGLISH);
@Override
public SimpleDateFormat getFormat() {
return RFC_850_1036_FORMAT;
}
}
public static class ApacheCLFFormatMatcher extends SimpleDateFormatMatcher {
private final SimpleDateFormat APACHE_CLF_FORMAT = new SimpleDateFormat(
"dd/MMM/yyyy HH:mm:ss Z", Locale.ENGLISH);
@Override
public SimpleDateFormat getFormat() {
return APACHE_CLF_FORMAT;
}
}
public static class ApacheCLFFormatProcessor extends
SimpleDateFormatProcessor {
private final SimpleDateFormat APACHE_CLF_FORMAT = new SimpleDateFormat(
"dd/MMM/yyyy HH:mm:ss Z", Locale.ENGLISH);
@Override
public SimpleDateFormat getFormat() {
return APACHE_CLF_FORMAT;
}
}
public static class ANSICAscTimeFormatMatcher extends
SimpleDateFormatMatcher {
private final SimpleDateFormat ANSI_C_ASCTIME_FORMAT = new SimpleDateFormat(
"EEE MMM d HH:mm:ss yyyy", Locale.ENGLISH);
@Override
public SimpleDateFormat getFormat() {
return ANSI_C_ASCTIME_FORMAT;
}
}
public static class ANSICAscTimeFormatProcessor extends
SimpleDateFormatProcessor {
private final SimpleDateFormat ANSI_C_ASCTIME_FORMAT = new SimpleDateFormat(
"EEE MMM d HH:mm:ss yyyy", Locale.ENGLISH);
@Override
public SimpleDateFormat getFormat() {
return ANSI_C_ASCTIME_FORMAT;
}
}
}
|
src/main/java/com/treasure_data/file/TimeFormatSuggestionProcessor.java
|
package com.treasure_data.file;
import java.text.ParsePosition;
import java.text.SimpleDateFormat;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import org.supercsv.cellprocessor.CellProcessorAdaptor;
import org.supercsv.exception.SuperCsvCellProcessorException;
import org.supercsv.util.CsvContext;
import com.treasure_data.commands.CommandException;
public class TimeFormatSuggestionProcessor extends CellProcessorAdaptor {
public static enum TimeFormat {
INT("int", 0),
LONG("long", 1),
FLOAT("float", 2),
RFC_822_1123_FORMAT("RFC_822_1123", 3),
RFC_850_1036_FORMAT("RFC_850_1036", 4),
APACHE_CLF_FORMAT("Apache_CLF", 5),
ANSI_C_ASCTIME_FORMAT("ANSI_C_Asctime", 6);
private String type;
private int index;
TimeFormat(String type, int index) {
this.type = type;
this.index = index;
}
public String type() {
return type;
}
public int index() {
return index;
}
public static TimeFormat fromString(String type) {
return StringToTimeFormat.get(type);
}
public static TimeFormat fromInt(int index) {
return IntToTimeFormat.get(index);
}
private static class StringToTimeFormat {
private static final Map<String, TimeFormat> REVERSE_DICTIONARY;
static {
Map<String, TimeFormat> map = new HashMap<String, TimeFormat>();
for (TimeFormat elem : TimeFormat.values()) {
map.put(elem.type, elem);
}
REVERSE_DICTIONARY = Collections.unmodifiableMap(map);
}
static TimeFormat get(String key) {
return REVERSE_DICTIONARY.get(key);
}
}
private static class IntToTimeFormat {
private static final Map<Integer, TimeFormat> REVERSE_DICTIONARY;
static {
Map<Integer, TimeFormat> map = new HashMap<Integer, TimeFormat>();
for (TimeFormat elem : TimeFormat.values()) {
map.put(elem.index, elem);
}
REVERSE_DICTIONARY = Collections.unmodifiableMap(map);
}
static TimeFormat get(Integer index) {
return REVERSE_DICTIONARY.get(index);
}
}
}
private int[] scores = new int[] { 0, 0, 0, 0, 0, 0, 0 };
private TimeFormatMatcher[] matchers;
private int rowSize;
TimeFormatSuggestionProcessor(int rowSize) {
this.rowSize = rowSize;
this.matchers = new TimeFormatMatcher[7];
matchers[0] = new IntegerTimeFormatMatcher();
matchers[1] = new LongTimeFormatMatcher();
matchers[2] = new FloatTimeFormatMatcher();
matchers[3] = new RFC_822_1123_FormatMatcher();
matchers[4] = new RFC_850_1036_FormatMatcher();
matchers[5] = new ApacheCLFFormatMatcher();
matchers[6] = new ANSICAscTimeFormatMatcher();
}
void addHint() throws CommandException { // TODO e.g. strf time
throw new UnsupportedOperationException();
}
TimeFormat getSuggestedTimeFormat() {
int max = -rowSize;
int maxIndex = 0;
for (int i = 0; i < scores.length; i++) {
if (max < scores[i]) {
max = scores[i];
maxIndex = i;
}
}
return TimeFormat.fromInt(maxIndex);
}
TimeFormatProcessor createTimeFormatProcessor(TimeFormat tf)
throws CommandException {
switch (tf) {
case INT:
return new IntegerTimeFormatProcessor();
case LONG:
return new LongTimeFormatProcessor();
case FLOAT:
return new FloatTimeFormatProcessor();
case RFC_822_1123_FORMAT:
return new RFC_822_1123_FormatProcessor();
case RFC_850_1036_FORMAT:
return new RFC_850_1036_FormatProcessor();
case APACHE_CLF_FORMAT:
return new ApacheCLFFormatProcessor();
case ANSI_C_ASCTIME_FORMAT:
return new ANSICAscTimeFormatProcessor();
default:
throw new CommandException("fatal error");
}
}
@Override
public Object execute(Object value, CsvContext context) {
if (value == null) {
// no scores are changed for a null value
return null;
}
for (int i = 0; i < matchers.length; i++) {
if (matchers[i].match(value)) {
scores[i] += 1;
}
}
// null object is returned TODO ??
return next.execute(null, context);
}
public static interface TimeFormatMatcher {
public boolean match(Object v);
}
public static class TimeFormatProcessor extends CellProcessorAdaptor {
@Override
public Object execute(Object value, CsvContext context) {
return null;
}
}
public static class IntegerTimeFormatMatcher implements TimeFormatMatcher {
public boolean match(Object v) {
if (v instanceof Integer) {
return true;
} else if (v instanceof String) {
try {
Integer.parseInt((String) v);
return true;
} catch (NumberFormatException e) {
return false;
}
} else {
return false;
}
}
}
public static class IntegerTimeFormatProcessor extends TimeFormatProcessor {
@Override
public Object execute(Object value, CsvContext context) {
if (value instanceof Integer) {
return (Integer) value;
} else if (value instanceof String) {
try {
return Integer.parseInt((String) value);
} catch (NumberFormatException e) {
throw new SuperCsvCellProcessorException(String.format(
"'%s' could not be parsed as an Integer", value),
context, this, e);
}
} else {
final String actualClassName = value.getClass().getName();
throw new SuperCsvCellProcessorException(
String.format(
"the input value should be of type Integer or String but is of type %s",
actualClassName), context, this);
}
}
}
public static class LongTimeFormatMatcher implements TimeFormatMatcher {
public boolean match(Object v) {
if (v instanceof Long) {
return true;
} else if (v instanceof String) {
try {
Long.parseLong((String) v);
return true;
} catch (NumberFormatException e) {
return false;
}
} else {
return false;
}
}
}
public static class LongTimeFormatProcessor extends TimeFormatProcessor {
@Override
public Object execute(Object value, CsvContext context) {
if (value instanceof Long) {
return (Long) value;
} else if (value instanceof String) {
try {
return Long.parseLong((String) value);
} catch (NumberFormatException e) {
throw new SuperCsvCellProcessorException(String.format(
"'%s' could not be parsed as an Long", value),
context, this, e);
}
} else {
final String actualClassName = value.getClass().getName();
throw new SuperCsvCellProcessorException(
String.format(
"the input value should be of type Long or String but is of type %s",
actualClassName), context, this);
}
}
}
public static class FloatTimeFormatMatcher implements TimeFormatMatcher {
public boolean match(Object v) {
if (v instanceof Float) {
return true;
} else if (v instanceof String) {
try {
Float.parseFloat((String) v);
return true;
} catch (NumberFormatException e) {
return false;
}
} else {
return false;
}
}
}
public static class FloatTimeFormatProcessor extends TimeFormatProcessor {
@Override
public Object execute(Object value, CsvContext context) {
if (value instanceof Float) {
return (long) ((float) ((Float) value));
} else if (value instanceof String) {
try {
return (long) Float.parseFloat((String) value);
} catch (NumberFormatException e) {
throw new SuperCsvCellProcessorException(String.format(
"'%s' could not be parsed as an Float", value),
context, this, e);
}
} else {
final String actualClassName = value.getClass().getName();
throw new SuperCsvCellProcessorException(
String.format(
"the input value should be of type Float or String but is of type %s",
actualClassName), context, this);
}
}
}
public abstract static class SimpleDateFormatMatcher implements
TimeFormatMatcher {
public boolean match(Object v) {
if (! (v instanceof String)) {
return false;
}
String text = (String) v;
ParsePosition pp = new ParsePosition(0);
Date d = getFormat().parse(text, pp);
return d != null && pp.getErrorIndex() == -1;
}
protected abstract SimpleDateFormat getFormat();
}
public abstract static class SimpleDateFormatProcessor extends TimeFormatProcessor {
@Override
public Object execute(Object value, CsvContext context) {
if (value == null) {
throw new SuperCsvCellProcessorException("value is null",
context, this);
}
if (!(value instanceof String)) {
final String actualClassName = value.getClass().getName();
throw new SuperCsvCellProcessorException(
String.format(
"the input value should be of type String but is of type %s",
actualClassName), context, this);
}
String text = (String) value;
ParsePosition pp = new ParsePosition(0);
Date d = getFormat().parse(text, pp);
return d.getTime() / 1000;
}
protected abstract SimpleDateFormat getFormat();
}
public static class RFC_822_1123_FormatMatcher extends
SimpleDateFormatMatcher {
private final SimpleDateFormat RFC_822_1123_FORMAT = new SimpleDateFormat(
"EEE, dd MMM yyyy HH:mm:ss z", Locale.ENGLISH);
@Override
public SimpleDateFormat getFormat() {
return RFC_822_1123_FORMAT;
}
}
public static class RFC_822_1123_FormatProcessor extends
SimpleDateFormatProcessor {
private final SimpleDateFormat RFC_822_1123_FORMAT = new SimpleDateFormat(
"EEE, dd MMM yyyy HH:mm:ss z", Locale.ENGLISH);
@Override
public SimpleDateFormat getFormat() {
return RFC_822_1123_FORMAT;
}
}
public static class RFC_850_1036_FormatMatcher extends
SimpleDateFormatMatcher {
private final SimpleDateFormat RFC_850_1036_FORMAT = new SimpleDateFormat(
"EEEE, dd-MMM-yy HH:mm:ss z", Locale.ENGLISH);
@Override
public SimpleDateFormat getFormat() {
return RFC_850_1036_FORMAT;
}
}
public static class RFC_850_1036_FormatProcessor extends
SimpleDateFormatProcessor {
private final SimpleDateFormat RFC_850_1036_FORMAT = new SimpleDateFormat(
"EEEE, dd-MMM-yy HH:mm:ss z", Locale.ENGLISH);
@Override
public SimpleDateFormat getFormat() {
return RFC_850_1036_FORMAT;
}
}
public static class ApacheCLFFormatMatcher extends SimpleDateFormatMatcher {
private final SimpleDateFormat APACHE_CLF_FORMAT = new SimpleDateFormat(
"dd/MMM/yyyy HH:mm:ss Z", Locale.ENGLISH);
@Override
public SimpleDateFormat getFormat() {
return APACHE_CLF_FORMAT;
}
}
public static class ApacheCLFFormatProcessor extends
SimpleDateFormatProcessor {
private final SimpleDateFormat APACHE_CLF_FORMAT = new SimpleDateFormat(
"dd/MMM/yyyy HH:mm:ss Z", Locale.ENGLISH);
@Override
public SimpleDateFormat getFormat() {
return APACHE_CLF_FORMAT;
}
}
public static class ANSICAscTimeFormatMatcher extends
SimpleDateFormatMatcher {
private final SimpleDateFormat ANSI_C_ASCTIME_FORMAT = new SimpleDateFormat(
"EEE MMM d HH:mm:ss yyyy", Locale.ENGLISH);
@Override
public SimpleDateFormat getFormat() {
return ANSI_C_ASCTIME_FORMAT;
}
}
public static class ANSICAscTimeFormatProcessor extends
SimpleDateFormatProcessor {
private final SimpleDateFormat ANSI_C_ASCTIME_FORMAT = new SimpleDateFormat(
"EEE MMM d HH:mm:ss yyyy", Locale.ENGLISH);
@Override
public SimpleDateFormat getFormat() {
return ANSI_C_ASCTIME_FORMAT;
}
}
}
|
modified TimeFormatSuggestionProcessor class
|
src/main/java/com/treasure_data/file/TimeFormatSuggestionProcessor.java
|
modified TimeFormatSuggestionProcessor class
|
<ide><path>rc/main/java/com/treasure_data/file/TimeFormatSuggestionProcessor.java
<ide> String text = (String) value;
<ide> ParsePosition pp = new ParsePosition(0);
<ide> Date d = getFormat().parse(text, pp);
<del> return d.getTime() / 1000;
<add> if (d != null && pp.getErrorIndex() == -1) {
<add> throw new SuperCsvCellProcessorException(String.format(
<add> "the input value cannot be parsed by the format %s",
<add> getFormat()), context, this);
<add> } else {
<add> return d.getTime() / 1000;
<add> }
<ide> }
<ide>
<ide> protected abstract SimpleDateFormat getFormat();
|
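The added check above guards the result of SimpleDateFormat.parse(String, ParsePosition), which signals failure by returning null and leaving an error index set on the ParsePosition. The matcher classes in this file treat d != null && pp.getErrorIndex() == -1 as success, so the success and failure branches of the patched processor appear to be swapped. A sketch of the conventional handling, reusing one of the formats defined above (the class and method names are illustrative, and SimpleDateFormat is not thread-safe, so this assumes single-threaded use):
import java.text.ParsePosition;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Locale;
final class LenientTimeParseSketch {
    private static final SimpleDateFormat APACHE_CLF =
        new SimpleDateFormat("dd/MMM/yyyy HH:mm:ss Z", Locale.ENGLISH);
    /** Returns epoch seconds, or null when the text does not match the format. */
    static Long toEpochSeconds(String text) {
        ParsePosition pp = new ParsePosition(0);
        Date d = APACHE_CLF.parse(text, pp);
        if (d == null || pp.getErrorIndex() != -1) {
            return null;              // parse failed: no Date, or an error index was recorded
        }
        return d.getTime() / 1000L;   // parse succeeded
    }
}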
|
Java
|
bsd-3-clause
|
73ff3711f9708326abf14e4de3305f475d97c84f
| 0 |
LiquidEngine/legui
|
package org.liquidengine.legui.system.renderer.nvg.util;
import static org.lwjgl.nanovg.NanoVG.NVG_ALIGN_BASELINE;
import static org.lwjgl.nanovg.NanoVG.NVG_ALIGN_BOTTOM;
import static org.lwjgl.nanovg.NanoVG.NVG_ALIGN_CENTER;
import static org.lwjgl.nanovg.NanoVG.NVG_ALIGN_LEFT;
import static org.lwjgl.nanovg.NanoVG.NVG_ALIGN_MIDDLE;
import static org.lwjgl.nanovg.NanoVG.NVG_ALIGN_RIGHT;
import static org.lwjgl.nanovg.NanoVG.NVG_ALIGN_TOP;
import static org.lwjgl.nanovg.NanoVG.NVG_HOLE;
import static org.lwjgl.nanovg.NanoVG.nvgBeginPath;
import static org.lwjgl.nanovg.NanoVG.nvgBoxGradient;
import static org.lwjgl.nanovg.NanoVG.nvgFill;
import static org.lwjgl.nanovg.NanoVG.nvgFillPaint;
import static org.lwjgl.nanovg.NanoVG.nvgIntersectScissor;
import static org.lwjgl.nanovg.NanoVG.nvgPathWinding;
import static org.lwjgl.nanovg.NanoVG.nvgRect;
import static org.lwjgl.nanovg.NanoVG.nvgResetScissor;
import static org.lwjgl.nanovg.NanoVG.nvgRoundedRect;
import static org.lwjgl.nanovg.NanoVG.nvgScissor;
import static org.lwjgl.nanovg.NanoVG.nvgTextAlign;
import static org.lwjgl.nanovg.NanoVG.nvgTextBounds;
import static org.lwjgl.system.MemoryUtil.memFree;
import static org.lwjgl.system.MemoryUtil.memUTF8;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
import org.joml.Vector2f;
import org.joml.Vector4f;
import org.liquidengine.legui.component.Component;
import org.liquidengine.legui.component.optional.align.HorizontalAlign;
import org.liquidengine.legui.component.optional.align.VerticalAlign;
import org.lwjgl.nanovg.NVGColor;
import org.lwjgl.nanovg.NVGPaint;
/**
* Created by Aliaksandr_Shcherbin on 2/2/2017.
*/
public final class NvgRenderUtils {
/**
* Private constructor.
*/
private NvgRenderUtils() {
}
public static float[] calculateTextBoundsRect(long context, Vector4f rect, String text, HorizontalAlign horizontalAlign, VerticalAlign verticalAlign) {
return calculateTextBoundsRect(context, rect.x, rect.y, rect.z, rect.w, text, horizontalAlign, verticalAlign);
}
public static float[] calculateTextBoundsRect(long context, float x, float y, float w, float h, String text, HorizontalAlign horizontalAlign,
VerticalAlign verticalAlign) {
ByteBuffer byteText = null;
try {
byteText = memUTF8(text, false);
return calculateTextBoundsRect(context, x, y, w, h, byteText, horizontalAlign, verticalAlign);
} finally {
if (byteText != null) {
memFree(byteText);
}
}
}
public static float[] calculateTextBoundsRect(long context, float x, float y, float w, float h, ByteBuffer text, HorizontalAlign horizontalAlign,
VerticalAlign verticalAlign) {
float bounds[] = new float[4];
nvgTextBounds(context, x, y, text, bounds);
return createBounds(x, y, w, h, horizontalAlign, verticalAlign, bounds);
}
public static float[] createBounds(float x, float y, float w, float h, HorizontalAlign horizontalAlign, VerticalAlign verticalAlign, float[] bounds) {
float ww = bounds[2] - bounds[0];
float hh = bounds[3] - bounds[1];
return createBounds(x, y, w, h, horizontalAlign, verticalAlign, /*bounds, */ww, hh);
}
public static float[] createBounds(float w, float h, HorizontalAlign horizontalAlign, VerticalAlign verticalAlign, float[] bounds, float ww, float hh) {
int hp = horizontalAlign == HorizontalAlign.LEFT ? 0 : horizontalAlign == HorizontalAlign.CENTER ? 1 : 2;
int vp = verticalAlign == VerticalAlign.TOP ? 0 : verticalAlign == VerticalAlign.MIDDLE ? 1 : verticalAlign == VerticalAlign.BOTTOM ? 2 : 3;
float x1 = bounds[0] + (w + ww) * 0.5f * hp;
float baseline = (vp > 2 ? hh / 4.0f : 0);
float vv = (vp == 3 ? 1 : vp);
float y1 = bounds[1] + (h + hh) * 0.5f * vv + (vp > 2 ? (+baseline) : 0);
return new float[]{
x1, y1, ww, hh,
x1 - (ww * 0.5f * hp), y1 - (hh * 0.5f * vv) - baseline, ww, hh
};
}
public static float[] createBounds(float x, float y, float w, float h, HorizontalAlign horizontalAlign, VerticalAlign verticalAlign, float tw, float th) {
int hp = horizontalAlign.index;
int vp = verticalAlign.index;
float x1 = x + w * 0.5f * hp;
float baseline = (vp > 2 ? th / 4.0f : 0);
float vv = (vp == 3 ? 1 : vp);
float y1 = y + h * 0.5f * vv + (vp > 2 ? (+baseline) : 0);
return new float[]{
x1, y1, tw, th,
x1 - (tw * 0.5f * hp), y1 - (th * 0.5f * vv) - baseline, tw, th
};
}
public static void alignTextInBox(long context, HorizontalAlign hAlig, VerticalAlign vAlig) {
int hAlign = hAlig == HorizontalAlign.CENTER ? NVG_ALIGN_CENTER : hAlig == HorizontalAlign.LEFT ? NVG_ALIGN_LEFT : NVG_ALIGN_RIGHT;
int vAlign = vAlig == VerticalAlign.TOP ? NVG_ALIGN_TOP : vAlig == VerticalAlign.BOTTOM ?
NVG_ALIGN_BOTTOM : vAlig == VerticalAlign.MIDDLE ? NVG_ALIGN_MIDDLE : NVG_ALIGN_BASELINE;
nvgTextAlign(context, hAlign | vAlign);
}
public static void dropShadow(long context, float x, float y, float w, float h, float cornerRadius, Vector4f shadowColor) {
NVGPaint shadowPaint = NVGPaint.calloc();
NVGColor colorA = NVGColor.calloc();
NVGColor colorB = NVGColor.calloc();
nvgBoxGradient(context, x, y + 2, w, h, cornerRadius * 2, 10, NvgColorUtil.rgba(shadowColor, colorA), NvgColorUtil.rgba(0, 0, 0, 0, colorB),
shadowPaint);
nvgBeginPath(context);
nvgRect(context, x - 10, y - 10, w + 20, h + 30);
nvgRoundedRect(context, x, y, w, h, cornerRadius);
nvgPathWinding(context, NVG_HOLE);
nvgFillPaint(context, shadowPaint);
nvgFill(context);
shadowPaint.free();
colorA.free();
colorB.free();
}
/**
* Creates scissor for provided component by it's parent components.
*
* @param context nanovg context.
* @param gui {@link Component}.
*/
public static void createScissor(long context, Component gui) {
Component parent = gui.getParent();
createScissorByParent(context, parent);
}
/**
* Creates scissor for provided bounds.
*
* @param context nanovg context.
* @param bounds bounds.
*/
public static void createScissor(long context, Vector4f bounds) {
nvgScissor(context, bounds.x, bounds.y, bounds.z, bounds.w);
}
/**
* Intersects scissor for provided bounds.
*
* @param context nanovg context.
* @param bounds bounds.
*/
public static void intersectScissor(long context, Vector4f bounds) {
nvgIntersectScissor(context, bounds.x, bounds.y, bounds.z, bounds.w);
}
/**
* Creates scissor by provided component and it's parent components.
*
* @param context nanovg context.
* @param parent parent component.
*/
public static void createScissorByParent(long context, Component parent) {
List<Component> parents = new ArrayList<>();
while (parent != null) {
parents.add(parent);
parent = parent.getParent();
}
Vector2f pos = new Vector2f();
int size = parents.size();
if (size > 0) {
parent = parents.get(size-1);
pos.add(parent.getPosition());
Vector2f s = parent.getSize();
createScissor(context, new Vector4f(pos, s.x, s.y));
if (size > 1) {
for (int i = size - 2; i >= 0; i--) {
parent = parents.get(i);
s = parent.getSize();
pos.add(parent.getPosition());
nvgIntersectScissor(context, pos.x, pos.y, s.x, s.y);
}
}
}
}
/**
* Used to reset scissor.
*
* @param context nanovg context pointer.
*/
public static void resetScissor(long context) {
nvgResetScissor(context);
}
/**
* Used to call function wrapped to scissor call.
*
* @param nanovg nanovg context.
* @param component component to create scissor.
* @param function function to call.
*/
public static void drawInScissor(long nanovg, Component component, Runnable function) {
createScissor(nanovg, component);
function.run();
resetScissor(nanovg);
}
public static boolean visibleInParents(Component component) {
List<Component> parentList = new ArrayList<>();
for (Component parent = component.getParent(); parent != null; parent = parent.getParent()) {
parentList.add(parent);
}
if (parentList.size() > 0) {
Vector2f pos = new Vector2f(0, 0);
Vector2f rect = new Vector2f(0, 0);
Vector2f absolutePosition = component.getAbsolutePosition();
Vector2f cSize = component.getSize();
Vector2f cPos = component.getPosition();
float lx = absolutePosition.x;
float rx = absolutePosition.x + cSize.x;
float ty = absolutePosition.y;
float by = absolutePosition.y + cSize.y;
// check top parent
if (cPos.x > component.getParent().getSize().x ||
cPos.x + cSize.x < 0 ||
cPos.y > component.getParent().getSize().y ||
cPos.y + cSize.y < 0
) {
return false;
}
if (parentList.size() != 1) {
// check from bottom parent to top parent
for (int i = parentList.size() - 1; i >= 1; i--) {
Component parent = parentList.get(i);
pos.add(parent.getPosition());
rect.set(pos).add(parent.getSize());
if (lx > rect.x || rx < pos.x || ty > rect.y || by < pos.y) {
return false;
}
}
}
}
return true;
}
}
|
src/main/java/org/liquidengine/legui/system/renderer/nvg/util/NvgRenderUtils.java
|
package org.liquidengine.legui.system.renderer.nvg.util;
import static org.lwjgl.nanovg.NanoVG.NVG_ALIGN_BASELINE;
import static org.lwjgl.nanovg.NanoVG.NVG_ALIGN_BOTTOM;
import static org.lwjgl.nanovg.NanoVG.NVG_ALIGN_CENTER;
import static org.lwjgl.nanovg.NanoVG.NVG_ALIGN_LEFT;
import static org.lwjgl.nanovg.NanoVG.NVG_ALIGN_MIDDLE;
import static org.lwjgl.nanovg.NanoVG.NVG_ALIGN_RIGHT;
import static org.lwjgl.nanovg.NanoVG.NVG_ALIGN_TOP;
import static org.lwjgl.nanovg.NanoVG.NVG_HOLE;
import static org.lwjgl.nanovg.NanoVG.nvgBeginPath;
import static org.lwjgl.nanovg.NanoVG.nvgBoxGradient;
import static org.lwjgl.nanovg.NanoVG.nvgFill;
import static org.lwjgl.nanovg.NanoVG.nvgFillPaint;
import static org.lwjgl.nanovg.NanoVG.nvgIntersectScissor;
import static org.lwjgl.nanovg.NanoVG.nvgPathWinding;
import static org.lwjgl.nanovg.NanoVG.nvgRect;
import static org.lwjgl.nanovg.NanoVG.nvgResetScissor;
import static org.lwjgl.nanovg.NanoVG.nvgRoundedRect;
import static org.lwjgl.nanovg.NanoVG.nvgScissor;
import static org.lwjgl.nanovg.NanoVG.nvgTextAlign;
import static org.lwjgl.nanovg.NanoVG.nvgTextBounds;
import static org.lwjgl.system.MemoryUtil.memFree;
import static org.lwjgl.system.MemoryUtil.memUTF8;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
import org.joml.Vector2f;
import org.joml.Vector4f;
import org.liquidengine.legui.component.Component;
import org.liquidengine.legui.component.optional.align.HorizontalAlign;
import org.liquidengine.legui.component.optional.align.VerticalAlign;
import org.lwjgl.nanovg.NVGColor;
import org.lwjgl.nanovg.NVGPaint;
/**
* Created by Aliaksandr_Shcherbin on 2/2/2017.
*/
public final class NvgRenderUtils {
/**
* Private constructor.
*/
private NvgRenderUtils() {
}
public static float[] calculateTextBoundsRect(long context, Vector4f rect, String text, HorizontalAlign horizontalAlign, VerticalAlign verticalAlign) {
return calculateTextBoundsRect(context, rect.x, rect.y, rect.z, rect.w, text, horizontalAlign, verticalAlign);
}
public static float[] calculateTextBoundsRect(long context, float x, float y, float w, float h, String text, HorizontalAlign horizontalAlign,
VerticalAlign verticalAlign) {
ByteBuffer byteText = null;
try {
byteText = memUTF8(text, false);
return calculateTextBoundsRect(context, x, y, w, h, byteText, horizontalAlign, verticalAlign);
} finally {
if (byteText != null) {
memFree(byteText);
}
}
}
public static float[] calculateTextBoundsRect(long context, float x, float y, float w, float h, ByteBuffer text, HorizontalAlign horizontalAlign,
VerticalAlign verticalAlign) {
float bounds[] = new float[4];
nvgTextBounds(context, x, y, text, bounds);
return createBounds(x, y, w, h, horizontalAlign, verticalAlign, bounds);
}
public static float[] createBounds(float x, float y, float w, float h, HorizontalAlign horizontalAlign, VerticalAlign verticalAlign, float[] bounds) {
float ww = bounds[2] - bounds[0];
float hh = bounds[3] - bounds[1];
return createBounds(x, y, w, h, horizontalAlign, verticalAlign, /*bounds, */ww, hh);
}
public static float[] createBounds(float w, float h, HorizontalAlign horizontalAlign, VerticalAlign verticalAlign, float[] bounds, float ww, float hh) {
int hp = horizontalAlign == HorizontalAlign.LEFT ? 0 : horizontalAlign == HorizontalAlign.CENTER ? 1 : 2;
int vp = verticalAlign == VerticalAlign.TOP ? 0 : verticalAlign == VerticalAlign.MIDDLE ? 1 : verticalAlign == VerticalAlign.BOTTOM ? 2 : 3;
float x1 = bounds[0] + (w + ww) * 0.5f * hp;
float baseline = (vp > 2 ? hh / 4.0f : 0);
float vv = (vp == 3 ? 1 : vp);
float y1 = bounds[1] + (h + hh) * 0.5f * vv + (vp > 2 ? (+baseline) : 0);
return new float[]{
x1, y1, ww, hh,
x1 - (ww * 0.5f * hp), y1 - (hh * 0.5f * vv) - baseline, ww, hh
};
}
public static float[] createBounds(float x, float y, float w, float h, HorizontalAlign horizontalAlign, VerticalAlign verticalAlign, float tw, float th) {
int hp = horizontalAlign.index;
int vp = verticalAlign.index;
float x1 = x + w * 0.5f * hp;
float baseline = (vp > 2 ? th / 4.0f : 0);
float vv = (vp == 3 ? 1 : vp);
float y1 = y + h * 0.5f * vv + (vp > 2 ? (+baseline) : 0);
return new float[]{
x1, y1, tw, th,
x1 - (tw * 0.5f * hp), y1 - (th * 0.5f * vv) - baseline, tw, th
};
}
public static void alignTextInBox(long context, HorizontalAlign hAlig, VerticalAlign vAlig) {
int hAlign = hAlig == HorizontalAlign.CENTER ? NVG_ALIGN_CENTER : hAlig == HorizontalAlign.LEFT ? NVG_ALIGN_LEFT : NVG_ALIGN_RIGHT;
int vAlign = vAlig == VerticalAlign.TOP ? NVG_ALIGN_TOP : vAlig == VerticalAlign.BOTTOM ?
NVG_ALIGN_BOTTOM : vAlig == VerticalAlign.MIDDLE ? NVG_ALIGN_MIDDLE : NVG_ALIGN_BASELINE;
nvgTextAlign(context, hAlign | vAlign);
}
public static void dropShadow(long context, float x, float y, float w, float h, float cornerRadius, Vector4f shadowColor) {
NVGPaint shadowPaint = NVGPaint.calloc();
NVGColor colorA = NVGColor.calloc();
NVGColor colorB = NVGColor.calloc();
nvgBoxGradient(context, x, y + 2, w, h, cornerRadius * 2, 10, NvgColorUtil.rgba(shadowColor, colorA), NvgColorUtil.rgba(0, 0, 0, 0, colorB),
shadowPaint);
nvgBeginPath(context);
nvgRect(context, x - 10, y - 10, w + 20, h + 30);
nvgRoundedRect(context, x, y, w, h, cornerRadius);
nvgPathWinding(context, NVG_HOLE);
nvgFillPaint(context, shadowPaint);
nvgFill(context);
shadowPaint.free();
colorA.free();
colorB.free();
}
/**
* Creates scissor for provided component by it's parent components.
*
* @param context nanovg context.
* @param gui {@link Component}.
*/
public static void createScissor(long context, Component gui) {
Component parent = gui.getParent();
createScissorByParent(context, parent);
}
/**
* Creates scissor for provided bounds.
*
* @param context nanovg context.
* @param bounds bounds.
*/
public static void createScissor(long context, Vector4f bounds) {
nvgScissor(context, bounds.x, bounds.y, bounds.z, bounds.w);
}
/**
* Intersects scissor for provided bounds.
*
* @param context nanovg context.
* @param bounds bounds.
*/
public static void intersectScissor(long context, Vector4f bounds) {
nvgIntersectScissor(context, bounds.x, bounds.y, bounds.z, bounds.w);
}
/**
* Creates scissor by provided component and it's parent components.
*
* @param context nanovg context.
* @param parent parent component.
*/
public static void createScissorByParent(long context, Component parent) {
if (parent != null) {
Vector2f p = parent.getAbsolutePosition();
Vector2f s = parent.getSize();
createScissor(context, new Vector4f(p, s.x, s.y));
while ((parent = parent.getParent()) != null) {
p = parent.getAbsolutePosition();
s = parent.getSize();
nvgIntersectScissor(context, p.x, p.y, s.x, s.y);
}
}
}
/**
* Used to reset scissor.
*
* @param context nanovg context pointer.
*/
public static void resetScissor(long context) {
nvgResetScissor(context);
}
/**
* Used to call function wrapped to scissor call.
*
* @param nanovg nanovg context.
* @param component component to create scissor.
* @param function function to call.
*/
public static void drawInScissor(long nanovg, Component component, Runnable function) {
createScissor(nanovg, component);
function.run();
resetScissor(nanovg);
}
public static boolean visibleInParents(Component component) {
List<Component> parentList = new ArrayList<>();
for (Component parent = component.getParent(); parent != null; parent = parent.getParent()) {
parentList.add(parent);
}
if (parentList.size() > 0) {
Vector2f pos = new Vector2f(0, 0);
Vector2f rect = new Vector2f(0, 0);
Vector2f absolutePosition = component.getAbsolutePosition();
Vector2f cSize = component.getSize();
Vector2f cPos = component.getPosition();
float lx = absolutePosition.x;
float rx = absolutePosition.x + cSize.x;
float ty = absolutePosition.y;
float by = absolutePosition.y + cSize.y;
// check top parent
if (cPos.x > component.getParent().getSize().x ||
cPos.x + cSize.x < 0 ||
cPos.y > component.getParent().getSize().y ||
cPos.y + cSize.y < 0
) {
return false;
}
if (parentList.size() != 1) {
// check from bottom parent to top parent
for (int i = parentList.size() - 1; i >= 1; i--) {
Component parent = parentList.get(i);
pos.add(parent.getPosition());
rect.set(pos).add(parent.getSize());
if (lx > rect.x || rx < pos.x || ty > rect.y || by < pos.y) {
return false;
}
}
}
}
return true;
}
}
|
Refactored scissor creation method
|
src/main/java/org/liquidengine/legui/system/renderer/nvg/util/NvgRenderUtils.java
|
Refactored scissor creation method
|
<ide><path>rc/main/java/org/liquidengine/legui/system/renderer/nvg/util/NvgRenderUtils.java
<ide> * @param parent parent component.
<ide> */
<ide> public static void createScissorByParent(long context, Component parent) {
<del> if (parent != null) {
<del> Vector2f p = parent.getAbsolutePosition();
<add> List<Component> parents = new ArrayList<>();
<add> while (parent != null) {
<add> parents.add(parent);
<add> parent = parent.getParent();
<add> }
<add> Vector2f pos = new Vector2f();
<add> int size = parents.size();
<add> if (size > 0) {
<add> parent = parents.get(size-1);
<add> pos.add(parent.getPosition());
<ide> Vector2f s = parent.getSize();
<del>
<del> createScissor(context, new Vector4f(p, s.x, s.y));
<del>
<del> while ((parent = parent.getParent()) != null) {
<del> p = parent.getAbsolutePosition();
<del> s = parent.getSize();
<del> nvgIntersectScissor(context, p.x, p.y, s.x, s.y);
<add> createScissor(context, new Vector4f(pos, s.x, s.y));
<add> if (size > 1) {
<add> for (int i = size - 2; i >= 0; i--) {
<add> parent = parents.get(i);
<add> s = parent.getSize();
<add> pos.add(parent.getPosition());
<add> nvgIntersectScissor(context, pos.x, pos.y, s.x, s.y);
<add> }
<ide> }
<ide> }
<ide> }
|
|
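The refactored createScissorByParent above collects the parent chain, then walks it from the outermost ancestor down, accumulating the parent-relative positions and intersecting the scissor rectangle at each level. The dependency-free sketch below shows the same accumulate-and-intersect idea; the Node type and clipRectForParents method are illustrative stand-ins for legui's Component hierarchy and the NanoVG scissor calls, not code from the library.
import java.util.ArrayList;
import java.util.List;
public class ScissorSketch {
    static class Node {
        final float x, y, w, h; // position is relative to the parent
        final Node parent;
        Node(Node parent, float x, float y, float w, float h) {
            this.parent = parent; this.x = x; this.y = y; this.w = w; this.h = h;
        }
    }
    // Intersection of all ancestor rectangles, in absolute coordinates: {x, y, width, height}.
    static float[] clipRectForParents(Node component) {
        List<Node> parents = new ArrayList<>();
        for (Node p = component.parent; p != null; p = p.parent) {
            parents.add(p);
        }
        if (parents.isEmpty()) {
            return null; // no parent, nothing to clip against
        }
        Node root = parents.get(parents.size() - 1); // outermost ancestor first
        float px = root.x, py = root.y;
        float left = px, top = py, right = px + root.w, bottom = py + root.h;
        for (int i = parents.size() - 2; i >= 0; i--) { // then each nested parent
            Node p = parents.get(i);
            px += p.x; py += p.y;
            left = Math.max(left, px);
            top = Math.max(top, py);
            right = Math.min(right, px + p.w);
            bottom = Math.min(bottom, py + p.h);
        }
        return new float[]{left, top, Math.max(0, right - left), Math.max(0, bottom - top)};
    }
    public static void main(String[] args) {
        Node frame = new Node(null, 10, 10, 200, 200);
        Node panel = new Node(frame, 50, 50, 300, 100); // wider than the frame
        Node label = new Node(panel, 5, 5, 20, 20);
        float[] clip = clipRectForParents(label);
        System.out.printf("x=%.0f y=%.0f w=%.0f h=%.0f%n", clip[0], clip[1], clip[2], clip[3]); // x=60 y=60 w=150 h=100
    }
}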
Java
|
apache-2.0
|
bf3849f6ca4bb75d112ce6e40b7214169c3e5371
| 0 |
redisson/redisson
|
/**
* Copyright (c) 2013-2021 Nikita Koksharov
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.redisson.connection.pool;
import org.redisson.api.NodeType;
import org.redisson.api.RFuture;
import org.redisson.client.RedisConnection;
import org.redisson.client.RedisConnectionException;
import org.redisson.client.protocol.RedisCommand;
import org.redisson.client.protocol.RedisCommands;
import org.redisson.config.MasterSlaveServersConfig;
import org.redisson.connection.ClientConnectionsEntry;
import org.redisson.connection.ClientConnectionsEntry.FreezeReason;
import org.redisson.connection.ConnectionManager;
import org.redisson.connection.MasterSlaveEntry;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.net.InetSocketAddress;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Queue;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionStage;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
/**
* Base connection pool class
*
* @author Nikita Koksharov
*
* @param <T> - connection type
*/
abstract class ConnectionPool<T extends RedisConnection> {
private final Logger log = LoggerFactory.getLogger(getClass());
protected final Queue<ClientConnectionsEntry> entries = new ConcurrentLinkedQueue<>();
final ConnectionManager connectionManager;
final MasterSlaveServersConfig config;
final MasterSlaveEntry masterSlaveEntry;
ConnectionPool(MasterSlaveServersConfig config, ConnectionManager connectionManager, MasterSlaveEntry masterSlaveEntry) {
this.config = config;
this.masterSlaveEntry = masterSlaveEntry;
this.connectionManager = connectionManager;
}
public CompletableFuture<Void> add(ClientConnectionsEntry entry) {
CompletableFuture<Void> promise = initConnections(entry, true);
return promise.thenAccept(r -> {
entries.add(entry);
});
}
public CompletableFuture<Void> initConnections(ClientConnectionsEntry entry) {
return initConnections(entry, false);
}
private CompletableFuture<Void> initConnections(ClientConnectionsEntry entry, boolean checkFreezed) {
int minimumIdleSize = getMinimumIdleSize(entry);
if (minimumIdleSize == 0 || (checkFreezed && entry.isFreezed())) {
return CompletableFuture.completedFuture(null);
}
CompletableFuture<Void> initPromise = new CompletableFuture<>();
AtomicInteger initializedConnections = new AtomicInteger(minimumIdleSize);
int startAmount = Math.min(2, minimumIdleSize);
AtomicInteger requests = new AtomicInteger(startAmount);
for (int i = 0; i < startAmount; i++) {
createConnection(checkFreezed, requests, entry, initPromise, minimumIdleSize, initializedConnections);
}
return initPromise;
}
private void createConnection(boolean checkFreezed, AtomicInteger requests, ClientConnectionsEntry entry,
CompletableFuture<Void> initPromise, int minimumIdleSize, AtomicInteger initializedConnections) {
if ((checkFreezed && entry.isFreezed()) || !tryAcquireConnection(entry)) {
int totalInitializedConnections = minimumIdleSize - initializedConnections.get();
Throwable cause = new RedisConnectionException(
"Unable to init enough connections amount! Only " + totalInitializedConnections + " of " + minimumIdleSize + " were initialized. Server: "
+ entry.getClient().getAddr());
initPromise.completeExceptionally(cause);
return;
}
CompletableFuture<Void> f = acquireConnection(entry, null);
f.thenAccept(r -> {
CompletableFuture<T> promise = new CompletableFuture<T>();
createConnection(entry, promise);
promise.whenComplete((conn, e) -> {
if (e == null) {
if (!initPromise.isDone()) {
entry.addConnection(conn);
} else {
conn.closeAsync();
}
}
releaseConnection(entry);
if (e != null) {
if (initPromise.isDone()) {
return;
}
for (RedisConnection connection : entry.getAllConnections()) {
if (!connection.isClosed()) {
connection.closeAsync();
}
}
entry.getAllConnections().clear();
for (RedisConnection connection : entry.getAllSubscribeConnections()) {
if (!connection.isClosed()) {
connection.closeAsync();
}
}
entry.getAllSubscribeConnections().clear();
int totalInitializedConnections = minimumIdleSize - initializedConnections.get();
String errorMsg;
if (totalInitializedConnections == 0) {
errorMsg = "Unable to connect to Redis server: " + entry.getClient().getAddr();
} else {
errorMsg = "Unable to init enough connections amount! Only " + totalInitializedConnections
+ " of " + minimumIdleSize + " were initialized. Redis server: " + entry.getClient().getAddr();
}
Throwable cause = new RedisConnectionException(errorMsg, e);
initPromise.completeExceptionally(cause);
return;
}
int value = initializedConnections.decrementAndGet();
if (value == 0) {
if (initPromise.complete(null)) {
log.info("{} connections initialized for {}", minimumIdleSize, entry.getClient().getAddr());
}
} else if (value > 0 && !initPromise.isDone()) {
if (requests.incrementAndGet() <= minimumIdleSize) {
createConnection(checkFreezed, requests, entry, initPromise, minimumIdleSize, initializedConnections);
}
}
});
});
}
protected CompletableFuture<Void> acquireConnection(ClientConnectionsEntry entry, RedisCommand<?> command) {
return entry.acquireConnection(command);
}
protected abstract int getMinimumIdleSize(ClientConnectionsEntry entry);
public CompletableFuture<T> get(RedisCommand<?> command) {
List<ClientConnectionsEntry> entriesCopy = new LinkedList<ClientConnectionsEntry>(entries);
for (Iterator<ClientConnectionsEntry> iterator = entriesCopy.iterator(); iterator.hasNext();) {
ClientConnectionsEntry entry = iterator.next();
if (!((!entry.isFreezed() || entry.isMasterForRead())
&& tryAcquireConnection(entry))) {
iterator.remove();
}
}
if (!entriesCopy.isEmpty()) {
ClientConnectionsEntry entry = config.getLoadBalancer().getEntry(entriesCopy);
return acquireConnection(command, entry);
}
List<InetSocketAddress> failed = new LinkedList<>();
List<InetSocketAddress> freezed = new LinkedList<>();
for (ClientConnectionsEntry entry : entries) {
if (entry.isFailed()) {
failed.add(entry.getClient().getAddr());
} else if (entry.isFreezed()) {
freezed.add(entry.getClient().getAddr());
}
}
StringBuilder errorMsg = new StringBuilder(getClass().getSimpleName() + " no available Redis entries. Master entry host: " + masterSlaveEntry.getClient().getAddr());
if (!freezed.isEmpty()) {
errorMsg.append(" Disconnected hosts: ").append(freezed);
}
if (!failed.isEmpty()) {
errorMsg.append(" Hosts disconnected due to errors during `failedSlaveCheckInterval`: ").append(failed);
}
RedisConnectionException exception = new RedisConnectionException(errorMsg.toString());
CompletableFuture<T> result = new CompletableFuture<>();
result.completeExceptionally(exception);
return result;
}
public CompletableFuture<T> get(RedisCommand<?> command, ClientConnectionsEntry entry) {
return acquireConnection(command, entry);
}
protected final CompletableFuture<T> acquireConnection(RedisCommand<?> command, ClientConnectionsEntry entry) {
CompletableFuture<T> result = new CompletableFuture<T>();
CompletableFuture<Void> f = acquireConnection(entry, command);
f.thenAccept(r -> {
connectTo(entry, result, command);
});
result.whenComplete((r, e) -> {
if (e != null) {
f.completeExceptionally(e);
}
});
return result;
}
protected boolean tryAcquireConnection(ClientConnectionsEntry entry) {
if (entry.getNodeType() == NodeType.SLAVE && entry.isFailed()) {
checkForReconnect(entry, null);
return false;
}
return true;
}
protected T poll(ClientConnectionsEntry entry, RedisCommand<?> command) {
return (T) entry.pollConnection(command);
}
protected CompletionStage<T> connect(ClientConnectionsEntry entry) {
return (CompletionStage<T>) entry.connect();
}
private void connectTo(ClientConnectionsEntry entry, CompletableFuture<T> promise, RedisCommand<?> command) {
if (promise.isDone()) {
connectionManager.getGroup().submit(() -> {
releaseConnection(entry);
});
return;
}
T conn = poll(entry, command);
if (conn != null) {
if (!conn.isActive() && entry.getNodeType() == NodeType.SLAVE) {
entry.trySetupFistFail();
}
connectedSuccessful(entry, promise, conn);
return;
}
createConnection(entry, promise);
}
private void createConnection(ClientConnectionsEntry entry, CompletableFuture<T> promise) {
CompletionStage<T> connFuture = connect(entry);
connFuture.whenComplete((conn, e) -> {
if (e != null) {
promiseFailure(entry, promise, e);
return;
}
if (!conn.isActive()) {
promiseFailure(entry, promise, conn);
return;
}
connectedSuccessful(entry, promise, conn);
});
}
private void connectedSuccessful(ClientConnectionsEntry entry, CompletableFuture<T> promise, T conn) {
if (conn.isActive() && entry.getNodeType() == NodeType.SLAVE) {
entry.resetFirstFail();
}
if (!promise.complete(conn)) {
releaseConnection(entry, conn);
releaseConnection(entry);
}
}
private void promiseFailure(ClientConnectionsEntry entry, CompletableFuture<T> promise, Throwable cause) {
if (entry.getNodeType() == NodeType.SLAVE) {
entry.trySetupFistFail();
if (entry.isFailed()) {
checkForReconnect(entry, cause);
}
}
releaseConnection(entry);
promise.completeExceptionally(cause);
}
private void promiseFailure(ClientConnectionsEntry entry, CompletableFuture<T> promise, T conn) {
if (entry.getNodeType() == NodeType.SLAVE) {
entry.trySetupFistFail();
if (entry.isFailed()) {
conn.closeAsync();
entry.getAllConnections().remove(conn);
checkForReconnect(entry, null);
} else {
releaseConnection(entry, conn);
}
} else {
releaseConnection(entry, conn);
}
releaseConnection(entry);
RedisConnectionException cause = new RedisConnectionException(conn + " is not active!");
promise.completeExceptionally(cause);
}
private void checkForReconnect(ClientConnectionsEntry entry, Throwable cause) {
if (masterSlaveEntry.slaveDown(entry, FreezeReason.RECONNECT)) {
log.error("slave " + entry.getClient().getAddr() + " has been disconnected after "
+ config.getFailedSlaveCheckInterval() + " ms interval since moment of the first failed connection", cause);
scheduleCheck(entry);
}
}
private void scheduleCheck(ClientConnectionsEntry entry) {
connectionManager.getConnectionEventsHub().fireDisconnect(entry.getClient().getAddr());
connectionManager.newTimeout(timeout -> {
synchronized (entry) {
if (entry.getFreezeReason() != FreezeReason.RECONNECT
|| connectionManager.isShuttingDown()) {
return;
}
}
CompletionStage<RedisConnection> connectionFuture = entry.getClient().connectAsync();
connectionFuture.whenComplete((c, e) -> {
synchronized (entry) {
if (entry.getFreezeReason() != FreezeReason.RECONNECT) {
return;
}
}
if (e != null) {
scheduleCheck(entry);
return;
}
if (!c.isActive()) {
c.closeAsync();
scheduleCheck(entry);
return;
}
RFuture<String> f = c.async(RedisCommands.PING);
f.whenComplete((t, ex) -> {
try {
synchronized (entry) {
if (entry.getFreezeReason() != FreezeReason.RECONNECT) {
return;
}
}
if ("PONG".equals(t)) {
if (masterSlaveEntry.slaveUp(entry, FreezeReason.RECONNECT)) {
log.info("slave {} has been successfully reconnected", entry.getClient().getAddr());
}
} else {
scheduleCheck(entry);
}
} finally {
c.closeAsync();
}
});
});
}, config.getFailedSlaveReconnectionInterval(), TimeUnit.MILLISECONDS);
}
public void returnConnection(ClientConnectionsEntry entry, T connection) {
if (entry == null) {
connection.closeAsync();
return;
}
if (entry.isFreezed() && entry.getFreezeReason() != FreezeReason.SYSTEM) {
connection.closeAsync();
entry.getAllConnections().remove(connection);
} else {
releaseConnection(entry, connection);
}
releaseConnection(entry);
}
protected void releaseConnection(ClientConnectionsEntry entry) {
entry.releaseConnection();
}
protected void releaseConnection(ClientConnectionsEntry entry, T conn) {
entry.releaseConnection(conn);
}
}
|
redisson/src/main/java/org/redisson/connection/pool/ConnectionPool.java
|
/**
* Copyright (c) 2013-2021 Nikita Koksharov
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.redisson.connection.pool;
import org.redisson.api.NodeType;
import org.redisson.api.RFuture;
import org.redisson.client.RedisConnection;
import org.redisson.client.RedisConnectionException;
import org.redisson.client.protocol.RedisCommand;
import org.redisson.client.protocol.RedisCommands;
import org.redisson.config.MasterSlaveServersConfig;
import org.redisson.connection.ClientConnectionsEntry;
import org.redisson.connection.ClientConnectionsEntry.FreezeReason;
import org.redisson.connection.ConnectionManager;
import org.redisson.connection.MasterSlaveEntry;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.net.InetSocketAddress;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Queue;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionStage;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
/**
* Base connection pool class
*
* @author Nikita Koksharov
*
* @param <T> - connection type
*/
abstract class ConnectionPool<T extends RedisConnection> {
private final Logger log = LoggerFactory.getLogger(getClass());
protected final Queue<ClientConnectionsEntry> entries = new ConcurrentLinkedQueue<>();
final ConnectionManager connectionManager;
final MasterSlaveServersConfig config;
final MasterSlaveEntry masterSlaveEntry;
ConnectionPool(MasterSlaveServersConfig config, ConnectionManager connectionManager, MasterSlaveEntry masterSlaveEntry) {
this.config = config;
this.masterSlaveEntry = masterSlaveEntry;
this.connectionManager = connectionManager;
}
public CompletableFuture<Void> add(ClientConnectionsEntry entry) {
CompletableFuture<Void> promise = initConnections(entry, true);
return promise.thenAccept(r -> {
entries.add(entry);
});
}
public CompletableFuture<Void> initConnections(ClientConnectionsEntry entry) {
return initConnections(entry, false);
}
private CompletableFuture<Void> initConnections(ClientConnectionsEntry entry, boolean checkFreezed) {
int minimumIdleSize = getMinimumIdleSize(entry);
if (minimumIdleSize == 0 || (checkFreezed && entry.isFreezed())) {
return CompletableFuture.completedFuture(null);
}
CompletableFuture<Void> initPromise = new CompletableFuture<>();
AtomicInteger initializedConnections = new AtomicInteger(minimumIdleSize);
int startAmount = Math.min(5, minimumIdleSize);
AtomicInteger requests = new AtomicInteger(startAmount);
for (int i = 0; i < startAmount; i++) {
createConnection(checkFreezed, requests, entry, initPromise, minimumIdleSize, initializedConnections);
}
return initPromise;
}
private void createConnection(boolean checkFreezed, AtomicInteger requests, ClientConnectionsEntry entry,
CompletableFuture<Void> initPromise, int minimumIdleSize, AtomicInteger initializedConnections) {
if ((checkFreezed && entry.isFreezed()) || !tryAcquireConnection(entry)) {
int totalInitializedConnections = minimumIdleSize - initializedConnections.get();
Throwable cause = new RedisConnectionException(
"Unable to init enough connections amount! Only " + totalInitializedConnections + " of " + minimumIdleSize + " were initialized. Server: "
+ entry.getClient().getAddr());
initPromise.completeExceptionally(cause);
return;
}
CompletableFuture<Void> f = acquireConnection(entry, null);
f.thenAccept(r -> {
CompletableFuture<T> promise = new CompletableFuture<T>();
createConnection(entry, promise);
promise.whenComplete((conn, e) -> {
if (e == null) {
if (!initPromise.isDone()) {
entry.addConnection(conn);
} else {
conn.closeAsync();
}
}
releaseConnection(entry);
if (e != null) {
if (initPromise.isDone()) {
return;
}
for (RedisConnection connection : entry.getAllConnections()) {
if (!connection.isClosed()) {
connection.closeAsync();
}
}
entry.getAllConnections().clear();
for (RedisConnection connection : entry.getAllSubscribeConnections()) {
if (!connection.isClosed()) {
connection.closeAsync();
}
}
entry.getAllSubscribeConnections().clear();
int totalInitializedConnections = minimumIdleSize - initializedConnections.get();
String errorMsg;
if (totalInitializedConnections == 0) {
errorMsg = "Unable to connect to Redis server: " + entry.getClient().getAddr();
} else {
errorMsg = "Unable to init enough connections amount! Only " + totalInitializedConnections
+ " of " + minimumIdleSize + " were initialized. Redis server: " + entry.getClient().getAddr();
}
Throwable cause = new RedisConnectionException(errorMsg, e);
initPromise.completeExceptionally(cause);
return;
}
int value = initializedConnections.decrementAndGet();
if (value == 0) {
if (initPromise.complete(null)) {
log.info("{} connections initialized for {}", minimumIdleSize, entry.getClient().getAddr());
}
} else if (value > 0 && !initPromise.isDone()) {
if (requests.incrementAndGet() <= minimumIdleSize) {
createConnection(checkFreezed, requests, entry, initPromise, minimumIdleSize, initializedConnections);
}
}
});
});
}
protected CompletableFuture<Void> acquireConnection(ClientConnectionsEntry entry, RedisCommand<?> command) {
return entry.acquireConnection(command);
}
protected abstract int getMinimumIdleSize(ClientConnectionsEntry entry);
public CompletableFuture<T> get(RedisCommand<?> command) {
List<ClientConnectionsEntry> entriesCopy = new LinkedList<ClientConnectionsEntry>(entries);
for (Iterator<ClientConnectionsEntry> iterator = entriesCopy.iterator(); iterator.hasNext();) {
ClientConnectionsEntry entry = iterator.next();
if (!((!entry.isFreezed() || entry.isMasterForRead())
&& tryAcquireConnection(entry))) {
iterator.remove();
}
}
if (!entriesCopy.isEmpty()) {
ClientConnectionsEntry entry = config.getLoadBalancer().getEntry(entriesCopy);
return acquireConnection(command, entry);
}
List<InetSocketAddress> failed = new LinkedList<>();
List<InetSocketAddress> freezed = new LinkedList<>();
for (ClientConnectionsEntry entry : entries) {
if (entry.isFailed()) {
failed.add(entry.getClient().getAddr());
} else if (entry.isFreezed()) {
freezed.add(entry.getClient().getAddr());
}
}
StringBuilder errorMsg = new StringBuilder(getClass().getSimpleName() + " no available Redis entries. Master entry host: " + masterSlaveEntry.getClient().getAddr());
if (!freezed.isEmpty()) {
errorMsg.append(" Disconnected hosts: ").append(freezed);
}
if (!failed.isEmpty()) {
errorMsg.append(" Hosts disconnected due to errors during `failedSlaveCheckInterval`: ").append(failed);
}
RedisConnectionException exception = new RedisConnectionException(errorMsg.toString());
CompletableFuture<T> result = new CompletableFuture<>();
result.completeExceptionally(exception);
return result;
}
public CompletableFuture<T> get(RedisCommand<?> command, ClientConnectionsEntry entry) {
return acquireConnection(command, entry);
}
protected final CompletableFuture<T> acquireConnection(RedisCommand<?> command, ClientConnectionsEntry entry) {
CompletableFuture<T> result = new CompletableFuture<T>();
CompletableFuture<Void> f = acquireConnection(entry, command);
f.thenAccept(r -> {
connectTo(entry, result, command);
});
result.whenComplete((r, e) -> {
if (e != null) {
f.completeExceptionally(e);
}
});
return result;
}
protected boolean tryAcquireConnection(ClientConnectionsEntry entry) {
if (entry.getNodeType() == NodeType.SLAVE && entry.isFailed()) {
checkForReconnect(entry, null);
return false;
}
return true;
}
protected T poll(ClientConnectionsEntry entry, RedisCommand<?> command) {
return (T) entry.pollConnection(command);
}
protected CompletionStage<T> connect(ClientConnectionsEntry entry) {
return (CompletionStage<T>) entry.connect();
}
private void connectTo(ClientConnectionsEntry entry, CompletableFuture<T> promise, RedisCommand<?> command) {
if (promise.isDone()) {
connectionManager.getGroup().submit(() -> {
releaseConnection(entry);
});
return;
}
T conn = poll(entry, command);
if (conn != null) {
if (!conn.isActive() && entry.getNodeType() == NodeType.SLAVE) {
entry.trySetupFistFail();
}
connectedSuccessful(entry, promise, conn);
return;
}
createConnection(entry, promise);
}
private void createConnection(ClientConnectionsEntry entry, CompletableFuture<T> promise) {
CompletionStage<T> connFuture = connect(entry);
connFuture.whenComplete((conn, e) -> {
if (e != null) {
promiseFailure(entry, promise, e);
return;
}
if (!conn.isActive()) {
promiseFailure(entry, promise, conn);
return;
}
connectedSuccessful(entry, promise, conn);
});
}
private void connectedSuccessful(ClientConnectionsEntry entry, CompletableFuture<T> promise, T conn) {
if (conn.isActive() && entry.getNodeType() == NodeType.SLAVE) {
entry.resetFirstFail();
}
if (!promise.complete(conn)) {
releaseConnection(entry, conn);
releaseConnection(entry);
}
}
private void promiseFailure(ClientConnectionsEntry entry, CompletableFuture<T> promise, Throwable cause) {
if (entry.getNodeType() == NodeType.SLAVE) {
entry.trySetupFistFail();
if (entry.isFailed()) {
checkForReconnect(entry, cause);
}
}
releaseConnection(entry);
promise.completeExceptionally(cause);
}
private void promiseFailure(ClientConnectionsEntry entry, CompletableFuture<T> promise, T conn) {
if (entry.getNodeType() == NodeType.SLAVE) {
entry.trySetupFistFail();
if (entry.isFailed()) {
conn.closeAsync();
entry.getAllConnections().remove(conn);
checkForReconnect(entry, null);
} else {
releaseConnection(entry, conn);
}
} else {
releaseConnection(entry, conn);
}
releaseConnection(entry);
RedisConnectionException cause = new RedisConnectionException(conn + " is not active!");
promise.completeExceptionally(cause);
}
private void checkForReconnect(ClientConnectionsEntry entry, Throwable cause) {
if (masterSlaveEntry.slaveDown(entry, FreezeReason.RECONNECT)) {
log.error("slave " + entry.getClient().getAddr() + " has been disconnected after "
+ config.getFailedSlaveCheckInterval() + " ms interval since moment of the first failed connection", cause);
scheduleCheck(entry);
}
}
private void scheduleCheck(ClientConnectionsEntry entry) {
connectionManager.getConnectionEventsHub().fireDisconnect(entry.getClient().getAddr());
connectionManager.newTimeout(timeout -> {
synchronized (entry) {
if (entry.getFreezeReason() != FreezeReason.RECONNECT
|| connectionManager.isShuttingDown()) {
return;
}
}
CompletionStage<RedisConnection> connectionFuture = entry.getClient().connectAsync();
connectionFuture.whenComplete((c, e) -> {
synchronized (entry) {
if (entry.getFreezeReason() != FreezeReason.RECONNECT) {
return;
}
}
if (e != null) {
scheduleCheck(entry);
return;
}
if (!c.isActive()) {
c.closeAsync();
scheduleCheck(entry);
return;
}
RFuture<String> f = c.async(RedisCommands.PING);
f.whenComplete((t, ex) -> {
try {
synchronized (entry) {
if (entry.getFreezeReason() != FreezeReason.RECONNECT) {
return;
}
}
if ("PONG".equals(t)) {
if (masterSlaveEntry.slaveUp(entry, FreezeReason.RECONNECT)) {
log.info("slave {} has been successfully reconnected", entry.getClient().getAddr());
}
} else {
scheduleCheck(entry);
}
} finally {
c.closeAsync();
}
});
});
}, config.getFailedSlaveReconnectionInterval(), TimeUnit.MILLISECONDS);
}
public void returnConnection(ClientConnectionsEntry entry, T connection) {
if (entry == null) {
connection.closeAsync();
return;
}
if (entry.isFreezed() && entry.getFreezeReason() != FreezeReason.SYSTEM) {
connection.closeAsync();
entry.getAllConnections().remove(connection);
} else {
releaseConnection(entry, connection);
}
releaseConnection(entry);
}
protected void releaseConnection(ClientConnectionsEntry entry) {
entry.releaseConnection();
}
protected void releaseConnection(ClientConnectionsEntry entry, T conn) {
entry.releaseConnection(conn);
}
}
|
Improvement - reduced the number of connections created in parallel to 2 for better stability
|
redisson/src/main/java/org/redisson/connection/pool/ConnectionPool.java
|
Improvement - reduced the number of connections created in parallel to 2 for better stability
|
<ide><path>edisson/src/main/java/org/redisson/connection/pool/ConnectionPool.java
<ide>
<ide> CompletableFuture<Void> initPromise = new CompletableFuture<>();
<ide> AtomicInteger initializedConnections = new AtomicInteger(minimumIdleSize);
<del> int startAmount = Math.min(5, minimumIdleSize);
<add> int startAmount = Math.min(2, minimumIdleSize);
<ide> AtomicInteger requests = new AtomicInteger(startAmount);
<ide> for (int i = 0; i < startAmount; i++) {
<ide> createConnection(checkFreezed, requests, entry, initPromise, minimumIdleSize, initializedConnections);
|
|
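The change above keeps pool initialization bounded: only Math.min(2, minimumIdleSize) connection attempts are started up front, and each completed attempt schedules the next one until minimumIdleSize requests have been issued. Below is a standalone sketch of that bounded-parallelism pattern with CompletableFuture; the connect() stub only simulates an asynchronous connection step, and none of these names come from Redisson itself.
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.atomic.AtomicInteger;
public class BoundedInitSketch {
    static final int MINIMUM_IDLE_SIZE = 10;
    static final int PARALLELISM = 2; // at most two connection attempts in flight
    static final AtomicInteger remaining = new AtomicInteger(MINIMUM_IDLE_SIZE);
    static final AtomicInteger requested = new AtomicInteger(0);
    static final CompletableFuture<Void> initPromise = new CompletableFuture<>();
    // Simulated asynchronous connection attempt.
    static CompletableFuture<String> connect(int id) {
        return CompletableFuture.supplyAsync(() -> {
            try {
                Thread.sleep(ThreadLocalRandom.current().nextInt(10, 50));
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
            }
            return "conn-" + id;
        });
    }
    static void createOne() {
        int id = requested.incrementAndGet();
        if (id > MINIMUM_IDLE_SIZE) {
            return; // enough requests already issued
        }
        connect(id).whenComplete((conn, e) -> {
            if (e != null) {
                initPromise.completeExceptionally(e);
                return;
            }
            System.out.println("initialized " + conn);
            if (remaining.decrementAndGet() == 0) {
                initPromise.complete(null); // minimum idle size reached
            } else {
                createOne(); // completion of one attempt triggers the next
            }
        });
    }
    public static void main(String[] args) {
        for (int i = 0; i < Math.min(PARALLELISM, MINIMUM_IDLE_SIZE); i++) {
            createOne();
        }
        initPromise.join();
        System.out.println("all " + MINIMUM_IDLE_SIZE + " connections initialized");
    }
}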
Java
|
apache-2.0
|
0a350564f3532661d7720f108b5fecd7fbcc70ef
| 0 |
francisliu/hbase_namespace,francisliu/hbase_namespace,francisliu/hbase_namespace,francisliu/hbase_namespace,francisliu/hbase_namespace,francisliu/hbase_namespace,francisliu/hbase_namespace,francisliu/hbase_namespace,francisliu/hbase_namespace
|
/*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.replication.regionserver;
import java.io.IOException;
import java.util.NavigableMap;
import java.util.TreeMap;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.Server;
import org.apache.hadoop.hbase.regionserver.ReplicationSourceService;
import org.apache.hadoop.hbase.regionserver.ReplicationSinkService;
import org.apache.hadoop.hbase.regionserver.wal.HLog;
import org.apache.hadoop.hbase.regionserver.wal.HLogKey;
import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
import org.apache.hadoop.hbase.regionserver.wal.WALActionsListener;
import org.apache.hadoop.hbase.replication.ReplicationQueues;
import org.apache.hadoop.hbase.replication.ReplicationQueuesZKImpl;
import org.apache.hadoop.hbase.replication.ReplicationZookeeper;
import org.apache.hadoop.hbase.replication.master.ReplicationLogCleaner;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.zookeeper.KeeperException;
import static org.apache.hadoop.hbase.HConstants.HBASE_MASTER_LOGCLEANER_PLUGINS;
import static org.apache.hadoop.hbase.HConstants.REPLICATION_ENABLE_KEY;
import static org.apache.hadoop.hbase.HConstants.REPLICATION_SCOPE_LOCAL;
/**
* Gateway to Replication. Used by {@link org.apache.hadoop.hbase.regionserver.HRegionServer}.
*/
@InterfaceAudience.Private
public class Replication implements WALActionsListener,
ReplicationSourceService, ReplicationSinkService {
private static final Log LOG =
LogFactory.getLog(Replication.class);
private boolean replication;
private ReplicationSourceManager replicationManager;
private final AtomicBoolean replicating = new AtomicBoolean(true);
private ReplicationZookeeper zkHelper;
private ReplicationQueues replicationQueues;
private Configuration conf;
private ReplicationSink replicationSink;
// Hosting server
private Server server;
/** Statistics thread schedule pool */
private ScheduledExecutorService scheduleThreadPool;
private int statsThreadPeriod;
/**
* Instantiate the replication management (if rep is enabled).
* @param server Hosting server
* @param fs handle to the filesystem
* @param logDir
* @param oldLogDir directory where logs are archived
* @throws IOException
*/
public Replication(final Server server, final FileSystem fs,
final Path logDir, final Path oldLogDir) throws IOException{
initialize(server, fs, logDir, oldLogDir);
}
/**
* Empty constructor
*/
public Replication() {
}
public void initialize(final Server server, final FileSystem fs,
final Path logDir, final Path oldLogDir) throws IOException {
this.server = server;
this.conf = this.server.getConfiguration();
this.replication = isReplication(this.conf);
this.scheduleThreadPool = Executors.newScheduledThreadPool(1,
new ThreadFactoryBuilder()
.setNameFormat(server.getServerName() + "Replication Statistics #%d")
.setDaemon(true)
.build());
if (replication) {
try {
this.zkHelper = new ReplicationZookeeper(server, this.replicating);
this.replicationQueues =
new ReplicationQueuesZKImpl(server.getZooKeeper(), this.conf, this.server);
this.replicationQueues.init(this.server.getServerName().toString());
} catch (KeeperException ke) {
throw new IOException("Failed replication handler create " +
"(replicating=" + this.replicating, ke);
}
this.replicationManager =
new ReplicationSourceManager(zkHelper, replicationQueues, conf, this.server, fs,
this.replicating, logDir, oldLogDir);
this.statsThreadPeriod =
this.conf.getInt("replication.stats.thread.period.seconds", 5 * 60);
LOG.debug("ReplicationStatisticsThread " + this.statsThreadPeriod);
} else {
this.replicationManager = null;
this.zkHelper = null;
this.replicationQueues = null;
}
}
/**
* @param c Configuration to look at
* @return True if replication is enabled.
*/
public static boolean isReplication(final Configuration c) {
return c.getBoolean(REPLICATION_ENABLE_KEY, false);
}
/*
* Returns an object to listen to new hlog changes
**/
public WALActionsListener getWALActionsListener() {
return this;
}
/**
* Stops replication service.
*/
public void stopReplicationService() {
join();
}
/**
* Join with the replication threads
*/
public void join() {
if (this.replication) {
this.replicationManager.join();
if (this.replicationSink != null) {
this.replicationSink.stopReplicationSinkServices();
}
}
}
/**
* Carry on the list of log entries down to the sink
* @param entries list of entries to replicate
* @throws IOException
*/
public void replicateLogEntries(HLog.Entry[] entries) throws IOException {
if (this.replication) {
this.replicationSink.replicateEntries(entries);
}
}
/**
* If replication is enabled and this cluster is a master,
* it starts
* @throws IOException
*/
public void startReplicationService() throws IOException {
if (this.replication) {
this.replicationManager.init();
this.replicationSink = new ReplicationSink(this.conf, this.server);
this.scheduleThreadPool.scheduleAtFixedRate(
new ReplicationStatisticsThread(this.replicationSink, this.replicationManager),
statsThreadPeriod, statsThreadPeriod, TimeUnit.SECONDS);
}
}
/**
* Get the replication sources manager
   * @return the manager if replication is enabled, else returns null
*/
public ReplicationSourceManager getReplicationManager() {
return this.replicationManager;
}
@Override
public void visitLogEntryBeforeWrite(HRegionInfo info, HLogKey logKey,
WALEdit logEdit) {
// Not interested
}
@Override
public void visitLogEntryBeforeWrite(HTableDescriptor htd, HLogKey logKey,
WALEdit logEdit) {
NavigableMap<byte[], Integer> scopes =
new TreeMap<byte[], Integer>(Bytes.BYTES_COMPARATOR);
byte[] family;
for (KeyValue kv : logEdit.getKeyValues()) {
family = kv.getFamily();
int scope = htd.getFamily(family).getScope();
if (scope != REPLICATION_SCOPE_LOCAL &&
!scopes.containsKey(family)) {
scopes.put(family, scope);
}
}
if (!scopes.isEmpty()) {
logEdit.setScopes(scopes);
}
}
@Override
public void preLogRoll(Path oldPath, Path newPath) throws IOException {
getReplicationManager().preLogRoll(newPath);
}
@Override
public void postLogRoll(Path oldPath, Path newPath) throws IOException {
getReplicationManager().postLogRoll(newPath);
}
@Override
public void preLogArchive(Path oldPath, Path newPath) throws IOException {
// Not interested
}
@Override
public void postLogArchive(Path oldPath, Path newPath) throws IOException {
// Not interested
}
/**
* This method modifies the master's configuration in order to inject
* replication-related features
* @param conf
*/
public static void decorateMasterConfiguration(Configuration conf) {
if (!isReplication(conf)) {
return;
}
String plugins = conf.get(HBASE_MASTER_LOGCLEANER_PLUGINS);
String cleanerClass = ReplicationLogCleaner.class.getCanonicalName();
if (!plugins.contains(cleanerClass)) {
conf.set(HBASE_MASTER_LOGCLEANER_PLUGINS, plugins + "," + cleanerClass);
}
}
@Override
public void logRollRequested() {
// Not interested
}
@Override
public void logCloseRequested() {
// not interested
}
/*
* Statistics thread. Periodically prints the cache statistics to the log.
*/
static class ReplicationStatisticsThread extends Thread {
private final ReplicationSink replicationSink;
private final ReplicationSourceManager replicationManager;
public ReplicationStatisticsThread(final ReplicationSink replicationSink,
final ReplicationSourceManager replicationManager) {
super("ReplicationStatisticsThread");
this.replicationManager = replicationManager;
this.replicationSink = replicationSink;
}
@Override
public void run() {
printStats(this.replicationManager.getStats());
printStats(this.replicationSink.getStats());
}
private void printStats(String stats) {
if (!stats.isEmpty()) {
LOG.info(stats);
}
}
}
}
|
hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/Replication.java
|
/*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.replication.regionserver;
import java.io.IOException;
import java.util.NavigableMap;
import java.util.TreeMap;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.Server;
import org.apache.hadoop.hbase.regionserver.ReplicationSourceService;
import org.apache.hadoop.hbase.regionserver.ReplicationSinkService;
import org.apache.hadoop.hbase.regionserver.wal.HLog;
import org.apache.hadoop.hbase.regionserver.wal.HLogKey;
import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
import org.apache.hadoop.hbase.regionserver.wal.WALActionsListener;
import org.apache.hadoop.hbase.replication.ReplicationQueues;
import org.apache.hadoop.hbase.replication.ReplicationQueuesZKImpl;
import org.apache.hadoop.hbase.replication.ReplicationZookeeper;
import org.apache.hadoop.hbase.replication.master.ReplicationLogCleaner;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.zookeeper.KeeperException;
import static org.apache.hadoop.hbase.HConstants.HBASE_MASTER_LOGCLEANER_PLUGINS;
import static org.apache.hadoop.hbase.HConstants.REPLICATION_ENABLE_KEY;
import static org.apache.hadoop.hbase.HConstants.REPLICATION_SCOPE_LOCAL;
/**
* Gateway to Replication. Used by {@link org.apache.hadoop.hbase.regionserver.HRegionServer}.
*/
@InterfaceAudience.Private
public class Replication implements WALActionsListener,
ReplicationSourceService, ReplicationSinkService {
private static final Log LOG =
LogFactory.getLog(Replication.class);
private boolean replication;
private ReplicationSourceManager replicationManager;
private final AtomicBoolean replicating = new AtomicBoolean(true);
private ReplicationZookeeper zkHelper;
private ReplicationQueues replicationQueues;
private Configuration conf;
private ReplicationSink replicationSink;
// Hosting server
private Server server;
/** Statistics thread schedule pool */
private ScheduledExecutorService scheduleThreadPool;
private int statsThreadPeriod;
/**
* Instantiate the replication management (if rep is enabled).
* @param server Hosting server
* @param fs handle to the filesystem
* @param logDir
* @param oldLogDir directory where logs are archived
* @throws IOException
*/
public Replication(final Server server, final FileSystem fs,
final Path logDir, final Path oldLogDir) throws IOException{
initialize(server, fs, logDir, oldLogDir);
}
/**
* Empty constructor
*/
public Replication() {
}
public void initialize(final Server server, final FileSystem fs,
final Path logDir, final Path oldLogDir) throws IOException {
this.server = server;
this.conf = this.server.getConfiguration();
this.replication = isReplication(this.conf);
this.scheduleThreadPool = Executors.newScheduledThreadPool(1,
new ThreadFactoryBuilder()
.setNameFormat(server.getServerName() + "Replication Statistics #%d")
.setDaemon(true)
.build());
if (replication) {
try {
this.zkHelper = new ReplicationZookeeper(server, this.replicating);
this.replicationQueues =
new ReplicationQueuesZKImpl(server.getZooKeeper(), this.conf, this.server);
this.replicationQueues.init(this.server.getServerName().toString());
} catch (KeeperException ke) {
throw new IOException("Failed replication handler create " +
"(replicating=" + this.replicating + ")", ke);
}
this.replicationManager =
new ReplicationSourceManager(zkHelper, replicationQueues, conf, this.server, fs,
this.replicating, logDir, oldLogDir);
this.statsThreadPeriod =
this.conf.getInt("replication.stats.thread.period.seconds", 5 * 60);
LOG.debug("ReplicationStatisticsThread " + this.statsThreadPeriod);
} else {
this.replicationManager = null;
this.zkHelper = null;
this.replicationQueues = null;
}
}
/**
* @param c Configuration to look at
* @return True if replication is enabled.
*/
public static boolean isReplication(final Configuration c) {
return c.getBoolean(REPLICATION_ENABLE_KEY, false);
}
/**
 * Returns an object to listen to new hlog changes
 */
public WALActionsListener getWALActionsListener() {
return this;
}
/**
* Stops replication service.
*/
public void stopReplicationService() {
join();
}
/**
* Join with the replication threads
*/
public void join() {
if (this.replication) {
this.replicationManager.join();
this.replicationSink.stopReplicationSinkServices();
}
}
/**
* Pass the given list of log entries down to the sink
* @param entries list of entries to replicate
* @throws IOException
*/
public void replicateLogEntries(HLog.Entry[] entries) throws IOException {
if (this.replication) {
this.replicationSink.replicateEntries(entries);
}
}
/**
* If replication is enabled, starts the replication manager, the
* replication sink and the statistics thread.
* @throws IOException
*/
public void startReplicationService() throws IOException {
if (this.replication) {
this.replicationManager.init();
this.replicationSink = new ReplicationSink(this.conf, this.server);
this.scheduleThreadPool.scheduleAtFixedRate(
new ReplicationStatisticsThread(this.replicationSink, this.replicationManager),
statsThreadPeriod, statsThreadPeriod, TimeUnit.SECONDS);
}
}
/**
* Get the replication sources manager
* @return the manager if replication is enabled, else null
*/
public ReplicationSourceManager getReplicationManager() {
return this.replicationManager;
}
@Override
public void visitLogEntryBeforeWrite(HRegionInfo info, HLogKey logKey,
WALEdit logEdit) {
// Not interested
}
@Override
public void visitLogEntryBeforeWrite(HTableDescriptor htd, HLogKey logKey,
WALEdit logEdit) {
NavigableMap<byte[], Integer> scopes =
new TreeMap<byte[], Integer>(Bytes.BYTES_COMPARATOR);
byte[] family;
for (KeyValue kv : logEdit.getKeyValues()) {
family = kv.getFamily();
int scope = htd.getFamily(family).getScope();
if (scope != REPLICATION_SCOPE_LOCAL &&
!scopes.containsKey(family)) {
scopes.put(family, scope);
}
}
if (!scopes.isEmpty()) {
logEdit.setScopes(scopes);
}
}
@Override
public void preLogRoll(Path oldPath, Path newPath) throws IOException {
getReplicationManager().preLogRoll(newPath);
}
@Override
public void postLogRoll(Path oldPath, Path newPath) throws IOException {
getReplicationManager().postLogRoll(newPath);
}
@Override
public void preLogArchive(Path oldPath, Path newPath) throws IOException {
// Not interested
}
@Override
public void postLogArchive(Path oldPath, Path newPath) throws IOException {
// Not interested
}
/**
* This method modifies the master's configuration in order to inject
* replication-related features
* @param conf the master configuration to decorate
*/
public static void decorateMasterConfiguration(Configuration conf) {
if (!isReplication(conf)) {
return;
}
String plugins = conf.get(HBASE_MASTER_LOGCLEANER_PLUGINS);
String cleanerClass = ReplicationLogCleaner.class.getCanonicalName();
if (!plugins.contains(cleanerClass)) {
conf.set(HBASE_MASTER_LOGCLEANER_PLUGINS, plugins + "," + cleanerClass);
}
}
@Override
public void logRollRequested() {
// Not interested
}
@Override
public void logCloseRequested() {
// not interested
}
/*
* Statistics thread. Periodically prints the replication statistics to the log.
*/
static class ReplicationStatisticsThread extends Thread {
private final ReplicationSink replicationSink;
private final ReplicationSourceManager replicationManager;
public ReplicationStatisticsThread(final ReplicationSink replicationSink,
final ReplicationSourceManager replicationManager) {
super("ReplicationStatisticsThread");
this.replicationManager = replicationManager;
this.replicationSink = replicationSink;
}
@Override
public void run() {
printStats(this.replicationManager.getStats());
printStats(this.replicationSink.getStats());
}
private void printStats(String stats) {
if (!stats.isEmpty()) {
LOG.info(stats);
}
}
}
}
|
HBASE-8230 Possible NPE on regionserver abort if replication service has not been started
git-svn-id: 949c06ec81f1cb709fd2be51dd530a930344d7b3@1464280 13f79535-47bb-0310-9956-ffa450edef68
|
hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/Replication.java
|
HBASE-8230 Possible NPE on regionserver abort if replication service has not been started
|
<ide><path>base-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/Replication.java
<ide> public void join() {
<ide> if (this.replication) {
<ide> this.replicationManager.join();
<del> this.replicationSink.stopReplicationSinkServices();
<add> if (this.replicationSink != null) {
<add> this.replicationSink.stopReplicationSinkServices();
<add> }
<ide> }
<ide> }
<ide>
|
|
Java
|
apache-2.0
|
c46dbbb85b2275d14a9a756e46cc603d0f19ad33
| 0 |
googleapis/java-container,googleapis/java-container,googleapis/java-container
|
/*
* Copyright 2018 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.devtools.containeranalysis.v1beta1;
import static com.google.cloud.devtools.containeranalysis.v1beta1.GrafeasV1Beta1Client.ListNoteOccurrencesPagedResponse;
import static com.google.cloud.devtools.containeranalysis.v1beta1.GrafeasV1Beta1Client.ListNotesPagedResponse;
import static com.google.cloud.devtools.containeranalysis.v1beta1.GrafeasV1Beta1Client.ListOccurrencesPagedResponse;
import com.google.api.gax.core.NoCredentialsProvider;
import com.google.api.gax.grpc.GaxGrpcProperties;
import com.google.api.gax.grpc.testing.LocalChannelProvider;
import com.google.api.gax.grpc.testing.MockGrpcService;
import com.google.api.gax.grpc.testing.MockServiceHelper;
import com.google.api.gax.rpc.ApiClientHeaderProvider;
import com.google.api.gax.rpc.InvalidArgumentException;
import com.google.common.collect.Lists;
import com.google.containeranalysis.v1beta1.NoteName;
import com.google.containeranalysis.v1beta1.OccurrenceName;
import com.google.containeranalysis.v1beta1.ProjectName;
import com.google.protobuf.Empty;
import com.google.protobuf.FieldMask;
import com.google.protobuf.GeneratedMessageV3;
import io.grafeas.v1beta1.BatchCreateNotesRequest;
import io.grafeas.v1beta1.BatchCreateNotesResponse;
import io.grafeas.v1beta1.BatchCreateOccurrencesRequest;
import io.grafeas.v1beta1.BatchCreateOccurrencesResponse;
import io.grafeas.v1beta1.CreateNoteRequest;
import io.grafeas.v1beta1.CreateOccurrenceRequest;
import io.grafeas.v1beta1.DeleteNoteRequest;
import io.grafeas.v1beta1.DeleteOccurrenceRequest;
import io.grafeas.v1beta1.GetNoteRequest;
import io.grafeas.v1beta1.GetOccurrenceNoteRequest;
import io.grafeas.v1beta1.GetOccurrenceRequest;
import io.grafeas.v1beta1.GetVulnerabilityOccurrencesSummaryRequest;
import io.grafeas.v1beta1.ListNoteOccurrencesRequest;
import io.grafeas.v1beta1.ListNoteOccurrencesResponse;
import io.grafeas.v1beta1.ListNotesRequest;
import io.grafeas.v1beta1.ListNotesResponse;
import io.grafeas.v1beta1.ListOccurrencesRequest;
import io.grafeas.v1beta1.ListOccurrencesResponse;
import io.grafeas.v1beta1.Note;
import io.grafeas.v1beta1.Occurrence;
import io.grafeas.v1beta1.UpdateNoteRequest;
import io.grafeas.v1beta1.UpdateOccurrenceRequest;
import io.grafeas.v1beta1.VulnerabilityOccurrencesSummary;
import io.grpc.Status;
import io.grpc.StatusRuntimeException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
@javax.annotation.Generated("by GAPIC")
public class GrafeasV1Beta1ClientTest {
private static MockContainerAnalysisV1Beta1 mockContainerAnalysisV1Beta1;
private static MockGrafeasV1Beta1 mockGrafeasV1Beta1;
private static MockServiceHelper serviceHelper;
private GrafeasV1Beta1Client client;
private LocalChannelProvider channelProvider;
@BeforeClass
public static void startStaticServer() {
mockContainerAnalysisV1Beta1 = new MockContainerAnalysisV1Beta1();
mockGrafeasV1Beta1 = new MockGrafeasV1Beta1();
serviceHelper =
new MockServiceHelper(
"in-process-1",
Arrays.<MockGrpcService>asList(mockContainerAnalysisV1Beta1, mockGrafeasV1Beta1));
serviceHelper.start();
}
@AfterClass
public static void stopServer() {
serviceHelper.stop();
}
@Before
public void setUp() throws IOException {
serviceHelper.reset();
channelProvider = serviceHelper.createChannelProvider();
GrafeasV1Beta1Settings settings =
GrafeasV1Beta1Settings.newBuilder()
.setTransportChannelProvider(channelProvider)
.setCredentialsProvider(NoCredentialsProvider.create())
.build();
client = GrafeasV1Beta1Client.create(settings);
}
@After
public void tearDown() throws Exception {
client.close();
}
@Test
@SuppressWarnings("all")
public void getOccurrenceTest() {
String name2 = "name2-1052831874";
String noteName = "noteName1780787896";
String remediation = "remediation779381797";
Occurrence expectedResponse =
Occurrence.newBuilder()
.setName(name2)
.setNoteName(noteName)
.setRemediation(remediation)
.build();
mockGrafeasV1Beta1.addResponse(expectedResponse);
OccurrenceName name = OccurrenceName.of("[PROJECT]", "[OCCURRENCE]");
Occurrence actualResponse = client.getOccurrence(name);
Assert.assertEquals(expectedResponse, actualResponse);
List<GeneratedMessageV3> actualRequests = mockGrafeasV1Beta1.getRequests();
Assert.assertEquals(1, actualRequests.size());
GetOccurrenceRequest actualRequest = (GetOccurrenceRequest) actualRequests.get(0);
Assert.assertEquals(name, OccurrenceName.parse(actualRequest.getName()));
Assert.assertTrue(
channelProvider.isHeaderSent(
ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
@Test
@SuppressWarnings("all")
public void getOccurrenceExceptionTest() throws Exception {
StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT);
mockGrafeasV1Beta1.addException(exception);
try {
OccurrenceName name = OccurrenceName.of("[PROJECT]", "[OCCURRENCE]");
client.getOccurrence(name);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception
}
}
@Test
@SuppressWarnings("all")
public void listOccurrencesTest() {
String nextPageToken = "";
Occurrence occurrencesElement = Occurrence.newBuilder().build();
List<Occurrence> occurrences = Arrays.asList(occurrencesElement);
ListOccurrencesResponse expectedResponse =
ListOccurrencesResponse.newBuilder()
.setNextPageToken(nextPageToken)
.addAllOccurrences(occurrences)
.build();
mockGrafeasV1Beta1.addResponse(expectedResponse);
ProjectName parent = ProjectName.of("[PROJECT]");
String filter = "filter-1274492040";
ListOccurrencesPagedResponse pagedListResponse = client.listOccurrences(parent, filter);
List<Occurrence> resources = Lists.newArrayList(pagedListResponse.iterateAll());
Assert.assertEquals(1, resources.size());
Assert.assertEquals(expectedResponse.getOccurrencesList().get(0), resources.get(0));
List<GeneratedMessageV3> actualRequests = mockGrafeasV1Beta1.getRequests();
Assert.assertEquals(1, actualRequests.size());
ListOccurrencesRequest actualRequest = (ListOccurrencesRequest) actualRequests.get(0);
Assert.assertEquals(parent, ProjectName.parse(actualRequest.getParent()));
Assert.assertEquals(filter, actualRequest.getFilter());
Assert.assertTrue(
channelProvider.isHeaderSent(
ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
@Test
@SuppressWarnings("all")
public void listOccurrencesExceptionTest() throws Exception {
StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT);
mockGrafeasV1Beta1.addException(exception);
try {
ProjectName parent = ProjectName.of("[PROJECT]");
String filter = "filter-1274492040";
client.listOccurrences(parent, filter);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception
}
}
@Test
@SuppressWarnings("all")
public void deleteOccurrenceTest() {
Empty expectedResponse = Empty.newBuilder().build();
mockGrafeasV1Beta1.addResponse(expectedResponse);
OccurrenceName name = OccurrenceName.of("[PROJECT]", "[OCCURRENCE]");
client.deleteOccurrence(name);
List<GeneratedMessageV3> actualRequests = mockGrafeasV1Beta1.getRequests();
Assert.assertEquals(1, actualRequests.size());
DeleteOccurrenceRequest actualRequest = (DeleteOccurrenceRequest) actualRequests.get(0);
Assert.assertEquals(name, OccurrenceName.parse(actualRequest.getName()));
Assert.assertTrue(
channelProvider.isHeaderSent(
ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
@Test
@SuppressWarnings("all")
public void deleteOccurrenceExceptionTest() throws Exception {
StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT);
mockGrafeasV1Beta1.addException(exception);
try {
OccurrenceName name = OccurrenceName.of("[PROJECT]", "[OCCURRENCE]");
client.deleteOccurrence(name);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception
}
}
@Test
@SuppressWarnings("all")
public void createOccurrenceTest() {
String name = "name3373707";
String noteName = "noteName1780787896";
String remediation = "remediation779381797";
Occurrence expectedResponse =
Occurrence.newBuilder()
.setName(name)
.setNoteName(noteName)
.setRemediation(remediation)
.build();
mockGrafeasV1Beta1.addResponse(expectedResponse);
ProjectName parent = ProjectName.of("[PROJECT]");
Occurrence occurrence = Occurrence.newBuilder().build();
Occurrence actualResponse = client.createOccurrence(parent, occurrence);
Assert.assertEquals(expectedResponse, actualResponse);
List<GeneratedMessageV3> actualRequests = mockGrafeasV1Beta1.getRequests();
Assert.assertEquals(1, actualRequests.size());
CreateOccurrenceRequest actualRequest = (CreateOccurrenceRequest) actualRequests.get(0);
Assert.assertEquals(parent, ProjectName.parse(actualRequest.getParent()));
Assert.assertEquals(occurrence, actualRequest.getOccurrence());
Assert.assertTrue(
channelProvider.isHeaderSent(
ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
@Test
@SuppressWarnings("all")
public void createOccurrenceExceptionTest() throws Exception {
StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT);
mockGrafeasV1Beta1.addException(exception);
try {
ProjectName parent = ProjectName.of("[PROJECT]");
Occurrence occurrence = Occurrence.newBuilder().build();
client.createOccurrence(parent, occurrence);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception
}
}
@Test
@SuppressWarnings("all")
public void batchCreateOccurrencesTest() {
BatchCreateOccurrencesResponse expectedResponse =
BatchCreateOccurrencesResponse.newBuilder().build();
mockGrafeasV1Beta1.addResponse(expectedResponse);
ProjectName parent = ProjectName.of("[PROJECT]");
List<Occurrence> occurrences = new ArrayList<>();
BatchCreateOccurrencesResponse actualResponse =
client.batchCreateOccurrences(parent, occurrences);
Assert.assertEquals(expectedResponse, actualResponse);
List<GeneratedMessageV3> actualRequests = mockGrafeasV1Beta1.getRequests();
Assert.assertEquals(1, actualRequests.size());
BatchCreateOccurrencesRequest actualRequest =
(BatchCreateOccurrencesRequest) actualRequests.get(0);
Assert.assertEquals(parent, ProjectName.parse(actualRequest.getParent()));
Assert.assertEquals(occurrences, actualRequest.getOccurrencesList());
Assert.assertTrue(
channelProvider.isHeaderSent(
ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
@Test
@SuppressWarnings("all")
public void batchCreateOccurrencesExceptionTest() throws Exception {
StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT);
mockGrafeasV1Beta1.addException(exception);
try {
ProjectName parent = ProjectName.of("[PROJECT]");
List<Occurrence> occurrences = new ArrayList<>();
client.batchCreateOccurrences(parent, occurrences);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception
}
}
@Test
@SuppressWarnings("all")
public void updateOccurrenceTest() {
String name2 = "name2-1052831874";
String noteName = "noteName1780787896";
String remediation = "remediation779381797";
Occurrence expectedResponse =
Occurrence.newBuilder()
.setName(name2)
.setNoteName(noteName)
.setRemediation(remediation)
.build();
mockGrafeasV1Beta1.addResponse(expectedResponse);
OccurrenceName name = OccurrenceName.of("[PROJECT]", "[OCCURRENCE]");
Occurrence occurrence = Occurrence.newBuilder().build();
FieldMask updateMask = FieldMask.newBuilder().build();
Occurrence actualResponse = client.updateOccurrence(name, occurrence, updateMask);
Assert.assertEquals(expectedResponse, actualResponse);
List<GeneratedMessageV3> actualRequests = mockGrafeasV1Beta1.getRequests();
Assert.assertEquals(1, actualRequests.size());
UpdateOccurrenceRequest actualRequest = (UpdateOccurrenceRequest) actualRequests.get(0);
Assert.assertEquals(name, OccurrenceName.parse(actualRequest.getName()));
Assert.assertEquals(occurrence, actualRequest.getOccurrence());
Assert.assertEquals(updateMask, actualRequest.getUpdateMask());
Assert.assertTrue(
channelProvider.isHeaderSent(
ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
@Test
@SuppressWarnings("all")
public void updateOccurrenceExceptionTest() throws Exception {
StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT);
mockGrafeasV1Beta1.addException(exception);
try {
OccurrenceName name = OccurrenceName.of("[PROJECT]", "[OCCURRENCE]");
Occurrence occurrence = Occurrence.newBuilder().build();
FieldMask updateMask = FieldMask.newBuilder().build();
client.updateOccurrence(name, occurrence, updateMask);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception
}
}
@Test
@SuppressWarnings("all")
public void getOccurrenceNoteTest() {
String name2 = "name2-1052831874";
String shortDescription = "shortDescription-235369287";
String longDescription = "longDescription-1747792199";
Note expectedResponse =
Note.newBuilder()
.setName(name2)
.setShortDescription(shortDescription)
.setLongDescription(longDescription)
.build();
mockGrafeasV1Beta1.addResponse(expectedResponse);
OccurrenceName name = OccurrenceName.of("[PROJECT]", "[OCCURRENCE]");
Note actualResponse = client.getOccurrenceNote(name);
Assert.assertEquals(expectedResponse, actualResponse);
List<GeneratedMessageV3> actualRequests = mockGrafeasV1Beta1.getRequests();
Assert.assertEquals(1, actualRequests.size());
GetOccurrenceNoteRequest actualRequest = (GetOccurrenceNoteRequest) actualRequests.get(0);
Assert.assertEquals(name, OccurrenceName.parse(actualRequest.getName()));
Assert.assertTrue(
channelProvider.isHeaderSent(
ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
@Test
@SuppressWarnings("all")
public void getOccurrenceNoteExceptionTest() throws Exception {
StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT);
mockGrafeasV1Beta1.addException(exception);
try {
OccurrenceName name = OccurrenceName.of("[PROJECT]", "[OCCURRENCE]");
client.getOccurrenceNote(name);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception
}
}
@Test
@SuppressWarnings("all")
public void getNoteTest() {
String name2 = "name2-1052831874";
String shortDescription = "shortDescription-235369287";
String longDescription = "longDescription-1747792199";
Note expectedResponse =
Note.newBuilder()
.setName(name2)
.setShortDescription(shortDescription)
.setLongDescription(longDescription)
.build();
mockGrafeasV1Beta1.addResponse(expectedResponse);
NoteName name = NoteName.of("[PROJECT]", "[NOTE]");
Note actualResponse = client.getNote(name);
Assert.assertEquals(expectedResponse, actualResponse);
List<GeneratedMessageV3> actualRequests = mockGrafeasV1Beta1.getRequests();
Assert.assertEquals(1, actualRequests.size());
GetNoteRequest actualRequest = (GetNoteRequest) actualRequests.get(0);
Assert.assertEquals(name, NoteName.parse(actualRequest.getName()));
Assert.assertTrue(
channelProvider.isHeaderSent(
ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
@Test
@SuppressWarnings("all")
public void getNoteExceptionTest() throws Exception {
StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT);
mockGrafeasV1Beta1.addException(exception);
try {
NoteName name = NoteName.of("[PROJECT]", "[NOTE]");
client.getNote(name);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception
}
}
@Test
@SuppressWarnings("all")
public void listNotesTest() {
String nextPageToken = "";
Note notesElement = Note.newBuilder().build();
List<Note> notes = Arrays.asList(notesElement);
ListNotesResponse expectedResponse =
ListNotesResponse.newBuilder().setNextPageToken(nextPageToken).addAllNotes(notes).build();
mockGrafeasV1Beta1.addResponse(expectedResponse);
ProjectName parent = ProjectName.of("[PROJECT]");
String filter = "filter-1274492040";
ListNotesPagedResponse pagedListResponse = client.listNotes(parent, filter);
List<Note> resources = Lists.newArrayList(pagedListResponse.iterateAll());
Assert.assertEquals(1, resources.size());
Assert.assertEquals(expectedResponse.getNotesList().get(0), resources.get(0));
List<GeneratedMessageV3> actualRequests = mockGrafeasV1Beta1.getRequests();
Assert.assertEquals(1, actualRequests.size());
ListNotesRequest actualRequest = (ListNotesRequest) actualRequests.get(0);
Assert.assertEquals(parent, ProjectName.parse(actualRequest.getParent()));
Assert.assertEquals(filter, actualRequest.getFilter());
Assert.assertTrue(
channelProvider.isHeaderSent(
ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
@Test
@SuppressWarnings("all")
public void listNotesExceptionTest() throws Exception {
StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT);
mockGrafeasV1Beta1.addException(exception);
try {
ProjectName parent = ProjectName.of("[PROJECT]");
String filter = "filter-1274492040";
client.listNotes(parent, filter);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception
}
}
@Test
@SuppressWarnings("all")
public void deleteNoteTest() {
Empty expectedResponse = Empty.newBuilder().build();
mockGrafeasV1Beta1.addResponse(expectedResponse);
NoteName name = NoteName.of("[PROJECT]", "[NOTE]");
client.deleteNote(name);
List<GeneratedMessageV3> actualRequests = mockGrafeasV1Beta1.getRequests();
Assert.assertEquals(1, actualRequests.size());
DeleteNoteRequest actualRequest = (DeleteNoteRequest) actualRequests.get(0);
Assert.assertEquals(name, NoteName.parse(actualRequest.getName()));
Assert.assertTrue(
channelProvider.isHeaderSent(
ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
@Test
@SuppressWarnings("all")
public void deleteNoteExceptionTest() throws Exception {
StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT);
mockGrafeasV1Beta1.addException(exception);
try {
NoteName name = NoteName.of("[PROJECT]", "[NOTE]");
client.deleteNote(name);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception
}
}
@Test
@SuppressWarnings("all")
public void createNoteTest() {
String name = "name3373707";
String shortDescription = "shortDescription-235369287";
String longDescription = "longDescription-1747792199";
Note expectedResponse =
Note.newBuilder()
.setName(name)
.setShortDescription(shortDescription)
.setLongDescription(longDescription)
.build();
mockGrafeasV1Beta1.addResponse(expectedResponse);
ProjectName parent = ProjectName.of("[PROJECT]");
String noteId = "noteId2129224840";
Note note = Note.newBuilder().build();
Note actualResponse = client.createNote(parent, noteId, note);
Assert.assertEquals(expectedResponse, actualResponse);
List<GeneratedMessageV3> actualRequests = mockGrafeasV1Beta1.getRequests();
Assert.assertEquals(1, actualRequests.size());
CreateNoteRequest actualRequest = (CreateNoteRequest) actualRequests.get(0);
Assert.assertEquals(parent, ProjectName.parse(actualRequest.getParent()));
Assert.assertEquals(noteId, actualRequest.getNoteId());
Assert.assertEquals(note, actualRequest.getNote());
Assert.assertTrue(
channelProvider.isHeaderSent(
ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
@Test
@SuppressWarnings("all")
public void createNoteExceptionTest() throws Exception {
StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT);
mockGrafeasV1Beta1.addException(exception);
try {
ProjectName parent = ProjectName.of("[PROJECT]");
String noteId = "noteId2129224840";
Note note = Note.newBuilder().build();
client.createNote(parent, noteId, note);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception
}
}
@Test
@SuppressWarnings("all")
public void batchCreateNotesTest() {
BatchCreateNotesResponse expectedResponse = BatchCreateNotesResponse.newBuilder().build();
mockGrafeasV1Beta1.addResponse(expectedResponse);
ProjectName parent = ProjectName.of("[PROJECT]");
Map<String, Note> notes = new HashMap<>();
BatchCreateNotesResponse actualResponse = client.batchCreateNotes(parent, notes);
Assert.assertEquals(expectedResponse, actualResponse);
List<GeneratedMessageV3> actualRequests = mockGrafeasV1Beta1.getRequests();
Assert.assertEquals(1, actualRequests.size());
BatchCreateNotesRequest actualRequest = (BatchCreateNotesRequest) actualRequests.get(0);
Assert.assertEquals(parent, ProjectName.parse(actualRequest.getParent()));
Assert.assertEquals(notes, actualRequest.getNotesMap());
Assert.assertTrue(
channelProvider.isHeaderSent(
ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
@Test
@SuppressWarnings("all")
public void batchCreateNotesExceptionTest() throws Exception {
StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT);
mockGrafeasV1Beta1.addException(exception);
try {
ProjectName parent = ProjectName.of("[PROJECT]");
Map<String, Note> notes = new HashMap<>();
client.batchCreateNotes(parent, notes);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception
}
}
@Test
@SuppressWarnings("all")
public void updateNoteTest() {
String name2 = "name2-1052831874";
String shortDescription = "shortDescription-235369287";
String longDescription = "longDescription-1747792199";
Note expectedResponse =
Note.newBuilder()
.setName(name2)
.setShortDescription(shortDescription)
.setLongDescription(longDescription)
.build();
mockGrafeasV1Beta1.addResponse(expectedResponse);
NoteName name = NoteName.of("[PROJECT]", "[NOTE]");
Note note = Note.newBuilder().build();
FieldMask updateMask = FieldMask.newBuilder().build();
Note actualResponse = client.updateNote(name, note, updateMask);
Assert.assertEquals(expectedResponse, actualResponse);
List<GeneratedMessageV3> actualRequests = mockGrafeasV1Beta1.getRequests();
Assert.assertEquals(1, actualRequests.size());
UpdateNoteRequest actualRequest = (UpdateNoteRequest) actualRequests.get(0);
Assert.assertEquals(name, NoteName.parse(actualRequest.getName()));
Assert.assertEquals(note, actualRequest.getNote());
Assert.assertEquals(updateMask, actualRequest.getUpdateMask());
Assert.assertTrue(
channelProvider.isHeaderSent(
ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
@Test
@SuppressWarnings("all")
public void updateNoteExceptionTest() throws Exception {
StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT);
mockGrafeasV1Beta1.addException(exception);
try {
NoteName name = NoteName.of("[PROJECT]", "[NOTE]");
Note note = Note.newBuilder().build();
FieldMask updateMask = FieldMask.newBuilder().build();
client.updateNote(name, note, updateMask);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception
}
}
@Test
@SuppressWarnings("all")
public void listNoteOccurrencesTest() {
String nextPageToken = "";
Occurrence occurrencesElement = Occurrence.newBuilder().build();
List<Occurrence> occurrences = Arrays.asList(occurrencesElement);
ListNoteOccurrencesResponse expectedResponse =
ListNoteOccurrencesResponse.newBuilder()
.setNextPageToken(nextPageToken)
.addAllOccurrences(occurrences)
.build();
mockGrafeasV1Beta1.addResponse(expectedResponse);
NoteName name = NoteName.of("[PROJECT]", "[NOTE]");
String filter = "filter-1274492040";
ListNoteOccurrencesPagedResponse pagedListResponse = client.listNoteOccurrences(name, filter);
List<Occurrence> resources = Lists.newArrayList(pagedListResponse.iterateAll());
Assert.assertEquals(1, resources.size());
Assert.assertEquals(expectedResponse.getOccurrencesList().get(0), resources.get(0));
List<GeneratedMessageV3> actualRequests = mockGrafeasV1Beta1.getRequests();
Assert.assertEquals(1, actualRequests.size());
ListNoteOccurrencesRequest actualRequest = (ListNoteOccurrencesRequest) actualRequests.get(0);
Assert.assertEquals(name, NoteName.parse(actualRequest.getName()));
Assert.assertEquals(filter, actualRequest.getFilter());
Assert.assertTrue(
channelProvider.isHeaderSent(
ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
@Test
@SuppressWarnings("all")
public void listNoteOccurrencesExceptionTest() throws Exception {
StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT);
mockGrafeasV1Beta1.addException(exception);
try {
NoteName name = NoteName.of("[PROJECT]", "[NOTE]");
String filter = "filter-1274492040";
client.listNoteOccurrences(name, filter);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception
}
}
@Test
@SuppressWarnings("all")
public void getVulnerabilityOccurrencesSummaryTest() {
VulnerabilityOccurrencesSummary expectedResponse =
VulnerabilityOccurrencesSummary.newBuilder().build();
mockGrafeasV1Beta1.addResponse(expectedResponse);
ProjectName parent = ProjectName.of("[PROJECT]");
String filter = "filter-1274492040";
VulnerabilityOccurrencesSummary actualResponse =
client.getVulnerabilityOccurrencesSummary(parent, filter);
Assert.assertEquals(expectedResponse, actualResponse);
List<GeneratedMessageV3> actualRequests = mockGrafeasV1Beta1.getRequests();
Assert.assertEquals(1, actualRequests.size());
GetVulnerabilityOccurrencesSummaryRequest actualRequest =
(GetVulnerabilityOccurrencesSummaryRequest) actualRequests.get(0);
Assert.assertEquals(parent, ProjectName.parse(actualRequest.getParent()));
Assert.assertEquals(filter, actualRequest.getFilter());
Assert.assertTrue(
channelProvider.isHeaderSent(
ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
@Test
@SuppressWarnings("all")
public void getVulnerabilityOccurrencesSummaryExceptionTest() throws Exception {
StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT);
mockGrafeasV1Beta1.addException(exception);
try {
ProjectName parent = ProjectName.of("[PROJECT]");
String filter = "filter-1274492040";
client.getVulnerabilityOccurrencesSummary(parent, filter);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception
}
}
}
|
google-cloud-containeranalysis/src/test/java/com/google/cloud/devtools/containeranalysis/v1beta1/GrafeasV1Beta1ClientTest.java
|
/*
* Copyright 2018 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.devtools.containeranalysis.v1beta1;
import static com.google.cloud.devtools.containeranalysis.v1beta1.GrafeasV1Beta1Client.ListNoteOccurrencesPagedResponse;
import static com.google.cloud.devtools.containeranalysis.v1beta1.GrafeasV1Beta1Client.ListNotesPagedResponse;
import static com.google.cloud.devtools.containeranalysis.v1beta1.GrafeasV1Beta1Client.ListOccurrencesPagedResponse;
import com.google.api.gax.core.NoCredentialsProvider;
import com.google.api.gax.grpc.GaxGrpcProperties;
import com.google.api.gax.grpc.testing.LocalChannelProvider;
import com.google.api.gax.grpc.testing.MockGrpcService;
import com.google.api.gax.grpc.testing.MockServiceHelper;
import com.google.api.gax.rpc.ApiClientHeaderProvider;
import com.google.api.gax.rpc.InvalidArgumentException;
import com.google.common.collect.Lists;
import com.google.containeranalysis.v1beta1.NoteName;
import com.google.containeranalysis.v1beta1.OccurrenceName;
import com.google.containeranalysis.v1beta1.ProjectName;
import com.google.protobuf.Empty;
import com.google.protobuf.FieldMask;
import com.google.protobuf.GeneratedMessageV3;
import io.grafeas.v1beta1.BatchCreateNotesRequest;
import io.grafeas.v1beta1.BatchCreateNotesResponse;
import io.grafeas.v1beta1.BatchCreateOccurrencesRequest;
import io.grafeas.v1beta1.BatchCreateOccurrencesResponse;
import io.grafeas.v1beta1.CreateNoteRequest;
import io.grafeas.v1beta1.CreateOccurrenceRequest;
import io.grafeas.v1beta1.DeleteNoteRequest;
import io.grafeas.v1beta1.DeleteOccurrenceRequest;
import io.grafeas.v1beta1.GetNoteRequest;
import io.grafeas.v1beta1.GetOccurrenceNoteRequest;
import io.grafeas.v1beta1.GetOccurrenceRequest;
import io.grafeas.v1beta1.GetVulnerabilityOccurrencesSummaryRequest;
import io.grafeas.v1beta1.ListNoteOccurrencesRequest;
import io.grafeas.v1beta1.ListNoteOccurrencesResponse;
import io.grafeas.v1beta1.ListNotesRequest;
import io.grafeas.v1beta1.ListNotesResponse;
import io.grafeas.v1beta1.ListOccurrencesRequest;
import io.grafeas.v1beta1.ListOccurrencesResponse;
import io.grafeas.v1beta1.Note;
import io.grafeas.v1beta1.Occurrence;
import io.grafeas.v1beta1.UpdateNoteRequest;
import io.grafeas.v1beta1.UpdateOccurrenceRequest;
import io.grafeas.v1beta1.VulnerabilityOccurrencesSummary;
import io.grpc.Status;
import io.grpc.StatusRuntimeException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
@javax.annotation.Generated("by GAPIC")
public class GrafeasV1Beta1ClientTest {
private static MockContainerAnalysisV1Beta1 mockContainerAnalysisV1Beta1;
private static MockGrafeasV1Beta1 mockGrafeasV1Beta1;
private static MockServiceHelper serviceHelper;
private GrafeasV1Beta1Client client;
private LocalChannelProvider channelProvider;
@BeforeClass
public static void startStaticServer() {
mockContainerAnalysisV1Beta1 = new MockContainerAnalysisV1Beta1();
mockGrafeasV1Beta1 = new MockGrafeasV1Beta1();
serviceHelper =
new MockServiceHelper(
"in-process-1",
Arrays.<MockGrpcService>asList(mockContainerAnalysisV1Beta1, mockGrafeasV1Beta1));
serviceHelper.start();
}
@AfterClass
public static void stopServer() {
serviceHelper.stop();
}
@Before
public void setUp() throws IOException {
serviceHelper.reset();
channelProvider = serviceHelper.createChannelProvider();
GrafeasV1Beta1Settings settings =
GrafeasV1Beta1Settings.newBuilder()
.setTransportChannelProvider(channelProvider)
.setCredentialsProvider(NoCredentialsProvider.create())
.build();
client = GrafeasV1Beta1Client.create(settings);
}
@After
public void tearDown() throws Exception {
client.close();
}
@Test
@SuppressWarnings("all")
public void getOccurrenceTest() {
OccurrenceName name2 = OccurrenceName.of("[PROJECT]", "[OCCURRENCE]");
String noteName = "noteName1780787896";
String remediation = "remediation779381797";
Occurrence expectedResponse =
Occurrence.newBuilder()
.setName(name2.toString())
.setNoteName(noteName)
.setRemediation(remediation)
.build();
mockGrafeasV1Beta1.addResponse(expectedResponse);
OccurrenceName name = OccurrenceName.of("[PROJECT]", "[OCCURRENCE]");
Occurrence actualResponse = client.getOccurrence(name);
Assert.assertEquals(expectedResponse, actualResponse);
List<GeneratedMessageV3> actualRequests = mockGrafeasV1Beta1.getRequests();
Assert.assertEquals(1, actualRequests.size());
GetOccurrenceRequest actualRequest = (GetOccurrenceRequest) actualRequests.get(0);
Assert.assertEquals(name, OccurrenceName.parse(actualRequest.getName()));
Assert.assertTrue(
channelProvider.isHeaderSent(
ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
@Test
@SuppressWarnings("all")
public void getOccurrenceExceptionTest() throws Exception {
StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT);
mockGrafeasV1Beta1.addException(exception);
try {
OccurrenceName name = OccurrenceName.of("[PROJECT]", "[OCCURRENCE]");
client.getOccurrence(name);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception
}
}
@Test
@SuppressWarnings("all")
public void listOccurrencesTest() {
String nextPageToken = "";
Occurrence occurrencesElement = Occurrence.newBuilder().build();
List<Occurrence> occurrences = Arrays.asList(occurrencesElement);
ListOccurrencesResponse expectedResponse =
ListOccurrencesResponse.newBuilder()
.setNextPageToken(nextPageToken)
.addAllOccurrences(occurrences)
.build();
mockGrafeasV1Beta1.addResponse(expectedResponse);
ProjectName parent = ProjectName.of("[PROJECT]");
String filter = "filter-1274492040";
ListOccurrencesPagedResponse pagedListResponse = client.listOccurrences(parent, filter);
List<Occurrence> resources = Lists.newArrayList(pagedListResponse.iterateAll());
Assert.assertEquals(1, resources.size());
Assert.assertEquals(expectedResponse.getOccurrencesList().get(0), resources.get(0));
List<GeneratedMessageV3> actualRequests = mockGrafeasV1Beta1.getRequests();
Assert.assertEquals(1, actualRequests.size());
ListOccurrencesRequest actualRequest = (ListOccurrencesRequest) actualRequests.get(0);
Assert.assertEquals(parent, ProjectName.parse(actualRequest.getParent()));
Assert.assertEquals(filter, actualRequest.getFilter());
Assert.assertTrue(
channelProvider.isHeaderSent(
ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
@Test
@SuppressWarnings("all")
public void listOccurrencesExceptionTest() throws Exception {
StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT);
mockGrafeasV1Beta1.addException(exception);
try {
ProjectName parent = ProjectName.of("[PROJECT]");
String filter = "filter-1274492040";
client.listOccurrences(parent, filter);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception
}
}
@Test
@SuppressWarnings("all")
public void deleteOccurrenceTest() {
Empty expectedResponse = Empty.newBuilder().build();
mockGrafeasV1Beta1.addResponse(expectedResponse);
OccurrenceName name = OccurrenceName.of("[PROJECT]", "[OCCURRENCE]");
client.deleteOccurrence(name);
List<GeneratedMessageV3> actualRequests = mockGrafeasV1Beta1.getRequests();
Assert.assertEquals(1, actualRequests.size());
DeleteOccurrenceRequest actualRequest = (DeleteOccurrenceRequest) actualRequests.get(0);
Assert.assertEquals(name, OccurrenceName.parse(actualRequest.getName()));
Assert.assertTrue(
channelProvider.isHeaderSent(
ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
@Test
@SuppressWarnings("all")
public void deleteOccurrenceExceptionTest() throws Exception {
StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT);
mockGrafeasV1Beta1.addException(exception);
try {
OccurrenceName name = OccurrenceName.of("[PROJECT]", "[OCCURRENCE]");
client.deleteOccurrence(name);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception
}
}
@Test
@SuppressWarnings("all")
public void createOccurrenceTest() {
OccurrenceName name = OccurrenceName.of("[PROJECT]", "[OCCURRENCE]");
String noteName = "noteName1780787896";
String remediation = "remediation779381797";
Occurrence expectedResponse =
Occurrence.newBuilder()
.setName(name.toString())
.setNoteName(noteName)
.setRemediation(remediation)
.build();
mockGrafeasV1Beta1.addResponse(expectedResponse);
ProjectName parent = ProjectName.of("[PROJECT]");
Occurrence occurrence = Occurrence.newBuilder().build();
Occurrence actualResponse = client.createOccurrence(parent, occurrence);
Assert.assertEquals(expectedResponse, actualResponse);
List<GeneratedMessageV3> actualRequests = mockGrafeasV1Beta1.getRequests();
Assert.assertEquals(1, actualRequests.size());
CreateOccurrenceRequest actualRequest = (CreateOccurrenceRequest) actualRequests.get(0);
Assert.assertEquals(parent, ProjectName.parse(actualRequest.getParent()));
Assert.assertEquals(occurrence, actualRequest.getOccurrence());
Assert.assertTrue(
channelProvider.isHeaderSent(
ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
@Test
@SuppressWarnings("all")
public void createOccurrenceExceptionTest() throws Exception {
StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT);
mockGrafeasV1Beta1.addException(exception);
try {
ProjectName parent = ProjectName.of("[PROJECT]");
Occurrence occurrence = Occurrence.newBuilder().build();
client.createOccurrence(parent, occurrence);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception
}
}
@Test
@SuppressWarnings("all")
public void batchCreateOccurrencesTest() {
BatchCreateOccurrencesResponse expectedResponse =
BatchCreateOccurrencesResponse.newBuilder().build();
mockGrafeasV1Beta1.addResponse(expectedResponse);
ProjectName parent = ProjectName.of("[PROJECT]");
List<Occurrence> occurrences = new ArrayList<>();
BatchCreateOccurrencesResponse actualResponse =
client.batchCreateOccurrences(parent, occurrences);
Assert.assertEquals(expectedResponse, actualResponse);
List<GeneratedMessageV3> actualRequests = mockGrafeasV1Beta1.getRequests();
Assert.assertEquals(1, actualRequests.size());
BatchCreateOccurrencesRequest actualRequest =
(BatchCreateOccurrencesRequest) actualRequests.get(0);
Assert.assertEquals(parent, ProjectName.parse(actualRequest.getParent()));
Assert.assertEquals(occurrences, actualRequest.getOccurrencesList());
Assert.assertTrue(
channelProvider.isHeaderSent(
ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
@Test
@SuppressWarnings("all")
public void batchCreateOccurrencesExceptionTest() throws Exception {
StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT);
mockGrafeasV1Beta1.addException(exception);
try {
ProjectName parent = ProjectName.of("[PROJECT]");
List<Occurrence> occurrences = new ArrayList<>();
client.batchCreateOccurrences(parent, occurrences);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception
}
}
@Test
@SuppressWarnings("all")
public void updateOccurrenceTest() {
OccurrenceName name2 = OccurrenceName.of("[PROJECT]", "[OCCURRENCE]");
String noteName = "noteName1780787896";
String remediation = "remediation779381797";
Occurrence expectedResponse =
Occurrence.newBuilder()
.setName(name2.toString())
.setNoteName(noteName)
.setRemediation(remediation)
.build();
mockGrafeasV1Beta1.addResponse(expectedResponse);
OccurrenceName name = OccurrenceName.of("[PROJECT]", "[OCCURRENCE]");
Occurrence occurrence = Occurrence.newBuilder().build();
FieldMask updateMask = FieldMask.newBuilder().build();
Occurrence actualResponse = client.updateOccurrence(name, occurrence, updateMask);
Assert.assertEquals(expectedResponse, actualResponse);
List<GeneratedMessageV3> actualRequests = mockGrafeasV1Beta1.getRequests();
Assert.assertEquals(1, actualRequests.size());
UpdateOccurrenceRequest actualRequest = (UpdateOccurrenceRequest) actualRequests.get(0);
Assert.assertEquals(name, OccurrenceName.parse(actualRequest.getName()));
Assert.assertEquals(occurrence, actualRequest.getOccurrence());
Assert.assertEquals(updateMask, actualRequest.getUpdateMask());
Assert.assertTrue(
channelProvider.isHeaderSent(
ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
@Test
@SuppressWarnings("all")
public void updateOccurrenceExceptionTest() throws Exception {
StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT);
mockGrafeasV1Beta1.addException(exception);
try {
OccurrenceName name = OccurrenceName.of("[PROJECT]", "[OCCURRENCE]");
Occurrence occurrence = Occurrence.newBuilder().build();
FieldMask updateMask = FieldMask.newBuilder().build();
client.updateOccurrence(name, occurrence, updateMask);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception
}
}
@Test
@SuppressWarnings("all")
public void getOccurrenceNoteTest() {
NoteName name2 = NoteName.of("[PROJECT]", "[NOTE]");
String shortDescription = "shortDescription-235369287";
String longDescription = "longDescription-1747792199";
Note expectedResponse =
Note.newBuilder()
.setName(name2.toString())
.setShortDescription(shortDescription)
.setLongDescription(longDescription)
.build();
mockGrafeasV1Beta1.addResponse(expectedResponse);
OccurrenceName name = OccurrenceName.of("[PROJECT]", "[OCCURRENCE]");
Note actualResponse = client.getOccurrenceNote(name);
Assert.assertEquals(expectedResponse, actualResponse);
List<GeneratedMessageV3> actualRequests = mockGrafeasV1Beta1.getRequests();
Assert.assertEquals(1, actualRequests.size());
GetOccurrenceNoteRequest actualRequest = (GetOccurrenceNoteRequest) actualRequests.get(0);
Assert.assertEquals(name, OccurrenceName.parse(actualRequest.getName()));
Assert.assertTrue(
channelProvider.isHeaderSent(
ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
@Test
@SuppressWarnings("all")
public void getOccurrenceNoteExceptionTest() throws Exception {
StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT);
mockGrafeasV1Beta1.addException(exception);
try {
OccurrenceName name = OccurrenceName.of("[PROJECT]", "[OCCURRENCE]");
client.getOccurrenceNote(name);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception
}
}
@Test
@SuppressWarnings("all")
public void getNoteTest() {
NoteName name2 = NoteName.of("[PROJECT]", "[NOTE]");
String shortDescription = "shortDescription-235369287";
String longDescription = "longDescription-1747792199";
Note expectedResponse =
Note.newBuilder()
.setName(name2.toString())
.setShortDescription(shortDescription)
.setLongDescription(longDescription)
.build();
mockGrafeasV1Beta1.addResponse(expectedResponse);
NoteName name = NoteName.of("[PROJECT]", "[NOTE]");
Note actualResponse = client.getNote(name);
Assert.assertEquals(expectedResponse, actualResponse);
List<GeneratedMessageV3> actualRequests = mockGrafeasV1Beta1.getRequests();
Assert.assertEquals(1, actualRequests.size());
GetNoteRequest actualRequest = (GetNoteRequest) actualRequests.get(0);
Assert.assertEquals(name, NoteName.parse(actualRequest.getName()));
Assert.assertTrue(
channelProvider.isHeaderSent(
ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
@Test
@SuppressWarnings("all")
public void getNoteExceptionTest() throws Exception {
StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT);
mockGrafeasV1Beta1.addException(exception);
try {
NoteName name = NoteName.of("[PROJECT]", "[NOTE]");
client.getNote(name);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception
}
}
@Test
@SuppressWarnings("all")
public void listNotesTest() {
String nextPageToken = "";
Note notesElement = Note.newBuilder().build();
List<Note> notes = Arrays.asList(notesElement);
ListNotesResponse expectedResponse =
ListNotesResponse.newBuilder().setNextPageToken(nextPageToken).addAllNotes(notes).build();
mockGrafeasV1Beta1.addResponse(expectedResponse);
ProjectName parent = ProjectName.of("[PROJECT]");
String filter = "filter-1274492040";
ListNotesPagedResponse pagedListResponse = client.listNotes(parent, filter);
List<Note> resources = Lists.newArrayList(pagedListResponse.iterateAll());
Assert.assertEquals(1, resources.size());
Assert.assertEquals(expectedResponse.getNotesList().get(0), resources.get(0));
List<GeneratedMessageV3> actualRequests = mockGrafeasV1Beta1.getRequests();
Assert.assertEquals(1, actualRequests.size());
ListNotesRequest actualRequest = (ListNotesRequest) actualRequests.get(0);
Assert.assertEquals(parent, ProjectName.parse(actualRequest.getParent()));
Assert.assertEquals(filter, actualRequest.getFilter());
Assert.assertTrue(
channelProvider.isHeaderSent(
ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
@Test
@SuppressWarnings("all")
public void listNotesExceptionTest() throws Exception {
StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT);
mockGrafeasV1Beta1.addException(exception);
try {
ProjectName parent = ProjectName.of("[PROJECT]");
String filter = "filter-1274492040";
client.listNotes(parent, filter);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception
}
}
@Test
@SuppressWarnings("all")
public void deleteNoteTest() {
Empty expectedResponse = Empty.newBuilder().build();
mockGrafeasV1Beta1.addResponse(expectedResponse);
NoteName name = NoteName.of("[PROJECT]", "[NOTE]");
client.deleteNote(name);
List<GeneratedMessageV3> actualRequests = mockGrafeasV1Beta1.getRequests();
Assert.assertEquals(1, actualRequests.size());
DeleteNoteRequest actualRequest = (DeleteNoteRequest) actualRequests.get(0);
Assert.assertEquals(name, NoteName.parse(actualRequest.getName()));
Assert.assertTrue(
channelProvider.isHeaderSent(
ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
@Test
@SuppressWarnings("all")
public void deleteNoteExceptionTest() throws Exception {
StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT);
mockGrafeasV1Beta1.addException(exception);
try {
NoteName name = NoteName.of("[PROJECT]", "[NOTE]");
client.deleteNote(name);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception
}
}
@Test
@SuppressWarnings("all")
public void createNoteTest() {
NoteName name = NoteName.of("[PROJECT]", "[NOTE]");
String shortDescription = "shortDescription-235369287";
String longDescription = "longDescription-1747792199";
Note expectedResponse =
Note.newBuilder()
.setName(name.toString())
.setShortDescription(shortDescription)
.setLongDescription(longDescription)
.build();
mockGrafeasV1Beta1.addResponse(expectedResponse);
ProjectName parent = ProjectName.of("[PROJECT]");
String noteId = "noteId2129224840";
Note note = Note.newBuilder().build();
Note actualResponse = client.createNote(parent, noteId, note);
Assert.assertEquals(expectedResponse, actualResponse);
List<GeneratedMessageV3> actualRequests = mockGrafeasV1Beta1.getRequests();
Assert.assertEquals(1, actualRequests.size());
CreateNoteRequest actualRequest = (CreateNoteRequest) actualRequests.get(0);
Assert.assertEquals(parent, ProjectName.parse(actualRequest.getParent()));
Assert.assertEquals(noteId, actualRequest.getNoteId());
Assert.assertEquals(note, actualRequest.getNote());
Assert.assertTrue(
channelProvider.isHeaderSent(
ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
@Test
@SuppressWarnings("all")
public void createNoteExceptionTest() throws Exception {
StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT);
mockGrafeasV1Beta1.addException(exception);
try {
ProjectName parent = ProjectName.of("[PROJECT]");
String noteId = "noteId2129224840";
Note note = Note.newBuilder().build();
client.createNote(parent, noteId, note);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception
}
}
@Test
@SuppressWarnings("all")
public void batchCreateNotesTest() {
BatchCreateNotesResponse expectedResponse = BatchCreateNotesResponse.newBuilder().build();
mockGrafeasV1Beta1.addResponse(expectedResponse);
ProjectName parent = ProjectName.of("[PROJECT]");
Map<String, Note> notes = new HashMap<>();
BatchCreateNotesResponse actualResponse = client.batchCreateNotes(parent, notes);
Assert.assertEquals(expectedResponse, actualResponse);
List<GeneratedMessageV3> actualRequests = mockGrafeasV1Beta1.getRequests();
Assert.assertEquals(1, actualRequests.size());
BatchCreateNotesRequest actualRequest = (BatchCreateNotesRequest) actualRequests.get(0);
Assert.assertEquals(parent, ProjectName.parse(actualRequest.getParent()));
Assert.assertEquals(notes, actualRequest.getNotesMap());
Assert.assertTrue(
channelProvider.isHeaderSent(
ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
@Test
@SuppressWarnings("all")
public void batchCreateNotesExceptionTest() throws Exception {
StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT);
mockGrafeasV1Beta1.addException(exception);
try {
ProjectName parent = ProjectName.of("[PROJECT]");
Map<String, Note> notes = new HashMap<>();
client.batchCreateNotes(parent, notes);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception
}
}
@Test
@SuppressWarnings("all")
public void updateNoteTest() {
NoteName name2 = NoteName.of("[PROJECT]", "[NOTE]");
String shortDescription = "shortDescription-235369287";
String longDescription = "longDescription-1747792199";
Note expectedResponse =
Note.newBuilder()
.setName(name2.toString())
.setShortDescription(shortDescription)
.setLongDescription(longDescription)
.build();
mockGrafeasV1Beta1.addResponse(expectedResponse);
NoteName name = NoteName.of("[PROJECT]", "[NOTE]");
Note note = Note.newBuilder().build();
FieldMask updateMask = FieldMask.newBuilder().build();
Note actualResponse = client.updateNote(name, note, updateMask);
Assert.assertEquals(expectedResponse, actualResponse);
List<GeneratedMessageV3> actualRequests = mockGrafeasV1Beta1.getRequests();
Assert.assertEquals(1, actualRequests.size());
UpdateNoteRequest actualRequest = (UpdateNoteRequest) actualRequests.get(0);
Assert.assertEquals(name, NoteName.parse(actualRequest.getName()));
Assert.assertEquals(note, actualRequest.getNote());
Assert.assertEquals(updateMask, actualRequest.getUpdateMask());
Assert.assertTrue(
channelProvider.isHeaderSent(
ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
@Test
@SuppressWarnings("all")
public void updateNoteExceptionTest() throws Exception {
StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT);
mockGrafeasV1Beta1.addException(exception);
try {
NoteName name = NoteName.of("[PROJECT]", "[NOTE]");
Note note = Note.newBuilder().build();
FieldMask updateMask = FieldMask.newBuilder().build();
client.updateNote(name, note, updateMask);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception
}
}
@Test
@SuppressWarnings("all")
public void listNoteOccurrencesTest() {
String nextPageToken = "";
Occurrence occurrencesElement = Occurrence.newBuilder().build();
List<Occurrence> occurrences = Arrays.asList(occurrencesElement);
ListNoteOccurrencesResponse expectedResponse =
ListNoteOccurrencesResponse.newBuilder()
.setNextPageToken(nextPageToken)
.addAllOccurrences(occurrences)
.build();
mockGrafeasV1Beta1.addResponse(expectedResponse);
NoteName name = NoteName.of("[PROJECT]", "[NOTE]");
String filter = "filter-1274492040";
ListNoteOccurrencesPagedResponse pagedListResponse = client.listNoteOccurrences(name, filter);
List<Occurrence> resources = Lists.newArrayList(pagedListResponse.iterateAll());
Assert.assertEquals(1, resources.size());
Assert.assertEquals(expectedResponse.getOccurrencesList().get(0), resources.get(0));
List<GeneratedMessageV3> actualRequests = mockGrafeasV1Beta1.getRequests();
Assert.assertEquals(1, actualRequests.size());
ListNoteOccurrencesRequest actualRequest = (ListNoteOccurrencesRequest) actualRequests.get(0);
Assert.assertEquals(name, NoteName.parse(actualRequest.getName()));
Assert.assertEquals(filter, actualRequest.getFilter());
Assert.assertTrue(
channelProvider.isHeaderSent(
ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
@Test
@SuppressWarnings("all")
public void listNoteOccurrencesExceptionTest() throws Exception {
StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT);
mockGrafeasV1Beta1.addException(exception);
try {
NoteName name = NoteName.of("[PROJECT]", "[NOTE]");
String filter = "filter-1274492040";
client.listNoteOccurrences(name, filter);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception
}
}
@Test
@SuppressWarnings("all")
public void getVulnerabilityOccurrencesSummaryTest() {
VulnerabilityOccurrencesSummary expectedResponse =
VulnerabilityOccurrencesSummary.newBuilder().build();
mockGrafeasV1Beta1.addResponse(expectedResponse);
ProjectName parent = ProjectName.of("[PROJECT]");
String filter = "filter-1274492040";
VulnerabilityOccurrencesSummary actualResponse =
client.getVulnerabilityOccurrencesSummary(parent, filter);
Assert.assertEquals(expectedResponse, actualResponse);
List<GeneratedMessageV3> actualRequests = mockGrafeasV1Beta1.getRequests();
Assert.assertEquals(1, actualRequests.size());
GetVulnerabilityOccurrencesSummaryRequest actualRequest =
(GetVulnerabilityOccurrencesSummaryRequest) actualRequests.get(0);
Assert.assertEquals(parent, ProjectName.parse(actualRequest.getParent()));
Assert.assertEquals(filter, actualRequest.getFilter());
Assert.assertTrue(
channelProvider.isHeaderSent(
ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
@Test
@SuppressWarnings("all")
public void getVulnerabilityOccurrencesSummaryExceptionTest() throws Exception {
StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT);
mockGrafeasV1Beta1.addException(exception);
try {
ProjectName parent = ProjectName.of("[PROJECT]");
String filter = "filter-1274492040";
client.getVulnerabilityOccurrencesSummary(parent, filter);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception
}
}
}
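The generated tests above all follow the same capture-and-assert pattern: enqueue a canned response (or exception) on the in-process mock, call the client, then inspect the request the mock recorded. A minimal, framework-free sketch of that pattern — the MockService type and its methods are illustrative stand-ins, not the actual Grafeas test harness:
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Deque;
import java.util.List;
public class CaptureAssertDemo {
    // Illustrative stand-in for mockGrafeasV1Beta1: stores canned responses and records requests.
    static class MockService {
        private final Deque<String> cannedResponses = new ArrayDeque<>();
        private final List<String> requests = new ArrayList<>();
        void addResponse(String response) { cannedResponses.push(response); }
        String call(String request) { requests.add(request); return cannedResponses.pop(); }
        List<String> getRequests() { return requests; }
    }
    public static void main(String[] args) {
        MockService mock = new MockService();
        mock.addResponse("expected-note");                    // 1. enqueue the expected response
        String actual = mock.call("createNote:[PROJECT]");    // 2. exercise the "client"
        System.out.println("expected-note".equals(actual));   // 3. check the response ...
        System.out.println(mock.getRequests().size() == 1);   // 4. ... and the captured request
    }
}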
|
Regenerate clients (#3713)
* Release 1.45.0/0.63.0
* Regenerate clients
|
google-cloud-containeranalysis/src/test/java/com/google/cloud/devtools/containeranalysis/v1beta1/GrafeasV1Beta1ClientTest.java
|
Regenerate clients (#3713)
|
<ide><path>oogle-cloud-containeranalysis/src/test/java/com/google/cloud/devtools/containeranalysis/v1beta1/GrafeasV1Beta1ClientTest.java
<ide> @Test
<ide> @SuppressWarnings("all")
<ide> public void getOccurrenceTest() {
<del> OccurrenceName name2 = OccurrenceName.of("[PROJECT]", "[OCCURRENCE]");
<add> String name2 = "name2-1052831874";
<ide> String noteName = "noteName1780787896";
<ide> String remediation = "remediation779381797";
<ide> Occurrence expectedResponse =
<ide> Occurrence.newBuilder()
<del> .setName(name2.toString())
<add> .setName(name2)
<ide> .setNoteName(noteName)
<ide> .setRemediation(remediation)
<ide> .build();
<ide> @Test
<ide> @SuppressWarnings("all")
<ide> public void createOccurrenceTest() {
<del> OccurrenceName name = OccurrenceName.of("[PROJECT]", "[OCCURRENCE]");
<add> String name = "name3373707";
<ide> String noteName = "noteName1780787896";
<ide> String remediation = "remediation779381797";
<ide> Occurrence expectedResponse =
<ide> Occurrence.newBuilder()
<del> .setName(name.toString())
<add> .setName(name)
<ide> .setNoteName(noteName)
<ide> .setRemediation(remediation)
<ide> .build();
<ide> @Test
<ide> @SuppressWarnings("all")
<ide> public void updateOccurrenceTest() {
<del> OccurrenceName name2 = OccurrenceName.of("[PROJECT]", "[OCCURRENCE]");
<add> String name2 = "name2-1052831874";
<ide> String noteName = "noteName1780787896";
<ide> String remediation = "remediation779381797";
<ide> Occurrence expectedResponse =
<ide> Occurrence.newBuilder()
<del> .setName(name2.toString())
<add> .setName(name2)
<ide> .setNoteName(noteName)
<ide> .setRemediation(remediation)
<ide> .build();
<ide> @Test
<ide> @SuppressWarnings("all")
<ide> public void getOccurrenceNoteTest() {
<del> NoteName name2 = NoteName.of("[PROJECT]", "[NOTE]");
<add> String name2 = "name2-1052831874";
<ide> String shortDescription = "shortDescription-235369287";
<ide> String longDescription = "longDescription-1747792199";
<ide> Note expectedResponse =
<ide> Note.newBuilder()
<del> .setName(name2.toString())
<add> .setName(name2)
<ide> .setShortDescription(shortDescription)
<ide> .setLongDescription(longDescription)
<ide> .build();
<ide> @Test
<ide> @SuppressWarnings("all")
<ide> public void getNoteTest() {
<del> NoteName name2 = NoteName.of("[PROJECT]", "[NOTE]");
<add> String name2 = "name2-1052831874";
<ide> String shortDescription = "shortDescription-235369287";
<ide> String longDescription = "longDescription-1747792199";
<ide> Note expectedResponse =
<ide> Note.newBuilder()
<del> .setName(name2.toString())
<add> .setName(name2)
<ide> .setShortDescription(shortDescription)
<ide> .setLongDescription(longDescription)
<ide> .build();
<ide> @Test
<ide> @SuppressWarnings("all")
<ide> public void createNoteTest() {
<del> NoteName name = NoteName.of("[PROJECT]", "[NOTE]");
<add> String name = "name3373707";
<ide> String shortDescription = "shortDescription-235369287";
<ide> String longDescription = "longDescription-1747792199";
<ide> Note expectedResponse =
<ide> Note.newBuilder()
<del> .setName(name.toString())
<add> .setName(name)
<ide> .setShortDescription(shortDescription)
<ide> .setLongDescription(longDescription)
<ide> .build();
<ide> @Test
<ide> @SuppressWarnings("all")
<ide> public void updateNoteTest() {
<del> NoteName name2 = NoteName.of("[PROJECT]", "[NOTE]");
<add> String name2 = "name2-1052831874";
<ide> String shortDescription = "shortDescription-235369287";
<ide> String longDescription = "longDescription-1747792199";
<ide> Note expectedResponse =
<ide> Note.newBuilder()
<del> .setName(name2.toString())
<add> .setName(name2)
<ide> .setShortDescription(shortDescription)
<ide> .setLongDescription(longDescription)
<ide> .build();
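The diff above replaces generated resource-name helpers (NoteName, OccurrenceName) with plain strings in the test fixtures. A standalone sketch of the two styles, using a hypothetical ResourceName class rather than the real generated types:
public class ResourceNameDemo {
    // Hypothetical stand-in for a generated helper such as NoteName.of("[PROJECT]", "[NOTE]").
    static class ResourceName {
        private final String project;
        private final String note;
        private ResourceName(String project, String note) { this.project = project; this.note = note; }
        static ResourceName of(String project, String note) { return new ResourceName(project, note); }
        @Override public String toString() { return "projects/" + project + "/notes/" + note; }
    }
    public static void main(String[] args) {
        // Old style: build a typed name, then call toString() when populating the field.
        String viaHelper = ResourceName.of("[PROJECT]", "[NOTE]").toString();
        // New style: the fixture simply uses an arbitrary string value.
        String plain = "name2-1052831874";
        System.out.println(viaHelper); // projects/[PROJECT]/notes/[NOTE]
        System.out.println(plain);
    }
}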
|
|
Java
|
mit
|
2b44dda7c6bd09b7b59124594a37b8d03186b6dd
| 0 |
gurkenlabs/litiengine,gurkenlabs/litiengine
|
package de.gurkenlabs.litiengine.environment.tilemap.xml;
import java.util.ArrayList;
import org.junit.Test;
import junit.framework.Assert;
public class CustomPropertyProviderTests {
@Test
public void testSetCustomProperty() {
CustomPropertyProvider propProvider = new CustomPropertyProvider();
propProvider.setCustomProperty("test", "testvalue");
Assert.assertEquals("testvalue", propProvider.getCustomProperty("test"));
Assert.assertNull(propProvider.getCustomProperty("test2"));
Assert.assertEquals(1, propProvider.getAllCustomProperties().size());
propProvider.setCustomProperty("test", "testvalue2");
Assert.assertEquals("testvalue2", propProvider.getCustomProperty("test"));
ArrayList<Property> props = new ArrayList<>();
props.add(new Property("test2", "testvalue3"));
props.add(new Property("test3", "testvalue4"));
propProvider.setCustomProperties(props);
Assert.assertEquals(2, propProvider.getAllCustomProperties().size());
Assert.assertEquals("testvalue3", propProvider.getCustomProperty("test2"));
Assert.assertEquals("testvalue4", propProvider.getCustomProperty("test3"));
propProvider.setCustomProperties(null);
Assert.assertNull(propProvider.getAllCustomProperties());
}
}
|
tests/de/gurkenlabs/litiengine/environment/tilemap/xml/CustomPropertyProviderTests.java
|
package de.gurkenlabs.litiengine.environment.tilemap.xml;
import java.util.ArrayList;
import org.junit.Test;
import junit.framework.Assert;
public class CustomPropertyProviderTests {
@Test
public void testSetCustomProperty() {
CustomPropertyProvider propProvider = new CustomPropertyProvider();
propProvider.setCustomProperty("test", "testvalue");
Assert.assertEquals("testvalue", propProvider.getCustomProperty("test"));
Assert.assertNull(propProvider.getCustomProperty("test2"));
Assert.assertEquals(1, propProvider.getAllCustomProperties().size());
propProvider.setCustomProperty("test", "testvalue2");
Assert.assertEquals("testvalue2", propProvider.getCustomProperty("test"));
ArrayList<Property> props = new ArrayList<>();
props.add(new Property("test2", "testvalue3"));
props.add(new Property("test3", "testvalue4"));
propProvider.setCustomProperties(props);
Assert.assertEquals(2, propProvider.getAllCustomProperties().size());
Assert.assertEquals("testvalue3", propProvider.getCustomProperty("test2"));
Assert.assertEquals("testvalue4", propProvider.getCustomProperty("test3"));
}
}
|
Add another test case for the CustomPropertyProvider.
|
tests/de/gurkenlabs/litiengine/environment/tilemap/xml/CustomPropertyProviderTests.java
|
Add another test case for the CustomPropertyProvider.
|
<ide><path>ests/de/gurkenlabs/litiengine/environment/tilemap/xml/CustomPropertyProviderTests.java
<ide> Assert.assertEquals(2, propProvider.getAllCustomProperties().size());
<ide> Assert.assertEquals("testvalue3", propProvider.getCustomProperty("test2"));
<ide> Assert.assertEquals("testvalue4", propProvider.getCustomProperty("test3"));
<add>
<add> propProvider.setCustomProperties(null);
<add> Assert.assertNull(propProvider.getAllCustomProperties());
<ide> }
<ide> }
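For context, a minimal sketch of a property provider whose behaviour matches the assertions added above (these are illustrative stand-ins, not the actual litiengine classes). In particular, setCustomProperties(null) keeps the null reference, so getAllCustomProperties() then returns null:
import java.util.ArrayList;
import java.util.List;
public class SimplePropertyProviderDemo {
    // Illustrative stand-in for the Property element (name/value pair).
    static class Property {
        final String name;
        String value;
        Property(String name, String value) { this.name = name; this.value = value; }
    }
    // Illustrative stand-in for CustomPropertyProvider.
    static class SimplePropertyProvider {
        private List<Property> properties = new ArrayList<>();
        String getCustomProperty(String name) {
            if (properties == null) return null;
            for (Property p : properties) {
                if (p.name.equals(name)) return p.value;
            }
            return null;
        }
        void setCustomProperty(String name, String value) {
            if (properties == null) properties = new ArrayList<>();
            for (Property p : properties) {
                if (p.name.equals(name)) { p.value = value; return; }
            }
            properties.add(new Property(name, value));
        }
        void setCustomProperties(List<Property> props) {
            this.properties = props; // keeping the reference (including null) matches the new test case
        }
        List<Property> getAllCustomProperties() {
            return properties;
        }
    }
    public static void main(String[] args) {
        SimplePropertyProvider provider = new SimplePropertyProvider();
        provider.setCustomProperty("test", "testvalue");
        System.out.println(provider.getCustomProperty("test")); // testvalue
        provider.setCustomProperties(null);
        System.out.println(provider.getAllCustomProperties()); // null
    }
}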
|
|
Java
|
mit
|
4252783f8270b975ac5b02a06591bf6dc80c5398
| 0 |
CS2103AUG2016-W11-C3/main
|
package harmony.mastermind.logic.parser;
import static harmony.mastermind.commons.core.Messages.MESSAGE_INVALID_COMMAND_FORMAT;
import static harmony.mastermind.commons.core.Messages.MESSAGE_UNKNOWN_COMMAND;
import java.text.ParseException;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.ocpsoft.prettytime.nlp.PrettyTimeParser;
import org.ocpsoft.prettytime.nlp.parse.DateGroup;
import com.google.common.base.Strings;
import harmony.mastermind.commons.exceptions.IllegalValueException;
import harmony.mastermind.commons.exceptions.InvalidEventDateException;
import harmony.mastermind.commons.util.StringUtil;
import harmony.mastermind.logic.commands.*;
import harmony.mastermind.model.ModelManager;
import harmony.mastermind.model.tag.Tag;
/**
* Parses user input.
*/
public class Parser {
/**
* Used for initial separation of command word and args.
*/
private static final Pattern BASIC_COMMAND_FORMAT = Pattern.compile("(?<keyword>\\S+)(?<arguments>.*)");
private static final Pattern KEYWORDS_ARGS_FORMAT = Pattern.compile("(?<keywords>\\S+(?:\\s+\\S+)*)"); // one
// or
// more
// keywords
// separated
// by
// whitespace
private static final Pattern TASK_DATA_ARGS_FORMAT = // '/' forward slashes
// are reserved for
// delimiter prefixes
Pattern.compile(
"(?<name>[^/]+)" + " at/(?<time>[^/]+)" + " on/(?<date>[^/]+)" + "(?<tagArguments>(?: t/[^/]+)*)"); // variable
// number
// of
// tags
private static final Pattern TASK_INDEX_ARGS_FORMAT = Pattern.compile("(?<targetIndex>.+)");
private static final Pattern TASK_ARCHIVE_ARGS_FORMAT = Pattern.compile("(?<type>[^/]+)");
private static final String TAB_ARCHIVES = "Archives";
public Parser() {
}
/**
* Parses user input into command for execution.
*
* @param userInput
* full user input string
* @return the command based on the user input
*/
public Command parseCommand(String userInput, String currentTab) {
final Matcher matcher = BASIC_COMMAND_FORMAT.matcher(userInput.trim());
if (!matcher.matches()) {
return new IncorrectCommand(String.format(MESSAGE_INVALID_COMMAND_FORMAT, HelpCommand.MESSAGE_USAGE));
}
final String keyword = matcher.group("keyword");
final String arguments = matcher.group("arguments");
switch (keyword) {
case AddCommand.COMMAND_KEYWORD_ADD: // main command
case AddCommand.COMMAND_KEYWORD_DO: // alias (fall through)
return prepareAdd(arguments);
case DeleteCommand.COMMAND_WORD:
return prepareDelete(arguments);
case ClearCommand.COMMAND_WORD:
return new ClearCommand();
case FindCommand.COMMAND_WORD:
return prepareFind(arguments);
case FindTagCommand.COMMAND_WORD:
return prepareFindTag(arguments);
case ListCommand.COMMAND_WORD:
return prepareList(arguments);
case UpcomingCommand.COMMAND_WORD:
return prepareUpcoming(arguments);
case MarkCommand.COMMAND_WORD:
return prepareMark(arguments, currentTab);
case EditCommand.COMMAND_KEYWORD_EDIT:
case EditCommand.COMMAND_KEYWORD_UPDATE:
case EditCommand.COMMAND_KEYWORD_CHANGE:
return prepareEdit(arguments);
case UndoCommand.COMMAND_WORD:
return new UndoCommand();
case RelocateCommand.COMMAND_WORD:
return new RelocateCommand(arguments);
case RedoCommand.COMMAND_WORD:
return new RedoCommand();
case UnmarkCommand.COMMAND_WORD:
return prepareUnmark(arguments, currentTab);
case ExitCommand.COMMAND_WORD:
return new ExitCommand();
case HelpCommand.COMMAND_WORD:
return new HelpCommand();
default:
return new IncorrectCommand(MESSAGE_UNKNOWN_COMMAND+": "+userInput);
}
}
/**
* Parses arguments in the context of the add task command.
*
* @param args
* full command args string
* @return the prepared command
*/
// @@author A0138862W
private Command prepareAdd(String args) {
final Matcher matcher = AddCommand.COMMAND_ARGUMENTS_PATTERN.matcher(args.trim());
// Validate arg string format
if (!matcher.matches()) {
return new IncorrectCommand(String.format(MESSAGE_INVALID_COMMAND_FORMAT, AddCommand.MESSAGE_EXAMPLES));
}
try {
// mandatory
// there's no need to check for existence as the regex only captures a full match of the mandatory components
final String name = matcher.group("name");
// optionals
final Optional<String> recur = Optional.ofNullable(matcher.group("recur"));
final Optional<String> dates = Optional.ofNullable(matcher.group("dates"));
Optional<String> startDate = Optional.empty();
Optional<String> endDate = Optional.empty();
final Optional<String> tags = Optional.ofNullable(matcher.group("tags"));
if(dates.isPresent()){
PrettyTimeParser ptp = new PrettyTimeParser();
List<DateGroup> dateGroups = ptp.parseSyntax(dates.get());
if(!dateGroups.isEmpty()){
List<Date> startEndDates = dateGroups.get(0).getDates();
if(startEndDates.size() == 1){ // only 1 date is found, assume deadline
startDate = Optional.empty();
endDate = Optional.ofNullable(startEndDates.get(0).toString());
}else if(startEndDates.size() == 2){ // 2 date values are found, assume event
startDate = Optional.ofNullable(startEndDates.get(0).toString());
endDate = Optional.ofNullable(startEndDates.get(1).toString());
}
}
}
// return internal value if present. else, return empty string
Set<String> tagSet = getTagsFromArgs(tags.map(val -> val).orElse(""));
String recurVal = null;
//check if recur has a valid keyword
if (recur.isPresent()) {
recurVal = recur.get();
}
if (startDate.isPresent() && endDate.isPresent()) {
// event
String start = startDate.get().toLowerCase();
String end = endDate.get().toLowerCase();
if (start.equals("today")) {
start += " 2359";
}else if (start.equals("tomorrow")) {
start += " 2359";
}
if (end.equals("today")) {
end += " 2359";
}else if (end.equals("tomorrow")) {
end += " 2359";
}
try {
return new AddCommand(name, start, end, tagSet, recurVal);
} catch (InvalidEventDateException iede) {
return new IncorrectCommand(iede.getMessage());
}
} else if (!startDate.isPresent() && endDate.isPresent()) {
// deadline
String end = endDate.get().toLowerCase();
if (end.equals("today")) {
end += " 2359";
}else if (end.equals("tomorrow")) {
end += " 2359";
}
return new AddCommand(name, end, tagSet, recurVal);
} else if (startDate.isPresent() && !endDate.isPresent()) {
// task with only startdate is not supported.
throw new IllegalValueException("Cannot create a task with only start date.");
} else {
// floating
return new AddCommand(name, tagSet);
}
} catch (IllegalValueException ive) {
return new IncorrectCommand(ive.getMessage());
}
}
/**
* Parses arguments in the context of the edit task command.
*
* @param args
* full command args string
* @return the prepared command
*/
// @@author A0138862W
private Command prepareEdit(String args) {
final Matcher matcher = EditCommand.COMMAND_ARGUMENTS_PATTERN.matcher(args.trim());
// Validate arg string format
if (!matcher.matches()) {
return new IncorrectCommand(String.format(MESSAGE_INVALID_COMMAND_FORMAT, EditCommand.MESSAGE_USAGE));
}
try {
// mandatory
// regex accepts only numbers in the index field, so encountering a NumberFormatException is impossible
final int index = Integer.parseInt(matcher.group("index"));
//optional
final Optional<String> recur = Optional.ofNullable(matcher.group("recur"));
final Optional<String> name = Optional.ofNullable(matcher.group("name"));
final Optional<String> startDate = Optional.ofNullable(matcher.group("startDate"));
final Optional<String> endDate = Optional.ofNullable(matcher.group("endDate"));
final Optional<String> tags = Optional.ofNullable(matcher.group("tags"));
Optional<Set<String>> tagSet = Optional.empty();
if(tags.isPresent()){
tagSet = Optional.ofNullable(getTagsFromArgs(tags.get()));
};
return new EditCommand(index, name, startDate, endDate, tagSet, recur);
} catch (IllegalValueException ive) {
return new IncorrectCommand(ive.getMessage());
} catch (ParseException pe) {
return new IncorrectCommand(pe.getMessage());
}
}
// @@author
/**
* Extracts the new task's tags from the add command's tag arguments string.
* Merges duplicate tag strings.
*/
private static Set<String> getTagsFromArgs(String tagArguments) throws IllegalValueException {
// no tags
if (Strings.isNullOrEmpty(tagArguments)) {
return Collections.emptySet();
}
// replace first delimiter prefix, then split
final Collection<String> tagStrings = Arrays.asList(tagArguments.split(","));
return new HashSet<>(tagStrings);
}
/**
* Parses arguments in the context of the delete task command.
*
* @param args
* full command args string
* @return the prepared command
*/
private Command prepareDelete(String args) {
Optional<Integer> index = parseIndex(args);
if (!index.isPresent()) {
return new IncorrectCommand(String.format(MESSAGE_INVALID_COMMAND_FORMAT, DeleteCommand.MESSAGE_USAGE));
}
Command result = new DeleteCommand(index.get());
return result;
}
/**
* Parses arguments in the context of the mark task command.
*
* @param args
* full command args string
* @return the prepared command
*/
//@@author A0124797R
private Command prepareMark(String args, String currentTab) {
Optional<Integer> index = parseIndex(args);
if (!index.isPresent()) {
return new IncorrectCommand(String.format(MESSAGE_INVALID_COMMAND_FORMAT, MarkCommand.MESSAGE_USAGE));
}
return new MarkCommand(index.get(), currentTab);
}
/**
* Parses arguments in the context of the list task command.
*
* @param args
* full command args string
* @return the prepared command
*/
//@@author A0124797R
private Command prepareList(String args) {
Optional<String> type = parseType(args);
if (!type.isPresent()) {
return new ListCommand();
}else {
if (type.get().equals(ModelManager.TAB_TASKS.toLowerCase()) ||
type.get().equals(ModelManager.TAB_EVENTS.toLowerCase()) ||
type.get().equals(ModelManager.TAB_DEADLINES.toLowerCase()) ||
type.get().equals(ModelManager.TAB_ARCHIVES.toLowerCase())) {
return new ListCommand(type);
}else {
return new IncorrectCommand(String.format(MESSAGE_INVALID_COMMAND_FORMAT, ListCommand.MESSAGE_USAGE));
}
}
}
/**
* Parses arguments in the context of the unmark task command.
*
* @param args
* full command args string
* @return the prepared command
*/
//@@author A0124797R
private Command prepareUnmark(String args, String currentTab) {
if (!currentTab.equals(TAB_ARCHIVES)) {
return new IncorrectCommand(UnmarkCommand.MESSAGE_UNMARK_TASK_FAILURE);
}
Optional<Integer> index = parseIndex(args);
if (!index.isPresent()) {
return new IncorrectCommand(String.format(MESSAGE_INVALID_COMMAND_FORMAT, UnmarkCommand.MESSAGE_USAGE));
}
return new UnmarkCommand(index.get());
}
/**
* Returns the specified index in the {@code command} IF a positive unsigned
* integer is given as the index. Returns an {@code Optional.empty()}
* otherwise.
*/
private Optional<Integer> parseIndex(String command) {
final Matcher matcher = TASK_INDEX_ARGS_FORMAT.matcher(command.trim());
if (!matcher.matches()) {
return Optional.empty();
}
String index = matcher.group("targetIndex");
if (!StringUtil.isUnsignedInteger(index)) {
return Optional.empty();
}
return Optional.of(Integer.parseInt(index));
}
/**
* Checks whether a type argument (e.g. archive) was supplied to the list command
*/
private Optional<String> parseType(String command) {
final Matcher matcher = TASK_ARCHIVE_ARGS_FORMAT.matcher(command.trim());
if (!matcher.matches()) {
return Optional.empty();
}
String type = matcher.group("type").toLowerCase();
return Optional.of(type);
}
private Optional<String> parseUpcoming(String command) {
if (command.isEmpty()) {
return Optional.of("empty");
}
final Matcher matcher = UpcomingCommand.COMMAND_ARGUMENTS_PATTERN.matcher(command.trim());
if (!matcher.matches()) {
return Optional.empty();
}
String type = matcher.group("taskType").toLowerCase();
return Optional.of(type);
}
/**
* Parses arguments in the context of the find task command.
*
* @param args
* full command args string
* @return the prepared command
*/
private Command prepareFind(String args) {
final Matcher matcher = KEYWORDS_ARGS_FORMAT.matcher(args.trim());
if (!matcher.matches()) {
return new IncorrectCommand(String.format(MESSAGE_INVALID_COMMAND_FORMAT, FindCommand.MESSAGE_USAGE));
}
// keywords delimited by whitespace
final String[] keywords = matcher.group("keywords").split("\\s+");
final Set<String> keywordSet = new HashSet<>(Arrays.asList(keywords));
return new FindCommand(keywordSet);
}
/**
* Parses arguments in the context of the upcoming task command.
*
* @param args
* full command args string
* @return the prepared command
*/
//@@author A0124797R
private Command prepareUpcoming(String args) {
Optional<String> taskType = parseUpcoming(args);
if (taskType.isPresent()) {
return new UpcomingCommand(taskType.get());
} else {
return new IncorrectCommand(String.format(MESSAGE_INVALID_COMMAND_FORMAT, UpcomingCommand.MESSAGE_USAGE));
}
}
/**
* Parses arguments in the context of the find tag command.
*
* @param args
* full command args string
* @return the prepared command
*/
private Command prepareFindTag(String args) {
final Matcher matcher = KEYWORDS_ARGS_FORMAT.matcher(args.trim());
if (!matcher.matches()) {
return new IncorrectCommand(String.format(MESSAGE_INVALID_COMMAND_FORMAT, FindCommand.MESSAGE_USAGE));
}
// keywords delimited by whitespace
final String[] keywords = matcher.group("keywords").split("\\s+");
final Set<Tag> tagSet = new HashSet<>();
try {
for (String tagName : keywords) {
tagSet.add(new Tag(tagName));
}
return new FindTagCommand(tagSet);
} catch (IllegalValueException ive) {
return new IncorrectCommand(ive.getMessage());
}
}
}
|
src/main/java/harmony/mastermind/logic/parser/Parser.java
|
package harmony.mastermind.logic.parser;
import static harmony.mastermind.commons.core.Messages.MESSAGE_INVALID_COMMAND_FORMAT;
import static harmony.mastermind.commons.core.Messages.MESSAGE_UNKNOWN_COMMAND;
import java.text.ParseException;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.ocpsoft.prettytime.nlp.PrettyTimeParser;
import org.ocpsoft.prettytime.nlp.parse.DateGroup;
import com.google.common.base.Strings;
import harmony.mastermind.commons.exceptions.IllegalValueException;
import harmony.mastermind.commons.exceptions.InvalidEventDateException;
import harmony.mastermind.commons.util.StringUtil;
import harmony.mastermind.logic.commands.*;
import harmony.mastermind.model.ModelManager;
import harmony.mastermind.model.tag.Tag;
/**
* Parses user input.
*/
public class Parser {
/**
* Used for initial separation of command word and args.
*/
private static final Pattern BASIC_COMMAND_FORMAT = Pattern.compile("(?<keyword>\\S+)(?<arguments>.*)");
private static final Pattern KEYWORDS_ARGS_FORMAT = Pattern.compile("(?<keywords>\\S+(?:\\s+\\S+)*)"); // one
// or
// more
// keywords
// separated
// by
// whitespace
private static final Pattern TASK_DATA_ARGS_FORMAT = // '/' forward slashes
// are reserved for
// delimiter prefixes
Pattern.compile(
"(?<name>[^/]+)" + " at/(?<time>[^/]+)" + " on/(?<date>[^/]+)" + "(?<tagArguments>(?: t/[^/]+)*)"); // variable
// number
// of
// tags
private static final Pattern TASK_INDEX_ARGS_FORMAT = Pattern.compile("(?<targetIndex>.+)");
private static final Pattern TASK_ARCHIVE_ARGS_FORMAT = Pattern.compile("(?<type>[^/]+)");
private static final String TAB_ARCHIVES = "Archives";
public Parser() {
}
/**
* Parses user input into command for execution.
*
* @param userInput
* full user input string
* @return the command based on the user input
*/
public Command parseCommand(String userInput, String currentTab) {
final Matcher matcher = BASIC_COMMAND_FORMAT.matcher(userInput.trim());
if (!matcher.matches()) {
return new IncorrectCommand(String.format(MESSAGE_INVALID_COMMAND_FORMAT, HelpCommand.MESSAGE_USAGE));
}
final String keyword = matcher.group("keyword");
final String arguments = matcher.group("arguments");
switch (keyword) {
case AddCommand.COMMAND_KEYWORD_ADD: // main command
case AddCommand.COMMAND_KEYWORD_DO: // alias (fall through)
return prepareAdd(arguments);
case DeleteCommand.COMMAND_WORD:
return prepareDelete(arguments);
case ClearCommand.COMMAND_WORD:
return new ClearCommand();
case FindCommand.COMMAND_WORD:
return prepareFind(arguments);
case FindTagCommand.COMMAND_WORD:
return prepareFindTag(arguments);
case ListCommand.COMMAND_WORD:
return prepareList(arguments);
case UpcomingCommand.COMMAND_WORD:
return prepareUpcoming(arguments);
case MarkCommand.COMMAND_WORD:
return prepareMark(arguments, currentTab);
case EditCommand.COMMAND_KEYWORD_EDIT:
case EditCommand.COMMAND_KEYWORD_UPDATE:
case EditCommand.COMMAND_KEYWORD_CHANGE:
return prepareEdit(arguments);
case UndoCommand.COMMAND_WORD:
return new UndoCommand();
case RelocateCommand.COMMAND_WORD:
return new RelocateCommand(arguments);
case RedoCommand.COMMAND_WORD:
return new RedoCommand();
case UnmarkCommand.COMMAND_WORD:
return prepareUnmark(arguments, currentTab);
case ExitCommand.COMMAND_WORD:
return new ExitCommand();
case HelpCommand.COMMAND_WORD:
return new HelpCommand();
default:
return new IncorrectCommand(MESSAGE_UNKNOWN_COMMAND+": "+userInput);
}
}
/**
* Parses arguments in the context of the add task command.
*
* @param args
* full command args string
* @return the prepared command
*/
// @@author A0138862W
private Command prepareAdd(String args) {
final Matcher matcher = AddCommand.COMMAND_ARGUMENTS_PATTERN.matcher(args);
// Validate arg string format
if (!matcher.matches()) {
return new IncorrectCommand(String.format(MESSAGE_INVALID_COMMAND_FORMAT, AddCommand.MESSAGE_EXAMPLES));
}
try {
// mandatory
// there's no need to check for existence as the regex only captures a full match of the mandatory components
final String name = matcher.group("name");
// optionals
final Optional<String> recur = Optional.ofNullable(matcher.group("recur"));
final Optional<String> dates = Optional.ofNullable(matcher.group("dates"));
Optional<String> startDate = Optional.empty();
Optional<String> endDate = Optional.empty();
final Optional<String> tags = Optional.ofNullable(matcher.group("tags"));
if(dates.isPresent()){
PrettyTimeParser ptp = new PrettyTimeParser();
List<DateGroup> dateGroups = ptp.parseSyntax(dates.get());
if(!dateGroups.isEmpty()){
List<Date> startEndDates = dateGroups.get(0).getDates();
if(startEndDates.size() == 1){ // only 1 date is found, assume deadline
startDate = Optional.empty();
endDate = Optional.ofNullable(startEndDates.get(0).toString());
}else if(startEndDates.size() == 2){ // 2 date values are found, assume event
startDate = Optional.ofNullable(startEndDates.get(0).toString());
endDate = Optional.ofNullable(startEndDates.get(1).toString());
}
}
}
// return internal value if present. else, return empty string
Set<String> tagSet = getTagsFromArgs(tags.map(val -> val).orElse(""));
String recurVal = null;
//check if recur has a valid keyword
if (recur.isPresent()) {
recurVal = recur.get();
}
if (startDate.isPresent() && endDate.isPresent()) {
// event
String start = startDate.get().toLowerCase();
String end = endDate.get().toLowerCase();
if (start.equals("today")) {
start += " 2359";
}else if (start.equals("tomorrow")) {
start += " 2359";
}
if (end.equals("today")) {
end += " 2359";
}else if (end.equals("tomorrow")) {
end += " 2359";
}
try {
return new AddCommand(name, start, end, tagSet, recurVal);
} catch (InvalidEventDateException iede) {
return new IncorrectCommand(iede.getMessage());
}
} else if (!startDate.isPresent() && endDate.isPresent()) {
// deadline
String end = endDate.get().toLowerCase();
if (end.equals("today")) {
end += " 2359";
}else if (end.equals("tomorrow")) {
end += " 2359";
}
return new AddCommand(name, end, tagSet, recurVal);
} else if (startDate.isPresent() && !endDate.isPresent()) {
// task with only startdate is not supported.
throw new IllegalValueException("Cannot create a task with only start date.");
} else {
// floating
return new AddCommand(name, tagSet);
}
} catch (IllegalValueException ive) {
return new IncorrectCommand(ive.getMessage());
}
}
/**
* Parses arguments in the context of the edit task command.
*
* @param args
* full command args string
* @return the prepared command
*/
// @@author A0138862W
private Command prepareEdit(String args) {
final Matcher matcher = EditCommand.COMMAND_ARGUMENTS_PATTERN.matcher(args.trim());
// Validate arg string format
if (!matcher.matches()) {
return new IncorrectCommand(String.format(MESSAGE_INVALID_COMMAND_FORMAT, EditCommand.MESSAGE_USAGE));
}
try {
// mandatory
// regex accepts only numbers in the index field, so encountering a NumberFormatException is impossible
final int index = Integer.parseInt(matcher.group("index"));
//optional
final Optional<String> recur = Optional.ofNullable(matcher.group("recur"));
final Optional<String> name = Optional.ofNullable(matcher.group("name"));
final Optional<String> startDate = Optional.ofNullable(matcher.group("startDate"));
final Optional<String> endDate = Optional.ofNullable(matcher.group("endDate"));
final Optional<String> tags = Optional.ofNullable(matcher.group("tags"));
Optional<Set<String>> tagSet = Optional.empty();
if(tags.isPresent()){
tagSet = Optional.ofNullable(getTagsFromArgs(tags.get()));
};
return new EditCommand(index, name, startDate, endDate, tagSet, recur);
} catch (IllegalValueException ive) {
return new IncorrectCommand(ive.getMessage());
} catch (ParseException pe) {
return new IncorrectCommand(pe.getMessage());
}
}
// @@author
/**
* Extracts the new task's tags from the add command's tag arguments string.
* Merges duplicate tag strings.
*/
private static Set<String> getTagsFromArgs(String tagArguments) throws IllegalValueException {
// no tags
if (Strings.isNullOrEmpty(tagArguments)) {
return Collections.emptySet();
}
// replace first delimiter prefix, then split
final Collection<String> tagStrings = Arrays.asList(tagArguments.split(","));
return new HashSet<>(tagStrings);
}
/**
* Parses arguments in the context of the delete task command.
*
* @param args
* full command args string
* @return the prepared command
*/
private Command prepareDelete(String args) {
Optional<Integer> index = parseIndex(args);
if (!index.isPresent()) {
return new IncorrectCommand(String.format(MESSAGE_INVALID_COMMAND_FORMAT, DeleteCommand.MESSAGE_USAGE));
}
Command result = new DeleteCommand(index.get());
return result;
}
/**
* Parses arguments in the context of the mark task command.
*
* @param args
* full command args string
* @return the prepared command
*/
//@@author A0124797R
private Command prepareMark(String args, String currentTab) {
Optional<Integer> index = parseIndex(args);
if (!index.isPresent()) {
return new IncorrectCommand(String.format(MESSAGE_INVALID_COMMAND_FORMAT, MarkCommand.MESSAGE_USAGE));
}
return new MarkCommand(index.get(), currentTab);
}
/**
* Parses arguments in the context of the list task command.
*
* @param args
* full command args string
* @return the prepared command
*/
//@@author A0124797R
private Command prepareList(String args) {
Optional<String> type = parseType(args);
if (!type.isPresent()) {
return new ListCommand();
}else {
if (type.get().equals(ModelManager.TAB_TASKS.toLowerCase()) ||
type.get().equals(ModelManager.TAB_EVENTS.toLowerCase()) ||
type.get().equals(ModelManager.TAB_DEADLINES.toLowerCase()) ||
type.get().equals(ModelManager.TAB_ARCHIVES.toLowerCase())) {
return new ListCommand(type);
}else {
return new IncorrectCommand(String.format(MESSAGE_INVALID_COMMAND_FORMAT, ListCommand.MESSAGE_USAGE));
}
}
}
/**
* Parses arguments in the context of the unmark task command.
*
* @param args
* full command args string
* @return the prepared command
*/
//@@author A0124797R
private Command prepareUnmark(String args, String currentTab) {
if (!currentTab.equals(TAB_ARCHIVES)) {
return new IncorrectCommand(UnmarkCommand.MESSAGE_UNMARK_TASK_FAILURE);
}
Optional<Integer> index = parseIndex(args);
if (!index.isPresent()) {
return new IncorrectCommand(String.format(MESSAGE_INVALID_COMMAND_FORMAT, UnmarkCommand.MESSAGE_USAGE));
}
return new UnmarkCommand(index.get());
}
/**
* Returns the specified index in the {@code command} IF a positive unsigned
* integer is given as the index. Returns an {@code Optional.empty()}
* otherwise.
*/
private Optional<Integer> parseIndex(String command) {
final Matcher matcher = TASK_INDEX_ARGS_FORMAT.matcher(command.trim());
if (!matcher.matches()) {
return Optional.empty();
}
String index = matcher.group("targetIndex");
if (!StringUtil.isUnsignedInteger(index)) {
return Optional.empty();
}
return Optional.of(Integer.parseInt(index));
}
/**
* Checks whether a type argument (e.g. archive) was supplied to the list command
*/
private Optional<String> parseType(String command) {
final Matcher matcher = TASK_ARCHIVE_ARGS_FORMAT.matcher(command.trim());
if (!matcher.matches()) {
return Optional.empty();
}
String type = matcher.group("type").toLowerCase();
return Optional.of(type);
}
private Optional<String> parseUpcoming(String command) {
if (command.isEmpty()) {
return Optional.of("empty");
}
final Matcher matcher = UpcomingCommand.COMMAND_ARGUMENTS_PATTERN.matcher(command.trim());
if (!matcher.matches()) {
return Optional.empty();
}
String type = matcher.group("taskType").toLowerCase();
return Optional.of(type);
}
/**
* Parses arguments in the context of the find task command.
*
* @param args
* full command args string
* @return the prepared command
*/
private Command prepareFind(String args) {
final Matcher matcher = KEYWORDS_ARGS_FORMAT.matcher(args.trim());
if (!matcher.matches()) {
return new IncorrectCommand(String.format(MESSAGE_INVALID_COMMAND_FORMAT, FindCommand.MESSAGE_USAGE));
}
// keywords delimited by whitespace
final String[] keywords = matcher.group("keywords").split("\\s+");
final Set<String> keywordSet = new HashSet<>(Arrays.asList(keywords));
return new FindCommand(keywordSet);
}
/**
* Parses arguments in the context of the upcoming task command.
*
* @param args
* full command args string
* @return the prepared command
*/
//@@author A0124797R
private Command prepareUpcoming(String args) {
Optional<String> taskType = parseUpcoming(args);
if (taskType.isPresent()) {
return new UpcomingCommand(taskType.get());
} else {
return new IncorrectCommand(String.format(MESSAGE_INVALID_COMMAND_FORMAT, UpcomingCommand.MESSAGE_USAGE));
}
}
/**
* Parses arguments in the context of the find tag command.
*
* @param args
* full command args string
* @return the prepared command
*/
private Command prepareFindTag(String args) {
final Matcher matcher = KEYWORDS_ARGS_FORMAT.matcher(args.trim());
if (!matcher.matches()) {
return new IncorrectCommand(String.format(MESSAGE_INVALID_COMMAND_FORMAT, FindCommand.MESSAGE_USAGE));
}
// keywords delimited by whitespace
final String[] keywords = matcher.group("keywords").split("\\s+");
final Set<Tag> tagSet = new HashSet<>();
try {
for (String tagName : keywords) {
tagSet.add(new Tag(tagName));
}
return new FindTagCommand(tagSet);
} catch (IllegalValueException ive) {
return new IncorrectCommand(ive.getMessage());
}
}
}
|
Fix extra space by trim()
|
src/main/java/harmony/mastermind/logic/parser/Parser.java
|
Fix extra space by trim()
|
<ide><path>rc/main/java/harmony/mastermind/logic/parser/Parser.java
<ide> */
<ide> // @@author A0138862W
<ide> private Command prepareAdd(String args) {
<del> final Matcher matcher = AddCommand.COMMAND_ARGUMENTS_PATTERN.matcher(args);
<add> final Matcher matcher = AddCommand.COMMAND_ARGUMENTS_PATTERN.matcher(args.trim());
<ide>
<ide> // Validate arg string format
<ide> if (!matcher.matches()) {
|
|
Java
|
epl-1.0
|
58a1f16f113465ebbfeb7db451a30663d7e60362
| 0 |
rohitmohan96/ceylon-ide-eclipse,rohitmohan96/ceylon-ide-eclipse
|
package com.redhat.ceylon.eclipse.code.refactor;
import static com.redhat.ceylon.eclipse.util.Nodes.getReferencedExplicitDeclaration;
import static org.eclipse.ltk.core.refactoring.RefactoringStatus.createWarningStatus;
import java.util.ArrayList;
import java.util.List;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.IPath;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.OperationCanceledException;
import org.eclipse.jface.text.Region;
import org.eclipse.ltk.core.refactoring.CompositeChange;
import org.eclipse.ltk.core.refactoring.DocumentChange;
import org.eclipse.ltk.core.refactoring.RefactoringStatus;
import org.eclipse.ltk.core.refactoring.TextChange;
import org.eclipse.ltk.core.refactoring.TextFileChange;
import org.eclipse.ltk.core.refactoring.resource.RenameResourceChange;
import org.eclipse.text.edits.MultiTextEdit;
import org.eclipse.text.edits.ReplaceEdit;
import org.eclipse.ui.IEditorPart;
import com.redhat.ceylon.compiler.typechecker.context.PhasedUnit;
import com.redhat.ceylon.compiler.typechecker.model.Declaration;
import com.redhat.ceylon.compiler.typechecker.model.Referenceable;
import com.redhat.ceylon.compiler.typechecker.tree.Node;
import com.redhat.ceylon.compiler.typechecker.tree.Tree;
import com.redhat.ceylon.compiler.typechecker.tree.Tree.DocLink;
import com.redhat.ceylon.compiler.typechecker.tree.Tree.SpecifierStatement;
import com.redhat.ceylon.compiler.typechecker.tree.Visitor;
import com.redhat.ceylon.eclipse.util.FindReferencesVisitor;
import com.redhat.ceylon.eclipse.util.FindRefinementsVisitor;
import com.redhat.ceylon.eclipse.util.Nodes;
public class RenameRefactoring extends AbstractRefactoring {
private static class FindRenamedReferencesVisitor
extends FindReferencesVisitor {
private FindRenamedReferencesVisitor(Declaration declaration) {
super(declaration);
}
@Override
protected boolean isReference(Declaration ref) {
return super.isReference(ref) ||
ref!=null && ref.refines((Declaration)getDeclaration());
}
@Override
protected boolean isReference(Declaration ref, String id) {
return isReference(ref) && id!=null &&
getDeclaration().getNameAsString().equals(id); //TODO: really lame way to tell if it's an alias!
}
}
private static class FindDocLinkReferencesVisitor extends Visitor {
private Declaration declaration;
int count;
FindDocLinkReferencesVisitor(Declaration declaration) {
this.declaration = declaration;
}
@Override
public void visit(DocLink that) {
if (that.getBase()!=null) {
if (that.getBase().equals(declaration)) {
count++;
}
else if (that.getQualified()!=null) {
if (that.getQualified().contains(declaration)) {
count++;
}
}
}
}
}
private String newName;
private final Declaration declaration;
private boolean renameFile;
public Node getNode() {
return node;
}
public RenameRefactoring(IEditorPart editor) {
super(editor);
if (rootNode!=null) {
Referenceable refDec =
getReferencedExplicitDeclaration(node, rootNode);
if (refDec instanceof Declaration) {
declaration = ((Declaration) refDec).getRefinedDeclaration();
newName = declaration.getName();
String filename = declaration.getUnit().getFilename();
renameFile = (declaration.getName()+".ceylon").equals(filename);
}
else {
declaration = null;
}
}
else {
declaration = null;
}
}
@Override
public boolean isEnabled() {
return declaration instanceof Declaration &&
project != null &&
inSameProject(declaration);
}
public int getCount() {
return declaration==null ? 0 : countDeclarationOccurrences();
}
@Override
int countReferences(Tree.CompilationUnit cu) {
FindRenamedReferencesVisitor frv =
new FindRenamedReferencesVisitor(declaration);
Declaration dec = (Declaration) frv.getDeclaration();
FindRefinementsVisitor fdv =
new FindRefinementsVisitor(dec);
FindDocLinkReferencesVisitor fdlrv =
new FindDocLinkReferencesVisitor(dec);
cu.visit(frv);
cu.visit(fdv);
cu.visit(fdlrv);
return frv.getNodes().size() +
fdv.getDeclarationNodes().size() +
fdlrv.count;
}
public String getName() {
return "Rename";
}
public RefactoringStatus checkInitialConditions(IProgressMonitor pm)
throws CoreException, OperationCanceledException {
// Check parameters retrieved from editor context
return new RefactoringStatus();
}
public RefactoringStatus checkFinalConditions(IProgressMonitor pm)
throws CoreException, OperationCanceledException {
Declaration existing = declaration.getContainer()
.getMemberOrParameter(declaration.getUnit(),
newName, null, false);
if (null!=existing && !existing.equals(declaration)) {
return createWarningStatus("An existing declaration named '" +
newName + "' already exists in the same scope");
}
return new RefactoringStatus();
}
public CompositeChange createChange(IProgressMonitor pm)
throws CoreException, OperationCanceledException {
List<PhasedUnit> units = getAllUnits();
pm.beginTask(getName(), units.size());
CompositeChange cc = new CompositeChange(getName());
int i=0;
for (PhasedUnit pu: units) {
if (searchInFile(pu)) {
TextFileChange tfc = newTextFileChange(pu);
renameInFile(tfc, cc, pu.getCompilationUnit());
pm.worked(i++);
}
}
if (searchInEditor()) {
DocumentChange dc = newDocumentChange();
renameInFile(dc, cc, editor.getParseController().getRootNode());
pm.worked(i++);
}
if (project!=null && renameFile) {
IPath oldPath = project.getFullPath()
.append(declaration.getUnit().getFullPath());
String newFileName = getNewName() + ".ceylon";
IPath newPath = oldPath.removeFirstSegments(1).removeLastSegments(1)
.append(newFileName);
if (!project.getFile(newPath).exists()) {
cc.add(new RenameResourceChange(oldPath, newFileName));
}
}
pm.done();
return cc;
}
private void renameInFile(TextChange tfc, CompositeChange cc,
Tree.CompilationUnit root) {
tfc.setEdit(new MultiTextEdit());
if (declaration!=null) {
for (Node node: getNodesToRename(root)) {
renameNode(tfc, node, root);
}
for (Region region: getStringsToReplace(root)) {
renameRegion(tfc, region, root);
}
}
if (tfc.getEdit().hasChildren()) {
cc.add(tfc);
}
}
public List<Node> getNodesToRename(Tree.CompilationUnit root) {
ArrayList<Node> list = new ArrayList<Node>();
FindRenamedReferencesVisitor frv =
new FindRenamedReferencesVisitor(declaration);
root.visit(frv);
list.addAll(frv.getNodes());
FindRefinementsVisitor fdv =
new FindRefinementsVisitor((Declaration)frv.getDeclaration()) {
@Override
public void visit(SpecifierStatement that) {}
};
root.visit(fdv);
list.addAll(fdv.getDeclarationNodes());
return list;
}
public List<Region> getStringsToReplace(Tree.CompilationUnit root) {
final List<Region> result = new ArrayList<Region>();
new Visitor() {
private void visitIt(String name, int offset, Declaration dec) {
if (dec!=null && dec.equals(declaration)) {
result.add(new Region(offset, name.length()));
}
}
@Override
public void visit(Tree.DocLink that) {
String text = that.getText();
Integer offset = that.getStartIndex();
int pipeIndex = text.indexOf("|");
if (pipeIndex > -1) {
text = text.substring(pipeIndex + 1);
offset += pipeIndex + 1;
}
int scopeIndex = text.indexOf("::");
int start = scopeIndex<0 ? 0 : scopeIndex+2;
Declaration base = that.getBase();
if (base!=null) {
int index = text.indexOf('.', start);
String name = index<0 ?
text.substring(start) :
text.substring(start, index);
visitIt(name, offset+start, base);
start = index+1;
int i=0;
List<Declaration> qualified = that.getQualified();
if (qualified!=null) {
while (start>0 && i<qualified.size()) {
index = text.indexOf('.', start);
name = index<0 ?
text.substring(start) :
text.substring(start, index);
visitIt(name, offset+start, qualified.get(i++));
start = index+1;
}
}
}
}
}.visit(root);
return result;
}
protected void renameRegion(TextChange tfc, Region region,
Tree.CompilationUnit root) {
tfc.addEdit(new ReplaceEdit(region.getOffset(),
region.getLength(), newName));
}
protected void renameNode(TextChange tfc, Node node,
Tree.CompilationUnit root) {
Node identifyingNode = Nodes.getIdentifyingNode(node);
tfc.addEdit(new ReplaceEdit(identifyingNode.getStartIndex(),
identifyingNode.getText().length(), newName));
}
public void setNewName(String text) {
newName = text;
}
public Declaration getDeclaration() {
return declaration;
}
public String getNewName() {
return newName;
}
public boolean isRenameFile() {
return renameFile;
}
public void setRenameFile(boolean renameFile) {
this.renameFile = renameFile;
}
}
|
plugins/com.redhat.ceylon.eclipse.ui/src/com/redhat/ceylon/eclipse/code/refactor/RenameRefactoring.java
|
package com.redhat.ceylon.eclipse.code.refactor;
import static com.redhat.ceylon.eclipse.util.Nodes.getReferencedExplicitDeclaration;
import static org.eclipse.ltk.core.refactoring.RefactoringStatus.createWarningStatus;
import java.util.ArrayList;
import java.util.List;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.IPath;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.OperationCanceledException;
import org.eclipse.jface.text.Region;
import org.eclipse.ltk.core.refactoring.CompositeChange;
import org.eclipse.ltk.core.refactoring.DocumentChange;
import org.eclipse.ltk.core.refactoring.RefactoringStatus;
import org.eclipse.ltk.core.refactoring.TextChange;
import org.eclipse.ltk.core.refactoring.TextFileChange;
import org.eclipse.ltk.core.refactoring.resource.RenameResourceChange;
import org.eclipse.text.edits.MultiTextEdit;
import org.eclipse.text.edits.ReplaceEdit;
import org.eclipse.ui.IEditorPart;
import com.redhat.ceylon.compiler.typechecker.context.PhasedUnit;
import com.redhat.ceylon.compiler.typechecker.model.Declaration;
import com.redhat.ceylon.compiler.typechecker.model.Referenceable;
import com.redhat.ceylon.compiler.typechecker.tree.Node;
import com.redhat.ceylon.compiler.typechecker.tree.Tree;
import com.redhat.ceylon.compiler.typechecker.tree.Tree.DocLink;
import com.redhat.ceylon.compiler.typechecker.tree.Visitor;
import com.redhat.ceylon.eclipse.util.FindReferencesVisitor;
import com.redhat.ceylon.eclipse.util.FindRefinementsVisitor;
import com.redhat.ceylon.eclipse.util.Nodes;
public class RenameRefactoring extends AbstractRefactoring {
private static class FindRenamedReferencesVisitor
extends FindReferencesVisitor {
private FindRenamedReferencesVisitor(Declaration declaration) {
super(declaration);
}
@Override
protected boolean isReference(Declaration ref) {
return super.isReference(ref) ||
ref!=null && ref.refines((Declaration)getDeclaration());
}
@Override
protected boolean isReference(Declaration ref, String id) {
return isReference(ref) && id!=null &&
getDeclaration().getNameAsString().equals(id); //TODO: really lame way to tell if it's an alias!
}
}
private static class FindDocLinkReferencesVisitor extends Visitor {
private Declaration declaration;
int count;
FindDocLinkReferencesVisitor(Declaration declaration) {
this.declaration = declaration;
}
@Override
public void visit(DocLink that) {
if (that.getBase()!=null) {
if (that.getBase().equals(declaration)) {
count++;
}
else if (that.getQualified()!=null) {
if (that.getQualified().contains(declaration)) {
count++;
}
}
}
}
}
private String newName;
private final Declaration declaration;
private boolean renameFile;
public Node getNode() {
return node;
}
public RenameRefactoring(IEditorPart editor) {
super(editor);
if (rootNode!=null) {
Referenceable refDec =
getReferencedExplicitDeclaration(node, rootNode);
if (refDec instanceof Declaration) {
declaration = ((Declaration) refDec).getRefinedDeclaration();
newName = declaration.getName();
String filename = declaration.getUnit().getFilename();
renameFile = (declaration.getName()+".ceylon").equals(filename);
}
else {
declaration = null;
}
}
else {
declaration = null;
}
}
@Override
public boolean isEnabled() {
return declaration instanceof Declaration &&
project != null &&
inSameProject(declaration);
}
public int getCount() {
return declaration==null ? 0 : countDeclarationOccurrences();
}
@Override
int countReferences(Tree.CompilationUnit cu) {
FindRenamedReferencesVisitor frv =
new FindRenamedReferencesVisitor(declaration);
Declaration dec = (Declaration) frv.getDeclaration();
FindRefinementsVisitor fdv =
new FindRefinementsVisitor(dec);
FindDocLinkReferencesVisitor fdlrv =
new FindDocLinkReferencesVisitor(dec);
cu.visit(frv);
cu.visit(fdv);
cu.visit(fdlrv);
return frv.getNodes().size() +
fdv.getDeclarationNodes().size() +
fdlrv.count;
}
public String getName() {
return "Rename";
}
public RefactoringStatus checkInitialConditions(IProgressMonitor pm)
throws CoreException, OperationCanceledException {
// Check parameters retrieved from editor context
return new RefactoringStatus();
}
public RefactoringStatus checkFinalConditions(IProgressMonitor pm)
throws CoreException, OperationCanceledException {
Declaration existing = declaration.getContainer()
.getMemberOrParameter(declaration.getUnit(),
newName, null, false);
if (null!=existing && !existing.equals(declaration)) {
return createWarningStatus("An existing declaration named '" +
newName + "' already exists in the same scope");
}
return new RefactoringStatus();
}
public CompositeChange createChange(IProgressMonitor pm)
throws CoreException, OperationCanceledException {
List<PhasedUnit> units = getAllUnits();
pm.beginTask(getName(), units.size());
CompositeChange cc = new CompositeChange(getName());
int i=0;
for (PhasedUnit pu: units) {
if (searchInFile(pu)) {
TextFileChange tfc = newTextFileChange(pu);
renameInFile(tfc, cc, pu.getCompilationUnit());
pm.worked(i++);
}
}
if (searchInEditor()) {
DocumentChange dc = newDocumentChange();
renameInFile(dc, cc, editor.getParseController().getRootNode());
pm.worked(i++);
}
if (project!=null && renameFile) {
IPath oldPath = project.getFullPath()
.append(declaration.getUnit().getFullPath());
String newFileName = getNewName() + ".ceylon";
IPath newPath = oldPath.removeFirstSegments(1).removeLastSegments(1)
.append(newFileName);
if (!project.getFile(newPath).exists()) {
cc.add(new RenameResourceChange(oldPath, newFileName));
}
}
pm.done();
return cc;
}
private void renameInFile(TextChange tfc, CompositeChange cc,
Tree.CompilationUnit root) {
tfc.setEdit(new MultiTextEdit());
if (declaration!=null) {
for (Node node: getNodesToRename(root)) {
renameNode(tfc, node, root);
}
for (Region region: getStringsToReplace(root)) {
renameRegion(tfc, region, root);
}
}
if (tfc.getEdit().hasChildren()) {
cc.add(tfc);
}
}
public List<Node> getNodesToRename(Tree.CompilationUnit root) {
ArrayList<Node> list = new ArrayList<Node>();
FindRenamedReferencesVisitor frv =
new FindRenamedReferencesVisitor(declaration);
root.visit(frv);
list.addAll(frv.getNodes());
FindRefinementsVisitor fdv =
new FindRefinementsVisitor((Declaration)frv.getDeclaration());
root.visit(fdv);
list.addAll(fdv.getDeclarationNodes());
return list;
}
public List<Region> getStringsToReplace(Tree.CompilationUnit root) {
final List<Region> result = new ArrayList<Region>();
new Visitor() {
private void visitIt(String name, int offset, Declaration dec) {
if (dec!=null && dec.equals(declaration)) {
result.add(new Region(offset, name.length()));
}
}
@Override
public void visit(Tree.DocLink that) {
String text = that.getText();
Integer offset = that.getStartIndex();
int pipeIndex = text.indexOf("|");
if (pipeIndex > -1) {
text = text.substring(pipeIndex + 1);
offset += pipeIndex + 1;
}
int scopeIndex = text.indexOf("::");
int start = scopeIndex<0 ? 0 : scopeIndex+2;
Declaration base = that.getBase();
if (base!=null) {
int index = text.indexOf('.', start);
String name = index<0 ?
text.substring(start) :
text.substring(start, index);
visitIt(name, offset+start, base);
start = index+1;
int i=0;
List<Declaration> qualified = that.getQualified();
if (qualified!=null) {
while (start>0 && i<qualified.size()) {
index = text.indexOf('.', start);
name = index<0 ?
text.substring(start) :
text.substring(start, index);
visitIt(name, offset+start, qualified.get(i++));
start = index+1;
}
}
}
}
}.visit(root);
return result;
}
protected void renameRegion(TextChange tfc, Region region,
Tree.CompilationUnit root) {
tfc.addEdit(new ReplaceEdit(region.getOffset(),
region.getLength(), newName));
}
protected void renameNode(TextChange tfc, Node node,
Tree.CompilationUnit root) {
Node identifyingNode = Nodes.getIdentifyingNode(node);
tfc.addEdit(new ReplaceEdit(identifyingNode.getStartIndex(),
identifyingNode.getText().length(), newName));
}
public void setNewName(String text) {
newName = text;
}
public Declaration getDeclaration() {
return declaration;
}
public String getNewName() {
return newName;
}
public boolean isRenameFile() {
return renameFile;
}
public void setRenameFile(boolean renameFile) {
this.renameFile = renameFile;
}
}
|
fix #1020
|
plugins/com.redhat.ceylon.eclipse.ui/src/com/redhat/ceylon/eclipse/code/refactor/RenameRefactoring.java
|
fix #1020
|
<ide><path>lugins/com.redhat.ceylon.eclipse.ui/src/com/redhat/ceylon/eclipse/code/refactor/RenameRefactoring.java
<ide> import com.redhat.ceylon.compiler.typechecker.tree.Node;
<ide> import com.redhat.ceylon.compiler.typechecker.tree.Tree;
<ide> import com.redhat.ceylon.compiler.typechecker.tree.Tree.DocLink;
<add>import com.redhat.ceylon.compiler.typechecker.tree.Tree.SpecifierStatement;
<ide> import com.redhat.ceylon.compiler.typechecker.tree.Visitor;
<ide> import com.redhat.ceylon.eclipse.util.FindReferencesVisitor;
<ide> import com.redhat.ceylon.eclipse.util.FindRefinementsVisitor;
<ide> root.visit(frv);
<ide> list.addAll(frv.getNodes());
<ide> FindRefinementsVisitor fdv =
<del> new FindRefinementsVisitor((Declaration)frv.getDeclaration());
<add> new FindRefinementsVisitor((Declaration)frv.getDeclaration()) {
<add> @Override
<add> public void visit(SpecifierStatement that) {}
<add> };
<ide> root.visit(fdv);
<ide> list.addAll(fdv.getDeclarationNodes());
<ide> return list;
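The fix above works by overriding one visit method with an empty body in an anonymous subclass, so refinements expressed as specifier statements are no longer collected for renaming. A generic, self-contained sketch of that pattern — the node and visitor types are hypothetical, not the Ceylon typechecker classes:
import java.util.ArrayList;
import java.util.List;
public class SkipSpecifierDemo {
    static class DeclarationNode {}
    static class SpecifierNode {}
    // Base visitor collects every node it visits.
    static class CollectingVisitor {
        final List<Object> collected = new ArrayList<>();
        void visit(DeclarationNode n) { collected.add(n); }
        void visit(SpecifierNode n) { collected.add(n); }
    }
    public static void main(String[] args) {
        // Anonymous subclass: one visit method is overridden with an empty body,
        // so that whole category of nodes is skipped while everything else is kept.
        CollectingVisitor visitor = new CollectingVisitor() {
            @Override
            void visit(SpecifierNode n) { /* intentionally empty: skip specifier statements */ }
        };
        visitor.visit(new DeclarationNode());
        visitor.visit(new SpecifierNode());
        System.out.println(visitor.collected.size()); // 1 - only the declaration was collected
    }
}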
|
|
Java
|
bsd-3-clause
|
dbfd0689b9b84d12343cde3dab97cbe9bd9c6da2
| 0 |
NCIP/caadapter,NCIP/caadapter,NCIP/caadapter
|
/**
* <!-- LICENSE_TEXT_START -->
* $Header: /share/content/gforge/caadapter/caadapter/components/userInterface/src/gov/nih/nci/caadapter/ui/common/resource/BuildHL7ResourceDialog.java,v 1.3 2007-10-03 16:21:49 wangeug Exp $
*
* ******************************************************************
* COPYRIGHT NOTICE
* ******************************************************************
*
* The caAdapter Software License, Version 1.3
* Copyright Notice.
*
* Copyright 2006 SAIC. This software was developed in conjunction with the National Cancer Institute. To the extent government employees are co-authors, any rights in such works are subject to Title 17 of the United States Code, section 105.
*
* Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the Copyright Notice above, this list of conditions, and the disclaimer of Article 3, below. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
*
* 2. The end-user documentation included with the redistribution, if any, must include the following acknowledgment:
*
*
* "This product includes software developed by the SAIC and the National Cancer Institute."
*
*
* If no such end-user documentation is to be included, this acknowledgment shall appear in the software itself, wherever such third-party acknowledgments normally appear.
*
* 3. The names "The National Cancer Institute", "NCI" and "SAIC" must not be used to endorse or promote products derived from this software.
*
* 4. This license does not authorize the incorporation of this software into any third party proprietary programs. This license does not authorize the recipient to use any trademarks owned by either NCI or SAIC-Frederick.
*
* 5. THIS SOFTWARE IS PROVIDED "AS IS," AND ANY EXPRESSED OR IMPLIED WARRANTIES, (INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE) ARE DISCLAIMED. IN NO EVENT SHALL THE UNITED STATES GOVERNMENT, THE NATIONAL CANCER INSTITUTE, SAIC, OR THEIR AFFILIATES BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
* <!-- LICENSE_TEXT_END -->
*/
package gov.nih.nci.caadapter.ui.common.resource;
import gov.nih.nci.caadapter.hl7.mif.v1.BuildResourceUtil;
import gov.nih.nci.caadapter.ui.common.DefaultSettings;
import javax.swing.*;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.io.File;
/**
* This class is the main entry class of message wizard to collect user's inputs.
* @author OWNER: Scott Jiang
* @author LAST UPDATE $Author: wangeug $
* @version Since caAdapter v1.2
* revision $Revision: 1.3 $
* date $Date: 2007-10-03 16:21:49 $
*/
public class BuildHL7ResourceDialog extends JDialog implements ActionListener
{
/**
* Logging constant used to identify source of log entry, that could be later used to create
* logging mechanism to uniquely identify the logged class.
*/
private static final String LOGID = "$RCSfile: BuildHL7ResourceDialog.java,v $";
/**
* String that identifies the class version and solves the serial version UID problem.
* This String is for informational purposes only and MUST not be made final.
*
* @see <a href="http://www.visi.com/~gyles19/cgi-bin/fom.cgi?file=63">JBuilder vice javac serial version UID</a>
*/
public static String RCSID = "$Header: /share/content/gforge/caadapter/caadapter/components/userInterface/src/gov/nih/nci/caadapter/ui/common/resource/BuildHL7ResourceDialog.java,v 1.3 2007-10-03 16:21:49 wangeug Exp $";
private static final String OK_COMMAND = "OK";
private static final String CANCEL_COMMAND = "Cancel";
private OpenHL7ResourceFrontPage frontPage;
public BuildHL7ResourceDialog(Frame owner, String title, boolean modal, String resourceSite) throws HeadlessException
{
super(owner, title, modal);
initialize(title, resourceSite);
DefaultSettings.centerWindow(this);
}
private void initialize(String title,String resourceSite)
{
Container contentPane = getContentPane();
contentPane.setLayout(new BorderLayout());
frontPage = new OpenHL7ResourceFrontPage((JFrame)getOwner(),title,resourceSite);
contentPane.add(frontPage, BorderLayout.CENTER);
JPanel southPanel = new JPanel(new BorderLayout());
JPanel buttonPanel = new JPanel(new FlowLayout(FlowLayout.TRAILING));//new BorderLayout());
JButton okButton = new JButton(OK_COMMAND);
okButton.setMnemonic('O');
okButton.addActionListener(this);
JButton cancelButton = new JButton(CANCEL_COMMAND);
cancelButton.setMnemonic('C');
cancelButton.addActionListener(this);
JPanel tempPanel = new JPanel(new GridLayout(1, 2));
tempPanel.add(okButton);
tempPanel.add(cancelButton);
buttonPanel.add(tempPanel);//, BorderLayout.EAST);
southPanel.add(buttonPanel, BorderLayout.NORTH);
contentPane.add(southPanel, BorderLayout.SOUTH);
pack();
}
private void updateMonitor(ProgressMonitor monitor,int step,String note)
{
monitor.setProgress(step);
monitor.setNote(note);
// System.out.println("BuildHL7ResourceDialog.updateMonitor()..."+monitor.getNote());
}
private String buildHL7V3Resource(final String resourceHome, final String targetSite)
{
String rtnMsg="Failed to build HL7 V3 resource";
final ProgressMonitor monitor=new ProgressMonitor(this.getParent(),
this.getTitle()+"\n","Additional note",0,6);
monitor.setMillisToDecideToPopup(0);
monitor.setMillisToPopup(0);
updateMonitor(monitor, 1, "start");
final JDialog owner=this;
Thread localThread=new Thread
(
new Runnable()
{
public void run()
{
//process mif.zip
try {
String targetHome=targetSite.substring(0,targetSite.lastIndexOf(File.separator));
BuildResourceUtil.RESOURCE_DIR=targetHome+File.separator+"temp";
String mifZipPath=resourceHome+"/processable/mif/mif.zip";
int stepCount=0;
updateMonitor(monitor, stepCount++, "Serialize MIF files");
BuildResourceUtil.parserMifFromZipFile(mifZipPath);
updateMonitor(monitor, stepCount++, "Create message index");
BuildResourceUtil.parerMifIndexFromZipFile(mifZipPath);
//process "core schema"/datatypes
String coreSchemaSrcHome=resourceHome+File.separator+"processable"+File.separator+"coreschemas";
updateMonitor(monitor, stepCount++, "Serialize datatype files");
BuildResourceUtil.loadDatatypes(coreSchemaSrcHome);
updateMonitor(monitor, stepCount++, "Create ZIP");
BuildResourceUtil.zipDir(targetSite,BuildResourceUtil.RESOURCE_DIR);
//copy schema:
//find parentDir of tagetHome../lib/
// System.out.println(".run()..targetSite:"+targetSite);
String schemaHome=targetSite.substring(0,targetSite.lastIndexOf(File.separator));
// System.out.println(".run()..schemaHome:"+schemaHome);
schemaHome=schemaHome.substring(0, schemaHome.lastIndexOf(File.separator));
// System.out.println(".run()..schemaHome:"+schemaHome);
schemaHome=schemaHome+File.separator+"schemas";
updateMonitor(monitor, stepCount++, "Copy coreschema files "+schemaHome+ File.separator+"coreschemas");
BuildResourceUtil.copyFiles(coreSchemaSrcHome, schemaHome+File.separator+"coreschemas", ".xsd");
String mifSchemaSrcHome=resourceHome+File.separator+"processable"+File.separator+"multicacheschemas";
updateMonitor(monitor, stepCount++, "Copy MIF schema files "+schemaHome+File.separator+"multicacheschemas");
BuildResourceUtil.copyFiles(mifSchemaSrcHome, schemaHome+File.separator+"multicacheschemas", "xsd");
monitor.close();
String confirmMsg="HL7 V3 resource being successfully built at: "+targetSite;
JOptionPane.showMessageDialog(owner, confirmMsg,"Success",JOptionPane.DEFAULT_OPTION);
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
// rtnMsg=
String confirmMsg="Failed to build HL7 V3 resource"+":"+e.getMessage();
JOptionPane.showMessageDialog(owner, confirmMsg,"Failed",JOptionPane.DEFAULT_OPTION);
monitor.close();
}
}
}
);
localThread.start();
return rtnMsg;
}
private String buildHL7V2Resource(String resourceHome, String targetSite)
{
return "Buid HL7 V2 resource:Waiting for integration";
}
/**
* Invoked when an action occurs.
*/
public void actionPerformed(ActionEvent e)
{
String command = e.getActionCommand();
if (OK_COMMAND.equals(command))
{
String confirmMsg="ConfirmAction";
if(this.getTitle().equals(BuildHL7ResourceAction.COMMAND_BUILD_V3))
{
String srcHome=frontPage.getSelectFileHome();
String targetSite=frontPage.getTargetSite();
confirmMsg=buildHL7V3Resource(srcHome,targetSite);
}
else if(this.getTitle().equals(BuildHL7ResourceAction.COMMAND_BUILD_V2))
{
confirmMsg=buildHL7V2Resource(frontPage.getSelectFileHome(), frontPage.getTargetSite());
int userReply=JOptionPane.showConfirmDialog(this, confirmMsg,"Confirm",JOptionPane.YES_NO_OPTION);
}
}
setVisible(false);
dispose();
}
}
|
caadapter/components/userInterface/src/gov/nih/nci/caadapter/ui/common/resource/BuildHL7ResourceDialog.java
|
/**
* <!-- LICENSE_TEXT_START -->
* $Header: /share/content/gforge/caadapter/caadapter/components/userInterface/src/gov/nih/nci/caadapter/ui/common/resource/BuildHL7ResourceDialog.java,v 1.2 2007-09-18 18:07:26 wangeug Exp $
*
* ******************************************************************
* COPYRIGHT NOTICE
* ******************************************************************
*
* The caAdapter Software License, Version 1.3
* Copyright Notice.
*
* Copyright 2006 SAIC. This software was developed in conjunction with the National Cancer Institute. To the extent government employees are co-authors, any rights in such works are subject to Title 17 of the United States Code, section 105.
*
* Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the Copyright Notice above, this list of conditions, and the disclaimer of Article 3, below. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
*
* 2. The end-user documentation included with the redistribution, if any, must include the following acknowledgment:
*
*
* "This product includes software developed by the SAIC and the National Cancer Institute."
*
*
* If no such end-user documentation is to be included, this acknowledgment shall appear in the software itself, wherever such third-party acknowledgments normally appear.
*
* 3. The names "The National Cancer Institute", "NCI" and "SAIC" must not be used to endorse or promote products derived from this software.
*
* 4. This license does not authorize the incorporation of this software into any third party proprietary programs. This license does not authorize the recipient to use any trademarks owned by either NCI or SAIC-Frederick.
*
* 5. THIS SOFTWARE IS PROVIDED "AS IS," AND ANY EXPRESSED OR IMPLIED WARRANTIES, (INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE) ARE DISCLAIMED. IN NO EVENT SHALL THE UNITED STATES GOVERNMENT, THE NATIONAL CANCER INSTITUTE, SAIC, OR THEIR AFFILIATES BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
* <!-- LICENSE_TEXT_END -->
*/
package gov.nih.nci.caadapter.ui.common.resource;
import gov.nih.nci.caadapter.hl7.mif.v1.BuildResourceUtil;
import gov.nih.nci.caadapter.ui.common.DefaultSettings;
import javax.swing.*;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.io.File;
/**
* This class is the main entry class of message wizard to collect user's inputs.
* @author OWNER: Scott Jiang
* @author LAST UPDATE $Author: wangeug $
* @version Since caAdapter v1.2
* revision $Revision: 1.2 $
* date $Date: 2007-09-18 18:07:26 $
*/
public class BuildHL7ResourceDialog extends JDialog implements ActionListener
{
/**
* Logging constant used to identify source of log entry, that could be later used to create
* logging mechanism to uniquely identify the logged class.
*/
private static final String LOGID = "$RCSfile: BuildHL7ResourceDialog.java,v $";
/**
* String that identifies the class version and solves the serial version UID problem.
* This String is for informational purposes only and MUST not be made final.
*
* @see <a href="http://www.visi.com/~gyles19/cgi-bin/fom.cgi?file=63">JBuilder vice javac serial version UID</a>
*/
public static String RCSID = "$Header: /share/content/gforge/caadapter/caadapter/components/userInterface/src/gov/nih/nci/caadapter/ui/common/resource/BuildHL7ResourceDialog.java,v 1.2 2007-09-18 18:07:26 wangeug Exp $";
private static final String OK_COMMAND = "OK";
private static final String CANCEL_COMMAND = "Cancel";
private OpenHL7ResourceFrontPage frontPage;
public BuildHL7ResourceDialog(Frame owner, String title, boolean modal, String resourceSite) throws HeadlessException
{
super(owner, title, modal);
initialize(title, resourceSite);
DefaultSettings.centerWindow(this);
}
private void initialize(String title,String resourceSite)
{
Container contentPane = getContentPane();
contentPane.setLayout(new BorderLayout());
frontPage = new OpenHL7ResourceFrontPage((JFrame)getOwner(),title,resourceSite);
contentPane.add(frontPage, BorderLayout.CENTER);
JPanel southPanel = new JPanel(new BorderLayout());
JPanel buttonPanel = new JPanel(new FlowLayout(FlowLayout.TRAILING));//new BorderLayout());
JButton okButton = new JButton(OK_COMMAND);
okButton.setMnemonic('O');
okButton.addActionListener(this);
JButton cancelButton = new JButton(CANCEL_COMMAND);
cancelButton.setMnemonic('C');
cancelButton.addActionListener(this);
JPanel tempPanel = new JPanel(new GridLayout(1, 2));
tempPanel.add(okButton);
tempPanel.add(cancelButton);
buttonPanel.add(tempPanel);//, BorderLayout.EAST);
southPanel.add(buttonPanel, BorderLayout.NORTH);
contentPane.add(southPanel, BorderLayout.SOUTH);
pack();
}
private void updateMonitor(ProgressMonitor monitor,int step,String note)
{
monitor.setProgress(step);
monitor.setNote(note);
// System.out.println("BuildHL7ResourceDialog.updateMonitor()..."+monitor.getNote());
}
private String buildHL7V3Resource(final String resourceHome, final String targetSite)
{
String rtnMsg="Failed to build HL7 V3 resource";
final ProgressMonitor monitor=new ProgressMonitor(this.getParent(),
this.getTitle()+"\n","Additional note",0,6);
monitor.setMillisToDecideToPopup(0);
monitor.setMillisToPopup(0);
updateMonitor(monitor, 1, "start");
final JDialog owner=this;
Thread localThread=new Thread
(
new Runnable()
{
public void run()
{
//process mif.zip
try {
String targetHome=targetSite.substring(0,targetSite.lastIndexOf(File.separator));
BuildResourceUtil.RESOURCE_DIR=targetHome+File.separator+"temp";
String mifZipPath=resourceHome+"/processable/mif/mif.zip";
int stepCount=0;
updateMonitor(monitor, stepCount++, "Serialize MIF files");
BuildResourceUtil.parserMifFromZipFile(mifZipPath);
updateMonitor(monitor, stepCount++, "Create message index");
BuildResourceUtil.parerMifIndexFromZipFile(mifZipPath);
//process "core schema"/datatypes
String coreSchemaSrcHome=resourceHome+File.separator+"processable"+File.separator+"coreschemas";
updateMonitor(monitor, stepCount++, "Serialize datatype files");
BuildResourceUtil.loadDatatypes(coreSchemaSrcHome);
updateMonitor(monitor, stepCount++, "Create ZIP");
BuildResourceUtil.zipDir(targetSite,BuildResourceUtil.RESOURCE_DIR);
//copy schema:
//find parentDir of tagetHome../lib/
// System.out.println(".run()..targetSite:"+targetSite);
String schemaHome=targetSite.substring(0,targetSite.lastIndexOf(File.separator));
// System.out.println(".run()..schemaHome:"+schemaHome);
schemaHome=schemaHome.substring(0, schemaHome.lastIndexOf(File.separator));
// System.out.println(".run()..schemaHome:"+schemaHome);
schemaHome=schemaHome+File.separator+"schemas";
updateMonitor(monitor, stepCount++, "Copy coreschema files "+schemaHome+ File.separator+"coreschemas");
BuildResourceUtil.copyFiles(coreSchemaSrcHome, schemaHome+File.separator+"coreschemas", ".xsd");
String mifSchemaSrcHome=resourceHome+File.separator+"processable"+File.separator+"multicacheschemas";
updateMonitor(monitor, stepCount++, "Copy MIF schema files "+schemaHome+File.separator+"multicacheschemas");
BuildResourceUtil.copyFiles(mifSchemaSrcHome, schemaHome+File.separator+"multicacheschemas", "xsd");
monitor.close();
String confirmMsg="HL7 V3 resource being successfully built at: "+targetSite;
int userReply=JOptionPane.showConfirmDialog(owner, confirmMsg,"Confirm",JOptionPane.YES_NO_OPTION);
// if (userReply==JOptionPane.YES_OPTION)
// toClose=true;
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
// rtnMsg=
String confirmMsg="Failed to build HL7 V3 resource"+":"+e.getMessage();
int userReply=JOptionPane.showConfirmDialog(owner, confirmMsg,"Confirm",JOptionPane.YES_NO_OPTION);
monitor.close();
}
}
}
);
localThread.start();
return rtnMsg;
}
private String buildHL7V2Resource(String resourceHome, String targetSite)
{
return "Buid HL7 V2 resource:Waiting for integration";
}
/**
* Invoked when an action occurs.
*/
public void actionPerformed(ActionEvent e)
{
String command = e.getActionCommand();
if (OK_COMMAND.equals(command))
{
String confirmMsg="ConfirmAction";
if(this.getTitle().equals(BuildHL7ResourceAction.COMMAND_BUILD_V3))
{
String srcHome=frontPage.getSelectFileHome();
String targetSite=frontPage.getTargetSite();
confirmMsg=buildHL7V3Resource(srcHome,targetSite);
}
else if(this.getTitle().equals(BuildHL7ResourceAction.COMMAND_BUILD_V2))
{
confirmMsg=buildHL7V2Resource(frontPage.getSelectFileHome(), frontPage.getTargetSite());
int userReply=JOptionPane.showConfirmDialog(this, confirmMsg,"Confirm",JOptionPane.YES_NO_OPTION);
}
}
setVisible(false);
dispose();
}
}
|
change message dialog type
SVN-Revision: 957
|
caadapter/components/userInterface/src/gov/nih/nci/caadapter/ui/common/resource/BuildHL7ResourceDialog.java
|
change message dialog type
|
<ide><path>aadapter/components/userInterface/src/gov/nih/nci/caadapter/ui/common/resource/BuildHL7ResourceDialog.java
<ide> /**
<ide> * <!-- LICENSE_TEXT_START -->
<del> * $Header: /share/content/gforge/caadapter/caadapter/components/userInterface/src/gov/nih/nci/caadapter/ui/common/resource/BuildHL7ResourceDialog.java,v 1.2 2007-09-18 18:07:26 wangeug Exp $
<add> * $Header: /share/content/gforge/caadapter/caadapter/components/userInterface/src/gov/nih/nci/caadapter/ui/common/resource/BuildHL7ResourceDialog.java,v 1.3 2007-10-03 16:21:49 wangeug Exp $
<ide> *
<ide> * ******************************************************************
<ide> * COPYRIGHT NOTICE
<ide> * @author OWNER: Scott Jiang
<ide> * @author LAST UPDATE $Author: wangeug $
<ide> * @version Since caAdapter v1.2
<del> * revision $Revision: 1.2 $
<del> * date $Date: 2007-09-18 18:07:26 $
<add> * revision $Revision: 1.3 $
<add> * date $Date: 2007-10-03 16:21:49 $
<ide> */
<ide> public class BuildHL7ResourceDialog extends JDialog implements ActionListener
<ide> {
<ide> *
<ide> * @see <a href="http://www.visi.com/~gyles19/cgi-bin/fom.cgi?file=63">JBuilder vice javac serial version UID</a>
<ide> */
<del> public static String RCSID = "$Header: /share/content/gforge/caadapter/caadapter/components/userInterface/src/gov/nih/nci/caadapter/ui/common/resource/BuildHL7ResourceDialog.java,v 1.2 2007-09-18 18:07:26 wangeug Exp $";
<add> public static String RCSID = "$Header: /share/content/gforge/caadapter/caadapter/components/userInterface/src/gov/nih/nci/caadapter/ui/common/resource/BuildHL7ResourceDialog.java,v 1.3 2007-10-03 16:21:49 wangeug Exp $";
<ide>
<ide> private static final String OK_COMMAND = "OK";
<ide> private static final String CANCEL_COMMAND = "Cancel";
<ide> BuildResourceUtil.copyFiles(mifSchemaSrcHome, schemaHome+File.separator+"multicacheschemas", "xsd");
<ide> monitor.close();
<ide> String confirmMsg="HL7 V3 resource being successfully built at: "+targetSite;
<del> int userReply=JOptionPane.showConfirmDialog(owner, confirmMsg,"Confirm",JOptionPane.YES_NO_OPTION);
<del>// if (userReply==JOptionPane.YES_OPTION)
<del>// toClose=true;
<add> JOptionPane.showMessageDialog(owner, confirmMsg,"Success",JOptionPane.DEFAULT_OPTION);
<ide> } catch (Exception e) {
<ide> // TODO Auto-generated catch block
<ide> e.printStackTrace();
<ide> // rtnMsg=
<ide> String confirmMsg="Failed to build HL7 V3 resource"+":"+e.getMessage();
<del> int userReply=JOptionPane.showConfirmDialog(owner, confirmMsg,"Confirm",JOptionPane.YES_NO_OPTION);
<add> JOptionPane.showMessageDialog(owner, confirmMsg,"Failed",JOptionPane.DEFAULT_OPTION);
<ide> monitor.close();
<ide> }
<ide> }
|
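The only behavioural change in the diff above is the dialog type: a confirm dialog that returned an unused Yes/No answer becomes a plain message dialog. A small illustrative Swing snippet of the difference (titles and text are made up, and the original code passes JOptionPane.DEFAULT_OPTION where INFORMATION_MESSAGE is used here):

import javax.swing.JOptionPane;

public class DialogTypeSketch {
    public static void main(String[] args) {
        // Message dialog: fire-and-forget notification, nothing to interpret afterwards.
        JOptionPane.showMessageDialog(null, "Resource built successfully", "Success",
                JOptionPane.INFORMATION_MESSAGE);

        // Confirm dialog: blocks and returns the user's choice, which only makes sense
        // when the caller actually branches on it.
        int reply = JOptionPane.showConfirmDialog(null, "Build the resource again?", "Confirm",
                JOptionPane.YES_NO_OPTION);
        System.out.println(reply == JOptionPane.YES_OPTION ? "yes" : "no or closed");
    }
}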
|
Java
|
mit
|
955e3153dc4dc390063667a0065b0358ac3ec541
| 0 |
abaditsegay/git-plugin,loomchild/git-plugin,jenkinsci/git-plugin,abaditsegay/git-plugin,recena/git-plugin,ydubreuil/git-plugin,loomchild/git-plugin,ndeloof/git-plugin,Nonymus/git-plugin,Jellyvision/git-plugin,recena/git-plugin,ndeloof/git-plugin,avdv/git-plugin,nKey/git-plugin,MarkEWaite/git-plugin,martinda/git-plugin,kzantow/git-plugin,bjacklyn/git-plugin,MarkEWaite/git-plugin,sgargan/git-plugin,jacob-keller/git-plugin,avdv/git-plugin,martinda/git-plugin,mlgiroux/git-plugin,jacob-keller/git-plugin,jacob-keller/git-plugin,mlgiroux/git-plugin,kzantow/git-plugin,v1v/git-plugin,Vlatombe/git-plugin,recena/git-plugin,mklein0/git-plugin,KostyaSha/git-plugin,bjacklyn/git-plugin,pauxus/git-plugin,Nonymus/git-plugin,Talend/git-plugin,Jellyvision/git-plugin,ivan-fedorov/git-plugin,ndeloof/git-plugin,jenkinsci/git-plugin,avdv/git-plugin,pauxus/git-plugin,mklein0/git-plugin,Talend/git-plugin,sgargan/git-plugin,nKey/git-plugin,MarkEWaite/git-plugin,abaditsegay/git-plugin,ialbors/git-plugin,Talend/git-plugin,v1v/git-plugin,Deveo/git-plugin,Jellyvision/git-plugin,sgargan/git-plugin,ldtkms/git-plugin,bjacklyn/git-plugin,ydubreuil/git-plugin,Deveo/git-plugin,martinda/git-plugin,KostyaSha/git-plugin,ldtkms/git-plugin,kzantow/git-plugin,v1v/git-plugin,nordstrand/git-plugin,ldtkms/git-plugin,jenkinsci/git-plugin,ivan-fedorov/git-plugin,Vlatombe/git-plugin,nKey/git-plugin,Vlatombe/git-plugin,Deveo/git-plugin,pauxus/git-plugin,mklein0/git-plugin,jenkinsci/git-plugin,nordstrand/git-plugin,ivan-fedorov/git-plugin,Nonymus/git-plugin,nordstrand/git-plugin,ydubreuil/git-plugin,ialbors/git-plugin,mlgiroux/git-plugin,MarkEWaite/git-plugin,loomchild/git-plugin
|
package hudson.plugins.git;
import hudson.EnvVars;
import hudson.FilePath;
import hudson.model.BuildListener;
import hudson.model.FreeStyleProject;
import hudson.model.Node;
import hudson.model.Run;
import hudson.plugins.git.browser.GitRepositoryBrowser;
import hudson.plugins.git.extensions.GitSCMExtension;
import java.io.File;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import static junit.framework.TestCase.assertNotNull;
import static junit.framework.TestCase.assertNull;
import org.jenkinsci.plugins.gitclient.GitClient;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.jvnet.hudson.test.JenkinsRule;
public class GitSCMCreateClientNullTest {
@Rule
public JenkinsRule j = new JenkinsRule();
@Before
public void setSecurity() {
j.jenkins.setSecurityRealm(j.createDummySecurityRealm());
}
@Before
public void configureGitTool() {
GitTool.onLoaded();
}
@Test
public void testGetClientAvoidNPEAfterSlaveDisconnected() throws Exception {
Node node = j.createOnlineSlave();
FreeStyleProject myProject = j.createFreeStyleProject();
/* Force myProject to execute on the new slave */
myProject.setAssignedLabel(j.jenkins.getLabel(node.getDisplayName()));
/* Configure SCM for the project - use this repo as the remote */
List<UserRemoteConfig> userRemoteConfigs = new ArrayList<UserRemoteConfig>();
String repoURL = (new File(".git")).toURI().toURL().toString();
String refspec = "+refs/heads/*:refs/remotes/origin/*";
userRemoteConfigs.add(new UserRemoteConfig(repoURL, "origin", refspec, null));
List<BranchSpec> branches = new ArrayList<BranchSpec>();
branches.add(new BranchSpec("refs/tags/git-2.2.6"));
Boolean doGenerateSubmoduleConfigurations = false;
Collection<SubmoduleConfig> submoduleCfg = null;
GitRepositoryBrowser browser = null;
List<GitSCMExtension> extensions = null;
GitSCM gitSCM = new GitSCM(
userRemoteConfigs,
branches,
doGenerateSubmoduleConfigurations,
submoduleCfg,
browser,
GitTool.DEFAULT,
extensions
);
myProject.setScm(gitSCM);
/* Build the project and assert it succeeded */
j.buildAndAssertSuccess(myProject);
assertNotNull(myProject.getFirstBuild().getWorkspace());
/* Disconnect the online slave */
node.toComputer().cliDisconnect("Disconnected the node to show NPE");
FilePath ws = myProject.getFirstBuild().getWorkspace();
assertNull(ws);
/* Create a GitClient from the first build. Failed with a null
* pointer exception prior to git plugin 2.2.7 due to disconnected slave.
*/
final Run myRun = myProject.getFirstBuild();
EnvVars myEnv = new EnvVars();
BuildListener myBuildListener = null;
GitClient client = gitSCM.createClient(myBuildListener, myEnv, myRun, ws);
assertNotNull(client);
}
}
|
src/test/java/hudson/plugins/git/GitSCMCreateClientNullTest.java
|
package hudson.plugins.git;
import hudson.EnvVars;
import hudson.FilePath;
import hudson.model.BuildListener;
import hudson.model.FreeStyleProject;
import hudson.model.Node;
import hudson.model.Run;
import hudson.plugins.git.browser.GitRepositoryBrowser;
import hudson.plugins.git.extensions.GitSCMExtension;
import java.io.File;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import static junit.framework.TestCase.assertNotNull;
import static junit.framework.TestCase.assertNull;
import org.jenkinsci.plugins.gitclient.GitClient;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.jvnet.hudson.test.JenkinsRule;
public class GitSCMCreateClientNullTest {
@Rule
public JenkinsRule j = new JenkinsRule();
@Before
public void setSecurity() {
j.jenkins.setSecurityRealm(j.createDummySecurityRealm());
}
@Before
public void configureGitTool() {
GitTool.onLoaded();
}
@Test
public void testGetClientAvoidNPEAfterSlaveDisconnected() throws Exception {
Node node = j.createOnlineSlave();
FreeStyleProject myProject = j.createFreeStyleProject();
/* Force myProject to execute on the new slave */
myProject.setAssignedLabel(j.jenkins.getLabel(node.getDisplayName()));
/* Configure SCM for the project - use this repo as the remote */
List<UserRemoteConfig> userRemoteConfigs = new ArrayList<UserRemoteConfig>();
String repoURL = (new File(".git")).toURI().toURL().toString();
String refspec = "+refs/heads/*:refs/remotes/origin/*";
userRemoteConfigs.add(new UserRemoteConfig(repoURL, "origin", refspec, null));
List<BranchSpec> branches = null;
Boolean doGenerateSubmoduleConfigurations = false;
Collection<SubmoduleConfig> submoduleCfg = null;
GitRepositoryBrowser browser = null;
List<GitSCMExtension> extensions = null;
GitSCM gitSCM = new GitSCM(
userRemoteConfigs,
branches,
doGenerateSubmoduleConfigurations,
submoduleCfg,
browser,
GitTool.DEFAULT,
extensions
);
myProject.setScm(gitSCM);
/* Build the project and assert it succeeded */
j.buildAndAssertSuccess(myProject);
assertNotNull(myProject.getFirstBuild().getWorkspace());
/* Disconnect the online slave */
node.toComputer().cliDisconnect("Disconnected the node to show NPE");
FilePath ws = myProject.getFirstBuild().getWorkspace();
assertNull(ws);
/* Create a GitClient from the first build. Failed with a null
* pointer exception prior to git plugin 2.2.7 due to disconnected slave.
*/
final Run myRun = myProject.getFirstBuild();
EnvVars myEnv = new EnvVars();
BuildListener myBuildListener = null;
GitClient client = gitSCM.createClient(myBuildListener, myEnv, myRun, ws);
assertNotNull(client);
}
}
|
Check out a known tag from the repo so the test job doesn't fail prematurely
|
src/test/java/hudson/plugins/git/GitSCMCreateClientNullTest.java
|
Check out a known tag from the repo so the test job doesn't fail prematurely
|
<ide><path>rc/test/java/hudson/plugins/git/GitSCMCreateClientNullTest.java
<ide> String repoURL = (new File(".git")).toURI().toURL().toString();
<ide> String refspec = "+refs/heads/*:refs/remotes/origin/*";
<ide> userRemoteConfigs.add(new UserRemoteConfig(repoURL, "origin", refspec, null));
<del> List<BranchSpec> branches = null;
<add> List<BranchSpec> branches = new ArrayList<BranchSpec>();
<add> branches.add(new BranchSpec("refs/tags/git-2.2.6"));
<ide> Boolean doGenerateSubmoduleConfigurations = false;
<ide> Collection<SubmoduleConfig> submoduleCfg = null;
<ide> GitRepositoryBrowser browser = null;
|
|
Java
|
apache-2.0
|
ec1e0542042df5f62e08f1c846341b61ecda124f
| 0 |
xiexingguang/RocketMQ,xiexingguang/RocketMQ,xiexingguang/RocketMQ
|
package com.alibaba.rocketmq.client.log;
import java.net.URL;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import ch.qos.logback.classic.LoggerContext;
import ch.qos.logback.classic.joran.JoranConfigurator;
import com.alibaba.rocketmq.common.constant.LoggerName;
public class ClientLogger {
private static Logger log;
static {
        // Initialize the Logger
log = createLogger(LoggerName.ClientLoggerName);
}
private static Logger createLogger(final String loggerName) {
String logConfigFilePath =
System.getProperty("rocketmq.client.log.configFile",
System.getenv("ROCKETMQ_CLIENT_LOG_CONFIGFILE"));
try {
LoggerContext lc = new LoggerContext();
JoranConfigurator configurator = new JoranConfigurator();
configurator.setContext(lc);
lc.reset();
if (null == logConfigFilePath) {
                // Not set by the application, use the config file bundled in the jar
URL url = ClientLogger.class.getClassLoader().getResource("logback_rocketmq_client.xml");
configurator.doConfigure(url);
}
else {
configurator.doConfigure(logConfigFilePath);
}
return lc.getLogger(LoggerName.ClientLoggerName);
}
catch (Exception e) {
System.err.println(e);
}
return LoggerFactory.getLogger(LoggerName.ClientLoggerName);
}
public static Logger getLog() {
return log;
}
public static void setLog(Logger log) {
ClientLogger.log = log;
}
}
|
rocketmq-client/src/main/java/com/alibaba/rocketmq/client/log/ClientLogger.java
|
package com.alibaba.rocketmq.client.log;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import ch.qos.logback.classic.LoggerContext;
import ch.qos.logback.classic.joran.JoranConfigurator;
import com.alibaba.rocketmq.common.constant.LoggerName;
public class ClientLogger {
private static Logger log;
static {
String logConfigFilePath =
System.getProperty("rocketmq.client.log.configFile",
System.getenv("ROCKETMQ_CLIENT_LOG_CONFIGFILE"));
if (null == logConfigFilePath) {
            // Not set by the application, use the config file bundled in the jar
logConfigFilePath = "logback_rocketmq_client.xml";
}
        // Initialize the Logger
log = createLogger(LoggerName.ClientLoggerName, logConfigFilePath);
}
private static Logger createLogger(final String loggerName, final String logConfigFile) {
try {
LoggerContext lc = new LoggerContext();
JoranConfigurator configurator = new JoranConfigurator();
configurator.setContext(lc);
lc.reset();
            // The config file is already packaged inside the Client Jar
configurator.doConfigure(logConfigFile);
return lc.getLogger(LoggerName.ClientLoggerName);
}
catch (Exception e) {
System.err.println(e);
}
return LoggerFactory.getLogger(LoggerName.ClientLoggerName);
}
public static Logger getLog() {
return log;
}
public static void setLog(Logger log) {
ClientLogger.log = log;
}
}
|
#4 Fix the issue where the client cannot find the log configuration file
|
rocketmq-client/src/main/java/com/alibaba/rocketmq/client/log/ClientLogger.java
|
#4 Fix the issue where the client cannot find the log configuration file
|
<ide><path>ocketmq-client/src/main/java/com/alibaba/rocketmq/client/log/ClientLogger.java
<ide> package com.alibaba.rocketmq.client.log;
<add>
<add>import java.net.URL;
<ide>
<ide> import org.slf4j.Logger;
<ide> import org.slf4j.LoggerFactory;
<ide> private static Logger log;
<ide>
<ide> static {
<add>        // Initialize the Logger
<add> log = createLogger(LoggerName.ClientLoggerName);
<add> }
<add>
<add>
<add> private static Logger createLogger(final String loggerName) {
<ide> String logConfigFilePath =
<ide> System.getProperty("rocketmq.client.log.configFile",
<ide> System.getenv("ROCKETMQ_CLIENT_LOG_CONFIGFILE"));
<del> if (null == logConfigFilePath) {
<del>            // Not set by the application, use the config file bundled in the jar
<del> logConfigFilePath = "logback_rocketmq_client.xml";
<del> }
<ide>
<del>        // Initialize the Logger
<del> log = createLogger(LoggerName.ClientLoggerName, logConfigFilePath);
<del> }
<del>
<del>
<del> private static Logger createLogger(final String loggerName, final String logConfigFile) {
<ide> try {
<ide> LoggerContext lc = new LoggerContext();
<ide> JoranConfigurator configurator = new JoranConfigurator();
<ide> configurator.setContext(lc);
<ide> lc.reset();
<del>            // The config file is already packaged inside the Client Jar
<del> configurator.doConfigure(logConfigFile);
<add>
<add> if (null == logConfigFilePath) {
<add>                // Not set by the application, use the config file bundled in the jar
<add> URL url = ClientLogger.class.getClassLoader().getResource("logback_rocketmq_client.xml");
<add> configurator.doConfigure(url);
<add> }
<add> else {
<add> configurator.doConfigure(logConfigFilePath);
<add> }
<add>
<ide> return lc.getLogger(LoggerName.ClientLoggerName);
<ide> }
<ide> catch (Exception e) {
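The change above is why the client now finds its default logback file even when it only exists inside the jar: a configured path is still honoured, but the fallback is resolved through the classloader as a URL instead of being treated as a filesystem path. A stripped-down sketch of that lookup order, reusing the property key, environment variable and resource name from the code above (the printing is illustrative only):

import java.io.InputStream;
import java.net.URL;

public class LogConfigFallbackSketch {
    public static void main(String[] args) throws Exception {
        // 1. An explicit path wins: system property first, then the environment variable.
        String configured = System.getProperty("rocketmq.client.log.configFile",
                System.getenv("ROCKETMQ_CLIENT_LOG_CONFIGFILE"));
        if (configured != null) {
            System.out.println("Using external log config: " + configured);
            return;
        }

        // 2. Otherwise fall back to the file shipped inside the jar. A plain path such as
        //    "logback_rocketmq_client.xml" would not exist on disk, but the classloader URL does.
        URL url = LogConfigFallbackSketch.class.getClassLoader()
                .getResource("logback_rocketmq_client.xml");
        if (url == null) {
            System.err.println("Bundled log config not found on the classpath");
            return;
        }
        try (InputStream in = url.openStream()) {
            System.out.println("Loaded bundled config from " + url + ", " + in.available() + " bytes");
        }
    }
}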
|
|
Java
|
mit
|
a144a89a0809386b62f0a7853c5ce2dd638ba5ae
| 0 |
ktsuench/eCommerce,ktsuench/eCommerce
|
package com.KST.eCommerce;
import java.util.ArrayList;
/**
*
* @author Kent Tsuenchy
*/
public class EcommercePlatform {
private final Session session;
private final PaymentProcessor checkout;
private final ArrayList<User> users;
public EcommercePlatform() {
Database db = new Database("users.data");
this.session = new Session();
this.checkout = new PaymentProcessor();
this.users = db.readUsers();
db.closeDb();
}
public ArrayList<Item> listItems() {
ArrayList<Item> items = new ArrayList<>();
for (User u : users) {
for (Item i : ((Seller) u).getItems()) {
items.add(i);
}
}
return (ArrayList<Item>) items.clone();
}
public Session getSession() {
return this.session;
}
public PaymentProcessor getPaymentProcessor() {
return this.checkout;
}
public void showInterface() {
EcommerceGUI.showGUI(this);
}
public static void main(String[] args) {
EcommercePlatform platform = new EcommercePlatform();
platform.showInterface();
}
}
|
src/com/KST/eCommerce/EcommercePlatform.java
|
package com.KST.eCommerce;
import java.util.ArrayList;
/**
*
* @author Kent Tsuenchy
*/
public class EcommercePlatform {
private final Session session;
private final PaymentProcessor checkout;
private final ArrayList<User> users;
public EcommercePlatform() {
Database db = new Database("users.data");
this.session = new Session();
this.checkout = new PaymentProcessor();
this.users = db.readUsers();
db.closeDb();
}
public ArrayList<Item> listItems() {
ArrayList<Item> items = new ArrayList<>();
for (User u : users) {
for (Item i : ((Seller) u).getItems()) {
items.add(i);
}
}
return (ArrayList<Item>) items.clone();
}
public Session getSession() {
return this.session;
}
public void showInterface() {
EcommerceGUI.showGUI(this);
}
public static void main(String[] args) {
EcommercePlatform platform = new EcommercePlatform();
platform.showInterface();
}
}
|
added getPaymentProcessor method
|
src/com/KST/eCommerce/EcommercePlatform.java
|
added getPaymentProcessor method
|
<ide><path>rc/com/KST/eCommerce/EcommercePlatform.java
<ide> return this.session;
<ide> }
<ide>
<add> public PaymentProcessor getPaymentProcessor() {
<add> return this.checkout;
<add> }
<add>
<ide> public void showInterface() {
<ide> EcommerceGUI.showGUI(this);
<ide> }
|
|
Java
|
apache-2.0
|
efe86dac758c9b1a2dc4f2e5a2542505655ec2c1
| 0 |
3dcitydb/importer-exporter,3dcitydb/importer-exporter,3dcitydb/importer-exporter
|
/*
* 3D City Database - The Open Source CityGML Database
* http://www.3dcitydb.org/
*
* Copyright 2013 - 2019
* Chair of Geoinformatics
* Technical University of Munich, Germany
* https://www.gis.bgu.tum.de/
*
* The 3D City Database is jointly developed with the following
* cooperation partners:
*
* virtualcitySYSTEMS GmbH, Berlin <http://www.virtualcitysystems.de/>
* M.O.S.S. Computer Grafik Systeme GmbH, Taufkirchen <http://www.moss.de/>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.citydb.query.builder.sql;
import org.citydb.database.schema.path.AbstractNode;
import org.citydb.database.schema.path.SchemaPath;
import org.citydb.query.builder.QueryBuildException;
import org.citydb.query.filter.sorting.SortProperty;
import org.citydb.query.filter.sorting.Sorting;
import org.citydb.sqlbuilder.schema.Column;
import org.citydb.sqlbuilder.schema.Table;
import org.citydb.sqlbuilder.select.OrderByToken;
import org.citydb.sqlbuilder.select.PredicateToken;
import org.citydb.sqlbuilder.select.Select;
import org.citydb.sqlbuilder.select.join.Join;
import org.citydb.sqlbuilder.select.operator.comparison.BinaryComparisonOperator;
import org.citydb.sqlbuilder.select.operator.logical.BinaryLogicalOperator;
import org.citydb.sqlbuilder.select.operator.logical.LogicalOperationName;
import org.citydb.sqlbuilder.select.orderBy.SortOrder;
import java.util.HashSet;
import java.util.Set;
public class SortingBuilder {
protected SortingBuilder() {
}
protected void buildSorting(Sorting sorting, SchemaPathBuilder builder, SQLQueryContext queryContext) throws QueryBuildException {
if (!sorting.hasSortProperties())
throw new QueryBuildException("No valid sort properties provided.");
Set<String> valueReferences = new HashSet<>();
for (SortProperty sortProperty : sorting.getSortProperties()) {
SchemaPath schemaPath = sortProperty.getValueReference().getSchemaPath();
if (!valueReferences.add(schemaPath.toXPath()))
throw new QueryBuildException("Duplicate value references pointing to the same sorting key are not allowed.");
AbstractNode<?> node = schemaPath.getFirstNode();
if (node.isSetPredicate())
throw new QueryBuildException("Predicates on the root feature are not supported for value references of sort properties.");
queryContext = builder.addSchemaPath(schemaPath, queryContext);
SortOrder sortOrder = sortProperty.getSortOrder() == org.citydb.query.filter.sorting.SortOrder.DESCENDING ?
SortOrder.DESCENDING : SortOrder.ASCENDING;
queryContext.select.addOrderBy(new OrderByToken(queryContext.targetColumn, sortOrder));
if (queryContext.hasPredicates()) {
for (PredicateToken predicate : queryContext.predicates)
addJoinConditions(predicate, queryContext.select);
queryContext.unsetPredicates();
}
}
}
private void addJoinConditions(PredicateToken predicate, Select select) throws QueryBuildException {
if (predicate instanceof BinaryLogicalOperator) {
if (((BinaryLogicalOperator) predicate).getOperationName() == LogicalOperationName.OR)
throw new QueryBuildException("Logical OR predicates are not supported for value references of sort properties.");
for (PredicateToken operand : ((BinaryLogicalOperator) predicate).getOperands())
addJoinConditions(operand, select);
} else if (predicate instanceof BinaryComparisonOperator) {
BinaryComparisonOperator operator = (BinaryComparisonOperator) predicate;
if (!(operator.getLeftOperand() instanceof Column))
throw new QueryBuildException("Found unexpected predicate operand in value reference of sort property.");
Table table = ((Column) operator.getLeftOperand()).getTable();
for (Join join : select.getJoins()) {
if (join.getToColumn().getTable().equals(table)) {
join.addCondition(predicate);
return;
}
}
throw new QueryBuildException("Failed to map predicates in value reference to join conditions.");
} else
throw new QueryBuildException("Failed to map predicates in value reference to join conditions.");
}
}
|
impexp-core/src/main/java/org/citydb/query/builder/sql/SortingBuilder.java
|
/*
* 3D City Database - The Open Source CityGML Database
* http://www.3dcitydb.org/
*
* Copyright 2013 - 2019
* Chair of Geoinformatics
* Technical University of Munich, Germany
* https://www.gis.bgu.tum.de/
*
* The 3D City Database is jointly developed with the following
* cooperation partners:
*
* virtualcitySYSTEMS GmbH, Berlin <http://www.virtualcitysystems.de/>
* M.O.S.S. Computer Grafik Systeme GmbH, Taufkirchen <http://www.moss.de/>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.citydb.query.builder.sql;
import org.citydb.database.schema.path.AbstractNode;
import org.citydb.database.schema.path.SchemaPath;
import org.citydb.query.builder.QueryBuildException;
import org.citydb.query.filter.sorting.SortProperty;
import org.citydb.query.filter.sorting.Sorting;
import org.citydb.sqlbuilder.schema.Column;
import org.citydb.sqlbuilder.schema.Table;
import org.citydb.sqlbuilder.select.OrderByToken;
import org.citydb.sqlbuilder.select.PredicateToken;
import org.citydb.sqlbuilder.select.Select;
import org.citydb.sqlbuilder.select.join.Join;
import org.citydb.sqlbuilder.select.operator.comparison.BinaryComparisonOperator;
import org.citydb.sqlbuilder.select.operator.logical.BinaryLogicalOperator;
import org.citydb.sqlbuilder.select.operator.logical.LogicalOperationName;
import org.citydb.sqlbuilder.select.orderBy.SortOrder;
public class SortingBuilder {
protected SortingBuilder() {
}
protected void buildSorting(Sorting sorting, SchemaPathBuilder builder, SQLQueryContext queryContext) throws QueryBuildException {
if (!sorting.hasSortProperties())
throw new QueryBuildException("No valid sort properties provided.");
for (SortProperty sortProperty : sorting.getSortProperties()) {
SchemaPath schemaPath = sortProperty.getValueReference().getSchemaPath();
AbstractNode<?> node = schemaPath.getFirstNode();
if (node.isSetPredicate())
throw new QueryBuildException("Predicates on the root feature are not supported for value references of sort properties.");
queryContext = builder.addSchemaPath(schemaPath, queryContext);
SortOrder sortOrder = sortProperty.getSortOrder() == org.citydb.query.filter.sorting.SortOrder.DESCENDING ?
SortOrder.DESCENDING : SortOrder.ASCENDING;
queryContext.select.addOrderBy(new OrderByToken(queryContext.targetColumn, sortOrder));
if (queryContext.hasPredicates()) {
for (PredicateToken predicate : queryContext.predicates)
addJoinConditions(predicate, queryContext.select);
queryContext.unsetPredicates();
}
}
}
private void addJoinConditions(PredicateToken predicate, Select select) throws QueryBuildException {
if (predicate instanceof BinaryLogicalOperator) {
if (((BinaryLogicalOperator) predicate).getOperationName() == LogicalOperationName.OR)
throw new QueryBuildException("Logical OR predicates are not supported for value references of sort properties.");
for (PredicateToken operand : ((BinaryLogicalOperator) predicate).getOperands())
addJoinConditions(operand, select);
} else if (predicate instanceof BinaryComparisonOperator) {
BinaryComparisonOperator operator = (BinaryComparisonOperator) predicate;
if (!(operator.getLeftOperand() instanceof Column))
throw new QueryBuildException("Found unexpected predicate operand in value reference of sort property.");
Table table = ((Column) operator.getLeftOperand()).getTable();
for (Join join : select.getJoins()) {
if (join.getToColumn().getTable().equals(table)) {
join.addCondition(predicate);
return;
}
}
throw new QueryBuildException("Failed to map predicates in value reference to join conditions.");
} else
throw new QueryBuildException("Failed to map predicates in value reference to join conditions.");
}
}
|
avoid duplicate sort keys
|
impexp-core/src/main/java/org/citydb/query/builder/sql/SortingBuilder.java
|
avoid duplicate sort keys
|
<ide><path>mpexp-core/src/main/java/org/citydb/query/builder/sql/SortingBuilder.java
<ide> import org.citydb.sqlbuilder.select.operator.logical.LogicalOperationName;
<ide> import org.citydb.sqlbuilder.select.orderBy.SortOrder;
<ide>
<add>import java.util.HashSet;
<add>import java.util.Set;
<add>
<ide> public class SortingBuilder {
<ide>
<ide> protected SortingBuilder() {
<ide> if (!sorting.hasSortProperties())
<ide> throw new QueryBuildException("No valid sort properties provided.");
<ide>
<add> Set<String> valueReferences = new HashSet<>();
<ide> for (SortProperty sortProperty : sorting.getSortProperties()) {
<ide> SchemaPath schemaPath = sortProperty.getValueReference().getSchemaPath();
<add> if (!valueReferences.add(schemaPath.toXPath()))
<add> throw new QueryBuildException("Duplicate value references pointing to the same sorting key are not allowed.");
<ide>
<ide> AbstractNode<?> node = schemaPath.getFirstNode();
<ide> if (node.isSetPredicate())
|
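The duplicate check added above leans on the fact that Set.add returns false when the element is already present, so one HashSet gives both the bookkeeping and the test. A tiny standalone illustration of the idiom (the property names are invented):

import java.util.HashSet;
import java.util.Set;

public class DuplicateSortKeySketch {
    public static void main(String[] args) {
        String[] sortKeys = {"bldg:measuredHeight", "core:creationDate", "bldg:measuredHeight"};

        Set<String> seen = new HashSet<>();
        for (String key : sortKeys) {
            // add() reports whether the set changed, so no separate contains() call is needed.
            if (!seen.add(key)) {
                System.out.println("Rejected duplicate sort key: " + key);
            } else {
                System.out.println("Accepted sort key: " + key);
            }
        }
    }
}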
|
Java
|
epl-1.0
|
4bdde270f871e27915f3b5ac8d02253b2eae51af
| 0 |
debrief/debrief,alastrina123/debrief,theanuradha/debrief,debrief/debrief,theanuradha/debrief,alastrina123/debrief,theanuradha/debrief,debrief/debrief,alastrina123/debrief,theanuradha/debrief,pecko/debrief,alastrina123/debrief,alastrina123/debrief,theanuradha/debrief,debrief/debrief,pecko/debrief,debrief/debrief,pecko/debrief,pecko/debrief,pecko/debrief,debrief/debrief,pecko/debrief,alastrina123/debrief,theanuradha/debrief,theanuradha/debrief,alastrina123/debrief,pecko/debrief
|
package org.mwc.cmap.overview.views;
import java.awt.*;
import java.awt.Font;
import java.awt.Point;
import java.beans.*;
import java.util.Enumeration;
import org.eclipse.jface.action.*;
import org.eclipse.jface.viewers.ISelection;
import org.eclipse.swt.SWT;
import org.eclipse.swt.graphics.*;
import org.eclipse.swt.graphics.Image;
import org.eclipse.swt.graphics.Rectangle;
import org.eclipse.swt.widgets.*;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.ui.*;
import org.eclipse.ui.part.ViewPart;
import org.mwc.cmap.core.CorePlugin;
import org.mwc.cmap.core.interfaces.IControllableViewport;
import org.mwc.cmap.core.operations.DebriefActionWrapper;
import org.mwc.cmap.core.ui_support.PartMonitor;
import org.mwc.cmap.core.ui_support.swt.SWTCanvasAdapter;
import org.mwc.cmap.gt2plot.proj.GtProjection;
import org.mwc.cmap.overview.Activator;
import org.mwc.cmap.plotViewer.editors.chart.*;
import MWC.Algorithms.PlainProjection;
import MWC.GUI.*;
import MWC.GUI.CanvasType.PaintListener;
import MWC.GUI.Tools.Action;
import MWC.GenericData.*;
/**
 * This sample class demonstrates how to plug in a new workbench view. The view
* shows data obtained from the model. The sample creates a dummy model on the
* fly, but a real implementation would connect to the model available either in
* this or another plug-in (e.g. the workspace). The view is connected to the
* model using a content provider.
* <p>
* The view uses a label provider to define how model objects should be
* presented in the view. Each view can present the same model objects using
* different labels and icons, if needed. Alternatively, a single label provider
* can be shared between views in order to ensure that objects of the same type
* are presented in the same way everywhere.
* <p>
*/
public class ChartOverview extends ViewPart implements PropertyChangeListener
{
/**
* helper application to help track creation/activation of new plots
*/
private PartMonitor _myPartMonitor;
protected Layers _targetLayers;
OverviewSWTChart _myOverviewChart;
IControllableViewport _targetViewport;
private org.eclipse.jface.action.Action _fitToWindow;
private final GtProjection _myProjection;
/**
* The constructor.
*/
public ChartOverview()
{
_myProjection = new GtProjection();
}
/**
* This is a callback that will allow us to create the viewer and initialize
* it.
*/
public void createPartControl(final Composite parent)
{
// declare our context sensitive help
CorePlugin.declareContextHelp(parent, "org.mwc.debrief.help.OverviewChart");
// hey, first create the chart
_myOverviewChart = new OverviewSWTChart(parent)
{
/**
*
*/
private static final long serialVersionUID = 1L;
IControllableViewport getParentViewport()
{
return _targetViewport;
}
};
// use our special dragger
_myOverviewChart.setDragMode(new MyZoomMode());
// and update the chart
_myOverviewChart.setChartOverview(this);
// and our special painter
_myOverviewChart.getCanvas().addPainter(new PaintListener()
{
public WorldArea getDataArea()
{
return null;
}
public String getName()
{
return "Overview data area";
}
public void paintMe(final CanvasType dest)
{
// ok - just paint in our rectangle
paintDataRect(dest);
}
public void resizedEvent(final PlainProjection theProj,
final Dimension newScreenArea)
{
}
});
makeActions();
contributeToActionBars();
// /////////////////////////////////////////
// ok - listen out for changes in the view
// /////////////////////////////////////////
watchMyParts();
}
/**
* sort out what we're listening to...
*/
private void watchMyParts()
{
_myPartMonitor = new PartMonitor(getSite().getWorkbenchWindow()
.getPartService());
_myPartMonitor.addPartListener(Layers.class, PartMonitor.ACTIVATED,
new PartMonitor.ICallback()
{
public void eventTriggered(final String type, final Object part,
final IWorkbenchPart parentPart)
{
final Layers provider = (Layers) part;
// is this different to our current one?
if (provider != _targetLayers)
{
// ok, start listening to the new one
_targetLayers = provider;
plotSelected(provider, parentPart);
}
}
});
_myPartMonitor.addPartListener(Layers.class, PartMonitor.CLOSED,
new PartMonitor.ICallback()
{
public void eventTriggered(final String type, final Object part,
final IWorkbenchPart parentPart)
{
if (part == _targetLayers)
{
// cancel the listeners
plotSelected(null, null);
_targetLayers = null;
clearPlot();
}
}
});
_myPartMonitor.addPartListener(IControllableViewport.class,
PartMonitor.ACTIVATED, new PartMonitor.ICallback()
{
public void eventTriggered(final String type, final Object part,
final IWorkbenchPart parentPart)
{
final IControllableViewport provider = (IControllableViewport) part;
// is this different to our current one?
if (provider != _targetViewport)
{
// ok, stop listening to the current viewport (if we
// have one)
if (_targetViewport != null)
stopListeningToViewport();
// and start listening to the new one
_targetViewport = provider;
startListeningToViewport();
}
}
});
_myPartMonitor.addPartListener(IControllableViewport.class,
PartMonitor.CLOSED, new PartMonitor.ICallback()
{
public void eventTriggered(final String type, final Object part,
final IWorkbenchPart parentPart)
{
if (part == _targetViewport)
{
if (_targetViewport != null)
stopListeningToViewport();
_targetViewport = null;
}
}
});
// ok we're all ready now. just try and see if the current part is valid
_myPartMonitor.fireActivePart(getSite().getWorkbenchWindow()
.getActivePage());
}
/**
* paint the data-rectangle in our overview, to show the currently visible
* area
*
* @param dest
*/
protected void paintDataRect(final CanvasType dest)
{
// check we're alive
if (_targetViewport == null)
return;
// get the projection
final PlainProjection proj = _targetViewport.getProjection();
// get the dimensions
final java.awt.Dimension scrArea = proj.getScreenArea();
// did we find any data?
if (scrArea == null)
return;
// now convert to data coordinates
WorldLocation loc = proj.toWorld(new Point(0, 0));
// did it work?
if (loc == null)
return;
// produce the screen coordinate in the overview
final Point thePt = _myOverviewChart.getCanvas().getProjection()
.toScreen(loc);
// did it work?
if (thePt == null)
return;
// and the other corner
loc = proj.toWorld(new Point(scrArea.width, scrArea.height));
// create the screen coordinates
final Point tl = new Point(thePt);
final Point br = new Point(_myOverviewChart.getCanvas().getProjection()
.toScreen(loc));
//
// // also, draw in the data-area
// WorldArea dataRect = _currentViewport.getViewport();
// // convert to my coords
// java.awt.Point tl = new
// java.awt.Point(dest.getProjection().toScreen(dataRect.getTopLeft()));
// java.awt.Point br = new
// java.awt.Point(dest.getProjection().toScreen(dataRect.getBottomRight()));
dest.setColor(new java.awt.Color(200, 200, 200));
dest.drawRect(tl.x, tl.y, br.x - tl.x, br.y - tl.y);
}
/**
* disable the plot we-re no longer looking at anything...
*/
protected void clearPlot()
{
// ok - we're no longer looking at anything. clear the plot..
}
/**
* ok, a new plot is selected - better show it then
*
* @param provider
* the new plot
* @param parentPart
* the part containing the plot
*/
protected void plotSelected(final Layers provider,
final IWorkbenchPart parentPart)
{
// ok - update our chart to show the indicated plot.
_myOverviewChart.setLayers(provider);
if (provider != null)
{
_myOverviewChart.rescale();
}
_myOverviewChart.repaint();
// this.setPartName(parentPart.getTitle());
}
private void contributeToActionBars()
{
final IActionBars bars = getViewSite().getActionBars();
fillLocalPullDown(bars.getMenuManager());
fillLocalToolBar(bars.getToolBarManager());
}
private void fillLocalPullDown(final IMenuManager manager)
{
}
private void fillLocalToolBar(final IToolBarManager manager)
{
manager.add(_fitToWindow);
// and the help link
manager.add(new Separator());
manager.add(CorePlugin.createOpenHelpAction(
"org.mwc.debrief.help.OverviewChart", null, this));
}
private void makeActions()
{
_fitToWindow = new org.eclipse.jface.action.Action()
{
public void run()
{
// ok, fit the plot to the window...
fitTargetToWindow();
}
};
_fitToWindow.setText("Fit to window");
_fitToWindow
.setToolTipText("Zoom the selected plot out to show the full data");
_fitToWindow.setImageDescriptor(Activator
.getImageDescriptor("icons/fit_to_win.gif"));
}
/**
* Passing the focus request to the viewer's control.
*/
public void setFocus()
{
}
public class MyZoomMode extends SWTChart.PlotMouseDragger
{
org.eclipse.swt.graphics.Point _startPoint;
SWTCanvas _myCanvas;
public void doMouseDrag(final org.eclipse.swt.graphics.Point pt,
final int JITTER, final Layers theLayers, final SWTCanvas theCanvas)
{
// just do a check that we have our start point (it may have been
// cleared
// at the end of the move operation)
if (_startPoint != null)
{
final int deltaX = _startPoint.x - pt.x;
final int deltaY = _startPoint.y - pt.y;
if (Math.abs(deltaX) < JITTER && Math.abs(deltaY) < JITTER)
return;
Tracker _dragTracker = new Tracker((Composite) _myCanvas.getCanvas(),
SWT.RESIZE);
final Rectangle rect = new Rectangle(_startPoint.x, _startPoint.y,
deltaX, deltaY);
_dragTracker.setRectangles(new Rectangle[]
{ rect });
final boolean dragResult = _dragTracker.open();
if (dragResult)
{
final Rectangle[] rects = _dragTracker.getRectangles();
final Rectangle res = rects[0];
// get world area
final java.awt.Point tl = new java.awt.Point(res.x, res.y);
final java.awt.Point br = new java.awt.Point(res.x + res.width, res.y
+ res.height);
final WorldLocation locA = new WorldLocation(_myCanvas
.getProjection().toWorld(tl));
final WorldLocation locB = new WorldLocation(_myCanvas
.getProjection().toWorld(br));
final WorldArea area = new WorldArea(locA, locB);
// hmm, check we have a controllable viewport
if (_targetViewport != null)
{
try
{
// ok, we also need to get hold of the target chart
final WorldArea oldArea = _targetViewport.getViewport();
final Action theAction = new OverviewZoomInAction(
_targetViewport, oldArea, area);
// and wrap it
final DebriefActionWrapper daw = new DebriefActionWrapper(
theAction, null, null);
// and add it to the clipboard
CorePlugin.run(daw);
}
catch (final RuntimeException re)
{
re.printStackTrace();
}
}
_dragTracker = null;
_startPoint = null;
}
}
}
public void doMouseUp(final org.eclipse.swt.graphics.Point point,
final int keyState)
{
_startPoint = null;
}
public void mouseDown(final org.eclipse.swt.graphics.Point point,
final SWTCanvas canvas, final PlainChart theChart)
{
_startPoint = point;
_myCanvas = canvas;
}
}
public class OverviewZoomInAction implements Action
{
private final IControllableViewport _theViewport;
private final WorldArea _oldArea;
private final WorldArea _newArea;
public OverviewZoomInAction(final IControllableViewport theChart,
final WorldArea oldArea, final WorldArea newArea)
{
_theViewport = theChart;
_oldArea = oldArea;
_newArea = newArea;
}
public boolean isRedoable()
{
return true;
}
public boolean isUndoable()
{
return true;
}
public String toString()
{
return "Zoom in operation";
}
public void undo()
{
// set the data area for the chart to the specified area
_theViewport.setViewport(_oldArea);
_theViewport.update();
_myOverviewChart.update();
}
public void execute()
{
_theViewport.setViewport(_newArea);
_theViewport.update();
_myOverviewChart.update();
}
}
public MWC.GUI.Rubberband getRubberband()
{
return null;
}
protected class OverviewSWTCanvas extends SWTCanvas
{
public OverviewSWTCanvas(final Composite parent)
{
super(parent, _myProjection);
}
/**
*
*/
private static final long serialVersionUID = 1L;
public void drawText(final Font theFont, final String theStr, final int x,
final int y)
{
// ignore - we don't do text in overview
}
public void drawText(final String theStr, final int x, final int y)
{
// ignore - we don't do text in overview
}
}
protected static class OverviewSWTCanvasAdapter extends SWTCanvasAdapter
{
public OverviewSWTCanvasAdapter(final PlainProjection proj)
{
super(proj);
}
/**
*
*/
private static final long serialVersionUID = 1L;
public void drawText(final Font theFont, final String theStr, final int x,
final int y)
{
// ignore - we don't do text in overview
}
public void drawText(final String theStr, final int x, final int y)
{
// ignore - we don't do text in overview
}
}
abstract public class OverviewSWTChart extends SWTChart
{
ChartOverview _parentView;
/**
*
*/
private static final long serialVersionUID = 1L;
abstract IControllableViewport getParentViewport();
public OverviewSWTChart(final Composite parent)
{
super(null, parent, _myProjection);
// ok, set up double-click handler to centre the target plot on the clicked location
this.addCursorDblClickedListener(new ChartDoubleClickListener()
{
public void cursorDblClicked(final PlainChart theChart,
final WorldLocation theLocation, final Point thePoint)
{
// ok - got the location; centre the plot on it
final WorldArea currentArea = new WorldArea(getParentViewport()
.getViewport());
currentArea.setCentre(theLocation);
getParentViewport().setViewport(currentArea);
// and trigger an update
getParentViewport().update();
}
});
}
public void setChartOverview(final ChartOverview view)
{
_parentView = view;
}
public void chartFireSelectionChanged(final ISelection sel)
{
}
/**
* over-ride the parent's version of paint, so that we can try to do it by
* layers.
*/
public final void paintMe(final CanvasType dest)
{
// just double-check we have some layers (if we're part of an
// overview
// chart, we may not have...)
if (_theLayers == null)
return;
// check that we have a valid canvas (that the sizes are set)
final java.awt.Dimension sArea = dest.getProjection().getScreenArea();
if (sArea != null)
{
if (sArea.width > 0)
{
// hey, we've plotted at least once, has the data area
// changed?
if (_lastDataArea != _parentView._myOverviewChart.getCanvas()
.getProjection().getDataArea())
{
// remember the data area for next time
_lastDataArea = _parentView._myOverviewChart.getCanvas()
.getProjection().getDataArea();
// clear out all of the layers we are using
_myLayers.clear();
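// (note: _myLayers caches one rendered image per layer - clearing it forces every
// buffered layer to be re-drawn against the new data area)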
}
final int canvasHeight = _parentView._myOverviewChart.getCanvas()
.getSize().getSize().height;
final int canvasWidth = _parentView._myOverviewChart.getCanvas()
.getSize().width;
paintBackground(dest);
// ok, pass through the layers, repainting any which need it
final Enumeration<Layer> numer = _theLayers.sortedElements();
while (numer.hasMoreElements())
{
final Layer thisLayer = (Layer) numer.nextElement();
boolean isAlreadyPlotted = false;
// hmm, do we want to paint this layer?
if (_parentView.doWePaintThisLayer(thisLayer))
{
// just check if this layer is visible
if (thisLayer.getVisible())
{
// System.out.println("painting:" +
// thisLayer.getName());
if (doubleBufferPlot())
{
// check we're plotting to an SWTCanvas,
// because we don't
// double-buffer anything else
if (dest instanceof SWTCanvas)
{
// does this layer want to be
// double-buffered?
if (thisLayer instanceof BaseLayer)
{
// just check if there is a property
// which over-rides the
// double-buffering
final BaseLayer bl = (BaseLayer) thisLayer;
if (bl.isBuffered())
{
isAlreadyPlotted = true;
// do our double-buffering bit
// do we have a cached image for
// this layer object?
org.eclipse.swt.graphics.Image image = (org.eclipse.swt.graphics.Image) _myLayers
.get(thisLayer);
if (image == null)
{
// ok - create our image
if (_myImageTemplate == null)
{
Image tmpTemplate = new Image(Display.getCurrent(),
canvasWidth, canvasHeight);
_myImageTemplate = tmpTemplate.getImageData();
tmpTemplate.dispose();
tmpTemplate = null;
}
image = createSWTImage(_myImageTemplate);
final GC newGC = new GC(image);
// wrap the GC into
// something we know how to
// plot to.
final SWTCanvasAdapter ca = new OverviewSWTCanvasAdapter(
dest.getProjection());
ca.setScreenSize(dest.getProjection().getScreenArea());
// and store the GC
ca.startDraw(newGC);
// ok, paint the layer into
// this canvas
thisLayer.paint(ca);
// done.
ca.endDraw(null);
// store this image in our
// list, indexed by the
// layer
// object itself
_myLayers.put(thisLayer, image);
// and ditch the GC
newGC.dispose();
}
// have we ended up with an
// image to paint?
if (image != null)
{
// get the graphics to paint
// to
final SWTCanvas canv = (SWTCanvas) dest;
// lastly add this image to
// our Graphics object
canv.drawSWTImage(image, 0, 0, canvasWidth,
canvasHeight, 255);
// but, we also have to
// ditch the image
// image.dispose();
}
}
}
} // whether we were plotting to an
// SWTCanvas (which may be
// double-buffered)
} // whether we are happy to do double-buffering
// did we manage to paint it
if (!isAlreadyPlotted)
{
paintThisLayer(thisLayer, dest);
isAlreadyPlotted = true;
}
}
}
}
}
}
}
/**
* over-rideable member function which allows us to over-ride the canvas
* which gets used.
*
* @return the Canvas to use
*/
public final SWTCanvas createCanvas(final Composite parent)
{
return new OverviewSWTCanvas(parent);
}
}
/**
* do a fit-to-window of the target viewport
*/
protected void fitTargetToWindow()
{
_targetViewport.rescale();
_targetViewport.update();
// now, redraw our rectangle
_myOverviewChart.repaint();
}
/**
* decide whether to paint this layer...
*
* @param thisLayer
* the layer we're looking at
* @return
*/
public boolean doWePaintThisLayer(final Layer thisLayer)
{
final boolean res = true;
// no, don't check for ETOPO data - just paint the lot.
// if (thisLayer instanceof SpatialRasterPainter)
// res = false;
return res;
}
public void propertyChange(final PropertyChangeEvent evt)
{
// ok, we've had a range change. better update
_myOverviewChart.repaint();
}
/**
*
*/
void stopListeningToViewport()
{
if (_targetViewport != null)
if (_targetViewport.getProjection() != null)
{
_targetViewport.getProjection().removeListener(this);
}
}
/**
*
*/
void startListeningToViewport()
{
_targetViewport.getProjection().addListener(this);
}
@Override
public void dispose()
{
// don't forget to stop listening for layer changes
_myOverviewChart.setLayers(null);
// and stop listening for projection changes
stopListeningToViewport();
if (_myPartMonitor != null)
{
_myPartMonitor.ditch();
}
super.dispose();
}
}
|
trunk/org.mwc.cmap.overview/src/org/mwc/cmap/overview/views/ChartOverview.java
|
package org.mwc.cmap.overview.views;
import java.awt.*;
import java.awt.Font;
import java.awt.Point;
import java.beans.*;
import java.util.Enumeration;
import org.eclipse.jface.action.*;
import org.eclipse.jface.viewers.ISelection;
import org.eclipse.swt.SWT;
import org.eclipse.swt.graphics.*;
import org.eclipse.swt.graphics.Image;
import org.eclipse.swt.graphics.Rectangle;
import org.eclipse.swt.widgets.*;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.ui.*;
import org.eclipse.ui.part.ViewPart;
import org.mwc.cmap.core.CorePlugin;
import org.mwc.cmap.core.interfaces.IControllableViewport;
import org.mwc.cmap.core.operations.DebriefActionWrapper;
import org.mwc.cmap.core.ui_support.PartMonitor;
import org.mwc.cmap.core.ui_support.swt.SWTCanvasAdapter;
import org.mwc.cmap.gt2plot.proj.GtProjection;
import org.mwc.cmap.overview.Activator;
import org.mwc.cmap.plotViewer.editors.chart.*;
import MWC.Algorithms.PlainProjection;
import MWC.GUI.*;
import MWC.GUI.CanvasType.PaintListener;
import MWC.GUI.Tools.Action;
import MWC.GenericData.*;
/**
* This sample class demonstrates how to plug-in a new workbench view. The view
* shows data obtained from the model. The sample creates a dummy model on the
* fly, but a real implementation would connect to the model available either in
* this or another plug-in (e.g. the workspace). The view is connected to the
* model using a content provider.
* <p>
* The view uses a label provider to define how model objects should be
* presented in the view. Each view can present the same model objects using
* different labels and icons, if needed. Alternatively, a single label provider
* can be shared between views in order to ensure that objects of the same type
* are presented in the same way everywhere.
* <p>
*/
public class ChartOverview extends ViewPart implements PropertyChangeListener
{
/**
* helper application to help track creation/activation of new plots
*/
private PartMonitor _myPartMonitor;
protected Layers _targetLayers;
OverviewSWTChart _myOverviewChart;
IControllableViewport _targetViewport;
private org.eclipse.jface.action.Action _fitToWindow;
private final GtProjection _myProjection;
/**
* The constructor.
*/
public ChartOverview()
{
_myProjection = new GtProjection();
}
/**
* This is a callback that will allow us to create the viewer and initialize
* it.
*/
public void createPartControl(final Composite parent)
{
// declare our context sensitive help
CorePlugin.declareContextHelp(parent, "org.mwc.debrief.help.OverviewChart");
// hey, first create the chart
_myOverviewChart = new OverviewSWTChart(parent)
{
/**
*
*/
private static final long serialVersionUID = 1L;
IControllableViewport getParentViewport()
{
return _targetViewport;
}
};
// use our special dragger
_myOverviewChart.setDragMode(new MyZoomMode());
// and update the chart
_myOverviewChart.setChartOverview(this);
// and our special painter
_myOverviewChart.getCanvas().addPainter(new PaintListener()
{
public WorldArea getDataArea()
{
return null;
}
public String getName()
{
return "Overview data area";
}
public void paintMe(final CanvasType dest)
{
// ok - just paint in our rectangle
paintDataRect(dest);
}
public void resizedEvent(final PlainProjection theProj,
final Dimension newScreenArea)
{
}
});
makeActions();
contributeToActionBars();
// /////////////////////////////////////////
// ok - listen out for changes in the view
// /////////////////////////////////////////
watchMyParts();
}
/**
* sort out what we're listening to...
*/
private void watchMyParts()
{
_myPartMonitor = new PartMonitor(getSite().getWorkbenchWindow()
.getPartService());
_myPartMonitor.addPartListener(Layers.class, PartMonitor.ACTIVATED,
new PartMonitor.ICallback()
{
public void eventTriggered(final String type, final Object part,
final IWorkbenchPart parentPart)
{
final Layers provider = (Layers) part;
// is this different to our current one?
if (provider != _targetLayers)
{
// ok, start listening to the new one
_targetLayers = provider;
plotSelected(provider, parentPart);
}
}
});
_myPartMonitor.addPartListener(Layers.class, PartMonitor.CLOSED,
new PartMonitor.ICallback()
{
public void eventTriggered(final String type, final Object part,
final IWorkbenchPart parentPart)
{
if (part == _targetLayers)
{
// cancel the listeners
plotSelected(null, null);
_targetLayers = null;
clearPlot();
}
}
});
_myPartMonitor.addPartListener(IControllableViewport.class,
PartMonitor.ACTIVATED, new PartMonitor.ICallback()
{
public void eventTriggered(final String type, final Object part,
final IWorkbenchPart parentPart)
{
final IControllableViewport provider = (IControllableViewport) part;
// is this different to our current one?
if (provider != _targetViewport)
{
// ok, stop listening to the current viewport (if we
// have one)
if (_targetViewport != null)
stopListeningToViewport();
// and start listening to the new one
_targetViewport = provider;
startListeningToViewport();
}
}
});
_myPartMonitor.addPartListener(IControllableViewport.class,
PartMonitor.CLOSED, new PartMonitor.ICallback()
{
public void eventTriggered(final String type, final Object part,
final IWorkbenchPart parentPart)
{
if (part == _targetViewport)
{
if (_targetViewport != null)
stopListeningToViewport();
_targetViewport = null;
}
}
});
// ok we're all ready now. just try and see if the current part is valid
_myPartMonitor.fireActivePart(getSite().getWorkbenchWindow()
.getActivePage());
}
/**
* paint the data-rectangle in our overview, to show the currently visible
* area
*
* @param dest
*/
protected void paintDataRect(final CanvasType dest)
{
// check we're alive
if (_targetViewport == null)
return;
// get the projection
final PlainProjection proj = _targetViewport.getProjection();
// get the dimensions
final java.awt.Dimension scrArea = proj.getScreenArea();
// did we find any data?
if (scrArea == null)
return;
// now convert to data coordinates
WorldLocation loc = proj.toWorld(new Point(0, 0));
// did it work?
if (loc == null)
return;
// produce the screen coordinate in the overview
final Point thePt = _myOverviewChart.getCanvas().getProjection()
.toScreen(loc);
// did it work?
if (thePt == null)
return;
// and the other corner
loc = proj.toWorld(new Point(scrArea.width, scrArea.height));
// create the screen coordinates
final Point tl = new Point(thePt);
final Point br = new Point(_myOverviewChart.getCanvas().getProjection()
.toScreen(loc));
//
// // also, draw in the data-area
// WorldArea dataRect = _currentViewport.getViewport();
// // convert to my coords
// java.awt.Point tl = new
// java.awt.Point(dest.getProjection().toScreen(dataRect.getTopLeft()));
// java.awt.Point br = new
// java.awt.Point(dest.getProjection().toScreen(dataRect.getBottomRight()));
dest.setColor(new java.awt.Color(200, 200, 200));
dest.drawRect(tl.x, tl.y, br.x - tl.x, br.y - tl.y);
}
/**
* disable the plot - we're no longer looking at anything...
*/
protected void clearPlot()
{
// ok - we're no longer looking at anything. clear the plot..
}
/**
* ok, a new plot is selected - better show it then
*
* @param provider
* the new plot
* @param parentPart
* the part containing the plot
*/
protected void plotSelected(final Layers provider,
final IWorkbenchPart parentPart)
{
// ok - update our chart to show the indicated plot.
_myOverviewChart.setLayers(provider);
_myOverviewChart.rescale();
_myOverviewChart.repaint();
// this.setPartName(parentPart.getTitle());
}
private void contributeToActionBars()
{
final IActionBars bars = getViewSite().getActionBars();
fillLocalPullDown(bars.getMenuManager());
fillLocalToolBar(bars.getToolBarManager());
}
private void fillLocalPullDown(final IMenuManager manager)
{
}
private void fillLocalToolBar(final IToolBarManager manager)
{
manager.add(_fitToWindow);
// and the help link
manager.add(new Separator());
manager.add(CorePlugin.createOpenHelpAction(
"org.mwc.debrief.help.OverviewChart", null, this));
}
private void makeActions()
{
_fitToWindow = new org.eclipse.jface.action.Action()
{
public void run()
{
// ok, fit the plot to the window...
fitTargetToWindow();
}
};
_fitToWindow.setText("Fit to window");
_fitToWindow
.setToolTipText("Zoom the selected plot out to show the full data");
_fitToWindow.setImageDescriptor(Activator
.getImageDescriptor("icons/fit_to_win.gif"));
}
/**
* Passing the focus request to the viewer's control.
*/
public void setFocus()
{
}
public class MyZoomMode extends SWTChart.PlotMouseDragger
{
org.eclipse.swt.graphics.Point _startPoint;
SWTCanvas _myCanvas;
public void doMouseDrag(final org.eclipse.swt.graphics.Point pt,
final int JITTER, final Layers theLayers, final SWTCanvas theCanvas)
{
// just do a check that we have our start point (it may have been
// cleared
// at the end of the move operation)
if (_startPoint != null)
{
final int deltaX = _startPoint.x - pt.x;
final int deltaY = _startPoint.y - pt.y;
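// ignore tiny movements: drags smaller than the JITTER threshold are treated as no drag at all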
if (Math.abs(deltaX) < JITTER && Math.abs(deltaY) < JITTER)
return;
Tracker _dragTracker = new Tracker((Composite) _myCanvas.getCanvas(),
SWT.RESIZE);
final Rectangle rect = new Rectangle(_startPoint.x, _startPoint.y,
deltaX, deltaY);
_dragTracker.setRectangles(new Rectangle[]
{ rect });
final boolean dragResult = _dragTracker.open();
if (dragResult)
{
final Rectangle[] rects = _dragTracker.getRectangles();
final Rectangle res = rects[0];
// get world area
final java.awt.Point tl = new java.awt.Point(res.x, res.y);
final java.awt.Point br = new java.awt.Point(res.x + res.width, res.y
+ res.height);
final WorldLocation locA = new WorldLocation(_myCanvas
.getProjection().toWorld(tl));
final WorldLocation locB = new WorldLocation(_myCanvas
.getProjection().toWorld(br));
final WorldArea area = new WorldArea(locA, locB);
// hmm, check we have a controllable viewport
if (_targetViewport != null)
{
try
{
// ok, we also need to get hold of the target chart
final WorldArea oldArea = _targetViewport.getViewport();
final Action theAction = new OverviewZoomInAction(
_targetViewport, oldArea, area);
// and wrap it
final DebriefActionWrapper daw = new DebriefActionWrapper(
theAction, null, null);
// and run it
CorePlugin.run(daw);
}
catch (final RuntimeException re)
{
re.printStackTrace();
}
}
_dragTracker = null;
_startPoint = null;
}
}
}
public void doMouseUp(final org.eclipse.swt.graphics.Point point,
final int keyState)
{
_startPoint = null;
}
public void mouseDown(final org.eclipse.swt.graphics.Point point,
final SWTCanvas canvas, final PlainChart theChart)
{
_startPoint = point;
_myCanvas = canvas;
}
}
public class OverviewZoomInAction implements Action
{
private final IControllableViewport _theViewport;
private final WorldArea _oldArea;
private final WorldArea _newArea;
public OverviewZoomInAction(final IControllableViewport theChart,
final WorldArea oldArea, final WorldArea newArea)
{
_theViewport = theChart;
_oldArea = oldArea;
_newArea = newArea;
}
public boolean isRedoable()
{
return true;
}
public boolean isUndoable()
{
return true;
}
public String toString()
{
return "Zoom in operation";
}
public void undo()
{
// set the data area for the chart to the specified area
_theViewport.setViewport(_oldArea);
_theViewport.update();
_myOverviewChart.update();
}
public void execute()
{
_theViewport.setViewport(_newArea);
_theViewport.update();
_myOverviewChart.update();
}
}
public MWC.GUI.Rubberband getRubberband()
{
return null;
}
protected class OverviewSWTCanvas extends SWTCanvas
{
public OverviewSWTCanvas(final Composite parent)
{
super(parent, _myProjection);
}
/**
*
*/
private static final long serialVersionUID = 1L;
public void drawText(final Font theFont, final String theStr, final int x,
final int y)
{
// ignore - we don't do text in overview
}
public void drawText(final String theStr, final int x, final int y)
{
// ignore - we don't do text in overview
}
}
protected static class OverviewSWTCanvasAdapter extends SWTCanvasAdapter
{
public OverviewSWTCanvasAdapter(final PlainProjection proj)
{
super(proj);
}
/**
*
*/
private static final long serialVersionUID = 1L;
public void drawText(final Font theFont, final String theStr, final int x,
final int y)
{
// ignore - we don't do text in overview
}
public void drawText(final String theStr, final int x, final int y)
{
// ignore - we don't do text in overview
}
}
abstract public class OverviewSWTChart extends SWTChart
{
ChartOverview _parentView;
/**
*
*/
private static final long serialVersionUID = 1L;
abstract IControllableViewport getParentViewport();
public OverviewSWTChart(final Composite parent)
{
super(null, parent, _myProjection);
// ok, set up double-click handler to centre the target plot on the clicked location
this.addCursorDblClickedListener(new ChartDoubleClickListener()
{
public void cursorDblClicked(final PlainChart theChart,
final WorldLocation theLocation, final Point thePoint)
{
// ok - got the location; centre the plot on it
final WorldArea currentArea = new WorldArea(getParentViewport()
.getViewport());
currentArea.setCentre(theLocation);
getParentViewport().setViewport(currentArea);
// and trigger an update
getParentViewport().update();
}
});
}
public void setChartOverview(final ChartOverview view)
{
_parentView = view;
}
public void chartFireSelectionChanged(final ISelection sel)
{
}
/**
* over-ride the parent's version of paint, so that we can try to do it by
* layers.
*/
public final void paintMe(final CanvasType dest)
{
// just double-check we have some layers (if we're part of an
// overview
// chart, we may not have...)
if (_theLayers == null)
return;
// check that we have a valid canvas (that the sizes are set)
final java.awt.Dimension sArea = dest.getProjection().getScreenArea();
if (sArea != null)
{
if (sArea.width > 0)
{
// hey, we've plotted at least once, has the data area
// changed?
if (_lastDataArea != _parentView._myOverviewChart.getCanvas()
.getProjection().getDataArea())
{
// remember the data area for next time
_lastDataArea = _parentView._myOverviewChart.getCanvas()
.getProjection().getDataArea();
// clear out all of the layers we are using
_myLayers.clear();
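// (note: _myLayers caches one rendered image per layer - clearing it forces every
// buffered layer to be re-drawn against the new data area)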
}
final int canvasHeight = _parentView._myOverviewChart.getCanvas()
.getSize().getSize().height;
final int canvasWidth = _parentView._myOverviewChart.getCanvas()
.getSize().width;
paintBackground(dest);
// ok, pass through the layers, repainting any which need it
final Enumeration<Layer> numer = _theLayers.sortedElements();
while (numer.hasMoreElements())
{
final Layer thisLayer = (Layer) numer.nextElement();
boolean isAlreadyPlotted = false;
// hmm, do we want to paint this layer?
if (_parentView.doWePaintThisLayer(thisLayer))
{
// just check if this layer is visible
if (thisLayer.getVisible())
{
// System.out.println("painting:" +
// thisLayer.getName());
if (doubleBufferPlot())
{
// check we're plotting to an SWTCanvas,
// because we don't
// double-buffer anything else
if (dest instanceof SWTCanvas)
{
// does this layer want to be
// double-buffered?
if (thisLayer instanceof BaseLayer)
{
// just check if there is a property
// which over-rides the
// double-buffering
final BaseLayer bl = (BaseLayer) thisLayer;
if (bl.isBuffered())
{
isAlreadyPlotted = true;
// do our double-buffering bit
// do we have a cached image for
// this layer object?
org.eclipse.swt.graphics.Image image = (org.eclipse.swt.graphics.Image) _myLayers
.get(thisLayer);
if (image == null)
{
// ok - create our image
if (_myImageTemplate == null)
{
Image tmpTemplate = new Image(Display.getCurrent(),
canvasWidth, canvasHeight);
_myImageTemplate = tmpTemplate.getImageData();
tmpTemplate.dispose();
tmpTemplate = null;
}
image = createSWTImage(_myImageTemplate);
final GC newGC = new GC(image);
// wrap the GC into
// something we know how to
// plot to.
final SWTCanvasAdapter ca = new OverviewSWTCanvasAdapter(
dest.getProjection());
ca.setScreenSize(dest.getProjection().getScreenArea());
// and store the GC
ca.startDraw(newGC);
// ok, paint the layer into
// this canvas
thisLayer.paint(ca);
// done.
ca.endDraw(null);
// store this image in our
// list, indexed by the
// layer
// object itself
_myLayers.put(thisLayer, image);
// and ditch the GC
newGC.dispose();
}
// have we ended up with an
// image to paint?
if (image != null)
{
// get the graphics to paint
// to
final SWTCanvas canv = (SWTCanvas) dest;
// lastly add this image to
// our Graphics object
canv.drawSWTImage(image, 0, 0, canvasWidth,
canvasHeight, 255);
// but, we also have to
// ditch the image
// image.dispose();
}
}
}
} // whether we were plotting to an
// SWTCanvas (which may be
// double-buffered)
} // whether we are happy to do double-buffering
// did we manage to paint it
if (!isAlreadyPlotted)
{
paintThisLayer(thisLayer, dest);
isAlreadyPlotted = true;
}
}
}
}
}
}
}
/**
* over-rideable member function which allows us to over-ride the canvas
* which gets used.
*
* @return the Canvas to use
*/
public final SWTCanvas createCanvas(final Composite parent)
{
return new OverviewSWTCanvas(parent);
}
}
/**
* do a fit-to-window of the target viewport
*/
protected void fitTargetToWindow()
{
_targetViewport.rescale();
_targetViewport.update();
// now, redraw our rectangle
_myOverviewChart.repaint();
}
/**
* decide whether to paint this layer...
*
* @param thisLayer
* the layer we're looking at
* @return
*/
public boolean doWePaintThisLayer(final Layer thisLayer)
{
final boolean res = true;
// no, don't check for ETOPO data - just paint the lot.
// if (thisLayer instanceof SpatialRasterPainter)
// res = false;
return res;
}
public void propertyChange(final PropertyChangeEvent evt)
{
// ok, we've had a range change. better update
_myOverviewChart.repaint();
}
/**
*
*/
void stopListeningToViewport()
{
if (_targetViewport != null)
if (_targetViewport.getProjection() != null)
{
_targetViewport.getProjection().removeListener(this);
}
}
/**
*
*/
void startListeningToViewport()
{
_targetViewport.getProjection().addListener(this);
}
@Override
public void dispose()
{
// don't forget to stop listening for layer changes
_myOverviewChart.setLayers(null);
// and stop listening for projection changes
stopListeningToViewport();
if (_myPartMonitor != null)
{
_myPartMonitor.ditch();
}
super.dispose();
}
}
|
hotfix - avoid NPE in chart overview when no map visible
|
trunk/org.mwc.cmap.overview/src/org/mwc/cmap/overview/views/ChartOverview.java
|
hotfix - avoid NPE in chart overview when no map visible
|
<ide><path>runk/org.mwc.cmap.overview/src/org/mwc/cmap/overview/views/ChartOverview.java
<ide> {
<ide> // ok - update our chart to show the indicated plot.
<ide> _myOverviewChart.setLayers(provider);
<del> _myOverviewChart.rescale();
<add> if (provider != null)
<add> {
<add> _myOverviewChart.rescale();
<add> }
<ide> _myOverviewChart.repaint();
<ide> // this.setPartName(parentPart.getTitle());
<ide> }
|
|
Java
|
apache-2.0
|
766ea3d9f8fbd4ada9e8cc2be87c46bbde453a4f
| 0 |
OmerMachluf/Mykeyboard,AnySoftKeyboard/AnySoftKeyboard,AnySoftKeyboard/AnySoftKeyboard,AnySoftKeyboard/AnySoftKeyboard,OmerMachluf/Mykeyboard,OmerMachluf/Mykeyboard,OmerMachluf/Mykeyboard,OmerMachluf/Mykeyboard,AnySoftKeyboard/AnySoftKeyboard,OmerMachluf/Mykeyboard,AnySoftKeyboard/AnySoftKeyboard,AnySoftKeyboard/AnySoftKeyboard,OmerMachluf/Mykeyboard,AnySoftKeyboard/AnySoftKeyboard
|
/*
* Copyright (C) 2011 AnySoftKeyboard.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.anysoftkeyboard;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import android.app.AlertDialog;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.IntentFilter;
import android.content.SharedPreferences;
import android.content.SharedPreferences.Editor;
import android.content.SharedPreferences.OnSharedPreferenceChangeListener;
import android.content.res.Configuration;
import android.content.res.TypedArray;
import android.graphics.drawable.Drawable;
import android.inputmethodservice.InputMethodService;
import android.media.AudioManager;
import android.net.Uri;
import android.os.Debug;
import android.os.Handler;
import android.os.IBinder;
import android.os.Message;
import android.os.SystemClock;
import android.os.Vibrator;
import android.preference.PreferenceManager;
import android.text.AutoText;
import android.text.ClipboardManager;
import android.text.TextUtils;
import android.util.Log;
import android.util.TypedValue;
import android.view.KeyEvent;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewGroup;
import android.view.Window;
import android.view.WindowManager;
import android.view.inputmethod.CompletionInfo;
import android.view.inputmethod.EditorInfo;
import android.view.inputmethod.InputConnection;
import android.view.inputmethod.InputMethodManager;
import android.widget.ImageView;
import android.widget.SimpleAdapter;
import android.widget.TextView;
import android.widget.Toast;
import com.anysoftkeyboard.api.KeyCodes;
import com.anysoftkeyboard.dictionaries.AddableDictionary;
import com.anysoftkeyboard.dictionaries.AutoDictionary;
import com.anysoftkeyboard.dictionaries.Dictionary;
import com.anysoftkeyboard.dictionaries.DictionaryAddOnAndBuilder;
import com.anysoftkeyboard.dictionaries.DictionaryFactory;
import com.anysoftkeyboard.dictionaries.ExternalDictionaryFactory;
import com.anysoftkeyboard.dictionaries.Suggest;
import com.anysoftkeyboard.dictionaries.TextEntryState;
import com.anysoftkeyboard.keyboards.AnyKeyboard;
import com.anysoftkeyboard.keyboards.AnyKeyboard.HardKeyboardTranslator;
import com.anysoftkeyboard.keyboards.GenericKeyboard;
import com.anysoftkeyboard.keyboards.Keyboard;
import com.anysoftkeyboard.keyboards.KeyboardAddOnAndBuilder;
import com.anysoftkeyboard.keyboards.KeyboardSwitcher;
import com.anysoftkeyboard.keyboards.KeyboardSwitcher.NextKeyboardType;
import com.anysoftkeyboard.keyboards.physical.HardKeyboardActionImpl;
import com.anysoftkeyboard.keyboards.physical.MyMetaKeyKeyListener;
import com.anysoftkeyboard.keyboards.views.AnyKeyboardBaseView.OnKeyboardActionListener;
import com.anysoftkeyboard.keyboards.views.AnyKeyboardView;
import com.anysoftkeyboard.keyboards.views.CandidateView;
import com.anysoftkeyboard.quicktextkeys.QuickTextKey;
import com.anysoftkeyboard.quicktextkeys.QuickTextKeyFactory;
import com.anysoftkeyboard.theme.KeyboardTheme;
import com.anysoftkeyboard.theme.KeyboardThemeFactory;
import com.anysoftkeyboard.ui.settings.MainSettings;
import com.anysoftkeyboard.ui.tutorials.TutorialsProvider;
import com.anysoftkeyboard.utils.ModifierKeyState;
import com.anysoftkeyboard.utils.Workarounds;
import com.menny.android.anysoftkeyboard.AnyApplication;
import com.menny.android.anysoftkeyboard.R;
/**
* Input method implementation for Qwerty'ish keyboard.
*/
public class AnySoftKeyboard extends InputMethodService implements
OnKeyboardActionListener,
OnSharedPreferenceChangeListener, AnyKeyboardContextProvider {
private final static String TAG = "ASK";
private final static int SWIPE_CORD = -2;
/*
public final static String NOTIFY_LAYOUT_SWITCH = "com.menny.android.anysoftkeyboard.api.NOTIFY_LAYOUT_SWITCH";
//API
private static final String NOTIFY_LAYOUT_SWITCH_CURRENT_LAYOUT_RESID = "current_layout_resid";
private static final String NOTIFY_LAYOUT_SWITCH_CURRENT_LAYOUT_NAME = "current_layout_name";
private static final String NOTIFY_LAYOUT_SWITCH_CURRENT_LAYOUT_PACKAGE = "current_layout_package";
private static final String NOTIFY_LAYOUT_SWITCH_NOTIFICATION_FLAGS = "notification_flags";
private static final String NOTIFY_LAYOUT_SWITCH_NOTIFICATION_TITLE = "notification_title";
*/
private final boolean TRACE_SDCARD = false;
private static final int MSG_UPDATE_SUGGESTIONS = 0;
//private static final int MSG_START_TUTORIAL = 1;
//private static final int MSG_UPDATE_SHIFT_STATE = 2;
//private static final int KEYBOARD_NOTIFICATION_ID = 1;
/*
private static final HashSet<Integer> SPACE_SWAP_CHARACTERS = new HashSet<Integer>(
6);
private static final HashSet<Integer> PUNCTUATION_CHARACTERS = new HashSet<Integer>(
16);
static {
String src = ".\n!?,:;@<>()[]{}";
for (int i = 0; i < src.length(); ++i)
PUNCTUATION_CHARACTERS.add((int) src.charAt(i));
src = ".!?,:;";
for (int i = 0; i < src.length(); ++i)
SPACE_SWAP_CHARACTERS.add((int) src.charAt(i));
}
*/
// Keep track of the last selection range to decide if we need to show word alternatives
private int mLastSelectionStart;
private int mLastSelectionEnd;
private final com.anysoftkeyboard.Configuration mConfig;
private static final boolean DEBUG = AnyApplication.DEBUG;
private ModifierKeyState mShiftKeyState = new ModifierKeyState();
private ModifierKeyState mControlKeyState = new ModifierKeyState();
private AnyKeyboardView mInputView;
private CandidateView mCandidateView;
private static final long MINIMUM_REFRESH_TIME_FOR_DICTIONARIES = 30*1000;
private long mLastDictionaryRefresh = -1;
private Suggest mSuggest;
private CompletionInfo[] mCompletions;
private AlertDialog mOptionsDialog;
private AlertDialog mQuickTextKeyDialog;
KeyboardSwitcher mKeyboardSwitcher;
private final HardKeyboardActionImpl mHardKeyboardAction;
private long mMetaState;
private AnyKeyboard mCurrentKeyboard = null;
private HashSet<Character> mSentenceSeparators = new HashSet<Character>();
//private UserDictionaryBase mContactsDictionary;
private AddableDictionary mUserDictionary;
private AutoDictionary mAutoDictionary;
private StringBuilder mComposing = new StringBuilder();
private WordComposer mWord = new WordComposer();
private int mOrientation = Configuration.ORIENTATION_PORTRAIT;
private int mCommittedLength;
/*
* Do we do prediction now
*/
private boolean mPredicting;
private CharSequence mBestWord;
private final boolean mPredictionLandscape = false;
/*
* is prediction needed for the current input connection
*/
private boolean mPredictionOn;
/*
* is out-side completions needed
*/
private boolean mCompletionOn;
private boolean mAutoSpace;
private boolean mAutoCorrectOn;
private boolean mCapsLock;
private static final String SMILEY_PLUGIN_ID = "0077b34d-770f-4083-83e4-081957e06c27";
private boolean mSmileyOnShortPress;
private String mOverrideQuickTextText = null;
private boolean mAutoCap;
private boolean mQuickFixes;
/*
* Configuration flag. Should we support dictionary suggestions
*/
private boolean mShowSuggestions = false;
private boolean mAutoComplete;
private int mCorrectionMode;
private String mKeyboardChangeNotificationType;
private static final String KEYBOARD_NOTIFICATION_ALWAYS = "1";
private static final String KEYBOARD_NOTIFICATION_ON_PHYSICAL = "2";
private static final String KEYBOARD_NOTIFICATION_NEVER = "3";
// Indicates whether the suggestion strip is to be on in landscape
private boolean mJustAccepted;
private CharSequence mJustRevertedSeparator;
private AudioManager mAudioManager;
private boolean mSilentMode;
private boolean mSoundOn;
// between 0..100. This is the custom volume
private int mSoundVolume;
private Vibrator mVibrator;
private int mVibrationDuration;
//private NotificationManager mNotificationManager;
private static AnySoftKeyboard INSTANCE;
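// Handler used to run deferred work on the UI thread: MSG_UPDATE_SUGGESTIONS messages
// (posted via postUpdateSuggestions) end up in performUpdateSuggestions() below, which
// presumably lets a burst of key presses share a single suggestions recalculation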
Handler mHandler = new Handler() {
@Override
public void handleMessage(Message msg) {
switch (msg.what) {
case MSG_UPDATE_SUGGESTIONS:
performUpdateSuggestions();
break;
// case MSG_UPDATE_SHIFT_STATE:
// updateShiftKeyState(getCurrentInputEditorInfo());
// break;
default:
super.handleMessage(msg);
}
}
};
private boolean mJustAddedAutoSpace;
private static final int LAST_CHAR_SHIFT_STATE_DEFAULT = 0;
//private static final int LAST_CHAR_SHIFT_STATE_UNSHIFTED = 1;
private static final int LAST_CHAR_SHIFT_STATE_SHIFTED = 2;
private int mLastCharacterShiftState = LAST_CHAR_SHIFT_STATE_DEFAULT;
protected IBinder mImeToken = null;
private InputMethodManager mInputMethodManager;
public static AnySoftKeyboard getInstance() {
return INSTANCE;
}
public AnySoftKeyboard() {
// mGenericKeyboardTranslator = new
// GenericPhysicalKeyboardTranslator(this);
mConfig = AnyApplication.getConfig();
mHardKeyboardAction = new HardKeyboardActionImpl();
INSTANCE = this;
}
@Override
public AbstractInputMethodImpl onCreateInputMethodInterface() {
return new InputMethodImpl()
{
@Override
public void attachToken(IBinder token) {
super.attachToken(token);
mImeToken = token;
}
};
}
@Override
public void onCreate() {
super.onCreate();
// super.showStatusIcon(R.drawable.icon_8_key);
Log.i(TAG, "****** AnySoftKeyboard service started.");
Thread.setDefaultUncaughtExceptionHandler(new ChewbaccaUncaughtExceptionHandler(getApplication().getBaseContext(), null));
mInputMethodManager = (InputMethodManager)getSystemService(INPUT_METHOD_SERVICE);
//mNotificationManager = (NotificationManager) getSystemService(Context.NOTIFICATION_SERVICE);
mAudioManager = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
updateRingerMode();
// register to receive ringer mode changes for silent mode
IntentFilter filter = new IntentFilter(AudioManager.RINGER_MODE_CHANGED_ACTION);
registerReceiver(mReceiver, filter);
mVibrator = ((Vibrator) getSystemService(Context.VIBRATOR_SERVICE));
// setStatusIcon(R.drawable.ime_qwerty);
loadSettings();
mKeyboardSwitcher = new KeyboardSwitcher(this);
mOrientation = getResources().getConfiguration().orientation;
SharedPreferences sp = PreferenceManager
.getDefaultSharedPreferences(this);
sp.registerOnSharedPreferenceChangeListener(this);
mCurrentKeyboard = mKeyboardSwitcher.getCurrentKeyboard();
mSentenceSeparators = mCurrentKeyboard.getSentenceSeparators();
if (mSuggest == null) {
initSuggest(/* getResources().getConfiguration().locale.toString() */);
}
if (mKeyboardChangeNotificationType.equals(KEYBOARD_NOTIFICATION_ALWAYS))// should it be always on?
notifyKeyboardChangeIfNeeded();
}
@Override
public void onUnbindInput() {
if (AnyApplication.DEBUG) Log.d(TAG, "onUnbindInput");
super.onUnbindInput();
}
private void initSuggest(/* String locale */) {
// mLocale = locale;
mSuggest = new Suggest(this/* , R.raw.main */);
mSuggest.setCorrectionMode(mCorrectionMode);
setDictionariesForCurrentKeyboard();
}
@Override
public void onDestroy() {
Log.i(TAG, "AnySoftKeyboard has been destroyed! Cleaning resources..");
//DictionaryFactory.getInstance().close();
// unregisterReceiver(mReceiver);
SharedPreferences sp = PreferenceManager
.getDefaultSharedPreferences(this);
sp.unregisterOnSharedPreferenceChangeListener(this);
unregisterReceiver(mReceiver);
mInputMethodManager.hideStatusIcon(mImeToken);
//mNotificationManager.cancel(KEYBOARD_NOTIFICATION_ID);
/*
Intent i = new Intent(NOTIFY_LAYOUT_SWITCH);
//some dummy package, so that everybody removes the notification
i.putExtra(NOTIFY_LAYOUT_SWITCH_CURRENT_LAYOUT_PACKAGE, "NO_SUCH_PACKAGE");
sendBroadcast(i);
*/
TutorialsProvider.onServiceDestroy();
super.onDestroy();
}
@Override
public void onFinishInputView(boolean finishingInput) {
if (DEBUG)
Log.d(TAG, "onFinishInputView(finishingInput:"
+ finishingInput + ")");
super.onFinishInputView(finishingInput);
if (!mKeyboardChangeNotificationType
.equals(KEYBOARD_NOTIFICATION_ALWAYS)) {
mInputMethodManager.hideStatusIcon(mImeToken);
//mNotificationManager.cancel(KEYBOARD_NOTIFICATION_ID);
}
// Remove pending messages related to update suggestions
mHandler.removeMessages(MSG_UPDATE_SUGGESTIONS);
}
@Override
public View onCreateInputView() {
if (DEBUG) Log.v(TAG, "Creating Input View");
mInputView = (AnyKeyboardView) getLayoutInflater().inflate(R.layout.main_keyboard_layout, null);
//resetting token users
mOptionsDialog = null;
mQuickTextKeyDialog = null;
mKeyboardSwitcher.setInputView(mInputView);
mInputView.setOnKeyboardActionListener(this);
return mInputView;
}
@Override
public View onCreateCandidatesView() {
mKeyboardSwitcher.makeKeyboards(false);
final ViewGroup candidateViewContainer = (ViewGroup) getLayoutInflater().inflate(R.layout.candidates, null);
mCandidateView = (CandidateView) candidateViewContainer.findViewById(R.id.candidates);
mCandidateView.setService(this);
setCandidatesViewShown(true);
final KeyboardTheme theme = KeyboardThemeFactory.getCurrentKeyboardTheme(AnySoftKeyboard.getInstance());
final TypedArray a = theme.getPackageContext().obtainStyledAttributes(null, R.styleable.AnyKeyboardBaseView, 0, theme.getThemeResId());
final int closeTextColor = a.getColor(R.styleable.AnyKeyboardBaseView_suggestionOthersTextColor, getResources().getColor(R.color.candidate_other));
final float fontSizePixel = a.getDimension(R.styleable.AnyKeyboardBaseView_suggestionTextSize, getResources().getDimensionPixelSize(R.dimen.candidate_font_height));
a.recycle();
mCandidateCloseText = (TextView)candidateViewContainer.findViewById(R.id.close_suggestions_strip_text);
mCandidateCloseText.setTextColor(closeTextColor);
mCandidateCloseText.setTextSize(TypedValue.COMPLEX_UNIT_PX, fontSizePixel);
View closeIcon = candidateViewContainer.findViewById(R.id.close_suggestions_strip_icon);
if (closeIcon != null)
{
closeIcon.setOnClickListener(new OnClickListener() {
private final static long DOUBLE_TAP_TIMEOUT = 2 * 1000;//two seconds is enough
private long mFirstClickTime = 0;
public void onClick(View v) {
final long currentTime = SystemClock.elapsedRealtime();
if (currentTime - mFirstClickTime < DOUBLE_TAP_TIMEOUT)
{
abortCorrection(true);
}
else
{
// Toast.makeText(getApplicationContext(), "Press close icon again to dismiss suggestions", Toast.LENGTH_SHORT).show();
/* List<CharSequence> l = new ArrayList<CharSequence>();
l.add(mHintText);
mCandidateView.setSuggestions(l, false, false, false);*/
mCandidateView.setSuggestions(null, false, false, false);
if (mCandidateCloseText != null) mCandidateCloseText.setVisibility(View.VISIBLE);
postUpdateSuggestions(DOUBLE_TAP_TIMEOUT - 50);
}
mFirstClickTime = currentTime;
}
});
}
return candidateViewContainer;
}
@Override
public void onStartInputView(EditorInfo attribute, boolean restarting) {
if (DEBUG) Log.d(TAG, "onStartInputView(EditorInfo:"
+ attribute.imeOptions + "," + attribute.inputType
+ ", restarting:" + restarting + ")");
super.onStartInputView(attribute, restarting);
if (mInputView == null) {
return;
}
mKeyboardSwitcher.makeKeyboards(false);
TextEntryState.newSession(this);
if (!restarting) {
// Clear shift states.
mMetaState = 0;
}
mPredictionOn = false;
mCompletionOn = false;
mCompletions = null;
mCapsLock = false;
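// pick an initial keyboard mode from the EditorInfo input class: datetime, numbers
// and phone get dedicated keyboards, while text inputs are examined in more detail below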
switch (attribute.inputType & EditorInfo.TYPE_MASK_CLASS)
{
case EditorInfo.TYPE_CLASS_DATETIME:
if (DEBUG) Log.d(TAG, "Setting MODE_DATETIME as keyboard due to a TYPE_CLASS_DATETIME input.");
mKeyboardSwitcher.setKeyboardMode(KeyboardSwitcher.MODE_DATETIME, attribute);
break;
case EditorInfo.TYPE_CLASS_NUMBER:
if (DEBUG) Log.d(TAG, "Setting MODE_NUMBERS as keyboard due to a TYPE_CLASS_NUMBER input.");
mKeyboardSwitcher.setKeyboardMode(KeyboardSwitcher.MODE_NUMBERS, attribute);
break;
case EditorInfo.TYPE_CLASS_PHONE:
if (DEBUG) Log.d(TAG, "Setting MODE_PHONE as keyboard due to a TYPE_CLASS_PHONE input.");
mKeyboardSwitcher.setKeyboardMode(KeyboardSwitcher.MODE_PHONE, attribute);
break;
case EditorInfo.TYPE_CLASS_TEXT:
if (DEBUG) Log.d(TAG, "A TYPE_CLASS_TEXT input.");
final int variation = attribute.inputType & EditorInfo.TYPE_MASK_VARIATION;
switch(variation)
{
case EditorInfo.TYPE_TEXT_VARIATION_PASSWORD:
case EditorInfo.TYPE_TEXT_VARIATION_VISIBLE_PASSWORD:
if (DEBUG) Log.d(TAG, "A password TYPE_CLASS_TEXT input with no prediction");
mPredictionOn = false;
break;
default:
mPredictionOn = true;
}
if (mConfig.getInsertSpaceAfterCandidatePick())
{
switch(variation)
{
case EditorInfo.TYPE_TEXT_VARIATION_EMAIL_ADDRESS:
case EditorInfo.TYPE_TEXT_VARIATION_URI:
mAutoSpace = false;
break;
default:
mAutoSpace = true;
}
}
else
{
//some users don't want auto-space
mAutoSpace = false;
}
switch(variation)
{
case EditorInfo.TYPE_TEXT_VARIATION_EMAIL_ADDRESS:
if (DEBUG) Log.d(TAG, "Setting MODE_EMAIL as keyboard due to a TYPE_TEXT_VARIATION_EMAIL_ADDRESS input.");
mKeyboardSwitcher.setKeyboardMode(KeyboardSwitcher.MODE_EMAIL, attribute);
mPredictionOn = false;
break;
case EditorInfo.TYPE_TEXT_VARIATION_URI:
if (DEBUG) Log.d(TAG, "Setting MODE_URL as keyboard due to a TYPE_TEXT_VARIATION_URI input.");
mKeyboardSwitcher.setKeyboardMode(KeyboardSwitcher.MODE_URL, attribute);
break;
case EditorInfo.TYPE_TEXT_VARIATION_SHORT_MESSAGE:
if (DEBUG) Log.d(TAG, "Setting MODE_IM as keyboard due to a TYPE_TEXT_VARIATION_SHORT_MESSAGE input.");
mKeyboardSwitcher.setKeyboardMode(KeyboardSwitcher.MODE_IM, attribute);
break;
default:
if (DEBUG) Log.d(TAG, "Setting MODE_TEXT as keyboard due to a default input.");
mKeyboardSwitcher.setKeyboardMode(KeyboardSwitcher.MODE_TEXT, attribute);
}
final int textFlag = attribute.inputType & EditorInfo.TYPE_MASK_FLAGS;
switch(textFlag)
{
case 0x00080000://FROM API 5: EditorInfo.TYPE_TEXT_FLAG_NO_SUGGESTIONS:
case EditorInfo.TYPE_TEXT_FLAG_AUTO_COMPLETE:
if (DEBUG) Log.d(TAG, "Input requested NO_SUGGESTIONS, or it is AUTO_COMPLETE by itself.");
mPredictionOn = false;
break;
default:
//we'll keep the previous mPredictionOn value
}
break;
default:
if (DEBUG) Log.d(TAG, "Setting MODE_TEXT as keyboard due to a default input.");
//No class. Probably a console window, or no GUI input connection
mKeyboardSwitcher.setKeyboardMode(KeyboardSwitcher.MODE_TEXT, attribute);
mPredictionOn = false;
mAutoSpace = true;
}
mInputView.closing();
if (AutoText.getSize(mInputView) < 1)
mQuickFixes = true;
mComposing.setLength(0);
mPredicting = false;
//mDeleteCount = 0;
mJustAddedAutoSpace = false;
setCandidatesViewShown(false);
// loadSettings();
updateShiftKeyState(attribute);
if (mSuggest != null) {
mSuggest.setCorrectionMode(mCorrectionMode);
}
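// prediction is only useful when some correction mode is enabled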
mPredictionOn = mPredictionOn && mCorrectionMode > 0;
if (mCandidateView != null)
mCandidateView.setSuggestions(null, false, false, false);
if (mPredictionOn)
{
if ((SystemClock.elapsedRealtime() - mLastDictionaryRefresh) > MINIMUM_REFRESH_TIME_FOR_DICTIONARIES)
setDictionariesForCurrentKeyboard();
}
else
{
//this will release memory
setDictionariesForCurrentKeyboard();
}
if (TRACE_SDCARD)
Debug.startMethodTracing("anysoftkeyboard_log.trace");
}
@Override
public void onFinishInput() {
if (DEBUG)
Log.d(TAG, "onFinishInput()");
super.onFinishInput();
if (mInputView != null) {
mInputView.closing();
}
if (!mKeyboardChangeNotificationType
.equals(KEYBOARD_NOTIFICATION_ALWAYS)) {
mInputMethodManager.hideStatusIcon(mImeToken);
//mNotificationManager.cancel(KEYBOARD_NOTIFICATION_ID);
// Intent i = new Intent(NOTIFY_LAYOUT_SWITCH);
// i.putExtra(NOTIFY_LAYOUT_SWITCH_CURRENT_LAYOUT_PACKAGE, NOTIFY_LAYOUT_SWITCH);//some dummy package, so that everybody removes the notification
// sendBroadcast(i);
}
// releasing some memory. Dictionaries, completions, etc.
if (mAutoDictionary != null) mAutoDictionary.flushPendingWrites();
System.gc();
}
///this function is called EVERY TIME the selection is changed. This also includes the underlined
///suggestions.
@Override
public void onUpdateSelection(int oldSelStart, int oldSelEnd,
int newSelStart, int newSelEnd,
int candidatesStart, int candidatesEnd) {
super.onUpdateSelection(oldSelStart, oldSelEnd, newSelStart, newSelEnd,
candidatesStart, candidatesEnd);
if (DEBUG) {
Log.i(TAG, "onUpdateSelection: oss=" + oldSelStart
+ ", ose=" + oldSelEnd
+ ", nss=" + newSelStart
+ ", nse=" + newSelEnd
+ ", cs=" + candidatesStart
+ ", ce=" + candidatesEnd);
}
// If the current selection in the text view changes, we should
// clear whatever candidate text we have.
if ((((mComposing.length() > 0 && mPredicting) /*|| mVoiceInputHighlighted*/)
&& (newSelStart != candidatesEnd
|| newSelEnd != candidatesEnd)
&& mLastSelectionStart != newSelStart)) {
mComposing.setLength(0);
mPredicting = false;
postUpdateSuggestions();
TextEntryState.reset();
InputConnection ic = getCurrentInputConnection();
if (ic != null) {
ic.finishComposingText();
}
//mVoiceInputHighlighted = false;
} else if (!mPredicting && !mJustAccepted) {
switch (TextEntryState.getState()) {
case ACCEPTED_DEFAULT:
TextEntryState.reset();
// fall through
case SPACE_AFTER_PICKED:
mJustAddedAutoSpace = false; // The user moved the cursor.
break;
}
}
mJustAccepted = false;
//postUpdateShiftKeyState();
updateShiftKeyState(getCurrentInputEditorInfo());
// Make a note of the cursor position
mLastSelectionStart = newSelStart;
mLastSelectionEnd = newSelEnd;
}
private void onPhysicalKeyboardKeyPressed() {
if (mConfig.hideSoftKeyboardWhenPhysicalKeyPressed()) hideWindow();
}
@Override
public void hideWindow() {
if (TRACE_SDCARD)
Debug.stopMethodTracing();
if (mOptionsDialog != null && mOptionsDialog.isShowing()) {
mOptionsDialog.dismiss();
mOptionsDialog = null;
}
if (mQuickTextKeyDialog != null && mQuickTextKeyDialog.isShowing()) {
mQuickTextKeyDialog.dismiss();
mQuickTextKeyDialog = null;
}
// if (mTutorial != null) {
// mTutorial.close();
// mTutorial = null;
// }
super.hideWindow();
TextEntryState.endSession();
}
@Override
public void onDisplayCompletions(CompletionInfo[] completions) {
if (DEBUG) {
Log.i(TAG, "Received completions:");
for (int i = 0; i < (completions != null ? completions.length : 0); i++) {
Log.i(TAG, " #" + i + ": " + completions[i]);
}
}
//completions should be shown if the dictionary requires it, or if we are in full-screen and have outside completions
if (mCompletionOn || (isFullscreenMode() && (completions != null))) {
if (DEBUG) Log.v(TAG, "Received completions: completion should be shown: "+mCompletionOn+" fullscreen:"+isFullscreenMode());
mCompletions = completions;
//we do completions :)
mCompletionOn = true;
if (completions == null) {
if (DEBUG) Log.v(TAG, "Received completions: completion is NULL. Clearing suggestions.");
mCandidateView.setSuggestions(null, false, false, false);
return;
}
List<CharSequence> stringList = new ArrayList<CharSequence>();
for (int i = 0; i < (completions != null ? completions.length : 0); i++) {
CompletionInfo ci = completions[i];
if (ci != null)
stringList.add(ci.getText());
}
if (DEBUG) Log.v(TAG, "Received completions: setting to suggestions view "+stringList.size()+ " completions.");
// CharSequence typedWord = mWord.getTypedWord();
setSuggestions(stringList, true, true, true);
mBestWord = null;
//I mean, if I'm here, it must be shown...
setCandidatesViewShown(true);
}
else if (DEBUG) Log.v(TAG, "Received completions: completions should not be shown.");
}
/*
@Override
public void setCandidatesViewShown(boolean shown) {
// we show prediction only in the on-screen keyboard
// (onEvaluateInputViewShown)
// or if the physical keyboard supports candidates
// (mPredictionLandscape)
super.setCandidatesViewShown(shouldCandidatesStripBeShown() && shown);
}
*/
private void clearSuggestions() {
setSuggestions(null, false, false, false);
}
private void setSuggestions(
List<CharSequence> suggestions,
boolean completions,
boolean typedWordValid,
boolean haveMinimalSuggestion) {
// if (mIsShowingHint) {
// setCandidatesView(mCandidateViewContainer);
// mIsShowingHint = false;
// }
if (mCandidateView != null) {
mCandidateView.setSuggestions(
suggestions, completions, typedWordValid, haveMinimalSuggestion);
}
}
@Override
public void onComputeInsets(InputMethodService.Insets outInsets) {
super.onComputeInsets(outInsets);
if (!isFullscreenMode()) {
outInsets.contentTopInsets = outInsets.visibleTopInsets;
}
}
@Override
public boolean onEvaluateFullscreenMode() {
switch(mOrientation)
{
case Configuration.ORIENTATION_LANDSCAPE:
return mConfig.getUseFullScreenInputInLandscape();
default:
return mConfig.getUseFullScreenInputInPortrait();
}
}
@Override
public boolean onKeyDown(final int keyCode, KeyEvent event) {
// if (DEBUG)
// {
// Log.d(TAG, "onKeyDown:"+keyCode+" flags:"+event.getFlags());
//
// if (mInputView == null)
// {
// Log.d(TAG, "No input view");
// }
// else
// {
// Log.d(TAG, "\n canInteractWithUi:"+mInputView.canInteractWithUi()+"\n"+
// "getHeight:"+mInputView.getHeight()+"\n"+
// "getVisibility:"+mInputView.getVisibility()+"\n"+
// "getWindowVisibility:"+mInputView.getWindowVisibility()+"\n"+
// "isFocused:"+mInputView.isFocused()+"\n"+
// "isShown:"+mInputView.isShown()+"\n");
// }
// }
final boolean shouldTranslateSpecialKeys = AnySoftKeyboard.getInstance().isInputViewShown();
if(DEBUG){
Log.d(TAG, "isInputViewShown="+shouldTranslateSpecialKeys);
}
InputConnection ic = getCurrentInputConnection();
if (!mPredictionLandscape) {
// For all other keys, if we want to do transformations on
// text being entered with a hard keyboard, we need to process
// it and do the appropriate action.
// using the physical keyboard is more annoying with the candidate view in
// the way, so we disable it.
commitTyped(ic);// to clear the underline.
mPredicting = false;
}
if (DEBUG)
Log.d(TAG, "Event: Key:" + event.getKeyCode()
+ " Shift:"
+ ((event.getMetaState() & KeyEvent.META_SHIFT_ON) != 0)
+ " ALT:"
+ ((event.getMetaState() & KeyEvent.META_ALT_ON) != 0)
+ " Repeats:" + event.getRepeatCount());
switch (keyCode) {
/**** SPECIAL translated HW keys
* If you add new keys here, do not forget to add to the
*/
case KeyEvent.KEYCODE_CAMERA:
if(shouldTranslateSpecialKeys && mConfig.useCameraKeyForBackspaceBackword()){
handleBackword(getCurrentInputConnection());
return true;
}
// DO NOT DELAY CAMERA KEY with unneeded checks in default mark
return super.onKeyDown(keyCode, event);
case KeyEvent.KEYCODE_FOCUS:
if(shouldTranslateSpecialKeys && mConfig.useCameraKeyForBackspaceBackword()){
handleDeleteLastCharacter(false);
return true;
}
// DO NOT DELAY FOCUS KEY with unneeded checks in default mark
return super.onKeyDown(keyCode, event);
case KeyEvent.KEYCODE_VOLUME_UP:
if(shouldTranslateSpecialKeys && mConfig.useVolumeKeyForLeftRight()){
sendDownUpKeyEvents(KeyEvent.KEYCODE_DPAD_LEFT);
return true;
}
// DO NOT DELAY VOLUME UP KEY with unneeded checks in default mark
return super.onKeyDown(keyCode, event);
case KeyEvent.KEYCODE_VOLUME_DOWN:
if(shouldTranslateSpecialKeys && mConfig.useVolumeKeyForLeftRight()){
sendDownUpKeyEvents(KeyEvent.KEYCODE_DPAD_RIGHT);
return true;
}
// DO NOT DELAY VOLUME DOWN KEY with unneeded checks in default mark
return super.onKeyDown(keyCode, event);
/**** END of SPECIAL translated HW keys code section
*
*/
case KeyEvent.KEYCODE_BACK:
if (event.getRepeatCount() == 0 && mInputView != null) {
if (mInputView.handleBack()) {
// consuming the meta keys
if (ic != null) {
ic.clearMetaKeyStates(Integer.MAX_VALUE);// translated,
// so we
// also take
// care of
// the
// metakeys.
}
mMetaState = 0;
return true;
} /*
* else if (mTutorial != null) { mTutorial.close(); mTutorial =
* null; }
*/
}
break;
case KeyEvent.KEYCODE_SHIFT_LEFT:
case KeyEvent.KEYCODE_SHIFT_RIGHT:
if (event.isAltPressed() && Workarounds.isAltSpaceLangSwitchNotPossible()) {
if(DEBUG) Log.d(TAG,
"User pressed ALT+SHIFT on motorola milestone, moving to next physical keyboard.");
// consuming the meta keys
// mHardKeyboardAction.resetMetaState();
if (ic != null) {
ic.clearMetaKeyStates(Integer.MAX_VALUE);// translated, so
// we also take
// care of the
// metakeys.
}
mMetaState = 0;
// only physical keyboard
nextKeyboard(getCurrentInputEditorInfo(),
NextKeyboardType.AlphabetSupportsPhysical);
return true;
}
//NOTE: letting it fallthru to the other meta-keys
case KeyEvent.KEYCODE_ALT_LEFT:
case KeyEvent.KEYCODE_ALT_RIGHT:
case KeyEvent.KEYCODE_SYM:
if (DEBUG)
Log.d(TAG+"-meta-key",
getMetaKeysStates("onKeyDown before handle"));
mMetaState = MyMetaKeyKeyListener.handleKeyDown(mMetaState,
keyCode, event);
if (DEBUG)
Log.d(TAG+"-meta-key",
getMetaKeysStates("onKeyDown after handle"));
break;
case KeyEvent.KEYCODE_SPACE:
if ((event.isAltPressed() && !Workarounds.isAltSpaceLangSwitchNotPossible()) || event.isShiftPressed()) {
if(DEBUG)
if(event.isAltPressed()){
Log.d(TAG,
"User pressed ALT+SPACE, moving to next physical keyboard.");
} else {
Log.d(TAG,
"User pressed SHIFT+SPACE, moving to next physical keyboard.");
}
// consuming the meta keys
// mHardKeyboardAction.resetMetaState();
if (ic != null) {
ic.clearMetaKeyStates(Integer.MAX_VALUE);// translated, so
// we also take
// care of the
// metakeys.
}
mMetaState = 0;
// only physical keyboard
nextKeyboard(getCurrentInputEditorInfo(),
NextKeyboardType.AlphabetSupportsPhysical);
return true;
}
//NOTE:
// letting it fall through to the "default"
default:
// Fix issue 185, check if we should process key repeat
if (!mConfig.getUseRepeatingKeys() && event.getRepeatCount() > 0)
return true;
if (mKeyboardSwitcher.isCurrentKeyboardPhysical()) {
// sometimes, the physical keyboard will delete input, and then
// add some.
// we'll try to make it nice
if (ic != null)
ic.beginBatchEdit();
try {
//issue 393, backword on the hw keyboard!
if(mConfig.useBackword() && keyCode == KeyEvent.KEYCODE_DEL && event.isShiftPressed()){
handleBackword(ic);
return true;
} else if (event.isPrintingKey()) {
onPhysicalKeyboardKeyPressed();
mHardKeyboardAction.initializeAction(event, mMetaState);
// http://article.gmane.org/gmane.comp.handhelds.openmoko.android-freerunner/629
AnyKeyboard current = mKeyboardSwitcher.getCurrentKeyboard();
HardKeyboardTranslator keyTranslator = (HardKeyboardTranslator) current;
if (DEBUG)
{
final String keyboardName = current.getKeyboardName();
Log.d(TAG, "Asking '" + keyboardName
+ "' to translate key: " + keyCode);
Log.v(TAG,
"Hard Keyboard Action before translation: Shift: "
+ mHardKeyboardAction
.isShiftActive()
+ ", Alt: "
+ mHardKeyboardAction.isAltActive()
+ ", Key code: "
+ mHardKeyboardAction.getKeyCode()
+ ", changed: "
+ mHardKeyboardAction.getKeyCodeWasChanged());
}
keyTranslator.translatePhysicalCharacter(mHardKeyboardAction);
if (DEBUG)
Log.v(TAG,
"Hard Keyboard Action after translation: Key code: "
+ mHardKeyboardAction.getKeyCode()
+ ", changed: "
+ mHardKeyboardAction
.getKeyCodeWasChanged());
if (mHardKeyboardAction.getKeyCodeWasChanged()) {
final int translatedChar = mHardKeyboardAction
.getKeyCode();
// typing my own.
onKey(translatedChar, new int[] { translatedChar }, 0, 0);
// my handling
// we are at a regular key press, so we'll update
// our meta-state member
mMetaState = MyMetaKeyKeyListener
.adjustMetaAfterKeypress(mMetaState);
if (DEBUG)
Log.d(TAG+"-meta-key",
getMetaKeysStates("onKeyDown after adjust - translated"));
return true;
}
}
} finally {
if (ic != null)
ic.endBatchEdit();
}
}
if (event.isPrintingKey()) {
// we are at a regular key press, so we'll update our meta-state
// member
mMetaState = MyMetaKeyKeyListener
.adjustMetaAfterKeypress(mMetaState);
if (DEBUG)
Log.d(TAG+"-meta-key",
getMetaKeysStates("onKeyDown after adjust"));
}
}
return super.onKeyDown(keyCode, event);
}
private void notifyKeyboardChangeIfNeeded() {
// Log.d("anySoftKeyboard","notifyKeyboardChangeIfNeeded");
// Thread.dumpStack();
if (mKeyboardSwitcher == null)// happens on first onCreate.
return;
if ((mKeyboardSwitcher.isAlphabetMode())
&& !mKeyboardChangeNotificationType
.equals(KEYBOARD_NOTIFICATION_NEVER)) {
//AnyKeyboard current = mKeyboardSwitcher.getCurrentKeyboard();
// notifying the user about the keyboard.
// creating the message
// final String keyboardName = mCurrentKeyboard.getKeyboardName();
//
// Notification notification = new Notification(R.drawable.notification_icon, keyboardName, System.currentTimeMillis());
//
// Intent notificationIntent = new Intent();
// PendingIntent contentIntent = PendingIntent.getActivity(this, 0,
// notificationIntent, 0);
//
// notification.setLatestEventInfo(getApplicationContext(),
// getText(R.string.ime_name), keyboardName,
// contentIntent);
//
// if (mKeyboardChangeNotificationType.equals("1")) {
// notification.flags |= Notification.FLAG_ONGOING_EVENT;
// notification.flags |= Notification.FLAG_NO_CLEAR;
// } else {
// notification.flags |= Notification.FLAG_AUTO_CANCEL;
// }
// // notifying
// mNotificationManager.notify(KEYBOARD_NOTIFICATION_ID, notification);
// Intent i = new Intent(NOTIFY_LAYOUT_SWITCH);
// i.putExtra(NOTIFY_LAYOUT_SWITCH_NOTIFICATION_TITLE, getText(R.string.ime_name));
// i.putExtra(NOTIFY_LAYOUT_SWITCH_CURRENT_LAYOUT_RESID, mCurrentKeyboard.getKeyboardIconResId());
// i.putExtra(NOTIFY_LAYOUT_SWITCH_CURRENT_LAYOUT_NAME, mCurrentKeyboard.getKeyboardName());
// i.putExtra(NOTIFY_LAYOUT_SWITCH_CURRENT_LAYOUT_PACKAGE, mCurrentKeyboard.getKeyboardContext().getPackageName());
// i.putExtra(NOTIFY_LAYOUT_SWITCH_NOTIFICATION_FLAGS, notification.flags);
// sendBroadcast(i);
mInputMethodManager.showStatusIcon(mImeToken, mCurrentKeyboard.getKeyboardContext().getPackageName(), mCurrentKeyboard.getKeyboardIconResId());
}
}
@Override
public boolean onKeyUp(int keyCode, KeyEvent event) {
switch (keyCode) {
//Issue 248
case KeyEvent.KEYCODE_VOLUME_DOWN:
case KeyEvent.KEYCODE_VOLUME_UP:
if(isInputViewShown() == false){
return super.onKeyUp(keyCode, event);
}
if(mConfig.useVolumeKeyForLeftRight()){
//no need of vol up/down sound
updateShiftKeyState(getCurrentInputEditorInfo());
return true;
}
case KeyEvent.KEYCODE_DPAD_DOWN:
case KeyEvent.KEYCODE_DPAD_UP:
case KeyEvent.KEYCODE_DPAD_LEFT:
case KeyEvent.KEYCODE_DPAD_RIGHT:
// // If tutorial is visible, don't allow dpad to work
// if (mTutorial != null) {
// return true;
// }
// Enable shift key and DPAD to do selections
if (mInputView != null && mInputView.isShown()
&& mInputView.isShifted()) {
event = new KeyEvent(event.getDownTime(), event.getEventTime(),
event.getAction(), event.getKeyCode(), event
.getRepeatCount(), event.getDeviceId(), event
.getScanCode(), KeyEvent.META_SHIFT_LEFT_ON
| KeyEvent.META_SHIFT_ON);
InputConnection ic = getCurrentInputConnection();
if (ic != null)
ic.sendKeyEvent(event);
updateShiftKeyState(getCurrentInputEditorInfo());
return true;
}
break;
case KeyEvent.KEYCODE_ALT_LEFT:
case KeyEvent.KEYCODE_ALT_RIGHT:
case KeyEvent.KEYCODE_SHIFT_LEFT:
case KeyEvent.KEYCODE_SHIFT_RIGHT:
case KeyEvent.KEYCODE_SYM:
mMetaState = MyMetaKeyKeyListener.handleKeyUp(mMetaState, keyCode,
event);
if (DEBUG)
Log.d("AnySoftKeyboard-meta-key", getMetaKeysStates("onKeyUp"));
setInputConnectionMetaStateAsCurrentMetaKeyKeyListenerState();
break;
}
boolean r = super.onKeyUp(keyCode, event);
updateShiftKeyState(getCurrentInputEditorInfo());
return r;
}
private String getMetaKeysStates(String place) {
final int shiftState = MyMetaKeyKeyListener.getMetaState(mMetaState,
MyMetaKeyKeyListener.META_SHIFT_ON);
final int altState = MyMetaKeyKeyListener.getMetaState(mMetaState,
MyMetaKeyKeyListener.META_ALT_ON);
final int symState = MyMetaKeyKeyListener.getMetaState(mMetaState,
MyMetaKeyKeyListener.META_SYM_ON);
return "Meta keys state at " + place + "- SHIFT:" + shiftState
+ ", ALT:" + altState + " SYM:" + symState + " bits:"
+ MyMetaKeyKeyListener.getMetaState(mMetaState) + " state:"
+ mMetaState;
}
private void setInputConnectionMetaStateAsCurrentMetaKeyKeyListenerState() {
InputConnection ic = getCurrentInputConnection();
if (ic != null) {
int clearStatesFlags = 0;
if (MyMetaKeyKeyListener.getMetaState(mMetaState,
MyMetaKeyKeyListener.META_ALT_ON) == 0)
clearStatesFlags += KeyEvent.META_ALT_ON;
if (MyMetaKeyKeyListener.getMetaState(mMetaState,
MyMetaKeyKeyListener.META_SHIFT_ON) == 0)
clearStatesFlags += KeyEvent.META_SHIFT_ON;
if (MyMetaKeyKeyListener.getMetaState(mMetaState,
MyMetaKeyKeyListener.META_SYM_ON) == 0)
clearStatesFlags += KeyEvent.META_SYM_ON;
if (DEBUG)
Log
.d(
"AnySoftKeyboard-meta-key",
getMetaKeysStates("setInputConnectionMetaStateAsCurrentMetaKeyKeyListenerState with flags: "
+ clearStatesFlags));
ic.clearMetaKeyStates(clearStatesFlags);
}
}
private void addToDictionaries(CharSequence suggestion, int frequencyDelta) {
checkAddToDictionary(suggestion, frequencyDelta/*, false*/);
}
/**
* Adds to the UserBigramDictionary and/or AutoDictionary
* @param addToBigramDictionary true if it should be added to bigram dictionary if possible
*/
private void checkAddToDictionary(CharSequence suggestion, int frequencyDelta/*,
boolean addToBigramDictionary*/) {
if (suggestion == null || suggestion.length() < 1) return;
// Only auto-add to dictionary if auto-correct is ON. Otherwise we'll be
// adding words in situations where the user or application really didn't
// want corrections enabled or learned.
if (!(mCorrectionMode == Suggest.CORRECTION_FULL/*
|| mCorrectionMode == Suggest.CORRECTION_FULL_BIGRAM*/)) {
return;
}
if (suggestion != null && mAutoDictionary != null) {
if (/*!addToBigramDictionary &&*/
mAutoDictionary.isValidWord(suggestion) ||
(!mSuggest.isValidWord(suggestion.toString()) && !mSuggest.isValidWord(suggestion.toString().toLowerCase()))) {
mAutoDictionary.addWord(suggestion.toString(), frequencyDelta);
}
/*
if (mUserBigramDictionary != null) {
CharSequence prevWord = EditingUtil.getPreviousWord(getCurrentInputConnection(),
mSentenceSeparators);
if (!TextUtils.isEmpty(prevWord)) {
mUserBigramDictionary.addBigrams(prevWord.toString(), suggestion.toString());
}
}*/
}
}
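/**
 * Commits the word currently being composed to the editor, records its length
 * for a possible later revert, and feeds it to the auto-dictionary.
 */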
private void commitTyped(InputConnection inputConnection) {
if (mPredicting) {
mPredicting = false;
if (mComposing.length() > 0) {
if (inputConnection != null) {
inputConnection.commitText(mComposing, 1);
}
mCommittedLength = mComposing.length();
TextEntryState.acceptedTyped(mComposing);
addToDictionaries(mComposing, AutoDictionary.FREQUENCY_FOR_TYPED);
}
postUpdateSuggestionsNow();
}
}
/*private void postUpdateShiftKeyState() {
mHandler.removeMessages(MSG_UPDATE_SHIFT_STATE);
// TODO: Should remove this 300ms delay?
mHandler.sendMessageDelayed(mHandler.obtainMessage(MSG_UPDATE_SHIFT_STATE), 150);
}*/
public void updateShiftKeyState(EditorInfo attr) {
//mHandler.removeMessages(MSG_UPDATE_SHIFT_STATE);
InputConnection ic = getCurrentInputConnection();
if (ic != null && attr != null && mKeyboardSwitcher.isAlphabetMode() && (mInputView != null)) {
mInputView.setShifted(mShiftKeyState.isMomentary() || mCapsLock
|| getCursorCapsMode(ic, attr) != 0);
}
}
private int getCursorCapsMode(InputConnection ic, EditorInfo attr) {
int caps = 0;
EditorInfo ei = getCurrentInputEditorInfo();
if (mAutoCap && ei != null && ei.inputType != EditorInfo.TYPE_NULL) {
caps = ic.getCursorCapsMode(attr.inputType);
}
return caps;
}
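/**
 * If the last two characters are a space followed by a sentence separator,
 * swaps them so the punctuation attaches to the word (e.g. "word ," becomes "word, ").
 */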
private void swapPunctuationAndSpace() {
final InputConnection ic = getCurrentInputConnection();
if (ic == null)
return;
CharSequence lastTwo = ic.getTextBeforeCursor(2, 0);
if (DEBUG)
{
String seps = "";
for(Character c : mSentenceSeparators) seps+=c;
Log.d(TAG, "swapPunctuationAndSpace: lastTwo: '"+lastTwo+"', mSentenceSeparators "+mSentenceSeparators.size()+ " '"+seps+"'");
}
if (lastTwo != null && lastTwo.length() == 2
&& lastTwo.charAt(0) == KeyCodes.SPACE
&& mSentenceSeparators.contains(lastTwo.charAt(1))) {
//ic.beginBatchEdit();
ic.deleteSurroundingText(2, 0);
ic.commitText(lastTwo.charAt(1) + " ", 1);
//ic.endBatchEdit();
updateShiftKeyState(getCurrentInputEditorInfo());
mJustAddedAutoSpace = true;
Log.d(TAG, "swapPunctuationAndSpace: YES");
}
}
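/**
 * Undoes an unwanted punctuation swap while the user types an ellipsis:
 * rewrites ". ." before the cursor back to ".. ".
 */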
private void reswapPeriodAndSpace() {
final InputConnection ic = getCurrentInputConnection();
if (ic == null) return;
CharSequence lastThree = ic.getTextBeforeCursor(3, 0);
if (lastThree != null && lastThree.length() == 3
&& lastThree.charAt(0) == '.'
&& lastThree.charAt(1) == KeyCodes.SPACE
&& lastThree.charAt(2) == '.') {
ic.beginBatchEdit();
ic.deleteSurroundingText(3, 0);
ic.commitText(".. ", 1);
ic.endBatchEdit();
updateShiftKeyState(getCurrentInputEditorInfo());
}
}
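/**
 * If enabled in the settings, converts two consecutive spaces typed after a
 * letter or digit into a period followed by a single space.
 */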
private void doubleSpace() {
// if (!mAutoPunctuate) return;
if (!mConfig.isDoubleSpaceChangesToPeriod())
return;
final InputConnection ic = getCurrentInputConnection();
if (ic == null)
return;
CharSequence lastThree = ic.getTextBeforeCursor(3, 0);
if (lastThree != null && lastThree.length() == 3
&& Character.isLetterOrDigit(lastThree.charAt(0))
&& lastThree.charAt(1) == KeyCodes.SPACE
&& lastThree.charAt(2) == KeyCodes.SPACE) {
ic.beginBatchEdit();
ic.deleteSurroundingText(2, 0);
ic.commitText(". ", 1);
ic.endBatchEdit();
updateShiftKeyState(getCurrentInputEditorInfo());
mJustAddedAutoSpace = true;
}
}
private void removeTrailingSpace() {
final InputConnection ic = getCurrentInputConnection();
if (ic == null) return;
CharSequence lastOne = ic.getTextBeforeCursor(1, 0);
if (lastOne != null && lastOne.length() == 1
&& lastOne.charAt(0) == KeyCodes.SPACE) {
ic.deleteSurroundingText(1, 0);
}
}
public boolean addWordToDictionary(String word) {
mUserDictionary.addWord(word, 128);
return true;
}
/**
* Helper to determine if a given character code is alphabetic.
*/
private boolean isAlphabet(int code) {
//inner letters have more options: ' in English. " in Hebrew, and more.
if (mPredicting)
return mCurrentKeyboard.isInnerWordLetter((char) code);
else
return mCurrentKeyboard.isStartOfWordLetter((char) code);
}
public void onMultiTap() {
if (DEBUG) Log.d(TAG, "onMultiTap");
handleDeleteLastCharacter(true);
}
public void onKey(int primaryCode, int[] keyCodes, int x, int y) {
if (DEBUG)
{
Log.d(TAG, "onKey " + primaryCode);
//Thread.dumpStack();
}
final InputConnection ic = getCurrentInputConnection();
switch (primaryCode) {
case Keyboard.KEYCODE_DELETE:
if (ic == null)//if we don't want to do anything, lets check null first.
break;
//we do backword if the shift is pressed while pressing backspace (like in a PC)
//but this is true ONLY if the device has multitouch, or the user specifically asked for it
if (mInputView != null && mInputView.isShifted() && !mInputView.getKeyboard().isShiftLocked() &&
((mInputView.hasDistinctMultitouch() && mShiftKeyState.isMomentary()) || mConfig.useBackword()))
{
handleBackword(ic);
}
else
{
handleDeleteLastCharacter(false);
}
break;
case AnyKeyboard.KEYCODE_CLEAR_INPUT:
if (ic != null)
{
ic.beginBatchEdit();
commitTyped(ic);
ic.deleteSurroundingText(Integer.MAX_VALUE, Integer.MAX_VALUE);
ic.endBatchEdit();
}
break;
case Keyboard.KEYCODE_SHIFT:
if ((!mInputView.hasDistinctMultitouch()) ||
((x == SWIPE_CORD) && (y == SWIPE_CORD)))//the SWIPE_CORD is the case where onKey was called from swipeX
handleShift(false);
break;
case AnyKeyboard.KEYCODE_CTRL:
if ((!mInputView.hasDistinctMultitouch()) ||
((x == SWIPE_CORD) && (y == SWIPE_CORD)))//the SWIPE_CORD is the case where onKey was called from swipeX
handleControl(false);
break;
case AnyKeyboard.KEYCODE_LEFT:
sendDownUpKeyEvents(KeyEvent.KEYCODE_DPAD_LEFT);
break;
case AnyKeyboard.KEYCODE_RIGHT:
sendDownUpKeyEvents(KeyEvent.KEYCODE_DPAD_RIGHT);
break;
case AnyKeyboard.KEYCODE_UP:
sendDownUpKeyEvents(KeyEvent.KEYCODE_DPAD_UP);
break;
case AnyKeyboard.KEYCODE_DOWN:
sendDownUpKeyEvents(KeyEvent.KEYCODE_DPAD_DOWN);
break;
case Keyboard.KEYCODE_CANCEL:
if (mOptionsDialog == null || !mOptionsDialog.isShowing()) {
handleClose();
}
break;
case AnyKeyboardView.KEYCODE_OPTIONS:
showOptionsMenu();
break;
case AnyKeyboard.KEYCODE_DOMAIN:
onText(mConfig.getDomainText());
break;
case AnyKeyboard.KEYCODE_QUICK_TEXT:
QuickTextKey quickTextKey = QuickTextKeyFactory.getCurrentQuickTextKey(this);
if (mSmileyOnShortPress) {
if (TextUtils.isEmpty(mOverrideQuickTextText))
onText(quickTextKey.getKeyOutputText());
else
onText(mOverrideQuickTextText);
} else {
if (quickTextKey.isPopupKeyboardUsed()) {
showQuickTextKeyPopupKeyboard(quickTextKey);
} else {
showQuickTextKeyPopupList(quickTextKey);
}
}
break;
case AnyKeyboardView.KEYCODE_QUICK_TEXT_LONGPRESS:
quickTextKey = QuickTextKeyFactory.getCurrentQuickTextKey(this);
if (quickTextKey.getId().equals(SMILEY_PLUGIN_ID) && !mSmileyOnShortPress) {
if (TextUtils.isEmpty(mOverrideQuickTextText))
onText(quickTextKey.getKeyOutputText());
else
onText(mOverrideQuickTextText);
} else {
if (quickTextKey.isPopupKeyboardUsed()) {
showQuickTextKeyPopupKeyboard(quickTextKey);
} else {
showQuickTextKeyPopupList(quickTextKey);
}
}
break;
case Keyboard.KEYCODE_MODE_CHANGE:
nextKeyboard(getCurrentInputEditorInfo(), NextKeyboardType.Symbols);
break;
case AnyKeyboard.KEYCODE_LANG_CHANGE:
if (mKeyboardSwitcher.shouldPopupForLanguageSwitch())
{
showLanguageSelectionDialog();
}
else
nextKeyboard(getCurrentInputEditorInfo(), NextKeyboardType.Alphabet);
break;
case Keyboard.KEYCODE_ALT:
nextAlterKeyboard(getCurrentInputEditorInfo());
break;
case AnyKeyboard.KEYCODE_KEYBOARD_CYCLE:
nextKeyboard(getCurrentInputEditorInfo(), NextKeyboardType.Any);
break;
case AnyKeyboard.KEYCODE_KEYBOARD_REVERSE_CYCLE:
nextKeyboard(getCurrentInputEditorInfo(), NextKeyboardType.PreviousAny);
break;
case AnyKeyboard.KEYCODE_KEYBOARD_CYCLE_INSIDE_MODE:
nextKeyboard(getCurrentInputEditorInfo(), NextKeyboardType.AnyInsideMode);
break;
case AnyKeyboard.KEYCODE_KEYBOARD_MODE_CHANGE:
nextKeyboard(getCurrentInputEditorInfo(), NextKeyboardType.OtherMode);
break;
case AnyKeyboard.KEYCODE_CLIPBOARD:
ClipboardManager cm = (ClipboardManager)getSystemService(CLIPBOARD_SERVICE);
if(cm.hasText()){
onText(cm.getText());
}
break;
case 9 /*Tab*/:
sendDownUpKeyEvents(KeyEvent.KEYCODE_TAB);
break;
default:
// Issue 146: Right to left langs require reversed parenthesis
if (mKeyboardSwitcher.isRightToLeftMode())
{
if (primaryCode == (int)')')
primaryCode = (int)'(';
else if (primaryCode == (int)'(')
primaryCode = (int)')';
}
if (isWordSeparator(primaryCode)) {
handleSeparator(primaryCode);
} else {
handleCharacter(primaryCode, keyCodes);
// resetting mJustAddedAutoSpace, which is set to true upon selecting
// a candidate
mJustAddedAutoSpace = false;
}
// Cancel the just reverted state
mJustRevertedSeparator = null;
if (mKeyboardSwitcher.isKeyRequireSwitchToAlphabet(primaryCode))
{
mKeyboardSwitcher.nextKeyboard(getCurrentInputEditorInfo(),
NextKeyboardType.Alphabet);
}
break;
}
}
private void showLanguageSelectionDialog() {
KeyboardAddOnAndBuilder[] builders = mKeyboardSwitcher.getEnabledKeyboardsBuilders();
AlertDialog.Builder builder = new AlertDialog.Builder(this);
builder.setCancelable(true);
builder.setIcon(R.drawable.icon_8_key);
builder.setTitle(getResources().getString(R.string.select_keyboard_popup_title));
builder.setNegativeButton(android.R.string.cancel, null);
ArrayList<CharSequence> keyboardsIds = new ArrayList<CharSequence>();
ArrayList<CharSequence> keyboards = new ArrayList<CharSequence>();
//going over all enabled keyboards
for (KeyboardAddOnAndBuilder keyboardBuilder : builders) {
keyboardsIds.add(keyboardBuilder.getId());
String name = keyboardBuilder.getName();
keyboards.add(name);
}
final CharSequence[] ids = new CharSequence[keyboardsIds.size()];
final CharSequence[] items = new CharSequence[keyboards.size()];
keyboardsIds.toArray(ids);
keyboards.toArray(items);
builder.setItems(items, new DialogInterface.OnClickListener() {
public void onClick(DialogInterface di, int position) {
di.dismiss();
if ((position < 0) || (position >= items.length)) {
if (DEBUG)Log.d(TAG, "Keyboard selection popup canceled");
} else {
CharSequence id = ids[position];
if (DEBUG)Log.d(TAG, "User selected "+items[position]+" with id "+id);
EditorInfo currentEditorInfo = getCurrentInputEditorInfo();
AnyKeyboard currentKeyboard = mKeyboardSwitcher.nextAlphabetKeyboard(currentEditorInfo, id.toString());
setKeyboardStuff(currentEditorInfo, NextKeyboardType.Alphabet, currentKeyboard);
}
}
});
mOptionsDialog = builder.create();
Window window = mOptionsDialog.getWindow();
WindowManager.LayoutParams lp = window.getAttributes();
lp.token = mInputView.getWindowToken();
lp.type = WindowManager.LayoutParams.TYPE_APPLICATION_ATTACHED_DIALOG;
window.setAttributes(lp);
window.addFlags(WindowManager.LayoutParams.FLAG_ALT_FOCUSABLE_IM);
mOptionsDialog.show();
}
public void onText(CharSequence text) {
if (DEBUG)
Log.d(TAG, "onText: '" + text+"'");
InputConnection ic = getCurrentInputConnection();
if (ic == null)
return;
abortCorrection(false);
ic.beginBatchEdit();
if (mPredicting) {
commitTyped(ic);
}
ic.commitText(text, 1);
ic.endBatchEdit();
updateShiftKeyState(getCurrentInputEditorInfo());
mJustRevertedSeparator = null;
mJustAddedAutoSpace = false;
}
private static boolean isBackwordStopChar(int c) {
return !Character.isLetter(c);// c == 32 || PUNCTUATION_CHARACTERS.contains(c);
}
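/**
 * Deletes backwards word-by-word: while predicting it clears the current
 * composing text, otherwise it deletes characters before the cursor until a
 * word separator is reached (always removing at least one character).
 */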
private void handleBackword(InputConnection ic) {
if(ic == null){
return;
}
if (mPredicting) {
final int length = mComposing.length();
if (length == 0) {
return;
}
mComposing.delete(0, length);
mWord.deleteLast();
ic.setComposingText(mComposing, 1);
if (mComposing.length() == 0) {
mPredicting = false;
}
postUpdateSuggestions();
return;
}
CharSequence cs = ic.getTextBeforeCursor(1, 0);
//int csl = cs.length();//check if there is no input
if (TextUtils.isEmpty(cs)) {
return;//nothing to delete
}
//TWO OPTIONS
//1) Either we do like Linux and Windows (and probably ALL desktop OSes):
//Delete all the characters till a complete word was deleted:
/*
* What to do:
* We delete until we find a separator (the function isBackwordStopChar).
* Note that we MUST delete a whole word! So if the backword starts
* at separators, we'll delete those, and then the word before:
* "test this, ," -> "test "
*/
//Pro: same as desktop
//Con: when auto-caps is on (the default), this will delete the previous word, which can be annoying..
//E.g., Writing a sentence, then a period, then ASK will auto-caps, then when the user press backspace (for some reason),
//the entire previous word deletes.
//2) Or we delete all the characters till we encounter a separator, but delete at least one character.
/*
* What to do:
* We delete until we find a separator (the function isBackwordStopChar).
* Note that we MUST delete at least one character
* "test this, " -> "test this," -> "test this" -> "test "
*/
//Pro: Supports auto-caps, and mostly similar to desktop OSes
//Con: Not all desktop use-cases are here.
//For now, I go with option 2, but I'm open for discussion.
//2b) "test this, " -> "test this"
boolean stopCharAtTheEnd = isBackwordStopChar((int)cs.charAt(0));
int idx = 1;
int csl = 0;
while (true) {
cs = ic.getTextBeforeCursor(idx, 0);
csl = cs.length();
if (csl < idx) {
// read text is smaller than requested. We are at start
break;
}
++idx;
int cc = cs.charAt(0);
boolean isBackwordStopChar = isBackwordStopChar(cc);
if (stopCharAtTheEnd) {
if (!isBackwordStopChar){
--csl;
break;
}
continue;
}
if (isBackwordStopChar) {
--csl;
break;
}
}
//we want to delete at least one character
//ic.deleteSurroundingText(csl == 0 ? 1 : csl, 0);
ic.deleteSurroundingText(csl, 0);//it is always > 0 !
}
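/**
 * Handles a single backspace: removes the last composed character while
 * predicting, reverts the last auto-accepted word when needed, or sends a
 * plain DEL key event (with a faster delete path for multi-tap input).
 */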
private void handleDeleteLastCharacter(boolean forMultitap) {
InputConnection ic = getCurrentInputConnection();
boolean deleteChar = false;
if (mPredicting) {
final int length = mComposing.length();
if (length > 0) {
mComposing.delete(length - 1, length);
mWord.deleteLast();
ic.setComposingText(mComposing, 1);
if (mComposing.length() == 0) {
mPredicting = false;
}
postUpdateSuggestions();
} else {
ic.deleteSurroundingText(1, 0);
}
} else {
deleteChar = true;
}
TextEntryState.backspace();
if (TextEntryState.getState() == TextEntryState.State.UNDO_COMMIT) {
revertLastWord(deleteChar);
return;
} else if (deleteChar) {
if (mCandidateView != null && mCandidateView.dismissAddToDictionaryHint()) {
// Go back to the suggestion mode if the user canceled the
// "Touch again to save".
// NOTE: In general, we don't revert the word when backspacing
// from a manual suggestion pick. We deliberately chose a
// different behavior only in the case of picking the first
// suggestion (typed word). It's intentional to have made this
// inconsistent with backspacing after selecting other suggestions.
revertLastWord(deleteChar);
}
else
{
if (!forMultitap)
{
sendDownUpKeyEvents(KeyEvent.KEYCODE_DEL);
}
else
{
//this code tries to delete the text in a different way, because of multi-tap stuff
//using "deleteSurroundingText" will actually get the input updated faster!
//but will not handle "delete all selected text" feature, hence the "if (!forMultitap)" above
final CharSequence beforeText = ic.getTextBeforeCursor(1, 0);
final int textLengthBeforeDelete = (TextUtils.isEmpty(beforeText))? 0 : beforeText.length();
if (textLengthBeforeDelete > 0)
ic.deleteSurroundingText(1, 0);
else
sendDownUpKeyEvents(KeyEvent.KEYCODE_DEL);
}
}
}
mJustRevertedSeparator = null;
//handleShiftStateAfterBackspace();
}
/*
private void handleShiftStateAfterBackspace() {
switch(mLastCharacterShiftState)
{
//this code will help us in the case that
//a double/triple tap occurs while the first one was shifted
case LAST_CHAR_SHIFT_STATE_SHIFTED:
if (mInputView != null)
mInputView.setShifted(true);
mLastCharacterShiftState = LAST_CHAR_SHIFT_STATE_DEFAULT;
break;
// case LAST_CHAR_SHIFT_STATE_UNSHIFTED:
// if (mInputView != null)
// mInputView.setShifted(false);
// mLastCharacterShiftState = LAST_CHAR_SHIFT_STATE_DEFAULT;
// break;
default:
updateShiftKeyState(getCurrentInputEditorInfo());
break;
}
}
*/
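/**
 * Updates the CONTROL state of the input view: clears it when reset is
 * requested, otherwise marks the keyboard as being in control mode.
 */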
private void handleControl(boolean reset) {
if (reset)
{
if (DEBUG) Log.d(TAG, "handleControl: reset");
mInputView.setControl(false);
}
else
{
if (!mInputView.isControl())
{
if (DEBUG) Log.d(TAG, "handleControl: current keyboard is un-control");
mInputView.setControl(true);
}
else
{
if (DEBUG) Log.d(TAG, "handleControl: current keyboard is control");
mInputView.setControl(true);
}
}
}
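/**
 * Cycles the shift state of alphabet keyboards (unshifted -> shifted ->
 * caps-lock -> unshifted), or clears shift when reset is requested, and keeps
 * the caps-lock flag in sync with the current keyboard.
 */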
private void handleShift(boolean reset) {
//mHandler.removeMessages(MSG_UPDATE_SHIFT_STATE);
if (mKeyboardSwitcher.isAlphabetMode()) {
//shift pressed and this is an alphabet keyboard
//we want to do:
//1)if keyboard is unshifted -> shift view and keyboard
//2)if keyboard is shifted -> capslock keyboard
//3)if keyboard is capslocked -> unshift view and keyboard
//final AnyKeyboard currentKeyboard = mKeyboardSwitcher.getCurrentKeyboard();
if (DEBUG)
{
final AnyKeyboard viewKeyboard = (AnyKeyboard)mInputView.getKeyboard();
if (mCurrentKeyboard != viewKeyboard)
{
Log.e(TAG, "NOTE: view keyboard and switcher keyboard are not the same!");
}
}
final boolean caps;
if (reset)
{
if (DEBUG) Log.d(TAG, "handleShift: reset");
mInputView.setShifted(false);
caps = false;
}
else
{
if (!mInputView.isShifted())
{
if (DEBUG) Log.d(TAG, "handleShift: current keyboard is un-shifted");
mInputView.setShifted(true);
caps = false;
}
else
{
if (mCurrentKeyboard.isShiftLocked())
{
if (DEBUG) Log.d(TAG, "handleShift: current keyboard is CAPSLOCKED");
mInputView.setShifted(false);
caps = false;
}
else
{
if (DEBUG) Log.d(TAG, "handleShift: current keyboard is shifted");
mInputView.setShifted(true);
caps = true;
}
}
}
mCapsLock = caps;
mCurrentKeyboard.setShiftLocked(mCapsLock);
}
}
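/**
 * Cancels any in-progress correction: finishes the composing text on the
 * input connection, clears the suggestion strip and resets prediction state.
 */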
private void abortCorrection(boolean force) {
if (force || TextEntryState.isCorrecting()) {
mHandler.removeMessages(MSG_UPDATE_SUGGESTIONS);
getCurrentInputConnection().finishComposingText();
clearSuggestions();
mComposing.setLength(0);
mPredicting = false;
mPredictionOn = false;
mJustAddedAutoSpace = false;
setCandidatesViewShown(false);
if (mSuggest != null) {
mSuggest.setCorrectionMode(Suggest.CORRECTION_NONE);
}
}
}
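/**
 * Handles a regular printable key: starts or extends the composing word when
 * prediction is on, applies shift/control transformations to the character,
 * and schedules a suggestions update.
 */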
private void handleCharacter(final int primaryCode, int[] keyCodes) {
if(DEBUG) Log.d("AnySoftKeyboard", "handleCharacter: "+primaryCode+", isPredictionOn:"+isPredictionOn()+", mPredicting:"+mPredicting);
if (isAlphabet(primaryCode) && isPredictionOn()
&& !isCursorTouchingWord()) {
if (!mPredicting) {
mPredicting = true;
mComposing.setLength(0);
mWord.reset();
}
}
if(mInputView != null){
mLastCharacterShiftState = mInputView.isShifted()? LAST_CHAR_SHIFT_STATE_SHIFTED : LAST_CHAR_SHIFT_STATE_DEFAULT;
}
if (mLastSelectionStart == mLastSelectionEnd && TextEntryState.isCorrecting()) {
abortCorrection(false);
}
final int primaryCodeForShow;
if (mInputView != null)
{
if (mInputView.isShifted())
{
primaryCodeForShow = Character.toUpperCase(primaryCode);
}
else if (mInputView.isControl())
{
//http://en.wikipedia.org/wiki/Control_character#How_control_characters_map_to_keyboards
primaryCodeForShow = primaryCode & 63;
if (AnyApplication.DEBUG) Log.d(TAG, "CONTROL state: Char was "+primaryCode+" and now it is "+primaryCodeForShow);
}
else
primaryCodeForShow = primaryCode;
}
else
primaryCodeForShow = primaryCode;
if (mPredicting) {
if ((mInputView != null) && mInputView.isShifted()
&& mComposing.length() == 0) {
mWord.setFirstCharCapitalized(true);
}
mComposing.append((char) primaryCodeForShow);
if(keyCodes != null && keyCodes.length > 1 && primaryCode != keyCodes[0]){
int swapedItem = keyCodes[0];
keyCodes[0] = primaryCode;
for(int i=1;i<keyCodes.length; i++)
{
if (keyCodes[i] == primaryCode)
{
keyCodes[i] = swapedItem;
break;
}
}
}
if (mWord.add(primaryCodeForShow, keyCodes))
{
Toast note = Toast.makeText(this, "Check the logcat for a note from AnySoftKeyboard developers!", Toast.LENGTH_LONG);
note.show();
Log.i(TAG, "*******************"
+"\nNICE!!! You found the our easter egg! http://www.dailymotion.com/video/x3zg90_gnarls-barkley-crazy-2006-mtv-star_music\n"
+"\nAnySoftKeyboard R&D team would like to thank you for using our keyboard application."
+"\nWe hope you enjoying it, we enjoyed making it."
+"\nWhile developing this application, we heard Gnarls Barkley's Crazy quite a lot, and would like to share it with you."
+"\n"
+"\nThanks."
+"\nMenny Even Danan, Hezi Cohen, Hugo Lopes, Henrik Andersson, Sami Salonen, and Lado Kumsiashvili."
+"\n*******************");
Intent easterEgg = new Intent(Intent.ACTION_VIEW, Uri.parse("http://www.dailymotion.com/video/x3zg90_gnarls-barkley-crazy-2006-mtv-star_music"));
easterEgg.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
startActivity(easterEgg);
}
InputConnection ic = getCurrentInputConnection();
if (ic != null) {
ic.setComposingText(mComposing, 1);
}
postUpdateSuggestions();
} else {
sendKeyChar((char) primaryCodeForShow);
}
updateShiftKeyState(getCurrentInputEditorInfo());
// measureCps();
TextEntryState.typedCharacter((char) primaryCodeForShow,
isWordSeparator(primaryCodeForShow));
}
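/**
 * Handles a word-separator key: optionally auto-picks the default suggestion,
 * commits the typed word, sends the separator and then applies the
 * punctuation/space swapping rules.
 */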
private void handleSeparator(int primaryCode) {
if(DEBUG) Log.d(TAG, "handleSeparator: "+primaryCode);
// Should dismiss the "Touch again to save" message when handling separator
if (mCandidateView != null && mCandidateView.dismissAddToDictionaryHint()) {
postUpdateSuggestions();
}
boolean pickedDefault = false;
// Handle separator
InputConnection ic = getCurrentInputConnection();
if (ic != null) {
ic.beginBatchEdit();
abortCorrection(false);
}
if (mPredicting) {
// In certain languages where single quote is a separator, it's
// better
// not to auto correct, but accept the typed word. For instance,
// in Italian dov' should not be expanded to dove' because the
// elision
// requires the last vowel to be removed.
if (mAutoCorrectOn
&& primaryCode != '\''
&& (mJustRevertedSeparator == null
|| mJustRevertedSeparator.length() == 0
|| mJustRevertedSeparator.charAt(0) != primaryCode)) {
pickedDefault = pickDefaultSuggestion();
// Picked the suggestion by the space key. We consider this
// as "added an auto space".
if (primaryCode == KeyCodes.SPACE) {
mJustAddedAutoSpace = true;
}
} else {
commitTyped(ic);
}
}
if (mJustAddedAutoSpace && primaryCode == KeyCodes.ENTER) {
removeTrailingSpace();
mJustAddedAutoSpace = false;
}
sendKeyChar((char) primaryCode);
// Handle the case of ". ." -> " .." with auto-space if necessary
// before changing the TextEntryState.
if (TextEntryState.getState() == TextEntryState.State.PUNCTUATION_AFTER_ACCEPTED
&& primaryCode == '.') {
reswapPeriodAndSpace();
}
TextEntryState.typedCharacter((char) primaryCode, true);
if (TextEntryState.getState() == TextEntryState.State.PUNCTUATION_AFTER_ACCEPTED
&& primaryCode != KeyCodes.ENTER) {
swapPunctuationAndSpace();
} else if (/*isPredictionOn() &&*/ primaryCode == ' ') {
doubleSpace();
}
if (pickedDefault && mBestWord != null) {
TextEntryState.acceptedDefault(mWord.getTypedWord(), mBestWord);
}
updateShiftKeyState(getCurrentInputEditorInfo());
if (ic != null) {
ic.endBatchEdit();
}
}
private void handleClose() {
commitTyped(getCurrentInputConnection());
requestHideSelf(0);
if (mInputView != null)
mInputView.closing();
TextEntryState.endSession();
}
// private void checkToggleCapsLock() {
// if (mKeyboardSwitcher.getCurrentKeyboard().isShifted()) {
// toggleCapsLock();
// }
// }
private void postUpdateSuggestions() {
postUpdateSuggestions(100);
}
private void postUpdateSuggestions(long delay) {
mHandler.removeMessages(MSG_UPDATE_SUGGESTIONS);
if (delay > 0)
mHandler.sendMessageDelayed(mHandler.obtainMessage(MSG_UPDATE_SUGGESTIONS), delay);
else
mHandler.sendMessage(mHandler.obtainMessage(MSG_UPDATE_SUGGESTIONS));
}
private void postUpdateSuggestionsNow() {
postUpdateSuggestions(0);
}
private boolean isPredictionOn() {
boolean predictionOn = mPredictionOn;
// if (!onEvaluateInputViewShown()) predictionOn &=
// mPredictionLandscape;
return predictionOn;
}
private boolean shouldCandidatesStripBeShown() {
// boolean shown = isPredictionOn() && (mShowSuggestions || isFullscreenMode());
// if (!onEvaluateInputViewShown())
// shown &= mPredictionLandscape;
// return shown;
// return true;
// return isPredictionOn() || isFullscreenMode();
return true;
}
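/**
 * Recomputes the suggestion list for the current composing word, decides
 * whether an auto-correction is available and updates the candidates view.
 */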
private void performUpdateSuggestions() {
if (DEBUG)
Log.d(TAG, "performUpdateSuggestions: has mSuggest:"
+ (mSuggest != null) + ", isPredictionOn:"
+ isPredictionOn() + ", mPredicting:" + mPredicting
+ ", mCorrectionMode:" + mCorrectionMode);
// Check if we have a suggestion engine attached.
if (mSuggest == null) {
return;
}
// final boolean showSuggestions = (mCandidateView != null && mPredicting
// && isPredictionOn() && shouldCandidatesStripBeShown());
if (mCandidateCloseText != null) mCandidateCloseText.setVisibility(View.GONE);
if (!mPredicting) {
if (mCandidateView != null)
mCandidateView.setSuggestions(null, false, false, false);
return;
}
List<CharSequence> stringList = mSuggest.getSuggestions(mInputView, mWord, false);
boolean correctionAvailable = mSuggest.hasMinimalCorrection();
// || mCorrectionMode == mSuggest.CORRECTION_FULL;
CharSequence typedWord = mWord.getTypedWord();
// If we're in basic correction mode, check whether the typed word itself is valid
boolean typedWordValid = mSuggest.isValidWord(typedWord) ||
(preferCapitalization() && mSuggest.isValidWord(typedWord.toString().toLowerCase()));
if (mCorrectionMode == Suggest.CORRECTION_FULL) {
correctionAvailable |= typedWordValid;
}
// Don't auto-correct words with multiple capital letters
correctionAvailable &= !mWord.isMostlyCaps();
correctionAvailable &= !TextEntryState.isCorrecting();
mCandidateView.setSuggestions(stringList, false, typedWordValid, correctionAvailable);
if (stringList.size() > 0) {
if (correctionAvailable && !typedWordValid && stringList.size() > 1) {
mBestWord = stringList.get(1);
} else {
mBestWord = typedWord;
}
} else {
mBestWord = null;
}
setCandidatesViewShown(shouldCandidatesStripBeShown() || mCompletionOn);
}
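/**
 * Accepts the current best suggestion, if any: commits it, records it in the
 * auto-dictionary and returns true when a suggestion was actually picked.
 */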
private boolean pickDefaultSuggestion() {
// Complete any pending candidate query first
if (mHandler.hasMessages(MSG_UPDATE_SUGGESTIONS)) {
mHandler.removeMessages(MSG_UPDATE_SUGGESTIONS);
postUpdateSuggestionsNow();
}
if (mBestWord != null) {
TextEntryState.acceptedDefault(mWord.getTypedWord(), mBestWord);
mJustAccepted = true;
pickSuggestion(mBestWord);
// Add the word to the auto dictionary if it's not a known word
addToDictionaries(mBestWord, AutoDictionary.FREQUENCY_FOR_TYPED);
return true;
}
return false;
}
private CharSequence pickSuggestion(CharSequence suggestion) {
if (mCapsLock) {
suggestion = suggestion.toString().toUpperCase();
} else if (preferCapitalization()
|| (mKeyboardSwitcher.isAlphabetMode() && (mInputView != null) && mInputView .isShifted())) {
suggestion = Character.toUpperCase(suggestion.charAt(0))
+ suggestion.subSequence(1, suggestion.length()).toString();
}
InputConnection ic = getCurrentInputConnection();
if (ic != null) {
ic.commitText(suggestion, 1);
}
mPredicting = false;
mCommittedLength = suggestion.length();
if (mCandidateView != null) {
mCandidateView.setSuggestions(null, false, false, false);
}
updateShiftKeyState(getCurrentInputEditorInfo());
return suggestion;
}
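/**
 * Called when the user taps a candidate: commits the completion or the chosen
 * suggestion, optionally appends an automatic space and may show the
 * "add to dictionary" hint.
 */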
public void pickSuggestionManually(int index, CharSequence suggestion) {
final boolean correcting = TextEntryState.isCorrecting();
final InputConnection ic = getCurrentInputConnection();
if (ic != null) {
ic.beginBatchEdit();
}
try
{
if (mCompletionOn && mCompletions != null && index >= 0
&& index < mCompletions.length) {
CompletionInfo ci = mCompletions[index];
if (ic != null) {
ic.commitCompletion(ci);
}
mCommittedLength = suggestion.length();
if (mCandidateView != null) {
mCandidateView.clear();
}
updateShiftKeyState(getCurrentInputEditorInfo());
return;
}
pickSuggestion(suggestion, correcting);
// Add the word to the auto dictionary if it's not a known word
if (index == 0) {
addToDictionaries(suggestion, AutoDictionary.FREQUENCY_FOR_PICKED);
}
TextEntryState.acceptedSuggestion(mComposing.toString(), suggestion);
// Follow it with a space
if (mAutoSpace && !correcting) {
sendSpace();
mJustAddedAutoSpace = true;
}
final boolean showingAddToDictionaryHint = index == 0 && mCorrectionMode > 0
&& !mSuggest.isValidWord(suggestion)
&& !mSuggest.isValidWord(suggestion.toString().toLowerCase());
if (!correcting) {
// Fool the state watcher so that a subsequent backspace will not do a revert, unless
// we just did a correction, in which case we need to stay in
// TextEntryState.State.PICKED_SUGGESTION state.
TextEntryState.typedCharacter((char) KeyCodes.SPACE, true);
setNextSuggestions();
} else if (!showingAddToDictionaryHint) {
// If we're not showing the "Touch again to save", then show corrections again.
// In case the cursor position doesn't change, make sure we show the suggestions again.
clearSuggestions();
//postUpdateOldSuggestions();
}
if (showingAddToDictionaryHint && mCandidateView != null) {
mCandidateView.showAddToDictionaryHint(suggestion);
}
}
finally
{
if (ic != null)
{
ic.endBatchEdit();
}
}
}
/**
* Commits the chosen word to the text field and saves it for later
* retrieval.
* @param suggestion the suggestion picked by the user to be committed to
* the text field
* @param correcting whether this is due to a correction of an existing
* word.
*/
private void pickSuggestion(CharSequence suggestion, boolean correcting) {
if (mCapsLock) {
suggestion = suggestion.toString().toUpperCase();
} else if (preferCapitalization()
|| (mKeyboardSwitcher.isAlphabetMode() && (mInputView != null) && mInputView .isShifted())) {
suggestion = Character.toUpperCase(suggestion.charAt(0))
+ suggestion.subSequence(1, suggestion.length()).toString();
}
InputConnection ic = getCurrentInputConnection();
if (ic != null) {
ic.commitText(suggestion, 1);
}
mPredicting = false;
mCommittedLength = suggestion.length();
if (mCandidateView != null) {
mCandidateView.setSuggestions(null, false, false, false);
}
// If we just corrected a word, then don't show punctuations
if (!correcting) {
setNextSuggestions();
}
updateShiftKeyState(getCurrentInputEditorInfo());
}
private boolean isCursorTouchingWord() {
InputConnection ic = getCurrentInputConnection();
if (ic == null)
return false;
CharSequence toLeft = ic.getTextBeforeCursor(1, 0);
CharSequence toRight = ic.getTextAfterCursor(1, 0);
if (!TextUtils.isEmpty(toLeft) && !isWordSeparator(toLeft.charAt(0))) {
return true;
}
if (!TextUtils.isEmpty(toRight) && !isWordSeparator(toRight.charAt(0))) {
return true;
}
return false;
}
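/**
 * Restores the last committed word back into the composing region (undoing an
 * auto-accept) so the user can keep editing it after a backspace.
 */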
public void revertLastWord(boolean deleteChar) {
final int length = mComposing.length();
if (!mPredicting && length > 0) {
final InputConnection ic = getCurrentInputConnection();
mPredicting = true;
ic.beginBatchEdit();
mJustRevertedSeparator = ic.getTextBeforeCursor(1, 0);
if (deleteChar)
ic.deleteSurroundingText(1, 0);
int toDelete = mCommittedLength;
CharSequence toTheLeft = ic.getTextBeforeCursor(mCommittedLength, 0);
if (toTheLeft != null && toTheLeft.length() > 0
&& isWordSeparator(toTheLeft.charAt(0))) {
toDelete--;
}
ic.deleteSurroundingText(toDelete, 0);
ic.setComposingText(mComposing, 1);
TextEntryState.backspace();
ic.endBatchEdit();
postUpdateSuggestions();
} else {
sendDownUpKeyEvents(KeyEvent.KEYCODE_DEL);
mJustRevertedSeparator = null;
}
}
// private void setOldSuggestions() {
// //mShowingVoiceSuggestions = false;
// if (mCandidateView != null && mCandidateView.isShowingAddToDictionaryHint()) {
// return;
// }
// InputConnection ic = getCurrentInputConnection();
// if (ic == null) return;
// if (!mPredicting) {
// // Extract the selected or touching text
// EditingUtil.SelectedWord touching = EditingUtil.getWordAtCursorOrSelection(ic,
// mLastSelectionStart, mLastSelectionEnd, mWordSeparators);
//
// if (touching != null && touching.word.length() > 1) {
// ic.beginBatchEdit();
//
// if (!applyVoiceAlternatives(touching) && !applyTypedAlternatives(touching)) {
// abortCorrection(true);
// } else {
// TextEntryState.selectedForCorrection();
// EditingUtil.underlineWord(ic, touching);
// }
//
// ic.endBatchEdit();
// } else {
// abortCorrection(true);
// setNextSuggestions(); // Show the punctuation suggestions list
// }
// } else {
// abortCorrection(true);
// }
// }
private void setNextSuggestions() {
setSuggestions(new ArrayList<CharSequence>(), false, false, false);
}
public boolean isWordSeparator(int code) {
// String separators = getWordSeparators();
// return separators.contains(String.valueOf((char)code));
return (!isAlphabet(code));
}
private void sendSpace() {
sendKeyChar((char)KeyCodes.SPACE);
updateShiftKeyState(getCurrentInputEditorInfo());
}
public boolean preferCapitalization() {
return mWord.isFirstCharCapitalized();
}
public void swipeRight(boolean onSpaceBar) {
final int keyCode = mConfig.getSwipeRightKeyCode();
if (keyCode != 0)
onKey(keyCode, new int[]{keyCode}, SWIPE_CORD, SWIPE_CORD);
}
public void swipeLeft(boolean onSpaceBar) {
final int keyCode = mConfig.getSwipeLeftKeyCode();
if (keyCode != 0)
onKey(keyCode, new int[]{keyCode}, SWIPE_CORD, SWIPE_CORD);
}
private void nextAlterKeyboard(EditorInfo currentEditorInfo)
{
if(DEBUG)Log.d(TAG, "nextAlterKeyboard: currentEditorInfo.inputType="
+ currentEditorInfo.inputType);
//AnyKeyboard currentKeyboard = mKeyboardSwitcher.getCurrentKeyboard();
if (mCurrentKeyboard == null) {
if (DEBUG) Log.d("AnySoftKeyboard", "nextKeyboard: Looking for next keyboard. No current keyboard.");
} else {
if (DEBUG) Log.d("AnySoftKeyboard", "nextKeyboard: Looking for next keyboard. Current keyboard is:"
+ mCurrentKeyboard.getKeyboardName());
}
mCurrentKeyboard = mKeyboardSwitcher.nextAlterKeyboard(currentEditorInfo);
Log.i(TAG, "nextAlterKeyboard: Setting next keyboard to: "
+ mCurrentKeyboard.getKeyboardName());
}
private void nextKeyboard(EditorInfo currentEditorInfo,
KeyboardSwitcher.NextKeyboardType type) {
if (DEBUG) Log.d(TAG, "nextKeyboard: currentEditorInfo.inputType="
+ currentEditorInfo.inputType + " type:" + type);
// in numeric keyboards, the LANG key will go back to the original
// alphabet keyboard-
// so no need to look for the next keyboard, 'mLastSelectedKeyboard'
// holds the last
// keyboard used.
mCurrentKeyboard = mKeyboardSwitcher.nextKeyboard(currentEditorInfo, type);
if (!(mCurrentKeyboard instanceof GenericKeyboard))
mSentenceSeparators = mCurrentKeyboard.getSentenceSeparators();
setKeyboardStuff(currentEditorInfo, type, mCurrentKeyboard);
}
private void setKeyboardStuff(EditorInfo currentEditorInfo,
KeyboardSwitcher.NextKeyboardType type, AnyKeyboard currentKeyboard) {
Log.i(TAG, "nextKeyboard: Setting next keyboard to: "
+ currentKeyboard.getKeyboardName());
updateShiftKeyState(currentEditorInfo);
mCapsLock = currentKeyboard.isShiftLocked();
mLastCharacterShiftState = LAST_CHAR_SHIFT_STATE_DEFAULT;
// changing dictionary
setDictionariesForCurrentKeyboard();
// Notifying if needed
if ((mKeyboardChangeNotificationType
.equals(KEYBOARD_NOTIFICATION_ALWAYS))
|| (mKeyboardChangeNotificationType
.equals(KEYBOARD_NOTIFICATION_ON_PHYSICAL) && (type == NextKeyboardType.AlphabetSupportsPhysical))) {
notifyKeyboardChangeIfNeeded();
}
}
public void swipeDown(boolean onSpaceBar) {
final int keyCode = mConfig.getSwipeDownKeyCode();
if (keyCode != 0)
onKey(keyCode, new int[]{keyCode}, SWIPE_CORD, SWIPE_CORD);
}
public void swipeUp(boolean onSpaceBar) {
if (DEBUG) Log.d(TAG, "swipeUp: started at spacebar? "+onSpaceBar);
final int keyCode = mConfig.getSwipeUpKeyCode();
if (keyCode != 0)
onKey(keyCode, new int[]{keyCode}, SWIPE_CORD, SWIPE_CORD);
}
public void onPress(int primaryCode) {
if (DEBUG) Log.d(TAG, "onPress:"+primaryCode);
if (mVibrationDuration > 0 && primaryCode!=0) {
mVibrator.vibrate(mVibrationDuration);
}
final boolean distinctMultiTouch = mInputView.hasDistinctMultitouch();
if (distinctMultiTouch && primaryCode == Keyboard.KEYCODE_SHIFT) {
mShiftKeyState.onPress();
handleShift(false);
} else {
mShiftKeyState.onOtherKeyPressed();
}
if (distinctMultiTouch && primaryCode == AnyKeyboard.KEYCODE_CTRL) {
mControlKeyState.onPress();
handleControl(false);
} else {
mControlKeyState.onOtherKeyPressed();
}
if (mSoundOn && (!mSilentMode) && primaryCode!=0) {
final int keyFX;
switch (primaryCode) {
case 13:
case 10:
keyFX = AudioManager.FX_KEYPRESS_RETURN;
break;
case Keyboard.KEYCODE_DELETE:
keyFX = AudioManager.FX_KEYPRESS_DELETE;
break;
case 32:
keyFX = AudioManager.FX_KEYPRESS_SPACEBAR;
break;
default:
keyFX = AudioManager.FX_KEY_CLICK;
}
final float fxVolume;
//creating a scope to make sure volume and maxVolume
//are not used outside of it
{
final int volume;
final int maxVolume;
if (mSoundVolume > 0)
{
volume = mSoundVolume;
maxVolume = 100;
if (DEBUG)
Log.d(TAG, "Sound on key-pressed. Taking custom volume: "+volume+" out of "+maxVolume);
//pre-eclair
// volume is between 0..8 (float)
//eclair
// volume is between 0..1 (float)
if (Workarounds.getApiLevel() >= 5)
{
fxVolume = ((float)volume)/((float)maxVolume);
}
else
{
fxVolume = 8*((float)volume)/((float)maxVolume);
}
}
else
{
fxVolume = -1.0f;
}
}
if (DEBUG) Log.d(TAG, "Sound on key-pressed. Sound ID:"
+ keyFX + " with volume " + fxVolume);
mAudioManager.playSoundEffect(keyFX, fxVolume);
}
}
public void onRelease(int primaryCode) {
if (DEBUG) Log.d(TAG, "onRelease:"+primaryCode);
// vibrate();
// Reset any drag flags in the keyboard
//((AnyKeyboard) mInputView.getKeyboard()).keyReleased();
//vibrate();
final boolean distinctMultiTouch = mInputView.hasDistinctMultitouch();
if (distinctMultiTouch && primaryCode == Keyboard.KEYCODE_SHIFT) {
if (mShiftKeyState.isMomentary())
handleShift(true);
mShiftKeyState.onRelease();
}
if (distinctMultiTouch && primaryCode == AnyKeyboard.KEYCODE_CTRL) {
if (mControlKeyState.isMomentary())
handleControl(true);
mControlKeyState.onRelease();
}
//the user lifted the finger, let's handle the shift
if (primaryCode != Keyboard.KEYCODE_SHIFT)
updateShiftKeyState(getCurrentInputEditorInfo());
}
// receive ringer mode changes to detect silent mode
private BroadcastReceiver mReceiver = new BroadcastReceiver() {
@Override
public void onReceive(Context context, Intent intent) {
updateRingerMode();
}
};
// update flags for silent mode
private void updateRingerMode() {
mSilentMode = (mAudioManager.getRingerMode() != AudioManager.RINGER_MODE_NORMAL);
}
private void loadSettings() {
// setting all values to default
PreferenceManager.setDefaultValues(this, R.layout.prefs, false);
// Get the settings preferences
SharedPreferences sp = PreferenceManager
.getDefaultSharedPreferences(this);
mVibrationDuration = Integer.parseInt(sp.getString(
getString(R.string.settings_key_vibrate_on_key_press_duration),
getString(R.string.settings_default_vibrate_on_key_press_duration)));
mSoundOn = sp.getBoolean(getString(R.string.settings_key_sound_on), getResources().getBoolean(R.bool.settings_default_sound_on));
if (mSoundOn) {
Log.i(TAG, "Loading sounds effects from AUDIO_SERVICE due to configuration change.");
mAudioManager.loadSoundEffects();
}
// checking the volume
boolean customVolume = sp.getBoolean("use_custom_sound_volume", false);
int newVolume;
if (customVolume) {
newVolume = sp.getInt("custom_sound_volume", 0) + 1;
Log.i(TAG, "Custom volume checked: " + newVolume+" out of 100");
} else {
Log.i(TAG, "Custom volume un-checked.");
newVolume = -1;
}
mSoundVolume = newVolume;
// in order to support the old type of configuration
mKeyboardChangeNotificationType = sp.getString(
getString(R.string.settings_key_physical_keyboard_change_notification_type),
getString(R.string.settings_default_physical_keyboard_change_notification_type));
// now clearing the notification, and it will be re-shown if needed
mInputMethodManager.hideStatusIcon(mImeToken);
//mNotificationManager.cancel(KEYBOARD_NOTIFICATION_ID);
// should it be always on?
if (mKeyboardChangeNotificationType.equals(KEYBOARD_NOTIFICATION_ALWAYS))
notifyKeyboardChangeIfNeeded();
mAutoCap = sp.getBoolean("auto_caps", true);
boolean newShowSuggestions = sp.getBoolean("candidates_on", true);
boolean suggestionsChanged = (newShowSuggestions != mShowSuggestions);
mShowSuggestions = newShowSuggestions;
// why check that it is "false"? Because it starts as "false", so it is
// not 'changed'.
if (suggestionsChanged || (!mShowSuggestions))
setDictionariesForCurrentKeyboard();
mAutoComplete = sp.getBoolean("auto_complete", true) && mShowSuggestions;
mQuickFixes = sp.getBoolean("quick_fix", true);
mAutoCorrectOn = /* mSuggest != null && *//*
* Suggestion always exists,
* maybe not at the moment, but
* shortly
*/
(mAutoComplete || mQuickFixes);
mCorrectionMode = mAutoComplete ? 2
: (mShowSuggestions/* mQuickFixes */? 1 : 0);
mSmileyOnShortPress = sp.getBoolean(getString(R.string.settings_key_emoticon_long_press_opens_popup), getResources().getBoolean(R.bool.settings_default_emoticon_long_press_opens_popup));
// mSmileyPopupType = sp.getString(getString(R.string.settings_key_smiley_popup_type), getString(R.string.settings_default_smiley_popup_type));
mOverrideQuickTextText = sp.getString(getString(R.string.settings_key_emoticon_default_text), null);
((ConfigurationImpl) mConfig).handleConfigurationChange(sp);
}
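/**
 * Binds the main, user and auto dictionaries that match the currently selected
 * alphabet keyboard, honoring any per-keyboard dictionary override, or releases
 * them when prediction is disabled.
 */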
private void setDictionariesForCurrentKeyboard() {
if (mSuggest != null) {
if (!mPredictionOn) {
if (DEBUG)Log.d(TAG, "No suggestion is required. I'll try to release memory from the dictionary.");
//DictionaryFactory.getInstance().releaseAllDictionaries();
mSuggest.setMainDictionary(null);
mSuggest.setUserDictionary(null);
mSuggest.setAutoDictionary(null);
mLastDictionaryRefresh = -1;
} else {
mLastDictionaryRefresh = SystemClock.elapsedRealtime();
// mKeyboardSwitcher is null at the creation of the application.
if ((mKeyboardSwitcher != null)
&& mKeyboardSwitcher.isAlphabetMode()) {
AnyKeyboard currentKeyobard = mKeyboardSwitcher.getCurrentKeyboard();
// if there is a mapping in the settings, we'll use that, else we'll
// return the default
String mappingSettingsKey = getDictionaryOverrideKey(currentKeyobard);
String defaultDictionary = currentKeyobard.getDefaultDictionaryLocale();
String dictionaryValue = getSharedPreferences().getString(mappingSettingsKey, null);
Dictionary dictionary = null;
if (dictionaryValue == null){
dictionary = DictionaryFactory.getInstance().getDictionaryByLanguage(currentKeyobard.getDefaultDictionaryLocale(), this);
} else {
if (DEBUG)
{
Log.d("AnySoftKeyboard", "Default dictionary '" + (defaultDictionary == null? "None" : defaultDictionary)
+ "' for keyboard '" + currentKeyobard.getKeyboardPrefId()
+ "' has been overriden to '" + dictionaryValue + "'");
}
dictionary = DictionaryFactory.getInstance().getDictionaryById(dictionaryValue, this);
}
mSuggest.setMainDictionary(dictionary);
mUserDictionary = DictionaryFactory.getInstance().createUserDictionary(this, defaultDictionary);
mSuggest.setUserDictionary(mUserDictionary);
mAutoDictionary = DictionaryFactory.getInstance().createAutoDictionary(this, this, defaultDictionary);
mSuggest.setAutoDictionary(mAutoDictionary);
}
}
}
}
private String getDictionaryOverrideKey(AnyKeyboard currentKeyboard) {
String mappingSettingsKey = currentKeyboard.getKeyboardPrefId()
+ "_override_dictionary";
return mappingSettingsKey;
}
private void launchSettings() {
handleClose();
Intent intent = new Intent();
intent.setClass(AnySoftKeyboard.this, MainSettings.class);
intent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
startActivity(intent);
}
private void launchDictionaryOverriding() {
//AnyKeyboard currentKeyboard = mKeyboardSwitcher.getCurrentKeyboard();
final String dictionaryOverridingKey = getDictionaryOverrideKey(mCurrentKeyboard);
AlertDialog.Builder builder = new AlertDialog.Builder(this);
builder.setCancelable(true);
builder.setIcon(R.drawable.icon_8_key);
builder.setTitle(getResources().getString(
R.string.override_dictionary_title,
mCurrentKeyboard.getKeyboardName()));
builder.setNegativeButton(android.R.string.cancel, null);
ArrayList<CharSequence> dictionaryIds = new ArrayList<CharSequence>();
ArrayList<CharSequence> dictionaries = new ArrayList<CharSequence>();
// null dictionary is handled as the default for the keyboard
dictionaryIds.add(null);
dictionaries.add(getString(R.string.override_dictionary_default));
//going over all installed dictionaries
for (DictionaryAddOnAndBuilder dictionaryBuilder : ExternalDictionaryFactory.getAllAvailableExternalDictionaries(this)) {
dictionaryIds.add(dictionaryBuilder.getId());
String description = dictionaryBuilder.getDescription();
if(description != null && description.length() != 0) {
description = " (" + description + ")";
}
dictionaries.add(dictionaryBuilder.getName() + description);
}
final CharSequence[] ids = new CharSequence[dictionaryIds.size()];
final CharSequence[] items = new CharSequence[dictionaries.size()];
dictionaries.toArray(items);
dictionaryIds.toArray(ids);
builder.setItems(items, new DialogInterface.OnClickListener() {
public void onClick(DialogInterface di, int position) {
di.dismiss();
Editor editor = getSharedPreferences().edit();
switch (position) {
case 0:
if (DEBUG) Log.d(TAG, "Dictionary overriden disabled. User selected default.");
editor.remove(dictionaryOverridingKey);
showToastMessage(R.string.override_disabled, true);
break;
default:
if ((position < 0) || (position >= items.length)) {
if (DEBUG) Log.d(TAG, "Dictionary override dialog canceled.");
} else {
CharSequence id = ids[position];
String selectedDictionaryId = (id == null) ? null : id.toString();
String selectedLanguageString = items[position]
.toString();
if (DEBUG) Log.d(TAG, "Dictionary override. User selected "
+ selectedLanguageString + " which corresponds to id "
+ ((selectedDictionaryId == null) ? "(null)" : selectedDictionaryId));
editor.putString(dictionaryOverridingKey,
selectedDictionaryId);
showToastMessage(getString(R.string.override_enabled,
selectedLanguageString), true);
}
break;
}
editor.commit();
setDictionariesForCurrentKeyboard();
}
});
mOptionsDialog = builder.create();
Window window = mOptionsDialog.getWindow();
WindowManager.LayoutParams lp = window.getAttributes();
lp.token = mInputView.getWindowToken();
lp.type = WindowManager.LayoutParams.TYPE_APPLICATION_ATTACHED_DIALOG;
window.setAttributes(lp);
window.addFlags(WindowManager.LayoutParams.FLAG_ALT_FOCUSABLE_IM);
mOptionsDialog.show();
}
private void showOptionsMenu() {
AlertDialog.Builder builder = new AlertDialog.Builder(this);
builder.setCancelable(true);
builder.setIcon(R.drawable.icon_8_key);
builder.setNegativeButton(android.R.string.cancel, null);
CharSequence itemSettings = getString(R.string.ime_settings);
CharSequence itemOverrideDictionary = getString(R.string.override_dictionary);
CharSequence itemInputMethod = getString(R.string.change_ime);
builder.setItems(new CharSequence[] { itemSettings,
itemOverrideDictionary, itemInputMethod },
new DialogInterface.OnClickListener() {
public void onClick(DialogInterface di, int position) {
di.dismiss();
switch (position) {
case 0:
launchSettings();
break;
case 1:
launchDictionaryOverriding();
break;
case 2:
((InputMethodManager) getSystemService(Context.INPUT_METHOD_SERVICE))
.showInputMethodPicker();
break;
}
}
});
builder.setTitle(getResources().getString(R.string.ime_name));
mOptionsDialog = builder.create();
Window window = mOptionsDialog.getWindow();
WindowManager.LayoutParams lp = window.getAttributes();
lp.token = mInputView.getWindowToken();
lp.type = WindowManager.LayoutParams.TYPE_APPLICATION_ATTACHED_DIALOG;
window.setAttributes(lp);
window.addFlags(WindowManager.LayoutParams.FLAG_ALT_FOCUSABLE_IM);
mOptionsDialog.show();
}
@Override
public void onConfigurationChanged(Configuration newConfig) {
// If orientation changed while predicting, commit the change
if (newConfig.orientation != mOrientation) {
commitTyped(getCurrentInputConnection());
mOrientation = newConfig.orientation;
mKeyboardSwitcher.makeKeyboards(true);
}
super.onConfigurationChanged(newConfig);
}
public void onSharedPreferenceChanged(SharedPreferences sharedPreferences, String key) {
if (DEBUG)Log.d(TAG, "onSharedPreferenceChanged - key:" + key);
AnyApplication.requestBackupToCloud();
boolean isKeyboardKey = key.startsWith(KeyboardAddOnAndBuilder.KEYBOARD_PREF_PREFIX);
boolean isDictionaryKey = key.startsWith("dictionary_");
boolean isQuickTextKey = key.equals(getString(R.string.settings_key_active_quick_text_key));
if (isKeyboardKey || isDictionaryKey || isQuickTextKey) {
mKeyboardSwitcher.makeKeyboards(true);
}
loadSettings();
if ( isDictionaryKey ||
key.equals(getString(R.string.settings_key_use_contacts_dictionary)) ||
key.equals(getString(R.string.settings_key_use_auto_dictionary)))
{
setDictionariesForCurrentKeyboard();
}
else if (
//key.equals(getString(R.string.settings_key_top_keyboard_row_id)) ||
key.equals(getString(R.string.settings_key_ext_kbd_bottom_row_key)) ||
key.equals(getString(R.string.settings_key_ext_kbd_top_row_key)) ||
key.equals(getString(R.string.settings_key_ext_kbd_ext_ketboard_key)) ||
key.equals(getString(R.string.settings_key_ext_kbd_hidden_bottom_row_key)) ||
key.equals(getString(R.string.settings_key_keyboard_theme_key)) ||
key.equals("zoom_factor_keys_in_portrait") ||
key.equals("zoom_factor_keys_in_landscape") ||
key.equals(getString(R.string.settings_key_smiley_icon_on_smileys_key)) ||
key.equals(getString(R.string.settings_key_long_press_timeout)) ||
key.equals(getString(R.string.settings_key_multitap_timeout)))
{
//in some cases we do want to force keyboard re-creation
handleClose();
mKeyboardSwitcher.makeKeyboards(true);
if (key.equals(getString(R.string.settings_key_keyboard_theme_key)))
{
//also recreate keyboard view
setInputView(onCreateInputView());
setCandidatesView(onCreateCandidatesView());
setCandidatesViewShown(false);
}
}
}
public void appendCharactersToInput(CharSequence textToCommit) {
if (DEBUG)
Log.d(TAG, "appendCharactersToInput: '"+ textToCommit+"'");
for(int index=0; index<textToCommit.length(); index++)
{
final char c = textToCommit.charAt(index);
mWord.add(c, new int[]{c});
}
mComposing.append(textToCommit);
if (mCompletionOn)
getCurrentInputConnection().setComposingText(mWord.getTypedWord(), textToCommit.length());
else
commitTyped(getCurrentInputConnection());
updateShiftKeyState(getCurrentInputEditorInfo());
}
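/**
* Deletes up to countToDelete characters from the current input. If there is composing
* text it is trimmed (along with mWord); the editor is then updated either by resetting
* the composing region (when completions are on) or by deleting the surrounding text.
*/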
public void deleteLastCharactersFromInput(int countToDelete) {
if (countToDelete == 0)
return;
final int currentLength = mComposing.length();
boolean shouldDeleteUsingCompletion;
if (currentLength > 0) {
shouldDeleteUsingCompletion = true;
if (currentLength > countToDelete) {
mComposing.delete(currentLength - countToDelete, currentLength);
int deletesLeft = countToDelete;
while(deletesLeft > 0)
{
mWord.deleteLast();
deletesLeft--;
}
} else {
mComposing.setLength(0);
mWord.reset();
}
} else {
shouldDeleteUsingCompletion = false;
}
InputConnection ic = getCurrentInputConnection();
if(ic != null){
if (mCompletionOn && shouldDeleteUsingCompletion) {
ic.setComposingText(mComposing, 1);
// updateCandidates();
} else {
ic.deleteSurroundingText(countToDelete, 0);
}
}
updateShiftKeyState(getCurrentInputEditorInfo());
}
public SharedPreferences getSharedPreferences() {
return PreferenceManager.getDefaultSharedPreferences(this);
}
public void showToastMessage(int resId, boolean forShortTime) {
CharSequence text = getResources().getText(resId);
showToastMessage(text, forShortTime);
}
private void showToastMessage(CharSequence text, boolean forShortTime) {
int duration = forShortTime ? Toast.LENGTH_SHORT : Toast.LENGTH_LONG;
if (DEBUG)
Log.v("AnySoftKeyboard", "showToastMessage: '" + text + "'. For: "
+ duration);
Toast.makeText(this.getApplication(), text, duration).show();
}
@Override
public void onLowMemory() {
Log.w(TAG, "The OS has reported that it is low on memory!. I'll try to clear some cache.");
mKeyboardSwitcher.onLowMemory();
//DictionaryFactory.getInstance().onLowMemory(mSuggest.getMainDictionary());
super.onLowMemory();
}
private InputConnection mEditingInput = null;
private TextView mCandidateCloseText;
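/** Begins a batch edit on the current input connection; paired with endInputConnectionEdit(). */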
public void startInputConnectionEdit() {
mEditingInput = getCurrentInputConnection();
if (mEditingInput != null)
mEditingInput.beginBatchEdit();
}
public void endInputConnectionEdit() {
if (mEditingInput != null)
{
try
{
mEditingInput.endBatchEdit();
}
catch(Exception e)
{
//it could be dead already.
e.printStackTrace();
}
}
}
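/**
* Shows the quick-text popup keyboard. For a pack bundled inside this app the regular
* quick-text key long-press is simulated; otherwise the pack's own popup keyboard is shown.
*/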
private void showQuickTextKeyPopupKeyboard(QuickTextKey quickTextKey) {
if (mInputView != null) {
if (quickTextKey.getPackageContext() == getApplicationContext()) {
mInputView.simulateLongPress(AnyKeyboard.KEYCODE_QUICK_TEXT);
} else {
mInputView.showQuickTextPopupKeyboard(quickTextKey.getPackageContext(), quickTextKey);
}
}
}
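/**
* Lazily builds and shows a dialog listing the quick-text pack's entries (name, output text
* and optional icon); picking an entry commits its text via onText().
*/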
private void showQuickTextKeyPopupList(final QuickTextKey key) {
if (mQuickTextKeyDialog == null) {
String[] names = key.getPopupListNames();
final String[] texts = key.getPopupListValues();
int[] icons = key.getPopupListIconResIds();
final int N = names.length;
List<Map<String, ?>> entries = new ArrayList<Map<String, ?>>();
for (int i = 0; i < N; i++) {
HashMap<String, Object> entry = new HashMap<String, Object>();
entry.put("name", names[i]);
entry.put("text", texts[i]);
if (icons != null) entry.put("icons", icons[i]);
entries.add(entry);
}
int layout;
String[] from;
int[] to;
if (icons == null) {
layout = R.layout.quick_text_key_menu_item_without_icon;
from = new String[] {"name", "text"};
to = new int[] {R.id.quick_text_name, R.id.quick_text_output};
} else {
layout = R.layout.quick_text_key_menu_item_with_icon;
from = new String[] {"name", "text", "icons"};
to = new int[]{R.id.quick_text_name, R.id.quick_text_output, R.id.quick_text_icon};
}
final SimpleAdapter a = new SimpleAdapter(this, entries, layout, from, to);
SimpleAdapter.ViewBinder viewBinder = new SimpleAdapter.ViewBinder() {
public boolean setViewValue(View view, Object data, String textRepresentation) {
if (view instanceof ImageView) {
Drawable img = key.getPackageContext().getResources().getDrawable((Integer) data);
((ImageView) view).setImageDrawable(img);
return true;
}
return false;
}
};
a.setViewBinder(viewBinder);
AlertDialog.Builder b = new AlertDialog.Builder(this);
b.setTitle(getString(R.string.menu_insert_smiley));
b.setCancelable(true);
b.setAdapter(a, new DialogInterface.OnClickListener() {
@SuppressWarnings("unchecked")
public final void onClick(DialogInterface dialog, int which) {
HashMap<String, Object> item = (HashMap<String, Object>) a.getItem(which);
onText((String)item.get("text"));
dialog.dismiss();
}
});
mQuickTextKeyDialog = b.create();
Window window = mQuickTextKeyDialog.getWindow();
WindowManager.LayoutParams lp = window.getAttributes();
lp.token = mInputView.getWindowToken();
lp.type = WindowManager.LayoutParams.TYPE_APPLICATION_ATTACHED_DIALOG;
window.setAttributes(lp);
window.addFlags(WindowManager.LayoutParams.FLAG_ALT_FOCUSABLE_IM);
}
mQuickTextKeyDialog.show();
}
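/** Adds the word to the user dictionary with the given frequency, unless it is already known. */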
public void promoteToUserDictionary(String word, int frequency) {
if (mUserDictionary.isValidWord(word)) return;
mUserDictionary.addWord(word, frequency);
}
public WordComposer getCurrentWord() {
return mWord;
}
/**
* Override this to control when the soft input area should be shown to
* the user. The default implementation only shows the input view when
* there is no hard keyboard or the keyboard is hidden. If you change what
* this returns, you will need to call {@link #updateInputViewShown()}
* yourself whenever the returned value may have changed to have it
* re-evaluated and applied.
* This needs to be re-coded for Issue 620
*/
@Override
public boolean onEvaluateInputViewShown() {
Configuration config = getResources().getConfiguration();
return config.keyboard == Configuration.KEYBOARD_NOKEYS
|| config.hardKeyboardHidden == Configuration.KEYBOARDHIDDEN_YES;
}
public void onCancel() {
//don't know what to do here.
}
public void forceKeyboardsRecreation() {
if (mKeyboardSwitcher != null) mKeyboardSwitcher.makeKeyboards(true);
}
}
|
src/com/anysoftkeyboard/AnySoftKeyboard.java
|
/*
* Copyright (C) 2011 AnySoftKeyboard.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.anysoftkeyboard;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import android.app.AlertDialog;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.IntentFilter;
import android.content.SharedPreferences;
import android.content.SharedPreferences.Editor;
import android.content.SharedPreferences.OnSharedPreferenceChangeListener;
import android.content.res.Configuration;
import android.content.res.TypedArray;
import android.graphics.drawable.Drawable;
import android.inputmethodservice.InputMethodService;
import android.media.AudioManager;
import android.net.Uri;
import android.os.Debug;
import android.os.Handler;
import android.os.IBinder;
import android.os.Message;
import android.os.SystemClock;
import android.os.Vibrator;
import android.preference.PreferenceManager;
import android.text.AutoText;
import android.text.ClipboardManager;
import android.text.TextUtils;
import android.util.Log;
import android.util.TypedValue;
import android.view.KeyEvent;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewGroup;
import android.view.Window;
import android.view.WindowManager;
import android.view.inputmethod.CompletionInfo;
import android.view.inputmethod.EditorInfo;
import android.view.inputmethod.InputConnection;
import android.view.inputmethod.InputMethodManager;
import android.widget.ImageView;
import android.widget.SimpleAdapter;
import android.widget.TextView;
import android.widget.Toast;
import com.anysoftkeyboard.api.KeyCodes;
import com.anysoftkeyboard.dictionaries.AddableDictionary;
import com.anysoftkeyboard.dictionaries.AutoDictionary;
import com.anysoftkeyboard.dictionaries.Dictionary;
import com.anysoftkeyboard.dictionaries.DictionaryAddOnAndBuilder;
import com.anysoftkeyboard.dictionaries.DictionaryFactory;
import com.anysoftkeyboard.dictionaries.ExternalDictionaryFactory;
import com.anysoftkeyboard.dictionaries.Suggest;
import com.anysoftkeyboard.dictionaries.TextEntryState;
import com.anysoftkeyboard.keyboards.AnyKeyboard;
import com.anysoftkeyboard.keyboards.AnyKeyboard.HardKeyboardTranslator;
import com.anysoftkeyboard.keyboards.GenericKeyboard;
import com.anysoftkeyboard.keyboards.Keyboard;
import com.anysoftkeyboard.keyboards.KeyboardAddOnAndBuilder;
import com.anysoftkeyboard.keyboards.KeyboardSwitcher;
import com.anysoftkeyboard.keyboards.KeyboardSwitcher.NextKeyboardType;
import com.anysoftkeyboard.keyboards.physical.HardKeyboardActionImpl;
import com.anysoftkeyboard.keyboards.physical.MyMetaKeyKeyListener;
import com.anysoftkeyboard.keyboards.views.AnyKeyboardBaseView.OnKeyboardActionListener;
import com.anysoftkeyboard.keyboards.views.AnyKeyboardView;
import com.anysoftkeyboard.keyboards.views.CandidateView;
import com.anysoftkeyboard.quicktextkeys.QuickTextKey;
import com.anysoftkeyboard.quicktextkeys.QuickTextKeyFactory;
import com.anysoftkeyboard.theme.KeyboardTheme;
import com.anysoftkeyboard.theme.KeyboardThemeFactory;
import com.anysoftkeyboard.ui.settings.MainSettings;
import com.anysoftkeyboard.ui.tutorials.TutorialsProvider;
import com.anysoftkeyboard.utils.ModifierKeyState;
import com.anysoftkeyboard.utils.Workarounds;
import com.menny.android.anysoftkeyboard.AnyApplication;
import com.menny.android.anysoftkeyboard.R;
/**
* Input method implementation for Qwerty'ish keyboard.
*/
public class AnySoftKeyboard extends InputMethodService implements
OnKeyboardActionListener,
OnSharedPreferenceChangeListener, AnyKeyboardContextProvider {
private final static String TAG = "ASK";
private final static int SWIPE_CORD = -2;
/*
public final static String NOTIFY_LAYOUT_SWITCH = "com.menny.android.anysoftkeyboard.api.NOTIFY_LAYOUT_SWITCH";
//API
private static final String NOTIFY_LAYOUT_SWITCH_CURRENT_LAYOUT_RESID = "current_layout_resid";
private static final String NOTIFY_LAYOUT_SWITCH_CURRENT_LAYOUT_NAME = "current_layout_name";
private static final String NOTIFY_LAYOUT_SWITCH_CURRENT_LAYOUT_PACKAGE = "current_layout_package";
private static final String NOTIFY_LAYOUT_SWITCH_NOTIFICATION_FLAGS = "notification_flags";
private static final String NOTIFY_LAYOUT_SWITCH_NOTIFICATION_TITLE = "notification_title";
*/
private final boolean TRACE_SDCARD = false;
private static final int MSG_UPDATE_SUGGESTIONS = 0;
//private static final int MSG_START_TUTORIAL = 1;
private static final int MSG_UPDATE_SHIFT_STATE = 2;
//private static final int KEYBOARD_NOTIFICATION_ID = 1;
/*
private static final HashSet<Integer> SPACE_SWAP_CHARACTERS = new HashSet<Integer>(
6);
private static final HashSet<Integer> PUNCTUATION_CHARACTERS = new HashSet<Integer>(
16);
static {
String src = ".\n!?,:;@<>()[]{}";
for (int i = 0; i < src.length(); ++i)
PUNCTUATION_CHARACTERS.add((int) src.charAt(i));
src = ".!?,:;";
for (int i = 0; i < src.length(); ++i)
SPACE_SWAP_CHARACTERS.add((int) src.charAt(i));
}
*/
// Keep track of the last selection range to decide if we need to show word alternatives
private int mLastSelectionStart;
private int mLastSelectionEnd;
private final com.anysoftkeyboard.Configuration mConfig;
private static final boolean DEBUG = AnyApplication.DEBUG;
private ModifierKeyState mShiftKeyState = new ModifierKeyState();
private ModifierKeyState mControlKeyState = new ModifierKeyState();
private AnyKeyboardView mInputView;
private CandidateView mCandidateView;
private static final long MINIMUM_REFRESH_TIME_FOR_DICTIONARIES = 30*1000;
private long mLastDictionaryRefresh = -1;
private Suggest mSuggest;
private CompletionInfo[] mCompletions;
private AlertDialog mOptionsDialog;
private AlertDialog mQuickTextKeyDialog;
KeyboardSwitcher mKeyboardSwitcher;
private final HardKeyboardActionImpl mHardKeyboardAction;
private long mMetaState;
private AnyKeyboard mCurrentKeyboard = null;
private HashSet<Character> mSentenceSeparators = new HashSet<Character>();
//private UserDictionaryBase mContactsDictionary;
private AddableDictionary mUserDictionary;
private AutoDictionary mAutoDictionary;
private StringBuilder mComposing = new StringBuilder();
private WordComposer mWord = new WordComposer();
private int mOrientation = Configuration.ORIENTATION_PORTRAIT;
private int mCommittedLength;
/*
* Do we do prediction now
*/
private boolean mPredicting;
private CharSequence mBestWord;
private final boolean mPredictionLandscape = false;
/*
* is prediction needed for the current input connection
*/
private boolean mPredictionOn;
/*
* is out-side completions needed
*/
private boolean mCompletionOn;
private boolean mAutoSpace;
private boolean mAutoCorrectOn;
private boolean mCapsLock;
private static final String SMILEY_PLUGIN_ID = "0077b34d-770f-4083-83e4-081957e06c27";
private boolean mSmileyOnShortPress;
private String mOverrideQuickTextText = null;
private boolean mAutoCap;
private boolean mQuickFixes;
/*
* Configuration flag. Should we support dictionary suggestions
*/
private boolean mShowSuggestions = false;
private boolean mAutoComplete;
private int mCorrectionMode;
private String mKeyboardChangeNotificationType;
private static final String KEYBOARD_NOTIFICATION_ALWAYS = "1";
private static final String KEYBOARD_NOTIFICATION_ON_PHYSICAL = "2";
private static final String KEYBOARD_NOTIFICATION_NEVER = "3";
// Indicates whether the suggestion strip is to be on in landscape
private boolean mJustAccepted;
private CharSequence mJustRevertedSeparator;
private AudioManager mAudioManager;
private boolean mSilentMode;
private boolean mSoundOn;
// between 0..100. This is the custom volume
private int mSoundVolume;
private Vibrator mVibrator;
private int mVibrationDuration;
//private NotificationManager mNotificationManager;
private static AnySoftKeyboard INSTANCE;
Handler mHandler = new Handler() {
@Override
public void handleMessage(Message msg) {
switch (msg.what) {
case MSG_UPDATE_SUGGESTIONS:
performUpdateSuggestions();
break;
case MSG_UPDATE_SHIFT_STATE:
updateShiftKeyState(getCurrentInputEditorInfo());
break;
}
}
};
private boolean mJustAddedAutoSpace;
private static final int LAST_CHAR_SHIFT_STATE_DEFAULT = 0;
//private static final int LAST_CHAR_SHIFT_STATE_UNSHIFTED = 1;
private static final int LAST_CHAR_SHIFT_STATE_SHIFTED = 2;
private int mLastCharacterShiftState = LAST_CHAR_SHIFT_STATE_DEFAULT;
protected IBinder mImeToken = null;
private InputMethodManager mInputMethodManager;
public static AnySoftKeyboard getInstance() {
return INSTANCE;
}
public AnySoftKeyboard() {
// mGenericKeyboardTranslator = new
// GenericPhysicalKeyboardTranslator(this);
mConfig = AnyApplication.getConfig();
mHardKeyboardAction = new HardKeyboardActionImpl();
INSTANCE = this;
}
@Override
public AbstractInputMethodImpl onCreateInputMethodInterface() {
return new InputMethodImpl()
{
@Override
public void attachToken(IBinder token) {
super.attachToken(token);
mImeToken = token;
}
};
}
@Override
public void onCreate() {
super.onCreate();
// super.showStatusIcon(R.drawable.icon_8_key);
Log.i(TAG, "****** AnySoftKeyboard service started.");
Thread.setDefaultUncaughtExceptionHandler(new ChewbaccaUncaughtExceptionHandler(getApplication().getBaseContext(), null));
mInputMethodManager = (InputMethodManager)getSystemService(INPUT_METHOD_SERVICE);
//mNotificationManager = (NotificationManager) getSystemService(Context.NOTIFICATION_SERVICE);
mAudioManager = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
updateRingerMode();
// register to receive ringer mode changes for silent mode
IntentFilter filter = new IntentFilter(AudioManager.RINGER_MODE_CHANGED_ACTION);
registerReceiver(mReceiver, filter);
mVibrator = ((Vibrator) getSystemService(Context.VIBRATOR_SERVICE));
// setStatusIcon(R.drawable.ime_qwerty);
loadSettings();
mKeyboardSwitcher = new KeyboardSwitcher(this);
mOrientation = getResources().getConfiguration().orientation;
SharedPreferences sp = PreferenceManager
.getDefaultSharedPreferences(this);
sp.registerOnSharedPreferenceChangeListener(this);
mCurrentKeyboard = mKeyboardSwitcher.getCurrentKeyboard();
mSentenceSeparators = mCurrentKeyboard.getSentenceSeparators();
if (mSuggest == null) {
initSuggest(/* getResources().getConfiguration().locale.toString() */);
}
if (mKeyboardChangeNotificationType.equals(KEYBOARD_NOTIFICATION_ALWAYS))// should it be always on?
notifyKeyboardChangeIfNeeded();
}
@Override
public void onUnbindInput() {
if (AnyApplication.DEBUG) Log.d(TAG, "onUnbindInput");
super.onUnbindInput();
}
private void initSuggest(/* String locale */) {
// mLocale = locale;
mSuggest = new Suggest(this/* , R.raw.main */);
mSuggest.setCorrectionMode(mCorrectionMode);
setDictionariesForCurrentKeyboard();
}
@Override
public void onDestroy() {
Log.i(TAG, "AnySoftKeyboard has been destroyed! Cleaning resources..");
//DictionaryFactory.getInstance().close();
// unregisterReceiver(mReceiver);
SharedPreferences sp = PreferenceManager
.getDefaultSharedPreferences(this);
sp.unregisterOnSharedPreferenceChangeListener(this);
unregisterReceiver(mReceiver);
mInputMethodManager.hideStatusIcon(mImeToken);
//mNotificationManager.cancel(KEYBOARD_NOTIFICATION_ID);
/*
Intent i = new Intent(NOTIFY_LAYOUT_SWITCH);
//some dummy package, so that everybody removes the notification
i.putExtra(NOTIFY_LAYOUT_SWITCH_CURRENT_LAYOUT_PACKAGE, "NO_SUCH_PACKAGE");
sendBroadcast(i);
*/
TutorialsProvider.onServiceDestroy();
super.onDestroy();
}
@Override
public void onFinishInputView(boolean finishingInput) {
if (DEBUG)
Log.d(TAG, "onFinishInputView(finishingInput:"
+ finishingInput + ")");
super.onFinishInputView(finishingInput);
if (!mKeyboardChangeNotificationType
.equals(KEYBOARD_NOTIFICATION_ALWAYS)) {
mInputMethodManager.hideStatusIcon(mImeToken);
//mNotificationManager.cancel(KEYBOARD_NOTIFICATION_ID);
}
// Remove pending messages related to updating suggestions
mHandler.removeMessages(MSG_UPDATE_SUGGESTIONS);
}
@Override
public View onCreateInputView() {
if (DEBUG) Log.v(TAG, "Creating Input View");
mInputView = (AnyKeyboardView) getLayoutInflater().inflate(R.layout.main_keyboard_layout, null);
//resetting token users
mOptionsDialog = null;
mQuickTextKeyDialog = null;
mKeyboardSwitcher.setInputView(mInputView);
mInputView.setOnKeyboardActionListener(this);
return mInputView;
}
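/**
* Inflates the candidates strip, applies the current theme's text color and size to the
* "close suggestions" label, and wires the close icon so a second tap within two seconds
* aborts the current correction.
*/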
@Override
public View onCreateCandidatesView() {
mKeyboardSwitcher.makeKeyboards(false);
final ViewGroup candidateViewContainer = (ViewGroup) getLayoutInflater().inflate(R.layout.candidates, null);
mCandidateView = (CandidateView) candidateViewContainer.findViewById(R.id.candidates);
mCandidateView.setService(this);
setCandidatesViewShown(true);
final KeyboardTheme theme = KeyboardThemeFactory.getCurrentKeyboardTheme(AnySoftKeyboard.getInstance());
final TypedArray a = theme.getPackageContext().obtainStyledAttributes(null, R.styleable.AnyKeyboardBaseView, 0, theme.getThemeResId());
final int closeTextColor = a.getColor(R.styleable.AnyKeyboardBaseView_suggestionOthersTextColor, getResources().getColor(R.color.candidate_other));
final float fontSizePixel = a.getDimension(R.styleable.AnyKeyboardBaseView_suggestionTextSize, getResources().getDimensionPixelSize(R.dimen.candidate_font_height));
a.recycle();
mCandidateCloseText = (TextView)candidateViewContainer.findViewById(R.id.close_suggestions_strip_text);
mCandidateCloseText.setTextColor(closeTextColor);
mCandidateCloseText.setTextSize(TypedValue.COMPLEX_UNIT_PX, fontSizePixel);
View closeIcon = candidateViewContainer.findViewById(R.id.close_suggestions_strip_icon);
if (closeIcon != null)
{
closeIcon.setOnClickListener(new OnClickListener() {
private final static long DOUBLE_TAP_TIMEOUT = 2 * 1000;//two seconds is enough
private long mFirstClickTime = 0;
public void onClick(View v) {
final long currentTime = SystemClock.elapsedRealtime();
if (currentTime - mFirstClickTime < DOUBLE_TAP_TIMEOUT)
{
abortCorrection(true);
}
else
{
// Toast.makeText(getApplicationContext(), "Press close icon again to dismiss suggestions", Toast.LENGTH_SHORT).show();
/* List<CharSequence> l = new ArrayList<CharSequence>();
l.add(mHintText);
mCandidateView.setSuggestions(l, false, false, false);*/
mCandidateView.setSuggestions(null, false, false, false);
if (mCandidateCloseText != null) mCandidateCloseText.setVisibility(View.VISIBLE);
postUpdateSuggestions(DOUBLE_TAP_TIMEOUT - 50);
}
mFirstClickTime = currentTime;
}
});
}
return candidateViewContainer;
}
@Override
public void onStartInputView(EditorInfo attribute, boolean restarting) {
if (DEBUG) Log.d(TAG, "onStartInputView(EditorInfo:"
+ attribute.imeOptions + "," + attribute.inputType
+ ", restarting:" + restarting + ")");
super.onStartInputView(attribute, restarting);
if (mInputView == null) {
return;
}
mKeyboardSwitcher.makeKeyboards(false);
TextEntryState.newSession(this);
if (!restarting) {
// Clear shift states.
mMetaState = 0;
}
mPredictionOn = false;
mCompletionOn = false;
mCompletions = null;
mCapsLock = false;
switch (attribute.inputType & EditorInfo.TYPE_MASK_CLASS)
{
case EditorInfo.TYPE_CLASS_DATETIME:
if (DEBUG) Log.d(TAG, "Setting MODE_DATETIME as keyboard due to a TYPE_CLASS_DATETIME input.");
mKeyboardSwitcher.setKeyboardMode(KeyboardSwitcher.MODE_DATETIME, attribute);
break;
case EditorInfo.TYPE_CLASS_NUMBER:
if (DEBUG) Log.d(TAG, "Setting MODE_NUMBERS as keyboard due to a TYPE_CLASS_NUMBER input.");
mKeyboardSwitcher.setKeyboardMode(KeyboardSwitcher.MODE_NUMBERS, attribute);
break;
case EditorInfo.TYPE_CLASS_PHONE:
if (DEBUG) Log.d(TAG, "Setting MODE_PHONE as keyboard due to a TYPE_CLASS_PHONE input.");
mKeyboardSwitcher.setKeyboardMode(KeyboardSwitcher.MODE_PHONE, attribute);
break;
case EditorInfo.TYPE_CLASS_TEXT:
if (DEBUG) Log.d(TAG, "A TYPE_CLASS_TEXT input.");
final int variation = attribute.inputType & EditorInfo.TYPE_MASK_VARIATION;
switch(variation)
{
case EditorInfo.TYPE_TEXT_VARIATION_PASSWORD:
case EditorInfo.TYPE_TEXT_VARIATION_VISIBLE_PASSWORD:
if (DEBUG) Log.d(TAG, "A password TYPE_CLASS_TEXT input with no prediction");
mPredictionOn = false;
break;
default:
mPredictionOn = true;
}
if (mConfig.getInsertSpaceAfterCandidatePick())
{
switch(variation)
{
case EditorInfo.TYPE_TEXT_VARIATION_EMAIL_ADDRESS:
case EditorInfo.TYPE_TEXT_VARIATION_URI:
mAutoSpace = false;
break;
default:
mAutoSpace = true;
}
}
else
{
//some users don't want auto-space
mAutoSpace = false;
}
switch(variation)
{
case EditorInfo.TYPE_TEXT_VARIATION_EMAIL_ADDRESS:
if (DEBUG) Log.d(TAG, "Setting MODE_EMAIL as keyboard due to a TYPE_TEXT_VARIATION_EMAIL_ADDRESS input.");
mKeyboardSwitcher.setKeyboardMode(KeyboardSwitcher.MODE_EMAIL, attribute);
mPredictionOn = false;
break;
case EditorInfo.TYPE_TEXT_VARIATION_URI:
if (DEBUG) Log.d(TAG, "Setting MODE_URL as keyboard due to a TYPE_TEXT_VARIATION_URI input.");
mKeyboardSwitcher.setKeyboardMode(KeyboardSwitcher.MODE_URL, attribute);
break;
case EditorInfo.TYPE_TEXT_VARIATION_SHORT_MESSAGE:
if (DEBUG) Log.d(TAG, "Setting MODE_IM as keyboard due to a TYPE_TEXT_VARIATION_SHORT_MESSAGE input.");
mKeyboardSwitcher.setKeyboardMode(KeyboardSwitcher.MODE_IM, attribute);
break;
default:
if (DEBUG) Log.d(TAG, "Setting MODE_TEXT as keyboard due to a default input.");
mKeyboardSwitcher.setKeyboardMode(KeyboardSwitcher.MODE_TEXT, attribute);
}
final int textFlag = attribute.inputType & EditorInfo.TYPE_MASK_FLAGS;
switch(textFlag)
{
case 0x00080000://FROM API 5: EditorInfo.TYPE_TEXT_FLAG_NO_SUGGESTIONS:
case EditorInfo.TYPE_TEXT_FLAG_AUTO_COMPLETE:
if (DEBUG) Log.d(TAG, "Input requested NO_SUGGESTIONS, or it is AUTO_COMPLETE by itself.");
mPredictionOn = false;
break;
default:
//we'll keep the previous mPredictionOn value
}
break;
default:
if (DEBUG) Log.d(TAG, "Setting MODE_TEXT as keyboard due to a default input.");
//No class. Probably a console window, or no GUI input connection
mKeyboardSwitcher.setKeyboardMode(KeyboardSwitcher.MODE_TEXT, attribute);
mPredictionOn = false;
mAutoSpace = true;
}
mInputView.closing();
if (AutoText.getSize(mInputView) < 1)
mQuickFixes = true;
mComposing.setLength(0);
mPredicting = false;
//mDeleteCount = 0;
mJustAddedAutoSpace = false;
setCandidatesViewShown(false);
// loadSettings();
updateShiftKeyState(attribute);
if (mSuggest != null) {
mSuggest.setCorrectionMode(mCorrectionMode);
}
mPredictionOn = mPredictionOn && mCorrectionMode > 0;
if (mCandidateView != null)
mCandidateView.setSuggestions(null, false, false, false);
if (mPredictionOn)
{
if ((SystemClock.elapsedRealtime() - mLastDictionaryRefresh) > MINIMUM_REFRESH_TIME_FOR_DICTIONARIES)
setDictionariesForCurrentKeyboard();
}
else
{
//this will release memory
setDictionariesForCurrentKeyboard();
}
if (TRACE_SDCARD)
Debug.startMethodTracing("anysoftkeyboard_log.trace");
}
@Override
public void onFinishInput() {
if (DEBUG)
Log.d(TAG, "onFinishInput()");
super.onFinishInput();
if (mInputView != null) {
mInputView.closing();
}
if (!mKeyboardChangeNotificationType
.equals(KEYBOARD_NOTIFICATION_ALWAYS)) {
mInputMethodManager.hideStatusIcon(mImeToken);
//mNotificationManager.cancel(KEYBOARD_NOTIFICATION_ID);
// Intent i = new Intent(NOTIFY_LAYOUT_SWITCH);
// i.putExtra(NOTIFY_LAYOUT_SWITCH_CURRENT_LAYOUT_PACKAGE, NOTIFY_LAYOUT_SWITCH);//some dummy package, so that everybody removes the notification
// sendBroadcast(i);
}
// releasing some memory. Dictionaries, completions, etc.
if (mAutoDictionary != null) mAutoDictionary.flushPendingWrites();
System.gc();
}
///this function is called EVERY TIME the selection is changed. This also includes the underlined
///suggestions.
@Override
public void onUpdateSelection(int oldSelStart, int oldSelEnd,
int newSelStart, int newSelEnd,
int candidatesStart, int candidatesEnd) {
super.onUpdateSelection(oldSelStart, oldSelEnd, newSelStart, newSelEnd,
candidatesStart, candidatesEnd);
if (DEBUG) {
Log.i(TAG, "onUpdateSelection: oss=" + oldSelStart
+ ", ose=" + oldSelEnd
+ ", nss=" + newSelStart
+ ", nse=" + newSelEnd
+ ", cs=" + candidatesStart
+ ", ce=" + candidatesEnd);
}
// If the current selection in the text view changes, we should
// clear whatever candidate text we have.
if ((((mComposing.length() > 0 && mPredicting) /*|| mVoiceInputHighlighted*/)
&& (newSelStart != candidatesEnd
|| newSelEnd != candidatesEnd)
&& mLastSelectionStart != newSelStart)) {
mComposing.setLength(0);
mPredicting = false;
postUpdateSuggestions();
TextEntryState.reset();
InputConnection ic = getCurrentInputConnection();
if (ic != null) {
ic.finishComposingText();
}
//mVoiceInputHighlighted = false;
} else if (!mPredicting && !mJustAccepted) {
switch (TextEntryState.getState()) {
case ACCEPTED_DEFAULT:
TextEntryState.reset();
// fall through
case SPACE_AFTER_PICKED:
mJustAddedAutoSpace = false; // The user moved the cursor.
break;
}
}
mJustAccepted = false;
postUpdateShiftKeyState();
// Make a note of the cursor position
mLastSelectionStart = newSelStart;
mLastSelectionEnd = newSelEnd;
}
private void onPhysicalKeyboardKeyPressed() {
if (mConfig.hideSoftKeyboardWhenPhysicalKeyPressed()) hideWindow();
}
@Override
public void hideWindow() {
if (TRACE_SDCARD)
Debug.stopMethodTracing();
if (mOptionsDialog != null && mOptionsDialog.isShowing()) {
mOptionsDialog.dismiss();
mOptionsDialog = null;
}
if (mQuickTextKeyDialog != null && mQuickTextKeyDialog.isShowing()) {
mQuickTextKeyDialog.dismiss();
mQuickTextKeyDialog = null;
}
// if (mTutorial != null) {
// mTutorial.close();
// mTutorial = null;
// }
super.hideWindow();
TextEntryState.endSession();
}
@Override
public void onDisplayCompletions(CompletionInfo[] completions) {
if (DEBUG) {
Log.i(TAG, "Received completions:");
for (int i = 0; i < (completions != null ? completions.length : 0); i++) {
Log.i(TAG, " #" + i + ": " + completions[i]);
}
}
//completions should be shown if the dictionary requires it, or if we are in full-screen and have outside completions
if (mCompletionOn || (isFullscreenMode() && (completions != null))) {
if (DEBUG) Log.v(TAG, "Received completions: completion should be shown: "+mCompletionOn+" fullscreen:"+isFullscreenMode());
mCompletions = completions;
//we do completions :)
mCompletionOn = true;
if (completions == null) {
if (DEBUG) Log.v(TAG, "Received completions: completion is NULL. Clearing suggestions.");
mCandidateView.setSuggestions(null, false, false, false);
return;
}
List<CharSequence> stringList = new ArrayList<CharSequence>();
for (int i = 0; i < (completions != null ? completions.length : 0); i++) {
CompletionInfo ci = completions[i];
if (ci != null)
stringList.add(ci.getText());
}
if (DEBUG) Log.v(TAG, "Received completions: setting to suggestions view "+stringList.size()+ " completions.");
// CharSequence typedWord = mWord.getTypedWord();
setSuggestions(stringList, true, true, true);
mBestWord = null;
//I mean, if I'm here, it must be shown...
setCandidatesViewShown(true);
}
else if (DEBUG) Log.v(TAG, "Received completions: completions should not be shown.");
}
/*
@Override
public void setCandidatesViewShown(boolean shown) {
// we show prediction only in on-screen keyboard
// (onEvaluateInputViewShown)
// or if the physical keyboard supports candidates
// (mPredictionLandscape)
super.setCandidatesViewShown(shouldCandidatesStripBeShown() && shown);
}
*/
private void clearSuggestions() {
setSuggestions(null, false, false, false);
}
private void setSuggestions(
List<CharSequence> suggestions,
boolean completions,
boolean typedWordValid,
boolean haveMinimalSuggestion) {
// if (mIsShowingHint) {
// setCandidatesView(mCandidateViewContainer);
// mIsShowingHint = false;
// }
if (mCandidateView != null) {
mCandidateView.setSuggestions(
suggestions, completions, typedWordValid, haveMinimalSuggestion);
}
}
@Override
public void onComputeInsets(InputMethodService.Insets outInsets) {
super.onComputeInsets(outInsets);
if (!isFullscreenMode()) {
outInsets.contentTopInsets = outInsets.visibleTopInsets;
}
}
@Override
public boolean onEvaluateFullscreenMode() {
switch(mOrientation)
{
case Configuration.ORIENTATION_LANDSCAPE:
return mConfig.getUseFullScreenInputInLandscape();
default:
return mConfig.getUseFullScreenInputInPortrait();
}
}
@Override
public boolean onKeyDown(final int keyCode, KeyEvent event) {
// if (DEBUG)
// {
// Log.d(TAG, "onKeyDown:"+keyCode+" flags:"+event.getFlags());
//
// if (mInputView == null)
// {
// Log.d(TAG, "No input view");
// }
// else
// {
// Log.d(TAG, "\n canInteractWithUi:"+mInputView.canInteractWithUi()+"\n"+
// "getHeight:"+mInputView.getHeight()+"\n"+
// "getVisibility:"+mInputView.getVisibility()+"\n"+
// "getWindowVisibility:"+mInputView.getWindowVisibility()+"\n"+
// "isFocused:"+mInputView.isFocused()+"\n"+
// "isShown:"+mInputView.isShown()+"\n");
// }
// }
final boolean shouldTranslateSpecialKeys = AnySoftKeyboard.getInstance().isInputViewShown();
if(DEBUG){
Log.d(TAG, "isInputViewShown="+shouldTranslateSpecialKeys);
}
InputConnection ic = getCurrentInputConnection();
if (!mPredictionLandscape) {
// For all other keys, if we want to do transformations on
// text being entered with a hard keyboard, we need to process
// it and do the appropriate action.
// using physical keyboard is more annoying with candidate view in
// the way
// so we disable it.
commitTyped(ic);// to clear the underline.
mPredicting = false;
}
if (DEBUG)
Log.d(TAG, "Event: Key:" + event.getKeyCode()
+ " Shift:"
+ ((event.getMetaState() & KeyEvent.META_SHIFT_ON) != 0)
+ " ALT:"
+ ((event.getMetaState() & KeyEvent.META_ALT_ON) != 0)
+ " Repeats:" + event.getRepeatCount());
switch (keyCode) {
/**** SPECIAL translated HW keys
* If you add new keys here, do not forget to add to the
*/
case KeyEvent.KEYCODE_CAMERA:
if(shouldTranslateSpecialKeys && mConfig.useCameraKeyForBackspaceBackword()){
handleBackword(getCurrentInputConnection());
return true;
}
// DO NOT DELAY CAMERA KEY with unneeded checks in the default branch
return super.onKeyDown(keyCode, event);
case KeyEvent.KEYCODE_FOCUS:
if(shouldTranslateSpecialKeys && mConfig.useCameraKeyForBackspaceBackword()){
handleDeleteLastCharacter(false);
return true;
}
// DO NOT DELAY FOCUS KEY with unneeded checks in the default branch
return super.onKeyDown(keyCode, event);
case KeyEvent.KEYCODE_VOLUME_UP:
if(shouldTranslateSpecialKeys && mConfig.useVolumeKeyForLeftRight()){
sendDownUpKeyEvents(KeyEvent.KEYCODE_DPAD_LEFT);
return true;
}
// DO NOT DELAY VOLUME UP KEY with unneeded checks in the default branch
return super.onKeyDown(keyCode, event);
case KeyEvent.KEYCODE_VOLUME_DOWN:
if(shouldTranslateSpecialKeys && mConfig.useVolumeKeyForLeftRight()){
sendDownUpKeyEvents(KeyEvent.KEYCODE_DPAD_RIGHT);
return true;
}
// DO NOT DELAY VOLUME DOWN KEY with unneeded checks in the default branch
return super.onKeyDown(keyCode, event);
/**** END of SPECIAL translated HW keys code section
*
*/
case KeyEvent.KEYCODE_BACK:
if (event.getRepeatCount() == 0 && mInputView != null) {
if (mInputView.handleBack()) {
// consuming the meta keys
if (ic != null) {
ic.clearMetaKeyStates(Integer.MAX_VALUE);// translated, so we also take care of the metakeys.
}
mMetaState = 0;
return true;
} /*
* else if (mTutorial != null) { mTutorial.close(); mTutorial =
* null; }
*/
}
break;
case KeyEvent.KEYCODE_SHIFT_LEFT:
case KeyEvent.KEYCODE_SHIFT_RIGHT:
if (event.isAltPressed() && Workarounds.isAltSpaceLangSwitchNotPossible()) {
if(DEBUG) Log.d(TAG,
"User pressed ALT+SHIFT on motorola milestone, moving to next physical keyboard.");
// consuming the meta keys
// mHardKeyboardAction.resetMetaState();
if (ic != null) {
ic.clearMetaKeyStates(Integer.MAX_VALUE);// translated, so we also take care of the metakeys.
}
mMetaState = 0;
// only physical keyboard
nextKeyboard(getCurrentInputEditorInfo(),
NextKeyboardType.AlphabetSupportsPhysical);
return true;
}
//NOTE: letting it fallthru to the other meta-keys
case KeyEvent.KEYCODE_ALT_LEFT:
case KeyEvent.KEYCODE_ALT_RIGHT:
case KeyEvent.KEYCODE_SYM:
if (DEBUG)
Log.d(TAG+"-meta-key",
getMetaKeysStates("onKeyDown before handle"));
mMetaState = MyMetaKeyKeyListener.handleKeyDown(mMetaState,
keyCode, event);
if (DEBUG)
Log.d(TAG+"-meta-key",
getMetaKeysStates("onKeyDown after handle"));
break;
case KeyEvent.KEYCODE_SPACE:
if ((event.isAltPressed() && !Workarounds.isAltSpaceLangSwitchNotPossible()) || event.isShiftPressed()) {
if(DEBUG)
if(event.isAltPressed()){
Log.d(TAG,
"User pressed ALT+SPACE, moving to next physical keyboard.");
} else {
Log.d(TAG,
"User pressed SHIFT+SPACE, moving to next physical keyboard.");
}
// consuming the meta keys
// mHardKeyboardAction.resetMetaState();
if (ic != null) {
ic.clearMetaKeyStates(Integer.MAX_VALUE);// translated, so we also take care of the metakeys.
}
mMetaState = 0;
// only physical keyboard
nextKeyboard(getCurrentInputEditorInfo(),
NextKeyboardType.AlphabetSupportsPhysical);
return true;
}
//NOTE:
// letting it fall through to the "default"
default:
// Fix issue 185, check if we should process key repeat
if (!mConfig.getUseRepeatingKeys() && event.getRepeatCount() > 0)
return true;
if (mKeyboardSwitcher.isCurrentKeyboardPhysical()) {
// sometimes, the physical keyboard will delete input, and then
// add some.
// we'll try to make it nice
if (ic != null)
ic.beginBatchEdit();
try {
//issue 393, backword on the hw keyboard!
if(mConfig.useBackword() && keyCode == KeyEvent.KEYCODE_DEL && event.isShiftPressed()){
handleBackword(ic);
return true;
} else if (event.isPrintingKey()) {
onPhysicalKeyboardKeyPressed();
mHardKeyboardAction.initializeAction(event, mMetaState);
// http://article.gmane.org/gmane.comp.handhelds.openmoko.android-freerunner/629
AnyKeyboard current = mKeyboardSwitcher.getCurrentKeyboard();
HardKeyboardTranslator keyTranslator = (HardKeyboardTranslator) current;
if (DEBUG)
{
final String keyboardName = current.getKeyboardName();
Log.d(TAG, "Asking '" + keyboardName
+ "' to translate key: " + keyCode);
Log.v(TAG,
"Hard Keyboard Action before translation: Shift: "
+ mHardKeyboardAction
.isShiftActive()
+ ", Alt: "
+ mHardKeyboardAction.isAltActive()
+ ", Key code: "
+ mHardKeyboardAction.getKeyCode()
+ ", changed: "
+ mHardKeyboardAction.getKeyCodeWasChanged());
}
keyTranslator.translatePhysicalCharacter(mHardKeyboardAction);
if (DEBUG)
Log.v(TAG,
"Hard Keyboard Action after translation: Key code: "
+ mHardKeyboardAction.getKeyCode()
+ ", changed: "
+ mHardKeyboardAction
.getKeyCodeWasChanged());
if (mHardKeyboardAction.getKeyCodeWasChanged()) {
final int translatedChar = mHardKeyboardAction
.getKeyCode();
// typing my own.
onKey(translatedChar, new int[] { translatedChar }, 0, 0);
// my handling
// we are at a regular key press, so we'll update
// our meta-state member
mMetaState = MyMetaKeyKeyListener
.adjustMetaAfterKeypress(mMetaState);
if (DEBUG)
Log.d(TAG+"-meta-key",
getMetaKeysStates("onKeyDown after adjust - translated"));
return true;
}
}
} finally {
if (ic != null)
ic.endBatchEdit();
}
}
if (event.isPrintingKey()) {
// we are at a regular key press, so we'll update our meta-state
// member
mMetaState = MyMetaKeyKeyListener
.adjustMetaAfterKeypress(mMetaState);
if (DEBUG)
Log.d(TAG+"-meta-key",
getMetaKeysStates("onKeyDown after adjust"));
}
}
return super.onKeyDown(keyCode, event);
}
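/**
* Shows the current alphabet keyboard's icon in the status bar, unless the user's
* keyboard-change notification preference is set to "never".
*/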
private void notifyKeyboardChangeIfNeeded() {
// Log.d("anySoftKeyboard","notifyKeyboardChangeIfNeeded");
// Thread.dumpStack();
if (mKeyboardSwitcher == null)// happens on first onCreate.
return;
if ((mKeyboardSwitcher.isAlphabetMode())
&& !mKeyboardChangeNotificationType
.equals(KEYBOARD_NOTIFICATION_NEVER)) {
//AnyKeyboard current = mKeyboardSwitcher.getCurrentKeyboard();
// notifying the user about the keyboard.
// creating the message
// final String keyboardName = mCurrentKeyboard.getKeyboardName();
//
// Notification notification = new Notification(R.drawable.notification_icon, keyboardName, System.currentTimeMillis());
//
// Intent notificationIntent = new Intent();
// PendingIntent contentIntent = PendingIntent.getActivity(this, 0,
// notificationIntent, 0);
//
// notification.setLatestEventInfo(getApplicationContext(),
// getText(R.string.ime_name), keyboardName,
// contentIntent);
//
// if (mKeyboardChangeNotificationType.equals("1")) {
// notification.flags |= Notification.FLAG_ONGOING_EVENT;
// notification.flags |= Notification.FLAG_NO_CLEAR;
// } else {
// notification.flags |= Notification.FLAG_AUTO_CANCEL;
// }
// // notifying
// mNotificationManager.notify(KEYBOARD_NOTIFICATION_ID, notification);
// Intent i = new Intent(NOTIFY_LAYOUT_SWITCH);
// i.putExtra(NOTIFY_LAYOUT_SWITCH_NOTIFICATION_TITLE, getText(R.string.ime_name));
// i.putExtra(NOTIFY_LAYOUT_SWITCH_CURRENT_LAYOUT_RESID, mCurrentKeyboard.getKeyboardIconResId());
// i.putExtra(NOTIFY_LAYOUT_SWITCH_CURRENT_LAYOUT_NAME, mCurrentKeyboard.getKeyboardName());
// i.putExtra(NOTIFY_LAYOUT_SWITCH_CURRENT_LAYOUT_PACKAGE, mCurrentKeyboard.getKeyboardContext().getPackageName());
// i.putExtra(NOTIFY_LAYOUT_SWITCH_NOTIFICATION_FLAGS, notification.flags);
// sendBroadcast(i);
mInputMethodManager.showStatusIcon(mImeToken, mCurrentKeyboard.getKeyboardContext().getPackageName(), mCurrentKeyboard.getKeyboardIconResId());
}
}
@Override
public boolean onKeyUp(int keyCode, KeyEvent event) {
switch (keyCode) {
//Issue 248
case KeyEvent.KEYCODE_VOLUME_DOWN:
case KeyEvent.KEYCODE_VOLUME_UP:
if(AnySoftKeyboard.getInstance().isInputViewShown() == false){
return super.onKeyUp(keyCode, event);
}
if(mConfig.useVolumeKeyForLeftRight()){
//no need of vol up/down sound
return true;
}
case KeyEvent.KEYCODE_DPAD_DOWN:
case KeyEvent.KEYCODE_DPAD_UP:
case KeyEvent.KEYCODE_DPAD_LEFT:
case KeyEvent.KEYCODE_DPAD_RIGHT:
// // If tutorial is visible, don't allow dpad to work
// if (mTutorial != null) {
// return true;
// }
// Enable shift key and DPAD to do selections
if (mInputView != null && mInputView.isShown()
&& mInputView.isShifted()) {
event = new KeyEvent(event.getDownTime(), event.getEventTime(),
event.getAction(), event.getKeyCode(), event
.getRepeatCount(), event.getDeviceId(), event
.getScanCode(), KeyEvent.META_SHIFT_LEFT_ON
| KeyEvent.META_SHIFT_ON);
InputConnection ic = getCurrentInputConnection();
if (ic != null)
ic.sendKeyEvent(event);
return true;
}
break;
case KeyEvent.KEYCODE_ALT_LEFT:
case KeyEvent.KEYCODE_ALT_RIGHT:
case KeyEvent.KEYCODE_SHIFT_LEFT:
case KeyEvent.KEYCODE_SHIFT_RIGHT:
case KeyEvent.KEYCODE_SYM:
mMetaState = MyMetaKeyKeyListener.handleKeyUp(mMetaState, keyCode,
event);
if (DEBUG)
Log.d("AnySoftKeyboard-meta-key", getMetaKeysStates("onKeyUp"));
setInputConnectionMetaStateAsCurrentMetaKeyKeyListenerState();
break;
}
return super.onKeyUp(keyCode, event);
}
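/** Debug helper: formats the SHIFT/ALT/SYM states tracked in mMetaState for logging. */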
private String getMetaKeysStates(String place) {
final int shiftState = MyMetaKeyKeyListener.getMetaState(mMetaState,
MyMetaKeyKeyListener.META_SHIFT_ON);
final int altState = MyMetaKeyKeyListener.getMetaState(mMetaState,
MyMetaKeyKeyListener.META_ALT_ON);
final int symState = MyMetaKeyKeyListener.getMetaState(mMetaState,
MyMetaKeyKeyListener.META_SYM_ON);
return "Meta keys state at " + place + "- SHIFT:" + shiftState
+ ", ALT:" + altState + " SYM:" + symState + " bits:"
+ MyMetaKeyKeyListener.getMetaState(mMetaState) + " state:"
+ mMetaState;
}
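/**
* Clears, on the editor's input connection, every meta key (ALT/SHIFT/SYM) that our own
* meta-state tracking no longer considers active.
*/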
private void setInputConnectionMetaStateAsCurrentMetaKeyKeyListenerState() {
InputConnection ic = getCurrentInputConnection();
if (ic != null) {
int clearStatesFlags = 0;
if (MyMetaKeyKeyListener.getMetaState(mMetaState,
MyMetaKeyKeyListener.META_ALT_ON) == 0)
clearStatesFlags += KeyEvent.META_ALT_ON;
if (MyMetaKeyKeyListener.getMetaState(mMetaState,
MyMetaKeyKeyListener.META_SHIFT_ON) == 0)
clearStatesFlags += KeyEvent.META_SHIFT_ON;
if (MyMetaKeyKeyListener.getMetaState(mMetaState,
MyMetaKeyKeyListener.META_SYM_ON) == 0)
clearStatesFlags += KeyEvent.META_SYM_ON;
if (DEBUG)
Log.d("AnySoftKeyboard-meta-key",
getMetaKeysStates("setInputConnectionMetaStateAsCurrentMetaKeyKeyListenerState with flags: "
+ clearStatesFlags));
ic.clearMetaKeyStates(clearStatesFlags);
}
}
private void addToDictionaries(CharSequence suggestion, int frequencyDelta) {
checkAddToDictionary(suggestion, frequencyDelta/*, false*/);
}
/**
* Adds to the UserBigramDictionary and/or AutoDictionary
* @param addToBigramDictionary true if it should be added to bigram dictionary if possible
*/
private void checkAddToDictionary(CharSequence suggestion, int frequencyDelta/*,
boolean addToBigramDictionary*/) {
if (suggestion == null || suggestion.length() < 1) return;
// Only auto-add to dictionary if auto-correct is ON. Otherwise we'll be
// adding words in situations where the user or application really didn't
// want corrections enabled or learned.
if (!(mCorrectionMode == Suggest.CORRECTION_FULL/*
|| mCorrectionMode == Suggest.CORRECTION_FULL_BIGRAM*/)) {
return;
}
if (suggestion != null && mAutoDictionary != null) {
if (/*!addToBigramDictionary &&*/
mAutoDictionary.isValidWord(suggestion) ||
(!mSuggest.isValidWord(suggestion.toString()) && !mSuggest.isValidWord(suggestion.toString().toLowerCase()))) {
mAutoDictionary.addWord(suggestion.toString(), frequencyDelta);
}
/*
if (mUserBigramDictionary != null) {
CharSequence prevWord = EditingUtil.getPreviousWord(getCurrentInputConnection(),
mSentenceSeparators);
if (!TextUtils.isEmpty(prevWord)) {
mUserBigramDictionary.addBigrams(prevWord.toString(), suggestion.toString());
}
}*/
}
}
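/**
* Commits the pending composing text to the editor, remembers its length so it can be
* reverted, and feeds the typed word to the auto-dictionary.
*/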
private void commitTyped(InputConnection inputConnection) {
if (mPredicting) {
mPredicting = false;
if (mComposing.length() > 0) {
if (inputConnection != null) {
inputConnection.commitText(mComposing, 1);
}
mCommittedLength = mComposing.length();
TextEntryState.acceptedTyped(mComposing);
addToDictionaries(mComposing, AutoDictionary.FREQUENCY_FOR_TYPED);
}
postUpdateSuggestionsNow();
}
}
private void postUpdateShiftKeyState() {
mHandler.removeMessages(MSG_UPDATE_SHIFT_STATE);
// TODO: Should remove this 300ms delay?
mHandler.sendMessageDelayed(mHandler.obtainMessage(MSG_UPDATE_SHIFT_STATE), 150);
}
public void updateShiftKeyState(EditorInfo attr) {
mHandler.removeMessages(MSG_UPDATE_SHIFT_STATE);
InputConnection ic = getCurrentInputConnection();
if (ic != null && attr != null && mKeyboardSwitcher.isAlphabetMode() && (mInputView != null)) {
mInputView.setShifted(mShiftKeyState.isMomentary() || mCapsLock
|| getCursorCapsMode(ic, attr) != 0);
}
}
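/** Returns the editor's cursor caps mode, or 0 when auto-capitalization is off or there is no real input type. */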
private int getCursorCapsMode(InputConnection ic, EditorInfo attr) {
int caps = 0;
EditorInfo ei = getCurrentInputEditorInfo();
if (mAutoCap && ei != null && ei.inputType != EditorInfo.TYPE_NULL) {
caps = ic.getCursorCapsMode(attr.inputType);
}
return caps;
}
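// If the last two characters are a space followed by a sentence separator, swap them so the
// punctuation attaches to the word and the space follows it ("word ," becomes "word, ").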
private void swapPunctuationAndSpace() {
final InputConnection ic = getCurrentInputConnection();
if (ic == null)
return;
CharSequence lastTwo = ic.getTextBeforeCursor(2, 0);
if (DEBUG)
{
String seps = "";
for(Character c : mSentenceSeparators) seps+=c;
Log.d(TAG, "swapPunctuationAndSpace: lastTwo: '"+lastTwo+"', mSentenceSeparators "+mSentenceSeparators.size()+ " '"+seps+"'");
}
if (lastTwo != null && lastTwo.length() == 2
&& lastTwo.charAt(0) == KeyCodes.SPACE
&& mSentenceSeparators.contains(lastTwo.charAt(1))) {
//ic.beginBatchEdit();
ic.deleteSurroundingText(2, 0);
ic.commitText(lastTwo.charAt(1) + " ", 1);
//ic.endBatchEdit();
updateShiftKeyState(getCurrentInputEditorInfo());
mJustAddedAutoSpace = true;
Log.d(TAG, "swapPunctuationAndSpace: YES");
}
}
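// Rewrites ". ." before the cursor as ".. ", undoing an earlier period/space swap when the
// user keeps typing periods.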
private void reswapPeriodAndSpace() {
final InputConnection ic = getCurrentInputConnection();
if (ic == null) return;
CharSequence lastThree = ic.getTextBeforeCursor(3, 0);
if (lastThree != null && lastThree.length() == 3
&& lastThree.charAt(0) == '.'
&& lastThree.charAt(1) == KeyCodes.SPACE
&& lastThree.charAt(2) == '.') {
ic.beginBatchEdit();
ic.deleteSurroundingText(3, 0);
ic.commitText(".. ", 1);
ic.endBatchEdit();
updateShiftKeyState(getCurrentInputEditorInfo());
}
}
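// Replaces "letter/digit + space + space" before the cursor with "letter/digit + period + space"
// when the double-space-to-period preference is enabled.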
private void doubleSpace() {
// if (!mAutoPunctuate) return;
if (!mConfig.isDoubleSpaceChangesToPeriod())
return;
final InputConnection ic = getCurrentInputConnection();
if (ic == null)
return;
CharSequence lastThree = ic.getTextBeforeCursor(3, 0);
if (lastThree != null && lastThree.length() == 3
&& Character.isLetterOrDigit(lastThree.charAt(0))
&& lastThree.charAt(1) == KeyCodes.SPACE
&& lastThree.charAt(2) == KeyCodes.SPACE) {
ic.beginBatchEdit();
ic.deleteSurroundingText(2, 0);
ic.commitText(". ", 1);
ic.endBatchEdit();
updateShiftKeyState(getCurrentInputEditorInfo());
mJustAddedAutoSpace = true;
}
}
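// Deletes a single space immediately before the cursor, if there is one.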
private void removeTrailingSpace() {
final InputConnection ic = getCurrentInputConnection();
if (ic == null) return;
CharSequence lastOne = ic.getTextBeforeCursor(1, 0);
if (lastOne != null && lastOne.length() == 1
&& lastOne.charAt(0) == KeyCodes.SPACE) {
ic.deleteSurroundingText(1, 0);
}
}
public boolean addWordToDictionary(String word) {
mUserDictionary.addWord(word, 128);
return true;
}
/**
* Helper to determine if a given character code is alphabetic.
*/
private boolean isAlphabet(int code) {
//inner letters have more options: ' in English. " in Hebrew, and more.
if (mPredicting)
return mCurrentKeyboard.isInnerWordLetter((char) code);
else
return mCurrentKeyboard.isStartOfWordLetter((char) code);
}
public void onMultiTap() {
if (DEBUG) Log.d(TAG, "onMultiTap");
handleDeleteLastCharacter(true);
}
public void onKey(int primaryCode, int[] keyCodes, int x, int y) {
if (DEBUG)
{
Log.d(TAG, "onKey " + primaryCode);
//Thread.dumpStack();
}
final InputConnection ic = getCurrentInputConnection();
switch (primaryCode) {
case Keyboard.KEYCODE_DELETE:
if (ic == null)//if we don't want to do anything, lets check null first.
break;
//we do backword if shift is pressed while pressing backspace (like on a PC)
//but this is true ONLY if the device has multitouch, or the user specifically asked for it
if (mInputView != null && mInputView.isShifted() && !mInputView.getKeyboard().isShiftLocked() &&
((mInputView.hasDistinctMultitouch() && mShiftKeyState.isMomentary()) || mConfig.useBackword()))
{
handleBackword(ic);
}
else
{
handleDeleteLastCharacter(false);
}
break;
case AnyKeyboard.KEYCODE_CLEAR_INPUT:
if (ic != null)
{
ic.beginBatchEdit();
commitTyped(ic);
ic.deleteSurroundingText(Integer.MAX_VALUE, Integer.MAX_VALUE);
ic.endBatchEdit();
}
break;
case Keyboard.KEYCODE_SHIFT:
if ((!mInputView.hasDistinctMultitouch()) ||
((x == SWIPE_CORD) && (y == SWIPE_CORD)))//the SWIPE_CORD is the case where onKey was called from swipeX
handleShift(false);
break;
case AnyKeyboard.KEYCODE_CTRL:
if ((!mInputView.hasDistinctMultitouch()) ||
((x == SWIPE_CORD) && (y == SWIPE_CORD)))//the SWIPE_CORD is the case where onKey was called from swipeX
handleControl(false);
break;
case AnyKeyboard.KEYCODE_LEFT:
sendDownUpKeyEvents(KeyEvent.KEYCODE_DPAD_LEFT);
break;
case AnyKeyboard.KEYCODE_RIGHT:
sendDownUpKeyEvents(KeyEvent.KEYCODE_DPAD_RIGHT);
break;
case AnyKeyboard.KEYCODE_UP:
sendDownUpKeyEvents(KeyEvent.KEYCODE_DPAD_UP);
break;
case AnyKeyboard.KEYCODE_DOWN:
sendDownUpKeyEvents(KeyEvent.KEYCODE_DPAD_DOWN);
break;
case Keyboard.KEYCODE_CANCEL:
if (mOptionsDialog == null || !mOptionsDialog.isShowing()) {
handleClose();
}
break;
case AnyKeyboardView.KEYCODE_OPTIONS:
showOptionsMenu();
break;
case AnyKeyboard.KEYCODE_DOMAIN:
onText(mConfig.getDomainText());
break;
case AnyKeyboard.KEYCODE_QUICK_TEXT:
QuickTextKey quickTextKey = QuickTextKeyFactory.getCurrentQuickTextKey(this);
if (mSmileyOnShortPress) {
if (TextUtils.isEmpty(mOverrideQuickTextText))
onText(quickTextKey.getKeyOutputText());
else
onText(mOverrideQuickTextText);
} else {
if (quickTextKey.isPopupKeyboardUsed()) {
showQuickTextKeyPopupKeyboard(quickTextKey);
} else {
showQuickTextKeyPopupList(quickTextKey);
}
}
break;
case AnyKeyboardView.KEYCODE_QUICK_TEXT_LONGPRESS:
quickTextKey = QuickTextKeyFactory.getCurrentQuickTextKey(this);
if (quickTextKey.getId().equals(SMILEY_PLUGIN_ID) && !mSmileyOnShortPress) {
if (TextUtils.isEmpty(mOverrideQuickTextText))
onText(quickTextKey.getKeyOutputText());
else
onText(mOverrideQuickTextText);
} else {
if (quickTextKey.isPopupKeyboardUsed()) {
showQuickTextKeyPopupKeyboard(quickTextKey);
} else {
showQuickTextKeyPopupList(quickTextKey);
}
}
break;
case Keyboard.KEYCODE_MODE_CHANGE:
nextKeyboard(getCurrentInputEditorInfo(), NextKeyboardType.Symbols);
break;
case AnyKeyboard.KEYCODE_LANG_CHANGE:
if (mKeyboardSwitcher.shouldPopupForLanguageSwitch())
{
showLanguageSelectionDialog();
}
else
nextKeyboard(getCurrentInputEditorInfo(), NextKeyboardType.Alphabet);
break;
case Keyboard.KEYCODE_ALT:
nextAlterKeyboard(getCurrentInputEditorInfo());
break;
case AnyKeyboard.KEYCODE_KEYBOARD_CYCLE:
nextKeyboard(getCurrentInputEditorInfo(), NextKeyboardType.Any);
break;
case AnyKeyboard.KEYCODE_KEYBOARD_REVERSE_CYCLE:
nextKeyboard(getCurrentInputEditorInfo(), NextKeyboardType.PreviousAny);
break;
case AnyKeyboard.KEYCODE_KEYBOARD_CYCLE_INSIDE_MODE:
nextKeyboard(getCurrentInputEditorInfo(), NextKeyboardType.AnyInsideMode);
break;
case AnyKeyboard.KEYCODE_KEYBOARD_MODE_CHANGE:
nextKeyboard(getCurrentInputEditorInfo(), NextKeyboardType.OtherMode);
break;
case AnyKeyboard.KEYCODE_CLIPBOARD:
ClipboardManager cm = (ClipboardManager)getSystemService(CLIPBOARD_SERVICE);
if(cm.hasText()){
onText(cm.getText());
}
break;
case 9 /*Tab*/:
sendDownUpKeyEvents(KeyEvent.KEYCODE_TAB);
break;
default:
// Issue 146: Right to left langs require reversed parenthesis
if (mKeyboardSwitcher.isRightToLeftMode())
{
if (primaryCode == (int)')')
primaryCode = (int)'(';
else if (primaryCode == (int)'(')
primaryCode = (int)')';
}
if (isWordSeparator(primaryCode)) {
handleSeparator(primaryCode);
} else {
handleCharacter(primaryCode, keyCodes);
// resetting mJustAddedAutoSpace, which is set to true upon selecting
// a candidate
mJustAddedAutoSpace = false;
}
// Cancel the just reverted state
mJustRevertedSeparator = null;
if (mKeyboardSwitcher.isKeyRequireSwitchToAlphabet(primaryCode))
{
mKeyboardSwitcher.nextKeyboard(getCurrentInputEditorInfo(),
NextKeyboardType.Alphabet);
}
break;
}
}
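// Shows a dialog listing all enabled alphabet keyboards; picking one switches to it.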
private void showLanguageSelectionDialog() {
KeyboardAddOnAndBuilder[] builders = mKeyboardSwitcher.getEnabledKeyboardsBuilders();
AlertDialog.Builder builder = new AlertDialog.Builder(this);
builder.setCancelable(true);
builder.setIcon(R.drawable.icon_8_key);
builder.setTitle(getResources().getString(R.string.select_keyboard_popup_title));
builder.setNegativeButton(android.R.string.cancel, null);
ArrayList<CharSequence> keyboardsIds = new ArrayList<CharSequence>();
ArrayList<CharSequence> keyboards = new ArrayList<CharSequence>();
//going over all enabled keyboards
for (KeyboardAddOnAndBuilder keyboardBuilder : builders) {
keyboardsIds.add(keyboardBuilder.getId());
String name = keyboardBuilder.getName();
keyboards.add(name);
}
final CharSequence[] ids = new CharSequence[keyboardsIds.size()];
final CharSequence[] items = new CharSequence[keyboards.size()];
keyboardsIds.toArray(ids);
keyboards.toArray(items);
builder.setItems(items, new DialogInterface.OnClickListener() {
public void onClick(DialogInterface di, int position) {
di.dismiss();
if ((position < 0) || (position >= items.length)) {
if (DEBUG)Log.d(TAG, "Keyboard selection popup canceled");
} else {
CharSequence id = ids[position];
if (DEBUG)Log.d(TAG, "User selected "+items[position]+" with id "+id);
EditorInfo currentEditorInfo = getCurrentInputEditorInfo();
AnyKeyboard currentKeyboard = mKeyboardSwitcher.nextAlphabetKeyboard(currentEditorInfo, id.toString());
setKeyboardStuff(currentEditorInfo, NextKeyboardType.Alphabet, currentKeyboard);
}
}
});
mOptionsDialog = builder.create();
Window window = mOptionsDialog.getWindow();
WindowManager.LayoutParams lp = window.getAttributes();
lp.token = mInputView.getWindowToken();
lp.type = WindowManager.LayoutParams.TYPE_APPLICATION_ATTACHED_DIALOG;
window.setAttributes(lp);
window.addFlags(WindowManager.LayoutParams.FLAG_ALT_FOCUSABLE_IM);
mOptionsDialog.show();
}
public void onText(CharSequence text) {
if (DEBUG)
Log.d(TAG, "onText: '" + text+"'");
InputConnection ic = getCurrentInputConnection();
if (ic == null)
return;
abortCorrection(false);
ic.beginBatchEdit();
if (mPredicting) {
commitTyped(ic);
}
ic.commitText(text, 1);
ic.endBatchEdit();
updateShiftKeyState(getCurrentInputEditorInfo());
mJustRevertedSeparator = null;
mJustAddedAutoSpace = false;
}
private static boolean isBackwordStopChar(int c) {
return !Character.isLetter(c);// c == 32 || PUNCTUATION_CHARACTERS.contains(c);
}
private void handleBackword(InputConnection ic) {
if(ic == null){
return;
}
if (mPredicting) {
final int length = mComposing.length();
if (length == 0) {
return;
}
mComposing.delete(0, length);
mWord.deleteLast();
ic.setComposingText(mComposing, 1);
if (mComposing.length() == 0) {
mPredicting = false;
}
postUpdateSuggestions();
return;
}
CharSequence cs = ic.getTextBeforeCursor(1, 0);
//int csl = cs.length();//check if there is no input
if (TextUtils.isEmpty(cs)) {
return;//nothing to delete
}
//TWO OPTIONS
//1) Either we do like Linux and Windows (and probably ALL desktop OSes):
//Delete all the characters till a complete word was deleted:
/*
* What to do:
* We delete until we find a separator (the function isBackwordStopChar).
* Note that we MUST delete a whole word! So if the backword starts
* at separators, we'll delete those, and then the word before:
* "test this, ," -> "test "
*/
//Pro: same as desktop
//Con: when auto-caps is on (the default), this will delete the previous word, which can be annoying..
//E.g., Writing a sentence, then a period, then ASK will auto-caps, then when the user press backspace (for some reason),
//the entire previous word deletes.
//2) Or we delete all the characters till we encounter a separator, but delete at least one character.
/*
* What to do:
* We delete until we find a separator (the function isBackwordStopChar).
* Note that we MUST delete at least one character:
* "test this, " -> "test this," -> "test this" -> "test "
*/
//Pro: Supports auto-caps, and mostly similar to desktop OSes
//Con: Not all desktop use-cases are here.
//For now, I go with option 2, but I'm open for discussion.
//2b) "test this, " -> "test this"
boolean stopCharAtTheEnd = isBackwordStopChar((int)cs.charAt(0));
int idx = 1;
int csl = 0;
while (true) {
cs = ic.getTextBeforeCursor(idx, 0);
csl = cs.length();
if (csl < idx) {
// read text is smaller than requested. We are at start
break;
}
++idx;
int cc = cs.charAt(0);
boolean isBackwordStopChar = isBackwordStopChar(cc);
if (stopCharAtTheEnd) {
if (!isBackwordStopChar){
--csl;
break;
}
continue;
}
if (isBackwordStopChar) {
--csl;
break;
}
}
//we want to delete at least one character
//ic.deleteSurroundingText(csl == 0 ? 1 : csl, 0);
ic.deleteSurroundingText(csl, 0);//it is always > 0 !
}
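// Deletes one character: while predicting it trims the composing word and refreshes
// suggestions; otherwise it may revert the last auto-picked word, or sends a DEL key
// event (deleteSurroundingText is used instead when called from multi-tap).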
private void handleDeleteLastCharacter(boolean forMultitap) {
InputConnection ic = getCurrentInputConnection();
boolean deleteChar = false;
if (mPredicting) {
final int length = mComposing.length();
if (length > 0) {
mComposing.delete(length - 1, length);
mWord.deleteLast();
ic.setComposingText(mComposing, 1);
if (mComposing.length() == 0) {
mPredicting = false;
}
postUpdateSuggestions();
} else {
ic.deleteSurroundingText(1, 0);
}
} else {
deleteChar = true;
}
TextEntryState.backspace();
if (TextEntryState.getState() == TextEntryState.State.UNDO_COMMIT) {
revertLastWord(deleteChar);
return;
} else if (deleteChar) {
if (mCandidateView != null && mCandidateView.dismissAddToDictionaryHint()) {
// Go back to the suggestion mode if the user canceled the
// "Touch again to save".
// NOTE: In general, we don't revert the word when backspacing
// from a manual suggestion pick. We deliberately chose a
// different behavior only in the case of picking the first
// suggestion (typed word). It's intentional to have made this
// inconsistent with backspacing after selecting other suggestions.
revertLastWord(deleteChar);
}
else
{
if (!forMultitap)
{
sendDownUpKeyEvents(KeyEvent.KEYCODE_DEL);
}
else
{
//this code tries to delete the text in a different way, because of multi-tap stuff
//using "deleteSurroundingText" will actually get the input updated faster!
//but will not handle "delete all selected text" feature, hence the "if (!forMultitap)" above
final CharSequence beforeText = ic.getTextBeforeCursor(1, 0);
final int textLengthBeforeDelete = (TextUtils.isEmpty(beforeText))? 0 : beforeText.length();
if (textLengthBeforeDelete > 0)
ic.deleteSurroundingText(1, 0);
else
sendDownUpKeyEvents(KeyEvent.KEYCODE_DEL);
}
}
}
mJustRevertedSeparator = null;
//handleShiftStateAfterBackspace();
}
/*
private void handleShiftStateAfterBackspace() {
switch(mLastCharacterShiftState)
{
//this code will help us in the case that
//a double/triple tap occurs while the first one was shifted
case LAST_CHAR_SHIFT_STATE_SHIFTED:
if (mInputView != null)
mInputView.setShifted(true);
mLastCharacterShiftState = LAST_CHAR_SHIFT_STATE_DEFAULT;
break;
// case LAST_CHAR_SHIFT_STATE_UNSHIFTED:
// if (mInputView != null)
// mInputView.setShifted(false);
// mLastCharacterShiftState = LAST_CHAR_SHIFT_STATE_DEFAULT;
// break;
default:
updateShiftKeyState(getCurrentInputEditorInfo());
break;
}
}
*/
private void handleControl(boolean reset) {
if (reset)
{
if (DEBUG) Log.d(TAG, "handleControl: reset");
mInputView.setControl(false);
}
else
{
if (!mInputView.isControl())
{
if (DEBUG) Log.d(TAG, "handleControl: current keyboard is un-control");
mInputView.setControl(true);
}
else
{
if (DEBUG) Log.d(TAG, "handleControl: current keyboard is control");
mInputView.setControl(true);
}
}
}
private void handleShift(boolean reset) {
mHandler.removeMessages(MSG_UPDATE_SHIFT_STATE);
if (mKeyboardSwitcher.isAlphabetMode()) {
//shift pressed and this is an alphabet keyboard
//we want to do:
//1)if keyboard is unshifted -> shift view and keyboard
//2)if keyboard is shifted -> capslock keyboard
//3)if keyboard is capslocked -> unshift view and keyboard
//final AnyKeyboard currentKeyboard = mKeyboardSwitcher.getCurrentKeyboard();
if (DEBUG)
{
final AnyKeyboard viewKeyboard = (AnyKeyboard)mInputView.getKeyboard();
if (mCurrentKeyboard != viewKeyboard)
{
Log.e(TAG, "NOTE: view keyboard and switcher keyboard are not the same!");
}
}
final boolean caps;
if (reset)
{
if (DEBUG) Log.d(TAG, "handleShift: reset");
mInputView.setShifted(false);
caps = false;
}
else
{
if (!mInputView.isShifted())
{
if (DEBUG) Log.d(TAG, "handleShift: current keyboard is un-shifted");
mInputView.setShifted(true);
caps = false;
}
else
{
if (mCurrentKeyboard.isShiftLocked())
{
if (DEBUG) Log.d(TAG, "handleShift: current keyboard is CAPSLOCKED");
mInputView.setShifted(false);
caps = false;
}
else
{
if (DEBUG) Log.d(TAG, "handleShift: current keyboard is shifted");
mInputView.setShifted(true);
caps = true;
}
}
}
mCapsLock = caps;
mCurrentKeyboard.setShiftLocked(mCapsLock);
}
}
private void abortCorrection(boolean force) {
if (force || TextEntryState.isCorrecting()) {
mHandler.removeMessages(MSG_UPDATE_SUGGESTIONS);
getCurrentInputConnection().finishComposingText();
clearSuggestions();
mComposing.setLength(0);
mPredicting = false;
mPredictionOn = false;
mJustAddedAutoSpace = false;
setCandidatesViewShown(false);
if (mSuggest != null) {
mSuggest.setCorrectionMode(Suggest.CORRECTION_NONE);
}
}
}
private void handleCharacter(final int primaryCode, int[] keyCodes) {
if(DEBUG) Log.d("AnySoftKeyboard", "handleCharacter: "+primaryCode+", isPredictionOn:"+isPredictionOn()+", mPredicting:"+mPredicting);
if (isAlphabet(primaryCode) && isPredictionOn()
&& !isCursorTouchingWord()) {
if (!mPredicting) {
mPredicting = true;
mComposing.setLength(0);
mWord.reset();
}
}
if(mInputView != null){
mLastCharacterShiftState = mInputView.isShifted()? LAST_CHAR_SHIFT_STATE_SHIFTED : LAST_CHAR_SHIFT_STATE_DEFAULT;
}
if (mLastSelectionStart == mLastSelectionEnd && TextEntryState.isCorrecting()) {
abortCorrection(false);
}
final int primaryCodeForShow;
if (mInputView != null)
{
if (mInputView.isShifted())
{
primaryCodeForShow = Character.toUpperCase(primaryCode);
}
else if (mInputView.isControl())
{
//http://en.wikipedia.org/wiki/Control_character#How_control_characters_map_to_keyboards
primaryCodeForShow = primaryCode & 63;
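//e.g. 'A' (65) & 63 == 1, which is the ASCII control code for Ctrl-A (SOH)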
if (AnyApplication.DEBUG) Log.d(TAG, "CONTROL state: Char was "+primaryCode+" and now it is "+primaryCodeForShow);
}
else
primaryCodeForShow = primaryCode;
}
else
primaryCodeForShow = primaryCode;
if (mPredicting) {
if ((mInputView != null) && mInputView.isShifted()
&& mComposing.length() == 0) {
mWord.setFirstCharCapitalized(true);
}
mComposing.append((char) primaryCodeForShow);
if(keyCodes != null && keyCodes.length > 1 && primaryCode != keyCodes[0]){
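//move the actually-typed character to the front of the nearby-key codes array, so downstream correction prefers it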
int swapedItem = keyCodes[0];
keyCodes[0] = primaryCode;
for(int i=1;i<keyCodes.length; i++)
{
if (keyCodes[i] == primaryCode)
{
keyCodes[i] = swapedItem;
break;
}
}
}
if (mWord.add(primaryCodeForShow, keyCodes))
{
Toast note = Toast.makeText(this, "Check the logcat for a note from AnySoftKeyboard developers!", Toast.LENGTH_LONG);
note.show();
Log.i(TAG, "*******************"
+"\nNICE!!! You found the our easter egg! http://www.dailymotion.com/video/x3zg90_gnarls-barkley-crazy-2006-mtv-star_music\n"
+"\nAnySoftKeyboard R&D team would like to thank you for using our keyboard application."
+"\nWe hope you enjoying it, we enjoyed making it."
+"\nWhile developing this application, we heard Gnarls Barkley's Crazy quite a lot, and would like to share it with you."
+"\n"
+"\nThanks."
+"\nMenny Even Danan, Hezi Cohen, Hugo Lopes, Henrik Andersson, Sami Salonen, and Lado Kumsiashvili."
+"\n*******************");
Intent easterEgg = new Intent(Intent.ACTION_VIEW, Uri.parse("http://www.dailymotion.com/video/x3zg90_gnarls-barkley-crazy-2006-mtv-star_music"));
easterEgg.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
startActivity(easterEgg);
}
InputConnection ic = getCurrentInputConnection();
if (ic != null) {
ic.setComposingText(mComposing, 1);
}
postUpdateSuggestions();
} else {
sendKeyChar((char) primaryCodeForShow);
}
updateShiftKeyState(getCurrentInputEditorInfo());
// measureCps();
TextEntryState.typedCharacter((char) primaryCodeForShow,
isWordSeparator(primaryCodeForShow));
}
private void handleSeparator(int primaryCode) {
if(DEBUG) Log.d(TAG, "handleSeparator: "+primaryCode);
// Should dismiss the "Touch again to save" message when handling separator
if (mCandidateView != null && mCandidateView.dismissAddToDictionaryHint()) {
postUpdateSuggestions();
}
boolean pickedDefault = false;
// Handle separator
InputConnection ic = getCurrentInputConnection();
if (ic != null) {
ic.beginBatchEdit();
abortCorrection(false);
}
if (mPredicting) {
// In certain languages where single quote is a separator, it's
// better
// not to auto correct, but accept the typed word. For instance,
// in Italian dov' should not be expanded to dove' because the
// elision
// requires the last vowel to be removed.
if (mAutoCorrectOn
&& primaryCode != '\''
&& (mJustRevertedSeparator == null
|| mJustRevertedSeparator.length() == 0
|| mJustRevertedSeparator.charAt(0) != primaryCode)) {
pickedDefault = pickDefaultSuggestion();
// Picked the suggestion by the space key. We consider this
// as "added an auto space".
if (primaryCode == KeyCodes.SPACE) {
mJustAddedAutoSpace = true;
}
} else {
commitTyped(ic);
}
}
if (mJustAddedAutoSpace && primaryCode == KeyCodes.ENTER) {
removeTrailingSpace();
mJustAddedAutoSpace = false;
}
sendKeyChar((char) primaryCode);
// Handle the case of ". ." -> " .." with auto-space if necessary
// before changing the TextEntryState.
if (TextEntryState.getState() == TextEntryState.State.PUNCTUATION_AFTER_ACCEPTED
&& primaryCode == '.') {
reswapPeriodAndSpace();
}
TextEntryState.typedCharacter((char) primaryCode, true);
if (TextEntryState.getState() == TextEntryState.State.PUNCTUATION_AFTER_ACCEPTED
&& primaryCode != KeyCodes.ENTER) {
swapPunctuationAndSpace();
} else if (/*isPredictionOn() &&*/ primaryCode == ' ') {
doubleSpace();
}
if (pickedDefault && mBestWord != null) {
TextEntryState.acceptedDefault(mWord.getTypedWord(), mBestWord);
}
updateShiftKeyState(getCurrentInputEditorInfo());
if (ic != null) {
ic.endBatchEdit();
}
}
private void handleClose() {
commitTyped(getCurrentInputConnection());
requestHideSelf(0);
if (mInputView != null)
mInputView.closing();
TextEntryState.endSession();
}
// private void checkToggleCapsLock() {
// if (mKeyboardSwitcher.getCurrentKeyboard().isShifted()) {
// toggleCapsLock();
// }
// }
private void postUpdateSuggestions() {
postUpdateSuggestions(100);
}
private void postUpdateSuggestions(long delay) {
mHandler.removeMessages(MSG_UPDATE_SUGGESTIONS);
if (delay > 0)
mHandler.sendMessageDelayed(mHandler.obtainMessage(MSG_UPDATE_SUGGESTIONS), delay);
else
mHandler.sendMessage(mHandler.obtainMessage(MSG_UPDATE_SUGGESTIONS));
}
private void postUpdateSuggestionsNow() {
postUpdateSuggestions(0);
}
private boolean isPredictionOn() {
boolean predictionOn = mPredictionOn;
// if (!onEvaluateInputViewShown()) predictionOn &=
// mPredictionLandscape;
return predictionOn;
}
private boolean shouldCandidatesStripBeShown() {
// boolean shown = isPredictionOn() && (mShowSuggestions || isFullscreenMode());
// if (!onEvaluateInputViewShown())
// shown &= mPredictionLandscape;
// return shown;
// return true;
// return isPredictionOn() || isFullscreenMode();
return true;
}
private void performUpdateSuggestions() {
if (DEBUG)
Log.d(TAG, "performUpdateSuggestions: has mSuggest:"
+ (mSuggest != null) + ", isPredictionOn:"
+ isPredictionOn() + ", mPredicting:" + mPredicting
+ ", mCorrectionMode:" + mCorrectionMode);
// Check if we have a suggestion engine attached.
if (mSuggest == null) {
return;
}
// final boolean showSuggestions = (mCandidateView != null && mPredicting
// && isPredictionOn() && shouldCandidatesStripBeShown());
if (mCandidateCloseText != null) mCandidateCloseText.setVisibility(View.GONE);
if (!mPredicting) {
if (mCandidateView != null)
mCandidateView.setSuggestions(null, false, false, false);
return;
}
List<CharSequence> stringList = mSuggest.getSuggestions(mInputView, mWord, false);
boolean correctionAvailable = mSuggest.hasMinimalCorrection();
// || mCorrectionMode == mSuggest.CORRECTION_FULL;
CharSequence typedWord = mWord.getTypedWord();
// Check whether the typed word (or its lowercase form) is already a valid dictionary word
boolean typedWordValid = mSuggest.isValidWord(typedWord) ||
(preferCapitalization() && mSuggest.isValidWord(typedWord.toString().toLowerCase()));
if (mCorrectionMode == Suggest.CORRECTION_FULL) {
correctionAvailable |= typedWordValid;
}
// Don't auto-correct words with multiple capital letters
correctionAvailable &= !mWord.isMostlyCaps();
correctionAvailable &= !TextEntryState.isCorrecting();
mCandidateView.setSuggestions(stringList, false, typedWordValid, correctionAvailable);
if (stringList.size() > 0) {
if (correctionAvailable && !typedWordValid && stringList.size() > 1) {
mBestWord = stringList.get(1);
} else {
mBestWord = typedWord;
}
} else {
mBestWord = null;
}
setCandidatesViewShown(shouldCandidatesStripBeShown() || mCompletionOn);
}
private boolean pickDefaultSuggestion() {
// Complete any pending candidate query first
if (mHandler.hasMessages(MSG_UPDATE_SUGGESTIONS)) {
mHandler.removeMessages(MSG_UPDATE_SUGGESTIONS);
postUpdateSuggestionsNow();
}
if (mBestWord != null) {
TextEntryState.acceptedDefault(mWord.getTypedWord(), mBestWord);
mJustAccepted = true;
pickSuggestion(mBestWord);
// Add the word to the auto dictionary if it's not a known word
addToDictionaries(mBestWord, AutoDictionary.FREQUENCY_FOR_TYPED);
return true;
}
return false;
}
private CharSequence pickSuggestion(CharSequence suggestion) {
if (mCapsLock) {
suggestion = suggestion.toString().toUpperCase();
} else if (preferCapitalization()
|| (mKeyboardSwitcher.isAlphabetMode() && (mInputView != null) && mInputView .isShifted())) {
suggestion = Character.toUpperCase(suggestion.charAt(0))
+ suggestion.subSequence(1, suggestion.length()).toString();
}
InputConnection ic = getCurrentInputConnection();
if (ic != null) {
ic.commitText(suggestion, 1);
}
mPredicting = false;
mCommittedLength = suggestion.length();
if (mCandidateView != null) {
mCandidateView.setSuggestions(null, false, false, false);
}
updateShiftKeyState(getCurrentInputEditorInfo());
return suggestion;
}
public void pickSuggestionManually(int index, CharSequence suggestion) {
final boolean correcting = TextEntryState.isCorrecting();
final InputConnection ic = getCurrentInputConnection();
if (ic != null) {
ic.beginBatchEdit();
}
try
{
if (mCompletionOn && mCompletions != null && index >= 0
&& index < mCompletions.length) {
CompletionInfo ci = mCompletions[index];
if (ic != null) {
ic.commitCompletion(ci);
}
mCommittedLength = suggestion.length();
if (mCandidateView != null) {
mCandidateView.clear();
}
updateShiftKeyState(getCurrentInputEditorInfo());
return;
}
pickSuggestion(suggestion, correcting);
// Add the word to the auto dictionary if it's not a known word
if (index == 0) {
addToDictionaries(suggestion, AutoDictionary.FREQUENCY_FOR_PICKED);
}
TextEntryState.acceptedSuggestion(mComposing.toString(), suggestion);
// Follow it with a space
if (mAutoSpace && !correcting) {
sendSpace();
mJustAddedAutoSpace = true;
}
final boolean showingAddToDictionaryHint = index == 0 && mCorrectionMode > 0
&& !mSuggest.isValidWord(suggestion)
&& !mSuggest.isValidWord(suggestion.toString().toLowerCase());
if (!correcting) {
// Fool the state watcher so that a subsequent backspace will not do a revert, unless
// we just did a correction, in which case we need to stay in
// TextEntryState.State.PICKED_SUGGESTION state.
TextEntryState.typedCharacter((char) KeyCodes.SPACE, true);
setNextSuggestions();
} else if (!showingAddToDictionaryHint) {
// If we're not showing the "Touch again to save", then show corrections again.
// In case the cursor position doesn't change, make sure we show the suggestions again.
clearSuggestions();
//postUpdateOldSuggestions();
}
if (showingAddToDictionaryHint && mCandidateView != null) {
mCandidateView.showAddToDictionaryHint(suggestion);
}
}
finally
{
if (ic != null)
{
ic.endBatchEdit();
}
}
}
/**
* Commits the chosen word to the text field and saves it for later
* retrieval.
* @param suggestion the suggestion picked by the user to be committed to
* the text field
* @param correcting whether this is due to a correction of an existing
* word.
*/
private void pickSuggestion(CharSequence suggestion, boolean correcting) {
if (mCapsLock) {
suggestion = suggestion.toString().toUpperCase();
} else if (preferCapitalization()
|| (mKeyboardSwitcher.isAlphabetMode() && (mInputView != null) && mInputView .isShifted())) {
suggestion = Character.toUpperCase(suggestion.charAt(0))
+ suggestion.subSequence(1, suggestion.length()).toString();
}
InputConnection ic = getCurrentInputConnection();
if (ic != null) {
ic.commitText(suggestion, 1);
}
mPredicting = false;
mCommittedLength = suggestion.length();
if (mCandidateView != null) {
mCandidateView.setSuggestions(null, false, false, false);
}
// If we just corrected a word, then don't show punctuations
if (!correcting) {
setNextSuggestions();
}
updateShiftKeyState(getCurrentInputEditorInfo());
}
private boolean isCursorTouchingWord() {
InputConnection ic = getCurrentInputConnection();
if (ic == null)
return false;
CharSequence toLeft = ic.getTextBeforeCursor(1, 0);
CharSequence toRight = ic.getTextAfterCursor(1, 0);
if (!TextUtils.isEmpty(toLeft) && !isWordSeparator(toLeft.charAt(0))) {
return true;
}
if (!TextUtils.isEmpty(toRight) && !isWordSeparator(toRight.charAt(0))) {
return true;
}
return false;
}
public void revertLastWord(boolean deleteChar) {
final int length = mComposing.length();
if (!mPredicting && length > 0) {
final InputConnection ic = getCurrentInputConnection();
mPredicting = true;
ic.beginBatchEdit();
mJustRevertedSeparator = ic.getTextBeforeCursor(1, 0);
if (deleteChar)
ic.deleteSurroundingText(1, 0);
int toDelete = mCommittedLength;
CharSequence toTheLeft = ic.getTextBeforeCursor(mCommittedLength, 0);
if (toTheLeft != null && toTheLeft.length() > 0
&& isWordSeparator(toTheLeft.charAt(0))) {
toDelete--;
}
ic.deleteSurroundingText(toDelete, 0);
ic.setComposingText(mComposing, 1);
TextEntryState.backspace();
ic.endBatchEdit();
postUpdateSuggestions();
} else {
sendDownUpKeyEvents(KeyEvent.KEYCODE_DEL);
mJustRevertedSeparator = null;
}
}
// private void setOldSuggestions() {
// //mShowingVoiceSuggestions = false;
// if (mCandidateView != null && mCandidateView.isShowingAddToDictionaryHint()) {
// return;
// }
// InputConnection ic = getCurrentInputConnection();
// if (ic == null) return;
// if (!mPredicting) {
// // Extract the selected or touching text
// EditingUtil.SelectedWord touching = EditingUtil.getWordAtCursorOrSelection(ic,
// mLastSelectionStart, mLastSelectionEnd, mWordSeparators);
//
// if (touching != null && touching.word.length() > 1) {
// ic.beginBatchEdit();
//
// if (!applyVoiceAlternatives(touching) && !applyTypedAlternatives(touching)) {
// abortCorrection(true);
// } else {
// TextEntryState.selectedForCorrection();
// EditingUtil.underlineWord(ic, touching);
// }
//
// ic.endBatchEdit();
// } else {
// abortCorrection(true);
// setNextSuggestions(); // Show the punctuation suggestions list
// }
// } else {
// abortCorrection(true);
// }
// }
private void setNextSuggestions() {
setSuggestions(new ArrayList<CharSequence>(), false, false, false);
}
public boolean isWordSeparator(int code) {
// String separators = getWordSeparators();
// return separators.contains(String.valueOf((char)code));
return (!isAlphabet(code));
}
private void sendSpace() {
sendKeyChar((char)KeyCodes.SPACE);
updateShiftKeyState(getCurrentInputEditorInfo());
}
public boolean preferCapitalization() {
return mWord.isFirstCharCapitalized();
}
public void swipeRight(boolean onSpaceBar) {
final int keyCode = mConfig.getSwipeRightKeyCode();
if (keyCode != 0)
onKey(keyCode, new int[]{keyCode}, SWIPE_CORD, SWIPE_CORD);
}
public void swipeLeft(boolean onSpaceBar) {
final int keyCode = mConfig.getSwipeLeftKeyCode();
if (keyCode != 0)
onKey(keyCode, new int[]{keyCode}, SWIPE_CORD, SWIPE_CORD);
}
private void nextAlterKeyboard(EditorInfo currentEditorInfo)
{
if(DEBUG)Log.d(TAG, "nextAlterKeyboard: currentEditorInfo.inputType="
+ currentEditorInfo.inputType);
//AnyKeyboard currentKeyboard = mKeyboardSwitcher.getCurrentKeyboard();
if (mCurrentKeyboard == null) {
if (DEBUG) Log.d("AnySoftKeyboard", "nextKeyboard: Looking for next keyboard. No current keyboard.");
} else {
if (DEBUG) Log.d("AnySoftKeyboard", "nextKeyboard: Looking for next keyboard. Current keyboard is:"
+ mCurrentKeyboard.getKeyboardName());
}
mCurrentKeyboard = mKeyboardSwitcher.nextAlterKeyboard(currentEditorInfo);
Log.i(TAG, "nextAlterKeyboard: Setting next keyboard to: "
+ mCurrentKeyboard.getKeyboardName());
}
private void nextKeyboard(EditorInfo currentEditorInfo,
KeyboardSwitcher.NextKeyboardType type) {
if (DEBUG) Log.d(TAG, "nextKeyboard: currentEditorInfo.inputType="
+ currentEditorInfo.inputType + " type:" + type);
// in numeric keyboards, the LANG key will go back to the original
// alphabet keyboard-
// so no need to look for the next keyboard, 'mLastSelectedKeyboard'
// holds the last
// keyboard used.
mCurrentKeyboard = mKeyboardSwitcher.nextKeyboard(currentEditorInfo, type);
if (!(mCurrentKeyboard instanceof GenericKeyboard))
mSentenceSeparators = mCurrentKeyboard.getSentenceSeparators();
setKeyboardStuff(currentEditorInfo, type, mCurrentKeyboard);
}
private void setKeyboardStuff(EditorInfo currentEditorInfo,
KeyboardSwitcher.NextKeyboardType type, AnyKeyboard currentKeyboard) {
Log.i(TAG, "nextKeyboard: Setting next keyboard to: "
+ currentKeyboard.getKeyboardName());
updateShiftKeyState(currentEditorInfo);
mCapsLock = currentKeyboard.isShiftLocked();
mLastCharacterShiftState = LAST_CHAR_SHIFT_STATE_DEFAULT;
// changing dictionary
setDictionariesForCurrentKeyboard();
// Notifying if needed
if ((mKeyboardChangeNotificationType
.equals(KEYBOARD_NOTIFICATION_ALWAYS))
|| (mKeyboardChangeNotificationType
.equals(KEYBOARD_NOTIFICATION_ON_PHYSICAL) && (type == NextKeyboardType.AlphabetSupportsPhysical))) {
notifyKeyboardChangeIfNeeded();
}
}
public void swipeDown(boolean onSpaceBar) {
final int keyCode = mConfig.getSwipeDownKeyCode();
if (keyCode != 0)
onKey(keyCode, new int[]{keyCode}, SWIPE_CORD, SWIPE_CORD);
}
public void swipeUp(boolean onSpaceBar) {
if (DEBUG) Log.d(TAG, "swipeUp: started at spacebar? "+onSpaceBar);
final int keyCode = mConfig.getSwipeUpKeyCode();
if (keyCode != 0)
onKey(keyCode, new int[]{keyCode}, SWIPE_CORD, SWIPE_CORD);
}
public void onPress(int primaryCode) {
if (DEBUG) Log.d(TAG, "onPress:"+primaryCode);
if (mVibrationDuration > 0 && primaryCode!=0) {
mVibrator.vibrate(mVibrationDuration);
}
final boolean distinctMultiTouch = mInputView.hasDistinctMultitouch();
if (distinctMultiTouch && primaryCode == Keyboard.KEYCODE_SHIFT) {
mShiftKeyState.onPress();
handleShift(false);
} else {
mShiftKeyState.onOtherKeyPressed();
}
if (distinctMultiTouch && primaryCode == AnyKeyboard.KEYCODE_CTRL) {
mControlKeyState.onPress();
handleControl(false);
} else {
mControlKeyState.onOtherKeyPressed();
}
if (mSoundOn && (!mSilentMode) && primaryCode!=0) {
final int keyFX;
switch (primaryCode) {
case 13:
case 10:
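//13 (CR) and 10 (LF) are both treated as the Enter key for the key-press sound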
keyFX = AudioManager.FX_KEYPRESS_RETURN;
break;
case Keyboard.KEYCODE_DELETE:
keyFX = AudioManager.FX_KEYPRESS_DELETE;
break;
case 32:
keyFX = AudioManager.FX_KEYPRESS_SPACEBAR;
break;
default:
keyFX = AudioManager.FX_KEY_CLICK;
}
final float fxVolume;
//creating a scope to make sure volume and maxVolume
//are not used outside of it
{
final int volume;
final int maxVolume;
if (mSoundVolume > 0)
{
volume = mSoundVolume;
maxVolume = 100;
if (DEBUG)
Log.d(TAG, "Sound on key-pressed. Taking custom volume: "+volume+" out of "+maxVolume);
//pre-eclair
// volume is between 0..8 (float)
//eclair
// volume is between 0..1 (float)
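// e.g. a custom volume of 50 out of 100 becomes 0.5 on Eclair and newer, and 4.0 on older platforms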
if (Workarounds.getApiLevel() >= 5)
{
fxVolume = ((float)volume)/((float)maxVolume);
}
else
{
fxVolume = 8*((float)volume)/((float)maxVolume);
}
}
else
{
fxVolume = -1.0f;
}
}
if (DEBUG) Log.d(TAG, "Sound on key-pressed. Sound ID:"
+ keyFX + " with volume " + fxVolume);
mAudioManager.playSoundEffect(keyFX, fxVolume);
}
}
public void onRelease(int primaryCode) {
if (DEBUG) Log.d(TAG, "onRelease:"+primaryCode);
// vibrate();
// Reset any drag flags in the keyboard
//((AnyKeyboard) mInputView.getKeyboard()).keyReleased();
//vibrate();
final boolean distinctMultiTouch = mInputView.hasDistinctMultitouch();
if (distinctMultiTouch && primaryCode == Keyboard.KEYCODE_SHIFT) {
if (mShiftKeyState.isMomentary())
handleShift(true);
mShiftKeyState.onRelease();
}
if (distinctMultiTouch && primaryCode == AnyKeyboard.KEYCODE_CTRL) {
if (mControlKeyState.isMomentary())
handleControl(true);
mControlKeyState.onRelease();
}
}
// receive ringer mode changes to detect silent mode
private BroadcastReceiver mReceiver = new BroadcastReceiver() {
@Override
public void onReceive(Context context, Intent intent) {
updateRingerMode();
}
};
// update flags for silent mode
private void updateRingerMode() {
mSilentMode = (mAudioManager.getRingerMode() != AudioManager.RINGER_MODE_NORMAL);
}
private void loadSettings() {
// setting all values to default
PreferenceManager.setDefaultValues(this, R.layout.prefs, false);
// Get the settings preferences
SharedPreferences sp = PreferenceManager
.getDefaultSharedPreferences(this);
mVibrationDuration = Integer.parseInt(sp.getString(
getString(R.string.settings_key_vibrate_on_key_press_duration),
getString(R.string.settings_default_vibrate_on_key_press_duration)));
mSoundOn = sp.getBoolean(getString(R.string.settings_key_sound_on), getResources().getBoolean(R.bool.settings_default_sound_on));
if (mSoundOn) {
Log.i(TAG, "Loading sounds effects from AUDIO_SERVICE due to configuration change.");
mAudioManager.loadSoundEffects();
}
// checking the volume
boolean customVolume = sp.getBoolean("use_custom_sound_volume", false);
int newVolume;
if (customVolume) {
newVolume = sp.getInt("custom_sound_volume", 0) + 1;
Log.i(TAG, "Custom volume checked: " + newVolume+" out of 100");
} else {
Log.i(TAG, "Custom volume un-checked.");
newVolume = -1;
}
mSoundVolume = newVolume;
// in order to support the old type of configuration
mKeyboardChangeNotificationType = sp.getString(
getString(R.string.settings_key_physical_keyboard_change_notification_type),
getString(R.string.settings_default_physical_keyboard_change_notification_type));
// now clearing the notification, and it will be re-shown if needed
mInputMethodManager.hideStatusIcon(mImeToken);
//mNotificationManager.cancel(KEYBOARD_NOTIFICATION_ID);
// should it be always on?
if (mKeyboardChangeNotificationType.equals(KEYBOARD_NOTIFICATION_ALWAYS))
notifyKeyboardChangeIfNeeded();
mAutoCap = sp.getBoolean("auto_caps", true);
boolean newShowSuggestions = sp.getBoolean("candidates_on", true);
boolean suggestionsChanged = (newShowSuggestions != mShowSuggestions);
mShowSuggestions = newShowSuggestions;
// why check that it is "false"? Because it starts as "false", so it is
// not 'changed'.
if (suggestionsChanged || (!mShowSuggestions))
setDictionariesForCurrentKeyboard();
mAutoComplete = sp.getBoolean("auto_complete", true) && mShowSuggestions;
mQuickFixes = sp.getBoolean("quick_fix", true);
mAutoCorrectOn = /* mSuggest != null && *//*
* Suggestion always exists,
* maybe not at the moment, but
* shortly
*/
(mAutoComplete || mQuickFixes);
mCorrectionMode = mAutoComplete ? 2
: (mShowSuggestions/* mQuickFixes */? 1 : 0);
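//2 corresponds to Suggest.CORRECTION_FULL (auto-complete), 1 to basic suggestions/quick-fixes, 0 to Suggest.CORRECTION_NONE (assuming the usual constant values)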
mSmileyOnShortPress = sp.getBoolean(getString(R.string.settings_key_emoticon_long_press_opens_popup), getResources().getBoolean(R.bool.settings_default_emoticon_long_press_opens_popup));
// mSmileyPopupType = sp.getString(getString(R.string.settings_key_smiley_popup_type), getString(R.string.settings_default_smiley_popup_type));
mOverrideQuickTextText = sp.getString(getString(R.string.settings_key_emoticon_default_text), null);
((ConfigurationImpl) mConfig).handleConfigurationChange(sp);
}
private void setDictionariesForCurrentKeyboard() {
if (mSuggest != null) {
if (!mPredictionOn) {
if (DEBUG)Log.d(TAG, "No suggestion is required. I'll try to release memory from the dictionary.");
//DictionaryFactory.getInstance().releaseAllDictionaries();
mSuggest.setMainDictionary(null);
mSuggest.setUserDictionary(null);
mSuggest.setAutoDictionary(null);
mLastDictionaryRefresh = -1;
} else {
mLastDictionaryRefresh = SystemClock.elapsedRealtime();
// It is null at the creation of the application.
if ((mKeyboardSwitcher != null)
&& mKeyboardSwitcher.isAlphabetMode()) {
AnyKeyboard currentKeyboard = mKeyboardSwitcher.getCurrentKeyboard();
// if there is a mapping in the settings, we'll use that, else we'll
// return the default
String mappingSettingsKey = getDictionaryOverrideKey(currentKeyboard);
String defaultDictionary = currentKeyboard.getDefaultDictionaryLocale();
String dictionaryValue = getSharedPreferences().getString(mappingSettingsKey, null);
Dictionary dictionary = null;
if (dictionaryValue == null){
dictionary = DictionaryFactory.getInstance().getDictionaryByLanguage(currentKeyboard.getDefaultDictionaryLocale(), this);
} else {
if (DEBUG)
{
Log.d("AnySoftKeyboard", "Default dictionary '" + (defaultDictionary == null? "None" : defaultDictionary)
+ "' for keyboard '" + currentKeyboard.getKeyboardPrefId()
+ "' has been overridden to '" + dictionaryValue + "'");
}
dictionary = DictionaryFactory.getInstance().getDictionaryById(dictionaryValue, this);
}
mSuggest.setMainDictionary(dictionary);
mUserDictionary = DictionaryFactory.getInstance().createUserDictionary(this, defaultDictionary);
mSuggest.setUserDictionary(mUserDictionary);
mAutoDictionary = DictionaryFactory.getInstance().createAutoDictionary(this, this, defaultDictionary);
mSuggest.setAutoDictionary(mAutoDictionary);
}
}
}
}
private String getDictionaryOverrideKey(AnyKeyboard currentKeyboard) {
String mappingSettingsKey = currentKeyboard.getKeyboardPrefId()
+ "_override_dictionary";
return mappingSettingsKey;
}
private void launchSettings() {
handleClose();
Intent intent = new Intent();
intent.setClass(AnySoftKeyboard.this, MainSettings.class);
intent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
startActivity(intent);
}
private void launchDictionaryOverriding() {
//AnyKeyboard currentKeyboard = mKeyboardSwitcher.getCurrentKeyboard();
final String dictionaryOverridingKey = getDictionaryOverrideKey(mCurrentKeyboard);
AlertDialog.Builder builder = new AlertDialog.Builder(this);
builder.setCancelable(true);
builder.setIcon(R.drawable.icon_8_key);
builder.setTitle(getResources().getString(
R.string.override_dictionary_title,
mCurrentKeyboard.getKeyboardName()));
builder.setNegativeButton(android.R.string.cancel, null);
ArrayList<CharSequence> dictionaryIds = new ArrayList<CharSequence>();
ArrayList<CharSequence> dictionaries = new ArrayList<CharSequence>();
// null dictionary is handled as the default for the keyboard
dictionaryIds.add(null);
dictionaries.add(getString(R.string.override_dictionary_default));
//going over all installed dictionaries
for (DictionaryAddOnAndBuilder dictionaryBuilder : ExternalDictionaryFactory.getAllAvailableExternalDictionaries(this)) {
dictionaryIds.add(dictionaryBuilder.getId());
String description = dictionaryBuilder.getDescription();
if(description != null && description.length() != 0) {
description = " (" + description + ")";
}
dictionaries.add(dictionaryBuilder.getName() + description);
}
final CharSequence[] ids = new CharSequence[dictionaryIds.size()];
final CharSequence[] items = new CharSequence[dictionaries.size()];
dictionaries.toArray(items);
dictionaryIds.toArray(ids);
builder.setItems(items, new DialogInterface.OnClickListener() {
public void onClick(DialogInterface di, int position) {
di.dismiss();
Editor editor = getSharedPreferences().edit();
switch (position) {
case 0:
if (DEBUG) Log.d(TAG, "Dictionary overriden disabled. User selected default.");
editor.remove(dictionaryOverridingKey);
showToastMessage(R.string.override_disabled, true);
break;
default:
if ((position < 0) || (position >= items.length)) {
if (DEBUG) Log.d(TAG, "Dictionary override dialog canceled.");
} else {
CharSequence id = ids[position];
String selectedDictionaryId = (id == null) ? null : id.toString();
String selectedLanguageString = items[position]
.toString();
if (DEBUG) Log.d(TAG, "Dictionary override. User selected "
+ selectedLanguageString + " which corresponds to id "
+ ((selectedDictionaryId == null) ? "(null)" : selectedDictionaryId));
editor.putString(dictionaryOverridingKey,
selectedDictionaryId);
showToastMessage(getString(R.string.override_enabled,
selectedLanguageString), true);
}
break;
}
editor.commit();
setDictionariesForCurrentKeyboard();
}
});
mOptionsDialog = builder.create();
Window window = mOptionsDialog.getWindow();
WindowManager.LayoutParams lp = window.getAttributes();
lp.token = mInputView.getWindowToken();
lp.type = WindowManager.LayoutParams.TYPE_APPLICATION_ATTACHED_DIALOG;
window.setAttributes(lp);
window.addFlags(WindowManager.LayoutParams.FLAG_ALT_FOCUSABLE_IM);
mOptionsDialog.show();
}
private void showOptionsMenu() {
AlertDialog.Builder builder = new AlertDialog.Builder(this);
builder.setCancelable(true);
builder.setIcon(R.drawable.icon_8_key);
builder.setNegativeButton(android.R.string.cancel, null);
CharSequence itemSettings = getString(R.string.ime_settings);
CharSequence itemOverrideDictionary = getString(R.string.override_dictionary);
CharSequence itemInputMethod = getString(R.string.change_ime);
builder.setItems(new CharSequence[] { itemSettings,
itemOverrideDictionary, itemInputMethod },
new DialogInterface.OnClickListener() {
public void onClick(DialogInterface di, int position) {
di.dismiss();
switch (position) {
case 0:
launchSettings();
break;
case 1:
launchDictionaryOverriding();
break;
case 2:
((InputMethodManager) getSystemService(Context.INPUT_METHOD_SERVICE))
.showInputMethodPicker();
break;
}
}
});
builder.setTitle(getResources().getString(R.string.ime_name));
mOptionsDialog = builder.create();
Window window = mOptionsDialog.getWindow();
WindowManager.LayoutParams lp = window.getAttributes();
lp.token = mInputView.getWindowToken();
lp.type = WindowManager.LayoutParams.TYPE_APPLICATION_ATTACHED_DIALOG;
window.setAttributes(lp);
window.addFlags(WindowManager.LayoutParams.FLAG_ALT_FOCUSABLE_IM);
mOptionsDialog.show();
}
@Override
public void onConfigurationChanged(Configuration newConfig) {
// If orientation changed while predicting, commit the change
if (newConfig.orientation != mOrientation) {
commitTyped(getCurrentInputConnection());
mOrientation = newConfig.orientation;
mKeyboardSwitcher.makeKeyboards(true);
}
super.onConfigurationChanged(newConfig);
}
public void onSharedPreferenceChanged(SharedPreferences sharedPreferences, String key) {
if (DEBUG)Log.d(TAG, "onSharedPreferenceChanged - key:" + key);
AnyApplication.requestBackupToCloud();
boolean isKeyboardKey = key.startsWith(KeyboardAddOnAndBuilder.KEYBOARD_PREF_PREFIX);
boolean isDictionaryKey = key.startsWith("dictionary_");
boolean isQuickTextKey = key.equals(getString(R.string.settings_key_active_quick_text_key));
if (isKeyboardKey || isDictionaryKey || isQuickTextKey) {
mKeyboardSwitcher.makeKeyboards(true);
}
loadSettings();
if ( isDictionaryKey ||
key.equals(getString(R.string.settings_key_use_contacts_dictionary)) ||
key.equals(getString(R.string.settings_key_use_auto_dictionary)))
{
setDictionariesForCurrentKeyboard();
}
else if (
//key.equals(getString(R.string.settings_key_top_keyboard_row_id)) ||
key.equals(getString(R.string.settings_key_ext_kbd_bottom_row_key)) ||
key.equals(getString(R.string.settings_key_ext_kbd_top_row_key)) ||
key.equals(getString(R.string.settings_key_ext_kbd_ext_ketboard_key)) ||
key.equals(getString(R.string.settings_key_ext_kbd_hidden_bottom_row_key)) ||
key.equals(getString(R.string.settings_key_keyboard_theme_key)) ||
key.equals("zoom_factor_keys_in_portrait") ||
key.equals("zoom_factor_keys_in_landscape") ||
key.equals(getString(R.string.settings_key_smiley_icon_on_smileys_key)) ||
key.equals(getString(R.string.settings_key_long_press_timeout)) ||
key.equals(getString(R.string.settings_key_multitap_timeout)))
{
//in some cases we do want to force keyboard recreation
handleClose();
mKeyboardSwitcher.makeKeyboards(true);
if (key.equals(getString(R.string.settings_key_keyboard_theme_key)))
{
//also recreate keyboard view
setInputView(onCreateInputView());
setCandidatesView(onCreateCandidatesView());
setCandidatesViewShown(false);
}
}
}
public void appendCharactersToInput(CharSequence textToCommit) {
if (DEBUG)
Log.d(TAG, "appendCharactersToInput: '"+ textToCommit+"'");
for(int index=0; index<textToCommit.length(); index++)
{
final char c = textToCommit.charAt(index);
mWord.add(c, new int[]{c});
}
mComposing.append(textToCommit);
if (mCompletionOn)
getCurrentInputConnection().setComposingText(mWord.getTypedWord(), textToCommit.length());
else
commitTyped(getCurrentInputConnection());
updateShiftKeyState(getCurrentInputEditorInfo());
}
public void deleteLastCharactersFromInput(int countToDelete) {
if (countToDelete == 0)
return;
final int currentLength = mComposing.length();
boolean shouldDeleteUsingCompletion;
if (currentLength > 0) {
shouldDeleteUsingCompletion = true;
if (currentLength > countToDelete) {
mComposing.delete(currentLength - countToDelete, currentLength);
int deletesLeft = countToDelete;
while(deletesLeft > 0)
{
mWord.deleteLast();
deletesLeft--;
}
} else {
mComposing.setLength(0);
mWord.reset();
}
} else {
shouldDeleteUsingCompletion = false;
}
InputConnection ic = getCurrentInputConnection();
if(ic != null){
if (mCompletionOn && shouldDeleteUsingCompletion) {
ic.setComposingText(mComposing, 1);
// updateCandidates();
} else {
ic.deleteSurroundingText(countToDelete, 0);
}
}
updateShiftKeyState(getCurrentInputEditorInfo());
}
public SharedPreferences getSharedPreferences() {
return PreferenceManager.getDefaultSharedPreferences(this);
}
public void showToastMessage(int resId, boolean forShortTime) {
CharSequence text = getResources().getText(resId);
showToastMessage(text, forShortTime);
}
private void showToastMessage(CharSequence text, boolean forShortTime) {
int duration = forShortTime ? Toast.LENGTH_SHORT : Toast.LENGTH_LONG;
if (DEBUG)
Log.v("AnySoftKeyboard", "showToastMessage: '" + text + "'. For: "
+ duration);
Toast.makeText(this.getApplication(), text, duration).show();
}
@Override
public void onLowMemory() {
Log.w(TAG, "The OS has reported that it is low on memory!. I'll try to clear some cache.");
mKeyboardSwitcher.onLowMemory();
//DictionaryFactory.getInstance().onLowMemory(mSuggest.getMainDictionary());
super.onLowMemory();
}
private InputConnection mEditingInput = null;
private TextView mCandidateCloseText;
public void startInputConnectionEdit() {
mEditingInput = getCurrentInputConnection();
if (mEditingInput != null)
mEditingInput.beginBatchEdit();
}
public void endInputConnectionEdit() {
if (mEditingInput != null)
{
try
{
mEditingInput.endBatchEdit();
}
catch(Exception e)
{
//it could be dead already.
e.printStackTrace();
}
}
}
private void showQuickTextKeyPopupKeyboard(QuickTextKey quickTextKey) {
if (mInputView != null) {
if (quickTextKey.getPackageContext() == getApplicationContext()) {
mInputView.simulateLongPress(AnyKeyboard.KEYCODE_QUICK_TEXT);
} else {
mInputView.showQuickTextPopupKeyboard(quickTextKey.getPackageContext(), quickTextKey);
}
}
}
private void showQuickTextKeyPopupList(final QuickTextKey key) {
if (mQuickTextKeyDialog == null) {
String[] names = key.getPopupListNames();
final String[] texts = key.getPopupListValues();
int[] icons = key.getPopupListIconResIds();
final int N = names.length;
List<Map<String, ?>> entries = new ArrayList<Map<String, ?>>();
for (int i = 0; i < N; i++) {
HashMap<String, Object> entry = new HashMap<String, Object>();
entry.put("name", names[i]);
entry.put("text", texts[i]);
if (icons != null) entry.put("icons", icons[i]);
entries.add(entry);
}
int layout;
String[] from;
int[] to;
if (icons == null) {
layout = R.layout.quick_text_key_menu_item_without_icon;
from = new String[] {"name", "text"};
to = new int[] {R.id.quick_text_name, R.id.quick_text_output};
} else {
layout = R.layout.quick_text_key_menu_item_with_icon;
from = new String[] {"name", "text", "icons"};
to = new int[]{R.id.quick_text_name, R.id.quick_text_output, R.id.quick_text_icon};
}
final SimpleAdapter a = new SimpleAdapter(this, entries, layout, from, to);
SimpleAdapter.ViewBinder viewBinder = new SimpleAdapter.ViewBinder() {
public boolean setViewValue(View view, Object data, String textRepresentation) {
if (view instanceof ImageView) {
Drawable img = key.getPackageContext().getResources().getDrawable((Integer) data);
((ImageView) view).setImageDrawable(img);
return true;
}
return false;
}
};
a.setViewBinder(viewBinder);
AlertDialog.Builder b = new AlertDialog.Builder(this);
b.setTitle(getString(R.string.menu_insert_smiley));
b.setCancelable(true);
b.setAdapter(a, new DialogInterface.OnClickListener() {
@SuppressWarnings("unchecked")
public final void onClick(DialogInterface dialog, int which) {
HashMap<String, Object> item = (HashMap<String, Object>) a.getItem(which);
onText((String)item.get("text"));
dialog.dismiss();
}
});
mQuickTextKeyDialog = b.create();
Window window = mQuickTextKeyDialog.getWindow();
WindowManager.LayoutParams lp = window.getAttributes();
lp.token = mInputView.getWindowToken();
lp.type = WindowManager.LayoutParams.TYPE_APPLICATION_ATTACHED_DIALOG;
window.setAttributes(lp);
window.addFlags(WindowManager.LayoutParams.FLAG_ALT_FOCUSABLE_IM);
}
mQuickTextKeyDialog.show();
}
public void promoteToUserDictionary(String word, int frequency) {
if (mUserDictionary.isValidWord(word)) return;
mUserDictionary.addWord(word, frequency);
}
public WordComposer getCurrentWord() {
return mWord;
}
/**
* Override this to control when the soft input area should be shown to
* the user. The default implementation only shows the input view when
* there is no hard keyboard or the keyboard is hidden. If you change what
* this returns, you will need to call {@link #updateInputViewShown()}
* yourself whenever the returned value may have changed to have it
* re-evaluated and applied.
* This needs to be re-coded for Issue 620
*/
@Override
public boolean onEvaluateInputViewShown() {
Configuration config = getResources().getConfiguration();
return config.keyboard == Configuration.KEYBOARD_NOKEYS
|| config.hardKeyboardHidden == Configuration.KEYBOARDHIDDEN_YES;
}
public void onCancel() {
//don't know what to do here.
}
public void forceKeyboardsRecreation() {
if (mKeyboardSwitcher != null) mKeyboardSwitcher.makeKeyboards(true);
}
}
|
A possible fix for issue 787
|
src/com/anysoftkeyboard/AnySoftKeyboard.java
|
A possible fix for issue 787
|
<ide><path>rc/com/anysoftkeyboard/AnySoftKeyboard.java
<ide>
<ide> private static final int MSG_UPDATE_SUGGESTIONS = 0;
<ide> //private static final int MSG_START_TUTORIAL = 1;
<del> private static final int MSG_UPDATE_SHIFT_STATE = 2;
<add> //private static final int MSG_UPDATE_SHIFT_STATE = 2;
<ide>
<ide> //private static final int KEYBOARD_NOTIFICATION_ID = 1;
<ide> /*
<ide> case MSG_UPDATE_SUGGESTIONS:
<ide> performUpdateSuggestions();
<ide> break;
<del> case MSG_UPDATE_SHIFT_STATE:
<del> updateShiftKeyState(getCurrentInputEditorInfo());
<del> break;
<add>// case MSG_UPDATE_SHIFT_STATE:
<add>// updateShiftKeyState(getCurrentInputEditorInfo());
<add>// break;
<add> default:
<add> super.handleMessage(msg);
<ide> }
<ide> }
<ide> };
<ide> }
<ide> }
<ide> mJustAccepted = false;
<del> postUpdateShiftKeyState();
<add> //postUpdateShiftKeyState();
<add> updateShiftKeyState(getCurrentInputEditorInfo());
<ide>
<ide> // Make a note of the cursor position
<ide> mLastSelectionStart = newSelStart;
<ide> //Issue 248
<ide> case KeyEvent.KEYCODE_VOLUME_DOWN:
<ide> case KeyEvent.KEYCODE_VOLUME_UP:
<del> if(AnySoftKeyboard.getInstance().isInputViewShown() == false){
<add> if(isInputViewShown() == false){
<ide> return super.onKeyUp(keyCode, event);
<ide> }
<ide> if(mConfig.useVolumeKeyForLeftRight()){
<ide> //no need of vol up/down sound
<add> updateShiftKeyState(getCurrentInputEditorInfo());
<ide> return true;
<ide> }
<ide> case KeyEvent.KEYCODE_DPAD_DOWN:
<ide> InputConnection ic = getCurrentInputConnection();
<ide> if (ic != null)
<ide> ic.sendKeyEvent(event);
<add>
<add> updateShiftKeyState(getCurrentInputEditorInfo());
<ide> return true;
<ide> }
<ide> break;
<ide> setInputConnectionMetaStateAsCurrentMetaKeyKeyListenerState();
<ide> break;
<ide> }
<del> return super.onKeyUp(keyCode, event);
<add> boolean r = super.onKeyUp(keyCode, event);
<add> updateShiftKeyState(getCurrentInputEditorInfo());
<add> return r;
<ide> }
<ide>
<ide> private String getMetaKeysStates(String place) {
<ide> }
<ide> }
<ide>
<del> private void postUpdateShiftKeyState() {
<add> /*private void postUpdateShiftKeyState() {
<ide> mHandler.removeMessages(MSG_UPDATE_SHIFT_STATE);
<ide> // TODO: Should remove this 300ms delay?
<ide> mHandler.sendMessageDelayed(mHandler.obtainMessage(MSG_UPDATE_SHIFT_STATE), 150);
<del> }
<add> }*/
<ide>
<ide> public void updateShiftKeyState(EditorInfo attr) {
<del> mHandler.removeMessages(MSG_UPDATE_SHIFT_STATE);
<add> //mHandler.removeMessages(MSG_UPDATE_SHIFT_STATE);
<ide> InputConnection ic = getCurrentInputConnection();
<ide> if (ic != null && attr != null && mKeyboardSwitcher.isAlphabetMode() && (mInputView != null)) {
<ide> mInputView.setShifted(mShiftKeyState.isMomentary() || mCapsLock
<ide> }
<ide>
<ide> private void handleShift(boolean reset) {
<del> mHandler.removeMessages(MSG_UPDATE_SHIFT_STATE);
<add> //mHandler.removeMessages(MSG_UPDATE_SHIFT_STATE);
<ide>
<ide> if (mKeyboardSwitcher.isAlphabetMode()) {
<ide> //shift pressed and this is an alphabet keyboard
<ide> handleControl(true);
<ide> mControlKeyState.onRelease();
<ide> }
<add> //the user lifted the finger, let's handle the shift
<add> if (primaryCode != Keyboard.KEYCODE_SHIFT)
<add> updateShiftKeyState(getCurrentInputEditorInfo());
<ide> }
<ide>
<ide> // receive ringer mode changes to detect silent mode
|
|
Java
|
apache-2.0
|
8b4ec678710d5d27c4fbe04a390518a47d757f43
| 0 |
greg-pendlebury/bamboo,nla/bamboo,greg-pendlebury/bamboo,nla/bamboo,greg-pendlebury/bamboo,nla/bamboo,greg-pendlebury/bamboo,greg-pendlebury/bamboo,nla/bamboo
|
package bamboo.task;
import bamboo.core.Db;
import bamboo.core.DbPool;
import org.apache.commons.httpclient.Header;
import org.apache.commons.httpclient.StatusLine;
import org.archive.format.arc.ARCConstants;
import org.archive.io.ArchiveReader;
import org.archive.io.ArchiveReaderFactory;
import org.archive.io.ArchiveRecord;
import org.archive.io.ArchiveRecordHeader;
import org.archive.util.Base32;
import org.archive.util.LaxHttpParser;
import java.io.*;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import static java.nio.charset.StandardCharsets.ISO_8859_1;
public class CdxIndexJob implements Taskmaster.Job {
final private DbPool dbPool;
final private URL cdxServer = getCdxServerUrl();
public CdxIndexJob(DbPool dbPool) {
this.dbPool = dbPool;
}
@Override
public void run(Taskmaster.IProgressMonitor progress) throws IOException {
ExecutorService threadPool = Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors());
try (Db db = dbPool.take()) {
for (Db.Warc warc : db.findWarcsToCdxIndex()) {
threadPool.submit(() -> {
try (Db db2 = dbPool.take()) {
System.out.println("CDX indexing " + warc.id + " " + warc.path);
buildCdx(warc.path);
db2.setWarcCdxIndexed(warc.id, System.currentTimeMillis());
System.out.println("Finished CDX indexing " + warc.id + " " + warc.path);
} catch (Exception e) {
e.printStackTrace();
throw new RuntimeException(e);
}
});
}
threadPool.shutdown();
threadPool.awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS);
} catch (InterruptedException e) {
throw new RuntimeException(e);
} finally {
threadPool.shutdownNow();
}
}
final static DateTimeFormatter warcDateFormat = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss'Z'");
final static DateTimeFormatter arcDateFormat = DateTimeFormatter.ofPattern("yyyyMMddHHmmss");
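// e.g. the WARC date "2014-03-21T05:42:07Z" becomes the ARC date "20140321054207"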
private static String warcToArcDate(String warcDate) {
if (warcDate.length() == 14) {
return warcDate; // already an ARC date
}
return LocalDateTime.parse(warcDate, warcDateFormat).format(arcDateFormat);
}
private static URL getCdxServerUrl() {
String cdxUrl = System.getenv("BAMBOO_CDX_URL");
if (cdxUrl == null) {
throw new IllegalStateException("Environment variable BAMBOO_CDX_URL must be set");
}
try {
return new URL(cdxUrl);
} catch (MalformedURLException e) {
throw new RuntimeException(e);
}
}
private void buildCdx(Path warc) throws IOException {
StringWriter sw = new StringWriter();
sw.write(" CDX N b a m s k r M S V g\n");
writeCdx(warc, sw);
byte[] data = sw.toString().getBytes(StandardCharsets.UTF_8);
HttpURLConnection conn = (HttpURLConnection) cdxServer.openConnection();
conn.setRequestMethod("POST");
conn.addRequestProperty("Content-Type", "text/plain");
conn.setFixedLengthStreamingMode(data.length);
conn.setDoOutput(true);
try (OutputStream out = conn.getOutputStream()) {
out.write(data);
out.flush();
}
try (BufferedReader rdr = new BufferedReader(new InputStreamReader(conn.getInputStream()))) {
String output = rdr.readLine();
int status = conn.getResponseCode();
if (status != 200) {
throw new RuntimeException("Indexing failed: " + output);
}
}
}
private static void writeCdx(Path warc, Writer out) {
String filename = warc.getFileName().toString();
try (ArchiveReader reader = ArchiveReaderFactory.get(warc.toFile())) {
for (ArchiveRecord record : reader) {
String cdxLine = formatCdxLine(filename, record);
if (cdxLine != null) {
out.write(cdxLine);
}
}
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
private static String formatCdxLine(String filename, ArchiveRecord record) throws IOException {
ArchiveRecordHeader h = record.getHeader();
String warcType = (String)h.getHeaderValue("WARC-Type");
if (warcType != null && !warcType.equals("response"))
return null;
if (h.getUrl().startsWith("dns:") || h.getUrl().startsWith("filedesc:"))
return null;
String contentType = null;
String location = null;
// parse HTTP header
String line = new String(LaxHttpParser.readRawLine(record), ISO_8859_1);
if (!StatusLine.startsWithHTTP(line)) {
return null;
}
int status = parseStatusLine(line);
for (Header header : LaxHttpParser.parseHeaders(record, ARCConstants.DEFAULT_ENCODING)) {
switch (header.getName().toLowerCase()) {
case "location":
try {
URL url = new URL(h.getUrl());
location = new URL(url, header.getValue()).toString().replace(" ", "%20");
} catch (MalformedURLException e) {
// skip it
}
break;
case "content-type":
contentType = header.getValue();
break;
}
}
contentType = cleanContentType(contentType);
String digest = (String) h.getHeaderValue("WARC-Payload-Digest");
if (digest == null) {
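// ARC records and older WARCs may lack a payload digest header, so compute a SHA-1/Base32 digest ourselves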
digest = calcDigest(record);
} else if (digest.startsWith("sha1:")) {
digest = digest.substring(5);
}
StringBuilder out = new StringBuilder();
out.append('-').append(' '); // let server do canonicalization
out.append(warcToArcDate(h.getDate())).append(' ');
out.append(h.getUrl().replace(" ", "%20")).append(' ');
out.append(optional(contentType)).append(' ');
out.append(status == -1 ? "-" : Integer.toString(status)).append(' ');
out.append(optional(digest)).append(' ');
out.append(optional(location)).append(' ');
out.append("- "); // TODO: X-Robots-Tag http://noarchive.net/xrobots/
out.append(Long.toString(h.getContentLength())).append(' ');
out.append(Long.toString(h.getOffset())).append(' ');
out.append(filename).append('\n');
return out.toString();
}
private static final Pattern STATUS_LINE = Pattern.compile("\\s*\\S+\\s+(\\d+)(?:\\s.*|$)");
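// captures the numeric status code from a response line, e.g. "HTTP/1.1 404 Not Found" -> 404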
private static int parseStatusLine(String line) {
Matcher m = STATUS_LINE.matcher(line);
if (m.matches()) {
return Integer.parseInt(m.group(1));
} else {
return -1;
}
}
private static String calcDigest(ArchiveRecord record) throws IOException {
String digest;
try {
MessageDigest md = MessageDigest.getInstance("SHA1");
byte[] buf = new byte[8192];
for (; ; ) {
int len = record.read(buf);
if (len < 0) break;
md.update(buf, 0, len);
}
digest = Base32.encode(md.digest());
} catch (NoSuchAlgorithmException e) {
throw new RuntimeException(e);
}
return digest;
}
private static String optional(String s) {
if (s == null) {
return "-";
}
return s;
}
private static String cleanContentType(String contentType) {
if (contentType == null) return null;
contentType = stripAfterChar(contentType, ';');
return stripAfterChar(contentType, ' ');
}
private static String stripAfterChar(String s, int c) {
int i = s.indexOf(c);
if (i > -1) {
return s.substring(0, i);
} else {
return s;
}
}
public static void main(String args[]) {
BufferedWriter out = new BufferedWriter(new OutputStreamWriter(System.out));
writeCdx(Paths.get(args[0]), out);
}
}
|
src/bamboo/task/CdxIndexJob.java
|
package bamboo.task;
import bamboo.core.Db;
import bamboo.core.DbPool;
import org.apache.commons.httpclient.Header;
import org.apache.commons.httpclient.StatusLine;
import org.archive.format.arc.ARCConstants;
import org.archive.io.ArchiveReader;
import org.archive.io.ArchiveReaderFactory;
import org.archive.io.ArchiveRecord;
import org.archive.io.ArchiveRecordHeader;
import org.archive.util.Base32;
import org.archive.util.LaxHttpParser;
import java.io.*;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import static java.nio.charset.StandardCharsets.ISO_8859_1;
public class CdxIndexJob implements Taskmaster.Job {
final private DbPool dbPool;
final private URL cdxServer = getCdxServerUrl();
public CdxIndexJob(DbPool dbPool) {
this.dbPool = dbPool;
}
@Override
public void run(Taskmaster.IProgressMonitor progress) throws IOException {
ExecutorService threadPool = Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors());
try (Db db = dbPool.take()) {
for (Db.Warc warc : db.findWarcsToCdxIndex()) {
threadPool.submit(() -> {
try (Db db2 = dbPool.take()) {
System.out.println("CDX indexing " + warc.id + " " + warc.path);
buildCdx(warc.path);
db2.setWarcCdxIndexed(warc.id, System.currentTimeMillis());
System.out.println("Finished CDX indexing " + warc.id + " " + warc.path);
} catch (Exception e) {
e.printStackTrace();
throw new RuntimeException(e);
}
});
}
threadPool.shutdown();
threadPool.awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS);
} catch (InterruptedException e) {
throw new RuntimeException(e);
} finally {
threadPool.shutdownNow();
}
}
final static DateTimeFormatter warcDateFormat = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss'Z'");
final static DateTimeFormatter arcDateFormat = DateTimeFormatter.ofPattern("yyyyMMddHHmmss");
private static String warcToArcDate(String warcDate) {
if (warcDate.length() == 14) {
return warcDate; // already an ARC date
}
return LocalDateTime.parse(warcDate, warcDateFormat).format(arcDateFormat);
}
private static URL getCdxServerUrl() {
String cdxUrl = System.getenv("BAMBOO_CDX_URL");
if (cdxUrl == null) {
throw new IllegalStateException("Environment variable BAMBOO_CDX_URL must be set");
}
try {
return new URL(cdxUrl);
} catch (MalformedURLException e) {
throw new RuntimeException(e);
}
}
private void buildCdx(Path warc) throws IOException {
StringWriter sw = new StringWriter();
sw.write(" CDX N b a m s k r M S V g\n");
writeCdx(warc, sw);
byte[] data = sw.toString().getBytes(StandardCharsets.UTF_8);
HttpURLConnection conn = (HttpURLConnection) cdxServer.openConnection();
conn.setRequestMethod("POST");
conn.addRequestProperty("Content-Type", "text/plain");
conn.setFixedLengthStreamingMode(data.length);
conn.setDoOutput(true);
try (OutputStream out = conn.getOutputStream()) {
out.write(data);
out.flush();
}
try (BufferedReader rdr = new BufferedReader(new InputStreamReader(conn.getInputStream()))) {
String output = rdr.readLine();
int status = conn.getResponseCode();
if (status != 200) {
throw new RuntimeException("Indexing failed: " + output);
}
}
}
private static void writeCdx(Path warc, Writer out) {
String filename = warc.getFileName().toString();
try (ArchiveReader reader = ArchiveReaderFactory.get(warc.toFile())) {
for (ArchiveRecord record : reader) {
String cdxLine = formatCdxLine(filename, record);
if (cdxLine != null) {
out.write(cdxLine);
}
}
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
private static String formatCdxLine(String filename, ArchiveRecord record) throws IOException {
ArchiveRecordHeader h = record.getHeader();
String warcType = (String)h.getHeaderValue("WARC-Type");
if (warcType != null && !warcType.equals("response"))
return null;
if (h.getUrl().startsWith("dns:") || h.getUrl().startsWith("filedesc:"))
return null;
String contentType = null;
String location = null;
// parse HTTP header
String line = new String(LaxHttpParser.readRawLine(record), ISO_8859_1);
if (!StatusLine.startsWithHTTP(line)) {
throw new RuntimeException("Not a HTTP status line: " + line);
}
int status = parseStatusLine(line);
for (Header header : LaxHttpParser.parseHeaders(record, ARCConstants.DEFAULT_ENCODING)) {
switch (header.getName().toLowerCase()) {
case "location":
try {
URL url = new URL(h.getUrl());
location = new URL(url, header.getValue()).toString().replace(" ", "%20");
} catch (MalformedURLException e) {
// skip it
}
break;
case "content-type":
contentType = header.getValue();
break;
}
}
contentType = cleanContentType(contentType);
String digest = (String) h.getHeaderValue("WARC-Payload-Digest");
if (digest == null) {
digest = calcDigest(record);
} else if (digest.startsWith("sha1:")) {
digest = digest.substring(5);
}
StringBuilder out = new StringBuilder();
out.append('-').append(' '); // let server do canonicalization
out.append(warcToArcDate(h.getDate())).append(' ');
out.append(h.getUrl().replace(" ", "%20")).append(' ');
out.append(optional(contentType)).append(' ');
out.append(status == -1 ? "-" : Integer.toString(status)).append(' ');
out.append(optional(digest)).append(' ');
out.append(optional(location)).append(' ');
out.append("- "); // TODO: X-Robots-Tag http://noarchive.net/xrobots/
out.append(Long.toString(h.getContentLength())).append(' ');
out.append(Long.toString(h.getOffset())).append(' ');
out.append(filename).append('\n');
return out.toString();
}
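// Matches an HTTP status line such as "HTTP/1.1 200 OK" and captures the numeric status code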
private static final Pattern STATUS_LINE = Pattern.compile("\\s*\\S+\\s+(\\d+)(?:\\s.*|$)");
private static int parseStatusLine(String line) {
Matcher m = STATUS_LINE.matcher(line);
if (m.matches()) {
return Integer.parseInt(m.group(1));
} else {
return -1;
}
}
private static String calcDigest(ArchiveRecord record) throws IOException {
String digest;
try {
MessageDigest md = MessageDigest.getInstance("SHA1");
byte[] buf = new byte[8192];
for (; ; ) {
int len = record.read(buf);
if (len < 0) break;
md.update(buf, 0, len);
}
digest = Base32.encode(md.digest());
} catch (NoSuchAlgorithmException e) {
throw new RuntimeException(e);
}
return digest;
}
private static String optional(String s) {
if (s == null) {
return "-";
}
return s;
}
private static String cleanContentType(String contentType) {
if (contentType == null) return null;
contentType = stripAfterChar(contentType, ';');
return stripAfterChar(contentType, ' ');
}
private static String stripAfterChar(String s, int c) {
int i = s.indexOf(c);
if (i > -1) {
return s.substring(0, i);
} else {
return s;
}
}
public static void main(String args[]) {
BufferedWriter out = new BufferedWriter(new OutputStreamWriter(System.out));
writeCdx(Paths.get(args[0]), out);
}
}
|
Skip records with bogus status lines
|
src/bamboo/task/CdxIndexJob.java
|
Skip records with bogus status lines
|
<ide><path>rc/bamboo/task/CdxIndexJob.java
<ide> // parse HTTP header
<ide> String line = new String(LaxHttpParser.readRawLine(record), ISO_8859_1);
<ide> if (!StatusLine.startsWithHTTP(line)) {
<del> throw new RuntimeException("Not a HTTP status line: " + line);
<add> return null;
<ide> }
<ide> int status = parseStatusLine(line);
<ide> for (Header header : LaxHttpParser.parseHeaders(record, ARCConstants.DEFAULT_ENCODING)) {
|
|
JavaScript
|
mit
|
6d3e96d825bacbfb618250eae5732c02394a195a
| 0 |
dotkom/super-duper-fiesta,dotkom/super-duper-fiesta,dotkom/super-duper-fiesta
|
import React, { Fragment } from 'react';
import PropTypes from 'prop-types';
import { connect } from 'react-redux';
import { Link } from 'react-router-dom';
import { adminCreateGenfors, adminLogin } from 'features/auth/actionCreators';
import Button from '../Button';
import { ErrorContainer } from '../Error';
import Heading from '../Heading';
import css from './AdminLogin.css';
function zeroPadNumber(number) {
return number < 10 ? `0${number}` : `${number}`;
}
class AdminLogin extends React.Component {
constructor(props) {
super(props);
this.state = {
date: new Date(),
password: '',
title: '',
};
}
authenticateAdmin(e) {
e.preventDefault();
if (this.props.meetingExists) {
this.props.login(this.state.password);
} else {
this.props.createGenfors(this.state.password, this.state.title, this.state.date);
}
}
render() {
// Reload page if logged in as admin
if (this.props.reloadPage) window.location.reload();
const date = new Date(this.state.date);
const zeroPaddedMonth = zeroPadNumber(date.getMonth() + 1);
const zeroPaddedDay = zeroPadNumber(date.getDate());
const formattedDate =
`${date.getFullYear()}-${zeroPaddedMonth}-${zeroPaddedDay}`;
return (
<div>
<div>
<Heading link="/" title="Administrasjon">
<Link to="/">
<Button>Gå tilbake</Button>
</Link>
</Heading>
<main>
<ErrorContainer />
<div className={css.component}>
<h1>Logg inn som tellekorps</h1>
{(!this.props.loggedIn || (this.props.loggedIn && this.props.meetingExists)) && (
<p>Du må autorisere deg for å få tilgang til denne funksjonaliteten.</p>
)}
{(this.props.loggedIn && !this.props.meetingExists) && (
<p>Vennligst opprett en generalforsamling</p>
)}
{this.props.loggedIn && (
<Fragment>
<form onSubmit={event => this.authenticateAdmin(event)}>
<div className={css.formInputs}>
{!this.props.meetingExists && (
<div className={css.createGenfors}>
<input
type="text"
placeholder="Tittel"
value={this.state.title}
onChange={e => this.setState({ title: e.target.value })}
/>
<input
type="date"
value={formattedDate}
onChange={e => this.setState({ date: new Date(e.target.value) })}
/>
</div>
)}
<div className={css.adminPassword}>
<input
type="password"
placeholder="Administratorpassord"
value={this.state.password}
onChange={e => this.setState({ password: e.target.value })}
/>
</div>
</div>
<Button
background
size="lg"
type="submit"
>
Logg inn
</Button>
</form>
</Fragment>
)}
</div>
</main>
</div>
</div>
);
}
}
AdminLogin.propTypes = {
createGenfors: PropTypes.func.isRequired,
login: PropTypes.func.isRequired,
meetingExists: PropTypes.bool.isRequired,
reloadPage: PropTypes.bool.isRequired,
};
const mapStateToProps = state => ({
meetingExists: state.meeting &&
state.meeting.title &&
state.meeting.title !== '' &&
state.meeting.title.length > 0,
reloadPage: state.auth.reloadPage,
loggedIn: state.auth.loggedIn,
});
const mapDispatchToProps = dispatch => ({
login: (password) => {
dispatch(adminLogin(password));
},
createGenfors: (password, title, date) => {
dispatch(adminCreateGenfors(password, title, date));
},
});
AdminLogin.propTypes = {
login: PropTypes.func.isRequired,
};
export default AdminLogin;
export const AdminLoginContainer = connect(mapStateToProps,
mapDispatchToProps)(AdminLogin);
|
client/src/components/Admin/AdminLogin.js
|
import React, { Fragment } from 'react';
import PropTypes from 'prop-types';
import { connect } from 'react-redux';
import { Link } from 'react-router-dom';
import { adminCreateGenfors, adminLogin } from 'features/auth/actionCreators';
import Button from '../Button';
import { ErrorContainer } from '../Error';
import Heading from '../Heading';
import css from './AdminLogin.css';
function zeroPadNumber(number) {
return number < 10 ? `0${number}` : `${number}`;
}
class AdminLogin extends React.Component {
constructor(props) {
super(props);
this.state = {
date: new Date(),
password: '',
title: '',
};
}
authenticateAdmin(e) {
console.log('wtf')
e.preventDefault();
if (this.props.meetingExists) {
this.props.login(this.state.password);
} else {
this.props.createGenfors(this.state.password, this.state.title, this.state.date);
}
}
render() {
// Reload page if logged in as admin
if (this.props.reloadPage) window.location.reload();
const date = new Date(this.state.date);
const zeroPaddedMonth = zeroPadNumber(date.getMonth() + 1);
const zeroPaddedDay = zeroPadNumber(date.getDate());
const formattedDate =
`${date.getFullYear()}-${zeroPaddedMonth}-${zeroPaddedDay}`;
return (
<div>
<div>
<Heading link="/" title="Administrasjon">
<Link to="/">
<Button>Gå tilbake</Button>
</Link>
</Heading>
<main>
<ErrorContainer />
<div className={css.component}>
<h1>Logg inn som tellekorps</h1>
{(!this.props.loggedIn || this.props.loggedIn && this.props.meetingExists) && (
<p>Du må autorisere deg for å få tilgang til denne funksjonaliteten.</p>
)}
{(this.props.loggedIn && !this.props.meetingExists) && (
<p>Vennligst opprett en generalforsamling</p>
)}
{this.props.loggedIn && (
<Fragment>
<form onSubmit={event => this.authenticateAdmin(event)}>
<div className={css.formInputs}>
{!this.props.meetingExists && (
<div className={css.createGenfors}>
<input
type="text"
placeholder="Tittel"
value={this.state.title}
onChange={e => this.setState({ title: e.target.value })}
/>
<input
type="date"
value={formattedDate}
onChange={e => this.setState({ date: new Date(e.target.value) })}
/>
</div>
)}
<div className={css.adminPassword}>
<input
type="password"
placeholder="Administratorpassord"
value={this.state.password}
onChange={e => this.setState({ password: e.target.value })}
/>
</div>
</div>
<Button
background
size="lg"
type="submit"
>
Logg inn
</Button>
</form>
</Fragment>
)}
</div>
</main>
</div>
</div>
);
}
}
AdminLogin.propTypes = {
createGenfors: PropTypes.func.isRequired,
login: PropTypes.func.isRequired,
meetingExists: PropTypes.bool.isRequired,
reloadPage: PropTypes.bool.isRequired,
};
const mapStateToProps = state => ({
meetingExists: state.meeting &&
state.meeting.title &&
state.meeting.title !== '' &&
state.meeting.title.length > 0,
reloadPage: state.auth.reloadPage,
loggedIn: state.auth.loggedIn,
});
const mapDispatchToProps = dispatch => ({
login: (password) => {
dispatch(adminLogin(password));
},
createGenfors: (password, title, date) => {
dispatch(adminCreateGenfors(password, title, date));
},
});
AdminLogin.propTypes = {
login: PropTypes.func.isRequired,
};
export default AdminLogin;
export const AdminLoginContainer = connect(mapStateToProps,
mapDispatchToProps)(AdminLogin);
|
Avoid ambiguous parenthesis
|
client/src/components/Admin/AdminLogin.js
|
Avoid ambiguous parenthesis
|
<ide><path>lient/src/components/Admin/AdminLogin.js
<ide> }
<ide>
<ide> authenticateAdmin(e) {
<del> console.log('wtf')
<ide> e.preventDefault();
<ide> if (this.props.meetingExists) {
<ide> this.props.login(this.state.password);
<ide> <div className={css.component}>
<ide> <h1>Logg inn som tellekorps</h1>
<ide>
<del> {(!this.props.loggedIn || this.props.loggedIn && this.props.meetingExists) && (
<add> {(!this.props.loggedIn || (this.props.loggedIn && this.props.meetingExists)) && (
<ide> <p>Du må autorisere deg for å få tilgang til denne funksjonaliteten.</p>
<ide> )}
<ide>
|
|
JavaScript
|
mit
|
a5ef1eaae5841899a26d97f6358fa7ece04b8f48
| 0 |
thku/nsw-cps-item,thku/nsw-cps-item
|
function calcItem(item) {
if(item.type == "input") {
// check if card already exists or return default
if($('#wrapper input[name="' + item.variable + '"]').length) {
result = $('#wrapper input[name="' + item.variable + '"]').val();
} else {
result = item.attributes.default;
}
} else {
// get formula
var formula = item.attributes.formula;
// get current values of input and prepare values of output variables
$.each(config.items, function(index, element) {
variable = new RegExp(element.variable, 'g');
if(element.type == "input") {
formula = formula.replace(variable, calcItem(element));
} else {
// check if element already exists
if($('#wrapper input[name="' + element.variable + '"]').length) {
formula = formula.replace(variable, $('#wrapper input[name="' + element.variable + '"]').val());
} else {
formula = formula.replace(variable, element.attributes.default);
}
}
});
// prepare output variable
result = eval(formula);
}
// check limits
if((item.attributes.max !== null) && (result > item.attributes.max)) {
result = item.attributes.max;
} else if ((item.attributes.min !== null) && (result < item.attributes.min)) {
result = item.attributes.min;
}
// round result
if((item.attributes.round !== null) && (item.type == "output")) {
result = Math.round(result, item.attributes.round);
}
// return result
return result;
}
|
assets/js/calcitem.js
|
function calcItem(item) {
if(item.type == "input") {
// check if card already exists or return default
if($('#wrapper input[name="' + item.variable + '"]').length) {
result = $('#wrapper input[name="' + item.variable + '"]').val();
} else {
result = item.attributes.default;
}
} else {
// get formula
var formula = item.attributes.formula;
// get current values of input and prepare values of output variables
$.each(config.items, function(index, element) {
variable = new RegExp(element.variable, 'g');
if(element.type == "input") {
formula = formula.replace(variable, calcItem(element));
} else {
// check if element already exists
if($('#wrapper input[name="' + element.variable + '"]').length) {
formula = formula.replace(variable, $('#wrapper input[name="' + element.variable + '"]').val());
} else {
formula = formula.replace(variable, element.attributes.default);
}
}
});
// prepare output variable
result = eval(formula);
}
// check limits
if((item.attributes.max !== null) && (result > item.attributes.max)) {
result = item.attributes.max;
} else if ((item.attributes.min !== null) && (result < item.attributes.min)) {
result = item.attributes.min;
}
// return result
return result;
}
|
adding round function for output variables
|
assets/js/calcitem.js
|
adding round function for output variables
|
<ide><path>ssets/js/calcitem.js
<ide> result = item.attributes.min;
<ide> }
<ide>
<add> // round result
<add> if((item.attributes.round !== null) && (item.type == "output")) {
<add> result = Math.round(result, item.attributes.round);
<add> }
<add>
<ide> // return result
<ide> return result;
<ide> }
|
|
JavaScript
|
mit
|
301155684fffe598fc4f7ba780c89d77ec1bfcab
| 0 |
pex-gl/pex-renderer,pex-gl/pex-renderer
|
import { vec3, vec4, mat3, mat4 } from "pex-math";
import { aabb } from "pex-geom";
import createPassDescriptors from "./renderer/passes.js";
export default function createRenderPipelineSystem(opts) {
const { ctx, resourceCache, renderGraph } = opts;
ctx.gl.getExtension("WEBGL_color_buffer_float");
ctx.gl.getExtension("WEBGL_color_buffer_half_float");
ctx.gl.getExtension("EXT_color_buffer_half_float");
ctx.gl.getExtension("EXT_color_buffer_half_float");
ctx.gl.getExtension("EXT_shader_texture_lod");
ctx.gl.getExtension("OES_standard_derivatives");
ctx.gl.getExtension("WEBGL_draw_buffers");
ctx.gl.getExtension("OES_texture_float");
ctx.gl.getExtension("EXT_float_blend");
const dummyTextureCube = ctx.textureCube({ width: 4, height: 4 });
const tempMat4 = mat4.create(); //FIXME
const passes = createPassDescriptors(ctx);
let clearCmd = {
pass: ctx.pass({
clearColor: [0, 0, 0, 0],
clearDepth: 1,
}),
};
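// Rounds n up to the next power of two by smearing the highest set bit into all lower bits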
function nextPowerOfTwo(n) {
if (n === 0) return 1;
n--;
n |= n >> 1;
n |= n >> 2;
n |= n >> 4;
n |= n >> 8;
n |= n >> 16;
return n + 1;
}
function prevPowerOfTwo(n) {
return nextPowerOfTwo(n) / 2;
}
const renderPipelineSystem = {
cache: {},
debug: true,
shadowQuality: 1, //TODO: not implemented shadowQuality
outputEncoding: opts.outputEncoding || ctx.Encoding.Linear,
renderers: [],
};
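// Dispatches a render view to every renderer: shadow stages when shadow mapping, otherwise opaque + background for the main pass and transparent for the transparent pass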
function drawMeshes({
viewport,
cameraEntity,
shadowMapping,
shadowMappingLight,
entities,
renderableEntities,
skybox,
forward,
renderers,
drawTransparent,
backgroundColorTexture,
}) {
// if (backgroundColorTexture) {
// ctx.update(backgroundColorTexture, { mipmap: true });
// }
const renderView = {
viewport: viewport,
};
if (cameraEntity) {
renderView.cameraEntity = cameraEntity;
renderView.camera = cameraEntity.camera;
}
if (shadowMappingLight) {
renderView.camera = {
projectionMatrix: shadowMappingLight._projectionMatrix,
viewMatrix: shadowMappingLight._viewMatrix,
};
}
if (shadowMapping) {
renderers.forEach((renderer) => {
if (renderer.renderStages.shadow) {
renderer.renderStages.shadow(renderView, entities, {
shadowMapping: true,
shadowMappingLight,
});
}
});
} else {
if (!drawTransparent) {
renderers.forEach((renderer) => {
if (renderer.renderStages.opaque) {
renderer.renderStages.opaque(renderView, entities);
}
});
renderers.forEach((renderer) => {
if (renderer.renderStages.background) {
renderer.renderStages.background(renderView, entities);
}
});
}
if (drawTransparent) {
//TODO: capture color buffer and blur it for transmission/refraction
renderers.forEach((renderer) => {
if (renderer.renderStages.transparent) {
renderer.renderStages.transparent(renderView, entities, {
backgroundColorTexture,
});
}
});
}
}
}
// TODO remove, should be in AABB
function aabbToPoints(bbox) {
if (aabb.isEmpty(bbox)) return [];
return [
[bbox[0][0], bbox[0][1], bbox[0][2], 1],
[bbox[1][0], bbox[0][1], bbox[0][2], 1],
[bbox[1][0], bbox[0][1], bbox[1][2], 1],
[bbox[0][0], bbox[0][1], bbox[1][2], 1],
[bbox[0][0], bbox[1][1], bbox[0][2], 1],
[bbox[1][0], bbox[1][1], bbox[0][2], 1],
[bbox[1][0], bbox[1][1], bbox[1][2], 1],
[bbox[0][0], bbox[1][1], bbox[1][2], 1],
];
}
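// Fits an orthographic projection around the shadow casters as seen from the light, then renders their shadow map through the render graph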
renderPipelineSystem.updateDirectionalLightShadowMap = function (
lightEnt,
entities,
shadowCastingEntities,
renderers
) {
const light = lightEnt.directionalLight;
// const position = lightEnt._transform.worldPosition;
// const target = [0, 0, 1, 0];
// const up = [0, 1, 0, 0];
// vec4.multMat4(target, lightEnt._transform.modelMatrix);
// vec3.add(target, position);
// vec4.multMat4(up, lightEnt._transform.modelMatrix);
// mat4.lookAt(light._viewMatrix, position, target, up);
const shadowBboxPoints = shadowCastingEntities.reduce(
(points, entity) =>
points.concat(aabbToPoints(entity.transform.worldBounds)),
[]
);
// TODO: gc vec3.copy, all the bounding box creation
const bboxPointsInLightSpace = shadowBboxPoints.map((p) =>
vec3.multMat4(vec3.copy(p), light._viewMatrix)
);
const sceneBboxInLightSpace = aabb.create();
aabb.fromPoints(sceneBboxInLightSpace, bboxPointsInLightSpace);
// console.log("sceneBboxInLightSpace", ...sceneBboxInLightSpace);
const lightNear = -sceneBboxInLightSpace[1][2];
const lightFar = -sceneBboxInLightSpace[0][2];
light._near = lightNear;
light._far = lightFar;
mat4.ortho(
light._projectionMatrix,
sceneBboxInLightSpace[0][0],
sceneBboxInLightSpace[1][0],
sceneBboxInLightSpace[0][1],
sceneBboxInLightSpace[1][1],
lightNear,
lightFar
);
light.sceneBboxInLightSpace = sceneBboxInLightSpace;
//TODO: can this be all done at once?
let colorMap = resourceCache.texture2D(
passes.directionalLightShadows.colorMapDesc
);
colorMap.name = "TempColorMap\n" + colorMap.id;
let shadowMap = resourceCache.texture2D(
passes.directionalLightShadows.shadowMapDesc
);
shadowMap.name = "ShadowMap\n" + shadowMap.id;
//TODO: need to create new descriptor to get uniq
let passDesc = { ...passes.directionalLightShadows.pass };
passDesc.color[0] = colorMap;
passDesc.depth = shadowMap;
let shadowMapPass = resourceCache.pass(passDesc);
const renderView = {
camera: {
viewMatrix: light._viewMatrix,
projectionMatrix: light._projectionMatrix,
},
viewport: [0, 0, shadowMap.width, shadowMap.height],
};
renderGraph.renderPass({
name: "RenderShadowMap" + lightEnt.id,
pass: shadowMapPass,
renderView: renderView,
render: () => {
light._shadowMap = shadowMap;
drawMeshes({
viewport: renderView.viewport,
//TODO: passing camera entity around is a mess
cameraEntity: {
camera: {
position: lightEnt._transform.worldPosition,
},
},
shadowMapping: true,
shadowMappingLight: light,
entities,
renderableEntities: shadowCastingEntities,
forward: false,
drawTransparent: false,
renderers,
});
},
});
light._shadowMap = shadowMap; // TODO: we borrow it for a frame
// ctx.submit(shadowMapDrawCommand, () => {
// drawMeshes(null, true, light, entities, shadowCastingEntities);
// });
};
renderPipelineSystem.patchDirectionalLight = (directionalLight) => {
directionalLight._viewMatrix = mat4.create();
directionalLight._projectionMatrix = mat4.create();
};
renderPipelineSystem.update = (entities, options = {}) => {
let { renderView, renderers } = options;
// ctx.submit(clearCmd);
const rendererableEntities = entities.filter(
(e) => e.geometry && e.material
);
const cameraEntities = entities.filter((e) => e.camera);
const skyboxEntities = entities.filter((e) => e.skybox);
const directionalLightEntities = entities.filter((e) => e.directionalLight);
const shadowCastingEntities = rendererableEntities.filter(
(e) => e.material.castShadows
);
if (!renderView) {
renderView = {
camera: cameraEntities[0].camera,
viewport: [0, 0, ctx.gl.drawingBufferWidth, ctx.gl.drawingBufferHeight],
};
}
directionalLightEntities.forEach((lightEntity) => {
if (!lightEntity.directionalLight._viewMatrix) {
renderPipelineSystem.patchDirectionalLight(
lightEntity.directionalLight
);
}
if (
lightEntity.directionalLight.castShadows &&
options.shadowPass !== false
) {
renderPipelineSystem.updateDirectionalLightShadowMap(
lightEntity,
entities,
shadowCastingEntities,
renderers
);
}
});
const shadowMaps = directionalLightEntities
.map((e) => {
return e.directionalLight._shadowMap;
})
.filter((_) => _);
// cameraEntities.forEach((camera) => {
let entitiesToDraw = rendererableEntities;
if (renderView.camera.layer) {
entitiesToDraw = rendererableEntities.filter((e) => {
return !e.layer || e.layer == renderView.camera.layer;
});
}
//TODO: this should be done on the fly by render graph
passes.mainPass.outputTextureDesc.width = renderView.viewport[2];
passes.mainPass.outputTextureDesc.height = renderView.viewport[3];
const mainPassOutputTexture = resourceCache.texture2D(
passes.mainPass.outputTextureDesc
);
mainPassOutputTexture.name = `mainPassOutput\n${mainPassOutputTexture.id}`;
passes.mainPass.outputDepthTextureDesc.width = renderView.viewport[2];
passes.mainPass.outputDepthTextureDesc.height = renderView.viewport[3];
const outputDepthTexture = resourceCache.texture2D(
passes.mainPass.outputDepthTextureDesc
);
outputDepthTexture.name = `mainPassDepth\n${outputDepthTexture.id}`;
const mainPass = resourceCache.pass({
color: [mainPassOutputTexture],
depth: outputDepthTexture,
clearColor: [0, 0, 0, 1],
clearDepth: 1,
});
renderGraph.renderPass({
name: `MainPass ${renderView.viewport}`,
uses: [...shadowMaps],
renderView: {
...renderView,
viewport: [0, 0, renderView.viewport[2], renderView.viewport[3]],
},
pass: mainPass,
render: () => {
drawMeshes({
viewport: renderView.viewport,
cameraEntity: renderView.cameraEntity,
shadowMapping: false,
entities: entities,
renderableEntities: entitiesToDraw,
skybox: skyboxEntities[0]?._skybox,
forward: true,
drawTransparent: false,
renderers: renderers,
});
},
});
const needsGrabPass = !!entities.find((e) => e.material?.transmission);
let grabPassColorCopyTexture;
if (needsGrabPass) {
passes.grabPass.colorCopyTextureDesc.width = prevPowerOfTwo(
renderView.viewport[2]
);
passes.grabPass.colorCopyTextureDesc.height = prevPowerOfTwo(
renderView.viewport[3]
);
grabPassColorCopyTexture = resourceCache.texture2D(
passes.grabPass.colorCopyTextureDesc
);
grabPassColorCopyTexture.name = `grapbPassOutput\n${grabPassColorCopyTexture.id}`;
const grabPass = resourceCache.pass({
color: [grabPassColorCopyTexture],
});
const copyTexturePipeline = resourceCache.pipeline(
passes.grabPass.copyTexturePipelineDesc
);
const fullscreenTriangle = resourceCache.fullscreenTriangle();
const copyTextureCmd = {
name: "Copy Texture",
attributes: fullscreenTriangle.attributes,
count: fullscreenTriangle.count,
pipeline: copyTexturePipeline,
uniforms: {
//uViewport: renderView.viewport,
uViewport: [
0,
0,
grabPassColorCopyTexture.width,
grabPassColorCopyTexture.height,
],
uTexture: mainPassOutputTexture,
},
};
renderGraph.renderPass({
name: `GrabPass ${renderView.viewport}`,
uses: [mainPassOutputTexture],
renderView: {
...renderView,
//viewport: [0, 0, renderView.viewport[2], renderView.viewport[3]],
viewport: [
0,
0,
grabPassColorCopyTexture.width,
grabPassColorCopyTexture.height,
],
},
pass: grabPass,
render: () => {
ctx.submit(copyTextureCmd);
},
});
}
// console.log("needsGrabPass", needsGrabPass);
const transparentPass = resourceCache.pass({
color: [mainPassOutputTexture],
depth: outputDepthTexture,
});
renderGraph.renderPass({
name: `TransparentMainPass ${renderView.viewport}`,
uses: [...shadowMaps, grabPassColorCopyTexture].filter((_) => _), //filter out nulls
renderView: {
...renderView,
viewport: [0, 0, renderView.viewport[2], renderView.viewport[3]],
},
pass: transparentPass,
render: () => {
drawMeshes({
viewport: renderView.viewport,
cameraEntity: renderView.cameraEntity,
shadowMapping: false,
entities: entities,
renderableEntities: entitiesToDraw,
skybox: skyboxEntities[0]?._skybox,
forward: true,
drawTransparent: true,
backgroundColorTexture: grabPassColorCopyTexture,
renderers: renderers,
});
},
});
const postProcessingPipeline = resourceCache.pipeline(
passes.tonemap.pipelineDesc
);
const fullscreenTriangle = resourceCache.fullscreenTriangle();
const postProcessingCmd = {
name: "Draw FSTriangle",
attributes: fullscreenTriangle.attributes,
count: fullscreenTriangle.count,
pipeline: postProcessingPipeline,
uniforms: {
uViewport: renderView.viewport,
uTexture: mainPassOutputTexture,
},
};
renderGraph.renderPass({
name: "PostProcessingPass",
// pass: ctx.pass({ color: [{ id: -1 }] }),
uses: [mainPassOutputTexture],
renderView,
render: () => {
ctx.submit(postProcessingCmd);
},
});
// });
};
return renderPipelineSystem;
}
|
systems/render-pipeline.js
|
import { vec3, vec4, mat3, mat4 } from "pex-math";
import { aabb } from "pex-geom";
import createPassDescriptors from "./renderer/passes.js";
export default function createRenderPipelineSystem(opts) {
const { ctx, resourceCache, renderGraph } = opts;
ctx.gl.getExtension("WEBGL_color_buffer_float");
ctx.gl.getExtension("WEBGL_color_buffer_half_float");
ctx.gl.getExtension("EXT_color_buffer_half_float");
ctx.gl.getExtension("EXT_color_buffer_half_float");
ctx.gl.getExtension("EXT_shader_texture_lod");
ctx.gl.getExtension("OES_standard_derivatives");
ctx.gl.getExtension("WEBGL_draw_buffers");
ctx.gl.getExtension("OES_texture_float");
ctx.gl.getExtension("EXT_float_blend");
const dummyTexture2D = ctx.texture2D({ width: 4, height: 4 });
const dummyTextureCube = ctx.textureCube({ width: 4, height: 4 });
const tempMat4 = mat4.create(); //FIXME
const passes = createPassDescriptors(ctx);
let clearCmd = {
pass: ctx.pass({
clearColor: [0, 0, 0, 0],
clearDepth: 1,
}),
};
function nextPowerOfTwo(n) {
if (n === 0) return 1;
n--;
n |= n >> 1;
n |= n >> 2;
n |= n >> 4;
n |= n >> 8;
n |= n >> 16;
return n + 1;
}
function prevPowerOfTwo(n) {
return nextPowerOfTwo(n) / 2;
}
const renderPipelineSystem = {
cache: {},
debug: true,
shadowQuality: 1, //TODO: not implemented shadowQuality
outputEncoding: opts.outputEncoding || ctx.Encoding.Linear,
renderers: [],
};
function drawMeshes({
viewport,
cameraEntity,
shadowMapping,
shadowMappingLight,
entities,
renderableEntities,
skybox,
forward,
renderers,
drawTransparent,
backgroundColorTexture,
}) {
// if (backgroundColorTexture) {
// ctx.update(backgroundColorTexture, { mipmap: true });
// }
const renderView = {
viewport: viewport,
};
if (cameraEntity) {
renderView.cameraEntity = cameraEntity;
renderView.camera = cameraEntity.camera;
}
if (shadowMappingLight) {
renderView.camera = {
projectionMatrix: shadowMappingLight._projectionMatrix,
viewMatrix: shadowMappingLight._viewMatrix,
};
}
if (shadowMapping) {
renderers.forEach((renderer) => {
if (renderer.renderStages.shadow) {
renderer.renderStages.shadow(renderView, entities, {
shadowMapping: true,
shadowMappingLight,
});
}
});
} else {
if (!drawTransparent) {
renderers.forEach((renderer) => {
if (renderer.renderStages.opaque) {
renderer.renderStages.opaque(renderView, entities);
}
});
renderers.forEach((renderer) => {
if (renderer.renderStages.background) {
renderer.renderStages.background(renderView, entities);
}
});
}
if (drawTransparent) {
//TODO: capture color buffer and blur it for transmission/refraction
renderers.forEach((renderer) => {
if (renderer.renderStages.transparent) {
renderer.renderStages.transparent(renderView, entities, {
backgroundColorTexture,
});
}
});
}
}
}
// TODO remove, should be in AABB
function aabbToPoints(bbox) {
if (aabb.isEmpty(bbox)) return [];
return [
[bbox[0][0], bbox[0][1], bbox[0][2], 1],
[bbox[1][0], bbox[0][1], bbox[0][2], 1],
[bbox[1][0], bbox[0][1], bbox[1][2], 1],
[bbox[0][0], bbox[0][1], bbox[1][2], 1],
[bbox[0][0], bbox[1][1], bbox[0][2], 1],
[bbox[1][0], bbox[1][1], bbox[0][2], 1],
[bbox[1][0], bbox[1][1], bbox[1][2], 1],
[bbox[0][0], bbox[1][1], bbox[1][2], 1],
];
}
renderPipelineSystem.updateDirectionalLightShadowMap = function (
lightEnt,
entities,
shadowCastingEntities,
renderers
) {
const light = lightEnt.directionalLight;
// const position = lightEnt._transform.worldPosition;
// const target = [0, 0, 1, 0];
// const up = [0, 1, 0, 0];
// vec4.multMat4(target, lightEnt._transform.modelMatrix);
// vec3.add(target, position);
// vec4.multMat4(up, lightEnt._transform.modelMatrix);
// mat4.lookAt(light._viewMatrix, position, target, up);
const shadowBboxPoints = shadowCastingEntities.reduce(
(points, entity) =>
points.concat(aabbToPoints(entity.transform.worldBounds)),
[]
);
// TODO: gc vec3.copy, all the bounding box creation
const bboxPointsInLightSpace = shadowBboxPoints.map((p) =>
vec3.multMat4(vec3.copy(p), light._viewMatrix)
);
const sceneBboxInLightSpace = aabb.create();
aabb.fromPoints(sceneBboxInLightSpace, bboxPointsInLightSpace);
// console.log("sceneBboxInLightSpace", ...sceneBboxInLightSpace);
const lightNear = -sceneBboxInLightSpace[1][2];
const lightFar = -sceneBboxInLightSpace[0][2];
light._near = lightNear;
light._far = lightFar;
mat4.ortho(
light._projectionMatrix,
sceneBboxInLightSpace[0][0],
sceneBboxInLightSpace[1][0],
sceneBboxInLightSpace[0][1],
sceneBboxInLightSpace[1][1],
lightNear,
lightFar
);
light.sceneBboxInLightSpace = sceneBboxInLightSpace;
//TODO: can this be all done at once?
let colorMap = resourceCache.texture2D(
passes.directionalLightShadows.colorMapDesc
);
colorMap.name = "TempColorMap\n" + colorMap.id;
let shadowMap = resourceCache.texture2D(
passes.directionalLightShadows.shadowMapDesc
);
shadowMap.name = "ShadowMap\n" + shadowMap.id;
//TODO: need to create new descriptor to get uniq
let passDesc = { ...passes.directionalLightShadows.pass };
passDesc.color[0] = colorMap;
passDesc.depth = shadowMap;
let shadowMapPass = resourceCache.pass(passDesc);
const renderView = {
camera: {
viewMatrix: light._viewMatrix,
projectionMatrix: light._projectionMatrix,
},
viewport: [0, 0, shadowMap.width, shadowMap.height],
};
renderGraph.renderPass({
name: "RenderShadowMap" + lightEnt.id,
pass: shadowMapPass,
renderView: renderView,
render: () => {
light._shadowMap = shadowMap;
drawMeshes({
viewport: renderView.viewport,
//TODO: passing camera entity around is a mess
cameraEntity: {
camera: {
position: lightEnt._transform.worldPosition,
},
},
shadowMapping: true,
shadowMappingLight: light,
entities,
renderableEntities: shadowCastingEntities,
forward: false,
drawTransparent: false,
renderers,
});
},
});
light._shadowMap = shadowMap; // TODO: we borrow it for a frame
// ctx.submit(shadowMapDrawCommand, () => {
// drawMeshes(null, true, light, entities, shadowCastingEntities);
// });
};
renderPipelineSystem.patchDirectionalLight = (directionalLight) => {
directionalLight._viewMatrix = mat4.create();
directionalLight._projectionMatrix = mat4.create();
};
renderPipelineSystem.update = (entities, options = {}) => {
let { renderView, renderers } = options;
// ctx.submit(clearCmd);
const rendererableEntities = entities.filter(
(e) => e.geometry && e.material
);
const cameraEntities = entities.filter((e) => e.camera);
const skyboxEntities = entities.filter((e) => e.skybox);
const directionalLightEntities = entities.filter((e) => e.directionalLight);
const shadowCastingEntities = rendererableEntities.filter(
(e) => e.material.castShadows
);
if (!renderView) {
renderView = {
camera: cameraEntities[0].camera,
viewport: [0, 0, ctx.gl.drawingBufferWidth, ctx.gl.drawingBufferHeight],
};
}
directionalLightEntities.forEach((lightEntity) => {
if (!lightEntity.directionalLight._viewMatrix) {
renderPipelineSystem.patchDirectionalLight(
lightEntity.directionalLight
);
}
if (
lightEntity.directionalLight.castShadows &&
options.shadowPass !== false
) {
renderPipelineSystem.updateDirectionalLightShadowMap(
lightEntity,
entities,
shadowCastingEntities,
renderers
);
}
});
const shadowMaps = directionalLightEntities.map((e) => {
return e.directionalLight._shadowMap;
});
// cameraEntities.forEach((camera) => {
let entitiesToDraw = rendererableEntities;
if (renderView.camera.layer) {
entitiesToDraw = rendererableEntities.filter((e) => {
return !e.layer || e.layer == renderView.camera.layer;
});
}
//TODO: this should be done on the fly by render graph
passes.mainPass.outputTextureDesc.width = renderView.viewport[2];
passes.mainPass.outputTextureDesc.height = renderView.viewport[3];
const mainPassOutputTexture = resourceCache.texture2D(
passes.mainPass.outputTextureDesc
);
mainPassOutputTexture.name = `mainPassOutput\n${mainPassOutputTexture.id}`;
passes.mainPass.outputDepthTextureDesc.width = renderView.viewport[2];
passes.mainPass.outputDepthTextureDesc.height = renderView.viewport[3];
const outputDepthTexture = resourceCache.texture2D(
passes.mainPass.outputDepthTextureDesc
);
outputDepthTexture.name = `mainPassDepth\n${outputDepthTexture.id}`;
const mainPass = resourceCache.pass({
color: [mainPassOutputTexture],
depth: outputDepthTexture,
clearColor: [0, 0, 0, 1],
clearDepth: 1,
});
renderGraph.renderPass({
name: `MainPass ${renderView.viewport}`,
uses: [...shadowMaps],
renderView: {
...renderView,
viewport: [0, 0, renderView.viewport[2], renderView.viewport[3]],
},
pass: mainPass,
render: () => {
drawMeshes({
viewport: renderView.viewport,
cameraEntity: renderView.cameraEntity,
shadowMapping: false,
entities: entities,
renderableEntities: entitiesToDraw,
skybox: skyboxEntities[0]?._skybox,
forward: true,
drawTransparent: false,
renderers: renderers,
});
},
});
const needsGrabPass = !!entities.find((e) => e.material?.transmission);
let grabPassColorCopyTexture;
if (needsGrabPass) {
passes.grabPass.colorCopyTextureDesc.width = prevPowerOfTwo(
renderView.viewport[2]
);
passes.grabPass.colorCopyTextureDesc.height = prevPowerOfTwo(
renderView.viewport[3]
);
grabPassColorCopyTexture = resourceCache.texture2D(
passes.grabPass.colorCopyTextureDesc
);
grabPassColorCopyTexture.name = `grapbPassOutput\n${grabPassColorCopyTexture.id}`;
const grabPass = resourceCache.pass({
color: [grabPassColorCopyTexture],
});
const copyTexturePipeline = resourceCache.pipeline(
passes.grabPass.copyTexturePipelineDesc
);
const fullscreenTriangle = resourceCache.fullscreenTriangle();
const copyTextureCmd = {
name: "Copy Texture",
attributes: fullscreenTriangle.attributes,
count: fullscreenTriangle.count,
pipeline: copyTexturePipeline,
uniforms: {
//uViewport: renderView.viewport,
uViewport: [
0,
0,
grabPassColorCopyTexture.width,
grabPassColorCopyTexture.height,
],
uTexture: mainPassOutputTexture,
},
};
renderGraph.renderPass({
name: `GrabPass ${renderView.viewport}`,
uses: [mainPassOutputTexture],
renderView: {
...renderView,
//viewport: [0, 0, renderView.viewport[2], renderView.viewport[3]],
viewport: [
0,
0,
grabPassColorCopyTexture.width,
grabPassColorCopyTexture.height,
],
},
pass: grabPass,
render: () => {
ctx.submit(copyTextureCmd);
},
});
}
// console.log("needsGrabPass", needsGrabPass);
const transparentPass = resourceCache.pass({
color: [mainPassOutputTexture],
depth: outputDepthTexture,
});
renderGraph.renderPass({
name: `TransparentMainPass ${renderView.viewport}`,
uses: [...shadowMaps, grabPassColorCopyTexture].filter((_) => _), //filter out nulls
renderView: {
...renderView,
viewport: [0, 0, renderView.viewport[2], renderView.viewport[3]],
},
pass: transparentPass,
render: () => {
drawMeshes({
viewport: renderView.viewport,
cameraEntity: renderView.cameraEntity,
shadowMapping: false,
entities: entities,
renderableEntities: entitiesToDraw,
skybox: skyboxEntities[0]?._skybox,
forward: true,
drawTransparent: true,
backgroundColorTexture: grabPassColorCopyTexture,
renderers: renderers,
});
},
});
const postProcessingPipeline = resourceCache.pipeline(
passes.tonemap.pipelineDesc
);
const fullscreenTriangle = resourceCache.fullscreenTriangle();
const postProcessingCmd = {
name: "Draw FSTriangle",
attributes: fullscreenTriangle.attributes,
count: fullscreenTriangle.count,
pipeline: postProcessingPipeline,
uniforms: {
uViewport: renderView.viewport,
uTexture: mainPassOutputTexture,
},
};
renderGraph.renderPass({
name: "PostProcessingPass",
// pass: ctx.pass({ color: [{ id: -1 }] }),
uses: [mainPassOutputTexture],
renderView,
render: () => {
ctx.submit(postProcessingCmd);
},
});
// });
};
return renderPipelineSystem;
}
|
fix(systems/render-pipeline): use only not null shadowmaps
|
systems/render-pipeline.js
|
fix(systems/render-pipeline): use only not null shadowmaps
|
<ide><path>ystems/render-pipeline.js
<ide> ctx.gl.getExtension("OES_texture_float");
<ide> ctx.gl.getExtension("EXT_float_blend");
<ide>
<del> const dummyTexture2D = ctx.texture2D({ width: 4, height: 4 });
<ide> const dummyTextureCube = ctx.textureCube({ width: 4, height: 4 });
<ide> const tempMat4 = mat4.create(); //FIXME
<ide> const passes = createPassDescriptors(ctx);
<ide> }
<ide> });
<ide>
<del> const shadowMaps = directionalLightEntities.map((e) => {
<del> return e.directionalLight._shadowMap;
<del> });
<add> const shadowMaps = directionalLightEntities
<add> .map((e) => {
<add> return e.directionalLight._shadowMap;
<add> })
<add> .filter((_) => _);
<ide> // cameraEntities.forEach((camera) => {
<ide> let entitiesToDraw = rendererableEntities;
<ide> if (renderView.camera.layer) {
|
|
Java
|
apache-2.0
|
11b86cb083a32362a48904c6935b85643297e8d1
| 0 |
chrishantha/jfr-flame-graph
|
package com.github.chrishantha.jfr.flamegraph.output;
import com.beust.jcommander.IStringConverter;
import com.jrockit.mc.flightrecorder.spi.IEvent;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.TimeUnit;
/**
* Different types of events possibly available in a JFR recording.
* <p>
* Each type can be activated using a command line option and can match one or many
* JFR event types. Each type knows how to convert the event into a numeric value
* that will make the flame graph most meaningful. For allocation events this would
* be the number of bytes allocated, while for file reads it would be the duration of
* the read operation.
*/
public enum EventType {
METHOD_PROFILING_SAMPLE("cpu", ValueField.COUNT, "Method Profiling Sample"),
ALLOCATION_IN_NEW_TLAB("allocation-tlab", ValueField.TLAB_SIZE, "Allocation in new TLAB"),
ALLOCATION_OUTSIDE_TLAB("allocation-outside-tlab", ValueField.ALLOCATION_SIZE, "Allocation outside TLAB"),
JAVA_EXCEPTION("exceptions", ValueField.COUNT, "Java Exception"),
JAVA_MONITOR_BLOCKED("monitor-blocked", ValueField.DURATION, "Java Monitor Blocked"),
IO("io", ValueField.DURATION, "File Read", "File Write", "Socket Read", "Socket Write");
private final String commandLineOption;
private final ValueField valueField;
private final String[] eventNames;
EventType(String commandLineOption, ValueField valueField, String... eventNames) {
this.eventNames = eventNames;
this.commandLineOption = commandLineOption;
this.valueField = valueField;
}
public boolean matches(IEvent event) {
String name = event.getEventType().getName();
return Arrays.stream(eventNames).anyMatch(name::equals);
}
public long getValue(IEvent event) {
return valueField.getValue(event);
}
@Override
public String toString() {
return commandLineOption;
}
public static final class EventTypeConverter implements IStringConverter<EventType> {
private static final Map<String, EventType> typesByOption = new HashMap<>();
static {
for (EventType type : EventType.values()) {
typesByOption.put(type.commandLineOption, type);
}
}
@Override
public EventType convert(String commandLineOption) {
EventType eventType = typesByOption.get(commandLineOption);
if (eventType == null) {
throw new IllegalArgumentException("Event type [" + commandLineOption + "] does not exist.");
}
return eventType;
}
}
private enum ValueField {
COUNT {
@Override
public long getValue(IEvent event) {
return 1;
}
},
DURATION {
@Override
public long getValue(IEvent event) {
long nanos = (long) event.getValue("(duration)");
return TimeUnit.NANOSECONDS.toMillis(nanos);
}
},
ALLOCATION_SIZE {
@Override
public long getValue(IEvent event) {
return (long) event.getValue("allocationSize") / 1000;
}
},
TLAB_SIZE {
@Override
public long getValue(IEvent event) {
return (long) event.getValue("tlabSize") / 1000;
}
};
public abstract long getValue(IEvent event);
}
}
|
src/main/java/com/github/chrishantha/jfr/flamegraph/output/EventType.java
|
package com.github.chrishantha.jfr.flamegraph.output;
import com.beust.jcommander.IStringConverter;
import com.jrockit.mc.flightrecorder.spi.IEvent;
import java.util.Arrays;
import java.util.concurrent.TimeUnit;
/**
* Different types of events possibly available in a JFR recording.
* <p>
* Each type can be activated using a command line option and can match one or many
* JFR event types. Each type knows how to convert the event into a numeric value
* that will make the flame graph most meaningful. For allocation events this would
* be the number of bytes allocated, while for file reads it would be the duration of
* the read operation.
*/
public enum EventType {
METHOD_PROFILING_SAMPLE("cpu", ValueField.COUNT, "Method Profiling Sample"),
ALLOCATION_IN_NEW_TLAB("allocation-tlab", ValueField.TLAB_SIZE, "Allocation in new TLAB"),
ALLOCATION_OUTSIDE_TLAB("allocation-outside-tlab", ValueField.ALLOCATION_SIZE, "Allocation outside TLAB"),
JAVA_EXCEPTION("exceptions", ValueField.COUNT, "Java Exception"),
JAVA_MONITOR_BLOCKED("monitor-blocked", ValueField.DURATION, "Java Monitor Blocked"),
IO("io", ValueField.DURATION, "File Read", "File Write", "Socket Read", "Socket Write");
private final String commandLineOption;
private final ValueField valueField;
private final String[] eventNames;
EventType(String commandLineOption, ValueField valueField, String... eventNames) {
this.eventNames = eventNames;
this.commandLineOption = commandLineOption;
this.valueField = valueField;
}
public boolean matches(IEvent event) {
String name = event.getEventType().getName();
return Arrays.stream(eventNames).anyMatch(name::equals);
}
public long getValue(IEvent event) {
return valueField.getValue(event);
}
@Override
public String toString() {
return commandLineOption;
}
public static final class EventTypeConverter implements IStringConverter<EventType> {
@Override
public EventType convert(String commandLineOption) {
switch (commandLineOption) {
case "allocation-tlab":
return ALLOCATION_IN_NEW_TLAB;
case "allocation-outside-tlab":
return ALLOCATION_OUTSIDE_TLAB;
case "exceptions":
return JAVA_EXCEPTION;
case "monitor-blocked":
return JAVA_MONITOR_BLOCKED;
case "cpu":
return METHOD_PROFILING_SAMPLE;
case "io":
return IO;
default:
throw new IllegalArgumentException("Event type [" + commandLineOption + "] does not exist.");
}
}
}
private enum ValueField {
COUNT {
@Override
public long getValue(IEvent event) {
return 1;
}
},
DURATION {
@Override
public long getValue(IEvent event) {
long nanos = (long) event.getValue("(duration)");
return TimeUnit.NANOSECONDS.toMillis(nanos);
}
},
ALLOCATION_SIZE {
@Override
public long getValue(IEvent event) {
return (long) event.getValue("allocationSize") / 1000;
}
},
TLAB_SIZE {
@Override
public long getValue(IEvent event) {
return (long) event.getValue("tlabSize") / 1000;
}
};
public abstract long getValue(IEvent event);
}
}
|
Cleanup EventTypeConverter
|
src/main/java/com/github/chrishantha/jfr/flamegraph/output/EventType.java
|
Cleanup EventTypeConverter
|
<ide><path>rc/main/java/com/github/chrishantha/jfr/flamegraph/output/EventType.java
<ide> import com.jrockit.mc.flightrecorder.spi.IEvent;
<ide>
<ide> import java.util.Arrays;
<add>import java.util.HashMap;
<add>import java.util.Map;
<ide> import java.util.concurrent.TimeUnit;
<ide>
<ide> /**
<ide>
<ide>
<ide> public static final class EventTypeConverter implements IStringConverter<EventType> {
<add> private static final Map<String, EventType> typesByOption = new HashMap<>();
<add>
<add> static {
<add> for (EventType type : EventType.values()) {
<add> typesByOption.put(type.commandLineOption, type);
<add> }
<add> }
<add>
<ide> @Override
<ide> public EventType convert(String commandLineOption) {
<del> switch (commandLineOption) {
<del> case "allocation-tlab":
<del> return ALLOCATION_IN_NEW_TLAB;
<del> case "allocation-outside-tlab":
<del> return ALLOCATION_OUTSIDE_TLAB;
<del> case "exceptions":
<del> return JAVA_EXCEPTION;
<del> case "monitor-blocked":
<del> return JAVA_MONITOR_BLOCKED;
<del> case "cpu":
<del> return METHOD_PROFILING_SAMPLE;
<del> case "io":
<del> return IO;
<del> default:
<del> throw new IllegalArgumentException("Event type [" + commandLineOption + "] does not exist.");
<add> EventType eventType = typesByOption.get(commandLineOption);
<add> if (eventType == null) {
<add> throw new IllegalArgumentException("Event type [" + commandLineOption + "] does not exist.");
<ide> }
<add> return eventType;
<ide> }
<ide> }
<ide>
|
|
Java
|
apache-2.0
|
error: pathspec 'iserve-discovery-disco/src/test/java/uk/ac/open/kmi/iserve/discovery/disco/impl/OperationMatchTest.java' did not match any file(s) known to git
|
ff56d518bda160e84acfdb62f452d85bf88d5ed1
| 1 |
kmi/iserve,kmi/iserve,kmi/iserve,kmi/iserve
|
package uk.ac.open.kmi.iserve.discovery.disco.impl;
import com.google.common.collect.Table;
import com.hp.hpl.jena.ontology.OntModel;
import com.hp.hpl.jena.ontology.OntModelSpec;
import com.hp.hpl.jena.rdf.model.ModelFactory;
import es.usc.citius.composit.importer.wsc.wscxml.WSCDataset;
import junit.framework.Assert;
import org.apache.log4j.BasicConfigurator;
import org.apache.log4j.Level;
import org.junit.BeforeClass;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import uk.ac.open.kmi.iserve.commons.io.Transformer;
import uk.ac.open.kmi.iserve.commons.model.*;
import uk.ac.open.kmi.iserve.discovery.api.ConceptMatcher;
import uk.ac.open.kmi.iserve.discovery.api.MatchResult;
import uk.ac.open.kmi.iserve.discovery.api.MatchType;
import uk.ac.open.kmi.iserve.discovery.disco.LogicConceptMatchType;
import uk.ac.open.kmi.iserve.sal.exception.ServiceException;
import uk.ac.open.kmi.iserve.sal.manager.impl.iServeFacade;
import java.io.File;
import java.net.URI;
import java.net.URL;
import java.util.*;
/**
* @author Pablo Rodríguez Mier
*/
public class OperationMatchTest {
private static final Logger log = LoggerFactory.getLogger(SparqlLogicConceptMatcherWSC08Test.class);
private static final String MEDIATYPE = "text/xml";
private static final String SPARQL_ENDPOINT = "http://localhost:8080/openrdf-sesame/repositories/Test";
private static final String WSC08_01 = "/WSC08/wsc08_datasets/01/";
private static final String WSC08_01_SERVICES = WSC08_01 + "services.xml";
private static final String WSC08_01_TAXONOMY_FILE = WSC08_01 + "taxonomy.owl";
private static final String WSC_01_TAXONOMY_URL = "http://localhost/wsc/01/taxonomy.owl";
private static final String WSC_01_TAXONOMY_NS = "http://localhost/wsc/01/taxonomy.owl#";
private static ConceptMatcher conceptMatcher;
private static iServeFacade manager;
@BeforeClass
public static void setUp() throws Exception {
BasicConfigurator.configure();
org.apache.log4j.Logger.getRootLogger().setLevel(Level.INFO);
manager = iServeFacade.getInstance();
// Clean the whole thing before testing
manager.clearRegistry();
conceptMatcher = new SparqlLogicConceptMatcher(SPARQL_ENDPOINT);
log.info("Importing WSC 2008 services");
String file = SparqlLogicConceptMatcherWSC08Test.class.getResource(WSC08_01_SERVICES).getFile();
log.debug("Using " + file);
File services = new File(file);
// Get base url
URL base = SparqlLogicConceptMatcherWSC08Test.class.getResource(WSC08_01);
// First load the ontology in the server to avoid issues
OntModel model = ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM);
// Fetch the model
String taxonomyFile = SparqlLogicConceptMatcherWSC08Test.class.getResource(WSC08_01_TAXONOMY_FILE).toURI().toASCIIString();
model.read(taxonomyFile);
// Upload the model first (it won't be automatically fetched as the URIs won't resolve so we do it manually)
manager.getKnowledgeBaseManager().uploadModel(URI.create(WSC_01_TAXONOMY_URL), model, true);
//List<Service> result = new WSCImporter().transform(new FileInputStream(services), null);
// Automatic plugin discovery
List<Service> result = Transformer.getInstance().transform(services, base.toURI().toASCIIString(), MEDIATYPE);
// Import all services
for (Service s : result) {
URI uri = manager.getServiceManager().addService(s);
Assert.assertNotNull(uri);
log.info("Service added: " + uri.toASCIIString());
}
}
private Service find(String name) throws ServiceException {
for(URI srvUri : manager.getServiceManager().listServices()){
if (srvUri.toASCIIString().contains(name)){
return manager.getServiceManager().getService(srvUri);
}
}
return null;
}
private MatchType dataFlowMatch(Operation op1, Operation op2){
MessageContent outOp1 = op1.getOutputs().iterator().next();
Set<URI> outputsOp1 = getModelReferences(outOp1);
MessageContent inOp2 = op2.getInputs().iterator().next();
Set<URI> inputsOp2 = getModelReferences(inOp2);
// Match
Table<URI,URI, MatchResult> result = conceptMatcher.match(outputsOp1, inputsOp2);
// TODO: This should be independent of the match type used. MatchTypes.getHighest();
MatchType best = LogicConceptMatchType.Exact;
for(URI dest :inputsOp2){
// Get all matchers and find the best
MatchType localBest = LogicConceptMatchType.Fail;
Map<URI, MatchResult> matches = result.column(dest);
// If there is no match, fail is assumed
if (matches != null){
for(MatchResult matchResult : matches.values()){
if (matchResult.getMatchType().compareTo(localBest)>=0){
localBest = matchResult.getMatchType();
}
}
}
// Downgrade the best if the local Best for the match is worse than the global best
if (localBest.compareTo(best)<=0){
best = localBest;
}
}
return best;
}
public Set<URI> getModelReferences(MessageContent msg){
Set<URI> uris = new HashSet<URI>();
for(MessagePart p : msg.getMandatoryParts()){
for(uk.ac.open.kmi.iserve.commons.model.Resource r : p.getModelReferences()){
uris.add(r.getUri());
}
}
return uris;
}
private Operation createOperationWithOutputs(Set<URI> outputs){
Operation op = new Operation(URI.create("http://localhost/op"));
MessageContent content = new MessageContent(URI.create("http://localhost/msg"));
for(URI output : outputs){
MessagePart part = new MessagePart(output);
part.addModelReference(new Resource(output));
content.addMandatoryPart(part);
}
op.addOutput(content);
return op;
}
@Test
public void testFail() throws ServiceException {
Set<URI> availableInputs = new HashSet<URI>();
availableInputs.add(URI.create(WSC_01_TAXONOMY_NS + "con1233457844"));
availableInputs.add(URI.create(WSC_01_TAXONOMY_NS + "con1849951292"));
Operation fakeOp = createOperationWithOutputs(availableInputs);
Service service = find("serv1529824753");
Operation servOp = service.getOperations().iterator().next();
MatchType type = dataFlowMatch(fakeOp, servOp);
Assert.assertEquals(LogicConceptMatchType.Fail, type);
}
@Test
public void testSubsumes() throws ServiceException {
Set<URI> availableInputs = new HashSet<URI>();
availableInputs.add(URI.create(WSC_01_TAXONOMY_NS + "con1233457844"));
availableInputs.add(URI.create(WSC_01_TAXONOMY_NS + "con1849951292"));
availableInputs.add(URI.create(WSC_01_TAXONOMY_NS + "con1988815758"));
Operation fakeOp = createOperationWithOutputs(availableInputs);
Service service = find("serv1529824753");
Operation servOp = service.getOperations().iterator().next();
MatchType type = dataFlowMatch(fakeOp, servOp);
Assert.assertEquals(LogicConceptMatchType.Subsume, type);
}
@Test
public void testPlugin() throws ServiceException {
Set<URI> availableInputs = new HashSet<URI>();
availableInputs.add(URI.create(WSC_01_TAXONOMY_NS + "con1233457844"));
availableInputs.add(URI.create(WSC_01_TAXONOMY_NS + "con1849951292"));
availableInputs.add(URI.create(WSC_01_TAXONOMY_NS + "con864995873"));
Operation fakeOp = createOperationWithOutputs(availableInputs);
Service service = find("serv1529824753");
Operation servOp = service.getOperations().iterator().next();
MatchType type = dataFlowMatch(fakeOp, servOp);
Assert.assertEquals(LogicConceptMatchType.Plugin, type);
}
@Test
public void testExact() throws ServiceException {
Set<URI> availableInputs = new HashSet<URI>();
availableInputs.add(URI.create(WSC_01_TAXONOMY_NS + "con1794855625"));
availableInputs.add(URI.create(WSC_01_TAXONOMY_NS + "con332477359"));
Operation fakeOp = createOperationWithOutputs(availableInputs);
Service service = find("serv904934656");
Operation servOp = service.getOperations().iterator().next();
MatchType type = dataFlowMatch(fakeOp, servOp);
Assert.assertEquals(LogicConceptMatchType.Exact, type);
}
}
|
iserve-discovery-disco/src/test/java/uk/ac/open/kmi/iserve/discovery/disco/impl/OperationMatchTest.java
|
Disco test added for testing operation matching
(cherry picked from commit 1f76e6d)
|
iserve-discovery-disco/src/test/java/uk/ac/open/kmi/iserve/discovery/disco/impl/OperationMatchTest.java
|
Disco test added for testing operation matching (cherry picked from commit 1f76e6d)
|
<ide><path>serve-discovery-disco/src/test/java/uk/ac/open/kmi/iserve/discovery/disco/impl/OperationMatchTest.java
<add>package uk.ac.open.kmi.iserve.discovery.disco.impl;
<add>
<add>import com.google.common.collect.Table;
<add>import com.hp.hpl.jena.ontology.OntModel;
<add>import com.hp.hpl.jena.ontology.OntModelSpec;
<add>import com.hp.hpl.jena.rdf.model.ModelFactory;
<add>import es.usc.citius.composit.importer.wsc.wscxml.WSCDataset;
<add>import junit.framework.Assert;
<add>import org.apache.log4j.BasicConfigurator;
<add>import org.apache.log4j.Level;
<add>import org.junit.BeforeClass;
<add>import org.junit.Test;
<add>import org.slf4j.Logger;
<add>import org.slf4j.LoggerFactory;
<add>import uk.ac.open.kmi.iserve.commons.io.Transformer;
<add>import uk.ac.open.kmi.iserve.commons.model.*;
<add>import uk.ac.open.kmi.iserve.discovery.api.ConceptMatcher;
<add>import uk.ac.open.kmi.iserve.discovery.api.MatchResult;
<add>import uk.ac.open.kmi.iserve.discovery.api.MatchType;
<add>import uk.ac.open.kmi.iserve.discovery.disco.LogicConceptMatchType;
<add>import uk.ac.open.kmi.iserve.sal.exception.ServiceException;
<add>import uk.ac.open.kmi.iserve.sal.manager.impl.iServeFacade;
<add>
<add>import java.io.File;
<add>import java.net.URI;
<add>import java.net.URL;
<add>import java.util.*;
<add>
<add>/**
<add> * @author Pablo Rodríguez Mier
<add> */
<add>public class OperationMatchTest {
<add> private static final Logger log = LoggerFactory.getLogger(SparqlLogicConceptMatcherWSC08Test.class);
<add>
<add> private static final String MEDIATYPE = "text/xml";
<add>
<add> private static final String SPARQL_ENDPOINT = "http://localhost:8080/openrdf-sesame/repositories/Test";
<add>
<add> private static final String WSC08_01 = "/WSC08/wsc08_datasets/01/";
<add> private static final String WSC08_01_SERVICES = WSC08_01 + "services.xml";
<add> private static final String WSC08_01_TAXONOMY_FILE = WSC08_01 + "taxonomy.owl";
<add> private static final String WSC_01_TAXONOMY_URL = "http://localhost/wsc/01/taxonomy.owl";
<add> private static final String WSC_01_TAXONOMY_NS = "http://localhost/wsc/01/taxonomy.owl#";
<add>
<add> private static ConceptMatcher conceptMatcher;
<add> private static iServeFacade manager;
<add>
<add> @BeforeClass
<add> public static void setUp() throws Exception {
<add> BasicConfigurator.configure();
<add> org.apache.log4j.Logger.getRootLogger().setLevel(Level.INFO);
<add>
<add> manager = iServeFacade.getInstance();
<add>
<add> // Clean the whole thing before testing
<add> manager.clearRegistry();
<add>
<add> conceptMatcher = new SparqlLogicConceptMatcher(SPARQL_ENDPOINT);
<add>
<add> log.info("Importing WSC 2008 services");
<add> String file = SparqlLogicConceptMatcherWSC08Test.class.getResource(WSC08_01_SERVICES).getFile();
<add> log.debug("Using " + file);
<add> File services = new File(file);
<add>
<add> // Get base url
<add> URL base = SparqlLogicConceptMatcherWSC08Test.class.getResource(WSC08_01);
<add>
<add> // First load the ontology in the server to avoid issues
<add> OntModel model = ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM);
<add> // Fetch the model
<add> String taxonomyFile = SparqlLogicConceptMatcherWSC08Test.class.getResource(WSC08_01_TAXONOMY_FILE).toURI().toASCIIString();
<add> model.read(taxonomyFile);
<add>
<add> // Upload the model first (it won't be automatically fetched as the URIs won't resolve so we do it manually)
<add> manager.getKnowledgeBaseManager().uploadModel(URI.create(WSC_01_TAXONOMY_URL), model, true);
<add>
<add> //List<Service> result = new WSCImporter().transform(new FileInputStream(services), null);
<add> // Automatic plugin discovery
<add> List<Service> result = Transformer.getInstance().transform(services, base.toURI().toASCIIString(), MEDIATYPE);
<add> // Import all services
<add> for (Service s : result) {
<add> URI uri = manager.getServiceManager().addService(s);
<add> Assert.assertNotNull(uri);
<add> log.info("Service added: " + uri.toASCIIString());
<add> }
<add> }
<add>
<add> private Service find(String name) throws ServiceException {
<add> for(URI srvUri : manager.getServiceManager().listServices()){
<add> if (srvUri.toASCIIString().contains(name)){
<add> return manager.getServiceManager().getService(srvUri);
<add> }
<add> }
<add> return null;
<add> }
<add>
<add> private MatchType dataFlowMatch(Operation op1, Operation op2){
<add> MessageContent outOp1 = op1.getOutputs().iterator().next();
<add> Set<URI> outputsOp1 = getModelReferences(outOp1);
<add>
<add> MessageContent inOp2 = op2.getInputs().iterator().next();
<add> Set<URI> inputsOp2 = getModelReferences(inOp2);
<add>
<add> // Match
<add> Table<URI,URI, MatchResult> result = conceptMatcher.match(outputsOp1, inputsOp2);
<add> // TODO: This should be independent of the match type used. MatchTypes.getHighest();
<add> MatchType best = LogicConceptMatchType.Exact;
<add> for(URI dest :inputsOp2){
<add> // Get all matchers and find the best
<add> MatchType localBest = LogicConceptMatchType.Fail;
<add> Map<URI, MatchResult> matches = result.column(dest);
<add> // If there is no match, fail is assumed
<add> if (matches != null){
<add> for(MatchResult matchResult : matches.values()){
<add> if (matchResult.getMatchType().compareTo(localBest)>=0){
<add> localBest = matchResult.getMatchType();
<add> }
<add> }
<add> }
<add> // Downgrade the best if the local Best for the match is worse than the global best
<add> if (localBest.compareTo(best)<=0){
<add> best = localBest;
<add> }
<add> }
<add> return best;
<add> }
<add>
<add> public Set<URI> getModelReferences(MessageContent msg){
<add> Set<URI> uris = new HashSet<URI>();
<add> for(MessagePart p : msg.getMandatoryParts()){
<add> for(uk.ac.open.kmi.iserve.commons.model.Resource r : p.getModelReferences()){
<add> uris.add(r.getUri());
<add> }
<add> }
<add> return uris;
<add> }
<add>
<add> private Operation createOperationWithOutputs(Set<URI> outputs){
<add> Operation op = new Operation(URI.create("http://localhost/op"));
<add> MessageContent content = new MessageContent(URI.create("http://localhost/msg"));
<add> for(URI output : outputs){
<add> MessagePart part = new MessagePart(output);
<add> part.addModelReference(new Resource(output));
<add> content.addMandatoryPart(part);
<add> }
<add> op.addOutput(content);
<add> return op;
<add> }
<add>
<add> @Test
<add> public void testFail() throws ServiceException {
<add> Set<URI> availableInputs = new HashSet<URI>();
<add> availableInputs.add(URI.create(WSC_01_TAXONOMY_NS + "con1233457844"));
<add> availableInputs.add(URI.create(WSC_01_TAXONOMY_NS + "con1849951292"));
<add>
<add> Operation fakeOp = createOperationWithOutputs(availableInputs);
<add> Service service = find("serv1529824753");
<add> Operation servOp = service.getOperations().iterator().next();
<add> MatchType type = dataFlowMatch(fakeOp, servOp);
<add> Assert.assertEquals(LogicConceptMatchType.Fail, type);
<add> }
<add>
<add> @Test
<add> public void testSubsumes() throws ServiceException {
<add> Set<URI> availableInputs = new HashSet<URI>();
<add> availableInputs.add(URI.create(WSC_01_TAXONOMY_NS + "con1233457844"));
<add> availableInputs.add(URI.create(WSC_01_TAXONOMY_NS + "con1849951292"));
<add> availableInputs.add(URI.create(WSC_01_TAXONOMY_NS + "con1988815758"));
<add>
<add> Operation fakeOp = createOperationWithOutputs(availableInputs);
<add> Service service = find("serv1529824753");
<add> Operation servOp = service.getOperations().iterator().next();
<add> MatchType type = dataFlowMatch(fakeOp, servOp);
<add> Assert.assertEquals(LogicConceptMatchType.Subsume, type);
<add> }
<add>
<add> @Test
<add> public void testPlugin() throws ServiceException {
<add> Set<URI> availableInputs = new HashSet<URI>();
<add> availableInputs.add(URI.create(WSC_01_TAXONOMY_NS + "con1233457844"));
<add> availableInputs.add(URI.create(WSC_01_TAXONOMY_NS + "con1849951292"));
<add> availableInputs.add(URI.create(WSC_01_TAXONOMY_NS + "con864995873"));
<add>
<add> Operation fakeOp = createOperationWithOutputs(availableInputs);
<add> Service service = find("serv1529824753");
<add> Operation servOp = service.getOperations().iterator().next();
<add> MatchType type = dataFlowMatch(fakeOp, servOp);
<add> Assert.assertEquals(LogicConceptMatchType.Plugin, type);
<add> }
<add>
<add>
<add> @Test
<add> public void testExact() throws ServiceException {
<add> Set<URI> availableInputs = new HashSet<URI>();
<add> availableInputs.add(URI.create(WSC_01_TAXONOMY_NS + "con1794855625"));
<add> availableInputs.add(URI.create(WSC_01_TAXONOMY_NS + "con332477359"));
<add> Operation fakeOp = createOperationWithOutputs(availableInputs);
<add> Service service = find("serv904934656");
<add> Operation servOp = service.getOperations().iterator().next();
<add> MatchType type = dataFlowMatch(fakeOp, servOp);
<add> Assert.assertEquals(LogicConceptMatchType.Exact, type);
<add> }
<add>}
|
|
JavaScript
|
mit
|
b36847d964c22817e6a9d600edfc55eaa823dd59
| 0 |
ficristo/brackets,ficristo/brackets,ficristo/brackets,sprintr/brackets,adobe/brackets,adobe/brackets,sprintr/brackets,sprintr/brackets,ficristo/brackets,ficristo/brackets,adobe/brackets,adobe/brackets,adobe/brackets,sprintr/brackets,sprintr/brackets
|
/**
* Copyright (c) 2014-present, Facebook, Inc.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
(function (global, factory) {
typeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory() :
typeof define === 'function' && define.amd ? define(factory) :
(global.Immutable = factory());
}(this, function () { 'use strict';var SLICE$0 = Array.prototype.slice;
function createClass(ctor, superClass) {
if (superClass) {
ctor.prototype = Object.create(superClass.prototype);
}
ctor.prototype.constructor = ctor;
}
function Iterable(value) {
return isIterable(value) ? value : Seq(value);
}
createClass(KeyedIterable, Iterable);
function KeyedIterable(value) {
return isKeyed(value) ? value : KeyedSeq(value);
}
createClass(IndexedIterable, Iterable);
function IndexedIterable(value) {
return isIndexed(value) ? value : IndexedSeq(value);
}
createClass(SetIterable, Iterable);
function SetIterable(value) {
return isIterable(value) && !isAssociative(value) ? value : SetSeq(value);
}
function isIterable(maybeIterable) {
return !!(maybeIterable && maybeIterable[IS_ITERABLE_SENTINEL]);
}
function isKeyed(maybeKeyed) {
return !!(maybeKeyed && maybeKeyed[IS_KEYED_SENTINEL]);
}
function isIndexed(maybeIndexed) {
return !!(maybeIndexed && maybeIndexed[IS_INDEXED_SENTINEL]);
}
function isAssociative(maybeAssociative) {
return isKeyed(maybeAssociative) || isIndexed(maybeAssociative);
}
function isOrdered(maybeOrdered) {
return !!(maybeOrdered && maybeOrdered[IS_ORDERED_SENTINEL]);
}
Iterable.isIterable = isIterable;
Iterable.isKeyed = isKeyed;
Iterable.isIndexed = isIndexed;
Iterable.isAssociative = isAssociative;
Iterable.isOrdered = isOrdered;
Iterable.Keyed = KeyedIterable;
Iterable.Indexed = IndexedIterable;
Iterable.Set = SetIterable;
var IS_ITERABLE_SENTINEL = '@@__IMMUTABLE_ITERABLE__@@';
var IS_KEYED_SENTINEL = '@@__IMMUTABLE_KEYED__@@';
var IS_INDEXED_SENTINEL = '@@__IMMUTABLE_INDEXED__@@';
var IS_ORDERED_SENTINEL = '@@__IMMUTABLE_ORDERED__@@';
// Used for setting prototype methods that IE8 chokes on.
var DELETE = 'delete';
// Constants describing the size of trie nodes.
var SHIFT = 5; // Resulted in best performance after ______?
var SIZE = 1 << SHIFT;
var MASK = SIZE - 1;
// A consistent shared value representing "not set" which equals nothing other
// than itself, and nothing that could be provided externally.
var NOT_SET = {};
// Boolean references, Rough equivalent of `bool &`.
var CHANGE_LENGTH = { value: false };
var DID_ALTER = { value: false };
function MakeRef(ref) {
ref.value = false;
return ref;
}
function SetRef(ref) {
ref && (ref.value = true);
}
// A function which returns a value representing an "owner" for transient writes
// to tries. The return value will only ever equal itself, and will not equal
// the return of any subsequent call of this function.
function OwnerID() {}
// http://jsperf.com/copy-array-inline
function arrCopy(arr, offset) {
offset = offset || 0;
var len = Math.max(0, arr.length - offset);
var newArr = new Array(len);
for (var ii = 0; ii < len; ii++) {
newArr[ii] = arr[ii + offset];
}
return newArr;
}
function ensureSize(iter) {
if (iter.size === undefined) {
iter.size = iter.__iterate(returnTrue);
}
return iter.size;
}
function wrapIndex(iter, index) {
    // This implements "is array index" which the ECMAScript spec defines as:
//
// A String property name P is an array index if and only if
// ToString(ToUint32(P)) is equal to P and ToUint32(P) is not equal
// to 2^32−1.
//
// http://www.ecma-international.org/ecma-262/6.0/#sec-array-exotic-objects
if (typeof index !== 'number') {
var uint32Index = index >>> 0; // N >>> 0 is shorthand for ToUint32
if ('' + uint32Index !== index || uint32Index === 4294967295) {
return NaN;
}
index = uint32Index;
}
return index < 0 ? ensureSize(iter) + index : index;
}
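  // --- Illustrative sketch, not part of Immutable.js: wrapIndex resolves
  // negative indices against the iterable's size and returns NaN for strings
  // that are not canonical array indices. The helper name below is
  // hypothetical and the function is never called by the library.
  function exampleWrapIndex() {
    var seq = Seq([10, 20, 30]);
    return wrapIndex(seq, -1) === 2 &&  // negative indices count from the end
      isNaN(wrapIndex(seq, '01'));      // '01' !== String(ToUint32('01')), so NaN
  }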
function returnTrue() {
return true;
}
function wholeSlice(begin, end, size) {
return (begin === 0 || (size !== undefined && begin <= -size)) &&
(end === undefined || (size !== undefined && end >= size));
}
function resolveBegin(begin, size) {
return resolveIndex(begin, size, 0);
}
function resolveEnd(end, size) {
return resolveIndex(end, size, size);
}
function resolveIndex(index, size, defaultIndex) {
return index === undefined ?
defaultIndex :
index < 0 ?
Math.max(0, size + index) :
size === undefined ?
index :
Math.min(size, index);
}
/* global Symbol */
var ITERATE_KEYS = 0;
var ITERATE_VALUES = 1;
var ITERATE_ENTRIES = 2;
var REAL_ITERATOR_SYMBOL = typeof Symbol === 'function' && Symbol.iterator;
var FAUX_ITERATOR_SYMBOL = '@@iterator';
var ITERATOR_SYMBOL = REAL_ITERATOR_SYMBOL || FAUX_ITERATOR_SYMBOL;
function Iterator(next) {
this.next = next;
}
Iterator.prototype.toString = function() {
return '[Iterator]';
};
Iterator.KEYS = ITERATE_KEYS;
Iterator.VALUES = ITERATE_VALUES;
Iterator.ENTRIES = ITERATE_ENTRIES;
Iterator.prototype.inspect =
Iterator.prototype.toSource = function () { return this.toString(); }
Iterator.prototype[ITERATOR_SYMBOL] = function () {
return this;
};
function iteratorValue(type, k, v, iteratorResult) {
var value = type === 0 ? k : type === 1 ? v : [k, v];
iteratorResult ? (iteratorResult.value = value) : (iteratorResult = {
value: value, done: false
});
return iteratorResult;
}
function iteratorDone() {
return { value: undefined, done: true };
}
function hasIterator(maybeIterable) {
return !!getIteratorFn(maybeIterable);
}
function isIterator(maybeIterator) {
return maybeIterator && typeof maybeIterator.next === 'function';
}
function getIterator(iterable) {
var iteratorFn = getIteratorFn(iterable);
return iteratorFn && iteratorFn.call(iterable);
}
function getIteratorFn(iterable) {
var iteratorFn = iterable && (
(REAL_ITERATOR_SYMBOL && iterable[REAL_ITERATOR_SYMBOL]) ||
iterable[FAUX_ITERATOR_SYMBOL]
);
if (typeof iteratorFn === 'function') {
return iteratorFn;
}
}
function isArrayLike(value) {
return value && typeof value.length === 'number';
}
createClass(Seq, Iterable);
function Seq(value) {
return value === null || value === undefined ? emptySequence() :
isIterable(value) ? value.toSeq() : seqFromValue(value);
}
Seq.of = function(/*...values*/) {
return Seq(arguments);
};
Seq.prototype.toSeq = function() {
return this;
};
Seq.prototype.toString = function() {
return this.__toString('Seq {', '}');
};
Seq.prototype.cacheResult = function() {
if (!this._cache && this.__iterateUncached) {
this._cache = this.entrySeq().toArray();
this.size = this._cache.length;
}
return this;
};
// abstract __iterateUncached(fn, reverse)
Seq.prototype.__iterate = function(fn, reverse) {
return seqIterate(this, fn, reverse, true);
};
// abstract __iteratorUncached(type, reverse)
Seq.prototype.__iterator = function(type, reverse) {
return seqIterator(this, type, reverse, true);
};
createClass(KeyedSeq, Seq);
function KeyedSeq(value) {
return value === null || value === undefined ?
emptySequence().toKeyedSeq() :
isIterable(value) ?
(isKeyed(value) ? value.toSeq() : value.fromEntrySeq()) :
keyedSeqFromValue(value);
}
KeyedSeq.prototype.toKeyedSeq = function() {
return this;
};
createClass(IndexedSeq, Seq);
function IndexedSeq(value) {
return value === null || value === undefined ? emptySequence() :
!isIterable(value) ? indexedSeqFromValue(value) :
isKeyed(value) ? value.entrySeq() : value.toIndexedSeq();
}
IndexedSeq.of = function(/*...values*/) {
return IndexedSeq(arguments);
};
IndexedSeq.prototype.toIndexedSeq = function() {
return this;
};
IndexedSeq.prototype.toString = function() {
return this.__toString('Seq [', ']');
};
IndexedSeq.prototype.__iterate = function(fn, reverse) {
return seqIterate(this, fn, reverse, false);
};
IndexedSeq.prototype.__iterator = function(type, reverse) {
return seqIterator(this, type, reverse, false);
};
createClass(SetSeq, Seq);
function SetSeq(value) {
return (
value === null || value === undefined ? emptySequence() :
!isIterable(value) ? indexedSeqFromValue(value) :
isKeyed(value) ? value.entrySeq() : value
).toSetSeq();
}
SetSeq.of = function(/*...values*/) {
return SetSeq(arguments);
};
SetSeq.prototype.toSetSeq = function() {
return this;
};
Seq.isSeq = isSeq;
Seq.Keyed = KeyedSeq;
Seq.Set = SetSeq;
Seq.Indexed = IndexedSeq;
var IS_SEQ_SENTINEL = '@@__IMMUTABLE_SEQ__@@';
Seq.prototype[IS_SEQ_SENTINEL] = true;
createClass(ArraySeq, IndexedSeq);
function ArraySeq(array) {
this._array = array;
this.size = array.length;
}
ArraySeq.prototype.get = function(index, notSetValue) {
return this.has(index) ? this._array[wrapIndex(this, index)] : notSetValue;
};
ArraySeq.prototype.__iterate = function(fn, reverse) {
var array = this._array;
var maxIndex = array.length - 1;
for (var ii = 0; ii <= maxIndex; ii++) {
if (fn(array[reverse ? maxIndex - ii : ii], ii, this) === false) {
return ii + 1;
}
}
return ii;
};
ArraySeq.prototype.__iterator = function(type, reverse) {
var array = this._array;
var maxIndex = array.length - 1;
var ii = 0;
return new Iterator(function()
{return ii > maxIndex ?
iteratorDone() :
iteratorValue(type, ii, array[reverse ? maxIndex - ii++ : ii++])}
);
};
createClass(ObjectSeq, KeyedSeq);
function ObjectSeq(object) {
var keys = Object.keys(object);
this._object = object;
this._keys = keys;
this.size = keys.length;
}
ObjectSeq.prototype.get = function(key, notSetValue) {
if (notSetValue !== undefined && !this.has(key)) {
return notSetValue;
}
return this._object[key];
};
ObjectSeq.prototype.has = function(key) {
return this._object.hasOwnProperty(key);
};
ObjectSeq.prototype.__iterate = function(fn, reverse) {
var object = this._object;
var keys = this._keys;
var maxIndex = keys.length - 1;
for (var ii = 0; ii <= maxIndex; ii++) {
var key = keys[reverse ? maxIndex - ii : ii];
if (fn(object[key], key, this) === false) {
return ii + 1;
}
}
return ii;
};
ObjectSeq.prototype.__iterator = function(type, reverse) {
var object = this._object;
var keys = this._keys;
var maxIndex = keys.length - 1;
var ii = 0;
return new Iterator(function() {
var key = keys[reverse ? maxIndex - ii : ii];
return ii++ > maxIndex ?
iteratorDone() :
iteratorValue(type, key, object[key]);
});
};
ObjectSeq.prototype[IS_ORDERED_SENTINEL] = true;
createClass(IterableSeq, IndexedSeq);
function IterableSeq(iterable) {
this._iterable = iterable;
this.size = iterable.length || iterable.size;
}
IterableSeq.prototype.__iterateUncached = function(fn, reverse) {
if (reverse) {
return this.cacheResult().__iterate(fn, reverse);
}
var iterable = this._iterable;
var iterator = getIterator(iterable);
var iterations = 0;
if (isIterator(iterator)) {
var step;
while (!(step = iterator.next()).done) {
if (fn(step.value, iterations++, this) === false) {
break;
}
}
}
return iterations;
};
IterableSeq.prototype.__iteratorUncached = function(type, reverse) {
if (reverse) {
return this.cacheResult().__iterator(type, reverse);
}
var iterable = this._iterable;
var iterator = getIterator(iterable);
if (!isIterator(iterator)) {
return new Iterator(iteratorDone);
}
var iterations = 0;
return new Iterator(function() {
var step = iterator.next();
return step.done ? step : iteratorValue(type, iterations++, step.value);
});
};
createClass(IteratorSeq, IndexedSeq);
function IteratorSeq(iterator) {
this._iterator = iterator;
this._iteratorCache = [];
}
IteratorSeq.prototype.__iterateUncached = function(fn, reverse) {
if (reverse) {
return this.cacheResult().__iterate(fn, reverse);
}
var iterator = this._iterator;
var cache = this._iteratorCache;
var iterations = 0;
while (iterations < cache.length) {
if (fn(cache[iterations], iterations++, this) === false) {
return iterations;
}
}
var step;
while (!(step = iterator.next()).done) {
var val = step.value;
cache[iterations] = val;
if (fn(val, iterations++, this) === false) {
break;
}
}
return iterations;
};
IteratorSeq.prototype.__iteratorUncached = function(type, reverse) {
if (reverse) {
return this.cacheResult().__iterator(type, reverse);
}
var iterator = this._iterator;
var cache = this._iteratorCache;
var iterations = 0;
return new Iterator(function() {
if (iterations >= cache.length) {
var step = iterator.next();
if (step.done) {
return step;
}
cache[iterations] = step.value;
}
return iteratorValue(type, iterations, cache[iterations++]);
});
};
// # pragma Helper functions
function isSeq(maybeSeq) {
return !!(maybeSeq && maybeSeq[IS_SEQ_SENTINEL]);
}
var EMPTY_SEQ;
function emptySequence() {
return EMPTY_SEQ || (EMPTY_SEQ = new ArraySeq([]));
}
function keyedSeqFromValue(value) {
var seq =
Array.isArray(value) ? new ArraySeq(value).fromEntrySeq() :
isIterator(value) ? new IteratorSeq(value).fromEntrySeq() :
hasIterator(value) ? new IterableSeq(value).fromEntrySeq() :
typeof value === 'object' ? new ObjectSeq(value) :
undefined;
if (!seq) {
throw new TypeError(
'Expected Array or iterable object of [k, v] entries, '+
'or keyed object: ' + value
);
}
return seq;
}
function indexedSeqFromValue(value) {
var seq = maybeIndexedSeqFromValue(value);
if (!seq) {
throw new TypeError(
'Expected Array or iterable object of values: ' + value
);
}
return seq;
}
function seqFromValue(value) {
var seq = maybeIndexedSeqFromValue(value) ||
(typeof value === 'object' && new ObjectSeq(value));
if (!seq) {
throw new TypeError(
'Expected Array or iterable object of values, or keyed object: ' + value
);
}
return seq;
}
function maybeIndexedSeqFromValue(value) {
return (
isArrayLike(value) ? new ArraySeq(value) :
isIterator(value) ? new IteratorSeq(value) :
hasIterator(value) ? new IterableSeq(value) :
undefined
);
}
function seqIterate(seq, fn, reverse, useKeys) {
var cache = seq._cache;
if (cache) {
var maxIndex = cache.length - 1;
for (var ii = 0; ii <= maxIndex; ii++) {
var entry = cache[reverse ? maxIndex - ii : ii];
if (fn(entry[1], useKeys ? entry[0] : ii, seq) === false) {
return ii + 1;
}
}
return ii;
}
return seq.__iterateUncached(fn, reverse);
}
function seqIterator(seq, type, reverse, useKeys) {
var cache = seq._cache;
if (cache) {
var maxIndex = cache.length - 1;
var ii = 0;
return new Iterator(function() {
var entry = cache[reverse ? maxIndex - ii : ii];
return ii++ > maxIndex ?
iteratorDone() :
iteratorValue(type, useKeys ? entry[0] : ii - 1, entry[1]);
});
}
return seq.__iteratorUncached(type, reverse);
}
function fromJS(json, converter) {
return converter ?
fromJSWith(converter, json, '', {'': json}) :
fromJSDefault(json);
}
function fromJSWith(converter, json, key, parentJSON) {
if (Array.isArray(json)) {
return converter.call(parentJSON, key, IndexedSeq(json).map(function(v, k) {return fromJSWith(converter, v, k, json)}));
}
if (isPlainObj(json)) {
return converter.call(parentJSON, key, KeyedSeq(json).map(function(v, k) {return fromJSWith(converter, v, k, json)}));
}
return json;
}
function fromJSDefault(json) {
if (Array.isArray(json)) {
return IndexedSeq(json).map(fromJSDefault).toList();
}
if (isPlainObj(json)) {
return KeyedSeq(json).map(fromJSDefault).toMap();
}
return json;
}
function isPlainObj(value) {
return value && (value.constructor === Object || value.constructor === undefined);
}
/**
* An extension of the "same-value" algorithm as [described for use by ES6 Map
* and Set](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Map#Key_equality)
*
* NaN is considered the same as NaN, however -0 and 0 are considered the same
* value, which is different from the algorithm described by
* [`Object.is`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/is).
*
* This is extended further to allow Objects to describe the values they
* represent, by way of `valueOf` or `equals` (and `hashCode`).
*
* Note: because of this extension, the key equality of Immutable.Map and the
* value equality of Immutable.Set will differ from ES6 Map and Set.
*
* ### Defining custom values
*
* The easiest way to describe the value an object represents is by implementing
* `valueOf`. For example, `Date` represents a value by returning a unix
* timestamp for `valueOf`:
*
* var date1 = new Date(1234567890000); // Fri Feb 13 2009 ...
* var date2 = new Date(1234567890000);
* date1.valueOf(); // 1234567890000
* assert( date1 !== date2 );
* assert( Immutable.is( date1, date2 ) );
*
* Note: overriding `valueOf` may have other implications if you use this object
* where JavaScript expects a primitive, such as implicit string coercion.
*
* For more complex types, especially collections, implementing `valueOf` may
* not be performant. An alternative is to implement `equals` and `hashCode`.
*
* `equals` takes another object, presumably of similar type, and returns true
   * if it is equal. Equality is symmetrical, so the same result should be
* returned if this and the argument are flipped.
*
* assert( a.equals(b) === b.equals(a) );
*
* `hashCode` returns a 32bit integer number representing the object which will
* be used to determine how to store the value object in a Map or Set. You must
   * provide both methods or neither; one must not exist without the other.
*
* Also, an important relationship between these methods must be upheld: if two
* values are equal, they *must* return the same hashCode. If the values are not
* equal, they might have the same hashCode; this is called a hash collision,
* and while undesirable for performance reasons, it is acceptable.
*
* if (a.equals(b)) {
* assert( a.hashCode() === b.hashCode() );
* }
*
* All Immutable collections implement `equals` and `hashCode`.
*
*/
function is(valueA, valueB) {
if (valueA === valueB || (valueA !== valueA && valueB !== valueB)) {
return true;
}
if (!valueA || !valueB) {
return false;
}
if (typeof valueA.valueOf === 'function' &&
typeof valueB.valueOf === 'function') {
valueA = valueA.valueOf();
valueB = valueB.valueOf();
if (valueA === valueB || (valueA !== valueA && valueB !== valueB)) {
return true;
}
if (!valueA || !valueB) {
return false;
}
}
if (typeof valueA.equals === 'function' &&
typeof valueB.equals === 'function' &&
valueA.equals(valueB)) {
return true;
}
return false;
}
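  // --- Illustrative sketch, not part of Immutable.js: a value type that opts
  // into the extended equality described above by implementing both `equals`
  // and `hashCode`. `Point` and `examplePointEquality` are hypothetical names
  // used only for this example; the function is never called by the library.
  function examplePointEquality() {
    function Point(x, y) { this.x = x; this.y = y; }
    Point.prototype.equals = function(other) {
      return other instanceof Point && other.x === this.x && other.y === this.y;
    };
    Point.prototype.hashCode = function() {
      // Equal points must produce equal hash codes; keep the result in smi range.
      return smi(31 * hash(this.x) + hash(this.y));
    };
    // is() now treats two distinct-but-equal instances as the same value,
    // so they collide as Map keys and Set members.
    return is(new Point(1, 2), new Point(1, 2)); // true
  }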
function deepEqual(a, b) {
if (a === b) {
return true;
}
if (
!isIterable(b) ||
a.size !== undefined && b.size !== undefined && a.size !== b.size ||
a.__hash !== undefined && b.__hash !== undefined && a.__hash !== b.__hash ||
isKeyed(a) !== isKeyed(b) ||
isIndexed(a) !== isIndexed(b) ||
isOrdered(a) !== isOrdered(b)
) {
return false;
}
if (a.size === 0 && b.size === 0) {
return true;
}
var notAssociative = !isAssociative(a);
if (isOrdered(a)) {
var entries = a.entries();
return b.every(function(v, k) {
var entry = entries.next().value;
return entry && is(entry[1], v) && (notAssociative || is(entry[0], k));
}) && entries.next().done;
}
var flipped = false;
if (a.size === undefined) {
if (b.size === undefined) {
if (typeof a.cacheResult === 'function') {
a.cacheResult();
}
} else {
flipped = true;
var _ = a;
a = b;
b = _;
}
}
var allEqual = true;
var bSize = b.__iterate(function(v, k) {
if (notAssociative ? !a.has(v) :
flipped ? !is(v, a.get(k, NOT_SET)) : !is(a.get(k, NOT_SET), v)) {
allEqual = false;
return false;
}
});
return allEqual && a.size === bSize;
}
createClass(Repeat, IndexedSeq);
function Repeat(value, times) {
if (!(this instanceof Repeat)) {
return new Repeat(value, times);
}
this._value = value;
this.size = times === undefined ? Infinity : Math.max(0, times);
if (this.size === 0) {
if (EMPTY_REPEAT) {
return EMPTY_REPEAT;
}
EMPTY_REPEAT = this;
}
}
Repeat.prototype.toString = function() {
if (this.size === 0) {
return 'Repeat []';
}
return 'Repeat [ ' + this._value + ' ' + this.size + ' times ]';
};
Repeat.prototype.get = function(index, notSetValue) {
return this.has(index) ? this._value : notSetValue;
};
Repeat.prototype.includes = function(searchValue) {
return is(this._value, searchValue);
};
Repeat.prototype.slice = function(begin, end) {
var size = this.size;
return wholeSlice(begin, end, size) ? this :
new Repeat(this._value, resolveEnd(end, size) - resolveBegin(begin, size));
};
Repeat.prototype.reverse = function() {
return this;
};
Repeat.prototype.indexOf = function(searchValue) {
if (is(this._value, searchValue)) {
return 0;
}
return -1;
};
Repeat.prototype.lastIndexOf = function(searchValue) {
if (is(this._value, searchValue)) {
return this.size;
}
return -1;
};
Repeat.prototype.__iterate = function(fn, reverse) {
for (var ii = 0; ii < this.size; ii++) {
if (fn(this._value, ii, this) === false) {
return ii + 1;
}
}
return ii;
};
Repeat.prototype.__iterator = function(type, reverse) {var this$0 = this;
var ii = 0;
return new Iterator(function()
{return ii < this$0.size ? iteratorValue(type, ii++, this$0._value) : iteratorDone()}
);
};
Repeat.prototype.equals = function(other) {
return other instanceof Repeat ?
is(this._value, other._value) :
deepEqual(other);
};
var EMPTY_REPEAT;
function invariant(condition, error) {
if (!condition) throw new Error(error);
}
createClass(Range, IndexedSeq);
function Range(start, end, step) {
if (!(this instanceof Range)) {
return new Range(start, end, step);
}
invariant(step !== 0, 'Cannot step a Range by 0');
start = start || 0;
if (end === undefined) {
end = Infinity;
}
step = step === undefined ? 1 : Math.abs(step);
if (end < start) {
step = -step;
}
this._start = start;
this._end = end;
this._step = step;
this.size = Math.max(0, Math.ceil((end - start) / step - 1) + 1);
if (this.size === 0) {
if (EMPTY_RANGE) {
return EMPTY_RANGE;
}
EMPTY_RANGE = this;
}
}
Range.prototype.toString = function() {
if (this.size === 0) {
return 'Range []';
}
return 'Range [ ' +
this._start + '...' + this._end +
(this._step !== 1 ? ' by ' + this._step : '') +
' ]';
};
Range.prototype.get = function(index, notSetValue) {
return this.has(index) ?
this._start + wrapIndex(this, index) * this._step :
notSetValue;
};
Range.prototype.includes = function(searchValue) {
var possibleIndex = (searchValue - this._start) / this._step;
return possibleIndex >= 0 &&
possibleIndex < this.size &&
possibleIndex === Math.floor(possibleIndex);
};
Range.prototype.slice = function(begin, end) {
if (wholeSlice(begin, end, this.size)) {
return this;
}
begin = resolveBegin(begin, this.size);
end = resolveEnd(end, this.size);
if (end <= begin) {
return new Range(0, 0);
}
return new Range(this.get(begin, this._end), this.get(end, this._end), this._step);
};
Range.prototype.indexOf = function(searchValue) {
var offsetValue = searchValue - this._start;
if (offsetValue % this._step === 0) {
var index = offsetValue / this._step;
if (index >= 0 && index < this.size) {
return index
}
}
return -1;
};
Range.prototype.lastIndexOf = function(searchValue) {
return this.indexOf(searchValue);
};
Range.prototype.__iterate = function(fn, reverse) {
var maxIndex = this.size - 1;
var step = this._step;
var value = reverse ? this._start + maxIndex * step : this._start;
for (var ii = 0; ii <= maxIndex; ii++) {
if (fn(value, ii, this) === false) {
return ii + 1;
}
value += reverse ? -step : step;
}
return ii;
};
Range.prototype.__iterator = function(type, reverse) {
var maxIndex = this.size - 1;
var step = this._step;
var value = reverse ? this._start + maxIndex * step : this._start;
var ii = 0;
return new Iterator(function() {
var v = value;
value += reverse ? -step : step;
return ii > maxIndex ? iteratorDone() : iteratorValue(type, ii++, v);
});
};
Range.prototype.equals = function(other) {
return other instanceof Range ?
this._start === other._start &&
this._end === other._end &&
this._step === other._step :
deepEqual(this, other);
};
var EMPTY_RANGE;
createClass(Collection, Iterable);
function Collection() {
throw TypeError('Abstract');
}
createClass(KeyedCollection, Collection);function KeyedCollection() {}
createClass(IndexedCollection, Collection);function IndexedCollection() {}
createClass(SetCollection, Collection);function SetCollection() {}
Collection.Keyed = KeyedCollection;
Collection.Indexed = IndexedCollection;
Collection.Set = SetCollection;
var imul =
typeof Math.imul === 'function' && Math.imul(0xffffffff, 2) === -2 ?
Math.imul :
function imul(a, b) {
a = a | 0; // int
b = b | 0; // int
var c = a & 0xffff;
var d = b & 0xffff;
// Shift by 0 fixes the sign on the high part.
return (c * d) + ((((a >>> 16) * d + c * (b >>> 16)) << 16) >>> 0) | 0; // int
};
// v8 has an optimization for storing 31-bit signed numbers.
// Values which have either 00 or 11 as the high order bits qualify.
// This function drops the highest order bit in a signed number, maintaining
// the sign bit.
function smi(i32) {
return ((i32 >>> 1) & 0x40000000) | (i32 & 0xBFFFFFFF);
}
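  // --- Illustrative sketch, not part of Immutable.js: smi() passes values that
  // already fit in 31 bits through unchanged and folds larger magnitudes back
  // into that range while preserving the sign bit. Hypothetical helper, never
  // called by the library.
  function exampleSmi() {
    return smi(123) === 123 &&  // small positive ints are unchanged
      smi(-1) === -1;           // the sign bit survives the fold
  }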
function hash(o) {
if (o === false || o === null || o === undefined) {
return 0;
}
if (typeof o.valueOf === 'function') {
o = o.valueOf();
if (o === false || o === null || o === undefined) {
return 0;
}
}
if (o === true) {
return 1;
}
var type = typeof o;
if (type === 'number') {
if (o !== o || o === Infinity) {
return 0;
}
var h = o | 0;
if (h !== o) {
h ^= o * 0xFFFFFFFF;
}
while (o > 0xFFFFFFFF) {
o /= 0xFFFFFFFF;
h ^= o;
}
return smi(h);
}
if (type === 'string') {
return o.length > STRING_HASH_CACHE_MIN_STRLEN ? cachedHashString(o) : hashString(o);
}
if (typeof o.hashCode === 'function') {
return o.hashCode();
}
if (type === 'object') {
return hashJSObj(o);
}
if (typeof o.toString === 'function') {
return hashString(o.toString());
}
throw new Error('Value type ' + type + ' cannot be hashed.');
}
function cachedHashString(string) {
var hash = stringHashCache[string];
if (hash === undefined) {
hash = hashString(string);
if (STRING_HASH_CACHE_SIZE === STRING_HASH_CACHE_MAX_SIZE) {
STRING_HASH_CACHE_SIZE = 0;
stringHashCache = {};
}
STRING_HASH_CACHE_SIZE++;
stringHashCache[string] = hash;
}
return hash;
}
// http://jsperf.com/hashing-strings
function hashString(string) {
// This is the hash from JVM
// The hash code for a string is computed as
// s[0] * 31 ^ (n - 1) + s[1] * 31 ^ (n - 2) + ... + s[n - 1],
// where s[i] is the ith character of the string and n is the length of
// the string. We "mod" the result to make it between 0 (inclusive) and 2^31
// (exclusive) by dropping high bits.
var hash = 0;
for (var ii = 0; ii < string.length; ii++) {
hash = 31 * hash + string.charCodeAt(ii) | 0;
}
return smi(hash);
}
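  // --- Worked example, not part of Immutable.js: hashString follows the JVM
  // String hash, so hashString('abc') is 31 * (31 * 97 + 98) + 99 = 96354,
  // which already lies within smi range. Hypothetical helper, never called by
  // the library.
  function exampleStringHash() {
    return hashString('abc') === 96354;
  }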
function hashJSObj(obj) {
var hash;
if (usingWeakMap) {
hash = weakMap.get(obj);
if (hash !== undefined) {
return hash;
}
}
hash = obj[UID_HASH_KEY];
if (hash !== undefined) {
return hash;
}
if (!canDefineProperty) {
hash = obj.propertyIsEnumerable && obj.propertyIsEnumerable[UID_HASH_KEY];
if (hash !== undefined) {
return hash;
}
hash = getIENodeHash(obj);
if (hash !== undefined) {
return hash;
}
}
hash = ++objHashUID;
if (objHashUID & 0x40000000) {
objHashUID = 0;
}
if (usingWeakMap) {
weakMap.set(obj, hash);
} else if (isExtensible !== undefined && isExtensible(obj) === false) {
throw new Error('Non-extensible objects are not allowed as keys.');
} else if (canDefineProperty) {
Object.defineProperty(obj, UID_HASH_KEY, {
'enumerable': false,
'configurable': false,
'writable': false,
'value': hash
});
} else if (obj.propertyIsEnumerable !== undefined &&
obj.propertyIsEnumerable === obj.constructor.prototype.propertyIsEnumerable) {
// Since we can't define a non-enumerable property on the object
// we'll hijack one of the less-used non-enumerable properties to
// save our hash on it. Since this is a function it will not show up in
// `JSON.stringify` which is what we want.
obj.propertyIsEnumerable = function() {
return this.constructor.prototype.propertyIsEnumerable.apply(this, arguments);
};
obj.propertyIsEnumerable[UID_HASH_KEY] = hash;
} else if (obj.nodeType !== undefined) {
// At this point we couldn't get the IE `uniqueID` to use as a hash
// and we couldn't use a non-enumerable property to exploit the
// dontEnum bug so we simply add the `UID_HASH_KEY` on the node
// itself.
obj[UID_HASH_KEY] = hash;
} else {
throw new Error('Unable to set a non-enumerable property on object.');
}
return hash;
}
// Get references to ES5 object methods.
var isExtensible = Object.isExtensible;
// True if Object.defineProperty works as expected. IE8 fails this test.
var canDefineProperty = (function() {
try {
Object.defineProperty({}, '@', {});
return true;
} catch (e) {
return false;
}
}());
// IE has a `uniqueID` property on DOM nodes. We can construct the hash from it
// and avoid memory leaks from the IE cloneNode bug.
function getIENodeHash(node) {
if (node && node.nodeType > 0) {
switch (node.nodeType) {
case 1: // Element
return node.uniqueID;
case 9: // Document
return node.documentElement && node.documentElement.uniqueID;
}
}
}
// If possible, use a WeakMap.
var usingWeakMap = typeof WeakMap === 'function';
var weakMap;
if (usingWeakMap) {
weakMap = new WeakMap();
}
var objHashUID = 0;
var UID_HASH_KEY = '__immutablehash__';
if (typeof Symbol === 'function') {
UID_HASH_KEY = Symbol(UID_HASH_KEY);
}
var STRING_HASH_CACHE_MIN_STRLEN = 16;
var STRING_HASH_CACHE_MAX_SIZE = 255;
var STRING_HASH_CACHE_SIZE = 0;
var stringHashCache = {};
function assertNotInfinite(size) {
invariant(
size !== Infinity,
'Cannot perform this action with an infinite size.'
);
}
createClass(Map, KeyedCollection);
// @pragma Construction
function Map(value) {
return value === null || value === undefined ? emptyMap() :
isMap(value) && !isOrdered(value) ? value :
emptyMap().withMutations(function(map ) {
var iter = KeyedIterable(value);
assertNotInfinite(iter.size);
iter.forEach(function(v, k) {return map.set(k, v)});
});
}
Map.of = function() {var keyValues = SLICE$0.call(arguments, 0);
return emptyMap().withMutations(function(map ) {
for (var i = 0; i < keyValues.length; i += 2) {
if (i + 1 >= keyValues.length) {
throw new Error('Missing value for key: ' + keyValues[i]);
}
map.set(keyValues[i], keyValues[i + 1]);
}
});
};
Map.prototype.toString = function() {
return this.__toString('Map {', '}');
};
// @pragma Access
Map.prototype.get = function(k, notSetValue) {
return this._root ?
this._root.get(0, undefined, k, notSetValue) :
notSetValue;
};
// @pragma Modification
Map.prototype.set = function(k, v) {
return updateMap(this, k, v);
};
Map.prototype.setIn = function(keyPath, v) {
return this.updateIn(keyPath, NOT_SET, function() {return v});
};
Map.prototype.remove = function(k) {
return updateMap(this, k, NOT_SET);
};
Map.prototype.deleteIn = function(keyPath) {
return this.updateIn(keyPath, function() {return NOT_SET});
};
Map.prototype.update = function(k, notSetValue, updater) {
return arguments.length === 1 ?
k(this) :
this.updateIn([k], notSetValue, updater);
};
Map.prototype.updateIn = function(keyPath, notSetValue, updater) {
if (!updater) {
updater = notSetValue;
notSetValue = undefined;
}
var updatedValue = updateInDeepMap(
this,
forceIterator(keyPath),
notSetValue,
updater
);
return updatedValue === NOT_SET ? undefined : updatedValue;
};
Map.prototype.clear = function() {
if (this.size === 0) {
return this;
}
if (this.__ownerID) {
this.size = 0;
this._root = null;
this.__hash = undefined;
this.__altered = true;
return this;
}
return emptyMap();
};
// @pragma Composition
Map.prototype.merge = function(/*...iters*/) {
return mergeIntoMapWith(this, undefined, arguments);
};
Map.prototype.mergeWith = function(merger) {var iters = SLICE$0.call(arguments, 1);
return mergeIntoMapWith(this, merger, iters);
};
Map.prototype.mergeIn = function(keyPath) {var iters = SLICE$0.call(arguments, 1);
return this.updateIn(
keyPath,
emptyMap(),
function(m ) {return typeof m.merge === 'function' ?
m.merge.apply(m, iters) :
iters[iters.length - 1]}
);
};
Map.prototype.mergeDeep = function(/*...iters*/) {
return mergeIntoMapWith(this, deepMerger, arguments);
};
Map.prototype.mergeDeepWith = function(merger) {var iters = SLICE$0.call(arguments, 1);
return mergeIntoMapWith(this, deepMergerWith(merger), iters);
};
Map.prototype.mergeDeepIn = function(keyPath) {var iters = SLICE$0.call(arguments, 1);
return this.updateIn(
keyPath,
emptyMap(),
function(m ) {return typeof m.mergeDeep === 'function' ?
m.mergeDeep.apply(m, iters) :
iters[iters.length - 1]}
);
};
Map.prototype.sort = function(comparator) {
// Late binding
return OrderedMap(sortFactory(this, comparator));
};
Map.prototype.sortBy = function(mapper, comparator) {
// Late binding
return OrderedMap(sortFactory(this, comparator, mapper));
};
// @pragma Mutability
Map.prototype.withMutations = function(fn) {
var mutable = this.asMutable();
fn(mutable);
return mutable.wasAltered() ? mutable.__ensureOwner(this.__ownerID) : this;
};
Map.prototype.asMutable = function() {
return this.__ownerID ? this : this.__ensureOwner(new OwnerID());
};
Map.prototype.asImmutable = function() {
return this.__ensureOwner();
};
Map.prototype.wasAltered = function() {
return this.__altered;
};
Map.prototype.__iterator = function(type, reverse) {
return new MapIterator(this, type, reverse);
};
Map.prototype.__iterate = function(fn, reverse) {var this$0 = this;
var iterations = 0;
this._root && this._root.iterate(function(entry ) {
iterations++;
return fn(entry[1], entry[0], this$0);
}, reverse);
return iterations;
};
Map.prototype.__ensureOwner = function(ownerID) {
if (ownerID === this.__ownerID) {
return this;
}
if (!ownerID) {
this.__ownerID = ownerID;
this.__altered = false;
return this;
}
return makeMap(this.size, this._root, ownerID, this.__hash);
};
function isMap(maybeMap) {
return !!(maybeMap && maybeMap[IS_MAP_SENTINEL]);
}
Map.isMap = isMap;
var IS_MAP_SENTINEL = '@@__IMMUTABLE_MAP__@@';
var MapPrototype = Map.prototype;
MapPrototype[IS_MAP_SENTINEL] = true;
MapPrototype[DELETE] = MapPrototype.remove;
MapPrototype.removeIn = MapPrototype.deleteIn;
// #pragma Trie Nodes
function ArrayMapNode(ownerID, entries) {
this.ownerID = ownerID;
this.entries = entries;
}
ArrayMapNode.prototype.get = function(shift, keyHash, key, notSetValue) {
var entries = this.entries;
for (var ii = 0, len = entries.length; ii < len; ii++) {
if (is(key, entries[ii][0])) {
return entries[ii][1];
}
}
return notSetValue;
};
ArrayMapNode.prototype.update = function(ownerID, shift, keyHash, key, value, didChangeSize, didAlter) {
var removed = value === NOT_SET;
var entries = this.entries;
var idx = 0;
for (var len = entries.length; idx < len; idx++) {
if (is(key, entries[idx][0])) {
break;
}
}
var exists = idx < len;
if (exists ? entries[idx][1] === value : removed) {
return this;
}
SetRef(didAlter);
(removed || !exists) && SetRef(didChangeSize);
if (removed && entries.length === 1) {
return; // undefined
}
if (!exists && !removed && entries.length >= MAX_ARRAY_MAP_SIZE) {
return createNodes(ownerID, entries, key, value);
}
var isEditable = ownerID && ownerID === this.ownerID;
var newEntries = isEditable ? entries : arrCopy(entries);
if (exists) {
if (removed) {
idx === len - 1 ? newEntries.pop() : (newEntries[idx] = newEntries.pop());
} else {
newEntries[idx] = [key, value];
}
} else {
newEntries.push([key, value]);
}
if (isEditable) {
this.entries = newEntries;
return this;
}
return new ArrayMapNode(ownerID, newEntries);
};
function BitmapIndexedNode(ownerID, bitmap, nodes) {
this.ownerID = ownerID;
this.bitmap = bitmap;
this.nodes = nodes;
}
BitmapIndexedNode.prototype.get = function(shift, keyHash, key, notSetValue) {
if (keyHash === undefined) {
keyHash = hash(key);
}
var bit = (1 << ((shift === 0 ? keyHash : keyHash >>> shift) & MASK));
var bitmap = this.bitmap;
return (bitmap & bit) === 0 ? notSetValue :
this.nodes[popCount(bitmap & (bit - 1))].get(shift + SHIFT, keyHash, key, notSetValue);
};
BitmapIndexedNode.prototype.update = function(ownerID, shift, keyHash, key, value, didChangeSize, didAlter) {
if (keyHash === undefined) {
keyHash = hash(key);
}
var keyHashFrag = (shift === 0 ? keyHash : keyHash >>> shift) & MASK;
var bit = 1 << keyHashFrag;
var bitmap = this.bitmap;
var exists = (bitmap & bit) !== 0;
if (!exists && value === NOT_SET) {
return this;
}
var idx = popCount(bitmap & (bit - 1));
var nodes = this.nodes;
var node = exists ? nodes[idx] : undefined;
var newNode = updateNode(node, ownerID, shift + SHIFT, keyHash, key, value, didChangeSize, didAlter);
if (newNode === node) {
return this;
}
if (!exists && newNode && nodes.length >= MAX_BITMAP_INDEXED_SIZE) {
return expandNodes(ownerID, nodes, bitmap, keyHashFrag, newNode);
}
if (exists && !newNode && nodes.length === 2 && isLeafNode(nodes[idx ^ 1])) {
return nodes[idx ^ 1];
}
if (exists && newNode && nodes.length === 1 && isLeafNode(newNode)) {
return newNode;
}
var isEditable = ownerID && ownerID === this.ownerID;
var newBitmap = exists ? newNode ? bitmap : bitmap ^ bit : bitmap | bit;
var newNodes = exists ? newNode ?
setIn(nodes, idx, newNode, isEditable) :
spliceOut(nodes, idx, isEditable) :
spliceIn(nodes, idx, newNode, isEditable);
if (isEditable) {
this.bitmap = newBitmap;
this.nodes = newNodes;
return this;
}
return new BitmapIndexedNode(ownerID, newBitmap, newNodes);
};
function HashArrayMapNode(ownerID, count, nodes) {
this.ownerID = ownerID;
this.count = count;
this.nodes = nodes;
}
HashArrayMapNode.prototype.get = function(shift, keyHash, key, notSetValue) {
if (keyHash === undefined) {
keyHash = hash(key);
}
var idx = (shift === 0 ? keyHash : keyHash >>> shift) & MASK;
var node = this.nodes[idx];
return node ? node.get(shift + SHIFT, keyHash, key, notSetValue) : notSetValue;
};
HashArrayMapNode.prototype.update = function(ownerID, shift, keyHash, key, value, didChangeSize, didAlter) {
if (keyHash === undefined) {
keyHash = hash(key);
}
var idx = (shift === 0 ? keyHash : keyHash >>> shift) & MASK;
var removed = value === NOT_SET;
var nodes = this.nodes;
var node = nodes[idx];
if (removed && !node) {
return this;
}
var newNode = updateNode(node, ownerID, shift + SHIFT, keyHash, key, value, didChangeSize, didAlter);
if (newNode === node) {
return this;
}
var newCount = this.count;
if (!node) {
newCount++;
} else if (!newNode) {
newCount--;
if (newCount < MIN_HASH_ARRAY_MAP_SIZE) {
return packNodes(ownerID, nodes, newCount, idx);
}
}
var isEditable = ownerID && ownerID === this.ownerID;
var newNodes = setIn(nodes, idx, newNode, isEditable);
if (isEditable) {
this.count = newCount;
this.nodes = newNodes;
return this;
}
return new HashArrayMapNode(ownerID, newCount, newNodes);
};
function HashCollisionNode(ownerID, keyHash, entries) {
this.ownerID = ownerID;
this.keyHash = keyHash;
this.entries = entries;
}
HashCollisionNode.prototype.get = function(shift, keyHash, key, notSetValue) {
var entries = this.entries;
for (var ii = 0, len = entries.length; ii < len; ii++) {
if (is(key, entries[ii][0])) {
return entries[ii][1];
}
}
return notSetValue;
};
HashCollisionNode.prototype.update = function(ownerID, shift, keyHash, key, value, didChangeSize, didAlter) {
if (keyHash === undefined) {
keyHash = hash(key);
}
var removed = value === NOT_SET;
if (keyHash !== this.keyHash) {
if (removed) {
return this;
}
SetRef(didAlter);
SetRef(didChangeSize);
return mergeIntoNode(this, ownerID, shift, keyHash, [key, value]);
}
var entries = this.entries;
var idx = 0;
for (var len = entries.length; idx < len; idx++) {
if (is(key, entries[idx][0])) {
break;
}
}
var exists = idx < len;
if (exists ? entries[idx][1] === value : removed) {
return this;
}
SetRef(didAlter);
(removed || !exists) && SetRef(didChangeSize);
if (removed && len === 2) {
return new ValueNode(ownerID, this.keyHash, entries[idx ^ 1]);
}
var isEditable = ownerID && ownerID === this.ownerID;
var newEntries = isEditable ? entries : arrCopy(entries);
if (exists) {
if (removed) {
idx === len - 1 ? newEntries.pop() : (newEntries[idx] = newEntries.pop());
} else {
newEntries[idx] = [key, value];
}
} else {
newEntries.push([key, value]);
}
if (isEditable) {
this.entries = newEntries;
return this;
}
return new HashCollisionNode(ownerID, this.keyHash, newEntries);
};
function ValueNode(ownerID, keyHash, entry) {
this.ownerID = ownerID;
this.keyHash = keyHash;
this.entry = entry;
}
ValueNode.prototype.get = function(shift, keyHash, key, notSetValue) {
return is(key, this.entry[0]) ? this.entry[1] : notSetValue;
};
ValueNode.prototype.update = function(ownerID, shift, keyHash, key, value, didChangeSize, didAlter) {
var removed = value === NOT_SET;
var keyMatch = is(key, this.entry[0]);
if (keyMatch ? value === this.entry[1] : removed) {
return this;
}
SetRef(didAlter);
if (removed) {
SetRef(didChangeSize);
return; // undefined
}
if (keyMatch) {
if (ownerID && ownerID === this.ownerID) {
this.entry[1] = value;
return this;
}
return new ValueNode(ownerID, this.keyHash, [key, value]);
}
SetRef(didChangeSize);
return mergeIntoNode(this, ownerID, shift, hash(key), [key, value]);
};
// #pragma Iterators
ArrayMapNode.prototype.iterate =
HashCollisionNode.prototype.iterate = function (fn, reverse) {
var entries = this.entries;
for (var ii = 0, maxIndex = entries.length - 1; ii <= maxIndex; ii++) {
if (fn(entries[reverse ? maxIndex - ii : ii]) === false) {
return false;
}
}
}
BitmapIndexedNode.prototype.iterate =
HashArrayMapNode.prototype.iterate = function (fn, reverse) {
var nodes = this.nodes;
for (var ii = 0, maxIndex = nodes.length - 1; ii <= maxIndex; ii++) {
var node = nodes[reverse ? maxIndex - ii : ii];
if (node && node.iterate(fn, reverse) === false) {
return false;
}
}
}
ValueNode.prototype.iterate = function (fn, reverse) {
return fn(this.entry);
}
createClass(MapIterator, Iterator);
function MapIterator(map, type, reverse) {
this._type = type;
this._reverse = reverse;
this._stack = map._root && mapIteratorFrame(map._root);
}
MapIterator.prototype.next = function() {
var type = this._type;
var stack = this._stack;
while (stack) {
var node = stack.node;
var index = stack.index++;
var maxIndex;
if (node.entry) {
if (index === 0) {
return mapIteratorValue(type, node.entry);
}
} else if (node.entries) {
maxIndex = node.entries.length - 1;
if (index <= maxIndex) {
return mapIteratorValue(type, node.entries[this._reverse ? maxIndex - index : index]);
}
} else {
maxIndex = node.nodes.length - 1;
if (index <= maxIndex) {
var subNode = node.nodes[this._reverse ? maxIndex - index : index];
if (subNode) {
if (subNode.entry) {
return mapIteratorValue(type, subNode.entry);
}
stack = this._stack = mapIteratorFrame(subNode, stack);
}
continue;
}
}
stack = this._stack = this._stack.__prev;
}
return iteratorDone();
};
function mapIteratorValue(type, entry) {
return iteratorValue(type, entry[0], entry[1]);
}
function mapIteratorFrame(node, prev) {
return {
node: node,
index: 0,
__prev: prev
};
}
function makeMap(size, root, ownerID, hash) {
var map = Object.create(MapPrototype);
map.size = size;
map._root = root;
map.__ownerID = ownerID;
map.__hash = hash;
map.__altered = false;
return map;
}
var EMPTY_MAP;
function emptyMap() {
return EMPTY_MAP || (EMPTY_MAP = makeMap(0));
}
function updateMap(map, k, v) {
var newRoot;
var newSize;
if (!map._root) {
if (v === NOT_SET) {
return map;
}
newSize = 1;
newRoot = new ArrayMapNode(map.__ownerID, [[k, v]]);
} else {
var didChangeSize = MakeRef(CHANGE_LENGTH);
var didAlter = MakeRef(DID_ALTER);
newRoot = updateNode(map._root, map.__ownerID, 0, undefined, k, v, didChangeSize, didAlter);
if (!didAlter.value) {
return map;
}
newSize = map.size + (didChangeSize.value ? v === NOT_SET ? -1 : 1 : 0);
}
if (map.__ownerID) {
map.size = newSize;
map._root = newRoot;
map.__hash = undefined;
map.__altered = true;
return map;
}
return newRoot ? makeMap(newSize, newRoot) : emptyMap();
}
function updateNode(node, ownerID, shift, keyHash, key, value, didChangeSize, didAlter) {
if (!node) {
if (value === NOT_SET) {
return node;
}
SetRef(didAlter);
SetRef(didChangeSize);
return new ValueNode(ownerID, keyHash, [key, value]);
}
return node.update(ownerID, shift, keyHash, key, value, didChangeSize, didAlter);
}
function isLeafNode(node) {
return node.constructor === ValueNode || node.constructor === HashCollisionNode;
}
function mergeIntoNode(node, ownerID, shift, keyHash, entry) {
if (node.keyHash === keyHash) {
return new HashCollisionNode(ownerID, keyHash, [node.entry, entry]);
}
var idx1 = (shift === 0 ? node.keyHash : node.keyHash >>> shift) & MASK;
var idx2 = (shift === 0 ? keyHash : keyHash >>> shift) & MASK;
var newNode;
var nodes = idx1 === idx2 ?
[mergeIntoNode(node, ownerID, shift + SHIFT, keyHash, entry)] :
((newNode = new ValueNode(ownerID, keyHash, entry)), idx1 < idx2 ? [node, newNode] : [newNode, node]);
return new BitmapIndexedNode(ownerID, (1 << idx1) | (1 << idx2), nodes);
}
function createNodes(ownerID, entries, key, value) {
if (!ownerID) {
ownerID = new OwnerID();
}
var node = new ValueNode(ownerID, hash(key), [key, value]);
for (var ii = 0; ii < entries.length; ii++) {
var entry = entries[ii];
node = node.update(ownerID, 0, undefined, entry[0], entry[1]);
}
return node;
}
function packNodes(ownerID, nodes, count, excluding) {
var bitmap = 0;
var packedII = 0;
var packedNodes = new Array(count);
for (var ii = 0, bit = 1, len = nodes.length; ii < len; ii++, bit <<= 1) {
var node = nodes[ii];
if (node !== undefined && ii !== excluding) {
bitmap |= bit;
packedNodes[packedII++] = node;
}
}
return new BitmapIndexedNode(ownerID, bitmap, packedNodes);
}
function expandNodes(ownerID, nodes, bitmap, including, node) {
var count = 0;
var expandedNodes = new Array(SIZE);
for (var ii = 0; bitmap !== 0; ii++, bitmap >>>= 1) {
expandedNodes[ii] = bitmap & 1 ? nodes[count++] : undefined;
}
expandedNodes[including] = node;
return new HashArrayMapNode(ownerID, count + 1, expandedNodes);
}
function mergeIntoMapWith(map, merger, iterables) {
var iters = [];
for (var ii = 0; ii < iterables.length; ii++) {
var value = iterables[ii];
var iter = KeyedIterable(value);
if (!isIterable(value)) {
iter = iter.map(function(v ) {return fromJS(v)});
}
iters.push(iter);
}
return mergeIntoCollectionWith(map, merger, iters);
}
function deepMerger(existing, value, key) {
return existing && existing.mergeDeep && isIterable(value) ?
existing.mergeDeep(value) :
is(existing, value) ? existing : value;
}
function deepMergerWith(merger) {
return function(existing, value, key) {
if (existing && existing.mergeDeepWith && isIterable(value)) {
return existing.mergeDeepWith(merger, value);
}
var nextValue = merger(existing, value, key);
return is(existing, nextValue) ? existing : nextValue;
};
}
function mergeIntoCollectionWith(collection, merger, iters) {
iters = iters.filter(function(x ) {return x.size !== 0});
if (iters.length === 0) {
return collection;
}
if (collection.size === 0 && !collection.__ownerID && iters.length === 1) {
return collection.constructor(iters[0]);
}
return collection.withMutations(function(collection ) {
var mergeIntoMap = merger ?
function(value, key) {
collection.update(key, NOT_SET, function(existing )
{return existing === NOT_SET ? value : merger(existing, value, key)}
);
} :
function(value, key) {
collection.set(key, value);
}
for (var ii = 0; ii < iters.length; ii++) {
iters[ii].forEach(mergeIntoMap);
}
});
}
function updateInDeepMap(existing, keyPathIter, notSetValue, updater) {
var isNotSet = existing === NOT_SET;
var step = keyPathIter.next();
if (step.done) {
var existingValue = isNotSet ? notSetValue : existing;
var newValue = updater(existingValue);
return newValue === existingValue ? existing : newValue;
}
invariant(
isNotSet || (existing && existing.set),
'invalid keyPath'
);
var key = step.value;
var nextExisting = isNotSet ? NOT_SET : existing.get(key, NOT_SET);
var nextUpdated = updateInDeepMap(
nextExisting,
keyPathIter,
notSetValue,
updater
);
return nextUpdated === nextExisting ? existing :
nextUpdated === NOT_SET ? existing.remove(key) :
(isNotSet ? emptyMap() : existing).set(key, nextUpdated);
}
function popCount(x) {
x = x - ((x >> 1) & 0x55555555);
x = (x & 0x33333333) + ((x >> 2) & 0x33333333);
x = (x + (x >> 4)) & 0x0f0f0f0f;
x = x + (x >> 8);
x = x + (x >> 16);
return x & 0x7f;
}
function setIn(array, idx, val, canEdit) {
var newArray = canEdit ? array : arrCopy(array);
newArray[idx] = val;
return newArray;
}
function spliceIn(array, idx, val, canEdit) {
var newLen = array.length + 1;
if (canEdit && idx + 1 === newLen) {
array[idx] = val;
return array;
}
var newArray = new Array(newLen);
var after = 0;
for (var ii = 0; ii < newLen; ii++) {
if (ii === idx) {
newArray[ii] = val;
after = -1;
} else {
newArray[ii] = array[ii + after];
}
}
return newArray;
}
function spliceOut(array, idx, canEdit) {
var newLen = array.length - 1;
if (canEdit && idx === newLen) {
array.pop();
return array;
}
var newArray = new Array(newLen);
var after = 0;
for (var ii = 0; ii < newLen; ii++) {
if (ii === idx) {
after = 1;
}
newArray[ii] = array[ii + after];
}
return newArray;
}
var MAX_ARRAY_MAP_SIZE = SIZE / 4;
var MAX_BITMAP_INDEXED_SIZE = SIZE / 2;
var MIN_HASH_ARRAY_MAP_SIZE = SIZE / 4;
createClass(List, IndexedCollection);
// @pragma Construction
function List(value) {
var empty = emptyList();
if (value === null || value === undefined) {
return empty;
}
if (isList(value)) {
return value;
}
var iter = IndexedIterable(value);
var size = iter.size;
if (size === 0) {
return empty;
}
assertNotInfinite(size);
if (size > 0 && size < SIZE) {
return makeList(0, size, SHIFT, null, new VNode(iter.toArray()));
}
return empty.withMutations(function(list ) {
list.setSize(size);
iter.forEach(function(v, i) {return list.set(i, v)});
});
}
List.of = function(/*...values*/) {
return this(arguments);
};
List.prototype.toString = function() {
return this.__toString('List [', ']');
};
// @pragma Access
List.prototype.get = function(index, notSetValue) {
index = wrapIndex(this, index);
if (index >= 0 && index < this.size) {
index += this._origin;
var node = listNodeFor(this, index);
return node && node.array[index & MASK];
}
return notSetValue;
};
// @pragma Modification
List.prototype.set = function(index, value) {
return updateList(this, index, value);
};
List.prototype.remove = function(index) {
return !this.has(index) ? this :
index === 0 ? this.shift() :
index === this.size - 1 ? this.pop() :
this.splice(index, 1);
};
List.prototype.insert = function(index, value) {
return this.splice(index, 0, value);
};
List.prototype.clear = function() {
if (this.size === 0) {
return this;
}
if (this.__ownerID) {
this.size = this._origin = this._capacity = 0;
this._level = SHIFT;
this._root = this._tail = null;
this.__hash = undefined;
this.__altered = true;
return this;
}
return emptyList();
};
List.prototype.push = function(/*...values*/) {
var values = arguments;
var oldSize = this.size;
return this.withMutations(function(list ) {
setListBounds(list, 0, oldSize + values.length);
for (var ii = 0; ii < values.length; ii++) {
list.set(oldSize + ii, values[ii]);
}
});
};
List.prototype.pop = function() {
return setListBounds(this, 0, -1);
};
List.prototype.unshift = function(/*...values*/) {
var values = arguments;
return this.withMutations(function(list ) {
setListBounds(list, -values.length);
for (var ii = 0; ii < values.length; ii++) {
list.set(ii, values[ii]);
}
});
};
List.prototype.shift = function() {
return setListBounds(this, 1);
};
// @pragma Composition
List.prototype.merge = function(/*...iters*/) {
return mergeIntoListWith(this, undefined, arguments);
};
List.prototype.mergeWith = function(merger) {var iters = SLICE$0.call(arguments, 1);
return mergeIntoListWith(this, merger, iters);
};
List.prototype.mergeDeep = function(/*...iters*/) {
return mergeIntoListWith(this, deepMerger, arguments);
};
List.prototype.mergeDeepWith = function(merger) {var iters = SLICE$0.call(arguments, 1);
return mergeIntoListWith(this, deepMergerWith(merger), iters);
};
List.prototype.setSize = function(size) {
return setListBounds(this, 0, size);
};
// @pragma Iteration
List.prototype.slice = function(begin, end) {
var size = this.size;
if (wholeSlice(begin, end, size)) {
return this;
}
return setListBounds(
this,
resolveBegin(begin, size),
resolveEnd(end, size)
);
};
List.prototype.__iterator = function(type, reverse) {
var index = 0;
var values = iterateList(this, reverse);
return new Iterator(function() {
var value = values();
return value === DONE ?
iteratorDone() :
iteratorValue(type, index++, value);
});
};
List.prototype.__iterate = function(fn, reverse) {
var index = 0;
var values = iterateList(this, reverse);
var value;
while ((value = values()) !== DONE) {
if (fn(value, index++, this) === false) {
break;
}
}
return index;
};
List.prototype.__ensureOwner = function(ownerID) {
if (ownerID === this.__ownerID) {
return this;
}
if (!ownerID) {
this.__ownerID = ownerID;
return this;
}
return makeList(this._origin, this._capacity, this._level, this._root, this._tail, ownerID, this.__hash);
};
function isList(maybeList) {
return !!(maybeList && maybeList[IS_LIST_SENTINEL]);
}
List.isList = isList;
var IS_LIST_SENTINEL = '@@__IMMUTABLE_LIST__@@';
var ListPrototype = List.prototype;
ListPrototype[IS_LIST_SENTINEL] = true;
ListPrototype[DELETE] = ListPrototype.remove;
ListPrototype.setIn = MapPrototype.setIn;
ListPrototype.deleteIn =
ListPrototype.removeIn = MapPrototype.removeIn;
ListPrototype.update = MapPrototype.update;
ListPrototype.updateIn = MapPrototype.updateIn;
ListPrototype.mergeIn = MapPrototype.mergeIn;
ListPrototype.mergeDeepIn = MapPrototype.mergeDeepIn;
ListPrototype.withMutations = MapPrototype.withMutations;
ListPrototype.asMutable = MapPrototype.asMutable;
ListPrototype.asImmutable = MapPrototype.asImmutable;
ListPrototype.wasAltered = MapPrototype.wasAltered;
function VNode(array, ownerID) {
this.array = array;
this.ownerID = ownerID;
}
// TODO: seems like these methods are very similar
VNode.prototype.removeBefore = function(ownerID, level, index) {
    if (index === (level ? 1 << level : 0) || this.array.length === 0) {
return this;
}
var originIndex = (index >>> level) & MASK;
if (originIndex >= this.array.length) {
return new VNode([], ownerID);
}
var removingFirst = originIndex === 0;
var newChild;
if (level > 0) {
var oldChild = this.array[originIndex];
newChild = oldChild && oldChild.removeBefore(ownerID, level - SHIFT, index);
if (newChild === oldChild && removingFirst) {
return this;
}
}
if (removingFirst && !newChild) {
return this;
}
var editable = editableVNode(this, ownerID);
if (!removingFirst) {
for (var ii = 0; ii < originIndex; ii++) {
editable.array[ii] = undefined;
}
}
if (newChild) {
editable.array[originIndex] = newChild;
}
return editable;
};
VNode.prototype.removeAfter = function(ownerID, level, index) {
if (index === (level ? 1 << level : 0) || this.array.length === 0) {
return this;
}
var sizeIndex = ((index - 1) >>> level) & MASK;
if (sizeIndex >= this.array.length) {
return this;
}
var newChild;
if (level > 0) {
var oldChild = this.array[sizeIndex];
newChild = oldChild && oldChild.removeAfter(ownerID, level - SHIFT, index);
if (newChild === oldChild && sizeIndex === this.array.length - 1) {
return this;
}
}
var editable = editableVNode(this, ownerID);
editable.array.splice(sizeIndex + 1);
if (newChild) {
editable.array[sizeIndex] = newChild;
}
return editable;
};
var DONE = {};
function iterateList(list, reverse) {
var left = list._origin;
var right = list._capacity;
var tailPos = getTailOffset(right);
var tail = list._tail;
return iterateNodeOrLeaf(list._root, list._level, 0);
function iterateNodeOrLeaf(node, level, offset) {
return level === 0 ?
iterateLeaf(node, offset) :
iterateNode(node, level, offset);
}
function iterateLeaf(node, offset) {
var array = offset === tailPos ? tail && tail.array : node && node.array;
var from = offset > left ? 0 : left - offset;
var to = right - offset;
if (to > SIZE) {
to = SIZE;
}
return function() {
if (from === to) {
return DONE;
}
var idx = reverse ? --to : from++;
return array && array[idx];
};
}
function iterateNode(node, level, offset) {
var values;
var array = node && node.array;
var from = offset > left ? 0 : (left - offset) >> level;
var to = ((right - offset) >> level) + 1;
if (to > SIZE) {
to = SIZE;
}
return function() {
do {
if (values) {
var value = values();
if (value !== DONE) {
return value;
}
values = null;
}
if (from === to) {
return DONE;
}
var idx = reverse ? --to : from++;
values = iterateNodeOrLeaf(
array && array[idx], level - SHIFT, offset + (idx << level)
);
} while (true);
};
}
}
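  // iterateList walks the trie one level at a time: iterateNode descends into
  // child nodes, iterateLeaf reads the SIZE-wide leaf arrays, and the leaf at
  // tailPos is read from list._tail rather than the root. left/right clip the
  // walk to the [_origin, _capacity) window so sliced lists skip trimmed slots.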
function makeList(origin, capacity, level, root, tail, ownerID, hash) {
var list = Object.create(ListPrototype);
list.size = capacity - origin;
list._origin = origin;
list._capacity = capacity;
list._level = level;
list._root = root;
list._tail = tail;
list.__ownerID = ownerID;
list.__hash = hash;
list.__altered = false;
return list;
}
var EMPTY_LIST;
function emptyList() {
return EMPTY_LIST || (EMPTY_LIST = makeList(0, 0, SHIFT));
}
function updateList(list, index, value) {
index = wrapIndex(list, index);
if (index !== index) {
return list;
}
if (index >= list.size || index < 0) {
return list.withMutations(function(list ) {
index < 0 ?
setListBounds(list, index).set(0, value) :
setListBounds(list, 0, index + 1).set(index, value)
});
}
index += list._origin;
var newTail = list._tail;
var newRoot = list._root;
var didAlter = MakeRef(DID_ALTER);
if (index >= getTailOffset(list._capacity)) {
newTail = updateVNode(newTail, list.__ownerID, 0, index, value, didAlter);
} else {
newRoot = updateVNode(newRoot, list.__ownerID, list._level, index, value, didAlter);
}
if (!didAlter.value) {
return list;
}
if (list.__ownerID) {
list._root = newRoot;
list._tail = newTail;
list.__hash = undefined;
list.__altered = true;
return list;
}
return makeList(list._origin, list._capacity, list._level, newRoot, newTail);
}
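  // Setting an index copies only the VNodes on the path from the root to the
  // affected leaf (updateVNode recurses SHIFT bits per level); every other node
  // is shared with the original list. Indices at or past the tail offset touch
  // only the tail node. Illustrative sketch:
  //   var a = List.of(1, 2, 3);
  //   var b = a.set(1, 20);   // a is untouched; b shares all unchanged nodes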
function updateVNode(node, ownerID, level, index, value, didAlter) {
var idx = (index >>> level) & MASK;
var nodeHas = node && idx < node.array.length;
if (!nodeHas && value === undefined) {
return node;
}
var newNode;
if (level > 0) {
var lowerNode = node && node.array[idx];
var newLowerNode = updateVNode(lowerNode, ownerID, level - SHIFT, index, value, didAlter);
if (newLowerNode === lowerNode) {
return node;
}
newNode = editableVNode(node, ownerID);
newNode.array[idx] = newLowerNode;
return newNode;
}
if (nodeHas && node.array[idx] === value) {
return node;
}
SetRef(didAlter);
newNode = editableVNode(node, ownerID);
if (value === undefined && idx === newNode.array.length - 1) {
newNode.array.pop();
} else {
newNode.array[idx] = value;
}
return newNode;
}
function editableVNode(node, ownerID) {
if (ownerID && node && ownerID === node.ownerID) {
return node;
}
return new VNode(node ? node.array.slice() : [], ownerID);
}
function listNodeFor(list, rawIndex) {
if (rawIndex >= getTailOffset(list._capacity)) {
return list._tail;
}
if (rawIndex < 1 << (list._level + SHIFT)) {
var node = list._root;
var level = list._level;
while (node && level > 0) {
node = node.array[(rawIndex >>> level) & MASK];
level -= SHIFT;
}
return node;
}
}
function setListBounds(list, begin, end) {
// Sanitize begin & end using this shorthand for ToInt32(argument)
// http://www.ecma-international.org/ecma-262/6.0/#sec-toint32
if (begin !== undefined) {
begin = begin | 0;
}
if (end !== undefined) {
end = end | 0;
}
var owner = list.__ownerID || new OwnerID();
var oldOrigin = list._origin;
var oldCapacity = list._capacity;
var newOrigin = oldOrigin + begin;
var newCapacity = end === undefined ? oldCapacity : end < 0 ? oldCapacity + end : oldOrigin + end;
if (newOrigin === oldOrigin && newCapacity === oldCapacity) {
return list;
}
// If it's going to end after it starts, it's empty.
if (newOrigin >= newCapacity) {
return list.clear();
}
var newLevel = list._level;
var newRoot = list._root;
// New origin might need creating a higher root.
var offsetShift = 0;
while (newOrigin + offsetShift < 0) {
newRoot = new VNode(newRoot && newRoot.array.length ? [undefined, newRoot] : [], owner);
newLevel += SHIFT;
offsetShift += 1 << newLevel;
}
if (offsetShift) {
newOrigin += offsetShift;
oldOrigin += offsetShift;
newCapacity += offsetShift;
oldCapacity += offsetShift;
}
var oldTailOffset = getTailOffset(oldCapacity);
var newTailOffset = getTailOffset(newCapacity);
// New size might need creating a higher root.
while (newTailOffset >= 1 << (newLevel + SHIFT)) {
newRoot = new VNode(newRoot && newRoot.array.length ? [newRoot] : [], owner);
newLevel += SHIFT;
}
// Locate or create the new tail.
var oldTail = list._tail;
var newTail = newTailOffset < oldTailOffset ?
listNodeFor(list, newCapacity - 1) :
newTailOffset > oldTailOffset ? new VNode([], owner) : oldTail;
// Merge Tail into tree.
if (oldTail && newTailOffset > oldTailOffset && newOrigin < oldCapacity && oldTail.array.length) {
newRoot = editableVNode(newRoot, owner);
var node = newRoot;
for (var level = newLevel; level > SHIFT; level -= SHIFT) {
var idx = (oldTailOffset >>> level) & MASK;
node = node.array[idx] = editableVNode(node.array[idx], owner);
}
node.array[(oldTailOffset >>> SHIFT) & MASK] = oldTail;
}
// If the size has been reduced, there's a chance the tail needs to be trimmed.
if (newCapacity < oldCapacity) {
newTail = newTail && newTail.removeAfter(owner, 0, newCapacity);
}
// If the new origin is within the tail, then we do not need a root.
if (newOrigin >= newTailOffset) {
newOrigin -= newTailOffset;
newCapacity -= newTailOffset;
newLevel = SHIFT;
newRoot = null;
newTail = newTail && newTail.removeBefore(owner, 0, newOrigin);
// Otherwise, if the root has been trimmed, garbage collect.
} else if (newOrigin > oldOrigin || newTailOffset < oldTailOffset) {
offsetShift = 0;
// Identify the new top root node of the subtree of the old root.
while (newRoot) {
var beginIndex = (newOrigin >>> newLevel) & MASK;
if (beginIndex !== (newTailOffset >>> newLevel) & MASK) {
break;
}
if (beginIndex) {
offsetShift += (1 << newLevel) * beginIndex;
}
newLevel -= SHIFT;
newRoot = newRoot.array[beginIndex];
}
// Trim the new sides of the new root.
if (newRoot && newOrigin > oldOrigin) {
newRoot = newRoot.removeBefore(owner, newLevel, newOrigin - offsetShift);
}
if (newRoot && newTailOffset < oldTailOffset) {
newRoot = newRoot.removeAfter(owner, newLevel, newTailOffset - offsetShift);
}
if (offsetShift) {
newOrigin -= offsetShift;
newCapacity -= offsetShift;
}
}
if (list.__ownerID) {
list.size = newCapacity - newOrigin;
list._origin = newOrigin;
list._capacity = newCapacity;
list._level = newLevel;
list._root = newRoot;
list._tail = newTail;
list.__hash = undefined;
list.__altered = true;
return list;
}
return makeList(newOrigin, newCapacity, newLevel, newRoot, newTail);
}
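  // setListBounds is the single entry point for resizing: push, pop, shift,
  // unshift and setSize all funnel through it. It shifts _origin/_capacity,
  // grows the root by adding levels when the new bounds overflow the current
  // height, and trims or rebuilds the tail when the capacity shrinks.
  //   List.of(1, 2, 3).setSize(5).size;   // 5 (new slots read as undefined)
  //   List.of(1, 2, 3).pop();             // setListBounds(list, 0, -1)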
function mergeIntoListWith(list, merger, iterables) {
var iters = [];
var maxSize = 0;
for (var ii = 0; ii < iterables.length; ii++) {
var value = iterables[ii];
var iter = IndexedIterable(value);
if (iter.size > maxSize) {
maxSize = iter.size;
}
if (!isIterable(value)) {
iter = iter.map(function(v ) {return fromJS(v)});
}
iters.push(iter);
}
if (maxSize > list.size) {
list = list.setSize(maxSize);
}
return mergeIntoCollectionWith(list, merger, iters);
}
function getTailOffset(size) {
return size < SIZE ? 0 : (((size - 1) >>> SHIFT) << SHIFT);
}
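  // getTailOffset returns the start index of the SIZE-wide chunk that holds the
  // last element, so that chunk can be kept in _tail for cheap appends.
  // Assuming the usual SHIFT of 5 (SIZE = 32): getTailOffset(37) === 32, so
  // indices 32..36 live in _tail while 0..31 live in the root trie.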
createClass(OrderedMap, Map);
// @pragma Construction
function OrderedMap(value) {
return value === null || value === undefined ? emptyOrderedMap() :
isOrderedMap(value) ? value :
emptyOrderedMap().withMutations(function(map ) {
var iter = KeyedIterable(value);
assertNotInfinite(iter.size);
iter.forEach(function(v, k) {return map.set(k, v)});
});
}
OrderedMap.of = function(/*...values*/) {
return this(arguments);
};
OrderedMap.prototype.toString = function() {
return this.__toString('OrderedMap {', '}');
};
// @pragma Access
OrderedMap.prototype.get = function(k, notSetValue) {
var index = this._map.get(k);
return index !== undefined ? this._list.get(index)[1] : notSetValue;
};
// @pragma Modification
OrderedMap.prototype.clear = function() {
if (this.size === 0) {
return this;
}
if (this.__ownerID) {
this.size = 0;
this._map.clear();
this._list.clear();
return this;
}
return emptyOrderedMap();
};
OrderedMap.prototype.set = function(k, v) {
return updateOrderedMap(this, k, v);
};
OrderedMap.prototype.remove = function(k) {
return updateOrderedMap(this, k, NOT_SET);
};
OrderedMap.prototype.wasAltered = function() {
return this._map.wasAltered() || this._list.wasAltered();
};
OrderedMap.prototype.__iterate = function(fn, reverse) {var this$0 = this;
return this._list.__iterate(
function(entry ) {return entry && fn(entry[1], entry[0], this$0)},
reverse
);
};
OrderedMap.prototype.__iterator = function(type, reverse) {
return this._list.fromEntrySeq().__iterator(type, reverse);
};
OrderedMap.prototype.__ensureOwner = function(ownerID) {
if (ownerID === this.__ownerID) {
return this;
}
var newMap = this._map.__ensureOwner(ownerID);
var newList = this._list.__ensureOwner(ownerID);
if (!ownerID) {
this.__ownerID = ownerID;
this._map = newMap;
this._list = newList;
return this;
}
return makeOrderedMap(newMap, newList, ownerID, this.__hash);
};
function isOrderedMap(maybeOrderedMap) {
return isMap(maybeOrderedMap) && isOrdered(maybeOrderedMap);
}
OrderedMap.isOrderedMap = isOrderedMap;
OrderedMap.prototype[IS_ORDERED_SENTINEL] = true;
OrderedMap.prototype[DELETE] = OrderedMap.prototype.remove;
function makeOrderedMap(map, list, ownerID, hash) {
var omap = Object.create(OrderedMap.prototype);
omap.size = map ? map.size : 0;
omap._map = map;
omap._list = list;
omap.__ownerID = ownerID;
omap.__hash = hash;
return omap;
}
var EMPTY_ORDERED_MAP;
function emptyOrderedMap() {
return EMPTY_ORDERED_MAP || (EMPTY_ORDERED_MAP = makeOrderedMap(emptyMap(), emptyList()));
}
function updateOrderedMap(omap, k, v) {
var map = omap._map;
var list = omap._list;
var i = map.get(k);
var has = i !== undefined;
var newMap;
var newList;
if (v === NOT_SET) { // removed
if (!has) {
return omap;
}
if (list.size >= SIZE && list.size >= map.size * 2) {
newList = list.filter(function(entry, idx) {return entry !== undefined && i !== idx});
newMap = newList.toKeyedSeq().map(function(entry ) {return entry[0]}).flip().toMap();
if (omap.__ownerID) {
newMap.__ownerID = newList.__ownerID = omap.__ownerID;
}
} else {
newMap = map.remove(k);
newList = i === list.size - 1 ? list.pop() : list.set(i, undefined);
}
} else {
if (has) {
if (v === list.get(i)[1]) {
return omap;
}
newMap = map;
newList = list.set(i, [k, v]);
} else {
newMap = map.set(k, list.size);
newList = list.set(list.size, [k, v]);
}
}
if (omap.__ownerID) {
omap.size = newMap.size;
omap._map = newMap;
omap._list = newList;
omap.__hash = undefined;
return omap;
}
return makeOrderedMap(newMap, newList);
}
createClass(ToKeyedSequence, KeyedSeq);
function ToKeyedSequence(indexed, useKeys) {
this._iter = indexed;
this._useKeys = useKeys;
this.size = indexed.size;
}
ToKeyedSequence.prototype.get = function(key, notSetValue) {
return this._iter.get(key, notSetValue);
};
ToKeyedSequence.prototype.has = function(key) {
return this._iter.has(key);
};
ToKeyedSequence.prototype.valueSeq = function() {
return this._iter.valueSeq();
};
ToKeyedSequence.prototype.reverse = function() {var this$0 = this;
var reversedSequence = reverseFactory(this, true);
if (!this._useKeys) {
reversedSequence.valueSeq = function() {return this$0._iter.toSeq().reverse()};
}
return reversedSequence;
};
ToKeyedSequence.prototype.map = function(mapper, context) {var this$0 = this;
var mappedSequence = mapFactory(this, mapper, context);
if (!this._useKeys) {
mappedSequence.valueSeq = function() {return this$0._iter.toSeq().map(mapper, context)};
}
return mappedSequence;
};
ToKeyedSequence.prototype.__iterate = function(fn, reverse) {var this$0 = this;
var ii;
return this._iter.__iterate(
this._useKeys ?
function(v, k) {return fn(v, k, this$0)} :
((ii = reverse ? resolveSize(this) : 0),
function(v ) {return fn(v, reverse ? --ii : ii++, this$0)}),
reverse
);
};
ToKeyedSequence.prototype.__iterator = function(type, reverse) {
if (this._useKeys) {
return this._iter.__iterator(type, reverse);
}
var iterator = this._iter.__iterator(ITERATE_VALUES, reverse);
var ii = reverse ? resolveSize(this) : 0;
return new Iterator(function() {
var step = iterator.next();
return step.done ? step :
iteratorValue(type, reverse ? --ii : ii++, step.value, step);
});
};
ToKeyedSequence.prototype[IS_ORDERED_SENTINEL] = true;
createClass(ToIndexedSequence, IndexedSeq);
function ToIndexedSequence(iter) {
this._iter = iter;
this.size = iter.size;
}
ToIndexedSequence.prototype.includes = function(value) {
return this._iter.includes(value);
};
ToIndexedSequence.prototype.__iterate = function(fn, reverse) {var this$0 = this;
var iterations = 0;
return this._iter.__iterate(function(v ) {return fn(v, iterations++, this$0)}, reverse);
};
ToIndexedSequence.prototype.__iterator = function(type, reverse) {
var iterator = this._iter.__iterator(ITERATE_VALUES, reverse);
var iterations = 0;
return new Iterator(function() {
var step = iterator.next();
return step.done ? step :
iteratorValue(type, iterations++, step.value, step)
});
};
createClass(ToSetSequence, SetSeq);
function ToSetSequence(iter) {
this._iter = iter;
this.size = iter.size;
}
ToSetSequence.prototype.has = function(key) {
return this._iter.includes(key);
};
ToSetSequence.prototype.__iterate = function(fn, reverse) {var this$0 = this;
return this._iter.__iterate(function(v ) {return fn(v, v, this$0)}, reverse);
};
ToSetSequence.prototype.__iterator = function(type, reverse) {
var iterator = this._iter.__iterator(ITERATE_VALUES, reverse);
return new Iterator(function() {
var step = iterator.next();
return step.done ? step :
iteratorValue(type, step.value, step.value, step);
});
};
createClass(FromEntriesSequence, KeyedSeq);
function FromEntriesSequence(entries) {
this._iter = entries;
this.size = entries.size;
}
FromEntriesSequence.prototype.entrySeq = function() {
return this._iter.toSeq();
};
FromEntriesSequence.prototype.__iterate = function(fn, reverse) {var this$0 = this;
return this._iter.__iterate(function(entry ) {
// Check if entry exists first so array access doesn't throw for holes
// in the parent iteration.
if (entry) {
validateEntry(entry);
var indexedIterable = isIterable(entry);
return fn(
indexedIterable ? entry.get(1) : entry[1],
indexedIterable ? entry.get(0) : entry[0],
this$0
);
}
}, reverse);
};
FromEntriesSequence.prototype.__iterator = function(type, reverse) {
var iterator = this._iter.__iterator(ITERATE_VALUES, reverse);
return new Iterator(function() {
while (true) {
var step = iterator.next();
if (step.done) {
return step;
}
var entry = step.value;
// Check if entry exists first so array access doesn't throw for holes
// in the parent iteration.
if (entry) {
validateEntry(entry);
var indexedIterable = isIterable(entry);
return iteratorValue(
type,
indexedIterable ? entry.get(0) : entry[0],
indexedIterable ? entry.get(1) : entry[1],
step
);
}
}
});
};
ToIndexedSequence.prototype.cacheResult =
ToKeyedSequence.prototype.cacheResult =
ToSetSequence.prototype.cacheResult =
FromEntriesSequence.prototype.cacheResult =
cacheResultThrough;
function flipFactory(iterable) {
var flipSequence = makeSequence(iterable);
flipSequence._iter = iterable;
flipSequence.size = iterable.size;
flipSequence.flip = function() {return iterable};
flipSequence.reverse = function () {
var reversedSequence = iterable.reverse.apply(this); // super.reverse()
reversedSequence.flip = function() {return iterable.reverse()};
return reversedSequence;
};
flipSequence.has = function(key ) {return iterable.includes(key)};
flipSequence.includes = function(key ) {return iterable.has(key)};
flipSequence.cacheResult = cacheResultThrough;
flipSequence.__iterateUncached = function (fn, reverse) {var this$0 = this;
return iterable.__iterate(function(v, k) {return fn(k, v, this$0) !== false}, reverse);
}
flipSequence.__iteratorUncached = function(type, reverse) {
if (type === ITERATE_ENTRIES) {
var iterator = iterable.__iterator(type, reverse);
return new Iterator(function() {
var step = iterator.next();
if (!step.done) {
var k = step.value[0];
step.value[0] = step.value[1];
step.value[1] = k;
}
return step;
});
}
return iterable.__iterator(
type === ITERATE_VALUES ? ITERATE_KEYS : ITERATE_VALUES,
reverse
);
}
return flipSequence;
}
function mapFactory(iterable, mapper, context) {
var mappedSequence = makeSequence(iterable);
mappedSequence.size = iterable.size;
mappedSequence.has = function(key ) {return iterable.has(key)};
mappedSequence.get = function(key, notSetValue) {
var v = iterable.get(key, NOT_SET);
return v === NOT_SET ?
notSetValue :
mapper.call(context, v, key, iterable);
};
mappedSequence.__iterateUncached = function (fn, reverse) {var this$0 = this;
return iterable.__iterate(
function(v, k, c) {return fn(mapper.call(context, v, k, c), k, this$0) !== false},
reverse
);
}
mappedSequence.__iteratorUncached = function (type, reverse) {
var iterator = iterable.__iterator(ITERATE_ENTRIES, reverse);
return new Iterator(function() {
var step = iterator.next();
if (step.done) {
return step;
}
var entry = step.value;
var key = entry[0];
return iteratorValue(
type,
key,
mapper.call(context, entry[1], key, iterable),
step
);
});
}
return mappedSequence;
}
function reverseFactory(iterable, useKeys) {
var reversedSequence = makeSequence(iterable);
reversedSequence._iter = iterable;
reversedSequence.size = iterable.size;
reversedSequence.reverse = function() {return iterable};
if (iterable.flip) {
reversedSequence.flip = function () {
var flipSequence = flipFactory(iterable);
flipSequence.reverse = function() {return iterable.flip()};
return flipSequence;
};
}
reversedSequence.get = function(key, notSetValue)
{return iterable.get(useKeys ? key : -1 - key, notSetValue)};
reversedSequence.has = function(key )
{return iterable.has(useKeys ? key : -1 - key)};
reversedSequence.includes = function(value ) {return iterable.includes(value)};
reversedSequence.cacheResult = cacheResultThrough;
reversedSequence.__iterate = function (fn, reverse) {var this$0 = this;
return iterable.__iterate(function(v, k) {return fn(v, k, this$0)}, !reverse);
};
reversedSequence.__iterator =
function(type, reverse) {return iterable.__iterator(type, !reverse)};
return reversedSequence;
}
function filterFactory(iterable, predicate, context, useKeys) {
var filterSequence = makeSequence(iterable);
if (useKeys) {
filterSequence.has = function(key ) {
var v = iterable.get(key, NOT_SET);
return v !== NOT_SET && !!predicate.call(context, v, key, iterable);
};
filterSequence.get = function(key, notSetValue) {
var v = iterable.get(key, NOT_SET);
return v !== NOT_SET && predicate.call(context, v, key, iterable) ?
v : notSetValue;
};
}
filterSequence.__iterateUncached = function (fn, reverse) {var this$0 = this;
var iterations = 0;
iterable.__iterate(function(v, k, c) {
if (predicate.call(context, v, k, c)) {
iterations++;
return fn(v, useKeys ? k : iterations - 1, this$0);
}
}, reverse);
return iterations;
};
filterSequence.__iteratorUncached = function (type, reverse) {
var iterator = iterable.__iterator(ITERATE_ENTRIES, reverse);
var iterations = 0;
return new Iterator(function() {
while (true) {
var step = iterator.next();
if (step.done) {
return step;
}
var entry = step.value;
var key = entry[0];
var value = entry[1];
if (predicate.call(context, value, key, iterable)) {
return iteratorValue(type, useKeys ? key : iterations++, value, step);
}
}
});
}
return filterSequence;
}
function countByFactory(iterable, grouper, context) {
var groups = Map().asMutable();
iterable.__iterate(function(v, k) {
groups.update(
grouper.call(context, v, k, iterable),
0,
function(a ) {return a + 1}
);
});
return groups.asImmutable();
}
function groupByFactory(iterable, grouper, context) {
var isKeyedIter = isKeyed(iterable);
var groups = (isOrdered(iterable) ? OrderedMap() : Map()).asMutable();
iterable.__iterate(function(v, k) {
groups.update(
grouper.call(context, v, k, iterable),
function(a ) {return (a = a || [], a.push(isKeyedIter ? [k, v] : v), a)}
);
});
var coerce = iterableClass(iterable);
return groups.map(function(arr ) {return reify(iterable, coerce(arr))});
}
function sliceFactory(iterable, begin, end, useKeys) {
var originalSize = iterable.size;
// Sanitize begin & end using this shorthand for ToInt32(argument)
// http://www.ecma-international.org/ecma-262/6.0/#sec-toint32
if (begin !== undefined) {
begin = begin | 0;
}
if (end !== undefined) {
if (end === Infinity) {
end = originalSize;
} else {
end = end | 0;
}
}
if (wholeSlice(begin, end, originalSize)) {
return iterable;
}
var resolvedBegin = resolveBegin(begin, originalSize);
var resolvedEnd = resolveEnd(end, originalSize);
// begin or end will be NaN if they were provided as negative numbers and
// this iterable's size is unknown. In that case, cache first so there is
// a known size and these do not resolve to NaN.
if (resolvedBegin !== resolvedBegin || resolvedEnd !== resolvedEnd) {
return sliceFactory(iterable.toSeq().cacheResult(), begin, end, useKeys);
}
// Note: resolvedEnd is undefined when the original sequence's length is
// unknown and this slice did not supply an end and should contain all
// elements after resolvedBegin.
// In that case, resolvedSize will be NaN and sliceSize will remain undefined.
var resolvedSize = resolvedEnd - resolvedBegin;
var sliceSize;
if (resolvedSize === resolvedSize) {
sliceSize = resolvedSize < 0 ? 0 : resolvedSize;
}
var sliceSeq = makeSequence(iterable);
// If iterable.size is undefined, the size of the realized sliceSeq is
// unknown at this point unless the number of items to slice is 0
sliceSeq.size = sliceSize === 0 ? sliceSize : iterable.size && sliceSize || undefined;
if (!useKeys && isSeq(iterable) && sliceSize >= 0) {
sliceSeq.get = function (index, notSetValue) {
index = wrapIndex(this, index);
return index >= 0 && index < sliceSize ?
iterable.get(index + resolvedBegin, notSetValue) :
notSetValue;
}
}
sliceSeq.__iterateUncached = function(fn, reverse) {var this$0 = this;
if (sliceSize === 0) {
return 0;
}
if (reverse) {
return this.cacheResult().__iterate(fn, reverse);
}
var skipped = 0;
var isSkipping = true;
var iterations = 0;
iterable.__iterate(function(v, k) {
if (!(isSkipping && (isSkipping = skipped++ < resolvedBegin))) {
iterations++;
return fn(v, useKeys ? k : iterations - 1, this$0) !== false &&
iterations !== sliceSize;
}
});
return iterations;
};
sliceSeq.__iteratorUncached = function(type, reverse) {
if (sliceSize !== 0 && reverse) {
return this.cacheResult().__iterator(type, reverse);
}
// Don't bother instantiating parent iterator if taking 0.
var iterator = sliceSize !== 0 && iterable.__iterator(type, reverse);
var skipped = 0;
var iterations = 0;
return new Iterator(function() {
while (skipped++ < resolvedBegin) {
iterator.next();
}
if (++iterations > sliceSize) {
return iteratorDone();
}
var step = iterator.next();
if (useKeys || type === ITERATE_VALUES) {
return step;
} else if (type === ITERATE_KEYS) {
return iteratorValue(type, iterations - 1, undefined, step);
} else {
return iteratorValue(type, iterations - 1, step.value[1], step);
}
});
}
return sliceSeq;
}
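  // Slices built here are lazy: nothing is iterated until the slice itself is
  // iterated. The parent is only cached up front when begin/end are negative
  // and the parent's size is unknown (so they cannot be resolved), or when a
  // reverse iteration is requested.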
function takeWhileFactory(iterable, predicate, context) {
var takeSequence = makeSequence(iterable);
takeSequence.__iterateUncached = function(fn, reverse) {var this$0 = this;
if (reverse) {
return this.cacheResult().__iterate(fn, reverse);
}
var iterations = 0;
iterable.__iterate(function(v, k, c)
{return predicate.call(context, v, k, c) && ++iterations && fn(v, k, this$0)}
);
return iterations;
};
takeSequence.__iteratorUncached = function(type, reverse) {var this$0 = this;
if (reverse) {
return this.cacheResult().__iterator(type, reverse);
}
var iterator = iterable.__iterator(ITERATE_ENTRIES, reverse);
var iterating = true;
return new Iterator(function() {
if (!iterating) {
return iteratorDone();
}
var step = iterator.next();
if (step.done) {
return step;
}
var entry = step.value;
var k = entry[0];
var v = entry[1];
if (!predicate.call(context, v, k, this$0)) {
iterating = false;
return iteratorDone();
}
return type === ITERATE_ENTRIES ? step :
iteratorValue(type, k, v, step);
});
};
return takeSequence;
}
function skipWhileFactory(iterable, predicate, context, useKeys) {
var skipSequence = makeSequence(iterable);
skipSequence.__iterateUncached = function (fn, reverse) {var this$0 = this;
if (reverse) {
return this.cacheResult().__iterate(fn, reverse);
}
var isSkipping = true;
var iterations = 0;
iterable.__iterate(function(v, k, c) {
if (!(isSkipping && (isSkipping = predicate.call(context, v, k, c)))) {
iterations++;
return fn(v, useKeys ? k : iterations - 1, this$0);
}
});
return iterations;
};
skipSequence.__iteratorUncached = function(type, reverse) {var this$0 = this;
if (reverse) {
return this.cacheResult().__iterator(type, reverse);
}
var iterator = iterable.__iterator(ITERATE_ENTRIES, reverse);
var skipping = true;
var iterations = 0;
return new Iterator(function() {
var step, k, v;
do {
step = iterator.next();
if (step.done) {
if (useKeys || type === ITERATE_VALUES) {
return step;
} else if (type === ITERATE_KEYS) {
return iteratorValue(type, iterations++, undefined, step);
} else {
return iteratorValue(type, iterations++, step.value[1], step);
}
}
var entry = step.value;
k = entry[0];
v = entry[1];
skipping && (skipping = predicate.call(context, v, k, this$0));
} while (skipping);
return type === ITERATE_ENTRIES ? step :
iteratorValue(type, k, v, step);
});
};
return skipSequence;
}
function concatFactory(iterable, values) {
var isKeyedIterable = isKeyed(iterable);
var iters = [iterable].concat(values).map(function(v ) {
if (!isIterable(v)) {
v = isKeyedIterable ?
keyedSeqFromValue(v) :
indexedSeqFromValue(Array.isArray(v) ? v : [v]);
} else if (isKeyedIterable) {
v = KeyedIterable(v);
}
return v;
}).filter(function(v ) {return v.size !== 0});
if (iters.length === 0) {
return iterable;
}
if (iters.length === 1) {
var singleton = iters[0];
if (singleton === iterable ||
isKeyedIterable && isKeyed(singleton) ||
isIndexed(iterable) && isIndexed(singleton)) {
return singleton;
}
}
var concatSeq = new ArraySeq(iters);
if (isKeyedIterable) {
concatSeq = concatSeq.toKeyedSeq();
} else if (!isIndexed(iterable)) {
concatSeq = concatSeq.toSetSeq();
}
concatSeq = concatSeq.flatten(true);
concatSeq.size = iters.reduce(
function(sum, seq) {
if (sum !== undefined) {
var size = seq.size;
if (size !== undefined) {
return sum + size;
}
}
},
0
);
return concatSeq;
}
function flattenFactory(iterable, depth, useKeys) {
var flatSequence = makeSequence(iterable);
flatSequence.__iterateUncached = function(fn, reverse) {
var iterations = 0;
var stopped = false;
function flatDeep(iter, currentDepth) {var this$0 = this;
iter.__iterate(function(v, k) {
if ((!depth || currentDepth < depth) && isIterable(v)) {
flatDeep(v, currentDepth + 1);
} else if (fn(v, useKeys ? k : iterations++, this$0) === false) {
stopped = true;
}
return !stopped;
}, reverse);
}
flatDeep(iterable, 0);
return iterations;
}
flatSequence.__iteratorUncached = function(type, reverse) {
var iterator = iterable.__iterator(type, reverse);
var stack = [];
var iterations = 0;
return new Iterator(function() {
while (iterator) {
var step = iterator.next();
if (step.done !== false) {
iterator = stack.pop();
continue;
}
var v = step.value;
if (type === ITERATE_ENTRIES) {
v = v[1];
}
if ((!depth || stack.length < depth) && isIterable(v)) {
stack.push(iterator);
iterator = v.__iterator(type, reverse);
} else {
return useKeys ? step : iteratorValue(type, iterations++, v, step);
}
}
return iteratorDone();
});
}
return flatSequence;
}
function flatMapFactory(iterable, mapper, context) {
var coerce = iterableClass(iterable);
return iterable.toSeq().map(
function(v, k) {return coerce(mapper.call(context, v, k, iterable))}
).flatten(true);
}
function interposeFactory(iterable, separator) {
var interposedSequence = makeSequence(iterable);
interposedSequence.size = iterable.size && iterable.size * 2 -1;
interposedSequence.__iterateUncached = function(fn, reverse) {var this$0 = this;
var iterations = 0;
iterable.__iterate(function(v, k)
{return (!iterations || fn(separator, iterations++, this$0) !== false) &&
fn(v, iterations++, this$0) !== false},
reverse
);
return iterations;
};
interposedSequence.__iteratorUncached = function(type, reverse) {
var iterator = iterable.__iterator(ITERATE_VALUES, reverse);
var iterations = 0;
var step;
return new Iterator(function() {
if (!step || iterations % 2) {
step = iterator.next();
if (step.done) {
return step;
}
}
return iterations % 2 ?
iteratorValue(type, iterations++, separator) :
iteratorValue(type, iterations++, step.value, step);
});
};
return interposedSequence;
}
function sortFactory(iterable, comparator, mapper) {
if (!comparator) {
comparator = defaultComparator;
}
var isKeyedIterable = isKeyed(iterable);
var index = 0;
var entries = iterable.toSeq().map(
function(v, k) {return [k, v, index++, mapper ? mapper(v, k, iterable) : v]}
).toArray();
entries.sort(function(a, b) {return comparator(a[3], b[3]) || a[2] - b[2]}).forEach(
isKeyedIterable ?
function(v, i) { entries[i].length = 2; } :
function(v, i) { entries[i] = v[1]; }
);
return isKeyedIterable ? KeyedSeq(entries) :
isIndexed(iterable) ? IndexedSeq(entries) :
SetSeq(entries);
}
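  // sortFactory decorates each entry with its original position (a[2]) and
  // breaks comparator ties with that index, so sort() and sortBy() are stable
  // even where the engine's Array#sort is not. Keyed inputs come back as a
  // KeyedSeq, indexed ones as an IndexedSeq, and sets as a SetSeq.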
function maxFactory(iterable, comparator, mapper) {
if (!comparator) {
comparator = defaultComparator;
}
if (mapper) {
var entry = iterable.toSeq()
.map(function(v, k) {return [v, mapper(v, k, iterable)]})
.reduce(function(a, b) {return maxCompare(comparator, a[1], b[1]) ? b : a});
return entry && entry[0];
} else {
return iterable.reduce(function(a, b) {return maxCompare(comparator, a, b) ? b : a});
}
}
function maxCompare(comparator, a, b) {
var comp = comparator(b, a);
// b is considered the new max if the comparator declares them equal, but
// they are not equal and b is in fact a nullish value.
return (comp === 0 && b !== a && (b === undefined || b === null || b !== b)) || comp > 0;
}
function zipWithFactory(keyIter, zipper, iters) {
var zipSequence = makeSequence(keyIter);
zipSequence.size = new ArraySeq(iters).map(function(i ) {return i.size}).min();
// Note: this a generic base implementation of __iterate in terms of
// __iterator which may be more generically useful in the future.
zipSequence.__iterate = function(fn, reverse) {
/* generic:
var iterator = this.__iterator(ITERATE_ENTRIES, reverse);
var step;
var iterations = 0;
while (!(step = iterator.next()).done) {
iterations++;
if (fn(step.value[1], step.value[0], this) === false) {
break;
}
}
return iterations;
*/
// indexed:
var iterator = this.__iterator(ITERATE_VALUES, reverse);
var step;
var iterations = 0;
while (!(step = iterator.next()).done) {
if (fn(step.value, iterations++, this) === false) {
break;
}
}
return iterations;
};
zipSequence.__iteratorUncached = function(type, reverse) {
var iterators = iters.map(function(i )
{return (i = Iterable(i), getIterator(reverse ? i.reverse() : i))}
);
var iterations = 0;
var isDone = false;
return new Iterator(function() {
var steps;
if (!isDone) {
steps = iterators.map(function(i ) {return i.next()});
isDone = steps.some(function(s ) {return s.done});
}
if (isDone) {
return iteratorDone();
}
return iteratorValue(
type,
iterations++,
zipper.apply(null, steps.map(function(s ) {return s.value}))
);
});
};
return zipSequence
}
// #pragma Helper Functions
function reify(iter, seq) {
return isSeq(iter) ? seq : iter.constructor(seq);
}
function validateEntry(entry) {
if (entry !== Object(entry)) {
throw new TypeError('Expected [K, V] tuple: ' + entry);
}
}
function resolveSize(iter) {
assertNotInfinite(iter.size);
return ensureSize(iter);
}
function iterableClass(iterable) {
return isKeyed(iterable) ? KeyedIterable :
isIndexed(iterable) ? IndexedIterable :
SetIterable;
}
function makeSequence(iterable) {
return Object.create(
(
isKeyed(iterable) ? KeyedSeq :
isIndexed(iterable) ? IndexedSeq :
SetSeq
).prototype
);
}
function cacheResultThrough() {
if (this._iter.cacheResult) {
this._iter.cacheResult();
this.size = this._iter.size;
return this;
} else {
return Seq.prototype.cacheResult.call(this);
}
}
function defaultComparator(a, b) {
return a > b ? 1 : a < b ? -1 : 0;
}
function forceIterator(keyPath) {
var iter = getIterator(keyPath);
if (!iter) {
// Array might not be iterable in this environment, so we need a fallback
// to our wrapped type.
if (!isArrayLike(keyPath)) {
throw new TypeError('Expected iterable or array-like: ' + keyPath);
}
iter = getIterator(Iterable(keyPath));
}
return iter;
}
createClass(Record, KeyedCollection);
function Record(defaultValues, name) {
var hasInitialized;
var RecordType = function Record(values) {
if (values instanceof RecordType) {
return values;
}
if (!(this instanceof RecordType)) {
return new RecordType(values);
}
if (!hasInitialized) {
hasInitialized = true;
var keys = Object.keys(defaultValues);
setProps(RecordTypePrototype, keys);
RecordTypePrototype.size = keys.length;
RecordTypePrototype._name = name;
RecordTypePrototype._keys = keys;
RecordTypePrototype._defaultValues = defaultValues;
}
this._map = Map(values);
};
var RecordTypePrototype = RecordType.prototype = Object.create(RecordPrototype);
RecordTypePrototype.constructor = RecordType;
return RecordType;
}
Record.prototype.toString = function() {
return this.__toString(recordName(this) + ' {', '}');
};
// @pragma Access
Record.prototype.has = function(k) {
return this._defaultValues.hasOwnProperty(k);
};
Record.prototype.get = function(k, notSetValue) {
if (!this.has(k)) {
return notSetValue;
}
var defaultVal = this._defaultValues[k];
return this._map ? this._map.get(k, defaultVal) : defaultVal;
};
// @pragma Modification
Record.prototype.clear = function() {
if (this.__ownerID) {
this._map && this._map.clear();
return this;
}
var RecordType = this.constructor;
return RecordType._empty || (RecordType._empty = makeRecord(this, emptyMap()));
};
Record.prototype.set = function(k, v) {
if (!this.has(k)) {
throw new Error('Cannot set unknown key "' + k + '" on ' + recordName(this));
}
if (this._map && !this._map.has(k)) {
var defaultVal = this._defaultValues[k];
if (v === defaultVal) {
return this;
}
}
var newMap = this._map && this._map.set(k, v);
if (this.__ownerID || newMap === this._map) {
return this;
}
return makeRecord(this, newMap);
};
Record.prototype.remove = function(k) {
if (!this.has(k)) {
return this;
}
var newMap = this._map && this._map.remove(k);
if (this.__ownerID || newMap === this._map) {
return this;
}
return makeRecord(this, newMap);
};
Record.prototype.wasAltered = function() {
return this._map.wasAltered();
};
Record.prototype.__iterator = function(type, reverse) {var this$0 = this;
return KeyedIterable(this._defaultValues).map(function(_, k) {return this$0.get(k)}).__iterator(type, reverse);
};
Record.prototype.__iterate = function(fn, reverse) {var this$0 = this;
return KeyedIterable(this._defaultValues).map(function(_, k) {return this$0.get(k)}).__iterate(fn, reverse);
};
Record.prototype.__ensureOwner = function(ownerID) {
if (ownerID === this.__ownerID) {
return this;
}
var newMap = this._map && this._map.__ensureOwner(ownerID);
if (!ownerID) {
this.__ownerID = ownerID;
this._map = newMap;
return this;
}
return makeRecord(this, newMap, ownerID);
};
var RecordPrototype = Record.prototype;
RecordPrototype[DELETE] = RecordPrototype.remove;
RecordPrototype.deleteIn =
RecordPrototype.removeIn = MapPrototype.removeIn;
RecordPrototype.merge = MapPrototype.merge;
RecordPrototype.mergeWith = MapPrototype.mergeWith;
RecordPrototype.mergeIn = MapPrototype.mergeIn;
RecordPrototype.mergeDeep = MapPrototype.mergeDeep;
RecordPrototype.mergeDeepWith = MapPrototype.mergeDeepWith;
RecordPrototype.mergeDeepIn = MapPrototype.mergeDeepIn;
RecordPrototype.setIn = MapPrototype.setIn;
RecordPrototype.update = MapPrototype.update;
RecordPrototype.updateIn = MapPrototype.updateIn;
RecordPrototype.withMutations = MapPrototype.withMutations;
RecordPrototype.asMutable = MapPrototype.asMutable;
RecordPrototype.asImmutable = MapPrototype.asImmutable;
function makeRecord(likeRecord, map, ownerID) {
var record = Object.create(Object.getPrototypeOf(likeRecord));
record._map = map;
record.__ownerID = ownerID;
return record;
}
function recordName(record) {
return record._name || record.constructor.name || 'Record';
}
function setProps(prototype, names) {
try {
names.forEach(setProp.bind(undefined, prototype));
} catch (error) {
// Object.defineProperty failed. Probably IE8.
}
}
function setProp(prototype, name) {
Object.defineProperty(prototype, name, {
get: function() {
return this.get(name);
},
set: function(value) {
invariant(this.__ownerID, 'Cannot set on an immutable record.');
this.set(name, value);
}
});
}
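  // Illustrative usage (sketch of the Record factory defined above):
  //   var ABRecord = Record({ a: 1, b: 2 }, 'ABRecord');
  //   var r = ABRecord({ b: 3 });   // ABRecord { a: 1, b: 3 }
  //   r.a;                          // 1 — read through the generated getter
  //   r.set('b', 4).b;              // 4, on a new record; r is unchanged
  //   r.set('c', 0);                // throws: cannot set an unknown key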
createClass(Set, SetCollection);
// @pragma Construction
function Set(value) {
return value === null || value === undefined ? emptySet() :
isSet(value) && !isOrdered(value) ? value :
emptySet().withMutations(function(set ) {
var iter = SetIterable(value);
assertNotInfinite(iter.size);
iter.forEach(function(v ) {return set.add(v)});
});
}
Set.of = function(/*...values*/) {
return this(arguments);
};
Set.fromKeys = function(value) {
return this(KeyedIterable(value).keySeq());
};
Set.prototype.toString = function() {
return this.__toString('Set {', '}');
};
// @pragma Access
Set.prototype.has = function(value) {
return this._map.has(value);
};
// @pragma Modification
Set.prototype.add = function(value) {
return updateSet(this, this._map.set(value, true));
};
Set.prototype.remove = function(value) {
return updateSet(this, this._map.remove(value));
};
Set.prototype.clear = function() {
return updateSet(this, this._map.clear());
};
// @pragma Composition
Set.prototype.union = function() {var iters = SLICE$0.call(arguments, 0);
iters = iters.filter(function(x ) {return x.size !== 0});
if (iters.length === 0) {
return this;
}
if (this.size === 0 && !this.__ownerID && iters.length === 1) {
return this.constructor(iters[0]);
}
return this.withMutations(function(set ) {
for (var ii = 0; ii < iters.length; ii++) {
SetIterable(iters[ii]).forEach(function(value ) {return set.add(value)});
}
});
};
Set.prototype.intersect = function() {var iters = SLICE$0.call(arguments, 0);
if (iters.length === 0) {
return this;
}
iters = iters.map(function(iter ) {return SetIterable(iter)});
var originalSet = this;
return this.withMutations(function(set ) {
originalSet.forEach(function(value ) {
if (!iters.every(function(iter ) {return iter.includes(value)})) {
set.remove(value);
}
});
});
};
Set.prototype.subtract = function() {var iters = SLICE$0.call(arguments, 0);
if (iters.length === 0) {
return this;
}
iters = iters.map(function(iter ) {return SetIterable(iter)});
var originalSet = this;
return this.withMutations(function(set ) {
originalSet.forEach(function(value ) {
if (iters.some(function(iter ) {return iter.includes(value)})) {
set.remove(value);
}
});
});
};
Set.prototype.merge = function() {
return this.union.apply(this, arguments);
};
Set.prototype.mergeWith = function(merger) {var iters = SLICE$0.call(arguments, 1);
return this.union.apply(this, iters);
};
Set.prototype.sort = function(comparator) {
// Late binding
return OrderedSet(sortFactory(this, comparator));
};
Set.prototype.sortBy = function(mapper, comparator) {
// Late binding
return OrderedSet(sortFactory(this, comparator, mapper));
};
Set.prototype.wasAltered = function() {
return this._map.wasAltered();
};
Set.prototype.__iterate = function(fn, reverse) {var this$0 = this;
return this._map.__iterate(function(_, k) {return fn(k, k, this$0)}, reverse);
};
Set.prototype.__iterator = function(type, reverse) {
return this._map.map(function(_, k) {return k}).__iterator(type, reverse);
};
Set.prototype.__ensureOwner = function(ownerID) {
if (ownerID === this.__ownerID) {
return this;
}
var newMap = this._map.__ensureOwner(ownerID);
if (!ownerID) {
this.__ownerID = ownerID;
this._map = newMap;
return this;
}
return this.__make(newMap, ownerID);
};
function isSet(maybeSet) {
return !!(maybeSet && maybeSet[IS_SET_SENTINEL]);
}
Set.isSet = isSet;
var IS_SET_SENTINEL = '@@__IMMUTABLE_SET__@@';
var SetPrototype = Set.prototype;
SetPrototype[IS_SET_SENTINEL] = true;
SetPrototype[DELETE] = SetPrototype.remove;
SetPrototype.mergeDeep = SetPrototype.merge;
SetPrototype.mergeDeepWith = SetPrototype.mergeWith;
SetPrototype.withMutations = MapPrototype.withMutations;
SetPrototype.asMutable = MapPrototype.asMutable;
SetPrototype.asImmutable = MapPrototype.asImmutable;
SetPrototype.__empty = emptySet;
SetPrototype.__make = makeSet;
function updateSet(set, newMap) {
if (set.__ownerID) {
set.size = newMap.size;
set._map = newMap;
return set;
}
return newMap === set._map ? set :
newMap.size === 0 ? set.__empty() :
set.__make(newMap);
}
function makeSet(map, ownerID) {
var set = Object.create(SetPrototype);
set.size = map ? map.size : 0;
set._map = map;
set.__ownerID = ownerID;
return set;
}
var EMPTY_SET;
function emptySet() {
return EMPTY_SET || (EMPTY_SET = makeSet(emptyMap()));
}
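  // Illustrative usage (sketch of the Set API defined above):
  //   Set.of(1, 2, 3).union([3, 4]);       // Set { 1, 2, 3, 4 }
  //   Set.of(1, 2, 3).intersect([2, 3]);   // Set { 2, 3 }
  //   Set.of(1, 2, 3).subtract([1]);       // Set { 2, 3 }
  // A Set is a thin wrapper over a Map whose keys are the members and whose
  // values are simply `true`.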
createClass(OrderedSet, Set);
// @pragma Construction
function OrderedSet(value) {
return value === null || value === undefined ? emptyOrderedSet() :
isOrderedSet(value) ? value :
emptyOrderedSet().withMutations(function(set ) {
var iter = SetIterable(value);
assertNotInfinite(iter.size);
iter.forEach(function(v ) {return set.add(v)});
});
}
OrderedSet.of = function(/*...values*/) {
return this(arguments);
};
OrderedSet.fromKeys = function(value) {
return this(KeyedIterable(value).keySeq());
};
OrderedSet.prototype.toString = function() {
return this.__toString('OrderedSet {', '}');
};
function isOrderedSet(maybeOrderedSet) {
return isSet(maybeOrderedSet) && isOrdered(maybeOrderedSet);
}
OrderedSet.isOrderedSet = isOrderedSet;
var OrderedSetPrototype = OrderedSet.prototype;
OrderedSetPrototype[IS_ORDERED_SENTINEL] = true;
OrderedSetPrototype.__empty = emptyOrderedSet;
OrderedSetPrototype.__make = makeOrderedSet;
function makeOrderedSet(map, ownerID) {
var set = Object.create(OrderedSetPrototype);
set.size = map ? map.size : 0;
set._map = map;
set.__ownerID = ownerID;
return set;
}
var EMPTY_ORDERED_SET;
function emptyOrderedSet() {
return EMPTY_ORDERED_SET || (EMPTY_ORDERED_SET = makeOrderedSet(emptyOrderedMap()));
}
createClass(Stack, IndexedCollection);
// @pragma Construction
function Stack(value) {
return value === null || value === undefined ? emptyStack() :
isStack(value) ? value :
emptyStack().unshiftAll(value);
}
Stack.of = function(/*...values*/) {
return this(arguments);
};
Stack.prototype.toString = function() {
return this.__toString('Stack [', ']');
};
// @pragma Access
Stack.prototype.get = function(index, notSetValue) {
var head = this._head;
index = wrapIndex(this, index);
while (head && index--) {
head = head.next;
}
return head ? head.value : notSetValue;
};
Stack.prototype.peek = function() {
return this._head && this._head.value;
};
// @pragma Modification
Stack.prototype.push = function(/*...values*/) {
if (arguments.length === 0) {
return this;
}
var newSize = this.size + arguments.length;
var head = this._head;
for (var ii = arguments.length - 1; ii >= 0; ii--) {
head = {
value: arguments[ii],
next: head
};
}
if (this.__ownerID) {
this.size = newSize;
this._head = head;
this.__hash = undefined;
this.__altered = true;
return this;
}
return makeStack(newSize, head);
};
Stack.prototype.pushAll = function(iter) {
iter = IndexedIterable(iter);
if (iter.size === 0) {
return this;
}
assertNotInfinite(iter.size);
var newSize = this.size;
var head = this._head;
iter.reverse().forEach(function(value ) {
newSize++;
head = {
value: value,
next: head
};
});
if (this.__ownerID) {
this.size = newSize;
this._head = head;
this.__hash = undefined;
this.__altered = true;
return this;
}
return makeStack(newSize, head);
};
Stack.prototype.pop = function() {
return this.slice(1);
};
Stack.prototype.unshift = function(/*...values*/) {
return this.push.apply(this, arguments);
};
Stack.prototype.unshiftAll = function(iter) {
return this.pushAll(iter);
};
Stack.prototype.shift = function() {
return this.pop.apply(this, arguments);
};
Stack.prototype.clear = function() {
if (this.size === 0) {
return this;
}
if (this.__ownerID) {
this.size = 0;
this._head = undefined;
this.__hash = undefined;
this.__altered = true;
return this;
}
return emptyStack();
};
Stack.prototype.slice = function(begin, end) {
if (wholeSlice(begin, end, this.size)) {
return this;
}
var resolvedBegin = resolveBegin(begin, this.size);
var resolvedEnd = resolveEnd(end, this.size);
if (resolvedEnd !== this.size) {
// super.slice(begin, end);
return IndexedCollection.prototype.slice.call(this, begin, end);
}
var newSize = this.size - resolvedBegin;
var head = this._head;
while (resolvedBegin--) {
head = head.next;
}
if (this.__ownerID) {
this.size = newSize;
this._head = head;
this.__hash = undefined;
this.__altered = true;
return this;
}
return makeStack(newSize, head);
};
// @pragma Mutability
Stack.prototype.__ensureOwner = function(ownerID) {
if (ownerID === this.__ownerID) {
return this;
}
if (!ownerID) {
this.__ownerID = ownerID;
this.__altered = false;
return this;
}
return makeStack(this.size, this._head, ownerID, this.__hash);
};
// @pragma Iteration
Stack.prototype.__iterate = function(fn, reverse) {
if (reverse) {
return this.reverse().__iterate(fn);
}
var iterations = 0;
var node = this._head;
while (node) {
if (fn(node.value, iterations++, this) === false) {
break;
}
node = node.next;
}
return iterations;
};
Stack.prototype.__iterator = function(type, reverse) {
if (reverse) {
return this.reverse().__iterator(type);
}
var iterations = 0;
var node = this._head;
return new Iterator(function() {
if (node) {
var value = node.value;
node = node.next;
return iteratorValue(type, iterations++, value);
}
return iteratorDone();
});
};
function isStack(maybeStack) {
return !!(maybeStack && maybeStack[IS_STACK_SENTINEL]);
}
Stack.isStack = isStack;
var IS_STACK_SENTINEL = '@@__IMMUTABLE_STACK__@@';
var StackPrototype = Stack.prototype;
StackPrototype[IS_STACK_SENTINEL] = true;
StackPrototype.withMutations = MapPrototype.withMutations;
StackPrototype.asMutable = MapPrototype.asMutable;
StackPrototype.asImmutable = MapPrototype.asImmutable;
StackPrototype.wasAltered = MapPrototype.wasAltered;
function makeStack(size, head, ownerID, hash) {
var map = Object.create(StackPrototype);
map.size = size;
map._head = head;
map.__ownerID = ownerID;
map.__hash = hash;
map.__altered = false;
return map;
}
var EMPTY_STACK;
function emptyStack() {
return EMPTY_STACK || (EMPTY_STACK = makeStack(0));
}
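  // A Stack is a persistent singly-linked list: push, peek and pop are O(1)
  // and touch only the head node. Illustrative usage:
  //   var s = Stack.of(1, 2, 3);   // Stack [ 1, 2, 3 ]
  //   s.peek();                    // 1
  //   s.push(0).peek();            // 0
  //   s.pop().peek();              // 2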
/**
* Contributes additional methods to a constructor
*/
function mixin(ctor, methods) {
var keyCopier = function(key ) { ctor.prototype[key] = methods[key]; };
Object.keys(methods).forEach(keyCopier);
Object.getOwnPropertySymbols &&
Object.getOwnPropertySymbols(methods).forEach(keyCopier);
return ctor;
}
Iterable.Iterator = Iterator;
mixin(Iterable, {
// ### Conversion to other types
toArray: function() {
assertNotInfinite(this.size);
var array = new Array(this.size || 0);
this.valueSeq().__iterate(function(v, i) { array[i] = v; });
return array;
},
toIndexedSeq: function() {
return new ToIndexedSequence(this);
},
toJS: function() {
return this.toSeq().map(
function(value ) {return value && typeof value.toJS === 'function' ? value.toJS() : value}
).__toJS();
},
toJSON: function() {
return this.toSeq().map(
function(value ) {return value && typeof value.toJSON === 'function' ? value.toJSON() : value}
).__toJS();
},
toKeyedSeq: function() {
return new ToKeyedSequence(this, true);
},
toMap: function() {
// Use Late Binding here to solve the circular dependency.
return Map(this.toKeyedSeq());
},
toObject: function() {
assertNotInfinite(this.size);
var object = {};
this.__iterate(function(v, k) { object[k] = v; });
return object;
},
toOrderedMap: function() {
// Use Late Binding here to solve the circular dependency.
return OrderedMap(this.toKeyedSeq());
},
toOrderedSet: function() {
// Use Late Binding here to solve the circular dependency.
return OrderedSet(isKeyed(this) ? this.valueSeq() : this);
},
toSet: function() {
// Use Late Binding here to solve the circular dependency.
return Set(isKeyed(this) ? this.valueSeq() : this);
},
toSetSeq: function() {
return new ToSetSequence(this);
},
toSeq: function() {
return isIndexed(this) ? this.toIndexedSeq() :
isKeyed(this) ? this.toKeyedSeq() :
this.toSetSeq();
},
toStack: function() {
// Use Late Binding here to solve the circular dependency.
return Stack(isKeyed(this) ? this.valueSeq() : this);
},
toList: function() {
// Use Late Binding here to solve the circular dependency.
return List(isKeyed(this) ? this.valueSeq() : this);
},
// ### Common JavaScript methods and properties
toString: function() {
return '[Iterable]';
},
__toString: function(head, tail) {
if (this.size === 0) {
return head + tail;
}
return head + ' ' + this.toSeq().map(this.__toStringMapper).join(', ') + ' ' + tail;
},
// ### ES6 Collection methods (ES6 Array and Map)
concat: function() {var values = SLICE$0.call(arguments, 0);
return reify(this, concatFactory(this, values));
},
includes: function(searchValue) {
return this.some(function(value ) {return is(value, searchValue)});
},
entries: function() {
return this.__iterator(ITERATE_ENTRIES);
},
every: function(predicate, context) {
assertNotInfinite(this.size);
var returnValue = true;
this.__iterate(function(v, k, c) {
if (!predicate.call(context, v, k, c)) {
returnValue = false;
return false;
}
});
return returnValue;
},
filter: function(predicate, context) {
return reify(this, filterFactory(this, predicate, context, true));
},
find: function(predicate, context, notSetValue) {
var entry = this.findEntry(predicate, context);
return entry ? entry[1] : notSetValue;
},
forEach: function(sideEffect, context) {
assertNotInfinite(this.size);
return this.__iterate(context ? sideEffect.bind(context) : sideEffect);
},
join: function(separator) {
assertNotInfinite(this.size);
separator = separator !== undefined ? '' + separator : ',';
var joined = '';
var isFirst = true;
this.__iterate(function(v ) {
isFirst ? (isFirst = false) : (joined += separator);
joined += v !== null && v !== undefined ? v.toString() : '';
});
return joined;
},
keys: function() {
return this.__iterator(ITERATE_KEYS);
},
map: function(mapper, context) {
return reify(this, mapFactory(this, mapper, context));
},
reduce: function(reducer, initialReduction, context) {
assertNotInfinite(this.size);
var reduction;
var useFirst;
if (arguments.length < 2) {
useFirst = true;
} else {
reduction = initialReduction;
}
this.__iterate(function(v, k, c) {
if (useFirst) {
useFirst = false;
reduction = v;
} else {
reduction = reducer.call(context, reduction, v, k, c);
}
});
return reduction;
},
reduceRight: function(reducer, initialReduction, context) {
var reversed = this.toKeyedSeq().reverse();
return reversed.reduce.apply(reversed, arguments);
},
reverse: function() {
return reify(this, reverseFactory(this, true));
},
slice: function(begin, end) {
return reify(this, sliceFactory(this, begin, end, true));
},
some: function(predicate, context) {
return !this.every(not(predicate), context);
},
sort: function(comparator) {
return reify(this, sortFactory(this, comparator));
},
values: function() {
return this.__iterator(ITERATE_VALUES);
},
// ### More sequential methods
butLast: function() {
return this.slice(0, -1);
},
isEmpty: function() {
return this.size !== undefined ? this.size === 0 : !this.some(function() {return true});
},
count: function(predicate, context) {
return ensureSize(
predicate ? this.toSeq().filter(predicate, context) : this
);
},
countBy: function(grouper, context) {
return countByFactory(this, grouper, context);
},
equals: function(other) {
return deepEqual(this, other);
},
entrySeq: function() {
var iterable = this;
if (iterable._cache) {
// We cache as an entries array, so we can just return the cache!
return new ArraySeq(iterable._cache);
}
var entriesSequence = iterable.toSeq().map(entryMapper).toIndexedSeq();
entriesSequence.fromEntrySeq = function() {return iterable.toSeq()};
return entriesSequence;
},
filterNot: function(predicate, context) {
return this.filter(not(predicate), context);
},
findEntry: function(predicate, context, notSetValue) {
var found = notSetValue;
this.__iterate(function(v, k, c) {
if (predicate.call(context, v, k, c)) {
found = [k, v];
return false;
}
});
return found;
},
findKey: function(predicate, context) {
var entry = this.findEntry(predicate, context);
return entry && entry[0];
},
findLast: function(predicate, context, notSetValue) {
return this.toKeyedSeq().reverse().find(predicate, context, notSetValue);
},
findLastEntry: function(predicate, context, notSetValue) {
return this.toKeyedSeq().reverse().findEntry(predicate, context, notSetValue);
},
findLastKey: function(predicate, context) {
return this.toKeyedSeq().reverse().findKey(predicate, context);
},
first: function() {
return this.find(returnTrue);
},
flatMap: function(mapper, context) {
return reify(this, flatMapFactory(this, mapper, context));
},
flatten: function(depth) {
return reify(this, flattenFactory(this, depth, true));
},
fromEntrySeq: function() {
return new FromEntriesSequence(this);
},
get: function(searchKey, notSetValue) {
return this.find(function(_, key) {return is(key, searchKey)}, undefined, notSetValue);
},
getIn: function(searchKeyPath, notSetValue) {
var nested = this;
// Note: in an ES6 environment, we would prefer:
// for (var key of searchKeyPath) {
var iter = forceIterator(searchKeyPath);
var step;
while (!(step = iter.next()).done) {
var key = step.value;
nested = nested && nested.get ? nested.get(key, NOT_SET) : NOT_SET;
if (nested === NOT_SET) {
return notSetValue;
}
}
return nested;
},
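// Illustrative note (not part of the original library): getIn walks the key
// path one level at a time and bails out with notSetValue as soon as a level
// is missing or has no get(). A minimal usage sketch, assuming a nested Map:
//
//     var m = Immutable.Map({ a: Immutable.Map({ b: 1 }) });
//     m.getIn(['a', 'b']);          // 1
//     m.getIn(['a', 'x'], 'none');  // 'none', lookup stops at the missing key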
groupBy: function(grouper, context) {
return groupByFactory(this, grouper, context);
},
has: function(searchKey) {
return this.get(searchKey, NOT_SET) !== NOT_SET;
},
hasIn: function(searchKeyPath) {
return this.getIn(searchKeyPath, NOT_SET) !== NOT_SET;
},
isSubset: function(iter) {
iter = typeof iter.includes === 'function' ? iter : Iterable(iter);
return this.every(function(value ) {return iter.includes(value)});
},
isSuperset: function(iter) {
iter = typeof iter.isSubset === 'function' ? iter : Iterable(iter);
return iter.isSubset(this);
},
keyOf: function(searchValue) {
return this.findKey(function(value ) {return is(value, searchValue)});
},
keySeq: function() {
return this.toSeq().map(keyMapper).toIndexedSeq();
},
last: function() {
return this.toSeq().reverse().first();
},
lastKeyOf: function(searchValue) {
return this.toKeyedSeq().reverse().keyOf(searchValue);
},
max: function(comparator) {
return maxFactory(this, comparator);
},
maxBy: function(mapper, comparator) {
return maxFactory(this, comparator, mapper);
},
min: function(comparator) {
return maxFactory(this, comparator ? neg(comparator) : defaultNegComparator);
},
minBy: function(mapper, comparator) {
return maxFactory(this, comparator ? neg(comparator) : defaultNegComparator, mapper);
},
rest: function() {
return this.slice(1);
},
skip: function(amount) {
return this.slice(Math.max(0, amount));
},
skipLast: function(amount) {
return reify(this, this.toSeq().reverse().skip(amount).reverse());
},
skipWhile: function(predicate, context) {
return reify(this, skipWhileFactory(this, predicate, context, true));
},
skipUntil: function(predicate, context) {
return this.skipWhile(not(predicate), context);
},
sortBy: function(mapper, comparator) {
return reify(this, sortFactory(this, comparator, mapper));
},
take: function(amount) {
return this.slice(0, Math.max(0, amount));
},
takeLast: function(amount) {
return reify(this, this.toSeq().reverse().take(amount).reverse());
},
takeWhile: function(predicate, context) {
return reify(this, takeWhileFactory(this, predicate, context));
},
takeUntil: function(predicate, context) {
return this.takeWhile(not(predicate), context);
},
valueSeq: function() {
return this.toIndexedSeq();
},
// ### Hashable Object
hashCode: function() {
return this.__hash || (this.__hash = hashIterable(this));
}
// ### Internal
// abstract __iterate(fn, reverse)
// abstract __iterator(type, reverse)
});
// var IS_ITERABLE_SENTINEL = '@@__IMMUTABLE_ITERABLE__@@';
// var IS_KEYED_SENTINEL = '@@__IMMUTABLE_KEYED__@@';
// var IS_INDEXED_SENTINEL = '@@__IMMUTABLE_INDEXED__@@';
// var IS_ORDERED_SENTINEL = '@@__IMMUTABLE_ORDERED__@@';
var IterablePrototype = Iterable.prototype;
IterablePrototype[IS_ITERABLE_SENTINEL] = true;
IterablePrototype[ITERATOR_SYMBOL] = IterablePrototype.values;
IterablePrototype.__toJS = IterablePrototype.toArray;
IterablePrototype.__toStringMapper = quoteString;
IterablePrototype.inspect =
IterablePrototype.toSource = function() { return this.toString(); };
IterablePrototype.chain = IterablePrototype.flatMap;
IterablePrototype.contains = IterablePrototype.includes;
mixin(KeyedIterable, {
// ### More sequential methods
flip: function() {
return reify(this, flipFactory(this));
},
mapEntries: function(mapper, context) {var this$0 = this;
var iterations = 0;
return reify(this,
this.toSeq().map(
function(v, k) {return mapper.call(context, [k, v], iterations++, this$0)}
).fromEntrySeq()
);
},
mapKeys: function(mapper, context) {var this$0 = this;
return reify(this,
this.toSeq().flip().map(
function(k, v) {return mapper.call(context, k, v, this$0)}
).flip()
);
}
});
var KeyedIterablePrototype = KeyedIterable.prototype;
KeyedIterablePrototype[IS_KEYED_SENTINEL] = true;
KeyedIterablePrototype[ITERATOR_SYMBOL] = IterablePrototype.entries;
KeyedIterablePrototype.__toJS = IterablePrototype.toObject;
KeyedIterablePrototype.__toStringMapper = function(v, k) {return JSON.stringify(k) + ': ' + quoteString(v)};
mixin(IndexedIterable, {
// ### Conversion to other types
toKeyedSeq: function() {
return new ToKeyedSequence(this, false);
},
// ### ES6 Collection methods (ES6 Array and Map)
filter: function(predicate, context) {
return reify(this, filterFactory(this, predicate, context, false));
},
findIndex: function(predicate, context) {
var entry = this.findEntry(predicate, context);
return entry ? entry[0] : -1;
},
indexOf: function(searchValue) {
var key = this.keyOf(searchValue);
return key === undefined ? -1 : key;
},
lastIndexOf: function(searchValue) {
var key = this.lastKeyOf(searchValue);
return key === undefined ? -1 : key;
},
reverse: function() {
return reify(this, reverseFactory(this, false));
},
slice: function(begin, end) {
return reify(this, sliceFactory(this, begin, end, false));
},
splice: function(index, removeNum /*, ...values*/) {
var numArgs = arguments.length;
removeNum = Math.max(removeNum | 0, 0);
if (numArgs === 0 || (numArgs === 2 && !removeNum)) {
return this;
}
// If index is negative, it should resolve relative to the size of the
// collection. However size may be expensive to compute if not cached, so
// only call count() if the number is in fact negative.
index = resolveBegin(index, index < 0 ? this.count() : this.size);
var spliced = this.slice(0, index);
return reify(
this,
numArgs === 1 ?
spliced :
spliced.concat(arrCopy(arguments, 2), this.slice(index + removeNum))
);
},
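// Illustrative note (not part of the original library): splice resolves a
// negative index against the collection size, keeps everything before it,
// then appends the inserted values and the remainder after removeNum items.
// A usage sketch, assuming an Immutable.List:
//
//     var list = Immutable.List(['a', 'b', 'c', 'd']);
//     list.splice(1, 2, 'x');   // List [ 'a', 'x', 'd' ]
//     list.splice(-1, 1);       // List [ 'a', 'b', 'c' ]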
// ### More collection methods
findLastIndex: function(predicate, context) {
var entry = this.findLastEntry(predicate, context);
return entry ? entry[0] : -1;
},
first: function() {
return this.get(0);
},
flatten: function(depth) {
return reify(this, flattenFactory(this, depth, false));
},
get: function(index, notSetValue) {
index = wrapIndex(this, index);
return (index < 0 || (this.size === Infinity ||
(this.size !== undefined && index > this.size))) ?
notSetValue :
this.find(function(_, key) {return key === index}, undefined, notSetValue);
},
has: function(index) {
index = wrapIndex(this, index);
return index >= 0 && (this.size !== undefined ?
this.size === Infinity || index < this.size :
this.indexOf(index) !== -1
);
},
interpose: function(separator) {
return reify(this, interposeFactory(this, separator));
},
interleave: function(/*...iterables*/) {
var iterables = [this].concat(arrCopy(arguments));
var zipped = zipWithFactory(this.toSeq(), IndexedSeq.of, iterables);
var interleaved = zipped.flatten(true);
if (zipped.size) {
interleaved.size = zipped.size * iterables.length;
}
return reify(this, interleaved);
},
keySeq: function() {
return Range(0, this.size);
},
last: function() {
return this.get(-1);
},
skipWhile: function(predicate, context) {
return reify(this, skipWhileFactory(this, predicate, context, false));
},
zip: function(/*, ...iterables */) {
var iterables = [this].concat(arrCopy(arguments));
return reify(this, zipWithFactory(this, defaultZipper, iterables));
},
zipWith: function(zipper/*, ...iterables */) {
var iterables = arrCopy(arguments);
iterables[0] = this;
return reify(this, zipWithFactory(this, zipper, iterables));
}
});
IndexedIterable.prototype[IS_INDEXED_SENTINEL] = true;
IndexedIterable.prototype[IS_ORDERED_SENTINEL] = true;
mixin(SetIterable, {
// ### ES6 Collection methods (ES6 Array and Map)
get: function(value, notSetValue) {
return this.has(value) ? value : notSetValue;
},
includes: function(value) {
return this.has(value);
},
// ### More sequential methods
keySeq: function() {
return this.valueSeq();
}
});
SetIterable.prototype.has = IterablePrototype.includes;
SetIterable.prototype.contains = SetIterable.prototype.includes;
// Mixin subclasses
mixin(KeyedSeq, KeyedIterable.prototype);
mixin(IndexedSeq, IndexedIterable.prototype);
mixin(SetSeq, SetIterable.prototype);
mixin(KeyedCollection, KeyedIterable.prototype);
mixin(IndexedCollection, IndexedIterable.prototype);
mixin(SetCollection, SetIterable.prototype);
// #pragma Helper functions
function keyMapper(v, k) {
return k;
}
function entryMapper(v, k) {
return [k, v];
}
function not(predicate) {
return function() {
return !predicate.apply(this, arguments);
}
}
function neg(predicate) {
return function() {
return -predicate.apply(this, arguments);
}
}
function quoteString(value) {
return typeof value === 'string' ? JSON.stringify(value) : String(value);
}
function defaultZipper() {
return arrCopy(arguments);
}
function defaultNegComparator(a, b) {
return a < b ? 1 : a > b ? -1 : 0;
}
function hashIterable(iterable) {
if (iterable.size === Infinity) {
return 0;
}
var ordered = isOrdered(iterable);
var keyed = isKeyed(iterable);
var h = ordered ? 1 : 0;
var size = iterable.__iterate(
keyed ?
ordered ?
function(v, k) { h = 31 * h + hashMerge(hash(v), hash(k)) | 0; } :
function(v, k) { h = h + hashMerge(hash(v), hash(k)) | 0; } :
ordered ?
function(v ) { h = 31 * h + hash(v) | 0; } :
function(v ) { h = h + hash(v) | 0; }
);
return murmurHashOfSize(size, h);
}
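// Illustrative note (not part of the original library): the hash above is
// order-sensitive for ordered iterables (each entry hash is folded in with a
// multiply-by-31 step) and order-insensitive for unordered ones (plain
// addition), before the size is mixed in by murmurHashOfSize below. So, e.g.:
//
//     hash of List ['a', 'b']  !==  hash of List ['b', 'a']   (ordered)
//     hash of Set  ['a', 'b']  ===  hash of Set  ['b', 'a']   (unordered)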
function murmurHashOfSize(size, h) {
h = imul(h, 0xCC9E2D51);
h = imul(h << 15 | h >>> -15, 0x1B873593);
h = imul(h << 13 | h >>> -13, 5);
h = (h + 0xE6546B64 | 0) ^ size;
h = imul(h ^ h >>> 16, 0x85EBCA6B);
h = imul(h ^ h >>> 13, 0xC2B2AE35);
h = smi(h ^ h >>> 16);
return h;
}
function hashMerge(a, b) {
return a ^ b + 0x9E3779B9 + (a << 6) + (a >> 2) | 0; // int
}
var Immutable = {
Iterable: Iterable,
Seq: Seq,
Collection: Collection,
Map: Map,
OrderedMap: OrderedMap,
List: List,
Stack: Stack,
Set: Set,
OrderedSet: OrderedSet,
Record: Record,
Range: Range,
Repeat: Repeat,
is: is,
fromJS: fromJS
};
return Immutable;
}));
|
src/thirdparty/immutable.js
|
/**
* Copyright (c) 2014-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*/
(function (global, factory) {
typeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory() :
typeof define === 'function' && define.amd ? define(factory) :
global.Immutable = factory();
}(this, function () { 'use strict';var SLICE$0 = Array.prototype.slice;
function createClass(ctor, superClass) {
if (superClass) {
ctor.prototype = Object.create(superClass.prototype);
}
ctor.prototype.constructor = ctor;
}
function Iterable(value) {
return isIterable(value) ? value : Seq(value);
}
createClass(KeyedIterable, Iterable);
function KeyedIterable(value) {
return isKeyed(value) ? value : KeyedSeq(value);
}
createClass(IndexedIterable, Iterable);
function IndexedIterable(value) {
return isIndexed(value) ? value : IndexedSeq(value);
}
createClass(SetIterable, Iterable);
function SetIterable(value) {
return isIterable(value) && !isAssociative(value) ? value : SetSeq(value);
}
function isIterable(maybeIterable) {
return !!(maybeIterable && maybeIterable[IS_ITERABLE_SENTINEL]);
}
function isKeyed(maybeKeyed) {
return !!(maybeKeyed && maybeKeyed[IS_KEYED_SENTINEL]);
}
function isIndexed(maybeIndexed) {
return !!(maybeIndexed && maybeIndexed[IS_INDEXED_SENTINEL]);
}
function isAssociative(maybeAssociative) {
return isKeyed(maybeAssociative) || isIndexed(maybeAssociative);
}
function isOrdered(maybeOrdered) {
return !!(maybeOrdered && maybeOrdered[IS_ORDERED_SENTINEL]);
}
Iterable.isIterable = isIterable;
Iterable.isKeyed = isKeyed;
Iterable.isIndexed = isIndexed;
Iterable.isAssociative = isAssociative;
Iterable.isOrdered = isOrdered;
Iterable.Keyed = KeyedIterable;
Iterable.Indexed = IndexedIterable;
Iterable.Set = SetIterable;
var IS_ITERABLE_SENTINEL = '@@__IMMUTABLE_ITERABLE__@@';
var IS_KEYED_SENTINEL = '@@__IMMUTABLE_KEYED__@@';
var IS_INDEXED_SENTINEL = '@@__IMMUTABLE_INDEXED__@@';
var IS_ORDERED_SENTINEL = '@@__IMMUTABLE_ORDERED__@@';
// Used for setting prototype methods that IE8 chokes on.
var DELETE = 'delete';
// Constants describing the size of trie nodes.
var SHIFT = 5; // Resulted in best performance after ______?
var SIZE = 1 << SHIFT;
var MASK = SIZE - 1;
// A consistent shared value representing "not set" which equals nothing other
// than itself, and nothing that could be provided externally.
var NOT_SET = {};
// Boolean references, Rough equivalent of `bool &`.
var CHANGE_LENGTH = { value: false };
var DID_ALTER = { value: false };
function MakeRef(ref) {
ref.value = false;
return ref;
}
function SetRef(ref) {
ref && (ref.value = true);
}
// A function which returns a value representing an "owner" for transient writes
// to tries. The return value will only ever equal itself, and will not equal
// the return of any subsequent call of this function.
function OwnerID() {}
// http://jsperf.com/copy-array-inline
function arrCopy(arr, offset) {
offset = offset || 0;
var len = Math.max(0, arr.length - offset);
var newArr = new Array(len);
for (var ii = 0; ii < len; ii++) {
newArr[ii] = arr[ii + offset];
}
return newArr;
}
function ensureSize(iter) {
if (iter.size === undefined) {
iter.size = iter.__iterate(returnTrue);
}
return iter.size;
}
function wrapIndex(iter, index) {
// This implements "is array index" which the ECMAScript spec defines as:
//
// A String property name P is an array index if and only if
// ToString(ToUint32(P)) is equal to P and ToUint32(P) is not equal
// to 2^32−1.
//
// http://www.ecma-international.org/ecma-262/6.0/#sec-array-exotic-objects
if (typeof index !== 'number') {
var uint32Index = index >>> 0; // N >>> 0 is shorthand for ToUint32
if ('' + uint32Index !== index || uint32Index === 4294967295) {
return NaN;
}
index = uint32Index;
}
return index < 0 ? ensureSize(iter) + index : index;
}
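// Illustrative note (not part of the original library): wrapIndex resolves
// negative indices against the (possibly lazily computed) size and rejects
// string keys that are not canonical array indices. A small sketch:
//
//     wrapIndex(list, -1);    // list.size - 1
//     wrapIndex(list, '2');   // 2   ('2' round-trips through ToUint32)
//     wrapIndex(list, '02');  // NaN ('02' is not a canonical array index)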
function returnTrue() {
return true;
}
function wholeSlice(begin, end, size) {
return (begin === 0 || (size !== undefined && begin <= -size)) &&
(end === undefined || (size !== undefined && end >= size));
}
function resolveBegin(begin, size) {
return resolveIndex(begin, size, 0);
}
function resolveEnd(end, size) {
return resolveIndex(end, size, size);
}
function resolveIndex(index, size, defaultIndex) {
return index === undefined ?
defaultIndex :
index < 0 ?
Math.max(0, size + index) :
size === undefined ?
index :
Math.min(size, index);
}
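// Illustrative note (not part of the original library): resolveBegin and
// resolveEnd implement Array#slice-style clamping of begin/end arguments.
// For a size of 5:
//
//     resolveBegin(undefined, 5);  // 0
//     resolveBegin(-2, 5);         // 3
//     resolveEnd(undefined, 5);    // 5
//     resolveEnd(10, 5);           // 5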
/* global Symbol */
var ITERATE_KEYS = 0;
var ITERATE_VALUES = 1;
var ITERATE_ENTRIES = 2;
var REAL_ITERATOR_SYMBOL = typeof Symbol === 'function' && Symbol.iterator;
var FAUX_ITERATOR_SYMBOL = '@@iterator';
var ITERATOR_SYMBOL = REAL_ITERATOR_SYMBOL || FAUX_ITERATOR_SYMBOL;
function Iterator(next) {
this.next = next;
}
Iterator.prototype.toString = function() {
return '[Iterator]';
};
Iterator.KEYS = ITERATE_KEYS;
Iterator.VALUES = ITERATE_VALUES;
Iterator.ENTRIES = ITERATE_ENTRIES;
Iterator.prototype.inspect =
Iterator.prototype.toSource = function () { return this.toString(); }
Iterator.prototype[ITERATOR_SYMBOL] = function () {
return this;
};
function iteratorValue(type, k, v, iteratorResult) {
var value = type === 0 ? k : type === 1 ? v : [k, v];
iteratorResult ? (iteratorResult.value = value) : (iteratorResult = {
value: value, done: false
});
return iteratorResult;
}
function iteratorDone() {
return { value: undefined, done: true };
}
function hasIterator(maybeIterable) {
return !!getIteratorFn(maybeIterable);
}
function isIterator(maybeIterator) {
return maybeIterator && typeof maybeIterator.next === 'function';
}
function getIterator(iterable) {
var iteratorFn = getIteratorFn(iterable);
return iteratorFn && iteratorFn.call(iterable);
}
function getIteratorFn(iterable) {
var iteratorFn = iterable && (
(REAL_ITERATOR_SYMBOL && iterable[REAL_ITERATOR_SYMBOL]) ||
iterable[FAUX_ITERATOR_SYMBOL]
);
if (typeof iteratorFn === 'function') {
return iteratorFn;
}
}
function isArrayLike(value) {
return value && typeof value.length === 'number';
}
createClass(Seq, Iterable);
function Seq(value) {
return value === null || value === undefined ? emptySequence() :
isIterable(value) ? value.toSeq() : seqFromValue(value);
}
Seq.of = function(/*...values*/) {
return Seq(arguments);
};
Seq.prototype.toSeq = function() {
return this;
};
Seq.prototype.toString = function() {
return this.__toString('Seq {', '}');
};
Seq.prototype.cacheResult = function() {
if (!this._cache && this.__iterateUncached) {
this._cache = this.entrySeq().toArray();
this.size = this._cache.length;
}
return this;
};
// abstract __iterateUncached(fn, reverse)
Seq.prototype.__iterate = function(fn, reverse) {
return seqIterate(this, fn, reverse, true);
};
// abstract __iteratorUncached(type, reverse)
Seq.prototype.__iterator = function(type, reverse) {
return seqIterator(this, type, reverse, true);
};
createClass(KeyedSeq, Seq);
function KeyedSeq(value) {
return value === null || value === undefined ?
emptySequence().toKeyedSeq() :
isIterable(value) ?
(isKeyed(value) ? value.toSeq() : value.fromEntrySeq()) :
keyedSeqFromValue(value);
}
KeyedSeq.prototype.toKeyedSeq = function() {
return this;
};
createClass(IndexedSeq, Seq);
function IndexedSeq(value) {
return value === null || value === undefined ? emptySequence() :
!isIterable(value) ? indexedSeqFromValue(value) :
isKeyed(value) ? value.entrySeq() : value.toIndexedSeq();
}
IndexedSeq.of = function(/*...values*/) {
return IndexedSeq(arguments);
};
IndexedSeq.prototype.toIndexedSeq = function() {
return this;
};
IndexedSeq.prototype.toString = function() {
return this.__toString('Seq [', ']');
};
IndexedSeq.prototype.__iterate = function(fn, reverse) {
return seqIterate(this, fn, reverse, false);
};
IndexedSeq.prototype.__iterator = function(type, reverse) {
return seqIterator(this, type, reverse, false);
};
createClass(SetSeq, Seq);
function SetSeq(value) {
return (
value === null || value === undefined ? emptySequence() :
!isIterable(value) ? indexedSeqFromValue(value) :
isKeyed(value) ? value.entrySeq() : value
).toSetSeq();
}
SetSeq.of = function(/*...values*/) {
return SetSeq(arguments);
};
SetSeq.prototype.toSetSeq = function() {
return this;
};
Seq.isSeq = isSeq;
Seq.Keyed = KeyedSeq;
Seq.Set = SetSeq;
Seq.Indexed = IndexedSeq;
var IS_SEQ_SENTINEL = '@@__IMMUTABLE_SEQ__@@';
Seq.prototype[IS_SEQ_SENTINEL] = true;
createClass(ArraySeq, IndexedSeq);
function ArraySeq(array) {
this._array = array;
this.size = array.length;
}
ArraySeq.prototype.get = function(index, notSetValue) {
return this.has(index) ? this._array[wrapIndex(this, index)] : notSetValue;
};
ArraySeq.prototype.__iterate = function(fn, reverse) {
var array = this._array;
var maxIndex = array.length - 1;
for (var ii = 0; ii <= maxIndex; ii++) {
if (fn(array[reverse ? maxIndex - ii : ii], ii, this) === false) {
return ii + 1;
}
}
return ii;
};
ArraySeq.prototype.__iterator = function(type, reverse) {
var array = this._array;
var maxIndex = array.length - 1;
var ii = 0;
return new Iterator(function()
{return ii > maxIndex ?
iteratorDone() :
iteratorValue(type, ii, array[reverse ? maxIndex - ii++ : ii++])}
);
};
createClass(ObjectSeq, KeyedSeq);
function ObjectSeq(object) {
var keys = Object.keys(object);
this._object = object;
this._keys = keys;
this.size = keys.length;
}
ObjectSeq.prototype.get = function(key, notSetValue) {
if (notSetValue !== undefined && !this.has(key)) {
return notSetValue;
}
return this._object[key];
};
ObjectSeq.prototype.has = function(key) {
return this._object.hasOwnProperty(key);
};
ObjectSeq.prototype.__iterate = function(fn, reverse) {
var object = this._object;
var keys = this._keys;
var maxIndex = keys.length - 1;
for (var ii = 0; ii <= maxIndex; ii++) {
var key = keys[reverse ? maxIndex - ii : ii];
if (fn(object[key], key, this) === false) {
return ii + 1;
}
}
return ii;
};
ObjectSeq.prototype.__iterator = function(type, reverse) {
var object = this._object;
var keys = this._keys;
var maxIndex = keys.length - 1;
var ii = 0;
return new Iterator(function() {
var key = keys[reverse ? maxIndex - ii : ii];
return ii++ > maxIndex ?
iteratorDone() :
iteratorValue(type, key, object[key]);
});
};
ObjectSeq.prototype[IS_ORDERED_SENTINEL] = true;
createClass(IterableSeq, IndexedSeq);
function IterableSeq(iterable) {
this._iterable = iterable;
this.size = iterable.length || iterable.size;
}
IterableSeq.prototype.__iterateUncached = function(fn, reverse) {
if (reverse) {
return this.cacheResult().__iterate(fn, reverse);
}
var iterable = this._iterable;
var iterator = getIterator(iterable);
var iterations = 0;
if (isIterator(iterator)) {
var step;
while (!(step = iterator.next()).done) {
if (fn(step.value, iterations++, this) === false) {
break;
}
}
}
return iterations;
};
IterableSeq.prototype.__iteratorUncached = function(type, reverse) {
if (reverse) {
return this.cacheResult().__iterator(type, reverse);
}
var iterable = this._iterable;
var iterator = getIterator(iterable);
if (!isIterator(iterator)) {
return new Iterator(iteratorDone);
}
var iterations = 0;
return new Iterator(function() {
var step = iterator.next();
return step.done ? step : iteratorValue(type, iterations++, step.value);
});
};
createClass(IteratorSeq, IndexedSeq);
function IteratorSeq(iterator) {
this._iterator = iterator;
this._iteratorCache = [];
}
IteratorSeq.prototype.__iterateUncached = function(fn, reverse) {
if (reverse) {
return this.cacheResult().__iterate(fn, reverse);
}
var iterator = this._iterator;
var cache = this._iteratorCache;
var iterations = 0;
while (iterations < cache.length) {
if (fn(cache[iterations], iterations++, this) === false) {
return iterations;
}
}
var step;
while (!(step = iterator.next()).done) {
var val = step.value;
cache[iterations] = val;
if (fn(val, iterations++, this) === false) {
break;
}
}
return iterations;
};
IteratorSeq.prototype.__iteratorUncached = function(type, reverse) {
if (reverse) {
return this.cacheResult().__iterator(type, reverse);
}
var iterator = this._iterator;
var cache = this._iteratorCache;
var iterations = 0;
return new Iterator(function() {
if (iterations >= cache.length) {
var step = iterator.next();
if (step.done) {
return step;
}
cache[iterations] = step.value;
}
return iteratorValue(type, iterations, cache[iterations++]);
});
};
// # pragma Helper functions
function isSeq(maybeSeq) {
return !!(maybeSeq && maybeSeq[IS_SEQ_SENTINEL]);
}
var EMPTY_SEQ;
function emptySequence() {
return EMPTY_SEQ || (EMPTY_SEQ = new ArraySeq([]));
}
function keyedSeqFromValue(value) {
var seq =
Array.isArray(value) ? new ArraySeq(value).fromEntrySeq() :
isIterator(value) ? new IteratorSeq(value).fromEntrySeq() :
hasIterator(value) ? new IterableSeq(value).fromEntrySeq() :
typeof value === 'object' ? new ObjectSeq(value) :
undefined;
if (!seq) {
throw new TypeError(
'Expected Array or iterable object of [k, v] entries, '+
'or keyed object: ' + value
);
}
return seq;
}
function indexedSeqFromValue(value) {
var seq = maybeIndexedSeqFromValue(value);
if (!seq) {
throw new TypeError(
'Expected Array or iterable object of values: ' + value
);
}
return seq;
}
function seqFromValue(value) {
var seq = maybeIndexedSeqFromValue(value) ||
(typeof value === 'object' && new ObjectSeq(value));
if (!seq) {
throw new TypeError(
'Expected Array or iterable object of values, or keyed object: ' + value
);
}
return seq;
}
function maybeIndexedSeqFromValue(value) {
return (
isArrayLike(value) ? new ArraySeq(value) :
isIterator(value) ? new IteratorSeq(value) :
hasIterator(value) ? new IterableSeq(value) :
undefined
);
}
function seqIterate(seq, fn, reverse, useKeys) {
var cache = seq._cache;
if (cache) {
var maxIndex = cache.length - 1;
for (var ii = 0; ii <= maxIndex; ii++) {
var entry = cache[reverse ? maxIndex - ii : ii];
if (fn(entry[1], useKeys ? entry[0] : ii, seq) === false) {
return ii + 1;
}
}
return ii;
}
return seq.__iterateUncached(fn, reverse);
}
function seqIterator(seq, type, reverse, useKeys) {
var cache = seq._cache;
if (cache) {
var maxIndex = cache.length - 1;
var ii = 0;
return new Iterator(function() {
var entry = cache[reverse ? maxIndex - ii : ii];
return ii++ > maxIndex ?
iteratorDone() :
iteratorValue(type, useKeys ? entry[0] : ii - 1, entry[1]);
});
}
return seq.__iteratorUncached(type, reverse);
}
function fromJS(json, converter) {
return converter ?
fromJSWith(converter, json, '', {'': json}) :
fromJSDefault(json);
}
function fromJSWith(converter, json, key, parentJSON) {
if (Array.isArray(json)) {
return converter.call(parentJSON, key, IndexedSeq(json).map(function(v, k) {return fromJSWith(converter, v, k, json)}));
}
if (isPlainObj(json)) {
return converter.call(parentJSON, key, KeyedSeq(json).map(function(v, k) {return fromJSWith(converter, v, k, json)}));
}
return json;
}
function fromJSDefault(json) {
if (Array.isArray(json)) {
return IndexedSeq(json).map(fromJSDefault).toList();
}
if (isPlainObj(json)) {
return KeyedSeq(json).map(fromJSDefault).toMap();
}
return json;
}
function isPlainObj(value) {
return value && (value.constructor === Object || value.constructor === undefined);
}
/**
* An extension of the "same-value" algorithm as [described for use by ES6 Map
* and Set](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Map#Key_equality)
*
* NaN is considered the same as NaN, however -0 and 0 are considered the same
* value, which is different from the algorithm described by
* [`Object.is`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/is).
*
* This is extended further to allow Objects to describe the values they
* represent, by way of `valueOf` or `equals` (and `hashCode`).
*
* Note: because of this extension, the key equality of Immutable.Map and the
* value equality of Immutable.Set will differ from ES6 Map and Set.
*
* ### Defining custom values
*
* The easiest way to describe the value an object represents is by implementing
* `valueOf`. For example, `Date` represents a value by returning a unix
* timestamp for `valueOf`:
*
* var date1 = new Date(1234567890000); // Fri Feb 13 2009 ...
* var date2 = new Date(1234567890000);
* date1.valueOf(); // 1234567890000
* assert( date1 !== date2 );
* assert( Immutable.is( date1, date2 ) );
*
* Note: overriding `valueOf` may have other implications if you use this object
* where JavaScript expects a primitive, such as implicit string coercion.
*
* For more complex types, especially collections, implementing `valueOf` may
* not be performant. An alternative is to implement `equals` and `hashCode`.
*
* `equals` takes another object, presumably of similar type, and returns true
   * if it is equal. Equality is symmetrical, so the same result should be
* returned if this and the argument are flipped.
*
* assert( a.equals(b) === b.equals(a) );
*
* `hashCode` returns a 32bit integer number representing the object which will
* be used to determine how to store the value object in a Map or Set. You must
   * provide both methods or neither; one must not exist without the other.
*
* Also, an important relationship between these methods must be upheld: if two
* values are equal, they *must* return the same hashCode. If the values are not
* equal, they might have the same hashCode; this is called a hash collision,
* and while undesirable for performance reasons, it is acceptable.
*
* if (a.equals(b)) {
* assert( a.hashCode() === b.hashCode() );
* }
*
* All Immutable collections implement `equals` and `hashCode`.
*
*/
function is(valueA, valueB) {
if (valueA === valueB || (valueA !== valueA && valueB !== valueB)) {
return true;
}
if (!valueA || !valueB) {
return false;
}
if (typeof valueA.valueOf === 'function' &&
typeof valueB.valueOf === 'function') {
valueA = valueA.valueOf();
valueB = valueB.valueOf();
if (valueA === valueB || (valueA !== valueA && valueB !== valueB)) {
return true;
}
if (!valueA || !valueB) {
return false;
}
}
if (typeof valueA.equals === 'function' &&
typeof valueB.equals === 'function' &&
valueA.equals(valueB)) {
return true;
}
return false;
}
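// Illustrative note (not part of the original library): a few consequences of
// the algorithm above, in addition to the examples in the comment block:
//
//     is(NaN, NaN);   // true  (same-value semantics for NaN)
//     is(0, -0);      // true  (unlike Object.is)
//     is(Immutable.List([1]), Immutable.List([1]));  // true, via equals()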
function deepEqual(a, b) {
if (a === b) {
return true;
}
if (
!isIterable(b) ||
a.size !== undefined && b.size !== undefined && a.size !== b.size ||
a.__hash !== undefined && b.__hash !== undefined && a.__hash !== b.__hash ||
isKeyed(a) !== isKeyed(b) ||
isIndexed(a) !== isIndexed(b) ||
isOrdered(a) !== isOrdered(b)
) {
return false;
}
if (a.size === 0 && b.size === 0) {
return true;
}
var notAssociative = !isAssociative(a);
if (isOrdered(a)) {
var entries = a.entries();
return b.every(function(v, k) {
var entry = entries.next().value;
return entry && is(entry[1], v) && (notAssociative || is(entry[0], k));
}) && entries.next().done;
}
var flipped = false;
if (a.size === undefined) {
if (b.size === undefined) {
if (typeof a.cacheResult === 'function') {
a.cacheResult();
}
} else {
flipped = true;
var _ = a;
a = b;
b = _;
}
}
var allEqual = true;
var bSize = b.__iterate(function(v, k) {
if (notAssociative ? !a.has(v) :
flipped ? !is(v, a.get(k, NOT_SET)) : !is(a.get(k, NOT_SET), v)) {
allEqual = false;
return false;
}
});
return allEqual && a.size === bSize;
}
createClass(Repeat, IndexedSeq);
function Repeat(value, times) {
if (!(this instanceof Repeat)) {
return new Repeat(value, times);
}
this._value = value;
this.size = times === undefined ? Infinity : Math.max(0, times);
if (this.size === 0) {
if (EMPTY_REPEAT) {
return EMPTY_REPEAT;
}
EMPTY_REPEAT = this;
}
}
Repeat.prototype.toString = function() {
if (this.size === 0) {
return 'Repeat []';
}
return 'Repeat [ ' + this._value + ' ' + this.size + ' times ]';
};
Repeat.prototype.get = function(index, notSetValue) {
return this.has(index) ? this._value : notSetValue;
};
Repeat.prototype.includes = function(searchValue) {
return is(this._value, searchValue);
};
Repeat.prototype.slice = function(begin, end) {
var size = this.size;
return wholeSlice(begin, end, size) ? this :
new Repeat(this._value, resolveEnd(end, size) - resolveBegin(begin, size));
};
Repeat.prototype.reverse = function() {
return this;
};
Repeat.prototype.indexOf = function(searchValue) {
if (is(this._value, searchValue)) {
return 0;
}
return -1;
};
Repeat.prototype.lastIndexOf = function(searchValue) {
if (is(this._value, searchValue)) {
return this.size;
}
return -1;
};
Repeat.prototype.__iterate = function(fn, reverse) {
for (var ii = 0; ii < this.size; ii++) {
if (fn(this._value, ii, this) === false) {
return ii + 1;
}
}
return ii;
};
Repeat.prototype.__iterator = function(type, reverse) {var this$0 = this;
var ii = 0;
return new Iterator(function()
{return ii < this$0.size ? iteratorValue(type, ii++, this$0._value) : iteratorDone()}
);
};
Repeat.prototype.equals = function(other) {
return other instanceof Repeat ?
is(this._value, other._value) :
deepEqual(this, other);
};
var EMPTY_REPEAT;
function invariant(condition, error) {
if (!condition) throw new Error(error);
}
createClass(Range, IndexedSeq);
function Range(start, end, step) {
if (!(this instanceof Range)) {
return new Range(start, end, step);
}
invariant(step !== 0, 'Cannot step a Range by 0');
start = start || 0;
if (end === undefined) {
end = Infinity;
}
step = step === undefined ? 1 : Math.abs(step);
if (end < start) {
step = -step;
}
this._start = start;
this._end = end;
this._step = step;
this.size = Math.max(0, Math.ceil((end - start) / step - 1) + 1);
if (this.size === 0) {
if (EMPTY_RANGE) {
return EMPTY_RANGE;
}
EMPTY_RANGE = this;
}
}
Range.prototype.toString = function() {
if (this.size === 0) {
return 'Range []';
}
return 'Range [ ' +
this._start + '...' + this._end +
(this._step > 1 ? ' by ' + this._step : '') +
' ]';
};
Range.prototype.get = function(index, notSetValue) {
return this.has(index) ?
this._start + wrapIndex(this, index) * this._step :
notSetValue;
};
Range.prototype.includes = function(searchValue) {
var possibleIndex = (searchValue - this._start) / this._step;
return possibleIndex >= 0 &&
possibleIndex < this.size &&
possibleIndex === Math.floor(possibleIndex);
};
Range.prototype.slice = function(begin, end) {
if (wholeSlice(begin, end, this.size)) {
return this;
}
begin = resolveBegin(begin, this.size);
end = resolveEnd(end, this.size);
if (end <= begin) {
return new Range(0, 0);
}
return new Range(this.get(begin, this._end), this.get(end, this._end), this._step);
};
Range.prototype.indexOf = function(searchValue) {
var offsetValue = searchValue - this._start;
if (offsetValue % this._step === 0) {
var index = offsetValue / this._step;
if (index >= 0 && index < this.size) {
return index;
}
}
return -1;
};
Range.prototype.lastIndexOf = function(searchValue) {
return this.indexOf(searchValue);
};
Range.prototype.__iterate = function(fn, reverse) {
var maxIndex = this.size - 1;
var step = this._step;
var value = reverse ? this._start + maxIndex * step : this._start;
for (var ii = 0; ii <= maxIndex; ii++) {
if (fn(value, ii, this) === false) {
return ii + 1;
}
value += reverse ? -step : step;
}
return ii;
};
Range.prototype.__iterator = function(type, reverse) {
var maxIndex = this.size - 1;
var step = this._step;
var value = reverse ? this._start + maxIndex * step : this._start;
var ii = 0;
return new Iterator(function() {
var v = value;
value += reverse ? -step : step;
return ii > maxIndex ? iteratorDone() : iteratorValue(type, ii++, v);
});
};
Range.prototype.equals = function(other) {
return other instanceof Range ?
this._start === other._start &&
this._end === other._end &&
this._step === other._step :
deepEqual(this, other);
};
var EMPTY_RANGE;
createClass(Collection, Iterable);
function Collection() {
throw TypeError('Abstract');
}
createClass(KeyedCollection, Collection);function KeyedCollection() {}
createClass(IndexedCollection, Collection);function IndexedCollection() {}
createClass(SetCollection, Collection);function SetCollection() {}
Collection.Keyed = KeyedCollection;
Collection.Indexed = IndexedCollection;
Collection.Set = SetCollection;
var imul =
typeof Math.imul === 'function' && Math.imul(0xffffffff, 2) === -2 ?
Math.imul :
function imul(a, b) {
a = a | 0; // int
b = b | 0; // int
var c = a & 0xffff;
var d = b & 0xffff;
// Shift by 0 fixes the sign on the high part.
return (c * d) + ((((a >>> 16) * d + c * (b >>> 16)) << 16) >>> 0) | 0; // int
};
// v8 has an optimization for storing 31-bit signed numbers.
// Values which have either 00 or 11 as the high order bits qualify.
// This function drops the highest order bit in a signed number, maintaining
// the sign bit.
function smi(i32) {
return ((i32 >>> 1) & 0x40000000) | (i32 & 0xBFFFFFFF);
}
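// Illustrative note (not part of the original library): smi forces bit 30 to
// match the sign bit so the result always fits v8's small-integer range:
//
//     smi(0x7FFFFFFF);  // 1073741823, the stray high bit is dropped
//     smi(-1);          // -1, negative values keep 11 as their two high bits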
function hash(o) {
if (o === false || o === null || o === undefined) {
return 0;
}
if (typeof o.valueOf === 'function') {
o = o.valueOf();
if (o === false || o === null || o === undefined) {
return 0;
}
}
if (o === true) {
return 1;
}
var type = typeof o;
if (type === 'number') {
var h = o | 0;
if (h !== o) {
h ^= o * 0xFFFFFFFF;
}
while (o > 0xFFFFFFFF) {
o /= 0xFFFFFFFF;
h ^= o;
}
return smi(h);
}
if (type === 'string') {
return o.length > STRING_HASH_CACHE_MIN_STRLEN ? cachedHashString(o) : hashString(o);
}
if (typeof o.hashCode === 'function') {
return o.hashCode();
}
if (type === 'object') {
return hashJSObj(o);
}
if (typeof o.toString === 'function') {
return hashString(o.toString());
}
throw new Error('Value type ' + type + ' cannot be hashed.');
}
function cachedHashString(string) {
var hash = stringHashCache[string];
if (hash === undefined) {
hash = hashString(string);
if (STRING_HASH_CACHE_SIZE === STRING_HASH_CACHE_MAX_SIZE) {
STRING_HASH_CACHE_SIZE = 0;
stringHashCache = {};
}
STRING_HASH_CACHE_SIZE++;
stringHashCache[string] = hash;
}
return hash;
}
// http://jsperf.com/hashing-strings
function hashString(string) {
// This is the hash from JVM
// The hash code for a string is computed as
// s[0] * 31 ^ (n - 1) + s[1] * 31 ^ (n - 2) + ... + s[n - 1],
// where s[i] is the ith character of the string and n is the length of
// the string. We "mod" the result to make it between 0 (inclusive) and 2^31
// (exclusive) by dropping high bits.
var hash = 0;
for (var ii = 0; ii < string.length; ii++) {
hash = 31 * hash + string.charCodeAt(ii) | 0;
}
return smi(hash);
}
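// Illustrative note (not part of the original library): a tiny worked example
// of the JVM-style hash above. For the string 'ab' (char codes 97 and 98):
//
//     hash = 31 * 0  + 97 = 97
//     hash = 31 * 97 + 98 = 3105
//     smi(3105) === 3105, so hashString('ab') === 3105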
function hashJSObj(obj) {
var hash;
if (usingWeakMap) {
hash = weakMap.get(obj);
if (hash !== undefined) {
return hash;
}
}
hash = obj[UID_HASH_KEY];
if (hash !== undefined) {
return hash;
}
if (!canDefineProperty) {
hash = obj.propertyIsEnumerable && obj.propertyIsEnumerable[UID_HASH_KEY];
if (hash !== undefined) {
return hash;
}
hash = getIENodeHash(obj);
if (hash !== undefined) {
return hash;
}
}
hash = ++objHashUID;
if (objHashUID & 0x40000000) {
objHashUID = 0;
}
if (usingWeakMap) {
weakMap.set(obj, hash);
} else if (isExtensible !== undefined && isExtensible(obj) === false) {
throw new Error('Non-extensible objects are not allowed as keys.');
} else if (canDefineProperty) {
Object.defineProperty(obj, UID_HASH_KEY, {
'enumerable': false,
'configurable': false,
'writable': false,
'value': hash
});
} else if (obj.propertyIsEnumerable !== undefined &&
obj.propertyIsEnumerable === obj.constructor.prototype.propertyIsEnumerable) {
// Since we can't define a non-enumerable property on the object
// we'll hijack one of the less-used non-enumerable properties to
// save our hash on it. Since this is a function it will not show up in
// `JSON.stringify` which is what we want.
obj.propertyIsEnumerable = function() {
return this.constructor.prototype.propertyIsEnumerable.apply(this, arguments);
};
obj.propertyIsEnumerable[UID_HASH_KEY] = hash;
} else if (obj.nodeType !== undefined) {
// At this point we couldn't get the IE `uniqueID` to use as a hash
// and we couldn't use a non-enumerable property to exploit the
// dontEnum bug so we simply add the `UID_HASH_KEY` on the node
// itself.
obj[UID_HASH_KEY] = hash;
} else {
throw new Error('Unable to set a non-enumerable property on object.');
}
return hash;
}
// Get references to ES5 object methods.
var isExtensible = Object.isExtensible;
// True if Object.defineProperty works as expected. IE8 fails this test.
var canDefineProperty = (function() {
try {
Object.defineProperty({}, '@', {});
return true;
} catch (e) {
return false;
}
}());
// IE has a `uniqueID` property on DOM nodes. We can construct the hash from it
// and avoid memory leaks from the IE cloneNode bug.
function getIENodeHash(node) {
if (node && node.nodeType > 0) {
switch (node.nodeType) {
case 1: // Element
return node.uniqueID;
case 9: // Document
return node.documentElement && node.documentElement.uniqueID;
}
}
}
// If possible, use a WeakMap.
var usingWeakMap = typeof WeakMap === 'function';
var weakMap;
if (usingWeakMap) {
weakMap = new WeakMap();
}
var objHashUID = 0;
var UID_HASH_KEY = '__immutablehash__';
if (typeof Symbol === 'function') {
UID_HASH_KEY = Symbol(UID_HASH_KEY);
}
var STRING_HASH_CACHE_MIN_STRLEN = 16;
var STRING_HASH_CACHE_MAX_SIZE = 255;
var STRING_HASH_CACHE_SIZE = 0;
var stringHashCache = {};
function assertNotInfinite(size) {
invariant(
size !== Infinity,
'Cannot perform this action with an infinite size.'
);
}
createClass(Map, KeyedCollection);
// @pragma Construction
function Map(value) {
return value === null || value === undefined ? emptyMap() :
isMap(value) && !isOrdered(value) ? value :
emptyMap().withMutations(function(map ) {
var iter = KeyedIterable(value);
assertNotInfinite(iter.size);
iter.forEach(function(v, k) {return map.set(k, v)});
});
}
Map.prototype.toString = function() {
return this.__toString('Map {', '}');
};
// @pragma Access
Map.prototype.get = function(k, notSetValue) {
return this._root ?
this._root.get(0, undefined, k, notSetValue) :
notSetValue;
};
// @pragma Modification
Map.prototype.set = function(k, v) {
return updateMap(this, k, v);
};
Map.prototype.setIn = function(keyPath, v) {
return this.updateIn(keyPath, NOT_SET, function() {return v});
};
Map.prototype.remove = function(k) {
return updateMap(this, k, NOT_SET);
};
Map.prototype.deleteIn = function(keyPath) {
return this.updateIn(keyPath, function() {return NOT_SET});
};
Map.prototype.update = function(k, notSetValue, updater) {
return arguments.length === 1 ?
k(this) :
this.updateIn([k], notSetValue, updater);
};
Map.prototype.updateIn = function(keyPath, notSetValue, updater) {
if (!updater) {
updater = notSetValue;
notSetValue = undefined;
}
var updatedValue = updateInDeepMap(
this,
forceIterator(keyPath),
notSetValue,
updater
);
return updatedValue === NOT_SET ? undefined : updatedValue;
};
Map.prototype.clear = function() {
if (this.size === 0) {
return this;
}
if (this.__ownerID) {
this.size = 0;
this._root = null;
this.__hash = undefined;
this.__altered = true;
return this;
}
return emptyMap();
};
// @pragma Composition
Map.prototype.merge = function(/*...iters*/) {
return mergeIntoMapWith(this, undefined, arguments);
};
Map.prototype.mergeWith = function(merger) {var iters = SLICE$0.call(arguments, 1);
return mergeIntoMapWith(this, merger, iters);
};
Map.prototype.mergeIn = function(keyPath) {var iters = SLICE$0.call(arguments, 1);
return this.updateIn(
keyPath,
emptyMap(),
function(m ) {return typeof m.merge === 'function' ?
m.merge.apply(m, iters) :
iters[iters.length - 1]}
);
};
Map.prototype.mergeDeep = function(/*...iters*/) {
return mergeIntoMapWith(this, deepMerger, arguments);
};
Map.prototype.mergeDeepWith = function(merger) {var iters = SLICE$0.call(arguments, 1);
return mergeIntoMapWith(this, deepMergerWith(merger), iters);
};
Map.prototype.mergeDeepIn = function(keyPath) {var iters = SLICE$0.call(arguments, 1);
return this.updateIn(
keyPath,
emptyMap(),
function(m ) {return typeof m.mergeDeep === 'function' ?
m.mergeDeep.apply(m, iters) :
iters[iters.length - 1]}
);
};
Map.prototype.sort = function(comparator) {
// Late binding
return OrderedMap(sortFactory(this, comparator));
};
Map.prototype.sortBy = function(mapper, comparator) {
// Late binding
return OrderedMap(sortFactory(this, comparator, mapper));
};
// @pragma Mutability
Map.prototype.withMutations = function(fn) {
var mutable = this.asMutable();
fn(mutable);
return mutable.wasAltered() ? mutable.__ensureOwner(this.__ownerID) : this;
};
Map.prototype.asMutable = function() {
return this.__ownerID ? this : this.__ensureOwner(new OwnerID());
};
Map.prototype.asImmutable = function() {
return this.__ensureOwner();
};
Map.prototype.wasAltered = function() {
return this.__altered;
};
Map.prototype.__iterator = function(type, reverse) {
return new MapIterator(this, type, reverse);
};
Map.prototype.__iterate = function(fn, reverse) {var this$0 = this;
var iterations = 0;
this._root && this._root.iterate(function(entry ) {
iterations++;
return fn(entry[1], entry[0], this$0);
}, reverse);
return iterations;
};
Map.prototype.__ensureOwner = function(ownerID) {
if (ownerID === this.__ownerID) {
return this;
}
if (!ownerID) {
this.__ownerID = ownerID;
this.__altered = false;
return this;
}
return makeMap(this.size, this._root, ownerID, this.__hash);
};
function isMap(maybeMap) {
return !!(maybeMap && maybeMap[IS_MAP_SENTINEL]);
}
Map.isMap = isMap;
var IS_MAP_SENTINEL = '@@__IMMUTABLE_MAP__@@';
var MapPrototype = Map.prototype;
MapPrototype[IS_MAP_SENTINEL] = true;
MapPrototype[DELETE] = MapPrototype.remove;
MapPrototype.removeIn = MapPrototype.deleteIn;
// #pragma Trie Nodes
function ArrayMapNode(ownerID, entries) {
this.ownerID = ownerID;
this.entries = entries;
}
ArrayMapNode.prototype.get = function(shift, keyHash, key, notSetValue) {
var entries = this.entries;
for (var ii = 0, len = entries.length; ii < len; ii++) {
if (is(key, entries[ii][0])) {
return entries[ii][1];
}
}
return notSetValue;
};
ArrayMapNode.prototype.update = function(ownerID, shift, keyHash, key, value, didChangeSize, didAlter) {
var removed = value === NOT_SET;
var entries = this.entries;
var idx = 0;
for (var len = entries.length; idx < len; idx++) {
if (is(key, entries[idx][0])) {
break;
}
}
var exists = idx < len;
if (exists ? entries[idx][1] === value : removed) {
return this;
}
SetRef(didAlter);
(removed || !exists) && SetRef(didChangeSize);
if (removed && entries.length === 1) {
return; // undefined
}
if (!exists && !removed && entries.length >= MAX_ARRAY_MAP_SIZE) {
return createNodes(ownerID, entries, key, value);
}
var isEditable = ownerID && ownerID === this.ownerID;
var newEntries = isEditable ? entries : arrCopy(entries);
if (exists) {
if (removed) {
idx === len - 1 ? newEntries.pop() : (newEntries[idx] = newEntries.pop());
} else {
newEntries[idx] = [key, value];
}
} else {
newEntries.push([key, value]);
}
if (isEditable) {
this.entries = newEntries;
return this;
}
return new ArrayMapNode(ownerID, newEntries);
};
function BitmapIndexedNode(ownerID, bitmap, nodes) {
this.ownerID = ownerID;
this.bitmap = bitmap;
this.nodes = nodes;
}
BitmapIndexedNode.prototype.get = function(shift, keyHash, key, notSetValue) {
if (keyHash === undefined) {
keyHash = hash(key);
}
var bit = (1 << ((shift === 0 ? keyHash : keyHash >>> shift) & MASK));
var bitmap = this.bitmap;
return (bitmap & bit) === 0 ? notSetValue :
this.nodes[popCount(bitmap & (bit - 1))].get(shift + SHIFT, keyHash, key, notSetValue);
};
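// Illustrative note (not part of the original library): the bitmap marks which
// of the 32 possible hash fragments are present, and popCount of the bits
// below the looked-up fragment gives that child's position in the packed
// nodes array. For example, with bitmap 0b10110 a fragment of 4 maps to
// popCount(0b10110 & 0b01111) = popCount(0b00110) = 2, i.e. nodes[2].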
BitmapIndexedNode.prototype.update = function(ownerID, shift, keyHash, key, value, didChangeSize, didAlter) {
if (keyHash === undefined) {
keyHash = hash(key);
}
var keyHashFrag = (shift === 0 ? keyHash : keyHash >>> shift) & MASK;
var bit = 1 << keyHashFrag;
var bitmap = this.bitmap;
var exists = (bitmap & bit) !== 0;
if (!exists && value === NOT_SET) {
return this;
}
var idx = popCount(bitmap & (bit - 1));
var nodes = this.nodes;
var node = exists ? nodes[idx] : undefined;
var newNode = updateNode(node, ownerID, shift + SHIFT, keyHash, key, value, didChangeSize, didAlter);
if (newNode === node) {
return this;
}
if (!exists && newNode && nodes.length >= MAX_BITMAP_INDEXED_SIZE) {
return expandNodes(ownerID, nodes, bitmap, keyHashFrag, newNode);
}
if (exists && !newNode && nodes.length === 2 && isLeafNode(nodes[idx ^ 1])) {
return nodes[idx ^ 1];
}
if (exists && newNode && nodes.length === 1 && isLeafNode(newNode)) {
return newNode;
}
var isEditable = ownerID && ownerID === this.ownerID;
var newBitmap = exists ? newNode ? bitmap : bitmap ^ bit : bitmap | bit;
var newNodes = exists ? newNode ?
setIn(nodes, idx, newNode, isEditable) :
spliceOut(nodes, idx, isEditable) :
spliceIn(nodes, idx, newNode, isEditable);
if (isEditable) {
this.bitmap = newBitmap;
this.nodes = newNodes;
return this;
}
return new BitmapIndexedNode(ownerID, newBitmap, newNodes);
};
function HashArrayMapNode(ownerID, count, nodes) {
this.ownerID = ownerID;
this.count = count;
this.nodes = nodes;
}
HashArrayMapNode.prototype.get = function(shift, keyHash, key, notSetValue) {
if (keyHash === undefined) {
keyHash = hash(key);
}
var idx = (shift === 0 ? keyHash : keyHash >>> shift) & MASK;
var node = this.nodes[idx];
return node ? node.get(shift + SHIFT, keyHash, key, notSetValue) : notSetValue;
};
HashArrayMapNode.prototype.update = function(ownerID, shift, keyHash, key, value, didChangeSize, didAlter) {
if (keyHash === undefined) {
keyHash = hash(key);
}
var idx = (shift === 0 ? keyHash : keyHash >>> shift) & MASK;
var removed = value === NOT_SET;
var nodes = this.nodes;
var node = nodes[idx];
if (removed && !node) {
return this;
}
var newNode = updateNode(node, ownerID, shift + SHIFT, keyHash, key, value, didChangeSize, didAlter);
if (newNode === node) {
return this;
}
var newCount = this.count;
if (!node) {
newCount++;
} else if (!newNode) {
newCount--;
if (newCount < MIN_HASH_ARRAY_MAP_SIZE) {
return packNodes(ownerID, nodes, newCount, idx);
}
}
var isEditable = ownerID && ownerID === this.ownerID;
var newNodes = setIn(nodes, idx, newNode, isEditable);
if (isEditable) {
this.count = newCount;
this.nodes = newNodes;
return this;
}
return new HashArrayMapNode(ownerID, newCount, newNodes);
};
function HashCollisionNode(ownerID, keyHash, entries) {
this.ownerID = ownerID;
this.keyHash = keyHash;
this.entries = entries;
}
HashCollisionNode.prototype.get = function(shift, keyHash, key, notSetValue) {
var entries = this.entries;
for (var ii = 0, len = entries.length; ii < len; ii++) {
if (is(key, entries[ii][0])) {
return entries[ii][1];
}
}
return notSetValue;
};
HashCollisionNode.prototype.update = function(ownerID, shift, keyHash, key, value, didChangeSize, didAlter) {
if (keyHash === undefined) {
keyHash = hash(key);
}
var removed = value === NOT_SET;
if (keyHash !== this.keyHash) {
if (removed) {
return this;
}
SetRef(didAlter);
SetRef(didChangeSize);
return mergeIntoNode(this, ownerID, shift, keyHash, [key, value]);
}
var entries = this.entries;
var idx = 0;
for (var len = entries.length; idx < len; idx++) {
if (is(key, entries[idx][0])) {
break;
}
}
var exists = idx < len;
if (exists ? entries[idx][1] === value : removed) {
return this;
}
SetRef(didAlter);
(removed || !exists) && SetRef(didChangeSize);
if (removed && len === 2) {
return new ValueNode(ownerID, this.keyHash, entries[idx ^ 1]);
}
var isEditable = ownerID && ownerID === this.ownerID;
var newEntries = isEditable ? entries : arrCopy(entries);
if (exists) {
if (removed) {
idx === len - 1 ? newEntries.pop() : (newEntries[idx] = newEntries.pop());
} else {
newEntries[idx] = [key, value];
}
} else {
newEntries.push([key, value]);
}
if (isEditable) {
this.entries = newEntries;
return this;
}
return new HashCollisionNode(ownerID, this.keyHash, newEntries);
};
function ValueNode(ownerID, keyHash, entry) {
this.ownerID = ownerID;
this.keyHash = keyHash;
this.entry = entry;
}
ValueNode.prototype.get = function(shift, keyHash, key, notSetValue) {
return is(key, this.entry[0]) ? this.entry[1] : notSetValue;
};
ValueNode.prototype.update = function(ownerID, shift, keyHash, key, value, didChangeSize, didAlter) {
var removed = value === NOT_SET;
var keyMatch = is(key, this.entry[0]);
if (keyMatch ? value === this.entry[1] : removed) {
return this;
}
SetRef(didAlter);
if (removed) {
SetRef(didChangeSize);
return; // undefined
}
if (keyMatch) {
if (ownerID && ownerID === this.ownerID) {
this.entry[1] = value;
return this;
}
return new ValueNode(ownerID, this.keyHash, [key, value]);
}
SetRef(didChangeSize);
return mergeIntoNode(this, ownerID, shift, hash(key), [key, value]);
};
// #pragma Iterators
ArrayMapNode.prototype.iterate =
HashCollisionNode.prototype.iterate = function (fn, reverse) {
var entries = this.entries;
for (var ii = 0, maxIndex = entries.length - 1; ii <= maxIndex; ii++) {
if (fn(entries[reverse ? maxIndex - ii : ii]) === false) {
return false;
}
}
}
BitmapIndexedNode.prototype.iterate =
HashArrayMapNode.prototype.iterate = function (fn, reverse) {
var nodes = this.nodes;
for (var ii = 0, maxIndex = nodes.length - 1; ii <= maxIndex; ii++) {
var node = nodes[reverse ? maxIndex - ii : ii];
if (node && node.iterate(fn, reverse) === false) {
return false;
}
}
}
ValueNode.prototype.iterate = function (fn, reverse) {
return fn(this.entry);
}
createClass(MapIterator, Iterator);
function MapIterator(map, type, reverse) {
this._type = type;
this._reverse = reverse;
this._stack = map._root && mapIteratorFrame(map._root);
}
MapIterator.prototype.next = function() {
var type = this._type;
var stack = this._stack;
while (stack) {
var node = stack.node;
var index = stack.index++;
var maxIndex;
if (node.entry) {
if (index === 0) {
return mapIteratorValue(type, node.entry);
}
} else if (node.entries) {
maxIndex = node.entries.length - 1;
if (index <= maxIndex) {
return mapIteratorValue(type, node.entries[this._reverse ? maxIndex - index : index]);
}
} else {
maxIndex = node.nodes.length - 1;
if (index <= maxIndex) {
var subNode = node.nodes[this._reverse ? maxIndex - index : index];
if (subNode) {
if (subNode.entry) {
return mapIteratorValue(type, subNode.entry);
}
stack = this._stack = mapIteratorFrame(subNode, stack);
}
continue;
}
}
stack = this._stack = this._stack.__prev;
}
return iteratorDone();
};
function mapIteratorValue(type, entry) {
return iteratorValue(type, entry[0], entry[1]);
}
function mapIteratorFrame(node, prev) {
return {
node: node,
index: 0,
__prev: prev
};
}
function makeMap(size, root, ownerID, hash) {
var map = Object.create(MapPrototype);
map.size = size;
map._root = root;
map.__ownerID = ownerID;
map.__hash = hash;
map.__altered = false;
return map;
}
var EMPTY_MAP;
function emptyMap() {
return EMPTY_MAP || (EMPTY_MAP = makeMap(0));
}
function updateMap(map, k, v) {
var newRoot;
var newSize;
if (!map._root) {
if (v === NOT_SET) {
return map;
}
newSize = 1;
newRoot = new ArrayMapNode(map.__ownerID, [[k, v]]);
} else {
var didChangeSize = MakeRef(CHANGE_LENGTH);
var didAlter = MakeRef(DID_ALTER);
newRoot = updateNode(map._root, map.__ownerID, 0, undefined, k, v, didChangeSize, didAlter);
if (!didAlter.value) {
return map;
}
newSize = map.size + (didChangeSize.value ? v === NOT_SET ? -1 : 1 : 0);
}
if (map.__ownerID) {
map.size = newSize;
map._root = newRoot;
map.__hash = undefined;
map.__altered = true;
return map;
}
return newRoot ? makeMap(newSize, newRoot) : emptyMap();
}
function updateNode(node, ownerID, shift, keyHash, key, value, didChangeSize, didAlter) {
if (!node) {
if (value === NOT_SET) {
return node;
}
SetRef(didAlter);
SetRef(didChangeSize);
return new ValueNode(ownerID, keyHash, [key, value]);
}
return node.update(ownerID, shift, keyHash, key, value, didChangeSize, didAlter);
}
function isLeafNode(node) {
return node.constructor === ValueNode || node.constructor === HashCollisionNode;
}
function mergeIntoNode(node, ownerID, shift, keyHash, entry) {
if (node.keyHash === keyHash) {
return new HashCollisionNode(ownerID, keyHash, [node.entry, entry]);
}
var idx1 = (shift === 0 ? node.keyHash : node.keyHash >>> shift) & MASK;
var idx2 = (shift === 0 ? keyHash : keyHash >>> shift) & MASK;
var newNode;
var nodes = idx1 === idx2 ?
[mergeIntoNode(node, ownerID, shift + SHIFT, keyHash, entry)] :
((newNode = new ValueNode(ownerID, keyHash, entry)), idx1 < idx2 ? [node, newNode] : [newNode, node]);
return new BitmapIndexedNode(ownerID, (1 << idx1) | (1 << idx2), nodes);
}
function createNodes(ownerID, entries, key, value) {
if (!ownerID) {
ownerID = new OwnerID();
}
var node = new ValueNode(ownerID, hash(key), [key, value]);
for (var ii = 0; ii < entries.length; ii++) {
var entry = entries[ii];
node = node.update(ownerID, 0, undefined, entry[0], entry[1]);
}
return node;
}
function packNodes(ownerID, nodes, count, excluding) {
var bitmap = 0;
var packedII = 0;
var packedNodes = new Array(count);
for (var ii = 0, bit = 1, len = nodes.length; ii < len; ii++, bit <<= 1) {
var node = nodes[ii];
if (node !== undefined && ii !== excluding) {
bitmap |= bit;
packedNodes[packedII++] = node;
}
}
return new BitmapIndexedNode(ownerID, bitmap, packedNodes);
}
function expandNodes(ownerID, nodes, bitmap, including, node) {
var count = 0;
var expandedNodes = new Array(SIZE);
for (var ii = 0; bitmap !== 0; ii++, bitmap >>>= 1) {
expandedNodes[ii] = bitmap & 1 ? nodes[count++] : undefined;
}
expandedNodes[including] = node;
return new HashArrayMapNode(ownerID, count + 1, expandedNodes);
}
function mergeIntoMapWith(map, merger, iterables) {
var iters = [];
for (var ii = 0; ii < iterables.length; ii++) {
var value = iterables[ii];
var iter = KeyedIterable(value);
if (!isIterable(value)) {
iter = iter.map(function(v ) {return fromJS(v)});
}
iters.push(iter);
}
return mergeIntoCollectionWith(map, merger, iters);
}
function deepMerger(existing, value, key) {
return existing && existing.mergeDeep && isIterable(value) ?
existing.mergeDeep(value) :
is(existing, value) ? existing : value;
}
function deepMergerWith(merger) {
return function(existing, value, key) {
if (existing && existing.mergeDeepWith && isIterable(value)) {
return existing.mergeDeepWith(merger, value);
}
var nextValue = merger(existing, value, key);
return is(existing, nextValue) ? existing : nextValue;
};
}
function mergeIntoCollectionWith(collection, merger, iters) {
iters = iters.filter(function(x ) {return x.size !== 0});
if (iters.length === 0) {
return collection;
}
if (collection.size === 0 && !collection.__ownerID && iters.length === 1) {
return collection.constructor(iters[0]);
}
return collection.withMutations(function(collection ) {
var mergeIntoMap = merger ?
function(value, key) {
collection.update(key, NOT_SET, function(existing )
{return existing === NOT_SET ? value : merger(existing, value, key)}
);
} :
function(value, key) {
collection.set(key, value);
};
for (var ii = 0; ii < iters.length; ii++) {
iters[ii].forEach(mergeIntoMap);
}
});
}
function updateInDeepMap(existing, keyPathIter, notSetValue, updater) {
var isNotSet = existing === NOT_SET;
var step = keyPathIter.next();
if (step.done) {
var existingValue = isNotSet ? notSetValue : existing;
var newValue = updater(existingValue);
return newValue === existingValue ? existing : newValue;
}
invariant(
isNotSet || (existing && existing.set),
'invalid keyPath'
);
var key = step.value;
var nextExisting = isNotSet ? NOT_SET : existing.get(key, NOT_SET);
var nextUpdated = updateInDeepMap(
nextExisting,
keyPathIter,
notSetValue,
updater
);
return nextUpdated === nextExisting ? existing :
nextUpdated === NOT_SET ? existing.remove(key) :
(isNotSet ? emptyMap() : existing).set(key, nextUpdated);
}
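// Counts the bits set in a 32-bit integer (SWAR popcount / Hamming weight),
// e.g. popCount(0x16) === 3. The bitmap-indexed nodes rely on this kind of count
// to map a sparse bitmap position onto a dense array index.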
function popCount(x) {
x = x - ((x >> 1) & 0x55555555);
x = (x & 0x33333333) + ((x >> 2) & 0x33333333);
x = (x + (x >> 4)) & 0x0f0f0f0f;
x = x + (x >> 8);
x = x + (x >> 16);
return x & 0x7f;
}
function setIn(array, idx, val, canEdit) {
var newArray = canEdit ? array : arrCopy(array);
newArray[idx] = val;
return newArray;
}
function spliceIn(array, idx, val, canEdit) {
var newLen = array.length + 1;
if (canEdit && idx + 1 === newLen) {
array[idx] = val;
return array;
}
var newArray = new Array(newLen);
var after = 0;
for (var ii = 0; ii < newLen; ii++) {
if (ii === idx) {
newArray[ii] = val;
after = -1;
} else {
newArray[ii] = array[ii + after];
}
}
return newArray;
}
function spliceOut(array, idx, canEdit) {
var newLen = array.length - 1;
if (canEdit && idx === newLen) {
array.pop();
return array;
}
var newArray = new Array(newLen);
var after = 0;
for (var ii = 0; ii < newLen; ii++) {
if (ii === idx) {
after = 1;
}
newArray[ii] = array[ii + after];
}
return newArray;
}
var MAX_ARRAY_MAP_SIZE = SIZE / 4;
var MAX_BITMAP_INDEXED_SIZE = SIZE / 2;
var MIN_HASH_ARRAY_MAP_SIZE = SIZE / 4;
createClass(List, IndexedCollection);
// @pragma Construction
function List(value) {
var empty = emptyList();
if (value === null || value === undefined) {
return empty;
}
if (isList(value)) {
return value;
}
var iter = IndexedIterable(value);
var size = iter.size;
if (size === 0) {
return empty;
}
assertNotInfinite(size);
if (size > 0 && size < SIZE) {
return makeList(0, size, SHIFT, null, new VNode(iter.toArray()));
}
return empty.withMutations(function(list ) {
list.setSize(size);
iter.forEach(function(v, i) {return list.set(i, v)});
});
}
List.of = function(/*...values*/) {
return this(arguments);
};
List.prototype.toString = function() {
return this.__toString('List [', ']');
};
// @pragma Access
List.prototype.get = function(index, notSetValue) {
index = wrapIndex(this, index);
if (index >= 0 && index < this.size) {
index += this._origin;
var node = listNodeFor(this, index);
return node && node.array[index & MASK];
}
return notSetValue;
};
// @pragma Modification
List.prototype.set = function(index, value) {
return updateList(this, index, value);
};
List.prototype.remove = function(index) {
return !this.has(index) ? this :
index === 0 ? this.shift() :
index === this.size - 1 ? this.pop() :
this.splice(index, 1);
};
List.prototype.insert = function(index, value) {
return this.splice(index, 0, value);
};
List.prototype.clear = function() {
if (this.size === 0) {
return this;
}
if (this.__ownerID) {
this.size = this._origin = this._capacity = 0;
this._level = SHIFT;
this._root = this._tail = null;
this.__hash = undefined;
this.__altered = true;
return this;
}
return emptyList();
};
List.prototype.push = function(/*...values*/) {
var values = arguments;
var oldSize = this.size;
return this.withMutations(function(list ) {
setListBounds(list, 0, oldSize + values.length);
for (var ii = 0; ii < values.length; ii++) {
list.set(oldSize + ii, values[ii]);
}
});
};
List.prototype.pop = function() {
return setListBounds(this, 0, -1);
};
List.prototype.unshift = function(/*...values*/) {
var values = arguments;
return this.withMutations(function(list ) {
setListBounds(list, -values.length);
for (var ii = 0; ii < values.length; ii++) {
list.set(ii, values[ii]);
}
});
};
List.prototype.shift = function() {
return setListBounds(this, 1);
};
// @pragma Composition
List.prototype.merge = function(/*...iters*/) {
return mergeIntoListWith(this, undefined, arguments);
};
List.prototype.mergeWith = function(merger) {var iters = SLICE$0.call(arguments, 1);
return mergeIntoListWith(this, merger, iters);
};
List.prototype.mergeDeep = function(/*...iters*/) {
return mergeIntoListWith(this, deepMerger, arguments);
};
List.prototype.mergeDeepWith = function(merger) {var iters = SLICE$0.call(arguments, 1);
return mergeIntoListWith(this, deepMergerWith(merger), iters);
};
List.prototype.setSize = function(size) {
return setListBounds(this, 0, size);
};
// @pragma Iteration
List.prototype.slice = function(begin, end) {
var size = this.size;
if (wholeSlice(begin, end, size)) {
return this;
}
return setListBounds(
this,
resolveBegin(begin, size),
resolveEnd(end, size)
);
};
List.prototype.__iterator = function(type, reverse) {
var index = 0;
var values = iterateList(this, reverse);
return new Iterator(function() {
var value = values();
return value === DONE ?
iteratorDone() :
iteratorValue(type, index++, value);
});
};
List.prototype.__iterate = function(fn, reverse) {
var index = 0;
var values = iterateList(this, reverse);
var value;
while ((value = values()) !== DONE) {
if (fn(value, index++, this) === false) {
break;
}
}
return index;
};
List.prototype.__ensureOwner = function(ownerID) {
if (ownerID === this.__ownerID) {
return this;
}
if (!ownerID) {
this.__ownerID = ownerID;
return this;
}
return makeList(this._origin, this._capacity, this._level, this._root, this._tail, ownerID, this.__hash);
};
function isList(maybeList) {
return !!(maybeList && maybeList[IS_LIST_SENTINEL]);
}
List.isList = isList;
var IS_LIST_SENTINEL = '@@__IMMUTABLE_LIST__@@';
var ListPrototype = List.prototype;
ListPrototype[IS_LIST_SENTINEL] = true;
ListPrototype[DELETE] = ListPrototype.remove;
ListPrototype.setIn = MapPrototype.setIn;
ListPrototype.deleteIn =
ListPrototype.removeIn = MapPrototype.removeIn;
ListPrototype.update = MapPrototype.update;
ListPrototype.updateIn = MapPrototype.updateIn;
ListPrototype.mergeIn = MapPrototype.mergeIn;
ListPrototype.mergeDeepIn = MapPrototype.mergeDeepIn;
ListPrototype.withMutations = MapPrototype.withMutations;
ListPrototype.asMutable = MapPrototype.asMutable;
ListPrototype.asImmutable = MapPrototype.asImmutable;
ListPrototype.wasAltered = MapPrototype.wasAltered;
function VNode(array, ownerID) {
this.array = array;
this.ownerID = ownerID;
}
// TODO: seems like these methods are very similar
VNode.prototype.removeBefore = function(ownerID, level, index) {
if (index === (level ? 1 << level : 0) || this.array.length === 0) {
return this;
}
var originIndex = (index >>> level) & MASK;
if (originIndex >= this.array.length) {
return new VNode([], ownerID);
}
var removingFirst = originIndex === 0;
var newChild;
if (level > 0) {
var oldChild = this.array[originIndex];
newChild = oldChild && oldChild.removeBefore(ownerID, level - SHIFT, index);
if (newChild === oldChild && removingFirst) {
return this;
}
}
if (removingFirst && !newChild) {
return this;
}
var editable = editableVNode(this, ownerID);
if (!removingFirst) {
for (var ii = 0; ii < originIndex; ii++) {
editable.array[ii] = undefined;
}
}
if (newChild) {
editable.array[originIndex] = newChild;
}
return editable;
};
VNode.prototype.removeAfter = function(ownerID, level, index) {
if (index === (level ? 1 << level : 0) || this.array.length === 0) {
return this;
}
var sizeIndex = ((index - 1) >>> level) & MASK;
if (sizeIndex >= this.array.length) {
return this;
}
var newChild;
if (level > 0) {
var oldChild = this.array[sizeIndex];
newChild = oldChild && oldChild.removeAfter(ownerID, level - SHIFT, index);
if (newChild === oldChild && sizeIndex === this.array.length - 1) {
return this;
}
}
var editable = editableVNode(this, ownerID);
editable.array.splice(sizeIndex + 1);
if (newChild) {
editable.array[sizeIndex] = newChild;
}
return editable;
};
var DONE = {};
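// Walks the list trie from _origin to _capacity and returns a closure that yields
// one value per call (DONE when exhausted). Each level consumes SHIFT bits of the
// index, and the trailing partial leaf is read from the tail node.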
function iterateList(list, reverse) {
var left = list._origin;
var right = list._capacity;
var tailPos = getTailOffset(right);
var tail = list._tail;
return iterateNodeOrLeaf(list._root, list._level, 0);
function iterateNodeOrLeaf(node, level, offset) {
return level === 0 ?
iterateLeaf(node, offset) :
iterateNode(node, level, offset);
}
function iterateLeaf(node, offset) {
var array = offset === tailPos ? tail && tail.array : node && node.array;
var from = offset > left ? 0 : left - offset;
var to = right - offset;
if (to > SIZE) {
to = SIZE;
}
return function() {
if (from === to) {
return DONE;
}
var idx = reverse ? --to : from++;
return array && array[idx];
};
}
function iterateNode(node, level, offset) {
var values;
var array = node && node.array;
var from = offset > left ? 0 : (left - offset) >> level;
var to = ((right - offset) >> level) + 1;
if (to > SIZE) {
to = SIZE;
}
return function() {
do {
if (values) {
var value = values();
if (value !== DONE) {
return value;
}
values = null;
}
if (from === to) {
return DONE;
}
var idx = reverse ? --to : from++;
values = iterateNodeOrLeaf(
array && array[idx], level - SHIFT, offset + (idx << level)
);
} while (true);
};
}
}
function makeList(origin, capacity, level, root, tail, ownerID, hash) {
var list = Object.create(ListPrototype);
list.size = capacity - origin;
list._origin = origin;
list._capacity = capacity;
list._level = level;
list._root = root;
list._tail = tail;
list.__ownerID = ownerID;
list.__hash = hash;
list.__altered = false;
return list;
}
var EMPTY_LIST;
function emptyList() {
return EMPTY_LIST || (EMPTY_LIST = makeList(0, 0, SHIFT));
}
function updateList(list, index, value) {
index = wrapIndex(list, index);
if (index !== index) {
return list;
}
if (index >= list.size || index < 0) {
return list.withMutations(function(list ) {
index < 0 ?
setListBounds(list, index).set(0, value) :
setListBounds(list, 0, index + 1).set(index, value)
});
}
index += list._origin;
var newTail = list._tail;
var newRoot = list._root;
var didAlter = MakeRef(DID_ALTER);
if (index >= getTailOffset(list._capacity)) {
newTail = updateVNode(newTail, list.__ownerID, 0, index, value, didAlter);
} else {
newRoot = updateVNode(newRoot, list.__ownerID, list._level, index, value, didAlter);
}
if (!didAlter.value) {
return list;
}
if (list.__ownerID) {
list._root = newRoot;
list._tail = newTail;
list.__hash = undefined;
list.__altered = true;
return list;
}
return makeList(list._origin, list._capacity, list._level, newRoot, newTail);
}
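// Path-copies only the branch leading to `index`: editableVNode reuses a node that
// is already owned by the current ownerID and clones it otherwise, so untouched
// siblings keep being shared between versions.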
function updateVNode(node, ownerID, level, index, value, didAlter) {
var idx = (index >>> level) & MASK;
var nodeHas = node && idx < node.array.length;
if (!nodeHas && value === undefined) {
return node;
}
var newNode;
if (level > 0) {
var lowerNode = node && node.array[idx];
var newLowerNode = updateVNode(lowerNode, ownerID, level - SHIFT, index, value, didAlter);
if (newLowerNode === lowerNode) {
return node;
}
newNode = editableVNode(node, ownerID);
newNode.array[idx] = newLowerNode;
return newNode;
}
if (nodeHas && node.array[idx] === value) {
return node;
}
SetRef(didAlter);
newNode = editableVNode(node, ownerID);
if (value === undefined && idx === newNode.array.length - 1) {
newNode.array.pop();
} else {
newNode.array[idx] = value;
}
return newNode;
}
function editableVNode(node, ownerID) {
if (ownerID && node && ownerID === node.ownerID) {
return node;
}
return new VNode(node ? node.array.slice() : [], ownerID);
}
function listNodeFor(list, rawIndex) {
if (rawIndex >= getTailOffset(list._capacity)) {
return list._tail;
}
if (rawIndex < 1 << (list._level + SHIFT)) {
var node = list._root;
var level = list._level;
while (node && level > 0) {
node = node.array[(rawIndex >>> level) & MASK];
level -= SHIFT;
}
return node;
}
}
function setListBounds(list, begin, end) {
// Sanitize begin & end using this shorthand for ToInt32(argument)
// http://www.ecma-international.org/ecma-262/6.0/#sec-toint32
if (begin !== undefined) {
begin = begin | 0;
}
if (end !== undefined) {
end = end | 0;
}
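// e.g. (4.9 | 0) === 4 and (-2.5 | 0) === -2, so fractional or out-of-range inputs
// collapse to plain 32-bit integers before any index math happens.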
var owner = list.__ownerID || new OwnerID();
var oldOrigin = list._origin;
var oldCapacity = list._capacity;
var newOrigin = oldOrigin + begin;
var newCapacity = end === undefined ? oldCapacity : end < 0 ? oldCapacity + end : oldOrigin + end;
if (newOrigin === oldOrigin && newCapacity === oldCapacity) {
return list;
}
// If it's going to end after it starts, it's empty.
if (newOrigin >= newCapacity) {
return list.clear();
}
var newLevel = list._level;
var newRoot = list._root;
// New origin might need creating a higher root.
var offsetShift = 0;
while (newOrigin + offsetShift < 0) {
newRoot = new VNode(newRoot && newRoot.array.length ? [undefined, newRoot] : [], owner);
newLevel += SHIFT;
offsetShift += 1 << newLevel;
}
if (offsetShift) {
newOrigin += offsetShift;
oldOrigin += offsetShift;
newCapacity += offsetShift;
oldCapacity += offsetShift;
}
var oldTailOffset = getTailOffset(oldCapacity);
var newTailOffset = getTailOffset(newCapacity);
// New size might need creating a higher root.
while (newTailOffset >= 1 << (newLevel + SHIFT)) {
newRoot = new VNode(newRoot && newRoot.array.length ? [newRoot] : [], owner);
newLevel += SHIFT;
}
// Locate or create the new tail.
var oldTail = list._tail;
var newTail = newTailOffset < oldTailOffset ?
listNodeFor(list, newCapacity - 1) :
newTailOffset > oldTailOffset ? new VNode([], owner) : oldTail;
// Merge Tail into tree.
if (oldTail && newTailOffset > oldTailOffset && newOrigin < oldCapacity && oldTail.array.length) {
newRoot = editableVNode(newRoot, owner);
var node = newRoot;
for (var level = newLevel; level > SHIFT; level -= SHIFT) {
var idx = (oldTailOffset >>> level) & MASK;
node = node.array[idx] = editableVNode(node.array[idx], owner);
}
node.array[(oldTailOffset >>> SHIFT) & MASK] = oldTail;
}
// If the size has been reduced, there's a chance the tail needs to be trimmed.
if (newCapacity < oldCapacity) {
newTail = newTail && newTail.removeAfter(owner, 0, newCapacity);
}
// If the new origin is within the tail, then we do not need a root.
if (newOrigin >= newTailOffset) {
newOrigin -= newTailOffset;
newCapacity -= newTailOffset;
newLevel = SHIFT;
newRoot = null;
newTail = newTail && newTail.removeBefore(owner, 0, newOrigin);
// Otherwise, if the root has been trimmed, garbage collect.
} else if (newOrigin > oldOrigin || newTailOffset < oldTailOffset) {
offsetShift = 0;
// Identify the new top root node of the subtree of the old root.
while (newRoot) {
var beginIndex = (newOrigin >>> newLevel) & MASK;
if (beginIndex !== (newTailOffset >>> newLevel) & MASK) {
break;
}
if (beginIndex) {
offsetShift += (1 << newLevel) * beginIndex;
}
newLevel -= SHIFT;
newRoot = newRoot.array[beginIndex];
}
// Trim the new sides of the new root.
if (newRoot && newOrigin > oldOrigin) {
newRoot = newRoot.removeBefore(owner, newLevel, newOrigin - offsetShift);
}
if (newRoot && newTailOffset < oldTailOffset) {
newRoot = newRoot.removeAfter(owner, newLevel, newTailOffset - offsetShift);
}
if (offsetShift) {
newOrigin -= offsetShift;
newCapacity -= offsetShift;
}
}
if (list.__ownerID) {
list.size = newCapacity - newOrigin;
list._origin = newOrigin;
list._capacity = newCapacity;
list._level = newLevel;
list._root = newRoot;
list._tail = newTail;
list.__hash = undefined;
list.__altered = true;
return list;
}
return makeList(newOrigin, newCapacity, newLevel, newRoot, newTail);
}
function mergeIntoListWith(list, merger, iterables) {
var iters = [];
var maxSize = 0;
for (var ii = 0; ii < iterables.length; ii++) {
var value = iterables[ii];
var iter = IndexedIterable(value);
if (iter.size > maxSize) {
maxSize = iter.size;
}
if (!isIterable(value)) {
iter = iter.map(function(v ) {return fromJS(v)});
}
iters.push(iter);
}
if (maxSize > list.size) {
list = list.setSize(maxSize);
}
return mergeIntoCollectionWith(list, merger, iters);
}
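// Index of the first element stored in the tail node: (size - 1) rounded down to a
// multiple of the branching factor, e.g. with SIZE === 32 this gives
// getTailOffset(33) === 32 while getTailOffset(32) === 0.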
function getTailOffset(size) {
return size < SIZE ? 0 : (((size - 1) >>> SHIFT) << SHIFT);
}
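// OrderedMap keeps a Map from key to index alongside a List of [key, value]
// entries, so lookups stay map-like while iteration follows insertion order.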
createClass(OrderedMap, Map);
// @pragma Construction
function OrderedMap(value) {
return value === null || value === undefined ? emptyOrderedMap() :
isOrderedMap(value) ? value :
emptyOrderedMap().withMutations(function(map ) {
var iter = KeyedIterable(value);
assertNotInfinite(iter.size);
iter.forEach(function(v, k) {return map.set(k, v)});
});
}
OrderedMap.of = function(/*...values*/) {
return this(arguments);
};
OrderedMap.prototype.toString = function() {
return this.__toString('OrderedMap {', '}');
};
// @pragma Access
OrderedMap.prototype.get = function(k, notSetValue) {
var index = this._map.get(k);
return index !== undefined ? this._list.get(index)[1] : notSetValue;
};
// @pragma Modification
OrderedMap.prototype.clear = function() {
if (this.size === 0) {
return this;
}
if (this.__ownerID) {
this.size = 0;
this._map.clear();
this._list.clear();
return this;
}
return emptyOrderedMap();
};
OrderedMap.prototype.set = function(k, v) {
return updateOrderedMap(this, k, v);
};
OrderedMap.prototype.remove = function(k) {
return updateOrderedMap(this, k, NOT_SET);
};
OrderedMap.prototype.wasAltered = function() {
return this._map.wasAltered() || this._list.wasAltered();
};
OrderedMap.prototype.__iterate = function(fn, reverse) {var this$0 = this;
return this._list.__iterate(
function(entry ) {return entry && fn(entry[1], entry[0], this$0)},
reverse
);
};
OrderedMap.prototype.__iterator = function(type, reverse) {
return this._list.fromEntrySeq().__iterator(type, reverse);
};
OrderedMap.prototype.__ensureOwner = function(ownerID) {
if (ownerID === this.__ownerID) {
return this;
}
var newMap = this._map.__ensureOwner(ownerID);
var newList = this._list.__ensureOwner(ownerID);
if (!ownerID) {
this.__ownerID = ownerID;
this._map = newMap;
this._list = newList;
return this;
}
return makeOrderedMap(newMap, newList, ownerID, this.__hash);
};
function isOrderedMap(maybeOrderedMap) {
return isMap(maybeOrderedMap) && isOrdered(maybeOrderedMap);
}
OrderedMap.isOrderedMap = isOrderedMap;
OrderedMap.prototype[IS_ORDERED_SENTINEL] = true;
OrderedMap.prototype[DELETE] = OrderedMap.prototype.remove;
function makeOrderedMap(map, list, ownerID, hash) {
var omap = Object.create(OrderedMap.prototype);
omap.size = map ? map.size : 0;
omap._map = map;
omap._list = list;
omap.__ownerID = ownerID;
omap.__hash = hash;
return omap;
}
var EMPTY_ORDERED_MAP;
function emptyOrderedMap() {
return EMPTY_ORDERED_MAP || (EMPTY_ORDERED_MAP = makeOrderedMap(emptyMap(), emptyList()));
}
function updateOrderedMap(omap, k, v) {
var map = omap._map;
var list = omap._list;
var i = map.get(k);
var has = i !== undefined;
var newMap;
var newList;
if (v === NOT_SET) { // removed
if (!has) {
return omap;
}
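// Removals normally just tombstone the list slot, so once at least half of a
// reasonably large list is tombstones, rebuild both the entry list and the
// key-to-index map to reclaim the wasted slots.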
if (list.size >= SIZE && list.size >= map.size * 2) {
newList = list.filter(function(entry, idx) {return entry !== undefined && i !== idx});
newMap = newList.toKeyedSeq().map(function(entry ) {return entry[0]}).flip().toMap();
if (omap.__ownerID) {
newMap.__ownerID = newList.__ownerID = omap.__ownerID;
}
} else {
newMap = map.remove(k);
newList = i === list.size - 1 ? list.pop() : list.set(i, undefined);
}
} else {
if (has) {
if (v === list.get(i)[1]) {
return omap;
}
newMap = map;
newList = list.set(i, [k, v]);
} else {
newMap = map.set(k, list.size);
newList = list.set(list.size, [k, v]);
}
}
if (omap.__ownerID) {
omap.size = newMap.size;
omap._map = newMap;
omap._list = newList;
omap.__hash = undefined;
return omap;
}
return makeOrderedMap(newMap, newList);
}
createClass(ToKeyedSequence, KeyedSeq);
function ToKeyedSequence(indexed, useKeys) {
this._iter = indexed;
this._useKeys = useKeys;
this.size = indexed.size;
}
ToKeyedSequence.prototype.get = function(key, notSetValue) {
return this._iter.get(key, notSetValue);
};
ToKeyedSequence.prototype.has = function(key) {
return this._iter.has(key);
};
ToKeyedSequence.prototype.valueSeq = function() {
return this._iter.valueSeq();
};
ToKeyedSequence.prototype.reverse = function() {var this$0 = this;
var reversedSequence = reverseFactory(this, true);
if (!this._useKeys) {
reversedSequence.valueSeq = function() {return this$0._iter.toSeq().reverse()};
}
return reversedSequence;
};
ToKeyedSequence.prototype.map = function(mapper, context) {var this$0 = this;
var mappedSequence = mapFactory(this, mapper, context);
if (!this._useKeys) {
mappedSequence.valueSeq = function() {return this$0._iter.toSeq().map(mapper, context)};
}
return mappedSequence;
};
ToKeyedSequence.prototype.__iterate = function(fn, reverse) {var this$0 = this;
var ii;
return this._iter.__iterate(
this._useKeys ?
function(v, k) {return fn(v, k, this$0)} :
((ii = reverse ? resolveSize(this) : 0),
function(v ) {return fn(v, reverse ? --ii : ii++, this$0)}),
reverse
);
};
ToKeyedSequence.prototype.__iterator = function(type, reverse) {
if (this._useKeys) {
return this._iter.__iterator(type, reverse);
}
var iterator = this._iter.__iterator(ITERATE_VALUES, reverse);
var ii = reverse ? resolveSize(this) : 0;
return new Iterator(function() {
var step = iterator.next();
return step.done ? step :
iteratorValue(type, reverse ? --ii : ii++, step.value, step);
});
};
ToKeyedSequence.prototype[IS_ORDERED_SENTINEL] = true;
createClass(ToIndexedSequence, IndexedSeq);
function ToIndexedSequence(iter) {
this._iter = iter;
this.size = iter.size;
}
ToIndexedSequence.prototype.includes = function(value) {
return this._iter.includes(value);
};
ToIndexedSequence.prototype.__iterate = function(fn, reverse) {var this$0 = this;
var iterations = 0;
return this._iter.__iterate(function(v ) {return fn(v, iterations++, this$0)}, reverse);
};
ToIndexedSequence.prototype.__iterator = function(type, reverse) {
var iterator = this._iter.__iterator(ITERATE_VALUES, reverse);
var iterations = 0;
return new Iterator(function() {
var step = iterator.next();
return step.done ? step :
iteratorValue(type, iterations++, step.value, step)
});
};
createClass(ToSetSequence, SetSeq);
function ToSetSequence(iter) {
this._iter = iter;
this.size = iter.size;
}
ToSetSequence.prototype.has = function(key) {
return this._iter.includes(key);
};
ToSetSequence.prototype.__iterate = function(fn, reverse) {var this$0 = this;
return this._iter.__iterate(function(v ) {return fn(v, v, this$0)}, reverse);
};
ToSetSequence.prototype.__iterator = function(type, reverse) {
var iterator = this._iter.__iterator(ITERATE_VALUES, reverse);
return new Iterator(function() {
var step = iterator.next();
return step.done ? step :
iteratorValue(type, step.value, step.value, step);
});
};
createClass(FromEntriesSequence, KeyedSeq);
function FromEntriesSequence(entries) {
this._iter = entries;
this.size = entries.size;
}
FromEntriesSequence.prototype.entrySeq = function() {
return this._iter.toSeq();
};
FromEntriesSequence.prototype.__iterate = function(fn, reverse) {var this$0 = this;
return this._iter.__iterate(function(entry ) {
// Check if entry exists first so array access doesn't throw for holes
// in the parent iteration.
if (entry) {
validateEntry(entry);
var indexedIterable = isIterable(entry);
return fn(
indexedIterable ? entry.get(1) : entry[1],
indexedIterable ? entry.get(0) : entry[0],
this$0
);
}
}, reverse);
};
FromEntriesSequence.prototype.__iterator = function(type, reverse) {
var iterator = this._iter.__iterator(ITERATE_VALUES, reverse);
return new Iterator(function() {
while (true) {
var step = iterator.next();
if (step.done) {
return step;
}
var entry = step.value;
// Check if entry exists first so array access doesn't throw for holes
// in the parent iteration.
if (entry) {
validateEntry(entry);
var indexedIterable = isIterable(entry);
return iteratorValue(
type,
indexedIterable ? entry.get(0) : entry[0],
indexedIterable ? entry.get(1) : entry[1],
step
);
}
}
});
};
ToIndexedSequence.prototype.cacheResult =
ToKeyedSequence.prototype.cacheResult =
ToSetSequence.prototype.cacheResult =
FromEntriesSequence.prototype.cacheResult =
cacheResultThrough;
function flipFactory(iterable) {
var flipSequence = makeSequence(iterable);
flipSequence._iter = iterable;
flipSequence.size = iterable.size;
flipSequence.flip = function() {return iterable};
flipSequence.reverse = function () {
var reversedSequence = iterable.reverse.apply(this); // super.reverse()
reversedSequence.flip = function() {return iterable.reverse()};
return reversedSequence;
};
flipSequence.has = function(key ) {return iterable.includes(key)};
flipSequence.includes = function(key ) {return iterable.has(key)};
flipSequence.cacheResult = cacheResultThrough;
flipSequence.__iterateUncached = function (fn, reverse) {var this$0 = this;
return iterable.__iterate(function(v, k) {return fn(k, v, this$0) !== false}, reverse);
}
flipSequence.__iteratorUncached = function(type, reverse) {
if (type === ITERATE_ENTRIES) {
var iterator = iterable.__iterator(type, reverse);
return new Iterator(function() {
var step = iterator.next();
if (!step.done) {
var k = step.value[0];
step.value[0] = step.value[1];
step.value[1] = k;
}
return step;
});
}
return iterable.__iterator(
type === ITERATE_VALUES ? ITERATE_KEYS : ITERATE_VALUES,
reverse
);
}
return flipSequence;
}
function mapFactory(iterable, mapper, context) {
var mappedSequence = makeSequence(iterable);
mappedSequence.size = iterable.size;
mappedSequence.has = function(key ) {return iterable.has(key)};
mappedSequence.get = function(key, notSetValue) {
var v = iterable.get(key, NOT_SET);
return v === NOT_SET ?
notSetValue :
mapper.call(context, v, key, iterable);
};
mappedSequence.__iterateUncached = function (fn, reverse) {var this$0 = this;
return iterable.__iterate(
function(v, k, c) {return fn(mapper.call(context, v, k, c), k, this$0) !== false},
reverse
);
}
mappedSequence.__iteratorUncached = function (type, reverse) {
var iterator = iterable.__iterator(ITERATE_ENTRIES, reverse);
return new Iterator(function() {
var step = iterator.next();
if (step.done) {
return step;
}
var entry = step.value;
var key = entry[0];
return iteratorValue(
type,
key,
mapper.call(context, entry[1], key, iterable),
step
);
});
}
return mappedSequence;
}
function reverseFactory(iterable, useKeys) {
var reversedSequence = makeSequence(iterable);
reversedSequence._iter = iterable;
reversedSequence.size = iterable.size;
reversedSequence.reverse = function() {return iterable};
if (iterable.flip) {
reversedSequence.flip = function () {
var flipSequence = flipFactory(iterable);
flipSequence.reverse = function() {return iterable.flip()};
return flipSequence;
};
}
reversedSequence.get = function(key, notSetValue)
{return iterable.get(useKeys ? key : -1 - key, notSetValue)};
reversedSequence.has = function(key )
{return iterable.has(useKeys ? key : -1 - key)};
reversedSequence.includes = function(value ) {return iterable.includes(value)};
reversedSequence.cacheResult = cacheResultThrough;
reversedSequence.__iterate = function (fn, reverse) {var this$0 = this;
return iterable.__iterate(function(v, k) {return fn(v, k, this$0)}, !reverse);
};
reversedSequence.__iterator =
function(type, reverse) {return iterable.__iterator(type, !reverse)};
return reversedSequence;
}
function filterFactory(iterable, predicate, context, useKeys) {
var filterSequence = makeSequence(iterable);
if (useKeys) {
filterSequence.has = function(key ) {
var v = iterable.get(key, NOT_SET);
return v !== NOT_SET && !!predicate.call(context, v, key, iterable);
};
filterSequence.get = function(key, notSetValue) {
var v = iterable.get(key, NOT_SET);
return v !== NOT_SET && predicate.call(context, v, key, iterable) ?
v : notSetValue;
};
}
filterSequence.__iterateUncached = function (fn, reverse) {var this$0 = this;
var iterations = 0;
iterable.__iterate(function(v, k, c) {
if (predicate.call(context, v, k, c)) {
iterations++;
return fn(v, useKeys ? k : iterations - 1, this$0);
}
}, reverse);
return iterations;
};
filterSequence.__iteratorUncached = function (type, reverse) {
var iterator = iterable.__iterator(ITERATE_ENTRIES, reverse);
var iterations = 0;
return new Iterator(function() {
while (true) {
var step = iterator.next();
if (step.done) {
return step;
}
var entry = step.value;
var key = entry[0];
var value = entry[1];
if (predicate.call(context, value, key, iterable)) {
return iteratorValue(type, useKeys ? key : iterations++, value, step);
}
}
});
}
return filterSequence;
}
function countByFactory(iterable, grouper, context) {
var groups = Map().asMutable();
iterable.__iterate(function(v, k) {
groups.update(
grouper.call(context, v, k, iterable),
0,
function(a ) {return a + 1}
);
});
return groups.asImmutable();
}
function groupByFactory(iterable, grouper, context) {
var isKeyedIter = isKeyed(iterable);
var groups = (isOrdered(iterable) ? OrderedMap() : Map()).asMutable();
iterable.__iterate(function(v, k) {
groups.update(
grouper.call(context, v, k, iterable),
function(a ) {return (a = a || [], a.push(isKeyedIter ? [k, v] : v), a)}
);
});
var coerce = iterableClass(iterable);
return groups.map(function(arr ) {return reify(iterable, coerce(arr))});
}
function sliceFactory(iterable, begin, end, useKeys) {
var originalSize = iterable.size;
// Sanitize begin & end using this shorthand for ToInt32(argument)
// http://www.ecma-international.org/ecma-262/6.0/#sec-toint32
if (begin !== undefined) {
begin = begin | 0;
}
if (end !== undefined) {
end = end | 0;
}
if (wholeSlice(begin, end, originalSize)) {
return iterable;
}
var resolvedBegin = resolveBegin(begin, originalSize);
var resolvedEnd = resolveEnd(end, originalSize);
// begin or end will be NaN if they were provided as negative numbers and
// this iterable's size is unknown. In that case, cache first so there is
// a known size and these do not resolve to NaN.
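// (x !== x) below is the usual NaN self-inequality test.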
if (resolvedBegin !== resolvedBegin || resolvedEnd !== resolvedEnd) {
return sliceFactory(iterable.toSeq().cacheResult(), begin, end, useKeys);
}
// Note: resolvedEnd is undefined when the original sequence's length is
// unknown and this slice did not supply an end and should contain all
// elements after resolvedBegin.
// In that case, resolvedSize will be NaN and sliceSize will remain undefined.
var resolvedSize = resolvedEnd - resolvedBegin;
var sliceSize;
if (resolvedSize === resolvedSize) {
sliceSize = resolvedSize < 0 ? 0 : resolvedSize;
}
var sliceSeq = makeSequence(iterable);
// If iterable.size is undefined, the size of the realized sliceSeq is
// unknown at this point unless the number of items to slice is 0
sliceSeq.size = sliceSize === 0 ? sliceSize : iterable.size && sliceSize || undefined;
if (!useKeys && isSeq(iterable) && sliceSize >= 0) {
sliceSeq.get = function (index, notSetValue) {
index = wrapIndex(this, index);
return index >= 0 && index < sliceSize ?
iterable.get(index + resolvedBegin, notSetValue) :
notSetValue;
}
}
sliceSeq.__iterateUncached = function(fn, reverse) {var this$0 = this;
if (sliceSize === 0) {
return 0;
}
if (reverse) {
return this.cacheResult().__iterate(fn, reverse);
}
var skipped = 0;
var isSkipping = true;
var iterations = 0;
iterable.__iterate(function(v, k) {
if (!(isSkipping && (isSkipping = skipped++ < resolvedBegin))) {
iterations++;
return fn(v, useKeys ? k : iterations - 1, this$0) !== false &&
iterations !== sliceSize;
}
});
return iterations;
};
sliceSeq.__iteratorUncached = function(type, reverse) {
if (sliceSize !== 0 && reverse) {
return this.cacheResult().__iterator(type, reverse);
}
// Don't bother instantiating parent iterator if taking 0.
var iterator = sliceSize !== 0 && iterable.__iterator(type, reverse);
var skipped = 0;
var iterations = 0;
return new Iterator(function() {
while (skipped++ < resolvedBegin) {
iterator.next();
}
if (++iterations > sliceSize) {
return iteratorDone();
}
var step = iterator.next();
if (useKeys || type === ITERATE_VALUES) {
return step;
} else if (type === ITERATE_KEYS) {
return iteratorValue(type, iterations - 1, undefined, step);
} else {
return iteratorValue(type, iterations - 1, step.value[1], step);
}
});
}
return sliceSeq;
}
function takeWhileFactory(iterable, predicate, context) {
var takeSequence = makeSequence(iterable);
takeSequence.__iterateUncached = function(fn, reverse) {var this$0 = this;
if (reverse) {
return this.cacheResult().__iterate(fn, reverse);
}
var iterations = 0;
iterable.__iterate(function(v, k, c)
{return predicate.call(context, v, k, c) && ++iterations && fn(v, k, this$0)}
);
return iterations;
};
takeSequence.__iteratorUncached = function(type, reverse) {var this$0 = this;
if (reverse) {
return this.cacheResult().__iterator(type, reverse);
}
var iterator = iterable.__iterator(ITERATE_ENTRIES, reverse);
var iterating = true;
return new Iterator(function() {
if (!iterating) {
return iteratorDone();
}
var step = iterator.next();
if (step.done) {
return step;
}
var entry = step.value;
var k = entry[0];
var v = entry[1];
if (!predicate.call(context, v, k, this$0)) {
iterating = false;
return iteratorDone();
}
return type === ITERATE_ENTRIES ? step :
iteratorValue(type, k, v, step);
});
};
return takeSequence;
}
function skipWhileFactory(iterable, predicate, context, useKeys) {
var skipSequence = makeSequence(iterable);
skipSequence.__iterateUncached = function (fn, reverse) {var this$0 = this;
if (reverse) {
return this.cacheResult().__iterate(fn, reverse);
}
var isSkipping = true;
var iterations = 0;
iterable.__iterate(function(v, k, c) {
if (!(isSkipping && (isSkipping = predicate.call(context, v, k, c)))) {
iterations++;
return fn(v, useKeys ? k : iterations - 1, this$0);
}
});
return iterations;
};
skipSequence.__iteratorUncached = function(type, reverse) {var this$0 = this;
if (reverse) {
return this.cacheResult().__iterator(type, reverse);
}
var iterator = iterable.__iterator(ITERATE_ENTRIES, reverse);
var skipping = true;
var iterations = 0;
return new Iterator(function() {
var step, k, v;
do {
step = iterator.next();
if (step.done) {
if (useKeys || type === ITERATE_VALUES) {
return step;
} else if (type === ITERATE_KEYS) {
return iteratorValue(type, iterations++, undefined, step);
} else {
return iteratorValue(type, iterations++, step.value[1], step);
}
}
var entry = step.value;
k = entry[0];
v = entry[1];
skipping && (skipping = predicate.call(context, v, k, this$0));
} while (skipping);
return type === ITERATE_ENTRIES ? step :
iteratorValue(type, k, v, step);
});
};
return skipSequence;
}
function concatFactory(iterable, values) {
var isKeyedIterable = isKeyed(iterable);
var iters = [iterable].concat(values).map(function(v ) {
if (!isIterable(v)) {
v = isKeyedIterable ?
keyedSeqFromValue(v) :
indexedSeqFromValue(Array.isArray(v) ? v : [v]);
} else if (isKeyedIterable) {
v = KeyedIterable(v);
}
return v;
}).filter(function(v ) {return v.size !== 0});
if (iters.length === 0) {
return iterable;
}
if (iters.length === 1) {
var singleton = iters[0];
if (singleton === iterable ||
isKeyedIterable && isKeyed(singleton) ||
isIndexed(iterable) && isIndexed(singleton)) {
return singleton;
}
}
var concatSeq = new ArraySeq(iters);
if (isKeyedIterable) {
concatSeq = concatSeq.toKeyedSeq();
} else if (!isIndexed(iterable)) {
concatSeq = concatSeq.toSetSeq();
}
concatSeq = concatSeq.flatten(true);
concatSeq.size = iters.reduce(
function(sum, seq) {
if (sum !== undefined) {
var size = seq.size;
if (size !== undefined) {
return sum + size;
}
}
},
0
);
return concatSeq;
}
function flattenFactory(iterable, depth, useKeys) {
var flatSequence = makeSequence(iterable);
flatSequence.__iterateUncached = function(fn, reverse) {
var iterations = 0;
var stopped = false;
function flatDeep(iter, currentDepth) {var this$0 = this;
iter.__iterate(function(v, k) {
if ((!depth || currentDepth < depth) && isIterable(v)) {
flatDeep(v, currentDepth + 1);
} else if (fn(v, useKeys ? k : iterations++, this$0) === false) {
stopped = true;
}
return !stopped;
}, reverse);
}
flatDeep(iterable, 0);
return iterations;
}
flatSequence.__iteratorUncached = function(type, reverse) {
var iterator = iterable.__iterator(type, reverse);
var stack = [];
var iterations = 0;
return new Iterator(function() {
while (iterator) {
var step = iterator.next();
if (step.done !== false) {
iterator = stack.pop();
continue;
}
var v = step.value;
if (type === ITERATE_ENTRIES) {
v = v[1];
}
if ((!depth || stack.length < depth) && isIterable(v)) {
stack.push(iterator);
iterator = v.__iterator(type, reverse);
} else {
return useKeys ? step : iteratorValue(type, iterations++, v, step);
}
}
return iteratorDone();
});
}
return flatSequence;
}
function flatMapFactory(iterable, mapper, context) {
var coerce = iterableClass(iterable);
return iterable.toSeq().map(
function(v, k) {return coerce(mapper.call(context, v, k, iterable))}
).flatten(true);
}
function interposeFactory(iterable, separator) {
var interposedSequence = makeSequence(iterable);
interposedSequence.size = iterable.size && iterable.size * 2 - 1;
interposedSequence.__iterateUncached = function(fn, reverse) {var this$0 = this;
var iterations = 0;
iterable.__iterate(function(v, k)
{return (!iterations || fn(separator, iterations++, this$0) !== false) &&
fn(v, iterations++, this$0) !== false},
reverse
);
return iterations;
};
interposedSequence.__iteratorUncached = function(type, reverse) {
var iterator = iterable.__iterator(ITERATE_VALUES, reverse);
var iterations = 0;
var step;
return new Iterator(function() {
if (!step || iterations % 2) {
step = iterator.next();
if (step.done) {
return step;
}
}
return iterations % 2 ?
iteratorValue(type, iterations++, separator) :
iteratorValue(type, iterations++, step.value, step);
});
};
return interposedSequence;
}
function sortFactory(iterable, comparator, mapper) {
if (!comparator) {
comparator = defaultComparator;
}
var isKeyedIterable = isKeyed(iterable);
var index = 0;
var entries = iterable.toSeq().map(
function(v, k) {return [k, v, index++, mapper ? mapper(v, k, iterable) : v]}
).toArray();
entries.sort(function(a, b) {return comparator(a[3], b[3]) || a[2] - b[2]}).forEach(
isKeyedIterable ?
function(v, i) { entries[i].length = 2; } :
function(v, i) { entries[i] = v[1]; }
);
return isKeyedIterable ? KeyedSeq(entries) :
isIndexed(iterable) ? IndexedSeq(entries) :
SetSeq(entries);
}
function maxFactory(iterable, comparator, mapper) {
if (!comparator) {
comparator = defaultComparator;
}
if (mapper) {
var entry = iterable.toSeq()
.map(function(v, k) {return [v, mapper(v, k, iterable)]})
.reduce(function(a, b) {return maxCompare(comparator, a[1], b[1]) ? b : a});
return entry && entry[0];
} else {
return iterable.reduce(function(a, b) {return maxCompare(comparator, a, b) ? b : a});
}
}
function maxCompare(comparator, a, b) {
var comp = comparator(b, a);
// b is considered the new max if the comparator declares them equal, but
// they are not equal and b is in fact a nullish value.
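// For example, maxCompare(defaultComparator, 3, NaN) is true (NaN compares
// equal-but-unequal), so a trailing NaN wins just as it does with Math.max.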
return (comp === 0 && b !== a && (b === undefined || b === null || b !== b)) || comp > 0;
}
function zipWithFactory(keyIter, zipper, iters) {
var zipSequence = makeSequence(keyIter);
zipSequence.size = new ArraySeq(iters).map(function(i ) {return i.size}).min();
// Note: this is a generic base implementation of __iterate in terms of
// __iterator which may be more generically useful in the future.
zipSequence.__iterate = function(fn, reverse) {
/* generic:
var iterator = this.__iterator(ITERATE_ENTRIES, reverse);
var step;
var iterations = 0;
while (!(step = iterator.next()).done) {
iterations++;
if (fn(step.value[1], step.value[0], this) === false) {
break;
}
}
return iterations;
*/
// indexed:
var iterator = this.__iterator(ITERATE_VALUES, reverse);
var step;
var iterations = 0;
while (!(step = iterator.next()).done) {
if (fn(step.value, iterations++, this) === false) {
break;
}
}
return iterations;
};
zipSequence.__iteratorUncached = function(type, reverse) {
var iterators = iters.map(function(i )
{return (i = Iterable(i), getIterator(reverse ? i.reverse() : i))}
);
var iterations = 0;
var isDone = false;
return new Iterator(function() {
var steps;
if (!isDone) {
steps = iterators.map(function(i ) {return i.next()});
isDone = steps.some(function(s ) {return s.done});
}
if (isDone) {
return iteratorDone();
}
return iteratorValue(
type,
iterations++,
zipper.apply(null, steps.map(function(s ) {return s.value}))
);
});
};
return zipSequence
}
// @pragma Helper Functions
function reify(iter, seq) {
return isSeq(iter) ? seq : iter.constructor(seq);
}
function validateEntry(entry) {
if (entry !== Object(entry)) {
throw new TypeError('Expected [K, V] tuple: ' + entry);
}
}
function resolveSize(iter) {
assertNotInfinite(iter.size);
return ensureSize(iter);
}
function iterableClass(iterable) {
return isKeyed(iterable) ? KeyedIterable :
isIndexed(iterable) ? IndexedIterable :
SetIterable;
}
function makeSequence(iterable) {
return Object.create(
(
isKeyed(iterable) ? KeyedSeq :
isIndexed(iterable) ? IndexedSeq :
SetSeq
).prototype
);
}
function cacheResultThrough() {
if (this._iter.cacheResult) {
this._iter.cacheResult();
this.size = this._iter.size;
return this;
} else {
return Seq.prototype.cacheResult.call(this);
}
}
function defaultComparator(a, b) {
return a > b ? 1 : a < b ? -1 : 0;
}
function forceIterator(keyPath) {
var iter = getIterator(keyPath);
if (!iter) {
// Array might not be iterable in this environment, so we need a fallback
// to our wrapped type.
if (!isArrayLike(keyPath)) {
throw new TypeError('Expected iterable or array-like: ' + keyPath);
}
iter = getIterator(Iterable(keyPath));
}
return iter;
}
createClass(Record, KeyedCollection);
function Record(defaultValues, name) {
var hasInitialized;
var RecordType = function Record(values) {
if (values instanceof RecordType) {
return values;
}
if (!(this instanceof RecordType)) {
return new RecordType(values);
}
if (!hasInitialized) {
hasInitialized = true;
var keys = Object.keys(defaultValues);
setProps(RecordTypePrototype, keys);
RecordTypePrototype.size = keys.length;
RecordTypePrototype._name = name;
RecordTypePrototype._keys = keys;
RecordTypePrototype._defaultValues = defaultValues;
}
this._map = Map(values);
};
var RecordTypePrototype = RecordType.prototype = Object.create(RecordPrototype);
RecordTypePrototype.constructor = RecordType;
return RecordType;
}
Record.prototype.toString = function() {
return this.__toString(recordName(this) + ' {', '}');
};
// @pragma Access
Record.prototype.has = function(k) {
return this._defaultValues.hasOwnProperty(k);
};
Record.prototype.get = function(k, notSetValue) {
if (!this.has(k)) {
return notSetValue;
}
var defaultVal = this._defaultValues[k];
return this._map ? this._map.get(k, defaultVal) : defaultVal;
};
// @pragma Modification
Record.prototype.clear = function() {
if (this.__ownerID) {
this._map && this._map.clear();
return this;
}
var RecordType = this.constructor;
return RecordType._empty || (RecordType._empty = makeRecord(this, emptyMap()));
};
Record.prototype.set = function(k, v) {
if (!this.has(k)) {
throw new Error('Cannot set unknown key "' + k + '" on ' + recordName(this));
}
var newMap = this._map && this._map.set(k, v);
if (this.__ownerID || newMap === this._map) {
return this;
}
return makeRecord(this, newMap);
};
Record.prototype.remove = function(k) {
if (!this.has(k)) {
return this;
}
var newMap = this._map && this._map.remove(k);
if (this.__ownerID || newMap === this._map) {
return this;
}
return makeRecord(this, newMap);
};
Record.prototype.wasAltered = function() {
return this._map.wasAltered();
};
Record.prototype.__iterator = function(type, reverse) {var this$0 = this;
return KeyedIterable(this._defaultValues).map(function(_, k) {return this$0.get(k)}).__iterator(type, reverse);
};
Record.prototype.__iterate = function(fn, reverse) {var this$0 = this;
return KeyedIterable(this._defaultValues).map(function(_, k) {return this$0.get(k)}).__iterate(fn, reverse);
};
Record.prototype.__ensureOwner = function(ownerID) {
if (ownerID === this.__ownerID) {
return this;
}
var newMap = this._map && this._map.__ensureOwner(ownerID);
if (!ownerID) {
this.__ownerID = ownerID;
this._map = newMap;
return this;
}
return makeRecord(this, newMap, ownerID);
};
var RecordPrototype = Record.prototype;
RecordPrototype[DELETE] = RecordPrototype.remove;
RecordPrototype.deleteIn =
RecordPrototype.removeIn = MapPrototype.removeIn;
RecordPrototype.merge = MapPrototype.merge;
RecordPrototype.mergeWith = MapPrototype.mergeWith;
RecordPrototype.mergeIn = MapPrototype.mergeIn;
RecordPrototype.mergeDeep = MapPrototype.mergeDeep;
RecordPrototype.mergeDeepWith = MapPrototype.mergeDeepWith;
RecordPrototype.mergeDeepIn = MapPrototype.mergeDeepIn;
RecordPrototype.setIn = MapPrototype.setIn;
RecordPrototype.update = MapPrototype.update;
RecordPrototype.updateIn = MapPrototype.updateIn;
RecordPrototype.withMutations = MapPrototype.withMutations;
RecordPrototype.asMutable = MapPrototype.asMutable;
RecordPrototype.asImmutable = MapPrototype.asImmutable;
function makeRecord(likeRecord, map, ownerID) {
var record = Object.create(Object.getPrototypeOf(likeRecord));
record._map = map;
record.__ownerID = ownerID;
return record;
}
function recordName(record) {
return record._name || record.constructor.name || 'Record';
}
function setProps(prototype, names) {
try {
names.forEach(setProp.bind(undefined, prototype));
} catch (error) {
// Object.defineProperty failed. Probably IE8.
}
}
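// Defines a record.fieldName accessor for one key: the getter proxies to
// get(name), while the setter only works on a mutable copy (asMutable /
// withMutations) and throws on an immutable record.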
function setProp(prototype, name) {
Object.defineProperty(prototype, name, {
get: function() {
return this.get(name);
},
set: function(value) {
invariant(this.__ownerID, 'Cannot set on an immutable record.');
this.set(name, value);
}
});
}
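// Set stores its members as the keys of a backing Map (value -> true), so
// has/add/remove simply delegate to the corresponding map operations.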
createClass(Set, SetCollection);
// @pragma Construction
function Set(value) {
return value === null || value === undefined ? emptySet() :
isSet(value) && !isOrdered(value) ? value :
emptySet().withMutations(function(set ) {
var iter = SetIterable(value);
assertNotInfinite(iter.size);
iter.forEach(function(v ) {return set.add(v)});
});
}
Set.of = function(/*...values*/) {
return this(arguments);
};
Set.fromKeys = function(value) {
return this(KeyedIterable(value).keySeq());
};
Set.prototype.toString = function() {
return this.__toString('Set {', '}');
};
// @pragma Access
Set.prototype.has = function(value) {
return this._map.has(value);
};
// @pragma Modification
Set.prototype.add = function(value) {
return updateSet(this, this._map.set(value, true));
};
Set.prototype.remove = function(value) {
return updateSet(this, this._map.remove(value));
};
Set.prototype.clear = function() {
return updateSet(this, this._map.clear());
};
// @pragma Composition
Set.prototype.union = function() {var iters = SLICE$0.call(arguments, 0);
iters = iters.filter(function(x ) {return x.size !== 0});
if (iters.length === 0) {
return this;
}
if (this.size === 0 && !this.__ownerID && iters.length === 1) {
return this.constructor(iters[0]);
}
return this.withMutations(function(set ) {
for (var ii = 0; ii < iters.length; ii++) {
SetIterable(iters[ii]).forEach(function(value ) {return set.add(value)});
}
});
};
Set.prototype.intersect = function() {var iters = SLICE$0.call(arguments, 0);
if (iters.length === 0) {
return this;
}
iters = iters.map(function(iter ) {return SetIterable(iter)});
var originalSet = this;
return this.withMutations(function(set ) {
originalSet.forEach(function(value ) {
if (!iters.every(function(iter ) {return iter.includes(value)})) {
set.remove(value);
}
});
});
};
Set.prototype.subtract = function() {var iters = SLICE$0.call(arguments, 0);
if (iters.length === 0) {
return this;
}
iters = iters.map(function(iter ) {return SetIterable(iter)});
var originalSet = this;
return this.withMutations(function(set ) {
originalSet.forEach(function(value ) {
if (iters.some(function(iter ) {return iter.includes(value)})) {
set.remove(value);
}
});
});
};
Set.prototype.merge = function() {
return this.union.apply(this, arguments);
};
Set.prototype.mergeWith = function(merger) {var iters = SLICE$0.call(arguments, 1);
return this.union.apply(this, iters);
};
Set.prototype.sort = function(comparator) {
// Late binding
return OrderedSet(sortFactory(this, comparator));
};
Set.prototype.sortBy = function(mapper, comparator) {
// Late binding
return OrderedSet(sortFactory(this, comparator, mapper));
};
Set.prototype.wasAltered = function() {
return this._map.wasAltered();
};
Set.prototype.__iterate = function(fn, reverse) {var this$0 = this;
return this._map.__iterate(function(_, k) {return fn(k, k, this$0)}, reverse);
};
Set.prototype.__iterator = function(type, reverse) {
return this._map.map(function(_, k) {return k}).__iterator(type, reverse);
};
Set.prototype.__ensureOwner = function(ownerID) {
if (ownerID === this.__ownerID) {
return this;
}
var newMap = this._map.__ensureOwner(ownerID);
if (!ownerID) {
this.__ownerID = ownerID;
this._map = newMap;
return this;
}
return this.__make(newMap, ownerID);
};
function isSet(maybeSet) {
return !!(maybeSet && maybeSet[IS_SET_SENTINEL]);
}
Set.isSet = isSet;
var IS_SET_SENTINEL = '@@__IMMUTABLE_SET__@@';
var SetPrototype = Set.prototype;
SetPrototype[IS_SET_SENTINEL] = true;
SetPrototype[DELETE] = SetPrototype.remove;
SetPrototype.mergeDeep = SetPrototype.merge;
SetPrototype.mergeDeepWith = SetPrototype.mergeWith;
SetPrototype.withMutations = MapPrototype.withMutations;
SetPrototype.asMutable = MapPrototype.asMutable;
SetPrototype.asImmutable = MapPrototype.asImmutable;
SetPrototype.__empty = emptySet;
SetPrototype.__make = makeSet;
function updateSet(set, newMap) {
if (set.__ownerID) {
set.size = newMap.size;
set._map = newMap;
return set;
}
return newMap === set._map ? set :
newMap.size === 0 ? set.__empty() :
set.__make(newMap);
}
function makeSet(map, ownerID) {
var set = Object.create(SetPrototype);
set.size = map ? map.size : 0;
set._map = map;
set.__ownerID = ownerID;
return set;
}
var EMPTY_SET;
function emptySet() {
return EMPTY_SET || (EMPTY_SET = makeSet(emptyMap()));
}
createClass(OrderedSet, Set);
// @pragma Construction
function OrderedSet(value) {
return value === null || value === undefined ? emptyOrderedSet() :
isOrderedSet(value) ? value :
emptyOrderedSet().withMutations(function(set ) {
var iter = SetIterable(value);
assertNotInfinite(iter.size);
iter.forEach(function(v ) {return set.add(v)});
});
}
OrderedSet.of = function(/*...values*/) {
return this(arguments);
};
OrderedSet.fromKeys = function(value) {
return this(KeyedIterable(value).keySeq());
};
OrderedSet.prototype.toString = function() {
return this.__toString('OrderedSet {', '}');
};
function isOrderedSet(maybeOrderedSet) {
return isSet(maybeOrderedSet) && isOrdered(maybeOrderedSet);
}
OrderedSet.isOrderedSet = isOrderedSet;
var OrderedSetPrototype = OrderedSet.prototype;
OrderedSetPrototype[IS_ORDERED_SENTINEL] = true;
OrderedSetPrototype.__empty = emptyOrderedSet;
OrderedSetPrototype.__make = makeOrderedSet;
function makeOrderedSet(map, ownerID) {
var set = Object.create(OrderedSetPrototype);
set.size = map ? map.size : 0;
set._map = map;
set.__ownerID = ownerID;
return set;
}
var EMPTY_ORDERED_SET;
function emptyOrderedSet() {
return EMPTY_ORDERED_SET || (EMPTY_ORDERED_SET = makeOrderedSet(emptyOrderedMap()));
}
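// Stack is a singly linked list of { value, next } head cells: push and pop touch
// only the head, and older stacks keep sharing the remaining cells.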
createClass(Stack, IndexedCollection);
// @pragma Construction
function Stack(value) {
return value === null || value === undefined ? emptyStack() :
isStack(value) ? value :
emptyStack().unshiftAll(value);
}
Stack.of = function(/*...values*/) {
return this(arguments);
};
Stack.prototype.toString = function() {
return this.__toString('Stack [', ']');
};
// @pragma Access
Stack.prototype.get = function(index, notSetValue) {
var head = this._head;
index = wrapIndex(this, index);
while (head && index--) {
head = head.next;
}
return head ? head.value : notSetValue;
};
Stack.prototype.peek = function() {
return this._head && this._head.value;
};
// @pragma Modification
Stack.prototype.push = function(/*...values*/) {
if (arguments.length === 0) {
return this;
}
var newSize = this.size + arguments.length;
var head = this._head;
for (var ii = arguments.length - 1; ii >= 0; ii--) {
head = {
value: arguments[ii],
next: head
};
}
if (this.__ownerID) {
this.size = newSize;
this._head = head;
this.__hash = undefined;
this.__altered = true;
return this;
}
return makeStack(newSize, head);
};
Stack.prototype.pushAll = function(iter) {
iter = IndexedIterable(iter);
if (iter.size === 0) {
return this;
}
assertNotInfinite(iter.size);
var newSize = this.size;
var head = this._head;
iter.reverse().forEach(function(value ) {
newSize++;
head = {
value: value,
next: head
};
});
if (this.__ownerID) {
this.size = newSize;
this._head = head;
this.__hash = undefined;
this.__altered = true;
return this;
}
return makeStack(newSize, head);
};
Stack.prototype.pop = function() {
return this.slice(1);
};
Stack.prototype.unshift = function(/*...values*/) {
return this.push.apply(this, arguments);
};
Stack.prototype.unshiftAll = function(iter) {
return this.pushAll(iter);
};
Stack.prototype.shift = function() {
return this.pop.apply(this, arguments);
};
Stack.prototype.clear = function() {
if (this.size === 0) {
return this;
}
if (this.__ownerID) {
this.size = 0;
this._head = undefined;
this.__hash = undefined;
this.__altered = true;
return this;
}
return emptyStack();
};
Stack.prototype.slice = function(begin, end) {
if (wholeSlice(begin, end, this.size)) {
return this;
}
var resolvedBegin = resolveBegin(begin, this.size);
var resolvedEnd = resolveEnd(end, this.size);
if (resolvedEnd !== this.size) {
// super.slice(begin, end);
return IndexedCollection.prototype.slice.call(this, begin, end);
}
var newSize = this.size - resolvedBegin;
var head = this._head;
while (resolvedBegin--) {
head = head.next;
}
if (this.__ownerID) {
this.size = newSize;
this._head = head;
this.__hash = undefined;
this.__altered = true;
return this;
}
return makeStack(newSize, head);
};
// @pragma Mutability
Stack.prototype.__ensureOwner = function(ownerID) {
if (ownerID === this.__ownerID) {
return this;
}
if (!ownerID) {
this.__ownerID = ownerID;
this.__altered = false;
return this;
}
return makeStack(this.size, this._head, ownerID, this.__hash);
};
// @pragma Iteration
Stack.prototype.__iterate = function(fn, reverse) {
if (reverse) {
return this.reverse().__iterate(fn);
}
var iterations = 0;
var node = this._head;
while (node) {
if (fn(node.value, iterations++, this) === false) {
break;
}
node = node.next;
}
return iterations;
};
Stack.prototype.__iterator = function(type, reverse) {
if (reverse) {
return this.reverse().__iterator(type);
}
var iterations = 0;
var node = this._head;
return new Iterator(function() {
if (node) {
var value = node.value;
node = node.next;
return iteratorValue(type, iterations++, value);
}
return iteratorDone();
});
};
function isStack(maybeStack) {
return !!(maybeStack && maybeStack[IS_STACK_SENTINEL]);
}
Stack.isStack = isStack;
var IS_STACK_SENTINEL = '@@__IMMUTABLE_STACK__@@';
var StackPrototype = Stack.prototype;
StackPrototype[IS_STACK_SENTINEL] = true;
StackPrototype.withMutations = MapPrototype.withMutations;
StackPrototype.asMutable = MapPrototype.asMutable;
StackPrototype.asImmutable = MapPrototype.asImmutable;
StackPrototype.wasAltered = MapPrototype.wasAltered;
function makeStack(size, head, ownerID, hash) {
var map = Object.create(StackPrototype);
map.size = size;
map._head = head;
map.__ownerID = ownerID;
map.__hash = hash;
map.__altered = false;
return map;
}
var EMPTY_STACK;
function emptyStack() {
return EMPTY_STACK || (EMPTY_STACK = makeStack(0));
}
/**
* Contributes additional methods to a constructor
*/
function mixin(ctor, methods) {
var keyCopier = function(key ) { ctor.prototype[key] = methods[key]; };
Object.keys(methods).forEach(keyCopier);
Object.getOwnPropertySymbols &&
Object.getOwnPropertySymbols(methods).forEach(keyCopier);
return ctor;
}
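// mixin is applied below to copy the shared Iterable API (conversions, iteration,
// filter/map/reduce, ...) onto a prototype, including symbol-keyed members when
// Object.getOwnPropertySymbols is available.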
Iterable.Iterator = Iterator;
mixin(Iterable, {
// ### Conversion to other types
toArray: function() {
assertNotInfinite(this.size);
var array = new Array(this.size || 0);
this.valueSeq().__iterate(function(v, i) { array[i] = v; });
return array;
},
toIndexedSeq: function() {
return new ToIndexedSequence(this);
},
toJS: function() {
return this.toSeq().map(
function(value ) {return value && typeof value.toJS === 'function' ? value.toJS() : value}
).__toJS();
},
toJSON: function() {
return this.toSeq().map(
function(value ) {return value && typeof value.toJSON === 'function' ? value.toJSON() : value}
).__toJS();
},
toKeyedSeq: function() {
return new ToKeyedSequence(this, true);
},
toMap: function() {
// Use Late Binding here to solve the circular dependency.
return Map(this.toKeyedSeq());
},
toObject: function() {
assertNotInfinite(this.size);
var object = {};
this.__iterate(function(v, k) { object[k] = v; });
return object;
},
toOrderedMap: function() {
// Use Late Binding here to solve the circular dependency.
return OrderedMap(this.toKeyedSeq());
},
toOrderedSet: function() {
// Use Late Binding here to solve the circular dependency.
return OrderedSet(isKeyed(this) ? this.valueSeq() : this);
},
toSet: function() {
// Use Late Binding here to solve the circular dependency.
return Set(isKeyed(this) ? this.valueSeq() : this);
},
toSetSeq: function() {
return new ToSetSequence(this);
},
toSeq: function() {
return isIndexed(this) ? this.toIndexedSeq() :
isKeyed(this) ? this.toKeyedSeq() :
this.toSetSeq();
},
toStack: function() {
// Use Late Binding here to solve the circular dependency.
return Stack(isKeyed(this) ? this.valueSeq() : this);
},
toList: function() {
// Use Late Binding here to solve the circular dependency.
return List(isKeyed(this) ? this.valueSeq() : this);
},
// ### Common JavaScript methods and properties
toString: function() {
return '[Iterable]';
},
__toString: function(head, tail) {
if (this.size === 0) {
return head + tail;
}
return head + ' ' + this.toSeq().map(this.__toStringMapper).join(', ') + ' ' + tail;
},
// ### ES6 Collection methods (ES6 Array and Map)
concat: function() {var values = SLICE$0.call(arguments, 0);
return reify(this, concatFactory(this, values));
},
includes: function(searchValue) {
return this.some(function(value ) {return is(value, searchValue)});
},
entries: function() {
return this.__iterator(ITERATE_ENTRIES);
},
every: function(predicate, context) {
assertNotInfinite(this.size);
var returnValue = true;
this.__iterate(function(v, k, c) {
if (!predicate.call(context, v, k, c)) {
returnValue = false;
return false;
}
});
return returnValue;
},
filter: function(predicate, context) {
return reify(this, filterFactory(this, predicate, context, true));
},
find: function(predicate, context, notSetValue) {
var entry = this.findEntry(predicate, context);
return entry ? entry[1] : notSetValue;
},
findEntry: function(predicate, context) {
var found;
this.__iterate(function(v, k, c) {
if (predicate.call(context, v, k, c)) {
found = [k, v];
return false;
}
});
return found;
},
findLastEntry: function(predicate, context) {
return this.toSeq().reverse().findEntry(predicate, context);
},
forEach: function(sideEffect, context) {
assertNotInfinite(this.size);
return this.__iterate(context ? sideEffect.bind(context) : sideEffect);
},
join: function(separator) {
assertNotInfinite(this.size);
separator = separator !== undefined ? '' + separator : ',';
var joined = '';
var isFirst = true;
this.__iterate(function(v ) {
isFirst ? (isFirst = false) : (joined += separator);
joined += v !== null && v !== undefined ? v.toString() : '';
});
return joined;
},
keys: function() {
return this.__iterator(ITERATE_KEYS);
},
map: function(mapper, context) {
return reify(this, mapFactory(this, mapper, context));
},
reduce: function(reducer, initialReduction, context) {
assertNotInfinite(this.size);
var reduction;
var useFirst;
if (arguments.length < 2) {
useFirst = true;
} else {
reduction = initialReduction;
}
this.__iterate(function(v, k, c) {
if (useFirst) {
useFirst = false;
reduction = v;
} else {
reduction = reducer.call(context, reduction, v, k, c);
}
});
return reduction;
},
reduceRight: function(reducer, initialReduction, context) {
var reversed = this.toKeyedSeq().reverse();
return reversed.reduce.apply(reversed, arguments);
},
reverse: function() {
return reify(this, reverseFactory(this, true));
},
slice: function(begin, end) {
return reify(this, sliceFactory(this, begin, end, true));
},
some: function(predicate, context) {
return !this.every(not(predicate), context);
},
sort: function(comparator) {
return reify(this, sortFactory(this, comparator));
},
values: function() {
return this.__iterator(ITERATE_VALUES);
},
// ### More sequential methods
butLast: function() {
return this.slice(0, -1);
},
isEmpty: function() {
return this.size !== undefined ? this.size === 0 : !this.some(function() {return true});
},
count: function(predicate, context) {
return ensureSize(
predicate ? this.toSeq().filter(predicate, context) : this
);
},
countBy: function(grouper, context) {
return countByFactory(this, grouper, context);
},
equals: function(other) {
return deepEqual(this, other);
},
entrySeq: function() {
var iterable = this;
if (iterable._cache) {
// We cache as an entries array, so we can just return the cache!
return new ArraySeq(iterable._cache);
}
var entriesSequence = iterable.toSeq().map(entryMapper).toIndexedSeq();
entriesSequence.fromEntrySeq = function() {return iterable.toSeq()};
return entriesSequence;
},
filterNot: function(predicate, context) {
return this.filter(not(predicate), context);
},
findLast: function(predicate, context, notSetValue) {
return this.toKeyedSeq().reverse().find(predicate, context, notSetValue);
},
first: function() {
return this.find(returnTrue);
},
flatMap: function(mapper, context) {
return reify(this, flatMapFactory(this, mapper, context));
},
flatten: function(depth) {
return reify(this, flattenFactory(this, depth, true));
},
fromEntrySeq: function() {
return new FromEntriesSequence(this);
},
get: function(searchKey, notSetValue) {
return this.find(function(_, key) {return is(key, searchKey)}, undefined, notSetValue);
},
getIn: function(searchKeyPath, notSetValue) {
var nested = this;
// Note: in an ES6 environment, we would prefer:
// for (var key of searchKeyPath) {
var iter = forceIterator(searchKeyPath);
var step;
while (!(step = iter.next()).done) {
var key = step.value;
nested = nested && nested.get ? nested.get(key, NOT_SET) : NOT_SET;
if (nested === NOT_SET) {
return notSetValue;
}
}
return nested;
},
groupBy: function(grouper, context) {
return groupByFactory(this, grouper, context);
},
has: function(searchKey) {
return this.get(searchKey, NOT_SET) !== NOT_SET;
},
hasIn: function(searchKeyPath) {
return this.getIn(searchKeyPath, NOT_SET) !== NOT_SET;
},
isSubset: function(iter) {
iter = typeof iter.includes === 'function' ? iter : Iterable(iter);
return this.every(function(value ) {return iter.includes(value)});
},
isSuperset: function(iter) {
iter = typeof iter.isSubset === 'function' ? iter : Iterable(iter);
return iter.isSubset(this);
},
keySeq: function() {
return this.toSeq().map(keyMapper).toIndexedSeq();
},
last: function() {
return this.toSeq().reverse().first();
},
max: function(comparator) {
return maxFactory(this, comparator);
},
maxBy: function(mapper, comparator) {
return maxFactory(this, comparator, mapper);
},
min: function(comparator) {
return maxFactory(this, comparator ? neg(comparator) : defaultNegComparator);
},
minBy: function(mapper, comparator) {
return maxFactory(this, comparator ? neg(comparator) : defaultNegComparator, mapper);
},
rest: function() {
return this.slice(1);
},
skip: function(amount) {
return this.slice(Math.max(0, amount));
},
skipLast: function(amount) {
return reify(this, this.toSeq().reverse().skip(amount).reverse());
},
skipWhile: function(predicate, context) {
return reify(this, skipWhileFactory(this, predicate, context, true));
},
skipUntil: function(predicate, context) {
return this.skipWhile(not(predicate), context);
},
sortBy: function(mapper, comparator) {
return reify(this, sortFactory(this, comparator, mapper));
},
take: function(amount) {
return this.slice(0, Math.max(0, amount));
},
takeLast: function(amount) {
return reify(this, this.toSeq().reverse().take(amount).reverse());
},
takeWhile: function(predicate, context) {
return reify(this, takeWhileFactory(this, predicate, context));
},
takeUntil: function(predicate, context) {
return this.takeWhile(not(predicate), context);
},
valueSeq: function() {
return this.toIndexedSeq();
},
// ### Hashable Object
hashCode: function() {
return this.__hash || (this.__hash = hashIterable(this));
}
// ### Internal
// abstract __iterate(fn, reverse)
// abstract __iterator(type, reverse)
});
// var IS_ITERABLE_SENTINEL = '@@__IMMUTABLE_ITERABLE__@@';
// var IS_KEYED_SENTINEL = '@@__IMMUTABLE_KEYED__@@';
// var IS_INDEXED_SENTINEL = '@@__IMMUTABLE_INDEXED__@@';
// var IS_ORDERED_SENTINEL = '@@__IMMUTABLE_ORDERED__@@';
var IterablePrototype = Iterable.prototype;
IterablePrototype[IS_ITERABLE_SENTINEL] = true;
IterablePrototype[ITERATOR_SYMBOL] = IterablePrototype.values;
IterablePrototype.__toJS = IterablePrototype.toArray;
IterablePrototype.__toStringMapper = quoteString;
IterablePrototype.inspect =
IterablePrototype.toSource = function() { return this.toString(); };
IterablePrototype.chain = IterablePrototype.flatMap;
IterablePrototype.contains = IterablePrototype.includes;
// Temporary warning about using length
(function () {
try {
Object.defineProperty(IterablePrototype, 'length', {
get: function () {
if (!Iterable.noLengthWarning) {
var stack;
try {
throw new Error();
} catch (error) {
stack = error.stack;
}
if (stack.indexOf('_wrapObject') === -1) {
console && console.warn && console.warn(
'iterable.length has been deprecated, '+
'use iterable.size or iterable.count(). '+
'This warning will become a silent error in a future version. ' +
stack
);
return this.size;
}
}
}
});
} catch (e) {}
})();
mixin(KeyedIterable, {
// ### More sequential methods
flip: function() {
return reify(this, flipFactory(this));
},
findKey: function(predicate, context) {
var entry = this.findEntry(predicate, context);
return entry && entry[0];
},
findLastKey: function(predicate, context) {
return this.toSeq().reverse().findKey(predicate, context);
},
keyOf: function(searchValue) {
return this.findKey(function(value ) {return is(value, searchValue)});
},
lastKeyOf: function(searchValue) {
return this.findLastKey(function(value ) {return is(value, searchValue)});
},
mapEntries: function(mapper, context) {var this$0 = this;
var iterations = 0;
return reify(this,
this.toSeq().map(
function(v, k) {return mapper.call(context, [k, v], iterations++, this$0)}
).fromEntrySeq()
);
},
mapKeys: function(mapper, context) {var this$0 = this;
return reify(this,
this.toSeq().flip().map(
function(k, v) {return mapper.call(context, k, v, this$0)}
).flip()
);
}
});
var KeyedIterablePrototype = KeyedIterable.prototype;
KeyedIterablePrototype[IS_KEYED_SENTINEL] = true;
KeyedIterablePrototype[ITERATOR_SYMBOL] = IterablePrototype.entries;
KeyedIterablePrototype.__toJS = IterablePrototype.toObject;
KeyedIterablePrototype.__toStringMapper = function(v, k) {return JSON.stringify(k) + ': ' + quoteString(v)};
mixin(IndexedIterable, {
// ### Conversion to other types
toKeyedSeq: function() {
return new ToKeyedSequence(this, false);
},
// ### ES6 Collection methods (ES6 Array and Map)
filter: function(predicate, context) {
return reify(this, filterFactory(this, predicate, context, false));
},
findIndex: function(predicate, context) {
var entry = this.findEntry(predicate, context);
return entry ? entry[0] : -1;
},
indexOf: function(searchValue) {
var key = this.toKeyedSeq().keyOf(searchValue);
return key === undefined ? -1 : key;
},
lastIndexOf: function(searchValue) {
var key = this.toKeyedSeq().reverse().keyOf(searchValue);
return key === undefined ? -1 : key;
// var index =
// return this.toSeq().reverse().indexOf(searchValue);
},
reverse: function() {
return reify(this, reverseFactory(this, false));
},
slice: function(begin, end) {
return reify(this, sliceFactory(this, begin, end, false));
},
splice: function(index, removeNum /*, ...values*/) {
var numArgs = arguments.length;
removeNum = Math.max(removeNum | 0, 0);
if (numArgs === 0 || (numArgs === 2 && !removeNum)) {
return this;
}
// If index is negative, it should resolve relative to the size of the
// collection. However size may be expensive to compute if not cached, so
// only call count() if the number is in fact negative.
index = resolveBegin(index, index < 0 ? this.count() : this.size);
var spliced = this.slice(0, index);
return reify(
this,
numArgs === 1 ?
spliced :
spliced.concat(arrCopy(arguments, 2), this.slice(index + removeNum))
);
},
// ### More collection methods
findLastIndex: function(predicate, context) {
var key = this.toKeyedSeq().findLastKey(predicate, context);
return key === undefined ? -1 : key;
},
first: function() {
return this.get(0);
},
flatten: function(depth) {
return reify(this, flattenFactory(this, depth, false));
},
get: function(index, notSetValue) {
index = wrapIndex(this, index);
return (index < 0 || (this.size === Infinity ||
(this.size !== undefined && index > this.size))) ?
notSetValue :
this.find(function(_, key) {return key === index}, undefined, notSetValue);
},
has: function(index) {
index = wrapIndex(this, index);
return index >= 0 && (this.size !== undefined ?
this.size === Infinity || index < this.size :
this.indexOf(index) !== -1
);
},
interpose: function(separator) {
return reify(this, interposeFactory(this, separator));
},
interleave: function(/*...iterables*/) {
var iterables = [this].concat(arrCopy(arguments));
var zipped = zipWithFactory(this.toSeq(), IndexedSeq.of, iterables);
var interleaved = zipped.flatten(true);
if (zipped.size) {
interleaved.size = zipped.size * iterables.length;
}
return reify(this, interleaved);
},
last: function() {
return this.get(-1);
},
skipWhile: function(predicate, context) {
return reify(this, skipWhileFactory(this, predicate, context, false));
},
zip: function(/*, ...iterables */) {
var iterables = [this].concat(arrCopy(arguments));
return reify(this, zipWithFactory(this, defaultZipper, iterables));
},
zipWith: function(zipper/*, ...iterables */) {
var iterables = arrCopy(arguments);
iterables[0] = this;
return reify(this, zipWithFactory(this, zipper, iterables));
}
});
IndexedIterable.prototype[IS_INDEXED_SENTINEL] = true;
IndexedIterable.prototype[IS_ORDERED_SENTINEL] = true;
mixin(SetIterable, {
// ### ES6 Collection methods (ES6 Array and Map)
get: function(value, notSetValue) {
return this.has(value) ? value : notSetValue;
},
includes: function(value) {
return this.has(value);
},
// ### More sequential methods
keySeq: function() {
return this.valueSeq();
}
});
SetIterable.prototype.has = IterablePrototype.includes;
// Mixin subclasses
mixin(KeyedSeq, KeyedIterable.prototype);
mixin(IndexedSeq, IndexedIterable.prototype);
mixin(SetSeq, SetIterable.prototype);
mixin(KeyedCollection, KeyedIterable.prototype);
mixin(IndexedCollection, IndexedIterable.prototype);
mixin(SetCollection, SetIterable.prototype);
// #pragma Helper functions
function keyMapper(v, k) {
return k;
}
function entryMapper(v, k) {
return [k, v];
}
function not(predicate) {
return function() {
return !predicate.apply(this, arguments);
}
}
function neg(predicate) {
return function() {
return -predicate.apply(this, arguments);
}
}
function quoteString(value) {
return typeof value === 'string' ? JSON.stringify(value) : value;
}
function defaultZipper() {
return arrCopy(arguments);
}
function defaultNegComparator(a, b) {
return a < b ? 1 : a > b ? -1 : 0;
}
function hashIterable(iterable) {
if (iterable.size === Infinity) {
return 0;
}
var ordered = isOrdered(iterable);
var keyed = isKeyed(iterable);
var h = ordered ? 1 : 0;
var size = iterable.__iterate(
keyed ?
ordered ?
function(v, k) { h = 31 * h + hashMerge(hash(v), hash(k)) | 0; } :
function(v, k) { h = h + hashMerge(hash(v), hash(k)) | 0; } :
ordered ?
function(v ) { h = 31 * h + hash(v) | 0; } :
function(v ) { h = h + hash(v) | 0; }
);
return murmurHashOfSize(size, h);
}
function murmurHashOfSize(size, h) {
h = imul(h, 0xCC9E2D51);
h = imul(h << 15 | h >>> -15, 0x1B873593);
h = imul(h << 13 | h >>> -13, 5);
h = (h + 0xE6546B64 | 0) ^ size;
h = imul(h ^ h >>> 16, 0x85EBCA6B);
h = imul(h ^ h >>> 13, 0xC2B2AE35);
h = smi(h ^ h >>> 16);
return h;
}
function hashMerge(a, b) {
return a ^ b + 0x9E3779B9 + (a << 6) + (a >> 2) | 0; // int
}
var Immutable = {
Iterable: Iterable,
Seq: Seq,
Collection: Collection,
Map: Map,
OrderedMap: OrderedMap,
List: List,
Stack: Stack,
Set: Set,
OrderedSet: OrderedSet,
Record: Record,
Range: Range,
Repeat: Repeat,
is: is,
fromJS: fromJS
};
return Immutable;
}));
|
Bringing in ImmutableJS 3.8.2, which is released under the MIT license. Source of ImmutableJS: https://github.com/facebook/immutable-js/releases/tag/v3.8.2
|
src/thirdparty/immutable.js
|
Bringing in ImmutableJS 3.8.2, which is released under the MIT license. Source of ImmutableJS: https://github.com/facebook/immutable-js/releases/tag/v3.8.2
|
<ide><path>rc/thirdparty/immutable.js
<ide> /**
<del> * Copyright (c) 2014-2015, Facebook, Inc.
<del> * All rights reserved.
<add> * Copyright (c) 2014-present, Facebook, Inc.
<ide> *
<del> * This source code is licensed under the BSD-style license found in the
<del> * LICENSE file in the root directory of this source tree. An additional grant
<del> * of patent rights can be found in the PATENTS file in the same directory.
<add> * This source code is licensed under the MIT license found in the
<add> * LICENSE file in the root directory of this source tree.
<ide> */
<ide>
<ide> (function (global, factory) {
<ide> typeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory() :
<ide> typeof define === 'function' && define.amd ? define(factory) :
<del> global.Immutable = factory();
<add> (global.Immutable = factory());
<ide> }(this, function () { 'use strict';var SLICE$0 = Array.prototype.slice;
<ide>
<ide> function createClass(ctor, superClass) {
<ide> }
<ide> return 'Range [ ' +
<ide> this._start + '...' + this._end +
<del> (this._step > 1 ? ' by ' + this._step : '') +
<add> (this._step !== 1 ? ' by ' + this._step : '') +
<ide> ' ]';
<ide> };
<ide>
<ide> }
<ide> var type = typeof o;
<ide> if (type === 'number') {
<add> if (o !== o || o === Infinity) {
<add> return 0;
<add> }
<ide> var h = o | 0;
<ide> if (h !== o) {
<ide> h ^= o * 0xFFFFFFFF;
<ide> });
<ide> }
<ide>
<add> Map.of = function() {var keyValues = SLICE$0.call(arguments, 0);
<add> return emptyMap().withMutations(function(map ) {
<add> for (var i = 0; i < keyValues.length; i += 2) {
<add> if (i + 1 >= keyValues.length) {
<add> throw new Error('Missing value for key: ' + keyValues[i]);
<add> }
<add> map.set(keyValues[i], keyValues[i + 1]);
<add> }
<add> });
<add> };
<add>
<ide> Map.prototype.toString = function() {
<ide> return this.__toString('Map {', '}');
<ide> };
<ide> begin = begin | 0;
<ide> }
<ide> if (end !== undefined) {
<del> end = end | 0;
<add> if (end === Infinity) {
<add> end = originalSize;
<add> } else {
<add> end = end | 0;
<add> }
<ide> }
<ide>
<ide> if (wholeSlice(begin, end, originalSize)) {
<ide> if (!this.has(k)) {
<ide> throw new Error('Cannot set unknown key "' + k + '" on ' + recordName(this));
<ide> }
<add> if (this._map && !this._map.has(k)) {
<add> var defaultVal = this._defaultValues[k];
<add> if (v === defaultVal) {
<add> return this;
<add> }
<add> }
<ide> var newMap = this._map && this._map.set(k, v);
<ide> if (this.__ownerID || newMap === this._map) {
<ide> return this;
<ide> return entry ? entry[1] : notSetValue;
<ide> },
<ide>
<del> findEntry: function(predicate, context) {
<del> var found;
<del> this.__iterate(function(v, k, c) {
<del> if (predicate.call(context, v, k, c)) {
<del> found = [k, v];
<del> return false;
<del> }
<del> });
<del> return found;
<del> },
<del>
<del> findLastEntry: function(predicate, context) {
<del> return this.toSeq().reverse().findEntry(predicate, context);
<del> },
<del>
<ide> forEach: function(sideEffect, context) {
<ide> assertNotInfinite(this.size);
<ide> return this.__iterate(context ? sideEffect.bind(context) : sideEffect);
<ide> return this.filter(not(predicate), context);
<ide> },
<ide>
<add> findEntry: function(predicate, context, notSetValue) {
<add> var found = notSetValue;
<add> this.__iterate(function(v, k, c) {
<add> if (predicate.call(context, v, k, c)) {
<add> found = [k, v];
<add> return false;
<add> }
<add> });
<add> return found;
<add> },
<add>
<add> findKey: function(predicate, context) {
<add> var entry = this.findEntry(predicate, context);
<add> return entry && entry[0];
<add> },
<add>
<ide> findLast: function(predicate, context, notSetValue) {
<ide> return this.toKeyedSeq().reverse().find(predicate, context, notSetValue);
<add> },
<add>
<add> findLastEntry: function(predicate, context, notSetValue) {
<add> return this.toKeyedSeq().reverse().findEntry(predicate, context, notSetValue);
<add> },
<add>
<add> findLastKey: function(predicate, context) {
<add> return this.toKeyedSeq().reverse().findKey(predicate, context);
<ide> },
<ide>
<ide> first: function() {
<ide> return iter.isSubset(this);
<ide> },
<ide>
<add> keyOf: function(searchValue) {
<add> return this.findKey(function(value ) {return is(value, searchValue)});
<add> },
<add>
<ide> keySeq: function() {
<ide> return this.toSeq().map(keyMapper).toIndexedSeq();
<ide> },
<ide>
<ide> last: function() {
<ide> return this.toSeq().reverse().first();
<add> },
<add>
<add> lastKeyOf: function(searchValue) {
<add> return this.toKeyedSeq().reverse().keyOf(searchValue);
<ide> },
<ide>
<ide> max: function(comparator) {
<ide> IterablePrototype.chain = IterablePrototype.flatMap;
<ide> IterablePrototype.contains = IterablePrototype.includes;
<ide>
<del> // Temporary warning about using length
<del> (function () {
<del> try {
<del> Object.defineProperty(IterablePrototype, 'length', {
<del> get: function () {
<del> if (!Iterable.noLengthWarning) {
<del> var stack;
<del> try {
<del> throw new Error();
<del> } catch (error) {
<del> stack = error.stack;
<del> }
<del> if (stack.indexOf('_wrapObject') === -1) {
<del> console && console.warn && console.warn(
<del> 'iterable.length has been deprecated, '+
<del> 'use iterable.size or iterable.count(). '+
<del> 'This warning will become a silent error in a future version. ' +
<del> stack
<del> );
<del> return this.size;
<del> }
<del> }
<del> }
<del> });
<del> } catch (e) {}
<del> })();
<del>
<del>
<del>
<ide> mixin(KeyedIterable, {
<ide>
<ide> // ### More sequential methods
<ide>
<ide> flip: function() {
<ide> return reify(this, flipFactory(this));
<del> },
<del>
<del> findKey: function(predicate, context) {
<del> var entry = this.findEntry(predicate, context);
<del> return entry && entry[0];
<del> },
<del>
<del> findLastKey: function(predicate, context) {
<del> return this.toSeq().reverse().findKey(predicate, context);
<del> },
<del>
<del> keyOf: function(searchValue) {
<del> return this.findKey(function(value ) {return is(value, searchValue)});
<del> },
<del>
<del> lastKeyOf: function(searchValue) {
<del> return this.findLastKey(function(value ) {return is(value, searchValue)});
<ide> },
<ide>
<ide> mapEntries: function(mapper, context) {var this$0 = this;
<ide> },
<ide>
<ide> indexOf: function(searchValue) {
<del> var key = this.toKeyedSeq().keyOf(searchValue);
<add> var key = this.keyOf(searchValue);
<ide> return key === undefined ? -1 : key;
<ide> },
<ide>
<ide> lastIndexOf: function(searchValue) {
<del> var key = this.toKeyedSeq().reverse().keyOf(searchValue);
<add> var key = this.lastKeyOf(searchValue);
<ide> return key === undefined ? -1 : key;
<del>
<del> // var index =
<del> // return this.toSeq().reverse().indexOf(searchValue);
<ide> },
<ide>
<ide> reverse: function() {
<ide> // ### More collection methods
<ide>
<ide> findLastIndex: function(predicate, context) {
<del> var key = this.toKeyedSeq().findLastKey(predicate, context);
<del> return key === undefined ? -1 : key;
<add> var entry = this.findLastEntry(predicate, context);
<add> return entry ? entry[0] : -1;
<ide> },
<ide>
<ide> first: function() {
<ide> return reify(this, interleaved);
<ide> },
<ide>
<add> keySeq: function() {
<add> return Range(0, this.size);
<add> },
<add>
<ide> last: function() {
<ide> return this.get(-1);
<ide> },
<ide> });
<ide>
<ide> SetIterable.prototype.has = IterablePrototype.includes;
<add> SetIterable.prototype.contains = SetIterable.prototype.includes;
<ide>
<ide>
<ide> // Mixin subclasses
<ide> }
<ide>
<ide> function quoteString(value) {
<del> return typeof value === 'string' ? JSON.stringify(value) : value;
<add> return typeof value === 'string' ? JSON.stringify(value) : String(value);
<ide> }
<ide>
<ide> function defaultZipper() {
|
|
Java
|
apache-2.0
|
ec9ae88aa904bf61eb93868d781d0109e4e83f43
| 0 |
eclipse-vertx/vertx-codegen,vert-x3/vertx-codegen
|
package io.vertx.codegen;
import javax.lang.model.type.DeclaredType;
import javax.lang.model.type.TypeMirror;
import javax.lang.model.type.TypeVariable;
import javax.lang.model.type.WildcardType;
import javax.lang.model.util.Types;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.stream.Collectors;
/**
* Describes a java type.
*
* @author <a href="mailto:[email protected]">Julien Viet</a>
*/
public abstract class TypeInfo {
public static TypeInfo create(Type type) {
if (type instanceof java.lang.Class) {
String fqcn = type.getTypeName();
return new Class(Helper.getKind(((java.lang.Class) type)::getAnnotation, fqcn), fqcn);
} else if (type instanceof ParameterizedType) {
ParameterizedType parameterizedType = (ParameterizedType) type;
List<TypeInfo> args = Arrays.asList(parameterizedType.getActualTypeArguments()).
stream().
map(TypeInfo::create).
collect(Collectors.toList());
java.lang.Class raw = (java.lang.Class) parameterizedType.getRawType();
String fqcn = raw.getName();
return new Parameterized(new Class(Helper.getKind(raw::getAnnotation, fqcn), fqcn), args);
} else if (type instanceof java.lang.reflect.TypeVariable) {
return new Variable(((java.lang.reflect.TypeVariable)type).getName());
} else {
throw new IllegalArgumentException("Unsupported type " + type);
}
}
public static TypeInfo create(Types typeUtils, TypeMirror type) {
switch (type.getKind()) {
case DECLARED:
return create(typeUtils, (DeclaredType) type);
case DOUBLE:
case LONG:
case FLOAT:
case CHAR:
case BYTE:
case SHORT:
case BOOLEAN:
case INT:
return new Primitive(type.toString());
case TYPEVAR:
return create(typeUtils, (TypeVariable) type);
case WILDCARD:
return create(typeUtils, (WildcardType) type);
default:
throw new IllegalArgumentException("Illegal type " + type + " of kind " + type.getKind());
}
}
public static Wildcard create(Types typeUtils, WildcardType type) {
if (type.getExtendsBound() != null) {
throw new IllegalArgumentException("Wildcard type cannot have an upper bound");
}
if (type.getSuperBound() != null) {
throw new IllegalArgumentException("Wildcard type cannot have a lower bound");
}
return new Wildcard();
}
/**
* Simple wildcard without bound support.
*/
public static class Wildcard extends TypeInfo {
@Override
public boolean equals(Object obj) {
return obj instanceof Wildcard;
}
@Override
public String format(boolean qualified) {
return "?";
}
}
public static Variable create(Types typeUtils, TypeVariable type) {
return new Variable(type.toString());
}
public static TypeInfo create(Types typeUtils, DeclaredType type) {
String fqcn = typeUtils.erasure(type).toString();
TypeKind kind = Helper.getKind(annotationType -> type.asElement().getAnnotation(annotationType), fqcn);
Class raw = new Class(kind, fqcn);
List<? extends TypeMirror> typeArgs = type.getTypeArguments();
if (typeArgs.size() > 0) {
List<TypeInfo> typeArguments;
typeArguments = new ArrayList<>(typeArgs.size());
for (TypeMirror typeArg : typeArgs) {
TypeInfo typeArgDesc = create(typeUtils, typeArg);
// Need to check it is an interface type
typeArguments.add(typeArgDesc);
}
return new Parameterized(raw, typeArguments);
} else {
return raw;
}
}
public static class Primitive extends TypeInfo {
final String name;
public Primitive(String name) {
this.name = name;
}
@Override
public boolean equals(Object obj) {
if (obj instanceof Primitive) {
return name.equals(((Primitive) obj).name);
}
return false;
}
@Override
public String format(boolean qualified) {
return name;
}
}
public static class Variable extends TypeInfo {
final String name;
public Variable(String name) {
this.name = name;
}
@Override
public boolean equals(Object obj) {
if (obj instanceof Variable) {
Variable that = (Variable) obj;
return name.equals(that.name);
} else {
return false;
}
}
@Override
public String toString() {
return name;
}
@Override
public String format(boolean qualified) {
return name;
}
}
public static class Parameterized extends TypeInfo {
final Class raw;
final List<TypeInfo> typeArguments;
public Parameterized(Class raw, List<TypeInfo> typeArguments) {
this.raw = raw;
this.typeArguments = typeArguments;
}
public Class getRaw() {
return raw;
}
@Override
public void collectImports(Collection<TypeInfo.Class> imports) {
raw.collectImports(imports);
typeArguments.stream().forEach(a -> a.collectImports(imports));
}
@Override
public boolean equals(Object obj) {
if (obj instanceof Parameterized) {
Parameterized that = (Parameterized) obj;
return raw.equals(that.raw) && typeArguments.equals(that.typeArguments);
}
return false;
}
@Override
public String format(boolean qualified) {
StringBuilder buf = new StringBuilder(raw.format(qualified)).append('<');
for (int i = 0;i < typeArguments.size();i++) {
TypeInfo typeArgument = typeArguments.get(i);
if (i > 0) {
buf.append(',');
}
buf.append(typeArgument.format(qualified));
}
buf.append('>');
return buf.toString();
}
}
public static class Class extends TypeInfo {
final TypeKind kind;
final String fqcn;
final String simpleName;
public Class(TypeKind kind, String fqcn) {
this.kind = kind;
this.fqcn = fqcn;
this.simpleName = Helper.getSimpleName(fqcn);
}
public TypeKind getKind() {
return kind;
}
public String getSimpleName() {
return simpleName;
}
@Override
public void collectImports(Collection<TypeInfo.Class> imports) {
imports.add(this);
}
@Override
public boolean equals(Object obj) {
if (obj instanceof Class) {
return fqcn.equals(((Class) obj).fqcn);
}
return false;
}
@Override
public String format(boolean qualified) {
return qualified ? fqcn : simpleName;
}
}
public abstract boolean equals(Object obj);
public int hashCode() {
return toString().hashCode();
}
/**
* Collect the import fqcn needed by this type.
*
* @param imports the imports
*/
public void collectImports(Collection<TypeInfo.Class> imports) {
}
/**
* @return the type name
*/
public String getName() {
return format(true);
}
/**
* Renders the type name using fqcn.
*
* @return the representation of this type
*/
public String toString() {
return getName();
}
/**
* Renders the type name.
*
* @param qualified true when class fqcn should be used, otherwise simple names will be used
* @return the representation of the type
*/
public abstract String format(boolean qualified);
}
|
src/main/java/io/vertx/codegen/TypeInfo.java
|
package io.vertx.codegen;
import javax.lang.model.type.DeclaredType;
import javax.lang.model.type.TypeMirror;
import javax.lang.model.type.TypeVariable;
import javax.lang.model.type.WildcardType;
import javax.lang.model.util.Types;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.stream.Collectors;
/**
* Describes a java type.
*
* @author <a href="mailto:[email protected]">Julien Viet</a>
*/
public abstract class TypeInfo {
public static TypeInfo create(Type type) {
if (type instanceof java.lang.Class) {
String fqcn = type.getTypeName();
return new Class(Helper.getKind(((java.lang.Class) type)::getAnnotation, fqcn), fqcn);
} else if (type instanceof ParameterizedType) {
ParameterizedType parameterizedType = (ParameterizedType) type;
List<TypeInfo> args = Arrays.asList(parameterizedType.getActualTypeArguments()).
stream().
map(TypeInfo::create).
collect(Collectors.toList());
java.lang.Class raw = (java.lang.Class) parameterizedType.getRawType();
String fqcn = raw.getName();
return new Parameterized(new Class(Helper.getKind(raw::getAnnotation, fqcn), fqcn), args);
} else if (type instanceof java.lang.reflect.TypeVariable) {
return new Variable(((java.lang.reflect.TypeVariable)type).getName());
} else {
throw new IllegalArgumentException("Unsupported type " + type);
}
}
public static TypeInfo create(Types typeUtils, TypeMirror type) {
switch (type.getKind()) {
case DECLARED:
return create(typeUtils, (DeclaredType) type);
case DOUBLE:
case LONG:
case FLOAT:
case CHAR:
case BYTE:
case SHORT:
case BOOLEAN:
case INT:
return new Primitive(type.toString());
case TYPEVAR:
return create(typeUtils, (TypeVariable) type);
case WILDCARD:
return create(typeUtils, (WildcardType) type);
default:
throw new IllegalArgumentException("Illegal type " + type + " of kind " + type.getKind());
}
}
public static Wildcard create(Types typeUtils, WildcardType type) {
if (type.getExtendsBound() != null) {
throw new IllegalArgumentException("Wildcard type cannot have an upper bound");
}
if (type.getSuperBound() != null) {
throw new IllegalArgumentException("Wildcard type cannot have a lower bound");
}
return new Wildcard();
}
/**
* Simple wildcard without bound support.
*/
public static class Wildcard extends TypeInfo {
@Override
public boolean equals(Object obj) {
return obj instanceof Wildcard;
}
@Override
public String toString(boolean qualified) {
return "?";
}
}
public static Variable create(Types typeUtils, TypeVariable type) {
return new Variable(type.toString());
}
public static TypeInfo create(Types typeUtils, DeclaredType type) {
String fqcn = typeUtils.erasure(type).toString();
TypeKind kind = Helper.getKind(annotationType -> type.asElement().getAnnotation(annotationType), fqcn);
Class raw = new Class(kind, fqcn);
List<? extends TypeMirror> typeArgs = type.getTypeArguments();
if (typeArgs.size() > 0) {
List<TypeInfo> typeArguments;
typeArguments = new ArrayList<>(typeArgs.size());
for (TypeMirror typeArg : typeArgs) {
TypeInfo typeArgDesc = create(typeUtils, typeArg);
// Need to check it is an interface type
typeArguments.add(typeArgDesc);
}
return new Parameterized(raw, typeArguments);
} else {
return raw;
}
}
public static class Primitive extends TypeInfo {
final String name;
public Primitive(String name) {
this.name = name;
}
@Override
public boolean equals(Object obj) {
if (obj instanceof Primitive) {
return name.equals(((Primitive) obj).name);
}
return false;
}
@Override
public String toString(boolean qualified) {
return name;
}
}
public static class Variable extends TypeInfo {
final String name;
public Variable(String name) {
this.name = name;
}
@Override
public boolean equals(Object obj) {
if (obj instanceof Variable) {
Variable that = (Variable) obj;
return name.equals(that.name);
} else {
return false;
}
}
@Override
public String toString() {
return name;
}
@Override
public String toString(boolean qualified) {
return name;
}
}
public static class Parameterized extends TypeInfo {
final Class raw;
final List<TypeInfo> typeArguments;
public Parameterized(Class raw, List<TypeInfo> typeArguments) {
this.raw = raw;
this.typeArguments = typeArguments;
}
public Class getRaw() {
return raw;
}
@Override
public void collectImports(Collection<TypeInfo.Class> imports) {
raw.collectImports(imports);
typeArguments.stream().forEach(a -> a.collectImports(imports));
}
@Override
public boolean equals(Object obj) {
if (obj instanceof Parameterized) {
Parameterized that = (Parameterized) obj;
return raw.equals(that.raw) && typeArguments.equals(that.typeArguments);
}
return false;
}
@Override
public String toString(boolean qualified) {
StringBuilder buf = new StringBuilder(raw.toString(qualified)).append('<');
for (int i = 0;i < typeArguments.size();i++) {
TypeInfo typeArgument = typeArguments.get(i);
if (i > 0) {
buf.append(',');
}
buf.append(typeArgument.toString(qualified));
}
buf.append('>');
return buf.toString();
}
}
public static class Class extends TypeInfo {
final TypeKind kind;
final String fqcn;
final String simpleName;
public Class(TypeKind kind, String fqcn) {
this.kind = kind;
this.fqcn = fqcn;
this.simpleName = Helper.getSimpleName(fqcn);
}
public TypeKind getKind() {
return kind;
}
public String getSimpleName() {
return simpleName;
}
@Override
public void collectImports(Collection<TypeInfo.Class> imports) {
imports.add(this);
}
@Override
public boolean equals(Object obj) {
if (obj instanceof Class) {
return fqcn.equals(((Class) obj).fqcn);
}
return false;
}
@Override
public String toString(boolean qualified) {
return qualified ? fqcn : simpleName;
}
}
public abstract boolean equals(Object obj);
public int hashCode() {
return toString().hashCode();
}
/**
* Collect the import fqcn needed by this type.
*
* @param imports the imports
*/
public void collectImports(Collection<TypeInfo.Class> imports) {
}
/**
* Renders the type name using fqcn.
*
* @return the representation of this type
*/
public String toString() {
return toString(true);
}
/**
* Renders the type name.
*
* @param qualified true when class fqcn should be used, otherwise simple names will be used
* @return the representation of the type
*/
public abstract String toString(boolean qualified);
}
|
Use getName on TypeInfo instead of toString
|
src/main/java/io/vertx/codegen/TypeInfo.java
|
Use getName on TypeInfo instead of toString
|
<ide><path>rc/main/java/io/vertx/codegen/TypeInfo.java
<ide> }
<ide>
<ide> @Override
<del> public String toString(boolean qualified) {
<add> public String format(boolean qualified) {
<ide> return "?";
<ide> }
<ide> }
<ide> }
<ide>
<ide> @Override
<del> public String toString(boolean qualified) {
<add> public String format(boolean qualified) {
<ide> return name;
<ide> }
<ide> }
<ide> }
<ide>
<ide> @Override
<del> public String toString(boolean qualified) {
<add> public String format(boolean qualified) {
<ide> return name;
<ide> }
<ide> }
<ide> }
<ide>
<ide> @Override
<del> public String toString(boolean qualified) {
<del> StringBuilder buf = new StringBuilder(raw.toString(qualified)).append('<');
<add> public String format(boolean qualified) {
<add> StringBuilder buf = new StringBuilder(raw.format(qualified)).append('<');
<ide> for (int i = 0;i < typeArguments.size();i++) {
<ide> TypeInfo typeArgument = typeArguments.get(i);
<ide> if (i > 0) {
<ide> buf.append(',');
<ide> }
<del> buf.append(typeArgument.toString(qualified));
<add> buf.append(typeArgument.format(qualified));
<ide> }
<ide> buf.append('>');
<ide> return buf.toString();
<ide> }
<ide>
<ide> @Override
<del> public String toString(boolean qualified) {
<add> public String format(boolean qualified) {
<ide> return qualified ? fqcn : simpleName;
<ide> }
<ide> }
<ide> }
<ide>
<ide> /**
<add> * @return the type name
<add> */
<add> public String getName() {
<add> return format(true);
<add> }
<add>
<add> /**
<ide> * Renders the type name using fqcn.
<ide> *
<ide> * @return the representation of this type
<ide> */
<ide> public String toString() {
<del> return toString(true);
<add> return getName();
<ide> }
<ide>
<ide> /**
<ide> * @param qualified true when class fqcn should be used, otherwise simple names will be used
<ide> * @return the representation of the type
<ide> */
<del> public abstract String toString(boolean qualified);
<add> public abstract String format(boolean qualified);
<ide>
<ide> }
|
|
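The commit above renames TypeInfo.toString(boolean) to format(boolean) and adds a getName() accessor for the fully qualified rendering. The sketch below is a hypothetical caller (the example class, its sample() method and the List<String> input are illustrative and not from the repository); only TypeInfo.create, format and getName come from the file itself.
import java.util.List;
import io.vertx.codegen.TypeInfo;
// Hypothetical caller, not part of the repository: it only exercises the renamed API.
public class TypeInfoFormatExample {
// Declared purely to obtain a java.lang.reflect.Type representing List<String>.
static List<String> sample() { return null; }
public static void main(String[] args) throws Exception {
java.lang.reflect.Type listOfString =
TypeInfoFormatExample.class.getDeclaredMethod("sample").getGenericReturnType();
TypeInfo info = TypeInfo.create(listOfString);
System.out.println(info.getName());      // fully qualified, e.g. java.util.List<java.lang.String>
System.out.println(info.format(false));  // simple names, e.g. List<String>
}
}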
Java
|
apache-2.0
|
ef3fa94bf0b2508ce31b58ed8300989cdc0bc41a
| 0 |
sul-dlss/openwayback,efundamentals/openwayback,bitzl/openwayback,nla/openwayback,sul-dlss/openwayback,SpiralsSeminaire/openwayback,ukwa/openwayback,SpiralsSeminaire/openwayback,kris-sigur/openwayback,zubairkhatri/openwayback,nlnwa/openwayback,zubairkhatri/openwayback,kris-sigur/openwayback,MohammedElsayyed/openwayback,nlnwa/openwayback,emijrp/openwayback,JesseWeinstein/openwayback,nlnwa/openwayback,efundamentals/openwayback,kris-sigur/openwayback,iipc/openwayback,bitzl/openwayback,SpiralsSeminaire/openwayback,emijrp/openwayback,emijrp/openwayback,chasehd/openwayback,chasehd/openwayback,JesseWeinstein/openwayback,nlnwa/openwayback,MohammedElsayyed/openwayback,bitzl/openwayback,iipc/openwayback,zubairkhatri/openwayback,nla/openwayback,nla/openwayback,ukwa/openwayback,efundamentals/openwayback,JesseWeinstein/openwayback,SpiralsSeminaire/openwayback,emijrp/openwayback,kris-sigur/openwayback,SpiralsSeminaire/openwayback,efundamentals/openwayback,nla/openwayback,efundamentals/openwayback,nlnwa/openwayback,kris-sigur/openwayback,zubairkhatri/openwayback,bitzl/openwayback,sul-dlss/openwayback,JesseWeinstein/openwayback,nla/openwayback,JesseWeinstein/openwayback,MohammedElsayyed/openwayback,bitzl/openwayback,emijrp/openwayback,chasehd/openwayback,ukwa/openwayback,iipc/openwayback
|
/* RobotExclusionFilter
*
* $Id$
*
* Created on 3:10:54 PM Mar 14, 2007.
*
* Copyright (C) 2007 Internet Archive.
*
* This file is part of wayback.
*
* wayback is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser Public License as published by
* the Free Software Foundation; either version 2.1 of the License, or
* any later version.
*
* wayback is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser Public License for more details.
*
* You should have received a copy of the GNU Lesser Public License
* along with wayback-svn; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package org.archive.wayback.accesscontrol.robotstxt;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.logging.Logger;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.archive.util.ArchiveUtils;
import org.archive.wayback.core.Resource;
import org.archive.wayback.core.CaptureSearchResult;
import org.archive.wayback.exception.LiveDocumentNotAvailableException;
import org.archive.wayback.exception.LiveWebCacheUnavailableException;
import org.archive.wayback.liveweb.LiveWebCache;
import org.archive.wayback.util.ObjectFilter;
/**
* CaptureSearchResult Filter that uses a LiveWebCache to retrieve robots.txt
* documents from the live web, and filters SearchResults based on the rules
* therein.
*
* This class caches parsed RobotRules that are retrieved, so using the same
* instance to filter multiple SearchResults from the same host will be more
* efficient.
*
* Instances are expected to be transient for each request: The internally
* cached StringBuilder is not thread safe.
*
* @author brad
* @version $Date$, $Revision$
*/
public class RobotExclusionFilter implements ObjectFilter<CaptureSearchResult> {
private final static Logger LOGGER = Logger.getLogger(RobotExclusionFilter.class.getName());
private final static String HTTP_PREFIX = "http://";
private final static String ROBOT_SUFFIX = "/robots.txt";
private static String WWWN_REGEX = "^www[0-9]+\\.";
private final static Pattern WWWN_PATTERN = Pattern.compile(WWWN_REGEX);
private LiveWebCache webCache = null;
private HashMap<String,RobotRules> rulesCache = null;
private long maxCacheMS = 0;
private String userAgent = null;
private StringBuilder sb = null;
private final static RobotRules emptyRules = new RobotRules();
/**
* Construct a new RobotExclusionFilter that uses webCache to pull
* robots.txt documents. filtering is based on userAgent, and cached
* documents newer than maxCacheMS in the webCache are considered valid.
*
* @param webCache LiveWebCache from which documents can be retrieved
* @param userAgent String user agent to use for requests to the live web.
* @param maxCacheMS long number of milliseconds to cache documents in the
* LiveWebCache
*/
public RobotExclusionFilter(LiveWebCache webCache, String userAgent,
long maxCacheMS) {
rulesCache = new HashMap<String,RobotRules>();
this.webCache = webCache;
this.userAgent = userAgent;
this.maxCacheMS = maxCacheMS;
sb = new StringBuilder(100);
}
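// Illustrative construction (hypothetical values, not in the original source):
//   new RobotExclusionFilter(cache, "MyArchiver/1.0", 60 * 60 * 1000L)
// would consult cached robots.txt copies up to one hour old and evaluate the
// rules that apply to the "MyArchiver/1.0" user agent.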
private String hostToRobotUrlString(String host) {
sb.setLength(0);
sb.append(HTTP_PREFIX).append(host).append(ROBOT_SUFFIX);
return sb.toString();
}
/*
* Return a List of all robots.txt urls to attempt for this url:
* If originalURL starts with "www.DOMAIN":
* [originalURL,DOMAIN]
* If url starts with "www[0-9]+.DOMAIN":
* [originalURL,www.DOMAIN,DOMAIN]
* Otherwise:
* [originalURL,www.originalURL]
*/
protected List<String> searchResultToRobotUrlStrings(String resultHost) {
ArrayList<String> list = new ArrayList<String>();
list.add(hostToRobotUrlString(resultHost));
if(resultHost.startsWith("www")) {
if(resultHost.startsWith("www.")) {
list.add(hostToRobotUrlString(resultHost.substring(4)));
} else {
Matcher m = WWWN_PATTERN.matcher(resultHost);
if(m.find()) {
String massagedHost = resultHost.substring(m.end());
list.add(hostToRobotUrlString("www." + massagedHost));
list.add(hostToRobotUrlString(massagedHost));
}
}
} else {
list.add(hostToRobotUrlString("www." + resultHost));
}
return list;
}
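// Worked examples of the candidate lists produced above (hypothetical hosts, for illustration only):
//   "www.example.com"  -> [http://www.example.com/robots.txt, http://example.com/robots.txt]
//   "www2.example.com" -> [http://www2.example.com/robots.txt, http://www.example.com/robots.txt,
//                          http://example.com/robots.txt]
//   "example.com"      -> [http://example.com/robots.txt, http://www.example.com/robots.txt]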
private RobotRules getRules(CaptureSearchResult result) {
RobotRules rules = null;
RobotRules tmpRules = null;
String host = result.getOriginalHost();
List<String> urlStrings = searchResultToRobotUrlStrings(host);
Iterator<String> itr = urlStrings.iterator();
String firstUrlString = null;
while(rules == null && itr.hasNext()) {
String urlString = (String) itr.next();
if(firstUrlString == null) {
firstUrlString = urlString;
}
if(rulesCache.containsKey(urlString)) {
LOGGER.fine("ROBOT: Cached("+urlString+")");
rules = rulesCache.get(urlString);
} else {
try {
LOGGER.fine("ROBOT: NotCached("+urlString+")");
tmpRules = new RobotRules();
Resource resource = webCache.getCachedResource(new URL(urlString),
maxCacheMS,true);
if(resource.getStatusCode() != 200) {
LOGGER.info("ROBOT: NotAvailable("+urlString+")");
throw new LiveDocumentNotAvailableException(urlString);
}
tmpRules.parse(resource);
rulesCache.put(firstUrlString,tmpRules);
rules = tmpRules;
LOGGER.info("ROBOT: Downloaded("+urlString+")");
} catch (LiveDocumentNotAvailableException e) {
// cache an empty rule: all OK
// rulesCache.put(firstUrlString, emptyRules);
// rules = emptyRules;
continue;
} catch (MalformedURLException e) {
e.printStackTrace();
return null;
} catch (IOException e) {
e.printStackTrace();
return null;
} catch (LiveWebCacheUnavailableException e) {
e.printStackTrace();
return null;
}
}
}
if(rules == null) {
// special-case, allow empty rules if no longer available.
rulesCache.put(firstUrlString,emptyRules);
rules = emptyRules;
}
return rules;
}
/* (non-Javadoc)
* @see org.archive.wayback.resourceindex.SearchResultFilter#filterSearchResult(org.archive.wayback.core.SearchResult)
*/
public int filterObject(CaptureSearchResult r) {
int filterResult = ObjectFilter.FILTER_EXCLUDE;
RobotRules rules = getRules(r);
if(rules != null) {
String resultURL = r.getOriginalUrl();
URL url;
try {
url = new URL(ArchiveUtils.addImpliedHttpIfNecessary(resultURL));
if(!rules.blocksPathForUA(url.getPath(), userAgent)) {
filterResult = ObjectFilter.FILTER_INCLUDE;
} else {
LOGGER.info("ROBOT: BLOCKED("+resultURL+")");
}
} catch (MalformedURLException e) {
e.printStackTrace();
}
}
return filterResult;
}
}
|
wayback-core/src/main/java/org/archive/wayback/accesscontrol/robotstxt/RobotExclusionFilter.java
|
/* RobotExclusionFilter
*
* $Id$
*
* Created on 3:10:54 PM Mar 14, 2007.
*
* Copyright (C) 2007 Internet Archive.
*
* This file is part of wayback.
*
* wayback is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser Public License as published by
* the Free Software Foundation; either version 2.1 of the License, or
* any later version.
*
* wayback is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser Public License for more details.
*
* You should have received a copy of the GNU Lesser Public License
* along with wayback-svn; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package org.archive.wayback.accesscontrol.robotstxt;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.archive.util.ArchiveUtils;
import org.archive.wayback.core.Resource;
import org.archive.wayback.core.CaptureSearchResult;
import org.archive.wayback.exception.LiveDocumentNotAvailableException;
import org.archive.wayback.liveweb.LiveWebCache;
import org.archive.wayback.util.ObjectFilter;
/**
* CaptureSearchResult Filter that uses a LiveWebCache to retrieve robots.txt
* documents from the live web, and filters SearchResults based on the rules
* therein.
*
* This class caches parsed RobotRules that are retrieved, so using the same
* instance to filter multiple SearchResults from the same host will be more
* efficient.
*
* Instances are expected to be transient for each request: The internally
* cached StringBuilder is not thread safe.
*
* @author brad
* @version $Date$, $Revision$
*/
public class RobotExclusionFilter implements ObjectFilter<CaptureSearchResult> {
private final static String HTTP_PREFIX = "http://";
private final static String ROBOT_SUFFIX = "/robots.txt";
private static String WWWN_REGEX = "^www[0-9]+\\.";
private final static Pattern WWWN_PATTERN = Pattern.compile(WWWN_REGEX);
private LiveWebCache webCache = null;
private HashMap<String,RobotRules> rulesCache = null;
private long maxCacheMS = 0;
private String userAgent = null;
private StringBuilder sb = null;
private final static RobotRules emptyRules = new RobotRules();
/**
* Construct a new RobotExclusionFilter that uses webCache to pull
* robots.txt documents. filtering is based on userAgent, and cached
* documents newer than maxCacheMS in the webCache are considered valid.
*
* @param webCache LiveWebCache from which documents can be retrieved
* @param userAgent String user agent to use for requests to the live web.
* @param maxCacheMS long number of milliseconds to cache documents in the
* LiveWebCache
*/
public RobotExclusionFilter(LiveWebCache webCache, String userAgent,
long maxCacheMS) {
rulesCache = new HashMap<String,RobotRules>();
this.webCache = webCache;
this.userAgent = userAgent;
this.maxCacheMS = maxCacheMS;
sb = new StringBuilder(100);
}
private String hostToRobotUrlString(String host) {
sb.setLength(0);
sb.append(HTTP_PREFIX).append(host).append(ROBOT_SUFFIX);
return sb.toString();
}
/*
* Return a List of all robots.txt urls to attempt for this url:
* If originalURL starts with "www.DOMAIN":
* [originalURL,DOMAIN]
* If url starts with "www[0-9]+.DOMAIN":
* [originalURL,www.DOMAIN,DOMAIN]
* Otherwise:
* [originalURL,www.originalURL]
*/
protected List<String> searchResultToRobotUrlStrings(String resultHost) {
ArrayList<String> list = new ArrayList<String>();
list.add(hostToRobotUrlString(resultHost));
if(resultHost.startsWith("www")) {
if(resultHost.startsWith("www.")) {
list.add(hostToRobotUrlString(resultHost.substring(4)));
} else {
Matcher m = WWWN_PATTERN.matcher(resultHost);
if(m.find()) {
String massagedHost = resultHost.substring(m.end());
list.add(hostToRobotUrlString("www." + massagedHost));
list.add(hostToRobotUrlString(massagedHost));
}
}
} else {
list.add(hostToRobotUrlString("www." + resultHost));
}
return list;
}
private RobotRules getRules(CaptureSearchResult result) {
RobotRules rules = null;
RobotRules tmpRules = null;
String host = result.getOriginalHost();
List<String> urlStrings = searchResultToRobotUrlStrings(host);
Iterator<String> itr = urlStrings.iterator();
String firstUrlString = null;
while(rules == null && itr.hasNext()) {
String urlString = (String) itr.next();
if(firstUrlString == null) {
firstUrlString = urlString;
}
if(rulesCache.containsKey(urlString)) {
rules = rulesCache.get(urlString);
} else {
try {
tmpRules = new RobotRules();
Resource resource = webCache.getCachedResource(new URL(urlString),
maxCacheMS,true);
tmpRules.parse(resource);
rulesCache.put(firstUrlString,tmpRules);
rules = tmpRules;
} catch (LiveDocumentNotAvailableException e) {
continue;
} catch (MalformedURLException e) {
e.printStackTrace();
return null;
} catch (IOException e) {
e.printStackTrace();
return null;
}
}
}
if(rules == null) {
// special-case, allow empty rules if no longer available.
rulesCache.put(firstUrlString,emptyRules);
rules = emptyRules;
}
return rules;
}
/* (non-Javadoc)
* @see org.archive.wayback.resourceindex.SearchResultFilter#filterSearchResult(org.archive.wayback.core.SearchResult)
*/
public int filterObject(CaptureSearchResult r) {
int filterResult = ObjectFilter.FILTER_EXCLUDE;
RobotRules rules = getRules(r);
if(rules != null) {
String resultURL = r.getOriginalUrl();
URL url;
try {
url = new URL(ArchiveUtils.addImpliedHttpIfNecessary(resultURL));
if(!rules.blocksPathForUA(url.getPath(), userAgent)) {
filterResult = ObjectFilter.FILTER_INCLUDE;
}
} catch (MalformedURLException e) {
e.printStackTrace();
}
}
return filterResult;
}
}
|
FEATURE: added logging
git-svn-id: ca6d9ebf75caaf710f0e3a4ee74a890c456d4c90@2949 69e27eb3-6e27-0410-b9c6-fffd7e226fab
|
wayback-core/src/main/java/org/archive/wayback/accesscontrol/robotstxt/RobotExclusionFilter.java
|
FEATURE: added logging
|
<ide><path>ayback-core/src/main/java/org/archive/wayback/accesscontrol/robotstxt/RobotExclusionFilter.java
<ide> import java.util.HashMap;
<ide> import java.util.Iterator;
<ide> import java.util.List;
<add>import java.util.logging.Logger;
<ide> import java.util.regex.Matcher;
<ide> import java.util.regex.Pattern;
<ide>
<ide> import org.archive.wayback.core.Resource;
<ide> import org.archive.wayback.core.CaptureSearchResult;
<ide> import org.archive.wayback.exception.LiveDocumentNotAvailableException;
<add>import org.archive.wayback.exception.LiveWebCacheUnavailableException;
<ide> import org.archive.wayback.liveweb.LiveWebCache;
<ide> import org.archive.wayback.util.ObjectFilter;
<ide>
<ide> */
<ide> public class RobotExclusionFilter implements ObjectFilter<CaptureSearchResult> {
<ide>
<add> private final static Logger LOGGER = Logger.getLogger(RobotExclusionFilter.class.getName());
<add>
<ide> private final static String HTTP_PREFIX = "http://";
<ide> private final static String ROBOT_SUFFIX = "/robots.txt";
<ide>
<ide> firstUrlString = urlString;
<ide> }
<ide> if(rulesCache.containsKey(urlString)) {
<add> LOGGER.fine("ROBOT: Cached("+urlString+")");
<ide> rules = rulesCache.get(urlString);
<ide> } else {
<ide> try {
<del>
<add> LOGGER.fine("ROBOT: NotCached("+urlString+")");
<add>
<ide> tmpRules = new RobotRules();
<ide> Resource resource = webCache.getCachedResource(new URL(urlString),
<ide> maxCacheMS,true);
<add> if(resource.getStatusCode() != 200) {
<add> LOGGER.info("ROBOT: NotAvailable("+urlString+")");
<add> throw new LiveDocumentNotAvailableException(urlString);
<add> }
<ide> tmpRules.parse(resource);
<ide> rulesCache.put(firstUrlString,tmpRules);
<ide> rules = tmpRules;
<add> LOGGER.info("ROBOT: Downloaded("+urlString+")");
<ide>
<ide> } catch (LiveDocumentNotAvailableException e) {
<add> // cache an empty rule: all OK
<add>// rulesCache.put(firstUrlString, emptyRules);
<add>// rules = emptyRules;
<ide> continue;
<ide> } catch (MalformedURLException e) {
<ide> e.printStackTrace();
<ide> return null;
<ide> } catch (IOException e) {
<add> e.printStackTrace();
<add> return null;
<add> } catch (LiveWebCacheUnavailableException e) {
<ide> e.printStackTrace();
<ide> return null;
<ide> }
<ide> url = new URL(ArchiveUtils.addImpliedHttpIfNecessary(resultURL));
<ide> if(!rules.blocksPathForUA(url.getPath(), userAgent)) {
<ide> filterResult = ObjectFilter.FILTER_INCLUDE;
<add> } else {
<add> LOGGER.info("ROBOT: BLOCKED("+resultURL+")");
<ide> }
<ide> } catch (MalformedURLException e) {
<ide> e.printStackTrace();
|
|
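The commit above wires java.util.logging into RobotExclusionFilter: cache hits and misses are reported at FINE, while downloads, unavailable robots.txt files and blocked URLs are reported at INFO. Below is a minimal, hypothetical way to surface the FINE-level "ROBOT: ..." messages while debugging; the class and method names are illustrative, only the logger name is taken from the filter itself, and the rest is the standard java.util.logging API.
import java.util.logging.ConsoleHandler;
import java.util.logging.Level;
import java.util.logging.Logger;
// Hypothetical debugging helper, not part of the commit.
public class RobotFilterLogSetup {
public static void enableRobotTrace() {
Logger logger = Logger.getLogger(
"org.archive.wayback.accesscontrol.robotstxt.RobotExclusionFilter");
logger.setLevel(Level.FINE);            // let the ROBOT: Cached/NotCached messages through
ConsoleHandler handler = new ConsoleHandler();
handler.setLevel(Level.FINE);           // the default console handler stops at INFO
logger.addHandler(handler);
}
}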
Java
|
apache-2.0
|
6251f8f8c035ecf50051b974ca99499913e001c1
| 0 |
rvaleti/phoenix,RCheungIT/phoenix,rvaleti/phoenix,Guavus/phoenix,apache/phoenix,ankitsinghal/phoenix,growingio/phoenix,7shurik/phoenix,twdsilva/phoenix,ankitsinghal/phoenix,rvaleti/phoenix,twdsilva/phoenix,AyolaJayamaha/phoenix,shehzaadn/phoenix,ohadshacham/phoenix,dumindux/phoenix,growingio/phoenix,apurtell/phoenix,shehzaadn/phoenix,dumindux/phoenix,apache/phoenix,growingio/phoenix,7shurik/phoenix,7shurik/phoenix,shehzaadn/phoenix,7shurik/phoenix,twdsilva/phoenix,rvaleti/phoenix,jfernandosf/phoenix,ohadshacham/phoenix,twdsilva/phoenix,apurtell/phoenix,ohadshacham/phoenix,jfernandosf/phoenix,apurtell/phoenix,shehzaadn/phoenix,ankitsinghal/phoenix,ohadshacham/phoenix,RCheungIT/phoenix,apache/phoenix,apurtell/phoenix,growingio/phoenix,ohadshacham/phoenix,Guavus/phoenix,AyolaJayamaha/phoenix,rvaleti/phoenix,ankitsinghal/phoenix,RCheungIT/phoenix,Guavus/phoenix,growingio/phoenix,RCheungIT/phoenix,Guavus/phoenix,AyolaJayamaha/phoenix,AyolaJayamaha/phoenix,AyolaJayamaha/phoenix,jfernandosf/phoenix,apache/phoenix,jfernandosf/phoenix,ankitsinghal/phoenix,apache/phoenix,RCheungIT/phoenix,shehzaadn/phoenix,apurtell/phoenix,twdsilva/phoenix,jfernandosf/phoenix,Guavus/phoenix
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.phoenix.query;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import static org.apache.hadoop.hbase.HColumnDescriptor.TTL;
import static org.apache.phoenix.coprocessor.MetaDataProtocol.PHOENIX_MAJOR_VERSION;
import static org.apache.phoenix.coprocessor.MetaDataProtocol.PHOENIX_MINOR_VERSION;
import static org.apache.phoenix.coprocessor.MetaDataProtocol.PHOENIX_PATCH_NUMBER;
import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.SYSTEM_CATALOG_NAME_BYTES;
import static org.apache.phoenix.query.QueryServices.DROP_METADATA_ATTRIB;
import static org.apache.phoenix.query.QueryServices.HBASE_CLIENT_KEYTAB;
import static org.apache.phoenix.query.QueryServices.HBASE_CLIENT_PRINCIPAL;
import static org.apache.phoenix.query.QueryServices.RENEW_LEASE_ENABLED;
import static org.apache.phoenix.query.QueryServices.RENEW_LEASE_THREAD_POOL_SIZE;
import static org.apache.phoenix.query.QueryServices.RENEW_LEASE_THRESHOLD_MILLISECONDS;
import static org.apache.phoenix.query.QueryServices.RUN_RENEW_LEASE_FREQUENCY_INTERVAL_MILLISECONDS;
import static org.apache.phoenix.query.QueryServicesOptions.DEFAULT_DROP_METADATA;
import static org.apache.phoenix.query.QueryServicesOptions.DEFAULT_RENEW_LEASE_ENABLED;
import static org.apache.phoenix.query.QueryServicesOptions.DEFAULT_RENEW_LEASE_THREAD_POOL_SIZE;
import static org.apache.phoenix.query.QueryServicesOptions.DEFAULT_RENEW_LEASE_THRESHOLD_MILLISECONDS;
import static org.apache.phoenix.query.QueryServicesOptions.DEFAULT_RUN_RENEW_LEASE_FREQUENCY_INTERVAL_MILLISECONDS;
import static org.apache.phoenix.util.UpgradeUtil.upgradeTo4_5_0;
import java.io.IOException;
import java.lang.ref.WeakReference;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Properties;
import java.util.Random;
import java.util.Set;
import java.util.TreeMap;
import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executors;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import javax.annotation.concurrent.GuardedBy;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableExistsException;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Append;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HConnection;
import org.apache.hadoop.hbase.client.HTableInterface;
import org.apache.hadoop.hbase.client.Increment;
import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.coprocessor.Batch;
import org.apache.hadoop.hbase.coprocessor.MultiRowMutationEndpoint;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.ipc.BlockingRpcCallback;
import org.apache.hadoop.hbase.ipc.ServerRpcController;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto;
import org.apache.hadoop.hbase.regionserver.IndexHalfStoreFileReaderGenerator;
import org.apache.hadoop.hbase.regionserver.LocalIndexSplitter;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.util.ByteStringer;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.hbase.util.VersionInfo;
import org.apache.hadoop.hbase.zookeeper.ZKConfig;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.phoenix.compile.MutationPlan;
import org.apache.phoenix.coprocessor.GroupedAggregateRegionObserver;
import org.apache.phoenix.coprocessor.MetaDataEndpointImpl;
import org.apache.phoenix.coprocessor.MetaDataProtocol;
import org.apache.phoenix.coprocessor.MetaDataProtocol.MetaDataMutationResult;
import org.apache.phoenix.coprocessor.MetaDataProtocol.MutationCode;
import org.apache.phoenix.coprocessor.MetaDataRegionObserver;
import org.apache.phoenix.coprocessor.ScanRegionObserver;
import org.apache.phoenix.coprocessor.SequenceRegionObserver;
import org.apache.phoenix.coprocessor.ServerCachingEndpointImpl;
import org.apache.phoenix.coprocessor.UngroupedAggregateRegionObserver;
import org.apache.phoenix.coprocessor.generated.MetaDataProtos;
import org.apache.phoenix.coprocessor.generated.MetaDataProtos.AddColumnRequest;
import org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheRequest;
import org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheResponse;
import org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheRequest;
import org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheResponse;
import org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateFunctionRequest;
import org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateTableRequest;
import org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropColumnRequest;
import org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropFunctionRequest;
import org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropTableRequest;
import org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetFunctionsRequest;
import org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetTableRequest;
import org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionRequest;
import org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionResponse;
import org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse;
import org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataService;
import org.apache.phoenix.coprocessor.generated.MetaDataProtos.UpdateIndexStateRequest;
import org.apache.phoenix.exception.PhoenixIOException;
import org.apache.phoenix.exception.SQLExceptionCode;
import org.apache.phoenix.exception.SQLExceptionInfo;
import org.apache.phoenix.execute.MutationState;
import org.apache.phoenix.hbase.index.IndexRegionSplitPolicy;
import org.apache.phoenix.hbase.index.Indexer;
import org.apache.phoenix.hbase.index.covered.NonTxIndexBuilder;
import org.apache.phoenix.hbase.index.util.ImmutableBytesPtr;
import org.apache.phoenix.hbase.index.util.KeyValueBuilder;
import org.apache.phoenix.hbase.index.util.VersionUtil;
import org.apache.phoenix.index.PhoenixIndexBuilder;
import org.apache.phoenix.index.PhoenixIndexCodec;
import org.apache.phoenix.index.PhoenixTransactionalIndexer;
import org.apache.phoenix.iterate.TableResultIterator;
import org.apache.phoenix.iterate.TableResultIterator.RenewLeaseStatus;
import org.apache.phoenix.jdbc.PhoenixConnection;
import org.apache.phoenix.jdbc.PhoenixDatabaseMetaData;
import org.apache.phoenix.jdbc.PhoenixEmbeddedDriver.ConnectionInfo;
import org.apache.phoenix.parse.PFunction;
import org.apache.phoenix.protobuf.ProtobufUtil;
import org.apache.phoenix.schema.ColumnAlreadyExistsException;
import org.apache.phoenix.schema.ColumnFamilyNotFoundException;
import org.apache.phoenix.schema.EmptySequenceCacheException;
import org.apache.phoenix.schema.FunctionNotFoundException;
import org.apache.phoenix.schema.MetaDataSplitPolicy;
import org.apache.phoenix.schema.NewerTableAlreadyExistsException;
import org.apache.phoenix.schema.PColumn;
import org.apache.phoenix.schema.PColumnFamily;
import org.apache.phoenix.schema.PMetaData;
import org.apache.phoenix.schema.PMetaDataImpl;
import org.apache.phoenix.schema.PName;
import org.apache.phoenix.schema.PNameFactory;
import org.apache.phoenix.schema.PTable;
import org.apache.phoenix.schema.PTableKey;
import org.apache.phoenix.schema.PTableType;
import org.apache.phoenix.schema.ReadOnlyTableException;
import org.apache.phoenix.schema.SaltingUtil;
import org.apache.phoenix.schema.Sequence;
import org.apache.phoenix.schema.SequenceAllocation;
import org.apache.phoenix.schema.SequenceKey;
import org.apache.phoenix.schema.TableAlreadyExistsException;
import org.apache.phoenix.schema.TableNotFoundException;
import org.apache.phoenix.schema.TableProperty;
import org.apache.phoenix.schema.stats.PTableStats;
import org.apache.phoenix.schema.stats.StatisticsUtil;
import org.apache.phoenix.schema.types.PBoolean;
import org.apache.phoenix.schema.types.PDataType;
import org.apache.phoenix.schema.types.PInteger;
import org.apache.phoenix.schema.types.PLong;
import org.apache.phoenix.schema.types.PUnsignedTinyint;
import org.apache.phoenix.util.ByteUtil;
import org.apache.phoenix.util.Closeables;
import org.apache.phoenix.util.ConfigUtil;
import org.apache.phoenix.util.JDBCUtil;
import org.apache.phoenix.util.MetaDataUtil;
import org.apache.phoenix.util.PhoenixContextExecutor;
import org.apache.phoenix.util.PhoenixRuntime;
import org.apache.phoenix.util.PhoenixStopWatch;
import org.apache.phoenix.util.PropertiesUtil;
import org.apache.phoenix.util.ReadOnlyProps;
import org.apache.phoenix.util.SchemaUtil;
import org.apache.phoenix.util.ServerUtil;
import org.apache.phoenix.util.UpgradeUtil;
import org.apache.twill.discovery.ZKDiscoveryService;
import org.apache.twill.zookeeper.RetryStrategies;
import org.apache.twill.zookeeper.ZKClientService;
import org.apache.twill.zookeeper.ZKClientServices;
import org.apache.twill.zookeeper.ZKClients;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Joiner;
import com.google.common.base.Throwables;
import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import co.cask.tephra.TransactionSystemClient;
import co.cask.tephra.TxConstants;
import co.cask.tephra.distributed.PooledClientProvider;
import co.cask.tephra.distributed.TransactionServiceClient;
import co.cask.tephra.hbase11.coprocessor.TransactionProcessor;
public class ConnectionQueryServicesImpl extends DelegateQueryServices implements ConnectionQueryServices {
private static final Logger logger = LoggerFactory.getLogger(ConnectionQueryServicesImpl.class);
private static final int INITIAL_CHILD_SERVICES_CAPACITY = 100;
private static final int DEFAULT_OUT_OF_ORDER_MUTATIONS_WAIT_TIME_MS = 1000;
// Max number of cached table stats for view or shared index physical tables
private static final int MAX_TABLE_STATS_CACHE_ENTRIES = 512;
protected final Configuration config;
private final ConnectionInfo connectionInfo;
// Copy of config.getProps(), but read-only to prevent synchronization that we
// don't need.
private final ReadOnlyProps props;
private final String userName;
private final ConcurrentHashMap<ImmutableBytesWritable,ConnectionQueryServices> childServices;
private final Cache<ImmutableBytesPtr, PTableStats> tableStatsCache;
// Cache the latest meta data here for future connections
// writes guarded by "latestMetaDataLock"
private volatile PMetaData latestMetaData;
private final Object latestMetaDataLock = new Object();
// Lowest HBase version on the cluster.
private int lowestClusterHBaseVersion = Integer.MAX_VALUE;
private boolean hasIndexWALCodec = true;
@GuardedBy("connectionCountLock")
private int connectionCount = 0;
private final Object connectionCountLock = new Object();
private final boolean returnSequenceValues ;
private HConnection connection;
private TransactionServiceClient txServiceClient;
private volatile boolean initialized;
private volatile int nSequenceSaltBuckets;
// writes guarded by "this"
private volatile boolean closed;
private volatile SQLException initializationException;
// setting this member variable guarded by "connectionCountLock"
private volatile ConcurrentMap<SequenceKey,Sequence> sequenceMap = Maps.newConcurrentMap();
private KeyValueBuilder kvBuilder;
private final int renewLeaseTaskFrequency;
private final int renewLeasePoolSize;
private final int renewLeaseThreshold;
// List of queues instead of a single queue to provide reduced contention via lock striping
private final List<LinkedBlockingQueue<WeakReference<PhoenixConnection>>> connectionQueues;
private ScheduledExecutorService renewLeaseExecutor;
private final boolean renewLeaseEnabled;
private static interface FeatureSupported {
boolean isSupported(ConnectionQueryServices services);
}
private final Map<Feature, FeatureSupported> featureMap = ImmutableMap.<Feature, FeatureSupported>of(
Feature.LOCAL_INDEX, new FeatureSupported() {
@Override
public boolean isSupported(ConnectionQueryServices services) {
int hbaseVersion = services.getLowestClusterHBaseVersion();
return hbaseVersion < PhoenixDatabaseMetaData.MIN_LOCAL_SI_VERSION_DISALLOW || hbaseVersion > PhoenixDatabaseMetaData.MAX_LOCAL_SI_VERSION_DISALLOW;
}
},
Feature.RENEW_LEASE, new FeatureSupported() {
@Override
public boolean isSupported(ConnectionQueryServices services) {
int hbaseVersion = services.getLowestClusterHBaseVersion();
return hbaseVersion >= PhoenixDatabaseMetaData.MIN_RENEW_LEASE_VERSION;
}
});
private PMetaData newEmptyMetaData() {
long maxSizeBytes = props.getLong(QueryServices.MAX_CLIENT_METADATA_CACHE_SIZE_ATTRIB,
QueryServicesOptions.DEFAULT_MAX_CLIENT_METADATA_CACHE_SIZE);
return new PMetaDataImpl(INITIAL_META_DATA_TABLE_CAPACITY, maxSizeBytes);
}
/**
* Construct a ConnectionQueryServicesImpl that represents a connection to an HBase
* cluster.
* @param services base services from where we derive our default configuration
* @param connectionInfo to provide connection information
* @param info hbase configuration properties
* @throws SQLException
*/
public ConnectionQueryServicesImpl(QueryServices services, ConnectionInfo connectionInfo, Properties info) {
super(services);
Configuration config = HBaseFactoryProvider.getConfigurationFactory().getConfiguration();
for (Entry<String,String> entry : services.getProps()) {
config.set(entry.getKey(), entry.getValue());
}
if (info != null) {
for (Object key : info.keySet()) {
config.set((String) key, info.getProperty((String) key));
}
}
for (Entry<String,String> entry : connectionInfo.asProps()) {
config.set(entry.getKey(), entry.getValue());
}
this.connectionInfo = connectionInfo;
// Without making a copy of the configuration we cons up, we lose some of our properties
// on the server side during testing.
this.config = HBaseFactoryProvider.getConfigurationFactory().getConfiguration(config);
// set replication required parameter
ConfigUtil.setReplicationConfigIfAbsent(this.config);
this.props = new ReadOnlyProps(this.config.iterator());
this.userName = connectionInfo.getPrincipal();
this.latestMetaData = newEmptyMetaData();
// TODO: should we track connection wide memory usage or just org-wide usage?
// If connection-wide, create a MemoryManager here, otherwise just use the one from the delegate
this.childServices = new ConcurrentHashMap<ImmutableBytesWritable,ConnectionQueryServices>(INITIAL_CHILD_SERVICES_CAPACITY);
// find the HBase version and use that to determine the KeyValueBuilder that should be used
String hbaseVersion = VersionInfo.getVersion();
this.kvBuilder = KeyValueBuilder.get(hbaseVersion);
long halfStatsUpdateFreq = config.getLong(
QueryServices.STATS_UPDATE_FREQ_MS_ATTRIB,
QueryServicesOptions.DEFAULT_STATS_UPDATE_FREQ_MS) / 2;
tableStatsCache = CacheBuilder.newBuilder()
.maximumSize(MAX_TABLE_STATS_CACHE_ENTRIES)
.expireAfterWrite(halfStatsUpdateFreq, TimeUnit.MILLISECONDS)
.build();
this.returnSequenceValues = props.getBoolean(QueryServices.RETURN_SEQUENCE_VALUES_ATTRIB, QueryServicesOptions.DEFAULT_RETURN_SEQUENCE_VALUES);
this.renewLeaseEnabled = config.getBoolean(RENEW_LEASE_ENABLED, DEFAULT_RENEW_LEASE_ENABLED);
this.renewLeasePoolSize = config.getInt(RENEW_LEASE_THREAD_POOL_SIZE, DEFAULT_RENEW_LEASE_THREAD_POOL_SIZE);
this.renewLeaseThreshold = config.getInt(RENEW_LEASE_THRESHOLD_MILLISECONDS, DEFAULT_RENEW_LEASE_THRESHOLD_MILLISECONDS);
this.renewLeaseTaskFrequency = config.getInt(RUN_RENEW_LEASE_FREQUENCY_INTERVAL_MILLISECONDS, DEFAULT_RUN_RENEW_LEASE_FREQUENCY_INTERVAL_MILLISECONDS);
List<LinkedBlockingQueue<WeakReference<PhoenixConnection>>> list = Lists.newArrayListWithCapacity(renewLeasePoolSize);
for (int i = 0; i < renewLeasePoolSize; i++) {
LinkedBlockingQueue<WeakReference<PhoenixConnection>> queue = new LinkedBlockingQueue<WeakReference<PhoenixConnection>>();
list.add(queue);
}
connectionQueues = ImmutableList.copyOf(list);
}
@Override
public TransactionSystemClient getTransactionSystemClient() {
return txServiceClient;
}
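// Bootstraps the Tephra transaction service client against the same ZooKeeper
// quorum and port used for the HBase connection, retrying failed ZooKeeper
// operations with exponential backoff.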
private void initTxServiceClient() {
String zkQuorumServersString = connectionInfo.getZookeeperQuorum()+":"+connectionInfo.getPort();
ZKClientService zkClientService = ZKClientServices.delegate(
ZKClients.reWatchOnExpire(
ZKClients.retryOnFailure(
ZKClientService.Builder.of(zkQuorumServersString)
.setSessionTimeout(props.getInt(HConstants.ZK_SESSION_TIMEOUT, HConstants.DEFAULT_ZK_SESSION_TIMEOUT))
.build(),
RetryStrategies.exponentialDelay(500, 2000, TimeUnit.MILLISECONDS)
)
)
);
zkClientService.startAndWait();
ZKDiscoveryService zkDiscoveryService = new ZKDiscoveryService(zkClientService);
PooledClientProvider pooledClientProvider = new PooledClientProvider(
config, zkDiscoveryService);
this.txServiceClient = new TransactionServiceClient(config,pooledClientProvider);
}
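// Opens the shared HConnection backing this query services instance. Logs in via
// Kerberos first when a client keytab and principal are configured, and initializes
// the transaction service client if transactions are enabled.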
private void openConnection() throws SQLException {
try {
// check if we need to authenticate with kerberos
String clientKeytab = this.getProps().get(HBASE_CLIENT_KEYTAB);
String clientPrincipal = this.getProps().get(HBASE_CLIENT_PRINCIPAL);
if (clientKeytab != null && clientPrincipal != null) {
logger.info("Trying to connect to a secure cluster with keytab:" + clientKeytab);
UserGroupInformation.setConfiguration(config);
User.login(config, HBASE_CLIENT_KEYTAB, HBASE_CLIENT_PRINCIPAL, null);
logger.info("Successfull login to secure cluster!!");
}
boolean transactionsEnabled = props.getBoolean(
QueryServices.TRANSACTIONS_ENABLED,
QueryServicesOptions.DEFAULT_TRANSACTIONS_ENABLED);
// only initialize the tx service client if needed
if (transactionsEnabled) {
initTxServiceClient();
}
this.connection = HBaseFactoryProvider.getHConnectionFactory().createConnection(this.config);
} catch (IOException e) {
throw new SQLExceptionInfo.Builder(SQLExceptionCode.CANNOT_ESTABLISH_CONNECTION)
.setRootCause(e).build().buildException();
}
if (this.connection.isClosed()) { // TODO: why the heck doesn't this throw above?
throw new SQLExceptionInfo.Builder(SQLExceptionCode.CANNOT_ESTABLISH_CONNECTION).build().buildException();
}
}
@Override
public HTableInterface getTable(byte[] tableName) throws SQLException {
try {
return HBaseFactoryProvider.getHTableFactory().getTable(tableName, connection, null);
} catch (org.apache.hadoop.hbase.TableNotFoundException e) {
throw new TableNotFoundException(SchemaUtil.getSchemaNameFromFullName(tableName), SchemaUtil.getTableNameFromFullName(tableName));
} catch (IOException e) {
throw new SQLException(e);
}
}
@Override
public HTableDescriptor getTableDescriptor(byte[] tableName) throws SQLException {
HTableInterface htable = getTable(tableName);
try {
return htable.getTableDescriptor();
} catch (IOException e) {
if(e instanceof org.apache.hadoop.hbase.TableNotFoundException ||
e.getCause() instanceof org.apache.hadoop.hbase.TableNotFoundException) {
byte[][] schemaAndTableName = new byte[2][];
SchemaUtil.getVarChars(tableName, schemaAndTableName);
throw new TableNotFoundException(Bytes.toString(schemaAndTableName[0]), Bytes.toString(schemaAndTableName[1]));
}
throw new RuntimeException(e);
} finally {
Closeables.closeQuietly(htable);
}
}
@Override
public ReadOnlyProps getProps() {
return props;
}
/**
* Closes the underlying connection to zookeeper. The QueryServices
* may not be used after that point. When a Connection is closed,
* this is not called, since these instances are pooled by the
* Driver. Instead, the Driver should call this if the QueryServices
* is ever removed from the pool
*/
@Override
public void close() throws SQLException {
if (closed) {
return;
}
synchronized (this) {
if (closed) {
return;
}
closed = true;
SQLException sqlE = null;
try {
// Attempt to return any unused sequences.
if (connection != null) returnAllSequences(this.sequenceMap);
} catch (SQLException e) {
sqlE = e;
} finally {
try {
childServices.clear();
if (renewLeaseExecutor != null) {
renewLeaseExecutor.shutdownNow();
}
synchronized (latestMetaDataLock) {
latestMetaData = null;
latestMetaDataLock.notifyAll();
}
if (connection != null) connection.close();
} catch (IOException e) {
if (sqlE == null) {
sqlE = ServerUtil.parseServerException(e);
} else {
sqlE.setNextException(ServerUtil.parseServerException(e));
}
} finally {
try {
tableStatsCache.invalidateAll();
super.close();
} catch (SQLException e) {
if (sqlE == null) {
sqlE = e;
} else {
sqlE.setNextException(e);
}
} finally {
if (sqlE != null) { throw sqlE; }
}
}
}
}
}
protected ConnectionQueryServices newChildQueryService() {
return new ChildQueryServices(this);
}
/**
* Get (and create if necessary) a child QueryService for a given tenantId.
* The QueryService will be cached for the lifetime of the parent QueryService
* @param tenantId the tenant ID
* @return the child QueryService
*/
@Override
public ConnectionQueryServices getChildQueryServices(ImmutableBytesWritable tenantId) {
ConnectionQueryServices childQueryService = childServices.get(tenantId);
if (childQueryService == null) {
childQueryService = newChildQueryService();
ConnectionQueryServices prevQueryService = childServices.putIfAbsent(tenantId, childQueryService);
return prevQueryService == null ? childQueryService : prevQueryService;
}
return childQueryService;
}
@Override
public void clearTableRegionCache(byte[] tableName) throws SQLException {
connection.clearRegionCache(TableName.valueOf(tableName));
}
@Override
public List<HRegionLocation> getAllTableRegions(byte[] tableName) throws SQLException {
/*
* Use HConnection.getRegionLocation as it uses the cache in HConnection, while getting
* all region locations from the HTable doesn't.
*/
int retryCount = 0, maxRetryCount = 1;
boolean reload =false;
while (true) {
try {
// We could surface the package-protected HConnectionImplementation.getNumberOfCachedRegionLocations
// to get the sizing info we need, but this would require a new class in the same package and a cast
// to this implementation class, so it's probably not worth it.
List<HRegionLocation> locations = Lists.newArrayList();
byte[] currentKey = HConstants.EMPTY_START_ROW;
do {
HRegionLocation regionLocation = connection.getRegionLocation(
TableName.valueOf(tableName), currentKey, reload);
locations.add(regionLocation);
currentKey = regionLocation.getRegionInfo().getEndKey();
} while (!Bytes.equals(currentKey, HConstants.EMPTY_END_ROW));
return locations;
} catch (org.apache.hadoop.hbase.TableNotFoundException e) {
String fullName = Bytes.toString(tableName);
throw new TableNotFoundException(SchemaUtil.getSchemaNameFromFullName(fullName), SchemaUtil.getTableNameFromFullName(fullName));
} catch (IOException e) {
if (retryCount++ < maxRetryCount) { // One retry, in case split occurs while navigating
reload = true;
continue;
}
throw new SQLExceptionInfo.Builder(SQLExceptionCode.GET_TABLE_REGIONS_FAIL)
.setRootCause(e).build().buildException();
}
}
}
@Override
public PMetaData addTable(PTable table, long resolvedTime) throws SQLException {
synchronized (latestMetaDataLock) {
try {
throwConnectionClosedIfNullMetaData();
// If existing table isn't older than new table, don't replace
// If a client opens a connection at an earlier timestamp, this can happen
PTable existingTable = latestMetaData.getTableRef(new PTableKey(table.getTenantId(), table.getName().getString())).getTable();
if (existingTable.getTimeStamp() >= table.getTimeStamp()) {
return latestMetaData;
}
} catch (TableNotFoundException e) {}
latestMetaData = latestMetaData.addTable(table, resolvedTime);
latestMetaDataLock.notifyAll();
return latestMetaData;
}
}
public PMetaData updateResolvedTimestamp(PTable table, long resolvedTime) throws SQLException {
synchronized (latestMetaDataLock) {
throwConnectionClosedIfNullMetaData();
latestMetaData = latestMetaData.updateResolvedTimestamp(table, resolvedTime);
latestMetaDataLock.notifyAll();
return latestMetaData;
}
}
private static interface Mutator {
PMetaData mutate(PMetaData metaData) throws SQLException;
}
/**
* Ensures that metaData mutations are handled in the correct order
*/
private PMetaData metaDataMutated(PName tenantId, String tableName, long tableSeqNum, Mutator mutator) throws SQLException {
synchronized (latestMetaDataLock) {
throwConnectionClosedIfNullMetaData();
PMetaData metaData = latestMetaData;
PTable table;
long endTime = System.currentTimeMillis() + DEFAULT_OUT_OF_ORDER_MUTATIONS_WAIT_TIME_MS;
while (true) {
try {
try {
table = metaData.getTableRef(new PTableKey(tenantId, tableName)).getTable();
/* If the table is at the prior sequence number, then we're good to go.
* We know if we've got this far, that the server validated the mutations,
* so we'd just need to wait until the other connection that mutated the same
* table is processed.
*/
if (table.getSequenceNumber() + 1 == tableSeqNum) {
// TODO: assert that timeStamp is bigger that table timeStamp?
metaData = mutator.mutate(metaData);
break;
} else if (table.getSequenceNumber() >= tableSeqNum) {
logger.warn("Attempt to cache older version of " + tableName + ": current= " + table.getSequenceNumber() + ", new=" + tableSeqNum);
break;
}
} catch (TableNotFoundException e) {
}
long waitTime = endTime - System.currentTimeMillis();
// We waited long enough - just remove the table from the cache
// and the next time it's used it'll be pulled over from the server.
if (waitTime <= 0) {
logger.warn("Unable to update meta data repo within " + (DEFAULT_OUT_OF_ORDER_MUTATIONS_WAIT_TIME_MS/1000) + " seconds for " + tableName);
// There will never be a parentTableName here, as that would only
// be non null for an index and we never add/remove columns from an index.
metaData = metaData.removeTable(tenantId, tableName, null, HConstants.LATEST_TIMESTAMP);
break;
}
latestMetaDataLock.wait(waitTime);
} catch (InterruptedException e) {
// restore the interrupt status
Thread.currentThread().interrupt();
throw new SQLExceptionInfo.Builder(SQLExceptionCode.INTERRUPTED_EXCEPTION)
.setRootCause(e).build().buildException(); // FIXME
}
}
latestMetaData = metaData;
latestMetaDataLock.notifyAll();
return metaData;
}
}
@Override
public PMetaData addColumn(final PName tenantId, final String tableName, final List<PColumn> columns, final long tableTimeStamp,
final long tableSeqNum, final boolean isImmutableRows, final boolean isWalDisabled, final boolean isMultitenant,
final boolean storeNulls, final boolean isTransactional, final long updateCacheFrequency, final long resolvedTime) throws SQLException {
return metaDataMutated(tenantId, tableName, tableSeqNum, new Mutator() {
@Override
public PMetaData mutate(PMetaData metaData) throws SQLException {
try {
return metaData.addColumn(tenantId, tableName, columns, tableTimeStamp, tableSeqNum, isImmutableRows, isWalDisabled, isMultitenant, storeNulls, isTransactional, updateCacheFrequency, resolvedTime);
} catch (TableNotFoundException e) {
// The DROP TABLE may have been processed first, so just ignore.
return metaData;
}
}
});
}
@Override
public PMetaData removeTable(PName tenantId, final String tableName, String parentTableName, long tableTimeStamp) throws SQLException {
synchronized (latestMetaDataLock) {
throwConnectionClosedIfNullMetaData();
latestMetaData = latestMetaData.removeTable(tenantId, tableName, parentTableName, tableTimeStamp);
latestMetaDataLock.notifyAll();
return latestMetaData;
}
}
@Override
public PMetaData removeColumn(final PName tenantId, final String tableName, final List<PColumn> columnsToRemove, final long tableTimeStamp, final long tableSeqNum, final long resolvedTime) throws SQLException {
return metaDataMutated(tenantId, tableName, tableSeqNum, new Mutator() {
@Override
public PMetaData mutate(PMetaData metaData) throws SQLException {
try {
return metaData.removeColumn(tenantId, tableName, columnsToRemove, tableTimeStamp, tableSeqNum, resolvedTime);
} catch (TableNotFoundException e) {
// The DROP TABLE may have been processed first, so just ignore.
return metaData;
}
}
});
}
@Override
public PhoenixConnection connect(String url, Properties info) throws SQLException {
checkClosed();
PMetaData metadata = latestMetaData;
if (metadata == null) {
throwConnectionClosedException();
}
return new PhoenixConnection(this, url, info, metadata);
}
private HColumnDescriptor generateColumnFamilyDescriptor(Pair<byte[],Map<String,Object>> family, PTableType tableType) throws SQLException {
HColumnDescriptor columnDesc = new HColumnDescriptor(family.getFirst());
if (tableType != PTableType.VIEW) {
if(props.get(QueryServices.DEFAULT_KEEP_DELETED_CELLS_ATTRIB) != null){
columnDesc.setKeepDeletedCells(props.getBoolean(
QueryServices.DEFAULT_KEEP_DELETED_CELLS_ATTRIB, QueryServicesOptions.DEFAULT_KEEP_DELETED_CELLS));
}
columnDesc.setDataBlockEncoding(SchemaUtil.DEFAULT_DATA_BLOCK_ENCODING);
for (Entry<String,Object> entry : family.getSecond().entrySet()) {
String key = entry.getKey();
Object value = entry.getValue();
setHColumnDescriptorValue(columnDesc, key, value);
}
}
return columnDesc;
}
// Workaround HBASE-14737
private static void setHColumnDescriptorValue(HColumnDescriptor columnDesc, String key, Object value) {
if (HConstants.VERSIONS.equals(key)) {
columnDesc.setMaxVersions(getMaxVersion(value));
} else {
columnDesc.setValue(key, value == null ? null : value.toString());
}
}
private static int getMaxVersion(Object value) {
if (value == null) {
return -1; // HColumnDescriptor.UNINITIALIZED is private
}
if (value instanceof Number) {
return ((Number)value).intValue();
}
String stringValue = value.toString();
if (stringValue.isEmpty()) {
return -1;
}
return Integer.parseInt(stringValue);
}
private void modifyColumnFamilyDescriptor(HColumnDescriptor hcd, Map<String,Object> props) throws SQLException {
for (Entry<String, Object> entry : props.entrySet()) {
String propName = entry.getKey();
Object value = entry.getValue();
setHColumnDescriptorValue(hcd, propName, value);
}
}
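// Builds the HTableDescriptor for a table: copies over non-Phoenix table properties,
// creates or updates the column family descriptors (VIEWs may only map onto existing
// families), and registers the required Phoenix coprocessors.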
private HTableDescriptor generateTableDescriptor(byte[] tableName, HTableDescriptor existingDesc, PTableType tableType, Map<String,Object> tableProps, List<Pair<byte[],Map<String,Object>>> families, byte[][] splits) throws SQLException {
String defaultFamilyName = (String)tableProps.remove(PhoenixDatabaseMetaData.DEFAULT_COLUMN_FAMILY_NAME);
HTableDescriptor tableDescriptor = (existingDesc != null) ? new HTableDescriptor(existingDesc) :
new HTableDescriptor(TableName.valueOf(tableName));
for (Entry<String,Object> entry : tableProps.entrySet()) {
String key = entry.getKey();
if (!TableProperty.isPhoenixTableProperty(key)) {
Object value = entry.getValue();
tableDescriptor.setValue(key, value == null ? null : value.toString());
}
}
if (families.isEmpty()) {
if (tableType != PTableType.VIEW) {
byte[] defaultFamilyByes = defaultFamilyName == null ? QueryConstants.DEFAULT_COLUMN_FAMILY_BYTES : Bytes.toBytes(defaultFamilyName);
// Add dummy column family so we have key values for tables that declare no column families
HColumnDescriptor columnDescriptor = generateColumnFamilyDescriptor(new Pair<byte[],Map<String,Object>>(defaultFamilyByes,Collections.<String,Object>emptyMap()), tableType);
tableDescriptor.addFamily(columnDescriptor);
}
} else {
for (Pair<byte[],Map<String,Object>> family : families) {
// If the family is only in the Phoenix description, add it. Otherwise, modify its properties accordingly.
byte[] familyByte = family.getFirst();
if (tableDescriptor.getFamily(familyByte) == null) {
if (tableType == PTableType.VIEW) {
String fullTableName = Bytes.toString(tableName);
throw new ReadOnlyTableException(
"The HBase column families for a read-only table must already exist",
SchemaUtil.getSchemaNameFromFullName(fullTableName),
SchemaUtil.getTableNameFromFullName(fullTableName),
Bytes.toString(familyByte));
}
HColumnDescriptor columnDescriptor = generateColumnFamilyDescriptor(family, tableType);
tableDescriptor.addFamily(columnDescriptor);
} else {
if (tableType != PTableType.VIEW) {
HColumnDescriptor columnDescriptor = tableDescriptor.getFamily(familyByte);
if (columnDescriptor == null) {
throw new IllegalArgumentException("Unable to find column descriptor with family name " + Bytes.toString(family.getFirst()));
}
modifyColumnFamilyDescriptor(columnDescriptor, family.getSecond());
}
}
}
}
addCoprocessors(tableName, tableDescriptor, tableType, tableProps);
return tableDescriptor;
}
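// Attaches the appropriate Phoenix coprocessors (scan/aggregate observers, server cache,
// indexing, metadata, sequence and transaction processors) to the table descriptor based
// on the table type and its properties.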
private void addCoprocessors(byte[] tableName, HTableDescriptor descriptor, PTableType tableType, Map<String,Object> tableProps) throws SQLException {
// The phoenix jar must be available on HBase classpath
int priority = props.getInt(QueryServices.COPROCESSOR_PRIORITY_ATTRIB, QueryServicesOptions.DEFAULT_COPROCESSOR_PRIORITY);
try {
if (!descriptor.hasCoprocessor(ScanRegionObserver.class.getName())) {
descriptor.addCoprocessor(ScanRegionObserver.class.getName(), null, priority, null);
}
if (!descriptor.hasCoprocessor(UngroupedAggregateRegionObserver.class.getName())) {
descriptor.addCoprocessor(UngroupedAggregateRegionObserver.class.getName(), null, priority, null);
}
if (!descriptor.hasCoprocessor(GroupedAggregateRegionObserver.class.getName())) {
descriptor.addCoprocessor(GroupedAggregateRegionObserver.class.getName(), null, priority, null);
}
if (!descriptor.hasCoprocessor(ServerCachingEndpointImpl.class.getName())) {
descriptor.addCoprocessor(ServerCachingEndpointImpl.class.getName(), null, priority, null);
}
boolean isTransactional =
Boolean.TRUE.equals(tableProps.get(TableProperty.TRANSACTIONAL.name())) ||
Boolean.TRUE.equals(tableProps.get(TxConstants.READ_NON_TX_DATA)); // For ALTER TABLE
// TODO: better encapsulation for this
// Since indexes can't have indexes, don't install our indexing coprocessor for indexes.
// Also don't install on the SYSTEM.CATALOG and SYSTEM.STATS tables because we use the
// all-or-none mutate class, which breaks when this coprocessor is installed (PHOENIX-1318).
if ((tableType != PTableType.INDEX && tableType != PTableType.VIEW)
&& !SchemaUtil.isMetaTable(tableName)
&& !SchemaUtil.isStatsTable(tableName)) {
if (isTransactional) {
if (!descriptor.hasCoprocessor(PhoenixTransactionalIndexer.class.getName())) {
descriptor.addCoprocessor(PhoenixTransactionalIndexer.class.getName(), null, priority, null);
}
// For alter table, remove non transactional index coprocessor
if (descriptor.hasCoprocessor(Indexer.class.getName())) {
descriptor.removeCoprocessor(Indexer.class.getName());
}
} else {
if (!descriptor.hasCoprocessor(Indexer.class.getName())) {
// If exception on alter table to transition back to non transactional
if (descriptor.hasCoprocessor(PhoenixTransactionalIndexer.class.getName())) {
descriptor.removeCoprocessor(PhoenixTransactionalIndexer.class.getName());
}
Map<String, String> opts = Maps.newHashMapWithExpectedSize(1);
opts.put(NonTxIndexBuilder.CODEC_CLASS_NAME_KEY, PhoenixIndexCodec.class.getName());
Indexer.enableIndexing(descriptor, PhoenixIndexBuilder.class, opts, priority);
}
}
}
if (SchemaUtil.isStatsTable(tableName) && !descriptor.hasCoprocessor(MultiRowMutationEndpoint.class.getName())) {
descriptor.addCoprocessor(MultiRowMutationEndpoint.class.getName(),
null, priority, null);
}
if (descriptor.getValue(MetaDataUtil.IS_LOCAL_INDEX_TABLE_PROP_BYTES) != null
&& Boolean.TRUE.equals(PBoolean.INSTANCE.toObject(descriptor
.getValue(MetaDataUtil.IS_LOCAL_INDEX_TABLE_PROP_BYTES)))) {
if (!descriptor.hasCoprocessor(IndexHalfStoreFileReaderGenerator.class.getName())) {
descriptor.addCoprocessor(IndexHalfStoreFileReaderGenerator.class.getName(),
null, priority, null);
}
} else {
if (!descriptor.hasCoprocessor(LocalIndexSplitter.class.getName())
&& !SchemaUtil.isMetaTable(tableName)
&& !SchemaUtil.isSequenceTable(tableName)) {
descriptor.addCoprocessor(LocalIndexSplitter.class.getName(), null, priority, null);
}
}
// Setup split policy on Phoenix metadata table to ensure that the key values of a Phoenix table
// stay on the same region.
if (SchemaUtil.isMetaTable(tableName) || SchemaUtil.isFunctionTable(tableName)) {
if (!descriptor.hasCoprocessor(MetaDataEndpointImpl.class.getName())) {
descriptor.addCoprocessor(MetaDataEndpointImpl.class.getName(), null, priority, null);
}
if(SchemaUtil.isMetaTable(tableName) ) {
if (!descriptor.hasCoprocessor(MetaDataRegionObserver.class.getName())) {
descriptor.addCoprocessor(MetaDataRegionObserver.class.getName(), null, priority + 1, null);
}
}
} else if (SchemaUtil.isSequenceTable(tableName)) {
if (!descriptor.hasCoprocessor(SequenceRegionObserver.class.getName())) {
descriptor.addCoprocessor(SequenceRegionObserver.class.getName(), null, priority, null);
}
}
if (isTransactional) {
if (!descriptor.hasCoprocessor(TransactionProcessor.class.getName())) {
descriptor.addCoprocessor(TransactionProcessor.class.getName(), null, priority - 10, null);
}
} else {
// If exception on alter table to transition back to non transactional
if (descriptor.hasCoprocessor(TransactionProcessor.class.getName())) {
descriptor.removeCoprocessor(TransactionProcessor.class.getName());
}
}
} catch (IOException e) {
throw ServerUtil.parseServerException(e);
}
}
private static interface RetriableOperation {
boolean checkForCompletion() throws TimeoutException, IOException;
String getOperatioName();
}
private void pollForUpdatedTableDescriptor(final HBaseAdmin admin, final HTableDescriptor newTableDescriptor,
final byte[] tableName) throws InterruptedException, TimeoutException {
checkAndRetry(new RetriableOperation() {
@Override
public String getOperatioName() {
return "UpdateOrNewTableDescriptor";
}
@Override
public boolean checkForCompletion() throws TimeoutException, IOException {
HTableDescriptor tableDesc = admin.getTableDescriptor(tableName);
return newTableDescriptor.equals(tableDesc);
}
});
}
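// Repeatedly invokes the operation's completion check, sleeping between attempts, until
// it succeeds or the configured number of schema-update retries is exhausted, in which
// case a TimeoutException is thrown.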
private void checkAndRetry(RetriableOperation op) throws InterruptedException, TimeoutException {
int maxRetries = ConnectionQueryServicesImpl.this.props.getInt(
QueryServices.NUM_RETRIES_FOR_SCHEMA_UPDATE_CHECK,
QueryServicesOptions.DEFAULT_RETRIES_FOR_SCHEMA_UPDATE_CHECK);
long sleepInterval = ConnectionQueryServicesImpl.this.props
.getLong(QueryServices.DELAY_FOR_SCHEMA_UPDATE_CHECK,
QueryServicesOptions.DEFAULT_DELAY_FOR_SCHEMA_UPDATE_CHECK);
boolean success = false;
int numTries = 1;
PhoenixStopWatch watch = new PhoenixStopWatch();
watch.start();
do {
try {
success = op.checkForCompletion();
} catch (Exception ex) {
// If we encounter any exception on the first or last try, propagate the exception and fail.
// Else, we swallow the exception and retry till we reach maxRetries.
if (numTries == 1 || numTries == maxRetries) {
watch.stop();
TimeoutException toThrow = new TimeoutException("Operation " + op.getOperatioName()
+ " didn't complete because of exception. Time elapsed: " + watch.elapsedMillis());
toThrow.initCause(ex);
throw toThrow;
}
}
numTries++;
Thread.sleep(sleepInterval);
} while (numTries < maxRetries && !success);
watch.stop();
if (!success) {
throw new TimeoutException("Operation " + op.getOperatioName() + " didn't complete within "
+ watch.elapsedMillis() + " ms "
+ (numTries > 1 ? ("after trying " + numTries + (numTries > 1 ? "times." : "time.")) : ""));
} else {
if (logger.isDebugEnabled()) {
logger.debug("Operation "
+ op.getOperatioName()
+ " completed within "
+ watch.elapsedMillis()
+ "ms "
+ (numTries > 1 ? ("after trying " + numTries + (numTries > 1 ? "times." : "time.")) : ""));
}
}
}
private boolean allowOnlineTableSchemaUpdate() {
return props.getBoolean(
QueryServices.ALLOW_ONLINE_TABLE_SCHEMA_UPDATE,
QueryServicesOptions.DEFAULT_ALLOW_ONLINE_TABLE_SCHEMA_UPDATE);
}
/**
* Creates the underlying HBase table if it does not exist, or reconciles an existing
* table's descriptor with the requested properties.
* @param tableName physical HBase table name
* @param splits initial split points, or null for a single region
* @param modifyExistingMetaData whether an existing table's HBase metadata should be updated
* @return the updated descriptor if the existing HBase metadata was modified, the existing
* descriptor if modifyExistingMetaData is false, or null if the table was newly
* created or no metadata changes were needed
* @throws SQLException
*/
private HTableDescriptor ensureTableCreated(byte[] tableName, PTableType tableType , Map<String,Object> props, List<Pair<byte[],Map<String,Object>>> families, byte[][] splits, boolean modifyExistingMetaData) throws SQLException {
HBaseAdmin admin = null;
SQLException sqlE = null;
HTableDescriptor existingDesc = null;
boolean isMetaTable = SchemaUtil.isMetaTable(tableName);
boolean tableExist = true;
try {
final String quorum = ZKConfig.getZKQuorumServersString(config);
final String znode = this.props.get(HConstants.ZOOKEEPER_ZNODE_PARENT);
logger.debug("Found quorum: " + quorum + ":" + znode);
admin = new HBaseAdmin(config);
try {
existingDesc = admin.getTableDescriptor(tableName);
} catch (org.apache.hadoop.hbase.TableNotFoundException e) {
tableExist = false;
if (tableType == PTableType.VIEW) {
String fullTableName = Bytes.toString(tableName);
throw new ReadOnlyTableException(
"An HBase table for a VIEW must already exist",
SchemaUtil.getSchemaNameFromFullName(fullTableName),
SchemaUtil.getTableNameFromFullName(fullTableName));
}
}
HTableDescriptor newDesc = generateTableDescriptor(tableName, existingDesc, tableType , props, families, splits);
if (!tableExist) {
if (newDesc.getValue(MetaDataUtil.IS_LOCAL_INDEX_TABLE_PROP_BYTES) != null && Boolean.TRUE.equals(
PBoolean.INSTANCE.toObject(newDesc.getValue(MetaDataUtil.IS_LOCAL_INDEX_TABLE_PROP_BYTES)))) {
newDesc.setValue(HTableDescriptor.SPLIT_POLICY, IndexRegionSplitPolicy.class.getName());
}
// Remove the splitPolicy attribute to prevent HBASE-12570
if (isMetaTable) {
newDesc.remove(HTableDescriptor.SPLIT_POLICY);
}
try {
if (splits == null) {
admin.createTable(newDesc);
} else {
admin.createTable(newDesc, splits);
}
} catch (TableExistsException e) {
// We can ignore this, as it just means that another client beat us
// to creating the HBase metadata.
return null;
}
if (isMetaTable) {
checkClientServerCompatibility();
/*
* Now we modify the table to add the split policy, since we know that the client and
* server are compatible. This works around HBASE-12570 which causes the cluster to be
* brought down.
*/
newDesc.setValue(HTableDescriptor.SPLIT_POLICY, MetaDataSplitPolicy.class.getName());
if (allowOnlineTableSchemaUpdate()) {
// No need to wait/poll for this update
admin.modifyTable(tableName, newDesc);
} else {
admin.disableTable(tableName);
admin.modifyTable(tableName, newDesc);
admin.enableTable(tableName);
}
}
return null;
} else {
if (isMetaTable) {
checkClientServerCompatibility();
}
if (!modifyExistingMetaData) {
return existingDesc; // Caller already knows that no metadata was changed
}
boolean willBeTx = Boolean.TRUE.equals(props.get(TableProperty.TRANSACTIONAL.name()));
// If mapping an existing table as transactional, set property so that existing
// data is correctly read.
if (willBeTx) {
newDesc.setValue(TxConstants.READ_NON_TX_DATA, Boolean.TRUE.toString());
} else {
// If we think we're creating a non transactional table when it's already
// transactional, don't allow.
if (existingDesc.hasCoprocessor(TransactionProcessor.class.getName())) {
throw new SQLExceptionInfo.Builder(SQLExceptionCode.TX_MAY_NOT_SWITCH_TO_NON_TX)
.setSchemaName(SchemaUtil.getSchemaNameFromFullName(tableName))
.setTableName(SchemaUtil.getTableNameFromFullName(tableName)).build().buildException();
}
newDesc.remove(TxConstants.READ_NON_TX_DATA);
}
if (existingDesc.equals(newDesc)) {
return null; // Indicate that no metadata was changed
}
modifyTable(tableName, newDesc, true);
return newDesc;
}
} catch (IOException e) {
sqlE = ServerUtil.parseServerException(e);
} catch (InterruptedException e) {
// restore the interrupt status
Thread.currentThread().interrupt();
sqlE = new SQLExceptionInfo.Builder(SQLExceptionCode.INTERRUPTED_EXCEPTION).setRootCause(e).build().buildException();
} catch (TimeoutException e) {
sqlE = new SQLExceptionInfo.Builder(SQLExceptionCode.OPERATION_TIMED_OUT).setRootCause(e.getCause() != null ? e.getCause() : e).build().buildException();
} finally {
try {
if (admin != null) {
admin.close();
}
} catch (IOException e) {
if (sqlE == null) {
sqlE = ServerUtil.parseServerException(e);
} else {
sqlE.setNextException(ServerUtil.parseServerException(e));
}
} finally {
if (sqlE != null) {
throw sqlE;
}
}
}
return null; // will never make it here
}
private void modifyTable(byte[] tableName, HTableDescriptor newDesc, boolean shouldPoll) throws IOException,
InterruptedException, TimeoutException {
try (HBaseAdmin admin = new HBaseAdmin(config)) {
if (!allowOnlineTableSchemaUpdate()) {
admin.disableTable(tableName);
admin.modifyTable(tableName, newDesc);
admin.enableTable(tableName);
} else {
admin.modifyTable(tableName, newDesc);
if (shouldPoll) {
pollForUpdatedTableDescriptor(admin, newDesc, tableName);
}
}
}
}
private static boolean hasIndexWALCodec(Long serverVersion) {
if (serverVersion == null) {
return true;
}
return MetaDataUtil.decodeHasIndexWALCodec(serverVersion);
}
private static boolean isCompatible(Long serverVersion) {
if (serverVersion == null) {
return false;
}
return MetaDataUtil.areClientAndServerCompatible(serverVersion);
}
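// Invokes the MetaData coprocessor's getVersion RPC once per region server hosting
// SYSTEM.CATALOG to verify that the server-side jar is compatible with this client,
// and records the lowest HBase version found in the cluster.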
private void checkClientServerCompatibility() throws SQLException {
StringBuilder buf = new StringBuilder("The following servers require an updated " + QueryConstants.DEFAULT_COPROCESS_PATH + " to be put in the classpath of HBase: ");
boolean isIncompatible = false;
int minHBaseVersion = Integer.MAX_VALUE;
try {
List<HRegionLocation> locations = this.getAllTableRegions(SYSTEM_CATALOG_NAME_BYTES);
Set<HRegionLocation> serverMap = Sets.newHashSetWithExpectedSize(locations.size());
TreeMap<byte[], HRegionLocation> regionMap = Maps.newTreeMap(Bytes.BYTES_COMPARATOR);
List<byte[]> regionKeys = Lists.newArrayListWithExpectedSize(locations.size());
for (HRegionLocation entry : locations) {
if (!serverMap.contains(entry)) {
regionKeys.add(entry.getRegionInfo().getStartKey());
regionMap.put(entry.getRegionInfo().getRegionName(), entry);
serverMap.add(entry);
}
}
HTableInterface ht = this.getTable(PhoenixDatabaseMetaData.SYSTEM_CATALOG_NAME_BYTES);
final Map<byte[], Long> results =
ht.coprocessorService(MetaDataService.class, null, null, new Batch.Call<MetaDataService,Long>() {
@Override
public Long call(MetaDataService instance) throws IOException {
ServerRpcController controller = new ServerRpcController();
BlockingRpcCallback<GetVersionResponse> rpcCallback =
new BlockingRpcCallback<GetVersionResponse>();
GetVersionRequest.Builder builder = GetVersionRequest.newBuilder();
builder.setClientVersion(VersionUtil.encodeVersion(PHOENIX_MAJOR_VERSION, PHOENIX_MINOR_VERSION, PHOENIX_PATCH_NUMBER));
instance.getVersion(controller, builder.build(), rpcCallback);
if(controller.getFailedOn() != null) {
throw controller.getFailedOn();
}
return rpcCallback.get().getVersion();
}
});
for (Map.Entry<byte[],Long> result : results.entrySet()) {
// This is the "phoenix.jar" is in-place, but server is out-of-sync with client case.
if (!isCompatible(result.getValue())) {
isIncompatible = true;
HRegionLocation name = regionMap.get(result.getKey());
buf.append(name);
buf.append(';');
}
hasIndexWALCodec &= hasIndexWALCodec(result.getValue());
if (minHBaseVersion > MetaDataUtil.decodeHBaseVersion(result.getValue())) {
minHBaseVersion = MetaDataUtil.decodeHBaseVersion(result.getValue());
}
}
lowestClusterHBaseVersion = minHBaseVersion;
} catch (SQLException e) {
throw e;
} catch (Throwable t) {
// This is the case if the "phoenix.jar" is not on the classpath of HBase on the region server
throw new SQLExceptionInfo.Builder(SQLExceptionCode.INCOMPATIBLE_CLIENT_SERVER_JAR).setRootCause(t)
.setMessage("Ensure that " + QueryConstants.DEFAULT_COPROCESS_PATH + " is put on the classpath of HBase in every region server: " + t.getMessage())
.build().buildException();
}
if (isIncompatible) {
buf.setLength(buf.length()-1);
throw new SQLExceptionInfo.Builder(SQLExceptionCode.OUTDATED_JARS).setMessage(buf.toString()).build().buildException();
}
}
/**
* Invoke meta data coprocessor with one retry if the key was found to not be in the regions
* (due to a table split)
*/
private MetaDataMutationResult metaDataCoprocessorExec(byte[] tableKey,
Batch.Call<MetaDataService, MetaDataResponse> callable) throws SQLException {
return metaDataCoprocessorExec(tableKey, callable, PhoenixDatabaseMetaData.SYSTEM_CATALOG_NAME_BYTES);
}
/**
* Invoke meta data coprocessor with one retry if the key was found to not be in the regions
* (due to a table split)
*/
private MetaDataMutationResult metaDataCoprocessorExec(byte[] tableKey,
Batch.Call<MetaDataService, MetaDataResponse> callable, byte[] tableName) throws SQLException {
try {
boolean retried = false;
while (true) {
if (retried) {
connection.relocateRegion(
TableName.valueOf(tableName),
tableKey);
}
HTableInterface ht = this.getTable(tableName);
try {
final Map<byte[], MetaDataResponse> results =
ht.coprocessorService(MetaDataService.class, tableKey, tableKey, callable);
assert(results.size() == 1);
MetaDataResponse result = results.values().iterator().next();
if (result.getReturnCode() == MetaDataProtos.MutationCode.TABLE_NOT_IN_REGION
|| result.getReturnCode() == MetaDataProtos.MutationCode.FUNCTION_NOT_IN_REGION) {
if (retried) return MetaDataMutationResult.constructFromProto(result);
retried = true;
continue;
}
return MetaDataMutationResult.constructFromProto(result);
} finally {
Closeables.closeQuietly(ht);
}
}
} catch (IOException e) {
throw ServerUtil.parseServerException(e);
} catch (Throwable t) {
throw new SQLException(t);
}
}
// Our property values are translated using toString, so we need to "string-ify" this.
private static final String TRUE_BYTES_AS_STRING = Bytes.toString(PDataType.TRUE_BYTES);
private void ensureViewIndexTableCreated(byte[] physicalTableName, Map<String,Object> tableProps, List<Pair<byte[],Map<String,Object>>> families, byte[][] splits, long timestamp) throws SQLException {
Long maxFileSize = (Long)tableProps.get(HTableDescriptor.MAX_FILESIZE);
if (maxFileSize == null) {
maxFileSize = this.props.getLong(HConstants.HREGION_MAX_FILESIZE, HConstants.DEFAULT_MAX_FILE_SIZE);
}
byte[] physicalIndexName = MetaDataUtil.getViewIndexPhysicalName(physicalTableName);
int indexMaxFileSizePerc;
// Get percentage to use from table props first and then fallback to config
Integer indexMaxFileSizePercProp = (Integer)tableProps.remove(QueryServices.INDEX_MAX_FILESIZE_PERC_ATTRIB);
if (indexMaxFileSizePercProp == null) {
indexMaxFileSizePerc = this.props.getInt(QueryServices.INDEX_MAX_FILESIZE_PERC_ATTRIB, QueryServicesOptions.DEFAULT_INDEX_MAX_FILESIZE_PERC);
} else {
indexMaxFileSizePerc = indexMaxFileSizePercProp;
}
long indexMaxFileSize = maxFileSize * indexMaxFileSizePerc / 100;
tableProps.put(HTableDescriptor.MAX_FILESIZE, indexMaxFileSize);
tableProps.put(MetaDataUtil.IS_VIEW_INDEX_TABLE_PROP_NAME, TRUE_BYTES_AS_STRING);
HTableDescriptor desc = ensureTableCreated(physicalIndexName, PTableType.TABLE, tableProps, families, splits, false);
if (desc != null) {
if (!Boolean.TRUE.equals(PBoolean.INSTANCE.toObject(desc.getValue(MetaDataUtil.IS_VIEW_INDEX_TABLE_PROP_BYTES)))) {
String fullTableName = Bytes.toString(physicalIndexName);
throw new TableAlreadyExistsException(
"Unable to create shared physical table for indexes on views.",
SchemaUtil.getSchemaNameFromFullName(fullTableName),
SchemaUtil.getTableNameFromFullName(fullTableName));
}
}
}
private void ensureLocalIndexTableCreated(byte[] physicalTableName, Map<String,Object> tableProps, List<Pair<byte[],Map<String,Object>>> families, byte[][] splits, long timestamp) throws SQLException {
PTable table;
String parentTableName = Bytes.toString(physicalTableName, MetaDataUtil.LOCAL_INDEX_TABLE_PREFIX_BYTES.length,
physicalTableName.length - MetaDataUtil.LOCAL_INDEX_TABLE_PREFIX_BYTES.length);
try {
synchronized (latestMetaDataLock) {
throwConnectionClosedIfNullMetaData();
table = latestMetaData.getTableRef(new PTableKey(PName.EMPTY_NAME, parentTableName)).getTable();
latestMetaDataLock.notifyAll();
}
if (table.getTimeStamp() >= timestamp) { // Table in cache is newer than client timestamp which shouldn't be the case
throw new TableNotFoundException(table.getSchemaName().getString(), table.getTableName().getString());
}
} catch (TableNotFoundException e) {
byte[] schemaName = Bytes.toBytes(SchemaUtil.getSchemaNameFromFullName(parentTableName));
byte[] tableName = Bytes.toBytes(SchemaUtil.getTableNameFromFullName(parentTableName));
MetaDataMutationResult result = this.getTable(null, schemaName, tableName, HConstants.LATEST_TIMESTAMP, timestamp);
table = result.getTable();
if (table == null) {
throw e;
}
}
ensureLocalIndexTableCreated(physicalTableName, tableProps, families, splits);
}
private void ensureLocalIndexTableCreated(byte[] physicalTableName, Map<String, Object> tableProps, List<Pair<byte[], Map<String, Object>>> families, byte[][] splits) throws SQLException, TableAlreadyExistsException {
// If we're not allowing local indexes or the hbase version is too low,
// don't create the local index table
if ( !this.getProps().getBoolean(QueryServices.ALLOW_LOCAL_INDEX_ATTRIB, QueryServicesOptions.DEFAULT_ALLOW_LOCAL_INDEX)
|| !this.supportsFeature(Feature.LOCAL_INDEX)) {
return;
}
tableProps.put(MetaDataUtil.IS_LOCAL_INDEX_TABLE_PROP_NAME, TRUE_BYTES_AS_STRING);
HTableDescriptor desc = ensureTableCreated(physicalTableName, PTableType.TABLE, tableProps, families, splits, true);
if (desc != null) {
if (!Boolean.TRUE.equals(PBoolean.INSTANCE.toObject(desc.getValue(MetaDataUtil.IS_LOCAL_INDEX_TABLE_PROP_BYTES)))) {
String fullTableName = Bytes.toString(physicalTableName);
throw new TableAlreadyExistsException(
"Unable to create shared physical table for local indexes.",
SchemaUtil.getSchemaNameFromFullName(fullTableName),
SchemaUtil.getTableNameFromFullName(fullTableName));
}
}
}
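// Drops the physical view index table derived from the given base table name, but only
// if it exists, is marked as a view index table, and dropping metadata is enabled.
// Returns true if the table was actually deleted.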
private boolean ensureViewIndexTableDropped(byte[] physicalTableName, long timestamp) throws SQLException {
byte[] physicalIndexName = MetaDataUtil.getViewIndexPhysicalName(physicalTableName);
HTableDescriptor desc = null;
HBaseAdmin admin = null;
boolean wasDeleted = false;
try {
admin = new HBaseAdmin(config);
try {
desc = admin.getTableDescriptor(physicalIndexName);
if (Boolean.TRUE.equals(PBoolean.INSTANCE.toObject(desc.getValue(MetaDataUtil.IS_VIEW_INDEX_TABLE_PROP_BYTES)))) {
this.tableStatsCache.invalidate(new ImmutableBytesPtr(physicalIndexName));
final ReadOnlyProps props = this.getProps();
final boolean dropMetadata = props.getBoolean(DROP_METADATA_ATTRIB, DEFAULT_DROP_METADATA);
if (dropMetadata) {
admin.disableTable(physicalIndexName);
admin.deleteTable(physicalIndexName);
clearTableRegionCache(physicalIndexName);
wasDeleted = true;
}
}
} catch (org.apache.hadoop.hbase.TableNotFoundException ignore) {
// Ignore, as we may never have created a view index table
}
} catch (IOException e) {
throw ServerUtil.parseServerException(e);
} finally {
try {
if (admin != null) admin.close();
} catch (IOException e) {
logger.warn("",e);
}
}
return wasDeleted;
}
private boolean ensureLocalIndexTableDropped(byte[] physicalTableName, long timestamp) throws SQLException {
byte[] physicalIndexName = MetaDataUtil.getLocalIndexPhysicalName(physicalTableName);
HTableDescriptor desc = null;
HBaseAdmin admin = null;
boolean wasDeleted = false;
try {
admin = new HBaseAdmin(config);
try {
desc = admin.getTableDescriptor(physicalIndexName);
if (Boolean.TRUE.equals(PBoolean.INSTANCE.toObject(desc.getValue(MetaDataUtil.IS_LOCAL_INDEX_TABLE_PROP_BYTES)))) {
this.tableStatsCache.invalidate(new ImmutableBytesPtr(physicalIndexName));
final ReadOnlyProps props = this.getProps();
final boolean dropMetadata = props.getBoolean(DROP_METADATA_ATTRIB, DEFAULT_DROP_METADATA);
if (dropMetadata) {
admin.disableTable(physicalIndexName);
admin.deleteTable(physicalIndexName);
clearTableRegionCache(physicalIndexName);
wasDeleted = true;
}
}
} catch (org.apache.hadoop.hbase.TableNotFoundException ignore) {
// Ignore, as we may never have created a local index table
}
} catch (IOException e) {
throw ServerUtil.parseServerException(e);
} finally {
try {
if (admin != null) admin.close();
} catch (IOException e) {
logger.warn("",e);
}
}
return wasDeleted;
}
@Override
public MetaDataMutationResult createTable(final List<Mutation> tableMetaData, byte[] physicalTableName, PTableType tableType,
Map<String,Object> tableProps, final List<Pair<byte[],Map<String,Object>>> families, byte[][] splits) throws SQLException {
byte[][] rowKeyMetadata = new byte[3][];
Mutation m = MetaDataUtil.getPutOnlyTableHeaderRow(tableMetaData);
byte[] key = m.getRow();
SchemaUtil.getVarChars(key, rowKeyMetadata);
byte[] tenantIdBytes = rowKeyMetadata[PhoenixDatabaseMetaData.TENANT_ID_INDEX];
byte[] schemaBytes = rowKeyMetadata[PhoenixDatabaseMetaData.SCHEMA_NAME_INDEX];
byte[] tableBytes = rowKeyMetadata[PhoenixDatabaseMetaData.TABLE_NAME_INDEX];
byte[] tableName = physicalTableName != null ? physicalTableName : SchemaUtil.getTableNameAsBytes(schemaBytes, tableBytes);
boolean localIndexTable = Boolean.TRUE.equals(tableProps.remove(MetaDataUtil.IS_LOCAL_INDEX_TABLE_PROP_NAME));
if ((tableType == PTableType.VIEW && physicalTableName != null) || (tableType != PTableType.VIEW && physicalTableName == null)) {
// For views this will ensure that metadata already exists
// For tables and indexes, this will create the metadata if it doesn't already exist
ensureTableCreated(tableName, tableType, tableProps, families, splits, true);
}
ImmutableBytesWritable ptr = new ImmutableBytesWritable();
if (tableType == PTableType.INDEX) { // Index on view
// Physical index table created up front for multi tenant
// TODO: if viewIndexId is Short.MIN_VALUE, then we don't need to attempt to create it
if (physicalTableName != null) {
if (localIndexTable) {
ensureLocalIndexTableCreated(tableName, tableProps, families, splits, MetaDataUtil.getClientTimeStamp(m));
} else if (!MetaDataUtil.isMultiTenant(m, kvBuilder, ptr)) {
ensureViewIndexTableCreated(tenantIdBytes.length == 0 ? null : PNameFactory.newName(tenantIdBytes), physicalTableName, MetaDataUtil.getClientTimeStamp(m));
}
}
} else if (tableType == PTableType.TABLE && MetaDataUtil.isMultiTenant(m, kvBuilder, ptr)) { // Create view index table up front for multi tenant tables
ptr.set(QueryConstants.DEFAULT_COLUMN_FAMILY_BYTES);
MetaDataUtil.getMutationValue(m, PhoenixDatabaseMetaData.DEFAULT_COLUMN_FAMILY_NAME_BYTES, kvBuilder, ptr);
List<Pair<byte[],Map<String,Object>>> familiesPlusDefault = null;
for (Pair<byte[],Map<String,Object>> family : families) {
byte[] cf = family.getFirst();
if (Bytes.compareTo(cf, 0, cf.length, ptr.get(), ptr.getOffset(),ptr.getLength()) == 0) {
familiesPlusDefault = families;
break;
}
}
// Don't override if default family already present
if (familiesPlusDefault == null) {
byte[] defaultCF = ByteUtil.copyKeyBytesIfNecessary(ptr);
// Only use splits if table is salted, otherwise it may not be applicable
// Always add default column family, as we don't know in advance if we'll need it
familiesPlusDefault = Lists.newArrayList(families);
familiesPlusDefault.add(new Pair<byte[],Map<String,Object>>(defaultCF,Collections.<String,Object>emptyMap()));
}
ensureViewIndexTableCreated(tableName, tableProps, familiesPlusDefault, MetaDataUtil.isSalted(m, kvBuilder, ptr) ? splits : null, MetaDataUtil.getClientTimeStamp(m));
}
byte[] tableKey = SchemaUtil.getTableKey(tenantIdBytes, schemaBytes, tableBytes);
MetaDataMutationResult result = metaDataCoprocessorExec(tableKey,
new Batch.Call<MetaDataService, MetaDataResponse>() {
@Override
public MetaDataResponse call(MetaDataService instance) throws IOException {
ServerRpcController controller = new ServerRpcController();
BlockingRpcCallback<MetaDataResponse> rpcCallback =
new BlockingRpcCallback<MetaDataResponse>();
CreateTableRequest.Builder builder = CreateTableRequest.newBuilder();
for (Mutation m : tableMetaData) {
MutationProto mp = ProtobufUtil.toProto(m);
builder.addTableMetadataMutations(mp.toByteString());
}
builder.setClientVersion(VersionUtil.encodeVersion(PHOENIX_MAJOR_VERSION, PHOENIX_MINOR_VERSION, PHOENIX_PATCH_NUMBER));
CreateTableRequest build = builder.build();
instance.createTable(controller, build, rpcCallback);
if(controller.getFailedOn() != null) {
throw controller.getFailedOn();
}
return rpcCallback.get();
}
});
return result;
}
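/**
 * Resolves table metadata through the MetaDataService getTable endpoint, passing both the
 * locally cached table timestamp and the client timestamp so the server can avoid returning
 * data the client already has.
 */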
@Override
public MetaDataMutationResult getTable(final PName tenantId, final byte[] schemaBytes, final byte[] tableBytes,
final long tableTimestamp, final long clientTimestamp) throws SQLException {
final byte[] tenantIdBytes = tenantId == null ? ByteUtil.EMPTY_BYTE_ARRAY : tenantId.getBytes();
byte[] tableKey = SchemaUtil.getTableKey(tenantIdBytes, schemaBytes, tableBytes);
return metaDataCoprocessorExec(tableKey,
new Batch.Call<MetaDataService, MetaDataResponse>() {
@Override
public MetaDataResponse call(MetaDataService instance) throws IOException {
ServerRpcController controller = new ServerRpcController();
BlockingRpcCallback<MetaDataResponse> rpcCallback =
new BlockingRpcCallback<MetaDataResponse>();
GetTableRequest.Builder builder = GetTableRequest.newBuilder();
builder.setTenantId(ByteStringer.wrap(tenantIdBytes));
builder.setSchemaName(ByteStringer.wrap(schemaBytes));
builder.setTableName(ByteStringer.wrap(tableBytes));
builder.setTableTimestamp(tableTimestamp);
builder.setClientTimestamp(clientTimestamp);
builder.setClientVersion(VersionUtil.encodeVersion(PHOENIX_MAJOR_VERSION, PHOENIX_MINOR_VERSION, PHOENIX_PATCH_NUMBER));
instance.getTable(controller, builder.build(), rpcCallback);
if(controller.getFailedOn() != null) {
throw controller.getFailedOn();
}
return rpcCallback.get();
}
});
}
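/**
 * Drops a table through the MetaDataService dropTable endpoint. On success, any physical
 * tables reported by the server are deleted when DROP_METADATA is enabled, their stats cache
 * entries are invalidated, and shared view/local index tables of a dropped data table are
 * cleaned up.
 */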
@Override
public MetaDataMutationResult dropTable(final List<Mutation> tableMetaData, final PTableType tableType, final boolean cascade) throws SQLException {
byte[][] rowKeyMetadata = new byte[3][];
SchemaUtil.getVarChars(tableMetaData.get(0).getRow(), rowKeyMetadata);
byte[] tenantIdBytes = rowKeyMetadata[PhoenixDatabaseMetaData.TENANT_ID_INDEX];
byte[] schemaBytes = rowKeyMetadata[PhoenixDatabaseMetaData.SCHEMA_NAME_INDEX];
byte[] tableBytes = rowKeyMetadata[PhoenixDatabaseMetaData.TABLE_NAME_INDEX];
byte[] tableKey = SchemaUtil.getTableKey(tenantIdBytes == null ? ByteUtil.EMPTY_BYTE_ARRAY : tenantIdBytes, schemaBytes, tableBytes);
final MetaDataMutationResult result = metaDataCoprocessorExec(tableKey,
new Batch.Call<MetaDataService, MetaDataResponse>() {
@Override
public MetaDataResponse call(MetaDataService instance) throws IOException {
ServerRpcController controller = new ServerRpcController();
BlockingRpcCallback<MetaDataResponse> rpcCallback =
new BlockingRpcCallback<MetaDataResponse>();
DropTableRequest.Builder builder = DropTableRequest.newBuilder();
for (Mutation m : tableMetaData) {
MutationProto mp = ProtobufUtil.toProto(m);
builder.addTableMetadataMutations(mp.toByteString());
}
builder.setTableType(tableType.getSerializedValue());
builder.setCascade(cascade);
builder.setClientVersion(VersionUtil.encodeVersion(PHOENIX_MAJOR_VERSION, PHOENIX_MINOR_VERSION, PHOENIX_PATCH_NUMBER));
instance.dropTable(controller, builder.build(), rpcCallback);
if(controller.getFailedOn() != null) {
throw controller.getFailedOn();
}
return rpcCallback.get();
}
});
final MutationCode code = result.getMutationCode();
switch(code) {
case TABLE_ALREADY_EXISTS:
ReadOnlyProps props = this.getProps();
boolean dropMetadata = props.getBoolean(DROP_METADATA_ATTRIB, DEFAULT_DROP_METADATA);
if (dropMetadata) {
dropTables(result.getTableNamesToDelete());
}
invalidateTables(result.getTableNamesToDelete());
if (tableType == PTableType.TABLE) {
byte[] physicalName = SchemaUtil.getTableNameAsBytes(schemaBytes, tableBytes);
long timestamp = MetaDataUtil.getClientTimeStamp(tableMetaData);
ensureViewIndexTableDropped(physicalName, timestamp);
ensureLocalIndexTableDropped(physicalName, timestamp);
tableStatsCache.invalidate(new ImmutableBytesPtr(physicalName));
}
break;
default:
break;
}
return result;
}
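/**
 * Drops a user-defined function by sending its metadata mutations to the MetaDataService
 * dropFunction endpoint against the SYSTEM.FUNCTION table.
 */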
@Override
public MetaDataMutationResult dropFunction(final List<Mutation> functionData, final boolean ifExists) throws SQLException {
byte[][] rowKeyMetadata = new byte[2][];
byte[] key = functionData.get(0).getRow();
SchemaUtil.getVarChars(key, rowKeyMetadata);
byte[] tenantIdBytes = rowKeyMetadata[PhoenixDatabaseMetaData.TENANT_ID_INDEX];
byte[] functionBytes = rowKeyMetadata[PhoenixDatabaseMetaData.FUNTION_NAME_INDEX];
byte[] functionKey = SchemaUtil.getFunctionKey(tenantIdBytes, functionBytes);
final MetaDataMutationResult result = metaDataCoprocessorExec(functionKey,
new Batch.Call<MetaDataService, MetaDataResponse>() {
@Override
public MetaDataResponse call(MetaDataService instance) throws IOException {
ServerRpcController controller = new ServerRpcController();
BlockingRpcCallback<MetaDataResponse> rpcCallback =
new BlockingRpcCallback<MetaDataResponse>();
DropFunctionRequest.Builder builder = DropFunctionRequest.newBuilder();
for (Mutation m : functionData) {
MutationProto mp = ProtobufUtil.toProto(m);
builder.addTableMetadataMutations(mp.toByteString());
}
builder.setIfExists(ifExists);
builder.setClientVersion(VersionUtil.encodeVersion(PHOENIX_MAJOR_VERSION, PHOENIX_MINOR_VERSION, PHOENIX_PATCH_NUMBER));
instance.dropFunction(controller, builder.build(), rpcCallback);
if(controller.getFailedOn() != null) {
throw controller.getFailedOn();
}
return rpcCallback.get();
}
}, PhoenixDatabaseMetaData.SYSTEM_FUNCTION_NAME_BYTES);
return result;
}
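/** Invalidates the cached statistics for each of the given physical table names. */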
private void invalidateTables(final List<byte[]> tableNamesToDelete) {
if (tableNamesToDelete != null) {
for ( byte[] tableName : tableNamesToDelete ) {
tableStatsCache.invalidate(new ImmutableBytesPtr(tableName));
}
}
}
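/**
 * Disables and deletes each existing physical HBase table in the list and clears its region
 * cache, surfacing any IOException as a SQLException.
 */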
private void dropTables(final List<byte[]> tableNamesToDelete) throws SQLException {
HBaseAdmin admin = null;
SQLException sqlE = null;
try{
admin = new HBaseAdmin(config);
if (tableNamesToDelete != null){
for ( byte[] tableName : tableNamesToDelete ) {
if ( admin.tableExists(tableName) ) {
admin.disableTable(tableName);
admin.deleteTable(tableName);
clearTableRegionCache(tableName);
}
}
}
} catch (IOException e) {
sqlE = ServerUtil.parseServerException(e);
} finally {
try {
if (admin != null) {
admin.close();
}
} catch (IOException e) {
if (sqlE == null) {
sqlE = ServerUtil.parseServerException(e);
} else {
sqlE.setNextException(ServerUtil.parseServerException(e));
}
} finally {
if (sqlE != null) {
throw sqlE;
}
}
}
}
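/** Converts an HTableDescriptor's raw byte-keyed property map into a String-keyed map. */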
private static Map<String,Object> createPropertiesMap(Map<ImmutableBytesWritable,ImmutableBytesWritable> htableProps) {
Map<String,Object> props = Maps.newHashMapWithExpectedSize(htableProps.size());
for (Map.Entry<ImmutableBytesWritable,ImmutableBytesWritable> entry : htableProps.entrySet()) {
ImmutableBytesWritable key = entry.getKey();
ImmutableBytesWritable value = entry.getValue();
props.put(Bytes.toString(key.get(), key.getOffset(), key.getLength()), Bytes.toString(value.get(), value.getOffset(), value.getLength()));
}
return props;
}
private void ensureViewIndexTableCreated(PName tenantId, byte[] physicalIndexTableName, long timestamp) throws SQLException {
PTable table;
String name = Bytes.toString(
physicalIndexTableName,
MetaDataUtil.VIEW_INDEX_TABLE_PREFIX_BYTES.length,
physicalIndexTableName.length-MetaDataUtil.VIEW_INDEX_TABLE_PREFIX_BYTES.length);
try {
PMetaData metadata = latestMetaData;
if (metadata == null) {
throwConnectionClosedException();
}
table = metadata.getTableRef(new PTableKey(tenantId, name)).getTable();
if (table.getTimeStamp() >= timestamp) { // Table in cache is newer than client timestamp which shouldn't be the case
throw new TableNotFoundException(table.getSchemaName().getString(), table.getTableName().getString());
}
} catch (TableNotFoundException e) {
byte[] schemaName = Bytes.toBytes(SchemaUtil.getSchemaNameFromFullName(name));
byte[] tableName = Bytes.toBytes(SchemaUtil.getTableNameFromFullName(name));
MetaDataMutationResult result = this.getTable(null, schemaName, tableName, HConstants.LATEST_TIMESTAMP, timestamp);
table = result.getTable();
if (table == null) {
throw e;
}
}
ensureViewIndexTableCreated(table, timestamp);
}
private void ensureViewIndexTableCreated(PTable table, long timestamp) throws SQLException {
byte[] physicalTableName = table.getPhysicalName().getBytes();
HTableDescriptor htableDesc = this.getTableDescriptor(physicalTableName);
Map<String,Object> tableProps = createPropertiesMap(htableDesc.getValues());
List<Pair<byte[],Map<String,Object>>> families = Lists.newArrayListWithExpectedSize(Math.max(1, table.getColumnFamilies().size()+1));
if (families.isEmpty()) {
byte[] familyName = SchemaUtil.getEmptyColumnFamily(table);
Map<String,Object> familyProps = createPropertiesMap(htableDesc.getFamily(familyName).getValues());
families.add(new Pair<byte[],Map<String,Object>>(familyName, familyProps));
} else {
for (PColumnFamily family : table.getColumnFamilies()) {
byte[] familyName = family.getName().getBytes();
Map<String,Object> familyProps = createPropertiesMap(htableDesc.getFamily(familyName).getValues());
families.add(new Pair<byte[],Map<String,Object>>(familyName, familyProps));
}
// Always create default column family, because we don't know in advance if we'll
// need it for an index with no covered columns.
families.add(new Pair<byte[],Map<String,Object>>(table.getDefaultFamilyName().getBytes(), Collections.<String,Object>emptyMap()));
}
byte[][] splits = null;
if (table.getBucketNum() != null) {
splits = SaltingUtil.getSalteByteSplitPoints(table.getBucketNum());
}
ensureViewIndexTableCreated(physicalTableName, tableProps, families, splits, timestamp);
}
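/**
 * Adds columns and/or applies ALTER TABLE properties. HBase descriptor changes (including a
 * non-transactional to transactional transition of the table and its indexes) are pushed
 * first, the metadata mutations are then applied through the MetaDataService addColumn
 * endpoint, and the original descriptors are restored if that update fails after a
 * transactional transition.
 */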
@Override
public MetaDataMutationResult addColumn(final List<Mutation> tableMetaData, PTable table, Map<String, List<Pair<String,Object>>> stmtProperties, Set<String> colFamiliesForPColumnsToBeAdded) throws SQLException {
List<Pair<byte[], Map<String, Object>>> families = new ArrayList<>(stmtProperties.size());
Map<String, Object> tableProps = new HashMap<String, Object>();
Set<HTableDescriptor> tableDescriptors = Collections.emptySet();
Set<HTableDescriptor> origTableDescriptors = Collections.emptySet();
boolean nonTxToTx = false;
Pair<HTableDescriptor,HTableDescriptor> tableDescriptorPair = separateAndValidateProperties(table, stmtProperties, colFamiliesForPColumnsToBeAdded, families, tableProps);
HTableDescriptor tableDescriptor = tableDescriptorPair.getSecond();
HTableDescriptor origTableDescriptor = tableDescriptorPair.getFirst();
if (tableDescriptor != null) {
tableDescriptors = Sets.newHashSetWithExpectedSize(3 + table.getIndexes().size());
origTableDescriptors = Sets.newHashSetWithExpectedSize(3 + table.getIndexes().size());
tableDescriptors.add(tableDescriptor);
origTableDescriptors.add(origTableDescriptor);
nonTxToTx = Boolean.TRUE.equals(tableProps.get(TxConstants.READ_NON_TX_DATA));
/*
* If the table was transitioned from non transactional to transactional, we need
* to also transition the index tables.
*/
if (nonTxToTx) {
updateDescriptorForTx(table, tableProps, tableDescriptor, Boolean.TRUE.toString(), tableDescriptors, origTableDescriptors);
}
}
boolean success = false;
boolean metaDataUpdated = !tableDescriptors.isEmpty();
boolean pollingNeeded = !(!tableProps.isEmpty() && families.isEmpty() && colFamiliesForPColumnsToBeAdded.isEmpty());
MetaDataMutationResult result = null;
try {
boolean modifyHTable = true;
if (table.getType() == PTableType.VIEW) {
boolean canViewsAddNewCF = props.getBoolean(QueryServices.ALLOW_VIEWS_ADD_NEW_CF_BASE_TABLE,
QueryServicesOptions.DEFAULT_ALLOW_VIEWS_ADD_NEW_CF_BASE_TABLE);
// When adding a column to a view, base physical table should only be modified when new column families are being added.
modifyHTable = canViewsAddNewCF && !existingColumnFamiliesForBaseTable(table.getPhysicalName()).containsAll(colFamiliesForPColumnsToBeAdded);
}
if (modifyHTable) {
sendHBaseMetaData(tableDescriptors, pollingNeeded);
}
// Special case for call during drop table to ensure that the empty column family exists.
// In this case, we only include the table header row, as until we add schemaBytes and tableBytes
// as args to this function, we have no way of getting them in this case.
// TODO: change to if (tableMetaData.isEmpty()) once we pass through schemaBytes and tableBytes
// Also, could be used to update property values on ALTER TABLE t SET prop=xxx
if ((tableMetaData.isEmpty()) || (tableMetaData.size() == 1 && tableMetaData.get(0).isEmpty())) {
return new MetaDataMutationResult(MutationCode.NO_OP, System.currentTimeMillis(), table);
}
byte[][] rowKeyMetaData = new byte[3][];
PTableType tableType = table.getType();
Mutation m = tableMetaData.get(0);
byte[] rowKey = m.getRow();
SchemaUtil.getVarChars(rowKey, rowKeyMetaData);
byte[] tenantIdBytes = rowKeyMetaData[PhoenixDatabaseMetaData.TENANT_ID_INDEX];
byte[] schemaBytes = rowKeyMetaData[PhoenixDatabaseMetaData.SCHEMA_NAME_INDEX];
byte[] tableBytes = rowKeyMetaData[PhoenixDatabaseMetaData.TABLE_NAME_INDEX];
byte[] tableKey = SchemaUtil.getTableKey(tenantIdBytes, schemaBytes, tableBytes);
ImmutableBytesWritable ptr = new ImmutableBytesWritable();
result = metaDataCoprocessorExec(tableKey,
new Batch.Call<MetaDataService, MetaDataResponse>() {
@Override
public MetaDataResponse call(MetaDataService instance) throws IOException {
ServerRpcController controller = new ServerRpcController();
BlockingRpcCallback<MetaDataResponse> rpcCallback =
new BlockingRpcCallback<MetaDataResponse>();
AddColumnRequest.Builder builder = AddColumnRequest.newBuilder();
for (Mutation m : tableMetaData) {
MutationProto mp = ProtobufUtil.toProto(m);
builder.addTableMetadataMutations(mp.toByteString());
}
builder.setClientVersion(VersionUtil.encodeVersion(PHOENIX_MAJOR_VERSION, PHOENIX_MINOR_VERSION, PHOENIX_PATCH_NUMBER));
instance.addColumn(controller, builder.build(), rpcCallback);
if(controller.getFailedOn() != null) {
throw controller.getFailedOn();
}
return rpcCallback.get();
}
});
if (result.getMutationCode() == MutationCode.COLUMN_NOT_FOUND || result.getMutationCode() == MutationCode.TABLE_ALREADY_EXISTS) { // Success
success = true;
// Flush the table if transitioning DISABLE_WAL from TRUE to FALSE
if ( MetaDataUtil.getMutationValue(m,PhoenixDatabaseMetaData.DISABLE_WAL_BYTES, kvBuilder, ptr)
&& Boolean.FALSE.equals(PBoolean.INSTANCE.toObject(ptr))) {
flushTable(table.getPhysicalName().getBytes());
}
if (tableType == PTableType.TABLE) {
// If we're changing MULTI_TENANT to true or false, create or drop the view index table
if (MetaDataUtil.getMutationValue(m, PhoenixDatabaseMetaData.MULTI_TENANT_BYTES, kvBuilder, ptr)){
long timestamp = MetaDataUtil.getClientTimeStamp(m);
if (Boolean.TRUE.equals(PBoolean.INSTANCE.toObject(ptr.get(), ptr.getOffset(), ptr.getLength()))) {
this.ensureViewIndexTableCreated(table, timestamp);
} else {
this.ensureViewIndexTableDropped(table.getPhysicalName().getBytes(), timestamp);
}
}
}
}
} finally {
// If we weren't successful with our metadata update
// and we've already pushed the HBase metadata changes to the server
// and we've tried to go from non transactional to transactional
// then we must undo the metadata change otherwise the table will
// no longer function correctly.
// Note that if this fails, we're in a corrupt state.
if (!success && metaDataUpdated && nonTxToTx) {
sendHBaseMetaData(origTableDescriptors, pollingNeeded);
}
}
return result;
}
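/**
 * Prepares the table descriptor and the descriptors of all its indexes (including any shared
 * view index and local index tables) for a transactional transition by aligning max versions
 * and Tephra TTL settings and refreshing the coprocessors.
 */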
private void updateDescriptorForTx(PTable table, Map<String, Object> tableProps, HTableDescriptor tableDescriptor,
String txValue, Set<HTableDescriptor> descriptorsToUpdate, Set<HTableDescriptor> origDescriptors) throws SQLException {
HBaseAdmin admin = null;
byte[] physicalTableName = table.getPhysicalName().getBytes();
try {
admin = new HBaseAdmin(config);
setTransactional(tableDescriptor, table.getType(), txValue, tableProps);
Map<String, Object> indexTableProps;
if (txValue == null) {
indexTableProps = Collections.<String,Object>emptyMap();
} else {
indexTableProps = Maps.newHashMapWithExpectedSize(1);
indexTableProps.put(TxConstants.READ_NON_TX_DATA, Boolean.valueOf(txValue));
}
for (PTable index : table.getIndexes()) {
HTableDescriptor indexDescriptor = admin.getTableDescriptor(index.getPhysicalName().getBytes());
origDescriptors.add(indexDescriptor);
indexDescriptor = new HTableDescriptor(indexDescriptor);
descriptorsToUpdate.add(indexDescriptor);
if (index.getColumnFamilies().isEmpty()) {
byte[] dataFamilyName = SchemaUtil.getEmptyColumnFamily(table);
byte[] indexFamilyName = SchemaUtil.getEmptyColumnFamily(index);
HColumnDescriptor indexColDescriptor = indexDescriptor.getFamily(indexFamilyName);
HColumnDescriptor tableColDescriptor = tableDescriptor.getFamily(dataFamilyName);
indexColDescriptor.setMaxVersions(tableColDescriptor.getMaxVersions());
indexColDescriptor.setValue(TxConstants.PROPERTY_TTL, tableColDescriptor.getValue(TxConstants.PROPERTY_TTL));
} else {
for (PColumnFamily family : index.getColumnFamilies()) {
byte[] familyName = family.getName().getBytes();
indexDescriptor.getFamily(familyName).setMaxVersions(tableDescriptor.getFamily(familyName).getMaxVersions());
HColumnDescriptor indexColDescriptor = indexDescriptor.getFamily(familyName);
HColumnDescriptor tableColDescriptor = tableDescriptor.getFamily(familyName);
indexColDescriptor.setMaxVersions(tableColDescriptor.getMaxVersions());
indexColDescriptor.setValue(TxConstants.PROPERTY_TTL, tableColDescriptor.getValue(TxConstants.PROPERTY_TTL));
}
}
setTransactional(indexDescriptor, index.getType(), txValue, indexTableProps);
}
try {
HTableDescriptor indexDescriptor = admin.getTableDescriptor(MetaDataUtil.getViewIndexPhysicalName(physicalTableName));
origDescriptors.add(indexDescriptor);
indexDescriptor = new HTableDescriptor(indexDescriptor);
descriptorsToUpdate.add(indexDescriptor);
setSharedIndexMaxVersion(table, tableDescriptor, indexDescriptor);
setTransactional(indexDescriptor, PTableType.INDEX, txValue, indexTableProps);
} catch (org.apache.hadoop.hbase.TableNotFoundException ignore) {
// Ignore, as we may never have created a view index table
}
try {
HTableDescriptor indexDescriptor = admin.getTableDescriptor(MetaDataUtil.getLocalIndexPhysicalName(physicalTableName));
origDescriptors.add(indexDescriptor);
indexDescriptor = new HTableDescriptor(indexDescriptor);
descriptorsToUpdate.add(indexDescriptor);
setSharedIndexMaxVersion(table, tableDescriptor, indexDescriptor);
setTransactional(indexDescriptor, PTableType.INDEX, txValue, indexTableProps);
} catch (org.apache.hadoop.hbase.TableNotFoundException ignore) {
// Ignore, as we may never have created a local index table
}
} catch (IOException e) {
throw ServerUtil.parseServerException(e);
} finally {
try {
if (admin != null) admin.close();
} catch (IOException e) {
logger.warn("Could not close admin",e);
}
}
}
private void setSharedIndexMaxVersion(PTable table, HTableDescriptor tableDescriptor,
HTableDescriptor indexDescriptor) {
if (table.getColumnFamilies().isEmpty()) {
byte[] familyName = SchemaUtil.getEmptyColumnFamily(table);
HColumnDescriptor indexColDescriptor = indexDescriptor.getFamily(familyName);
HColumnDescriptor tableColDescriptor = tableDescriptor.getFamily(familyName);
indexColDescriptor.setMaxVersions(tableColDescriptor.getMaxVersions());
indexColDescriptor.setValue(TxConstants.PROPERTY_TTL, tableColDescriptor.getValue(TxConstants.PROPERTY_TTL));
} else {
for (PColumnFamily family : table.getColumnFamilies()) {
byte[] familyName = family.getName().getBytes();
HColumnDescriptor indexColDescriptor = indexDescriptor.getFamily(familyName);
if (indexColDescriptor != null) {
HColumnDescriptor tableColDescriptor = tableDescriptor.getFamily(familyName);
indexColDescriptor.setMaxVersions(tableColDescriptor.getMaxVersions());
indexColDescriptor.setValue(TxConstants.PROPERTY_TTL, tableColDescriptor.getValue(TxConstants.PROPERTY_TTL));
}
}
}
}
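/**
 * Pushes each modified HTableDescriptor to HBase, optionally polling until the modification
 * completes, and maps IO, interrupt and timeout failures onto SQLExceptions.
 */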
private void sendHBaseMetaData(Set<HTableDescriptor> tableDescriptors, boolean pollingNeeded) throws SQLException {
SQLException sqlE = null;
for (HTableDescriptor descriptor : tableDescriptors) {
try {
modifyTable(descriptor.getName(), descriptor, pollingNeeded);
} catch (IOException e) {
sqlE = ServerUtil.parseServerException(e);
} catch (InterruptedException e) {
// restore the interrupt status
Thread.currentThread().interrupt();
sqlE = new SQLExceptionInfo.Builder(SQLExceptionCode.INTERRUPTED_EXCEPTION).setRootCause(e).build().buildException();
} catch (TimeoutException e) {
sqlE = new SQLExceptionInfo.Builder(SQLExceptionCode.OPERATION_TIMED_OUT).setRootCause(e.getCause() != null ? e.getCause() : e).build().buildException();
} finally {
if (sqlE != null) {
throw sqlE;
}
}
}
}
private void setTransactional(HTableDescriptor tableDescriptor, PTableType tableType, String txValue, Map<String, Object> tableProps) throws SQLException {
if (txValue == null) {
tableDescriptor.remove(TxConstants.READ_NON_TX_DATA);
} else {
tableDescriptor.setValue(TxConstants.READ_NON_TX_DATA, txValue);
}
this.addCoprocessors(tableDescriptor.getName(), tableDescriptor, tableType, tableProps);
}
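/**
 * Splits ALTER TABLE properties into HTable-level, per-column-family and Phoenix table
 * properties, validates them against the table type and the columns being added, and returns
 * the original and updated HTableDescriptor (nulls when no HBase-level change is needed).
 */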
private Pair<HTableDescriptor,HTableDescriptor> separateAndValidateProperties(PTable table, Map<String, List<Pair<String, Object>>> properties, Set<String> colFamiliesForPColumnsToBeAdded, List<Pair<byte[], Map<String, Object>>> families, Map<String, Object> tableProps) throws SQLException {
Map<String, Map<String, Object>> stmtFamiliesPropsMap = new HashMap<>(properties.size());
Map<String,Object> commonFamilyProps = new HashMap<>();
boolean addingColumns = colFamiliesForPColumnsToBeAdded != null && !colFamiliesForPColumnsToBeAdded.isEmpty();
HashSet<String> existingColumnFamilies = existingColumnFamilies(table);
Map<String, Map<String, Object>> allFamiliesProps = new HashMap<>(existingColumnFamilies.size());
boolean isTransactional = table.isTransactional();
boolean willBeTransactional = false;
boolean isOrWillBeTransactional = isTransactional;
Integer newTTL = null;
for (String family : properties.keySet()) {
List<Pair<String, Object>> propsList = properties.get(family);
if (propsList != null && propsList.size() > 0) {
Map<String, Object> colFamilyPropsMap = new HashMap<String, Object>(propsList.size());
for (Pair<String, Object> prop : propsList) {
String propName = prop.getFirst();
Object propValue = prop.getSecond();
if ((isHTableProperty(propName) || TableProperty.isPhoenixTableProperty(propName)) && addingColumns) {
// setting HTable and PhoenixTable properties while adding a column is not allowed.
throw new SQLExceptionInfo.Builder(SQLExceptionCode.CANNOT_SET_TABLE_PROPERTY_ADD_COLUMN)
.setMessage("Property: " + propName).build()
.buildException();
}
if (isHTableProperty(propName)) {
// Can't have a column family name for a property that's an HTableProperty
if (!family.equals(QueryConstants.ALL_FAMILY_PROPERTIES_KEY)) {
throw new SQLExceptionInfo.Builder(SQLExceptionCode.COLUMN_FAMILY_NOT_ALLOWED_TABLE_PROPERTY)
.setMessage("Column Family: " + family + ", Property: " + propName).build()
.buildException();
}
tableProps.put(propName, propValue);
} else {
if (TableProperty.isPhoenixTableProperty(propName)) {
TableProperty.valueOf(propName).validate(true, !family.equals(QueryConstants.ALL_FAMILY_PROPERTIES_KEY), table.getType());
if (propName.equals(TTL)) {
newTTL = ((Number)prop.getSecond()).intValue();
// Even though TTL is really a HColumnProperty we treat it specially.
// We enforce that all column families have the same TTL.
commonFamilyProps.put(propName, prop.getSecond());
} else if (propName.equals(PhoenixDatabaseMetaData.TRANSACTIONAL) && Boolean.TRUE.equals(propValue)) {
willBeTransactional = isOrWillBeTransactional = true;
tableProps.put(TxConstants.READ_NON_TX_DATA, propValue);
}
} else {
if (isHColumnProperty(propName)) {
if (family.equals(QueryConstants.ALL_FAMILY_PROPERTIES_KEY)) {
commonFamilyProps.put(propName, propValue);
} else {
colFamilyPropsMap.put(propName, propValue);
}
} else {
// invalid property - neither an HTableProp, HColumnProp, nor PhoenixTableProp
// FIXME: This isn't getting triggered as currently a property gets evaluated
// as an HTableProp if it's neither an HColumnProp nor a PhoenixTableProp.
throw new SQLExceptionInfo.Builder(SQLExceptionCode.CANNOT_ALTER_PROPERTY)
.setMessage("Column Family: " + family + ", Property: " + propName).build()
.buildException();
}
}
}
}
if (!colFamilyPropsMap.isEmpty()) {
stmtFamiliesPropsMap.put(family, colFamilyPropsMap);
}
}
}
commonFamilyProps = Collections.unmodifiableMap(commonFamilyProps);
boolean isAddingPkColOnly = colFamiliesForPColumnsToBeAdded.size() == 1 && colFamiliesForPColumnsToBeAdded.contains(null);
if (!commonFamilyProps.isEmpty()) {
if (!addingColumns) {
// Add the common family props to all existing column families
for (String existingColFamily : existingColumnFamilies) {
Map<String, Object> m = new HashMap<String, Object>(commonFamilyProps.size());
m.putAll(commonFamilyProps);
allFamiliesProps.put(existingColFamily, m);
}
} else {
// Add the common family props to the column families of the columns being added
for (String colFamily : colFamiliesForPColumnsToBeAdded) {
if (colFamily != null) {
// only set properties for key value columns
Map<String, Object> m = new HashMap<String, Object>(commonFamilyProps.size());
m.putAll(commonFamilyProps);
allFamiliesProps.put(colFamily, m);
} else if (isAddingPkColOnly) {
// Setting HColumnProperty for a pk column is invalid
// because it will be part of the row key and not a key value column family.
// However, if both pk cols as well as key value columns are getting added
// together, then it's allowed. The above if block will make sure that we add properties
// only for the kv cols and not pk cols.
throw new SQLExceptionInfo.Builder(SQLExceptionCode.SET_UNSUPPORTED_PROP_ON_ALTER_TABLE)
.build().buildException();
}
}
}
}
// Now go through the column family properties specified in the statement
// and merge them with the common family properties.
for (String f : stmtFamiliesPropsMap.keySet()) {
if (!addingColumns && !existingColumnFamilies.contains(f)) {
throw new ColumnFamilyNotFoundException(f);
}
if (addingColumns && !colFamiliesForPColumnsToBeAdded.contains(f)) {
throw new SQLExceptionInfo.Builder(SQLExceptionCode.CANNOT_SET_PROPERTY_FOR_COLUMN_NOT_ADDED).build().buildException();
}
Map<String, Object> commonProps = allFamiliesProps.get(f);
Map<String, Object> stmtProps = stmtFamiliesPropsMap.get(f);
if (commonProps != null) {
if (stmtProps != null) {
// merge common props with statement props for the family
commonProps.putAll(stmtProps);
}
} else {
// if no common props were specified, then assign family specific props
if (stmtProps != null) {
allFamiliesProps.put(f, stmtProps);
}
}
}
// Case where a column family is being added but no properties were specified.
// For example, in DROP COLUMN when a new empty CF needs to be added since all
// the columns of the existing empty CF are getting dropped. Or the case
// when one is just adding a column for a column family like this:
// ALTER TABLE ADD CF.COL
for (String cf : colFamiliesForPColumnsToBeAdded) {
if (cf != null && allFamiliesProps.get(cf) == null) {
allFamiliesProps.put(cf, new HashMap<String, Object>());
}
}
if (table.getColumnFamilies().isEmpty() && !addingColumns && !commonFamilyProps.isEmpty()) {
allFamiliesProps.put(Bytes.toString(table.getDefaultFamilyName() == null ? QueryConstants.DEFAULT_COLUMN_FAMILY_BYTES : table.getDefaultFamilyName().getBytes() ), commonFamilyProps);
}
// Views are not allowed to have any of these properties.
if (table.getType() == PTableType.VIEW && (!stmtFamiliesPropsMap.isEmpty() || !commonFamilyProps.isEmpty() || !tableProps.isEmpty())) {
throw new SQLExceptionInfo.Builder(SQLExceptionCode.VIEW_WITH_PROPERTIES).build()
.buildException();
}
HTableDescriptor newTableDescriptor = null;
HTableDescriptor origTableDescriptor = null;
if (!allFamiliesProps.isEmpty() || !tableProps.isEmpty()) {
byte[] tableNameBytes = Bytes.toBytes(table.getPhysicalName().getString());
HTableDescriptor existingTableDescriptor = origTableDescriptor = getTableDescriptor(tableNameBytes);
newTableDescriptor = new HTableDescriptor(existingTableDescriptor);
if (!tableProps.isEmpty()) {
// add all the table properties to the existing table descriptor
for (Entry<String, Object> entry : tableProps.entrySet()) {
newTableDescriptor.setValue(entry.getKey(), entry.getValue() != null ? entry.getValue().toString() : null);
}
}
if (addingColumns) {
// Make sure that all the CFs of the table have the same TTL as the empty CF.
setTTLForNewCFs(allFamiliesProps, table, newTableDescriptor, newTTL);
}
// Set TTL on all table column families, even if they're not referenced here
if (newTTL != null) {
for (PColumnFamily family : table.getColumnFamilies()) {
if (!allFamiliesProps.containsKey(family.getName().getString())) {
Map<String,Object> familyProps = Maps.newHashMapWithExpectedSize(1);
familyProps.put(TTL, newTTL);
allFamiliesProps.put(family.getName().getString(), familyProps);
}
}
}
Integer defaultTxMaxVersions = null;
if (isOrWillBeTransactional) {
// Calculate default for max versions
Map<String, Object> emptyFamilyProps = allFamiliesProps.get(SchemaUtil.getEmptyColumnFamilyAsString(table));
if (emptyFamilyProps != null) {
defaultTxMaxVersions = (Integer)emptyFamilyProps.get(HConstants.VERSIONS);
}
if (defaultTxMaxVersions == null) {
if (isTransactional) {
defaultTxMaxVersions = newTableDescriptor.getFamily(SchemaUtil.getEmptyColumnFamily(table)).getMaxVersions();
} else {
defaultTxMaxVersions =
this.getProps().getInt(
QueryServices.MAX_VERSIONS_TRANSACTIONAL_ATTRIB,
QueryServicesOptions.DEFAULT_MAX_VERSIONS_TRANSACTIONAL);
}
}
if (willBeTransactional) {
// Set VERSIONS for all column families when transitioning to transactional
for (PColumnFamily family : table.getColumnFamilies()) {
if (!allFamiliesProps.containsKey(family.getName().getString())) {
Map<String,Object> familyProps = Maps.newHashMapWithExpectedSize(1);
familyProps.put(HConstants.VERSIONS, defaultTxMaxVersions);
allFamiliesProps.put(family.getName().getString(), familyProps);
}
}
}
}
// Set Tephra's TTL property based on HBase property if we're
// transitioning to become transactional or setting TTL on
// an already transactional table.
if (isOrWillBeTransactional) {
int ttl = getTTL(table, newTableDescriptor, newTTL);
if (ttl != HColumnDescriptor.DEFAULT_TTL) {
for (Map.Entry<String, Map<String, Object>> entry : allFamiliesProps.entrySet()) {
Map<String, Object> props = entry.getValue();
if (props == null) {
props = new HashMap<String, Object>();
}
props.put(TxConstants.PROPERTY_TTL, ttl);
// Remove HBase TTL if we're not transitioning an existing table to become transactional
// or if the existing transactional table wasn't originally non transactional.
if (!willBeTransactional && !Boolean.valueOf(newTableDescriptor.getValue(TxConstants.READ_NON_TX_DATA))) {
props.remove(TTL);
}
}
}
}
for (Entry<String, Map<String, Object>> entry : allFamiliesProps.entrySet()) {
Map<String,Object> familyProps = entry.getValue();
if (isOrWillBeTransactional) {
if (!familyProps.containsKey(HConstants.VERSIONS)) {
familyProps.put(HConstants.VERSIONS, defaultTxMaxVersions);
}
}
byte[] cf = Bytes.toBytes(entry.getKey());
HColumnDescriptor colDescriptor = newTableDescriptor.getFamily(cf);
if (colDescriptor == null) {
// new column family
colDescriptor = generateColumnFamilyDescriptor(new Pair<>(cf, familyProps), table.getType());
newTableDescriptor.addFamily(colDescriptor);
} else {
modifyColumnFamilyDescriptor(colDescriptor, familyProps);
}
if (isOrWillBeTransactional) {
checkTransactionalVersionsValue(colDescriptor);
}
}
}
return new Pair<>(origTableDescriptor, newTableDescriptor);
}
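/** Transactional tables require more than one cell version; reject descriptors that allow only one. */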
private void checkTransactionalVersionsValue(HColumnDescriptor colDescriptor) throws SQLException {
int maxVersions = colDescriptor.getMaxVersions();
if (maxVersions <= 1) {
throw new SQLExceptionInfo.Builder(SQLExceptionCode.TX_MAX_VERSIONS_MUST_BE_GREATER_THAN_ONE)
.setFamilyName(colDescriptor.getNameAsString())
.build().buildException();
}
}
private boolean isHColumnProperty(String propName) {
return HColumnDescriptor.getDefaultValues().containsKey(propName);
}
private boolean isHTableProperty(String propName) {
return !isHColumnProperty(propName) && !TableProperty.isPhoenixTableProperty(propName);
}
private HashSet<String> existingColumnFamiliesForBaseTable(PName baseTableName) throws TableNotFoundException {
synchronized (latestMetaDataLock) {
throwConnectionClosedIfNullMetaData();
PTable table = latestMetaData.getTableRef(new PTableKey(null, baseTableName.getString())).getTable();
latestMetaDataLock.notifyAll();
return existingColumnFamilies(table);
}
}
private HashSet<String> existingColumnFamilies(PTable table) {
List<PColumnFamily> cfs = table.getColumnFamilies();
HashSet<String> cfNames = new HashSet<>(cfs.size());
for (PColumnFamily cf : table.getColumnFamilies()) {
cfNames.add(cf.getName().getString());
}
return cfNames;
}
private static int getTTL(PTable table, HTableDescriptor tableDesc, Integer newTTL) throws SQLException {
// If we're setting TTL now, then use that value. Otherwise, use empty column family value
int ttl = newTTL != null ? newTTL
: tableDesc.getFamily(SchemaUtil.getEmptyColumnFamily(table)).getTimeToLive();
return ttl;
}
private static void setTTLForNewCFs(Map<String, Map<String, Object>> familyProps, PTable table,
HTableDescriptor tableDesc, Integer newTTL) throws SQLException {
if (!familyProps.isEmpty()) {
int ttl = getTTL(table, tableDesc, newTTL);
for (Map.Entry<String, Map<String, Object>> entry : familyProps.entrySet()) {
Map<String, Object> props = entry.getValue();
if (props == null) {
props = new HashMap<String, Object>();
}
props.put(TTL, ttl);
}
}
}
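/**
 * Drops columns through the MetaDataService dropColumn endpoint and, when the server reports
 * physical tables to delete, removes them if DROP_METADATA is enabled and invalidates their
 * stats cache entries.
 */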
@Override
public MetaDataMutationResult dropColumn(final List<Mutation> tableMetaData, PTableType tableType) throws SQLException {
byte[][] rowKeyMetadata = new byte[3][];
SchemaUtil.getVarChars(tableMetaData.get(0).getRow(), rowKeyMetadata);
byte[] tenantIdBytes = rowKeyMetadata[PhoenixDatabaseMetaData.TENANT_ID_INDEX];
byte[] schemaBytes = rowKeyMetadata[PhoenixDatabaseMetaData.SCHEMA_NAME_INDEX];
byte[] tableBytes = rowKeyMetadata[PhoenixDatabaseMetaData.TABLE_NAME_INDEX];
byte[] tableKey = SchemaUtil.getTableKey(tenantIdBytes, schemaBytes, tableBytes);
MetaDataMutationResult result = metaDataCoprocessorExec(tableKey,
new Batch.Call<MetaDataService, MetaDataResponse>() {
@Override
public MetaDataResponse call(MetaDataService instance) throws IOException {
ServerRpcController controller = new ServerRpcController();
BlockingRpcCallback<MetaDataResponse> rpcCallback =
new BlockingRpcCallback<MetaDataResponse>();
DropColumnRequest.Builder builder = DropColumnRequest.newBuilder();
for (Mutation m : tableMetaData) {
MutationProto mp = ProtobufUtil.toProto(m);
builder.addTableMetadataMutations(mp.toByteString());
}
builder.setClientVersion(VersionUtil.encodeVersion(PHOENIX_MAJOR_VERSION, PHOENIX_MINOR_VERSION, PHOENIX_PATCH_NUMBER));
instance.dropColumn(controller, builder.build(), rpcCallback);
if(controller.getFailedOn() != null) {
throw controller.getFailedOn();
}
return rpcCallback.get();
}
});
final MutationCode code = result.getMutationCode();
switch(code) {
case TABLE_ALREADY_EXISTS:
final ReadOnlyProps props = this.getProps();
final boolean dropMetadata = props.getBoolean(DROP_METADATA_ATTRIB, DEFAULT_DROP_METADATA);
if (dropMetadata) {
dropTables(result.getTableNamesToDelete());
}
invalidateTables(result.getTableNamesToDelete());
break;
default:
break;
}
return result;
}
/**
* This closes the passed connection.
*/
private PhoenixConnection addColumn(PhoenixConnection oldMetaConnection, String tableName, long timestamp, String columns, boolean addIfNotExists) throws SQLException {
Properties props = PropertiesUtil.deepCopy(oldMetaConnection.getClientInfo());
props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(timestamp));
// Cannot go through DriverManager or you end up in an infinite loop because it'll call init again
PhoenixConnection metaConnection = new PhoenixConnection(oldMetaConnection, this, props);
SQLException sqlE = null;
try {
metaConnection.createStatement().executeUpdate("ALTER TABLE " + tableName + " ADD " + (addIfNotExists ? " IF NOT EXISTS " : "") + columns );
} catch (NewerTableAlreadyExistsException e) {
logger.warn("Table already modified at this timestamp, so assuming add of these columns already done: " + columns);
} catch (SQLException e) {
logger.warn("Add column failed due to:" + e);
sqlE = e;
} finally {
try {
oldMetaConnection.close();
} catch (SQLException e) {
if (sqlE != null) {
sqlE.setNextException(e);
} else {
sqlE = e;
}
}
if (sqlE != null) {
throw sqlE;
}
}
return metaConnection;
}
/**
* Keeping this to use for further upgrades. This method closes the oldMetaConnection.
*/
private PhoenixConnection addColumnsIfNotExists(PhoenixConnection oldMetaConnection,
String tableName, long timestamp, String columns) throws SQLException {
return addColumn(oldMetaConnection, tableName, timestamp, columns, true);
}
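/**
 * Lazily initializes the query services: opens the HBase connection, creates or upgrades the
 * SYSTEM.CATALOG, SYSTEM.SEQUENCE, SYSTEM.STATS and SYSTEM.FUNCTION tables as needed, schedules
 * lease renewal, and records any failure so later callers fail fast with the same exception.
 */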
@Override
public void init(final String url, final Properties props) throws SQLException {
try {
PhoenixContextExecutor.call(new Callable<Void>() {
@Override
public Void call() throws Exception {
if (initialized) {
if (initializationException != null) {
// Throw previous initialization exception, as we won't reuse this instance
throw initializationException;
}
return null;
}
synchronized (ConnectionQueryServicesImpl.this) {
if (initialized) {
if (initializationException != null) {
// Throw previous initialization exception, as we won't reuse this instance
throw initializationException;
}
return null;
}
checkClosed();
PhoenixConnection metaConnection = null;
try {
openConnection();
Properties scnProps = PropertiesUtil.deepCopy(props);
scnProps.setProperty(
PhoenixRuntime.CURRENT_SCN_ATTRIB,
Long.toString(MetaDataProtocol.MIN_SYSTEM_TABLE_TIMESTAMP));
scnProps.remove(PhoenixRuntime.TENANT_ID_ATTRIB);
String globalUrl = JDBCUtil.removeProperty(url, PhoenixRuntime.TENANT_ID_ATTRIB);
metaConnection = new PhoenixConnection(
ConnectionQueryServicesImpl.this, globalUrl, scnProps, newEmptyMetaData());
try {
metaConnection.createStatement().executeUpdate(QueryConstants.CREATE_TABLE_METADATA);
} catch (NewerTableAlreadyExistsException ignore) {
// Ignore, as this will happen if the SYSTEM.CATALOG already exists at this fixed timestamp.
// A TableAlreadyExistsException is not thrown, since the table only exists *after* this fixed timestamp.
} catch (TableAlreadyExistsException e) {
// This will occur if we have an older SYSTEM.CATALOG and we need to update it to include
// any new columns we've added.
long currentServerSideTableTimeStamp = e.getTable().getTimeStamp();
String columnsToAdd = "";
if(currentServerSideTableTimeStamp < MetaDataProtocol.MIN_SYSTEM_TABLE_TIMESTAMP_4_3_0) {
// We know that we always need to add the STORE_NULLS column for 4.3 release
columnsToAdd += "," + PhoenixDatabaseMetaData.STORE_NULLS + " " + PBoolean.INSTANCE.getSqlTypeName();
HBaseAdmin admin = null;
try {
admin = getAdmin();
HTableDescriptor[] localIndexTables = admin.listTables(MetaDataUtil.LOCAL_INDEX_TABLE_PREFIX+".*");
for (HTableDescriptor table : localIndexTables) {
if (table.getValue(MetaDataUtil.PARENT_TABLE_KEY) == null
&& table.getValue(MetaDataUtil.IS_LOCAL_INDEX_TABLE_PROP_NAME) != null) {
table.setValue(MetaDataUtil.PARENT_TABLE_KEY,
MetaDataUtil.getUserTableName(table
.getNameAsString()));
// Explicitly disable, modify and enable the table to ensure co-location of data
// and index regions. If we just modify the table descriptor while online schema
// change is enabled, the region may be reopened on the same region server instead of following the data region.
admin.disableTable(table.getTableName());
admin.modifyTable(table.getTableName(), table);
admin.enableTable(table.getTableName());
}
}
} finally {
if (admin != null) admin.close();
}
}
// If the server side schema is before MIN_SYSTEM_TABLE_TIMESTAMP_4_1_0 then
// we need to add INDEX_TYPE and INDEX_DISABLE_TIMESTAMP columns too.
// TODO: Once https://issues.apache.org/jira/browse/PHOENIX-1614 is fixed,
// we should just have a ALTER TABLE ADD IF NOT EXISTS statement with all
// the column names that have been added to SYSTEM.CATALOG since 4.0.
if (currentServerSideTableTimeStamp < MetaDataProtocol.MIN_SYSTEM_TABLE_TIMESTAMP_4_1_0) {
columnsToAdd += ", " + PhoenixDatabaseMetaData.INDEX_TYPE + " " + PUnsignedTinyint.INSTANCE.getSqlTypeName()
+ ", " + PhoenixDatabaseMetaData.INDEX_DISABLE_TIMESTAMP + " " + PLong.INSTANCE.getSqlTypeName();
}
// If we have some new columns from 4.1-4.3 to add, add them now.
if (!columnsToAdd.isEmpty()) {
// Ugh..need to assign to another local variable to keep eclipse happy.
PhoenixConnection newMetaConnection = addColumnsIfNotExists(metaConnection,
PhoenixDatabaseMetaData.SYSTEM_CATALOG,
MetaDataProtocol.MIN_SYSTEM_TABLE_TIMESTAMP_4_3_0, columnsToAdd);
metaConnection = newMetaConnection;
}
if (currentServerSideTableTimeStamp < MetaDataProtocol.MIN_SYSTEM_TABLE_TIMESTAMP_4_5_0) {
columnsToAdd = PhoenixDatabaseMetaData.BASE_COLUMN_COUNT + " "
+ PInteger.INSTANCE.getSqlTypeName();
try {
metaConnection = addColumn(metaConnection, PhoenixDatabaseMetaData.SYSTEM_CATALOG,
MetaDataProtocol.MIN_SYSTEM_TABLE_TIMESTAMP_4_5_0, columnsToAdd, false);
upgradeTo4_5_0(metaConnection);
} catch (ColumnAlreadyExistsException ignored) {
/*
* Upgrade to 4.5 is a slightly special case. We use the fact that the column
* BASE_COLUMN_COUNT is already part of the meta-data schema as the signal that
* the server side upgrade has finished or is in progress.
*/
logger.debug("No need to run 4.5 upgrade");
}
Properties props = PropertiesUtil.deepCopy(metaConnection.getClientInfo());
props.remove(PhoenixRuntime.CURRENT_SCN_ATTRIB);
props.remove(PhoenixRuntime.TENANT_ID_ATTRIB);
PhoenixConnection conn = new PhoenixConnection(ConnectionQueryServicesImpl.this, metaConnection.getURL(), props, metaConnection.getMetaDataCache());
try {
List<String> tablesNeedingUpgrade = UpgradeUtil.getPhysicalTablesWithDescRowKey(conn);
if (!tablesNeedingUpgrade.isEmpty()) {
logger.warn("The following tables require upgrade due to a bug causing the row key to be incorrect for descending columns and ascending BINARY columns (PHOENIX-2067 and PHOENIX-2120):\n" + Joiner.on(' ').join(tablesNeedingUpgrade) + "\nTo upgrade issue the \"bin/psql.py -u\" command.");
}
List<String> unsupportedTables = UpgradeUtil.getPhysicalTablesWithDescVarbinaryRowKey(conn);
if (!unsupportedTables.isEmpty()) {
logger.warn("The following tables use an unsupported VARBINARY DESC construct and need to be changed:\n" + Joiner.on(' ').join(unsupportedTables));
}
} catch (Exception ex) {
logger.error("Unable to determine tables requiring upgrade due to PHOENIX-2067", ex);
} finally {
conn.close();
}
}
if (currentServerSideTableTimeStamp < MetaDataProtocol.MIN_SYSTEM_TABLE_TIMESTAMP_4_6_0) {
columnsToAdd = PhoenixDatabaseMetaData.IS_ROW_TIMESTAMP + " " + PBoolean.INSTANCE.getSqlTypeName();
metaConnection = addColumnsIfNotExists(metaConnection, PhoenixDatabaseMetaData.SYSTEM_CATALOG,
MetaDataProtocol.MIN_SYSTEM_TABLE_TIMESTAMP_4_6_0, columnsToAdd);
}
if(currentServerSideTableTimeStamp < MetaDataProtocol.MIN_SYSTEM_TABLE_TIMESTAMP_4_7_0) {
// Add these columns one at a time, each with different timestamps so that if folks have
// run the upgrade code already for a snapshot, we'll still enter this block (and do the
// parts we haven't yet done).
metaConnection = addColumnsIfNotExists(metaConnection, PhoenixDatabaseMetaData.SYSTEM_CATALOG, MetaDataProtocol.MIN_SYSTEM_TABLE_TIMESTAMP_4_7_0 - 2,
PhoenixDatabaseMetaData.TRANSACTIONAL + " " + PBoolean.INSTANCE.getSqlTypeName());
metaConnection = addColumnsIfNotExists(metaConnection, PhoenixDatabaseMetaData.SYSTEM_CATALOG, MetaDataProtocol.MIN_SYSTEM_TABLE_TIMESTAMP_4_7_0 - 1,
PhoenixDatabaseMetaData.UPDATE_CACHE_FREQUENCY + " " + PLong.INSTANCE.getSqlTypeName());
setImmutableTableIndexesImmutable(metaConnection);
// Drop old stats table so that new stats table is created
metaConnection = dropStatsTable(metaConnection,
MetaDataProtocol.MIN_SYSTEM_TABLE_TIMESTAMP_4_7_0);
// Clear the server cache so the above changes make it over to any clients
// that already have cached data.
clearCache();
}
}
int nSaltBuckets = ConnectionQueryServicesImpl.this.props.getInt(QueryServices.SEQUENCE_SALT_BUCKETS_ATTRIB,
QueryServicesOptions.DEFAULT_SEQUENCE_TABLE_SALT_BUCKETS);
try {
String createSequenceTable = Sequence.getCreateTableStatement(nSaltBuckets);
metaConnection.createStatement().executeUpdate(createSequenceTable);
nSequenceSaltBuckets = nSaltBuckets;
} catch (NewerTableAlreadyExistsException e) {
// Ignore, as this will happen if the SYSTEM.SEQUENCE already exists at this fixed timestamp.
// A TableAlreadyExistsException is not thrown, since the table only exists *after* this fixed timestamp.
nSequenceSaltBuckets = getSaltBuckets(e);
} catch (TableAlreadyExistsException e) {
// This will occur if we have an older SYSTEM.SEQUENCE and we need to update it to include
// any new columns we've added.
long currentServerSideTableTimeStamp = e.getTable().getTimeStamp();
if (currentServerSideTableTimeStamp < MetaDataProtocol.MIN_SYSTEM_TABLE_TIMESTAMP_4_1_0) {
// If the table time stamp is before 4.1.0 then we need to add below columns
// to the SYSTEM.SEQUENCE table.
String columnsToAdd = PhoenixDatabaseMetaData.MIN_VALUE + " " + PLong.INSTANCE.getSqlTypeName()
+ ", " + PhoenixDatabaseMetaData.MAX_VALUE + " " + PLong.INSTANCE.getSqlTypeName()
+ ", " + PhoenixDatabaseMetaData.CYCLE_FLAG + " " + PBoolean.INSTANCE.getSqlTypeName()
+ ", " + PhoenixDatabaseMetaData.LIMIT_REACHED_FLAG + " " + PBoolean.INSTANCE.getSqlTypeName();
addColumnsIfNotExists(metaConnection, PhoenixDatabaseMetaData.SYSTEM_CATALOG,
MetaDataProtocol.MIN_SYSTEM_TABLE_TIMESTAMP, columnsToAdd);
}
// If the table timestamp is before 4.2.1 then run the upgrade script
if (currentServerSideTableTimeStamp < MetaDataProtocol.MIN_SYSTEM_TABLE_TIMESTAMP_4_2_1) {
if (UpgradeUtil.upgradeSequenceTable(metaConnection, nSaltBuckets, e.getTable())) {
metaConnection.removeTable(null,
PhoenixDatabaseMetaData.SEQUENCE_SCHEMA_NAME,
PhoenixDatabaseMetaData.SEQUENCE_TABLE_NAME,
MetaDataProtocol.MIN_SYSTEM_TABLE_TIMESTAMP);
clearTableFromCache(ByteUtil.EMPTY_BYTE_ARRAY,
PhoenixDatabaseMetaData.SEQUENCE_SCHEMA_NAME_BYTES,
PhoenixDatabaseMetaData.SEQUENCE_TABLE_NAME_BYTES,
MetaDataProtocol.MIN_SYSTEM_TABLE_TIMESTAMP);
clearTableRegionCache(PhoenixDatabaseMetaData.SEQUENCE_FULLNAME_BYTES);
}
nSequenceSaltBuckets = nSaltBuckets;
} else {
nSequenceSaltBuckets = getSaltBuckets(e);
}
}
try {
metaConnection.createStatement().executeUpdate(
QueryConstants.CREATE_STATS_TABLE_METADATA);
} catch (NewerTableAlreadyExistsException ignore) {
} catch(TableAlreadyExistsException ignore) {
metaConnection = addColumnsIfNotExists(
metaConnection,
PhoenixDatabaseMetaData.SYSTEM_STATS_NAME,
MetaDataProtocol.MIN_SYSTEM_TABLE_TIMESTAMP,
PhoenixDatabaseMetaData.GUIDE_POSTS_ROW_COUNT + " "
+ PLong.INSTANCE.getSqlTypeName());
}
try {
metaConnection.createStatement().executeUpdate(
QueryConstants.CREATE_FUNCTION_METADATA);
} catch (NewerTableAlreadyExistsException e) {
} catch (TableAlreadyExistsException e) {
}
scheduleRenewLeaseTasks();
} catch (Exception e) {
if (e instanceof SQLException) {
initializationException = (SQLException)e;
} else {
// wrap every other exception into a SQLException
initializationException = new SQLException(e);
}
} finally {
try {
if (metaConnection != null) metaConnection.close();
} catch (SQLException e) {
if (initializationException != null) {
initializationException.setNextException(e);
} else {
initializationException = e;
}
} finally {
try {
if (initializationException != null) {
throw initializationException;
}
} finally {
initialized = true;
}
}
}
}
return null;
}
});
} catch (Exception e) {
Throwables.propagateIfInstanceOf(e, SQLException.class);
throw Throwables.propagate(e);
}
}
/**
* Set IMMUTABLE_ROWS to true for all index tables over immutable tables.
* @param metaConnection connection over which to run the upgrade
* @throws SQLException
*/
private static void setImmutableTableIndexesImmutable(PhoenixConnection metaConnection) throws SQLException {
boolean autoCommit = metaConnection.getAutoCommit();
try {
metaConnection.setAutoCommit(true);
metaConnection.createStatement().execute(
"UPSERT INTO SYSTEM.CATALOG(TENANT_ID, TABLE_SCHEM, TABLE_NAME, COLUMN_NAME, COLUMN_FAMILY, IMMUTABLE_ROWS)\n" +
"SELECT A.TENANT_ID, A.TABLE_SCHEM,B.COLUMN_FAMILY,null,null,true\n" +
"FROM SYSTEM.CATALOG A JOIN SYSTEM.CATALOG B ON (\n" +
" A.TENANT_ID = B.TENANT_ID AND \n" +
" A.TABLE_SCHEM = B.TABLE_SCHEM AND\n" +
" A.TABLE_NAME = B.TABLE_NAME AND\n" +
" A.COLUMN_NAME = B.COLUMN_NAME AND\n" +
" B.LINK_TYPE = 1\n" +
")\n" +
"WHERE A.COLUMN_FAMILY IS NULL AND\n" +
" B.COLUMN_FAMILY IS NOT NULL AND\n" +
" A.IMMUTABLE_ROWS = TRUE");
} finally {
metaConnection.setAutoCommit(autoCommit);
}
}
private PhoenixConnection dropStatsTable(PhoenixConnection oldMetaConnection, long timestamp)
throws SQLException, IOException {
Properties props = PropertiesUtil.deepCopy(oldMetaConnection.getClientInfo());
props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(timestamp-1));
PhoenixConnection metaConnection = new PhoenixConnection(oldMetaConnection, this, props);
SQLException sqlE = null;
boolean wasCommit = metaConnection.getAutoCommit();
try {
metaConnection.setAutoCommit(true);
metaConnection.createStatement()
.executeUpdate("DELETE FROM " + PhoenixDatabaseMetaData.SYSTEM_CATALOG_NAME + " WHERE "
+ PhoenixDatabaseMetaData.TABLE_NAME + "='" + PhoenixDatabaseMetaData.SYSTEM_STATS_TABLE
+ "' AND " + PhoenixDatabaseMetaData.TABLE_SCHEM + "='"
+ PhoenixDatabaseMetaData.SYSTEM_SCHEMA_NAME + "'");
} catch (SQLException e) {
logger.warn("exception during upgrading stats table:" + e);
sqlE = e;
} finally {
try {
metaConnection.setAutoCommit(wasCommit);
oldMetaConnection.close();
} catch (SQLException e) {
if (sqlE != null) {
sqlE.setNextException(e);
} else {
sqlE = e;
}
}
if (sqlE != null) {
throw sqlE;
}
}
oldMetaConnection = metaConnection;
props = PropertiesUtil.deepCopy(oldMetaConnection.getClientInfo());
props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(timestamp));
try {
metaConnection = new PhoenixConnection(oldMetaConnection, ConnectionQueryServicesImpl.this, props);
} finally {
try {
oldMetaConnection.close();
} catch (SQLException e) {
if (sqlE != null) {
sqlE.setNextException(e);
} else {
sqlE = e;
}
}
if (sqlE != null) {
throw sqlE;
}
}
return metaConnection;
}
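/**
 * When lease renewal is enabled, schedules one recurring task per connection queue to renew
 * scanner leases for the connections it tracks.
 */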
private void scheduleRenewLeaseTasks() {
if (isRenewingLeasesEnabled()) {
ThreadFactory threadFactory =
new ThreadFactoryBuilder().setDaemon(true)
.setNameFormat("PHOENIX-SCANNER-RENEW-LEASE" + "-thread-%s").build();
renewLeaseExecutor =
Executors.newScheduledThreadPool(renewLeasePoolSize, threadFactory);
for (LinkedBlockingQueue<WeakReference<PhoenixConnection>> q : connectionQueues) {
renewLeaseExecutor.scheduleAtFixedRate(new RenewLeaseTask(q), 0,
renewLeaseTaskFrequency, TimeUnit.MILLISECONDS);
}
}
}
private static int getSaltBuckets(TableAlreadyExistsException e) {
PTable table = e.getTable();
Integer sequenceSaltBuckets = table == null ? null : table.getBucketNum();
return sequenceSaltBuckets == null ? 0 : sequenceSaltBuckets;
}
@Override
public MutationState updateData(MutationPlan plan) throws SQLException {
MutationState state = plan.execute();
plan.getContext().getConnection().commit();
return state;
}
@Override
public int getLowestClusterHBaseVersion() {
return lowestClusterHBaseVersion;
}
@Override
public boolean hasIndexWALCodec() {
return hasIndexWALCodec;
}
/**
* Clears the Phoenix meta data cache on each region server
* @throws SQLException
*/
@Override
public long clearCache() throws SQLException {
try {
SQLException sqlE = null;
HTableInterface htable = this.getTable(PhoenixDatabaseMetaData.SYSTEM_CATALOG_NAME_BYTES);
try {
final Map<byte[], Long> results =
htable.coprocessorService(MetaDataService.class, HConstants.EMPTY_START_ROW,
HConstants.EMPTY_END_ROW, new Batch.Call<MetaDataService, Long>() {
@Override
public Long call(MetaDataService instance) throws IOException {
ServerRpcController controller = new ServerRpcController();
BlockingRpcCallback<ClearCacheResponse> rpcCallback =
new BlockingRpcCallback<ClearCacheResponse>();
ClearCacheRequest.Builder builder = ClearCacheRequest.newBuilder();
builder.setClientVersion(VersionUtil.encodeVersion(PHOENIX_MAJOR_VERSION, PHOENIX_MINOR_VERSION, PHOENIX_PATCH_NUMBER));
instance.clearCache(controller, builder.build(), rpcCallback);
if(controller.getFailedOn() != null) {
throw controller.getFailedOn();
}
return rpcCallback.get().getUnfreedBytes();
}
});
long unfreedBytes = 0;
for (Map.Entry<byte[],Long> result : results.entrySet()) {
if (result.getValue() != null) {
unfreedBytes += result.getValue();
}
}
return unfreedBytes;
} catch (IOException e) {
throw ServerUtil.parseServerException(e);
} catch (Throwable e) {
sqlE = new SQLException(e);
} finally {
try {
tableStatsCache.invalidateAll();
htable.close();
} catch (IOException e) {
if (sqlE == null) {
sqlE = ServerUtil.parseServerException(e);
} else {
sqlE.setNextException(ServerUtil.parseServerException(e));
}
} finally {
if (sqlE != null) {
throw sqlE;
}
}
}
} catch (Exception e) {
throw new SQLException(ServerUtil.parseServerException(e));
}
return 0;
}
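/** Flushes the given physical table, translating IO and interrupt failures into SQLExceptions. */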
private void flushTable(byte[] tableName) throws SQLException {
HBaseAdmin admin = getAdmin();
try {
admin.flush(tableName);
} catch (IOException e) {
throw new PhoenixIOException(e);
} catch (InterruptedException e) {
// restore the interrupt status
Thread.currentThread().interrupt();
throw new SQLExceptionInfo.Builder(SQLExceptionCode.INTERRUPTED_EXCEPTION).setRootCause(e).build()
.buildException();
} finally {
Closeables.closeQuietly(admin);
}
}
@Override
public HBaseAdmin getAdmin() throws SQLException {
try {
return new HBaseAdmin(config);
} catch (IOException e) {
throw new PhoenixIOException(e);
}
}
@Override
public MetaDataMutationResult updateIndexState(final List<Mutation> tableMetaData, String parentTableName) throws SQLException {
byte[][] rowKeyMetadata = new byte[3][];
SchemaUtil.getVarChars(tableMetaData.get(0).getRow(), rowKeyMetadata);
byte[] tableKey = SchemaUtil.getTableKey(ByteUtil.EMPTY_BYTE_ARRAY, rowKeyMetadata[PhoenixDatabaseMetaData.SCHEMA_NAME_INDEX], rowKeyMetadata[PhoenixDatabaseMetaData.TABLE_NAME_INDEX]);
return metaDataCoprocessorExec(tableKey,
new Batch.Call<MetaDataService, MetaDataResponse>() {
@Override
public MetaDataResponse call(MetaDataService instance) throws IOException {
ServerRpcController controller = new ServerRpcController();
BlockingRpcCallback<MetaDataResponse> rpcCallback =
new BlockingRpcCallback<MetaDataResponse>();
UpdateIndexStateRequest.Builder builder = UpdateIndexStateRequest.newBuilder();
for (Mutation m : tableMetaData) {
MutationProto mp = ProtobufUtil.toProto(m);
builder.addTableMetadataMutations(mp.toByteString());
}
builder.setClientVersion(VersionUtil.encodeVersion(PHOENIX_MAJOR_VERSION, PHOENIX_MINOR_VERSION, PHOENIX_PATCH_NUMBER));
instance.updateIndexState(controller, builder.build(), rpcCallback);
if(controller.getFailedOn() != null) {
throw controller.getFailedOn();
}
return rpcCallback.get();
}
});
}
@Override
public long createSequence(String tenantId, String schemaName, String sequenceName,
long startWith, long incrementBy, long cacheSize, long minValue, long maxValue,
boolean cycle, long timestamp) throws SQLException {
SequenceKey sequenceKey = new SequenceKey(tenantId, schemaName, sequenceName, nSequenceSaltBuckets);
Sequence newSequences = new Sequence(sequenceKey);
Sequence sequence = sequenceMap.putIfAbsent(sequenceKey, newSequences);
if (sequence == null) {
sequence = newSequences;
}
try {
sequence.getLock().lock();
// Now that we have the lock we need, create the sequence
Append append = sequence.createSequence(startWith, incrementBy, cacheSize, timestamp, minValue, maxValue, cycle);
HTableInterface htable =
this.getTable(PhoenixDatabaseMetaData.SEQUENCE_FULLNAME_BYTES);
htable.setAutoFlush(true);
try {
Result result = htable.append(append);
return sequence.createSequence(result, minValue, maxValue, cycle);
} catch (IOException e) {
throw ServerUtil.parseServerException(e);
} finally {
Closeables.closeQuietly(htable);
}
} finally {
sequence.getLock().unlock();
}
}
@Override
public long dropSequence(String tenantId, String schemaName, String sequenceName, long timestamp) throws SQLException {
SequenceKey sequenceKey = new SequenceKey(tenantId, schemaName, sequenceName, nSequenceSaltBuckets);
Sequence newSequences = new Sequence(sequenceKey);
Sequence sequence = sequenceMap.putIfAbsent(sequenceKey, newSequences);
if (sequence == null) {
sequence = newSequences;
}
try {
sequence.getLock().lock();
// Now that we have the lock we need, drop the sequence
Append append = sequence.dropSequence(timestamp);
HTableInterface htable = this.getTable(PhoenixDatabaseMetaData.SEQUENCE_FULLNAME_BYTES);
try {
Result result = htable.append(append);
return sequence.dropSequence(result);
} catch (IOException e) {
throw ServerUtil.parseServerException(e);
} finally {
Closeables.closeQuietly(htable);
}
} finally {
sequence.getLock().unlock();
}
}
/**
* Gets the current sequence value
* @throws SQLException if cached sequence cannot be found
*/
@Override
public long currentSequenceValue(SequenceKey sequenceKey, long timestamp) throws SQLException {
Sequence sequence = sequenceMap.get(sequenceKey);
if (sequence == null) {
throw new SQLExceptionInfo.Builder(SQLExceptionCode.CANNOT_CALL_CURRENT_BEFORE_NEXT_VALUE)
.setSchemaName(sequenceKey.getSchemaName()).setTableName(sequenceKey.getSequenceName())
.build().buildException();
}
sequence.getLock().lock();
try {
return sequence.currentValue(timestamp);
} catch (EmptySequenceCacheException e) {
throw new SQLExceptionInfo.Builder(SQLExceptionCode.CANNOT_CALL_CURRENT_BEFORE_NEXT_VALUE)
.setSchemaName(sequenceKey.getSchemaName()).setTableName(sequenceKey.getSequenceName())
.build().buildException();
} finally {
sequence.getLock().unlock();
}
}
/**
* Verifies that sequences exist and reserves values for them if reserveValues is true
*/
@Override
public void validateSequences(List<SequenceAllocation> sequenceAllocations, long timestamp, long[] values, SQLException[] exceptions, Sequence.ValueOp action) throws SQLException {
incrementSequenceValues(sequenceAllocations, timestamp, values, exceptions, action);
}
/**
* Increment any of the set of sequences that need more values. These are the sequences
* that are asking for the next value within a given statement. Sequences that could not be
* found (for example because they were deleted by another client) are reported through the
* exceptions array.
* @param sequenceAllocations sorted list of sequence allocations (sequence key plus the number of values to allocate)
* @param timestamp
* @throws SQLException if any of the sequences cannot be found
*
*/
@Override
public void incrementSequences(List<SequenceAllocation> sequenceAllocations, long timestamp, long[] values, SQLException[] exceptions) throws SQLException {
incrementSequenceValues(sequenceAllocations, timestamp, values, exceptions, Sequence.ValueOp.INCREMENT_SEQUENCE);
}
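// Shared implementation behind validateSequences and incrementSequences: registers any
// unknown sequences, locks them all, serves values from the local cache where possible,
// and sends a single batch of Increments for the sequences whose cache is exhausted.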
@SuppressWarnings("deprecation")
private void incrementSequenceValues(List<SequenceAllocation> sequenceAllocations, long timestamp, long[] values, SQLException[] exceptions, Sequence.ValueOp op) throws SQLException {
List<Sequence> sequences = Lists.newArrayListWithExpectedSize(sequenceAllocations.size());
for (SequenceAllocation sequenceAllocation : sequenceAllocations) {
SequenceKey key = sequenceAllocation.getSequenceKey();
Sequence newSequences = new Sequence(key);
Sequence sequence = sequenceMap.putIfAbsent(key, newSequences);
if (sequence == null) {
sequence = newSequences;
}
sequences.add(sequence);
}
try {
for (Sequence sequence : sequences) {
sequence.getLock().lock();
}
// Now that we have all the locks we need, increment the sequences
List<Increment> incrementBatch = Lists.newArrayListWithExpectedSize(sequences.size());
List<Sequence> toIncrementList = Lists.newArrayListWithExpectedSize(sequences.size());
int[] indexes = new int[sequences.size()];
for (int i = 0; i < sequences.size(); i++) {
Sequence sequence = sequences.get(i);
try {
values[i] = sequence.incrementValue(timestamp, op, sequenceAllocations.get(i).getNumAllocations());
} catch (EmptySequenceCacheException e) {
indexes[toIncrementList.size()] = i;
toIncrementList.add(sequence);
Increment inc = sequence.newIncrement(timestamp, op, sequenceAllocations.get(i).getNumAllocations());
incrementBatch.add(inc);
} catch (SQLException e) {
exceptions[i] = e;
}
}
if (toIncrementList.isEmpty()) {
return;
}
HTableInterface hTable = this.getTable(PhoenixDatabaseMetaData.SEQUENCE_FULLNAME_BYTES);
Object[] resultObjects = null;
SQLException sqlE = null;
try {
resultObjects= hTable.batch(incrementBatch);
} catch (IOException e) {
sqlE = ServerUtil.parseServerException(e);
} catch (InterruptedException e) {
// restore the interrupt status
Thread.currentThread().interrupt();
sqlE = new SQLExceptionInfo.Builder(SQLExceptionCode.INTERRUPTED_EXCEPTION)
.setRootCause(e).build().buildException(); // FIXME ?
} finally {
try {
hTable.close();
} catch (IOException e) {
if (sqlE == null) {
sqlE = ServerUtil.parseServerException(e);
} else {
sqlE.setNextException(ServerUtil.parseServerException(e));
}
}
if (sqlE != null) {
throw sqlE;
}
}
for (int i=0;i<resultObjects.length;i++){
Sequence sequence = toIncrementList.get(i);
Result result = (Result)resultObjects[i];
try {
long numToAllocate = Bytes.toLong(incrementBatch.get(i).getAttribute(SequenceRegionObserver.NUM_TO_ALLOCATE));
values[indexes[i]] = sequence.incrementValue(result, op, numToAllocate);
} catch (SQLException e) {
exceptions[indexes[i]] = e;
}
}
} finally {
for (Sequence sequence : sequences) {
sequence.getLock().unlock();
}
}
}
@Override
public void clearTableFromCache(final byte[] tenantId, final byte[] schemaName, final byte[] tableName,
final long clientTS) throws SQLException {
// clear the meta data cache for the table here
try {
SQLException sqlE = null;
HTableInterface htable = this.getTable(PhoenixDatabaseMetaData.SYSTEM_CATALOG_NAME_BYTES);
try {
htable.coprocessorService(MetaDataService.class, HConstants.EMPTY_START_ROW, HConstants.EMPTY_END_ROW,
new Batch.Call<MetaDataService, ClearTableFromCacheResponse>() {
@Override
public ClearTableFromCacheResponse call(MetaDataService instance) throws IOException {
ServerRpcController controller = new ServerRpcController();
BlockingRpcCallback<ClearTableFromCacheResponse> rpcCallback = new BlockingRpcCallback<ClearTableFromCacheResponse>();
ClearTableFromCacheRequest.Builder builder = ClearTableFromCacheRequest.newBuilder();
builder.setTenantId(ByteStringer.wrap(tenantId));
builder.setTableName(ByteStringer.wrap(tableName));
builder.setSchemaName(ByteStringer.wrap(schemaName));
builder.setClientTimestamp(clientTS);
builder.setClientVersion(VersionUtil.encodeVersion(PHOENIX_MAJOR_VERSION, PHOENIX_MINOR_VERSION, PHOENIX_PATCH_NUMBER));
instance.clearTableFromCache(controller, builder.build(), rpcCallback);
if (controller.getFailedOn() != null) { throw controller.getFailedOn(); }
return rpcCallback.get();
}
});
} catch (IOException e) {
throw ServerUtil.parseServerException(e);
} catch (Throwable e) {
sqlE = new SQLException(e);
} finally {
try {
if (tenantId.length == 0) tableStatsCache.invalidate(new ImmutableBytesPtr(SchemaUtil.getTableNameAsBytes(schemaName, tableName)));
htable.close();
} catch (IOException e) {
if (sqlE == null) {
sqlE = ServerUtil.parseServerException(e);
} else {
sqlE.setNextException(ServerUtil.parseServerException(e));
}
} finally {
if (sqlE != null) { throw sqlE; }
}
}
} catch (Exception e) {
throw new SQLException(ServerUtil.parseServerException(e));
}
}
@SuppressWarnings("deprecation")
@Override
public void returnSequences(List<SequenceKey> keys, long timestamp, SQLException[] exceptions) throws SQLException {
List<Sequence> sequences = Lists.newArrayListWithExpectedSize(keys.size());
for (SequenceKey key : keys) {
Sequence newSequences = new Sequence(key);
Sequence sequence = sequenceMap.putIfAbsent(key, newSequences);
if (sequence == null) {
sequence = newSequences;
}
sequences.add(sequence);
}
try {
for (Sequence sequence : sequences) {
sequence.getLock().lock();
}
// Now that we have all the locks we need, attempt to return the unused sequence values
List<Append> mutations = Lists.newArrayListWithExpectedSize(sequences.size());
List<Sequence> toReturnList = Lists.newArrayListWithExpectedSize(sequences.size());
int[] indexes = new int[sequences.size()];
for (int i = 0; i < sequences.size(); i++) {
Sequence sequence = sequences.get(i);
try {
Append append = sequence.newReturn(timestamp);
indexes[toReturnList.size()] = i;
toReturnList.add(sequence);
mutations.add(append);
} catch (EmptySequenceCacheException ignore) { // Nothing to return, so ignore
}
}
if (toReturnList.isEmpty()) {
return;
}
HTableInterface hTable = this.getTable(PhoenixDatabaseMetaData.SEQUENCE_FULLNAME_BYTES);
Object[] resultObjects = null;
SQLException sqlE = null;
try {
resultObjects= hTable.batch(mutations);
} catch (IOException e){
sqlE = ServerUtil.parseServerException(e);
} catch (InterruptedException e){
// restore the interrupt status
Thread.currentThread().interrupt();
sqlE = new SQLExceptionInfo.Builder(SQLExceptionCode.INTERRUPTED_EXCEPTION)
.setRootCause(e).build().buildException(); // FIXME ?
} finally {
try {
hTable.close();
} catch (IOException e) {
if (sqlE == null) {
sqlE = ServerUtil.parseServerException(e);
} else {
sqlE.setNextException(ServerUtil.parseServerException(e));
}
}
if (sqlE != null) {
throw sqlE;
}
}
for (int i=0;i<resultObjects.length;i++){
Sequence sequence = toReturnList.get(i);
Result result = (Result)resultObjects[i];
try {
sequence.returnValue(result);
} catch (SQLException e) {
exceptions[indexes[i]] = e;
}
}
} finally {
for (Sequence sequence : sequences) {
sequence.getLock().unlock();
}
}
}
// Take no locks, as this only gets run when there are no open connections
// so there's no danger of contention.
@SuppressWarnings("deprecation")
private void returnAllSequences(ConcurrentMap<SequenceKey,Sequence> sequenceMap) throws SQLException {
List<Append> mutations = Lists.newArrayListWithExpectedSize(sequenceMap.size());
for (Sequence sequence : sequenceMap.values()) {
mutations.addAll(sequence.newReturns());
}
if (mutations.isEmpty()) {
return;
}
HTableInterface hTable = this.getTable(PhoenixDatabaseMetaData.SEQUENCE_FULLNAME_BYTES);
SQLException sqlE = null;
try {
hTable.batch(mutations);
} catch (IOException e) {
sqlE = ServerUtil.parseServerException(e);
} catch (InterruptedException e) {
// restore the interrupt status
Thread.currentThread().interrupt();
sqlE = new SQLExceptionInfo.Builder(SQLExceptionCode.INTERRUPTED_EXCEPTION)
.setRootCause(e).build().buildException(); // FIXME ?
} finally {
try {
hTable.close();
} catch (IOException e) {
if (sqlE == null) {
sqlE = ServerUtil.parseServerException(e);
} else {
sqlE.setNextException(ServerUtil.parseServerException(e));
}
}
if (sqlE != null) {
throw sqlE;
}
}
}
@Override
public void addConnection(PhoenixConnection connection) throws SQLException {
connectionQueues.get(getQueueIndex(connection)).add(new WeakReference<PhoenixConnection>(connection));
if (returnSequenceValues) {
synchronized (connectionCountLock) {
connectionCount++;
}
}
}
@Override
public void removeConnection(PhoenixConnection connection) throws SQLException {
if (returnSequenceValues) {
ConcurrentMap<SequenceKey,Sequence> formerSequenceMap = null;
synchronized (connectionCountLock) {
if (--connectionCount == 0) {
if (!this.sequenceMap.isEmpty()) {
formerSequenceMap = this.sequenceMap;
this.sequenceMap = Maps.newConcurrentMap();
}
}
}
// Since we're using the former sequenceMap, we can do this outside
// the lock.
if (formerSequenceMap != null) {
// When there are no more connections, attempt to return any sequences
returnAllSequences(formerSequenceMap);
}
}
}
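// Chooses a random connection queue so connections are striped across the lease renewal
// queues; the connection argument is not used in the computation.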
private int getQueueIndex(PhoenixConnection conn) {
return ThreadLocalRandom.current().nextInt(renewLeasePoolSize);
}
@Override
public KeyValueBuilder getKeyValueBuilder() {
return this.kvBuilder;
}
@Override
public boolean supportsFeature(Feature feature) {
FeatureSupported supported = featureMap.get(feature);
if (supported == null) {
return false;
}
return supported.isSupported(this);
}
@Override
public String getUserName() {
return userName;
}
private void checkClosed() {
if (closed) {
throwConnectionClosedException();
}
}
private void throwConnectionClosedIfNullMetaData() {
if (latestMetaData == null) {
throwConnectionClosedException();
}
}
private void throwConnectionClosedException() {
throw new IllegalStateException("Connection to the cluster is closed");
}
@Override
public PTableStats getTableStats(final byte[] physicalName, final long clientTimeStamp) throws SQLException {
try {
return tableStatsCache.get(new ImmutableBytesPtr(physicalName), new Callable<PTableStats>() {
@Override
public PTableStats call() throws Exception {
/*
* The shared view index case is tricky, because we don't have
* table metadata for it, only an HBase table. We do have stats,
* though, so we'll query them directly here and cache them so
* we don't keep querying for them.
*/
HTableInterface statsHTable = ConnectionQueryServicesImpl.this.getTable(PhoenixDatabaseMetaData.SYSTEM_STATS_NAME_BYTES);
try {
return StatisticsUtil.readStatistics(statsHTable, physicalName, clientTimeStamp);
} catch (IOException e) {
logger.warn("Unable to read from stats table", e);
// Just cache empty stats. We'll try again after some time anyway.
return PTableStats.EMPTY_STATS;
} finally {
try {
statsHTable.close();
} catch (IOException e) {
// Log, but continue. We have our stats anyway now.
logger.warn("Unable to close stats table", e);
}
}
}
});
} catch (ExecutionException e) {
throw ServerUtil.parseServerException(e);
}
}
@Override
public int getSequenceSaltBuckets() {
return nSequenceSaltBuckets;
}
@Override
public PMetaData addFunction(PFunction function) throws SQLException {
synchronized (latestMetaDataLock) {
try {
throwConnectionClosedIfNullMetaData();
// If existing table isn't older than new table, don't replace
// If a client opens a connection at an earlier timestamp, this can happen
PFunction existingFunction = latestMetaData.getFunction(new PTableKey(function.getTenantId(), function.getFunctionName()));
if (existingFunction.getTimeStamp() >= function.getTimeStamp()) {
return latestMetaData;
}
} catch (FunctionNotFoundException e) {}
latestMetaData = latestMetaData.addFunction(function);
latestMetaDataLock.notifyAll();
return latestMetaData;
}
}
@Override
public PMetaData removeFunction(PName tenantId, String function, long functionTimeStamp)
throws SQLException {
synchronized (latestMetaDataLock) {
throwConnectionClosedIfNullMetaData();
latestMetaData = latestMetaData.removeFunction(tenantId, function, functionTimeStamp);
latestMetaDataLock.notifyAll();
return latestMetaData;
}
}
@Override
public MetaDataMutationResult getFunctions(PName tenantId, final List<Pair<byte[], Long>> functions,
final long clientTimestamp) throws SQLException {
final byte[] tenantIdBytes = tenantId == null ? ByteUtil.EMPTY_BYTE_ARRAY : tenantId.getBytes();
return metaDataCoprocessorExec(tenantIdBytes,
new Batch.Call<MetaDataService, MetaDataResponse>() {
@Override
public MetaDataResponse call(MetaDataService instance) throws IOException {
ServerRpcController controller = new ServerRpcController();
BlockingRpcCallback<MetaDataResponse> rpcCallback =
new BlockingRpcCallback<MetaDataResponse>();
GetFunctionsRequest.Builder builder = GetFunctionsRequest.newBuilder();
builder.setTenantId(ByteStringer.wrap(tenantIdBytes));
for(Pair<byte[], Long> function: functions) {
builder.addFunctionNames(ByteStringer.wrap(function.getFirst()));
builder.addFunctionTimestamps(function.getSecond().longValue());
}
builder.setClientTimestamp(clientTimestamp);
builder.setClientVersion(VersionUtil.encodeVersion(PHOENIX_MAJOR_VERSION, PHOENIX_MINOR_VERSION, PHOENIX_PATCH_NUMBER));
instance.getFunctions(controller, builder.build(), rpcCallback);
if(controller.getFailedOn() != null) {
throw controller.getFailedOn();
}
return rpcCallback.get();
}
}, PhoenixDatabaseMetaData.SYSTEM_FUNCTION_NAME_BYTES);
}
// TODO the mutations should be added to System functions table.
@Override
public MetaDataMutationResult createFunction(final List<Mutation> functionData,
final PFunction function, final boolean temporary) throws SQLException {
byte[][] rowKeyMetadata = new byte[2][];
Mutation m = MetaDataUtil.getPutOnlyTableHeaderRow(functionData);
byte[] key = m.getRow();
SchemaUtil.getVarChars(key, rowKeyMetadata);
byte[] tenantIdBytes = rowKeyMetadata[PhoenixDatabaseMetaData.TENANT_ID_INDEX];
byte[] functionBytes = rowKeyMetadata[PhoenixDatabaseMetaData.FUNTION_NAME_INDEX];
byte[] functionKey = SchemaUtil.getFunctionKey(tenantIdBytes, functionBytes);
MetaDataMutationResult result = metaDataCoprocessorExec(functionKey,
new Batch.Call<MetaDataService, MetaDataResponse>() {
@Override
public MetaDataResponse call(MetaDataService instance) throws IOException {
ServerRpcController controller = new ServerRpcController();
BlockingRpcCallback<MetaDataResponse> rpcCallback =
new BlockingRpcCallback<MetaDataResponse>();
CreateFunctionRequest.Builder builder = CreateFunctionRequest.newBuilder();
for (Mutation m : functionData) {
MutationProto mp = ProtobufUtil.toProto(m);
builder.addTableMetadataMutations(mp.toByteString());
}
builder.setTemporary(temporary);
builder.setReplace(function.isReplace());
builder.setClientVersion(VersionUtil.encodeVersion(PHOENIX_MAJOR_VERSION, PHOENIX_MINOR_VERSION, PHOENIX_PATCH_NUMBER));
instance.createFunction(controller, builder.build(), rpcCallback);
if(controller.getFailedOn() != null) {
throw controller.getFailedOn();
}
return rpcCallback.get();
}
}, PhoenixDatabaseMetaData.SYSTEM_FUNCTION_NAME_BYTES);
return result;
}
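// Task run by the renewal executor: drains its connection queue, renews the leases of
// each open connection's TableResultIterators, and re-queues connections and scanners
// that are still alive.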
@VisibleForTesting
static class RenewLeaseTask implements Runnable {
private final LinkedBlockingQueue<WeakReference<PhoenixConnection>> connectionsQueue;
private final Random random = new Random();
private static final int MAX_WAIT_TIME = 1000;
RenewLeaseTask(LinkedBlockingQueue<WeakReference<PhoenixConnection>> queue) {
this.connectionsQueue = queue;
}
private void waitForRandomDuration() throws InterruptedException {
new CountDownLatch(1).await(random.nextInt(MAX_WAIT_TIME), MILLISECONDS);
}
@Override
public void run() {
try {
int numConnections = connectionsQueue.size();
boolean wait = true;
// We keep adding items to the end of the queue. So to stop the loop, iterate only up to
// whatever the current count is.
while (numConnections > 0) {
if (wait) {
// wait for some random duration to prevent all threads from renewing lease at
// the same time.
waitForRandomDuration();
wait = false;
}
// It is guaranteed that this poll won't hang indefinitely because this is the
// only thread that removes items from the queue. Still adding a 1 ms timeout
// for sanity check.
WeakReference<PhoenixConnection> connRef =
connectionsQueue.poll(1, TimeUnit.MILLISECONDS);
if (connRef == null) {
throw new IllegalStateException(
"Connection ref found to be null. This is a bug. Some other thread removed items from the connection queue.");
}
PhoenixConnection conn = connRef.get();
if (conn != null && !conn.isClosed()) {
LinkedBlockingQueue<WeakReference<TableResultIterator>> scannerQueue =
conn.getScanners();
// We keep adding items to the end of the queue. So to stop the loop,
// iterate only up to whatever the current count is.
int numScanners = scannerQueue.size();
int renewed = 0;
long start = System.currentTimeMillis();
while (numScanners > 0) {
// It is guaranteed that this poll won't hang indefinitely because this is the
// only thread that removes items from the queue. Still adding a 1 ms timeout
// for sanity check.
WeakReference<TableResultIterator> ref =
scannerQueue.poll(1, TimeUnit.MILLISECONDS);
if (ref == null) {
throw new IllegalStateException(
"TableResulIterator ref found to be null. This is a bug. Some other thread removed items from the scanner queue.");
}
TableResultIterator scanningItr = ref.get();
if (scanningItr != null) {
RenewLeaseStatus status = scanningItr.renewLease();
switch (status) {
case RENEWED:
renewed++;
// add it back at the tail
scannerQueue.offer(new WeakReference<TableResultIterator>(
scanningItr));
logger.info("Lease renewed for scanner: " + scanningItr);
break;
case UNINITIALIZED:
case THRESHOLD_NOT_REACHED:
// add it back at the tail
scannerQueue.offer(new WeakReference<TableResultIterator>(
scanningItr));
break;
// if lease wasn't renewed or scanner was closed, don't add the
// scanner back to the queue.
case CLOSED:
case NOT_RENEWED:
break;
}
}
numScanners--;
}
if (renewed > 0) {
logger.info("Renewed leases for " + renewed + " scanner/s in "
+ (System.currentTimeMillis() - start) + " ms ");
}
connectionsQueue.offer(connRef);
}
numConnections--;
}
} catch (InterruptedException e1) {
Thread.currentThread().interrupt(); // restore the interrupt status
logger.warn("Thread interrupted when renewing lease ", e1);
throw new RuntimeException(e1);
} catch (Exception e2) {
logger.warn("Exception thrown when renewing lease ", e2);
throw new RuntimeException(e2);
}
}
}
@Override
public long getRenewLeaseThresholdMilliSeconds() {
return renewLeaseThreshold;
}
@Override
public boolean isRenewingLeasesEnabled() {
return supportsFeature(ConnectionQueryServices.Feature.RENEW_LEASE) && renewLeaseEnabled;
}
}
|
phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.phoenix.query;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import static org.apache.hadoop.hbase.HColumnDescriptor.TTL;
import static org.apache.phoenix.coprocessor.MetaDataProtocol.PHOENIX_MAJOR_VERSION;
import static org.apache.phoenix.coprocessor.MetaDataProtocol.PHOENIX_MINOR_VERSION;
import static org.apache.phoenix.coprocessor.MetaDataProtocol.PHOENIX_PATCH_NUMBER;
import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.SYSTEM_CATALOG_NAME_BYTES;
import static org.apache.phoenix.query.QueryServices.HBASE_CLIENT_KEYTAB;
import static org.apache.phoenix.query.QueryServices.HBASE_CLIENT_PRINCIPAL;
import static org.apache.phoenix.query.QueryServices.RENEW_LEASE_ENABLED;
import static org.apache.phoenix.query.QueryServices.RENEW_LEASE_THREAD_POOL_SIZE;
import static org.apache.phoenix.query.QueryServices.RENEW_LEASE_THRESHOLD_MILLISECONDS;
import static org.apache.phoenix.query.QueryServices.RUN_RENEW_LEASE_FREQUENCY_INTERVAL_MILLISECONDS;
import static org.apache.phoenix.query.QueryServicesOptions.DEFAULT_DROP_METADATA;
import static org.apache.phoenix.query.QueryServicesOptions.DEFAULT_RENEW_LEASE_ENABLED;
import static org.apache.phoenix.query.QueryServicesOptions.DEFAULT_RENEW_LEASE_THREAD_POOL_SIZE;
import static org.apache.phoenix.query.QueryServicesOptions.DEFAULT_RENEW_LEASE_THRESHOLD_MILLISECONDS;
import static org.apache.phoenix.query.QueryServicesOptions.DEFAULT_RUN_RENEW_LEASE_FREQUENCY_INTERVAL_MILLISECONDS;
import static org.apache.phoenix.util.UpgradeUtil.upgradeTo4_5_0;
import java.io.IOException;
import java.lang.ref.WeakReference;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Properties;
import java.util.Random;
import java.util.Set;
import java.util.TreeMap;
import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executors;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import javax.annotation.concurrent.GuardedBy;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableExistsException;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Append;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HConnection;
import org.apache.hadoop.hbase.client.HTableInterface;
import org.apache.hadoop.hbase.client.Increment;
import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.coprocessor.Batch;
import org.apache.hadoop.hbase.coprocessor.MultiRowMutationEndpoint;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.ipc.BlockingRpcCallback;
import org.apache.hadoop.hbase.ipc.ServerRpcController;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto;
import org.apache.hadoop.hbase.regionserver.IndexHalfStoreFileReaderGenerator;
import org.apache.hadoop.hbase.regionserver.LocalIndexSplitter;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.util.ByteStringer;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.hbase.util.VersionInfo;
import org.apache.hadoop.hbase.zookeeper.ZKConfig;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.phoenix.compile.MutationPlan;
import org.apache.phoenix.coprocessor.GroupedAggregateRegionObserver;
import org.apache.phoenix.coprocessor.MetaDataEndpointImpl;
import org.apache.phoenix.coprocessor.MetaDataProtocol;
import org.apache.phoenix.coprocessor.MetaDataProtocol.MetaDataMutationResult;
import org.apache.phoenix.coprocessor.MetaDataProtocol.MutationCode;
import org.apache.phoenix.coprocessor.MetaDataRegionObserver;
import org.apache.phoenix.coprocessor.ScanRegionObserver;
import org.apache.phoenix.coprocessor.SequenceRegionObserver;
import org.apache.phoenix.coprocessor.ServerCachingEndpointImpl;
import org.apache.phoenix.coprocessor.UngroupedAggregateRegionObserver;
import org.apache.phoenix.coprocessor.generated.MetaDataProtos;
import org.apache.phoenix.coprocessor.generated.MetaDataProtos.AddColumnRequest;
import org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheRequest;
import org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheResponse;
import org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheRequest;
import org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheResponse;
import org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateFunctionRequest;
import org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateTableRequest;
import org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropColumnRequest;
import org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropFunctionRequest;
import org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropTableRequest;
import org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetFunctionsRequest;
import org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetTableRequest;
import org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionRequest;
import org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionResponse;
import org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse;
import org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataService;
import org.apache.phoenix.coprocessor.generated.MetaDataProtos.UpdateIndexStateRequest;
import org.apache.phoenix.exception.PhoenixIOException;
import org.apache.phoenix.exception.SQLExceptionCode;
import org.apache.phoenix.exception.SQLExceptionInfo;
import org.apache.phoenix.execute.MutationState;
import org.apache.phoenix.hbase.index.IndexRegionSplitPolicy;
import org.apache.phoenix.hbase.index.Indexer;
import org.apache.phoenix.hbase.index.covered.NonTxIndexBuilder;
import org.apache.phoenix.hbase.index.util.ImmutableBytesPtr;
import org.apache.phoenix.hbase.index.util.KeyValueBuilder;
import org.apache.phoenix.hbase.index.util.VersionUtil;
import org.apache.phoenix.index.PhoenixIndexBuilder;
import org.apache.phoenix.index.PhoenixIndexCodec;
import org.apache.phoenix.index.PhoenixTransactionalIndexer;
import org.apache.phoenix.iterate.TableResultIterator;
import org.apache.phoenix.iterate.TableResultIterator.RenewLeaseStatus;
import org.apache.phoenix.jdbc.PhoenixConnection;
import org.apache.phoenix.jdbc.PhoenixDatabaseMetaData;
import org.apache.phoenix.jdbc.PhoenixEmbeddedDriver.ConnectionInfo;
import org.apache.phoenix.parse.PFunction;
import org.apache.phoenix.protobuf.ProtobufUtil;
import org.apache.phoenix.schema.ColumnAlreadyExistsException;
import org.apache.phoenix.schema.ColumnFamilyNotFoundException;
import org.apache.phoenix.schema.EmptySequenceCacheException;
import org.apache.phoenix.schema.FunctionNotFoundException;
import org.apache.phoenix.schema.MetaDataSplitPolicy;
import org.apache.phoenix.schema.NewerTableAlreadyExistsException;
import org.apache.phoenix.schema.PColumn;
import org.apache.phoenix.schema.PColumnFamily;
import org.apache.phoenix.schema.PMetaData;
import org.apache.phoenix.schema.PMetaDataImpl;
import org.apache.phoenix.schema.PName;
import org.apache.phoenix.schema.PNameFactory;
import org.apache.phoenix.schema.PTable;
import org.apache.phoenix.schema.PTableKey;
import org.apache.phoenix.schema.PTableType;
import org.apache.phoenix.schema.ReadOnlyTableException;
import org.apache.phoenix.schema.SaltingUtil;
import org.apache.phoenix.schema.Sequence;
import org.apache.phoenix.schema.SequenceAllocation;
import org.apache.phoenix.schema.SequenceKey;
import org.apache.phoenix.schema.TableAlreadyExistsException;
import org.apache.phoenix.schema.TableNotFoundException;
import org.apache.phoenix.schema.TableProperty;
import org.apache.phoenix.schema.stats.PTableStats;
import org.apache.phoenix.schema.stats.StatisticsUtil;
import org.apache.phoenix.schema.types.PBoolean;
import org.apache.phoenix.schema.types.PDataType;
import org.apache.phoenix.schema.types.PInteger;
import org.apache.phoenix.schema.types.PLong;
import org.apache.phoenix.schema.types.PUnsignedTinyint;
import org.apache.phoenix.util.ByteUtil;
import org.apache.phoenix.util.Closeables;
import org.apache.phoenix.util.ConfigUtil;
import org.apache.phoenix.util.JDBCUtil;
import org.apache.phoenix.util.MetaDataUtil;
import org.apache.phoenix.util.PhoenixContextExecutor;
import org.apache.phoenix.util.PhoenixRuntime;
import org.apache.phoenix.util.PhoenixStopWatch;
import org.apache.phoenix.util.PropertiesUtil;
import org.apache.phoenix.util.ReadOnlyProps;
import org.apache.phoenix.util.SchemaUtil;
import org.apache.phoenix.util.ServerUtil;
import org.apache.phoenix.util.UpgradeUtil;
import org.apache.twill.discovery.ZKDiscoveryService;
import org.apache.twill.zookeeper.RetryStrategies;
import org.apache.twill.zookeeper.ZKClientService;
import org.apache.twill.zookeeper.ZKClientServices;
import org.apache.twill.zookeeper.ZKClients;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import co.cask.tephra.TransactionSystemClient;
import co.cask.tephra.TxConstants;
import co.cask.tephra.distributed.PooledClientProvider;
import co.cask.tephra.distributed.TransactionServiceClient;
import co.cask.tephra.hbase11.coprocessor.TransactionProcessor;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Joiner;
import com.google.common.base.Throwables;
import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
public class ConnectionQueryServicesImpl extends DelegateQueryServices implements ConnectionQueryServices {
private static final Logger logger = LoggerFactory.getLogger(ConnectionQueryServicesImpl.class);
private static final int INITIAL_CHILD_SERVICES_CAPACITY = 100;
private static final int DEFAULT_OUT_OF_ORDER_MUTATIONS_WAIT_TIME_MS = 1000;
// Max number of cached table stats for view or shared index physical tables
private static final int MAX_TABLE_STATS_CACHE_ENTRIES = 512;
protected final Configuration config;
private final ConnectionInfo connectionInfo;
// Copy of config.getProps(), but read-only to prevent synchronization that we
// don't need.
private final ReadOnlyProps props;
private final String userName;
private final ConcurrentHashMap<ImmutableBytesWritable,ConnectionQueryServices> childServices;
private final Cache<ImmutableBytesPtr, PTableStats> tableStatsCache;
// Cache the latest meta data here for future connections
// writes guarded by "latestMetaDataLock"
private volatile PMetaData latestMetaData;
private final Object latestMetaDataLock = new Object();
// Lowest HBase version on the cluster.
private int lowestClusterHBaseVersion = Integer.MAX_VALUE;
private boolean hasIndexWALCodec = true;
@GuardedBy("connectionCountLock")
private int connectionCount = 0;
private final Object connectionCountLock = new Object();
private final boolean returnSequenceValues;
private HConnection connection;
private TransactionServiceClient txServiceClient;
private volatile boolean initialized;
private volatile int nSequenceSaltBuckets;
// writes guarded by "this"
private volatile boolean closed;
private volatile SQLException initializationException;
// setting this member variable guarded by "connectionCountLock"
private volatile ConcurrentMap<SequenceKey,Sequence> sequenceMap = Maps.newConcurrentMap();
private KeyValueBuilder kvBuilder;
private final int renewLeaseTaskFrequency;
private final int renewLeasePoolSize;
private final int renewLeaseThreshold;
// List of queues instead of a single queue to provide reduced contention via lock striping
private final List<LinkedBlockingQueue<WeakReference<PhoenixConnection>>> connectionQueues;
private ScheduledExecutorService renewLeaseExecutor;
private final boolean renewLeaseEnabled;
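// Per-feature capability check, evaluated against the lowest HBase version found on
// the cluster (see featureMap below).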
private static interface FeatureSupported {
boolean isSupported(ConnectionQueryServices services);
}
private final Map<Feature, FeatureSupported> featureMap = ImmutableMap.<Feature, FeatureSupported>of(
Feature.LOCAL_INDEX, new FeatureSupported() {
@Override
public boolean isSupported(ConnectionQueryServices services) {
int hbaseVersion = services.getLowestClusterHBaseVersion();
return hbaseVersion < PhoenixDatabaseMetaData.MIN_LOCAL_SI_VERSION_DISALLOW || hbaseVersion > PhoenixDatabaseMetaData.MAX_LOCAL_SI_VERSION_DISALLOW;
}
},
Feature.RENEW_LEASE, new FeatureSupported() {
@Override
public boolean isSupported(ConnectionQueryServices services) {
int hbaseVersion = services.getLowestClusterHBaseVersion();
return hbaseVersion >= PhoenixDatabaseMetaData.MIN_RENEW_LEASE_VERSION;
}
});
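// Creates an empty client-side metadata cache bounded by the configured maximum
// client metadata cache size in bytes.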
private PMetaData newEmptyMetaData() {
long maxSizeBytes = props.getLong(QueryServices.MAX_CLIENT_METADATA_CACHE_SIZE_ATTRIB,
QueryServicesOptions.DEFAULT_MAX_CLIENT_METADATA_CACHE_SIZE);
return new PMetaDataImpl(INITIAL_META_DATA_TABLE_CAPACITY, maxSizeBytes);
}
/**
* Construct a ConnectionQueryServicesImpl that represents a connection to an HBase
* cluster.
* @param services base services from where we derive our default configuration
* @param connectionInfo to provide connection information
* @param info hbase configuration properties
* @throws SQLException
*/
public ConnectionQueryServicesImpl(QueryServices services, ConnectionInfo connectionInfo, Properties info) {
super(services);
Configuration config = HBaseFactoryProvider.getConfigurationFactory().getConfiguration();
for (Entry<String,String> entry : services.getProps()) {
config.set(entry.getKey(), entry.getValue());
}
if (info != null) {
for (Object key : info.keySet()) {
config.set((String) key, info.getProperty((String) key));
}
}
for (Entry<String,String> entry : connectionInfo.asProps()) {
config.set(entry.getKey(), entry.getValue());
}
this.connectionInfo = connectionInfo;
// Without making a copy of the configuration we cons up, we lose some of our properties
// on the server side during testing.
this.config = HBaseFactoryProvider.getConfigurationFactory().getConfiguration(config);
// set replication required parameter
ConfigUtil.setReplicationConfigIfAbsent(this.config);
this.props = new ReadOnlyProps(this.config.iterator());
this.userName = connectionInfo.getPrincipal();
this.latestMetaData = newEmptyMetaData();
// TODO: should we track connection wide memory usage or just org-wide usage?
// If connection-wide, create a MemoryManager here, otherwise just use the one from the delegate
this.childServices = new ConcurrentHashMap<ImmutableBytesWritable,ConnectionQueryServices>(INITIAL_CHILD_SERVICES_CAPACITY);
// find the HBase version and use that to determine the KeyValueBuilder that should be used
String hbaseVersion = VersionInfo.getVersion();
this.kvBuilder = KeyValueBuilder.get(hbaseVersion);
long halfStatsUpdateFreq = config.getLong(
QueryServices.STATS_UPDATE_FREQ_MS_ATTRIB,
QueryServicesOptions.DEFAULT_STATS_UPDATE_FREQ_MS) / 2;
tableStatsCache = CacheBuilder.newBuilder()
.maximumSize(MAX_TABLE_STATS_CACHE_ENTRIES)
.expireAfterWrite(halfStatsUpdateFreq, TimeUnit.MILLISECONDS)
.build();
this.returnSequenceValues = props.getBoolean(QueryServices.RETURN_SEQUENCE_VALUES_ATTRIB, QueryServicesOptions.DEFAULT_RETURN_SEQUENCE_VALUES);
this.renewLeaseEnabled = config.getBoolean(RENEW_LEASE_ENABLED, DEFAULT_RENEW_LEASE_ENABLED);
this.renewLeasePoolSize = config.getInt(RENEW_LEASE_THREAD_POOL_SIZE, DEFAULT_RENEW_LEASE_THREAD_POOL_SIZE);
this.renewLeaseThreshold = config.getInt(RENEW_LEASE_THRESHOLD_MILLISECONDS, DEFAULT_RENEW_LEASE_THRESHOLD_MILLISECONDS);
this.renewLeaseTaskFrequency = config.getInt(RUN_RENEW_LEASE_FREQUENCY_INTERVAL_MILLISECONDS, DEFAULT_RUN_RENEW_LEASE_FREQUENCY_INTERVAL_MILLISECONDS);
List<LinkedBlockingQueue<WeakReference<PhoenixConnection>>> list = Lists.newArrayListWithCapacity(renewLeasePoolSize);
for (int i = 0; i < renewLeasePoolSize; i++) {
LinkedBlockingQueue<WeakReference<PhoenixConnection>> queue = new LinkedBlockingQueue<WeakReference<PhoenixConnection>>();
list.add(queue);
}
connectionQueues = ImmutableList.copyOf(list);
}
@Override
public TransactionSystemClient getTransactionSystemClient() {
return txServiceClient;
}
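// Builds the Tephra transaction service client on top of a ZooKeeper discovery service
// for the cluster's quorum, with retry-on-failure and re-watch-on-expire behavior.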
private void initTxServiceClient() {
String zkQuorumServersString = connectionInfo.getZookeeperQuorum()+":"+connectionInfo.getPort();
ZKClientService zkClientService = ZKClientServices.delegate(
ZKClients.reWatchOnExpire(
ZKClients.retryOnFailure(
ZKClientService.Builder.of(zkQuorumServersString)
.setSessionTimeout(props.getInt(HConstants.ZK_SESSION_TIMEOUT, HConstants.DEFAULT_ZK_SESSION_TIMEOUT))
.build(),
RetryStrategies.exponentialDelay(500, 2000, TimeUnit.MILLISECONDS)
)
)
);
zkClientService.startAndWait();
ZKDiscoveryService zkDiscoveryService = new ZKDiscoveryService(zkClientService);
PooledClientProvider pooledClientProvider = new PooledClientProvider(
config, zkDiscoveryService);
this.txServiceClient = new TransactionServiceClient(config,pooledClientProvider);
}
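// Opens the shared HConnection, logging in via Kerberos first when a client keytab and
// principal are configured, and initializing the transaction service client when
// transactions are enabled.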
private void openConnection() throws SQLException {
try {
// check if we need to authenticate with kerberos
String clientKeytab = this.getProps().get(HBASE_CLIENT_KEYTAB);
String clientPrincipal = this.getProps().get(HBASE_CLIENT_PRINCIPAL);
if (clientKeytab != null && clientPrincipal != null) {
logger.info("Trying to connect to a secure cluster with keytab:" + clientKeytab);
UserGroupInformation.setConfiguration(config);
User.login(config, HBASE_CLIENT_KEYTAB, HBASE_CLIENT_PRINCIPAL, null);
logger.info("Successfull login to secure cluster!!");
}
boolean transactionsEnabled = props.getBoolean(
QueryServices.TRANSACTIONS_ENABLED,
QueryServicesOptions.DEFAULT_TRANSACTIONS_ENABLED);
// only initialize the tx service client if needed
if (transactionsEnabled) {
initTxServiceClient();
}
this.connection = HBaseFactoryProvider.getHConnectionFactory().createConnection(this.config);
} catch (IOException e) {
throw new SQLExceptionInfo.Builder(SQLExceptionCode.CANNOT_ESTABLISH_CONNECTION)
.setRootCause(e).build().buildException();
}
if (this.connection.isClosed()) { // TODO: why the heck doesn't this throw above?
throw new SQLExceptionInfo.Builder(SQLExceptionCode.CANNOT_ESTABLISH_CONNECTION).build().buildException();
}
}
@Override
public HTableInterface getTable(byte[] tableName) throws SQLException {
try {
return HBaseFactoryProvider.getHTableFactory().getTable(tableName, connection, null);
} catch (org.apache.hadoop.hbase.TableNotFoundException e) {
throw new TableNotFoundException(SchemaUtil.getSchemaNameFromFullName(tableName), SchemaUtil.getTableNameFromFullName(tableName));
} catch (IOException e) {
throw new SQLException(e);
}
}
@Override
public HTableDescriptor getTableDescriptor(byte[] tableName) throws SQLException {
HTableInterface htable = getTable(tableName);
try {
return htable.getTableDescriptor();
} catch (IOException e) {
if(e instanceof org.apache.hadoop.hbase.TableNotFoundException ||
e.getCause() instanceof org.apache.hadoop.hbase.TableNotFoundException) {
byte[][] schemaAndTableName = new byte[2][];
SchemaUtil.getVarChars(tableName, schemaAndTableName);
throw new TableNotFoundException(Bytes.toString(schemaAndTableName[0]), Bytes.toString(schemaAndTableName[1]));
}
throw new RuntimeException(e);
} finally {
Closeables.closeQuietly(htable);
}
}
@Override
public ReadOnlyProps getProps() {
return props;
}
/**
* Closes the underlying connection to zookeeper. The QueryServices
* may not be used after that point. When a Connection is closed,
* this is not called, since these instances are pooled by the
* Driver. Instead, the Driver should call this if the QueryServices
* is ever removed from the pool
*/
@Override
public void close() throws SQLException {
if (closed) {
return;
}
synchronized (this) {
if (closed) {
return;
}
closed = true;
SQLException sqlE = null;
try {
// Attempt to return any unused sequences.
if (connection != null) returnAllSequences(this.sequenceMap);
} catch (SQLException e) {
sqlE = e;
} finally {
try {
childServices.clear();
if (renewLeaseExecutor != null) {
renewLeaseExecutor.shutdownNow();
}
synchronized (latestMetaDataLock) {
latestMetaData = null;
latestMetaDataLock.notifyAll();
}
if (connection != null) connection.close();
} catch (IOException e) {
if (sqlE == null) {
sqlE = ServerUtil.parseServerException(e);
} else {
sqlE.setNextException(ServerUtil.parseServerException(e));
}
} finally {
try {
tableStatsCache.invalidateAll();
super.close();
} catch (SQLException e) {
if (sqlE == null) {
sqlE = e;
} else {
sqlE.setNextException(e);
}
} finally {
if (sqlE != null) { throw sqlE; }
}
}
}
}
}
protected ConnectionQueryServices newChildQueryService() {
return new ChildQueryServices(this);
}
/**
* Get (and create if necessary) a child QueryService for a given tenantId.
* The QueryService will be cached for the lifetime of the parent QueryService
* @param tenantId the tenant ID
* @return the child QueryService
*/
@Override
public ConnectionQueryServices getChildQueryServices(ImmutableBytesWritable tenantId) {
ConnectionQueryServices childQueryService = childServices.get(tenantId);
if (childQueryService == null) {
childQueryService = newChildQueryService();
ConnectionQueryServices prevQueryService = childServices.putIfAbsent(tenantId, childQueryService);
return prevQueryService == null ? childQueryService : prevQueryService;
}
return childQueryService;
}
@Override
public void clearTableRegionCache(byte[] tableName) throws SQLException {
connection.clearRegionCache(TableName.valueOf(tableName));
}
@Override
public List<HRegionLocation> getAllTableRegions(byte[] tableName) throws SQLException {
/*
* Use HConnection.getRegionLocation as it uses the cache in HConnection, while getting
* all region locations from the HTable doesn't.
*/
int retryCount = 0, maxRetryCount = 1;
boolean reload = false;
while (true) {
try {
// We could surface the package protected HConnectionImplementation.getNumberOfCachedRegionLocations
// to get the sizing info we need, but this would require a new class in the same package and a cast
// to this implementation class, so it's probably not worth it.
List<HRegionLocation> locations = Lists.newArrayList();
byte[] currentKey = HConstants.EMPTY_START_ROW;
do {
HRegionLocation regionLocation = connection.getRegionLocation(
TableName.valueOf(tableName), currentKey, reload);
locations.add(regionLocation);
currentKey = regionLocation.getRegionInfo().getEndKey();
} while (!Bytes.equals(currentKey, HConstants.EMPTY_END_ROW));
return locations;
} catch (org.apache.hadoop.hbase.TableNotFoundException e) {
String fullName = Bytes.toString(tableName);
throw new TableNotFoundException(SchemaUtil.getSchemaNameFromFullName(fullName), SchemaUtil.getTableNameFromFullName(fullName));
} catch (IOException e) {
if (retryCount++ < maxRetryCount) { // One retry, in case split occurs while navigating
reload = true;
continue;
}
throw new SQLExceptionInfo.Builder(SQLExceptionCode.GET_TABLE_REGIONS_FAIL)
.setRootCause(e).build().buildException();
}
}
}
@Override
public PMetaData addTable(PTable table, long resolvedTime) throws SQLException {
synchronized (latestMetaDataLock) {
try {
throwConnectionClosedIfNullMetaData();
// If existing table isn't older than new table, don't replace
// If a client opens a connection at an earlier timestamp, this can happen
PTable existingTable = latestMetaData.getTableRef(new PTableKey(table.getTenantId(), table.getName().getString())).getTable();
if (existingTable.getTimeStamp() >= table.getTimeStamp()) {
return latestMetaData;
}
} catch (TableNotFoundException e) {}
latestMetaData = latestMetaData.addTable(table, resolvedTime);
latestMetaDataLock.notifyAll();
return latestMetaData;
}
}
public PMetaData updateResolvedTimestamp(PTable table, long resolvedTime) throws SQLException {
synchronized (latestMetaDataLock) {
throwConnectionClosedIfNullMetaData();
latestMetaData = latestMetaData.updateResolvedTimestamp(table, resolvedTime);
latestMetaDataLock.notifyAll();
return latestMetaData;
}
}
private static interface Mutator {
PMetaData mutate(PMetaData metaData) throws SQLException;
}
/**
* Ensures that metaData mutations are handled in the correct order
*/
private PMetaData metaDataMutated(PName tenantId, String tableName, long tableSeqNum, Mutator mutator) throws SQLException {
synchronized (latestMetaDataLock) {
throwConnectionClosedIfNullMetaData();
PMetaData metaData = latestMetaData;
PTable table;
long endTime = System.currentTimeMillis() + DEFAULT_OUT_OF_ORDER_MUTATIONS_WAIT_TIME_MS;
while (true) {
try {
try {
table = metaData.getTableRef(new PTableKey(tenantId, tableName)).getTable();
/* If the table is at the prior sequence number, then we're good to go.
* We know if we've got this far, that the server validated the mutations,
* so we'd just need to wait until the other connection that mutated the same
* table is processed.
*/
if (table.getSequenceNumber() + 1 == tableSeqNum) {
// TODO: assert that timeStamp is bigger that table timeStamp?
metaData = mutator.mutate(metaData);
break;
} else if (table.getSequenceNumber() >= tableSeqNum) {
logger.warn("Attempt to cache older version of " + tableName + ": current= " + table.getSequenceNumber() + ", new=" + tableSeqNum);
break;
}
} catch (TableNotFoundException e) {
}
long waitTime = endTime - System.currentTimeMillis();
// We waited long enough - just remove the table from the cache
// and the next time it's used it'll be pulled over from the server.
if (waitTime <= 0) {
logger.warn("Unable to update meta data repo within " + (DEFAULT_OUT_OF_ORDER_MUTATIONS_WAIT_TIME_MS/1000) + " seconds for " + tableName);
// There will never be a parentTableName here, as that would only
// be non null for an index and we never add/remove columns from an index.
metaData = metaData.removeTable(tenantId, tableName, null, HConstants.LATEST_TIMESTAMP);
break;
}
latestMetaDataLock.wait(waitTime);
} catch (InterruptedException e) {
// restore the interrupt status
Thread.currentThread().interrupt();
throw new SQLExceptionInfo.Builder(SQLExceptionCode.INTERRUPTED_EXCEPTION)
.setRootCause(e).build().buildException(); // FIXME
}
}
latestMetaData = metaData;
latestMetaDataLock.notifyAll();
return metaData;
}
}
@Override
public PMetaData addColumn(final PName tenantId, final String tableName, final List<PColumn> columns, final long tableTimeStamp,
final long tableSeqNum, final boolean isImmutableRows, final boolean isWalDisabled, final boolean isMultitenant,
final boolean storeNulls, final boolean isTransactional, final long updateCacheFrequency, final long resolvedTime) throws SQLException {
return metaDataMutated(tenantId, tableName, tableSeqNum, new Mutator() {
@Override
public PMetaData mutate(PMetaData metaData) throws SQLException {
try {
return metaData.addColumn(tenantId, tableName, columns, tableTimeStamp, tableSeqNum, isImmutableRows, isWalDisabled, isMultitenant, storeNulls, isTransactional, updateCacheFrequency, resolvedTime);
} catch (TableNotFoundException e) {
// The DROP TABLE may have been processed first, so just ignore.
return metaData;
}
}
});
}
@Override
public PMetaData removeTable(PName tenantId, final String tableName, String parentTableName, long tableTimeStamp) throws SQLException {
synchronized (latestMetaDataLock) {
throwConnectionClosedIfNullMetaData();
latestMetaData = latestMetaData.removeTable(tenantId, tableName, parentTableName, tableTimeStamp);
latestMetaDataLock.notifyAll();
return latestMetaData;
}
}
@Override
public PMetaData removeColumn(final PName tenantId, final String tableName, final List<PColumn> columnsToRemove, final long tableTimeStamp, final long tableSeqNum, final long resolvedTime) throws SQLException {
return metaDataMutated(tenantId, tableName, tableSeqNum, new Mutator() {
@Override
public PMetaData mutate(PMetaData metaData) throws SQLException {
try {
return metaData.removeColumn(tenantId, tableName, columnsToRemove, tableTimeStamp, tableSeqNum, resolvedTime);
} catch (TableNotFoundException e) {
// The DROP TABLE may have been processed first, so just ignore.
return metaData;
}
}
});
}
@Override
public PhoenixConnection connect(String url, Properties info) throws SQLException {
checkClosed();
PMetaData metadata = latestMetaData;
if (metadata == null) {
throwConnectionClosedException();
}
return new PhoenixConnection(this, url, info, metadata);
}
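// Builds the HColumnDescriptor for one column family, applying Phoenix defaults
// (KEEP_DELETED_CELLS, data block encoding) and the supplied per-family properties;
// for VIEWs the existing physical table's family settings are left untouched.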
private HColumnDescriptor generateColumnFamilyDescriptor(Pair<byte[],Map<String,Object>> family, PTableType tableType) throws SQLException {
HColumnDescriptor columnDesc = new HColumnDescriptor(family.getFirst());
if (tableType != PTableType.VIEW) {
if(props.get(QueryServices.DEFAULT_KEEP_DELETED_CELLS_ATTRIB) != null){
columnDesc.setKeepDeletedCells(props.getBoolean(
QueryServices.DEFAULT_KEEP_DELETED_CELLS_ATTRIB, QueryServicesOptions.DEFAULT_KEEP_DELETED_CELLS));
}
columnDesc.setDataBlockEncoding(SchemaUtil.DEFAULT_DATA_BLOCK_ENCODING);
for (Entry<String,Object> entry : family.getSecond().entrySet()) {
String key = entry.getKey();
Object value = entry.getValue();
setHColumnDescriptorValue(columnDesc, key, value);
}
}
return columnDesc;
}
// Workaround HBASE-14737
private static void setHColumnDescriptorValue(HColumnDescriptor columnDesc, String key, Object value) {
if (HConstants.VERSIONS.equals(key)) {
columnDesc.setMaxVersions(getMaxVersion(value));
} else {
columnDesc.setValue(key, value == null ? null : value.toString());
}
}
private static int getMaxVersion(Object value) {
if (value == null) {
return -1; // HColumnDescriptor.UNINITIALIZED is private
}
if (value instanceof Number) {
return ((Number)value).intValue();
}
String stringValue = value.toString();
if (stringValue.isEmpty()) {
return -1;
}
return Integer.parseInt(stringValue);
}
private void modifyColumnFamilyDescriptor(HColumnDescriptor hcd, Map<String,Object> props) throws SQLException {
for (Entry<String, Object> entry : props.entrySet()) {
String propName = entry.getKey();
Object value = entry.getValue();
setHColumnDescriptorValue(hcd, propName, value);
}
}
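// Assembles the HTableDescriptor for a table: copies over non-Phoenix table properties,
// adds or updates column family descriptors (adding a default family when none are
// declared), and registers the required coprocessors.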
private HTableDescriptor generateTableDescriptor(byte[] tableName, HTableDescriptor existingDesc, PTableType tableType, Map<String,Object> tableProps, List<Pair<byte[],Map<String,Object>>> families, byte[][] splits) throws SQLException {
String defaultFamilyName = (String)tableProps.remove(PhoenixDatabaseMetaData.DEFAULT_COLUMN_FAMILY_NAME);
HTableDescriptor tableDescriptor = (existingDesc != null) ? new HTableDescriptor(existingDesc) :
new HTableDescriptor(TableName.valueOf(tableName));
for (Entry<String,Object> entry : tableProps.entrySet()) {
String key = entry.getKey();
if (!TableProperty.isPhoenixTableProperty(key)) {
Object value = entry.getValue();
tableDescriptor.setValue(key, value == null ? null : value.toString());
}
}
if (families.isEmpty()) {
if (tableType != PTableType.VIEW) {
byte[] defaultFamilyByes = defaultFamilyName == null ? QueryConstants.DEFAULT_COLUMN_FAMILY_BYTES : Bytes.toBytes(defaultFamilyName);
// Add a dummy column family so we have key values for tables that don't declare any column families
HColumnDescriptor columnDescriptor = generateColumnFamilyDescriptor(new Pair<byte[],Map<String,Object>>(defaultFamilyByes,Collections.<String,Object>emptyMap()), tableType);
tableDescriptor.addFamily(columnDescriptor);
}
} else {
for (Pair<byte[],Map<String,Object>> family : families) {
// If the family exists only in the Phoenix description, add it. Otherwise, modify its properties accordingly.
byte[] familyByte = family.getFirst();
if (tableDescriptor.getFamily(familyByte) == null) {
if (tableType == PTableType.VIEW) {
String fullTableName = Bytes.toString(tableName);
throw new ReadOnlyTableException(
"The HBase column families for a read-only table must already exist",
SchemaUtil.getSchemaNameFromFullName(fullTableName),
SchemaUtil.getTableNameFromFullName(fullTableName),
Bytes.toString(familyByte));
}
HColumnDescriptor columnDescriptor = generateColumnFamilyDescriptor(family, tableType);
tableDescriptor.addFamily(columnDescriptor);
} else {
if (tableType != PTableType.VIEW) {
HColumnDescriptor columnDescriptor = tableDescriptor.getFamily(familyByte);
if (columnDescriptor == null) {
throw new IllegalArgumentException("Unable to find column descriptor with family name " + Bytes.toString(family.getFirst()));
}
modifyColumnFamilyDescriptor(columnDescriptor, family.getSecond());
}
}
}
}
addCoprocessors(tableName, tableDescriptor, tableType, tableProps);
return tableDescriptor;
}
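    /**
     * Installs the Phoenix coprocessors (scan and aggregate observers, server cache endpoint,
     * indexing, metadata/sequence endpoints, transaction processor) on the descriptor, adding or
     * removing them based on the table type, the table name, and its transactional state.
     */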
private void addCoprocessors(byte[] tableName, HTableDescriptor descriptor, PTableType tableType, Map<String,Object> tableProps) throws SQLException {
// The phoenix jar must be available on HBase classpath
int priority = props.getInt(QueryServices.COPROCESSOR_PRIORITY_ATTRIB, QueryServicesOptions.DEFAULT_COPROCESSOR_PRIORITY);
try {
if (!descriptor.hasCoprocessor(ScanRegionObserver.class.getName())) {
descriptor.addCoprocessor(ScanRegionObserver.class.getName(), null, priority, null);
}
if (!descriptor.hasCoprocessor(UngroupedAggregateRegionObserver.class.getName())) {
descriptor.addCoprocessor(UngroupedAggregateRegionObserver.class.getName(), null, priority, null);
}
if (!descriptor.hasCoprocessor(GroupedAggregateRegionObserver.class.getName())) {
descriptor.addCoprocessor(GroupedAggregateRegionObserver.class.getName(), null, priority, null);
}
if (!descriptor.hasCoprocessor(ServerCachingEndpointImpl.class.getName())) {
descriptor.addCoprocessor(ServerCachingEndpointImpl.class.getName(), null, priority, null);
}
boolean isTransactional =
Boolean.TRUE.equals(tableProps.get(TableProperty.TRANSACTIONAL.name())) ||
Boolean.TRUE.equals(tableProps.get(TxConstants.READ_NON_TX_DATA)); // For ALTER TABLE
// TODO: better encapsulation for this
// Since indexes can't have indexes, don't install our indexing coprocessor for indexes.
// Also don't install on the SYSTEM.CATALOG and SYSTEM.STATS table because we use
            // all-or-none mutate class which breaks when this coprocessor is installed (PHOENIX-1318).
if ((tableType != PTableType.INDEX && tableType != PTableType.VIEW)
&& !SchemaUtil.isMetaTable(tableName)
&& !SchemaUtil.isStatsTable(tableName)) {
if (isTransactional) {
if (!descriptor.hasCoprocessor(PhoenixTransactionalIndexer.class.getName())) {
descriptor.addCoprocessor(PhoenixTransactionalIndexer.class.getName(), null, priority, null);
}
// For alter table, remove non transactional index coprocessor
if (descriptor.hasCoprocessor(Indexer.class.getName())) {
descriptor.removeCoprocessor(Indexer.class.getName());
}
} else {
if (!descriptor.hasCoprocessor(Indexer.class.getName())) {
// If exception on alter table to transition back to non transactional
if (descriptor.hasCoprocessor(PhoenixTransactionalIndexer.class.getName())) {
descriptor.removeCoprocessor(PhoenixTransactionalIndexer.class.getName());
}
Map<String, String> opts = Maps.newHashMapWithExpectedSize(1);
opts.put(NonTxIndexBuilder.CODEC_CLASS_NAME_KEY, PhoenixIndexCodec.class.getName());
Indexer.enableIndexing(descriptor, PhoenixIndexBuilder.class, opts, priority);
}
}
}
if (SchemaUtil.isStatsTable(tableName) && !descriptor.hasCoprocessor(MultiRowMutationEndpoint.class.getName())) {
descriptor.addCoprocessor(MultiRowMutationEndpoint.class.getName(),
null, priority, null);
}
if (descriptor.getValue(MetaDataUtil.IS_LOCAL_INDEX_TABLE_PROP_BYTES) != null
&& Boolean.TRUE.equals(PBoolean.INSTANCE.toObject(descriptor
.getValue(MetaDataUtil.IS_LOCAL_INDEX_TABLE_PROP_BYTES)))) {
if (!descriptor.hasCoprocessor(IndexHalfStoreFileReaderGenerator.class.getName())) {
descriptor.addCoprocessor(IndexHalfStoreFileReaderGenerator.class.getName(),
null, priority, null);
}
} else {
if (!descriptor.hasCoprocessor(LocalIndexSplitter.class.getName())
&& !SchemaUtil.isMetaTable(tableName)
&& !SchemaUtil.isSequenceTable(tableName)) {
descriptor.addCoprocessor(LocalIndexSplitter.class.getName(), null, priority, null);
}
}
// Setup split policy on Phoenix metadata table to ensure that the key values of a Phoenix table
// stay on the same region.
if (SchemaUtil.isMetaTable(tableName) || SchemaUtil.isFunctionTable(tableName)) {
if (!descriptor.hasCoprocessor(MetaDataEndpointImpl.class.getName())) {
descriptor.addCoprocessor(MetaDataEndpointImpl.class.getName(), null, priority, null);
}
                if (SchemaUtil.isMetaTable(tableName)) {
if (!descriptor.hasCoprocessor(MetaDataRegionObserver.class.getName())) {
descriptor.addCoprocessor(MetaDataRegionObserver.class.getName(), null, priority + 1, null);
}
}
} else if (SchemaUtil.isSequenceTable(tableName)) {
if (!descriptor.hasCoprocessor(SequenceRegionObserver.class.getName())) {
descriptor.addCoprocessor(SequenceRegionObserver.class.getName(), null, priority, null);
}
}
if (isTransactional) {
if (!descriptor.hasCoprocessor(TransactionProcessor.class.getName())) {
descriptor.addCoprocessor(TransactionProcessor.class.getName(), null, priority - 10, null);
}
} else {
// If exception on alter table to transition back to non transactional
if (descriptor.hasCoprocessor(TransactionProcessor.class.getName())) {
descriptor.removeCoprocessor(TransactionProcessor.class.getName());
}
}
} catch (IOException e) {
throw ServerUtil.parseServerException(e);
}
}
private static interface RetriableOperation {
boolean checkForCompletion() throws TimeoutException, IOException;
String getOperatioName();
}
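    /**
     * Polls HBase until the descriptor reported for the table matches the descriptor that was
     * just pushed, so callers can rely on the updated schema being visible.
     */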
private void pollForUpdatedTableDescriptor(final HBaseAdmin admin, final HTableDescriptor newTableDescriptor,
final byte[] tableName) throws InterruptedException, TimeoutException {
checkAndRetry(new RetriableOperation() {
@Override
public String getOperatioName() {
return "UpdateOrNewTableDescriptor";
}
@Override
public boolean checkForCompletion() throws TimeoutException, IOException {
HTableDescriptor tableDesc = admin.getTableDescriptor(tableName);
return newTableDescriptor.equals(tableDesc);
}
});
}
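    /**
     * Runs the given operation until it reports completion, sleeping between attempts. The number
     * of attempts and the sleep interval come from the schema-update-check settings in
     * QueryServices; an exception on the first or last attempt is rethrown as a TimeoutException.
     */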
private void checkAndRetry(RetriableOperation op) throws InterruptedException, TimeoutException {
int maxRetries = ConnectionQueryServicesImpl.this.props.getInt(
QueryServices.NUM_RETRIES_FOR_SCHEMA_UPDATE_CHECK,
QueryServicesOptions.DEFAULT_RETRIES_FOR_SCHEMA_UPDATE_CHECK);
long sleepInterval = ConnectionQueryServicesImpl.this.props
.getLong(QueryServices.DELAY_FOR_SCHEMA_UPDATE_CHECK,
QueryServicesOptions.DEFAULT_DELAY_FOR_SCHEMA_UPDATE_CHECK);
boolean success = false;
int numTries = 1;
PhoenixStopWatch watch = new PhoenixStopWatch();
watch.start();
do {
try {
success = op.checkForCompletion();
} catch (Exception ex) {
// If we encounter any exception on the first or last try, propagate the exception and fail.
// Else, we swallow the exception and retry till we reach maxRetries.
if (numTries == 1 || numTries == maxRetries) {
watch.stop();
TimeoutException toThrow = new TimeoutException("Operation " + op.getOperatioName()
+ " didn't complete because of exception. Time elapsed: " + watch.elapsedMillis());
toThrow.initCause(ex);
throw toThrow;
}
}
numTries++;
Thread.sleep(sleepInterval);
} while (numTries < maxRetries && !success);
watch.stop();
if (!success) {
throw new TimeoutException("Operation " + op.getOperatioName() + " didn't complete within "
+ watch.elapsedMillis() + " ms "
                    + (numTries > 1 ? ("after trying " + numTries + (numTries > 1 ? " times." : " time.")) : ""));
} else {
if (logger.isDebugEnabled()) {
logger.debug("Operation "
+ op.getOperatioName()
+ " completed within "
+ watch.elapsedMillis()
+ "ms "
                        + (numTries > 1 ? ("after trying " + numTries + (numTries > 1 ? " times." : " time.")) : ""));
}
}
}
private boolean allowOnlineTableSchemaUpdate() {
return props.getBoolean(
QueryServices.ALLOW_ONLINE_TABLE_SCHEMA_UPDATE,
QueryServicesOptions.DEFAULT_ALLOW_ONLINE_TABLE_SCHEMA_UPDATE);
}
    /**
     * Ensures the backing HBase table exists, creating it from the generated descriptor when it
     * does not, and modifying it when it does and modifications are requested.
     * @param tableName physical HBase table name
     * @param tableType Phoenix table type; VIEWs require the HBase table to already exist
     * @param props HBase table properties
     * @param families column families together with their per-family properties
     * @param splits split points to use when creating the table, or null
     * @param modifyExistingMetaData whether to push descriptor changes when the table already exists
     * @return null when the table was newly created or no descriptor change was needed; otherwise
     *         the existing descriptor (when modifyExistingMetaData is false) or the new descriptor
     *         that was applied to the table
     * @throws SQLException
     */
private HTableDescriptor ensureTableCreated(byte[] tableName, PTableType tableType , Map<String,Object> props, List<Pair<byte[],Map<String,Object>>> families, byte[][] splits, boolean modifyExistingMetaData) throws SQLException {
HBaseAdmin admin = null;
SQLException sqlE = null;
HTableDescriptor existingDesc = null;
boolean isMetaTable = SchemaUtil.isMetaTable(tableName);
boolean tableExist = true;
try {
final String quorum = ZKConfig.getZKQuorumServersString(config);
final String znode = this.props.get(HConstants.ZOOKEEPER_ZNODE_PARENT);
logger.debug("Found quorum: " + quorum + ":" + znode);
admin = new HBaseAdmin(config);
try {
existingDesc = admin.getTableDescriptor(tableName);
} catch (org.apache.hadoop.hbase.TableNotFoundException e) {
tableExist = false;
if (tableType == PTableType.VIEW) {
String fullTableName = Bytes.toString(tableName);
throw new ReadOnlyTableException(
"An HBase table for a VIEW must already exist",
SchemaUtil.getSchemaNameFromFullName(fullTableName),
SchemaUtil.getTableNameFromFullName(fullTableName));
}
}
HTableDescriptor newDesc = generateTableDescriptor(tableName, existingDesc, tableType , props, families, splits);
if (!tableExist) {
if (newDesc.getValue(MetaDataUtil.IS_LOCAL_INDEX_TABLE_PROP_BYTES) != null && Boolean.TRUE.equals(
PBoolean.INSTANCE.toObject(newDesc.getValue(MetaDataUtil.IS_LOCAL_INDEX_TABLE_PROP_BYTES)))) {
newDesc.setValue(HTableDescriptor.SPLIT_POLICY, IndexRegionSplitPolicy.class.getName());
}
// Remove the splitPolicy attribute to prevent HBASE-12570
if (isMetaTable) {
newDesc.remove(HTableDescriptor.SPLIT_POLICY);
}
try {
if (splits == null) {
admin.createTable(newDesc);
} else {
admin.createTable(newDesc, splits);
}
} catch (TableExistsException e) {
// We can ignore this, as it just means that another client beat us
// to creating the HBase metadata.
return null;
}
if (isMetaTable) {
checkClientServerCompatibility();
/*
* Now we modify the table to add the split policy, since we know that the client and
                     * server are compatible. This works around HBASE-12570 which causes the cluster to be
* brought down.
*/
newDesc.setValue(HTableDescriptor.SPLIT_POLICY, MetaDataSplitPolicy.class.getName());
if (allowOnlineTableSchemaUpdate()) {
// No need to wait/poll for this update
admin.modifyTable(tableName, newDesc);
} else {
admin.disableTable(tableName);
admin.modifyTable(tableName, newDesc);
admin.enableTable(tableName);
}
}
return null;
} else {
if (isMetaTable) {
checkClientServerCompatibility();
}
if (!modifyExistingMetaData) {
return existingDesc; // Caller already knows that no metadata was changed
}
boolean willBeTx = Boolean.TRUE.equals(props.get(TableProperty.TRANSACTIONAL.name()));
// If mapping an existing table as transactional, set property so that existing
// data is correctly read.
if (willBeTx) {
newDesc.setValue(TxConstants.READ_NON_TX_DATA, Boolean.TRUE.toString());
} else {
// If we think we're creating a non transactional table when it's already
// transactional, don't allow.
if (existingDesc.hasCoprocessor(TransactionProcessor.class.getName())) {
throw new SQLExceptionInfo.Builder(SQLExceptionCode.TX_MAY_NOT_SWITCH_TO_NON_TX)
.setSchemaName(SchemaUtil.getSchemaNameFromFullName(tableName))
.setTableName(SchemaUtil.getTableNameFromFullName(tableName)).build().buildException();
}
newDesc.remove(TxConstants.READ_NON_TX_DATA);
}
if (existingDesc.equals(newDesc)) {
return null; // Indicate that no metadata was changed
}
modifyTable(tableName, newDesc, true);
return newDesc;
}
} catch (IOException e) {
sqlE = ServerUtil.parseServerException(e);
} catch (InterruptedException e) {
// restore the interrupt status
Thread.currentThread().interrupt();
sqlE = new SQLExceptionInfo.Builder(SQLExceptionCode.INTERRUPTED_EXCEPTION).setRootCause(e).build().buildException();
} catch (TimeoutException e) {
sqlE = new SQLExceptionInfo.Builder(SQLExceptionCode.OPERATION_TIMED_OUT).setRootCause(e.getCause() != null ? e.getCause() : e).build().buildException();
} finally {
try {
if (admin != null) {
admin.close();
}
} catch (IOException e) {
if (sqlE == null) {
sqlE = ServerUtil.parseServerException(e);
} else {
sqlE.setNextException(ServerUtil.parseServerException(e));
}
} finally {
if (sqlE != null) {
throw sqlE;
}
}
}
return null; // will never make it here
}
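    /**
     * Pushes a modified table descriptor to HBase, either online (optionally polling until the
     * change is visible) or by disabling and re-enabling the table when online schema updates are
     * not allowed.
     */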
private void modifyTable(byte[] tableName, HTableDescriptor newDesc, boolean shouldPoll) throws IOException,
InterruptedException, TimeoutException {
try (HBaseAdmin admin = new HBaseAdmin(config)) {
if (!allowOnlineTableSchemaUpdate()) {
admin.disableTable(tableName);
admin.modifyTable(tableName, newDesc);
admin.enableTable(tableName);
} else {
admin.modifyTable(tableName, newDesc);
if (shouldPoll) {
pollForUpdatedTableDescriptor(admin, newDesc, tableName);
}
}
}
}
private static boolean hasIndexWALCodec(Long serverVersion) {
if (serverVersion == null) {
return true;
}
return MetaDataUtil.decodeHasIndexWALCodec(serverVersion);
}
private static boolean isCompatible(Long serverVersion) {
if (serverVersion == null) {
return false;
}
return MetaDataUtil.areClientAndServerCompatible(serverVersion);
}
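    /**
     * Calls the MetaData coprocessor on each region server hosting SYSTEM.CATALOG to verify that
     * the server-side Phoenix jar is compatible with this client, and records the lowest HBase
     * version and index WAL codec availability seen across the cluster.
     */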
private void checkClientServerCompatibility() throws SQLException {
StringBuilder buf = new StringBuilder("The following servers require an updated " + QueryConstants.DEFAULT_COPROCESS_PATH + " to be put in the classpath of HBase: ");
boolean isIncompatible = false;
int minHBaseVersion = Integer.MAX_VALUE;
try {
List<HRegionLocation> locations = this.getAllTableRegions(SYSTEM_CATALOG_NAME_BYTES);
Set<HRegionLocation> serverMap = Sets.newHashSetWithExpectedSize(locations.size());
TreeMap<byte[], HRegionLocation> regionMap = Maps.newTreeMap(Bytes.BYTES_COMPARATOR);
List<byte[]> regionKeys = Lists.newArrayListWithExpectedSize(locations.size());
for (HRegionLocation entry : locations) {
if (!serverMap.contains(entry)) {
regionKeys.add(entry.getRegionInfo().getStartKey());
regionMap.put(entry.getRegionInfo().getRegionName(), entry);
serverMap.add(entry);
}
}
HTableInterface ht = this.getTable(PhoenixDatabaseMetaData.SYSTEM_CATALOG_NAME_BYTES);
final Map<byte[], Long> results =
ht.coprocessorService(MetaDataService.class, null, null, new Batch.Call<MetaDataService,Long>() {
@Override
public Long call(MetaDataService instance) throws IOException {
ServerRpcController controller = new ServerRpcController();
BlockingRpcCallback<GetVersionResponse> rpcCallback =
new BlockingRpcCallback<GetVersionResponse>();
GetVersionRequest.Builder builder = GetVersionRequest.newBuilder();
builder.setClientVersion(VersionUtil.encodeVersion(PHOENIX_MAJOR_VERSION, PHOENIX_MINOR_VERSION, PHOENIX_PATCH_NUMBER));
instance.getVersion(controller, builder.build(), rpcCallback);
if(controller.getFailedOn() != null) {
throw controller.getFailedOn();
}
return rpcCallback.get().getVersion();
}
});
for (Map.Entry<byte[],Long> result : results.entrySet()) {
                // This is the case where "phoenix.jar" is in place, but the server is out of sync with the client.
if (!isCompatible(result.getValue())) {
isIncompatible = true;
HRegionLocation name = regionMap.get(result.getKey());
buf.append(name);
buf.append(';');
}
hasIndexWALCodec &= hasIndexWALCodec(result.getValue());
if (minHBaseVersion > MetaDataUtil.decodeHBaseVersion(result.getValue())) {
minHBaseVersion = MetaDataUtil.decodeHBaseVersion(result.getValue());
}
}
lowestClusterHBaseVersion = minHBaseVersion;
} catch (SQLException e) {
throw e;
} catch (Throwable t) {
// This is the case if the "phoenix.jar" is not on the classpath of HBase on the region server
throw new SQLExceptionInfo.Builder(SQLExceptionCode.INCOMPATIBLE_CLIENT_SERVER_JAR).setRootCause(t)
.setMessage("Ensure that " + QueryConstants.DEFAULT_COPROCESS_PATH + " is put on the classpath of HBase in every region server: " + t.getMessage())
.build().buildException();
}
if (isIncompatible) {
buf.setLength(buf.length()-1);
throw new SQLExceptionInfo.Builder(SQLExceptionCode.OUTDATED_JARS).setMessage(buf.toString()).build().buildException();
}
}
/**
* Invoke meta data coprocessor with one retry if the key was found to not be in the regions
* (due to a table split)
*/
private MetaDataMutationResult metaDataCoprocessorExec(byte[] tableKey,
Batch.Call<MetaDataService, MetaDataResponse> callable) throws SQLException {
return metaDataCoprocessorExec(tableKey, callable, PhoenixDatabaseMetaData.SYSTEM_CATALOG_NAME_BYTES);
}
/**
* Invoke meta data coprocessor with one retry if the key was found to not be in the regions
* (due to a table split)
*/
private MetaDataMutationResult metaDataCoprocessorExec(byte[] tableKey,
Batch.Call<MetaDataService, MetaDataResponse> callable, byte[] tableName) throws SQLException {
try {
boolean retried = false;
while (true) {
if (retried) {
connection.relocateRegion(
TableName.valueOf(tableName),
tableKey);
}
HTableInterface ht = this.getTable(tableName);
try {
final Map<byte[], MetaDataResponse> results =
ht.coprocessorService(MetaDataService.class, tableKey, tableKey, callable);
assert(results.size() == 1);
MetaDataResponse result = results.values().iterator().next();
if (result.getReturnCode() == MetaDataProtos.MutationCode.TABLE_NOT_IN_REGION
|| result.getReturnCode() == MetaDataProtos.MutationCode.FUNCTION_NOT_IN_REGION) {
if (retried) return MetaDataMutationResult.constructFromProto(result);
retried = true;
continue;
}
return MetaDataMutationResult.constructFromProto(result);
} finally {
Closeables.closeQuietly(ht);
}
}
} catch (IOException e) {
throw ServerUtil.parseServerException(e);
} catch (Throwable t) {
throw new SQLException(t);
}
}
// Our property values are translated using toString, so we need to "string-ify" this.
private static final String TRUE_BYTES_AS_STRING = Bytes.toString(PDataType.TRUE_BYTES);
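    /**
     * Ensures the shared physical table backing view indexes on the given base table exists,
     * sizing its MAX_FILESIZE as a configured percentage of the base table's value and verifying
     * that any pre-existing table is marked as a view index table.
     */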
private void ensureViewIndexTableCreated(byte[] physicalTableName, Map<String,Object> tableProps, List<Pair<byte[],Map<String,Object>>> families, byte[][] splits, long timestamp) throws SQLException {
Long maxFileSize = (Long)tableProps.get(HTableDescriptor.MAX_FILESIZE);
if (maxFileSize == null) {
maxFileSize = this.props.getLong(HConstants.HREGION_MAX_FILESIZE, HConstants.DEFAULT_MAX_FILE_SIZE);
}
byte[] physicalIndexName = MetaDataUtil.getViewIndexPhysicalName(physicalTableName);
int indexMaxFileSizePerc;
// Get percentage to use from table props first and then fallback to config
Integer indexMaxFileSizePercProp = (Integer)tableProps.remove(QueryServices.INDEX_MAX_FILESIZE_PERC_ATTRIB);
if (indexMaxFileSizePercProp == null) {
indexMaxFileSizePerc = this.props.getInt(QueryServices.INDEX_MAX_FILESIZE_PERC_ATTRIB, QueryServicesOptions.DEFAULT_INDEX_MAX_FILESIZE_PERC);
} else {
indexMaxFileSizePerc = indexMaxFileSizePercProp;
}
long indexMaxFileSize = maxFileSize * indexMaxFileSizePerc / 100;
tableProps.put(HTableDescriptor.MAX_FILESIZE, indexMaxFileSize);
tableProps.put(MetaDataUtil.IS_VIEW_INDEX_TABLE_PROP_NAME, TRUE_BYTES_AS_STRING);
HTableDescriptor desc = ensureTableCreated(physicalIndexName, PTableType.TABLE, tableProps, families, splits, false);
if (desc != null) {
if (!Boolean.TRUE.equals(PBoolean.INSTANCE.toObject(desc.getValue(MetaDataUtil.IS_VIEW_INDEX_TABLE_PROP_BYTES)))) {
String fullTableName = Bytes.toString(physicalIndexName);
throw new TableAlreadyExistsException(
"Unable to create shared physical table for indexes on views.",
SchemaUtil.getSchemaNameFromFullName(fullTableName),
SchemaUtil.getTableNameFromFullName(fullTableName));
}
}
}
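    /**
     * Resolves the parent table for the given local index physical name, fetching its metadata
     * from the server when it is not cached, and then ensures the shared local index table exists.
     */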
private void ensureLocalIndexTableCreated(byte[] physicalTableName, Map<String,Object> tableProps, List<Pair<byte[],Map<String,Object>>> families, byte[][] splits, long timestamp) throws SQLException {
PTable table;
String parentTableName = Bytes.toString(physicalTableName, MetaDataUtil.LOCAL_INDEX_TABLE_PREFIX_BYTES.length,
physicalTableName.length - MetaDataUtil.LOCAL_INDEX_TABLE_PREFIX_BYTES.length);
try {
synchronized (latestMetaDataLock) {
throwConnectionClosedIfNullMetaData();
table = latestMetaData.getTableRef(new PTableKey(PName.EMPTY_NAME, parentTableName)).getTable();
latestMetaDataLock.notifyAll();
}
if (table.getTimeStamp() >= timestamp) { // Table in cache is newer than client timestamp which shouldn't be the case
throw new TableNotFoundException(table.getSchemaName().getString(), table.getTableName().getString());
}
} catch (TableNotFoundException e) {
byte[] schemaName = Bytes.toBytes(SchemaUtil.getSchemaNameFromFullName(parentTableName));
byte[] tableName = Bytes.toBytes(SchemaUtil.getTableNameFromFullName(parentTableName));
MetaDataMutationResult result = this.getTable(null, schemaName, tableName, HConstants.LATEST_TIMESTAMP, timestamp);
table = result.getTable();
if (table == null) {
throw e;
}
}
ensureLocalIndexTableCreated(physicalTableName, tableProps, families, splits);
}
private void ensureLocalIndexTableCreated(byte[] physicalTableName, Map<String, Object> tableProps, List<Pair<byte[], Map<String, Object>>> families, byte[][] splits) throws SQLException, TableAlreadyExistsException {
// If we're not allowing local indexes or the hbase version is too low,
// don't create the local index table
if ( !this.getProps().getBoolean(QueryServices.ALLOW_LOCAL_INDEX_ATTRIB, QueryServicesOptions.DEFAULT_ALLOW_LOCAL_INDEX)
|| !this.supportsFeature(Feature.LOCAL_INDEX)) {
return;
}
tableProps.put(MetaDataUtil.IS_LOCAL_INDEX_TABLE_PROP_NAME, TRUE_BYTES_AS_STRING);
HTableDescriptor desc = ensureTableCreated(physicalTableName, PTableType.TABLE, tableProps, families, splits, true);
if (desc != null) {
if (!Boolean.TRUE.equals(PBoolean.INSTANCE.toObject(desc.getValue(MetaDataUtil.IS_LOCAL_INDEX_TABLE_PROP_BYTES)))) {
String fullTableName = Bytes.toString(physicalTableName);
throw new TableAlreadyExistsException(
"Unable to create shared physical table for local indexes.",
SchemaUtil.getSchemaNameFromFullName(fullTableName),
SchemaUtil.getTableNameFromFullName(fullTableName));
}
}
}
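    /**
     * Drops the shared view index table for the given base table if it exists, was created by
     * Phoenix, and the DROP_METADATA setting is enabled; returns whether a table was deleted.
     */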
private boolean ensureViewIndexTableDropped(byte[] physicalTableName, long timestamp) throws SQLException {
byte[] physicalIndexName = MetaDataUtil.getViewIndexPhysicalName(physicalTableName);
HTableDescriptor desc = null;
HBaseAdmin admin = null;
boolean wasDeleted = false;
try {
admin = new HBaseAdmin(config);
try {
desc = admin.getTableDescriptor(physicalIndexName);
if (Boolean.TRUE.equals(PBoolean.INSTANCE.toObject(desc.getValue(MetaDataUtil.IS_VIEW_INDEX_TABLE_PROP_BYTES)))) {
this.tableStatsCache.invalidate(new ImmutableBytesPtr(physicalIndexName));
final ReadOnlyProps props = this.getProps();
final boolean dropMetadata = props.getBoolean(DROP_METADATA_ATTRIB, DEFAULT_DROP_METADATA);
if (dropMetadata) {
admin.disableTable(physicalIndexName);
admin.deleteTable(physicalIndexName);
clearTableRegionCache(physicalIndexName);
wasDeleted = true;
}
}
} catch (org.apache.hadoop.hbase.TableNotFoundException ignore) {
// Ignore, as we may never have created a view index table
}
} catch (IOException e) {
throw ServerUtil.parseServerException(e);
} finally {
try {
if (admin != null) admin.close();
} catch (IOException e) {
logger.warn("",e);
}
}
return wasDeleted;
}
private boolean ensureLocalIndexTableDropped(byte[] physicalTableName, long timestamp) throws SQLException {
byte[] physicalIndexName = MetaDataUtil.getLocalIndexPhysicalName(physicalTableName);
HTableDescriptor desc = null;
HBaseAdmin admin = null;
boolean wasDeleted = false;
try {
admin = new HBaseAdmin(config);
try {
desc = admin.getTableDescriptor(physicalIndexName);
if (Boolean.TRUE.equals(PBoolean.INSTANCE.toObject(desc.getValue(MetaDataUtil.IS_LOCAL_INDEX_TABLE_PROP_BYTES)))) {
this.tableStatsCache.invalidate(new ImmutableBytesPtr(physicalIndexName));
final ReadOnlyProps props = this.getProps();
final boolean dropMetadata = props.getBoolean(DROP_METADATA_ATTRIB, DEFAULT_DROP_METADATA);
if (dropMetadata) {
admin.disableTable(physicalIndexName);
admin.deleteTable(physicalIndexName);
clearTableRegionCache(physicalIndexName);
wasDeleted = true;
}
}
} catch (org.apache.hadoop.hbase.TableNotFoundException ignore) {
            // Ignore, as we may never have created a local index table
}
} catch (IOException e) {
throw ServerUtil.parseServerException(e);
} finally {
try {
if (admin != null) admin.close();
} catch (IOException e) {
logger.warn("",e);
}
}
return wasDeleted;
}
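    /**
     * Creates a Phoenix table: ensures the backing HBase table (and, where needed, the shared
     * view or local index table) exists, then invokes the MetaData coprocessor's createTable call
     * with the supplied metadata mutations.
     */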
@Override
public MetaDataMutationResult createTable(final List<Mutation> tableMetaData, byte[] physicalTableName, PTableType tableType,
Map<String,Object> tableProps, final List<Pair<byte[],Map<String,Object>>> families, byte[][] splits) throws SQLException {
byte[][] rowKeyMetadata = new byte[3][];
Mutation m = MetaDataUtil.getPutOnlyTableHeaderRow(tableMetaData);
byte[] key = m.getRow();
SchemaUtil.getVarChars(key, rowKeyMetadata);
byte[] tenantIdBytes = rowKeyMetadata[PhoenixDatabaseMetaData.TENANT_ID_INDEX];
byte[] schemaBytes = rowKeyMetadata[PhoenixDatabaseMetaData.SCHEMA_NAME_INDEX];
byte[] tableBytes = rowKeyMetadata[PhoenixDatabaseMetaData.TABLE_NAME_INDEX];
byte[] tableName = physicalTableName != null ? physicalTableName : SchemaUtil.getTableNameAsBytes(schemaBytes, tableBytes);
boolean localIndexTable = Boolean.TRUE.equals(tableProps.remove(MetaDataUtil.IS_LOCAL_INDEX_TABLE_PROP_NAME));
if ((tableType == PTableType.VIEW && physicalTableName != null) || (tableType != PTableType.VIEW && physicalTableName == null)) {
// For views this will ensure that metadata already exists
// For tables and indexes, this will create the metadata if it doesn't already exist
ensureTableCreated(tableName, tableType, tableProps, families, splits, true);
}
ImmutableBytesWritable ptr = new ImmutableBytesWritable();
if (tableType == PTableType.INDEX) { // Index on view
// Physical index table created up front for multi tenant
// TODO: if viewIndexId is Short.MIN_VALUE, then we don't need to attempt to create it
if (physicalTableName != null) {
if (localIndexTable) {
ensureLocalIndexTableCreated(tableName, tableProps, families, splits, MetaDataUtil.getClientTimeStamp(m));
} else if (!MetaDataUtil.isMultiTenant(m, kvBuilder, ptr)) {
ensureViewIndexTableCreated(tenantIdBytes.length == 0 ? null : PNameFactory.newName(tenantIdBytes), physicalTableName, MetaDataUtil.getClientTimeStamp(m));
}
}
} else if (tableType == PTableType.TABLE && MetaDataUtil.isMultiTenant(m, kvBuilder, ptr)) { // Create view index table up front for multi tenant tables
ptr.set(QueryConstants.DEFAULT_COLUMN_FAMILY_BYTES);
MetaDataUtil.getMutationValue(m, PhoenixDatabaseMetaData.DEFAULT_COLUMN_FAMILY_NAME_BYTES, kvBuilder, ptr);
List<Pair<byte[],Map<String,Object>>> familiesPlusDefault = null;
for (Pair<byte[],Map<String,Object>> family : families) {
byte[] cf = family.getFirst();
if (Bytes.compareTo(cf, 0, cf.length, ptr.get(), ptr.getOffset(),ptr.getLength()) == 0) {
familiesPlusDefault = families;
break;
}
}
// Don't override if default family already present
if (familiesPlusDefault == null) {
byte[] defaultCF = ByteUtil.copyKeyBytesIfNecessary(ptr);
// Only use splits if table is salted, otherwise it may not be applicable
// Always add default column family, as we don't know in advance if we'll need it
familiesPlusDefault = Lists.newArrayList(families);
familiesPlusDefault.add(new Pair<byte[],Map<String,Object>>(defaultCF,Collections.<String,Object>emptyMap()));
}
ensureViewIndexTableCreated(tableName, tableProps, familiesPlusDefault, MetaDataUtil.isSalted(m, kvBuilder, ptr) ? splits : null, MetaDataUtil.getClientTimeStamp(m));
}
byte[] tableKey = SchemaUtil.getTableKey(tenantIdBytes, schemaBytes, tableBytes);
MetaDataMutationResult result = metaDataCoprocessorExec(tableKey,
new Batch.Call<MetaDataService, MetaDataResponse>() {
@Override
public MetaDataResponse call(MetaDataService instance) throws IOException {
ServerRpcController controller = new ServerRpcController();
BlockingRpcCallback<MetaDataResponse> rpcCallback =
new BlockingRpcCallback<MetaDataResponse>();
CreateTableRequest.Builder builder = CreateTableRequest.newBuilder();
for (Mutation m : tableMetaData) {
MutationProto mp = ProtobufUtil.toProto(m);
builder.addTableMetadataMutations(mp.toByteString());
}
builder.setClientVersion(VersionUtil.encodeVersion(PHOENIX_MAJOR_VERSION, PHOENIX_MINOR_VERSION, PHOENIX_PATCH_NUMBER));
CreateTableRequest build = builder.build();
instance.createTable(controller, build, rpcCallback);
if(controller.getFailedOn() != null) {
throw controller.getFailedOn();
}
return rpcCallback.get();
}
});
return result;
}
@Override
public MetaDataMutationResult getTable(final PName tenantId, final byte[] schemaBytes, final byte[] tableBytes,
final long tableTimestamp, final long clientTimestamp) throws SQLException {
final byte[] tenantIdBytes = tenantId == null ? ByteUtil.EMPTY_BYTE_ARRAY : tenantId.getBytes();
byte[] tableKey = SchemaUtil.getTableKey(tenantIdBytes, schemaBytes, tableBytes);
return metaDataCoprocessorExec(tableKey,
new Batch.Call<MetaDataService, MetaDataResponse>() {
@Override
public MetaDataResponse call(MetaDataService instance) throws IOException {
ServerRpcController controller = new ServerRpcController();
BlockingRpcCallback<MetaDataResponse> rpcCallback =
new BlockingRpcCallback<MetaDataResponse>();
GetTableRequest.Builder builder = GetTableRequest.newBuilder();
builder.setTenantId(ByteStringer.wrap(tenantIdBytes));
builder.setSchemaName(ByteStringer.wrap(schemaBytes));
builder.setTableName(ByteStringer.wrap(tableBytes));
builder.setTableTimestamp(tableTimestamp);
builder.setClientTimestamp(clientTimestamp);
builder.setClientVersion(VersionUtil.encodeVersion(PHOENIX_MAJOR_VERSION, PHOENIX_MINOR_VERSION, PHOENIX_PATCH_NUMBER));
instance.getTable(controller, builder.build(), rpcCallback);
if(controller.getFailedOn() != null) {
throw controller.getFailedOn();
}
return rpcCallback.get();
}
});
}
@Override
public MetaDataMutationResult dropTable(final List<Mutation> tableMetaData, final PTableType tableType, final boolean cascade) throws SQLException {
byte[][] rowKeyMetadata = new byte[3][];
SchemaUtil.getVarChars(tableMetaData.get(0).getRow(), rowKeyMetadata);
byte[] tenantIdBytes = rowKeyMetadata[PhoenixDatabaseMetaData.TENANT_ID_INDEX];
byte[] schemaBytes = rowKeyMetadata[PhoenixDatabaseMetaData.SCHEMA_NAME_INDEX];
byte[] tableBytes = rowKeyMetadata[PhoenixDatabaseMetaData.TABLE_NAME_INDEX];
byte[] tableKey = SchemaUtil.getTableKey(tenantIdBytes == null ? ByteUtil.EMPTY_BYTE_ARRAY : tenantIdBytes, schemaBytes, tableBytes);
final MetaDataMutationResult result = metaDataCoprocessorExec(tableKey,
new Batch.Call<MetaDataService, MetaDataResponse>() {
@Override
public MetaDataResponse call(MetaDataService instance) throws IOException {
ServerRpcController controller = new ServerRpcController();
BlockingRpcCallback<MetaDataResponse> rpcCallback =
new BlockingRpcCallback<MetaDataResponse>();
DropTableRequest.Builder builder = DropTableRequest.newBuilder();
for (Mutation m : tableMetaData) {
MutationProto mp = ProtobufUtil.toProto(m);
builder.addTableMetadataMutations(mp.toByteString());
}
builder.setTableType(tableType.getSerializedValue());
builder.setCascade(cascade);
builder.setClientVersion(VersionUtil.encodeVersion(PHOENIX_MAJOR_VERSION, PHOENIX_MINOR_VERSION, PHOENIX_PATCH_NUMBER));
instance.dropTable(controller, builder.build(), rpcCallback);
if(controller.getFailedOn() != null) {
throw controller.getFailedOn();
}
return rpcCallback.get();
}
});
final MutationCode code = result.getMutationCode();
switch(code) {
case TABLE_ALREADY_EXISTS:
ReadOnlyProps props = this.getProps();
boolean dropMetadata = props.getBoolean(DROP_METADATA_ATTRIB, DEFAULT_DROP_METADATA);
if (dropMetadata) {
dropTables(result.getTableNamesToDelete());
}
invalidateTables(result.getTableNamesToDelete());
if (tableType == PTableType.TABLE) {
byte[] physicalName = SchemaUtil.getTableNameAsBytes(schemaBytes, tableBytes);
long timestamp = MetaDataUtil.getClientTimeStamp(tableMetaData);
ensureViewIndexTableDropped(physicalName, timestamp);
ensureLocalIndexTableDropped(physicalName, timestamp);
tableStatsCache.invalidate(new ImmutableBytesPtr(physicalName));
}
break;
default:
break;
}
return result;
}
@Override
public MetaDataMutationResult dropFunction(final List<Mutation> functionData, final boolean ifExists) throws SQLException {
byte[][] rowKeyMetadata = new byte[2][];
byte[] key = functionData.get(0).getRow();
SchemaUtil.getVarChars(key, rowKeyMetadata);
byte[] tenantIdBytes = rowKeyMetadata[PhoenixDatabaseMetaData.TENANT_ID_INDEX];
byte[] functionBytes = rowKeyMetadata[PhoenixDatabaseMetaData.FUNTION_NAME_INDEX];
byte[] functionKey = SchemaUtil.getFunctionKey(tenantIdBytes, functionBytes);
final MetaDataMutationResult result = metaDataCoprocessorExec(functionKey,
new Batch.Call<MetaDataService, MetaDataResponse>() {
@Override
public MetaDataResponse call(MetaDataService instance) throws IOException {
ServerRpcController controller = new ServerRpcController();
BlockingRpcCallback<MetaDataResponse> rpcCallback =
new BlockingRpcCallback<MetaDataResponse>();
DropFunctionRequest.Builder builder = DropFunctionRequest.newBuilder();
for (Mutation m : functionData) {
MutationProto mp = ProtobufUtil.toProto(m);
builder.addTableMetadataMutations(mp.toByteString());
}
builder.setIfExists(ifExists);
builder.setClientVersion(VersionUtil.encodeVersion(PHOENIX_MAJOR_VERSION, PHOENIX_MINOR_VERSION, PHOENIX_PATCH_NUMBER));
instance.dropFunction(controller, builder.build(), rpcCallback);
if(controller.getFailedOn() != null) {
throw controller.getFailedOn();
}
return rpcCallback.get();
}
}, PhoenixDatabaseMetaData.SYSTEM_FUNCTION_NAME_BYTES);
return result;
}
private void invalidateTables(final List<byte[]> tableNamesToDelete) {
if (tableNamesToDelete != null) {
for ( byte[] tableName : tableNamesToDelete ) {
tableStatsCache.invalidate(new ImmutableBytesPtr(tableName));
}
}
}
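    /**
     * Disables and deletes the given HBase tables if they exist and clears their region caches;
     * a failure to close the admin is chained onto any primary SQLException.
     */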
private void dropTables(final List<byte[]> tableNamesToDelete) throws SQLException {
HBaseAdmin admin = null;
SQLException sqlE = null;
try{
admin = new HBaseAdmin(config);
if (tableNamesToDelete != null){
for ( byte[] tableName : tableNamesToDelete ) {
if ( admin.tableExists(tableName) ) {
admin.disableTable(tableName);
admin.deleteTable(tableName);
clearTableRegionCache(tableName);
}
}
}
} catch (IOException e) {
sqlE = ServerUtil.parseServerException(e);
} finally {
try {
if (admin != null) {
admin.close();
}
} catch (IOException e) {
if (sqlE == null) {
sqlE = ServerUtil.parseServerException(e);
} else {
sqlE.setNextException(ServerUtil.parseServerException(e));
}
} finally {
if (sqlE != null) {
throw sqlE;
}
}
}
}
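    /**
     * Converts the raw key/value map of an HTableDescriptor into a String-keyed properties map so
     * the values can be fed back into table and column family creation.
     */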
private static Map<String,Object> createPropertiesMap(Map<ImmutableBytesWritable,ImmutableBytesWritable> htableProps) {
Map<String,Object> props = Maps.newHashMapWithExpectedSize(htableProps.size());
for (Map.Entry<ImmutableBytesWritable,ImmutableBytesWritable> entry : htableProps.entrySet()) {
ImmutableBytesWritable key = entry.getKey();
ImmutableBytesWritable value = entry.getValue();
props.put(Bytes.toString(key.get(), key.getOffset(), key.getLength()), Bytes.toString(value.get(), value.getOffset(), value.getLength()));
}
return props;
}
private void ensureViewIndexTableCreated(PName tenantId, byte[] physicalIndexTableName, long timestamp) throws SQLException {
PTable table;
String name = Bytes.toString(
physicalIndexTableName,
MetaDataUtil.VIEW_INDEX_TABLE_PREFIX_BYTES.length,
physicalIndexTableName.length-MetaDataUtil.VIEW_INDEX_TABLE_PREFIX_BYTES.length);
try {
PMetaData metadata = latestMetaData;
if (metadata == null) {
throwConnectionClosedException();
}
table = metadata.getTableRef(new PTableKey(tenantId, name)).getTable();
if (table.getTimeStamp() >= timestamp) { // Table in cache is newer than client timestamp which shouldn't be the case
throw new TableNotFoundException(table.getSchemaName().getString(), table.getTableName().getString());
}
} catch (TableNotFoundException e) {
byte[] schemaName = Bytes.toBytes(SchemaUtil.getSchemaNameFromFullName(name));
byte[] tableName = Bytes.toBytes(SchemaUtil.getTableNameFromFullName(name));
MetaDataMutationResult result = this.getTable(null, schemaName, tableName, HConstants.LATEST_TIMESTAMP, timestamp);
table = result.getTable();
if (table == null) {
throw e;
}
}
ensureViewIndexTableCreated(table, timestamp);
}
private void ensureViewIndexTableCreated(PTable table, long timestamp) throws SQLException {
byte[] physicalTableName = table.getPhysicalName().getBytes();
HTableDescriptor htableDesc = this.getTableDescriptor(physicalTableName);
Map<String,Object> tableProps = createPropertiesMap(htableDesc.getValues());
List<Pair<byte[],Map<String,Object>>> families = Lists.newArrayListWithExpectedSize(Math.max(1, table.getColumnFamilies().size()+1));
        if (table.getColumnFamilies().isEmpty()) {
byte[] familyName = SchemaUtil.getEmptyColumnFamily(table);
Map<String,Object> familyProps = createPropertiesMap(htableDesc.getFamily(familyName).getValues());
families.add(new Pair<byte[],Map<String,Object>>(familyName, familyProps));
} else {
for (PColumnFamily family : table.getColumnFamilies()) {
byte[] familyName = family.getName().getBytes();
Map<String,Object> familyProps = createPropertiesMap(htableDesc.getFamily(familyName).getValues());
families.add(new Pair<byte[],Map<String,Object>>(familyName, familyProps));
}
// Always create default column family, because we don't know in advance if we'll
// need it for an index with no covered columns.
families.add(new Pair<byte[],Map<String,Object>>(table.getDefaultFamilyName().getBytes(), Collections.<String,Object>emptyMap()));
}
byte[][] splits = null;
if (table.getBucketNum() != null) {
splits = SaltingUtil.getSalteByteSplitPoints(table.getBucketNum());
}
ensureViewIndexTableCreated(physicalTableName, tableProps, families, splits, timestamp);
}
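    /**
     * Adds columns to an existing table: separates the statement properties into HBase table,
     * column family, and Phoenix properties, pushes any resulting descriptor changes (including a
     * non-transactional to transactional transition), and then invokes the MetaData coprocessor.
     * If the metadata update fails after descriptors were pushed for a transactional transition,
     * the original descriptors are restored.
     */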
@Override
public MetaDataMutationResult addColumn(final List<Mutation> tableMetaData, PTable table, Map<String, List<Pair<String,Object>>> stmtProperties, Set<String> colFamiliesForPColumnsToBeAdded) throws SQLException {
List<Pair<byte[], Map<String, Object>>> families = new ArrayList<>(stmtProperties.size());
Map<String, Object> tableProps = new HashMap<String, Object>();
Set<HTableDescriptor> tableDescriptors = Collections.emptySet();
Set<HTableDescriptor> origTableDescriptors = Collections.emptySet();
boolean nonTxToTx = false;
Pair<HTableDescriptor,HTableDescriptor> tableDescriptorPair = separateAndValidateProperties(table, stmtProperties, colFamiliesForPColumnsToBeAdded, families, tableProps);
HTableDescriptor tableDescriptor = tableDescriptorPair.getSecond();
HTableDescriptor origTableDescriptor = tableDescriptorPair.getFirst();
if (tableDescriptor != null) {
tableDescriptors = Sets.newHashSetWithExpectedSize(3 + table.getIndexes().size());
origTableDescriptors = Sets.newHashSetWithExpectedSize(3 + table.getIndexes().size());
tableDescriptors.add(tableDescriptor);
origTableDescriptors.add(origTableDescriptor);
nonTxToTx = Boolean.TRUE.equals(tableProps.get(TxConstants.READ_NON_TX_DATA));
/*
* If the table was transitioned from non transactional to transactional, we need
* to also transition the index tables.
*/
if (nonTxToTx) {
updateDescriptorForTx(table, tableProps, tableDescriptor, Boolean.TRUE.toString(), tableDescriptors, origTableDescriptors);
}
}
boolean success = false;
boolean metaDataUpdated = !tableDescriptors.isEmpty();
boolean pollingNeeded = !(!tableProps.isEmpty() && families.isEmpty() && colFamiliesForPColumnsToBeAdded.isEmpty());
MetaDataMutationResult result = null;
try {
boolean modifyHTable = true;
if (table.getType() == PTableType.VIEW) {
boolean canViewsAddNewCF = props.getBoolean(QueryServices.ALLOW_VIEWS_ADD_NEW_CF_BASE_TABLE,
QueryServicesOptions.DEFAULT_ALLOW_VIEWS_ADD_NEW_CF_BASE_TABLE);
// When adding a column to a view, base physical table should only be modified when new column families are being added.
modifyHTable = canViewsAddNewCF && !existingColumnFamiliesForBaseTable(table.getPhysicalName()).containsAll(colFamiliesForPColumnsToBeAdded);
}
if (modifyHTable) {
sendHBaseMetaData(tableDescriptors, pollingNeeded);
}
// Special case for call during drop table to ensure that the empty column family exists.
            // In this case, we only include the table header row, as until we add schemaBytes and tableBytes
// as args to this function, we have no way of getting them in this case.
// TODO: change to if (tableMetaData.isEmpty()) once we pass through schemaBytes and tableBytes
// Also, could be used to update property values on ALTER TABLE t SET prop=xxx
if ((tableMetaData.isEmpty()) || (tableMetaData.size() == 1 && tableMetaData.get(0).isEmpty())) {
return new MetaDataMutationResult(MutationCode.NO_OP, System.currentTimeMillis(), table);
}
byte[][] rowKeyMetaData = new byte[3][];
PTableType tableType = table.getType();
Mutation m = tableMetaData.get(0);
byte[] rowKey = m.getRow();
SchemaUtil.getVarChars(rowKey, rowKeyMetaData);
byte[] tenantIdBytes = rowKeyMetaData[PhoenixDatabaseMetaData.TENANT_ID_INDEX];
byte[] schemaBytes = rowKeyMetaData[PhoenixDatabaseMetaData.SCHEMA_NAME_INDEX];
byte[] tableBytes = rowKeyMetaData[PhoenixDatabaseMetaData.TABLE_NAME_INDEX];
byte[] tableKey = SchemaUtil.getTableKey(tenantIdBytes, schemaBytes, tableBytes);
ImmutableBytesWritable ptr = new ImmutableBytesWritable();
result = metaDataCoprocessorExec(tableKey,
new Batch.Call<MetaDataService, MetaDataResponse>() {
@Override
public MetaDataResponse call(MetaDataService instance) throws IOException {
ServerRpcController controller = new ServerRpcController();
BlockingRpcCallback<MetaDataResponse> rpcCallback =
new BlockingRpcCallback<MetaDataResponse>();
AddColumnRequest.Builder builder = AddColumnRequest.newBuilder();
for (Mutation m : tableMetaData) {
MutationProto mp = ProtobufUtil.toProto(m);
builder.addTableMetadataMutations(mp.toByteString());
}
builder.setClientVersion(VersionUtil.encodeVersion(PHOENIX_MAJOR_VERSION, PHOENIX_MINOR_VERSION, PHOENIX_PATCH_NUMBER));
instance.addColumn(controller, builder.build(), rpcCallback);
if(controller.getFailedOn() != null) {
throw controller.getFailedOn();
}
return rpcCallback.get();
}
});
if (result.getMutationCode() == MutationCode.COLUMN_NOT_FOUND || result.getMutationCode() == MutationCode.TABLE_ALREADY_EXISTS) { // Success
success = true;
// Flush the table if transitioning DISABLE_WAL from TRUE to FALSE
if ( MetaDataUtil.getMutationValue(m,PhoenixDatabaseMetaData.DISABLE_WAL_BYTES, kvBuilder, ptr)
&& Boolean.FALSE.equals(PBoolean.INSTANCE.toObject(ptr))) {
flushTable(table.getPhysicalName().getBytes());
}
if (tableType == PTableType.TABLE) {
// If we're changing MULTI_TENANT to true or false, create or drop the view index table
if (MetaDataUtil.getMutationValue(m, PhoenixDatabaseMetaData.MULTI_TENANT_BYTES, kvBuilder, ptr)){
long timestamp = MetaDataUtil.getClientTimeStamp(m);
if (Boolean.TRUE.equals(PBoolean.INSTANCE.toObject(ptr.get(), ptr.getOffset(), ptr.getLength()))) {
this.ensureViewIndexTableCreated(table, timestamp);
} else {
this.ensureViewIndexTableDropped(table.getPhysicalName().getBytes(), timestamp);
}
}
}
}
} finally {
// If we weren't successful with our metadata update
// and we've already pushed the HBase metadata changes to the server
// and we've tried to go from non transactional to transactional
// then we must undo the metadata change otherwise the table will
// no longer function correctly.
// Note that if this fails, we're in a corrupt state.
if (!success && metaDataUpdated && nonTxToTx) {
sendHBaseMetaData(origTableDescriptors, pollingNeeded);
}
}
return result;
}
private void updateDescriptorForTx(PTable table, Map<String, Object> tableProps, HTableDescriptor tableDescriptor,
String txValue, Set<HTableDescriptor> descriptorsToUpdate, Set<HTableDescriptor> origDescriptors) throws SQLException {
HBaseAdmin admin = null;
byte[] physicalTableName = table.getPhysicalName().getBytes();
try {
admin = new HBaseAdmin(config);
setTransactional(tableDescriptor, table.getType(), txValue, tableProps);
Map<String, Object> indexTableProps;
if (txValue == null) {
indexTableProps = Collections.<String,Object>emptyMap();
} else {
indexTableProps = Maps.newHashMapWithExpectedSize(1);
indexTableProps.put(TxConstants.READ_NON_TX_DATA, Boolean.valueOf(txValue));
}
for (PTable index : table.getIndexes()) {
HTableDescriptor indexDescriptor = admin.getTableDescriptor(index.getPhysicalName().getBytes());
origDescriptors.add(indexDescriptor);
indexDescriptor = new HTableDescriptor(indexDescriptor);
descriptorsToUpdate.add(indexDescriptor);
if (index.getColumnFamilies().isEmpty()) {
byte[] dataFamilyName = SchemaUtil.getEmptyColumnFamily(table);
byte[] indexFamilyName = SchemaUtil.getEmptyColumnFamily(index);
HColumnDescriptor indexColDescriptor = indexDescriptor.getFamily(indexFamilyName);
HColumnDescriptor tableColDescriptor = tableDescriptor.getFamily(dataFamilyName);
indexColDescriptor.setMaxVersions(tableColDescriptor.getMaxVersions());
indexColDescriptor.setValue(TxConstants.PROPERTY_TTL, tableColDescriptor.getValue(TxConstants.PROPERTY_TTL));
} else {
for (PColumnFamily family : index.getColumnFamilies()) {
byte[] familyName = family.getName().getBytes();
indexDescriptor.getFamily(familyName).setMaxVersions(tableDescriptor.getFamily(familyName).getMaxVersions());
HColumnDescriptor indexColDescriptor = indexDescriptor.getFamily(familyName);
HColumnDescriptor tableColDescriptor = tableDescriptor.getFamily(familyName);
indexColDescriptor.setMaxVersions(tableColDescriptor.getMaxVersions());
indexColDescriptor.setValue(TxConstants.PROPERTY_TTL, tableColDescriptor.getValue(TxConstants.PROPERTY_TTL));
}
}
setTransactional(indexDescriptor, index.getType(), txValue, indexTableProps);
}
try {
HTableDescriptor indexDescriptor = admin.getTableDescriptor(MetaDataUtil.getViewIndexPhysicalName(physicalTableName));
origDescriptors.add(indexDescriptor);
indexDescriptor = new HTableDescriptor(indexDescriptor);
descriptorsToUpdate.add(indexDescriptor);
setSharedIndexMaxVersion(table, tableDescriptor, indexDescriptor);
setTransactional(indexDescriptor, PTableType.INDEX, txValue, indexTableProps);
} catch (org.apache.hadoop.hbase.TableNotFoundException ignore) {
// Ignore, as we may never have created a view index table
}
try {
HTableDescriptor indexDescriptor = admin.getTableDescriptor(MetaDataUtil.getLocalIndexPhysicalName(physicalTableName));
origDescriptors.add(indexDescriptor);
indexDescriptor = new HTableDescriptor(indexDescriptor);
descriptorsToUpdate.add(indexDescriptor);
setSharedIndexMaxVersion(table, tableDescriptor, indexDescriptor);
setTransactional(indexDescriptor, PTableType.INDEX, txValue, indexTableProps);
} catch (org.apache.hadoop.hbase.TableNotFoundException ignore) {
                // Ignore, as we may never have created a local index table
}
} catch (IOException e) {
throw ServerUtil.parseServerException(e);
} finally {
try {
if (admin != null) admin.close();
} catch (IOException e) {
logger.warn("Could not close admin",e);
}
}
}
private void setSharedIndexMaxVersion(PTable table, HTableDescriptor tableDescriptor,
HTableDescriptor indexDescriptor) {
if (table.getColumnFamilies().isEmpty()) {
byte[] familyName = SchemaUtil.getEmptyColumnFamily(table);
HColumnDescriptor indexColDescriptor = indexDescriptor.getFamily(familyName);
HColumnDescriptor tableColDescriptor = tableDescriptor.getFamily(familyName);
indexColDescriptor.setMaxVersions(tableColDescriptor.getMaxVersions());
indexColDescriptor.setValue(TxConstants.PROPERTY_TTL, tableColDescriptor.getValue(TxConstants.PROPERTY_TTL));
} else {
for (PColumnFamily family : table.getColumnFamilies()) {
byte[] familyName = family.getName().getBytes();
HColumnDescriptor indexColDescriptor = indexDescriptor.getFamily(familyName);
if (indexColDescriptor != null) {
HColumnDescriptor tableColDescriptor = tableDescriptor.getFamily(familyName);
indexColDescriptor.setMaxVersions(tableColDescriptor.getMaxVersions());
indexColDescriptor.setValue(TxConstants.PROPERTY_TTL, tableColDescriptor.getValue(TxConstants.PROPERTY_TTL));
}
}
}
}
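    /**
     * Pushes each modified table descriptor to HBase, optionally polling until the change is
     * visible, and translates IO, interrupt, and timeout failures into SQLExceptions.
     */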
private void sendHBaseMetaData(Set<HTableDescriptor> tableDescriptors, boolean pollingNeeded) throws SQLException {
SQLException sqlE = null;
for (HTableDescriptor descriptor : tableDescriptors) {
try {
modifyTable(descriptor.getName(), descriptor, pollingNeeded);
} catch (IOException e) {
sqlE = ServerUtil.parseServerException(e);
} catch (InterruptedException e) {
// restore the interrupt status
Thread.currentThread().interrupt();
sqlE = new SQLExceptionInfo.Builder(SQLExceptionCode.INTERRUPTED_EXCEPTION).setRootCause(e).build().buildException();
} catch (TimeoutException e) {
sqlE = new SQLExceptionInfo.Builder(SQLExceptionCode.OPERATION_TIMED_OUT).setRootCause(e.getCause() != null ? e.getCause() : e).build().buildException();
} finally {
if (sqlE != null) {
throw sqlE;
}
}
}
}
private void setTransactional(HTableDescriptor tableDescriptor, PTableType tableType, String txValue, Map<String, Object> tableProps) throws SQLException {
if (txValue == null) {
tableDescriptor.remove(TxConstants.READ_NON_TX_DATA);
} else {
tableDescriptor.setValue(TxConstants.READ_NON_TX_DATA, txValue);
}
this.addCoprocessors(tableDescriptor.getName(), tableDescriptor, tableType, tableProps);
}
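    /**
     * Splits ALTER TABLE properties into HBase table properties, per-column-family properties,
     * and Phoenix table properties, validates them against the table type and the columns being
     * added, and returns the original and updated HTableDescriptor (both null when no HBase-level
     * change is required).
     */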
private Pair<HTableDescriptor,HTableDescriptor> separateAndValidateProperties(PTable table, Map<String, List<Pair<String, Object>>> properties, Set<String> colFamiliesForPColumnsToBeAdded, List<Pair<byte[], Map<String, Object>>> families, Map<String, Object> tableProps) throws SQLException {
Map<String, Map<String, Object>> stmtFamiliesPropsMap = new HashMap<>(properties.size());
Map<String,Object> commonFamilyProps = new HashMap<>();
boolean addingColumns = colFamiliesForPColumnsToBeAdded != null && !colFamiliesForPColumnsToBeAdded.isEmpty();
HashSet<String> existingColumnFamilies = existingColumnFamilies(table);
Map<String, Map<String, Object>> allFamiliesProps = new HashMap<>(existingColumnFamilies.size());
boolean isTransactional = table.isTransactional();
boolean willBeTransactional = false;
boolean isOrWillBeTransactional = isTransactional;
Integer newTTL = null;
for (String family : properties.keySet()) {
List<Pair<String, Object>> propsList = properties.get(family);
if (propsList != null && propsList.size() > 0) {
Map<String, Object> colFamilyPropsMap = new HashMap<String, Object>(propsList.size());
for (Pair<String, Object> prop : propsList) {
String propName = prop.getFirst();
Object propValue = prop.getSecond();
if ((isHTableProperty(propName) || TableProperty.isPhoenixTableProperty(propName)) && addingColumns) {
// setting HTable and PhoenixTable properties while adding a column is not allowed.
throw new SQLExceptionInfo.Builder(SQLExceptionCode.CANNOT_SET_TABLE_PROPERTY_ADD_COLUMN)
.setMessage("Property: " + propName).build()
.buildException();
}
if (isHTableProperty(propName)) {
// Can't have a column family name for a property that's an HTableProperty
if (!family.equals(QueryConstants.ALL_FAMILY_PROPERTIES_KEY)) {
throw new SQLExceptionInfo.Builder(SQLExceptionCode.COLUMN_FAMILY_NOT_ALLOWED_TABLE_PROPERTY)
.setMessage("Column Family: " + family + ", Property: " + propName).build()
.buildException();
}
tableProps.put(propName, propValue);
} else {
if (TableProperty.isPhoenixTableProperty(propName)) {
TableProperty.valueOf(propName).validate(true, !family.equals(QueryConstants.ALL_FAMILY_PROPERTIES_KEY), table.getType());
if (propName.equals(TTL)) {
newTTL = ((Number)prop.getSecond()).intValue();
// Even though TTL is really a HColumnProperty we treat it specially.
// We enforce that all column families have the same TTL.
commonFamilyProps.put(propName, prop.getSecond());
} else if (propName.equals(PhoenixDatabaseMetaData.TRANSACTIONAL) && Boolean.TRUE.equals(propValue)) {
willBeTransactional = isOrWillBeTransactional = true;
tableProps.put(TxConstants.READ_NON_TX_DATA, propValue);
}
} else {
if (isHColumnProperty(propName)) {
if (family.equals(QueryConstants.ALL_FAMILY_PROPERTIES_KEY)) {
commonFamilyProps.put(propName, propValue);
} else {
colFamilyPropsMap.put(propName, propValue);
}
} else {
                            // Invalid property - not an HTableProp, HColumnProp, or PhoenixTableProp.
                            // FIXME: This isn't getting triggered, as currently a property gets evaluated
                            // as an HTableProp if it's neither an HColumnProp nor a PhoenixTableProp.
throw new SQLExceptionInfo.Builder(SQLExceptionCode.CANNOT_ALTER_PROPERTY)
.setMessage("Column Family: " + family + ", Property: " + propName).build()
.buildException();
}
}
}
}
if (!colFamilyPropsMap.isEmpty()) {
stmtFamiliesPropsMap.put(family, colFamilyPropsMap);
}
}
}
commonFamilyProps = Collections.unmodifiableMap(commonFamilyProps);
boolean isAddingPkColOnly = colFamiliesForPColumnsToBeAdded.size() == 1 && colFamiliesForPColumnsToBeAdded.contains(null);
if (!commonFamilyProps.isEmpty()) {
if (!addingColumns) {
// Add the common family props to all existing column families
for (String existingColFamily : existingColumnFamilies) {
Map<String, Object> m = new HashMap<String, Object>(commonFamilyProps.size());
m.putAll(commonFamilyProps);
allFamiliesProps.put(existingColFamily, m);
}
} else {
// Add the common family props to the column families of the columns being added
for (String colFamily : colFamiliesForPColumnsToBeAdded) {
if (colFamily != null) {
// only set properties for key value columns
Map<String, Object> m = new HashMap<String, Object>(commonFamilyProps.size());
m.putAll(commonFamilyProps);
allFamiliesProps.put(colFamily, m);
} else if (isAddingPkColOnly) {
// Setting HColumnProperty for a pk column is invalid
// because it will be part of the row key and not a key value column family.
// However, if both pk cols as well as key value columns are getting added
// together, then its allowed. The above if block will make sure that we add properties
// only for the kv cols and not pk cols.
throw new SQLExceptionInfo.Builder(SQLExceptionCode.SET_UNSUPPORTED_PROP_ON_ALTER_TABLE)
.build().buildException();
}
}
}
}
// Now go through the column family properties specified in the statement
// and merge them with the common family properties.
for (String f : stmtFamiliesPropsMap.keySet()) {
if (!addingColumns && !existingColumnFamilies.contains(f)) {
throw new ColumnFamilyNotFoundException(f);
}
if (addingColumns && !colFamiliesForPColumnsToBeAdded.contains(f)) {
throw new SQLExceptionInfo.Builder(SQLExceptionCode.CANNOT_SET_PROPERTY_FOR_COLUMN_NOT_ADDED).build().buildException();
}
Map<String, Object> commonProps = allFamiliesProps.get(f);
Map<String, Object> stmtProps = stmtFamiliesPropsMap.get(f);
if (commonProps != null) {
if (stmtProps != null) {
// merge common props with statement props for the family
commonProps.putAll(stmtProps);
}
} else {
// if no common props were specified, then assign family specific props
if (stmtProps != null) {
allFamiliesProps.put(f, stmtProps);
}
}
}
// case when there is a column family being added but there are no props
// For ex - in DROP COLUMN when a new empty CF needs to be added since all
// the columns of the existing empty CF are getting dropped. Or the case
// when one is just adding a column for a column family like this:
// ALTER TABLE ADD CF.COL
for (String cf : colFamiliesForPColumnsToBeAdded) {
if (cf != null && allFamiliesProps.get(cf) == null) {
allFamiliesProps.put(cf, new HashMap<String, Object>());
}
}
if (table.getColumnFamilies().isEmpty() && !addingColumns && !commonFamilyProps.isEmpty()) {
allFamiliesProps.put(Bytes.toString(table.getDefaultFamilyName() == null ? QueryConstants.DEFAULT_COLUMN_FAMILY_BYTES : table.getDefaultFamilyName().getBytes() ), commonFamilyProps);
}
// Views are not allowed to have any of these properties.
if (table.getType() == PTableType.VIEW && (!stmtFamiliesPropsMap.isEmpty() || !commonFamilyProps.isEmpty() || !tableProps.isEmpty())) {
throw new SQLExceptionInfo.Builder(SQLExceptionCode.VIEW_WITH_PROPERTIES).build()
.buildException();
}
HTableDescriptor newTableDescriptor = null;
HTableDescriptor origTableDescriptor = null;
if (!allFamiliesProps.isEmpty() || !tableProps.isEmpty()) {
byte[] tableNameBytes = Bytes.toBytes(table.getPhysicalName().getString());
HTableDescriptor existingTableDescriptor = origTableDescriptor = getTableDescriptor(tableNameBytes);
newTableDescriptor = new HTableDescriptor(existingTableDescriptor);
if (!tableProps.isEmpty()) {
// add all the table properties to the existing table descriptor
for (Entry<String, Object> entry : tableProps.entrySet()) {
newTableDescriptor.setValue(entry.getKey(), entry.getValue() != null ? entry.getValue().toString() : null);
}
}
if (addingColumns) {
// Make sure that all the CFs of the table have the same TTL as the empty CF.
setTTLForNewCFs(allFamiliesProps, table, newTableDescriptor, newTTL);
}
// Set TTL on all table column families, even if they're not referenced here
if (newTTL != null) {
for (PColumnFamily family : table.getColumnFamilies()) {
if (!allFamiliesProps.containsKey(family.getName().getString())) {
Map<String,Object> familyProps = Maps.newHashMapWithExpectedSize(1);
familyProps.put(TTL, newTTL);
allFamiliesProps.put(family.getName().getString(), familyProps);
}
}
}
Integer defaultTxMaxVersions = null;
if (isOrWillBeTransactional) {
// Calculate default for max versions
Map<String, Object> emptyFamilyProps = allFamiliesProps.get(SchemaUtil.getEmptyColumnFamilyAsString(table));
if (emptyFamilyProps != null) {
defaultTxMaxVersions = (Integer)emptyFamilyProps.get(HConstants.VERSIONS);
}
if (defaultTxMaxVersions == null) {
if (isTransactional) {
defaultTxMaxVersions = newTableDescriptor.getFamily(SchemaUtil.getEmptyColumnFamily(table)).getMaxVersions();
} else {
defaultTxMaxVersions =
this.getProps().getInt(
QueryServices.MAX_VERSIONS_TRANSACTIONAL_ATTRIB,
QueryServicesOptions.DEFAULT_MAX_VERSIONS_TRANSACTIONAL);
}
}
if (willBeTransactional) {
// Set VERSIONS for all column families when transitioning to transactional
for (PColumnFamily family : table.getColumnFamilies()) {
if (!allFamiliesProps.containsKey(family.getName().getString())) {
Map<String,Object> familyProps = Maps.newHashMapWithExpectedSize(1);
familyProps.put(HConstants.VERSIONS, defaultTxMaxVersions);
allFamiliesProps.put(family.getName().getString(), familyProps);
}
}
}
}
// Set Tephra's TTL property based on HBase property if we're
// transitioning to become transactional or setting TTL on
// an already transactional table.
if (isOrWillBeTransactional) {
int ttl = getTTL(table, newTableDescriptor, newTTL);
if (ttl != HColumnDescriptor.DEFAULT_TTL) {
for (Map.Entry<String, Map<String, Object>> entry : allFamiliesProps.entrySet()) {
Map<String, Object> props = entry.getValue();
if (props == null) {
props = new HashMap<String, Object>();
}
props.put(TxConstants.PROPERTY_TTL, ttl);
// Remove HBase TTL if we're not transitioning an existing table to become transactional
// or if the existing transactional table wasn't originally non transactional.
if (!willBeTransactional && !Boolean.valueOf(newTableDescriptor.getValue(TxConstants.READ_NON_TX_DATA))) {
props.remove(TTL);
}
}
}
}
for (Entry<String, Map<String, Object>> entry : allFamiliesProps.entrySet()) {
Map<String,Object> familyProps = entry.getValue();
if (isOrWillBeTransactional) {
if (!familyProps.containsKey(HConstants.VERSIONS)) {
familyProps.put(HConstants.VERSIONS, defaultTxMaxVersions);
}
}
byte[] cf = Bytes.toBytes(entry.getKey());
HColumnDescriptor colDescriptor = newTableDescriptor.getFamily(cf);
if (colDescriptor == null) {
// new column family
colDescriptor = generateColumnFamilyDescriptor(new Pair<>(cf, familyProps), table.getType());
newTableDescriptor.addFamily(colDescriptor);
} else {
modifyColumnFamilyDescriptor(colDescriptor, familyProps);
}
if (isOrWillBeTransactional) {
checkTransactionalVersionsValue(colDescriptor);
}
}
}
return new Pair<>(origTableDescriptor, newTableDescriptor);
}
private void checkTransactionalVersionsValue(HColumnDescriptor colDescriptor) throws SQLException {
int maxVersions = colDescriptor.getMaxVersions();
if (maxVersions <= 1) {
throw new SQLExceptionInfo.Builder(SQLExceptionCode.TX_MAX_VERSIONS_MUST_BE_GREATER_THAN_ONE)
.setFamilyName(colDescriptor.getNameAsString())
.build().buildException();
}
}
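    // Returns true if the property name is a recognized HColumnDescriptor (column family level) property.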
private boolean isHColumnProperty(String propName) {
return HColumnDescriptor.getDefaultValues().containsKey(propName);
}
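    // Anything that is neither a column family property nor a Phoenix table property is treated as an HTableDescriptor property.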
private boolean isHTableProperty(String propName) {
return !isHColumnProperty(propName) && !TableProperty.isPhoenixTableProperty(propName);
}
private HashSet<String> existingColumnFamiliesForBaseTable(PName baseTableName) throws TableNotFoundException {
synchronized (latestMetaDataLock) {
throwConnectionClosedIfNullMetaData();
PTable table = latestMetaData.getTableRef(new PTableKey(null, baseTableName.getString())).getTable();
latestMetaDataLock.notifyAll();
return existingColumnFamilies(table);
}
}
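    // Collects the names of all column families currently defined on the given table.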
private HashSet<String> existingColumnFamilies(PTable table) {
List<PColumnFamily> cfs = table.getColumnFamilies();
HashSet<String> cfNames = new HashSet<>(cfs.size());
for (PColumnFamily cf : table.getColumnFamilies()) {
cfNames.add(cf.getName().getString());
}
return cfNames;
}
private static int getTTL(PTable table, HTableDescriptor tableDesc, Integer newTTL) throws SQLException {
// If we're setting TTL now, then use that value. Otherwise, use empty column family value
int ttl = newTTL != null ? newTTL
: tableDesc.getFamily(SchemaUtil.getEmptyColumnFamily(table)).getTimeToLive();
return ttl;
}
private static void setTTLForNewCFs(Map<String, Map<String, Object>> familyProps, PTable table,
HTableDescriptor tableDesc, Integer newTTL) throws SQLException {
if (!familyProps.isEmpty()) {
int ttl = getTTL(table, tableDesc, newTTL);
for (Map.Entry<String, Map<String, Object>> entry : familyProps.entrySet()) {
Map<String, Object> props = entry.getValue();
if (props == null) {
props = new HashMap<String, Object>();
}
props.put(TTL, ttl);
}
}
}
@Override
public MetaDataMutationResult dropColumn(final List<Mutation> tableMetaData, PTableType tableType) throws SQLException {
byte[][] rowKeyMetadata = new byte[3][];
SchemaUtil.getVarChars(tableMetaData.get(0).getRow(), rowKeyMetadata);
byte[] tenantIdBytes = rowKeyMetadata[PhoenixDatabaseMetaData.TENANT_ID_INDEX];
byte[] schemaBytes = rowKeyMetadata[PhoenixDatabaseMetaData.SCHEMA_NAME_INDEX];
byte[] tableBytes = rowKeyMetadata[PhoenixDatabaseMetaData.TABLE_NAME_INDEX];
byte[] tableKey = SchemaUtil.getTableKey(tenantIdBytes, schemaBytes, tableBytes);
MetaDataMutationResult result = metaDataCoprocessorExec(tableKey,
new Batch.Call<MetaDataService, MetaDataResponse>() {
@Override
public MetaDataResponse call(MetaDataService instance) throws IOException {
ServerRpcController controller = new ServerRpcController();
BlockingRpcCallback<MetaDataResponse> rpcCallback =
new BlockingRpcCallback<MetaDataResponse>();
DropColumnRequest.Builder builder = DropColumnRequest.newBuilder();
for (Mutation m : tableMetaData) {
MutationProto mp = ProtobufUtil.toProto(m);
builder.addTableMetadataMutations(mp.toByteString());
}
builder.setClientVersion(VersionUtil.encodeVersion(PHOENIX_MAJOR_VERSION, PHOENIX_MINOR_VERSION, PHOENIX_PATCH_NUMBER));
instance.dropColumn(controller, builder.build(), rpcCallback);
if(controller.getFailedOn() != null) {
throw controller.getFailedOn();
}
return rpcCallback.get();
}
});
final MutationCode code = result.getMutationCode();
switch(code) {
case TABLE_ALREADY_EXISTS:
final ReadOnlyProps props = this.getProps();
final boolean dropMetadata = props.getBoolean(DROP_METADATA_ATTRIB, DEFAULT_DROP_METADATA);
if (dropMetadata) {
dropTables(result.getTableNamesToDelete());
}
invalidateTables(result.getTableNamesToDelete());
break;
default:
break;
}
return result;
}
/**
* This closes the passed connection.
*/
private PhoenixConnection addColumn(PhoenixConnection oldMetaConnection, String tableName, long timestamp, String columns, boolean addIfNotExists) throws SQLException {
Properties props = PropertiesUtil.deepCopy(oldMetaConnection.getClientInfo());
props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(timestamp));
// Cannot go through DriverManager or you end up in an infinite loop because it'll call init again
PhoenixConnection metaConnection = new PhoenixConnection(oldMetaConnection, this, props);
SQLException sqlE = null;
try {
metaConnection.createStatement().executeUpdate("ALTER TABLE " + tableName + " ADD " + (addIfNotExists ? " IF NOT EXISTS " : "") + columns );
} catch (SQLException e) {
logger.warn("Add column failed due to:" + e);
sqlE = e;
} finally {
try {
oldMetaConnection.close();
} catch (SQLException e) {
if (sqlE != null) {
sqlE.setNextException(e);
} else {
sqlE = e;
}
}
if (sqlE != null) {
throw sqlE;
}
}
return metaConnection;
}
/**
* Keeping this to use for further upgrades. This method closes the oldMetaConnection.
*/
private PhoenixConnection addColumnsIfNotExists(PhoenixConnection oldMetaConnection,
String tableName, long timestamp, String columns) throws SQLException {
return addColumn(oldMetaConnection, tableName, timestamp, columns, true);
}
@Override
public void init(final String url, final Properties props) throws SQLException {
try {
PhoenixContextExecutor.call(new Callable<Void>() {
@Override
public Void call() throws Exception {
if (initialized) {
if (initializationException != null) {
                            // Throw previous initialization exception, as we won't reuse this instance
throw initializationException;
}
return null;
}
synchronized (ConnectionQueryServicesImpl.this) {
if (initialized) {
if (initializationException != null) {
                                // Throw previous initialization exception, as we won't reuse this instance
throw initializationException;
}
return null;
}
checkClosed();
PhoenixConnection metaConnection = null;
try {
openConnection();
Properties scnProps = PropertiesUtil.deepCopy(props);
scnProps.setProperty(
PhoenixRuntime.CURRENT_SCN_ATTRIB,
Long.toString(MetaDataProtocol.MIN_SYSTEM_TABLE_TIMESTAMP));
scnProps.remove(PhoenixRuntime.TENANT_ID_ATTRIB);
String globalUrl = JDBCUtil.removeProperty(url, PhoenixRuntime.TENANT_ID_ATTRIB);
metaConnection = new PhoenixConnection(
ConnectionQueryServicesImpl.this, globalUrl, scnProps, newEmptyMetaData());
try {
metaConnection.createStatement().executeUpdate(QueryConstants.CREATE_TABLE_METADATA);
} catch (NewerTableAlreadyExistsException ignore) {
// Ignore, as this will happen if the SYSTEM.CATALOG already exists at this fixed timestamp.
// A TableAlreadyExistsException is not thrown, since the table only exists *after* this fixed timestamp.
} catch (TableAlreadyExistsException e) {
// This will occur if we have an older SYSTEM.CATALOG and we need to update it to include
// any new columns we've added.
long currentServerSideTableTimeStamp = e.getTable().getTimeStamp();
String columnsToAdd = "";
if(currentServerSideTableTimeStamp < MetaDataProtocol.MIN_SYSTEM_TABLE_TIMESTAMP_4_3_0) {
// We know that we always need to add the STORE_NULLS column for 4.3 release
columnsToAdd += "," + PhoenixDatabaseMetaData.STORE_NULLS + " " + PBoolean.INSTANCE.getSqlTypeName();
HBaseAdmin admin = null;
try {
admin = getAdmin();
HTableDescriptor[] localIndexTables = admin.listTables(MetaDataUtil.LOCAL_INDEX_TABLE_PREFIX+".*");
for (HTableDescriptor table : localIndexTables) {
if (table.getValue(MetaDataUtil.PARENT_TABLE_KEY) == null
&& table.getValue(MetaDataUtil.IS_LOCAL_INDEX_TABLE_PROP_NAME) != null) {
table.setValue(MetaDataUtil.PARENT_TABLE_KEY,
MetaDataUtil.getUserTableName(table
.getNameAsString()));
// Explicitly disable, modify and enable the table to ensure co-location of data
                                            // and index regions. If we just modify the table descriptor while online schema
                                            // change is enabled, the region may be reopened on the same region server instead of following the data region.
admin.disableTable(table.getTableName());
admin.modifyTable(table.getTableName(), table);
admin.enableTable(table.getTableName());
}
}
} finally {
if (admin != null) admin.close();
}
}
// If the server side schema is before MIN_SYSTEM_TABLE_TIMESTAMP_4_1_0 then
// we need to add INDEX_TYPE and INDEX_DISABLE_TIMESTAMP columns too.
// TODO: Once https://issues.apache.org/jira/browse/PHOENIX-1614 is fixed,
                                // we should just have an ALTER TABLE ADD IF NOT EXISTS statement with all
// the column names that have been added to SYSTEM.CATALOG since 4.0.
if (currentServerSideTableTimeStamp < MetaDataProtocol.MIN_SYSTEM_TABLE_TIMESTAMP_4_1_0) {
columnsToAdd += ", " + PhoenixDatabaseMetaData.INDEX_TYPE + " " + PUnsignedTinyint.INSTANCE.getSqlTypeName()
+ ", " + PhoenixDatabaseMetaData.INDEX_DISABLE_TIMESTAMP + " " + PLong.INSTANCE.getSqlTypeName();
}
// If we have some new columns from 4.1-4.3 to add, add them now.
if (!columnsToAdd.isEmpty()) {
// Ugh..need to assign to another local variable to keep eclipse happy.
PhoenixConnection newMetaConnection = addColumnsIfNotExists(metaConnection,
PhoenixDatabaseMetaData.SYSTEM_CATALOG,
MetaDataProtocol.MIN_SYSTEM_TABLE_TIMESTAMP_4_3_0, columnsToAdd);
metaConnection = newMetaConnection;
}
if (currentServerSideTableTimeStamp < MetaDataProtocol.MIN_SYSTEM_TABLE_TIMESTAMP_4_5_0) {
columnsToAdd = PhoenixDatabaseMetaData.BASE_COLUMN_COUNT + " "
+ PInteger.INSTANCE.getSqlTypeName();
try {
metaConnection = addColumn(metaConnection, PhoenixDatabaseMetaData.SYSTEM_CATALOG,
MetaDataProtocol.MIN_SYSTEM_TABLE_TIMESTAMP_4_5_0, columnsToAdd, false);
upgradeTo4_5_0(metaConnection);
} catch (ColumnAlreadyExistsException ignored) {
/*
* Upgrade to 4.5 is a slightly special case. We use the fact that the column
* BASE_COLUMN_COUNT is already part of the meta-data schema as the signal that
* the server side upgrade has finished or is in progress.
*/
logger.debug("No need to run 4.5 upgrade");
}
Properties props = PropertiesUtil.deepCopy(metaConnection.getClientInfo());
props.remove(PhoenixRuntime.CURRENT_SCN_ATTRIB);
props.remove(PhoenixRuntime.TENANT_ID_ATTRIB);
PhoenixConnection conn = new PhoenixConnection(ConnectionQueryServicesImpl.this, metaConnection.getURL(), props, metaConnection.getMetaDataCache());
try {
List<String> tablesNeedingUpgrade = UpgradeUtil.getPhysicalTablesWithDescRowKey(conn);
if (!tablesNeedingUpgrade.isEmpty()) {
logger.warn("The following tables require upgrade due to a bug causing the row key to be incorrect for descending columns and ascending BINARY columns (PHOENIX-2067 and PHOENIX-2120):\n" + Joiner.on(' ').join(tablesNeedingUpgrade) + "\nTo upgrade issue the \"bin/psql.py -u\" command.");
}
List<String> unsupportedTables = UpgradeUtil.getPhysicalTablesWithDescVarbinaryRowKey(conn);
if (!unsupportedTables.isEmpty()) {
logger.warn("The following tables use an unsupported VARBINARY DESC construct and need to be changed:\n" + Joiner.on(' ').join(unsupportedTables));
}
} catch (Exception ex) {
logger.error("Unable to determine tables requiring upgrade due to PHOENIX-2067", ex);
} finally {
conn.close();
}
}
if (currentServerSideTableTimeStamp < MetaDataProtocol.MIN_SYSTEM_TABLE_TIMESTAMP_4_6_0) {
columnsToAdd = PhoenixDatabaseMetaData.IS_ROW_TIMESTAMP + " " + PBoolean.INSTANCE.getSqlTypeName();
metaConnection = addColumnsIfNotExists(metaConnection, PhoenixDatabaseMetaData.SYSTEM_CATALOG,
MetaDataProtocol.MIN_SYSTEM_TABLE_TIMESTAMP_4_6_0, columnsToAdd);
}
if(currentServerSideTableTimeStamp < MetaDataProtocol.MIN_SYSTEM_TABLE_TIMESTAMP_4_7_0) {
// Add these columns one at a time, each with different timestamps so that if folks have
// run the upgrade code already for a snapshot, we'll still enter this block (and do the
// parts we haven't yet done).
metaConnection = addColumnsIfNotExists(metaConnection, PhoenixDatabaseMetaData.SYSTEM_CATALOG, MetaDataProtocol.MIN_SYSTEM_TABLE_TIMESTAMP_4_7_0 - 2,
PhoenixDatabaseMetaData.TRANSACTIONAL + " " + PBoolean.INSTANCE.getSqlTypeName());
metaConnection = addColumnsIfNotExists(metaConnection, PhoenixDatabaseMetaData.SYSTEM_CATALOG, MetaDataProtocol.MIN_SYSTEM_TABLE_TIMESTAMP_4_7_0 - 1,
PhoenixDatabaseMetaData.UPDATE_CACHE_FREQUENCY + " " + PLong.INSTANCE.getSqlTypeName());
setImmutableTableIndexesImmutable(metaConnection);
// Drop old stats table so that new stats table is created
metaConnection = dropStatsTable(metaConnection,
MetaDataProtocol.MIN_SYSTEM_TABLE_TIMESTAMP_4_7_0);
// Clear the server cache so the above changes make it over to any clients
// that already have cached data.
clearCache();
}
}
int nSaltBuckets = ConnectionQueryServicesImpl.this.props.getInt(QueryServices.SEQUENCE_SALT_BUCKETS_ATTRIB,
QueryServicesOptions.DEFAULT_SEQUENCE_TABLE_SALT_BUCKETS);
try {
String createSequenceTable = Sequence.getCreateTableStatement(nSaltBuckets);
metaConnection.createStatement().executeUpdate(createSequenceTable);
nSequenceSaltBuckets = nSaltBuckets;
} catch (NewerTableAlreadyExistsException e) {
// Ignore, as this will happen if the SYSTEM.SEQUENCE already exists at this fixed timestamp.
// A TableAlreadyExistsException is not thrown, since the table only exists *after* this fixed timestamp.
nSequenceSaltBuckets = getSaltBuckets(e);
} catch (TableAlreadyExistsException e) {
// This will occur if we have an older SYSTEM.SEQUENCE and we need to update it to include
// any new columns we've added.
long currentServerSideTableTimeStamp = e.getTable().getTimeStamp();
if (currentServerSideTableTimeStamp < MetaDataProtocol.MIN_SYSTEM_TABLE_TIMESTAMP_4_1_0) {
// If the table time stamp is before 4.1.0 then we need to add below columns
// to the SYSTEM.SEQUENCE table.
String columnsToAdd = PhoenixDatabaseMetaData.MIN_VALUE + " " + PLong.INSTANCE.getSqlTypeName()
+ ", " + PhoenixDatabaseMetaData.MAX_VALUE + " " + PLong.INSTANCE.getSqlTypeName()
+ ", " + PhoenixDatabaseMetaData.CYCLE_FLAG + " " + PBoolean.INSTANCE.getSqlTypeName()
+ ", " + PhoenixDatabaseMetaData.LIMIT_REACHED_FLAG + " " + PBoolean.INSTANCE.getSqlTypeName();
addColumnsIfNotExists(metaConnection, PhoenixDatabaseMetaData.SYSTEM_CATALOG,
MetaDataProtocol.MIN_SYSTEM_TABLE_TIMESTAMP, columnsToAdd);
}
// If the table timestamp is before 4.2.1 then run the upgrade script
if (currentServerSideTableTimeStamp < MetaDataProtocol.MIN_SYSTEM_TABLE_TIMESTAMP_4_2_1) {
if (UpgradeUtil.upgradeSequenceTable(metaConnection, nSaltBuckets, e.getTable())) {
metaConnection.removeTable(null,
PhoenixDatabaseMetaData.SEQUENCE_SCHEMA_NAME,
PhoenixDatabaseMetaData.SEQUENCE_TABLE_NAME,
MetaDataProtocol.MIN_SYSTEM_TABLE_TIMESTAMP);
clearTableFromCache(ByteUtil.EMPTY_BYTE_ARRAY,
PhoenixDatabaseMetaData.SEQUENCE_SCHEMA_NAME_BYTES,
PhoenixDatabaseMetaData.SEQUENCE_TABLE_NAME_BYTES,
MetaDataProtocol.MIN_SYSTEM_TABLE_TIMESTAMP);
clearTableRegionCache(PhoenixDatabaseMetaData.SEQUENCE_FULLNAME_BYTES);
}
nSequenceSaltBuckets = nSaltBuckets;
} else {
nSequenceSaltBuckets = getSaltBuckets(e);
}
}
try {
metaConnection.createStatement().executeUpdate(
QueryConstants.CREATE_STATS_TABLE_METADATA);
} catch (NewerTableAlreadyExistsException ignore) {
} catch(TableAlreadyExistsException ignore) {
metaConnection = addColumnsIfNotExists(
metaConnection,
PhoenixDatabaseMetaData.SYSTEM_STATS_NAME,
MetaDataProtocol.MIN_SYSTEM_TABLE_TIMESTAMP,
PhoenixDatabaseMetaData.GUIDE_POSTS_ROW_COUNT + " "
+ PLong.INSTANCE.getSqlTypeName());
}
try {
metaConnection.createStatement().executeUpdate(
QueryConstants.CREATE_FUNCTION_METADATA);
} catch (NewerTableAlreadyExistsException e) {
} catch (TableAlreadyExistsException e) {
}
scheduleRenewLeaseTasks();
} catch (Exception e) {
if (e instanceof SQLException) {
initializationException = (SQLException)e;
} else {
// wrap every other exception into a SQLException
initializationException = new SQLException(e);
}
} finally {
try {
if (metaConnection != null) metaConnection.close();
} catch (SQLException e) {
if (initializationException != null) {
initializationException.setNextException(e);
} else {
initializationException = e;
}
} finally {
try {
if (initializationException != null) {
throw initializationException;
}
} finally {
initialized = true;
}
}
}
}
return null;
}
});
} catch (Exception e) {
Throwables.propagateIfInstanceOf(e, SQLException.class);
throw Throwables.propagate(e);
}
}
/**
* Set IMMUTABLE_ROWS to true for all index tables over immutable tables.
* @param metaConnection connection over which to run the upgrade
* @throws SQLException
*/
private static void setImmutableTableIndexesImmutable(PhoenixConnection metaConnection) throws SQLException {
boolean autoCommit = metaConnection.getAutoCommit();
try {
metaConnection.setAutoCommit(true);
metaConnection.createStatement().execute(
"UPSERT INTO SYSTEM.CATALOG(TENANT_ID, TABLE_SCHEM, TABLE_NAME, COLUMN_NAME, COLUMN_FAMILY, IMMUTABLE_ROWS)\n" +
"SELECT A.TENANT_ID, A.TABLE_SCHEM,B.COLUMN_FAMILY,null,null,true\n" +
"FROM SYSTEM.CATALOG A JOIN SYSTEM.CATALOG B ON (\n" +
" A.TENANT_ID = B.TENANT_ID AND \n" +
" A.TABLE_SCHEM = B.TABLE_SCHEM AND\n" +
" A.TABLE_NAME = B.TABLE_NAME AND\n" +
" A.COLUMN_NAME = B.COLUMN_NAME AND\n" +
" B.LINK_TYPE = 1\n" +
")\n" +
"WHERE A.COLUMN_FAMILY IS NULL AND\n" +
" B.COLUMN_FAMILY IS NOT NULL AND\n" +
" A.IMMUTABLE_ROWS = TRUE;");
} finally {
metaConnection.setAutoCommit(autoCommit);
}
}
private PhoenixConnection dropStatsTable(PhoenixConnection oldMetaConnection, long timestamp)
throws SQLException, IOException {
Properties props = PropertiesUtil.deepCopy(oldMetaConnection.getClientInfo());
props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(timestamp-1));
PhoenixConnection metaConnection = new PhoenixConnection(oldMetaConnection, this, props);
SQLException sqlE = null;
boolean wasCommit = metaConnection.getAutoCommit();
try {
metaConnection.setAutoCommit(true);
metaConnection.createStatement()
.executeUpdate("DELETE FROM " + PhoenixDatabaseMetaData.SYSTEM_CATALOG_NAME + " WHERE "
+ PhoenixDatabaseMetaData.TABLE_NAME + "='" + PhoenixDatabaseMetaData.SYSTEM_STATS_TABLE
+ "' AND " + PhoenixDatabaseMetaData.TABLE_SCHEM + "='"
+ PhoenixDatabaseMetaData.SYSTEM_SCHEMA_NAME + "'");
} catch (SQLException e) {
logger.warn("exception during upgrading stats table:" + e);
sqlE = e;
} finally {
try {
metaConnection.setAutoCommit(wasCommit);
oldMetaConnection.close();
} catch (SQLException e) {
if (sqlE != null) {
sqlE.setNextException(e);
} else {
sqlE = e;
}
}
if (sqlE != null) {
throw sqlE;
}
}
oldMetaConnection = metaConnection;
props = PropertiesUtil.deepCopy(oldMetaConnection.getClientInfo());
props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(timestamp));
try {
metaConnection = new PhoenixConnection(oldMetaConnection, ConnectionQueryServicesImpl.this, props);
} finally {
try {
oldMetaConnection.close();
} catch (SQLException e) {
if (sqlE != null) {
sqlE.setNextException(e);
} else {
sqlE = e;
}
}
if (sqlE != null) {
throw sqlE;
}
}
return metaConnection;
}
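    // Schedules one RenewLeaseTask per connection queue on a daemon thread pool, running at the configured
    // frequency, when scanner lease renewal is enabled.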
private void scheduleRenewLeaseTasks() {
if (isRenewingLeasesEnabled()) {
ThreadFactory threadFactory =
new ThreadFactoryBuilder().setDaemon(true)
.setNameFormat("PHOENIX-SCANNER-RENEW-LEASE" + "-thread-%s").build();
renewLeaseExecutor =
Executors.newScheduledThreadPool(renewLeasePoolSize, threadFactory);
for (LinkedBlockingQueue<WeakReference<PhoenixConnection>> q : connectionQueues) {
renewLeaseExecutor.scheduleAtFixedRate(new RenewLeaseTask(q), 0,
renewLeaseTaskFrequency, TimeUnit.MILLISECONDS);
}
}
}
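    // Derives the salt bucket count of the existing SYSTEM.SEQUENCE table from the exception's PTable, defaulting to 0 (unsalted).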
private static int getSaltBuckets(TableAlreadyExistsException e) {
PTable table = e.getTable();
Integer sequenceSaltBuckets = table == null ? null : table.getBucketNum();
return sequenceSaltBuckets == null ? 0 : sequenceSaltBuckets;
}
@Override
public MutationState updateData(MutationPlan plan) throws SQLException {
MutationState state = plan.execute();
plan.getContext().getConnection().commit();
return state;
}
@Override
public int getLowestClusterHBaseVersion() {
return lowestClusterHBaseVersion;
}
@Override
public boolean hasIndexWALCodec() {
return hasIndexWALCodec;
}
/**
* Clears the Phoenix meta data cache on each region server
* @throws SQLException
*/
@Override
public long clearCache() throws SQLException {
try {
SQLException sqlE = null;
HTableInterface htable = this.getTable(PhoenixDatabaseMetaData.SYSTEM_CATALOG_NAME_BYTES);
try {
final Map<byte[], Long> results =
htable.coprocessorService(MetaDataService.class, HConstants.EMPTY_START_ROW,
HConstants.EMPTY_END_ROW, new Batch.Call<MetaDataService, Long>() {
@Override
public Long call(MetaDataService instance) throws IOException {
ServerRpcController controller = new ServerRpcController();
BlockingRpcCallback<ClearCacheResponse> rpcCallback =
new BlockingRpcCallback<ClearCacheResponse>();
ClearCacheRequest.Builder builder = ClearCacheRequest.newBuilder();
builder.setClientVersion(VersionUtil.encodeVersion(PHOENIX_MAJOR_VERSION, PHOENIX_MINOR_VERSION, PHOENIX_PATCH_NUMBER));
instance.clearCache(controller, builder.build(), rpcCallback);
if(controller.getFailedOn() != null) {
throw controller.getFailedOn();
}
return rpcCallback.get().getUnfreedBytes();
}
});
long unfreedBytes = 0;
for (Map.Entry<byte[],Long> result : results.entrySet()) {
if (result.getValue() != null) {
unfreedBytes += result.getValue();
}
}
return unfreedBytes;
} catch (IOException e) {
throw ServerUtil.parseServerException(e);
} catch (Throwable e) {
sqlE = new SQLException(e);
} finally {
try {
tableStatsCache.invalidateAll();
htable.close();
} catch (IOException e) {
if (sqlE == null) {
sqlE = ServerUtil.parseServerException(e);
} else {
sqlE.setNextException(ServerUtil.parseServerException(e));
}
} finally {
if (sqlE != null) {
throw sqlE;
}
}
}
} catch (Exception e) {
throw new SQLException(ServerUtil.parseServerException(e));
}
return 0;
}
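    // Flushes the given HBase table, translating IO and interrupt failures into SQLExceptions and always closing the admin.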
private void flushTable(byte[] tableName) throws SQLException {
HBaseAdmin admin = getAdmin();
try {
admin.flush(tableName);
} catch (IOException e) {
throw new PhoenixIOException(e);
} catch (InterruptedException e) {
// restore the interrupt status
Thread.currentThread().interrupt();
throw new SQLExceptionInfo.Builder(SQLExceptionCode.INTERRUPTED_EXCEPTION).setRootCause(e).build()
.buildException();
} finally {
Closeables.closeQuietly(admin);
}
}
@Override
public HBaseAdmin getAdmin() throws SQLException {
try {
return new HBaseAdmin(config);
} catch (IOException e) {
throw new PhoenixIOException(e);
}
}
@Override
public MetaDataMutationResult updateIndexState(final List<Mutation> tableMetaData, String parentTableName) throws SQLException {
byte[][] rowKeyMetadata = new byte[3][];
SchemaUtil.getVarChars(tableMetaData.get(0).getRow(), rowKeyMetadata);
byte[] tableKey = SchemaUtil.getTableKey(ByteUtil.EMPTY_BYTE_ARRAY, rowKeyMetadata[PhoenixDatabaseMetaData.SCHEMA_NAME_INDEX], rowKeyMetadata[PhoenixDatabaseMetaData.TABLE_NAME_INDEX]);
return metaDataCoprocessorExec(tableKey,
new Batch.Call<MetaDataService, MetaDataResponse>() {
@Override
public MetaDataResponse call(MetaDataService instance) throws IOException {
ServerRpcController controller = new ServerRpcController();
BlockingRpcCallback<MetaDataResponse> rpcCallback =
new BlockingRpcCallback<MetaDataResponse>();
UpdateIndexStateRequest.Builder builder = UpdateIndexStateRequest.newBuilder();
for (Mutation m : tableMetaData) {
MutationProto mp = ProtobufUtil.toProto(m);
builder.addTableMetadataMutations(mp.toByteString());
}
builder.setClientVersion(VersionUtil.encodeVersion(PHOENIX_MAJOR_VERSION, PHOENIX_MINOR_VERSION, PHOENIX_PATCH_NUMBER));
instance.updateIndexState(controller, builder.build(), rpcCallback);
if(controller.getFailedOn() != null) {
throw controller.getFailedOn();
}
return rpcCallback.get();
}
});
}
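    /**
     * Creates the sequence cell in SYSTEM.SEQUENCE via an HBase Append issued under the sequence's lock,
     * caching the Sequence object in the local sequence map first.
     */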
@Override
public long createSequence(String tenantId, String schemaName, String sequenceName,
long startWith, long incrementBy, long cacheSize, long minValue, long maxValue,
boolean cycle, long timestamp) throws SQLException {
SequenceKey sequenceKey = new SequenceKey(tenantId, schemaName, sequenceName, nSequenceSaltBuckets);
Sequence newSequences = new Sequence(sequenceKey);
Sequence sequence = sequenceMap.putIfAbsent(sequenceKey, newSequences);
if (sequence == null) {
sequence = newSequences;
}
try {
sequence.getLock().lock();
// Now that we have the lock we need, create the sequence
Append append = sequence.createSequence(startWith, incrementBy, cacheSize, timestamp, minValue, maxValue, cycle);
HTableInterface htable =
this.getTable(PhoenixDatabaseMetaData.SEQUENCE_FULLNAME_BYTES);
htable.setAutoFlush(true);
try {
Result result = htable.append(append);
return sequence.createSequence(result, minValue, maxValue, cycle);
} catch (IOException e) {
throw ServerUtil.parseServerException(e);
} finally {
Closeables.closeQuietly(htable);
}
} finally {
sequence.getLock().unlock();
}
}
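    /**
     * Drops the sequence by applying the Append built by Sequence.dropSequence to SYSTEM.SEQUENCE,
     * performed under the sequence's lock.
     */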
@Override
public long dropSequence(String tenantId, String schemaName, String sequenceName, long timestamp) throws SQLException {
SequenceKey sequenceKey = new SequenceKey(tenantId, schemaName, sequenceName, nSequenceSaltBuckets);
Sequence newSequences = new Sequence(sequenceKey);
Sequence sequence = sequenceMap.putIfAbsent(sequenceKey, newSequences);
if (sequence == null) {
sequence = newSequences;
}
try {
sequence.getLock().lock();
// Now that we have the lock we need, create the sequence
Append append = sequence.dropSequence(timestamp);
HTableInterface htable = this.getTable(PhoenixDatabaseMetaData.SEQUENCE_FULLNAME_BYTES);
try {
Result result = htable.append(append);
return sequence.dropSequence(result);
} catch (IOException e) {
throw ServerUtil.parseServerException(e);
} finally {
Closeables.closeQuietly(htable);
}
} finally {
sequence.getLock().unlock();
}
}
/**
* Gets the current sequence value
* @throws SQLException if cached sequence cannot be found
*/
@Override
public long currentSequenceValue(SequenceKey sequenceKey, long timestamp) throws SQLException {
Sequence sequence = sequenceMap.get(sequenceKey);
if (sequence == null) {
throw new SQLExceptionInfo.Builder(SQLExceptionCode.CANNOT_CALL_CURRENT_BEFORE_NEXT_VALUE)
.setSchemaName(sequenceKey.getSchemaName()).setTableName(sequenceKey.getSequenceName())
.build().buildException();
}
sequence.getLock().lock();
try {
return sequence.currentValue(timestamp);
} catch (EmptySequenceCacheException e) {
throw new SQLExceptionInfo.Builder(SQLExceptionCode.CANNOT_CALL_CURRENT_BEFORE_NEXT_VALUE)
.setSchemaName(sequenceKey.getSchemaName()).setTableName(sequenceKey.getSequenceName())
.build().buildException();
} finally {
sequence.getLock().unlock();
}
}
/**
     * Verifies that sequences exist and, depending on the given value operation, reserves values for them
*/
@Override
public void validateSequences(List<SequenceAllocation> sequenceAllocations, long timestamp, long[] values, SQLException[] exceptions, Sequence.ValueOp action) throws SQLException {
incrementSequenceValues(sequenceAllocations, timestamp, values, exceptions, action);
}
/**
* Increment any of the set of sequences that need more values. These are the sequences
     * that are asking for the next value within a given statement. Sequences that cannot be
     * found (for example because they were deleted by another client) are reported through the exceptions array.
     * @param sequenceAllocations sorted list of sequence keys with the number of values to allocate for each
* @param timestamp
* @throws SQLException if any of the sequences cannot be found
*
*/
@Override
public void incrementSequences(List<SequenceAllocation> sequenceAllocations, long timestamp, long[] values, SQLException[] exceptions) throws SQLException {
incrementSequenceValues(sequenceAllocations, timestamp, values, exceptions, Sequence.ValueOp.INCREMENT_SEQUENCE);
}
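    // Serves sequence values from the locally cached Sequence objects where possible; cache misses are batched
    // into HBase Increments against SYSTEM.SEQUENCE and the results folded back into the values/exceptions arrays.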
@SuppressWarnings("deprecation")
private void incrementSequenceValues(List<SequenceAllocation> sequenceAllocations, long timestamp, long[] values, SQLException[] exceptions, Sequence.ValueOp op) throws SQLException {
List<Sequence> sequences = Lists.newArrayListWithExpectedSize(sequenceAllocations.size());
for (SequenceAllocation sequenceAllocation : sequenceAllocations) {
SequenceKey key = sequenceAllocation.getSequenceKey();
Sequence newSequences = new Sequence(key);
Sequence sequence = sequenceMap.putIfAbsent(key, newSequences);
if (sequence == null) {
sequence = newSequences;
}
sequences.add(sequence);
}
try {
for (Sequence sequence : sequences) {
sequence.getLock().lock();
}
// Now that we have all the locks we need, increment the sequences
List<Increment> incrementBatch = Lists.newArrayListWithExpectedSize(sequences.size());
List<Sequence> toIncrementList = Lists.newArrayListWithExpectedSize(sequences.size());
int[] indexes = new int[sequences.size()];
for (int i = 0; i < sequences.size(); i++) {
Sequence sequence = sequences.get(i);
try {
values[i] = sequence.incrementValue(timestamp, op, sequenceAllocations.get(i).getNumAllocations());
} catch (EmptySequenceCacheException e) {
indexes[toIncrementList.size()] = i;
toIncrementList.add(sequence);
Increment inc = sequence.newIncrement(timestamp, op, sequenceAllocations.get(i).getNumAllocations());
incrementBatch.add(inc);
} catch (SQLException e) {
exceptions[i] = e;
}
}
if (toIncrementList.isEmpty()) {
return;
}
HTableInterface hTable = this.getTable(PhoenixDatabaseMetaData.SEQUENCE_FULLNAME_BYTES);
Object[] resultObjects = null;
SQLException sqlE = null;
try {
resultObjects= hTable.batch(incrementBatch);
} catch (IOException e) {
sqlE = ServerUtil.parseServerException(e);
} catch (InterruptedException e) {
// restore the interrupt status
Thread.currentThread().interrupt();
sqlE = new SQLExceptionInfo.Builder(SQLExceptionCode.INTERRUPTED_EXCEPTION)
.setRootCause(e).build().buildException(); // FIXME ?
} finally {
try {
hTable.close();
} catch (IOException e) {
if (sqlE == null) {
sqlE = ServerUtil.parseServerException(e);
} else {
sqlE.setNextException(ServerUtil.parseServerException(e));
}
}
if (sqlE != null) {
throw sqlE;
}
}
for (int i=0;i<resultObjects.length;i++){
Sequence sequence = toIncrementList.get(i);
Result result = (Result)resultObjects[i];
try {
long numToAllocate = Bytes.toLong(incrementBatch.get(i).getAttribute(SequenceRegionObserver.NUM_TO_ALLOCATE));
values[indexes[i]] = sequence.incrementValue(result, op, numToAllocate);
} catch (SQLException e) {
exceptions[indexes[i]] = e;
}
}
} finally {
for (Sequence sequence : sequences) {
sequence.getLock().unlock();
}
}
}
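    /**
     * Asks the MetaData endpoint coprocessor on SYSTEM.CATALOG to evict the given table from each region
     * server's cache, and invalidates the locally cached stats for non-tenant-specific tables.
     */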
@Override
public void clearTableFromCache(final byte[] tenantId, final byte[] schemaName, final byte[] tableName,
final long clientTS) throws SQLException {
// clear the meta data cache for the table here
try {
SQLException sqlE = null;
HTableInterface htable = this.getTable(PhoenixDatabaseMetaData.SYSTEM_CATALOG_NAME_BYTES);
try {
htable.coprocessorService(MetaDataService.class, HConstants.EMPTY_START_ROW, HConstants.EMPTY_END_ROW,
new Batch.Call<MetaDataService, ClearTableFromCacheResponse>() {
@Override
public ClearTableFromCacheResponse call(MetaDataService instance) throws IOException {
ServerRpcController controller = new ServerRpcController();
BlockingRpcCallback<ClearTableFromCacheResponse> rpcCallback = new BlockingRpcCallback<ClearTableFromCacheResponse>();
ClearTableFromCacheRequest.Builder builder = ClearTableFromCacheRequest.newBuilder();
builder.setTenantId(ByteStringer.wrap(tenantId));
builder.setTableName(ByteStringer.wrap(tableName));
builder.setSchemaName(ByteStringer.wrap(schemaName));
builder.setClientTimestamp(clientTS);
builder.setClientVersion(VersionUtil.encodeVersion(PHOENIX_MAJOR_VERSION, PHOENIX_MINOR_VERSION, PHOENIX_PATCH_NUMBER));
instance.clearTableFromCache(controller, builder.build(), rpcCallback);
if (controller.getFailedOn() != null) { throw controller.getFailedOn(); }
return rpcCallback.get();
}
});
} catch (IOException e) {
throw ServerUtil.parseServerException(e);
} catch (Throwable e) {
sqlE = new SQLException(e);
} finally {
try {
if (tenantId.length == 0) tableStatsCache.invalidate(new ImmutableBytesPtr(SchemaUtil.getTableNameAsBytes(schemaName, tableName)));
htable.close();
} catch (IOException e) {
if (sqlE == null) {
sqlE = ServerUtil.parseServerException(e);
} else {
sqlE.setNextException(ServerUtil.parseServerException(e));
}
} finally {
if (sqlE != null) { throw sqlE; }
}
}
} catch (Exception e) {
throw new SQLException(ServerUtil.parseServerException(e));
}
}
@SuppressWarnings("deprecation")
@Override
public void returnSequences(List<SequenceKey> keys, long timestamp, SQLException[] exceptions) throws SQLException {
List<Sequence> sequences = Lists.newArrayListWithExpectedSize(keys.size());
for (SequenceKey key : keys) {
Sequence newSequences = new Sequence(key);
Sequence sequence = sequenceMap.putIfAbsent(key, newSequences);
if (sequence == null) {
sequence = newSequences;
}
sequences.add(sequence);
}
try {
for (Sequence sequence : sequences) {
sequence.getLock().lock();
}
// Now that we have all the locks we need, attempt to return the unused sequence values
List<Append> mutations = Lists.newArrayListWithExpectedSize(sequences.size());
List<Sequence> toReturnList = Lists.newArrayListWithExpectedSize(sequences.size());
int[] indexes = new int[sequences.size()];
for (int i = 0; i < sequences.size(); i++) {
Sequence sequence = sequences.get(i);
try {
                    Append append = sequence.newReturn(timestamp);
                    // Track the original position so that any exception from the batch result lands in the right slot
                    indexes[toReturnList.size()] = i;
                    toReturnList.add(sequence);
                    mutations.add(append);
} catch (EmptySequenceCacheException ignore) { // Nothing to return, so ignore
}
}
if (toReturnList.isEmpty()) {
return;
}
HTableInterface hTable = this.getTable(PhoenixDatabaseMetaData.SEQUENCE_FULLNAME_BYTES);
Object[] resultObjects = null;
SQLException sqlE = null;
try {
resultObjects= hTable.batch(mutations);
} catch (IOException e){
sqlE = ServerUtil.parseServerException(e);
} catch (InterruptedException e){
// restore the interrupt status
Thread.currentThread().interrupt();
sqlE = new SQLExceptionInfo.Builder(SQLExceptionCode.INTERRUPTED_EXCEPTION)
.setRootCause(e).build().buildException(); // FIXME ?
} finally {
try {
hTable.close();
} catch (IOException e) {
if (sqlE == null) {
sqlE = ServerUtil.parseServerException(e);
} else {
sqlE.setNextException(ServerUtil.parseServerException(e));
}
}
if (sqlE != null) {
throw sqlE;
}
}
for (int i=0;i<resultObjects.length;i++){
Sequence sequence = toReturnList.get(i);
Result result = (Result)resultObjects[i];
try {
sequence.returnValue(result);
} catch (SQLException e) {
exceptions[indexes[i]] = e;
}
}
} finally {
for (Sequence sequence : sequences) {
sequence.getLock().unlock();
}
}
}
// Take no locks, as this only gets run when there are no open connections
// so there's no danger of contention.
@SuppressWarnings("deprecation")
private void returnAllSequences(ConcurrentMap<SequenceKey,Sequence> sequenceMap) throws SQLException {
List<Append> mutations = Lists.newArrayListWithExpectedSize(sequenceMap.size());
for (Sequence sequence : sequenceMap.values()) {
mutations.addAll(sequence.newReturns());
}
if (mutations.isEmpty()) {
return;
}
HTableInterface hTable = this.getTable(PhoenixDatabaseMetaData.SEQUENCE_FULLNAME_BYTES);
SQLException sqlE = null;
try {
hTable.batch(mutations);
} catch (IOException e) {
sqlE = ServerUtil.parseServerException(e);
} catch (InterruptedException e) {
// restore the interrupt status
Thread.currentThread().interrupt();
sqlE = new SQLExceptionInfo.Builder(SQLExceptionCode.INTERRUPTED_EXCEPTION)
.setRootCause(e).build().buildException(); // FIXME ?
} finally {
try {
hTable.close();
} catch (IOException e) {
if (sqlE == null) {
sqlE = ServerUtil.parseServerException(e);
} else {
sqlE.setNextException(ServerUtil.parseServerException(e));
}
}
if (sqlE != null) {
throw sqlE;
}
}
}
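    /**
     * Registers the connection (as a weak reference) with one of the renew-lease queues and, when sequence
     * values are returned on close, tracks the number of open connections.
     */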
@Override
public void addConnection(PhoenixConnection connection) throws SQLException {
connectionQueues.get(getQueueIndex(connection)).add(new WeakReference<PhoenixConnection>(connection));
if (returnSequenceValues) {
synchronized (connectionCountLock) {
connectionCount++;
}
}
}
@Override
public void removeConnection(PhoenixConnection connection) throws SQLException {
if (returnSequenceValues) {
ConcurrentMap<SequenceKey,Sequence> formerSequenceMap = null;
synchronized (connectionCountLock) {
if (--connectionCount == 0) {
if (!this.sequenceMap.isEmpty()) {
formerSequenceMap = this.sequenceMap;
this.sequenceMap = Maps.newConcurrentMap();
}
}
}
// Since we're using the former sequenceMap, we can do this outside
// the lock.
if (formerSequenceMap != null) {
// When there are no more connections, attempt to return any sequences
returnAllSequences(formerSequenceMap);
}
}
}
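    // Connections are spread across the renew-lease queues at random; each queue is serviced by its own scheduled task.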
private int getQueueIndex(PhoenixConnection conn) {
return ThreadLocalRandom.current().nextInt(renewLeasePoolSize);
}
@Override
public KeyValueBuilder getKeyValueBuilder() {
return this.kvBuilder;
}
@Override
public boolean supportsFeature(Feature feature) {
FeatureSupported supported = featureMap.get(feature);
if (supported == null) {
return false;
}
return supported.isSupported(this);
}
@Override
public String getUserName() {
return userName;
}
private void checkClosed() {
if (closed) {
throwConnectionClosedException();
}
}
private void throwConnectionClosedIfNullMetaData() {
if (latestMetaData == null) {
throwConnectionClosedException();
}
}
private void throwConnectionClosedException() {
throw new IllegalStateException("Connection to the cluster is closed");
}
@Override
public PTableStats getTableStats(final byte[] physicalName, final long clientTimeStamp) throws SQLException {
try {
return tableStatsCache.get(new ImmutableBytesPtr(physicalName), new Callable<PTableStats>() {
@Override
public PTableStats call() throws Exception {
/*
* The shared view index case is tricky, because we don't have
* table metadata for it, only an HBase table. We do have stats,
* though, so we'll query them directly here and cache them so
* we don't keep querying for them.
*/
HTableInterface statsHTable = ConnectionQueryServicesImpl.this.getTable(PhoenixDatabaseMetaData.SYSTEM_STATS_NAME_BYTES);
try {
return StatisticsUtil.readStatistics(statsHTable, physicalName, clientTimeStamp);
} catch (IOException e) {
logger.warn("Unable to read from stats table", e);
// Just cache empty stats. We'll try again after some time anyway.
return PTableStats.EMPTY_STATS;
} finally {
try {
statsHTable.close();
} catch (IOException e) {
// Log, but continue. We have our stats anyway now.
logger.warn("Unable to close stats table", e);
}
}
}
});
} catch (ExecutionException e) {
throw ServerUtil.parseServerException(e);
}
}
@Override
public int getSequenceSaltBuckets() {
return nSequenceSaltBuckets;
}
@Override
public PMetaData addFunction(PFunction function) throws SQLException {
synchronized (latestMetaDataLock) {
try {
throwConnectionClosedIfNullMetaData();
// If existing table isn't older than new table, don't replace
// If a client opens a connection at an earlier timestamp, this can happen
PFunction existingFunction = latestMetaData.getFunction(new PTableKey(function.getTenantId(), function.getFunctionName()));
if (existingFunction.getTimeStamp() >= function.getTimeStamp()) {
return latestMetaData;
}
} catch (FunctionNotFoundException e) {}
latestMetaData = latestMetaData.addFunction(function);
latestMetaDataLock.notifyAll();
return latestMetaData;
}
}
@Override
public PMetaData removeFunction(PName tenantId, String function, long functionTimeStamp)
throws SQLException {
synchronized (latestMetaDataLock) {
throwConnectionClosedIfNullMetaData();
latestMetaData = latestMetaData.removeFunction(tenantId, function, functionTimeStamp);
latestMetaDataLock.notifyAll();
return latestMetaData;
}
}
@Override
public MetaDataMutationResult getFunctions(PName tenantId, final List<Pair<byte[], Long>> functions,
final long clientTimestamp) throws SQLException {
final byte[] tenantIdBytes = tenantId == null ? ByteUtil.EMPTY_BYTE_ARRAY : tenantId.getBytes();
return metaDataCoprocessorExec(tenantIdBytes,
new Batch.Call<MetaDataService, MetaDataResponse>() {
@Override
public MetaDataResponse call(MetaDataService instance) throws IOException {
ServerRpcController controller = new ServerRpcController();
BlockingRpcCallback<MetaDataResponse> rpcCallback =
new BlockingRpcCallback<MetaDataResponse>();
GetFunctionsRequest.Builder builder = GetFunctionsRequest.newBuilder();
builder.setTenantId(ByteStringer.wrap(tenantIdBytes));
for(Pair<byte[], Long> function: functions) {
builder.addFunctionNames(ByteStringer.wrap(function.getFirst()));
builder.addFunctionTimestamps(function.getSecond().longValue());
}
builder.setClientTimestamp(clientTimestamp);
builder.setClientVersion(VersionUtil.encodeVersion(PHOENIX_MAJOR_VERSION, PHOENIX_MINOR_VERSION, PHOENIX_PATCH_NUMBER));
instance.getFunctions(controller, builder.build(), rpcCallback);
if(controller.getFailedOn() != null) {
throw controller.getFailedOn();
}
return rpcCallback.get();
}
}, PhoenixDatabaseMetaData.SYSTEM_FUNCTION_NAME_BYTES);
}
// TODO the mutations should be added to System functions table.
@Override
public MetaDataMutationResult createFunction(final List<Mutation> functionData,
final PFunction function, final boolean temporary) throws SQLException {
byte[][] rowKeyMetadata = new byte[2][];
Mutation m = MetaDataUtil.getPutOnlyTableHeaderRow(functionData);
byte[] key = m.getRow();
SchemaUtil.getVarChars(key, rowKeyMetadata);
byte[] tenantIdBytes = rowKeyMetadata[PhoenixDatabaseMetaData.TENANT_ID_INDEX];
byte[] functionBytes = rowKeyMetadata[PhoenixDatabaseMetaData.FUNTION_NAME_INDEX];
byte[] functionKey = SchemaUtil.getFunctionKey(tenantIdBytes, functionBytes);
MetaDataMutationResult result = metaDataCoprocessorExec(functionKey,
new Batch.Call<MetaDataService, MetaDataResponse>() {
@Override
public MetaDataResponse call(MetaDataService instance) throws IOException {
ServerRpcController controller = new ServerRpcController();
BlockingRpcCallback<MetaDataResponse> rpcCallback =
new BlockingRpcCallback<MetaDataResponse>();
CreateFunctionRequest.Builder builder = CreateFunctionRequest.newBuilder();
for (Mutation m : functionData) {
MutationProto mp = ProtobufUtil.toProto(m);
builder.addTableMetadataMutations(mp.toByteString());
}
builder.setTemporary(temporary);
builder.setReplace(function.isReplace());
builder.setClientVersion(VersionUtil.encodeVersion(PHOENIX_MAJOR_VERSION, PHOENIX_MINOR_VERSION, PHOENIX_PATCH_NUMBER));
instance.createFunction(controller, builder.build(), rpcCallback);
if(controller.getFailedOn() != null) {
throw controller.getFailedOn();
}
return rpcCallback.get();
}
}, PhoenixDatabaseMetaData.SYSTEM_FUNCTION_NAME_BYTES);
return result;
}
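    /**
     * Drains its connection queue once per run, renewing the lease of every open scanner on each live
     * connection and putting live entries back on their queues for the next run.
     */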
@VisibleForTesting
static class RenewLeaseTask implements Runnable {
private final LinkedBlockingQueue<WeakReference<PhoenixConnection>> connectionsQueue;
private final Random random = new Random();
private static final int MAX_WAIT_TIME = 1000;
RenewLeaseTask(LinkedBlockingQueue<WeakReference<PhoenixConnection>> queue) {
this.connectionsQueue = queue;
}
private void waitForRandomDuration() throws InterruptedException {
new CountDownLatch(1).await(random.nextInt(MAX_WAIT_TIME), MILLISECONDS);
}
@Override
public void run() {
try {
int numConnections = connectionsQueue.size();
boolean wait = true;
// We keep adding items to the end of the queue. So to stop the loop, iterate only up to
// whatever the current count is.
while (numConnections > 0) {
if (wait) {
// wait for some random duration to prevent all threads from renewing lease at
// the same time.
waitForRandomDuration();
wait = false;
}
// It is guaranteed that this poll won't hang indefinitely because this is the
// only thread that removes items from the queue. Still adding a 1 ms timeout
// for sanity check.
WeakReference<PhoenixConnection> connRef =
connectionsQueue.poll(1, TimeUnit.MILLISECONDS);
if (connRef == null) {
throw new IllegalStateException(
"Connection ref found to be null. This is a bug. Some other thread removed items from the connection queue.");
}
PhoenixConnection conn = connRef.get();
if (conn != null && !conn.isClosed()) {
LinkedBlockingQueue<WeakReference<TableResultIterator>> scannerQueue =
conn.getScanners();
// We keep adding items to the end of the queue. So to stop the loop,
// iterate only up to whatever the current count is.
int numScanners = scannerQueue.size();
int renewed = 0;
long start = System.currentTimeMillis();
while (numScanners > 0) {
// It is guaranteed that this poll won't hang indefinitely because this is the
// only thread that removes items from the queue. Still adding a 1 ms timeout
// for sanity check.
WeakReference<TableResultIterator> ref =
scannerQueue.poll(1, TimeUnit.MILLISECONDS);
if (ref == null) {
throw new IllegalStateException(
"TableResulIterator ref found to be null. This is a bug. Some other thread removed items from the scanner queue.");
}
TableResultIterator scanningItr = ref.get();
if (scanningItr != null) {
RenewLeaseStatus status = scanningItr.renewLease();
switch (status) {
case RENEWED:
renewed++;
// add it back at the tail
scannerQueue.offer(new WeakReference<TableResultIterator>(
scanningItr));
logger.info("Lease renewed for scanner: " + scanningItr);
break;
case UNINITIALIZED:
case THRESHOLD_NOT_REACHED:
// add it back at the tail
scannerQueue.offer(new WeakReference<TableResultIterator>(
scanningItr));
break;
// if lease wasn't renewed or scanner was closed, don't add the
// scanner back to the queue.
case CLOSED:
case NOT_RENEWED:
break;
}
}
numScanners--;
}
if (renewed > 0) {
logger.info("Renewed leases for " + renewed + " scanner/s in "
+ (System.currentTimeMillis() - start) + " ms ");
}
connectionsQueue.offer(connRef);
}
numConnections--;
}
} catch (InterruptedException e1) {
Thread.currentThread().interrupt(); // restore the interrupt status
logger.warn("Thread interrupted when renewing lease ", e1);
throw new RuntimeException(e1);
} catch (Exception e2) {
logger.warn("Exception thrown when renewing lease ", e2);
throw new RuntimeException(e2);
}
}
}
@Override
public long getRenewLeaseThresholdMilliSeconds() {
return renewLeaseThreshold;
}
@Override
public boolean isRenewingLeasesEnabled() {
return supportsFeature(ConnectionQueryServices.Feature.RENEW_LEASE) && renewLeaseEnabled;
}
}
|
PHOENIX-2616 Indexes over immutable tables not marked as immutable
|
phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
|
PHOENIX-2616 Indexes over immutable tables not marked as immutable
|
<ide><path>hoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
<ide> import org.slf4j.Logger;
<ide> import org.slf4j.LoggerFactory;
<ide>
<del>import co.cask.tephra.TransactionSystemClient;
<del>import co.cask.tephra.TxConstants;
<del>import co.cask.tephra.distributed.PooledClientProvider;
<del>import co.cask.tephra.distributed.TransactionServiceClient;
<del>import co.cask.tephra.hbase11.coprocessor.TransactionProcessor;
<del>
<ide> import com.google.common.annotations.VisibleForTesting;
<ide> import com.google.common.base.Joiner;
<ide> import com.google.common.base.Throwables;
<ide> import com.google.common.collect.Maps;
<ide> import com.google.common.collect.Sets;
<ide> import com.google.common.util.concurrent.ThreadFactoryBuilder;
<add>
<add>import co.cask.tephra.TransactionSystemClient;
<add>import co.cask.tephra.TxConstants;
<add>import co.cask.tephra.distributed.PooledClientProvider;
<add>import co.cask.tephra.distributed.TransactionServiceClient;
<add>import co.cask.tephra.hbase11.coprocessor.TransactionProcessor;
<ide>
<ide>
<ide> public class ConnectionQueryServicesImpl extends DelegateQueryServices implements ConnectionQueryServices {
<ide> SQLException sqlE = null;
<ide> try {
<ide> metaConnection.createStatement().executeUpdate("ALTER TABLE " + tableName + " ADD " + (addIfNotExists ? " IF NOT EXISTS " : "") + columns );
<add> } catch (NewerTableAlreadyExistsException e) {
<add> logger.warn("Table already modified at this timestamp, so assuming add of these columns already done: " + columns);
<ide> } catch (SQLException e) {
<ide> logger.warn("Add column failed due to:" + e);
<ide> sqlE = e;
<ide> ")\n" +
<ide> "WHERE A.COLUMN_FAMILY IS NULL AND\n" +
<ide> " B.COLUMN_FAMILY IS NOT NULL AND\n" +
<del> " A.IMMUTABLE_ROWS = TRUE;");
<add> " A.IMMUTABLE_ROWS = TRUE");
<ide> } finally {
<ide> metaConnection.setAutoCommit(autoCommit);
<ide> }
|
|
Java
|
apache-2.0
|
4247bdce02cb870bae3b1cbbe609d6857011e9c4
| 0 |
emre-aydin/hazelcast,Donnerbart/hazelcast,mesutcelik/hazelcast,emre-aydin/hazelcast,dbrimley/hazelcast,tufangorel/hazelcast,mesutcelik/hazelcast,tombujok/hazelcast,mesutcelik/hazelcast,mdogan/hazelcast,dbrimley/hazelcast,emrahkocaman/hazelcast,juanavelez/hazelcast,lmjacksoniii/hazelcast,mdogan/hazelcast,emre-aydin/hazelcast,tufangorel/hazelcast,tkountis/hazelcast,tkountis/hazelcast,juanavelez/hazelcast,emrahkocaman/hazelcast,lmjacksoniii/hazelcast,mdogan/hazelcast,tkountis/hazelcast,tombujok/hazelcast,tufangorel/hazelcast,Donnerbart/hazelcast,dsukhoroslov/hazelcast,Donnerbart/hazelcast,dbrimley/hazelcast,dsukhoroslov/hazelcast
|
/*
* Copyright (c) 2008-2013, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.query.impl;
import com.hazelcast.nio.serialization.Data;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
public class IndexImpl implements Index {
public static final NullObject NULL = new NullObject();
// indexKey -- indexValue
private final ConcurrentMap<Data, Comparable> recordValues = new ConcurrentHashMap<Data, Comparable>(1000);
private final IndexStore indexStore;
private final String attribute;
private final boolean ordered;
private volatile AttributeType attributeType;
public IndexImpl(String attribute, boolean ordered) {
this.attribute = attribute;
this.ordered = ordered;
indexStore = (ordered) ? new SortedIndexStore() : new UnsortedIndexStore();
}
@Override
public void removeEntryIndex(Data indexKey) {
Comparable oldValue = recordValues.remove(indexKey);
if (oldValue != null) {
indexStore.removeIndex(oldValue, indexKey);
}
}
@Override
public void clear() {
recordValues.clear();
indexStore.clear();
}
ConcurrentMap<Data, QueryableEntry> getRecordMap(Comparable indexValue) {
return indexStore.getRecordMap(indexValue);
}
@Override
public void saveEntryIndex(QueryableEntry e) throws QueryException {
Data key = e.getIndexKey();
Comparable oldValue = recordValues.remove(key);
Comparable newValue = e.getAttribute(attribute);
if (newValue == null) {
newValue = NULL;
} else if (newValue.getClass().isEnum()) {
newValue = TypeConverters.ENUM_CONVERTER.convert(newValue);
}
recordValues.put(key, newValue);
if (oldValue == null) {
// new
indexStore.newIndex(newValue, e);
} else {
// update
indexStore.removeIndex(oldValue, key);
indexStore.newIndex(newValue, e);
}
if (attributeType == null) {
attributeType = e.getAttributeType(attribute);
}
}
@Override
public Set<QueryableEntry> getRecords(Comparable[] values) {
if (values.length == 1) {
return indexStore.getRecords(convert(values[0]));
} else {
Set<Comparable> convertedValues = new HashSet<Comparable>(values.length);
for (Comparable value : values) {
convertedValues.add(convert(value));
}
MultiResultSet results = new MultiResultSet();
indexStore.getRecords(results, convertedValues);
return results;
}
}
@Override
public Set<QueryableEntry> getRecords(Comparable value) {
return indexStore.getRecords(convert(value));
}
@Override
public Set<QueryableEntry> getSubRecordsBetween(Comparable from, Comparable to) {
MultiResultSet results = new MultiResultSet();
indexStore.getSubRecordsBetween(results, convert(from), convert(to));
return results;
}
@Override
public Set<QueryableEntry> getSubRecords(ComparisonType comparisonType, Comparable searchedValue) {
MultiResultSet results = new MultiResultSet();
indexStore.getSubRecords(results, comparisonType, convert(searchedValue));
return results;
}
private Comparable convert(Comparable value) {
if (attributeType == null) {
return value;
}
return attributeType.getConverter().convert(value);
}
public ConcurrentMap<Data, Comparable> getRecordValues() {
return recordValues;
}
@Override
public String getAttributeName() {
return attribute;
}
@Override
public boolean isOrdered() {
return ordered;
}
public static final class NullObject implements Comparable {
@Override
public int compareTo(Object o) {
if (o == this || o instanceof NullObject) {
return 0;
}
return -1;
}
@Override
public int hashCode() {
return 0;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
return true;
}
}
}
|
hazelcast/src/main/java/com/hazelcast/query/impl/IndexImpl.java
|
/*
* Copyright (c) 2008-2013, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.query.impl;
import com.hazelcast.nio.serialization.Data;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
public class IndexImpl implements Index {
public static final NullObject NULL = new NullObject();
// indexKey -- indexValue
private final ConcurrentMap<Data, Comparable> recordValues = new ConcurrentHashMap<Data, Comparable>(1000);
private final IndexStore indexStore;
private final String attribute;
private final boolean ordered;
private volatile AttributeType attributeType;
public IndexImpl(String attribute, boolean ordered) {
this.attribute = attribute;
this.ordered = ordered;
indexStore = (ordered) ? new SortedIndexStore() : new UnsortedIndexStore();
}
@Override
public void removeEntryIndex(Data indexKey) {
Comparable oldValue = recordValues.remove(indexKey);
if (oldValue != null) {
indexStore.removeIndex(oldValue, indexKey);
}
}
@Override
public void clear() {
recordValues.clear();
indexStore.clear();
}
ConcurrentMap<Data, QueryableEntry> getRecordMap(Comparable indexValue) {
return indexStore.getRecordMap(indexValue);
}
@Override
public void saveEntryIndex(QueryableEntry e) throws QueryException {
Data key = e.getIndexKey();
Comparable oldValue = recordValues.remove(key);
Comparable newValue = e.getAttribute(attribute);
if (newValue == null) {
newValue = NULL;
}
else if (newValue.getClass().isEnum()) {
newValue = TypeConverters.ENUM_CONVERTER.convert(newValue);
}
recordValues.put(key, newValue);
if (oldValue == null) {
// new
indexStore.newIndex(newValue, e);
} else {
// update
indexStore.removeIndex(oldValue, key);
indexStore.newIndex(newValue, e);
}
if (attributeType == null) {
attributeType = e.getAttributeType(attribute);
}
}
@Override
public Set<QueryableEntry> getRecords(Comparable[] values) {
if (values.length == 1) {
return indexStore.getRecords(convert(values[0]));
} else {
Set<Comparable> convertedValues = new HashSet<Comparable>(values.length);
for (Comparable value : values) {
convertedValues.add(convert(value));
}
MultiResultSet results = new MultiResultSet();
indexStore.getRecords(results, convertedValues);
return results;
}
}
@Override
public Set<QueryableEntry> getRecords(Comparable value) {
return indexStore.getRecords(convert(value));
}
@Override
public Set<QueryableEntry> getSubRecordsBetween(Comparable from, Comparable to) {
MultiResultSet results = new MultiResultSet();
indexStore.getSubRecordsBetween(results, convert(from), convert(to));
return results;
}
@Override
public Set<QueryableEntry> getSubRecords(ComparisonType comparisonType, Comparable searchedValue) {
MultiResultSet results = new MultiResultSet();
indexStore.getSubRecords(results, comparisonType, convert(searchedValue));
return results;
}
private Comparable convert(Comparable value) {
if (attributeType == null) {
return value;
}
return attributeType.getConverter().convert(value);
}
public ConcurrentMap<Data, Comparable> getRecordValues() {
return recordValues;
}
@Override
public String getAttributeName() {
return attribute;
}
@Override
public boolean isOrdered() {
return ordered;
}
public static final class NullObject implements Comparable {
@Override
public int compareTo(Object o) {
if (o == this || o instanceof NullObject) {
return 0;
}
return -1;
}
@Override
public int hashCode() {
return 0;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
return true;
}
}
}
|
checkstyle fix
|
hazelcast/src/main/java/com/hazelcast/query/impl/IndexImpl.java
|
checkstyle fix
|
<ide><path>azelcast/src/main/java/com/hazelcast/query/impl/IndexImpl.java
<ide> Comparable newValue = e.getAttribute(attribute);
<ide> if (newValue == null) {
<ide> newValue = NULL;
<del> }
<del> else if (newValue.getClass().isEnum()) {
<add> } else if (newValue.getClass().isEnum()) {
<ide> newValue = TypeConverters.ENUM_CONVERTER.convert(newValue);
<ide> }
<ide> recordValues.put(key, newValue);
|
|
Java
|
apache-2.0
|
error: pathspec 'java/value/trunk/src/main/java/org/ppwcode/value_III/time/interval/AbstractIntradayTimeIntervalValueHandler.java' did not match any file(s) known to git
|
50ab092b4f6cbb675458dd879d9a5794a8033448
| 1 |
jandppw/ppwcode-recovered-from-google-code,jandppw/ppwcode-recovered-from-google-code,jandppw/ppwcode-recovered-from-google-code,jandockx/ppwcode-recovered-from-google-code,jandockx/ppwcode-recovered-from-google-code,jandppw/ppwcode-recovered-from-google-code,jandppw/ppwcode-recovered-from-google-code,jandockx/ppwcode-recovered-from-google-code,jandockx/ppwcode-recovered-from-google-code,jandppw/ppwcode-recovered-from-google-code,jandockx/ppwcode-recovered-from-google-code,jandockx/ppwcode-recovered-from-google-code
|
/*<license>
Copyright 2004 - $Date: 2008-11-06 15:27:53 +0100 (Thu, 06 Nov 2008) $ by PeopleWare n.v..
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
</license>*/
package org.ppwcode.value_III.time.interval;
import static org.ppwcode.metainfo_I.License.Type.APACHE_V2;
import static org.ppwcode.value_III.time.TimeHelpers.compose;
import static org.ppwcode.value_III.time.TimeHelpers.isDayDate;
import static org.ppwcode.value_III.time.TimeHelpers.sqlDayDate;
import static org.ppwcode.value_III.time.TimeHelpers.sqlTimeOfDay;
import static org.ppwcode.vernacular.exception_II.ProgrammingErrorHelpers.deadBranch;
import static org.ppwcode.vernacular.exception_II.ProgrammingErrorHelpers.unexpectedException;
import java.sql.Time;
import java.sql.Types;
import java.util.Date;
import java.util.TimeZone;
import org.apache.openjpa.jdbc.kernel.JDBCStore;
import org.apache.openjpa.jdbc.meta.JavaSQLTypes;
import org.apache.openjpa.jdbc.meta.ValueMapping;
import org.apache.openjpa.jdbc.meta.strats.AbstractValueHandler;
import org.apache.openjpa.jdbc.schema.Column;
import org.apache.openjpa.jdbc.schema.ColumnIO;
import org.ppwcode.metainfo_I.Copyright;
import org.ppwcode.metainfo_I.License;
import org.ppwcode.metainfo_I.vcs.SvnInfo;
import org.ppwcode.value_III.propertyeditors.java.util.TimeZoneValueHandler;
/**
 * An OpenJPA value handler for {@link IntradayTimeInterval} and {@link DeterminateIntradayTimeInterval}.
* Date is stored in 4 columns in the database, 1 for the day date as {@code DATE}, 2 for the time of day as
* {@code TIME}, and one for the time zone, as {@code VARCHAR}. {@code null} is represented by {@code null}
* in all 4 columns. This is not a problem because both the begin and end being {@code null} is forbidden for
* {@link IntradayTimeInterval IntradayTimeIntervals}.
*
* @author Jan Dockx
* @author PeopleWare n.v.
*/
@Copyright("2008 - $Date: 2008-11-06 15:27:53 +0100 (Thu, 06 Nov 2008) $, PeopleWare n.v.")
@License(APACHE_V2)
@SvnInfo(revision = "$Revision: 3435 $",
date = "$Date: 2008-11-06 15:27:53 +0100 (Thu, 06 Nov 2008) $")
public abstract class AbstractIntradayTimeIntervalValueHandler extends AbstractValueHandler {
protected AbstractIntradayTimeIntervalValueHandler(boolean determinate) {
$determinate = determinate;
}
private final boolean $determinate;
private final TimeZoneValueHandler $timeZoneValueHandler = new TimeZoneValueHandler();
public final Column[] map(ValueMapping vm, String name, ColumnIO io, boolean adapt) {
return new Column[] {dayColumn(name), timeColumn(name + "_begin"), timeColumn(name + "_end"),
timeZoneColumn(vm, name, io, adapt)};
}
private Column dayColumn(String name) {
Column c = new Column();
c.setName(name + "_day");
c.setType(Types.DATE);
c.setNotNull(true);
c.setJavaType(JavaSQLTypes.SQL_DATE);
return c;
}
private Column timeColumn(String name) {
Column c = new Column();
c.setName(name);
c.setType(Types.TIME);
c.setJavaType(JavaSQLTypes.TIME);
c.setNotNull($determinate);
return c;
}
private Column timeZoneColumn(ValueMapping vm, String name, ColumnIO io, boolean adapt) {
Column c = $timeZoneValueHandler.map(vm, name + "_timezone", io, adapt)[0];
c.setNotNull(true);
return c;
}
@Override
public Object[] toDataStoreValue(ValueMapping vm, Object val, JDBCStore store) {
try {
AbstractIntradayTimeInterval beTi = (AbstractIntradayTimeInterval)val;
if (beTi == null) {
return new Object[] {null, null, null, null};
}
java.sql.Date day = sqlDayDate(beTi.getDay(), beTi.getTimeZone());
Time beginTime = sqlTimeOfDay(beTi.getBegin(), beTi.getTimeZone());
Time endTime = sqlTimeOfDay(beTi.getEnd(), beTi.getTimeZone());
return new Object[] {day, beginTime, endTime,
$timeZoneValueHandler.toDataStoreValue(vm, beTi.getTimeZone(), store)};
}
catch (ClassCastException exc) {
unexpectedException(exc, "trying to handle " + val + " with " +
AbstractIntradayTimeIntervalValueHandler.class.getName() + ", but that can't handle that type");
}
return null; // make compiler happy
}
@Override
public Object toObjectValue(ValueMapping vm, Object fromDb) {
try {
Object[] data = (Object[])fromDb;
java.sql.Date day = (java.sql.Date)data[0];
Time beginTime = (Time)data[1];
Time endTime = (Time)data[2];
TimeZone tz = (TimeZone)$timeZoneValueHandler.toObjectValue(vm, data[3]);
assert day == null || isDayDate(day, tz);
if (day == null) {
if (beginTime != null || endTime != null || tz != null) {
deadBranch("data received from database is not as expected: if the day is null, the times and timezone need to be null too");
}
return null;
}
Date intervalBeginTime = compose(day, beginTime, tz);
Date intervalEndTime = compose(day, endTime, tz);
return createFreshIntradayTimeInterval(intervalBeginTime, intervalEndTime, tz);
}
catch (ArrayIndexOutOfBoundsException exc) {
unexpectedException(exc, "data received from database is not as expected: expected array of 3 values");
}
catch (ClassCastException exc) {
unexpectedException(exc, "data received from database is not as expected: expected an array of 3 dates");
}
catch (IllegalTimeIntervalException exc) {
unexpectedException(exc, "data received from database did violate invariants for " + IntradayTimeInterval.class);
}
return null; // make compiler happy
}
protected abstract AbstractIntradayTimeInterval createFreshIntradayTimeInterval(Date intervalBeginTime, Date intervalEndTime, TimeZone tz)
throws IllegalTimeIntervalException;
}
|
java/value/trunk/src/main/java/org/ppwcode/value_III/time/interval/AbstractIntradayTimeIntervalValueHandler.java
|
value handlers for Intraday intervals that might work
|
java/value/trunk/src/main/java/org/ppwcode/value_III/time/interval/AbstractIntradayTimeIntervalValueHandler.java
|
value handlers for Intraday intervals that might work
|
<ide><path>ava/value/trunk/src/main/java/org/ppwcode/value_III/time/interval/AbstractIntradayTimeIntervalValueHandler.java
<add>/*<license>
<add>Copyright 2004 - $Date: 2008-11-06 15:27:53 +0100 (Thu, 06 Nov 2008) $ by PeopleWare n.v..
<add>
<add>Licensed under the Apache License, Version 2.0 (the "License");
<add>you may not use this file except in compliance with the License.
<add>You may obtain a copy of the License at
<add>
<add> http://www.apache.org/licenses/LICENSE-2.0
<add>
<add>Unless required by applicable law or agreed to in writing, software
<add>distributed under the License is distributed on an "AS IS" BASIS,
<add>WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
<add>See the License for the specific language governing permissions and
<add>limitations under the License.
<add></license>*/
<add>
<add>package org.ppwcode.value_III.time.interval;
<add>
<add>
<add>import static org.ppwcode.metainfo_I.License.Type.APACHE_V2;
<add>import static org.ppwcode.value_III.time.TimeHelpers.compose;
<add>import static org.ppwcode.value_III.time.TimeHelpers.isDayDate;
<add>import static org.ppwcode.value_III.time.TimeHelpers.sqlDayDate;
<add>import static org.ppwcode.value_III.time.TimeHelpers.sqlTimeOfDay;
<add>import static org.ppwcode.vernacular.exception_II.ProgrammingErrorHelpers.deadBranch;
<add>import static org.ppwcode.vernacular.exception_II.ProgrammingErrorHelpers.unexpectedException;
<add>
<add>import java.sql.Time;
<add>import java.sql.Types;
<add>import java.util.Date;
<add>import java.util.TimeZone;
<add>
<add>import org.apache.openjpa.jdbc.kernel.JDBCStore;
<add>import org.apache.openjpa.jdbc.meta.JavaSQLTypes;
<add>import org.apache.openjpa.jdbc.meta.ValueMapping;
<add>import org.apache.openjpa.jdbc.meta.strats.AbstractValueHandler;
<add>import org.apache.openjpa.jdbc.schema.Column;
<add>import org.apache.openjpa.jdbc.schema.ColumnIO;
<add>import org.ppwcode.metainfo_I.Copyright;
<add>import org.ppwcode.metainfo_I.License;
<add>import org.ppwcode.metainfo_I.vcs.SvnInfo;
<add>import org.ppwcode.value_III.propertyeditors.java.util.TimeZoneValueHandler;
<add>
<add>
<add>/**
<add> * An OpenJPA value handler for {@link IntradayTimeInterval} and {@link DeterminateIntradayTimeInterval}.
<add> * Date is stored in 4 columns in the database, 1 for the day date as {@code DATE}, 2 for the time of day as
<add> * {@code TIME}, and one for the time zone, as {@code VARCHAR}. {@code null} is represented by {@code null}
<add> * in all 4 columns. This is not a problem because both the begin and end being {@code null} is forbidden for
<add> * {@link IntradayTimeInterval IntradayTimeIntervals}.
<add> *
<add> * @author Jan Dockx
<add> * @author PeopleWare n.v.
<add> */
<add>@Copyright("2008 - $Date: 2008-11-06 15:27:53 +0100 (Thu, 06 Nov 2008) $, PeopleWare n.v.")
<add>@License(APACHE_V2)
<add>@SvnInfo(revision = "$Revision: 3435 $",
<add> date = "$Date: 2008-11-06 15:27:53 +0100 (Thu, 06 Nov 2008) $")
<add>public abstract class AbstractIntradayTimeIntervalValueHandler extends AbstractValueHandler {
<add>
<add> protected AbstractIntradayTimeIntervalValueHandler(boolean determinate) {
<add> $determinate = determinate;
<add> }
<add>
<add> private final boolean $determinate;
<add>
<add> private final TimeZoneValueHandler $timeZoneValueHandler = new TimeZoneValueHandler();
<add>
<add> public final Column[] map(ValueMapping vm, String name, ColumnIO io, boolean adapt) {
<add> return new Column[] {dayColumn(name), timeColumn(name + "_begin"), timeColumn(name + "_end"),
<add> timeZoneColumn(vm, name, io, adapt)};
<add> }
<add>
<add> private Column dayColumn(String name) {
<add> Column c = new Column();
<add> c.setName(name + "_day");
<add> c.setType(Types.DATE);
<add> c.setNotNull(true);
<add> c.setJavaType(JavaSQLTypes.SQL_DATE);
<add> return c;
<add> }
<add>
<add> private Column timeColumn(String name) {
<add> Column c = new Column();
<add> c.setName(name);
<add> c.setType(Types.TIME);
<add> c.setJavaType(JavaSQLTypes.TIME);
<add> c.setNotNull($determinate);
<add> return c;
<add> }
<add>
<add> private Column timeZoneColumn(ValueMapping vm, String name, ColumnIO io, boolean adapt) {
<add> Column c = $timeZoneValueHandler.map(vm, name + "_timezone", io, adapt)[0];
<add> c.setNotNull(true);
<add> return c;
<add> }
<add>
<add> @Override
<add> public Object[] toDataStoreValue(ValueMapping vm, Object val, JDBCStore store) {
<add> try {
<add> AbstractIntradayTimeInterval beTi = (AbstractIntradayTimeInterval)val;
<add> if (beTi == null) {
<add> return new Object[] {null, null, null, null};
<add> }
<add> java.sql.Date day = sqlDayDate(beTi.getDay(), beTi.getTimeZone());
<add> Time beginTime = sqlTimeOfDay(beTi.getBegin(), beTi.getTimeZone());
<add> Time endTime = sqlTimeOfDay(beTi.getEnd(), beTi.getTimeZone());
<add> return new Object[] {day, beginTime, endTime,
<add> $timeZoneValueHandler.toDataStoreValue(vm, beTi.getTimeZone(), store)};
<add> }
<add> catch (ClassCastException exc) {
<add> unexpectedException(exc, "trying to handle " + val + " with " +
<add> AbstractIntradayTimeIntervalValueHandler.class.getName() + ", but that can't handle that type");
<add> }
<add> return null; // make compiler happy
<add> }
<add>
<add> @Override
<add> public Object toObjectValue(ValueMapping vm, Object fromDb) {
<add> try {
<add> Object[] data = (Object[])fromDb;
<add> java.sql.Date day = (java.sql.Date)data[0];
<add> Time beginTime = (Time)data[1];
<add> Time endTime = (Time)data[2];
<add> TimeZone tz = (TimeZone)$timeZoneValueHandler.toObjectValue(vm, data[3]);
<add> assert day == null || isDayDate(day, tz);
<add> if (day == null) {
<add> if (beginTime != null || endTime != null || tz != null) {
<add> deadBranch("data received from database is not as expected: if the day is null, the times and timezone need to be null too");
<add> }
<add> return null;
<add> }
<add> Date intervalBeginTime = compose(day, beginTime, tz);
<add> Date intervalEndTime = compose(day, endTime, tz);
<add> return createFreshIntradayTimeInterval(intervalBeginTime, intervalEndTime, tz);
<add> }
<add> catch (ArrayIndexOutOfBoundsException exc) {
<add>      unexpectedException(exc, "data received from database is not as expected: expected an array of 4 values");
<add> }
<add> catch (ClassCastException exc) {
<add>      unexpectedException(exc, "data received from database is not as expected: expected a day date, two times and a time zone");
<add> }
<add> catch (IllegalTimeIntervalException exc) {
<add> unexpectedException(exc, "data received from database did violate invariants for " + IntradayTimeInterval.class);
<add> }
<add> return null; // make compiler happy
<add> }
<add>
<add> protected abstract AbstractIntradayTimeInterval createFreshIntradayTimeInterval(Date intervalBeginTime, Date intervalEndTime, TimeZone tz)
<add> throws IllegalTimeIntervalException;
<add>
<add>}
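To make the column layout from the javadoc concrete: toDataStoreValue hands OpenJPA one value per column declared in map(...), in the order day, begin, end, time zone. A minimal sketch of that Object[] for a hypothetical interval (09:00-17:30 on 2008-11-06 in Europe/Brussels; the sample values and the String form of the time-zone column are illustrative assumptions, not taken from this repository):
import java.sql.Date;
import java.sql.Time;
class IntradayRowSketch {
    // Mirrors the column order produced by map(name, ...) above.
    static Object[] exampleRow() {
        return new Object[] {
            Date.valueOf("2008-11-06"),  // <name>_day (DATE, not null)
            Time.valueOf("09:00:00"),    // <name>_begin (TIME)
            Time.valueOf("17:30:00"),    // <name>_end (TIME)
            "Europe/Brussels"            // <name>_timezone (VARCHAR, not null)
        };
    }
}
A null interval is written as null in all four columns, and toObjectValue maps such a row back to null.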
|
|
Java
|
apache-2.0
|
eb7dcaad69a1bac03f387574a0673994c43b2af9
| 0 |
ontop/ontop,ontop/ontop,ontop/ontop,ontop/ontop,ontop/ontop
|
package it.unibz.inf.ontop.spec.mapping.transformer.impl;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.inject.Inject;
import it.unibz.inf.ontop.constraints.ImmutableCQ;
import it.unibz.inf.ontop.constraints.ImmutableCQContainmentCheck;
import it.unibz.inf.ontop.injection.IntermediateQueryFactory;
import it.unibz.inf.ontop.iq.IQ;
import it.unibz.inf.ontop.iq.IQTree;
import it.unibz.inf.ontop.iq.UnaryIQTree;
import it.unibz.inf.ontop.iq.node.*;
import it.unibz.inf.ontop.iq.transform.impl.ChildTransformer;
import it.unibz.inf.ontop.iq.transform.impl.DefaultNonRecursiveIQTreeTransformer;
import it.unibz.inf.ontop.iq.transform.impl.DefaultRecursiveIQTreeVisitingTransformer;
import it.unibz.inf.ontop.model.atom.AtomPredicate;
import it.unibz.inf.ontop.model.atom.DataAtom;
import it.unibz.inf.ontop.model.term.ImmutableExpression;
import it.unibz.inf.ontop.model.term.ImmutableTerm;
import it.unibz.inf.ontop.model.term.TermFactory;
import it.unibz.inf.ontop.model.term.Variable;
import it.unibz.inf.ontop.spec.mapping.transformer.MappingCQCOptimizer;
import it.unibz.inf.ontop.utils.ImmutableCollectors;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import java.util.stream.Stream;
import static it.unibz.inf.ontop.model.term.functionsymbol.ExpressionOperation.AND;
public class MappingCQCOptimizerImpl implements MappingCQCOptimizer {
private final IntermediateQueryFactory iqFactory;
private final TermFactory termFactory;
@Inject
public MappingCQCOptimizerImpl(IntermediateQueryFactory iqFactory, TermFactory termFactory) {
this.iqFactory = iqFactory;
this.termFactory = termFactory;
}
@Override
public IQ optimize(ImmutableCQContainmentCheck cqContainmentCheck, IQ query) {
IQTree tree0 = query.getTree().acceptTransformer(new DefaultRecursiveIQTreeVisitingTransformer(iqFactory) {
@Override
public IQTree transformInnerJoin(IQTree tree, InnerJoinNode rootNode, ImmutableList<IQTree> children) {
ImmutableList<IQTree> joinChildren = children.stream().filter(c -> c.getRootNode() instanceof InnerJoinNode).collect(ImmutableCollectors.toList());
ImmutableList<ImmutableExpression> filters = joinChildren.stream()
.map(c -> ((InnerJoinNode)c.getRootNode()).getOptionalFilterCondition())
.filter(Optional::isPresent)
.map(Optional::get)
.collect(ImmutableCollectors.toList());
return iqFactory.createNaryIQTree(
filters.isEmpty()
? rootNode
: iqFactory.createInnerJoinNode(getConjunction(rootNode.getOptionalFilterCondition(), filters)),
Stream.concat(
children.stream().filter(c -> !(c.getRootNode() instanceof InnerJoinNode)),
joinChildren.stream().flatMap(c -> c.getChildren().stream()))
.map(t -> t.acceptTransformer(this))
.collect(ImmutableCollectors.toList()));
}
});
IQTree tree = tree0.acceptTransformer(new FilterChildNormalizer());
if (tree.getRootNode() instanceof ConstructionNode) {
ConstructionNode constructionNode = (ConstructionNode)tree.getRootNode();
if (tree.getChildren().size() == 1 && tree.getChildren().get(0).getRootNode() instanceof InnerJoinNode) {
IQTree joinTree = tree.getChildren().get(0);
InnerJoinNode joinNode = (InnerJoinNode) joinTree.getRootNode();
ImmutableSet<Variable> answerVariables = Stream.concat(
constructionNode.getSubstitution().getImmutableMap().values().stream()
.flatMap(ImmutableTerm::getVariableStream),
joinNode.getOptionalFilterCondition()
.map(ImmutableTerm::getVariableStream).orElse(Stream.of()))
.collect(ImmutableCollectors.toSet());
System.out.println("CQC " + tree + " WITH " + answerVariables);
if (joinTree.getChildren().stream().anyMatch(c -> !(c.getRootNode() instanceof DataNode))) {
System.out.println("CQC PANIC - NOT A JOIN OF DATA ATOMS");
}
else {
if (joinTree.getChildren().size() < 2) {
System.out.println("CQC: NOTHING TO OPTIMIZE");
return query;
}
List<IQTree> children = new ArrayList<>(joinTree.getChildren());
int currentIndex = 0;
while (currentIndex < children.size()) {
List<IQTree> toLeave = new ArrayList<>(children.size() - 1);
for (int j = 0; j < children.size(); j++)
if (currentIndex != j)
toLeave.add(children.get(j));
ImmutableSet<Variable> variablesInToLeave = toLeave.stream().flatMap(a -> a.getVariables().stream()).collect(ImmutableCollectors.toSet());
if (!variablesInToLeave.containsAll(answerVariables))
continue;
System.out.println("CHECK H: " + children + " TO " + toLeave);
ImmutableList<Variable> avList = ImmutableList.copyOf(answerVariables);
ImmutableList<DataAtom<AtomPredicate>> from = children.stream()
.map(n -> ((DataNode<AtomPredicate>)n.getRootNode()).getProjectionAtom())
.collect(ImmutableCollectors.toList());
ImmutableList<DataAtom<AtomPredicate>> to = toLeave.stream()
.map(n -> ((DataNode<AtomPredicate>)n.getRootNode()).getProjectionAtom())
.collect(ImmutableCollectors.toList());
if (cqContainmentCheck.isContainedIn(new ImmutableCQ<>(avList, to), new ImmutableCQ<>(avList, from))) {
System.out.println("POSITIVE");
children.remove(currentIndex);
if (children.size() < 2)
break;
currentIndex = 0; // reset
}
else {
currentIndex++;
System.out.println("NEGATIVE");
}
}
return iqFactory.createIQ(
query.getProjectionAtom(),
iqFactory.createUnaryIQTree(
(ConstructionNode)tree.getRootNode(),
(children.size() < 2)
? (joinNode.getOptionalFilterCondition().isPresent()
? iqFactory.createUnaryIQTree(iqFactory.createFilterNode(joinNode.getOptionalFilterCondition().get()), children.get(0))
: children.get(0))
: iqFactory.createNaryIQTree(joinNode, ImmutableList.copyOf(children))));
}
}
}
return query;
}
// PINCHED FROM ExplicitEqualityTransformerImpl
// TODO: extract as an independent class
/**
* Affects each outermost filter or (left) join n in the tree.
* For each child of n, deletes its root if it is a filter node.
* Then:
* - if n is a join or filter: merge the boolean expressions
* - if n is a left join: merge boolean expressions coming from the right, and lift the ones coming from the left.
* This lift is only performed for optimization purposes: may save a subquery during SQL generation.
*/
class FilterChildNormalizer extends DefaultNonRecursiveIQTreeTransformer {
private final ChildTransformer childTransformer;
public FilterChildNormalizer() {
this.childTransformer = new ChildTransformer(iqFactory, this);
}
@Override
public IQTree transformLeftJoin(IQTree tree, LeftJoinNode rootNode, IQTree leftChild, IQTree rightChild) {
Optional<ImmutableExpression> leftChildChildExpression = getOptionalChildExpression(leftChild);
Optional<ImmutableExpression> rightChildExpression = getOptionalChildExpression(rightChild);
if (!leftChildChildExpression.isPresent() && !rightChildExpression.isPresent())
return tree;
IQTree leftJoinTree = iqFactory.createBinaryNonCommutativeIQTree(
rightChildExpression.isPresent()
? iqFactory.createLeftJoinNode(getConjunction(
rootNode.getOptionalFilterCondition(),
ImmutableList.of(rightChildExpression.get())))
: rootNode,
trimRootFilter(leftChild),
trimRootFilter(rightChild));
return leftChildChildExpression.isPresent()
? iqFactory.createUnaryIQTree(iqFactory.createFilterNode(leftChildChildExpression.get()), leftJoinTree)
: leftJoinTree;
}
@Override
public IQTree transformInnerJoin(IQTree tree, InnerJoinNode rootNode, ImmutableList<IQTree> children) {
ImmutableList<ImmutableExpression> filterChildExpressions = getChildExpressions(children);
if (filterChildExpressions.isEmpty())
return tree;
return iqFactory.createNaryIQTree(
iqFactory.createInnerJoinNode(getConjunction(
rootNode.getOptionalFilterCondition(),
filterChildExpressions)),
children.stream()
.map(this::trimRootFilter)
.collect(ImmutableCollectors.toList()));
}
@Override
public IQTree transformFilter(IQTree tree, FilterNode rootNode, IQTree child) {
ImmutableList<ImmutableExpression> filterChildExpressions = getChildExpressions(ImmutableList.of(child));
if (filterChildExpressions.isEmpty())
return tree;
return iqFactory.createUnaryIQTree(
iqFactory.createFilterNode(getConjunction(
Optional.of(rootNode.getFilterCondition()),
filterChildExpressions).get()),
trimRootFilter(child));
}
private ImmutableList<ImmutableExpression> getChildExpressions(ImmutableList<IQTree> children) {
return children.stream()
.filter(t -> t.getRootNode() instanceof FilterNode)
.map(t -> ((FilterNode) t.getRootNode()).getFilterCondition())
.collect(ImmutableCollectors.toList());
}
private Optional<ImmutableExpression> getOptionalChildExpression(IQTree child) {
QueryNode root = child.getRootNode();
return root instanceof FilterNode
? Optional.of(((FilterNode) root).getFilterCondition())
: Optional.empty();
}
private IQTree trimRootFilter(IQTree tree) {
return tree.getRootNode() instanceof FilterNode
? ((UnaryIQTree) tree).getChild()
: tree;
}
protected IQTree transformUnaryNode(IQTree tree, UnaryOperatorNode rootNode, IQTree child) {
return childTransformer.transform(tree);
}
protected IQTree transformNaryCommutativeNode(IQTree tree, NaryOperatorNode rootNode, ImmutableList<IQTree> children) {
return childTransformer.transform(tree);
}
protected IQTree transformBinaryNonCommutativeNode(IQTree tree, BinaryNonCommutativeOperatorNode rootNode, IQTree leftChild, IQTree rightChild) {
return childTransformer.transform(tree);
}
}
private Optional<ImmutableExpression> getConjunction(Optional<ImmutableExpression> optExpression, List<ImmutableExpression> expressions) {
if (expressions.isEmpty())
throw new IllegalArgumentException("Nonempty list of filters expected");
ImmutableExpression result = (optExpression.isPresent()
? Stream.concat(Stream.of(optExpression.get()), expressions.stream())
: expressions.stream())
.reduce(null,
(a, b) -> (a == null) ? b : termFactory.getImmutableExpression(AND, a, b));
return Optional.of(result);
}
}
|
mapping/core/src/main/java/it/unibz/inf/ontop/spec/mapping/transformer/impl/MappingCQCOptimizerImpl.java
|
package it.unibz.inf.ontop.spec.mapping.transformer.impl;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.inject.Inject;
import it.unibz.inf.ontop.constraints.ImmutableCQ;
import it.unibz.inf.ontop.constraints.ImmutableCQContainmentCheck;
import it.unibz.inf.ontop.injection.IntermediateQueryFactory;
import it.unibz.inf.ontop.iq.IQ;
import it.unibz.inf.ontop.iq.IQTree;
import it.unibz.inf.ontop.iq.UnaryIQTree;
import it.unibz.inf.ontop.iq.node.*;
import it.unibz.inf.ontop.iq.transform.impl.ChildTransformer;
import it.unibz.inf.ontop.iq.transform.impl.DefaultNonRecursiveIQTreeTransformer;
import it.unibz.inf.ontop.iq.transform.impl.DefaultRecursiveIQTreeVisitingTransformer;
import it.unibz.inf.ontop.model.atom.AtomPredicate;
import it.unibz.inf.ontop.model.atom.DataAtom;
import it.unibz.inf.ontop.model.term.ImmutableExpression;
import it.unibz.inf.ontop.model.term.ImmutableTerm;
import it.unibz.inf.ontop.model.term.TermFactory;
import it.unibz.inf.ontop.model.term.Variable;
import it.unibz.inf.ontop.spec.mapping.transformer.MappingCQCOptimizer;
import it.unibz.inf.ontop.utils.ImmutableCollectors;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import java.util.stream.Stream;
import static it.unibz.inf.ontop.model.term.functionsymbol.ExpressionOperation.AND;
public class MappingCQCOptimizerImpl implements MappingCQCOptimizer {
private final IntermediateQueryFactory iqFactory;
private final TermFactory termFactory;
@Inject
public MappingCQCOptimizerImpl(IntermediateQueryFactory iqFactory, TermFactory termFactory) {
this.iqFactory = iqFactory;
this.termFactory = termFactory;
}
@Override
public IQ optimize(ImmutableCQContainmentCheck cqContainmentCheck, IQ query) {
IQTree tree0 = query.getTree().acceptTransformer(new DefaultRecursiveIQTreeVisitingTransformer(iqFactory) {
@Override
public IQTree transformInnerJoin(IQTree tree, InnerJoinNode rootNode, ImmutableList<IQTree> children) {
ImmutableList<IQTree> joinChildren = children.stream().filter(c -> c.getRootNode() instanceof InnerJoinNode).collect(ImmutableCollectors.toList());
ImmutableList<ImmutableExpression> filters = joinChildren.stream()
.map(c -> ((InnerJoinNode)c.getRootNode()).getOptionalFilterCondition())
.filter(Optional::isPresent)
.map(Optional::get)
.collect(ImmutableCollectors.toList());
return iqFactory.createNaryIQTree(
filters.isEmpty()
? rootNode
: iqFactory.createInnerJoinNode(getConjunction(rootNode.getOptionalFilterCondition(), filters)),
Stream.concat(
children.stream().filter(c -> !(c.getRootNode() instanceof InnerJoinNode)),
joinChildren.stream().flatMap(c -> c.getChildren().stream()))
.map(t -> t.acceptTransformer(this))
.collect(ImmutableCollectors.toList()));
}
});
IQTree tree = tree0.acceptTransformer(new FilterChildNormalizer());
if (tree.getRootNode() instanceof ConstructionNode) {
ConstructionNode constructionNode = (ConstructionNode)tree.getRootNode();
if (tree.getChildren().size() == 1 && tree.getChildren().get(0).getRootNode() instanceof InnerJoinNode) {
IQTree joinTree = tree.getChildren().get(0);
InnerJoinNode joinNode = (InnerJoinNode) joinTree.getRootNode();
ImmutableSet<Variable> answerVariables = Stream.concat(
constructionNode.getSubstitution().getImmutableMap().values().stream()
.flatMap(ImmutableTerm::getVariableStream),
joinNode.getOptionalFilterCondition()
.map(ImmutableTerm::getVariableStream).orElse(Stream.of()))
.collect(ImmutableCollectors.toSet());
System.out.println("CQC " + tree + " WITH " + answerVariables);
if (joinTree.getChildren().stream().anyMatch(c -> !(c.getRootNode() instanceof DataNode))) {
System.out.println("CQC PANIC - NOT A JOIN OF DATA ATOMS");
}
else {
if (joinTree.getChildren().size() < 2) {
System.out.println("CQC: NOTHING TO OPTIMIZE");
return query;
}
List<IQTree> children = new ArrayList<>(joinTree.getChildren());
int currentIndex = 0;
while (currentIndex < children.size()) {
List<IQTree> toLeave = new ArrayList<>(children.size() - 1);
for (int j = 0; j < children.size(); j++)
if (currentIndex != j)
toLeave.add(children.get(j));
System.out.println("CHECK H: " + children + " TO " + toLeave);
ImmutableSet<Variable> variablesInToLeave = toLeave.stream().flatMap(a -> a.getVariables().stream()).collect(ImmutableCollectors.toSet());
if (!variablesInToLeave.containsAll(answerVariables)) {
System.out.println("VARIABLES " + variablesInToLeave + " NOT " + answerVariables);
continue;
}
ImmutableList<Variable> avList = ImmutableList.copyOf(answerVariables);
ImmutableList<DataAtom<AtomPredicate>> from = children.stream()
.map(n -> ((DataNode<AtomPredicate>)n.getRootNode()).getProjectionAtom())
.collect(ImmutableCollectors.toList());
ImmutableList<DataAtom<AtomPredicate>> to = toLeave.stream()
.map(n -> ((DataNode<AtomPredicate>)n.getRootNode()).getProjectionAtom())
.collect(ImmutableCollectors.toList());
if (cqContainmentCheck.isContainedIn(new ImmutableCQ<>(avList, to), new ImmutableCQ<>(avList, from))) {
System.out.println("POSITIVE");
children.remove(currentIndex);
if (children.size() < 2)
break;
currentIndex = 0; // reset
}
else {
currentIndex++;
System.out.println("NEGATIVE");
}
}
return iqFactory.createIQ(
query.getProjectionAtom(),
iqFactory.createUnaryIQTree(
(ConstructionNode)tree.getRootNode(),
(children.size() < 2)
? (joinNode.getOptionalFilterCondition().isPresent()
? iqFactory.createUnaryIQTree(iqFactory.createFilterNode(joinNode.getOptionalFilterCondition().get()), children.get(0))
: children.get(0))
: iqFactory.createNaryIQTree(joinNode, ImmutableList.copyOf(children))));
}
}
}
return query;
}
// PINCHED FROM ExplicitEqualityTransformerImpl
// TODO: extract as an independent class
/**
* Affects each outermost filter or (left) join n in the tree.
* For each child of n, deletes its root if it is a filter node.
* Then:
* - if n is a join or filter: merge the boolean expressions
* - if n is a left join: merge boolean expressions coming from the right, and lift the ones coming from the left.
* This lift is only performed for optimization purposes: may save a subquery during SQL generation.
*/
class FilterChildNormalizer extends DefaultNonRecursiveIQTreeTransformer {
private final ChildTransformer childTransformer;
public FilterChildNormalizer() {
this.childTransformer = new ChildTransformer(iqFactory, this);
}
@Override
public IQTree transformLeftJoin(IQTree tree, LeftJoinNode rootNode, IQTree leftChild, IQTree rightChild) {
Optional<ImmutableExpression> leftChildChildExpression = getOptionalChildExpression(leftChild);
Optional<ImmutableExpression> rightChildExpression = getOptionalChildExpression(rightChild);
if (!leftChildChildExpression.isPresent() && !rightChildExpression.isPresent())
return tree;
IQTree leftJoinTree = iqFactory.createBinaryNonCommutativeIQTree(
rightChildExpression.isPresent()
? iqFactory.createLeftJoinNode(getConjunction(
rootNode.getOptionalFilterCondition(),
ImmutableList.of(rightChildExpression.get())))
: rootNode,
trimRootFilter(leftChild),
trimRootFilter(rightChild));
return leftChildChildExpression.isPresent()
? iqFactory.createUnaryIQTree(iqFactory.createFilterNode(leftChildChildExpression.get()), leftJoinTree)
: leftJoinTree;
}
@Override
public IQTree transformInnerJoin(IQTree tree, InnerJoinNode rootNode, ImmutableList<IQTree> children) {
ImmutableList<ImmutableExpression> filterChildExpressions = getChildExpressions(children);
if (filterChildExpressions.isEmpty())
return tree;
return iqFactory.createNaryIQTree(
iqFactory.createInnerJoinNode(getConjunction(
rootNode.getOptionalFilterCondition(),
filterChildExpressions)),
children.stream()
.map(this::trimRootFilter)
.collect(ImmutableCollectors.toList()));
}
@Override
public IQTree transformFilter(IQTree tree, FilterNode rootNode, IQTree child) {
ImmutableList<ImmutableExpression> filterChildExpressions = getChildExpressions(ImmutableList.of(child));
if (filterChildExpressions.isEmpty())
return tree;
return iqFactory.createUnaryIQTree(
iqFactory.createFilterNode(getConjunction(
Optional.of(rootNode.getFilterCondition()),
filterChildExpressions).get()),
trimRootFilter(child));
}
private ImmutableList<ImmutableExpression> getChildExpressions(ImmutableList<IQTree> children) {
return children.stream()
.filter(t -> t.getRootNode() instanceof FilterNode)
.map(t -> ((FilterNode) t.getRootNode()).getFilterCondition())
.collect(ImmutableCollectors.toList());
}
private Optional<ImmutableExpression> getOptionalChildExpression(IQTree child) {
QueryNode root = child.getRootNode();
return root instanceof FilterNode
? Optional.of(((FilterNode) root).getFilterCondition())
: Optional.empty();
}
private IQTree trimRootFilter(IQTree tree) {
return tree.getRootNode() instanceof FilterNode
? ((UnaryIQTree) tree).getChild()
: tree;
}
protected IQTree transformUnaryNode(IQTree tree, UnaryOperatorNode rootNode, IQTree child) {
return childTransformer.transform(tree);
}
protected IQTree transformNaryCommutativeNode(IQTree tree, NaryOperatorNode rootNode, ImmutableList<IQTree> children) {
return childTransformer.transform(tree);
}
protected IQTree transformBinaryNonCommutativeNode(IQTree tree, BinaryNonCommutativeOperatorNode rootNode, IQTree leftChild, IQTree rightChild) {
return childTransformer.transform(tree);
}
}
private Optional<ImmutableExpression> getConjunction(Optional<ImmutableExpression> optExpression, List<ImmutableExpression> expressions) {
if (expressions.isEmpty())
throw new IllegalArgumentException("Nonempty list of filters expected");
ImmutableExpression result = (optExpression.isPresent()
? Stream.concat(Stream.of(optExpression.get()), expressions.stream())
: expressions.stream())
.reduce(null,
(a, b) -> (a == null) ? b : termFactory.getImmutableExpression(AND, a, b));
return Optional.of(result);
}
}
|
minor fix
|
mapping/core/src/main/java/it/unibz/inf/ontop/spec/mapping/transformer/impl/MappingCQCOptimizerImpl.java
|
minor fix
|
<ide><path>apping/core/src/main/java/it/unibz/inf/ontop/spec/mapping/transformer/impl/MappingCQCOptimizerImpl.java
<ide> if (currentIndex != j)
<ide> toLeave.add(children.get(j));
<ide>
<add> ImmutableSet<Variable> variablesInToLeave = toLeave.stream().flatMap(a -> a.getVariables().stream()).collect(ImmutableCollectors.toSet());
<add> if (!variablesInToLeave.containsAll(answerVariables))
<add> continue;
<add>
<ide> System.out.println("CHECK H: " + children + " TO " + toLeave);
<ide>
<del> ImmutableSet<Variable> variablesInToLeave = toLeave.stream().flatMap(a -> a.getVariables().stream()).collect(ImmutableCollectors.toSet());
<del> if (!variablesInToLeave.containsAll(answerVariables)) {
<del> System.out.println("VARIABLES " + variablesInToLeave + " NOT " + answerVariables);
<del> continue;
<del> }
<ide> ImmutableList<Variable> avList = ImmutableList.copyOf(answerVariables);
<ide> ImmutableList<DataAtom<AtomPredicate>> from = children.stream()
<ide> .map(n -> ((DataNode<AtomPredicate>)n.getRootNode()).getProjectionAtom())
|
|
Java
|
mpl-2.0
|
86157858f2f53777f0760ebe1fb0d9c08e05356d
| 0 |
RoyalDev/RoyalCommands,joansmith/RoyalCommands
|
package org.royaldev.royalcommands.rcommands;
import org.bukkit.ChatColor;
import org.bukkit.World;
import org.bukkit.command.Command;
import org.bukkit.command.CommandExecutor;
import org.bukkit.command.CommandSender;
import org.bukkit.entity.Player;
import org.royaldev.royalcommands.RUtils;
import org.royaldev.royalcommands.RoyalCommands;
import java.math.RoundingMode;
import java.text.DecimalFormat;
import java.util.HashMap;
import java.util.Map;
public class CmdTime implements CommandExecutor {
static RoyalCommands plugin;
public CmdTime(RoyalCommands instance) {
plugin = instance;
}
public static void smoothTimeChange(long time, final World world) {
if (time > 24000) time = time % 24000L;
final long ftime = time;
final Runnable r = new Runnable() {
@Override
public void run() {
for (long i = world.getTime() + 1; i != ftime; i++) {
if (i == 24001) {
i = 0;
if (ftime == 0) break;
}
world.setTime(i);
}
world.setTime(ftime);
}
};
plugin.getServer().getScheduler().scheduleSyncDelayedTask(plugin, r);
}
public static Long getValidTime(String time) {
Long vtime;
try {
vtime = Long.valueOf(time);
if (vtime > 24000L) vtime = vtime % 24000L;
} catch (Exception e) {
if (time.equalsIgnoreCase("day")) vtime = 0L;
else if (time.equalsIgnoreCase("midday") || time.equalsIgnoreCase("noon")) vtime = 6000L;
else if (time.equalsIgnoreCase("sunset") || time.equalsIgnoreCase("sundown") || time.equalsIgnoreCase("dusk"))
vtime = 12000L;
else if (time.equalsIgnoreCase("night") || time.equalsIgnoreCase("dark")) vtime = 14000L;
else if (time.equalsIgnoreCase("midnight")) vtime = 18000L;
else if (time.equalsIgnoreCase("sunrise") || time.equalsIgnoreCase("sunup") || time.equalsIgnoreCase("dawn"))
vtime = 23000L;
else return null;
}
return vtime;
}
public static Map<String, String> getRealTime(long ticks) {
if (ticks > 24000L) ticks = ticks % 24000L;
DecimalFormat df = new DecimalFormat("00");
df.setRoundingMode(RoundingMode.DOWN);
float thour = 1000F;
float tminute = 16.6666666666666666666666666666666666666666666666666666666666666666666666F;
float hour = (ticks / thour) + 6F;
if (hour >= 24F) hour = hour - 24F;
float minute = (ticks % thour) / tminute;
String meridian = (hour >= 12F) ? "PM" : "AM";
float twelvehour = (hour > 12F) ? hour - 12F : hour;
if (df.format(twelvehour).equals("00")) twelvehour = 12F;
Map<String, String> toRet = new HashMap<String, String>();
toRet.put("24h", df.format(hour) + ":" + df.format(minute));
toRet.put("12h", df.format(twelvehour) + ":" + df.format(minute) + " " + meridian);
return toRet;
}
@Override
public boolean onCommand(CommandSender cs, Command cmd, String label, String[] args) {
if (cmd.getName().equalsIgnoreCase("time")) {
if (!plugin.isAuthorized(cs, "rcmds.time")) {
RUtils.dispNoPerms(cs);
return true;
}
if (args.length < 1) {
if (!(cs instanceof Player))
for (World w : plugin.getServer().getWorlds()) {
long ticks = w.getTime();
Map<String, String> times = getRealTime(ticks);
cs.sendMessage(ChatColor.BLUE + "The current time in " + ChatColor.GRAY + w.getName() + ChatColor.BLUE + " is " + ChatColor.GRAY + ticks + " ticks" + ChatColor.BLUE + " (" + ChatColor.GRAY + times.get("24h") + ChatColor.BLUE + "/" + ChatColor.GRAY + times.get("12h") + ChatColor.BLUE + ").");
}
else {
Player p = (Player) cs;
World w = p.getWorld();
long ticks = w.getTime();
Map<String, String> times = getRealTime(ticks);
cs.sendMessage(ChatColor.BLUE + "The current time in " + ChatColor.GRAY + w.getName() + ChatColor.BLUE + " is " + ChatColor.GRAY + ticks + " ticks" + ChatColor.BLUE + " (" + ChatColor.GRAY + times.get("24h") + ChatColor.BLUE + "/" + ChatColor.GRAY + times.get("12h") + ChatColor.BLUE + ").");
}
return true;
}
if (args.length > 0 && args[0].equals("?") || args[0].equalsIgnoreCase("help")) {
cs.sendMessage(cmd.getDescription());
return false;
}
String target = "";
if (!(cs instanceof Player) && args.length < 2) target = "*";
else if ((cs instanceof Player) && args.length < 2) target = ((Player) cs).getWorld().getName();
if (args.length > 1) target = args[1];
if (target.equalsIgnoreCase("all")) target = "*";
if (plugin.getServer().getWorld(target) == null && !target.equals("*")) {
cs.sendMessage(ChatColor.RED + "No such world!");
return true;
}
World w = (!target.equals("*")) ? plugin.getServer().getWorld(target) : null;
Long ticks = getValidTime(args[0]);
if (ticks == null) {
if (plugin.getServer().getWorld(args[0]) != null) {
w = plugin.getServer().getWorld(args[0]);
ticks = w.getTime();
Map<String, String> times = getRealTime(ticks);
cs.sendMessage(ChatColor.BLUE + "The current time in " + ChatColor.GRAY + w.getName() + ChatColor.BLUE + " is " + ChatColor.GRAY + ticks + " ticks" + ChatColor.BLUE + " (" + ChatColor.GRAY + times.get("24h") + ChatColor.BLUE + "/" + ChatColor.GRAY + times.get("12h") + ChatColor.BLUE + ").");
return true;
}
cs.sendMessage(ChatColor.RED + "Invalid time specified!");
return true;
}
Map<String, String> times = getRealTime(ticks);
if (w == null) {
for (World ws : plugin.getServer().getWorlds()) {
if (plugin.smoothTime) smoothTimeChange(ticks, ws);
else ws.setTime(ticks);
}
cs.sendMessage(ChatColor.BLUE + "Set time in all worlds to " + ChatColor.GRAY + ticks + " ticks" + ChatColor.BLUE + " (" + ChatColor.GRAY + times.get("24h") + ChatColor.BLUE + "/" + ChatColor.GRAY + times.get("12h") + ChatColor.BLUE + ").");
} else {
if (plugin.smoothTime) smoothTimeChange(ticks, w);
else w.setTime(ticks);
cs.sendMessage(ChatColor.BLUE + "Set time in " + ChatColor.GRAY + w.getName() + ChatColor.BLUE + " to " + ChatColor.GRAY + ticks + " ticks" + ChatColor.BLUE + " (" + ChatColor.GRAY + times.get("24h") + ChatColor.BLUE + "/" + ChatColor.GRAY + times.get("12h") + ChatColor.BLUE + ").");
}
return true;
}
return false;
}
}
|
src/org/royaldev/royalcommands/rcommands/CmdTime.java
|
package org.royaldev.royalcommands.rcommands;
import org.bukkit.ChatColor;
import org.bukkit.World;
import org.bukkit.command.Command;
import org.bukkit.command.CommandExecutor;
import org.bukkit.command.CommandSender;
import org.bukkit.entity.Player;
import org.royaldev.royalcommands.RUtils;
import org.royaldev.royalcommands.RoyalCommands;
import java.math.RoundingMode;
import java.text.DecimalFormat;
import java.util.HashMap;
import java.util.Map;
public class CmdTime implements CommandExecutor {
static RoyalCommands plugin;
public CmdTime(RoyalCommands instance) {
plugin = instance;
}
public static void smoothTimeChange(long time, final World world) {
if (time > 24000) time = time % 24000L;
final long ftime = time;
final Runnable r = new Runnable() {
@Override
public void run() {
for (long i = world.getTime() + 1; i != ftime; i++) {
if (i == 24001) {
i = 0;
if (ftime == 0) break;
}
world.setTime(i);
}
world.setTime(ftime);
}
};
plugin.getServer().getScheduler().scheduleAsyncDelayedTask(plugin, r);
}
public static Long getValidTime(String time) {
Long vtime;
try {
vtime = Long.valueOf(time);
if (vtime > 24000L) vtime = vtime % 24000L;
} catch (Exception e) {
if (time.equalsIgnoreCase("day")) vtime = 0L;
else if (time.equalsIgnoreCase("midday") || time.equalsIgnoreCase("noon")) vtime = 6000L;
else if (time.equalsIgnoreCase("sunset") || time.equalsIgnoreCase("sundown") || time.equalsIgnoreCase("dusk"))
vtime = 12000L;
else if (time.equalsIgnoreCase("night") || time.equalsIgnoreCase("dark")) vtime = 14000L;
else if (time.equalsIgnoreCase("midnight")) vtime = 18000L;
else if (time.equalsIgnoreCase("sunrise") || time.equalsIgnoreCase("sunup") || time.equalsIgnoreCase("dawn"))
vtime = 23000L;
else return null;
}
return vtime;
}
public static Map<String, String> getRealTime(long ticks) {
if (ticks > 24000L) ticks = ticks % 24000L;
DecimalFormat df = new DecimalFormat("00");
df.setRoundingMode(RoundingMode.DOWN);
float thour = 1000F;
float tminute = 16.6666666666666666666666666666666666666666666666666666666666666666666666F;
float hour = (ticks / thour) + 6F;
if (hour >= 24F) hour = hour - 24F;
float minute = (ticks % thour) / tminute;
String meridian = (hour >= 12F) ? "PM" : "AM";
float twelvehour = (hour > 12F) ? hour - 12F : hour;
if (df.format(twelvehour).equals("00")) twelvehour = 12F;
Map<String, String> toRet = new HashMap<String, String>();
toRet.put("24h", df.format(hour) + ":" + df.format(minute));
toRet.put("12h", df.format(twelvehour) + ":" + df.format(minute) + " " + meridian);
return toRet;
}
@Override
public boolean onCommand(CommandSender cs, Command cmd, String label, String[] args) {
if (cmd.getName().equalsIgnoreCase("time")) {
if (!plugin.isAuthorized(cs, "rcmds.time")) {
RUtils.dispNoPerms(cs);
return true;
}
if (args.length < 1) {
if (!(cs instanceof Player))
for (World w : plugin.getServer().getWorlds()) {
long ticks = w.getTime();
Map<String, String> times = getRealTime(ticks);
cs.sendMessage(ChatColor.BLUE + "The current time in " + ChatColor.GRAY + w.getName() + ChatColor.BLUE + " is " + ChatColor.GRAY + ticks + " ticks" + ChatColor.BLUE + " (" + ChatColor.GRAY + times.get("24h") + ChatColor.BLUE + "/" + ChatColor.GRAY + times.get("12h") + ChatColor.BLUE + ").");
}
else {
Player p = (Player) cs;
World w = p.getWorld();
long ticks = w.getTime();
Map<String, String> times = getRealTime(ticks);
cs.sendMessage(ChatColor.BLUE + "The current time in " + ChatColor.GRAY + w.getName() + ChatColor.BLUE + " is " + ChatColor.GRAY + ticks + " ticks" + ChatColor.BLUE + " (" + ChatColor.GRAY + times.get("24h") + ChatColor.BLUE + "/" + ChatColor.GRAY + times.get("12h") + ChatColor.BLUE + ").");
}
return true;
}
if (args.length > 0 && args[0].equals("?") || args[0].equalsIgnoreCase("help")) {
cs.sendMessage(cmd.getDescription());
return false;
}
String target = "";
if (!(cs instanceof Player) && args.length < 2) target = "*";
else if ((cs instanceof Player) && args.length < 2) target = ((Player) cs).getWorld().getName();
if (args.length > 1) target = args[1];
if (target.equalsIgnoreCase("all")) target = "*";
if (plugin.getServer().getWorld(target) == null && !target.equals("*")) {
cs.sendMessage(ChatColor.RED + "No such world!");
return true;
}
World w = (!target.equals("*")) ? plugin.getServer().getWorld(target) : null;
Long ticks = getValidTime(args[0]);
if (ticks == null) {
if (plugin.getServer().getWorld(args[0]) != null) {
w = plugin.getServer().getWorld(args[0]);
ticks = w.getTime();
Map<String, String> times = getRealTime(ticks);
cs.sendMessage(ChatColor.BLUE + "The current time in " + ChatColor.GRAY + w.getName() + ChatColor.BLUE + " is " + ChatColor.GRAY + ticks + " ticks" + ChatColor.BLUE + " (" + ChatColor.GRAY + times.get("24h") + ChatColor.BLUE + "/" + ChatColor.GRAY + times.get("12h") + ChatColor.BLUE + ").");
return true;
}
cs.sendMessage(ChatColor.RED + "Invalid time specified!");
return true;
}
Map<String, String> times = getRealTime(ticks);
if (w == null) {
for (World ws : plugin.getServer().getWorlds()) {
if (plugin.smoothTime) smoothTimeChange(ticks, ws);
else ws.setTime(ticks);
}
cs.sendMessage(ChatColor.BLUE + "Set time in all worlds to " + ChatColor.GRAY + ticks + " ticks" + ChatColor.BLUE + " (" + ChatColor.GRAY + times.get("24h") + ChatColor.BLUE + "/" + ChatColor.GRAY + times.get("12h") + ChatColor.BLUE + ").");
} else {
if (plugin.smoothTime) smoothTimeChange(ticks, w);
else w.setTime(ticks);
cs.sendMessage(ChatColor.BLUE + "Set time in " + ChatColor.GRAY + w.getName() + ChatColor.BLUE + " to " + ChatColor.GRAY + ticks + " ticks" + ChatColor.BLUE + " (" + ChatColor.GRAY + times.get("24h") + ChatColor.BLUE + "/" + ChatColor.GRAY + times.get("12h") + ChatColor.BLUE + ").");
}
return true;
}
return false;
}
}
|
Fixed: /time scheduling wrong task
|
src/org/royaldev/royalcommands/rcommands/CmdTime.java
|
Fixed: /time scheduling wrong task
|
<ide><path>rc/org/royaldev/royalcommands/rcommands/CmdTime.java
<ide> world.setTime(ftime);
<ide> }
<ide> };
<del> plugin.getServer().getScheduler().scheduleAsyncDelayedTask(plugin, r);
<add> plugin.getServer().getScheduler().scheduleSyncDelayedTask(plugin, r);
<ide> }
<ide>
<ide> public static Long getValidTime(String time) {
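The async-to-sync change matters because the Bukkit API is not thread-safe: world state such as the time may only be mutated from the main server thread, so the runnable has to go through the synchronous scheduler. A minimal sketch of the safe pattern, assuming a Plugin instance and a target World (class and variable names are placeholders, not taken from this repository):
import org.bukkit.World;
import org.bukkit.plugin.Plugin;
class MainThreadTimeChange {
    // World state may only be touched on the main server thread, hence the
    // *sync* delayed task rather than the async one the old code used.
    static void setTimeOnMainThread(Plugin plugin, World world, long ticks) {
        plugin.getServer().getScheduler()
                .scheduleSyncDelayedTask(plugin, () -> world.setTime(ticks));
    }
}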
|
|
Java
|
apache-2.0
|
d5ab539c15a5798ab0ef299f85e1dcd50fe4c9b0
| 0 |
googleinterns/spec-math,googleinterns/spec-math,googleinterns/spec-math,googleinterns/spec-math
|
/*
Copyright 2020 Google LLC
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
https://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
import java.util.LinkedHashMap;
import java.util.List;
public class TypeChecker {
/**
* Returns true if the Object {@code value} is a LinkedHashMap, false otherwise.
*/
static boolean isObjectMap(Object value) {
return value instanceof LinkedHashMap;
}
/**
* Returns true if the Object {@code value} is a List, false otherwise.
*/
static boolean isObjectList(Object value){
return value instanceof List;
}
/**
* Returns true if the Object {@code value} is a primitive object, false otherwise.
*/
static boolean isObjectPrimitive(Object value) {
return value instanceof String
|| value instanceof Boolean
|| value instanceof Number
|| value instanceof Character;
}
}
|
library/src/main/java/TypeChecker.java
|
/*
Copyright 2020 Google LLC
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
https://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
import java.util.LinkedHashMap;
import java.util.List;
public class TypeChecker {
/**
* Returns true if the Object {@code value} is a LinkedHashMap, false otherwise.
*/
static boolean isObjectMap(Object value) {
return value instanceof LinkedHashMap;
}
/**
* Returns true if the Object {@code value} is a List, false otherwise.
*/
static boolean isObjectList(Object value){
return value instanceof List;
}
/**
* Returns true if the Object {@code value} is a primitive object, false otherwise.
*/
static boolean isObjectPrimitive(Object value) {
return value instanceof String
|| value instanceof Boolean
|| value instanceof Integer
|| value instanceof Double;
}
}
|
change in primitive types to be more general
|
library/src/main/java/TypeChecker.java
|
change in primitive types to be more general
|
<ide><path>ibrary/src/main/java/TypeChecker.java
<ide> static boolean isObjectPrimitive(Object value) {
<ide> return value instanceof String
<ide> || value instanceof Boolean
<del> || value instanceof Integer
<del> || value instanceof Double;
<add> || value instanceof Number
<add> || value instanceof Character;
<ide> }
<ide> }
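Switching from Integer/Double to Number/Character broadens the primitive check to every boxed numeric type. A small sketch of the difference, using invented sample values (this demo class is not part of the repository):
import java.math.BigDecimal;
class PrimitiveCheckDemo {
    public static void main(String[] args) {
        // All of these now count as primitive; only 1 and 2.0 would have
        // passed the old Integer/Double-only version of the check.
        Object[] samples = {1, 2.0, 3L, 4.5f, new BigDecimal("6"), 'c'};
        for (Object value : samples) {
            boolean primitive = value instanceof String
                    || value instanceof Boolean
                    || value instanceof Number
                    || value instanceof Character;
            System.out.println(value + " -> " + primitive);
        }
    }
}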
|
|
JavaScript
|
mpl-2.0
|
67432bb4924af8a50db3ac5dc4bcadd569c4dd94
| 0 |
slidewiki/notification-service,slidewiki/notification-service
|
/*
Handles the requests by running the required database and microservice calls and replying to the client. Uses promises to chain the asynchronous work.
*/
'use strict';
const boom = require('boom'), //Boom gives us some predefined http codes and proper responses
notificationsDB = require('../database/notificationsDatabase'), //Database functions specific for notifications
co = require('../common');
const Microservices = require('../configs/microservices');
let http = require('http');
module.exports = {
//Get notification from database or return NOT FOUND
getNotification: function(request, reply) {
return notificationsDB.get(encodeURIComponent(request.params.id)).then((notification) => {
if (co.isEmpty(notification))
reply(boom.notFound());
else {
return insertAuthor(notification).then((notification) => {
reply(co.rewriteID(notification));
}).catch((error) => {
request.log('error', error);
reply(boom.badImplementation());
});
}
}).catch((error) => {
request.log('error', error);
reply(boom.badImplementation());
});
},
//Create notification with new id and payload or return INTERNAL_SERVER_ERROR
newNotification: function(request, reply) {
return notificationsDB.insert(request.payload).then((inserted) => {
//console.log('inserted: ', inserted);
if (co.isEmpty(inserted.ops) || co.isEmpty(inserted.ops[0]))
throw inserted;
else {
return insertAuthor(inserted.ops[0]).then((notification) => {
reply(co.rewriteID(notification));
}).catch((error) => {
request.log('error', error);
reply(boom.badImplementation());
});
}
}).catch((error) => {
request.log('error', error);
reply(boom.badImplementation());
});
},
//Update notification with id id and payload or return INTERNAL_SERVER_ERROR
updateNotification: function(request, reply) {
return notificationsDB.replace(encodeURIComponent(request.params.id), request.payload).then((replaced) => {
//console.log('updated: ', replaced);
if (co.isEmpty(replaced.value))
throw replaced;
else
reply(replaced.value);
}).catch((error) => {
request.log('error', error);
reply(boom.badImplementation());
});
},
//Delete notification with id id
deleteNotification: function(request, reply) {
return notificationsDB.delete(encodeURIComponent(request.payload.id)).then(() =>
reply({'msg': 'notification is successfully deleted...'})
).catch((error) => {
request.log('error', error);
reply(boom.badImplementation());
});
},
//Delete notifications with subscribed_user_id
deleteNotifications: function(request, reply) {
return notificationsDB.deleteAllWithSubscribedUserID(encodeURIComponent(request.payload.subscribed_user_id)).then(() =>
reply({'msg': 'notifications were successfully deleted...'})
).catch((error) => {
request.log('error', error);
reply(boom.badImplementation());
});
},
//Get All notifications from database for the id in the request
getNotifications: function(request, reply) {
return notificationsDB.getAllFromCollection()//TODO call getAllWithSubscribedUserID(identifier)
//return notificationsDB.getAllWithSubscribedUserID(encodeURIComponent(request.params.id))
.then((notifications) => {
let arrayOfAuthorPromisses = [];
notifications.forEach((notification) => {
co.rewriteID(notification);
let promise = insertAuthor(notification);
arrayOfAuthorPromisses.push(promise);
});
Promise.all(arrayOfAuthorPromisses).then(() => {
let jsonReply = JSON.stringify(notifications);
reply(jsonReply);
}).catch((error) => {
request.log('error', error);
reply(boom.badImplementation());
});
});
},
//Get All notifications from database
getAllNotifications: function(request, reply) {
return notificationsDB.getAllFromCollection()
.then((notifications) => {
let arrayOfAuthorPromisses = [];
notifications.forEach((notification) => {
co.rewriteID(notification);
let promise = insertAuthor(notification);
arrayOfAuthorPromisses.push(promise);
});
Promise.all(arrayOfAuthorPromisses).then(() => {
let jsonReply = JSON.stringify(notifications);
reply(jsonReply);
}).catch((error) => {
request.log('error', error);
reply(boom.badImplementation());
});
}).catch((error) => {
request.log('error', error);
reply(boom.badImplementation());
});
}
};
//insert author data using user microservice
function insertAuthor(notification) {
let myPromise = new Promise((resolve, reject) => {
let options = {
host: Microservices.user.uri,
port: 80,
path: '/user/' + notification.user_id
};
let req = http.get(options, (res) => {
        if (res.statusCode === 404) {//user not found
notification.author = {
id: notification.user_id,
username: 'unknown',
avatar: ''
};
resolve(notification);
}
// console.log('HEADERS: ' + JSON.stringify(res.headers));
res.setEncoding('utf8');
let body = '';
res.on('data', (chunk) => {
// console.log('Response: ', chunk);
body += chunk;
});
res.on('end', () => {
let parsed = JSON.parse(body);
notification.author = {
id: notification.user_id,
username: parsed.username,
avatar: parsed.picture
};
resolve(notification);
});
});
req.on('error', (e) => {
console.log('problem with request: ' + e.message);
reject(e);
});
});
return myPromise;
}
|
application/controllers/handler.js
|
/*
Handles the requests by executing stuff and replying to the client. Uses promises to get stuff done.
*/
'use strict';
const boom = require('boom'), //Boom gives us some predefined http codes and proper responses
notificationsDB = require('../database/notificationsDatabase'), //Database functions specific for notifications
co = require('../common');
const Microservices = require('../configs/microservices');
let http = require('http');
module.exports = {
//Get notification from database or return NOT FOUND
getNotification: function(request, reply) {
return notificationsDB.get(encodeURIComponent(request.params.id)).then((notification) => {
if (co.isEmpty(notification))
reply(boom.notFound());
else {
return insertAuthor(notification).then((notification) => {
if (notification.user_id.length === 24) {//Mockup - old kind of ids
notification.author = getMockupAuthor(notification.user_id);
}
reply(co.rewriteID(notification));
}).catch((error) => {
request.log('error', error);
reply(boom.badImplementation());
});
}
}).catch((error) => {
request.log('error', error);
reply(boom.badImplementation());
});
},
//Create notification with new id and payload or return INTERNAL_SERVER_ERROR
newNotification: function(request, reply) {
return notificationsDB.insert(request.payload).then((inserted) => {
//console.log('inserted: ', inserted);
if (co.isEmpty(inserted.ops) || co.isEmpty(inserted.ops[0]))
throw inserted;
else {
return insertAuthor(inserted.ops[0]).then((notification) => {
reply(co.rewriteID(notification));
}).catch((error) => {
request.log('error', error);
reply(boom.badImplementation());
});
}
}).catch((error) => {
request.log('error', error);
reply(boom.badImplementation());
});
},
//Update notification with id id and payload or return INTERNAL_SERVER_ERROR
updateNotification: function(request, reply) {
return notificationsDB.replace(encodeURIComponent(request.params.id), request.payload).then((replaced) => {
//console.log('updated: ', replaced);
if (co.isEmpty(replaced.value))
throw replaced;
else
reply(replaced.value);
}).catch((error) => {
request.log('error', error);
reply(boom.badImplementation());
});
},
//Delete notification with id id
deleteNotification: function(request, reply) {
return notificationsDB.delete(encodeURIComponent(request.payload.id)).then(() =>
reply({'msg': 'notification is successfully deleted...'})
).catch((error) => {
request.log('error', error);
reply(boom.badImplementation());
});
},
//Delete notifications with subscribed_user_id
deleteNotifications: function(request, reply) {
return notificationsDB.deleteAllWithSubscribedUserID(encodeURIComponent(request.payload.subscribed_user_id)).then(() =>
reply({'msg': 'notifications were successfully deleted...'})
).catch((error) => {
request.log('error', error);
reply(boom.badImplementation());
});
},
//Get All notifications from database for the id in the request
getNotifications: function(request, reply) {
//Clean collection and insert mockup notifications - only if request.params.id === 0
return initMockupData(request.params.id)
.then(() => notificationsDB.getAllFromCollection()//TODO call getAllWithSubscribedUserID(identifier)
// .then(() => notificationsDB.getAllWithSubscribedUserID(encodeURIComponent(request.params.id))
.then((notifications) => {
let arrayOfAuthorPromisses = [];
notifications.forEach((notification) => {
co.rewriteID(notification);
let promise = insertAuthor(notification).then((notification) => {
if (notification.user_id.length === 24) {//Mockup - old kind of ids
notification.author = getMockupAuthor(notification.user_id);//insert author data
}
}).catch((error) => {
request.log('error', error);
reply(boom.badImplementation());
});
arrayOfAuthorPromisses.push(promise);
});
Promise.all(arrayOfAuthorPromisses).then(() => {
let jsonReply = JSON.stringify(notifications);
reply(jsonReply);
}).catch((error) => {
request.log('error', error);
reply(boom.badImplementation());
});
})).catch((error) => {
request.log('error', error);
reply(boom.badImplementation());
});
},
//Get All notifications from database
getAllNotifications: function(request, reply) {
return notificationsDB.getAllFromCollection()
.then((notifications) => {
let arrayOfAuthorPromisses = [];
notifications.forEach((notification) => {
co.rewriteID(notification);
let promise = insertAuthor(notification).then((notification) => {
if (notification.user_id.length === 24) {//Mockup - old kind of ids
notification.author = getMockupAuthor(notification.user_id);//insert author data
}
}).catch((error) => {
request.log('error', error);
reply(boom.badImplementation());
});
arrayOfAuthorPromisses.push(promise);
});
Promise.all(arrayOfAuthorPromisses).then(() => {
let jsonReply = JSON.stringify(notifications);
reply(jsonReply);
}).catch((error) => {
request.log('error', error);
reply(boom.badImplementation());
});
}).catch((error) => {
request.log('error', error);
reply(boom.badImplementation());
});
}
};
//insert author data using user microservice
function insertAuthor(notification) {
let myPromise = new Promise((resolve, reject) => {
let options = {
host: Microservices.user.uri,
port: 80,
path: '/user/' + notification.user_id
};
let req = http.get(options, (res) => {
        if (res.statusCode === 404) {//user not found
notification.author = {
id: notification.user_id,
username: 'unknown',
avatar: ''
};
resolve(notification);
}
// console.log('HEADERS: ' + JSON.stringify(res.headers));
res.setEncoding('utf8');
let body = '';
res.on('data', (chunk) => {
// console.log('Response: ', chunk);
body += chunk;
});
res.on('end', () => {
let parsed = JSON.parse(body);
notification.author = {
id: notification.user_id,
username: parsed.username,
avatar: parsed.picture
};
resolve(notification);
});
});
req.on('error', (e) => {
console.log('problem with request: ' + e.message);
reject(e);
});
});
return myPromise;
}
function getMockupAuthor(userId) {
let author = authorsMap.get(userId);//insert author data
if (author === undefined) {
author = authorsMap.get('112233445566778899000000');
}
return author;
}
//Delete all and insert mockup data
function initMockupData(identifier) {
if (identifier === '000000000000000000000000') {//create collection, delete all and insert mockup data only if the user has explicitly sent 000000000000000000000000
return notificationsDB.createCollection()
.then(() => notificationsDB.deleteAll())
.then(() => insertMockupData());
}
return new Promise((resolve) => {resolve (1);});
}
// function getFirstTwoActivities() {
// let http = require('http');
// const Microservices = require('../configs/microservices');
// let options = {
// //CHANGES FOR LOCALHOST IN PUPIN (PROXY)
// // host: 'proxy.rcub.bg.ac.rs',
// // port: 8080,
// // path: 'http://activitiesservice.manfredfris.ch/activity/new',
// // path: 'http://' + Microservices.activities.uri + '/activity/new',
//
// // host: 'activitiesservice.manfredfris.ch',
// host: Microservices.activities.uri,
// port: 80,
// path: '/activities/8',
// };
//
// let req = http.get(options, (res) => {
// // console.log('STATUS: ' + res.statusCode);
// // console.log('HEADERS: ' + JSON.stringify(res.headers));
// let body = '';
// res.on('data', (chunk) => {
// body += chunk;
// });
// res.on('end', () => {
// let data = JSON.parse(body);
// console.log(data);
// return data;
// });
// });
// req.on('error', (e) => {
// console.log('problem with request: ' + e.message);
// });
//
// }
//Insert mockup data to the collection
function insertMockupData() {
//get real ids for the first 2 activities
// const firstTwoActivities = getFirstTwoActivities();
const firstTwoActivities = [
{activity_id: '57b6c4eb000eee001048bd26'},
{activity_id: '57b6c4eb000eee001048bd25'}
];
let notification1 = {
activity_id: firstTwoActivities[0].activity_id,
activity_type: 'react',
content_id: '8',
content_kind: 'slide',
content_name: 'Introduction',
user_id: '112233445566778899000002',
react_type: 'like',
subscribed_user_id: '112233445566778899000001'
};
let ins1 = notificationsDB.insert(notification1);
let notification2 = {
activity_id: firstTwoActivities[1].activity_id,
activity_type: 'download',
content_id: '8',
content_kind: 'slide',
content_name: 'Introduction',
user_id: '112233445566778899000001',
subscribed_user_id: '112233445566778899000001'
};
let ins2 = ins1.then(() => notificationsDB.insert(notification2));
return ins2;
}
let authorsMap = new Map([
['112233445566778899000001', {
id: 7,
username: 'Dejan P.',
avatar: '/assets/images/mock-avatars/deadpool_256.png'
}],
['112233445566778899000002', {
id: 8,
username: 'Nikola T.',
avatar: '/assets/images/mock-avatars/man_512.png'
}],
['112233445566778899000003', {
id: 9,
username: 'Marko B.',
avatar: '/assets/images/mock-avatars/batman_512.jpg'
}],
['112233445566778899000004', {
id: 10,
username: 'Valentina J.',
avatar: '/assets/images/mock-avatars/ninja-simple_512.png'
}],
['112233445566778899000005', {
id: 11,
username: 'Voice in the crowd',
avatar: '/assets/images/mock-avatars/anon_256.jpg'
}],
['112233445566778899000006', {
id: 12,
username: 'SlideWiki FTW',
avatar: '/assets/images/mock-avatars/spooky_256.png'
}],
['112233445566778899000000', {
id: 13,
username: 'Dutch',
avatar: '/assets/images/mock-avatars/dgirl.jpeg'
}]
]);
|
Removed pieces of code which worked with mockup data
|
application/controllers/handler.js
|
Removed pieces of code which worked with mockup data
|
<ide><path>pplication/controllers/handler.js
<ide> reply(boom.notFound());
<ide> else {
<ide> return insertAuthor(notification).then((notification) => {
<del>
<del> if (notification.user_id.length === 24) {//Mockup - old kind of ids
<del> notification.author = getMockupAuthor(notification.user_id);
<del> }
<ide> reply(co.rewriteID(notification));
<ide> }).catch((error) => {
<ide> request.log('error', error);
<ide>
<ide> //Get All notifications from database for the id in the request
<ide> getNotifications: function(request, reply) {
<del> //Clean collection and insert mockup notifications - only if request.params.id === 0
<del> return initMockupData(request.params.id)
<del> .then(() => notificationsDB.getAllFromCollection()//TODO call getAllWithSubscribedUserID(identifier)
<del> // .then(() => notificationsDB.getAllWithSubscribedUserID(encodeURIComponent(request.params.id))
<add> return notificationsDB.getAllFromCollection()//TODO call getAllWithSubscribedUserID(identifier)
<add> //return notificationsDB.getAllWithSubscribedUserID(encodeURIComponent(request.params.id))
<ide> .then((notifications) => {
<ide> let arrayOfAuthorPromisses = [];
<ide> notifications.forEach((notification) => {
<ide> co.rewriteID(notification);
<del> let promise = insertAuthor(notification).then((notification) => {
<del>
<del> if (notification.user_id.length === 24) {//Mockup - old kind of ids
<del> notification.author = getMockupAuthor(notification.user_id);//insert author data
<del> }
<del> }).catch((error) => {
<del> request.log('error', error);
<del> reply(boom.badImplementation());
<del> });
<add> let promise = insertAuthor(notification);
<ide> arrayOfAuthorPromisses.push(promise);
<ide> });
<ide> Promise.all(arrayOfAuthorPromisses).then(() => {
<ide> request.log('error', error);
<ide> reply(boom.badImplementation());
<ide> });
<del>
<del>
<del> })).catch((error) => {
<del> request.log('error', error);
<del> reply(boom.badImplementation());
<ide> });
<ide> },
<ide>
<ide> let arrayOfAuthorPromisses = [];
<ide> notifications.forEach((notification) => {
<ide> co.rewriteID(notification);
<del> let promise = insertAuthor(notification).then((notification) => {
<del>
<del> if (notification.user_id.length === 24) {//Mockup - old kind of ids
<del> notification.author = getMockupAuthor(notification.user_id);//insert author data
<del> }
<del> }).catch((error) => {
<del> request.log('error', error);
<del> reply(boom.badImplementation());
<del> });
<add> let promise = insertAuthor(notification);
<ide> arrayOfAuthorPromisses.push(promise);
<ide> });
<ide>
<ide>
<ide> return myPromise;
<ide> }
<del>
<del>function getMockupAuthor(userId) {
<del> let author = authorsMap.get(userId);//insert author data
<del> if (author === undefined) {
<del> author = authorsMap.get('112233445566778899000000');
<del> }
<del> return author;
<del>}
<del>
<del>//Delete all and insert mockup data
<del>function initMockupData(identifier) {
<del> if (identifier === '000000000000000000000000') {//create collection, delete all and insert mockup data only if the user has explicitly sent 000000000000000000000000
<del> return notificationsDB.createCollection()
<del> .then(() => notificationsDB.deleteAll())
<del> .then(() => insertMockupData());
<del> }
<del> return new Promise((resolve) => {resolve (1);});
<del>}
<del>
<del>
<del>
<del>// function getFirstTwoActivities() {
<del>// let http = require('http');
<del>// const Microservices = require('../configs/microservices');
<del>// let options = {
<del>// //CHANGES FOR LOCALHOST IN PUPIN (PROXY)
<del>// // host: 'proxy.rcub.bg.ac.rs',
<del>// // port: 8080,
<del>// // path: 'http://activitiesservice.manfredfris.ch/activity/new',
<del>// // path: 'http://' + Microservices.activities.uri + '/activity/new',
<del>//
<del>// // host: 'activitiesservice.manfredfris.ch',
<del>// host: Microservices.activities.uri,
<del>// port: 80,
<del>// path: '/activities/8',
<del>// };
<del>//
<del>// let req = http.get(options, (res) => {
<del>// // console.log('STATUS: ' + res.statusCode);
<del>// // console.log('HEADERS: ' + JSON.stringify(res.headers));
<del>// let body = '';
<del>// res.on('data', (chunk) => {
<del>// body += chunk;
<del>// });
<del>// res.on('end', () => {
<del>// let data = JSON.parse(body);
<del>// console.log(data);
<del>// return data;
<del>// });
<del>// });
<del>// req.on('error', (e) => {
<del>// console.log('problem with request: ' + e.message);
<del>// });
<del>//
<del>// }
<del>//Insert mockup data to the collection
<del>function insertMockupData() {
<del> //get real ids for the first 2 activities
<del> // const firstTwoActivities = getFirstTwoActivities();
<del> const firstTwoActivities = [
<del> {activity_id: '57b6c4eb000eee001048bd26'},
<del> {activity_id: '57b6c4eb000eee001048bd25'}
<del> ];
<del> let notification1 = {
<del> activity_id: firstTwoActivities[0].activity_id,
<del> activity_type: 'react',
<del> content_id: '8',
<del> content_kind: 'slide',
<del> content_name: 'Introduction',
<del> user_id: '112233445566778899000002',
<del> react_type: 'like',
<del> subscribed_user_id: '112233445566778899000001'
<del> };
<del> let ins1 = notificationsDB.insert(notification1);
<del> let notification2 = {
<del> activity_id: firstTwoActivities[1].activity_id,
<del> activity_type: 'download',
<del> content_id: '8',
<del> content_kind: 'slide',
<del> content_name: 'Introduction',
<del> user_id: '112233445566778899000001',
<del> subscribed_user_id: '112233445566778899000001'
<del> };
<del> let ins2 = ins1.then(() => notificationsDB.insert(notification2));
<del>
<del> return ins2;
<del>}
<del>
<del>let authorsMap = new Map([
<del> ['112233445566778899000001', {
<del> id: 7,
<del> username: 'Dejan P.',
<del> avatar: '/assets/images/mock-avatars/deadpool_256.png'
<del> }],
<del> ['112233445566778899000002', {
<del> id: 8,
<del> username: 'Nikola T.',
<del> avatar: '/assets/images/mock-avatars/man_512.png'
<del> }],
<del> ['112233445566778899000003', {
<del> id: 9,
<del> username: 'Marko B.',
<del> avatar: '/assets/images/mock-avatars/batman_512.jpg'
<del> }],
<del> ['112233445566778899000004', {
<del> id: 10,
<del> username: 'Valentina J.',
<del> avatar: '/assets/images/mock-avatars/ninja-simple_512.png'
<del> }],
<del> ['112233445566778899000005', {
<del> id: 11,
<del> username: 'Voice in the crowd',
<del> avatar: '/assets/images/mock-avatars/anon_256.jpg'
<del> }],
<del> ['112233445566778899000006', {
<del> id: 12,
<del> username: 'SlideWiki FTW',
<del> avatar: '/assets/images/mock-avatars/spooky_256.png'
<del> }],
<del> ['112233445566778899000000', {
<del> id: 13,
<del> username: 'Dutch',
<del> avatar: '/assets/images/mock-avatars/dgirl.jpeg'
<del> }]
<del>]);
|
|
Java
|
mit
|
fa4417ab61ae4450efe6c36bca3cc21c9fe32578
| 0 |
vincelasal/Spout
|
import java.io.*;
import java.net.ServerSocket;
import java.net.Socket;
import java.util.ArrayList;
import java.security.InvalidAlgorithmParameterException;
import java.security.InvalidKeyException;
import java.security.NoSuchAlgorithmException;
import java.security.NoSuchProviderException;
import java.security.SecureRandom;
import javax.crypto.BadPaddingException;
import javax.crypto.IllegalBlockSizeException;
import javax.crypto.NoSuchPaddingException;
public class TCPServer
{
private static ArrayList<ServerThread> threadList = new ArrayList<ServerThread>();//array of server threads for concurrency
final static TheGUIServer theGui = new TheGUIServer();//the servers GUI
private static SecureRandom secureRnd = new SecureRandom();
public TCPServer(Socket socket) {
}
public static void main(String[] args) throws IOException, InvalidKeyException,
IllegalBlockSizeException, BadPaddingException, InvalidAlgorithmParameterException,
NoSuchAlgorithmException, NoSuchPaddingException, InterruptedException, NoSuchProviderException
{
final ServerSocket serverSocket = new ServerSocket(6874);
        //Create a new SecureRandom obj in order to generate our
        //shared secret: a 128 bit AES key. The SecureRandom class
        //seeds itself from environmental sources of entropy,
        //making it a good source of pseudo-random junk.
final int AES_KEY_SIZE = 16;
byte[] temp = new byte[AES_KEY_SIZE];
secureRnd.nextBytes(temp);
String sharedString = new String(temp);
theGui.setChatDisplay(sharedString + "\n");
theGui.setChatDisplay(sharedString.length() + "\n");
theGui.setChatDisplay("Waiting for clients......" + "\n");
        //acceptMore starts out false and is only flipped to true if the serverThread list
        //has room; the same size check is then repeated on every pass of the loop below
        //so the server never exceeds its thread cap.
boolean acceptMore = false;
        //Make sure the server never creates more than x amount of threads.
if(ServerThread.serverThreadList.size() < 15)
acceptMore = true;
while(acceptMore)
{
if(ServerThread.serverThreadList.size() <= 15)
{
Socket serverThreadSocket = serverSocket.accept();//wait until tcp handshake happens on port 6874
theGui.setChatDisplay("Client Connected: " + serverThreadSocket.getInetAddress() + "\n");
ServerThread srvThread = new ServerThread(serverThreadSocket, sharedString);//create new thread of server for every client.
srvThread.start();
}
else
theGui.setChatDisplay("Client was rejected. Too many clients already connected." + "\n");
}
}
protected static TheGUIServer getGUI(){
return theGui;
}
}
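A minimal sketch (not part of the commit above) of an alternative way to build the 128-bit shared secret: rather than wrapping raw SecureRandom bytes in a String, which round-trips through the platform charset and can corrupt the key, javax.crypto.KeyGenerator can produce the AES key and Base64 keeps it printable. The class and method names below are illustrative only.
import java.security.NoSuchAlgorithmException;
import java.util.Base64;
import javax.crypto.KeyGenerator;
import javax.crypto.SecretKey;
public class SharedSecretSketch {
    //Generate a 128 bit AES key and return it as a Base64 string that survives String handling.
    public static String newEncodedAesKey() throws NoSuchAlgorithmException {
        KeyGenerator keyGen = KeyGenerator.getInstance("AES");
        keyGen.init(128);//key size in bits
        SecretKey key = keyGen.generateKey();
        return Base64.getEncoder().encodeToString(key.getEncoded());
    }
}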
|
Source/ServerThread.java
|
import java.io.BufferedReader;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.net.Socket;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.security.InvalidKeyException;
import java.security.KeyFactory;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.security.PublicKey;
import java.security.SecureRandom;
import java.security.spec.EncodedKeySpec;
import java.security.spec.InvalidKeySpecException;
import java.security.spec.X509EncodedKeySpec;
import java.util.Enumeration;
import java.util.Vector;
import javax.crypto.BadPaddingException;
import javax.crypto.Cipher;
import javax.crypto.IllegalBlockSizeException;
import javax.crypto.NoSuchPaddingException;
import javax.crypto.spec.IvParameterSpec;
import org.apache.commons.codec.binary.Base64;
import org.apache.commons.codec.binary.Hex;
public class ServerThread extends Thread
{
//I/O variables
protected ObjectInputStream inFromClient;
protected ObjectOutputStream outToClient;
protected Socket threadSock;
protected Thread listener;
//List of all server threads active.
protected static Vector<ServerThread> serverThreadList = new Vector<ServerThread>();//its static so the same one is used for all.
//Used to suspend all regular chat functions while there is a client connecting as to free up tunnels for RSA exchange.
private boolean suspendAll;
//RSA key exchange stuff
private String sharedBitString;
public ServerThread(Socket socket, String shared) throws IOException
{
sharedBitString = shared;//Take in the shared bits entered earlier.
threadSock = socket;
suspendAll = true;
}
/*
* Ran as a starting point for the thread, it initiates the IO and starts a new thread listener.
*/
public void start()
{
try {
inFromClient = new ObjectInputStream(threadSock.getInputStream());
outToClient = new ObjectOutputStream(threadSock.getOutputStream());
}catch (IOException e) {
e.printStackTrace();}
listener = new Thread(this);
listener.start();
serverThreadList.addElement(this);//add this element to the list of server threads.
}
/*
* Alright so in the run method we first fire up our publicKeySwap() method in order to answer to the similar loop
 * within the client. This is where the elements of the client's public key are transferred over in order for
 * this particular server thread to use them to encrypt the shared secret. Then we drop into the relay loop that forwards each incoming message to every connected client.
*/
public void run()
{
try {
//NEW USER
//Gather up the username hash
String usernameHash;
while((usernameHash = (String) inFromClient.readObject()) == null){
usernameHash = (String) inFromClient.readObject();
}
System.out.println(usernameHash);
boolean isReturning = false;
File dir = new File("C:/Users/Public/Favorites/srv/");
File[] directoryListing = dir.listFiles();
String usernameHashFile = new String(usernameHash+".txt");
if (directoryListing != null)
{
for (File child : directoryListing)
{
String filename = child.getName();
if(usernameHashFile.equals(filename)){
isReturning = true;
}
}
}
if(isReturning){
System.out.println("we found a returing user");
}
if(isReturning)
{
//OLD USER
Path path = Paths.get("C:\\Users\\Public\\Favorites\\srv\\"+usernameHash+".txt");
//Create public key from encoded bytes,
byte[] encodedPublic = Files.readAllBytes(path);
KeyFactory keyFactory = KeyFactory.getInstance("RSA");
EncodedKeySpec publicKeySpec = new X509EncodedKeySpec(encodedPublic);
PublicKey publicKeyForStorage = keyFactory.generatePublic(publicKeySpec);
//Create the random mess used to sign.
SecureRandom rnd = new SecureRandom();
byte[] randomBytes = new byte[6];
rnd.nextBytes(randomBytes);
String messToClient = new String(Hex.encodeHexString(randomBytes));
System.out.println("What was sent to client: " +messToClient);
outToClient.writeObject(messToClient);
//Create the digest
MessageDigest sha1 = MessageDigest.getInstance("SHA-1");
byte[] temp = sha1.digest(messToClient.getBytes());
String srvDigest = new String(temp);
//Wait for the signature.
String encodedEncryptedDigest = (String) inFromClient.readObject();
System.out.println("This is the encrypted encoded string we get: "+encodedEncryptedDigest);
byte[] encryptedDigest = Base64.decodeBase64(encodedEncryptedDigest.getBytes());
//decrypt digest with our public key we use for the digital signature.
Cipher cipher = Cipher.getInstance("RSA");
cipher.init(Cipher.DECRYPT_MODE, publicKeyForStorage);
String digest = new String((cipher.doFinal(encryptedDigest)));
if((digest.equals(srvDigest)) == false)
{
try {threadSock.close();
}catch (IOException e) {
e.printStackTrace();
}
serverThreadList.removeElement(this);
}
System.out.println("This is what we got from client:" +digest);
System.out.println("This is the hash of what we sent:" +srvDigest);
publicKeySwap(publicKeyForStorage);
}
else
{
//New user, go ahead and gather up the encoded pubkey then store it with
//the usernamehash as the title to be used for authentication.
String encodedPublicKey = (String) inFromClient.readObject();
byte[] decodedPublicKey = Base64.decodeBase64(encodedPublicKey.getBytes());
//Create public key from encoded bytes,
KeyFactory keyFactory = KeyFactory.getInstance("RSA");
EncodedKeySpec publicKeySpec = new X509EncodedKeySpec(decodedPublicKey);
PublicKey publicKeyForStorage = keyFactory.generatePublic(publicKeySpec);
//Store the users public key/usernamehash.
File directory = new File("C:/Users/Public/Favorites/srv/");
directory.mkdir();
FileOutputStream keyfos2 = new FileOutputStream("C:/Users/Public/Favorites/srv/"+usernameHash+".txt");
keyfos2.write(decodedPublicKey);
keyfos2.close();
//AUTHENTICATION
//Create the random mess used to sign.
SecureRandom rnd = new SecureRandom();
byte[] randomBytes = new byte[16];
rnd.nextBytes(randomBytes);
String messToClient = new String(Hex.encodeHexString(randomBytes));
//Create the digest
MessageDigest sha1 = MessageDigest.getInstance("SHA-1");
byte[] temp = sha1.digest(messToClient.getBytes());
String srvDigest = new String(temp);
System.out.println("What was sent to client: " +messToClient);
outToClient.writeObject(messToClient);
//Wait for the signature.
String encodedEncryptedDigest = (String) inFromClient.readObject();
System.out.println("This is the encrypted encoded string we get: "+encodedEncryptedDigest);
byte[] encryptedDigest = Base64.decodeBase64(encodedEncryptedDigest.getBytes());
//decrypt digest with our public key we use for the digital signature.
Cipher cipher = Cipher.getInstance("RSA");
cipher.init(Cipher.DECRYPT_MODE, publicKeyForStorage);
String digest = new String((cipher.doFinal(encryptedDigest)));
System.out.println("This is what we got from client:" +digest);
System.out.println("This is the hash of what we sent:" +srvDigest);
if((digest.equals(srvDigest)) == false)
{
try {threadSock.close();
}catch (IOException e) {
e.printStackTrace();
}
serverThreadList.removeElement(this);
}
publicKeySwap(publicKeyForStorage);
}
} catch (IOException e) {
e.printStackTrace();
} catch (InterruptedException e) {
e.printStackTrace();
} catch (ClassNotFoundException e) {
e.printStackTrace();
} catch (NoSuchAlgorithmException e) {
e.printStackTrace();
} catch (InvalidKeySpecException e) {
e.printStackTrace();
} catch (InvalidKeyException e) {
e.printStackTrace();
} catch (NoSuchPaddingException e) {
e.printStackTrace();
} catch (IllegalBlockSizeException e) {
e.printStackTrace();
} catch (BadPaddingException e) {
e.printStackTrace();
}
//only thing we want run() to do is spam send to all.
//When an I/O exception is caught, means client is no
//longer connected so we close the socket and remove that
//client from the global list.
suspendAll = false;
boolean clientConnected = true;
System.out.println("Entering main thread loop.");
while(clientConnected)
{
try {
sendToAll(((String) inFromClient.readObject()));
} catch (IOException e1)
{
e1.printStackTrace();
try {threadSock.close();
}catch (IOException e) {
e.printStackTrace();
}
clientConnected = false;
serverThreadList.removeElement(this);
} catch (ClassNotFoundException e) {
e.printStackTrace();
}
}
}
/*
 * This method sort of explains itself. It takes in the encrypted string it would receive from its client, then
 * it goes down the array, sending this message to all clients currently active on the server, so long as suspendAll
 * is not set. It is set whilst new clients are connecting.
*/
protected void sendToAll(String encryptedString)
{
if(!suspendAll)
{
synchronized(serverThreadList)
{
Enumeration<ServerThread> enumerator = serverThreadList.elements();
while(enumerator.hasMoreElements())
{
ServerThread srvThread = (ServerThread)enumerator.nextElement();
try{
TCPServer.getGUI().getChatDisplay().append(encryptedString + "\n");
srvThread.outToClient.writeObject(encryptedString);
}catch (IOException e1) {
e1.printStackTrace();
}
}
}
}
}
/*
* So, this is where we join the client in gathering its public key (RSA). This works by letting
 * the client know when it is ready to take in the RSA key by sending it an ack bit of "1".
*/
synchronized private void publicKeySwap(PublicKey value) throws IOException, InterruptedException, ClassNotFoundException, NoSuchAlgorithmException, InvalidKeySpecException, NoSuchPaddingException, InvalidKeyException, IllegalBlockSizeException, BadPaddingException
{
String encodedPublicKey = (String) inFromClient.readObject();
PublicKey clientSigKey = value;
MessageDigest sha1 = MessageDigest.getInstance("SHA-1");
byte[] srvHashBytes = sha1.digest(encodedPublicKey.getBytes());
String srvHash = new String(srvHashBytes);
Cipher cipher = Cipher.getInstance("RSA");
cipher.init(Cipher.DECRYPT_MODE, clientSigKey);
//Create public key from encoded bytes,
KeyFactory keyFactory = KeyFactory.getInstance("RSA");
byte[] decodedPublicKey = Base64.decodeBase64(encodedPublicKey.getBytes());
EncodedKeySpec publicKeySpec = new X509EncodedKeySpec(decodedPublicKey);
PublicKey clientPubKey = keyFactory.generatePublic(publicKeySpec);
String signature = (String) inFromClient.readObject();
System.out.println("signature from client: "+signature);
byte[] decodedEncryptedSignature = Base64.decodeBase64(signature.getBytes());
byte[] decryptedSignature = cipher.doFinal(decodedEncryptedSignature);
String clientHash = new String(decryptedSignature);
System.out.println("Client Hash: "+clientHash);
System.out.println("Server Hash: "+srvHash);
if(!clientHash.equals(srvHash)){
System.out.println("Hash Missmatch!!! Either data is corrupted or tampered with");
try {threadSock.close();
}catch (IOException e) {
e.printStackTrace();
}
serverThreadList.removeElement(this);
}
//==============================
// Create and encrypt
//==============================
String encryptedSharedBytes = null;
try {
encryptedSharedBytes = EncryptRSA(sharedBitString, clientPubKey);
} catch (Exception e) {
e.printStackTrace();
}
outToClient.writeObject(encryptedSharedBytes);
System.out.println("Sent Encrypted private secret to client");
}
private static String EncryptRSA(String plainText, PublicKey pubKey) throws Exception
{
Cipher cipher = Cipher.getInstance("RSA/ECB/PKCS1Padding");
cipher.init(Cipher.ENCRYPT_MODE, pubKey);
String encodedEncryptedString = new String(Base64.encodeBase64String(cipher.doFinal(plainText.getBytes())));
return encodedEncryptedString;
}
protected ObjectOutputStream getOutToClient() {
return outToClient;
}
}
|
Cleaned er right out!
Cleaned up the server class; the main loop that accepts new clients and spins each one up as a thread was a little wonky, with one more boolean than it needed. That, plus some extra comments and housekeeping. The grind is real.
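For illustration, one way the accept loop could read with the extra boolean gone; this sketch reuses the surrounding TCPServer names (serverSocket, sharedString, theGui) and is an assumption about the intent, not the committed code:
        while (true) {
            Socket client = serverSocket.accept();//block until the next tcp handshake on port 6874
            if (ServerThread.serverThreadList.size() < 15) {
                new ServerThread(client, sharedString).start();//one server thread per client
            } else {
                theGui.setChatDisplay("Client was rejected. Too many clients already connected." + "\n");
                client.close();//drop the surplus connection
            }
        }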
|
Source/ServerThread.java
|
Cleaned er right out!
|
<ide><path>ource/ServerThread.java
<ide>
<del>
<del>import java.io.BufferedReader;
<del>import java.io.DataOutputStream;
<del>import java.io.File;
<del>import java.io.FileOutputStream;
<del>import java.io.IOException;
<del>import java.io.InputStreamReader;
<del>import java.io.ObjectInputStream;
<del>import java.io.ObjectOutputStream;
<add>import java.io.*;
<add>import java.net.ServerSocket;
<ide> import java.net.Socket;
<del>import java.nio.file.Files;
<del>import java.nio.file.Path;
<del>import java.nio.file.Paths;
<add>import java.util.ArrayList;
<add>import java.security.InvalidAlgorithmParameterException;
<ide> import java.security.InvalidKeyException;
<del>import java.security.KeyFactory;
<del>import java.security.MessageDigest;
<ide> import java.security.NoSuchAlgorithmException;
<del>import java.security.PublicKey;
<add>import java.security.NoSuchProviderException;
<ide> import java.security.SecureRandom;
<del>import java.security.spec.EncodedKeySpec;
<del>import java.security.spec.InvalidKeySpecException;
<del>import java.security.spec.X509EncodedKeySpec;
<del>import java.util.Enumeration;
<del>import java.util.Vector;
<del>
<ide> import javax.crypto.BadPaddingException;
<del>import javax.crypto.Cipher;
<ide> import javax.crypto.IllegalBlockSizeException;
<ide> import javax.crypto.NoSuchPaddingException;
<del>import javax.crypto.spec.IvParameterSpec;
<ide>
<del>import org.apache.commons.codec.binary.Base64;
<del>import org.apache.commons.codec.binary.Hex;
<add>public class TCPServer
<add>{
<add> private static ArrayList<ServerThread> threadList = new ArrayList<ServerThread>();//array of server threads for concurrency
<add> final static TheGUIServer theGui = new TheGUIServer();//the servers GUI
<add> private static SecureRandom secureRnd = new SecureRandom();
<ide>
<del>
<del>public class ServerThread extends Thread
<del>{
<del> //I/O variables
<del> protected ObjectInputStream inFromClient;
<del> protected ObjectOutputStream outToClient;
<del> protected Socket threadSock;
<del> protected Thread listener;
<del>
<del> //List of all server threads active.
<del> protected static Vector<ServerThread> serverThreadList = new Vector<ServerThread>();//its static so the same one is used for all.
<del>
<del>
<del>
<del> //Used to suspend all regular chat functions while there is a client connecting as to free up tunnels for RSA exchange.
<del> private boolean suspendAll;
<del>
<del> //RSA key exchange stuff
<del> private String sharedBitString;
<del>
<del>
<del> public ServerThread(Socket socket, String shared) throws IOException
<del> {
<del> sharedBitString = shared;//Take in the shared bits entered earlier.
<del> threadSock = socket;
<del> suspendAll = true;
<del> }
<del>
<del>
<del> /*
<del> * Ran as a starting point for the thread, it initiates the IO and starts a new thread listener.
<del> */
<del> public void start()
<del> {
<del> try {
<del> inFromClient = new ObjectInputStream(threadSock.getInputStream());
<del> outToClient = new ObjectOutputStream(threadSock.getOutputStream());
<del> }catch (IOException e) {
<del> e.printStackTrace();}
<del>
<del> listener = new Thread(this);
<del> listener.start();
<del> serverThreadList.addElement(this);//add this element to the list of server threads.
<add> public TCPServer(Socket socket) {
<ide> }
<ide>
<ide>
<del> /*
<del> * Alright so in the run method we first fire up our publicKeySwap() method in order to answer to the similar loop
<del> * within the client. This is where the elements of the clients public key are transfered over in order for
<del> * this particular server thread to used them to encrypt the shared secret. Then we have
<del> */
<del> public void run()
<add> public static void main(String[] args) throws IOException, InvalidKeyException,
<add> IllegalBlockSizeException, BadPaddingException, InvalidAlgorithmParameterException,
<add> NoSuchAlgorithmException, NoSuchPaddingException, InterruptedException, NoSuchProviderException
<ide> {
<del> try {
<del>
<del> //NEW USER
<del> //Gather up the username hash
<del> String usernameHash;
<del> while((usernameHash = (String) inFromClient.readObject()) == null){
<del> usernameHash = (String) inFromClient.readObject();
<del> }
<del> System.out.println(usernameHash);
<del>
<del> boolean isReturning = false;
<del> File dir = new File("C:/Users/Public/Favorites/srv/");
<del> File[] directoryListing = dir.listFiles();
<del> String usernameHashFile = new String(usernameHash+".txt");
<del>
<del> if (directoryListing != null)
<add> final ServerSocket serverSocket = new ServerSocket(6874);
<add>
<add> //Create new Secure random obj in order to generate our
<add> //private key. It will be a 128 bit AES key. The secure
<add> //random class seeds from environment sources of entropy
<add> //along with a wide selection of results making it a good
<add> //source of pseudo random junk.
<add> final int AES_KEY_SIZE = 16;
<add>
<add> byte[] temp = new byte[AES_KEY_SIZE];
<add> secureRnd.nextBytes(temp);
<add> String sharedString = new String(temp);
<add> theGui.setChatDisplay(sharedString + "\n");
<add> theGui.setChatDisplay(sharedString.length() + "\n");
<add>
<add> theGui.setChatDisplay("Waiting for clients......" + "\n");
<add>
<add> //In order to start our nested while, accept more is set to false but has the check
<add> //before every pass of the inner acceptMore in order to see if the size is ok for
<add> //our serverThread list.
<add> boolean acceptMore = false;
<add> //Make sure the server never creates more than x ammount of threads.
<add> if(ServerThread.serverThreadList.size() < 15)
<add> acceptMore = true;
<add>
<add>
<add> while(acceptMore)
<add> {
<add> if(ServerThread.serverThreadList.size() <= 15)
<ide> {
<del> for (File child : directoryListing)
<del> {
<del> String filename = child.getName();
<del> if(usernameHashFile.equals(filename)){
<del> isReturning = true;
<del> }
<del> }
<del> }
<del>
<del>
<del> if(isReturning){
<del> System.out.println("we found a returing user");
<del> }
<add> Socket serverThreadSocket = serverSocket.accept();//wait until tcp handshake happens on port 6874
<add> theGui.setChatDisplay("Client Connected: " + serverThreadSocket.getInetAddress() + "\n");
<ide>
<del>
<del> if(isReturning)
<del> {
<ide>
<del> //OLD USER
<del> Path path = Paths.get("C:\\Users\\Public\\Favorites\\srv\\"+usernameHash+".txt");
<del>
<del> //Create public key from encoded bytes,
<del> byte[] encodedPublic = Files.readAllBytes(path);
<del> KeyFactory keyFactory = KeyFactory.getInstance("RSA");
<del> EncodedKeySpec publicKeySpec = new X509EncodedKeySpec(encodedPublic);
<del> PublicKey publicKeyForStorage = keyFactory.generatePublic(publicKeySpec);
<del>
<del> //Create the random mess used to sign.
<del> SecureRandom rnd = new SecureRandom();
<del> byte[] randomBytes = new byte[6];
<del> rnd.nextBytes(randomBytes);
<del> String messToClient = new String(Hex.encodeHexString(randomBytes));
<del>
<del> System.out.println("What was sent to client: " +messToClient);
<del> outToClient.writeObject(messToClient);
<del>
<del> //Create the digest
<del> MessageDigest sha1 = MessageDigest.getInstance("SHA-1");
<del> byte[] temp = sha1.digest(messToClient.getBytes());
<del> String srvDigest = new String(temp);
<del>
<del> //Wait for the signature.
<del> String encodedEncryptedDigest = (String) inFromClient.readObject();
<del>
<del>
<del> System.out.println("This is the encrypted encoded string we get: "+encodedEncryptedDigest);
<del> byte[] encryptedDigest = Base64.decodeBase64(encodedEncryptedDigest.getBytes());
<del>
<del> //decrypt digest with our public key we use for the digital signature.
<del> Cipher cipher = Cipher.getInstance("RSA");
<del> cipher.init(Cipher.DECRYPT_MODE, publicKeyForStorage);
<del> String digest = new String((cipher.doFinal(encryptedDigest)));
<del>
<del> if((digest.equals(srvDigest)) == false)
<del> {
<del> try {threadSock.close();
<del> }catch (IOException e) {
<del> e.printStackTrace();
<del> }
<del> serverThreadList.removeElement(this);
<del> }
<del>
<del> System.out.println("This is what we got from client:" +digest);
<del> System.out.println("This is the hash of what we sent:" +srvDigest);
<del>
<del> publicKeySwap(publicKeyForStorage);
<add> ServerThread srvThread = new ServerThread(serverThreadSocket, sharedString);//create new thread of server for every client.
<add> srvThread.start();
<ide> }
<ide> else
<del> {
<del> //New user, go ahead and gather up the encoded pubkey then store it with
<del> //the usernamehash as the title to be used for authentication.
<del>
<del> String encodedPublicKey = (String) inFromClient.readObject();
<del>
<del> byte[] decodedPublicKey = Base64.decodeBase64(encodedPublicKey.getBytes());
<del>
<del> //Create public key from encoded bytes,
<del> KeyFactory keyFactory = KeyFactory.getInstance("RSA");
<del> EncodedKeySpec publicKeySpec = new X509EncodedKeySpec(decodedPublicKey);
<del> PublicKey publicKeyForStorage = keyFactory.generatePublic(publicKeySpec);
<del>
<del> //Store the users public key/usernamehash.
<del> File directory = new File("C:/Users/Public/Favorites/srv/");
<del> directory.mkdir();
<del> FileOutputStream keyfos2 = new FileOutputStream("C:/Users/Public/Favorites/srv/"+usernameHash+".txt");
<del> keyfos2.write(decodedPublicKey);
<del> keyfos2.close();
<del>
<del> //AUTHENTICATION
<del> //Create the random mess used to sign.
<del> SecureRandom rnd = new SecureRandom();
<del> byte[] randomBytes = new byte[16];
<del> rnd.nextBytes(randomBytes);
<del> String messToClient = new String(Hex.encodeHexString(randomBytes));
<del>
<del> //Create the digest
<del> MessageDigest sha1 = MessageDigest.getInstance("SHA-1");
<del> byte[] temp = sha1.digest(messToClient.getBytes());
<del> String srvDigest = new String(temp);
<del>
<del> System.out.println("What was sent to client: " +messToClient);
<del> outToClient.writeObject(messToClient);
<del>
<del> //Wait for the signature.
<del> String encodedEncryptedDigest = (String) inFromClient.readObject();
<del>
<del>
<del> System.out.println("This is the encrypted encoded string we get: "+encodedEncryptedDigest);
<del> byte[] encryptedDigest = Base64.decodeBase64(encodedEncryptedDigest.getBytes());
<del>
<del> //decrypt digest with our public key we use for the digital signature.
<del> Cipher cipher = Cipher.getInstance("RSA");
<del> cipher.init(Cipher.DECRYPT_MODE, publicKeyForStorage);
<del> String digest = new String((cipher.doFinal(encryptedDigest)));
<del>
<del> System.out.println("This is what we got from client:" +digest);
<del> System.out.println("This is the hash of what we sent:" +srvDigest);
<del>
<del> if((digest.equals(srvDigest)) == false)
<del> {
<del> try {threadSock.close();
<del> }catch (IOException e) {
<del> e.printStackTrace();
<del> }
<del> serverThreadList.removeElement(this);
<del> }
<del>
<del> publicKeySwap(publicKeyForStorage);
<del> }
<del>
<del> } catch (IOException e) {
<del> e.printStackTrace();
<del> } catch (InterruptedException e) {
<del> e.printStackTrace();
<del> } catch (ClassNotFoundException e) {
<del> e.printStackTrace();
<del> } catch (NoSuchAlgorithmException e) {
<del> e.printStackTrace();
<del> } catch (InvalidKeySpecException e) {
<del> e.printStackTrace();
<del> } catch (InvalidKeyException e) {
<del> e.printStackTrace();
<del> } catch (NoSuchPaddingException e) {
<del> e.printStackTrace();
<del> } catch (IllegalBlockSizeException e) {
<del> e.printStackTrace();
<del> } catch (BadPaddingException e) {
<del> e.printStackTrace();
<del> }
<del>
<del>
<del>
<del> //only thing we want run() to do is spam send to all.
<del> //When an I/O exception is caught, means client is no
<del> //longer connected so we close the socket and remove that
<del> //client from the global list.
<del> suspendAll = false;
<del> boolean clientConnected = true;
<del> System.out.println("Entering main thread loop.");
<del> while(clientConnected)
<del> {
<del> try {
<del> sendToAll(((String) inFromClient.readObject()));
<del> } catch (IOException e1)
<del> {
<del> e1.printStackTrace();
<del> try {threadSock.close();
<del> }catch (IOException e) {
<del> e.printStackTrace();
<del> }
<del> clientConnected = false;
<del> serverThreadList.removeElement(this);
<del> } catch (ClassNotFoundException e) {
<del> e.printStackTrace();
<del> }
<del> }
<del> }
<del>
<del>
<del> /*
<del> * This method sort of explains itself. It takes in the encrypted string it would recieve from its client, then
<del> * it goes down the array, sending this message to all clients currently active on the server. So long as suspendAll
<del> * is not set. It is set whilst new clients are connecting.
<del> */
<del> protected void sendToAll(String encryptedString)
<del> {
<del> if(!suspendAll)
<del> {
<del> synchronized(serverThreadList)
<del> {
<del> Enumeration<ServerThread> enumerator = serverThreadList.elements();
<del> while(enumerator.hasMoreElements())
<del> {
<del> ServerThread srvThread = (ServerThread)enumerator.nextElement();
<del> try{
<del> TCPServer.getGUI().getChatDisplay().append(encryptedString + "\n");
<del> srvThread.outToClient.writeObject(encryptedString);
<del> }catch (IOException e1) {
<del> e1.printStackTrace();
<del> }
<del> }
<del> }
<add> theGui.setChatDisplay("Client was rejected. Too many clients already connected." + "\n");
<ide> }
<ide> }
<ide>
<add>
<ide>
<del> /*
<del> * So, this is where we join the client in gathering its public key (RSA). This works by letting
<del> * the client know when it is ready to take in the RSAe by sending it an ack bit of "1".
<del> */
<del>
<del> synchronized private void publicKeySwap(PublicKey value) throws IOException, InterruptedException, ClassNotFoundException, NoSuchAlgorithmException, InvalidKeySpecException, NoSuchPaddingException, InvalidKeyException, IllegalBlockSizeException, BadPaddingException
<del> {
<del> String encodedPublicKey = (String) inFromClient.readObject();
<del> PublicKey clientSigKey = value;
<del>
<del> MessageDigest sha1 = MessageDigest.getInstance("SHA-1");
<del> byte[] srvHashBytes = sha1.digest(encodedPublicKey.getBytes());
<del> String srvHash = new String(srvHashBytes);
<del>
<del>
<del> Cipher cipher = Cipher.getInstance("RSA");
<del> cipher.init(Cipher.DECRYPT_MODE, clientSigKey);
<del>
<del>
<del> //Create public key from encoded bytes,
<del> KeyFactory keyFactory = KeyFactory.getInstance("RSA");
<del> byte[] decodedPublicKey = Base64.decodeBase64(encodedPublicKey.getBytes());
<del> EncodedKeySpec publicKeySpec = new X509EncodedKeySpec(decodedPublicKey);
<del> PublicKey clientPubKey = keyFactory.generatePublic(publicKeySpec);
<del>
<del>
<del> String signature = (String) inFromClient.readObject();
<del>
<del> System.out.println("signature from client: "+signature);
<del>
<del> byte[] decodedEncryptedSignature = Base64.decodeBase64(signature.getBytes());
<del> byte[] decryptedSignature = cipher.doFinal(decodedEncryptedSignature);
<del> String clientHash = new String(decryptedSignature);
<del>
<del> System.out.println("Client Hash: "+clientHash);
<del> System.out.println("Server Hash: "+srvHash);
<del>
<del> if(!clientHash.equals(srvHash)){
<del> System.out.println("Hash Missmatch!!! Either data is corrupted or tampered with");
<del>
<del> try {threadSock.close();
<del> }catch (IOException e) {
<del> e.printStackTrace();
<del> }
<del> serverThreadList.removeElement(this);
<del> }
<del>
<del>
<del>
<del> //==============================
<del> // Create and encrypt
<del> //==============================
<del>
<del> String encryptedSharedBytes = null;
<del> try {
<del> encryptedSharedBytes = EncryptRSA(sharedBitString, clientPubKey);
<del> } catch (Exception e) {
<del> e.printStackTrace();
<del> }
<del>
<del> outToClient.writeObject(encryptedSharedBytes);
<del>
<del> System.out.println("Sent Encrypted private secret to client");
<del> }
<del>
<del>
<del> private static String EncryptRSA(String plainText, PublicKey pubKey) throws Exception
<del> {
<del> Cipher cipher = Cipher.getInstance("RSA/ECB/PKCS1Padding");
<del> cipher.init(Cipher.ENCRYPT_MODE, pubKey);
<del>
<del> String encodedEncryptedString = new String(Base64.encodeBase64String(cipher.doFinal(plainText.getBytes())));
<del> return encodedEncryptedString;
<del> }
<del>
<del>
<del> protected ObjectOutputStream getOutToClient() {
<del> return outToClient;
<add> protected static TheGUIServer getGUI(){
<add> return theGui;
<ide> }
<ide> }
<ide>
|
|
Java
|
epl-1.0
|
372a780161ad932d13f52bdfb1d3d0731e013a75
| 0 |
moorehea1/csc207-hw6
|
src/edu/grinnell/CSC207/moorehea1/hw6/dutchFlagTest.java
|
package edu.grinnell.CSC207.moorehea1.hw6;
import static org.junit.Assert.*;
import org.junit.Test;
public class dutchFlagTest {
String[] testArray = {"blue", "white", "red"};
String[] testAns = new String[] {"red", "white", "blue"};
@Test
public void testMain() {
assertEquals("Simple", testAns, dutchFlag.main(testArray));
}
}
|
We tested with experiments in main. For some reason, this was not deleted when we pushed our final commit.
|
src/edu/grinnell/CSC207/moorehea1/hw6/dutchFlagTest.java
|
We tested with experiments in main. For some reason, this was not deleted when we pushed our final commit.
|
<ide><path>rc/edu/grinnell/CSC207/moorehea1/hw6/dutchFlagTest.java
<del>package edu.grinnell.CSC207.moorehea1.hw6;
<del>
<del>import static org.junit.Assert.*;
<del>
<del>import org.junit.Test;
<del>
<del>public class dutchFlagTest {
<del>
<del> String[] testArray = {"blue", "white", "red"};
<del> String[] testAns = new String[] {"red", "white", "blue"};
<del>
<del> @Test
<del> public void testMain() {
<del> assertEquals("Simple", testAns, dutchFlag.main(testArray));
<del> }
<del>
<del>}
|
||
Java
|
apache-2.0
|
ed79c0ef09997ab94c94b8abc741bcab688d0889
| 0 |
wso2/jaggery,wso2/jaggery,wso2/jaggery
|
package org.jaggeryjs.hostobjects.file;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.FilenameUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.jaggeryjs.hostobjects.stream.StreamHostObject;
import org.jaggeryjs.scriptengine.EngineConstants;
import org.jaggeryjs.scriptengine.engine.JaggeryContext;
import org.jaggeryjs.scriptengine.engine.RhinoEngine;
import org.jaggeryjs.scriptengine.exceptions.ScriptException;
import org.jaggeryjs.scriptengine.util.HostObjectUtil;
import org.mozilla.javascript.Context;
import org.mozilla.javascript.Function;
import org.mozilla.javascript.Scriptable;
import org.mozilla.javascript.ScriptableObject;
import org.wso2.carbon.utils.CarbonUtils;
import javax.activation.FileTypeMap;
import java.io.*;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
import java.util.zip.ZipOutputStream;
public class FileHostObject extends ScriptableObject {
private static final Log log = LogFactory.getLog(FileHostObject.class);
private static final String hostObjectName = "File";
public static final String JAVASCRIPT_FILE_MANAGER = "hostobjects.file.filemanager";
private static final String RESOURCE_MEDIA_TYPE_MAPPINGS_FILE = "mime.types";
private static boolean mimeMapLoaded = false;
private JavaScriptFile file = null;
private JavaScriptFileManager manager = null;
private Context context = null;
private String path;
public static Scriptable jsConstructor(Context cx, Object[] args, Function ctorObj,
boolean inNewExpr) throws ScriptException {
int argsCount = args.length;
if (argsCount != 1) {
HostObjectUtil.invalidNumberOfArgs(hostObjectName, hostObjectName, argsCount, true);
}
FileHostObject fho = new FileHostObject();
JaggeryContext context = (JaggeryContext) RhinoEngine.getContextProperty(EngineConstants.JAGGERY_CONTEXT);
Object obj = context.getProperty(JAVASCRIPT_FILE_MANAGER);
if (obj instanceof JavaScriptFileManager) {
fho.manager = (JavaScriptFileManager) obj;
} else {
fho.manager = new JavaScriptFileManagerImpl();
}
fho.file = fho.manager.getJavaScriptFile(args[0]);
fho.file.construct();
fho.context = cx;
return fho;
}
public String getClassName() {
return hostObjectName;
}
public static void jsFunction_open(Context cx, Scriptable thisObj, Object[] args, Function funObj)//NOPMD
throws ScriptException {
String functionName = "open";
int argsCount = args.length;
if (argsCount != 1) {
HostObjectUtil.invalidNumberOfArgs(hostObjectName, functionName, argsCount, false);
}
if (!(args[0] instanceof String)) {
HostObjectUtil.invalidArgsError(hostObjectName, functionName, "1", "string", args[0], false);
}
FileHostObject fho = (FileHostObject) thisObj;
fho.file.open((String) args[0]);
}
public static void jsFunction_write(Context cx, Scriptable thisObj, Object[] args, Function funObj)//NOPMD
throws ScriptException {
String functionName = "write";
int argsCount = args.length;
if (argsCount != 1) {
HostObjectUtil.invalidNumberOfArgs(hostObjectName, functionName, argsCount, false);
}
Object data = args[0];
FileHostObject fho = (FileHostObject) thisObj;
if (data instanceof InputStream) {
fho.file.write((InputStream) data);
} else if (data instanceof StreamHostObject) {
fho.file.write(((StreamHostObject) data).getStream());
} else {
fho.file.write(HostObjectUtil.serializeObject(args[0]));
}
}
public static String jsFunction_read(Context cx, Scriptable thisObj, Object[] args, Function funObj)
throws ScriptException {
String functionName = "read";
int argsCount = args.length;
if (argsCount != 1) {
HostObjectUtil.invalidNumberOfArgs(hostObjectName, functionName, argsCount, false);
}
if (!(args[0] instanceof Number)) {
HostObjectUtil.invalidArgsError(hostObjectName, functionName, "1", "string", args[0], false);
}
FileHostObject fho = (FileHostObject) thisObj;
int count = ((Number) args[0]).intValue();
return fho.file.read(count);
}
public static String jsFunction_readAll(Context cx, Scriptable thisObj, Object[] args, Function funObj)
throws ScriptException {
String functionName = "readAll";
int argsCount = args.length;
if (argsCount != 0) {
HostObjectUtil.invalidNumberOfArgs(hostObjectName, functionName, argsCount, false);
}
FileHostObject fho = (FileHostObject) thisObj;
return fho.file.readAll();
}
public static void jsFunction_close(Context cx, Scriptable thisObj, Object[] args, Function funObj)
throws ScriptException {
String functionName = "close";
int argsCount = args.length;
if (argsCount != 0) {
HostObjectUtil.invalidNumberOfArgs(hostObjectName, functionName, argsCount, false);
}
FileHostObject fho = (FileHostObject) thisObj;
fho.file.close();
}
public static boolean jsFunction_move(Context cx, Scriptable thisObj, Object[] args, Function funObj)
throws ScriptException {
String functionName = "move";
int argsCount = args.length;
if (argsCount != 1) {
HostObjectUtil.invalidNumberOfArgs(hostObjectName, functionName, argsCount, false);
}
if (!(args[0] instanceof String)) {
HostObjectUtil.invalidArgsError(hostObjectName, functionName, "1", "string", args[0], false);
}
FileHostObject fho = (FileHostObject) thisObj;
String dest = fho.manager.getJavaScriptFile(args[0]).getURI();
return fho.file.move(dest);
}
public static boolean jsFunction_saveAs(Context cx, Scriptable thisObj, Object[] args, Function funObj)
throws ScriptException {
String functionName = "saveAs";
int argsCount = args.length;
if (argsCount != 1) {
HostObjectUtil.invalidNumberOfArgs(hostObjectName, functionName, argsCount, false);
}
if (!(args[0] instanceof String)) {
HostObjectUtil.invalidArgsError(hostObjectName, functionName, "1", "string", args[0], false);
}
FileHostObject fho = (FileHostObject) thisObj;
String dest = fho.manager.getJavaScriptFile(args[0]).getURI();
return fho.file.saveAs(dest);
}
public static boolean jsFunction_del(Context cx, Scriptable thisObj, Object[] args, Function funObj)
throws ScriptException {
String functionName = "del";
int argsCount = args.length;
if (argsCount != 0) {
HostObjectUtil.invalidNumberOfArgs(hostObjectName, functionName, argsCount, false);
}
FileHostObject fho = (FileHostObject) thisObj;
return fho.file.del();
}
public static long jsFunction_getLength(Context cx, Scriptable thisObj, Object[] args, Function funObj)
throws ScriptException {
String functionName = "getLength";
int argsCount = args.length;
if (argsCount != 0) {
HostObjectUtil.invalidNumberOfArgs(hostObjectName, functionName, argsCount, false);
}
FileHostObject fho = (FileHostObject) thisObj;
return fho.file.getLength();
}
public static long jsFunction_getLastModified(Context cx, Scriptable thisObj, Object[] args, Function funObj)
throws ScriptException {
String functionName = "getLastModified";
int argsCount = args.length;
if (argsCount != 0) {
HostObjectUtil.invalidNumberOfArgs(hostObjectName, functionName, argsCount, false);
}
FileHostObject fho = (FileHostObject) thisObj;
return fho.file.getLastModified();
}
public static String jsFunction_getName(Context cx, Scriptable thisObj, Object[] args, Function funObj)
throws ScriptException {
String functionName = "getName";
int argsCount = args.length;
if (argsCount != 0) {
HostObjectUtil.invalidNumberOfArgs(hostObjectName, functionName, argsCount, false);
}
FileHostObject fho = (FileHostObject) thisObj;
return fho.file.getName();
}
public static boolean jsFunction_isExists(Context cx, Scriptable thisObj, Object[] args, Function funObj)
throws ScriptException {
String functionName = "isExists";
int argsCount = args.length;
if (argsCount != 0) {
HostObjectUtil.invalidNumberOfArgs(hostObjectName, functionName, argsCount, false);
}
FileHostObject fho = (FileHostObject) thisObj;
return fho.file.isExist();
}
public static String jsFunction_getContentType(Context cx, Scriptable thisObj, Object[] args, Function funObj)
throws ScriptException {
String functionName = "getContentType";
int argsCount = args.length;
if (argsCount != 0) {
HostObjectUtil.invalidNumberOfArgs(hostObjectName, functionName, argsCount, false);
}
FileHostObject fho = (FileHostObject) thisObj;
if (!mimeMapLoaded) {
FileTypeMap.setDefaultFileTypeMap(loadMimeMap());
mimeMapLoaded = true;
}
return fho.file.getContentType();
}
private static FileTypeMap loadMimeMap() throws ScriptException {
String configDirPath = CarbonUtils.getEtcCarbonConfigDirPath();
File configFile = new File(configDirPath, RESOURCE_MEDIA_TYPE_MAPPINGS_FILE);
if (!configFile.exists()) {
String msg = "Resource media type definitions file (mime.types) file does " +
"not exist in the path " + configDirPath;
log.error(msg);
throw new ScriptException(msg);
}
final Map<String, String> mimeMappings = new HashMap<String, String>();
final String mappings;
try {
mappings = FileUtils.readFileToString(configFile, "UTF-8");
} catch (IOException e) {
String msg = "Error opening resource media type definitions file " +
"(mime.types) : " + e.getMessage();
throw new ScriptException(msg, e);
}
String[] lines = mappings.split("[\\r\\n]+");
for (String line : lines) {
if (!line.startsWith("#")) {
String[] parts = line.split("\\s+");
for (int i = 1; i < parts.length; i++) {
mimeMappings.put(parts[i], parts[0]);
}
}
}
return new FileTypeMap() {
@Override
public String getContentType(File file) {
return getContentType(file.getName());
}
@Override
public String getContentType(String fileName) {
int i = fileName.lastIndexOf('.');
if (i > 0) {
String mimeType = mimeMappings.get(fileName.substring(i + 1));
if (mimeType != null) {
return mimeType;
}
}
return "application/octet-stream";
}
};
}
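    // Illustrative note (not from the repository): loadMimeMap() expects Apache-style mime.types lines,
    // e.g. "text/html html htm" or "application/json json" - the first token is the media type and the
    // remaining whitespace-separated tokens are file extensions, so the resulting map contains
    // "html" -> "text/html", "htm" -> "text/html", "json" -> "application/json".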
public static Scriptable jsFunction_getStream(Context cx, Scriptable thisObj, Object[] args, Function funObj)
throws ScriptException {
String functionName = "getStream";
int argsCount = args.length;
if (argsCount != 0) {
HostObjectUtil.invalidNumberOfArgs(hostObjectName, functionName, argsCount, false);
}
FileHostObject fho = (FileHostObject) thisObj;
return fho.context.newObject(thisObj, "Stream", new Object[]{fho.file.getInputStream()});
}
public InputStream getInputStream() throws ScriptException {
return file.getInputStream();
}
public OutputStream getOutputStream() throws ScriptException {
return file.getOutputStream();
}
public String getName() throws ScriptException {
String fileName = file.getName();
fileName = FilenameUtils.getName(fileName);
return fileName;
}
public JavaScriptFile getJavaScriptFile() throws ScriptException {
return file;
}
public static boolean jsFunction_isDirectory(Context cx, Scriptable thisObj, Object[] args, Function funObj) throws ScriptException {
String functionName = "isDirectory";
int argsCount = args.length;
if (argsCount != 0) {
HostObjectUtil.invalidNumberOfArgs(hostObjectName, functionName, argsCount, false);
}
FileHostObject fho = (FileHostObject) thisObj;
return fho.file.isDirectory();
}
public static String jsFunction_getPath(Context cx, Scriptable thisObj, Object[] args, Function funObj) throws ScriptException {
String functionName = "getPath";
int argsCount = args.length;
if (argsCount != 0) {
HostObjectUtil.invalidNumberOfArgs(hostObjectName, functionName, argsCount, false);
}
FileHostObject fho = (FileHostObject) thisObj;
return fho.file.getURI();
}
public static boolean jsFunction_mkdir(Context cx, Scriptable thisObj, Object[] args, Function funObj) throws ScriptException {
String functionName = "mkdir";
int argsCount = args.length;
if (argsCount != 0) {
HostObjectUtil.invalidNumberOfArgs(hostObjectName, functionName, argsCount, false);
}
FileHostObject fho = (FileHostObject) thisObj;
return fho.file.mkdir();
}
public static Object jsFunction_listFiles(Context cx, Scriptable thisObj, Object[] args, Function funObj) throws ScriptException {
String functionName = "listFiles";
int argsCount = args.length;
if (argsCount != 0) {
HostObjectUtil.invalidNumberOfArgs(hostObjectName, functionName, argsCount, false);
}
FileHostObject fho = (FileHostObject) thisObj;
ArrayList<String> fpaths = fho.file.listFiles();
ArrayList<Scriptable> fhol = new ArrayList<Scriptable>();
for (String jsf : fpaths) {
fhol.add(fho.context.newObject(thisObj, "File", new Object[]{jsf}));
}
return cx.newArray(thisObj, fhol.toArray());
}
/**
* To unzip a zip file
*
* @param cx Context
* @param thisObj FileHostObject to be unzipped
* @param args Path to unzip the zip file
* @param funObj Function Object
* @throws ScriptException
*/
public static boolean jsFunction_unZip(Context cx, Scriptable thisObj, Object[] args, Function funObj)
throws ScriptException, IOException {
String functionName = "unZip";
int argsCount = args.length;
if (argsCount != 1) {
HostObjectUtil.invalidNumberOfArgs(hostObjectName, functionName, argsCount, false);
}
FileHostObject fho = (FileHostObject) thisObj;
ZipInputStream zin = null;
BufferedOutputStream out = null;
if (fho.file.isExist()) {
JaggeryContext context = (JaggeryContext) RhinoEngine.getContextProperty(EngineConstants.JAGGERY_CONTEXT);
Object obj = context.getProperty(JAVASCRIPT_FILE_MANAGER);
if (obj instanceof JavaScriptFileManager) {
fho.manager = (JavaScriptFileManager) obj;
} else {
fho.manager = new JavaScriptFileManagerImpl();
}
File zipfile = new File(fho.manager.getFile(fho.file.getPath()).getAbsolutePath());
File outdir = new File(fho.manager.getDirectoryPath(args[0].toString()));
outdir.getParentFile().mkdirs();
outdir.mkdir();
try {
zin = new ZipInputStream(new FileInputStream(zipfile));
ZipEntry entry;
String name, dir;
byte[] buffer = new byte[1024];
while ((entry = zin.getNextEntry()) != null) {
name = entry.getName();
if (entry.isDirectory()) {
mkdirs(outdir, name);
continue;
}
int hasParentDirs = name.lastIndexOf(File.separatorChar);
dir = (hasParentDirs == -1) ? null : name.substring(0, hasParentDirs);
if (dir != null) {
mkdirs(outdir, dir);
}
try {
out = new BufferedOutputStream(new FileOutputStream(new File(outdir, name)));
int count = -1;
while ((count = zin.read(buffer)) != -1) {
out.write(buffer, 0, count);
}
} finally {
if (out != null) {
out.close();
}
}
}
return true;
} catch (IOException ex) {
log.error("Cannot unzip the file " + ex);
throw new IOException(ex);
} finally {
if (zin != null) {
zin.close();
}
}
} else {
log.error("Zip file not exists");
}
return false;
}
/**
* To zip a folder
*
* @param cx Context
* @param thisObj FileHostObject
* @param args Zip file path to zip the folder
* @param funObj Function
* @throws ScriptException
*/
public static boolean jsFunction_zip(Context cx, Scriptable thisObj, Object[] args, Function funObj)
throws ScriptException, IOException {
String functionName = "zip";
int argsCount = args.length;
if (argsCount != 1) {
HostObjectUtil.invalidNumberOfArgs(hostObjectName, functionName, argsCount, false);
}
FileHostObject fho = (FileHostObject) thisObj;
ZipOutputStream zip = null;
if (fho.file.isExist()) {
JaggeryContext context = (JaggeryContext) RhinoEngine.getContextProperty(EngineConstants.JAGGERY_CONTEXT);
Object obj = context.getProperty(JAVASCRIPT_FILE_MANAGER);
if (obj instanceof JavaScriptFileManager) {
fho.manager = (JavaScriptFileManager) obj;
} else {
fho.manager = new JavaScriptFileManagerImpl();
}
String destinationPath = fho.manager.getFile(args[0].toString()).getAbsolutePath();
String sourcePath = fho.manager.getDirectoryPath(fho.file.getPath());
File destinationFile = new File(destinationPath);
destinationFile.getParentFile().mkdirs();
try {
zip = new ZipOutputStream(new FileOutputStream(destinationPath));
File folder = new File(sourcePath);
for (String fileName : folder.list()) {
addFileToZip("", sourcePath + File.separator + fileName, zip);
}
return true;
} catch (IOException ex) {
log.error("Cannot zip the folder. " + ex);
throw new IOException(ex);
} finally {
if (zip != null) {
zip.flush();
zip.close();
}
}
} else {
log.error("Zip operation cannot be done. Folder not found");
}
return false;
}
/**
* To add a file to zip
*
* @param path Root path name
* @param srcFile Source File that need to be added to zip
* @param zip ZipOutputStream
* @throws IOException
*/
private static void addFileToZip(String path, String srcFile, ZipOutputStream zip) throws IOException {
FileInputStream in = null;
try {
File folder = new File(srcFile);
if (folder.isDirectory()) {
addFolderToZip(path, srcFile, zip);
} else {
byte[] buf = new byte[1024];
int len;
in = new FileInputStream(srcFile);
zip.putNextEntry(new ZipEntry(path + File.separator + folder.getName()));
while ((len = in.read(buf)) > 0) {
zip.write(buf, 0, len);
}
}
} catch (IOException er) {
log.error("Cannot add file to zip " + er);
throw new IOException(er);
} finally {
if (in != null) {
in.close();
}
}
}
/**
* To add a folder to zip
*
* @param path Path of the file or folder from root directory of zip
* @param srcFolder Source folder to be made as zip
* @param zip ZipOutputStream
*/
private static void addFolderToZip(String path, String srcFolder, ZipOutputStream zip) throws IOException {
File folder = new File(srcFolder);
if (path.isEmpty()) {
zip.putNextEntry(new ZipEntry(folder.getName() + File.separator));
} else {
zip.putNextEntry(new ZipEntry(path + File.separator + folder.getName() + File.separator));
}
for (String fileName : folder.list()) {
if (path.isEmpty()) {
addFileToZip(folder.getName(), srcFolder + File.separator + fileName, zip);
} else {
addFileToZip(path + File.separator + folder.getName(), srcFolder + File.separator + fileName, zip);
}
}
}
/**
* To create the recursive directories in a specific path
*
* @param parentDirectory Parent of the directory
* @param path Path of the child directory to be created inside
*/
private static void mkdirs(File parentDirectory, String path) {
File dir = new File(parentDirectory, path);
if (!dir.exists()) {
dir.mkdirs();
}
}
}
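As a side note on the zip helpers above: the recursion in addFileToZip/addFolderToZip is the usual pattern for archiving a directory tree. A minimal standalone sketch of the same idea with plain java.util.zip (not part of this codebase; the class name and paths are made up, and it uses '/' as the entry separator, the conventional separator inside zip archives, whereas the host object composes entry names with File.separator):

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;

public final class ZipFolderSketch {
    // Archives the children of sourceDir into zipFile, streaming file contents in 1 KB chunks.
    public static void zipFolder(File sourceDir, File zipFile) throws IOException {
        try (ZipOutputStream zip = new ZipOutputStream(new FileOutputStream(zipFile))) {
            File[] children = sourceDir.listFiles();
            if (children != null) {
                for (File child : children) {
                    addEntry("", child, zip);
                }
            }
        }
    }

    private static void addEntry(String path, File source, ZipOutputStream zip) throws IOException {
        String name = path.isEmpty() ? source.getName() : path + "/" + source.getName();
        if (source.isDirectory()) {
            zip.putNextEntry(new ZipEntry(name + "/")); // directory entries end with '/'
            File[] children = source.listFiles();
            if (children != null) {
                for (File child : children) {
                    addEntry(name, child, zip);
                }
            }
        } else {
            zip.putNextEntry(new ZipEntry(name));
            try (FileInputStream in = new FileInputStream(source)) {
                byte[] buffer = new byte[1024];
                int len;
                while ((len = in.read(buffer)) > 0) {
                    zip.write(buffer, 0, len);
                }
            }
        }
    }

    private ZipFolderSketch() {
    }
}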
|
components/hostobjects/org.jaggeryjs.hostobjects.file/src/main/java/org/jaggeryjs/hostobjects/file/FileHostObject.java
|
package org.jaggeryjs.hostobjects.file;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.FilenameUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.jaggeryjs.hostobjects.stream.StreamHostObject;
import org.jaggeryjs.scriptengine.EngineConstants;
import org.jaggeryjs.scriptengine.engine.JaggeryContext;
import org.jaggeryjs.scriptengine.engine.RhinoEngine;
import org.jaggeryjs.scriptengine.exceptions.ScriptException;
import org.jaggeryjs.scriptengine.util.HostObjectUtil;
import org.mozilla.javascript.Context;
import org.mozilla.javascript.Function;
import org.mozilla.javascript.Scriptable;
import org.mozilla.javascript.ScriptableObject;
import org.wso2.carbon.utils.CarbonUtils;
import javax.activation.FileTypeMap;
import java.io.*;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
import java.util.zip.ZipOutputStream;
public class FileHostObject extends ScriptableObject {
private static final Log log = LogFactory.getLog(FileHostObject.class);
private static final String hostObjectName = "File";
public static final String JAVASCRIPT_FILE_MANAGER = "hostobjects.file.filemanager";
private static final String RESOURCE_MEDIA_TYPE_MAPPINGS_FILE = "mime.types";
private static boolean mimeMapLoaded = false;
private JavaScriptFile file = null;
private JavaScriptFileManager manager = null;
private Context context = null;
private String path;
public static Scriptable jsConstructor(Context cx, Object[] args, Function ctorObj,
boolean inNewExpr) throws ScriptException {
int argsCount = args.length;
if (argsCount != 1) {
HostObjectUtil.invalidNumberOfArgs(hostObjectName, hostObjectName, argsCount, true);
}
FileHostObject fho = new FileHostObject();
JaggeryContext context = (JaggeryContext) RhinoEngine.getContextProperty(EngineConstants.JAGGERY_CONTEXT);
Object obj = context.getProperty(JAVASCRIPT_FILE_MANAGER);
if (obj instanceof JavaScriptFileManager) {
fho.manager = (JavaScriptFileManager) obj;
} else {
fho.manager = new JavaScriptFileManagerImpl();
}
fho.file = fho.manager.getJavaScriptFile(args[0]);
fho.file.construct();
fho.context = cx;
return fho;
}
public String getClassName() {
return hostObjectName;
}
public static void jsFunction_open(Context cx, Scriptable thisObj, Object[] args, Function funObj)//NOPMD
throws ScriptException {
String functionName = "open";
int argsCount = args.length;
if (argsCount != 1) {
HostObjectUtil.invalidNumberOfArgs(hostObjectName, functionName, argsCount, false);
}
if (!(args[0] instanceof String)) {
HostObjectUtil.invalidArgsError(hostObjectName, functionName, "1", "string", args[0], false);
}
FileHostObject fho = (FileHostObject) thisObj;
fho.file.open((String) args[0]);
}
public static void jsFunction_write(Context cx, Scriptable thisObj, Object[] args, Function funObj)//NOPMD
throws ScriptException {
String functionName = "write";
int argsCount = args.length;
if (argsCount != 1) {
HostObjectUtil.invalidNumberOfArgs(hostObjectName, functionName, argsCount, false);
}
Object data = args[0];
FileHostObject fho = (FileHostObject) thisObj;
if (data instanceof InputStream) {
fho.file.write((InputStream) data);
} else if (data instanceof StreamHostObject) {
fho.file.write(((StreamHostObject) data).getStream());
} else {
fho.file.write(HostObjectUtil.serializeObject(args[0]));
}
}
public static String jsFunction_read(Context cx, Scriptable thisObj, Object[] args, Function funObj)
throws ScriptException {
String functionName = "read";
int argsCount = args.length;
if (argsCount != 1) {
HostObjectUtil.invalidNumberOfArgs(hostObjectName, functionName, argsCount, false);
}
if (!(args[0] instanceof Number)) {
HostObjectUtil.invalidArgsError(hostObjectName, functionName, "1", "string", args[0], false);
}
FileHostObject fho = (FileHostObject) thisObj;
int count = ((Number) args[0]).intValue();
return fho.file.read(count);
}
public static String jsFunction_readAll(Context cx, Scriptable thisObj, Object[] args, Function funObj)
throws ScriptException {
String functionName = "readAll";
int argsCount = args.length;
if (argsCount != 0) {
HostObjectUtil.invalidNumberOfArgs(hostObjectName, functionName, argsCount, false);
}
FileHostObject fho = (FileHostObject) thisObj;
return fho.file.readAll();
}
public static void jsFunction_close(Context cx, Scriptable thisObj, Object[] args, Function funObj)
throws ScriptException {
String functionName = "close";
int argsCount = args.length;
if (argsCount != 0) {
HostObjectUtil.invalidNumberOfArgs(hostObjectName, functionName, argsCount, false);
}
FileHostObject fho = (FileHostObject) thisObj;
fho.file.close();
}
public static boolean jsFunction_move(Context cx, Scriptable thisObj, Object[] args, Function funObj)
throws ScriptException {
String functionName = "move";
int argsCount = args.length;
if (argsCount != 1) {
HostObjectUtil.invalidNumberOfArgs(hostObjectName, functionName, argsCount, false);
}
if (!(args[0] instanceof String)) {
HostObjectUtil.invalidArgsError(hostObjectName, functionName, "1", "string", args[0], false);
}
FileHostObject fho = (FileHostObject) thisObj;
String dest = fho.manager.getJavaScriptFile(args[0]).getURI();
return fho.file.move(dest);
}
public static boolean jsFunction_saveAs(Context cx, Scriptable thisObj, Object[] args, Function funObj)
throws ScriptException {
String functionName = "saveAs";
int argsCount = args.length;
if (argsCount != 1) {
HostObjectUtil.invalidNumberOfArgs(hostObjectName, functionName, argsCount, false);
}
if (!(args[0] instanceof String)) {
HostObjectUtil.invalidArgsError(hostObjectName, functionName, "1", "string", args[0], false);
}
FileHostObject fho = (FileHostObject) thisObj;
String dest = fho.manager.getJavaScriptFile(args[0]).getURI();
return fho.file.saveAs(dest);
}
public static boolean jsFunction_del(Context cx, Scriptable thisObj, Object[] args, Function funObj)
throws ScriptException {
String functionName = "del";
int argsCount = args.length;
if (argsCount != 0) {
HostObjectUtil.invalidNumberOfArgs(hostObjectName, functionName, argsCount, false);
}
FileHostObject fho = (FileHostObject) thisObj;
return fho.file.del();
}
public static long jsFunction_getLength(Context cx, Scriptable thisObj, Object[] args, Function funObj)
throws ScriptException {
String functionName = "getLength";
int argsCount = args.length;
if (argsCount != 0) {
HostObjectUtil.invalidNumberOfArgs(hostObjectName, functionName, argsCount, false);
}
FileHostObject fho = (FileHostObject) thisObj;
return fho.file.getLength();
}
public static long jsFunction_getLastModified(Context cx, Scriptable thisObj, Object[] args, Function funObj)
throws ScriptException {
String functionName = "getLastModified";
int argsCount = args.length;
if (argsCount != 0) {
HostObjectUtil.invalidNumberOfArgs(hostObjectName, functionName, argsCount, false);
}
FileHostObject fho = (FileHostObject) thisObj;
return fho.file.getLastModified();
}
public static String jsFunction_getName(Context cx, Scriptable thisObj, Object[] args, Function funObj)
throws ScriptException {
String functionName = "getName";
int argsCount = args.length;
if (argsCount != 0) {
HostObjectUtil.invalidNumberOfArgs(hostObjectName, functionName, argsCount, false);
}
FileHostObject fho = (FileHostObject) thisObj;
return fho.file.getName();
}
public static boolean jsFunction_isExists(Context cx, Scriptable thisObj, Object[] args, Function funObj)
throws ScriptException {
String functionName = "isExists";
int argsCount = args.length;
if (argsCount != 0) {
HostObjectUtil.invalidNumberOfArgs(hostObjectName, functionName, argsCount, false);
}
FileHostObject fho = (FileHostObject) thisObj;
return fho.file.isExist();
}
public static String jsFunction_getContentType(Context cx, Scriptable thisObj, Object[] args, Function funObj)
throws ScriptException {
String functionName = "getContentType";
int argsCount = args.length;
if (argsCount != 0) {
HostObjectUtil.invalidNumberOfArgs(hostObjectName, functionName, argsCount, false);
}
FileHostObject fho = (FileHostObject) thisObj;
if (!mimeMapLoaded) {
FileTypeMap.setDefaultFileTypeMap(loadMimeMap());
mimeMapLoaded = true;
}
return fho.file.getContentType();
}
private static FileTypeMap loadMimeMap() throws ScriptException {
String configDirPath = CarbonUtils.getEtcCarbonConfigDirPath();
File configFile = new File(configDirPath, RESOURCE_MEDIA_TYPE_MAPPINGS_FILE);
if (!configFile.exists()) {
String msg = "Resource media type definitions file (mime.types) file does " +
"not exist in the path " + configDirPath;
log.error(msg);
throw new ScriptException(msg);
}
final Map<String, String> mimeMappings = new HashMap<String, String>();
final String mappings;
try {
mappings = FileUtils.readFileToString(configFile, "UTF-8");
} catch (IOException e) {
String msg = "Error opening resource media type definitions file " +
"(mime.types) : " + e.getMessage();
throw new ScriptException(msg, e);
}
String[] lines = mappings.split("[\\r\\n]+");
for (String line : lines) {
if (!line.startsWith("#")) {
String[] parts = line.split("\\s+");
for (int i = 1; i < parts.length; i++) {
mimeMappings.put(parts[i], parts[0]);
}
}
}
return new FileTypeMap() {
@Override
public String getContentType(File file) {
return getContentType(file.getName());
}
@Override
public String getContentType(String fileName) {
int i = fileName.lastIndexOf('.');
if (i > 0) {
String mimeType = mimeMappings.get(fileName.substring(i + 1));
if (mimeType != null) {
return mimeType;
}
}
return "application/octet-stream";
}
};
}
public static Scriptable jsFunction_getStream(Context cx, Scriptable thisObj, Object[] args, Function funObj)
throws ScriptException {
String functionName = "getStream";
int argsCount = args.length;
if (argsCount != 0) {
HostObjectUtil.invalidNumberOfArgs(hostObjectName, functionName, argsCount, false);
}
FileHostObject fho = (FileHostObject) thisObj;
return fho.context.newObject(thisObj, "Stream", new Object[]{fho.file.getInputStream()});
}
public InputStream getInputStream() throws ScriptException {
return file.getInputStream();
}
public OutputStream getOutputStream() throws ScriptException {
return file.getOutputStream();
}
public String getName() throws ScriptException {
String fileName = file.getName();
fileName = FilenameUtils.getName(fileName);
return fileName;
}
public JavaScriptFile getJavaScriptFile() throws ScriptException {
return file;
}
public static boolean jsFunction_isDirectory(Context cx, Scriptable thisObj, Object[] args, Function funObj) throws ScriptException {
String functionName = "isDirectory";
int argsCount = args.length;
if (argsCount != 0) {
HostObjectUtil.invalidNumberOfArgs(hostObjectName, functionName, argsCount, false);
}
FileHostObject fho = (FileHostObject) thisObj;
return fho.file.isDirectory();
}
public static String jsFunction_getPath(Context cx, Scriptable thisObj, Object[] args, Function funObj) throws ScriptException {
String functionName = "getPath";
int argsCount = args.length;
if (argsCount != 0) {
HostObjectUtil.invalidNumberOfArgs(hostObjectName, functionName, argsCount, false);
}
FileHostObject fho = (FileHostObject) thisObj;
return fho.file.getURI();
}
public static boolean jsFunction_mkdir(Context cx, Scriptable thisObj, Object[] args, Function funObj) throws ScriptException {
String functionName = "mkdir";
int argsCount = args.length;
if (argsCount != 0) {
HostObjectUtil.invalidNumberOfArgs(hostObjectName, functionName, argsCount, false);
}
FileHostObject fho = (FileHostObject) thisObj;
return fho.file.mkdir();
}
public static Object jsFunction_listFiles(Context cx, Scriptable thisObj, Object[] args, Function funObj) throws ScriptException {
String functionName = "listFiles";
int argsCount = args.length;
if (argsCount != 0) {
HostObjectUtil.invalidNumberOfArgs(hostObjectName, functionName, argsCount, false);
}
FileHostObject fho = (FileHostObject) thisObj;
ArrayList<String> fpaths = fho.file.listFiles();
ArrayList<Scriptable> fhol = new ArrayList<Scriptable>();
for (String jsf : fpaths) {
fhol.add(fho.context.newObject(thisObj, "File", new Object[]{jsf}));
}
return cx.newArray(thisObj, fhol.toArray());
}
/**
* To unzip a zip file
*
* @param cx Context
* @param thisObj FileHostObject to be unzipped
* @param args Path to unzip the zip file
* @param funObj Function Object
* @throws ScriptException
*/
public static boolean jsFunction_unZip(Context cx, Scriptable thisObj, Object[] args, Function funObj)
throws ScriptException, IOException {
String functionName = "unZip";
int argsCount = args.length;
if (argsCount != 1) {
HostObjectUtil.invalidNumberOfArgs(hostObjectName, functionName, argsCount, false);
}
FileHostObject fho = (FileHostObject) thisObj;
ZipInputStream zin = null;
BufferedOutputStream out = null;
if (fho.file.isExist()) {
JaggeryContext context = (JaggeryContext) RhinoEngine.getContextProperty(EngineConstants.JAGGERY_CONTEXT);
Object obj = context.getProperty(JAVASCRIPT_FILE_MANAGER);
if (obj instanceof JavaScriptFileManager) {
fho.manager = (JavaScriptFileManager) obj;
} else {
fho.manager = new JavaScriptFileManagerImpl();
}
File zipfile = new File(fho.manager.getFile(fho.file.getPath()).getAbsolutePath());
File outdir = new File(fho.manager.getDirectoryPath(args[0].toString()));
outdir.getParentFile().mkdirs();
outdir.mkdir();
try {
zin = new ZipInputStream(new FileInputStream(zipfile));
ZipEntry entry;
String name, dir;
byte[] buffer = new byte[1024];
while ((entry = zin.getNextEntry()) != null) {
name = entry.getName();
if (entry.isDirectory()) {
mkdirs(outdir, name);
continue;
}
int hasParentDirs = name.lastIndexOf(File.separatorChar);
dir = (hasParentDirs == -1) ? null : name.substring(0, hasParentDirs);
if (dir != null) {
mkdirs(outdir, dir);
}
out = new BufferedOutputStream(new FileOutputStream(new File(outdir, name)));
int count = -1;
while ((count = zin.read(buffer)) != -1) {
out.write(buffer, 0, count);
}
}
return true;
} catch (IOException ex) {
log.error("Cannot unzip the file " + ex);
throw new IOException(ex);
} finally {
if (zin != null) {
zin.close();
}
if (out != null) {
out.close();
}
}
} else {
log.error("Zip file not exists");
}
return false;
}
/**
* To zip a folder
*
* @param cx Context
* @param thisObj FileHostObject
* @param args Zip file path to zip the folder
* @param funObj Function
* @throws ScriptException
*/
public static boolean jsFunction_zip(Context cx, Scriptable thisObj, Object[] args, Function funObj)
throws ScriptException, IOException {
String functionName = "zip";
int argsCount = args.length;
if (argsCount != 1) {
HostObjectUtil.invalidNumberOfArgs(hostObjectName, functionName, argsCount, false);
}
FileHostObject fho = (FileHostObject) thisObj;
ZipOutputStream zip = null;
if (fho.file.isExist()) {
JaggeryContext context = (JaggeryContext) RhinoEngine.getContextProperty(EngineConstants.JAGGERY_CONTEXT);
Object obj = context.getProperty(JAVASCRIPT_FILE_MANAGER);
if (obj instanceof JavaScriptFileManager) {
fho.manager = (JavaScriptFileManager) obj;
} else {
fho.manager = new JavaScriptFileManagerImpl();
}
String destinationPath = fho.manager.getFile(args[0].toString()).getAbsolutePath();
String sourcePath = fho.manager.getDirectoryPath(fho.file.getPath());
File destinationFile = new File(destinationPath);
destinationFile.getParentFile().mkdirs();
try {
zip = new ZipOutputStream(new FileOutputStream(destinationPath));
File folder = new File(sourcePath);
for (String fileName : folder.list()) {
addFileToZip("", sourcePath + File.separator + fileName, zip);
}
return true;
} catch (IOException ex) {
log.error("Cannot zip the folder. " + ex);
throw new IOException(ex);
} finally {
if (zip != null) {
zip.flush();
zip.close();
}
}
} else {
log.error("Zip operation cannot be done. Folder not found");
}
return false;
}
/**
* To add a file to zip
*
* @param path Root path name
* @param srcFile Source File that need to be added to zip
* @param zip ZipOutputStream
* @throws IOException
*/
private static void addFileToZip(String path, String srcFile, ZipOutputStream zip) throws IOException {
FileInputStream in = null;
try {
File folder = new File(srcFile);
if (folder.isDirectory()) {
addFolderToZip(path, srcFile, zip);
} else {
byte[] buf = new byte[1024];
int len;
in = new FileInputStream(srcFile);
zip.putNextEntry(new ZipEntry(path + File.separator + folder.getName()));
while ((len = in.read(buf)) > 0) {
zip.write(buf, 0, len);
}
}
} catch (IOException er) {
log.error("Cannot add file to zip " + er);
throw new IOException(er);
} finally {
if (in != null) {
in.close();
}
}
}
/**
* To add a folder to zip
*
* @param path Path of the file or folder from root directory of zip
* @param srcFolder Source folder to be made as zip
* @param zip ZipOutputStream
*/
private static void addFolderToZip(String path, String srcFolder, ZipOutputStream zip) throws IOException {
File folder = new File(srcFolder);
if (path.isEmpty()) {
zip.putNextEntry(new ZipEntry(folder.getName() + File.separator));
} else {
zip.putNextEntry(new ZipEntry(path + File.separator + folder.getName() + File.separator));
}
for (String fileName : folder.list()) {
if (path.isEmpty()) {
addFileToZip(folder.getName(), srcFolder + File.separator + fileName, zip);
} else {
addFileToZip(path + File.separator + folder.getName(), srcFolder + File.separator + fileName, zip);
}
}
}
/**
* To create the recursive directories in a specific path
*
* @param parentDirectory Parent of the directory
* @param path Path of the child directory to be created inside
*/
private static void mkdirs(File parentDirectory, String path) {
File dir = new File(parentDirectory, path);
if (!dir.exists()) {
dir.mkdirs();
}
}
}
|
Closing the file output stream at correct point
|
components/hostobjects/org.jaggeryjs.hostobjects.file/src/main/java/org/jaggeryjs/hostobjects/file/FileHostObject.java
|
Closing the file output stream at correct point
|
<ide><path>omponents/hostobjects/org.jaggeryjs.hostobjects.file/src/main/java/org/jaggeryjs/hostobjects/file/FileHostObject.java
<ide> if (dir != null) {
<ide> mkdirs(outdir, dir);
<ide> }
<del> out = new BufferedOutputStream(new FileOutputStream(new File(outdir, name)));
<del> int count = -1;
<del> while ((count = zin.read(buffer)) != -1) {
<del> out.write(buffer, 0, count);
<add> try {
<add> out = new BufferedOutputStream(new FileOutputStream(new File(outdir, name)));
<add> int count = -1;
<add> while ((count = zin.read(buffer)) != -1) {
<add> out.write(buffer, 0, count);
<add> }
<add> } finally {
<add> if (out != null) {
<add> out.close();
<add> }
<ide> }
<ide> }
<ide> return true;
<ide> } finally {
<ide> if (zin != null) {
<ide> zin.close();
<del> }
<del> if (out != null) {
<del> out.close();
<ide> }
<ide> }
<ide> } else {
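The patch above moves out.close() into a per-entry finally block, so a failure while extracting one entry still closes that entry's output stream before the outer finally closes zin. The same cleanup can be written more tersely with try-with-resources (available since Java 7); a sketch only, assuming the ZipInputStream, output directory and imports (java.io, java.util.zip) are as in jsFunction_unZip:

private static void extractEntries(ZipInputStream zin, File outdir) throws IOException {
    byte[] buffer = new byte[1024];
    ZipEntry entry;
    while ((entry = zin.getNextEntry()) != null) {
        File target = new File(outdir, entry.getName());
        if (entry.isDirectory()) {
            target.mkdirs();
            continue;
        }
        target.getParentFile().mkdirs();
        // the per-entry stream is closed even if a read or write throws
        try (BufferedOutputStream out = new BufferedOutputStream(new FileOutputStream(target))) {
            int count;
            while ((count = zin.read(buffer)) != -1) {
                out.write(buffer, 0, count);
            }
        }
    }
}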
|
|
Java
|
apache-2.0
|
b81cc2d3a09c00ee36ac7f2bbc7d85daf6e3ec63
| 0 |
kelemen/JTrim,kelemen/JTrim
|
package org.jtrim.taskgraph;
import java.util.concurrent.CompletionStage;
import org.jtrim.cancel.CancellationToken;
/**
* Defines the initial nodes of the task graph. Additional nodes will be automatically
* created based on the input needs of the task nodes. That is, {@link #addNode(TaskNodeKey) adding a task node key}
* will force the graph execution framework to create a task node with factory specified by the key; when
* the factory defines inputs for the action it creates, it will recursively spawn new nodes.
*
* <h3>Thread safety</h3>
* The methods of this interface may not be used by multiple threads concurrently, unless otherwise noted.
*
* <h4>Synchronization transparency</h4>
* The methods of this interface are not <I>synchronization transparent</I> in general.
*
* @see TaskGraphDefConfigurer
* @see TaskGraphExecutor
*/
public interface TaskGraphBuilder {
/**
* Adds a node to the task execution graph. Adding a node will force the execution framework
* to at least execute that node.
* <P>
* It is allowed to call {@code addNode} concurrently with another {@code addNode} call. However,
* the {@code addNode} method may not be called concurrently with any other method of this
* interface.
*
* @param nodeKey the {@code TaskNodeKey} identifying the task node to be created. The
* task node key must not be added multiple times and there must be a task node factory
* able to create nodes based on this key. This argument cannot be {@code null}.
*/
public void addNode(TaskNodeKey<?, ?> nodeKey);
/**
* Returns the properties used when building the task graph. The properties must be set
* before calling the {@link #buildGraph(CancellationToken) buildGraph} method.
*
* @return the properties used when building the task graph. This method never returns
* {@code null}.
*/
public TaskGraphBuilderProperties.Builder properties();
/**
* Starts building a task graph and will notify the returned {@code CompletionStage} once the
* graph is built and is ready to be executed.
* <P>
* Adding further nodes after calling this method does not affect the {@code TaskGraphExecutor}
* to be created.
*
* @param cancelToken the {@code CancellationToken} which can be used to cancel the building
* of the task graph. The framework will make a best effort to cancel the building of the task graph.
* However, there is no guarantee that cancellation request will be fulfilled. If cancellation succeeds,
* the {@code CompletionStage} will complete exceptionally with an
* {@link org.jtrim.cancel.OperationCanceledException OperationCanceledException}.
* @return the {@code CompletionStage} which can be used to receive the {@code TaskGraphExecutor} and
* actually start the task graph execution. This method never returns {@code null}.
*/
public CompletionStage<TaskGraphExecutor> buildGraph(CancellationToken cancelToken);
}
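A rough usage sketch of this interface (illustrative only, not from the repository; how the TaskNodeKey is constructed and what is later done with the TaskGraphExecutor are assumptions, so they are left as parameters and comments):

import java.util.concurrent.CompletionStage;
import org.jtrim.cancel.CancellationToken;
import org.jtrim.taskgraph.TaskGraphBuilder;
import org.jtrim.taskgraph.TaskGraphExecutor;
import org.jtrim.taskgraph.TaskNodeKey;

public final class TaskGraphBuilderExample {
    // Registers the root node, builds the graph, and reports when the executor is ready.
    public static CompletionStage<TaskGraphExecutor> buildFor(
            TaskGraphBuilder builder, TaskNodeKey<?, ?> rootNode, CancellationToken cancelToken) {
        builder.addNode(rootNode); // forces at least this node (and, transitively, its inputs) to be created
        return builder.buildGraph(cancelToken)
                .whenComplete((executor, error) -> {
                    if (error != null) {
                        // May be an OperationCanceledException if building was canceled.
                        System.err.println("Building the task graph failed: " + error);
                    }
                });
    }

    private TaskGraphBuilderExample() {
    }
}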
|
jtrim-task-graph/src/main/java/org/jtrim/taskgraph/TaskGraphBuilder.java
|
package org.jtrim.taskgraph;
import java.util.concurrent.CompletionStage;
import org.jtrim.cancel.CancellationToken;
/**
* Defines the initial nodes of the task graph. Additional nodes will be automatically
* created based on the input needs of the task nodes. That is, {@link #addNode(TaskNodeKey) adding a task node key}
* will force the graph execution framework to create a task node with factory specified by the key; when
* the factory defines inputs for the action it creates, it will recursively spawn new nodes.
*
* <h3>Thread safety</h3>
* The methods of this interface may not be used by multiple threads concurrently, unless otherwise noted.
*
* <h4>Synchronization transparency</h4>
* The methods of this interface are not <I>synchronization transparent</I> in general.
*
* @see TaskGraphDefConfigurer
* @see TaskGraphExecutor
*/
public interface TaskGraphBuilder {
/**
* Adds a node to the task execution graph. Adding a node will force the execution framework
* to at least execute that node.
* <P>
* It is allowed to call {@code addNode} concurrently with another {@code addNode} call. However,
* the {@code addNode} method may not be called concurrently with any other method of this
* interface.
*
* @param nodeKey the {@code TaskNodeKey} identifying the task node to be created. The
* task node key must not be added multiple times and there must be a task node factory
* able to create nodes based on this key. This argument cannot be {@code null}.
*/
public void addNode(TaskNodeKey<?, ?> nodeKey);
/**
* Returns the properties used when building the task graph. The properties must be set
* before calling the {@link #buildGraph(CancellationToken) buildGraph} method.
*
* @return the properties used when building the task graph. This method never returns
* {@code null}.
*/
public TaskGraphBuilderProperties.Builder properties();
/**
* Starts building a task graph and will notify the returned {@code CompletionStage} once the
* graph is built and is ready to be executed.
* <P>
* Adding further nodes after calling this
*
* @param cancelToken the {@code CancellationToken} which can be used to cancel the building
* of the task graph. The framework will make a best effort to cancel the building of the task graph.
* However, there is no guarantee that cancellation request will be fulfilled. If cancellation succeeds,
* the {@code CompletionStage} will complete exceptionally with an
* {@link org.jtrim.cancel.OperationCanceledException OperationCanceledException}.
* @return the {@code CompletionStage} which can be used to receive the {@code TaskGraphExecutor} and
* actually start the task graph execution. This method never returns {@code null}.
*/
public CompletionStage<TaskGraphExecutor> buildGraph(CancellationToken cancelToken);
}
|
Finished an unfinished javadoc comment in TaskGraphBuilder.
|
jtrim-task-graph/src/main/java/org/jtrim/taskgraph/TaskGraphBuilder.java
|
Finished an unfinished javadoc comment in TaskGraphBuilder.
|
<ide><path>trim-task-graph/src/main/java/org/jtrim/taskgraph/TaskGraphBuilder.java
<ide> * Starts building a task graph and will notify the returned {@code CompletionStage} once the
<ide> * graph is built and is ready to be executed.
<ide> * <P>
<del> * Adding further nodes after calling this
<add> * Adding further nodes after calling this method does not affect the {@code TaskGraphExecutor}
<add> * to be created.
<ide> *
<ide> * @param cancelToken the {@code CancellationToken} which can be used to cancel the building
<ide> * of the task graph. The framework will make a best effort to cancel the building of the task graph.
|
|
Java
|
mit
|
9a7e67a43121f734d318e5516c8c2a5631aa7d34
| 0 |
fhirschmann/clozegen,fhirschmann/clozegen
|
/*
* Copyright (C) 2012 Fabian Hirschmann <[email protected]>
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*/
package com.github.fhirschmann.clozegen.lib.annotators.en;
import com.github.fhirschmann.clozegen.lib.annotators.AbstractPosTrigramAnnotator;
import com.github.fhirschmann.clozegen.lib.multiset.MapMultiset;
import com.github.fhirschmann.clozegen.lib.multiset.MultisetReader;
import com.github.fhirschmann.clozegen.lib.type.GapAnnotation;
import com.github.fhirschmann.clozegen.lib.util.MultisetUtils;
import com.github.fhirschmann.clozegen.lib.util.UIMAUtils;
import com.github.fhirschmann.clozegen.lib.util.WordFilterFunction;
import com.google.common.base.Joiner;
import com.google.common.collect.*;
import com.google.common.collect.Multiset.Entry;
import com.google.common.io.Resources;
import de.tudarmstadt.ukp.dkpro.core.api.lexmorph.type.pos.POS;
import de.tudarmstadt.ukp.dkpro.core.api.lexmorph.type.pos.PP;
import java.io.IOException;
import java.util.List;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.apache.uima.UimaContext;
import org.apache.uima.jcas.JCas;
import org.apache.uima.jcas.cas.NonEmptyStringList;
import org.apache.uima.resource.ResourceInitializationException;
import org.uimafit.descriptor.ConfigurationParameter;
import org.uimafit.util.FSCollectionFactory;
/**
* This annotator creates annotations for prepositions.
*
* <p>It implements the concepts as proposed in the paper
* <i>Automatic Generation of Cloze Items for Prepositions</i> [1]
* by Lee et al.
*
* <p>Three collocation data files which describe the frequency of preposition
* collocations need to be present in the model directory:
* <ul>
* <li>trigrams.txt: prepositions located in the middle
* <li>before.txt: preposition located on the left-hand side
* <li>after.txt: preposition located on the right-hand side
* </ul>
* The space separated word sequence and the corresponding counts need
* to be separated by the tab-character. For more detail on the format,
* please consult the documentation on {@link MultisetReader#parseMultiset}
* and {@link MultisetReader#parseMapMultiset}, which describe the format
* for {before|after}.txt and trigrams.txt, respectively.
*
* The actual models can be generated using the
*
* <p>[1] <b>J. Lee and S. Seneff</b>.<br/>
* Automatic generation of cloze items for prepositions.<br/>
* <i>In Eight Annual Conference of the International Speech Communication
* Association, 2007</i>.
*
* @param PARAM_MODEL_PATH the directory in which the models can be found
* @param CHOICES_COUNT the number of answers to generate
*
* @author Fabian Hirschmann <[email protected]>
*/
public class PrepositionGapGenerator extends AbstractPosTrigramAnnotator {
public static final String PARAM_MODEL_PATH = "ModelPath";
@ConfigurationParameter(name = PARAM_MODEL_PATH, mandatory = true)
private String modelPath;
/** (A, p, B) - preposition surrounded by A, B. */
private Multiset<String> trigrams;
/** (p, B) - prepositions comes second. */
private MapMultiset<String, String> after;
/** (A, p) - prepositions comes first. */
private MapMultiset<String, String> before;
private final static Joiner joiner = Joiner.on(" ");
public final static int CHOICES_COUNT = 4;
@Override
public void initialize(UimaContext context) throws ResourceInitializationException {
super.initialize(context);
try {
trigrams = MultisetReader.parseMultiset(
Resources.getResource(modelPath + "/trigrams.txt"));
after = MultisetReader.parseMapMultiset(
Resources.getResource(modelPath + "/after.txt"), 0);
before = MultisetReader.parseMapMultiset(
Resources.getResource(modelPath + "/before.txt"), 1);
} catch (IOException ex) {
Logger.getLogger(PrepositionGapGenerator.class.getName()).log(Level.SEVERE, null, ex);
}
}
@Override
public void processTrigram(JCas aJCas, List<POS> pos) {
List<String> strings = Lists.newArrayList(Collections2.
transform(pos, new WordFilterFunction()));
// Collect a list of possible candidates for this gap
final Multiset<String> candidates = ConcurrentHashMultiset.create(
MultisetUtils.mergeMultiSets(
before.get(strings.get(0)), after.get(strings.get(0))));
// Removed candidates p* which appeared in the context (A, p*, B)
for (Entry<String> entry : candidates.entrySet()) {
if (trigrams.contains(
joiner.join(strings.get(0), entry.getElement(), strings.get(1)))) {
candidates.remove(entry.getElement(), entry.getCount());
}
}
// Remove the correct answer from the candidates
candidates.remove(strings.get(1), candidates.count(strings.get(1)));
if (candidates.elementSet().size() > CHOICES_COUNT - 2) {
final Set<String> invalidAnswers = Sets.newHashSet(
MultisetUtils.sortedElementList(candidates, CHOICES_COUNT - 1));
GapAnnotation gap = UIMAUtils.createGapAnnotation(aJCas,
ImmutableSet.of(strings.get(1)), invalidAnswers);
gap.setBegin(pos.get(1).getBegin());
gap.setEnd(pos.get(1).getEnd());
gap.addToIndexes();
}
}
}
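To make the collocation-file format described in the Javadoc concrete: each line is a space-separated word sequence, a tab, and a count. The vocabulary below is made up for illustration; only the tab-separated layout comes from the documentation. A minimal parsing sketch for one such line:

// Hypothetical before.txt/after.txt entry: "interested in<TAB>42"
String line = "interested in\t42";
String[] parts = line.split("\t");
String[] words = parts[0].split(" ");   // ["interested", "in"]
int count = Integer.parseInt(parts[1]); // 42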
|
lib/src/main/java/com/github/fhirschmann/clozegen/lib/annotators/en/PrepositionGapGenerator.java
|
/*
* Copyright (C) 2012 Fabian Hirschmann <[email protected]>
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*/
package com.github.fhirschmann.clozegen.lib.annotators.en;
import com.github.fhirschmann.clozegen.lib.annotators.AbstractPosTrigramAnnotator;
import com.github.fhirschmann.clozegen.lib.multiset.MapMultiset;
import com.github.fhirschmann.clozegen.lib.multiset.MultisetReader;
import com.github.fhirschmann.clozegen.lib.type.GapAnnotation;
import com.github.fhirschmann.clozegen.lib.util.MultisetUtils;
import com.github.fhirschmann.clozegen.lib.util.UIMAUtils;
import com.github.fhirschmann.clozegen.lib.util.WordFilterFunction;
import com.google.common.base.Joiner;
import com.google.common.collect.*;
import com.google.common.collect.Multiset.Entry;
import com.google.common.io.Resources;
import de.tudarmstadt.ukp.dkpro.core.api.lexmorph.type.pos.POS;
import de.tudarmstadt.ukp.dkpro.core.api.lexmorph.type.pos.PP;
import java.io.IOException;
import java.util.List;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.apache.uima.UimaContext;
import org.apache.uima.jcas.JCas;
import org.apache.uima.jcas.cas.NonEmptyStringList;
import org.apache.uima.resource.ResourceInitializationException;
import org.uimafit.descriptor.ConfigurationParameter;
import org.uimafit.util.FSCollectionFactory;
/**
* This annotator creates annotations for prepositions.
*
* <p>It implements the concepts as proposed in the paper
* <i>Automatic Generation of Cloze Items for Prepositions</i> [1]
* by Lee et al.
*
* <p>Three collocation data files which describe the frequency of preposition
* collocations need to be present in the model directory:
* <ul>
* <li>trigrams.txt: prepositions located in the middle
* <li>before.txt: preposition located on the left-hand side
* <li>after.txt: preposition located on the right-hand side
* </ul>
* The space separated word sequence and the corresponding counts need
* to be separated by the tab-character. For more detail on the format,
* please consult the documentation on {@link MultisetReader#parseMultiset}
* and {@link MultisetReader#parseMapMultiset}, which describe the format
* for {before|after}.txt and trigrams.txt, respectively.
*
* <p>[1] <b>J. Lee and S. Seneff</b>.<br/>
* Automatic generation of cloze items for prepositions.<br/>
* <i>In Eight Annual Conference of the International Speech Communication
* Association, 2007</i>.
*
* @param PARAM_MODEL_PATH the directory in which the models can be found
* @param CHOICES_COUNT the number of answers to generate
*
* @author Fabian Hirschmann <[email protected]>
*/
public class PrepositionGapGenerator extends AbstractPosTrigramAnnotator {
public static final String PARAM_MODEL_PATH = "ModelPath";
@ConfigurationParameter(name = PARAM_MODEL_PATH, mandatory = true)
private String modelPath;
private Multiset<String> trigrams;
private MapMultiset<String, String> after;
private MapMultiset<String, String> before;
private final static Joiner joiner = Joiner.on(" ");
public final static int CHOICES_COUNT = 4;
@Override
public void initialize(UimaContext context) throws ResourceInitializationException {
super.initialize(context);
try {
trigrams = MultisetReader.parseMultiset(
Resources.getResource(modelPath + "/trigrams.txt"));
after = MultisetReader.parseMapMultiset(
Resources.getResource(modelPath + "/after.txt"), 0);
before = MultisetReader.parseMapMultiset(
Resources.getResource(modelPath + "/before.txt"), 1);
} catch (IOException ex) {
Logger.getLogger(PrepositionGapGenerator.class.getName()).log(Level.SEVERE, null, ex);
}
}
@Override
public void processTrigram(JCas aJCas, List<POS> pos) {
List<String> strings = Lists.newArrayList(Collections2.
transform(pos, new WordFilterFunction()));
// Collect a list of possible candidates for this gap
final Multiset<String> candidates = ConcurrentHashMultiset.create(
MultisetUtils.mergeMultiSets(
before.get(strings.get(0)), after.get(strings.get(0))));
// Removed candidates p* which appeared in the context (A, p*, B)
for (Entry<String> entry : candidates.entrySet()) {
if (trigrams.contains(
joiner.join(strings.get(0), entry.getElement(), strings.get(1)))) {
candidates.remove(entry.getElement(), entry.getCount());
}
}
// Remove the correct answer from the candidates
candidates.remove(strings.get(1), candidates.count(strings.get(1)));
if (candidates.elementSet().size() > CHOICES_COUNT - 2) {
final Set<String> invalidAnswers = Sets.newHashSet(
MultisetUtils.sortedElementList(candidates, CHOICES_COUNT - 1));
GapAnnotation gap = UIMAUtils.createGapAnnotation(aJCas,
ImmutableSet.of(strings.get(1)), invalidAnswers);
gap.setBegin(pos.get(1).getBegin());
gap.setEnd(pos.get(1).getEnd());
gap.addToIndexes();
}
}
}
|
duplicated documentation in PrepositionGapGenerator
|
lib/src/main/java/com/github/fhirschmann/clozegen/lib/annotators/en/PrepositionGapGenerator.java
|
duplicated documentation in PrepositionGapGenerator
|
<ide><path>ib/src/main/java/com/github/fhirschmann/clozegen/lib/annotators/en/PrepositionGapGenerator.java
<ide> * and {@link MultisetReader#parseMapMultiset}, which describe the format
<ide> * for {before|after}.txt and trigrams.txt, respectively.
<ide> *
<add> * The actual models can be generated using the
<add> *
<ide> * <p>[1] <b>J. Lee and S. Seneff</b>.<br/>
<ide> * Automatic generation of cloze items for prepositions.<br/>
<ide> * <i>In Eight Annual Conference of the International Speech Communication
<ide> @ConfigurationParameter(name = PARAM_MODEL_PATH, mandatory = true)
<ide> private String modelPath;
<ide>
<add> /** (A, p, B) - preposition surrounded by A, B. */
<ide> private Multiset<String> trigrams;
<add>
<add> /** (p, B) - prepositions comes second. */
<ide> private MapMultiset<String, String> after;
<add>
<add> /** (A, p) - prepositions comes first. */
<ide> private MapMultiset<String, String> before;
<ide>
<ide> private final static Joiner joiner = Joiner.on(" ");
|
|
Java
|
mit
|
063fee01c56aaa29360fee23d50d264235a9e572
| 0 |
fredyw/leetcode,fredyw/leetcode,fredyw/leetcode,fredyw/leetcode
|
package leetcode;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* https://leetcode.com/problems/substring-with-concatenation-of-all-words/
*/
public class Problem30 {
public List<Integer> findSubstring(String s, String[] words) {
List<Integer> result = new ArrayList<>();
int wordSize = words[0].length();
int totalSize = wordSize * words.length;
if (totalSize > s.length()) {
return result;
}
Map<String, Integer> map = new HashMap<>();
for (String word : words) {
if (!map.containsKey(word)) {
map.put(word, 1);
} else {
map.put(word, map.get(word) + 1);
}
}
List<String> removedWords = new ArrayList<>();
int i = 0;
while (i + totalSize <= s.length()) {
int j = i;
for (; j + wordSize <= s.length(); j++) {
String sub = s.substring(j, j + wordSize);
if (!map.containsKey(sub)) {
break;
} else {
int newCount = map.get(sub) - 1;
if (newCount == 0) {
map.remove(sub);
} else {
map.put(sub, newCount);
}
removedWords.add(sub);
}
j += wordSize - 1;
}
if (map.isEmpty()) {
result.add(i);
}
for (String word : removedWords) {
if (map.containsKey(word)) {
map.put(word, map.get(word) + 1);
} else {
map.put(word, 1);
}
}
removedWords.clear();
i++;
}
return result;
}
public static void main(String[] args) {
Problem30 prob = new Problem30();
System.out.println(prob.findSubstring("barfoothefoobarman", new String[]{"foo", "bar"})); // [0, 9]
System.out.println(prob.findSubstring("foobarthefoobarman", new String[]{"foo", "bar"})); // [0, 9]
System.out.println(prob.findSubstring("barbazthebarbarfoobaz", new String[]{"foo", "bar"})); // [12]
System.out.println(prob.findSubstring("barbazbebarbarfoobaz", new String[]{"foo", "bar"})); // [11]
System.out.println(prob.findSubstring("barbazthebarbarfoobaz", new String[]{"foo", "bar", "baz"})); // [12]
System.out.println(prob.findSubstring("barbazthebarbarfoobaz", new String[]{"foo", "bar", "bar"})); // [9]
System.out.println(prob.findSubstring("barfoofoobarthefoobarman", new String[]{"bar", "foo", "the"})); // [6, 9, 12]
System.out.println(prob.findSubstring("aaaaaaaa", new String[]{"aa", "aa", "aa"})); // [0, 1, 2]
System.out.println(prob.findSubstring("a", new String[]{"a"})); // [0]
}
}
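A short note on why the updated version always advances i by one instead of jumping ahead by wordSize after a match: with s = "aaaaaaaa" and words = ["aa", "aa", "aa"], valid concatenations start at the overlapping indices 0, 1 and 2, so skipping ahead after the match at 0 would miss the match at 1. A hedged trace of the first test case (indices inferred from the code above):

// s = "barfoothefoobarman", words = {"foo", "bar"}, wordSize = 3, totalSize = 6
// i = 0: windows "bar", "foo" empty the multiset -> result [0]
// i = 1..8: some window misses ("arf", "oot", "the", ...) so the multiset is never emptied
// i = 9: windows "foo", "bar" empty the multiset -> result [0, 9]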
|
src/main/java/leetcode/Problem30.java
|
package leetcode;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* https://leetcode.com/problems/substring-with-concatenation-of-all-words/
*/
public class Problem30 {
public List<Integer> findSubstring(String s, String[] words) {
Map<String, Integer> map = new HashMap<>();
for (String word : words) {
if (!map.containsKey(word)) {
map.put(word, 1);
} else {
map.put(word, map.get(word) + 1);
}
}
List<Integer> result = new ArrayList<>();
int wordSize = words[0].length();
List<String> removedWords = new ArrayList<>();
int i = 0;
while (i < s.length()) {
int j = i;
for (; j + wordSize <= s.length(); j++) {
String sub = s.substring(j, j + wordSize);
if (!map.containsKey(sub)) {
break;
} else {
int newCount = map.get(sub) - 1;
if (newCount == 0) {
map.remove(sub);
} else {
map.put(sub, newCount);
}
removedWords.add(sub);
}
j += wordSize - 1;
}
if (map.isEmpty()) {
result.add(i);
i = i + wordSize;
} else {
i++;
}
for (String word : removedWords) {
if (map.containsKey(word)) {
map.put(word, map.get(word) + 1);
} else {
map.put(word, 1);
}
}
removedWords.clear();
}
return result;
}
public static void main(String[] args) {
Problem30 prob = new Problem30();
System.out.println(prob.findSubstring("barfoothefoobarman", new String[]{"foo", "bar"})); // [0, 9]
System.out.println(prob.findSubstring("foobarthefoobarman", new String[]{"foo", "bar"})); // [0, 9]
System.out.println(prob.findSubstring("barbazthebarbarfoobaz", new String[]{"foo", "bar"})); // [12]
System.out.println(prob.findSubstring("barbazbebarbarfoobaz", new String[]{"foo", "bar"})); // [11]
System.out.println(prob.findSubstring("barbazthebarbarfoobaz", new String[]{"foo", "bar", "baz"})); // [12]
System.out.println(prob.findSubstring("barbazthebarbarfoobaz", new String[]{"foo", "bar", "bar"})); // [9]
System.out.println(prob.findSubstring("barfoofoobarthefoobarman", new String[]{"bar", "foo", "the"})); // [6, 9, 12]
}
}
|
Update problem 30
|
src/main/java/leetcode/Problem30.java
|
Update problem 30
|
<ide><path>rc/main/java/leetcode/Problem30.java
<ide> */
<ide> public class Problem30 {
<ide> public List<Integer> findSubstring(String s, String[] words) {
<add> List<Integer> result = new ArrayList<>();
<add> int wordSize = words[0].length();
<add> int totalSize = wordSize * words.length;
<add> if (totalSize > s.length()) {
<add> return result;
<add> }
<ide> Map<String, Integer> map = new HashMap<>();
<ide> for (String word : words) {
<ide> if (!map.containsKey(word)) {
<ide> map.put(word, map.get(word) + 1);
<ide> }
<ide> }
<del> List<Integer> result = new ArrayList<>();
<del> int wordSize = words[0].length();
<ide> List<String> removedWords = new ArrayList<>();
<ide> int i = 0;
<del> while (i < s.length()) {
<add> while (i + totalSize <= s.length()) {
<ide> int j = i;
<ide> for (; j + wordSize <= s.length(); j++) {
<ide> String sub = s.substring(j, j + wordSize);
<ide> }
<ide> if (map.isEmpty()) {
<ide> result.add(i);
<del> i = i + wordSize;
<del> } else {
<del> i++;
<ide> }
<ide> for (String word : removedWords) {
<ide> if (map.containsKey(word)) {
<ide> }
<ide> }
<ide> removedWords.clear();
<add> i++;
<ide> }
<ide> return result;
<ide> }
<ide> System.out.println(prob.findSubstring("barbazthebarbarfoobaz", new String[]{"foo", "bar", "baz"})); // [12]
<ide> System.out.println(prob.findSubstring("barbazthebarbarfoobaz", new String[]{"foo", "bar", "bar"})); // [9]
<ide> System.out.println(prob.findSubstring("barfoofoobarthefoobarman", new String[]{"bar", "foo", "the"})); // [6, 9, 12]
<add> System.out.println(prob.findSubstring("aaaaaaaa", new String[]{"aa", "aa", "aa"})); // [0, 1, 2]
<add> System.out.println(prob.findSubstring("a", new String[]{"a"})); // [0]
<ide> }
<ide> }
|
|
Java
|
mit
|
20883cc604bc4e4906a32e73effd4e2dfe27f94f
| 0 |
lukehutch/fast-classpath-scanner,classgraph/classgraph,lukehutch/fast-classpath-scanner
|
/*
* This file is part of FastClasspathScanner.
*
* Author: Luke Hutchison
*
* Hosted at: https://github.com/lukehutch/fast-classpath-scanner
*
* --
*
* The MIT License (MIT)
*
* Copyright (c) 2018 Luke Hutchison
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
* documentation files (the "Software"), to deal in the Software without restriction, including without
* limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be included in all copies or substantial
* portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT
* LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO
* EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN
* AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
* OR OTHER DEALINGS IN THE SOFTWARE.
*/
package io.github.lukehutch.fastclasspathscanner.typesignature;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Set;
import io.github.lukehutch.fastclasspathscanner.scanner.ClassInfo;
import io.github.lukehutch.fastclasspathscanner.typesignature.TypeUtils.ParseException;
import io.github.lukehutch.fastclasspathscanner.typesignature.TypeUtils.ParseState;
/** A method type signature (called "MethodSignature" in the classfile documentation). */
public class MethodTypeSignature extends HierarchicalTypeSignature {
/** The method type parameters. */
final List<TypeParameter> typeParameters;
/** The method parameter type signatures. */
private final List<TypeSignature> parameterTypeSignatures;
/** The method result type. */
private final TypeSignature resultType;
/** The throws type signatures. */
private final List<ClassRefOrTypeVariableSignature> throwsSignatures;
public MethodTypeSignature(final List<TypeParameter> typeParameters, final List<TypeSignature> paramTypes,
final TypeSignature resultType, final List<ClassRefOrTypeVariableSignature> throwsSignatures) {
this.typeParameters = typeParameters;
this.parameterTypeSignatures = paramTypes;
this.resultType = resultType;
this.throwsSignatures = throwsSignatures;
}
/** Get the type parameters for the method. */
public List<TypeParameter> getTypeParameters() {
return typeParameters;
}
/** Get the type signatures of the method parameters. */
public List<TypeSignature> getParameterTypeSignatures() {
return parameterTypeSignatures;
}
/** Get the result type for the method. */
public TypeSignature getResultType() {
return resultType;
}
/** Get the throws type(s) for the method. */
public List<ClassRefOrTypeVariableSignature> getThrowsSignatures() {
return throwsSignatures;
}
@Override
public void getAllReferencedClassNames(final Set<String> classNameListOut) {
for (final TypeParameter typeParameter : typeParameters) {
if (typeParameter != null) {
typeParameter.getAllReferencedClassNames(classNameListOut);
}
}
for (final TypeSignature typeSignature : parameterTypeSignatures) {
if (typeSignature != null) {
typeSignature.getAllReferencedClassNames(classNameListOut);
}
}
resultType.getAllReferencedClassNames(classNameListOut);
for (final ClassRefOrTypeVariableSignature typeSignature : throwsSignatures) {
if (typeSignature != null) {
typeSignature.getAllReferencedClassNames(classNameListOut);
}
}
}
@Override
public int hashCode() {
return typeParameters.hashCode() + parameterTypeSignatures.hashCode() * 7 + resultType.hashCode() * 15
+ throwsSignatures.hashCode() * 31;
}
@Override
public boolean equals(final Object obj) {
if (!(obj instanceof MethodTypeSignature)) {
return false;
}
final MethodTypeSignature o = (MethodTypeSignature) obj;
return o.typeParameters.equals(this.typeParameters)
&& o.parameterTypeSignatures.equals(this.parameterTypeSignatures)
&& o.resultType.equals(this.resultType) && o.throwsSignatures.equals(this.throwsSignatures);
}
@Override
public String toString() {
final StringBuilder buf = new StringBuilder();
if (!typeParameters.isEmpty()) {
buf.append('<');
for (int i = 0; i < typeParameters.size(); i++) {
if (i > 0) {
buf.append(", ");
}
final String typeParamStr = typeParameters.get(i).toString();
buf.append(typeParamStr);
}
buf.append('>');
}
if (buf.length() > 0) {
buf.append(' ');
}
buf.append(resultType.toString());
buf.append(" (");
for (int i = 0; i < parameterTypeSignatures.size(); i++) {
if (i > 0) {
buf.append(", ");
}
buf.append(parameterTypeSignatures.get(i).toString());
}
buf.append(')');
if (!throwsSignatures.isEmpty()) {
buf.append(" throws ");
for (int i = 0; i < throwsSignatures.size(); i++) {
if (i > 0) {
buf.append(", ");
}
buf.append(throwsSignatures.get(i).toString());
}
}
return buf.toString();
}
/**
* Parse a method signature (ignores class context, i.e. no ClassInfo needs to be provided -- this means that
* type variables cannot be resolved to the matching type parameter).
*/
public static MethodTypeSignature parse(final String typeDescriptor) {
return MethodTypeSignature.parse(/* classInfo = */ null, typeDescriptor);
}
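    // Illustrative usage sketch (editorial addition, not part of the original source):
    // parsing a plain JVM method descriptor is expected to work the same way as parsing
    // a generic signature, e.g.
    //   MethodTypeSignature sig = MethodTypeSignature.parse("(Ljava/lang/String;)V");
    //   sig.getParameterTypeSignatures(); // one entry: java.lang.String
    //   sig.getResultType();              // the void result type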
/** Parse a method signature. */
public static MethodTypeSignature parse(final ClassInfo classInfo, final String typeDescriptor) {
final ParseState parseState = new ParseState(typeDescriptor);
try {
final List<TypeParameter> typeParameters = TypeParameter.parseList(parseState);
parseState.expect('(');
final List<TypeSignature> paramTypes = new ArrayList<>();
while (parseState.peek() != ')') {
if (!parseState.hasMore()) {
throw new ParseException();
}
final TypeSignature paramType = TypeSignature.parse(parseState);
if (paramType == null) {
throw new ParseException();
}
paramTypes.add(paramType);
}
parseState.expect(')');
final TypeSignature resultType = TypeSignature.parse(parseState);
if (resultType == null) {
throw new ParseException();
}
List<ClassRefOrTypeVariableSignature> throwsSignatures;
if (parseState.peek() == '^') {
throwsSignatures = new ArrayList<>();
while (parseState.peek() == '^') {
parseState.expect('^');
final ClassRefTypeSignature classTypeSignature = ClassRefTypeSignature.parse(parseState);
if (classTypeSignature != null) {
throwsSignatures.add(classTypeSignature);
} else {
final TypeVariableSignature typeVariableSignature = TypeVariableSignature.parse(parseState);
if (typeVariableSignature != null) {
throwsSignatures.add(typeVariableSignature);
} else {
throw new ParseException();
}
}
}
} else {
throwsSignatures = Collections.emptyList();
}
if (parseState.hasMore()) {
throw new IllegalArgumentException("Extra characters at end of type descriptor: " + parseState);
}
final MethodTypeSignature methodSignature = new MethodTypeSignature(typeParameters, paramTypes,
resultType, throwsSignatures);
// Add back-links from type variable signature to the method signature it is part of,
// and to the enclosing class' type signature
for (final TypeVariableSignature typeVariableSignature : parseState.getTypeVariableSignatures()) {
typeVariableSignature.containingMethodSignature = methodSignature;
}
if (classInfo != null) {
final ClassTypeSignature classSignature = classInfo.getTypeSignature();
for (final TypeVariableSignature typeVariableSignature : parseState.getTypeVariableSignatures()) {
typeVariableSignature.containingClassSignature = classSignature;
}
}
return methodSignature;
} catch (final Exception e) {
throw new IllegalArgumentException("Type signature could not be parsed: " + parseState, e);
}
}
}
|
src/main/java/io/github/lukehutch/fastclasspathscanner/typesignature/MethodTypeSignature.java
|
/*
* This file is part of FastClasspathScanner.
*
* Author: Luke Hutchison
*
* Hosted at: https://github.com/lukehutch/fast-classpath-scanner
*
* --
*
* The MIT License (MIT)
*
* Copyright (c) 2018 Luke Hutchison
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
* documentation files (the "Software"), to deal in the Software without restriction, including without
* limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be included in all copies or substantial
* portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT
* LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO
* EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN
* AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
* OR OTHER DEALINGS IN THE SOFTWARE.
*/
package io.github.lukehutch.fastclasspathscanner.typesignature;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Set;
import io.github.lukehutch.fastclasspathscanner.scanner.ClassInfo;
import io.github.lukehutch.fastclasspathscanner.typesignature.TypeUtils.ParseException;
import io.github.lukehutch.fastclasspathscanner.typesignature.TypeUtils.ParseState;
import io.github.lukehutch.fastclasspathscanner.utils.AdditionOrderedSet;
/** A method type signature (called "MethodSignature" in the classfile documentation). */
public class MethodTypeSignature extends HierarchicalTypeSignature {
/** The method type parameters. */
final List<TypeParameter> typeParameters;
/** The method parameter type signatures. */
private final List<TypeSignature> parameterTypeSignatures;
/** The method result type. */
private final TypeSignature resultType;
/** The throws type signatures. */
private final List<ClassRefOrTypeVariableSignature> throwsSignatures;
public MethodTypeSignature(final List<TypeParameter> typeParameters, final List<TypeSignature> paramTypes,
final TypeSignature resultType, final List<ClassRefOrTypeVariableSignature> throwsSignatures) {
this.typeParameters = typeParameters;
this.parameterTypeSignatures = paramTypes;
this.resultType = resultType;
this.throwsSignatures = throwsSignatures;
}
/** Get the type parameters for the method. */
public List<TypeParameter> getTypeParameters() {
return typeParameters;
}
/** Get the type signatures of the method parameters. */
public List<TypeSignature> getParameterTypeSignatures() {
return parameterTypeSignatures;
}
/** Get the result type for the method. */
public TypeSignature getResultType() {
return resultType;
}
/** Get the throws type(s) for the method. */
public List<ClassRefOrTypeVariableSignature> getThrowsSignatures() {
return throwsSignatures;
}
@Override
public void getAllReferencedClassNames(final Set<String> classNameListOut) {
for (final TypeParameter typeParameter : typeParameters) {
if (typeParameter != null) {
typeParameter.getAllReferencedClassNames(classNameListOut);
}
}
for (final TypeSignature typeSignature : parameterTypeSignatures) {
if (typeSignature != null) {
typeSignature.getAllReferencedClassNames(classNameListOut);
}
}
resultType.getAllReferencedClassNames(classNameListOut);
for (final ClassRefOrTypeVariableSignature typeSignature : throwsSignatures) {
if (typeSignature != null) {
typeSignature.getAllReferencedClassNames(classNameListOut);
}
}
}
@Override
public int hashCode() {
return typeParameters.hashCode() + parameterTypeSignatures.hashCode() * 7 + resultType.hashCode() * 15
+ throwsSignatures.hashCode() * 31;
}
@Override
public boolean equals(final Object obj) {
if (!(obj instanceof MethodTypeSignature)) {
return false;
}
final MethodTypeSignature o = (MethodTypeSignature) obj;
return o.typeParameters.equals(this.typeParameters)
&& o.parameterTypeSignatures.equals(this.parameterTypeSignatures)
&& o.resultType.equals(this.resultType) && o.throwsSignatures.equals(this.throwsSignatures);
}
@Override
public String toString() {
final StringBuilder buf = new StringBuilder();
if (!typeParameters.isEmpty()) {
buf.append('<');
for (int i = 0; i < typeParameters.size(); i++) {
if (i > 0) {
buf.append(", ");
}
final String typeParamStr = typeParameters.get(i).toString();
buf.append(typeParamStr);
}
buf.append('>');
}
if (buf.length() > 0) {
buf.append(' ');
}
buf.append(resultType.toString());
buf.append(" (");
for (int i = 0; i < parameterTypeSignatures.size(); i++) {
if (i > 0) {
buf.append(", ");
}
buf.append(parameterTypeSignatures.get(i).toString());
}
buf.append(')');
if (!throwsSignatures.isEmpty()) {
buf.append(" throws ");
for (int i = 0; i < throwsSignatures.size(); i++) {
if (i > 0) {
buf.append(", ");
}
buf.append(throwsSignatures.get(i).toString());
}
}
return buf.toString();
}
/**
* Merge together programmer-view and JDK-internal method type signatures.
*
* @param methodTypeSignature
* The programmer-view type signature, with type parameters where possible, and without synthetic
* parameters.
* @param methodTypeSignatureInternal
* The JDK-internal type signature, without type parameters, but including synthetic parameters, if
* any.
* @param parameterAccessFlags
* The parameter modifiers for parameters in the JDK-internal type signature.
* @return A MethodSignature consisting of all information from both type signatures.
*/
public static MethodTypeSignature merge(final MethodTypeSignature methodTypeSignature,
final MethodTypeSignature methodTypeSignatureInternal, final int[] parameterAccessFlagsInternal) {
if (methodTypeSignature == null || methodTypeSignatureInternal == null) {
throw new IllegalArgumentException("Signatures must be non-null");
}
if (!methodTypeSignatureInternal.typeParameters.isEmpty()) {
throw new IllegalArgumentException("typeSignatureInternal.typeParameters should be empty");
}
if (!methodTypeSignatureInternal.resultType.equalsIgnoringTypeParams(methodTypeSignature.resultType)) {
throw new IllegalArgumentException("Result types could not be reconciled: "
+ methodTypeSignatureInternal.resultType + " vs. " + methodTypeSignature.resultType);
}
// parameterAccessFlags is only available in classfiles compiled in JDK8 or above using
// the -parameters commandline switch, or code compiled with Kotlin or some other language
if (parameterAccessFlagsInternal != null
&& parameterAccessFlagsInternal.length != methodTypeSignatureInternal.parameterTypeSignatures
.size()) {
throw new IllegalArgumentException(
"Parameter arity mismatch between access flags and internal param types");
}
List<TypeSignature> mergedParamTypes;
if (parameterAccessFlagsInternal == null) {
// If there are no parameter access flags, there must be no difference in the number
// of parameters between the JDK-internal and programmer-visible type signature
// (i.e. if there are synthetic parameters, then the classfile should specify
// this by adding the parameter modifier flags section to the method attributes).
// It's possible this is not always true, so if this exception is thrown, please
// report a bug in the GitHub bug tracker.
if (methodTypeSignature.parameterTypeSignatures
.size() != methodTypeSignatureInternal.parameterTypeSignatures.size()) {
throw new IllegalArgumentException("Unexpected mismatch in method paramTypes arity");
}
// Use the programmer-visible paramTypes, since these will have type info if it is available
mergedParamTypes = methodTypeSignature.parameterTypeSignatures;
} else {
mergedParamTypes = new ArrayList<>(methodTypeSignatureInternal.parameterTypeSignatures.size());
int internalParamIdx = 0;
int paramIdx = 0;
for (; internalParamIdx < methodTypeSignatureInternal.parameterTypeSignatures
.size(); internalParamIdx++) {
if ((parameterAccessFlagsInternal[internalParamIdx]
& (TypeUtils.MODIFIER_SYNTHETIC | TypeUtils.MODIFIER_MANDATED)) != 0) {
// This parameter is present in JDK-internal type signature, but not in the
// programmer-visible signature. This should only be true for synthetic
// parameters, and they should not have any type parameters, due to type
// erasure.
mergedParamTypes.add(methodTypeSignatureInternal.parameterTypeSignatures.get(internalParamIdx));
} else {
if (paramIdx == methodTypeSignature.parameterTypeSignatures.size()) {
// Shouldn't happen
throw new IllegalArgumentException(
"Ran out of parameters in programmer-visible type signature");
}
// This parameter should be present in both type signatures, and the types
// should be the same, ignoring any type parameters.
final TypeSignature paramTypeSignature = methodTypeSignature.parameterTypeSignatures
.get(paramIdx++);
final TypeSignature paramTypeSignatureInternal = //
methodTypeSignatureInternal.parameterTypeSignatures.get(internalParamIdx);
if (!paramTypeSignature.equalsIgnoringTypeParams(paramTypeSignatureInternal)) {
throw new IllegalArgumentException(
"Corresponding type parameters in type signatures do not refer to the same bare "
+ "types: " + paramTypeSignature + " [from method signature "
+ methodTypeSignature + "] vs. " + paramTypeSignatureInternal
+ " [from method signature " + methodTypeSignatureInternal + "]");
}
// The programmer-visible parameter should always have more type information, if available
mergedParamTypes.add(paramTypeSignature);
}
}
if (paramIdx < methodTypeSignature.parameterTypeSignatures.size()) {
throw new IllegalArgumentException(
"Parameter arity mismatch between internal and programmer-visible type signature");
}
}
List<ClassRefOrTypeVariableSignature> mergedThrowsSignatures;
if (methodTypeSignature.throwsSignatures.isEmpty()) {
mergedThrowsSignatures = methodTypeSignatureInternal.throwsSignatures;
} else if (methodTypeSignatureInternal.throwsSignatures.isEmpty()
|| methodTypeSignature.throwsSignatures.equals(methodTypeSignatureInternal.throwsSignatures)) {
mergedThrowsSignatures = methodTypeSignature.throwsSignatures;
} else {
final AdditionOrderedSet<ClassRefOrTypeVariableSignature> sigSet = new AdditionOrderedSet<>(
methodTypeSignature.throwsSignatures);
sigSet.addAll(methodTypeSignatureInternal.throwsSignatures);
mergedThrowsSignatures = sigSet.toList();
}
return new MethodTypeSignature(
// Use the programmer-view of type parameters (the JDK-internal view should have no type params)
methodTypeSignature.typeParameters,
// Merged parameter types
mergedParamTypes,
// Use the programmer-view of result type, in case there is a type parameter
methodTypeSignature.resultType,
// Merged throws signatures
mergedThrowsSignatures);
}
/**
* Parse a method signature (ignores class context, i.e. no ClassInfo needs to be provided -- this means that
* type variables cannot be resolved to the matching type parameter).
*/
public static MethodTypeSignature parse(final String typeDescriptor) {
return MethodTypeSignature.parse(/* classInfo = */ null, typeDescriptor);
}
/** Parse a method signature. */
public static MethodTypeSignature parse(final ClassInfo classInfo, final String typeDescriptor) {
final ParseState parseState = new ParseState(typeDescriptor);
try {
final List<TypeParameter> typeParameters = TypeParameter.parseList(parseState);
parseState.expect('(');
final List<TypeSignature> paramTypes = new ArrayList<>();
while (parseState.peek() != ')') {
if (!parseState.hasMore()) {
throw new ParseException();
}
final TypeSignature paramType = TypeSignature.parse(parseState);
if (paramType == null) {
throw new ParseException();
}
paramTypes.add(paramType);
}
parseState.expect(')');
final TypeSignature resultType = TypeSignature.parse(parseState);
if (resultType == null) {
throw new ParseException();
}
List<ClassRefOrTypeVariableSignature> throwsSignatures;
if (parseState.peek() == '^') {
throwsSignatures = new ArrayList<>();
while (parseState.peek() == '^') {
parseState.expect('^');
final ClassRefTypeSignature classTypeSignature = ClassRefTypeSignature.parse(parseState);
if (classTypeSignature != null) {
throwsSignatures.add(classTypeSignature);
} else {
final TypeVariableSignature typeVariableSignature = TypeVariableSignature.parse(parseState);
if (typeVariableSignature != null) {
throwsSignatures.add(typeVariableSignature);
} else {
throw new ParseException();
}
}
}
} else {
throwsSignatures = Collections.emptyList();
}
if (parseState.hasMore()) {
throw new IllegalArgumentException("Extra characters at end of type descriptor: " + parseState);
}
final MethodTypeSignature methodSignature = new MethodTypeSignature(typeParameters, paramTypes,
resultType, throwsSignatures);
// Add back-links from type variable signature to the method signature it is part of,
// and to the enclosing class' type signature
for (final TypeVariableSignature typeVariableSignature : parseState.getTypeVariableSignatures()) {
typeVariableSignature.containingMethodSignature = methodSignature;
}
if (classInfo != null) {
final ClassTypeSignature classSignature = classInfo.getTypeSignature();
for (final TypeVariableSignature typeVariableSignature : parseState.getTypeVariableSignatures()) {
typeVariableSignature.containingClassSignature = classSignature;
}
}
return methodSignature;
} catch (final Exception e) {
throw new IllegalArgumentException("Type signature could not be parsed: " + parseState, e);
}
}
}
|
Remove unused method
|
src/main/java/io/github/lukehutch/fastclasspathscanner/typesignature/MethodTypeSignature.java
|
Remove unused method
|
<ide><path>rc/main/java/io/github/lukehutch/fastclasspathscanner/typesignature/MethodTypeSignature.java
<ide> import io.github.lukehutch.fastclasspathscanner.scanner.ClassInfo;
<ide> import io.github.lukehutch.fastclasspathscanner.typesignature.TypeUtils.ParseException;
<ide> import io.github.lukehutch.fastclasspathscanner.typesignature.TypeUtils.ParseState;
<del>import io.github.lukehutch.fastclasspathscanner.utils.AdditionOrderedSet;
<ide>
<ide> /** A method type signature (called "MethodSignature" in the classfile documentation). */
<ide> public class MethodTypeSignature extends HierarchicalTypeSignature {
<ide> }
<ide> }
<ide> return buf.toString();
<del> }
<del>
<del> /**
<del> * Merge together programmer-view and JDK-internal method type signatures.
<del> *
<del> * @param methodTypeSignature
<del> * The programmer-view type signature, with type parameters where possible, and without synthetic
<del> * parameters.
<del> * @param methodTypeSignatureInternal
<del> * The JDK-internal type signature, without type parameters, but including synthetic parameters, if
<del> * any.
<del> * @param parameterAccessFlags
<del> * The parameter modifiers for parameters in the JDK-internal type signature.
<del> * @return A MethodSignature consisting of all information from both type signatures.
<del> */
<del> public static MethodTypeSignature merge(final MethodTypeSignature methodTypeSignature,
<del> final MethodTypeSignature methodTypeSignatureInternal, final int[] parameterAccessFlagsInternal) {
<del> if (methodTypeSignature == null || methodTypeSignatureInternal == null) {
<del> throw new IllegalArgumentException("Signatures must be non-null");
<del> }
<del> if (!methodTypeSignatureInternal.typeParameters.isEmpty()) {
<del> throw new IllegalArgumentException("typeSignatureInternal.typeParameters should be empty");
<del> }
<del> if (!methodTypeSignatureInternal.resultType.equalsIgnoringTypeParams(methodTypeSignature.resultType)) {
<del> throw new IllegalArgumentException("Result types could not be reconciled: "
<del> + methodTypeSignatureInternal.resultType + " vs. " + methodTypeSignature.resultType);
<del> }
<del> // parameterAccessFlags is only available in classfiles compiled in JDK8 or above using
<del> // the -parameters commandline switch, or code compiled with Kotlin or some other language
<del> if (parameterAccessFlagsInternal != null
<del> && parameterAccessFlagsInternal.length != methodTypeSignatureInternal.parameterTypeSignatures
<del> .size()) {
<del> throw new IllegalArgumentException(
<del> "Parameter arity mismatch between access flags and internal param types");
<del> }
<del> List<TypeSignature> mergedParamTypes;
<del> if (parameterAccessFlagsInternal == null) {
<del> // If there are no parameter access flags, there must be no difference in the number
<del> // of parameters between the JDK-internal and programmer-visible type signature
<del> // (i.e. if there are synthetic parameters, then the classfile should specify
<del> // this by adding the parameter modifier flags section to the method attributes).
<del> // It's possible this is not always true, so if this exception is thrown, please
<del> // report a bug in the GitHub bug tracker.
<del> if (methodTypeSignature.parameterTypeSignatures
<del> .size() != methodTypeSignatureInternal.parameterTypeSignatures.size()) {
<del> throw new IllegalArgumentException("Unexpected mismatch in method paramTypes arity");
<del> }
<del> // Use the programmer-visible paramTypes, since these will have type info if it is available
<del> mergedParamTypes = methodTypeSignature.parameterTypeSignatures;
<del> } else {
<del> mergedParamTypes = new ArrayList<>(methodTypeSignatureInternal.parameterTypeSignatures.size());
<del> int internalParamIdx = 0;
<del> int paramIdx = 0;
<del> for (; internalParamIdx < methodTypeSignatureInternal.parameterTypeSignatures
<del> .size(); internalParamIdx++) {
<del> if ((parameterAccessFlagsInternal[internalParamIdx]
<del> & (TypeUtils.MODIFIER_SYNTHETIC | TypeUtils.MODIFIER_MANDATED)) != 0) {
<del> // This parameter is present in JDK-internal type signature, but not in the
<del> // programmer-visible signature. This should only be true for synthetic
<del> // parameters, and they should not have any type parameters, due to type
<del> // erasure.
<del> mergedParamTypes.add(methodTypeSignatureInternal.parameterTypeSignatures.get(internalParamIdx));
<del> } else {
<del> if (paramIdx == methodTypeSignature.parameterTypeSignatures.size()) {
<del> // Shouldn't happen
<del> throw new IllegalArgumentException(
<del> "Ran out of parameters in programmer-visible type signature");
<del> }
<del> // This parameter should be present in both type signatures, and the types
<del> // should be the same, ignoring any type parameters.
<del> final TypeSignature paramTypeSignature = methodTypeSignature.parameterTypeSignatures
<del> .get(paramIdx++);
<del> final TypeSignature paramTypeSignatureInternal = //
<del> methodTypeSignatureInternal.parameterTypeSignatures.get(internalParamIdx);
<del> if (!paramTypeSignature.equalsIgnoringTypeParams(paramTypeSignatureInternal)) {
<del> throw new IllegalArgumentException(
<del> "Corresponding type parameters in type signatures do not refer to the same bare "
<del> + "types: " + paramTypeSignature + " [from method signature "
<del> + methodTypeSignature + "] vs. " + paramTypeSignatureInternal
<del> + " [from method signature " + methodTypeSignatureInternal + "]");
<del> }
<del> // The programmer-visible parameter should always have more type information, if available
<del> mergedParamTypes.add(paramTypeSignature);
<del> }
<del> }
<del> if (paramIdx < methodTypeSignature.parameterTypeSignatures.size()) {
<del> throw new IllegalArgumentException(
<del> "Parameter arity mismatch between internal and programmer-visible type signature");
<del> }
<del> }
<del> List<ClassRefOrTypeVariableSignature> mergedThrowsSignatures;
<del> if (methodTypeSignature.throwsSignatures.isEmpty()) {
<del> mergedThrowsSignatures = methodTypeSignatureInternal.throwsSignatures;
<del> } else if (methodTypeSignatureInternal.throwsSignatures.isEmpty()
<del> || methodTypeSignature.throwsSignatures.equals(methodTypeSignatureInternal.throwsSignatures)) {
<del> mergedThrowsSignatures = methodTypeSignature.throwsSignatures;
<del> } else {
<del> final AdditionOrderedSet<ClassRefOrTypeVariableSignature> sigSet = new AdditionOrderedSet<>(
<del> methodTypeSignature.throwsSignatures);
<del> sigSet.addAll(methodTypeSignatureInternal.throwsSignatures);
<del> mergedThrowsSignatures = sigSet.toList();
<del> }
<del> return new MethodTypeSignature(
<del> // Use the programmer-view of type parameters (the JDK-internal view should have no type params)
<del> methodTypeSignature.typeParameters,
<del> // Merged parameter types
<del> mergedParamTypes,
<del> // Use the programmer-view of result type, in case there is a type parameter
<del> methodTypeSignature.resultType,
<del> // Merged throws signatures
<del> mergedThrowsSignatures);
<ide> }
<ide>
<ide> /**
|
|
Java
|
apache-2.0
|
469a73e7c9abbf402d5856904555873a6e1b9950
| 0 |
reportportal/service-api,reportportal/service-api,reportportal/service-api,reportportal/service-api,reportportal/service-api
|
/*
* Copyright 2019 EPAM Systems
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.epam.ta.reportportal.ws.converter.builders;
import com.epam.ta.reportportal.commons.EntityUtils;
import com.epam.ta.reportportal.entity.ItemAttribute;
import com.epam.ta.reportportal.entity.enums.LaunchModeEnum;
import com.epam.ta.reportportal.entity.enums.StatusEnum;
import com.epam.ta.reportportal.entity.launch.Launch;
import com.epam.ta.reportportal.exception.ReportPortalException;
import com.epam.ta.reportportal.ws.model.ErrorType;
import com.epam.ta.reportportal.ws.model.attribute.ItemAttributeResource;
import com.epam.ta.reportportal.ws.model.attribute.ItemAttributesRQ;
import com.epam.ta.reportportal.ws.model.launch.Mode;
import com.epam.ta.reportportal.ws.model.launch.StartLaunchRQ;
import com.google.common.base.Preconditions;
import org.apache.commons.lang3.StringUtils;
import java.util.Date;
import java.util.Optional;
import java.util.Set;
import java.util.UUID;
import java.util.function.Supplier;
import java.util.stream.Collectors;
import static com.epam.ta.reportportal.ws.converter.converters.ItemAttributeConverter.FROM_RESOURCE;
import static java.util.Optional.ofNullable;
public class LaunchBuilder implements Supplier<Launch> {
private static final int LAUNCH_DESCRIPTION_LENGTH_LIMIT = 16000;
private static final int DESCRIPTION_START_SYMBOL_INDEX = 0;
private Launch launch;
public LaunchBuilder() {
this.launch = new Launch();
}
public LaunchBuilder(Launch launch) {
this.launch = launch;
}
public LaunchBuilder addStartRQ(StartLaunchRQ request) {
Preconditions.checkNotNull(request, ErrorType.BAD_REQUEST_ERROR);
launch.setStartTime(EntityUtils.TO_LOCAL_DATE_TIME.apply(request.getStartTime()));
launch.setName(request.getName().trim());
launch.setStatus(StatusEnum.IN_PROGRESS);
launch.setUuid(Optional.ofNullable(request.getUuid()).orElse(UUID.randomUUID().toString()));
addDescription(request.getDescription());
LaunchModeEnum.findByName(ofNullable(request.getMode()).map(Enum::name).orElse(LaunchModeEnum.DEFAULT.name()))
.ifPresent(it -> launch.setMode(it));
return this;
}
public LaunchBuilder addDescription(String description) {
ofNullable(description).ifPresent(it -> launch.setDescription(StringUtils.substring(it.trim(),
DESCRIPTION_START_SYMBOL_INDEX,
LAUNCH_DESCRIPTION_LENGTH_LIMIT
)));
return this;
}
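	// Editorial note: the description is trimmed and then truncated, so e.g.
	//   new LaunchBuilder().addDescription(veryLongText)
	// keeps only the first LAUNCH_DESCRIPTION_LENGTH_LIMIT (16000) characters.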
public LaunchBuilder addUserId(Long userId) {
launch.setUserId(userId);
return this;
}
public LaunchBuilder addProject(Long projectId) {
launch.setProjectId(projectId);
return this;
}
public LaunchBuilder addAttribute(ItemAttributeResource attributeResource) {
ItemAttribute itemAttribute = FROM_RESOURCE.apply(attributeResource);
itemAttribute.setLaunch(launch);
launch.getAttributes().add(itemAttribute);
return this;
}
public LaunchBuilder addAttributes(Set<ItemAttributesRQ> attributes) {
ofNullable(attributes).ifPresent(it -> launch.getAttributes().addAll(it.stream().map(val -> {
ItemAttribute itemAttribute = FROM_RESOURCE.apply(val);
itemAttribute.setLaunch(launch);
return itemAttribute;
}).collect(Collectors.toSet())));
return this;
}
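	/** Replaces the launch attributes with the given resources, preserving existing system attributes. */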
public LaunchBuilder overwriteAttributes(Set<ItemAttributeResource> attributes) {
if (attributes != null) {
final Set<ItemAttribute> overwrittenAttributes = launch.getAttributes()
.stream()
.filter(ItemAttribute::isSystem)
.collect(Collectors.toSet());
attributes.stream().map(val -> {
ItemAttribute itemAttribute = FROM_RESOURCE.apply(val);
itemAttribute.setLaunch(launch);
return itemAttribute;
}).forEach(overwrittenAttributes::add);
launch.setAttributes(overwrittenAttributes);
}
return this;
}
public LaunchBuilder addMode(Mode mode) {
ofNullable(mode).ifPresent(it -> launch.setMode(LaunchModeEnum.valueOf(it.name())));
return this;
}
public LaunchBuilder addStatus(String status) {
launch.setStatus(StatusEnum.fromValue(status).orElseThrow(() -> new ReportPortalException(ErrorType.INCORRECT_FINISH_STATUS)));
return this;
}
public LaunchBuilder addEndTime(Date date) {
launch.setEndTime(EntityUtils.TO_LOCAL_DATE_TIME.apply(date));
return this;
}
@Override
public Launch get() {
return launch;
}
}
|
src/main/java/com/epam/ta/reportportal/ws/converter/builders/LaunchBuilder.java
|
/*
* Copyright 2019 EPAM Systems
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.epam.ta.reportportal.ws.converter.builders;
import com.epam.ta.reportportal.commons.EntityUtils;
import com.epam.ta.reportportal.entity.ItemAttribute;
import com.epam.ta.reportportal.entity.enums.LaunchModeEnum;
import com.epam.ta.reportportal.entity.enums.StatusEnum;
import com.epam.ta.reportportal.entity.launch.Launch;
import com.epam.ta.reportportal.exception.ReportPortalException;
import com.epam.ta.reportportal.ws.model.ErrorType;
import com.epam.ta.reportportal.ws.model.attribute.ItemAttributeResource;
import com.epam.ta.reportportal.ws.model.attribute.ItemAttributesRQ;
import com.epam.ta.reportportal.ws.model.launch.Mode;
import com.epam.ta.reportportal.ws.model.launch.StartLaunchRQ;
import com.google.common.base.Preconditions;
import org.apache.commons.lang3.StringUtils;
import java.util.Date;
import java.util.Optional;
import java.util.Set;
import java.util.UUID;
import java.util.function.Supplier;
import java.util.stream.Collectors;
import static com.epam.ta.reportportal.ws.converter.converters.ItemAttributeConverter.FROM_RESOURCE;
import static java.util.Optional.ofNullable;
public class LaunchBuilder implements Supplier<Launch> {
private static final int LAUNCH_DESCRIPTION_LENGTH_LIMIT = 1024;
private static final int DESCRIPTION_START_SYMBOL_INDEX = 0;
private Launch launch;
public LaunchBuilder() {
this.launch = new Launch();
}
public LaunchBuilder(Launch launch) {
this.launch = launch;
}
public LaunchBuilder addStartRQ(StartLaunchRQ request) {
Preconditions.checkNotNull(request, ErrorType.BAD_REQUEST_ERROR);
launch.setStartTime(EntityUtils.TO_LOCAL_DATE_TIME.apply(request.getStartTime()));
launch.setName(request.getName().trim());
launch.setStatus(StatusEnum.IN_PROGRESS);
launch.setUuid(Optional.ofNullable(request.getUuid()).orElse(UUID.randomUUID().toString()));
addDescription(request.getDescription());
LaunchModeEnum.findByName(ofNullable(request.getMode()).map(Enum::name).orElse(LaunchModeEnum.DEFAULT.name()))
.ifPresent(it -> launch.setMode(it));
return this;
}
public LaunchBuilder addDescription(String description) {
ofNullable(description).ifPresent(it -> launch.setDescription(StringUtils.substring(it.trim(),
DESCRIPTION_START_SYMBOL_INDEX,
LAUNCH_DESCRIPTION_LENGTH_LIMIT
)));
return this;
}
public LaunchBuilder addUserId(Long userId) {
launch.setUserId(userId);
return this;
}
public LaunchBuilder addProject(Long projectId) {
launch.setProjectId(projectId);
return this;
}
public LaunchBuilder addAttribute(ItemAttributeResource attributeResource) {
ItemAttribute itemAttribute = FROM_RESOURCE.apply(attributeResource);
itemAttribute.setLaunch(launch);
launch.getAttributes().add(itemAttribute);
return this;
}
public LaunchBuilder addAttributes(Set<ItemAttributesRQ> attributes) {
ofNullable(attributes).ifPresent(it -> launch.getAttributes().addAll(it.stream().map(val -> {
ItemAttribute itemAttribute = FROM_RESOURCE.apply(val);
itemAttribute.setLaunch(launch);
return itemAttribute;
}).collect(Collectors.toSet())));
return this;
}
public LaunchBuilder overwriteAttributes(Set<ItemAttributeResource> attributes) {
if (attributes != null) {
final Set<ItemAttribute> overwrittenAttributes = launch.getAttributes()
.stream()
.filter(ItemAttribute::isSystem)
.collect(Collectors.toSet());
attributes.stream().map(val -> {
ItemAttribute itemAttribute = FROM_RESOURCE.apply(val);
itemAttribute.setLaunch(launch);
return itemAttribute;
}).forEach(overwrittenAttributes::add);
launch.setAttributes(overwrittenAttributes);
}
return this;
}
public LaunchBuilder addMode(Mode mode) {
ofNullable(mode).ifPresent(it -> launch.setMode(LaunchModeEnum.valueOf(it.name())));
return this;
}
public LaunchBuilder addStatus(String status) {
launch.setStatus(StatusEnum.fromValue(status).orElseThrow(() -> new ReportPortalException(ErrorType.INCORRECT_FINISH_STATUS)));
return this;
}
public LaunchBuilder addEndTime(Date date) {
launch.setEndTime(EntityUtils.TO_LOCAL_DATE_TIME.apply(date));
return this;
}
@Override
public Launch get() {
return launch;
}
}
|
EPMRPP-77779 || updated limit on service api
|
src/main/java/com/epam/ta/reportportal/ws/converter/builders/LaunchBuilder.java
|
EPMRPP-77779 || updated limit on service api
|
<ide><path>rc/main/java/com/epam/ta/reportportal/ws/converter/builders/LaunchBuilder.java
<ide>
<ide> public class LaunchBuilder implements Supplier<Launch> {
<ide>
<del> private static final int LAUNCH_DESCRIPTION_LENGTH_LIMIT = 1024;
<add> private static final int LAUNCH_DESCRIPTION_LENGTH_LIMIT = 16000;
<ide> private static final int DESCRIPTION_START_SYMBOL_INDEX = 0;
<ide>
<ide> private Launch launch;
|
|
Java
|
apache-2.0
|
b2fde0f112219e864f6a7f1e9afc88c441058d5c
| 0 |
menyp/SG_Android_Appium
|
package Native;
import org.testng.annotations.AfterSuite;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import io.appium.java_client.MobileBy;
import io.appium.java_client.MobileElement;
import io.appium.java_client.android.AndroidDriver;
import io.appium.java_client.android.AndroidKeyCode;
import io.appium.java_client.ios.IOSDriver;
import io.appium.java_client.pagefactory.WithTimeout;
import io.appium.java_client.pagefactory.iOSFindBy;
import java.io.IOException;
import java.util.List;
import java.util.concurrent.TimeUnit;
import javax.xml.parsers.ParserConfigurationException;
import org.openqa.selenium.By;
import org.openqa.selenium.WebDriverException;
import org.testng.ITestContext;
import org.testng.annotations.BeforeSuite;
import org.xml.sax.SAXException;
import com.applitools.eyes.Eyes;
//MobileElement e2; // the test will wait for this element for up to 20 seconds
public class SanityAndroid {
@WithTimeout(time = 30, unit = TimeUnit.SECONDS)
@iOSFindBy (id = "relevant id need to be added here")
String currentDateFolder;
String webElementXmlLang;
String webElementXmlPath;
String StartServerPath;
String StopServerPath;
String appIdentifier;
Boolean skipfailure = true;
AndroidDriver<MobileElement> driver;
AndroidMethods genMeth = new AndroidMethods();
Eyes eyes = new Eyes();
Boolean useEye = true;
AndroidElements DroidData;
@BeforeSuite(alwaysRun = true)
public void setupBeforeSuite(ITestContext context) throws ParserConfigurationException, SAXException, IOException, InterruptedException, jdk.internal.org.xml.sax.SAXException {
// This is your api key, make sure you use it in all your tests.
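		// e.g. (hypothetical, if Applitools were configured directly here): eyes.setApiKey("<your-api-key>");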
//Set the tests configuration
StartServerPath = genMeth.getValueFromPropFile("StartServerPath");
StopServerPath = genMeth.getValueFromPropFile("StopServerPath");
webElementXmlPath = genMeth.getValueFromPropFile("webElementXmlPath");
webElementXmlLang = genMeth.getValueFromPropFile("webElementXmlLang");
appIdentifier = genMeth.getValueFromPropFile("appIdentifier");
//DroidData= new IosElements(webElementXmlLang, webElementXmlPath);
DroidData = genMeth.setElements(webElementXmlPath, webElementXmlLang);
driver = genMeth.setCapabilitiesAndroid(genMeth);
genMeth.cleanLoginDroid(genMeth, DroidData.userQA, DroidData.passwordQA);
}
@BeforeMethod (alwaysRun = true)
public void checkHomeScreen() throws InterruptedException, IOException, ParserConfigurationException, SAXException, jdk.internal.org.xml.sax.SAXException{
// Check if the client still logged in & in StartUp screen before each test
if (driver == null) {
			// driver.removeApp(genMeth.getValueFromPropFile("appPackage"));
			// driver is null at this point, so there is no existing session to quit --
			// just create a new session and log in again below.
driver = genMeth.setCapabilitiesAndroid(genMeth);
DroidData = genMeth.setElements(webElementXmlPath, webElementXmlLang);
genMeth.cleanLoginDroid( genMeth, DroidData.userQA , DroidData.passwordQA );
}
else {
skipfailure = false;
genMeth.eyesCheckWindow("Default app is open (Droid) - SQL Golden Ap", useEye, genMeth, skipfailure);
/*
// genMeth.clickName(genMeth, "DashB/Cards/Employee");
String Startup_Screen = "//android.widget.TextView[@text='All Tabs']";
// String Startup_Screen = "//android.widget.LinearLayout[@text='SQL Golden App']";
genMeth.swipeUpMeizuLong(1000);
boolean StartUpScreenDisplay = genMeth.checkIsElementVisible( By.xpath(Startup_Screen));
if (StartUpScreenDisplay != true) {
try {
driver.resetApp();
driver.removeApp(appIdentifier);
driver.quit();
} catch (Exception e) {
// swallow if fails
}
driver = genMeth.setCapabilitiesAndroid(genMeth);
DroidData = genMeth.setElements(webElementXmlPath, webElementXmlLang);
genMeth.cleanLoginDroid( genMeth, DroidData.userQA, DroidData.passwordQA);
*/
}
}
@Test(enabled = true, testName = "URL Tab", retryAnalyzer = Retry.class, description = "Check the URL tab",
groups = { "Sanity Android" })
public void Tabs_URL() throws ParserConfigurationException, SAXException,
IOException, InterruptedException {
// go to URL Constant
genMeth.clickXpthName_TextView(genMeth, "URL / News");
Thread.sleep(10000);
genMeth.eyesCheckWindow("Tabs(Droid)- URL Data Item", useEye, genMeth, skipfailure);
//go to URL data Item
genMeth.clickId(genMeth, DroidData.TabBarTitle_Name);
genMeth.clickXpthName_CheckedTextView(genMeth, "URL Constant");
genMeth.eyesCheckWindow("Tabs (Droid) - URL Constant", useEye, genMeth, skipfailure);
//Go Back to Startup screen
genMeth.clickId(genMeth, DroidData.IconHome);
Thread.sleep(4000);
//Verify Startup screen is open
genMeth.eyesCheckWindow("Default app is open (Droid) - SQL Golden App", useEye, genMeth, skipfailure);
}
@Test(enabled = true, testName = "News Tab", retryAnalyzer = Retry.class, description = "Check the URL tab",
groups = { "Sanity Android" })
public void Tabs_News() throws ParserConfigurationException, SAXException,
IOException, InterruptedException {
// go to News
genMeth.clickXpthName_TextView(genMeth, "URL / News");
//go to URL data Item
genMeth.clickId(genMeth, DroidData.TabBarTitle_Name);
Thread.sleep(2000);
genMeth.clickXpthName_CheckedTextView(genMeth, "News");
genMeth.eyesCheckWindow("Tabs (Droid) - News", useEye, genMeth, skipfailure);
genMeth.clickXpthName_TextView(genMeth, "www.milliondollarhomepage.com");
Thread.sleep(10000);
genMeth.eyesCheckWindow("All Tabs (Droid)- The milliion $ home page", useEye, genMeth, skipfailure);
genMeth.clickId(genMeth, DroidData.IconHome);
genMeth.rotateLandscape();
genMeth.eyesCheckWindow("All Tabs- News Landscape", useEye, genMeth, skipfailure);
genMeth.rotatePortrait();
//Go Back to Startup screen
genMeth.clickId(genMeth, DroidData.IconHome);
//Verify Startup screen is open
genMeth.eyesCheckWindow("Default app is open (Droid) - SQL Golden App", useEye, genMeth, skipfailure);
}
@Test(enabled = true, testName = "Dashboard Tab", retryAnalyzer = Retry.class, description = "Check the URL tab",
groups = { "Sanity Android" })
public void Tabs_Dashboard() throws ParserConfigurationException, SAXException,
IOException, InterruptedException {
//Open Dashboard Tab
Thread.sleep(8000);
genMeth.clickXpthName_TextView(genMeth, "DashB/Cards/Employee");
Thread.sleep(10000);
genMeth.eyesCheckWindow("All Tabs (Droid)- Dashboard Default Layout", useEye, genMeth, skipfailure);
//Navigate to Employee directory tab
genMeth.clickXpthName_TextView(genMeth, "Service Call ID1");
Thread.sleep(10000);
genMeth.eyesCheckWindow("All Tabs (Droid)- Dashboard Default Layout- Navigate to Employee Directory", useEye, genMeth, skipfailure);
//Navigate back to Dashboard
genMeth.backDroidButton();
genMeth.clickXpthName_TextView(genMeth, "DashB/Cards/Employee");
genMeth.clickId(genMeth, DroidData.BTNSlicer);
genMeth.clickXpthName_TextView(genMeth, "Service Call ID1");
genMeth.clickXpthName_TextView(genMeth, "1");
genMeth.clickId(genMeth, "com.skygiraffe.operationaldata:id/filter_detail_navigation_left_btn");
genMeth.clickXpthName_TextView(genMeth, "Done");
genMeth.swipedownMeizuLong(1000);
genMeth.swipedownMeizuLong(1000);
genMeth.swipedownMeizuLong(1000);
genMeth.eyesCheckWindow("All Tabs (Droid)- Dashboard Advanced columns (Scroll down)", useEye, genMeth, skipfailure);
//Gauge
genMeth.clickId(genMeth, DroidData.TabBarTitle_Name);
genMeth.clickXpthName_CheckedTextView(genMeth, "Dash with Gauge");
genMeth.eyesCheckWindow("All Tabs (Droid)- Dashboard- Gauge Half", useEye, genMeth, skipfailure);
//Navigate
genMeth.clickId(genMeth, "com.skygiraffe.operationaldata:id/dashboard_item_bottom_label_container_view");
Thread.sleep(10000);
genMeth.eyesCheckWindow("All Tabs (Droid)- Dashboard- Navigate to Map By GPS", useEye, genMeth, skipfailure);
genMeth.backDroidButton();
genMeth.swipedownMeizuLong(1000);
genMeth.swipedownMeizuLong(1000);
genMeth.eyesCheckWindow("All Tabs (Droid)- Dashboard- Gauge Full/Solid", useEye, genMeth, skipfailure);
//Back to Startup screen
genMeth.clickId(genMeth, DroidData.IconHome);
}
@Test(enabled = true, testName = "Map,Dashboard, Charts Tabs", retryAnalyzer = Retry.class, description = "Check the URL tab",
groups = { "Sanity Android" })
public void Tabs_Map() throws ParserConfigurationException, SAXException,
IOException, InterruptedException {
//Open Map By Address Tab
genMeth.clickXpthName_TextView(genMeth, "Map");
Thread.sleep(5000);
genMeth.eyesCheckWindow("All Tabs (Droid)- Map By GPS", useEye, genMeth, skipfailure);
genMeth.clickId(genMeth, DroidData.TabBarTitle_Name);
genMeth.clickXpthName_CheckedTextView(genMeth, "Map By Address");
Thread.sleep(3000);
//genMeth.eyesCheckWindow(eyes, "All Tabs- Map By Address", useEye, skipfailure);
// genMeth.clickId(genMeth,"19501 Biscayne Blvd, Aventura, FL 33180. 19501 Biscayne Boulevard,Aventura, FL 33180.");
//genMeth.clickXpthName_TextView(genMeth, "19501 Biscayne Blvd, Aventura, FL 33180. 19501 Biscayne Boulevard,Aventura, FL 33180.");
By by = By.xpath("//android.view.View[@content-desc='19501 Biscayne Blvd, Aventura, FL 33180. 19501 Biscayne Boulevard,Aventura, FL 33180.']");
driver.findElement(by).click();
genMeth.eyesCheckWindow("All Tabs (Droid)- Map By Address- Aventura", useEye, genMeth, skipfailure);
//Driving Directions
genMeth.clickId(genMeth, DroidData.BTNdirection);
// genMeth.eyesCheckWindow("All Tabs (Droid)- Map By Address- Driving directions", useEye, genMeth, skipfailure);
genMeth.clickId(genMeth, DroidData.BTNCancelName);
//Phone
genMeth.clickId(genMeth, "com.skygiraffe.operationaldata:id/map_add_info_adress_container");
genMeth.clickId(genMeth, "com.skygiraffe.operationaldata:id/map_add_info_item_phone");
// genMeth.eyesCheckWindow("All Tabs (Droid)- Map By Address- Phone", useEye, genMeth, skipfailure);
genMeth.clickId(genMeth, DroidData.BTNCancelName);
//Navigation to URL tab
genMeth.clickId(genMeth, "com.skygiraffe.operationaldata:id/map_add_info_item_jump_to");
Thread.sleep(8000);
genMeth.eyesCheckWindow("Tabs (Droid)- URL Data Item", useEye, genMeth, skipfailure);
//Navigation Back
genMeth.backDroidButton();
//Open Map By GPS
Thread.sleep(10000);
genMeth.clickId(genMeth, DroidData.TabBarTitle_Name);
genMeth.clickXpthName_CheckedTextView(genMeth, "Map By GPS");
by = By.xpath("//android.view.View[@content-desc='40.918116,-74.076363. 1 Garden State Plaza Boulevard,Paramus, NJ 07652.']");
genMeth.clickBy(driver, genMeth, by);
Thread.sleep(3000);
genMeth.eyesCheckWindow("All Tabs (Droid)- Map By GPS- Press pin map", useEye, genMeth, skipfailure);
genMeth.clickId(genMeth, "com.skygiraffe.operationaldata:id/map_add_info_adress_container");
//All addresses
genMeth.eyesCheckWindow("All Tabs (Droid)- Map By GPS- All Addresses", useEye, genMeth, skipfailure);
//Back to Startup screen
genMeth.clickId(genMeth, DroidData.IconHome);
}
@Test(enabled = true, testName = "Map Charts Tabs", retryAnalyzer = Retry.class, description = "Check the URL tab",
groups = { "Sanity Android" })
public void Tabs_Chart() throws ParserConfigurationException, SAXException,
IOException, InterruptedException {
//Open Bar Chart
genMeth.clickXpthName_TextView(genMeth, "Chart/CoverF/ActionC");
Thread.sleep(4000);
genMeth.eyesCheckWindow("All Tabs (Droid)- Bar Chart", useEye, genMeth, skipfailure);
//Filter data
genMeth.clickXpthName_TextView(genMeth, "Sales");
genMeth.eyesCheckWindow("All Tabs (Droid)- Bar Chart- Returns & Net Sales", useEye, genMeth, skipfailure);
genMeth.clickXpthName_TextView(genMeth, "Returns");
genMeth.eyesCheckWindow("All Tabs (Droid)- Bar Chart- Net Sales", useEye, genMeth, skipfailure);
genMeth.clickXpthName_TextView(genMeth, "Sales");
genMeth.clickXpthName_TextView(genMeth, "Returns");
//genMeth.clickId(genMeth, "Net Sales");
genMeth.eyesCheckWindow("All Tabs (Droid)- Bar Chart", useEye, genMeth, skipfailure);
//Navigation to pie chart
genMeth.clickId(genMeth, "com.skygiraffe.operationaldata:id/column_chart_selected_title_nav_icon");
Thread.sleep(15000);
genMeth.eyesCheckWindow("All Tabs (Droid)- Bar Chart- Navigate to Dashboard", useEye, genMeth, skipfailure);
//Navigate back to the Bar chart
genMeth.backDroidButton();
Thread.sleep(5000);
genMeth.eyesCheckWindow("All Tabs (Droid)- Bar Chart", useEye, genMeth, skipfailure);
//Pie Chart
genMeth.clickId(genMeth, DroidData.TabBarTitle_Name);
genMeth.clickXpthName_CheckedTextView(genMeth, "Pie Chart");
genMeth.eyesCheckWindow("All Tabs (Droid)- Pie Chart", useEye, genMeth, skipfailure);
		//Filter data Returns
genMeth.clickXpthName_TextView(genMeth, "Returns");
genMeth.eyesCheckWindow("All Tabs (Droid)- Pie Chart- Returns", useEye, genMeth, skipfailure);
//Filter data Net Sales
genMeth.clickXpthName_TextView(genMeth, "Net Sales");
genMeth.eyesCheckWindow("All Tabs (Droid)- Pie Chart- Net Sales", useEye, genMeth, skipfailure);
//Navigation to Bar chart
genMeth.clickId(genMeth, "com.skygiraffe.operationaldata:id/pie_chart_slicer_name");
genMeth.eyesCheckWindow("All Tabs (Droid)- Bar Chart", useEye, genMeth, skipfailure);
//Go Back to Startup screen
genMeth.clickId(genMeth, DroidData.IconHome);
}
@Test(enabled = true, testName = "Cover Flow", retryAnalyzer = Retry.class, description = "Check the Cover Flow tab",
groups = { "Sanity Android" })
public void Tabs_CoverFlow() throws ParserConfigurationException, SAXException,
IOException, InterruptedException {
// go to CoverFlow
genMeth.clickXpthName_TextView(genMeth, "Chart/CoverF/ActionC");
genMeth.clickId(genMeth, DroidData.TabBarTitle_Name);
genMeth.clickXpthName_CheckedTextView(genMeth, "Cover Flow");
Thread.sleep(4000);
genMeth.eyesCheckWindow("All Tabs (Droid)- Cover Flow", useEye, genMeth, skipfailure);
genMeth.swipeRightMeizuShort(1000);
genMeth.eyesCheckWindow("All Tabs (Droid)- Cover Flow- swipe John Grant", useEye, genMeth, skipfailure);
//Address
genMeth.clickXpthName_TextView(genMeth, "Address");
genMeth.eyesCheckWindow("All Tabs (Droid)- Cover Flow- Address", useEye, genMeth, skipfailure);
genMeth.clickId(genMeth, DroidData.BTNCancelName);
genMeth.swipedownMeizuLong(1000);
genMeth.swipedownMeizuLong(1000);
genMeth.eyesCheckWindow("All Tabs (Droid)- Cover Flow- Scroll Down", useEye, genMeth, skipfailure);
//Address mini map
genMeth.clickId(genMeth, "com.skygiraffe.operationaldata:id/list_template_map_address_icon");
genMeth.eyesCheckWindow("All Tabs (Droid)- Cover Flow- Address Mini Map", useEye, genMeth, skipfailure);
genMeth.clickId(genMeth, DroidData.BTNCancelName);
//Phone
genMeth.clickXpthName_TextView(genMeth, "Phone");
genMeth.eyesCheckWindow("All Tabs (Droid)- Cover Flow- Phone", useEye, genMeth, skipfailure);
genMeth.clickId(genMeth, DroidData.BTNCancelName);
//Email
genMeth.clickXpthName_TextView(genMeth, "Email");
genMeth.eyesCheckWindow("All Tabs (Droid)- Cover Flow- Email", useEye, genMeth, skipfailure);
genMeth.clickId(genMeth, DroidData.BTNCancelName);
//URL
genMeth.clickXpthName_TextView(genMeth, "URL");
genMeth.eyesCheckWindow("All Tabs (Droid)- Cover Flow- URL", useEye, genMeth, skipfailure);
genMeth.clickId(genMeth, DroidData.BTNsubmit);
Thread.sleep(4000);
genMeth.eyesCheckWindow("All Tabs (Droid)- Cover Flow- Go to URL", useEye, genMeth, skipfailure);
genMeth.backDroidButton();
genMeth.clickId(genMeth, DroidData.BTNCancelName);
// Landline
genMeth.clickXpthName_TextView(genMeth, "Landline");
genMeth.eyesCheckWindow("All Tabs (Droid)- Cover Flow- Landline", useEye, genMeth, skipfailure);
genMeth.clickId(genMeth, DroidData.BTNCancelName);
//Go to Startup screen
genMeth.backDroidButton();
}
@Test(enabled = true, testName = "List", retryAnalyzer = Retry.class, description = "Check the List tab",
groups = { "Sanity Android" })
public void Tabs_List_AdvancedColumns() throws ParserConfigurationException, SAXException,
IOException, InterruptedException {
// go to List
genMeth.clickXpthName_TextView(genMeth, "List / Grid");
Thread.sleep(3000);
genMeth.eyesCheckWindow("Tabs_List_AdvancedColumns (Droid)- List", useEye, genMeth, skipfailure);
//Phone
genMeth.clickXpthName_TextView(genMeth, "Call");
Thread.sleep(2000);
genMeth.eyesCheckWindow("Tabs_List_AdvancedColumns (Droid)- List Phone", useEye, genMeth, skipfailure);
//genMeth.eyesCheckWindow(eyes, "All Tabs- List Phone", useEye, skipfailure);
//Email
genMeth.clickXpthName_TextView(genMeth, "Email");
Thread.sleep(2000);
genMeth.eyesCheckWindow("Tabs_List_AdvancedColumns (Droid)- List Email", useEye, genMeth, skipfailure);
genMeth.clickId(genMeth, DroidData.BTNCancelName);
Thread.sleep(2000);
//URL
genMeth.clickXpthName_TextView(genMeth, "URL");
Thread.sleep(2000);
genMeth.eyesCheckWindow("Tabs_List_AdvancedColumns (Droid)- List URL", useEye, genMeth, skipfailure);
genMeth.clickId(genMeth, DroidData.BTNCancelName);
// Thread.sleep(3000);
// Landline
genMeth.clickXpthName_TextView(genMeth, "Landline");
Thread.sleep(2000);
genMeth.eyesCheckWindow("Tabs_List_AdvancedColumns (Droid)- List Landline", useEye, genMeth, skipfailure);
genMeth.clickXpthName_TextView(genMeth, "Landline");
//Address
genMeth.clickXpthName_TextView(genMeth, "Address");
Thread.sleep(2000);
genMeth.eyesCheckWindow("Tabs_List_AdvancedColumns (Droid)- List Address", useEye, genMeth, skipfailure);
//genMeth.eyesCheckWindow(eyes, "All Tabs- List Address", useEye, skipfailure);
genMeth.clickXpthName_TextView(genMeth, "Address");
//Mini Map
genMeth.swipedownMeizuShort(1000);
genMeth.clickId(genMeth, "com.skygiraffe.operationaldata:id/template_view_item_map_layout");
genMeth.clickId(genMeth, DroidData.BTNsubmit);
Thread.sleep(3000);
genMeth.backDroidButton();
genMeth.clickId(genMeth, DroidData.BTNCancelName);
genMeth.swipedownMeizuShort(1000);
genMeth.swipedownMeizuShort(1000);
genMeth.clickId(genMeth, DroidData.BTNseeAll_ID);
Thread.sleep(2000);
genMeth.eyesCheckWindow("Tabs_List_AdvancedColumns (Droid)- List See All", useEye, genMeth, skipfailure);
//Folder
genMeth.clickXpthName_TextView(genMeth, "Folder");
Thread.sleep(2000);
genMeth.eyesCheckWindow("Tabs_List_AdvancedColumns (Droid)- List Folder", useEye, genMeth, skipfailure);
genMeth.swipedownMeizuLong(1000);
Thread.sleep(2000);
genMeth.eyesCheckWindow("Tabs_List_AdvancedColumns (Droid)- List See All scroll down", useEye, genMeth, skipfailure);
//genMeth.eyesCheckWindow(eyes, "All Tabs- List See All scroll down", useEye, skipfailure);
genMeth.clickId(genMeth, DroidData.IconHome);
genMeth.clickId(genMeth, DroidData.IconHome);
Thread.sleep(2000);
//Verify Startup screen is open
genMeth.eyesCheckWindow("Default app is open (Droid) - SQL Golden App", useEye, genMeth, skipfailure);
}
@Test(enabled = true, testName = "Grid two layer Advanced", retryAnalyzer = Retry.class, description = "Check the Grid two layer tab",
groups = { "Sanity Android" })
public void Tabs_Grid_Two_Layers() throws ParserConfigurationException, SAXException,
IOException, InterruptedException {
// go to Grid
genMeth.clickXpthName_TextView(genMeth, "List / Grid");
genMeth.clickId(genMeth, DroidData.TabBarTitle_Name);
genMeth.clickXpthName_CheckedTextView(genMeth, "Grid - Two Layers");
Thread.sleep(3000);
genMeth.eyesCheckWindow("All Tabs- Grid two layers (Droid)- List Address", useEye, genMeth, skipfailure);
//Open the second layer
genMeth.clickXpthName_TextView(genMeth, "$200");
Thread.sleep(1000);
genMeth.eyesCheckWindow("All Tabs- Grid two layers (Droid)- Second layer", useEye, genMeth, skipfailure);
genMeth.swipedownMeizuShorter(1000);
genMeth.setLandscapeMode();
genMeth.eyesCheckWindow("All Tabs- Grid two layers (Droid)- Second layer - Landscape", useEye, genMeth, skipfailure);
genMeth.setPortraitMode();
//Phone
genMeth.clickXpthName_TextView(genMeth, "Phone");
Thread.sleep(1000);
genMeth.eyesCheckWindow("All Tabs- Grid two layers (Droid)- Phone options open", useEye, genMeth, skipfailure);
genMeth.clickXpthName_TextView(genMeth, "Phone");
// Landline
genMeth.clickXpthName_TextView(genMeth, "Landline");
Thread.sleep(1000);
genMeth.eyesCheckWindow("All Tabs- Grid two layers (Droid)- Landline", useEye, genMeth, skipfailure);
genMeth.clickXpthName_TextView(genMeth, "Landline");
//URL
genMeth.clickXpthName_TextView(genMeth, "URL");
Thread.sleep(2000);
genMeth.eyesCheckWindow("All Tabs- Grid two layers (Droid)- List URL", useEye, genMeth, skipfailure);
genMeth.clickId(genMeth, DroidData.BTNCancelName);
//Email
genMeth.clickXpthName_TextView(genMeth, "Email");
Thread.sleep(2000);
genMeth.eyesCheckWindow("All Tabs- Grid two layers (Droid)- List Email", useEye, genMeth, skipfailure);
genMeth.clickId(genMeth, DroidData.BTNCancelName);
Thread.sleep(2000);
//Address
genMeth.clickXpthName_TextView(genMeth, "Address");
Thread.sleep(2000);
genMeth.eyesCheckWindow("All Tabs- Grid two layers (Droid)- Address", useEye, genMeth, skipfailure);
//genMeth.eyesCheckWindow(eyes, "All Tabs- List Address", useEye, skipfailure);
genMeth.clickXpthName_TextView(genMeth, "Address");
//Mini Map
genMeth.swipedownMeizuShort(1000);
genMeth.clickId(genMeth, "com.skygiraffe.operationaldata:id/template_view_item_map_layout");
genMeth.eyesCheckWindow("All Tabs- Grid two layers (Droid)- Mini Map", useEye, genMeth, skipfailure);
genMeth.clickId(genMeth, DroidData.BTNsubmit);
Thread.sleep(3000);
genMeth.backDroidButton();
genMeth.clickId(genMeth, DroidData.BTNCancelName);
genMeth.clickId(genMeth, DroidData.IconHome);
genMeth.clickId(genMeth, DroidData.IconHome);
//Verify Startup screen is open
Thread.sleep(2000);
genMeth.eyesCheckWindow("Default app is open (Droid) - SQL Golden App", useEye, genMeth, skipfailure);
}
@Test(enabled = true, testName = "Grid one layer", retryAnalyzer = Retry.class, description = "Check the Grid one layer tab Advanced & navigation",
groups = { "Sanity Android123" })
public void Tabs_Grid_One_Layer_Advance_Navigation() throws ParserConfigurationException, SAXException,
IOException, InterruptedException {
//TODO: find a more stable locator than these long find-element-by-xpath calls for the Address/Mobile Phone items, since they keep failing (seems like an Appium bug)
// go to Grid
genMeth.clickXpthName_TextView(genMeth, "List / Grid");
genMeth.clickId(genMeth, DroidData.TabBarTitle_Name);
genMeth.clickXpthName_CheckedTextView(genMeth, "Grid - One Layer");
Thread.sleep(3000);
genMeth.eyesCheckWindow("All Tabs- Grid one layer (Droid) - (Advanced - Part 1)", useEye, genMeth, skipfailure);
// Address
genMeth.clickXpth(genMeth, "//android.view.View[1]/android.widget.FrameLayout[1]/android.widget.FrameLayout[1]/android.support.v4.widget.DrawerLayout[1]/android.widget.FrameLayout[1]/android.widget.FrameLayout[2]/android.widget.FrameLayout[1]/android.widget.FrameLayout[1]/android.support.v4.view.ViewPager[1]/android.widget.FrameLayout[1]/android.widget.LinearLayout[1]/android.widget.HorizontalScrollView[1]/android.widget.LinearLayout[1]/android.widget.FrameLayout[1]/android.widget.ListView[1]/android.widget.LinearLayout[1]/android.widget.LinearLayout[1]/android.widget.LinearLayout[4]");
genMeth.eyesCheckWindow("All Tabs- Grid one layer (Droid) - Address", useEye, genMeth, skipfailure);
genMeth.clickId(genMeth, DroidData.BTNCancelName);
// Mobile Phone
genMeth.clickXpth(genMeth, "//android.view.View[1]/android.widget.FrameLayout[1]/android.widget.FrameLayout[1]/android.support.v4.widget.DrawerLayout[1]/android.widget.FrameLayout[1]/android.widget.FrameLayout[2]/android.widget.FrameLayout[1]/android.widget.FrameLayout[1]/android.support.v4.view.ViewPager[1]/android.widget.FrameLayout[1]/android.widget.LinearLayout[1]/android.widget.HorizontalScrollView[1]/android.widget.LinearLayout[1]/android.widget.FrameLayout[1]/android.widget.ListView[1]/android.widget.LinearLayout[1]/android.widget.LinearLayout[1]/android.widget.LinearLayout[5]");
genMeth.eyesCheckWindow("All Tabs- Grid one layer (Droid) - Phone", useEye, genMeth, skipfailure);
genMeth.clickId(genMeth, DroidData.BTNCancelName);
genMeth.swipeRightMeizuLong(2000);
genMeth.swipeRightMeizuLong(2000);
genMeth.eyesCheckWindow("All Tabs- Grid one layer (Droid) - Swipe to the right", useEye, genMeth, skipfailure);
// MiniMap - Navigation to slicer report
genMeth.clickXpth(genMeth, "//android.view.View[1]/android.widget.FrameLayout[1]/android.widget.FrameLayout[1]/android.support.v4.widget.DrawerLayout[1]/android.widget.FrameLayout[1]/android.widget.FrameLayout[2]/android.widget.FrameLayout[1]/android.widget.FrameLayout[1]/android.support.v4.view.ViewPager[1]/android.widget.FrameLayout[1]/android.widget.LinearLayout[1]/android.widget.HorizontalScrollView[1]/android.widget.LinearLayout[1]/android.widget.FrameLayout[1]/android.widget.ListView[1]/android.widget.LinearLayout[1]/android.widget.LinearLayout[1]/android.widget.LinearLayout[2]/android.widget.ImageView[1]");
genMeth.eyesCheckWindow("All Tabs- Grid one layer (Droid) - Mini Map Navigation", useEye, genMeth, skipfailure);
genMeth.backDroidButton();
// Email
genMeth.clickXpth(genMeth, "//android.view.View[1]/android.widget.FrameLayout[1]/android.widget.FrameLayout[1]/android.support.v4.widget.DrawerLayout[1]/android.widget.FrameLayout[1]/android.widget.FrameLayout[2]/android.widget.FrameLayout[1]/android.widget.FrameLayout[1]/android.support.v4.view.ViewPager[1]/android.widget.FrameLayout[1]/android.widget.LinearLayout[1]/android.widget.HorizontalScrollView[1]/android.widget.LinearLayout[1]/android.widget.FrameLayout[1]/android.widget.ListView[1]/android.widget.LinearLayout[1]/android.widget.LinearLayout[1]/android.widget.LinearLayout[3]");
genMeth.eyesCheckWindow("All Tabs- Grid one layer (Droid) - Email", useEye, genMeth, skipfailure);
genMeth.clickId(genMeth, DroidData.BTNCancelName);
// URL
//genMeth.clickXpth(genMeth, "//android.view.View[1]/android.widget.FrameLayout[1]/android.widget.FrameLayout[1]/android.support.v4.widget.DrawerLayout[1]/android.widget.FrameLayout[1]/android.widget.FrameLayout[2]/android.widget.FrameLayout[1]/android.widget.FrameLayout[2]/android.support.v4.view.ViewPager[1]/android.widget.FrameLayout[1]/android.widget.LinearLayout[1]/android.widget.HorizontalScrollView[1]/android.widget.LinearLayout[1]/android.widget.FrameLayout[1]/android.widget.ListView[1]/android.widget.LinearLayout[1]/android.widget.LinearLayout[1]/android.widget.LinearLayout[4]");
genMeth.clickXpth(genMeth, "//android.view.View[1]/android.widget.FrameLayout[1]/android.widget.FrameLayout[1]/android.support.v4.widget.DrawerLayout[1]/android.widget.FrameLayout[1]/android.widget.FrameLayout[2]/android.widget.FrameLayout[1]/android.widget.FrameLayout[1]/android.support.v4.view.ViewPager[1]/android.widget.FrameLayout[1]/android.widget.LinearLayout[1]/android.widget.HorizontalScrollView[1]/android.widget.LinearLayout[1]/android.widget.FrameLayout[1]/android.widget.ListView[1]/android.widget.LinearLayout[1]/android.widget.LinearLayout[1]/android.widget.LinearLayout[4]/android.widget.ImageView[1]");
genMeth.eyesCheckWindow("All Tabs- Grid one layer (Droid) - URL", useEye, genMeth, skipfailure);
genMeth.clickId(genMeth, DroidData.BTNsubmit);
Thread.sleep(4000);
genMeth.eyesCheckWindow("All Tabs- Grid one layer (Droid) - Go To URL", useEye, genMeth, skipfailure);
genMeth.backDroidButton();
genMeth.clickId(genMeth, DroidData.BTNCancelName);
// Landline
genMeth.clickXpth(genMeth, "//android.view.View[1]/android.widget.FrameLayout[1]/android.widget.FrameLayout[1]/android.support.v4.widget.DrawerLayout[1]/android.widget.FrameLayout[1]/android.widget.FrameLayout[2]/android.widget.FrameLayout[1]/android.widget.FrameLayout[1]/android.support.v4.view.ViewPager[1]/android.widget.FrameLayout[1]/android.widget.LinearLayout[1]/android.widget.HorizontalScrollView[1]/android.widget.LinearLayout[1]/android.widget.FrameLayout[1]/android.widget.ListView[1]/android.widget.LinearLayout[1]/android.widget.LinearLayout[1]/android.widget.LinearLayout[5]/android.widget.ImageView[1]");
genMeth.eyesCheckWindow("All Tabs- Grid one layer (Droid) - Landline", useEye, genMeth, skipfailure);
genMeth.clickId(genMeth, DroidData.BTNCancelName);
genMeth.clickId(genMeth, DroidData.IconHome);
//Verify Startup screen is open
genMeth.eyesCheckWindow("Default app is open (Droid) - SQL Golden App", useEye, genMeth, skipfailure);
}
@Test(enabled = true, testName = "Employee Directory", retryAnalyzer = Retry.class, description = "Check the Employee Directory tab",
groups = { "Sanity Android" })
public void Tabs_Employee_Directory() throws ParserConfigurationException, SAXException,
IOException, InterruptedException {
// go to Employee Directory tab
genMeth.clickXpthName_TextView(genMeth, "DashB/Cards/Employee");
genMeth.clickId(genMeth, DroidData.TabBarTitle_Name);
genMeth.clickXpthName_CheckedTextView(genMeth, "Employee Directory");
Thread.sleep(3000);
genMeth.eyesCheckWindow("All Tabs- Employee Directory (Droid) - Main",useEye, genMeth, skipfailure);
//Search an employee (Empty search)
genMeth.clickId(genMeth, DroidData.IconSearch);
genMeth.sendId(genMeth, DroidData.IconSearch , "no emplyees found");
genMeth.eyesCheckWindow("All Tabs- Employee Directory (Droid) - empty search",useEye, genMeth, skipfailure);
genMeth.deleteKey(17);
genMeth.clearId(genMeth, DroidData.IconSearch);
//Search an employee
genMeth.clickId(genMeth, DroidData.IconSearch);
genMeth.sendId(genMeth, DroidData.IconSearch , "Lane");
genMeth.eyesCheckWindow("All Tabs- Employee Directory (Droid) - search Specific employee",useEye, genMeth, skipfailure);
genMeth.backDroidButton();
//second layer
genMeth.clickXpthName_TextView(genMeth, "Lane R. Barlow");
Thread.sleep(2000);
genMeth.eyesCheckWindow("All Tabs- Employee Directory (Droid) - Second layer",useEye, genMeth, skipfailure);
// Phone
Thread.sleep(1000);
genMeth.clickXpthName_TextView(genMeth, "Phone");
Thread.sleep(2000);
genMeth.eyesCheckWindow("All Tabs- Employee Directory (Droid) - Phone",useEye, genMeth, skipfailure);
// Email
genMeth.clickXpthName_TextView(genMeth, "Email");
Thread.sleep(2000);
genMeth.eyesCheckWindow("All Tabs- Employee Directory (Droid) - Email",useEye, genMeth, skipfailure);
genMeth.clickId(genMeth, DroidData.BTNCancelName);
//Map
genMeth.swipedownMeizuShorter(1000);
genMeth.clickXpthName_TextView(genMeth, "Address First");
Thread.sleep(1000);
genMeth.eyesCheckWindow("All Tabs- Employee Directory (Droid) - Address First",useEye, genMeth, skipfailure);
// Mini Map
genMeth.swipedownMeizuShorter(1000);
genMeth.clickId(genMeth, "com.skygiraffe.operationaldata:id/template_view_item_map_layout");
Thread.sleep(1000);
genMeth.eyesCheckWindow("All Tabs- Employee Directory (Droid) - Address second",useEye, genMeth, skipfailure);
genMeth.clickId(genMeth, DroidData.BTNCancelName);
// URL
genMeth.swipedownMeizuLong(1000);
genMeth.clickXpthName_TextView(genMeth, "google.com");
Thread.sleep(1000);
genMeth.eyesCheckWindow("All Tabs- Employee Directory (Droid) - URL",useEye, genMeth, skipfailure);
genMeth.clickId(genMeth, DroidData.BTNsubmit);
genMeth.backDroidButton();
genMeth.clickId(genMeth, DroidData.BTNCancelName);
//Social Networks - Facebook
genMeth.clickId(genMeth, "com.skygiraffe.operationaldata:id/employee_directory_detail_person_social_net_facebook");
Thread.sleep(5000);
genMeth.eyesCheckWindow("All Tabs- Employee Directory (Droid) - Facebook",useEye, genMeth, skipfailure);
genMeth.backDroidButton();
//Twitter
genMeth.clickId(genMeth, "com.skygiraffe.operationaldata:id/employee_directory_detail_person_social_net_twitter");
Thread.sleep(5000);
genMeth.eyesCheckWindow("All Tabs- Employee Directory (Droid) - Twitter",useEye, genMeth, skipfailure);
genMeth.backDroidButton();
//LinkedIn
genMeth.clickId(genMeth, "com.skygiraffe.operationaldata:id/employee_directory_detail_person_social_net_linkidin");
Thread.sleep(3000);
genMeth.eyesCheckWindow("All Tabs- Employee Directory (Droid) - LinkedIn",useEye, genMeth, skipfailure);
genMeth.backDroidButton();
//Google+
genMeth.clickId(genMeth, "com.skygiraffe.operationaldata:id/employee_directory_detail_person_social_net_google_plus");
Thread.sleep(5000);
genMeth.eyesCheckWindow("All Tabs- Employee Directory (Droid) - Google+",useEye, genMeth, skipfailure);
genMeth.backDroidButton();
//Navigation
genMeth.clickXpthName_TextView(genMeth, "First_Name");
Thread.sleep(8000);
genMeth.eyesCheckWindow("All Tabs- Employee Directory (Droid) - Navigation to Param report ed",useEye, genMeth, skipfailure);
genMeth.backDroidButton();
genMeth.eyesCheckWindow("All Tabs- Employee Directory (Droid) - Back from navigation",useEye, genMeth, skipfailure);
//No Social Networks available
genMeth.sendId(genMeth, DroidData.IconSearch , "Callum R. Aguirre");
Thread.sleep(1000);
genMeth.eyesCheckWindow("All Tabs- Employee Directory (Droid) - No Social Networks",useEye, genMeth, skipfailure);
genMeth.backDroidButton();
//No Google+
genMeth.sendId(genMeth, DroidData.IconSearch , "Caldwell Alexander");
genMeth.swipedownMeizuLong(1000);
Thread.sleep(1000);
genMeth.eyesCheckWindow("All Tabs- Employee Directory (Droid) - No Google+",useEye, genMeth, skipfailure);
//Back to Startup screen
genMeth.backDroidButton();
genMeth.clickId(genMeth, DroidData.IconHome);
//Press info for the app
genMeth.clickId(genMeth, "com.skygiraffe.operationaldata:id/left_menu_child_info_icon");
Thread.sleep(1000);
genMeth.eyesCheckWindow("All Tabs- Employee Directory (Droid) - Golden App info screen",useEye, genMeth, skipfailure);
genMeth.clickXpthName_TextView(genMeth, "SQL Golden App");
genMeth.clickXpthName_TextView(genMeth, "SQL Golden App");
//Verify Startup screen is open
Thread.sleep(1000);
genMeth.eyesCheckWindow("Default app is open (Droid) - SQL Golden App", useEye, genMeth, skipfailure);
}
@Test(enabled = true, testName = "Parameterized report Grid", retryAnalyzer = Retry.class, description = "Check the Employee Directory tab",
groups = { "Sanity Android" })
public void Param_Report_Grid() throws ParserConfigurationException, SAXException,
IOException, InterruptedException {
// go to parameterized report - Grid tab
genMeth.swipedownMeizuShort(1000);
// go to Param Report Grid tab
genMeth.clickXpthName_TextView(genMeth, "Param Report Grid");
Thread.sleep(4000);
genMeth.eyesCheckWindow("Param Report Grid (Droid)- add Parameters",useEye, genMeth, skipfailure);
//Attempt to submit while mandatory is missing
genMeth.clickId(genMeth, "com.skygiraffe.operationaldata:id/parameterized_fragment_submit_button");
genMeth.eyesCheckWindow("Param Report Grid (Droid)- Mandatory field is missing",useEye, genMeth, skipfailure);
//Insert parameters
genMeth.clickId(genMeth, "com.skygiraffe.operationaldata:id/app_message_dialog_cancel_button");
genMeth.clickXpthName_TextView(genMeth, "SL-Device Types");
genMeth.eyesCheckWindow("Param Report Grid (Droid)- SL param",useEye, genMeth, skipfailure);
genMeth.clickXpthName_TextView(genMeth, "Laptop");
genMeth.clickXpthName_TextView(genMeth, "PSL- Device Model");
genMeth.eyesCheckWindow("Param Report Grid (Droid)- PSL param",useEye, genMeth, skipfailure);
genMeth.clickXpthName_TextView(genMeth, "PSL- Device Model");
genMeth.clickXpthName_TextView(genMeth, "Lenovo");
genMeth.eyesCheckWindow("Param Report Grid (Droid)- All params were filled",useEye, genMeth, skipfailure);
genMeth.clickId(genMeth, "com.skygiraffe.operationaldata:id/parameterized_fragment_submit_button");
Thread.sleep(2000);
genMeth.eyesCheckWindow("Param Report Grid (Droid)- Grid first layer",useEye, genMeth, skipfailure);
//Go To second layer
genMeth.clickXpthName_TextView(genMeth, "Laptop");
genMeth.eyesCheckWindow("Param Report Grid (Droid)- Grid second layer",useEye, genMeth, skipfailure);
//Back to startup screen
genMeth.clickId(genMeth, DroidData.IconHome);
genMeth.clickId(genMeth, DroidData.IconHome);
genMeth.swipeUpMeizuLong(1000);
genMeth.swipeUpMeizuLong(1000);
genMeth.eyesCheckWindow("Default app is open (Droid) - SQL Golden App", useEye, genMeth, skipfailure);
}
@Test(enabled = true, testName = "Parameterized report With all variables", retryAnalyzer = Retry.class, description = "Check the Employee Directory tab",
groups = { "Sanity Android" })
public void Param_Report_AllVariables() throws ParserConfigurationException, SAXException,
IOException, InterruptedException {
genMeth.swipedownMeizuShort(1000);
genMeth.clickXpthName_TextView(genMeth, "Param Variables only");
genMeth.eyesCheckWindow("Param Report with All Variables (Droid) - SQL Golden App",useEye, genMeth, skipfailure);
//Back to startup screen
genMeth.clickId(genMeth, DroidData.IconHome);
// genMeth.clickId(genMeth, DroidData.IconBack_Nav_Name);
genMeth.swipeUpMeizuLong(1000);
genMeth.swipeUpMeizuLong(1000);
genMeth.eyesCheckWindow("Default app is open (Droid) - SQL Golden App", useEye, genMeth, skipfailure);
}
@Test(enabled = true, testName = "Parameterized report List", retryAnalyzer = Retry.class, description = "Check the Employee Directory tab",
groups = { "Sanity Android" })
public void Param_Report_List() throws ParserConfigurationException, SAXException,
IOException, InterruptedException {
// go to parameterized report - List tab
genMeth.swipedownMeizuShort(1000);
genMeth.swipedownMeizuShort(1000);
genMeth.clickXpthName_TextView(genMeth, "Param Report List");
genMeth.eyesCheckWindow("Param Report List (Droid)- add Parameters", useEye, genMeth, skipfailure);
//Attempt to submit while mandatory is missing
genMeth.clickId(genMeth, "com.skygiraffe.operationaldata:id/parameterized_fragment_submit_button");
genMeth.eyesCheckWindow("Param Report List (Droid)- Mandatory field is missing", useEye, genMeth, skipfailure);
genMeth.clickId(genMeth, "com.skygiraffe.operationaldata:id/app_message_dialog_cancel_button");
//Insert parameters
genMeth.clickXpthName_TextView(genMeth, "FreeText (Priority)");
driver.pressKeyCode(AndroidKeyCode.KEYCODE_1);
genMeth.clickId(genMeth, "com.skygiraffe.operationaldata:id/action_submit_button");
genMeth.clickXpthName_TextView(genMeth, "SL_ML (Priority)");
genMeth.eyesCheckWindow("Param Report List (Droid)- SL ML Priority", useEye, genMeth, skipfailure);
genMeth.clickXpthName_TextView(genMeth, "2");
//genMeth.clickId(genMeth, "2");
genMeth.eyesCheckWindow("Param Report List (Droid)- All params were filled", useEye, genMeth, skipfailure);
genMeth.clickId(genMeth, "com.skygiraffe.operationaldata:id/parameterized_fragment_submit_button");
Thread.sleep(2000);
genMeth.eyesCheckWindow("Param Report List (Droid)- FreeText Priority = 1", useEye, genMeth, skipfailure);
genMeth.clickId(genMeth, DroidData.TabBarTitle_Name);
genMeth.clickXpthName_CheckedTextView(genMeth, "SL_ML (Priority)");
genMeth.eyesCheckWindow("Param Report List (Droid)- FreeText Priority = 2", useEye, genMeth, skipfailure);
genMeth.clickId(genMeth, DroidData.TabBarTitle_Name);
genMeth.clickXpthName_CheckedTextView(genMeth, "SC(Up_Date<=MobDate)");
genMeth.eyesCheckWindow("Param Report List (Droid)- SC(Up_Date<=MobDate)", useEye, genMeth, skipfailure);
//Back to startup screen
genMeth.clickId(genMeth, DroidData.IconHome);
genMeth.swipeUpMeizuLong(1000);
genMeth.swipeUpMeizuLong(1000);
genMeth.eyesCheckWindow("Default app is open (Droid) - SQL Golden App", useEye, genMeth, skipfailure);
}
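/*
* Suggested helper (hypothetical - not referenced by the tests yet): the "back to
* startup screen and verify" tail that is repeated at the end of most tests above.
* swipesUp is how many long up-swipes are needed to scroll the left menu back to the top.
* Assumes the GenMeth signatures used elsewhere in this class.
*/
private void verifyBackAtStartup(int swipesUp) throws Exception {
genMeth.clickId(genMeth, DroidData.IconHome);
for (int i = 0; i < swipesUp; i++) {
genMeth.swipeUpMeizuLong(1000);
}
genMeth.eyesCheckWindow("Default app is open (Droid) - SQL Golden App", useEye, genMeth, skipfailure);
}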
@Test(enabled = true, groups = { "Sanity Android" }, testName = "Param_Report_DL_Dashboard", retryAnalyzer = Retry.class, description = "Check the Employee Directory tab")
public void Param_Report_DL_Dashboard()
throws ParserConfigurationException, SAXException, IOException,
InterruptedException {
// go to parameterized report dashboard - DL- Device Info tab
genMeth.swipedownMeizuLong(1000);
genMeth.clickXpthName_TextView(genMeth, "Param DL-Dashboard");
genMeth.eyesCheckWindow("Param Report Dashboard DL (Droid)- add Parameters",useEye, genMeth, skipfailure);
//Insert parameters
genMeth.clickXpthName_TextView(genMeth, "SL- Devices Type");
genMeth.eyesCheckWindow("Param Report Dashboard DL (Droid)- SL param",useEye, genMeth, skipfailure);
genMeth.clickXpthName_TextView(genMeth, "Laptop");
Thread.sleep(2000);
genMeth.clickXpthName_TextView(genMeth, "DL- Device Model");
genMeth.eyesCheckWindow("Param Report Dashboard DL (Droid)- DL param",useEye, genMeth, skipfailure);
genMeth.clickXpthName_TextView(genMeth, "Lenovo");
genMeth.eyesCheckWindow("Param Report Dashboard DL (Droid)- All params were filled",useEye, genMeth, skipfailure);
genMeth.clickId(genMeth, "com.skygiraffe.operationaldata:id/parameterized_fragment_submit_button");
//genMeth.clickId(genMeth, DroidData.BTNsubmit_ID);
Thread.sleep(5000);
genMeth.eyesCheckWindow("Param Report Dashboard DL (Droid)- Dashboard tab",useEye, genMeth, skipfailure);
//Navigate to Dashboard tab
genMeth.clickXpthName_TextView(genMeth, "Device Type Name (ParentName)");
Thread.sleep(5000);
genMeth.clickId(genMeth, "com.skygiraffe.operationaldata:id/parameterized_fragment_submit_button");
genMeth.eyesCheckWindow("Param Report Dashboard DL (Droid)- Navigate to SL- Devices by Type tab",useEye, genMeth, skipfailure);
genMeth.backDroidButton();
genMeth.backDroidButton();
genMeth.eyesCheckWindow("Param Report Dashboard DL (Droid)- Dashboard tab",useEye, genMeth, skipfailure);
//Back to startup screen
genMeth.clickId(genMeth, DroidData.IconHome);
genMeth.swipeUpMeizuLong(1000);
genMeth.swipeUpMeizuLong(1000);
genMeth.eyesCheckWindow("Default app is open (Droid) - SQL Golden App", useEye, genMeth, skipfailure);
}
@Test(enabled = true, groups = { "Sanity Android1"}, testName = "Param_Report_DL_Dashboard", retryAnalyzer = Retry.class, description = "Check the Employee Directory tab")
public void Param_Report_CoverFlow()
throws ParserConfigurationException, SAXException, IOException,
InterruptedException {
// go to parameterized report - Param Rep Cover Flow tab
genMeth.swipedownMeizuLong(1000);
genMeth.clickXpthName_TextView(genMeth, "Param Rep Cover Flow");
genMeth.eyesCheckWindow("Param Rep Cover Flow (Droid) - Parameters",useEye, genMeth, skipfailure);
//Insert parameters
genMeth.clickXpthName_TextView(genMeth, "Insert Gender (F or M)");
genMeth.eyesCheckWindow("Param Rep Cover Flow (Droid) - QR",useEye, genMeth, skipfailure);
genMeth.clickId(genMeth, "com.skygiraffe.operationaldata:id/barcode_activity_manual_input_button");
genMeth.sendId(genMeth, "com.skygiraffe.operationaldata:id/qr_scanner_view_finder", "m");
//genMeth.clickId(genMeth, "m");
genMeth.clickId(genMeth, "com.skygiraffe.operationaldata:id/qr_manual_input_submit_btn");
genMeth.clickId(genMeth, "com.skygiraffe.operationaldata:id/parameterized_fragment_submit_button");
Thread.sleep(2000);
genMeth.eyesCheckWindow("Param Rep Cover Flow (Droid) - Males",useEye, genMeth, skipfailure);
//Go To cover flow tab by const (females)
genMeth.clickId(genMeth, DroidData.TabBarTitle_Name);
genMeth.clickXpthName_CheckedTextView(genMeth, "Const-Female Only");
genMeth.eyesCheckWindow("Param Rep Cover Flow (Droid) - Female",useEye, genMeth, skipfailure);
//Back to startup screen
genMeth.clickId(genMeth, DroidData.IconHome);
genMeth.swipeUpMeizuLong(1000);
genMeth.swipeUpMeizuLong(1000);
genMeth.swipeUpMeizuLong(1000);
genMeth.eyesCheckWindow("Default app is open (Droid) - SQL Golden App", useEye, genMeth, skipfailure);
}
@Test(enabled = true, groups = { "Sanity IOS" }, testName = "Param_Report_DL_Dashboard", retryAnalyzer = Retry.class, description = "Check the Employee Directory tab")
public void Param_Report_Chart()
throws ParserConfigurationException, SAXException, IOException,
InterruptedException {
// go to parameterized report- Param report chart tab
genMeth.swipedownMeizuLong(1000);
genMeth.clickId(genMeth, "Param Report Chart");
Thread.sleep(2000);
genMeth.eyesCheckWindow(eyes, "Param Rep Chart - Parameters", useEye, skipfailure);
//Insert parameters
genMeth.clickId(genMeth, "Choose Value");
genMeth.clickId(genMeth, "Mall of America");
genMeth.eyesCheckWindow(eyes, "Param Rep Chart - SL ", useEye, skipfailure);
genMeth.clickId(genMeth, DroidData.BTNdoneName);
genMeth.eyesCheckWindow(eyes, "Param Rep Chart - SL Mall of america Bar", useEye, skipfailure);
genMeth.clickId(genMeth, DroidData.BTNsubmit_ID);
Thread.sleep(3000);
genMeth.eyesCheckWindow(eyes, "Param Rep Chart - SL Mall of america in bar chart", useEye, skipfailure);
//Navigate to param report
genMeth.clickXpth(genMeth, "//UIAApplication[1]/UIAWindow[1]/UIAScrollView[1]/UIAImage[2]");
genMeth.eyesCheckWindow(eyes, "Param Rep Chart - Param report map - parameters screen", useEye, skipfailure);
genMeth.clickId(genMeth, DroidData.BTNCancelName);
//Go To Pie tab
genMeth.clickId(genMeth, DroidData.TabBarTitle_Name);
genMeth.clickId(genMeth, "SL-SalesbyBranch-Pie");
genMeth.eyesCheckWindow(eyes, "Param Rep Chart - SL Mall of america Pie", useEye, skipfailure);
genMeth.clickId(genMeth, "Returns");
genMeth.eyesCheckWindow(eyes, "Param Rep Chart - SL Mall of america Pie- Returnes", useEye, skipfailure);
//Back to startup screen
genMeth.clickId(genMeth, DroidData.IconBack_Nav_Name);
genMeth.swipeUpMeizuLong(1000);
genMeth.eyesCheckWindow("Default app is open (Droid) - SQL Golden App", useEye, genMeth, skipfailure);
}
@Test(enabled = true, groups = { "Sanity IOS" }, testName = "Param_Report_DL_Dashboard", retryAnalyzer = Retry.class, description = "Check the Employee Directory tab")
public void Param_Report_EmployeeDirectoryD()
throws ParserConfigurationException, SAXException, IOException,
InterruptedException {
// go to parameterized report - Param Report ED tab
genMeth.swipedownMeizuLong(1000);
genMeth.swipedownMeizuLong(1000);
genMeth.clickId(genMeth, "Param Report ED");
Thread.sleep(2000);
genMeth.eyesCheckWindow(eyes, "Param Rep ED - Parameters", useEye, skipfailure);
//Insert parameters
genMeth.clickId(genMeth, "Choose Value");
genMeth.clickId(genMeth, "Female");
genMeth.eyesCheckWindow(eyes, "Param Rep ED -SL MB", useEye, skipfailure);
genMeth.clickId(genMeth, DroidData.BTNsubmit_ID);
Thread.sleep(2000);
genMeth.eyesCheckWindow(eyes, "Param Rep ED - Female only", useEye, skipfailure);
//Go To Employee tab by Login variable
genMeth.clickId(genMeth, DroidData.TabBarTitle_Name);
genMeth.clickId(genMeth, "ED by Login");
genMeth.eyesCheckWindow(eyes, "Param Rep ED - ED by Login", useEye, skipfailure);
//Back to startup screen
genMeth.clickId(genMeth, DroidData.IconBack_Nav_Name);
genMeth.swipeUpMeizuLong(1000);
genMeth.swipeUpMeizuLong(1000);
genMeth.eyesCheckWindow("Default app is open (Droid) - SQL Golden App", useEye, genMeth, skipfailure);
}
@Test(enabled = true, groups = { "Sanity IOS" }, testName = "Param_Report_DL_Dashboard", retryAnalyzer = Retry.class, description = "Check the Employee Directory tab")
public void Param_Report_Map()
throws ParserConfigurationException, SAXException, IOException,
InterruptedException {
// go to parameterized report - Param Report Map tab
genMeth.swipedownMeizuShort(1000);
genMeth.clickId(genMeth, "Param Report Map");
Thread.sleep(5000);
genMeth.eyesCheckWindow(eyes, "Param Rep Map - Parameters", useEye, skipfailure);
//Insert parameters
genMeth.clickId(genMeth, "Choose Value");
genMeth.clickId(genMeth, "Mall of America");
genMeth.clickId(genMeth, DroidData.BTNdoneName);
genMeth.eyesCheckWindow(eyes, "Param Rep Map - Mall Of america chosen", useEye, skipfailure);
genMeth.clickId(genMeth, DroidData.BTNsubmit_ID);
Thread.sleep(12000);
genMeth.eyesCheckWindow(eyes, "Param Rep Map - Mall Of america on map", useEye, skipfailure);
//Back to startup screen
genMeth.clickId(genMeth, DroidData.IconBack_Nav_Name);
genMeth.swipeUpMeizuLong(1000);
genMeth.eyesCheckWindow("Default app is open (Droid) - SQL Golden App", useEye, genMeth, skipfailure);
}
@Test(enabled = true, groups = { "Sanity IOS" }, testName = "Param_Report_DL_Dashboard", retryAnalyzer = Retry.class, description = "Check the Employee Directory tab")
public void Param_Report_Cards()
throws ParserConfigurationException, SAXException, IOException,
InterruptedException {
// go to parameterized report - Param Report Cards tab
genMeth.swipedownMeizuLong(1000);
genMeth.clickId(genMeth, "Param Report Cards");
Thread.sleep(5000);
genMeth.eyesCheckWindow(eyes, "Param Rep Cards - Parameters", useEye, skipfailure);
//Insert parameters
genMeth.clickId(genMeth, "Default");
for (int i = 0; i < 7; i++) {
genMeth.clickId(genMeth, DroidData.BTNkeyboardDelete);
}
genMeth.clickId(genMeth, DroidData.BtnkeyboardMoreNumbers);
genMeth.clickId(genMeth, "1");
genMeth.clickId(genMeth, DroidData.BTNdoneName);
genMeth.eyesCheckWindow(eyes, "Param Rep Cards - Priority = 1", useEye, skipfailure);
genMeth.clickId(genMeth, DroidData.BTNsubmit_ID);
Thread.sleep(10000);
genMeth.eyesCheckWindow(eyes, "Param Rep Cards - Priority = 1 service calls", useEye, skipfailure);
//Back to startup screen
genMeth.clickId(genMeth, DroidData.IconBack_Nav_Name);
genMeth.swipeUpMeizuLong(1000);
genMeth.eyesCheckWindow("Default app is open (Droid) - SQL Golden App", useEye, genMeth, skipfailure);
}
@Test(enabled = true, testName = "List", retryAnalyzer = Retry.class, description = "Check the List tab",
groups = { "Sanity IOS" })
public void Actions_List() throws ParserConfigurationException, SAXException,
IOException, InterruptedException {
// go to List
genMeth.clickId(genMeth, "List / Grid Actions");
//Set slicer to one item
genMeth.clickId(genMeth, DroidData.BTNSlicer);
genMeth.swipedownMeizuLong(1000);
genMeth.clickId(genMeth, "Service Call ID");
genMeth.clickId(genMeth, "1");
genMeth.clickId(genMeth, "1 Slicers");
genMeth.clickId(genMeth, DroidData.BTNdoneName);
genMeth.eyesCheckWindow(eyes, "List Actions- List Actions", useEye, skipfailure);
//Execute action in the first layer
//Free text description
genMeth.clickId(genMeth, "Description");
boolean checkAction = genMeth.checkIsElementVisible(By.id("Descrip 1"));
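//The description alternates between "Descrip 1" and "Descrip 2" across runs, so enter whichever value is not currently shown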
if (checkAction) {
genMeth.clickId(genMeth, DroidData.BTNkeyboardDelete);
genMeth.clickId(genMeth, DroidData.BtnkeyboardMoreNumbers);
genMeth.clickId(genMeth, "2");
} else {
genMeth.clickId(genMeth, DroidData.BTNkeyboardDelete);
genMeth.clickId(genMeth, DroidData.BtnkeyboardMoreNumbers);
genMeth.clickId(genMeth, "1");
}
genMeth.clickId(genMeth, DroidData.BTNdoneName);
Thread.sleep(10000);
genMeth.eyesCheckWindow(eyes, "List Actions- cell description", useEye, skipfailure);
//Priority (Simple List MB)
genMeth.clickId(genMeth, "Priority");
genMeth.clickId(genMeth, "91");
checkAction = genMeth.checkIsElementVisible(By.id("Update Pirority (MB)"));
if (checkAction) {
genMeth.clickId(genMeth, "90");
}
genMeth.eyesCheckWindow(eyes, "List Actions- cell Priority (Simple List MB)", useEye, skipfailure);
//Assign To (Dynamic List)
genMeth.clickId(genMeth, "Assigned To");
genMeth.clickId(genMeth, "Adrian Lopez");
Thread.sleep(10000);
genMeth.eyesCheckWindow(eyes, "List Actions- cell Assign To (DL)", useEye, skipfailure);
//Action in second layer
genMeth.swipedownMeizuLong(1000);
genMeth.swipedownMeizuLong(1000);
// genMeth.swipedownIphone5Shortest(1000);
genMeth.clickId(genMeth, DroidData.BTNseeAll_ID);
Thread.sleep(2000);
genMeth.swipedownMeizuLong(1000);
//QR code
genMeth.clickId(genMeth, "QR");
genMeth.clickId(genMeth, DroidData.BTNClearName);
genMeth.sendXpth(genMeth, "//UIAApplication[1]/UIAWindow[1]/UIATextField[1]", "1");
genMeth.clickId(genMeth, DroidData.BTNdoneName);
genMeth.clickId(genMeth, DroidData.BTNBackName);
genMeth.swipedownMeizuLong(1000);
Thread.sleep(1000);
genMeth.swipedownMeizuLong(1000);
genMeth.swipedownMeizuLong(1000);
try {
driver.findElementById(DroidData.BTNseeAll_ID).click();
//genMeth.clickId(genMeth, DroidData.BTNseeAll_ID);
} catch (Exception e1) {
// "See All" may not be on screen yet; its visibility is re-checked below and the click is retried after another swipe
}
boolean isDisplayed = genMeth.checkIsElementVisible(By.id("Service Call ID"));
if(!isDisplayed){
genMeth.swipedownMeizuLong(1000);
genMeth.clickId(genMeth, DroidData.BTNseeAll_ID);
}
Thread.sleep(2000);
genMeth.swipedownMeizuLong(1000);
genMeth.clickId(genMeth, "QR");
genMeth.clickId(genMeth, DroidData.BTNClearName);
genMeth.sendXpth(genMeth, "//UIAApplication[1]/UIAWindow[1]/UIATextField[1]", "02");
genMeth.clickId(genMeth, DroidData.BTNdoneName);
Thread.sleep(10000);
genMeth.swipedownMeizuShort(1000);
genMeth.eyesCheckWindow(eyes, "List Actions- cell QR second layer (QR)", useEye, skipfailure);
genMeth.clickId(genMeth, DroidData.BTNBackName);
//Row Action (Adding a row to the all parameters table)
genMeth.swipedownMeizuLong(1000);
try {
driver.findElementById("PopUp- AddRow").click();
} catch (Exception e) {
// "PopUp- AddRow" may not be on screen yet; its visibility is re-checked below and the click is retried after a swipe
}
Thread.sleep(4000);
isDisplayed = genMeth.checkIsElementVisible(By.id("PopUp- AddRow"));
if (!isDisplayed) {
genMeth.swipedownMeizuLong(1000);
genMeth.clickId(genMeth, "PopUp- AddRow");
}
genMeth.clickId(genMeth, "Write");
genMeth.sendXpth(genMeth, "//UIAApplication[1]/UIAWindow[1]/UIATextView[1]", "New Row");
genMeth.clickId(genMeth, DroidData.BTNdoneName);
genMeth.clickId(genMeth, "DeviceType_SL_ByName");
genMeth.clickId(genMeth, "Laptop");
genMeth.clickId(genMeth, "Device_Model_DL");
genMeth.clickId(genMeth, "Asus");
genMeth.clickId(genMeth, "Items_By_Category_PSL");
genMeth.clickId(genMeth, "Keyboard (Cat 1)");
genMeth.clickId(genMeth, "QR");
genMeth.clickId(genMeth, DroidData.BTNdoneName);
genMeth.clickId(genMeth, "SL_Manual_List");
genMeth.clickId(genMeth, "2");
//PSL with Variable
genMeth.clickId(genMeth, "Items_SmallerThanMobileDate_PSL");
genMeth.clickId(genMeth, "3");
// image
genMeth.swipedownMeizuLong(1000);
genMeth.swipedownMeizuLong(1000);
genMeth.clickXpth(genMeth, "//UIAApplication[1]/UIAWindow[1]/UIATableView[1]/UIATableCell[8]/UIAStaticText[1]");
genMeth.clickXpth(genMeth, "//UIAApplication[1]/UIAWindow[1]/UIANavigationBar[1]/UIAButton[5]");
genMeth.clickId(genMeth, "PhotoCapture");
genMeth.clickId(genMeth, "Use Photo");
genMeth.clickId(genMeth, "Done");
genMeth.eyesCheckWindow(eyes, "List Actions- Image set", useEye, skipfailure);
genMeth.clickId(genMeth, DroidData.BTNsubmit_ID);
/*
//Signature
genMeth.swipedownIphone5Long(1000);
genMeth.clickXpth(genMeth, "//UIAApplication[1]/UIAWindow[1]/UIATableView[1]/UIATableCell[8]");
TouchAction touchAction = new TouchAction(driver);
// touchAction.press(250, 250).moveTo(250, 150).release().perform();
//MobileElement el1 = genMeth.returnName(driver, genMeth, "SkyGiraffe");
//MobileElement el2 = genMeth.returnId(driver, genMeth, "X");
//touchAction.longPress(el1, 2000).moveTo(el2).release().perform();
touchAction.longPress(200, 200, 3000).perform();
touchAction.longPress(200, 200, 3000).waitAction(1000).moveTo(200,201).release().perform();
touchAction.longPress(100, 100, 3000);
touchAction.moveTo(100, 50).waitAction(1000);
touchAction.release();
touchAction.perform();
genMeth.eyesCheckWindow(eyes, "List Actions- Cancel signature", useEye, skipfailure);
genMeth.clickXpth(genMeth, "//UIAApplication[1]/UIAWindow[1]/UIATableView[1]/UIATableCell[8]");
touchAction.longPress(250, 250, 1000).moveTo(250, 150).release().perform();
genMeth.clickId(genMeth, DroidData.BTNdoneName);
genMeth.eyesCheckWindow(eyes, "List Actions- signature Set", useEye, skipfailure);
genMeth.clickId(genMeth, DroidData.BTNsubmit_ID);
*/
//Row Action with input type = Inline (adding a row to the all-parameters table) is covered separately by Actions_List_Inline below
//Verify Startup screen is open
genMeth.clickId(genMeth, DroidData.IconBack_Nav_Name);
genMeth.swipeUpMeizuShort(1000);
genMeth.eyesCheckWindow("Default app is open (Droid) - SQL Golden App", useEye, genMeth, skipfailure);
}
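/*
* Suggested helper (hypothetical - not referenced by the tests yet): wraps the
* "click if visible, otherwise swipe down and retry" pattern used above for the
* See All and "PopUp- AddRow" buttons. Assumes the GenMeth signatures used
* elsewhere in this class.
*/
private void clickWithSwipeRetry(String elementId, int maxSwipes) throws Exception {
for (int attempt = 0; attempt < maxSwipes; attempt++) {
if (genMeth.checkIsElementVisible(By.id(elementId))) {
genMeth.clickId(genMeth, elementId);
return;
}
genMeth.swipedownMeizuLong(1000);
}
//Last attempt: let clickId fail loudly if the element never became visible
genMeth.clickId(genMeth, elementId);
}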
@Test(enabled = true, testName = "Inline row action", retryAnalyzer = Retry.class, description = "Check the List tab",
groups = { "Sanity IOS" })
public void Actions_List_Inline() throws ParserConfigurationException, SAXException,
IOException, InterruptedException {
genMeth.clickId(genMeth, "List / Grid Actions");
genMeth.clickId(genMeth, DroidData.TabBarTitle_Name);
genMeth.clickId(genMeth, "List (Inline)");
// scroll down to the InLine- AddRow action
genMeth.swipedownMeizuLong(1000);
genMeth.clickId(genMeth, "InLine- AddRow");
Thread.sleep(4000);
genMeth.eyesCheckWindow(eyes, "Actions_List_Inline- Inline parameters default", useEye, skipfailure);
genMeth.sendId(genMeth, "This is default value", "1");
genMeth.clickId(genMeth, DroidData.BTNdoneName);
genMeth.clickId(genMeth, "Mobile");
genMeth.clickId(genMeth, "iPhone6");
genMeth.swipedownMeizuShort(1000);
genMeth.clickId(genMeth, "Keyboard (Cat 1)");
genMeth.clickId(genMeth, "QR");
genMeth.clickId(genMeth, DroidData.BTNdoneName);
genMeth.swipedownMeizuShort(1000);
genMeth.clickId(genMeth, "2");
genMeth.swipedownMeizuShort(1000);
//PSL with Variable
genMeth.clickId(genMeth, "7");
// image
genMeth.swipedownMeizuLong(1000);
genMeth.swipedownMeizuLong(1000);
genMeth.clickXpth(genMeth, "//UIAApplication[1]/UIAWindow[1]/UIATableView[1]/UIATableCell[50]/UIAStaticText[1]");
genMeth.clickXpth(genMeth, "//UIAApplication[1]/UIAWindow[1]/UIANavigationBar[1]/UIAButton[5]");
genMeth.clickId(genMeth, "PhotoCapture");
genMeth.clickId(genMeth, "Use Photo");
genMeth.clickId(genMeth, "Done");
genMeth.eyesCheckWindow(eyes, "Actions_List_Inline- Inline Image set", useEye, skipfailure);
genMeth.clickId(genMeth, DroidData.BTNsubmit_ID);
//Verify Startup screen is open
genMeth.clickId(genMeth, DroidData.IconBack_Nav_Name);
genMeth.swipeUpMeizuShort(1000);
genMeth.eyesCheckWindow("Default app is open (Droid) - SQL Golden App", useEye, genMeth, skipfailure);
}
@Test(enabled = true, testName = "List", retryAnalyzer = Retry.class, description = "Check the List tab",
groups = { "Sanity IOS" })
public void Actions_Grid_One_Layer() throws ParserConfigurationException, SAXException,
IOException, InterruptedException {
// go to List
genMeth.clickId(genMeth, "List / Grid Actions");
genMeth.clickId(genMeth, DroidData.TabBarTitle_Name);
genMeth.clickId(genMeth, "Grid - One Layer");
genMeth.eyesCheckWindow(eyes, "Actions_Grid_One_Layer- Grid One Layer main view", useEye, skipfailure);
//USER INPUT = Free Text (Description)
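//The description toggles between "Descrip 1" and "Descrip 2", so enter whichever value is not currently shown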
boolean isTextDisplayed = genMeth.checkIsElementVisible(By.id("Descrip 1"));
if (isTextDisplayed){
genMeth.clickId(genMeth, "Descrip 1");
genMeth.clickId(genMeth, DroidData.BTNkeyboardDelete);
genMeth.clickId(genMeth, DroidData.BtnkeyboardMoreNumbers);
genMeth.clickId(genMeth, "2");
}
else{
genMeth.clickId(genMeth, "Descrip 2");
genMeth.clickId(genMeth, DroidData.BTNkeyboardDelete);
genMeth.clickId(genMeth, DroidData.BtnkeyboardMoreNumbers);
genMeth.clickId(genMeth, "1");
}
genMeth.clickId(genMeth, DroidData.BTNdoneName);
Thread.sleep(10000);
genMeth.eyesCheckWindow(eyes, "Actions_Grid_One_Layer- Grid One Layer- description (free text)", useEye, skipfailure);
//USER INPUT = Simple List MB (Priority)
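//Priority toggles between 90 and 91 depending on the value currently displayed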
isTextDisplayed = genMeth.checkIsElementVisible(By.id("90"));
if (isTextDisplayed) {
genMeth.clickId(genMeth, "90");
genMeth.clickId(genMeth, "91");
}
else{
genMeth.clickId(genMeth, "91");
genMeth.clickId(genMeth, "90");
}
Thread.sleep(10000);
genMeth.eyesCheckWindow(eyes, "Actions_Grid_One_Layer- Grid One Layer- Priority (Simple List MB)", useEye, skipfailure);
//USER INPUT = Simple List DI (Status)
genMeth.clickId(genMeth, "6");
genMeth.eyesCheckWindow(eyes, "Actions_Grid_One_Layer- Grid One Layer- Status (Simple List DI)", useEye, skipfailure);
genMeth.clickId(genMeth, "Not Clear");
Thread.sleep(6000);
genMeth.eyesCheckWindow(eyes, "Actions_Grid_One_Layer- Grid One Layer- Status success (Simple List DI)", useEye, skipfailure);
genMeth.swipeRightMeizuLong(1000);
//USER INPUT = PSL (ItemID)
genMeth.clickId(genMeth, "21");
genMeth.clickId(genMeth, "Video card (Cat 1)");
Thread.sleep(10000);
genMeth.eyesCheckWindow(eyes, "Actions_Grid_One_Layer- Grid One Layer- ItemID (PSL)", useEye, skipfailure);
//USER INPUT = QR (KPI)
isTextDisplayed = genMeth.checkIsElementVisible(By.id("01"));
if (isTextDisplayed) {
genMeth.clickId(genMeth, "01");
genMeth.clickId(genMeth, DroidData.BTNClearName);
genMeth.sendXpth(genMeth, " //UIAApplication[1]/UIAWindow[1]/UIATextField[1]", "02");
genMeth.clickId(genMeth, DroidData.BTNdoneName);
}
else{
genMeth.clickId(genMeth, "02");
genMeth.clickId(genMeth, DroidData.BTNClearName);
genMeth.sendXpth(genMeth, " //UIAApplication[1]/UIAWindow[1]/UIATextField[1]", "01");
genMeth.clickId(genMeth, DroidData.BTNdoneName);
}
Thread.sleep(10000);
genMeth.eyesCheckWindow(eyes, "Actions_Grid_One_Layer- Grid One Layer- KPI (QR)", useEye, skipfailure);
//OutGrid(Row)
genMeth.clickXpth(genMeth, " //UIAApplication[1]/UIAWindow[1]/UIAScrollView[1]/UIAScrollView[1]/UIAScrollView[2]/UIAImage[13]");
genMeth.eyesCheckWindow(eyes, "Actions_Grid_One_Layer- Grid One Layer- Row parameters before insert", useEye, skipfailure);
genMeth.clickId(genMeth, "Free_Text1");
Thread.sleep(2000);
genMeth.sendXpth(genMeth, "//UIAApplication[1]/UIAWindow[1]/UIATextView[1]", "New row");
genMeth.clickId(genMeth, DroidData.BTNdoneName);
genMeth.clickId(genMeth, "QR_");
Thread.sleep(3000);
genMeth.sendXpth(genMeth, "//UIAApplication[1]/UIAWindow[1]/UIATextField[1]", "New QR");
genMeth.clickId(genMeth, DroidData.BTNdoneName);
genMeth.clickId(genMeth, "SL_Manual_List_");
genMeth.clickId(genMeth, "1");
genMeth.clickId(genMeth, "Device_Type_SL_DI_");
genMeth.clickId(genMeth, "Laptop");
genMeth.clickId(genMeth, "Device_Model_DL_");
genMeth.clickId(genMeth, "Lenovo");
genMeth.clickId(genMeth, "Items_By_Category_PSL");
genMeth.clickId(genMeth, "Power Supply (Cat 1)");
genMeth.eyesCheckWindow(eyes, "Actions_Grid_One_Layer- Grid One Layer- Row parameters after insert", useEye, skipfailure);
genMeth.swipedownMeizuLong(1000);
// image
genMeth.clickXpth(genMeth, "//UIAApplication[1]/UIAWindow[1]/UIATableView[1]/UIATableCell[8]/UIAStaticText[1]");
genMeth.clickXpth(genMeth,"//UIAApplication[1]/UIAWindow[1]/UIANavigationBar[1]/UIAButton[5]");
genMeth.clickId(genMeth, "PhotoCapture");
genMeth.clickId(genMeth, "Use Photo");
genMeth.clickId(genMeth, "Done");
genMeth.eyesCheckWindow(eyes, "Actions_Grid_One_Layer- Grid One Layer- Image set", useEye, skipfailure);
genMeth.clickId(genMeth, DroidData.BTNsubmit_ID);
Thread.sleep(5000);
genMeth.eyesCheckWindow(eyes, "Actions_Grid_One_Layer- Grid One Layer- after action executed", useEye, skipfailure);
// Verify Startup screen is open
genMeth.clickId(genMeth, DroidData.IconBack_Nav_Name);
genMeth.swipeUpMeizuShort(1000);
genMeth.eyesCheckWindow("Default app is open (Droid) - SQL Golden App", useEye, genMeth, skipfailure);
}
@Test(enabled = true, testName = "List", retryAnalyzer = Retry.class, description = "Check the Grid two layer actions",
groups = { "Sanity IOS" })
public void Actions_Grid_Two_Layer() throws ParserConfigurationException, SAXException,
IOException, InterruptedException {
// go to List
genMeth.clickId(genMeth, "List / Grid Actions");
genMeth.clickId(genMeth, DroidData.TabBarTitle_Name);
genMeth.clickId(genMeth, "Grid - Two Layers");
genMeth.eyesCheckWindow(eyes, "Actions_Grid_Two_Layer- Grid Two Layer main view", useEye, skipfailure);
genMeth.clickId(genMeth, "3");
//DL
genMeth.clickId(genMeth, "ItemID");
genMeth.clickId(genMeth, "Keyboard (Cat 1)");
genMeth.clickId(genMeth, DroidData.BTNdoneName);
Thread.sleep(10000);
genMeth.eyesCheckWindow(eyes, "Actions_Grid_Two_Layer- Grid Two Layers- ItemID SL", useEye, skipfailure);
//Row Action
genMeth.clickId(genMeth, "m");
genMeth.clickId(genMeth, "UpdateWithTableParam");
genMeth.clickId(genMeth, "DummyParam");
genMeth.clickId(genMeth, "1");
genMeth.clickId(genMeth, DroidData.BTNdoneName);
genMeth.clickId(genMeth, "TableParams");
genMeth.clickId(genMeth, "add icon table");
genMeth.clickId(genMeth, "Priority");
genMeth.clickId(genMeth, "1");
genMeth.clickId(genMeth, DroidData.BTNdoneName);
genMeth.clickId(genMeth, "Status");
genMeth.clickId(genMeth, "Open");
genMeth.clickId(genMeth, DroidData.BTNsave);
genMeth.eyesCheckWindow(eyes, "Actions_Grid_Two_Layer- Grid Two Layers- Table Parameter filled", useEye, skipfailure);
genMeth.clickId(genMeth, DroidData.BTNdoneName);
genMeth.eyesCheckWindow(eyes, "Actions_Grid_Two_Layer- Grid Two Layers- All parameters are filled", useEye, skipfailure);
genMeth.clickId(genMeth, DroidData.BTNsubmit_ID);
Thread.sleep(10000);
//Check the push notification
genMeth.eyesCheckWindow(eyes, "Actions_Grid_Two_Layer- Grid Two Layers- Action Success", useEye, skipfailure);
// Verify Startup screen is open
genMeth.clickId(genMeth, DroidData.BTNBackName);
genMeth.clickId(genMeth, DroidData.IconBack_Nav_Name);
genMeth.eyesCheckWindow("Default app is open (Droid) - SQL Golden App", useEye, genMeth, skipfailure);
}
@Test(enabled = true, groups = {"Sanity IOS"}, testName = "Sanity", description = "Slicer report")
public void slicerReport() throws InterruptedException, IOException{
// go to Slicer report
genMeth.swipedownMeizuLong(1000);
genMeth.swipedownMeizuLong(1000);
genMeth.clickId(genMeth, "Slicer report");
genMeth.clickId(genMeth, DroidData.BTNSlicer);
genMeth.clickId(genMeth, "BranchID");
genMeth.clickId(genMeth, "7");
genMeth.eyesCheckWindow(eyes, "Slicer Report- branchID selected", useEye, skipfailure);
genMeth.clickId(genMeth, DroidData.BTNBackName);
genMeth.clickId(genMeth, DroidData.BTNdoneName);
genMeth.eyesCheckWindow(eyes, "Slicer Report- List (BranchID=7)", useEye, skipfailure);
genMeth.clickId(genMeth, DroidData.TabBarTitle_Name);
genMeth.clickId(genMeth, "Slicer Grid");
genMeth.eyesCheckWindow(eyes, "Slicer Report- Grid (BranchID=7)", useEye, skipfailure);
genMeth.clickId(genMeth, DroidData.TabBarTitle_Name);
genMeth.clickId(genMeth, "Slicer Cover Flow");
genMeth.eyesCheckWindow(eyes, "Slicer Report- Cover Flow (BranchID=7)", useEye, skipfailure);
genMeth.clickId(genMeth, DroidData.TabBarTitle_Name);
genMeth.clickId(genMeth, "Slicer Dashboard");
genMeth.eyesCheckWindow(eyes, "Slicer Report- Dashboard (BranchID=7)", useEye, skipfailure);
genMeth.clickId(genMeth, DroidData.TabBarTitle_Name);
genMeth.clickId(genMeth, "Slicer Map");
// genMeth.clickName(genMeth, "Garden State Plaza, Paramus, NJ, 1 item");
genMeth.eyesCheckWindow(eyes, "Slicer Report- Map (BranchID=7)", useEye, skipfailure);
genMeth.clickId(genMeth, DroidData.TabBarTitle_Name);
genMeth.clickId(genMeth, "Slicer Cards");
genMeth.eyesCheckWindow(eyes, "Slicer Report- Cards (BranchID=7)", useEye, skipfailure);
genMeth.clickId(genMeth, DroidData.TabBarTitle_Name);
genMeth.clickId(genMeth, "Slicer News");
genMeth.eyesCheckWindow(eyes, "Slicer Report- News (BranchID=7)", useEye, skipfailure);
genMeth.clickId(genMeth, DroidData.TabBarTitle_Name);
genMeth.clickId(genMeth, "Slicer Bar Chart");
genMeth.eyesCheckWindow(eyes, "Slicer Report- Bar chart empty slicing", useEye, skipfailure);
genMeth.clickId(genMeth, DroidData.BTNSlicer);
genMeth.clickId(genMeth, "BranchID");
genMeth.clickId(genMeth, "7");
genMeth.clickId(genMeth, "Aventura Mall");
genMeth.clickId(genMeth, DroidData.BTNBackName);
genMeth.clickId(genMeth, DroidData.BTNdoneName);
genMeth.eyesCheckWindow(eyes, "Slicer Report- Bar chart Aventura Mall", useEye, skipfailure);
// Verify Startup screen is open
genMeth.clickId(genMeth, DroidData.IconBack_Nav_Name);
genMeth.swipeUpMeizuLong(1000);
genMeth.swipeUpMeizuLong(1000);
genMeth.eyesCheckWindow("Default app is open (Droid) - SQL Golden App", useEye, genMeth, skipfailure);
}
@Test(enabled = true, groups = {"Sanity IOS1"}, testName = "Sanity", description = "Slicer report")
public void slicerReportWithSecurityFilter() throws InterruptedException, IOException{
// go to Slicer report with security filter
genMeth.swipedownMeizuLong(1000);
genMeth.swipedownMeizuLong(1000);
genMeth.clickId(genMeth, "SlicerReport_Sfilter");
genMeth.clickId(genMeth, DroidData.BTNSlicer);
genMeth.clickId(genMeth, "BranchID");
genMeth.eyesCheckWindow(eyes, "Slicer Report with Security Filter - branchID 1,3 only", useEye, skipfailure);
genMeth.clickId(genMeth, "3");
genMeth.clickId(genMeth, DroidData.BTNBackName);
genMeth.clickId(genMeth, DroidData.BTNdoneName);
genMeth.eyesCheckWindow(eyes, "Slicer Report with Security Filter- List (BranchID=3)", useEye, skipfailure);
genMeth.clickId(genMeth, DroidData.TabBarTitle_Name);
genMeth.clickId(genMeth, "Slicer Grid");
genMeth.eyesCheckWindow(eyes, "Slicer Report with Security Filter- Grid (BranchID=3)", useEye, skipfailure);
genMeth.clickId(genMeth, DroidData.TabBarTitle_Name);
genMeth.clickId(genMeth, "Slicer Cover Flow");
genMeth.eyesCheckWindow(eyes, "Slicer Report with Security Filter- Cover Flow (BranchID=3)", useEye, skipfailure);
genMeth.clickId(genMeth, DroidData.TabBarTitle_Name);
genMeth.clickId(genMeth, "Slicer Dashboard");
genMeth.eyesCheckWindow(eyes, "Slicer Report with Security Filter- Dashboard (BranchID=3)", useEye, skipfailure);
genMeth.clickId(genMeth, DroidData.TabBarTitle_Name);
genMeth.clickId(genMeth, "Slicer Map");
genMeth.eyesCheckWindow(eyes, "Slicer Report with Security Filter- Map (BranchID=3)", useEye, skipfailure);
genMeth.clickId(genMeth, DroidData.TabBarTitle_Name);
genMeth.clickId(genMeth, "Slicer Cards");
genMeth.eyesCheckWindow(eyes, "Slicer Report with Security Filter- Cards (BranchID=3)", useEye, skipfailure);
genMeth.clickId(genMeth, DroidData.TabBarTitle_Name);
genMeth.clickId(genMeth, "Slicer News");
genMeth.eyesCheckWindow(eyes, "Slicer Report with Security Filter- News (BranchID=3)", useEye, skipfailure);
// Verify Startup screen is open
genMeth.clickId(genMeth, DroidData.IconBack_Nav_Name);
genMeth.swipeUpMeizuLong(1000);
genMeth.swipeUpMeizuLong(1000);
genMeth.eyesCheckWindow("Default app is open (Droid) - SQL Golden App", useEye, genMeth, skipfailure);
}
@Test(enabled = false, groups = { "Sanity IOS__" }, testName = "Sanity Tests", description = "login with bad/missing credentials", retryAnalyzer = Retry.class)
public void badCredentials() throws Exception, Throwable {
genMeth.signOutFromStartup(genMeth);
// Login with bad user name
genMeth.sendId( genMeth, DroidData.TEXTFIELDemailXpth, "bad name");
genMeth.sendId( genMeth, DroidData.TEXTFIELDpasswordXpth, DroidData.passwordProd);
genMeth.clickId( genMeth, DroidData.BTNloginID);
genMeth.isElementVisible(By.name("Login Failed"));
genMeth.clickId(genMeth, DroidData.BTNokName);
// Login with bad password
genMeth.sendId( genMeth, DroidData.TEXTFIELDemailXpth, DroidData.userQA);
genMeth.sendId( genMeth, DroidData.TEXTFIELDpasswordXpth, "bad password");
genMeth.clickId( genMeth, DroidData.BTNloginID);
genMeth.isElementVisible(By.name("Login Failed"));
genMeth.clickId(genMeth, DroidData.BTNokName);
// Login with bad user name & password
genMeth.sendId( genMeth, DroidData.TEXTFIELDemailXpth, "bad name");
genMeth.sendId( genMeth, DroidData.TEXTFIELDpasswordXpth, "bad password");
genMeth.clickId( genMeth, DroidData.BTNloginID);
genMeth.isElementVisible(By.name("Login Failed"));
genMeth.clickId(genMeth, DroidData.BTNokName);
// Login with empty Name
genMeth.clearId(genMeth, DroidData.TEXTFIELDemailXpth);
genMeth.sendId( genMeth, DroidData.TEXTFIELDpasswordXpth, DroidData.passwordQA);
genMeth.clickId( genMeth, DroidData.BTNloginID);
genMeth.isElementVisible(By.name("Bad Request"));
genMeth.clickId(genMeth, DroidData.BTNokName);
// Login with empty Password
genMeth.sendId( genMeth, DroidData.TEXTFIELDemailXpth, DroidData.userQA);
genMeth.clearId(genMeth, DroidData.TEXTFIELDpasswordXpth);
genMeth.clickId(genMeth, DroidData.BTNloginID);
genMeth.isElementVisible(By.name("Bad Request"));
genMeth.clickId(genMeth, DroidData.BTNokName);
// Login with empty Name & password
genMeth.clearId(genMeth, DroidData.TEXTFIELDemailXpth);
genMeth.clearId(genMeth, DroidData.TEXTFIELDpasswordXpth);
genMeth.clickId(genMeth, DroidData.BTNloginID);
genMeth.isElementVisible(By.name("Bad Request"));
genMeth.clickId(genMeth, DroidData.BTNokName);
// Forgot your password Negative (attempt to restore password with a non
// existing email)
// Forgot your password Positive (attempt to restore password with an
// existing email)
}
@Test(enabled = false, retryAnalyzer = Retry.class, testName = "Sanity Tests", description = "Switching from Foreground to Background and vice versa use cases",
groups = { "Sanity IOS__" })
public void foregroundBackgroundSwitch() throws Exception, Throwable {
//Take the app to background & foreground x times
//Take the app to sleep/lock x times
}
@Test(enabled = false, retryAnalyzer = Retry.class, testName = "connection lost handling", description = "Checking how the app owrks while connection is lost & back again", dependsOnGroups = { "Sanity*" },
groups = { "Sanity IOS__" })
public void connectionLost() throws InterruptedException, IOException,
ParserConfigurationException, SAXException {
}
@AfterSuite(alwaysRun = true)
public void tearDown() throws Exception {
try {
driver.removeApp(appIdentifier);
driver.quit();
/*
boolean isAppInstalled = driver.isAppInstalled(appIdentifier);
if (isAppInstalled) {
driver.removeApp(appIdentifier);
}
*/
} catch (Exception e) {
// Removing the app or quitting the driver can fail if the session has already ended; log it and continue so the report is still sent
e.printStackTrace();
}
SendResults sr = new SendResults("[email protected]",
"[email protected]", "TestNG results", "Test Results");
//sr.sendTestNGResult();
sr.sendRegularEmail();
/*
TestListenerAdapter tla = new TestListenerAdapter();
TestNG testng2 = new TestNG();
testng2.setTestClasses(new Class[] { SendReport.class });
testng2.setGroups("send mail");
testng2.addListener(tla);
testng2.run();
*/
}
/*
@Test (enabled = true ,testName = "Sample App Dashboard DailySales", retryAnalyzer = Retry.class, description = "Dashboard DailySales" ,
groups= {"Sanity IOSsample"} /*dependsOnMethods={"testLogin"})
public void sampleAplicationDashboardDailySales() throws ParserConfigurationException,
SAXException, IOException, InterruptedException {
//Logout from startup page
genMeth.signOutFromStartup(genMeth);
genMeth.clickId(genMeth, DroidData.BTNsampleAccountID);
genMeth.clickName(genMeth, DroidData.Icon_AllApps_Name);
genMeth.clickName(genMeth, "Operations 5.2");
//useEye = true;
// Login to sample app & open Dashboard report
genMeth.eyesCheckWindow(eyes, "SampleApp Main screen", useEye, skipfailure);
genMeth.clickName(genMeth, DroidData.DashboardName);
genMeth.eyesCheckWindow(eyes, "Dashboard Tab", useEye, skipfailure);
// genMeth.swipeRightIphone6Plus(1000);
genMeth.swipeRightIphone5(500);
genMeth.eyesCheckWindow(eyes, "World wide orders Tab", useEye, skipfailure);
genMeth.clickName(genMeth, DroidData.IconBack_Nav_Name);
genMeth.clickName(genMeth, DroidData.DashboardName);
// Open Sales Bar
// Change eye back to true once oleg fix the decimal issue
Thread.sleep(2000);
genMeth.clickId(genMeth, DroidData.SalesName);
//set Eye UI to false due to ordinal change
// useEye = true;
genMeth.eyesCheckWindow(eyes, "SampleApp Daily Sales Bar- Show All", useEye, skipfailure);
genMeth.clickId(genMeth, DroidData.ReturnsName);
genMeth.eyesCheckWindow(eyes, "SampleApp Daily Sales Bar- show Sales/Net Sales", useEye, skipfailure);
genMeth.clickId(genMeth, DroidData.SalesName);
genMeth.eyesCheckWindow(eyes, "SampleApp show Net Sales", useEye, skipfailure);
genMeth.clickId(genMeth, DroidData.NetSalesName);
genMeth.eyesCheckWindow(eyes, "SampleApp Daily Sales - show Empty", useEye, skipfailure);
genMeth.clickId(genMeth, DroidData.SalesName);
genMeth.clickId(genMeth, DroidData.ReturnsName);
genMeth.clickId(genMeth, DroidData.NetSalesName);
genMeth.eyesCheckWindow(eyes, "SampleApp Daily Sales Bar- Show All", useEye, skipfailure);
//Open Sales Pie
genMeth.clickId(genMeth, DroidData.DailySalesBarID);
genMeth.clickId(genMeth, DroidData.DailysalesPieID);
genMeth.eyesCheckWindow(eyes, "SampleApp Daily Sales Pie- Net Sales", useEye, skipfailure);
//genMeth.clickId(genMeth, DroidData.DestinyUSAID);
//genMeth.clickName(genMeth, DroidData.DestinyUSAID);
try {
driver.findElementById(DroidData.DestinyUSAID).click();
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
genMeth.eyesCheckWindow(eyes, "SampleApp Daily Sales Pie- Net Sales - Destiny USA", useEye, skipfailure);
genMeth.clickId(genMeth, DroidData.ReturnsName);
genMeth.eyesCheckWindow(eyes, "SampleApp Daily Sales Pie- Returns", useEye, skipfailure);
genMeth.clickId(genMeth, DroidData.SalesName);
genMeth.eyesCheckWindow(eyes, "SampleApp Daily Sales Pie- Sales", useEye, skipfailure);
genMeth.clickId(genMeth, DroidData.DailysalesPieID);
genMeth.clickId(genMeth, DroidData.Last12hoursID);
genMeth.eyesCheckWindow(eyes, "SampleApp Daily Sales Last 12 Months - Sparklines", useEye, skipfailure);
// Check slicer in Sparklines
genMeth.clickName(genMeth, DroidData.BTNSlicer);
genMeth.clickId(genMeth, DroidData.BranchID);
genMeth.clickId(genMeth, DroidData.DestinyUSAID);
genMeth.clickName(genMeth, DroidData.BTNBackName);
genMeth.clickName(genMeth, DroidData.BTNdoneName);
genMeth.eyesCheckWindow(eyes, "SampleApp Daily Sales Last 12 Months - Sparklines / Destiny USA", useEye, skipfailure);
//Clear the Slicer
genMeth.clickName(genMeth, DroidData.BTNSlicer);
genMeth.clickName(genMeth, DroidData.BTNClearName);
genMeth.clickName(genMeth, DroidData.BTNdoneName);
genMeth.eyesCheckWindow(eyes, "SampleApp Daily Sales Last 12 Months - Sparklines", useEye, skipfailure);
genMeth.clickName(genMeth, DroidData.IconBack_Nav_Name);
genMeth.clickName(genMeth, DroidData.IconBack_Nav_Name);
//Open Daily Sales from main screen
genMeth.clickId(genMeth, DroidData.DailySalesID);
genMeth.eyesCheckWindow(eyes, "SampleApp Daily Sales Bar (no back icon)- Show All", useEye, skipfailure);
genMeth.clickName(genMeth, DroidData.IconBack_Nav_Name);
genMeth.clickName(genMeth, "M");
}
@Test (enabled = true ,testName = "Sample Application", retryAnalyzer = Retry.class, description = "" ,
groups= {"Sanity IOSsample"} /*dependsOnMethods={"testLogin"})
public void sampleAplicationServiceCalls() throws ParserConfigurationException,
SAXException, IOException, InterruptedException {
//OPEN SERVICE CALLS
genMeth.signOutFromStartup(genMeth);
genMeth.clickId(genMeth, DroidData.BTNsampleAccountID);
genMeth.clickName(genMeth, DroidData.Icon_AllApps_Name);
genMeth.clickName(genMeth, "Operations 4.11");
//genMeth.clickName(genMeth, DroidData.DashboardName);
genMeth.clickId(genMeth, DroidData.ServiceCallsID);
genMeth.eyesCheckWindow(eyes, "SampleApp Service Calls", useEye, skipfailure);
// InGrid Action- First layer
//genMeth.clickName(genMeth, DroidData.BTNpriority_Name);
/* genMeth.clickXpth(genMeth, "//UIAApplication[1]/UIAWindow[1]/UIAScrollView[1]/UIACollectionView[1]/UIACollectionCell[1]/UIAStaticText[7]");
genMeth.clickName(genMeth, "1");
Thread.sleep(3000);
genMeth.swipedownIphone5(1000);
genMeth.swipeUpIphone5(1000);
genMeth.clickName(genMeth, DroidData.BTNpriority_Name);
genMeth.clickName(genMeth, "3");
Thread.sleep(5000);
genMeth.eyesCheckWindow(eyes, "SampleApp Service Calls- priority = 3", useEye, skipfailure);
//Open the Slicer
genMeth.clickName(genMeth, DroidData.BTNSlicer);
genMeth.clickId(genMeth, DroidData.BranchID);
genMeth.clickId(genMeth, DroidData.MallOfAmerica_Id);
genMeth.clickId(genMeth, DroidData.BTNBackName);
genMeth.clickId(genMeth, DroidData.BTNdoneName);
genMeth.eyesCheckWindow(eyes, "SampleApp Service Calls- Slicer Mall Of America", useEye, skipfailure);
/*
genMeth.clickXpth(genMeth, "//UIAApplication[1]/UIAWindow[1]/UIAScrollView[1]/UIATableView[1]/UIATableCell[6]/UIAStaticText[1]");
Thread.sleep(3000);
genMeth.clickName(genMeth, DroidData.BTNpriority_Name);
genMeth.clickName(genMeth, "1");
Thread.sleep(10000);
genMeth.clickName(genMeth, DroidData.BTNpriority_Name);
genMeth.clickName(genMeth, "4");
Thread.sleep(6000);
genMeth.eyesCheckWindow(eyes, "SampleApp Service Calls- priority = 4", useEye, skipfailure);
//Open the second layer
genMeth.clickXpth(genMeth, "//UIAApplication[1]/UIAWindow[1]/UIAScrollView[1]/UIACollectionView[1]/UIACollectionCell[1]/UIAButton[1]");
genMeth.eyesCheckWindow(eyes, "SampleApp Service Calls- Second layer", useEye, skipfailure);
//Mobile & Email Contact Details/Person
genMeth.clickXpth(genMeth, "//UIAApplication[1]/UIAWindow[1]/UIAScrollView[1]/UIAStaticText[16]");
genMeth.eyesCheckWindow(eyes, "SampleApp Service Calls- Mobile Contact Person -Cards", useEye, skipfailure);
genMeth.clickName(genMeth, DroidData.BTNCancelName);
genMeth.clickXpth(genMeth, "//UIAApplication[1]/UIAWindow[1]/UIAScrollView[1]/UIAStaticText[17]");
genMeth.eyesCheckWindow(eyes, "SampleApp Service Calls- Email Contact Person -Cards", useEye, skipfailure);
genMeth.clickName(genMeth, DroidData.BTNCancelName);
genMeth.clickName(genMeth, DroidData.BTNdeleteDraft_Name);
genMeth.scrollDown(driver);
genMeth.scrollDown(driver);
//Mobile / Email / Map / URL - Address section
//Phone
genMeth.clickXpth(genMeth, "//UIAApplication[1]/UIAWindow[1]/UIAScrollView[1]/UIAStaticText[20]");
genMeth.eyesCheckWindow(eyes, "SampleApp Service Calls- Mobile (Address Section)", useEye, skipfailure);
genMeth.clickName(genMeth, DroidData.BTNCancelName);
//Email
genMeth.clickXpth(genMeth, "//UIAApplication[1]/UIAWindow[1]/UIAScrollView[1]/UIAStaticText[21]");
genMeth.eyesCheckWindow(eyes, "SampleApp Service Calls- Email (Address Section)", useEye, skipfailure);
genMeth.clickName(genMeth, DroidData.BTNCancelName);
genMeth.clickName(genMeth, DroidData.BTNdeleteDraft_Name);
// URL
genMeth.clickXpth(genMeth, "//UIAApplication[1]/UIAWindow[1]/UIAScrollView[1]/UIAStaticText[22]");
genMeth.eyesCheckWindow(eyes, "SampleApp Service Calls- URL ((Address Section))", useEye, skipfailure);
genMeth.clickName(genMeth, DroidData.BTNdoneName);
//Map
genMeth.clickXpth(genMeth, "//UIAApplication[1]/UIAWindow[1]/UIAScrollView[1]/UIAStaticText[24]");
genMeth.eyesCheckWindow(eyes, "SampleApp Service Calls- Mobile Maps (Address Section)", useEye, skipfailure);
genMeth.clickName(genMeth, DroidData.BTNCancelName);
// Mobile / Email (Address Section)
//Mobile
genMeth.clickXpth(genMeth, "//UIAApplication[1]/UIAWindow[1]/UIAScrollView[1]/UIAStaticText[28]");
genMeth.eyesCheckWindow(eyes, "SampleApp Service Calls- Phone (Assigned To Section)", useEye, skipfailure);
genMeth.clickName(genMeth, DroidData.BTNCancelName);
// Email
genMeth.clickXpth(genMeth, "//UIAApplication[1]/UIAWindow[1]/UIAScrollView[1]/UIAStaticText[29]");
genMeth.eyesCheckWindow(eyes, "SampleApp Service Calls- Email (Assigned To Section)", useEye, skipfailure);
genMeth.clickName(genMeth, DroidData.BTNCancelName);
genMeth.clickName(genMeth, DroidData.BTNdeleteDraft_Name);
//Close Service Call Action
genMeth.clickName(genMeth, "Close Service Call");
genMeth.eyesCheckWindow(eyes, "SampleApp Service Calls- Close Service Calls - Action", useEye, skipfailure);
genMeth.clickName(genMeth, "Comments");
genMeth.sendXpth(genMeth, "//UIAApplication[1]/UIAWindow[1]/UIATextView[1]", "Meny The Best");
genMeth.clickName(genMeth, DroidData.BTNdoneName);
genMeth.clickName(genMeth, "Parts");
genMeth.clickName(genMeth, "Drawer");
genMeth.clickName(genMeth, "SolutionType");
genMeth.clickName(genMeth, "Replaced cash drawer");
genMeth.clickName(genMeth, "Status");
genMeth.clickName(genMeth, "Open");
genMeth.clickId(genMeth, DroidData.BTNsubmit_ID);
genMeth.eyesCheckWindow(eyes, "SampleApp Service Calls- Close Service Calls - After Action", useEye, skipfailure);
Thread.sleep(2000);
genMeth.clickName(genMeth, DroidData.BTNBackName);
genMeth.clickName(genMeth, DroidData.IconBack_Nav_Name);
}
@Test (enabled = true ,testName = "Sample Application", retryAnalyzer = Retry.class, description = "" ,
groups= {"Sanity IOSsample"} /*dependsOnMethods={"testLogin"})
public void sampleAplicationServiceCallsMapNewServicecall() throws ParserConfigurationException,
SAXException, IOException, InterruptedException {
//OPEN SERVICE CALLS Map
genMeth.signOutFromStartup(genMeth);
genMeth.clickId(genMeth, DroidData.BTNsampleAccountID);
genMeth.clickName(genMeth, DroidData.Icon_AllApps_Name);
genMeth.clickName(genMeth, "Operations 4.11");
//Open service calls map
genMeth.clickId(genMeth, DroidData.ServiceCallsMapID);
Thread.sleep(1000);
genMeth.clickXpth(genMeth, DroidData.MallofAmericaOnMapXpath);
genMeth.eyesCheckWindow(eyes, "SampleApp Service Calls Maps- Mall of America", useEye, skipfailure);
//Check if the Location popup is displayed
//genMeth.clickId(genMeth, DroidData.BTNmapphoneiconID);
genMeth.clickXpth(genMeth, "//UIAApplication[1]/UIAWindow[1]/UIATableView[1]/UIATableCell[1]/UIAButton[2]");
genMeth.eyesCheckWindow(eyes, "SampleApp Service Calls Maps- Mall of America - Phone Icon Option", useEye, skipfailure);
genMeth.clickName(genMeth, DroidData.BTNCancelName);
// genMeth.clickName(genMeth, DroidData.BTNMapCarIconName);
genMeth.clickXpth(genMeth, "//UIAApplication[1]/UIAWindow[1]/UIAButton[2]");
genMeth.eyesCheckWindow(eyes, "SampleApp Service Calls Maps- Mall of America - Car Direction", useEye, skipfailure);
genMeth.clickName(genMeth, DroidData.BTNCancelName);
//go back to the map tab via the back navigation icon
genMeth.clickXpth(genMeth, "//UIAApplication[1]/UIAWindow[1]/UIAButton[3]");
genMeth.eyesCheckWindow(eyes, "SampleApp Service Calls:5", useEye, skipfailure);
genMeth.clickName(genMeth, DroidData.IconBack_Nav_Name);
//Create new service call
genMeth.clickId(genMeth, DroidData.BTNnewServiceCallId);
genMeth.eyesCheckWindow(eyes, "New Service Call", useEye, skipfailure);
genMeth.clickId(genMeth, DroidData.BranchID);
genMeth.eyesCheckWindow(eyes, "Branch simple list", useEye, skipfailure);
genMeth.clickId(genMeth, DroidData.MallOfAmerica_Id);
genMeth.clickId(genMeth, "Assigned To");
genMeth.clickId(genMeth, "Jessica Blue");
genMeth.clickId(genMeth, "Category");
genMeth.clickId(genMeth, "Computer");
genMeth.clickId(genMeth, "Item");
genMeth.clickId(genMeth, "Memory card");
genMeth.clickId(genMeth, "Description");
genMeth.setEnglishKeyboard(genMeth);
genMeth.sendXpth(genMeth, "//UIAApplication[1]/UIAWindow[1]/UIATextView[1]", "Meny The Best");
genMeth.clickId(genMeth, DroidData.BTNdoneName);
genMeth.clickId(genMeth, DroidData.BTNpriority_Name);
genMeth.clickId(genMeth, "1");
genMeth.eyesCheckWindow(eyes, "New service call with parameters", useEye, skipfailure);
genMeth.clickId(genMeth, DroidData.BTNsubmit_ID);
Thread.sleep(2000);
//genMeth.eyesCheckWindow(eyes, "New Service Call", useEye, skipfailure);
genMeth.eyesCheckWindow(eyes, "New service call Actions collections +", useEye, skipfailure);
genMeth.clickXpth(genMeth, "//UIAApplication[1]/UIAWindow[1]/UIAScrollView[1]/UIACollectionView[1]/UIACollectionCell[1]/UIAStaticText[1]");
genMeth.eyesCheckWindow(eyes, "New Service Call", useEye, skipfailure);
genMeth.clickName(genMeth, DroidData.BTNCancelName);
genMeth.clickName(genMeth, DroidData.IconBack_Nav_Name);
}
@Test (enabled = true ,testName = "Sample App OrderLookup Operation", retryAnalyzer = Retry.class, description = "OrderLookup Operation" ,
groups= {"Sanity IOSsample"} /*dependsOnMethods={"testLogin"})
public void sampleAplicationOrderLookupOperation() throws ParserConfigurationException,
SAXException, IOException, InterruptedException {
//OPEN Order Lookup
genMeth.signOutFromStartup(genMeth);
genMeth.clickId(genMeth, DroidData.BTNsampleAccountID);
genMeth.clickName(genMeth, DroidData.Icon_AllApps_Name);
genMeth.clickName(genMeth, "Operations 4.11");
//Order lookup
genMeth.clickId(genMeth, DroidData.OrderLookup_ID);
Thread.sleep(3000);
genMeth.eyesCheckWindow(eyes, "Order Lookup parameters", useEye, skipfailure);
genMeth.clickName(genMeth, "Start Date");
MobileElement UIAPickerWheel = driver.findElementByXPath("//UIAApplication[1]/UIAWindow[1]/UIAPicker[1]/UIAPickerWheel[1]");
UIAPickerWheel.sendKeys("July");
genMeth.clickName(genMeth, DroidData.BTNdoneName);
genMeth.clickName(genMeth, DroidData.BTNsubmit_ID);
Thread.sleep(1000);
genMeth.eyesCheckWindow(eyes, "List of Orders", useEye, skipfailure);
genMeth.clickName(genMeth, DroidData.IconBack_Nav_Name);
//Operations
genMeth.clickXpth(genMeth, " //UIAApplication[1]/UIAWindow[1]/UIATableView[1]/UIATableCell[7]");
Thread.sleep(3000);
genMeth.eyesCheckWindow(eyes, "Inventory", useEye, skipfailure);
//Open grid second layer
genMeth.clickName(genMeth, DroidData.MallOfAmerica_Id);
genMeth.eyesCheckWindow(eyes, "Inventory second layer", useEye, skipfailure);
genMeth.clickName(genMeth, DroidData.IconBack_Nav_Name);
genMeth.clickName(genMeth, "Inventory");
genMeth.clickName(genMeth, "Orders");
/*genMeth.swipeRightIphone5(1000);
genMeth.swipeRightIphone5(1000);
genMeth.swipeRightIphone5(1000);
genMeth.eyesCheckWindow(eyes, "Orders", useEye, skipfailure);
genMeth.clickName(genMeth, "Orders");
genMeth.clickName(genMeth, "Place New Order");
genMeth.eyesCheckWindow(eyes, "Place New Order", useEye, skipfailure);
//Open the place new order
MobileElement El = driver.findElementByXPath(DroidData.BTNplaceNewOrder_Xpth);
El.click();
genMeth.eyesCheckWindow(eyes, "Place new order parameters", useEye, skipfailure);
// genMeth.clickName(genMeth, DroidData.BTNsubmit_ID);
// genMeth.eyesCheckWindow(eyes, "Place new order parameters missing", useEye, skipfailure);
// genMeth.clickName(genMeth, DroidData.BTNokName);
//Fill the parameters
genMeth.clickId(genMeth, DroidData.BranchID);
genMeth.clickName(genMeth, DroidData.MallOfAmerica_Id);
genMeth.clickName(genMeth, "ProductID");
// genMeth.accessToCameraHandle(genMeth);
Thread.sleep(1000);
genMeth.clickXpth(genMeth, "//UIAApplication[1]/UIAWindow[1]/UIATextField[1]");
Thread.sleep(1000);
genMeth.clickName(genMeth, "1");
genMeth.clickName(genMeth, DroidData.BTNdoneName);
Thread.sleep(2000);
genMeth.clickName(genMeth, "Quantity");
genMeth.clickName(genMeth, "1");
genMeth.clickName(genMeth, DroidData.BTNdoneName);
genMeth.eyesCheckWindow(eyes, "Place new order All parameters", useEye, skipfailure);
genMeth.clickName(genMeth, DroidData.BTNsubmit_ID);
genMeth.eyesCheckWindow(eyes, "Place New Order", useEye, skipfailure);
genMeth.clickName(genMeth, DroidData.IconBack_Nav_Name);
genMeth.clickName(genMeth, DroidData.Icon_AllApps_Name);
}
@Test (enabled = true ,testName = "Sample App Technicians", retryAnalyzer = Retry.class, description = "Technicians" ,
groups= {"Sanity IOSsample"} /*dependsOnMethods={"testLogin"})
public void sampleAplicationTechnicians() throws ParserConfigurationException,
SAXException, IOException, InterruptedException {
//OPEN Order Lookup
genMeth.signOutFromStartup(genMeth);
genMeth.clickId(genMeth, DroidData.BTNsampleAccountID);
genMeth.clickName(genMeth, DroidData.Icon_AllApps_Name);
genMeth.clickName(genMeth, "Operations 4.11");
// Technicians
genMeth.clickName(genMeth, "Technicians");
Thread.sleep(1000);
genMeth.eyesCheckWindow(eyes, "Technicians", useEye, skipfailure);
// Phone Icon
genMeth.clickName(genMeth, "Phone");
Thread.sleep(1000);
genMeth.eyesCheckWindow(eyes, "Technicians- Phone", useEye, skipfailure);
genMeth.clickName(genMeth, DroidData.BTNCancelName);
genMeth.clickName(genMeth, "Phone");
// Add to contacts
genMeth.clickName(genMeth, DroidData.BTNaddContact_Name);
// genMeth.accessToContactsHandle(genMeth);
genMeth.eyesCheckWindow(eyes, "Technicians- Added by SkyGiraffe screen", useEye, skipfailure);
genMeth.clickName(genMeth, DroidData.BTNBackName);
// Mail Icon
genMeth.clickName(genMeth, "Email");
Thread.sleep(3000);
genMeth.eyesCheckWindow(eyes, "Technicians- New Message screen", useEye, skipfailure);
genMeth.clickName(genMeth, DroidData.BTNCancelName);
genMeth.clickName(genMeth, DroidData.BTNdeleteDraft_Name);
// Map Icon
genMeth.clickName(genMeth, "Address");
genMeth.eyesCheckWindow(eyes, "Technicians- Address screen", useEye, skipfailure);
genMeth.clickName(genMeth, DroidData.BTNCancelName);
// Swipe along the technicians Cover Flow
genMeth.swipeRightIphone5(1000);
genMeth.eyesCheckWindow(eyes, "Technicians- cover flow John Grant", useEye, skipfailure);
}
*/
}
|
src/Native/SanityAndroid.java
|
package Native;
import org.testng.annotations.AfterSuite;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import io.appium.java_client.MobileBy;
import io.appium.java_client.MobileElement;
import io.appium.java_client.android.AndroidDriver;
import io.appium.java_client.android.AndroidKeyCode;
import io.appium.java_client.ios.IOSDriver;
import io.appium.java_client.pagefactory.WithTimeout;
import io.appium.java_client.pagefactory.iOSFindBy;
import java.io.IOException;
import java.util.List;
import java.util.concurrent.TimeUnit;
import javax.xml.parsers.ParserConfigurationException;
import org.openqa.selenium.By;
import org.openqa.selenium.WebDriverException;
import org.testng.ITestContext;
import org.testng.annotations.BeforeSuite;
import org.xml.sax.SAXException;
import com.applitools.eyes.Eyes;
//MobileElement e2; //the test will wait for this element for 20 seconds
public class SanityAndroid {
// Leftover locator scaffolding: an iOS-style annotation with a placeholder id in an Android suite.
// Kept commented out until a real Android locator is added.
// @WithTimeout(time = 30, unit = TimeUnit.SECONDS)
// @iOSFindBy(id = "relevant id need to be added here")
String currentDateFolder;
String webElementXmlLang;
String webElementXmlPath;
String StartServerPath;
String StopServerPath;
String appIdentifier;
Boolean skipfailure = true;
AndroidDriver<MobileElement> driver;
AndroidMethods genMeth = new AndroidMethods();
Eyes eyes = new Eyes();
Boolean useEye = true;
AndroidElements DroidData;
@BeforeSuite(alwaysRun = true)
public void setupBeforeSuite(ITestContext context) throws ParserConfigurationException, SAXException, IOException, InterruptedException, jdk.internal.org.xml.sax.SAXException {
// The Applitools API key must be configured for the Eyes checks to run.
//Set the tests configuration
StartServerPath = genMeth.getValueFromPropFile("StartServerPath");
StopServerPath = genMeth.getValueFromPropFile("StopServerPath");
webElementXmlPath = genMeth.getValueFromPropFile("webElementXmlPath");
webElementXmlLang = genMeth.getValueFromPropFile("webElementXmlLang");
appIdentifier = genMeth.getValueFromPropFile("appIdentifier");
//DroidData= new IosElements(webElementXmlLang, webElementXmlPath);
DroidData = genMeth.setElements(webElementXmlPath, webElementXmlLang);
driver = genMeth.setCapabilitiesAndroid(genMeth);
genMeth.cleanLoginDroid(genMeth, DroidData.userQA, DroidData.passwordQA);
}
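// --- Hypothetical suite teardown (sketch only) ---
// The org.testng AfterSuite import at the top of this file suggests a suite-level teardown;
// in case one is not already defined further down in this class, a minimal version would
// simply release the Appium session once the whole suite has finished. TestNG allows more
// than one @AfterSuite method, so this sketch does not conflict with an existing teardown.
@AfterSuite(alwaysRun = true)
public void quitDriverAfterSuite() {
	if (driver != null) {
		try {
			driver.quit();
		} catch (Exception e) {
			// Session may already be closed; nothing else to clean up
		}
	}
}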
@BeforeMethod (alwaysRun = true)
public void checkHomeScreen() throws InterruptedException, IOException, ParserConfigurationException, SAXException, jdk.internal.org.xml.sax.SAXException{
// Check that the client is still logged in and on the startup screen before each test (a reusable explicit-wait helper is sketched after this method)
if (driver == null) {
try {
// driver.removeApp(genMeth.getValueFromPropFile("appPackage"));
driver.quit();
} catch (Exception e) {
// swallow if fails
}
driver = genMeth.setCapabilitiesAndroid(genMeth);
DroidData = genMeth.setElements(webElementXmlPath, webElementXmlLang);
genMeth.cleanLoginDroid( genMeth, DroidData.userQA , DroidData.passwordQA );
}
else {
skipfailure = false;
genMeth.eyesCheckWindow("Default app is open (Droid) - SQL Golden Ap", useEye, genMeth, skipfailure);
/*
// genMeth.clickName(genMeth, "DashB/Cards/Employee");
String Startup_Screen = "//android.widget.TextView[@text='All Tabs']";
// String Startup_Screen = "//android.widget.LinearLayout[@text='SQL Golden App']";
genMeth.swipeUpMeizuLong(1000);
boolean StartUpScreenDisplay = genMeth.checkIsElementVisible( By.xpath(Startup_Screen));
if (StartUpScreenDisplay != true) {
try {
driver.resetApp();
driver.removeApp(appIdentifier);
driver.quit();
} catch (Exception e) {
// swallow if fails
}
driver = genMeth.setCapabilitiesAndroid(genMeth);
DroidData = genMeth.setElements(webElementXmlPath, webElementXmlLang);
genMeth.cleanLoginDroid( genMeth, DroidData.userQA, DroidData.passwordQA);
*/
}
}
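// --- Hypothetical helper (sketch only, not part of the original suite) ---
// The checks above rely on fixed Thread.sleep() calls and a commented-out visibility check.
// An explicit wait such as this sketch is usually more stable; it reuses the "All Tabs"
// locator from the commented-out code and assumes the Selenium 3.x support classes that
// ship with this java-client version. Adjust the locator and timeout to the real screen.
private boolean waitForStartupScreen(long timeoutInSeconds) {
	try {
		new org.openqa.selenium.support.ui.WebDriverWait(driver, timeoutInSeconds)
				.until(org.openqa.selenium.support.ui.ExpectedConditions.visibilityOfElementLocated(
						By.xpath("//android.widget.TextView[@text='All Tabs']")));
		return true;
	} catch (org.openqa.selenium.TimeoutException e) {
		// Startup screen did not appear within the timeout
		return false;
	}
}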
@Test(enabled = true, testName = "URL Tab", retryAnalyzer = Retry.class, description = "Check the URL tab",
groups = { "Sanity Android" })
public void Tabs_URL() throws ParserConfigurationException, SAXException,
IOException, InterruptedException {
// open the URL / News tab (URL Data Item)
genMeth.clickXpthName_TextView(genMeth, "URL / News");
Thread.sleep(10000);
genMeth.eyesCheckWindow("Tabs(Droid)- URL Data Item", useEye, genMeth, skipfailure);
//switch to the URL Constant data item
genMeth.clickId(genMeth, DroidData.TabBarTitle_Name);
genMeth.clickXpthName_CheckedTextView(genMeth, "URL Constant");
genMeth.eyesCheckWindow("Tabs (Droid) - URL Constant", useEye, genMeth, skipfailure);
//Go Back to Startup screen
genMeth.clickId(genMeth, DroidData.IconHome);
Thread.sleep(4000);
//Verify Startup screen is open
genMeth.eyesCheckWindow("Default app is open (Droid) - SQL Golden App", useEye, genMeth, skipfailure);
}
@Test(enabled = true, testName = "News Tab", retryAnalyzer = Retry.class, description = "Check the URL tab",
groups = { "Sanity Android" })
public void Tabs_News() throws ParserConfigurationException, SAXException,
IOException, InterruptedException {
// go to News
genMeth.clickXpthName_TextView(genMeth, "URL / News");
//switch to the News data item
genMeth.clickId(genMeth, DroidData.TabBarTitle_Name);
Thread.sleep(2000);
genMeth.clickXpthName_CheckedTextView(genMeth, "News");
genMeth.eyesCheckWindow("Tabs (Droid) - News", useEye, genMeth, skipfailure);
genMeth.clickXpthName_TextView(genMeth, "www.milliondollarhomepage.com");
Thread.sleep(10000);
genMeth.eyesCheckWindow("All Tabs (Droid)- The milliion $ home page", useEye, genMeth, skipfailure);
genMeth.clickId(genMeth, DroidData.IconHome);
genMeth.rotateLandscape();
genMeth.eyesCheckWindow("All Tabs- News Landscape", useEye, genMeth, skipfailure);
genMeth.rotatePortrait();
//Go Back to Startup screen
genMeth.clickId(genMeth, DroidData.IconHome);
//Verify Startup screen is open
genMeth.eyesCheckWindow("Default app is open (Droid) - SQL Golden App", useEye, genMeth, skipfailure);
}
@Test(enabled = true, testName = "Dashboard Tab", retryAnalyzer = Retry.class, description = "Check the URL tab",
groups = { "Sanity Android" })
public void Tabs_Dashboard() throws ParserConfigurationException, SAXException,
IOException, InterruptedException {
//Open Dashboard Tab
Thread.sleep(8000);
genMeth.clickXpthName_TextView(genMeth, "DashB/Cards/Employee");
Thread.sleep(10000);
genMeth.eyesCheckWindow("All Tabs (Droid)- Dashboard Default Layout", useEye, genMeth, skipfailure);
//Navigate to Employee directory tab
genMeth.clickXpthName_TextView(genMeth, "Service Call ID1");
Thread.sleep(10000);
genMeth.eyesCheckWindow("All Tabs (Droid)- Dashboard Default Layout- Navigate to Employee Directory", useEye, genMeth, skipfailure);
//Navigate back to Dashboard
genMeth.backDroidButton();
genMeth.clickXpthName_TextView(genMeth, "DashB/Cards/Employee");
genMeth.clickId(genMeth, DroidData.BTNSlicer);
genMeth.clickXpthName_TextView(genMeth, "Service Call ID1");
genMeth.clickXpthName_TextView(genMeth, "1");
genMeth.clickId(genMeth, "com.skygiraffe.operationaldata:id/filter_detail_navigation_left_btn");
genMeth.clickXpthName_TextView(genMeth, "Done");
genMeth.swipedownMeizuLong(1000);
genMeth.swipedownMeizuLong(1000);
genMeth.swipedownMeizuLong(1000);
genMeth.eyesCheckWindow("All Tabs (Droid)- Dashboard Advanced columns (Scroll down)", useEye, genMeth, skipfailure);
//Gauge
genMeth.clickId(genMeth, DroidData.TabBarTitle_Name);
genMeth.clickXpthName_CheckedTextView(genMeth, "Dash with Gauge");
genMeth.eyesCheckWindow("All Tabs (Droid)- Dashboard- Gauge Half", useEye, genMeth, skipfailure);
//Navigate
genMeth.clickId(genMeth, "com.skygiraffe.operationaldata:id/dashboard_item_bottom_label_container_view");
Thread.sleep(10000);
genMeth.eyesCheckWindow("All Tabs (Droid)- Dashboard- Navigate to Map By GPS", useEye, genMeth, skipfailure);
genMeth.backDroidButton();
genMeth.swipedownMeizuLong(1000);
genMeth.swipedownMeizuLong(1000);
genMeth.eyesCheckWindow("All Tabs (Droid)- Dashboard- Gauge Full/Solid", useEye, genMeth, skipfailure);
//Back to Startup screen
genMeth.clickId(genMeth, DroidData.IconHome);
}
@Test(enabled = true, testName = "Map,Dashboard, Charts Tabs", retryAnalyzer = Retry.class, description = "Check the URL tab",
groups = { "Sanity Android" })
public void Tabs_Map() throws ParserConfigurationException, SAXException,
IOException, InterruptedException {
//Open Map By Address Tab
genMeth.clickXpthName_TextView(genMeth, "Map");
Thread.sleep(5000);
genMeth.eyesCheckWindow("All Tabs (Droid)- Map By GPS", useEye, genMeth, skipfailure);
genMeth.clickId(genMeth, DroidData.TabBarTitle_Name);
genMeth.clickXpthName_CheckedTextView(genMeth, "Map By Address");
Thread.sleep(3000);
//genMeth.eyesCheckWindow(eyes, "All Tabs- Map By Address", useEye, skipfailure);
// genMeth.clickId(genMeth,"19501 Biscayne Blvd, Aventura, FL 33180. 19501 Biscayne Boulevard,Aventura, FL 33180.");
//genMeth.clickXpthName_TextView(genMeth, "19501 Biscayne Blvd, Aventura, FL 33180. 19501 Biscayne Boulevard,Aventura, FL 33180.");
By by = By.xpath("//android.view.View[@content-desc='19501 Biscayne Blvd, Aventura, FL 33180. 19501 Biscayne Boulevard,Aventura, FL 33180.']");
driver.findElement(by).click();
genMeth.eyesCheckWindow("All Tabs (Droid)- Map By Address- Aventura", useEye, genMeth, skipfailure);
//Driving Directions
genMeth.clickId(genMeth, DroidData.BTNdirection);
// genMeth.eyesCheckWindow("All Tabs (Droid)- Map By Address- Driving directions", useEye, genMeth, skipfailure);
genMeth.clickId(genMeth, DroidData.BTNCancelName);
//Phone
genMeth.clickId(genMeth, "com.skygiraffe.operationaldata:id/map_add_info_adress_container");
genMeth.clickId(genMeth, "com.skygiraffe.operationaldata:id/map_add_info_item_phone");
// genMeth.eyesCheckWindow("All Tabs (Droid)- Map By Address- Phone", useEye, genMeth, skipfailure);
genMeth.clickId(genMeth, DroidData.BTNCancelName);
//Navigation to URL tab
genMeth.clickId(genMeth, "com.skygiraffe.operationaldata:id/map_add_info_item_jump_to");
Thread.sleep(8000);
genMeth.eyesCheckWindow("Tabs (Droid)- URL Data Item", useEye, genMeth, skipfailure);
//Navigation Back
genMeth.backDroidButton();
//Open Map By GPS
Thread.sleep(10000);
genMeth.clickId(genMeth, DroidData.TabBarTitle_Name);
genMeth.clickXpthName_CheckedTextView(genMeth, "Map By GPS");
by = By.xpath("//android.view.View[@content-desc='40.918116,-74.076363. 1 Garden State Plaza Boulevard,Paramus, NJ 07652.']");
genMeth.clickBy(driver, genMeth, by);
Thread.sleep(3000);
genMeth.eyesCheckWindow("All Tabs (Droid)- Map By GPS- Press pin map", useEye, genMeth, skipfailure);
genMeth.clickId(genMeth, "com.skygiraffe.operationaldata:id/map_add_info_adress_container");
//All addresses
genMeth.eyesCheckWindow("All Tabs (Droid)- Map By GPS- All Addresses", useEye, genMeth, skipfailure);
//Back to Startup screen
genMeth.clickId(genMeth, DroidData.IconHome);
}
@Test(enabled = true, testName = "Map Charts Tabs", retryAnalyzer = Retry.class, description = "Check the URL tab",
groups = { "Sanity Android" })
public void Tabs_Chart() throws ParserConfigurationException, SAXException,
IOException, InterruptedException {
//Open Bar Chart
genMeth.clickXpthName_TextView(genMeth, "Chart/CoverF/ActionC");
Thread.sleep(4000);
genMeth.eyesCheckWindow("All Tabs (Droid)- Bar Chart", useEye, genMeth, skipfailure);
//Filter data
genMeth.clickXpthName_TextView(genMeth, "Sales");
genMeth.eyesCheckWindow("All Tabs (Droid)- Bar Chart- Returns & Net Sales", useEye, genMeth, skipfailure);
genMeth.clickXpthName_TextView(genMeth, "Returns");
genMeth.eyesCheckWindow("All Tabs (Droid)- Bar Chart- Net Sales", useEye, genMeth, skipfailure);
genMeth.clickXpthName_TextView(genMeth, "Sales");
genMeth.clickXpthName_TextView(genMeth, "Returns");
//genMeth.clickId(genMeth, "Net Sales");
genMeth.eyesCheckWindow("All Tabs (Droid)- Bar Chart", useEye, genMeth, skipfailure);
//Navigation to pie chart
genMeth.clickId(genMeth, "com.skygiraffe.operationaldata:id/column_chart_selected_title_nav_icon");
Thread.sleep(15000);
genMeth.eyesCheckWindow("All Tabs (Droid)- Bar Chart- Navigate to Dashboard", useEye, genMeth, skipfailure);
//Navigate back to the Bar chart
genMeth.backDroidButton();
Thread.sleep(5000);
genMeth.eyesCheckWindow("All Tabs (Droid)- Bar Chart", useEye, genMeth, skipfailure);
//Pie Chart
genMeth.clickId(genMeth, DroidData.TabBarTitle_Name);
genMeth.clickXpthName_CheckedTextView(genMeth, "Pie Chart");
genMeth.eyesCheckWindow("All Tabs (Droid)- Pie Chart", useEye, genMeth, skipfailure);
//Filter data Returns
genMeth.clickXpthName_TextView(genMeth, "Returns");
genMeth.eyesCheckWindow("All Tabs (Droid)- Pie Chart- Returns", useEye, genMeth, skipfailure);
//Filter data Net Sales
genMeth.clickXpthName_TextView(genMeth, "Net Sales");
genMeth.eyesCheckWindow("All Tabs (Droid)- Pie Chart- Net Sales", useEye, genMeth, skipfailure);
//Navigation to Bar chart
genMeth.clickId(genMeth, "com.skygiraffe.operationaldata:id/pie_chart_slicer_name");
genMeth.eyesCheckWindow("All Tabs (Droid)- Bar Chart", useEye, genMeth, skipfailure);
//Go Back to Startup screen
genMeth.clickId(genMeth, DroidData.IconHome);
}
@Test(enabled = true, testName = "Cover Flow", retryAnalyzer = Retry.class, description = "Check the Cover Flow tab",
groups = { "Sanity Android" })
public void Tabs_CoverFlow() throws ParserConfigurationException, SAXException,
IOException, InterruptedException {
// go to CoverFlow
genMeth.clickXpthName_TextView(genMeth, "Chart/CoverF/ActionC");
genMeth.clickId(genMeth, DroidData.TabBarTitle_Name);
genMeth.clickXpthName_CheckedTextView(genMeth, "Cover Flow");
Thread.sleep(4000);
genMeth.eyesCheckWindow("All Tabs (Droid)- Cover Flow", useEye, genMeth, skipfailure);
genMeth.swipeRightMeizuShort(1000);
genMeth.eyesCheckWindow("All Tabs (Droid)- Cover Flow- swipe John Grant", useEye, genMeth, skipfailure);
//Address
genMeth.clickXpthName_TextView(genMeth, "Address");
genMeth.eyesCheckWindow("All Tabs (Droid)- Cover Flow- Address", useEye, genMeth, skipfailure);
genMeth.clickId(genMeth, DroidData.BTNCancelName);
genMeth.swipedownMeizuLong(1000);
genMeth.swipedownMeizuLong(1000);
genMeth.eyesCheckWindow("All Tabs (Droid)- Cover Flow- Scroll Down", useEye, genMeth, skipfailure);
//Address mini map
genMeth.clickId(genMeth, "com.skygiraffe.operationaldata:id/list_template_map_address_icon");
genMeth.eyesCheckWindow("All Tabs (Droid)- Cover Flow- Address Mini Map", useEye, genMeth, skipfailure);
genMeth.clickId(genMeth, DroidData.BTNCancelName);
//Phone
genMeth.clickXpthName_TextView(genMeth, "Phone");
genMeth.eyesCheckWindow("All Tabs (Droid)- Cover Flow- Phone", useEye, genMeth, skipfailure);
genMeth.clickId(genMeth, DroidData.BTNCancelName);
//Email
genMeth.clickXpthName_TextView(genMeth, "Email");
genMeth.eyesCheckWindow("All Tabs (Droid)- Cover Flow- Email", useEye, genMeth, skipfailure);
genMeth.clickId(genMeth, DroidData.BTNCancelName);
//URL
genMeth.clickXpthName_TextView(genMeth, "URL");
genMeth.eyesCheckWindow("All Tabs (Droid)- Cover Flow- URL", useEye, genMeth, skipfailure);
genMeth.clickId(genMeth, DroidData.BTNsubmit);
Thread.sleep(4000);
genMeth.eyesCheckWindow("All Tabs (Droid)- Cover Flow- Go to URL", useEye, genMeth, skipfailure);
genMeth.backDroidButton();
genMeth.clickId(genMeth, DroidData.BTNCancelName);
// Landline
genMeth.clickXpthName_TextView(genMeth, "Landline");
genMeth.eyesCheckWindow("All Tabs (Droid)- Cover Flow- Landline", useEye, genMeth, skipfailure);
genMeth.clickId(genMeth, DroidData.BTNCancelName);
//Go to Startup screen
genMeth.backDroidButton();
}
@Test(enabled = true, testName = "List", retryAnalyzer = Retry.class, description = "Check the List tab",
groups = { "Sanity Android" })
public void Tabs_List_AdvancedColumns() throws ParserConfigurationException, SAXException,
IOException, InterruptedException {
// go to List
genMeth.clickXpthName_TextView(genMeth, "List / Grid");
Thread.sleep(3000);
genMeth.eyesCheckWindow("Tabs_List_AdvancedColumns (Droid)- List", useEye, genMeth, skipfailure);
//Phone
genMeth.clickXpthName_TextView(genMeth, "Call");
Thread.sleep(2000);
genMeth.eyesCheckWindow("Tabs_List_AdvancedColumns (Droid)- List Phone", useEye, genMeth, skipfailure);
//genMeth.eyesCheckWindow(eyes, "All Tabs- List Phone", useEye, skipfailure);
//Email
genMeth.clickXpthName_TextView(genMeth, "Email");
Thread.sleep(2000);
genMeth.eyesCheckWindow("Tabs_List_AdvancedColumns (Droid)- List Email", useEye, genMeth, skipfailure);
genMeth.clickId(genMeth, DroidData.BTNCancelName);
Thread.sleep(2000);
//URL
genMeth.clickXpthName_TextView(genMeth, "URL");
Thread.sleep(2000);
genMeth.eyesCheckWindow("Tabs_List_AdvancedColumns (Droid)- List URL", useEye, genMeth, skipfailure);
genMeth.clickId(genMeth, DroidData.BTNCancelName);
// Thread.sleep(3000);
// Landline
genMeth.clickXpthName_TextView(genMeth, "Landline");
Thread.sleep(2000);
genMeth.eyesCheckWindow("Tabs_List_AdvancedColumns (Droid)- List Landline", useEye, genMeth, skipfailure);
genMeth.clickXpthName_TextView(genMeth, "Landline");
//Address
genMeth.clickXpthName_TextView(genMeth, "Address");
Thread.sleep(2000);
genMeth.eyesCheckWindow("Tabs_List_AdvancedColumns (Droid)- List Address", useEye, genMeth, skipfailure);
//genMeth.eyesCheckWindow(eyes, "All Tabs- List Address", useEye, skipfailure);
genMeth.clickXpthName_TextView(genMeth, "Address");
//Mini Map
genMeth.swipedownMeizuShort(1000);
genMeth.clickId(genMeth, "com.skygiraffe.operationaldata:id/template_view_item_map_layout");
genMeth.clickId(genMeth, DroidData.BTNsubmit);
Thread.sleep(3000);
genMeth.backDroidButton();
genMeth.clickId(genMeth, DroidData.BTNCancelName);
genMeth.swipedownMeizuShort(1000);
genMeth.swipedownMeizuShort(1000);
genMeth.clickId(genMeth, DroidData.BTNseeAll_ID);
Thread.sleep(2000);
genMeth.eyesCheckWindow("Tabs_List_AdvancedColumns (Droid)- List See All", useEye, genMeth, skipfailure);
//Folder
genMeth.clickXpthName_TextView(genMeth, "Folder");
Thread.sleep(2000);
genMeth.eyesCheckWindow("Tabs_List_AdvancedColumns (Droid)- List Folder", useEye, genMeth, skipfailure);
genMeth.swipedownMeizuLong(1000);
Thread.sleep(2000);
genMeth.eyesCheckWindow("Tabs_List_AdvancedColumns (Droid)- List See All scroll down", useEye, genMeth, skipfailure);
//genMeth.eyesCheckWindow(eyes, "All Tabs- List See All scroll down", useEye, skipfailure);
genMeth.clickId(genMeth, DroidData.IconHome);
genMeth.clickId(genMeth, DroidData.IconHome);
Thread.sleep(2000);
//Verify Startup screen is open
genMeth.eyesCheckWindow("Default app is open (Droid) - SQL Golden App", useEye, genMeth, skipfailure);
}
@Test(enabled = true, testName = "Grid two layer Advanced", retryAnalyzer = Retry.class, description = "Check the Grid two layer tab",
groups = { "Sanity Android" })
public void Tabs_Grid_Two_Layers() throws ParserConfigurationException, SAXException,
IOException, InterruptedException {
// go to Grid
genMeth.clickXpthName_TextView(genMeth, "List / Grid");
genMeth.clickId(genMeth, DroidData.TabBarTitle_Name);
genMeth.clickXpthName_CheckedTextView(genMeth, "Grid - Two Layers");
Thread.sleep(3000);
genMeth.eyesCheckWindow("All Tabs- Grid two layers (Droid)- List Address", useEye, genMeth, skipfailure);
//Open the second layer
genMeth.clickXpthName_TextView(genMeth, "$200");
Thread.sleep(1000);
genMeth.eyesCheckWindow("All Tabs- Grid two layers (Droid)- Second layer", useEye, genMeth, skipfailure);
genMeth.swipedownMeizuShorter(1000);
genMeth.setLandscapeMode();
genMeth.eyesCheckWindow("All Tabs- Grid two layers (Droid)- Second layer - Landscape", useEye, genMeth, skipfailure);
genMeth.setPortraitMode();
//Phone
genMeth.clickXpthName_TextView(genMeth, "Phone");
Thread.sleep(1000);
genMeth.eyesCheckWindow("All Tabs- Grid two layers (Droid)- Phone options open", useEye, genMeth, skipfailure);
genMeth.clickXpthName_TextView(genMeth, "Phone");
// Landline
genMeth.clickXpthName_TextView(genMeth, "Landline");
Thread.sleep(1000);
genMeth.eyesCheckWindow("All Tabs- Grid two layers (Droid)- Landline", useEye, genMeth, skipfailure);
genMeth.clickXpthName_TextView(genMeth, "Landline");
//URL
genMeth.clickXpthName_TextView(genMeth, "URL");
Thread.sleep(2000);
genMeth.eyesCheckWindow("All Tabs- Grid two layers (Droid)- List URL", useEye, genMeth, skipfailure);
genMeth.clickId(genMeth, DroidData.BTNCancelName);
//Email
genMeth.clickXpthName_TextView(genMeth, "Email");
Thread.sleep(2000);
genMeth.eyesCheckWindow("All Tabs- Grid two layers (Droid)- List Email", useEye, genMeth, skipfailure);
genMeth.clickId(genMeth, DroidData.BTNCancelName);
Thread.sleep(2000);
//Address
genMeth.clickXpthName_TextView(genMeth, "Address");
Thread.sleep(2000);
genMeth.eyesCheckWindow("All Tabs- Grid two layers (Droid)- Address", useEye, genMeth, skipfailure);
//genMeth.eyesCheckWindow(eyes, "All Tabs- List Address", useEye, skipfailure);
genMeth.clickXpthName_TextView(genMeth, "Address");
//Mini Map
genMeth.swipedownMeizuShort(1000);
genMeth.clickId(genMeth, "com.skygiraffe.operationaldata:id/template_view_item_map_layout");
genMeth.eyesCheckWindow("All Tabs- Grid two layers (Droid)- Mini Map", useEye, genMeth, skipfailure);
genMeth.clickId(genMeth, DroidData.BTNsubmit);
Thread.sleep(3000);
genMeth.backDroidButton();
genMeth.clickId(genMeth, DroidData.BTNCancelName);
genMeth.clickId(genMeth, DroidData.IconHome);
genMeth.clickId(genMeth, DroidData.IconHome);
//Verify Startup screen is open
Thread.sleep(2000);
genMeth.eyesCheckWindow("Default app is open (Droid) - SQL Golden App", useEye, genMeth, skipfailure);
}
@Test(enabled = true, testName = "Grid one layer", retryAnalyzer = Retry.class, description = "Check the Grid one layer tab Advanced & navigation",
groups = { "Sanity Android123" })
public void Tabs_Grid_One_Layer_Advance_Navigation() throws ParserConfigurationException, SAXException,
IOException, InterruptedException {
//Note: the absolute-XPath lookups below for Address / Mobile Phone etc. keep failing intermittently (seems like an Appium bug); a shorter locator strategy is sketched after this method
// go to Grid
genMeth.clickXpthName_TextView(genMeth, "List / Grid");
genMeth.clickId(genMeth, DroidData.TabBarTitle_Name);
genMeth.clickXpthName_CheckedTextView(genMeth, "Grid - One Layer");
Thread.sleep(3000);
genMeth.eyesCheckWindow("All Tabs- Grid one layer (Droid) - (Advanced - Part 1)", useEye, genMeth, skipfailure);
// Address
genMeth.clickXpth(genMeth, "//android.view.View[1]/android.widget.FrameLayout[1]/android.widget.FrameLayout[1]/android.support.v4.widget.DrawerLayout[1]/android.widget.FrameLayout[1]/android.widget.FrameLayout[2]/android.widget.FrameLayout[1]/android.widget.FrameLayout[1]/android.support.v4.view.ViewPager[1]/android.widget.FrameLayout[1]/android.widget.LinearLayout[1]/android.widget.HorizontalScrollView[1]/android.widget.LinearLayout[1]/android.widget.FrameLayout[1]/android.widget.ListView[1]/android.widget.LinearLayout[1]/android.widget.LinearLayout[1]/android.widget.LinearLayout[4]");
genMeth.eyesCheckWindow("All Tabs- Grid one layer (Droid) - Address", useEye, genMeth, skipfailure);
genMeth.clickId(genMeth, DroidData.BTNCancelName);
// Mobile Phone
genMeth.clickXpth(genMeth, "//android.view.View[1]/android.widget.FrameLayout[1]/android.widget.FrameLayout[1]/android.support.v4.widget.DrawerLayout[1]/android.widget.FrameLayout[1]/android.widget.FrameLayout[2]/android.widget.FrameLayout[1]/android.widget.FrameLayout[1]/android.support.v4.view.ViewPager[1]/android.widget.FrameLayout[1]/android.widget.LinearLayout[1]/android.widget.HorizontalScrollView[1]/android.widget.LinearLayout[1]/android.widget.FrameLayout[1]/android.widget.ListView[1]/android.widget.LinearLayout[1]/android.widget.LinearLayout[1]/android.widget.LinearLayout[5]");
genMeth.eyesCheckWindow("All Tabs- Grid one layer (Droid) - Phone", useEye, genMeth, skipfailure);
genMeth.clickId(genMeth, DroidData.BTNCancelName);
genMeth.swipeRightMeizuLong(2000);
genMeth.swipeRightMeizuLong(2000);
genMeth.eyesCheckWindow("All Tabs- Grid one layer (Droid) - Swipe to the right", useEye, genMeth, skipfailure);
// MiniMap - Navigation to slicer report
genMeth.clickXpth(genMeth, "//android.view.View[1]/android.widget.FrameLayout[1]/android.widget.FrameLayout[1]/android.support.v4.widget.DrawerLayout[1]/android.widget.FrameLayout[1]/android.widget.FrameLayout[2]/android.widget.FrameLayout[1]/android.widget.FrameLayout[1]/android.support.v4.view.ViewPager[1]/android.widget.FrameLayout[1]/android.widget.LinearLayout[1]/android.widget.HorizontalScrollView[1]/android.widget.LinearLayout[1]/android.widget.FrameLayout[1]/android.widget.ListView[1]/android.widget.LinearLayout[1]/android.widget.LinearLayout[1]/android.widget.LinearLayout[2]/android.widget.ImageView[1]");
genMeth.eyesCheckWindow("All Tabs- Grid one layer (Droid) - Mini Map Navigation", useEye, genMeth, skipfailure);
genMeth.backDroidButton();
// Email
genMeth.clickXpth(genMeth, "//android.view.View[1]/android.widget.FrameLayout[1]/android.widget.FrameLayout[1]/android.support.v4.widget.DrawerLayout[1]/android.widget.FrameLayout[1]/android.widget.FrameLayout[2]/android.widget.FrameLayout[1]/android.widget.FrameLayout[1]/android.support.v4.view.ViewPager[1]/android.widget.FrameLayout[1]/android.widget.LinearLayout[1]/android.widget.HorizontalScrollView[1]/android.widget.LinearLayout[1]/android.widget.FrameLayout[1]/android.widget.ListView[1]/android.widget.LinearLayout[1]/android.widget.LinearLayout[1]/android.widget.LinearLayout[3]");
genMeth.eyesCheckWindow("All Tabs- Grid one layer (Droid) - Email", useEye, genMeth, skipfailure);
genMeth.clickId(genMeth, DroidData.BTNCancelName);
// URL
//genMeth.clickXpth(genMeth, "//android.view.View[1]/android.widget.FrameLayout[1]/android.widget.FrameLayout[1]/android.support.v4.widget.DrawerLayout[1]/android.widget.FrameLayout[1]/android.widget.FrameLayout[2]/android.widget.FrameLayout[1]/android.widget.FrameLayout[2]/android.support.v4.view.ViewPager[1]/android.widget.FrameLayout[1]/android.widget.LinearLayout[1]/android.widget.HorizontalScrollView[1]/android.widget.LinearLayout[1]/android.widget.FrameLayout[1]/android.widget.ListView[1]/android.widget.LinearLayout[1]/android.widget.LinearLayout[1]/android.widget.LinearLayout[4]");
genMeth.clickXpth(genMeth, "//android.view.View[1]/android.widget.FrameLayout[1]/android.widget.FrameLayout[1]/android.support.v4.widget.DrawerLayout[1]/android.widget.FrameLayout[1]/android.widget.FrameLayout[2]/android.widget.FrameLayout[1]/android.widget.FrameLayout[1]/android.support.v4.view.ViewPager[1]/android.widget.FrameLayout[1]/android.widget.LinearLayout[1]/android.widget.HorizontalScrollView[1]/android.widget.LinearLayout[1]/android.widget.FrameLayout[1]/android.widget.ListView[1]/android.widget.LinearLayout[1]/android.widget.LinearLayout[1]/android.widget.LinearLayout[4]/android.widget.ImageView[1]");
genMeth.eyesCheckWindow("All Tabs- Grid one layer (Droid) - URL", useEye, genMeth, skipfailure);
genMeth.clickId(genMeth, DroidData.BTNsubmit);
Thread.sleep(4000);
genMeth.eyesCheckWindow("All Tabs- Grid one layer (Droid) - Go To URL", useEye, genMeth, skipfailure);
genMeth.backDroidButton();
genMeth.clickId(genMeth, DroidData.BTNCancelName);
// Landline
genMeth.clickXpth(genMeth, "//android.view.View[1]/android.widget.FrameLayout[1]/android.widget.FrameLayout[1]/android.support.v4.widget.DrawerLayout[1]/android.widget.FrameLayout[1]/android.widget.FrameLayout[2]/android.widget.FrameLayout[1]/android.widget.FrameLayout[1]/android.support.v4.view.ViewPager[1]/android.widget.FrameLayout[1]/android.widget.LinearLayout[1]/android.widget.HorizontalScrollView[1]/android.widget.LinearLayout[1]/android.widget.FrameLayout[1]/android.widget.ListView[1]/android.widget.LinearLayout[1]/android.widget.LinearLayout[1]/android.widget.LinearLayout[5]/android.widget.ImageView[1]");
genMeth.eyesCheckWindow("All Tabs- Grid one layer (Droid) - Landline", useEye, genMeth, skipfailure);
genMeth.clickId(genMeth, DroidData.BTNCancelName);
genMeth.clickId(genMeth, DroidData.IconHome);
//Verify Startup screen is open
genMeth.eyesCheckWindow("Default app is open (Droid) - SQL Golden App", useEye, genMeth, skipfailure);
}
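// --- Hypothetical locator helper (suggestion only) ---
// Alternative to the brittle absolute XPaths used in the grid test above: when the target
// cells expose a content-desc, a UiAutomator selector is usually shorter and more stable.
// The description passed in is an example and must match the real widget on screen.
private MobileElement findByDescriptionContains(String description) {
	return driver.findElement(MobileBy.AndroidUIAutomator(
			"new UiSelector().descriptionContains(\"" + description + "\")"));
}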
@Test(enabled = true, testName = "Employee Directory", retryAnalyzer = Retry.class, description = "Check the Employee Directory tab",
groups = { "Sanity Android" })
public void Tabs_Employee_Directory() throws ParserConfigurationException, SAXException,
IOException, InterruptedException {
// go to Employee Directory tab
genMeth.clickXpthName_TextView(genMeth, "DashB/Cards/Employee");
genMeth.clickId(genMeth, DroidData.TabBarTitle_Name);
genMeth.clickXpthName_CheckedTextView(genMeth, "Employee Directory");
Thread.sleep(3000);
genMeth.eyesCheckWindow("All Tabs- Employee Directory (Droid) - Main",useEye, genMeth, skipfailure);
//Search an employee (Empty search)
genMeth.clickId(genMeth, DroidData.IconSearch);
genMeth.sendId(genMeth, DroidData.IconSearch , "no emplyees found");
genMeth.eyesCheckWindow("All Tabs- Employee Directory (Droid) - empty search",useEye, genMeth, skipfailure);
genMeth.deleteKey(17);
genMeth.clearId(genMeth, DroidData.IconSearch);
//Search an employee
genMeth.clickId(genMeth, DroidData.IconSearch);
genMeth.sendId(genMeth, DroidData.IconSearch , "Lane");
genMeth.eyesCheckWindow("All Tabs- Employee Directory (Droid) - search Specific employee",useEye, genMeth, skipfailure);
genMeth.backDroidButton();
//second layer
genMeth.clickXpthName_TextView(genMeth, "Lane R. Barlow");
Thread.sleep(2000);
genMeth.eyesCheckWindow("All Tabs- Employee Directory (Droid) - Second layer",useEye, genMeth, skipfailure);
// Phone
Thread.sleep(1000);
genMeth.clickXpthName_TextView(genMeth, "Phone");
Thread.sleep(2000);
genMeth.eyesCheckWindow("All Tabs- Employee Directory (Droid) - Phone",useEye, genMeth, skipfailure);
// Email
genMeth.clickXpthName_TextView(genMeth, "Email");
Thread.sleep(2000);
genMeth.eyesCheckWindow("All Tabs- Employee Directory (Droid) - Email",useEye, genMeth, skipfailure);
genMeth.clickId(genMeth, DroidData.BTNCancelName);
//Map
genMeth.swipedownMeizuShorter(1000);
genMeth.clickXpthName_TextView(genMeth, "Address First");
Thread.sleep(1000);
genMeth.eyesCheckWindow("All Tabs- Employee Directory (Droid) - Address First",useEye, genMeth, skipfailure);
// Mini Map
genMeth.swipedownMeizuShorter(1000);
genMeth.clickId(genMeth, "com.skygiraffe.operationaldata:id/template_view_item_map_layout");
Thread.sleep(1000);
genMeth.eyesCheckWindow("All Tabs- Employee Directory (Droid) - Address second",useEye, genMeth, skipfailure);
genMeth.clickId(genMeth, DroidData.BTNCancelName);
// URL
genMeth.swipedownMeizuLong(1000);
genMeth.clickXpthName_TextView(genMeth, "google.com");
Thread.sleep(1000);
genMeth.eyesCheckWindow("All Tabs- Employee Directory (Droid) - URL",useEye, genMeth, skipfailure);
genMeth.clickId(genMeth, DroidData.BTNsubmit);
genMeth.backDroidButton();
genMeth.clickId(genMeth, DroidData.BTNCancelName);
//Social Networks - Facebook
genMeth.clickId(genMeth, "com.skygiraffe.operationaldata:id/employee_directory_detail_person_social_net_facebook");
Thread.sleep(5000);
genMeth.eyesCheckWindow("All Tabs- Employee Directory (Droid) - Facebook",useEye, genMeth, skipfailure);
genMeth.backDroidButton();
//Twitter
genMeth.clickId(genMeth, "com.skygiraffe.operationaldata:id/employee_directory_detail_person_social_net_twitter");
Thread.sleep(5000);
genMeth.eyesCheckWindow("All Tabs- Employee Directory (Droid) - Twitter",useEye, genMeth, skipfailure);
genMeth.backDroidButton();
//LinkedIn
genMeth.clickId(genMeth, "com.skygiraffe.operationaldata:id/employee_directory_detail_person_social_net_linkidin");
Thread.sleep(3000);
genMeth.eyesCheckWindow("All Tabs- Employee Directory (Droid) - LinkedIn",useEye, genMeth, skipfailure);
genMeth.backDroidButton();
//Google+
genMeth.clickId(genMeth, "com.skygiraffe.operationaldata:id/employee_directory_detail_person_social_net_google_plus");
Thread.sleep(5000);
genMeth.eyesCheckWindow("All Tabs- Employee Directory (Droid) - Google+",useEye, genMeth, skipfailure);
genMeth.backDroidButton();
//Navigation
genMeth.clickXpthName_TextView(genMeth, "First_Name");
Thread.sleep(8000);
genMeth.eyesCheckWindow("All Tabs- Employee Directory (Droid) - Navigation to Param report ed",useEye, genMeth, skipfailure);
genMeth.backDroidButton();
genMeth.eyesCheckWindow("All Tabs- Employee Directory (Droid) - Back from navigation",useEye, genMeth, skipfailure);
//No Social Networks available
genMeth.sendId(genMeth, DroidData.IconSearch , "Callum R. Aguirre");
Thread.sleep(1000);
genMeth.eyesCheckWindow("All Tabs- Employee Directory (Droid) - No Social Networks",useEye, genMeth, skipfailure);
genMeth.backDroidButton();
//No Google+
genMeth.sendId(genMeth, DroidData.IconSearch , "Caldwell Alexander");
genMeth.swipedownMeizuLong(1000);
Thread.sleep(1000);
genMeth.eyesCheckWindow("All Tabs- Employee Directory (Droid) - No Google+",useEye, genMeth, skipfailure);
//Back to Startup screen
genMeth.backDroidButton();
genMeth.clickId(genMeth, DroidData.IconHome);
//Press info for the app
genMeth.clickId(genMeth, "com.skygiraffe.operationaldata:id/left_menu_child_info_icon");
Thread.sleep(1000);
genMeth.eyesCheckWindow("All Tabs- Employee Directory (Droid) - Golden App info screen",useEye, genMeth, skipfailure);
genMeth.clickXpthName_TextView(genMeth, "SQL Golden App");
genMeth.clickXpthName_TextView(genMeth, "SQL Golden App");
//Verify Startup screen is open
Thread.sleep(1000);
genMeth.eyesCheckWindow("Default app is open (Droid) - SQL Golden App", useEye, genMeth, skipfailure);
}
@Test(enabled = true, testName = "Parameterized report Grid", retryAnalyzer = Retry.class, description = "Check the Employee Directory tab",
groups = { "Sanity Android" })
public void Param_Report_Grid() throws ParserConfigurationException, SAXException,
IOException, InterruptedException {
// go to parameterized report - Grid tab
genMeth.swipedownMeizuShort(1000);
// open the Param Report Grid tab
genMeth.clickXpthName_TextView(genMeth, "Param Report Grid");
Thread.sleep(4000);
genMeth.eyesCheckWindow("Param Report Grid (Droid)- add Parameters",useEye, genMeth, skipfailure);
//Attempt to submit while mandatory is missing
genMeth.clickId(genMeth, "com.skygiraffe.operationaldata:id/parameterized_fragment_submit_button");
genMeth.eyesCheckWindow("Param Report Grid (Droid)- Mandatory field is missing",useEye, genMeth, skipfailure);
//Insert parameters
genMeth.clickId(genMeth, "com.skygiraffe.operationaldata:id/app_message_dialog_cancel_button");
genMeth.clickXpthName_TextView(genMeth, "SL-Device Types");
genMeth.eyesCheckWindow("Param Report Grid (Droid)- SL param",useEye, genMeth, skipfailure);
genMeth.clickXpthName_TextView(genMeth, "Laptop");
genMeth.clickXpthName_TextView(genMeth, "PSL- Device Model");
genMeth.eyesCheckWindow("Param Report Grid (Droid)- PSL param",useEye, genMeth, skipfailure);
genMeth.clickXpthName_TextView(genMeth, "PSL- Device Model");
genMeth.clickXpthName_TextView(genMeth, "Lenovo");
genMeth.eyesCheckWindow("Param Report Grid (Droid)- All params were filled",useEye, genMeth, skipfailure);
genMeth.clickId(genMeth, "com.skygiraffe.operationaldata:id/parameterized_fragment_submit_button");
Thread.sleep(2000);
genMeth.eyesCheckWindow("Param Report Grid (Droid)- Grid first layer",useEye, genMeth, skipfailure);
//Go To second layer
genMeth.clickXpthName_TextView(genMeth, "Laptop");
genMeth.eyesCheckWindow("Param Report Grid (Droid)- Grid second layer",useEye, genMeth, skipfailure);
//Back to startup screen
genMeth.clickId(genMeth, DroidData.IconHome);
genMeth.clickId(genMeth, DroidData.IconHome);
genMeth.swipeUpMeizuLong(1000);
genMeth.swipeUpMeizuLong(1000);
genMeth.eyesCheckWindow("Default app is open (Droid) - SQL Golden App", useEye, genMeth, skipfailure);
}
@Test(enabled = true, testName = "Parameterized report With all variables", retryAnalyzer = Retry.class, description = "Check the Employee Directory tab",
groups = { "Sanity Android" })
public void Param_Report_AllVariables() throws ParserConfigurationException, SAXException,
IOException, InterruptedException {
genMeth.swipedownMeizuShort(1000);
genMeth.clickXpthName_TextView(genMeth, "Param Variables only");
genMeth.eyesCheckWindow("Param Report with All Variables (Droid) - SQL Golden App",useEye, genMeth, skipfailure);
//Back to startup screen
genMeth.clickId(genMeth, DroidData.IconHome);
// genMeth.clickId(genMeth, DroidData.IconBack_Nav_Name);
genMeth.swipeUpMeizuLong(1000);
genMeth.swipeUpMeizuLong(1000);
genMeth.eyesCheckWindow("Default app is open (Droid) - SQL Golden App", useEye, genMeth, skipfailure);
}
@Test(enabled = true, testName = "Parameterized report List", retryAnalyzer = Retry.class, description = "Check the Employee Directory tab",
groups = { "Sanity Android" })
public void Param_Report_List() throws ParserConfigurationException, SAXException,
IOException, InterruptedException {
// go to parameterized report - List tab
genMeth.swipedownMeizuShort(1000);
genMeth.swipedownMeizuShort(1000);
genMeth.clickXpthName_TextView(genMeth, "Param Report List");
genMeth.eyesCheckWindow("Param Report List (Droid)- add Parameters", useEye, genMeth, skipfailure);
//Attempt to submit while mandatory is missing
genMeth.clickId(genMeth, "com.skygiraffe.operationaldata:id/parameterized_fragment_submit_button");
genMeth.eyesCheckWindow("Param Report List (Droid)- Mandatory field is missing", useEye, genMeth, skipfailure);
genMeth.clickId(genMeth, "com.skygiraffe.operationaldata:id/app_message_dialog_cancel_button");
//Insert parameters
genMeth.clickXpthName_TextView(genMeth, "FreeText (Priority)");
driver.pressKeyCode(AndroidKeyCode.KEYCODE_1);
genMeth.clickId(genMeth, "com.skygiraffe.operationaldata:id/action_submit_button");
genMeth.clickXpthName_TextView(genMeth, "SL_ML (Priority)");
genMeth.eyesCheckWindow("Param Report List (Droid)- SL ML Priority", useEye, genMeth, skipfailure);
genMeth.clickXpthName_TextView(genMeth, "2");
//genMeth.clickId(genMeth, "2");
genMeth.eyesCheckWindow("Param Report List (Droid)- All params were filled", useEye, genMeth, skipfailure);
genMeth.clickId(genMeth, "com.skygiraffe.operationaldata:id/parameterized_fragment_submit_button");
Thread.sleep(2000);
genMeth.eyesCheckWindow("Param Report List (Droid)- FreeText Priority = 1", useEye, genMeth, skipfailure);
genMeth.clickId(genMeth, DroidData.TabBarTitle_Name);
genMeth.clickXpthName_CheckedTextView(genMeth, "SL_ML (Priority)");
genMeth.eyesCheckWindow("Param Report List (Droid)- FreeText Priority = 2", useEye, genMeth, skipfailure);
genMeth.clickId(genMeth, DroidData.TabBarTitle_Name);
genMeth.clickXpthName_CheckedTextView(genMeth, "SC(Up_Date<=MobDate)");
genMeth.eyesCheckWindow("Param Report List (Droid)- SC(Up_Date<=MobDate)", useEye, genMeth, skipfailure);
//Back to startup screen
genMeth.clickId(genMeth, DroidData.IconHome);
genMeth.swipeUpMeizuLong(1000);
genMeth.swipeUpMeizuLong(1000);
genMeth.eyesCheckWindow("Default app is open (Droid) - SQL Golden App", useEye, genMeth, skipfailure);
}
@Test(enabled = true, groups = { "Sanity Android1" }, testName = "Param_Report_DL_Dashboard", retryAnalyzer = Retry.class, description = "Check the Employee Directory tab")
public void Param_Report_DL_Dashboard()
throws ParserConfigurationException, SAXException, IOException,
InterruptedException {
// go to parameterized report dashboard - DL- Device Info tab
genMeth.swipedownMeizuLong(1000);
genMeth.clickXpthName_TextView(genMeth, "Param DL-Dashboard");
genMeth.eyesCheckWindow("Param Report Dashboard DL (Droid)- add Parameters",useEye, genMeth, skipfailure);
//Insert parameters
genMeth.clickXpthName_TextView(genMeth, "SL- Devices Type");
genMeth.eyesCheckWindow("Param Report Dashboard DL (Droid)- SL param",useEye, genMeth, skipfailure);
genMeth.clickXpthName_TextView(genMeth, "Laptop");
Thread.sleep(2000);
genMeth.clickXpthName_TextView(genMeth, "DL- Device Model");
genMeth.eyesCheckWindow("Param Report Dashboard DL (Droid)- DL param",useEye, genMeth, skipfailure);
genMeth.clickXpthName_TextView(genMeth, "Lenovo");
genMeth.eyesCheckWindow("Param Report Dashboard DL (Droid)- All params were filled",useEye, genMeth, skipfailure);
genMeth.clickId(genMeth, "com.skygiraffe.operationaldata:id/parameterized_fragment_submit_button");
//genMeth.clickId(genMeth, DroidData.BTNsubmit_ID);
Thread.sleep(5000);
genMeth.eyesCheckWindow("Param Report Dashboard DL (Droid)- Dashboard tab",useEye, genMeth, skipfailure);
//Navigate to Dashboard tab
genMeth.clickXpthName_TextView(genMeth, "Device Type Name (ParentName)");
Thread.sleep(5000);
genMeth.clickId(genMeth, "com.skygiraffe.operationaldata:id/parameterized_fragment_submit_button");
genMeth.eyesCheckWindow("Param Report Dashboard DL (Droid)- Navigate to SL- Devices by Type tab",useEye, genMeth, skipfailure);
genMeth.backDroidButton();
genMeth.backDroidButton();
genMeth.eyesCheckWindow("Param Report Dashboard DL (Droid)- Dashboard tab",useEye, genMeth, skipfailure);
//Back to startup screen
genMeth.clickId(genMeth, DroidData.IconHome);
genMeth.swipeUpMeizuLong(1000);
genMeth.swipeUpMeizuLong(1000);
genMeth.eyesCheckWindow("Default app is open (Droid) - SQL Golden App", useEye, genMeth, skipfailure);
}
@Test(enabled = true, groups = { "Sanity IOS"}, testName = "Param_Report_DL_Dashboard", retryAnalyzer = Retry.class, description = "Check the Employee Directory tab")
public void Param_Report_CoverFlow()
throws ParserConfigurationException, SAXException, IOException,
InterruptedException {
// go to the parameterized Cover Flow report tab
genMeth.swipedownMeizuLong(1000);
genMeth.clickId(genMeth, "Param Rep Cover Flow");
genMeth.eyesCheckWindow(eyes, "Param Rep Cover Flow - Parameters", useEye, skipfailure);
//Insert parameters
genMeth.clickId(genMeth, "Insert Gender (F or M)");
genMeth.eyesCheckWindow(eyes, "Param Rep Cover Flow - QR", useEye, skipfailure);
genMeth.clickId(genMeth, "Insert Gender (F or M)");
genMeth.clickId(genMeth, DroidData.BTNClearName);
genMeth.clickId(genMeth, "m");
genMeth.clickId(genMeth, DroidData.BTNdoneName);
Thread.sleep(1000);
genMeth.eyesCheckWindow(eyes, "Param Rep Cover Flow - Males", useEye, skipfailure);
//Go To cover flow tab by const (females)
genMeth.clickId(genMeth, DroidData.TabBarTitle_Name);
genMeth.clickId(genMeth, "Const-Female Only");
genMeth.eyesCheckWindow(eyes, "Param Rep Cover Flow - Female", useEye, skipfailure);
//Back to startup screen
genMeth.clickId(genMeth, DroidData.IconBack_Nav_Name);
genMeth.swipeUpMeizuLong(1000);
genMeth.eyesCheckWindow("Default app is open (Droid) - SQL Golden App", useEye, genMeth, skipfailure);
}
@Test(enabled = true, groups = { "Sanity IOS" }, testName = "Param_Report_DL_Dashboard", retryAnalyzer = Retry.class, description = "Check the Employee Directory tab")
public void Param_Report_Chart()
throws ParserConfigurationException, SAXException, IOException,
InterruptedException {
// go to parameterized report- Param report chart tab
genMeth.swipedownMeizuLong(1000);
genMeth.clickId(genMeth, "Param Report Chart");
Thread.sleep(2000);
genMeth.eyesCheckWindow(eyes, "Param Rep Chart - Parameters", useEye, skipfailure);
//Insert parameters
genMeth.clickId(genMeth, "Choose Value");
genMeth.clickId(genMeth, "Mall of America");
genMeth.eyesCheckWindow(eyes, "Param Rep Chart - SL ", useEye, skipfailure);
genMeth.clickId(genMeth, DroidData.BTNdoneName);
genMeth.eyesCheckWindow(eyes, "Param Rep Chart - SL Mall of america Bar", useEye, skipfailure);
genMeth.clickId(genMeth, DroidData.BTNsubmit_ID);
Thread.sleep(3000);
genMeth.eyesCheckWindow(eyes, "Param Rep Chart - SL Mall of america in bar chart", useEye, skipfailure);
//Navigate to param report
genMeth.clickXpth(genMeth, "//UIAApplication[1]/UIAWindow[1]/UIAScrollView[1]/UIAImage[2]");
genMeth.eyesCheckWindow(eyes, "Param Rep Chart - Param report map - parameters screen", useEye, skipfailure);
genMeth.clickId(genMeth, DroidData.BTNCancelName);
//Go To Pie tab
genMeth.clickId(genMeth, DroidData.TabBarTitle_Name);
genMeth.clickId(genMeth, "SL-SalesbyBranch-Pie");
genMeth.eyesCheckWindow(eyes, "Param Rep Chart - SL Mall of america Pie", useEye, skipfailure);
genMeth.clickId(genMeth, "Returns");
genMeth.eyesCheckWindow(eyes, "Param Rep Chart - SL Mall of america Pie- Returnes", useEye, skipfailure);
//Back to startup screen
genMeth.clickId(genMeth, DroidData.IconBack_Nav_Name);
genMeth.swipeUpMeizuLong(1000);
genMeth.eyesCheckWindow("Default app is open (Droid) - SQL Golden App", useEye, genMeth, skipfailure);
}
@Test(enabled = true, groups = { "Sanity IOS" }, testName = "Param_Report_DL_Dashboard", retryAnalyzer = Retry.class, description = "Check the Employee Directory tab")
public void Param_Report_EmployeeDirectoryD()
throws ParserConfigurationException, SAXException, IOException,
InterruptedException {
// go to parameterized report - Employee Directory tab
genMeth.swipedownMeizuLong(1000);
genMeth.swipedownMeizuLong(1000);
genMeth.clickId(genMeth, "Param Report ED");
Thread.sleep(2000);
genMeth.eyesCheckWindow(eyes, "Param Rep ED - Parameters", useEye, skipfailure);
//Insert parameters
genMeth.clickId(genMeth, "Choose Value");
genMeth.clickId(genMeth, "Female");
genMeth.eyesCheckWindow(eyes, "Param Rep ED -SL MB", useEye, skipfailure);
genMeth.clickId(genMeth, DroidData.BTNsubmit_ID);
Thread.sleep(2000);
genMeth.eyesCheckWindow(eyes, "Param Rep ED - Female only", useEye, skipfailure);
//Go To Employee tab by Login variable
genMeth.clickId(genMeth, DroidData.TabBarTitle_Name);
genMeth.clickId(genMeth, "ED by Login");
genMeth.eyesCheckWindow(eyes, "Param Rep ED - ED by Login", useEye, skipfailure);
//Back to startup screen
genMeth.clickId(genMeth, DroidData.IconBack_Nav_Name);
genMeth.swipeUpMeizuLong(1000);
genMeth.swipeUpMeizuLong(1000);
genMeth.eyesCheckWindow("Default app is open (Droid) - SQL Golden App", useEye, genMeth, skipfailure);
}
@Test(enabled = true, groups = { "Sanity IOS" }, testName = "Param_Report_DL_Dashboard", retryAnalyzer = Retry.class, description = "Check the Employee Directory tab")
public void Param_Report_Map()
throws ParserConfigurationException, SAXException, IOException,
InterruptedException {
// go to parameterized report - Map tab
genMeth.swipedownMeizuShort(1000);
genMeth.clickId(genMeth, "Param Report Map");
Thread.sleep(5000);
genMeth.eyesCheckWindow(eyes, "Param Rep Map - Parameters", useEye, skipfailure);
//Insert parameters
genMeth.clickId(genMeth, "Choose Value");
genMeth.clickId(genMeth, "Mall of America");
genMeth.clickId(genMeth, DroidData.BTNdoneName);
genMeth.eyesCheckWindow(eyes, "Param Rep Map - Mall Of america chosen", useEye, skipfailure);
genMeth.clickId(genMeth, DroidData.BTNsubmit_ID);
Thread.sleep(12000);
genMeth.eyesCheckWindow(eyes, "Param Rep Map - Mall Of america on map", useEye, skipfailure);
//Back to startup screen
genMeth.clickId(genMeth, DroidData.IconBack_Nav_Name);
genMeth.swipeUpMeizuLong(1000);
genMeth.eyesCheckWindow("Default app is open (Droid) - SQL Golden App", useEye, genMeth, skipfailure);
}
@Test(enabled = true, groups = { "Sanity IOS" }, testName = "Param_Report_DL_Dashboard", retryAnalyzer = Retry.class, description = "Check the Employee Directory tab")
public void Param_Report_Cards()
throws ParserConfigurationException, SAXException, IOException,
InterruptedException {
// go to parameterized report - Cards tab
genMeth.swipedownMeizuLong(1000);
genMeth.clickId(genMeth, "Param Report Cards");
Thread.sleep(5000);
genMeth.eyesCheckWindow(eyes, "Param Rep Cards - Parameters", useEye, skipfailure);
//Insert parameters
genMeth.clickId(genMeth, "Default");
genMeth.clickId(genMeth, DroidData.BTNkeyboardDelete);
genMeth.clickId(genMeth, DroidData.BTNkeyboardDelete);
genMeth.clickId(genMeth, DroidData.BTNkeyboardDelete);
genMeth.clickId(genMeth, DroidData.BTNkeyboardDelete);
genMeth.clickId(genMeth, DroidData.BTNkeyboardDelete);
genMeth.clickId(genMeth, DroidData.BTNkeyboardDelete);
genMeth.clickId(genMeth, DroidData.BTNkeyboardDelete);
genMeth.clickId(genMeth, DroidData.BtnkeyboardMoreNumbers);
genMeth.clickId(genMeth, "1");
genMeth.clickId(genMeth, DroidData.BTNdoneName);
genMeth.eyesCheckWindow(eyes, "Param Rep Cards - Priority = 1", useEye, skipfailure);
genMeth.clickId(genMeth, DroidData.BTNsubmit_ID);
Thread.sleep(10000);
genMeth.eyesCheckWindow(eyes, "Param Rep Cards - Priority = 1 service calls", useEye, skipfailure);
//Back to startup screen
genMeth.clickId(genMeth, DroidData.IconBack_Nav_Name);
genMeth.swipeUpMeizuLong(1000);
genMeth.eyesCheckWindow("Default app is open (Droid) - SQL Golden App", useEye, genMeth, skipfailure);
}
@Test(enabled = true, testName = "List", retryAnalyzer = Retry.class, description = "Check the List tab",
groups = { "Sanity IOS" })
public void Actions_List() throws ParserConfigurationException, SAXException,
IOException, InterruptedException {
// go to List
genMeth.clickId(genMeth, "List / Grid Actions");
//Set slicer to one item
genMeth.clickId(genMeth, DroidData.BTNSlicer);
genMeth.swipedownMeizuLong(1000);
genMeth.clickId(genMeth, "Service Call ID");
genMeth.clickId(genMeth, "1");
genMeth.clickId(genMeth, "1 Slicers");
genMeth.clickId(genMeth, DroidData.BTNdoneName);
genMeth.eyesCheckWindow(eyes, "List Actions- List Actions", useEye, skipfailure);
//Execute action in the first layer
//Free text description
genMeth.clickId(genMeth, "Description");
boolean checkAction = genMeth.checkIsElementVisible(By.id("Descrip 1"));
if (checkAction) {
genMeth.clickId(genMeth, DroidData.BTNkeyboardDelete);
genMeth.clickId(genMeth, DroidData.BtnkeyboardMoreNumbers);
genMeth.clickId(genMeth, "2");
} else {
genMeth.clickId(genMeth, DroidData.BTNkeyboardDelete);
genMeth.clickId(genMeth, DroidData.BtnkeyboardMoreNumbers);
genMeth.clickId(genMeth, "1");
}
genMeth.clickId(genMeth, DroidData.BTNdoneName);
Thread.sleep(10000);
genMeth.eyesCheckWindow(eyes, "List Actions- cell description", useEye, skipfailure);
//Priority (Simple List MB)
genMeth.clickId(genMeth, "Priority");
genMeth.clickId(genMeth, "91");
checkAction = genMeth.checkIsElementVisible(By.id("Update Pirority (MB)"));
if (checkAction) {
genMeth.clickId(genMeth, "90");
}
genMeth.eyesCheckWindow(eyes, "List Actions- cell Priority (Simple List MB)", useEye, skipfailure);
//Assign To (Dynamic List)
genMeth.clickId(genMeth, "Assigned To");
genMeth.clickId(genMeth, "Adrian Lopez");
Thread.sleep(10000);
genMeth.eyesCheckWindow(eyes, "List Actions- cell Assign To (DL)", useEye, skipfailure);
//Action in second layer
genMeth.swipedownMeizuLong(1000);
genMeth.swipedownMeizuLong(1000);
// genMeth.swipedownIphone5Shortest(1000);
genMeth.clickId(genMeth, DroidData.BTNseeAll_ID);
Thread.sleep(2000);
genMeth.swipedownMeizuLong(1000);
//QR code
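// Set the QR value to "1", go back, then reopen the second layer and change it to "02"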
genMeth.clickId(genMeth, "QR");
genMeth.clickId(genMeth, DroidData.BTNClearName);
genMeth.sendXpth(genMeth, "//UIAApplication[1]/UIAWindow[1]/UIATextField[1]", "1");
genMeth.clickId(genMeth, DroidData.BTNdoneName);
genMeth.clickId(genMeth, DroidData.BTNBackName);
genMeth.swipedownMeizuLong(1000);
Thread.sleep(1000);
genMeth.swipedownMeizuLong(1000);
genMeth.swipedownMeizuLong(1000);
try {
driver.findElementById(DroidData.BTNseeAll_ID).click();
//genMeth.clickId(genMeth, DroidData.BTNseeAll_ID);
} catch (Exception e1) {
// "See All" may not be visible yet; its visibility is re-checked below
}
boolean isDisplayed = genMeth.checkIsElementVisible(By.id("Service Call ID"));
if(!isDisplayed){
genMeth.swipedownMeizuLong(1000);
genMeth.clickId(genMeth, DroidData.BTNseeAll_ID);
}
Thread.sleep(2000);
genMeth.swipedownMeizuLong(1000);
genMeth.clickId(genMeth, "QR");
genMeth.clickId(genMeth, DroidData.BTNClearName);
genMeth.sendXpth(genMeth, "//UIAApplication[1]/UIAWindow[1]/UIATextField[1]", "02");
genMeth.clickId(genMeth, DroidData.BTNdoneName);
Thread.sleep(10000);
genMeth.swipedownMeizuShort(1000);
genMeth.eyesCheckWindow(eyes, "List Actions- cell QR second layer (QR)", useEye, skipfailure);
genMeth.clickId(genMeth, DroidData.BTNBackName);
//Row Action (Adding a row to the all parameters table)
genMeth.swipedownMeizuLong(1000);
try {
driver.findElementById("PopUp- AddRow").click();
} catch (Exception e) {
// "PopUp- AddRow" may be off-screen; it is swiped into view below if not found
}
Thread.sleep(4000);
isDisplayed = genMeth.checkIsElementVisible(By.id("PopUp- AddRow"));
if (!isDisplayed) {
genMeth.swipedownMeizuLong(1000);
genMeth.clickId(genMeth, "PopUp- AddRow");
}
genMeth.clickId(genMeth, "Write");
genMeth.sendXpth(genMeth, "//UIAApplication[1]/UIAWindow[1]/UIATextView[1]", "New Row");
genMeth.clickId(genMeth, DroidData.BTNdoneName);
genMeth.clickId(genMeth, "DeviceType_SL_ByName");
genMeth.clickId(genMeth, "Laptop");
genMeth.clickId(genMeth, "Device_Model_DL");
genMeth.clickId(genMeth, "Asus");
genMeth.clickId(genMeth, "Items_By_Category_PSL");
genMeth.clickId(genMeth, "Keyboard (Cat 1)");
genMeth.clickId(genMeth, "QR");
genMeth.clickId(genMeth, DroidData.BTNdoneName);
genMeth.clickId(genMeth, "SL_Manual_List");
genMeth.clickId(genMeth, "2");
//PSL with Variable
genMeth.clickId(genMeth, "Items_SmallerThanMobileDate_PSL");
genMeth.clickId(genMeth, "3");
// image
genMeth.swipedownMeizuLong(1000);
genMeth.swipedownMeizuLong(1000);
genMeth.clickXpth(genMeth, "//UIAApplication[1]/UIAWindow[1]/UIATableView[1]/UIATableCell[8]/UIAStaticText[1]");
genMeth.clickXpth(genMeth, "//UIAApplication[1]/UIAWindow[1]/UIANavigationBar[1]/UIAButton[5]");
genMeth.clickId(genMeth, "PhotoCapture");
genMeth.clickId(genMeth, "Use Photo");
genMeth.clickId(genMeth, "Done");
genMeth.eyesCheckWindow(eyes, "List Actions- Image set", useEye, skipfailure);
genMeth.clickId(genMeth, DroidData.BTNsubmit_ID);
/*
//Signature
genMeth.swipedownIphone5Long(1000);
genMeth.clickXpth(genMeth, "//UIAApplication[1]/UIAWindow[1]/UIATableView[1]/UIATableCell[8]");
TouchAction touchAction = new TouchAction(driver);
// touchAction.press(250, 250).moveTo(250, 150).release().perform();
//MobileElement el1 = genMeth.returnName(driver, genMeth, "SkyGiraffe");
//MobileElement el2 = genMeth.returnId(driver, genMeth, "X");
//touchAction.longPress(el1, 2000).moveTo(el2).release().perform();
touchAction.longPress(200, 200, 3000).perform();
touchAction.longPress(200, 200, 3000).waitAction(1000).moveTo(200,201).release().perform();
touchAction.longPress(100, 100, 3000);
touchAction.moveTo(100, 50).waitAction(1000);
touchAction.release();
touchAction.perform();
genMeth.eyesCheckWindow(eyes, "List Actions- Cancel signature", useEye, skipfailure);
genMeth.clickXpth(genMeth, "//UIAApplication[1]/UIAWindow[1]/UIATableView[1]/UIATableCell[8]");
touchAction.longPress(250, 250, 1000).moveTo(250, 150).release().perform();
genMeth.clickId(genMeth, DroidData.BTNdoneName);
genMeth.eyesCheckWindow(eyes, "List Actions- signature Set", useEye, skipfailure);
/
genMeth.clickId(genMeth, DroidData.BTNsubmit_ID);
*/
//Row Action with input type = Inline (Adding a row to the all parameters table)
//Verify Startup screen is open
genMeth.clickId(genMeth, DroidData.IconBack_Nav_Name);
genMeth.swipeUpMeizuShort(1000);
genMeth.eyesCheckWindow("Default app is open (Droid) - SQL Golden App", useEye, genMeth, skipfailure);
}
@Test(enabled = true, testName = "Inline row action", retryAnalyzer = Retry.class, description = "Check the List tab",
groups = { "Sanity IOS" })
public void Actions_List_Inline() throws ParserConfigurationException, SAXException,
IOException, InterruptedException {
genMeth.clickId(genMeth, "List / Grid Actions");
genMeth.clickId(genMeth, DroidData.TabBarTitle_Name);
genMeth.clickId(genMeth, "List (Inline)");
// open the inline Add Row action
genMeth.swipedownMeizuLong(1000);
genMeth.clickId(genMeth, "InLine- AddRow");
Thread.sleep(4000);
genMeth.eyesCheckWindow(eyes, "Actions_List_Inline- Inline parameters default", useEye, skipfailure);
genMeth.sendId(genMeth, "This is default value", "1");
genMeth.clickId(genMeth, DroidData.BTNdoneName);
genMeth.clickId(genMeth, "Mobile");
genMeth.clickId(genMeth, "iPhone6");
genMeth.swipedownMeizuShort(1000);
genMeth.clickId(genMeth, "Keyboard (Cat 1)");
genMeth.clickId(genMeth, "QR");
genMeth.clickId(genMeth, DroidData.BTNdoneName);
genMeth.swipedownMeizuShort(1000);
genMeth.clickId(genMeth, "2");
genMeth.swipedownMeizuShort(1000);
//PSL with Variable
genMeth.clickId(genMeth, "7");
// image
genMeth.swipedownMeizuLong(1000);
genMeth.swipedownMeizuLong(1000);
genMeth.clickXpth(genMeth, "//UIAApplication[1]/UIAWindow[1]/UIATableView[1]/UIATableCell[50]/UIAStaticText[1]");
genMeth.clickXpth(genMeth, "//UIAApplication[1]/UIAWindow[1]/UIANavigationBar[1]/UIAButton[5]");
genMeth.clickId(genMeth, "PhotoCapture");
genMeth.clickId(genMeth, "Use Photo");
genMeth.clickId(genMeth, "Done");
genMeth.eyesCheckWindow(eyes, "Actions_List_Inline- Inline Image set", useEye, skipfailure);
genMeth.clickId(genMeth, DroidData.BTNsubmit_ID);
//Verify Startup screen is open
genMeth.clickId(genMeth, DroidData.IconBack_Nav_Name);
genMeth.swipeUpMeizuShort(1000);
genMeth.eyesCheckWindow("Default app is open (Droid) - SQL Golden App", useEye, genMeth, skipfailure);
}
@Test(enabled = true, testName = "List", retryAnalyzer = Retry.class, description = "Check the List tab",
groups = { "Sanity IOS" })
public void Actions_Grid_One_Layer() throws ParserConfigurationException, SAXException,
IOException, InterruptedException {
// go to Grid - One Layer
genMeth.clickId(genMeth, "List / Grid Actions");
genMeth.clickId(genMeth, DroidData.TabBarTitle_Name);
genMeth.clickId(genMeth, "Grid - One Layer");
genMeth.eyesCheckWindow(eyes, "Actions_Grid_One_Layer- Grid One Layer main view", useEye, skipfailure);
//USER INPUT = Free Text (Description)
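// Toggle between "Descrip 1" and "Descrip 2" depending on which value is currently displayed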
boolean isTextDisplayed = genMeth.checkIsElementVisible(By.id("Descrip 1"));
if (isTextDisplayed){
genMeth.clickId(genMeth, "Descrip 1");
genMeth.clickId(genMeth, DroidData.BTNkeyboardDelete);
genMeth.clickId(genMeth, DroidData.BtnkeyboardMoreNumbers);
genMeth.clickId(genMeth, "2");
}
else{
genMeth.clickId(genMeth, "Descrip 2");
genMeth.clickId(genMeth, DroidData.BTNkeyboardDelete);
genMeth.clickId(genMeth, DroidData.BtnkeyboardMoreNumbers);
genMeth.clickId(genMeth, "1");
}
genMeth.clickId(genMeth, DroidData.BTNdoneName);
Thread.sleep(10000);
genMeth.eyesCheckWindow(eyes, "Actions_Grid_One_Layer- Grid One Layer- description (free text)", useEye, skipfailure);
//USER INPUT = Simple List MB (Priority)
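// Toggle the priority between 90 and 91 based on which value is currently shown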
isTextDisplayed = genMeth.checkIsElementVisible(By.id("90"));
if (isTextDisplayed) {
genMeth.clickId(genMeth, "90");
genMeth.clickId(genMeth, "91");
}
else{
genMeth.clickId(genMeth, "91");
genMeth.clickId(genMeth, "90");
}
Thread.sleep(10000);
genMeth.eyesCheckWindow(eyes, "Actions_Grid_One_Layer- Grid One Layer- Priority (Simple List MB)", useEye, skipfailure);
//USER INPUT = Simple List DI (Status)
genMeth.clickId(genMeth, "6");
genMeth.eyesCheckWindow(eyes, "Actions_Grid_One_Layer- Grid One Layer- Status (Simple List DI)", useEye, skipfailure);
genMeth.clickId(genMeth, "Not Clear");
Thread.sleep(6000);
genMeth.eyesCheckWindow(eyes, "Actions_Grid_One_Layer- Grid One Layer- Status success (Simple List DI)", useEye, skipfailure);
genMeth.swipeRightMeizuLong(1000);
//USER INPUT = PSL (ItemID)
genMeth.clickId(genMeth, "21");
genMeth.clickId(genMeth, "Video card (Cat 1)");
Thread.sleep(10000);
genMeth.eyesCheckWindow(eyes, "Actions_Grid_One_Layer- Grid One Layer- ItemID (PSL)", useEye, skipfailure);
//USER INPUT = QR (KPI)
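// Toggle the KPI between "01" and "02" via the QR manual-input field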
isTextDisplayed = genMeth.checkIsElementVisible(By.id("01"));
if (isTextDisplayed) {
genMeth.clickId(genMeth, "01");
genMeth.clickId(genMeth, DroidData.BTNClearName);
genMeth.sendXpth(genMeth, " //UIAApplication[1]/UIAWindow[1]/UIATextField[1]", "02");
genMeth.clickId(genMeth, DroidData.BTNdoneName);
}
else{
genMeth.clickId(genMeth, "02");
genMeth.clickId(genMeth, DroidData.BTNClearName);
genMeth.sendXpth(genMeth, " //UIAApplication[1]/UIAWindow[1]/UIATextField[1]", "01");
genMeth.clickId(genMeth, DroidData.BTNdoneName);
}
Thread.sleep(10000);
genMeth.eyesCheckWindow(eyes, "Actions_Grid_One_Layer- Grid One Layer- KPI (QR)", useEye, skipfailure);
//OutGrid(Row)
genMeth.clickXpth(genMeth, " //UIAApplication[1]/UIAWindow[1]/UIAScrollView[1]/UIAScrollView[1]/UIAScrollView[2]/UIAImage[13]");
genMeth.eyesCheckWindow(eyes, "Actions_Grid_One_Layer- Grid One Layer- Row parameters before insert", useEye, skipfailure);
genMeth.clickId(genMeth, "Free_Text1");
Thread.sleep(2000);
genMeth.sendXpth(genMeth, "//UIAApplication[1]/UIAWindow[1]/UIATextView[1]", "New row");
genMeth.clickId(genMeth, DroidData.BTNdoneName);
genMeth.clickId(genMeth, "QR_");
Thread.sleep(3000);
genMeth.sendXpth(genMeth, "//UIAApplication[1]/UIAWindow[1]/UIATextField[1]", "New QR");
genMeth.clickId(genMeth, DroidData.BTNdoneName);
genMeth.clickId(genMeth, "SL_Manual_List_");
genMeth.clickId(genMeth, "1");
genMeth.clickId(genMeth, "Device_Type_SL_DI_");
genMeth.clickId(genMeth, "Laptop");
genMeth.clickId(genMeth, "Device_Model_DL_");
genMeth.clickId(genMeth, "Lenovo");
genMeth.clickId(genMeth, "Items_By_Category_PSL");
genMeth.clickId(genMeth, "Power Supply (Cat 1)");
genMeth.eyesCheckWindow(eyes, "Actions_Grid_One_Layer- Grid One Layer- Row parameters after insert", useEye, skipfailure);
genMeth.swipedownMeizuLong(1000);
// image
genMeth.clickXpth(genMeth, "//UIAApplication[1]/UIAWindow[1]/UIATableView[1]/UIATableCell[8]/UIAStaticText[1]");
genMeth.clickXpth(genMeth,"//UIAApplication[1]/UIAWindow[1]/UIANavigationBar[1]/UIAButton[5]");
genMeth.clickId(genMeth, "PhotoCapture");
genMeth.clickId(genMeth, "Use Photo");
genMeth.clickId(genMeth, "Done");
genMeth.eyesCheckWindow(eyes, "Actions_Grid_One_Layer- Grid One Layer- Image set", useEye, skipfailure);
genMeth.clickId(genMeth, DroidData.BTNsubmit_ID);
Thread.sleep(5000);
genMeth.eyesCheckWindow(eyes, "Actions_Grid_One_Layer- Grid One Layer- after action executed", useEye, skipfailure);
// Verify Startup screen is open
genMeth.clickId(genMeth, DroidData.IconBack_Nav_Name);
genMeth.swipeUpMeizuShort(1000);
genMeth.eyesCheckWindow("Default app is open (Droid) - SQL Golden App", useEye, genMeth, skipfailure);
}
@Test(enabled = true, testName = "List", retryAnalyzer = Retry.class, description = "Check the Grid two layer actions",
groups = { "Sanity IOS" })
public void Actions_Grid_Two_Layer() throws ParserConfigurationException, SAXException,
IOException, InterruptedException {
// go to Grid - Two Layers
genMeth.clickId(genMeth, "List / Grid Actions");
genMeth.clickId(genMeth, DroidData.TabBarTitle_Name);
genMeth.clickId(genMeth, "Grid - Two Layers");
genMeth.eyesCheckWindow(eyes, "Actions_Grid_Two_Layer- Grid Two Layer main view", useEye, skipfailure);
genMeth.clickId(genMeth, "3");
//DL
genMeth.clickId(genMeth, "ItemID");
genMeth.clickId(genMeth, "Keyboard (Cat 1)");
genMeth.clickId(genMeth, DroidData.BTNdoneName);
Thread.sleep(10000);
genMeth.eyesCheckWindow(eyes, "Actions_Grid_Two_Layer- Grid Two Layers- ItemID SL", useEye, skipfailure);
//Row Action
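// Fill the UpdateWithTableParam action: a DummyParam value plus one TableParams row (Priority + Status)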
genMeth.clickId(genMeth, "m");
genMeth.clickId(genMeth, "UpdateWithTableParam");
genMeth.clickId(genMeth, "DummyParam");
genMeth.clickId(genMeth, "1");
genMeth.clickId(genMeth, DroidData.BTNdoneName);
genMeth.clickId(genMeth, "TableParams");
genMeth.clickId(genMeth, "add icon table");
genMeth.clickId(genMeth, "Priority");
genMeth.clickId(genMeth, "1");
genMeth.clickId(genMeth, DroidData.BTNdoneName);
genMeth.clickId(genMeth, "Status");
genMeth.clickId(genMeth, "Open");
genMeth.clickId(genMeth, DroidData.BTNsave);
genMeth.eyesCheckWindow(eyes, "Actions_Grid_Two_Layer- Grid Two Layers- Table Parameter filled", useEye, skipfailure);
genMeth.clickId(genMeth, DroidData.BTNdoneName);
genMeth.eyesCheckWindow(eyes, "Actions_Grid_Two_Layer- Grid Two Layers- All parameters are filled", useEye, skipfailure);
genMeth.clickId(genMeth, DroidData.BTNsubmit_ID);
Thread.sleep(10000);
//Check the push notification
genMeth.eyesCheckWindow(eyes, "Actions_Grid_Two_Layer- Grid Two Layers- Action Success", useEye, skipfailure);
// Verify Startup screen is open
genMeth.clickId(genMeth, DroidData.BTNBackName);
genMeth.clickId(genMeth, DroidData.IconBack_Nav_Name);
genMeth.eyesCheckWindow("Default app is open (Droid) - SQL Golden App", useEye, genMeth, skipfailure);
}
@Test(enabled = true, groups = {"Sanity IOS"}, testName = "Sanity", description = "Slicer report")
public void slicerReport() throws InterruptedException, IOException{
// go to the Slicer report
genMeth.swipedownMeizuLong(1000);
genMeth.swipedownMeizuLong(1000);
genMeth.clickId(genMeth, "Slicer report");
genMeth.clickId(genMeth, DroidData.BTNSlicer);
genMeth.clickId(genMeth, "BranchID");
genMeth.clickId(genMeth, "7");
genMeth.eyesCheckWindow(eyes, "Slicer Report- branchID selected", useEye, skipfailure);
genMeth.clickId(genMeth, DroidData.BTNBackName);
genMeth.clickId(genMeth, DroidData.BTNdoneName);
genMeth.eyesCheckWindow(eyes, "Slicer Report- List (BranchID=7)", useEye, skipfailure);
genMeth.clickId(genMeth, DroidData.TabBarTitle_Name);
genMeth.clickId(genMeth, "Slicer Grid");
genMeth.eyesCheckWindow(eyes, "Slicer Report- Grid (BranchID=7)", useEye, skipfailure);
genMeth.clickId(genMeth, DroidData.TabBarTitle_Name);
genMeth.clickId(genMeth, "Slicer Cover Flow");
genMeth.eyesCheckWindow(eyes, "Slicer Report- Cover Flow (BranchID=7)", useEye, skipfailure);
genMeth.clickId(genMeth, DroidData.TabBarTitle_Name);
genMeth.clickId(genMeth, "Slicer Dashboard");
genMeth.eyesCheckWindow(eyes, "Slicer Report- Dashboard (BranchID=7)", useEye, skipfailure);
genMeth.clickId(genMeth, DroidData.TabBarTitle_Name);
genMeth.clickId(genMeth, "Slicer Map");
// genMeth.clickName(genMeth, "Garden State Plaza, Paramus, NJ, 1 item");
genMeth.eyesCheckWindow(eyes, "Slicer Report- Map (BranchID=7)", useEye, skipfailure);
genMeth.clickId(genMeth, DroidData.TabBarTitle_Name);
genMeth.clickId(genMeth, "Slicer Cards");
genMeth.eyesCheckWindow(eyes, "Slicer Report- Cards (BranchID=7)", useEye, skipfailure);
genMeth.clickId(genMeth, DroidData.TabBarTitle_Name);
genMeth.clickId(genMeth, "Slicer News");
genMeth.eyesCheckWindow(eyes, "Slicer Report- News (BranchID=7)", useEye, skipfailure);
genMeth.clickId(genMeth, DroidData.TabBarTitle_Name);
genMeth.clickId(genMeth, "Slicer Bar Chart");
genMeth.eyesCheckWindow(eyes, "Slicer Report- Bar chart empty slicing", useEye, skipfailure);
genMeth.clickId(genMeth, DroidData.BTNSlicer);
genMeth.clickId(genMeth, "BranchID");
genMeth.clickId(genMeth, "7");
genMeth.clickId(genMeth, "Aventura Mall");
genMeth.clickId(genMeth, DroidData.BTNBackName);
genMeth.clickId(genMeth, DroidData.BTNdoneName);
genMeth.eyesCheckWindow(eyes, "Slicer Report- Bar chart Aventura Mall", useEye, skipfailure);
// Verify Startup screen is open
genMeth.clickId(genMeth, DroidData.IconBack_Nav_Name);
genMeth.swipeUpMeizuLong(1000);
genMeth.swipeUpMeizuLong(1000);
genMeth.eyesCheckWindow("Default app is open (Droid) - SQL Golden App", useEye, genMeth, skipfailure);
}
@Test(enabled = true, groups = {"Sanity IOS1"}, testName = "Sanity", description = "Slicer report")
public void slicerReportWithSecurityFilter() throws InterruptedException, IOException{
// go to the Slicer report with a security filter
genMeth.swipedownMeizuLong(1000);
genMeth.swipedownMeizuLong(1000);
genMeth.clickId(genMeth, "SlicerReport_Sfilter");
genMeth.clickId(genMeth, DroidData.BTNSlicer);
genMeth.clickId(genMeth, "BranchID");
genMeth.eyesCheckWindow(eyes, "Slicer Report with Security Filter - branchID 1,3 only", useEye, skipfailure);
genMeth.clickId(genMeth, "3");
genMeth.clickId(genMeth, DroidData.BTNBackName);
genMeth.clickId(genMeth, DroidData.BTNdoneName);
genMeth.eyesCheckWindow(eyes, "Slicer Report with Security Filter- List (BranchID=3)", useEye, skipfailure);
genMeth.clickId(genMeth, DroidData.TabBarTitle_Name);
genMeth.clickId(genMeth, "Slicer Grid");
genMeth.eyesCheckWindow(eyes, "Slicer Report with Security Filter- Grid (BranchID=3)", useEye, skipfailure);
genMeth.clickId(genMeth, DroidData.TabBarTitle_Name);
genMeth.clickId(genMeth, "Slicer Cover Flow");
genMeth.eyesCheckWindow(eyes, "Slicer Report with Security Filter- Cover Flow (BranchID=3)", useEye, skipfailure);
genMeth.clickId(genMeth, DroidData.TabBarTitle_Name);
genMeth.clickId(genMeth, "Slicer Dashboard");
genMeth.eyesCheckWindow(eyes, "Slicer Report with Security Filter- Dashboard (BranchID=3)", useEye, skipfailure);
genMeth.clickId(genMeth, DroidData.TabBarTitle_Name);
genMeth.clickId(genMeth, "Slicer Map");
genMeth.eyesCheckWindow(eyes, "Slicer Report with Security Filter- Map (BranchID=3)", useEye, skipfailure);
genMeth.clickId(genMeth, DroidData.TabBarTitle_Name);
genMeth.clickId(genMeth, "Slicer Cards");
genMeth.eyesCheckWindow(eyes, "Slicer Report with Security Filter- Cards (BranchID=3)", useEye, skipfailure);
genMeth.clickId(genMeth, DroidData.TabBarTitle_Name);
genMeth.clickId(genMeth, "Slicer News");
genMeth.eyesCheckWindow(eyes, "Slicer Report with Security Filter- News (BranchID=3)", useEye, skipfailure);
// Verify Startup screen is open
genMeth.clickId(genMeth, DroidData.IconBack_Nav_Name);
genMeth.swipeUpMeizuLong(1000);
genMeth.swipeUpMeizuLong(1000);
genMeth.eyesCheckWindow("Default app is open (Droid) - SQL Golden App", useEye, genMeth, skipfailure);
}
@Test(enabled = false, groups = { "Sanity IOS__" }, testName = "Sanity Tests", description = "login with bad/missing credentials", retryAnalyzer = Retry.class)
public void badCredentials() throws Exception, Throwable {
genMeth.signOutFromStartup(genMeth);
// Login with bad user name
genMeth.sendId( genMeth, DroidData.TEXTFIELDemailXpth, "bad name");
genMeth.sendId( genMeth, DroidData.TEXTFIELDpasswordXpth, DroidData.passwordProd);
genMeth.clickId( genMeth, DroidData.BTNloginID);
genMeth.isElementVisible(By.name("Login Failed"));
genMeth.clickId(genMeth, DroidData.BTNokName);
// Login with bad password
genMeth.sendId( genMeth, DroidData.TEXTFIELDemailXpth, DroidData.userQA);
genMeth.sendId( genMeth, DroidData.TEXTFIELDpasswordXpth, "bad password");
genMeth.clickId( genMeth, DroidData.BTNloginID);
genMeth.isElementVisible(By.name("Login Failed"));
genMeth.clickId(genMeth, DroidData.BTNokName);
// Login with bad user name & password
genMeth.sendId( genMeth, DroidData.TEXTFIELDemailXpth, "bad name");
genMeth.sendId( genMeth, DroidData.TEXTFIELDpasswordXpth, "bad password");
genMeth.clickId( genMeth, DroidData.BTNloginID);
genMeth.isElementVisible(By.name("Login Failed"));
genMeth.clickId(genMeth, DroidData.BTNokName);
// Login with empty Name
genMeth.clearId(genMeth, DroidData.TEXTFIELDemailXpth);
genMeth.sendId( genMeth, DroidData.TEXTFIELDpasswordXpth, DroidData.passwordQA);
genMeth.clickId( genMeth, DroidData.BTNloginID);
genMeth.isElementVisible(By.name("Bad Request"));
genMeth.clickId(genMeth, DroidData.BTNokName);
// Login with empty Password
genMeth.sendId( genMeth, DroidData.TEXTFIELDemailXpth, DroidData.userQA);
genMeth.clearId(genMeth, DroidData.TEXTFIELDpasswordXpth);
genMeth.clickId(genMeth, DroidData.BTNloginID);
genMeth.isElementVisible(By.name("Bad Request"));
genMeth.clickId(genMeth, DroidData.BTNokName);
// Login with empty Name & password
genMeth.clearId(genMeth, DroidData.TEXTFIELDemailXpth);
genMeth.clearId(genMeth, DroidData.TEXTFIELDpasswordXpth);
genMeth.clickId(genMeth, DroidData.BTNloginID);
genMeth.isElementVisible(By.name("Bad Request"));
genMeth.clickId(genMeth, DroidData.BTNokName);
// Forgot your password Negative (attempt to restore password with a non
// existing email)
// Forgot your password Positive (attempt to restore password with an
// existing email)
}
@Test(enabled = false, retryAnalyzer = Retry.class, testName = "Sanity Tests", description = "Switching from Foreground to Background and vice versa use cases",
groups = { "Sanity IOS__" })
public void foregroundBackgroundSwitch() throws Exception, Throwable {
//Take the app to background & foreground x times
//Take the app to sleep/lock x times
}
@Test(enabled = false, retryAnalyzer = Retry.class, testName = "connection lost handling", description = "Checking how the app works while connection is lost & back again", dependsOnGroups = { "Sanity*" },
groups = { "Sanity IOS__" })
public void connectionLost() throws InterruptedException, IOException,
ParserConfigurationException, SAXException {
}
@AfterSuite(alwaysRun = true)
public void tearDown() throws Exception {
try {
driver.removeApp(appIdentifier);
driver.quit();
/*
boolean isAppInstalled = driver.isAppInstalled(appIdentifier);
if (isAppInstalled) {
driver.removeApp(appIdentifier);
}
*/
} catch (Exception e) {
// App removal / driver quit may fail if the session is already gone; log and continue
e.printStackTrace();
}
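// Email the run summary; sendTestNGResult() stays commented out and a plain email is sent instead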
SendResults sr = new SendResults("[email protected]",
"[email protected]", "TestNG results", "Test Results");
//sr.sendTestNGResult();
sr.sendRegularEmail();
/*
TestListenerAdapter tla = new TestListenerAdapter();
TestNG testng2 = new TestNG();
testng2.setTestClasses(new Class[] { SendReport.class });
testng2.setGroups("send mail");
testng2.addListener(tla);
testng2.run();
*/
}
/*
@Test (enabled = true ,testName = "Sample App Dashboard DailySales", retryAnalyzer = Retry.class, description = "Dashboard DailySales" ,
groups= {"Sanity IOSsample"} /*dependsOnMethods={"testLogin"})
public void sampleAplicationDashboardDailySales() throws ParserConfigurationException,
SAXException, IOException, InterruptedException {
//Logout from startup page
genMeth.signOutFromStartup(genMeth);
genMeth.clickId(genMeth, DroidData.BTNsampleAccountID);
genMeth.clickName(genMeth, DroidData.Icon_AllApps_Name);
genMeth.clickName(genMeth, "Operations 5.2");
//useEye = true;
// Login to sample app & open Dashboard report
genMeth.eyesCheckWindow(eyes, "SampleApp Main screen", useEye, skipfailure);
genMeth.clickName(genMeth, DroidData.DashboardName);
genMeth.eyesCheckWindow(eyes, "Dashboard Tab", useEye, skipfailure);
// genMeth.swipeRightIphone6Plus(1000);
genMeth.swipeRightIphone5(500);
genMeth.eyesCheckWindow(eyes, "World wide orders Tab", useEye, skipfailure);
genMeth.clickName(genMeth, DroidData.IconBack_Nav_Name);
genMeth.clickName(genMeth, DroidData.DashboardName);
// Open Sales Bar
// Change eye back to true once oleg fix the decimal issue
Thread.sleep(2000);
genMeth.clickId(genMeth, DroidData.SalesName);
//set Eye UI to false due to ordinal change
// useEye = true;
genMeth.eyesCheckWindow(eyes, "SampleApp Daily Sales Bar- Show All", useEye, skipfailure);
genMeth.clickId(genMeth, DroidData.ReturnsName);
genMeth.eyesCheckWindow(eyes, "SampleApp Daily Sales Bar- show Sales/Net Sales", useEye, skipfailure);
genMeth.clickId(genMeth, DroidData.SalesName);
genMeth.eyesCheckWindow(eyes, "SampleApp show Net Sales", useEye, skipfailure);
genMeth.clickId(genMeth, DroidData.NetSalesName);
genMeth.eyesCheckWindow(eyes, "SampleApp Daily Sales - show Empty", useEye, skipfailure);
genMeth.clickId(genMeth, DroidData.SalesName);
genMeth.clickId(genMeth, DroidData.ReturnsName);
genMeth.clickId(genMeth, DroidData.NetSalesName);
genMeth.eyesCheckWindow(eyes, "SampleApp Daily Sales Bar- Show All", useEye, skipfailure);
//Open Sales Pie
genMeth.clickId(genMeth, DroidData.DailySalesBarID);
genMeth.clickId(genMeth, DroidData.DailysalesPieID);
genMeth.eyesCheckWindow(eyes, "SampleApp Daily Sales Pie- Net Sales", useEye, skipfailure);
//genMeth.clickId(genMeth, DroidData.DestinyUSAID);
//genMeth.clickName(genMeth, DroidData.DestinyUSAID);
try {
driver.findElementById(DroidData.DestinyUSAID).click();
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
genMeth.eyesCheckWindow(eyes, "SampleApp Daily Sales Pie- Net Sales - Destiny USA", useEye, skipfailure);
genMeth.clickId(genMeth, DroidData.ReturnsName);
genMeth.eyesCheckWindow(eyes, "SampleApp Daily Sales Pie- Returns", useEye, skipfailure);
genMeth.clickId(genMeth, DroidData.SalesName);
genMeth.eyesCheckWindow(eyes, "SampleApp Daily Sales Pie- Sales", useEye, skipfailure);
genMeth.clickId(genMeth, DroidData.DailysalesPieID);
genMeth.clickId(genMeth, DroidData.Last12hoursID);
genMeth.eyesCheckWindow(eyes, "SampleApp Daily Sales Last 12 Months - Sparklines", useEye, skipfailure);
// Check slicer in Sparklines
genMeth.clickName(genMeth, DroidData.BTNSlicer);
genMeth.clickId(genMeth, DroidData.BranchID);
genMeth.clickId(genMeth, DroidData.DestinyUSAID);
genMeth.clickName(genMeth, DroidData.BTNBackName);
genMeth.clickName(genMeth, DroidData.BTNdoneName);
genMeth.eyesCheckWindow(eyes, "SampleApp Daily Sales Last 12 Months - Sparklines / Destiny USA", useEye, skipfailure);
//Clear the Slicer
genMeth.clickName(genMeth, DroidData.BTNSlicer);
genMeth.clickName(genMeth, DroidData.BTNClearName);
genMeth.clickName(genMeth, DroidData.BTNdoneName);
genMeth.eyesCheckWindow(eyes, "SampleApp Daily Sales Last 12 Months - Sparklines", useEye, skipfailure);
genMeth.clickName(genMeth, DroidData.IconBack_Nav_Name);
genMeth.clickName(genMeth, DroidData.IconBack_Nav_Name);
//Open Daily Sales from main screen
genMeth.clickId(genMeth, DroidData.DailySalesID);
genMeth.eyesCheckWindow(eyes, "SampleApp Daily Sales Bar (no back icon)- Show All", useEye, skipfailure);
genMeth.clickName(genMeth, DroidData.IconBack_Nav_Name);
genMeth.clickName(genMeth, "M");
}
@Test (enabled = true ,testName = "Sample Application", retryAnalyzer = Retry.class, description = "" ,
groups= {"Sanity IOSsample"} /*dependsOnMethods={"testLogin"})
public void sampleAplicationServiceCalls() throws ParserConfigurationException,
SAXException, IOException, InterruptedException {
//OPEN SERVICE CALLS
genMeth.signOutFromStartup(genMeth);
genMeth.clickId(genMeth, DroidData.BTNsampleAccountID);
genMeth.clickName(genMeth, DroidData.Icon_AllApps_Name);
genMeth.clickName(genMeth, "Operations 4.11");
//genMeth.clickName(genMeth, DroidData.DashboardName);
genMeth.clickId(genMeth, DroidData.ServiceCallsID);
genMeth.eyesCheckWindow(eyes, "SampleApp Service Calls", useEye, skipfailure);
// InGrid Action- First layer
//genMeth.clickName(genMeth, DroidData.BTNpriority_Name);
/* genMeth.clickXpth(genMeth, "//UIAApplication[1]/UIAWindow[1]/UIAScrollView[1]/UIACollectionView[1]/UIACollectionCell[1]/UIAStaticText[7]");
genMeth.clickName(genMeth, "1");
Thread.sleep(3000);
genMeth.swipedownIphone5(1000);
genMeth.swipeUpIphone5(1000);
genMeth.clickName(genMeth, DroidData.BTNpriority_Name);
genMeth.clickName(genMeth, "3");
Thread.sleep(5000);
genMeth.eyesCheckWindow(eyes, "SampleApp Service Calls- priority = 3", useEye, skipfailure);
//Open the Slicer
genMeth.clickName(genMeth, DroidData.BTNSlicer);
genMeth.clickId(genMeth, DroidData.BranchID);
genMeth.clickId(genMeth, DroidData.MallOfAmerica_Id);
genMeth.clickId(genMeth, DroidData.BTNBackName);
genMeth.clickId(genMeth, DroidData.BTNdoneName);
genMeth.eyesCheckWindow(eyes, "SampleApp Service Calls- Slicer Mall Of America", useEye, skipfailure);
/*
genMeth.clickXpth(genMeth, "//UIAApplication[1]/UIAWindow[1]/UIAScrollView[1]/UIATableView[1]/UIATableCell[6]/UIAStaticText[1]");
Thread.sleep(3000);
genMeth.clickName(genMeth, DroidData.BTNpriority_Name);
genMeth.clickName(genMeth, "1");
Thread.sleep(10000);
genMeth.clickName(genMeth, DroidData.BTNpriority_Name);
genMeth.clickName(genMeth, "4");
Thread.sleep(6000);
genMeth.eyesCheckWindow(eyes, "SampleApp Service Calls- priority = 4", useEye, skipfailure);
//Open the second layer
genMeth.clickXpth(genMeth, "//UIAApplication[1]/UIAWindow[1]/UIAScrollView[1]/UIACollectionView[1]/UIACollectionCell[1]/UIAButton[1]");
genMeth.eyesCheckWindow(eyes, "SampleApp Service Calls- Second layer", useEye, skipfailure);
//Mobile & Email Contact Details/Person
genMeth.clickXpth(genMeth, "//UIAApplication[1]/UIAWindow[1]/UIAScrollView[1]/UIAStaticText[16]");
genMeth.eyesCheckWindow(eyes, "SampleApp Service Calls- Mobile Contact Person -Cards", useEye, skipfailure);
genMeth.clickName(genMeth, DroidData.BTNCancelName);
genMeth.clickXpth(genMeth, "//UIAApplication[1]/UIAWindow[1]/UIAScrollView[1]/UIAStaticText[17]");
genMeth.eyesCheckWindow(eyes, "SampleApp Service Calls- Email Contact Person -Cards", useEye, skipfailure);
genMeth.clickName(genMeth, DroidData.BTNCancelName);
genMeth.clickName(genMeth, DroidData.BTNdeleteDraft_Name);
genMeth.scrollDown(driver);
genMeth.scrollDown(driver);
//Mobile / Email / Map / URL - Address section
//Phone
genMeth.clickXpth(genMeth, "//UIAApplication[1]/UIAWindow[1]/UIAScrollView[1]/UIAStaticText[20]");
genMeth.eyesCheckWindow(eyes, "SampleApp Service Calls- Mobile (Address Section)", useEye, skipfailure);
genMeth.clickName(genMeth, DroidData.BTNCancelName);
//Email
genMeth.clickXpth(genMeth, "//UIAApplication[1]/UIAWindow[1]/UIAScrollView[1]/UIAStaticText[21]");
genMeth.eyesCheckWindow(eyes, "SampleApp Service Calls- Email (Address Section)", useEye, skipfailure);
genMeth.clickName(genMeth, DroidData.BTNCancelName);
genMeth.clickName(genMeth, DroidData.BTNdeleteDraft_Name);
// URL
genMeth.clickXpth(genMeth, "//UIAApplication[1]/UIAWindow[1]/UIAScrollView[1]/UIAStaticText[22]");
genMeth.eyesCheckWindow(eyes, "SampleApp Service Calls- URL ((Address Section))", useEye, skipfailure);
genMeth.clickName(genMeth, DroidData.BTNdoneName);
//Map
genMeth.clickXpth(genMeth, "//UIAApplication[1]/UIAWindow[1]/UIAScrollView[1]/UIAStaticText[24]");
genMeth.eyesCheckWindow(eyes, "SampleApp Service Calls- Mobile Maps (Address Section)", useEye, skipfailure);
genMeth.clickName(genMeth, DroidData.BTNCancelName);
// Mobile / Email (Address Section)
//Mobile
genMeth.clickXpth(genMeth, "//UIAApplication[1]/UIAWindow[1]/UIAScrollView[1]/UIAStaticText[28]");
genMeth.eyesCheckWindow(eyes, "SampleApp Service Calls- Phone (Assigned To Section)", useEye, skipfailure);
genMeth.clickName(genMeth, DroidData.BTNCancelName);
// Email
genMeth.clickXpth(genMeth, "//UIAApplication[1]/UIAWindow[1]/UIAScrollView[1]/UIAStaticText[29]");
genMeth.eyesCheckWindow(eyes, "SampleApp Service Calls- Email (Assigned To Section)", useEye, skipfailure);
genMeth.clickName(genMeth, DroidData.BTNCancelName);
genMeth.clickName(genMeth, DroidData.BTNdeleteDraft_Name);
//Close Service Call Action
genMeth.clickName(genMeth, "Close Service Call");
genMeth.eyesCheckWindow(eyes, "SampleApp Service Calls- Close Service Calls - Action", useEye, skipfailure);
genMeth.clickName(genMeth, "Comments");
genMeth.sendXpth(genMeth, "//UIAApplication[1]/UIAWindow[1]/UIATextView[1]", "Meny The Best");
genMeth.clickName(genMeth, DroidData.BTNdoneName);
genMeth.clickName(genMeth, "Parts");
genMeth.clickName(genMeth, "Drawer");
genMeth.clickName(genMeth, "SolutionType");
genMeth.clickName(genMeth, "Replaced cash drawer");
genMeth.clickName(genMeth, "Status");
genMeth.clickName(genMeth, "Open");
genMeth.clickId(genMeth, DroidData.BTNsubmit_ID);
genMeth.eyesCheckWindow(eyes, "SampleApp Service Calls- Close Service Calls - After Action", useEye, skipfailure);
Thread.sleep(2000);
genMeth.clickName(genMeth, DroidData.BTNBackName);
genMeth.clickName(genMeth, DroidData.IconBack_Nav_Name);
}
@Test (enabled = true ,testName = "Sample Application", retryAnalyzer = Retry.class, description = "" ,
groups= {"Sanity IOSsample"} /*dependsOnMethods={"testLogin"})
public void sampleAplicationServiceCallsMapNewServicecall() throws ParserConfigurationException,
SAXException, IOException, InterruptedException {
//OPEN SERVICE CALLS Map
genMeth.signOutFromStartup(genMeth);
genMeth.clickId(genMeth, DroidData.BTNsampleAccountID);
genMeth.clickName(genMeth, DroidData.Icon_AllApps_Name);
genMeth.clickName(genMeth, "Operations 4.11");
//Open service calls map
genMeth.clickId(genMeth, DroidData.ServiceCallsMapID);
Thread.sleep(1000);
genMeth.clickXpth(genMeth, DroidData.MallofAmericaOnMapXpath);
genMeth.eyesCheckWindow(eyes, "SampleApp Service Calls Maps- Mall of America", useEye, skipfailure);
//Check is Location popup is displayed
//genMeth.clickId(genMeth, DroidData.BTNmapphoneiconID);
genMeth.clickXpth(genMeth, "//UIAApplication[1]/UIAWindow[1]/UIATableView[1]/UIATableCell[1]/UIAButton[2]");
genMeth.eyesCheckWindow(eyes, "SampleApp Service Calls Maps- Mall of America - Phone Icon Option", useEye, skipfailure);
genMeth.clickName(genMeth, DroidData.BTNCancelName);
// genMeth.clickName(genMeth, DroidData.BTNMapCarIconName);
genMeth.clickXpth(genMeth, "//UIAApplication[1]/UIAWindow[1]/UIAButton[2]");
genMeth.eyesCheckWindow(eyes, "SampleApp Service Calls Maps- Mall of America - Car Direction", useEye, skipfailure);
genMeth.clickName(genMeth, DroidData.BTNCancelName);
//go back to the map tab via the back navigation icon
genMeth.clickXpth(genMeth, "//UIAApplication[1]/UIAWindow[1]/UIAButton[3]");
genMeth.eyesCheckWindow(eyes, "SampleApp Service Calls:5", useEye, skipfailure);
genMeth.clickName(genMeth, DroidData.IconBack_Nav_Name);
//Create new service call
genMeth.clickId(genMeth, DroidData.BTNnewServiceCallId);
genMeth.eyesCheckWindow(eyes, "New Service Call", useEye, skipfailure);
genMeth.clickId(genMeth, DroidData.BranchID);
genMeth.eyesCheckWindow(eyes, "Branch simple list", useEye, skipfailure);
genMeth.clickId(genMeth, DroidData.MallOfAmerica_Id);
genMeth.clickId(genMeth, "Assigned To");
genMeth.clickId(genMeth, "Jessica Blue");
genMeth.clickId(genMeth, "Category");
genMeth.clickId(genMeth, "Computer");
genMeth.clickId(genMeth, "Item");
genMeth.clickId(genMeth, "Memory card");
genMeth.clickId(genMeth, "Description");
genMeth.setEnglishKeyboard(genMeth);
genMeth.sendXpth(genMeth, "//UIAApplication[1]/UIAWindow[1]/UIATextView[1]", "Meny The Best");
genMeth.clickId(genMeth, DroidData.BTNdoneName);
genMeth.clickId(genMeth, DroidData.BTNpriority_Name);
genMeth.clickId(genMeth, "1");
genMeth.eyesCheckWindow(eyes, "New service call with parameters", useEye, skipfailure);
genMeth.clickId(genMeth, DroidData.BTNsubmit_ID);
Thread.sleep(2000);
//genMeth.eyesCheckWindow(eyes, "New Service Call", useEye, skipfailure);
genMeth.eyesCheckWindow(eyes, "New service call Actions collections +", useEye, skipfailure);
genMeth.clickXpth(genMeth, "//UIAApplication[1]/UIAWindow[1]/UIAScrollView[1]/UIACollectionView[1]/UIACollectionCell[1]/UIAStaticText[1]");
genMeth.eyesCheckWindow(eyes, "New Service Call", useEye, skipfailure);
genMeth.clickName(genMeth, DroidData.BTNCancelName);
genMeth.clickName(genMeth, DroidData.IconBack_Nav_Name);
}
@Test (enabled = true ,testName = "Sample App OrderLookup Operation", retryAnalyzer = Retry.class, description = "OrderLookup Operation" ,
groups= {"Sanity IOSsample"} /*dependsOnMethods={"testLogin"})
public void sampleAplicationOrderLookupOperation() throws ParserConfigurationException,
SAXException, IOException, InterruptedException {
//OPEN Order Lookup
genMeth.signOutFromStartup(genMeth);
genMeth.clickId(genMeth, DroidData.BTNsampleAccountID);
genMeth.clickName(genMeth, DroidData.Icon_AllApps_Name);
genMeth.clickName(genMeth, "Operations 4.11");
//Order lookup
genMeth.clickId(genMeth, DroidData.OrderLookup_ID);
Thread.sleep(3000);
genMeth.eyesCheckWindow(eyes, "Order Lookup parameters", useEye, skipfailure);
genMeth.clickName(genMeth, "Start Date");
MobileElement UIAPickerWheel = driver.findElementByXPath("//UIAApplication[1]/UIAWindow[1]/UIAPicker[1]/UIAPickerWheel[1]");
UIAPickerWheel.sendKeys("July");
genMeth.clickName(genMeth, DroidData.BTNdoneName);
genMeth.clickName(genMeth, DroidData.BTNsubmit_ID);
Thread.sleep(1000);
genMeth.eyesCheckWindow(eyes, "List of Orders", useEye, skipfailure);
genMeth.clickName(genMeth, DroidData.IconBack_Nav_Name);
//Operations
genMeth.clickXpth(genMeth, " //UIAApplication[1]/UIAWindow[1]/UIATableView[1]/UIATableCell[7]");
Thread.sleep(3000);
genMeth.eyesCheckWindow(eyes, "Inventory", useEye, skipfailure);
//Open grid second layer
genMeth.clickName(genMeth, DroidData.MallOfAmerica_Id);
genMeth.eyesCheckWindow(eyes, "Inventory second layer", useEye, skipfailure);
genMeth.clickName(genMeth, DroidData.IconBack_Nav_Name);
genMeth.clickName(genMeth, "Inventory");
genMeth.clickName(genMeth, "Orders");
/*genMeth.swipeRightIphone5(1000);
genMeth.swipeRightIphone5(1000);
genMeth.swipeRightIphone5(1000);
genMeth.eyesCheckWindow(eyes, "Orders", useEye, skipfailure);
genMeth.clickName(genMeth, "Orders");
genMeth.clickName(genMeth, "Place New Order");
genMeth.eyesCheckWindow(eyes, "Place New Order", useEye, skipfailure);
//Open the place new order
MobileElement El = driver.findElementByXPath(DroidData.BTNplaceNewOrder_Xpth);
El.click();
genMeth.eyesCheckWindow(eyes, "Place new order parameters", useEye, skipfailure);
// genMeth.clickName(genMeth, DroidData.BTNsubmit_ID);
// genMeth.eyesCheckWindow(eyes, "Place new order parameters missing", useEye, skipfailure);
// genMeth.clickName(genMeth, DroidData.BTNokName);
//Fill the parameters
genMeth.clickId(genMeth, DroidData.BranchID);
genMeth.clickName(genMeth, DroidData.MallOfAmerica_Id);
genMeth.clickName(genMeth, "ProductID");
// genMeth.accessToCameraHandle(genMeth);
Thread.sleep(1000);
genMeth.clickXpth(genMeth, "//UIAApplication[1]/UIAWindow[1]/UIATextField[1]");
Thread.sleep(1000);
genMeth.clickName(genMeth, "1");
genMeth.clickName(genMeth, DroidData.BTNdoneName);
Thread.sleep(2000);
genMeth.clickName(genMeth, "Quantity");
genMeth.clickName(genMeth, "1");
genMeth.clickName(genMeth, DroidData.BTNdoneName);
genMeth.eyesCheckWindow(eyes, "Place new order All parameters", useEye, skipfailure);
genMeth.clickName(genMeth, DroidData.BTNsubmit_ID);
genMeth.eyesCheckWindow(eyes, "Place New Order", useEye, skipfailure);
genMeth.clickName(genMeth, DroidData.IconBack_Nav_Name);
genMeth.clickName(genMeth, DroidData.Icon_AllApps_Name);
}
@Test (enabled = true ,testName = "Sample App Technicians", retryAnalyzer = Retry.class, description = "Technicians" ,
groups= {"Sanity IOSsample"} /*dependsOnMethods={"testLogin"})
public void sampleAplicationTechnicians() throws ParserConfigurationException,
SAXException, IOException, InterruptedException {
//OPEN Order Lookup
genMeth.signOutFromStartup(genMeth);
genMeth.clickId(genMeth, DroidData.BTNsampleAccountID);
genMeth.clickName(genMeth, DroidData.Icon_AllApps_Name);
genMeth.clickName(genMeth, "Operations 4.11");
// Technicians
genMeth.clickName(genMeth, "Technicians");
Thread.sleep(1000);
genMeth.eyesCheckWindow(eyes, "Technicians", useEye, skipfailure);
// Phone Icon
genMeth.clickName(genMeth, "Phone");
Thread.sleep(1000);
genMeth.eyesCheckWindow(eyes, "Technicians- Phone", useEye, skipfailure);
genMeth.clickName(genMeth, DroidData.BTNCancelName);
genMeth.clickName(genMeth, "Phone");
// Add to contacts
genMeth.clickName(genMeth, DroidData.BTNaddContact_Name);
// genMeth.accessToContactsHandle(genMeth);
genMeth.eyesCheckWindow(eyes, "Technicians- Added by SkyGiraffe screen", useEye, skipfailure);
genMeth.clickName(genMeth, DroidData.BTNBackName);
// Mail Icon
genMeth.clickName(genMeth, "Email");
Thread.sleep(3000);
genMeth.eyesCheckWindow(eyes, "Technicians- New Message screen", useEye, skipfailure);
genMeth.clickName(genMeth, DroidData.BTNCancelName);
genMeth.clickName(genMeth, DroidData.BTNdeleteDraft_Name);
// Map Icon
genMeth.clickName(genMeth, "Address");
genMeth.eyesCheckWindow(eyes, "Technicians- Address screen", useEye, skipfailure);
genMeth.clickName(genMeth, DroidData.BTNCancelName);
// Swipe along the technicians Cover Flow
genMeth.swipeRightIphone5(1000);
genMeth.eyesCheckWindow(eyes, "Technicians- cover flow John Grant", useEye, skipfailure);
}
*/
}
|
Param_Report_CoverFlow
|
src/Native/SanityAndroid.java
|
Param_Report_CoverFlow
|
<ide><path>rc/Native/SanityAndroid.java
<ide>
<ide> }
<ide>
<del> @Test(enabled = true, groups = { "Sanity Android1" }, testName = "Param_Report_DL_Dashboard", retryAnalyzer = Retry.class, description = "Check the Employee Directory tab")
<add> @Test(enabled = true, groups = { "Sanity Android" }, testName = "Param_Report_DL_Dashboard", retryAnalyzer = Retry.class, description = "Check the Employee Directory tab")
<ide> public void Param_Report_DL_Dashboard()
<ide> throws ParserConfigurationException, SAXException, IOException,
<ide> InterruptedException {
<ide> }
<ide>
<ide>
<del> @Test(enabled = true, groups = { "Sanity IOS"}, testName = "Param_Report_DL_Dashboard", retryAnalyzer = Retry.class, description = "Check the Employee Directory tab")
<add> @Test(enabled = true, groups = { "Sanity Android1"}, testName = "Param_Report_DL_Dashboard", retryAnalyzer = Retry.class, description = "Check the Employee Directory tab")
<ide> public void Param_Report_CoverFlow()
<ide> throws ParserConfigurationException, SAXException, IOException,
<ide> InterruptedException {
<ide>
<ide> // go to parameterized report dashboard - DL- Device Info tab
<ide> genMeth.swipedownMeizuLong(1000);
<del> genMeth.clickId(genMeth, "Param Rep Cover Flow");
<del>
<del> genMeth.eyesCheckWindow(eyes, "Param Rep Cover Flow - Parameters", useEye, skipfailure);
<add> genMeth.clickXpthName_TextView(genMeth, "Param Rep Cover Flow");
<add>
<add> genMeth.eyesCheckWindow("Param Rep Cover Flow (Droid) - Parameters",useEye, genMeth, skipfailure);
<ide>
<ide> //Insert parameters
<del> genMeth.clickId(genMeth, "Insert Gender (F or M)");
<del> genMeth.eyesCheckWindow(eyes, "Param Rep Cover Flow - QR", useEye, skipfailure);
<del> genMeth.clickId(genMeth, "Insert Gender (F or M)");
<del> genMeth.clickId(genMeth, DroidData.BTNClearName);
<del> genMeth.clickId(genMeth, "m");
<del> genMeth.clickId(genMeth, DroidData.BTNdoneName);
<del>
<del> Thread.sleep(1000);
<del> genMeth.eyesCheckWindow(eyes, "Param Rep Cover Flow - Males", useEye, skipfailure);
<add> genMeth.clickXpthName_TextView(genMeth, "Insert Gender (F or M)");
<add> genMeth.eyesCheckWindow("Param Rep Cover Flow (Droid) - QR",useEye, genMeth, skipfailure);
<add> genMeth.clickId(genMeth, "com.skygiraffe.operationaldata:id/barcode_activity_manual_input_button");
<add> genMeth.sendId(genMeth, "com.skygiraffe.operationaldata:id/qr_scanner_view_finder", "m");
<add> //genMeth.clickId(genMeth, "m");
<add> genMeth.clickId(genMeth, "com.skygiraffe.operationaldata:id/qr_manual_input_submit_btn");
<add> genMeth.clickId(genMeth, "com.skygiraffe.operationaldata:id/parameterized_fragment_submit_button");
<add>
<add> Thread.sleep(2000);
<add> genMeth.eyesCheckWindow("Param Rep Cover Flow (Droid) - Males",useEye, genMeth, skipfailure);
<ide>
<ide> //Go To cover flow tab by const (females)
<ide> genMeth.clickId(genMeth, DroidData.TabBarTitle_Name);
<del> genMeth.clickId(genMeth, "Const-Female Only");
<del> genMeth.eyesCheckWindow(eyes, "Param Rep Cover Flow - Female", useEye, skipfailure);
<add> genMeth.clickXpthName_CheckedTextView(genMeth, "Const-Female Only");
<add> genMeth.eyesCheckWindow("Param Rep Cover Flow (Droid) - Female",useEye, genMeth, skipfailure);
<ide>
<ide> //Back to startup screen
<del> genMeth.clickId(genMeth, DroidData.IconBack_Nav_Name);
<add> genMeth.clickId(genMeth, DroidData.IconHome);
<add> genMeth.swipeUpMeizuLong(1000);
<add> genMeth.swipeUpMeizuLong(1000);
<ide> genMeth.swipeUpMeizuLong(1000);
<ide> genMeth.eyesCheckWindow("Default app is open (Droid) - SQL Golden App", useEye, genMeth, skipfailure);
<ide>
|
|
Java
|
lgpl-2.1
|
59af65d194da001d34a48da91ba296e6c5d439cb
| 0 |
universsky/checkstyle,WonderCsabo/checkstyle,rnveach/checkstyle,jonmbake/checkstyle,another-dave/checkstyle,FeodorFitsner/checkstyle,bansalayush/checkstyle,vboerchers/checkstyle,nikhilgupta23/checkstyle,zofuthan/checkstyle-1,nikhilgupta23/checkstyle,rmswimkktt/checkstyle,WilliamRen/checkstyle,mkordas/checkstyle,MEZk/checkstyle,attatrol/checkstyle,sabaka/checkstyle,rnveach/checkstyle,designreuse/checkstyle,MEZk/checkstyle,cs1331/checkstyle,ilanKeshet/checkstyle,StetsiukRoman/checkstyle,ivanov-alex/checkstyle,jonmbake/checkstyle,Godin/checkstyle,beckerhd/checkstyle,StetsiukRoman/checkstyle,zofuthan/checkstyle-1,llocc/checkstyle,Bhavik3/checkstyle,autermann/checkstyle,attatrol/checkstyle,nikhilgupta23/checkstyle,baratali/checkstyle,izishared/checkstyle,gallandarakhneorg/checkstyle,romani/checkstyle,Bhavik3/checkstyle,liscju/checkstyle,zofuthan/checkstyle-1,another-dave/checkstyle,baratali/checkstyle,naver/checkstyle,rnveach/checkstyle,ilanKeshet/checkstyle,attatrol/checkstyle,universsky/checkstyle,sirdis/checkstyle,AkshitaKukreja30/checkstyle,checkstyle/checkstyle,WonderCsabo/checkstyle,philwebb/checkstyle,checkstyle/checkstyle,mkordas/checkstyle,romani/checkstyle,cs1331/checkstyle,philwebb/checkstyle,llocc/checkstyle,gallandarakhneorg/checkstyle,jochenvdv/checkstyle,jochenvdv/checkstyle,philwebb/checkstyle,autermann/checkstyle,pietern/checkstyle,universsky/checkstyle,Bhavik3/checkstyle,liscju/checkstyle,vboerchers/checkstyle,HubSpot/checkstyle,Andrew0701/checkstyle,vboerchers/checkstyle,rnveach/checkstyle,mkordas/checkstyle,jasonchaffee/checkstyle,romani/checkstyle,jasonchaffee/checkstyle,pietern/checkstyle,naver/checkstyle,beckerhd/checkstyle,bansalayush/checkstyle,sharang108/checkstyle,HubSpot/checkstyle,MEZk/checkstyle,jdoyle65/checkstyle,bansalayush/checkstyle,another-dave/checkstyle,rmswimkktt/checkstyle,pietern/checkstyle,checkstyle/checkstyle,gallandarakhneorg/checkstyle,rmswimkktt/checkstyle,romani/checkstyle,naver/checkstyle,liscju/checkstyle,StetsiukRoman/checkstyle,checkstyle/checkstyle,beckerhd/checkstyle,romani/checkstyle,izishared/checkstyle,jonmbake/checkstyle,FeodorFitsner/checkstyle,sabaka/checkstyle,pbaranchikov/checkstyle,autermann/checkstyle,sharang108/checkstyle,llocc/checkstyle,sabaka/checkstyle,AkshitaKukreja30/checkstyle,AkshitaKukreja30/checkstyle,romani/checkstyle,designreuse/checkstyle,ivanov-alex/checkstyle,Godin/checkstyle,sirdis/checkstyle,sirdis/checkstyle,jochenvdv/checkstyle,WilliamRen/checkstyle,WonderCsabo/checkstyle,jdoyle65/checkstyle,ilanKeshet/checkstyle,Andrew0701/checkstyle,izishared/checkstyle,checkstyle/checkstyle,designreuse/checkstyle,Godin/checkstyle,checkstyle/checkstyle,rnveach/checkstyle,FeodorFitsner/checkstyle,jasonchaffee/checkstyle,sharang108/checkstyle,HubSpot/checkstyle,WilliamRen/checkstyle,cs1331/checkstyle,ivanov-alex/checkstyle,pbaranchikov/checkstyle,rnveach/checkstyle,baratali/checkstyle
|
////////////////////////////////////////////////////////////////////////////////
// checkstyle: Checks Java source code for adherence to a set of rules.
// Copyright (C) 2001-2002 Oliver Burn
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; either
// version 2.1 of the License, or (at your option) any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
////////////////////////////////////////////////////////////////////////////////
package com.puppycrawl.tools.checkstyle.api;
import java.text.MessageFormat;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import java.util.MissingResourceException;
import java.util.ResourceBundle;
/**
* Represents a message that can be localised. The translations come from
* message.properties files. The underlying implementation uses
* java.text.MessageFormat.
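*
* <p>For example, given a purely illustrative bundle entry
* <code>too.long=Line is longer than {0} characters</code>, calling
* <code>new LocalizedMessage(42, "my.messages", "too.long", new Object[]{"80"}).getMessage()</code>
* returns "Line is longer than 80 characters"; the bundle name and key here are examples only.</p>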
*
* @author <a href="mailto:[email protected]">Oliver Burn</a>
* @author lkuehne
* @version 1.0
*/
public final class LocalizedMessage
implements Comparable
{
/** the locale to localise messages to **/
private static Locale sLocale = Locale.getDefault();
/**
* A cache that maps bundle names to ResourceBundles.
* Avoids repetitive calls to ResourceBundle.getBundle().
* TODO: The cache should be cleared at some point.
*/
private static Map sBundleCache = new HashMap();
/** the line number **/
private final int mLineNo;
/** the column number **/
private final int mColNo;
/** key for the message format **/
private final String mKey;
/** arguments for MessageFormat **/
private final Object[] mArgs;
/** name of the resource bundle to get messages from **/
private final String mBundle;
/** @see Object#equals */
public boolean equals(Object aObject)
{
if (this == aObject) {
return true;
}
if (!(aObject instanceof LocalizedMessage)) {
return false;
}
final LocalizedMessage localizedMessage = (LocalizedMessage) aObject;
if (mColNo != localizedMessage.mColNo) {
return false;
}
if (mLineNo != localizedMessage.mLineNo) {
return false;
}
if (!mKey.equals(localizedMessage.mKey)) {
return false;
}
// ignoring mArgs and mBundle for perf reasons.
// we currently never load the same error from different bundles or
// fire the same error for the same location with different arguments.
return true;
}
/**
* @see Object#hashCode
*/
public int hashCode()
{
int result;
result = mLineNo;
result = 29 * result + mColNo;
result = 29 * result + mKey.hashCode();
return result;
}
/**
* Creates a new <code>LocalizedMessage</code> instance.
*
* @param aLineNo line number associated with the message
* @param aColNo column number associated with the message
* @param aBundle resource bundle name
* @param aKey the key to locate the translation
* @param aArgs arguments for the translation
*/
public LocalizedMessage(int aLineNo,
int aColNo,
String aBundle,
String aKey,
Object[] aArgs)
{
mLineNo = aLineNo;
mColNo = aColNo;
mKey = aKey;
mArgs = aArgs;
mBundle = aBundle;
}
/**
* Creates a new <code>LocalizedMessage</code> instance. The column number
* defaults to 0.
*
* @param aLineNo line number associated with the message
* @param aBundle name of a resource bundle that contains error messages
* @param aKey the key to locate the translation
* @param aArgs arguments for the translation
*/
public LocalizedMessage(
int aLineNo, String aBundle, String aKey, Object[] aArgs)
{
this(aLineNo, 0, aBundle, aKey, aArgs);
}
/** @return the translated message **/
public String getMessage()
{
try {
// Important to use the default class loader, and not the one in
// the GlobalProperties object. This is because the class loader in
// the GlobalProperties is specified by the user for resolving
// custom classes.
final ResourceBundle bundle = getBundle(mBundle);
final String pattern = bundle.getString(mKey);
return MessageFormat.format(pattern, mArgs);
}
catch (MissingResourceException ex) {
// If the Check author didn't provide i18n resource bundles
// and logs error messages directly, this will return
// the author's original message
return MessageFormat.format(mKey, mArgs);
}
}
/**
* Find a ResourceBundle for a given bundle name.
* @param aBundleName the bundle name
* @return a ResourceBundle
*/
private static ResourceBundle getBundle(String aBundleName)
{
ResourceBundle bundle = (ResourceBundle) sBundleCache.get(aBundleName);
if (bundle == null) {
bundle = ResourceBundle.getBundle(aBundleName, sLocale);
sBundleCache.put(aBundleName, bundle);
}
return bundle;
}
/** @return the line number **/
public int getLineNo()
{
return mLineNo;
}
/** @return the column number **/
public int getColumnNo()
{
return mColNo;
}
/**
* Returns the message key to locate the translation, can also be used
* in IDE plugins to map error messages to corrective actions.
*
* @return the message key
*/
public String getKey()
{
return mKey;
}
/** @param aLocale the locale to use for localization **/
public static void setLocale(Locale aLocale)
{
sLocale = aLocale;
}
////////////////////////////////////////////////////////////////////////////
// Interface Comparable methods
////////////////////////////////////////////////////////////////////////////
/** @see java.lang.Comparable **/
public int compareTo(Object aOther)
{
final LocalizedMessage lt = (LocalizedMessage) aOther;
if (getLineNo() == lt.getLineNo()) {
if (getColumnNo() == lt.getColumnNo()) {
return mKey.compareTo(lt.mKey);
}
return (getColumnNo() < lt.getColumnNo()) ? -1 : 1;
}
return (getLineNo() < lt.getLineNo()) ? -1 : 1;
}
}
|
src/checkstyle/com/puppycrawl/tools/checkstyle/api/LocalizedMessage.java
|
////////////////////////////////////////////////////////////////////////////////
// checkstyle: Checks Java source code for adherence to a set of rules.
// Copyright (C) 2001-2002 Oliver Burn
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; either
// version 2.1 of the License, or (at your option) any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
////////////////////////////////////////////////////////////////////////////////
package com.puppycrawl.tools.checkstyle.api;
import java.text.MessageFormat;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import java.util.MissingResourceException;
import java.util.ResourceBundle;
/**
* Represents a message that can be localised. The translations come from
* message.properties files. The underlying implementation uses
* java.text.MessageFormat.
*
* @author <a href="mailto:[email protected]">Oliver Burn</a>
* @author lkuehne
* @version 1.0
*/
public final class LocalizedMessage
implements Comparable
{
/** the locale to localise messages to **/
private static Locale sLocale = Locale.getDefault();
/**
* A cache that maps bundle names to ResourceBundles.
* Avoids repetitive calls to ResourceBundle.getBundle().
* TODO: The cache should be cleared at some point.
*/
private static Map sBundleCache = new HashMap();
/** the line number **/
private final int mLineNo;
/** the column number **/
private final int mColNo;
/** key for the message format **/
private final String mKey;
/** arguments for MessageFormat **/
private final Object[] mArgs;
/** name of the resource bundle to get messages from **/
private final String mBundle;
/** @see Object#equals */
public boolean equals(Object o)
{
if (this == o) return true;
if (!(o instanceof LocalizedMessage)) return false;
final LocalizedMessage localizedMessage = (LocalizedMessage) o;
if (mColNo != localizedMessage.mColNo) return false;
if (mLineNo != localizedMessage.mLineNo) return false;
if (!mKey.equals(localizedMessage.mKey)) return false;
// ignoring mArgs and mBundle for perf reasons.
// we currently never load the same error from different bundles or
// fire the same error for the same location with different arguments.
return true;
}
/**
* @see Object#hashCode
*/
public int hashCode()
{
int result;
result = mLineNo;
result = 29 * result + mColNo;
result = 29 * result + mKey.hashCode();
return result;
}
/**
* Creates a new <code>LocalizedMessage</code> instance.
*
* @param aLineNo line number associated with the message
* @param aColNo column number associated with the message
* @param aBundle resource bundle name
* @param aKey the key to locate the translation
* @param aArgs arguments for the translation
*/
public LocalizedMessage(int aLineNo,
int aColNo,
String aBundle,
String aKey,
Object[] aArgs)
{
mLineNo = aLineNo;
mColNo = aColNo;
mKey = aKey;
mArgs = aArgs;
mBundle = aBundle;
}
/**
* Creates a new <code>LocalizedMessage</code> instance. The column number
* defaults to 0.
*
* @param aLineNo line number associated with the message
* @param aBundle name of a resource bundle that contains error messages
* @param aKey the key to locate the translation
* @param aArgs arguments for the translation
*/
public LocalizedMessage(
int aLineNo, String aBundle, String aKey, Object[] aArgs)
{
this(aLineNo, 0, aBundle, aKey, aArgs);
}
/** @return the translated message **/
public String getMessage()
{
try {
// Important to use the default class loader, and not the one in
// the GlobalProperties object. This is because the class loader in
// the GlobalProperties is specified by the user for resolving
// custom classes.
final ResourceBundle bundle = getBundle(mBundle);
final String pattern = bundle.getString(mKey);
return MessageFormat.format(pattern, mArgs);
}
catch (MissingResourceException ex) {
// If the Check author didn't provide i18n resource bundles
// and logs error messages directly, this will return
// the author's original message
return MessageFormat.format(mKey, mArgs);
}
}
/**
* Find a ResourceBundle for a given bundle name.
* @param aBundleName the bundle name
* @return a ResourceBundle
*/
private static ResourceBundle getBundle(String aBundleName)
{
ResourceBundle bundle = (ResourceBundle) sBundleCache.get(aBundleName);
if (bundle == null) {
bundle = ResourceBundle.getBundle(aBundleName, sLocale);
sBundleCache.put(aBundleName, bundle);
}
return bundle;
}
/** @return the line number **/
public int getLineNo()
{
return mLineNo;
}
/** @return the column number **/
public int getColumnNo()
{
return mColNo;
}
/**
* Returns the message key to locate the translation, can also be used
* in IDE plugins to map error messages to corrective actions.
*
* @return the message key
*/
public String getKey()
{
return mKey;
}
/** @param aLocale the locale to use for localization **/
public static void setLocale(Locale aLocale)
{
sLocale = aLocale;
}
////////////////////////////////////////////////////////////////////////////
// Interface Comparable methods
////////////////////////////////////////////////////////////////////////////
/** @see java.lang.Comparable **/
public int compareTo(Object aOther)
{
final LocalizedMessage lt = (LocalizedMessage) aOther;
if (getLineNo() == lt.getLineNo()) {
if (getColumnNo() == lt.getColumnNo()) {
return mKey.compareTo(lt.mKey);
}
return (getColumnNo() < lt.getColumnNo()) ? -1 : 1;
}
return (getLineNo() < lt.getLineNo()) ? -1 : 1;
}
}
|
fixed checkstyle errors
IDEA's code generation did not know about our requirements...
|
src/checkstyle/com/puppycrawl/tools/checkstyle/api/LocalizedMessage.java
|
fixed checkstyle errors IDEA's code generation did not know about our requirements...
|
<ide><path>rc/checkstyle/com/puppycrawl/tools/checkstyle/api/LocalizedMessage.java
<ide> private final String mBundle;
<ide>
<ide> /** @see Object#equals */
<del> public boolean equals(Object o)
<del> {
<del> if (this == o) return true;
<del> if (!(o instanceof LocalizedMessage)) return false;
<del>
<del> final LocalizedMessage localizedMessage = (LocalizedMessage) o;
<del>
<del> if (mColNo != localizedMessage.mColNo) return false;
<del> if (mLineNo != localizedMessage.mLineNo) return false;
<del> if (!mKey.equals(localizedMessage.mKey)) return false;
<add> public boolean equals(Object aObject)
<add> {
<add> if (this == aObject) {
<add> return true;
<add> }
<add> if (!(aObject instanceof LocalizedMessage)) {
<add> return false;
<add> }
<add>
<add> final LocalizedMessage localizedMessage = (LocalizedMessage) aObject;
<add>
<add> if (mColNo != localizedMessage.mColNo) {
<add> return false;
<add> }
<add> if (mLineNo != localizedMessage.mLineNo) {
<add> return false;
<add> }
<add> if (!mKey.equals(localizedMessage.mKey)) {
<add> return false;
<add> }
<ide>
<ide> // ignoring mArgs and mBundle for perf reasons.
<ide>
|
|
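A minimal usage sketch of the LocalizedMessage class shown above (not taken from the repository; the bundle name "com.mycheck.messages" and the key "line.too.long" are hypothetical). It shows how a Check would construct a message and how getMessage() falls back to formatting the key itself when no resource bundle is found.
import com.puppycrawl.tools.checkstyle.api.LocalizedMessage;
public class LocalizedMessageExample
{
    public static void main(String[] aArgs)
    {
        // Hypothetical bundle "com.mycheck.messages" containing e.g.
        //   line.too.long=Line is {0} characters, maximum allowed is {1}.
        final LocalizedMessage msg = new LocalizedMessage(
            42,                      // line number of the violation
            "com.mycheck.messages",  // hypothetical bundle name
            "line.too.long",         // hypothetical message key
            new Object[] {new Integer(95), new Integer(80)});
        // getMessage() resolves the bundle (cached in sBundleCache), fetches
        // the pattern for the key and applies MessageFormat. If the bundle or
        // key is missing, the key itself is used as the format pattern.
        System.out.println(msg.getLineNo() + ": " + msg.getMessage());
    }
}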
Java
|
bsd-2-clause
|
483f41bc4a672f38bc4dba53801a6edef2382c69
| 0 |
TehSAUCE/imagej,biovoxxel/imagej,biovoxxel/imagej,TehSAUCE/imagej,TehSAUCE/imagej,biovoxxel/imagej
|
/*
* #%L
* ImageJ software for multidimensional image processing and analysis.
* %%
* Copyright (C) 2009 - 2013 Board of Regents of the University of
* Wisconsin-Madison, Broad Institute of MIT and Harvard, and Max Planck
* Institute of Molecular Cell Biology and Genetics.
* %%
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*
* The views and conclusions contained in the software and documentation are
* those of the authors and should not be interpreted as representing official
* policies, either expressed or implied, of any organization.
* #L%
*/
package imagej.legacy;
import java.awt.GraphicsEnvironment;
import java.lang.reflect.Field;
import java.net.URL;
import javassist.NotFoundException;
import javassist.bytecode.DuplicateMemberException;
import org.scijava.Context;
import org.scijava.util.ClassUtils;
/**
* Overrides class behavior of ImageJ1 classes using bytecode manipulation. This
* class uses the {@link CodeHacker} (which uses Javassist) to inject method
* hooks, which are implemented in the {@link imagej.legacy.patches} package.
*
* @author Curtis Rueden
*/
public class LegacyInjector {
private CodeHacker hacker;
/** Overrides class behavior of ImageJ1 classes by injecting method hooks. */
public void injectHooks(final ClassLoader classLoader) {
hacker = new CodeHacker(classLoader);
injectHooks(hacker);
}
/** Overrides class behavior of ImageJ1 classes by injecting method hooks. */
protected void injectHooks(final CodeHacker hacker) {
// NB: Override class behavior before class loading gets too far along.
if (GraphicsEnvironment.isHeadless()) {
new LegacyHeadless(hacker).patch();
}
// override behavior of ij.ImageJ
hacker.insertNewMethod("ij.ImageJ",
"public java.awt.Point getLocationOnScreen()");
hacker.insertAtTopOfMethod("ij.ImageJ",
"public java.awt.Point getLocationOnScreen()",
"if ($isLegacyMode()) return super.getLocationOnScreen();");
hacker.insertAtTopOfMethod("ij.ImageJ", "public void quit()",
"if (!($service instanceof imagej.legacy.DummyLegacyService)) $service.getContext().dispose();"
+ "if (!$isLegacyMode()) return;");
// override behavior of ij.IJ
hacker.insertAtTopOfMethod("ij.IJ",
"public static java.lang.Object runPlugIn(java.lang.String className, java.lang.String arg)",
"if (\"MacAdapter\".equals(className)) return null;");
hacker.insertAtBottomOfMethod("ij.IJ",
"public static void showProgress(double progress)");
hacker.insertAtBottomOfMethod("ij.IJ",
"public static void showProgress(int currentIndex, int finalIndex)");
hacker.insertAtBottomOfMethod("ij.IJ",
"public static void showStatus(java.lang.String s)");
hacker.insertPrivateStaticField("ij.IJ", Context.class, "_context");
hacker.insertNewMethod("ij.IJ",
"public synchronized static org.scijava.Context getContext()",
"if (_context == null) _context = new org.scijava.Context();"
+ "return _context;");
hacker.insertAtTopOfMethod("ij.IJ",
"public static Object runPlugIn(java.lang.String className, java.lang.String arg)",
"if (\"" + LegacyService.class.getName() + "\".equals($1))"
+ " return getLegacyService();"
+ "if (\"" + Context.class.getName() + "\".equals($1))"
+ " return getContext();");
hacker.insertAtTopOfMethod("ij.IJ", "public static void log(java.lang.String message)");
hacker.insertAtTopOfMethod("ij.IJ",
"static java.lang.Object runUserPlugIn(java.lang.String commandName, java.lang.String className, java.lang.String arg, boolean createNewLoader)",
"if (classLoader != null) Thread.currentThread().setContextClassLoader(classLoader);");
// override behavior of ij.ImagePlus
hacker.insertAtBottomOfMethod("ij.ImagePlus", "public void updateAndDraw()");
hacker.insertAtBottomOfMethod("ij.ImagePlus", "public void repaintWindow()");
hacker.insertAtBottomOfMethod("ij.ImagePlus",
"public void show(java.lang.String statusMessage)");
hacker.insertAtBottomOfMethod("ij.ImagePlus", "public void hide()");
hacker.insertAtBottomOfMethod("ij.ImagePlus", "public void close()");
// override behavior of ij.gui.ImageWindow
hacker.insertNewMethod("ij.gui.ImageWindow",
"public void setVisible(boolean vis)");
hacker.insertAtTopOfMethod("ij.gui.ImageWindow",
"public void setVisible(boolean vis)",
"if ($isLegacyMode()) { super.setVisible($1); }");
hacker.insertNewMethod("ij.gui.ImageWindow", "public void show()");
hacker.insertAtTopOfMethod("ij.gui.ImageWindow",
"public void show()",
"if ($isLegacyMode()) { super.show(); }");
hacker.insertAtTopOfMethod("ij.gui.ImageWindow", "public void close()");
// override behavior of PluginClassLoader
hacker.insertAtTopOfMethod("ij.io.PluginClassLoader", "void init(java.lang.String path)");
// override behavior of ij.macro.Functions
hacker
.insertAtTopOfMethod("ij.macro.Functions",
"void displayBatchModeImage(ij.ImagePlus imp2)",
"imagej.legacy.patches.FunctionsMethods.displayBatchModeImageBefore($service, $1);");
hacker
.insertAtBottomOfMethod("ij.macro.Functions",
"void displayBatchModeImage(ij.ImagePlus imp2)",
"imagej.legacy.patches.FunctionsMethods.displayBatchModeImageAfter($service, $1);");
// override behavior of MacAdapter, if needed
if (ClassUtils.hasClass("com.apple.eawt.ApplicationListener")) {
// NB: If com.apple.eawt package is present, override IJ1's MacAdapter.
hacker.insertAtTopOfMethod("MacAdapter",
"public void run(java.lang.String arg)",
"if (!$isLegacyMode()) return;");
}
// override behavior of ij.plugin.frame.RoiManager
hacker.insertNewMethod("ij.plugin.frame.RoiManager",
"public void show()",
"if ($isLegacyMode()) { super.show(); }");
hacker.insertNewMethod("ij.plugin.frame.RoiManager",
"public void setVisible(boolean b)",
"if ($isLegacyMode()) { super.setVisible($1); }");
//
// Below are patches to make ImageJ 1.x more backwards-compatible
//
// add back the (deprecated) killProcessor(), and overlay methods
final String[] imagePlusMethods = {
"public void killProcessor()",
"{}",
"public void setDisplayList(java.util.Vector list)",
"getCanvas().setDisplayList(list);",
"public java.util.Vector getDisplayList()",
"return getCanvas().getDisplayList();",
"public void setDisplayList(ij.gui.Roi roi, java.awt.Color strokeColor,"
+ " int strokeWidth, java.awt.Color fillColor)",
"setOverlay(roi, strokeColor, strokeWidth, fillColor);"
};
for (int i = 0; i < imagePlusMethods.length; i++) try {
hacker.insertNewMethod("ij.ImagePlus",
imagePlusMethods[i], imagePlusMethods[++i]);
} catch (Exception e) { /* ignore */ }
// make sure that ImageJ has been initialized in batch mode
hacker.insertAtTopOfMethod("ij.IJ",
"public static java.lang.String runMacro(java.lang.String macro, java.lang.String arg)",
"if (ij==null && ij.Menus.getCommands()==null) init();");
try {
hacker.insertNewMethod("ij.CompositeImage",
"public ij.ImagePlus[] splitChannels(boolean closeAfter)",
"ij.ImagePlus[] result = ij.plugin.ChannelSplitter.split(this);"
+ "if (closeAfter) close();"
+ "return result;");
hacker.insertNewMethod("ij.plugin.filter.RGBStackSplitter",
"public static ij.ImagePlus[] splitChannelsToArray(ij.ImagePlus imp, boolean closeAfter)",
"if (!imp.isComposite()) {"
+ " ij.IJ.error(\"splitChannelsToArray was called on a non-composite image\");"
+ " return null;"
+ "}"
+ "ij.ImagePlus[] result = ij.plugin.ChannelSplitter.split(imp);"
+ "if (closeAfter)"
+ " imp.close();"
+ "return result;");
} catch (IllegalArgumentException e) {
final Throwable cause = e.getCause();
if (cause != null && !(cause instanceof DuplicateMemberException)) {
throw e;
}
}
// handle mighty mouse (at least on old Linux, Java mistakes the horizontal wheel for a popup trigger)
for (String fullClass : new String[] {
"ij.gui.ImageCanvas",
"ij.plugin.frame.RoiManager",
"ij.text.TextPanel",
"ij.gui.Toolbar"
}) {
hacker.handleMightyMousePressed(fullClass);
}
// tell IJ#runUserPlugIn to catch NoSuchMethodErrors
final String runUserPlugInSig = "static java.lang.Object runUserPlugIn(java.lang.String commandName, java.lang.String className, java.lang.String arg, boolean createNewLoader)";
hacker.addCatch("ij.IJ", runUserPlugInSig, "java.lang.NoSuchMethodError",
"if (" + IJ1Helper.class.getName() + ".handleNoSuchMethodError($e))"
+ " throw new RuntimeException(ij.Macro.MACRO_CANCELED);"
+ "throw $e;");
// tell IJ#runUserPlugIn to be more careful about catching NoClassDefFoundError
hacker.insertPrivateStaticField("ij.IJ", String.class, "originalClassName");
hacker.insertAtTopOfMethod("ij.IJ", runUserPlugInSig, "originalClassName = $2;");
hacker.insertAtTopOfExceptionHandlers("ij.IJ", runUserPlugInSig, "java.lang.NoClassDefFoundError",
"java.lang.String realClassName = $1.getMessage();"
+ "int spaceParen = realClassName.indexOf(\" (\");"
+ "if (spaceParen > 0) realClassName = realClassName.substring(0, spaceParen);"
+ "if (!originalClassName.replace('.', '/').equals(realClassName)) {"
+ " if (realClassName.startsWith(\"javax/vecmath/\") || realClassName.startsWith(\"com/sun/j3d/\") || realClassName.startsWith(\"javax/media/j3d/\"))"
+ " ij.IJ.error(\"The class \" + originalClassName + \" did not find Java3D (\" + realClassName + \")\\nPlease call Plugins>3D Viewer to install\");"
+ " else"
+ " ij.IJ.handleException($1);"
+ " return null;"
+ "}");
// let the plugin class loader find stuff in $HOME/.plugins, too
hacker.addExtraPlugins();
// make sure that the GenericDialog is disposed in macro mode
try {
hacker.insertAtTopOfMethod("ij.gui.GenericDialog", "public void showDialog()", "if (macro) dispose();");
} catch (IllegalArgumentException e) {
// ignore if the headless patcher renamed the method away
if (e.getCause() == null || !(e.getCause() instanceof NotFoundException)) {
throw e;
}
}
// make sure NonBlockingGenericDialog does not wait in macro mode
hacker.replaceCallInMethod("ij.gui.NonBlockingGenericDialog", "public void showDialog()", "java.lang.Object", "wait", "if (isShowing()) wait();");
// tell the showStatus() method to show the version() instead of empty status
hacker.insertAtTopOfMethod("ij.ImageJ", "void showStatus(java.lang.String s)", "if ($1 == null || \"\".equals($1)) $1 = version();");
// handle custom icon (e.g. for Fiji)
if (!hacker.hasField("ij.IJ", "_iconURL")) { // Fiji will already have called CodeHacker#setIcon(File icon)
hacker.insertPublicStaticField("ij.IJ", URL.class, "_iconURL", null);
}
hacker.replaceCallInMethod("ij.ImageJ", "void setIcon()", "java.lang.Class", "getResource",
"if (ij.IJ._iconURL == null) $_ = $0.getResource($1);" +
"else $_ = ij.IJ._iconURL;");
hacker.insertAtTopOfMethod("ij.ImageJ", "public <init>(java.applet.Applet applet, int mode)",
"if ($2 != 2 /* ij.ImageJ.NO_SHOW */) setIcon();");
hacker.insertAtTopOfMethod("ij.WindowManager", "public void addWindow(java.awt.Frame window)",
"if (ij.IJ._iconURL != null && $1 != null) {"
+ " java.awt.Image img = $1.createImage((java.awt.image.ImageProducer)ij.IJ._iconURL.getContent());"
+ " if (img != null) {"
+ " $1.setIconImage(img);"
+ " }"
+ "}");
// optionally disallow batch mode from calling System.exit()
hacker.insertPrivateStaticField("ij.ImageJ", Boolean.TYPE, "batchModeMayExit");
hacker.insertAtTopOfMethod("ij.ImageJ", "public static void main(java.lang.String[] args)",
"batchModeMayExit = true;"
+ "for (int i = 0; i < $1.length; i++) {"
+ " if (\"-batch-no-exit\".equals($1[i])) {"
+ " batchModeMayExit = false;"
+ " $1[i] = \"-batch\";"
+ " }"
+ "}");
hacker.replaceCallInMethod("ij.ImageJ", "public static void main(java.lang.String[] args)", "java.lang.System", "exit",
"if (batchModeMayExit) System.exit($1);"
+ "if ($1 == 0) return;"
+ "throw new RuntimeException(\"Exit code: \" + $1);");
// do not use the current directory as IJ home on Windows
String prefsDir = System.getenv("IJ_PREFS_DIR");
if (prefsDir == null && System.getProperty("os.name").startsWith("Windows")) {
prefsDir = System.getenv("user.home");
}
if (prefsDir != null) {
hacker.overrideFieldWrite("ij.Prefs", "public java.lang.String load(java.lang.Object ij, java.applet.Applet applet)",
"prefsDir", "$_ = \"" + prefsDir + "\";");
}
// tool names can be prefixes of other tools, watch out for that!
hacker.replaceCallInMethod("ij.gui.Toolbar", "public int getToolId(java.lang.String name)", "java.lang.String", "startsWith",
"$_ = $0.equals($1) || $0.startsWith($1 + \"-\") || $0.startsWith($1 + \" -\");");
// make sure Rhino gets the correct class loader
hacker.insertAtTopOfMethod("JavaScriptEvaluator", "public void run()",
"Thread.currentThread().setContextClassLoader(ij.IJ.getClassLoader());");
// make sure that the check for Bio-Formats is correct
hacker.addToClassInitializer("ij.io.Opener",
"try {"
+ " ij.IJ.getClassLoader().loadClass(\"loci.plugins.LociImporter\");"
+ " bioformats = true;"
+ "} catch (ClassNotFoundException e) {"
+ " bioformats = false;"
+ "}");
// make sure that symbolic links are *not* resolved (because then the parent info in the FileInfo would be wrong)
hacker.replaceCallInMethod("ij.plugin.DragAndDrop", "public void openFile(java.io.File f)", "java.io.File", "getCanonicalPath",
"$_ = $0.getAbsolutePath();");
// commit patches
hacker.loadClasses();
// make sure that there is a legacy service
if (this.hacker != null) {
setLegacyService(new DummyLegacyService());
}
}
void setLegacyService(final LegacyService legacyService) {
try {
final Class<?> ij = hacker.classLoader.loadClass("ij.IJ");
Field field = ij.getDeclaredField("_legacyService");
field.setAccessible(true);
field.set(null, legacyService);
Context context;
try {
context = legacyService.getContext();
} catch (UnsupportedOperationException e) {
// DummyLegacyService does not have a context
context = null;
}
field = ij.getDeclaredField("_context");
field.setAccessible(true);
field.set(null, context);
} catch (ClassNotFoundException e) {
throw new IllegalArgumentException("Cannot find ij.IJ", e);
} catch (SecurityException e) {
throw new IllegalArgumentException("Cannot find ij.IJ", e);
} catch (NoSuchFieldException e) {
throw new IllegalArgumentException("Cannot find field in ij.IJ", e);
} catch (IllegalAccessException e) {
throw new IllegalArgumentException("Cannot access field in ij.IJ", e);
}
}
}
|
core/legacy/src/main/java/imagej/legacy/LegacyInjector.java
|
/*
* #%L
* ImageJ software for multidimensional image processing and analysis.
* %%
* Copyright (C) 2009 - 2013 Board of Regents of the University of
* Wisconsin-Madison, Broad Institute of MIT and Harvard, and Max Planck
* Institute of Molecular Cell Biology and Genetics.
* %%
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*
* The views and conclusions contained in the software and documentation are
* those of the authors and should not be interpreted as representing official
* policies, either expressed or implied, of any organization.
* #L%
*/
package imagej.legacy;
import java.awt.GraphicsEnvironment;
import java.lang.reflect.Field;
import java.net.URL;
import javassist.NotFoundException;
import javassist.bytecode.DuplicateMemberException;
import org.scijava.Context;
import org.scijava.util.ClassUtils;
/**
* Overrides class behavior of ImageJ1 classes using bytecode manipulation. This
* class uses the {@link CodeHacker} (which uses Javassist) to inject method
* hooks, which are implemented in the {@link imagej.legacy.patches} package.
*
* @author Curtis Rueden
*/
public class LegacyInjector {
private CodeHacker hacker;
/** Overrides class behavior of ImageJ1 classes by injecting method hooks. */
public void injectHooks(final ClassLoader classLoader) {
hacker = new CodeHacker(classLoader);
injectHooks(hacker);
}
/** Overrides class behavior of ImageJ1 classes by injecting method hooks. */
protected void injectHooks(final CodeHacker hacker) {
// NB: Override class behavior before class loading gets too far along.
if (GraphicsEnvironment.isHeadless()) {
new LegacyHeadless(hacker).patch();
}
// override behavior of ij.ImageJ
hacker.insertNewMethod("ij.ImageJ",
"public java.awt.Point getLocationOnScreen()");
hacker.insertAtTopOfMethod("ij.ImageJ",
"public java.awt.Point getLocationOnScreen()",
"if ($isLegacyMode()) return super.getLocationOnScreen();");
hacker.insertAtTopOfMethod("ij.ImageJ", "public void quit()",
"if (!($service instanceof imagej.legacy.DummyLegacyService)) $service.getContext().dispose();"
+ "if (!$isLegacyMode()) return;");
// override behavior of ij.IJ
hacker.insertAtTopOfMethod("ij.IJ",
"public static java.lang.Object runPlugIn(java.lang.String className, java.lang.String arg)",
"if (\"MacAdapter\".equals(className)) return null;");
hacker.insertAtBottomOfMethod("ij.IJ",
"public static void showProgress(double progress)");
hacker.insertAtBottomOfMethod("ij.IJ",
"public static void showProgress(int currentIndex, int finalIndex)");
hacker.insertAtBottomOfMethod("ij.IJ",
"public static void showStatus(java.lang.String s)");
hacker.insertPrivateStaticField("ij.IJ", Context.class, "_context");
hacker.insertNewMethod("ij.IJ",
"public synchronized static org.scijava.Context getContext()",
"if (_context == null) _context = new org.scijava.Context();"
+ "return _context;");
hacker.insertAtTopOfMethod("ij.IJ",
"public static Object runPlugIn(java.lang.String className, java.lang.String arg)",
"if (\"" + LegacyService.class.getName() + "\".equals($1))"
+ " return getLegacyService();"
+ "if (\"" + Context.class.getName() + "\".equals($1))"
+ " return getContext();");
hacker.insertAtTopOfMethod("ij.IJ", "public static void log(java.lang.String message)");
hacker.insertAtTopOfMethod("ij.IJ",
"static java.lang.Object runUserPlugIn(java.lang.String commandName, java.lang.String className, java.lang.String arg, boolean createNewLoader)",
"if (classLoader != null) Thread.currentThread().setContextClassLoader(classLoader);");
// override behavior of ij.ImagePlus
hacker.insertAtBottomOfMethod("ij.ImagePlus", "public void updateAndDraw()");
hacker.insertAtBottomOfMethod("ij.ImagePlus", "public void repaintWindow()");
hacker.insertAtBottomOfMethod("ij.ImagePlus",
"public void show(java.lang.String statusMessage)");
hacker.insertAtBottomOfMethod("ij.ImagePlus", "public void hide()");
hacker.insertAtBottomOfMethod("ij.ImagePlus", "public void close()");
// override behavior of ij.gui.ImageWindow
hacker.insertNewMethod("ij.gui.ImageWindow",
"public void setVisible(boolean vis)");
hacker.insertAtTopOfMethod("ij.gui.ImageWindow",
"public void setVisible(boolean vis)",
"if ($isLegacyMode()) { super.setVisible($1); }");
hacker.insertNewMethod("ij.gui.ImageWindow", "public void show()");
hacker.insertAtTopOfMethod("ij.gui.ImageWindow",
"public void show()",
"if ($isLegacyMode()) { super.show(); }");
hacker.insertAtTopOfMethod("ij.gui.ImageWindow", "public void close()");
// override behavior of PluginClassLoader
hacker.insertAtTopOfMethod("ij.io.PluginClassLoader", "void init(java.lang.String path)");
// override behavior of ij.macro.Functions
hacker
.insertAtTopOfMethod("ij.macro.Functions",
"void displayBatchModeImage(ij.ImagePlus imp2)",
"imagej.legacy.patches.FunctionsMethods.displayBatchModeImageBefore($service, $1);");
hacker
.insertAtBottomOfMethod("ij.macro.Functions",
"void displayBatchModeImage(ij.ImagePlus imp2)",
"imagej.legacy.patches.FunctionsMethods.displayBatchModeImageAfter($service, $1);");
// override behavior of MacAdapter, if needed
if (ClassUtils.hasClass("com.apple.eawt.ApplicationListener")) {
// NB: If com.apple.eawt package is present, override IJ1's MacAdapter.
hacker.insertAtTopOfMethod("MacAdapter",
"public void run(java.lang.String arg)",
"if (!$isLegacyMode()) return;");
}
// override behavior of ij.plugin.frame.RoiManager
hacker.insertNewMethod("ij.plugin.frame.RoiManager",
"public void show()",
"if ($isLegacyMode()) { super.show(); }");
hacker.insertNewMethod("ij.plugin.frame.RoiManager",
"public void setVisible(boolean b)",
"if ($isLegacyMode()) { super.setVisible($1); }");
//
// Below are patches to make ImageJ 1.x more backwards-compatible
//
// add back the (deprecated) killProcessor(), and overlay methods
final String[] imagePlusMethods = {
"public void killProcessor()",
"{}",
"public void setDisplayList(java.util.Vector list)",
"getCanvas().setDisplayList(list);",
"public java.util.Vector getDisplayList()",
"return getCanvas().getDisplayList();",
"public void setDisplayList(ij.gui.Roi roi, java.awt.Color strokeColor,"
+ " int strokeWidth, java.awt.Color fillColor)",
"setOverlay(roi, strokeColor, strokeWidth, fillColor);"
};
for (int i = 0; i < imagePlusMethods.length; i++) try {
hacker.insertNewMethod("ij.ImagePlus",
imagePlusMethods[i], imagePlusMethods[++i]);
} catch (Exception e) { /* ignore */ }
// make sure that ImageJ has been initialized in batch mode
hacker.insertAtTopOfMethod("ij.IJ",
"public static java.lang.String runMacro(java.lang.String macro, java.lang.String arg)",
"if (ij==null && ij.Menus.getCommands()==null) init();");
try {
hacker.insertNewMethod("ij.CompositeImage",
"public ij.ImagePlus[] splitChannels(boolean closeAfter)",
"ij.ImagePlus[] result = ij.plugin.ChannelSplitter.split(this);"
+ "if (closeAfter) close();"
+ "return result;");
hacker.insertNewMethod("ij.plugin.filter.RGBStackSplitter",
"public static ij.ImagePlus[] splitChannelsToArray(ij.ImagePlus imp, boolean closeAfter)",
"if (!imp.isComposite()) {"
+ " ij.IJ.error(\"splitChannelsToArray was called on a non-composite image\");"
+ " return null;"
+ "}"
+ "ij.ImagePlus[] result = ij.plugin.ChannelSplitter.split(imp);"
+ "if (closeAfter)"
+ " imp.close();"
+ "return result;");
} catch (IllegalArgumentException e) {
final Throwable cause = e.getCause();
if (cause != null && !(cause instanceof DuplicateMemberException)) {
throw e;
}
}
// handle mighty mouse (at least on old Linux, Java mistakes the horizontal wheel for a popup trigger)
for (String fullClass : new String[] {
"ij.gui.ImageCanvas",
"ij.plugin.frame.RoiManager",
"ij.text.TextPanel",
"ij.gui.Toolbar"
}) {
hacker.handleMightyMousePressed(fullClass);
}
// tell IJ#runUserPlugIn to catch NoSuchMethodErrors
final String runUserPlugInSig = "static java.lang.Object runUserPlugIn(java.lang.String commandName, java.lang.String className, java.lang.String arg, boolean createNewLoader)";
hacker.addCatch("ij.IJ", runUserPlugInSig, "java.lang.NoSuchMethodError",
"if (" + IJ1Helper.class.getName() + ".handleNoSuchMethodError($e))"
+ " throw new RuntimeException(ij.Macro.MACRO_CANCELED);"
+ "throw $e;");
// tell IJ#runUserPlugIn to be more careful about catching NoClassDefFoundError
hacker.insertPrivateStaticField("ij.IJ", String.class, "originalClassName");
hacker.insertAtTopOfMethod("ij.IJ", runUserPlugInSig, "originalClassName = $2;");
hacker.insertAtTopOfExceptionHandlers("ij.IJ", runUserPlugInSig, "java.lang.NoClassDefFoundError",
"java.lang.String realClassName = $1.getMessage();"
+ "int spaceParen = realClassName.indexOf(\" (\");"
+ "if (spaceParen > 0) realClassName = realClassName.substring(0, spaceParen);"
+ "if (!originalClassName.replace('.', '/').equals(realClassName)) {"
+ " if (realClassName.startsWith(\"javax/vecmath/\") || realClassName.startsWith(\"com/sun/j3d/\") || realClassName.startsWith(\"javax/media/j3d/\"))"
+ " ij.IJ.error(\"The class \" + originalClassName + \" did not find Java3D (\" + realClassName + \")\\nPlease call Plugins>3D Viewer to install\");"
+ " else"
+ " ij.IJ.handleException($1);"
+ " return null;"
+ "}");
// let the plugin class loader find stuff in $HOME/.plugins, too
hacker.addExtraPlugins();
// make sure that the GenericDialog is disposed in macro mode
try {
hacker.insertAtTopOfMethod("ij.gui.GenericDialog", "public void showDialog()", "if (macro) dispose();");
} catch (IllegalArgumentException e) {
// ignore if the headless patcher renamed the method away
if (e.getCause() == null || !(e.getCause() instanceof NotFoundException)) {
throw e;
}
}
// make sure NonBlockingGenericDialog does not wait in macro mode
hacker.replaceCallInMethod("ij.gui.NonBlockingGenericDialog", "public void showDialog()", "java.lang.Object", "wait", "if (isShowing()) wait();");
// tell the showStatus() method to show the version() instead of empty status
hacker.insertAtTopOfMethod("ij.ImageJ", "void showStatus(java.lang.String s)", "if ($1 == null || \"\".equals($1)) $1 = version();");
// handle custom icon (e.g. for Fiji)
if (!hacker.hasField("ij.IJ", "_iconURL")) { // Fiji will already have called CodeHacker#setIcon(File icon)
hacker.insertPublicStaticField("ij.IJ", URL.class, "_iconURL", null);
}
hacker.replaceCallInMethod("ij.ImageJ", "void setIcon()", "java.lang.Class", "getResource",
"if (ij.IJ._iconURL == null) $_ = $0.getResource($1);" +
"else $_ = ij.IJ._iconURL;");
hacker.insertAtTopOfMethod("ij.ImageJ", "public <init>(java.applet.Applet applet, int mode)",
"if ($2 != 2 /* ij.ImageJ.NO_SHOW */) setIcon();");
hacker.insertAtTopOfMethod("ij.WindowManager", "public void addWindow(java.awt.Frame window)",
"if (ij.IJ._iconURL != null && $1 != null) {"
+ " java.awt.Image img = $1.createImage((java.awt.image.ImageProducer)ij.IJ._iconURL.getContent());"
+ " if (img != null) {"
+ " $1.setIconImage(img);"
+ " }"
+ "}");
// optionally disallow batch mode from calling System.exit()
hacker.insertPrivateStaticField("ij.ImageJ", Boolean.TYPE, "batchModeMayExit");
hacker.insertAtTopOfMethod("ij.ImageJ", "public static void main(java.lang.String[] args)",
"batchModeMayExit = true;"
+ "for (int i = 0; i < $1.length; i++) {"
+ " if (\"-batch-no-exit\".equals($1[i])) {"
+ " batchModeMayExit = false;"
+ " $1[i] = \"-batch\";"
+ " }"
+ "}");
hacker.replaceCallInMethod("ij.ImageJ", "public static void main(java.lang.String[] args)", "java.lang.System", "exit",
"if (batchModeMayExit) System.exit($1);"
+ "if ($1 == 0) return;"
+ "throw new RuntimeException(\"Exit code: \" + $1);");
// do not use the current directory as IJ home on Windows
String prefsDir = System.getenv("IJ_PREFS_DIR");
if (prefsDir == null && System.getProperty("os.name").startsWith("Windows")) {
prefsDir = System.getenv("user.home");
}
if (prefsDir != null) {
hacker.overrideFieldWrite("ij.Prefs", "public java.lang.String load(java.lang.Object ij, java.applet.Applet applet)",
"prefsDir", "$_ = \"" + prefsDir + "\";");
}
// tool names can be prefixes of other tools, watch out for that!
hacker.replaceCallInMethod("ij.gui.Toolbar", "public int getToolId(java.lang.String name)", "java.lang.String", "startsWith",
"$_ = $0.equals($1) || $0.startsWith($1 + \"-\") || $0.startsWith($1 + \" -\");");
// make sure Rhino gets the correct class loader
hacker.insertAtTopOfMethod("JavaScriptEvaluator", "public void run()",
"Thread.currentThread().setContextClassLoader(ij.IJ.getClassLoader());");
// make sure that the check for Bio-Formats is correct
hacker.addToClassInitializer("ij.io.Opener",
"try {"
+ " ij.IJ.getClassLoader().loadClass(\"loci.plugins.LociImporter\");"
+ " bioformats = true;"
+ "} catch (ClassNotFoundException e) {"
+ " bioformats = false;"
+ "}");
// commit patches
hacker.loadClasses();
// make sure that there is a legacy service
if (this.hacker != null) {
setLegacyService(new DummyLegacyService());
}
}
void setLegacyService(final LegacyService legacyService) {
try {
final Class<?> ij = hacker.classLoader.loadClass("ij.IJ");
Field field = ij.getDeclaredField("_legacyService");
field.setAccessible(true);
field.set(null, legacyService);
Context context;
try {
context = legacyService.getContext();
} catch (UnsupportedOperationException e) {
// DummyLegacyService does not have a context
context = null;
}
field = ij.getDeclaredField("_context");
field.setAccessible(true);
field.set(null, context);
} catch (ClassNotFoundException e) {
throw new IllegalArgumentException("Cannot find ij.IJ", e);
} catch (SecurityException e) {
throw new IllegalArgumentException("Cannot find ij.IJ", e);
} catch (NoSuchFieldException e) {
throw new IllegalArgumentException("Cannot find field in ij.IJ", e);
} catch (IllegalAccessException e) {
throw new IllegalArgumentException("Cannot access field in ij.IJ", e);
}
}
}
|
ij-legacy: Port fiji-compat's Drag 'n Drop patch regarding symbolic links
This patch is needed to resolve parent directories for TrakEM2
projects correctly.
See Albert Cardona's explanation in
https://github.com/fiji/ImageJA/commit/53d1b5dd9
Signed-off-by: Johannes Schindelin <[email protected]>
|
core/legacy/src/main/java/imagej/legacy/LegacyInjector.java
|
ij-legacy: Port fiji-compat's Drag 'n Drop patch regarding symbolic links
|
<ide><path>ore/legacy/src/main/java/imagej/legacy/LegacyInjector.java
<ide> + " bioformats = false;"
<ide> + "}");
<ide>
<add> // make sure that symbolic links are *not* resolved (because then the parent info in the FileInfo would be wrong)
<add> hacker.replaceCallInMethod("ij.plugin.DragAndDrop", "public void openFile(java.io.File f)", "java.io.File", "getCanonicalPath",
<add> "$_ = $0.getAbsolutePath();");
<add>
<ide> // commit patches
<ide> hacker.loadClasses();
<ide>
|
|
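A standalone sketch of the behaviour the commit message above describes (hypothetical paths; not part of the patched ImageJ code): File.getCanonicalPath() resolves symbolic links, so the parent directory recorded for a dropped file would point at the link target, while File.getAbsolutePath() keeps the dropped path and therefore the parent directory the user expects.
import java.io.File;
import java.io.IOException;
public class SymlinkPathDemo {
    public static void main(String[] args) throws IOException {
        // Hypothetical setup: /home/user/project.xml is a symbolic link
        // pointing at /mnt/archive/real-project.xml.
        final File dropped = new File("/home/user/project.xml");
        // getAbsolutePath() keeps the path as dropped, so the parent stays
        // /home/user -- this is what the patched DragAndDrop#openFile uses.
        System.out.println("absolute : " + dropped.getAbsolutePath());
        System.out.println("parent   : " + dropped.getAbsoluteFile().getParent());
        // getCanonicalPath() follows the link, so the parent would become
        // /mnt/archive and files expected next to the dropped path (e.g. a
        // TrakEM2 project's images) would no longer be found.
        System.out.println("canonical: " + dropped.getCanonicalPath());
    }
}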
Java
|
apache-2.0
|
24ca6717cfe56d0e6e5a3310989770c8295e3825
| 0 |
AlexFalappa/nb-springboot,AlexFalappa/nb-springboot
|
/*
* Copyright 2016 Alessandro Falappa.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.alexfalappa.nbspringboot.projects.initializr;
import java.awt.Dimension;
import java.awt.Font;
import java.awt.GridBagConstraints;
import java.awt.Insets;
import java.awt.Rectangle;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import javax.swing.JCheckBox;
import javax.swing.JLabel;
import javax.swing.Scrollable;
import com.fasterxml.jackson.databind.JsonNode;
import static javax.swing.SwingConstants.HORIZONTAL;
/**
* Specialized scrollable panel to manage a list of checkbox groups, each containing two columns of checkboxes.
* <p>
* The panel is dynamically filled processing a JSON tree received from the Spring Initializr rest service.
*
* @author Alessandro Falappa
*/
public class BootDependenciesPanel extends javax.swing.JPanel implements Scrollable {
private static final String PROP_VERSION_RANGE = "versionRange";
private static final String PROP_DESCRIPTION = "boot.description";
private static final int OUTER_GAP = 4;
private static final int INNER_GAP = 2;
private static final int INDENT = 10;
private static final int GROUP_SPACE = 16;
private static final int TOOLTIP_WIDTH = 40;
private boolean initialized = false;
private final Map<String, List<JCheckBox>> chkBoxesMap = new HashMap<>();
private final List<JLabel> grpLabels = new ArrayList<>();
private Integer unitIncrement = null;
private Integer blockIncrement = null;
public BootDependenciesPanel() {
initComponents();
}
public void init(JsonNode metaData) {
JsonNode depArray = metaData.path("dependencies").path("values");
final int nodeNum = depArray.size();
// remove informative label
if (nodeNum > 0) {
this.remove(lNotInitialized);
}
// prepare dependencies checkboxes
for (int i = 0; i < nodeNum; i++) {
JsonNode gn = depArray.get(i);
final String groupName = gn.path("name").asText();
// group label
JLabel lGroup = new JLabel(groupName);
lGroup.setFont(lGroup.getFont().deriveFont(Font.BOLD, lGroup.getFont().getSize() + 2));
grpLabels.add(lGroup);
this.add(lGroup, constraintsForGroupLabel(i == 0));
// starter checkboxes in two columns
final JsonNode valArray = gn.path("values");
for (int j = 0; j < valArray.size(); j++) {
// first column
JsonNode dn = valArray.get(j);
this.add(checkBoxForNode(groupName, dn), constraintsForFirstColumnCheckbox());
// second column (optional)
if (++j < valArray.size()) {
dn = valArray.get(j);
this.add(checkBoxForNode(groupName, dn), constraintsForSecondColumnCheckbox());
}
}
}
initialized = true;
// force recompute of increments
unitIncrement = null;
blockIncrement = null;
}
public String getSelectedDependenciesString() {
StringBuilder sb = new StringBuilder();
for (List<JCheckBox> chList : chkBoxesMap.values()) {
for (JCheckBox cb : chList) {
if (cb.isEnabled() && cb.isSelected()) {
sb.append(cb.getName()).append(',');
}
}
}
// remove last comma (if present)
if (sb.length() > 0) {
sb.setLength(sb.length() - 1);
}
return sb.toString();
}
void setSelectedDependenciesString(String deps) {
HashSet<String> hs = new HashSet<>(Arrays.asList(deps.split(",")));
for (List<JCheckBox> chList : chkBoxesMap.values()) {
for (JCheckBox cb : chList) {
cb.setSelected(hs.contains(cb.getName()));
}
}
}
public List<String> getSelectedDependencies() {
List<String> ret = new ArrayList<>();
for (List<JCheckBox> chList : chkBoxesMap.values()) {
for (JCheckBox cb : chList) {
if (cb.isEnabled() && cb.isSelected()) {
ret.add(cb.getName());
}
}
}
return ret;
}
void setSelectedDependencies(List<String> deps) {
HashSet<String> hs = new HashSet<>(deps);
for (List<JCheckBox> chList : chkBoxesMap.values()) {
for (JCheckBox cb : chList) {
cb.setSelected(hs.contains(cb.getName()));
}
}
}
@Override
public Dimension getPreferredScrollableViewportSize() {
Dimension size = getPreferredSize();
if (initialized) {
size = new Dimension(size.width, size.height / 8);
}
return size;
}
@Override
public int getScrollableUnitIncrement(Rectangle visibleRect, int orientation, int direction) {
if (orientation == HORIZONTAL) {
return getPreferredSize().width / 10;
} else {
if (unitIncrement == null) {
unitIncrement = computeUnitIncrement();
}
return unitIncrement;
}
}
@Override
public int getScrollableBlockIncrement(Rectangle visibleRect, int orientation, int direction) {
if (orientation == HORIZONTAL) {
return getPreferredSize().width / 5;
} else {
if (blockIncrement == null) {
blockIncrement = computeBlockIncrement();
}
return blockIncrement;
}
}
@Override
public boolean getScrollableTracksViewportWidth() {
return false;
}
@Override
public boolean getScrollableTracksViewportHeight() {
return false;
}
private JCheckBox checkBoxForNode(String group, JsonNode dn) {
final String name = dn.path("name").asText();
final String id = dn.path("id").asText();
final String description = dn.path("description").asText();
final String versRange = dn.path("versionRange").asText();
JCheckBox ch = new JCheckBox(name);
ch.setName(id);
ch.putClientProperty(PROP_VERSION_RANGE, versRange);
ch.putClientProperty(PROP_DESCRIPTION, description);
if (!chkBoxesMap.containsKey(group)) {
chkBoxesMap.put(group, new ArrayList<JCheckBox>());
}
chkBoxesMap.get(group).add(ch);
return ch;
}
private GridBagConstraints constraintsForSecondColumnCheckbox() {
GridBagConstraints gbc;
gbc = new java.awt.GridBagConstraints();
gbc.gridx = 1;
gbc.gridwidth = GridBagConstraints.REMAINDER;
gbc.insets = new Insets(INNER_GAP, INDENT, 0, 0);
gbc.anchor = GridBagConstraints.LINE_START;
return gbc;
}
private GridBagConstraints constraintsForFirstColumnCheckbox() {
GridBagConstraints gbc;
gbc = new java.awt.GridBagConstraints();
gbc.gridx = 0;
gbc.insets = new Insets(INNER_GAP, INDENT, 0, 0);
gbc.anchor = GridBagConstraints.LINE_START;
return gbc;
}
private GridBagConstraints constraintsForGroupLabel(boolean first) {
GridBagConstraints gbc = new java.awt.GridBagConstraints();
gbc.gridx = 0;
gbc.gridwidth = GridBagConstraints.REMAINDER;
gbc.fill = GridBagConstraints.HORIZONTAL;
gbc.insets = (first) ? new Insets(OUTER_GAP, OUTER_GAP, 0, OUTER_GAP) : new Insets(GROUP_SPACE, OUTER_GAP, 0, OUTER_GAP);
return gbc;
}
/** This method is called from within the constructor to initialize the form. WARNING: Do NOT modify this code. The content of this
* method is always regenerated by the Form Editor.
*/
@SuppressWarnings("unchecked")
// <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
private void initComponents() {
lNotInitialized = new javax.swing.JLabel();
setLayout(new java.awt.GridBagLayout());
lNotInitialized.setText("Not initialized");
lNotInitialized.setEnabled(false);
add(lNotInitialized, new java.awt.GridBagConstraints());
}// </editor-fold>//GEN-END:initComponents
// Variables declaration - do not modify//GEN-BEGIN:variables
private javax.swing.JLabel lNotInitialized;
// End of variables declaration//GEN-END:variables
void adaptToBootVersion(String bootVersion) {
for (List<JCheckBox> chList : chkBoxesMap.values()) {
for (JCheckBox cb : chList) {
String verRange = (String) cb.getClientProperty(PROP_VERSION_RANGE);
String description = (String) cb.getClientProperty(PROP_DESCRIPTION);
final boolean allowable = allowable(verRange, bootVersion);
cb.setEnabled(allowable);
cb.setToolTipText(prepTooltip(description, allowable, verRange));
}
}
}
private static boolean allowable(String verRange, String bootVersion) {
boolean ret = true;
if (verRange != null && !verRange.isEmpty()) {
if (verRange.indexOf('[') >= 0 || verRange.indexOf('(') >= 0
|| verRange.indexOf(']') >= 0 || verRange.indexOf(')') >= 0) {
// bounded range
String[] bounds = verRange.substring(1, verRange.length() - 1).split(",");
// check there are two bounds
if (bounds.length != 2) {
return false;
}
// test various cases
if (bootVersion.compareTo(bounds[0]) > 0 && bootVersion.compareTo(bounds[1]) < 0) {
return true;
} else if (bootVersion.compareTo(bounds[0]) == 0 && verRange.startsWith("[")) {
return true;
} else if (bootVersion.compareTo(bounds[0]) == 0 && verRange.startsWith("(")) {
return false;
} else if (bootVersion.compareTo(bounds[1]) == 0 && verRange.endsWith("]")) {
return true;
} else if (bootVersion.compareTo(bounds[1]) == 0 && verRange.endsWith(")")) {
return false;
} else {
return false;
}
} else {
// unbounded range
return bootVersion.compareTo(verRange) >= 0;
}
}
return ret;
}
private String prepTooltip(String description, boolean allowable, String versRange) {
StringBuilder sb = new StringBuilder(wrap(description));
if (!allowable) {
sb.append("<br/><i>").append(decode(versRange)).append("</i>");
}
return sb.toString();
}
private StringBuilder wrap(String description) {
StringBuilder sb = new StringBuilder("<html>");
String[] words = description.split(" ");
String w = words[0];
sb.append(w);
int len = w.length();
for (int i = 1; i < words.length; i++) {
w = words[i];
if (len + w.length() + 1 > TOOLTIP_WIDTH) {
sb.append("<br/>").append(w);
len = w.length();
} else {
sb.append(" ").append(w);
len += w.length() + 1;
}
}
return sb;
}
private String decode(String verRange) {
StringBuilder sb = new StringBuilder();
if (verRange != null && !verRange.isEmpty()) {
if (verRange.indexOf('[') >= 0 || verRange.indexOf('(') >= 0
|| verRange.indexOf(']') >= 0 || verRange.indexOf(')') >= 0) {
// bounded range
String[] bounds = verRange.substring(1, verRange.length() - 1).split(",");
// check there are two bounds
if (bounds.length == 2) {
sb.append(bounds[0]);
if (verRange.startsWith("[")) {
sb.append(" <= ");
} else if (verRange.startsWith("(")) {
sb.append(" < ");
}
sb.append("Boot version");
if (verRange.endsWith("]")) {
sb.append(" >= ");
} else if (verRange.endsWith(")")) {
sb.append(" > ");
}
sb.append(bounds[1]);
}
} else {
// unbounded range
sb.append("Boot version >= ").append(verRange);
}
}
return sb.toString();
}
void clearFilter() {
filter(null);
}
void filter(String text) {
this.removeAll();
int cg = 1;
for (JLabel lGroup : grpLabels) {
List<JCheckBox> cbList = cbFilter(lGroup.getText(), text);
if (!cbList.isEmpty()) {
this.add(lGroup, constraintsForGroupLabel(cg++ == 0));
int cd = 1;
for (JCheckBox cb : cbList) {
if (cd++ % 2 == 0) {
this.add(cb, constraintsForSecondColumnCheckbox());
} else {
this.add(cb, constraintsForFirstColumnCheckbox());
}
}
}
}
this.revalidate();
this.repaint();
}
private List<JCheckBox> cbFilter(String group, String text) {
ArrayList<JCheckBox> ret = new ArrayList<>();
for (JCheckBox cb : chkBoxesMap.get(group)) {
if (text == null || cb.getText().toLowerCase().contains(text)) {
ret.add(cb);
}
}
return ret;
}
private int computeUnitIncrement() {
final Iterator<List<JCheckBox>> it = chkBoxesMap.values().iterator();
if (it.hasNext()) {
List<JCheckBox> list = it.next();
if (!list.isEmpty()) {
return list.get(0).getPreferredSize().height;
}
}
return getPreferredSize().height / 24;
}
private Integer computeBlockIncrement() {
final Iterator<List<JCheckBox>> it = chkBoxesMap.values().iterator();
if (it.hasNext()) {
List<JCheckBox> list = it.next();
if (!list.isEmpty()) {
return list.get(0).getPreferredSize().height * 5;
}
}
return getPreferredSize().height / 8;
}
}
|
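The allowable() and decode() methods above implement Spring Initializr's version-range notation, e.g. "[1.3.0.RELEASE,1.5.0.RELEASE)" for a bounded range or a bare version for an open-ended lower bound. Below is a minimal, self-contained re-implementation of that check for illustration only; it is not the panel's API and, like the code above, relies on plain lexicographic string comparison, which works when the compared versions have the same number of dotted components.
public class VersionRangeDemo {
    /** Returns true if bootVersion falls inside an Initializr-style range. */
    static boolean allowable(String range, String bootVersion) {
        if (range == null || range.isEmpty()) {
            // no range declared: the dependency is always selectable
            return true;
        }
        final boolean bounded = range.startsWith("[") || range.startsWith("(");
        if (!bounded) {
            // bare version: open-ended lower bound
            return bootVersion.compareTo(range) >= 0;
        }
        final String[] bounds = range.substring(1, range.length() - 1).split(",");
        if (bounds.length != 2) {
            return false;
        }
        final int lo = bootVersion.compareTo(bounds[0]);
        final int hi = bootVersion.compareTo(bounds[1]);
        final boolean loOk = range.startsWith("[") ? lo >= 0 : lo > 0;
        final boolean hiOk = range.endsWith("]") ? hi <= 0 : hi < 0;
        return loOk && hiOk;
    }
    public static void main(String[] args) {
        // Hypothetical ranges and versions, formatted like Initializr metadata.
        System.out.println(allowable("[1.3.0.RELEASE,1.5.0.RELEASE)", "1.4.2.RELEASE")); // true
        System.out.println(allowable("[1.3.0.RELEASE,1.5.0.RELEASE)", "1.5.0.RELEASE")); // false
        System.out.println(allowable("1.4.0.RELEASE", "1.3.8.RELEASE"));                 // false
    }
}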
src/main/java/com/github/alexfalappa/nbspringboot/projects/initializr/BootDependenciesPanel.java
|
/*
* Copyright 2016 Alessandro Falappa.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.alexfalappa.nbspringboot.projects.initializr;
import java.awt.Dimension;
import java.awt.Font;
import java.awt.GridBagConstraints;
import java.awt.Insets;
import java.awt.Rectangle;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import javax.swing.JCheckBox;
import javax.swing.JLabel;
import javax.swing.Scrollable;
import com.fasterxml.jackson.databind.JsonNode;
import static javax.swing.SwingConstants.HORIZONTAL;
/**
* Specialized scrollable panel to manage a list of checkbox groups, each containing two columns of checkboxes.
* <p>
* The panel is dynamically filled processing a JSON tree received from the Spring Initializr rest service.
*
* @author Alessandro Falappa
*/
public class BootDependenciesPanel extends javax.swing.JPanel implements Scrollable {
private static final String PROP_VERSION_RANGE = "versionRange";
private static final String PROP_DESCRIPTION = "boot.description";
private static final int OUTER_GAP = 4;
private static final int INNER_GAP = 2;
private static final int INDENT = 10;
private static final int GROUP_SPACE = 16;
private static final int TOOLTIP_WIDTH = 40;
private boolean initialized = false;
private final Map<String, List<JCheckBox>> chkBoxesMap = new HashMap<>();
private final List<JLabel> grpLabels = new ArrayList<>();
private Integer vertUnitIncrement = null;
private Integer vertBlockIncrement = null;
public BootDependenciesPanel() {
initComponents();
}
public void init(JsonNode metaData) {
JsonNode depArray = metaData.path("dependencies").path("values");
final int nodeNum = depArray.size();
// remove informative label
if (nodeNum > 0) {
this.remove(lNotInitialized);
}
// prepare dependencies checkboxes
for (int i = 0; i < nodeNum; i++) {
JsonNode gn = depArray.get(i);
final String groupName = gn.path("name").asText();
// group label
JLabel lGroup = new JLabel(groupName);
lGroup.setFont(lGroup.getFont().deriveFont(Font.BOLD, lGroup.getFont().getSize() + 2));
grpLabels.add(lGroup);
this.add(lGroup, constraintsForGroupLabel(i == 0));
// starter checkboxes in two columns
final JsonNode valArray = gn.path("values");
for (int j = 0; j < valArray.size(); j++) {
// first column
JsonNode dn = valArray.get(j);
this.add(checkBoxForNode(groupName, dn), constraintsForFirstColumnCheckbox());
// second column (optional)
if (++j < valArray.size()) {
dn = valArray.get(j);
this.add(checkBoxForNode(groupName, dn), constraintsForSecondColumnCheckbox());
}
}
}
initialized = true;
// force recompute of increments
vertUnitIncrement = null;
vertBlockIncrement = null;
}
public String getSelectedDependenciesString() {
StringBuilder sb = new StringBuilder();
for (List<JCheckBox> chList : chkBoxesMap.values()) {
for (JCheckBox cb : chList) {
if (cb.isEnabled() && cb.isSelected()) {
sb.append(cb.getName()).append(',');
}
}
}
// remove last comma (if present)
if (sb.length() > 0) {
sb.setLength(sb.length() - 1);
}
return sb.toString();
}
void setSelectedDependenciesString(String deps) {
HashSet<String> hs = new HashSet<>(Arrays.asList(deps.split(",")));
for (List<JCheckBox> chList : chkBoxesMap.values()) {
for (JCheckBox cb : chList) {
cb.setSelected(hs.contains(cb.getName()));
}
}
}
public List<String> getSelectedDependencies() {
List<String> ret = new ArrayList<>();
for (List<JCheckBox> chList : chkBoxesMap.values()) {
for (JCheckBox cb : chList) {
if (cb.isEnabled() && cb.isSelected()) {
ret.add(cb.getName());
}
}
}
return ret;
}
void setSelectedDependencies(List<String> deps) {
HashSet<String> hs = new HashSet<>(deps);
for (List<JCheckBox> chList : chkBoxesMap.values()) {
for (JCheckBox cb : chList) {
cb.setSelected(hs.contains(cb.getName()));
}
}
}
@Override
public Dimension getPreferredScrollableViewportSize() {
Dimension size = getPreferredSize();
if (initialized) {
size = new Dimension(size.width, size.height / 8);
}
return size;
}
@Override
public int getScrollableUnitIncrement(Rectangle visibleRect, int orientation, int direction) {
if (orientation == HORIZONTAL) {
return getPreferredSize().width / 10;
} else {
if (vertUnitIncrement == null) {
vertUnitIncrement = computeUnitIncrement();
}
return vertUnitIncrement;
}
}
@Override
public int getScrollableBlockIncrement(Rectangle visibleRect, int orientation, int direction) {
if (orientation == HORIZONTAL) {
return getPreferredSize().width / 5;
} else {
if (vertBlockIncrement == null) {
vertBlockIncrement = computeBlockIncrement();
}
return vertUnitIncrement;
}
}
@Override
public boolean getScrollableTracksViewportWidth() {
return false;
}
@Override
public boolean getScrollableTracksViewportHeight() {
return false;
}
private JCheckBox checkBoxForNode(String group, JsonNode dn) {
final String name = dn.path("name").asText();
final String id = dn.path("id").asText();
final String description = dn.path("description").asText();
final String versRange = dn.path("versionRange").asText();
JCheckBox ch = new JCheckBox(name);
ch.setName(id);
ch.putClientProperty(PROP_VERSION_RANGE, versRange);
ch.putClientProperty(PROP_DESCRIPTION, description);
if (!chkBoxesMap.containsKey(group)) {
chkBoxesMap.put(group, new ArrayList<JCheckBox>());
}
chkBoxesMap.get(group).add(ch);
return ch;
}
private GridBagConstraints constraintsForSecondColumnCheckbox() {
GridBagConstraints gbc;
gbc = new java.awt.GridBagConstraints();
gbc.gridx = 1;
gbc.gridwidth = GridBagConstraints.REMAINDER;
gbc.insets = new Insets(INNER_GAP, INDENT, 0, 0);
gbc.anchor = GridBagConstraints.LINE_START;
return gbc;
}
private GridBagConstraints constraintsForFirstColumnCheckbox() {
GridBagConstraints gbc;
gbc = new java.awt.GridBagConstraints();
gbc.gridx = 0;
gbc.insets = new Insets(INNER_GAP, INDENT, 0, 0);
gbc.anchor = GridBagConstraints.LINE_START;
return gbc;
}
private GridBagConstraints constraintsForGroupLabel(boolean first) {
GridBagConstraints gbc = new java.awt.GridBagConstraints();
gbc.gridx = 0;
gbc.gridwidth = GridBagConstraints.REMAINDER;
gbc.fill = GridBagConstraints.HORIZONTAL;
gbc.insets = (first) ? new Insets(OUTER_GAP, OUTER_GAP, 0, OUTER_GAP) : new Insets(GROUP_SPACE, OUTER_GAP, 0, OUTER_GAP);
return gbc;
}
/** This method is called from within the constructor to initialize the form. WARNING: Do NOT modify this code. The content of this
* method is always regenerated by the Form Editor.
*/
@SuppressWarnings("unchecked")
// <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
private void initComponents() {
lNotInitialized = new javax.swing.JLabel();
setLayout(new java.awt.GridBagLayout());
lNotInitialized.setText("Not initialized");
lNotInitialized.setEnabled(false);
add(lNotInitialized, new java.awt.GridBagConstraints());
}// </editor-fold>//GEN-END:initComponents
// Variables declaration - do not modify//GEN-BEGIN:variables
private javax.swing.JLabel lNotInitialized;
// End of variables declaration//GEN-END:variables
void adaptToBootVersion(String bootVersion) {
for (List<JCheckBox> chList : chkBoxesMap.values()) {
for (JCheckBox cb : chList) {
String verRange = (String) cb.getClientProperty(PROP_VERSION_RANGE);
String description = (String) cb.getClientProperty(PROP_DESCRIPTION);
final boolean allowable = allowable(verRange, bootVersion);
cb.setEnabled(allowable);
cb.setToolTipText(prepTooltip(description, allowable, verRange));
}
}
}
private static boolean allowable(String verRange, String bootVersion) {
boolean ret = true;
if (verRange != null && !verRange.isEmpty()) {
if (verRange.indexOf('[') >= 0 || verRange.indexOf('(') >= 0
|| verRange.indexOf(']') >= 0 || verRange.indexOf(')') >= 0) {
// bounded range
String[] bounds = verRange.substring(1, verRange.length() - 1).split(",");
// check there are two bounds
if (bounds.length != 2) {
return false;
}
// test various cases
if (bootVersion.compareTo(bounds[0]) > 0 && bootVersion.compareTo(bounds[1]) < 0) {
return true;
} else if (bootVersion.compareTo(bounds[0]) == 0 && verRange.startsWith("[")) {
return true;
} else if (bootVersion.compareTo(bounds[0]) == 0 && verRange.startsWith("(")) {
return false;
} else if (bootVersion.compareTo(bounds[1]) == 0 && verRange.endsWith("]")) {
return true;
} else if (bootVersion.compareTo(bounds[1]) == 0 && verRange.endsWith(")")) {
return false;
} else {
return false;
}
} else {
// unbounded range
return bootVersion.compareTo(verRange) >= 0;
}
}
return ret;
}
private String prepTooltip(String description, boolean allowable, String versRange) {
StringBuilder sb = new StringBuilder(wrap(description));
if (!allowable) {
sb.append("<br/><i>").append(decode(versRange)).append("</i>");
}
return sb.toString();
}
private StringBuilder wrap(String description) {
StringBuilder sb = new StringBuilder("<html>");
String[] words = description.split(" ");
String w = words[0];
sb.append(w);
int len = w.length();
for (int i = 1; i < words.length; i++) {
w = words[i];
if (len + w.length() + 1 > TOOLTIP_WIDTH) {
sb.append("<br/>").append(w);
len = w.length();
} else {
sb.append(" ").append(w);
len += w.length() + 1;
}
}
return sb;
}
private String decode(String verRange) {
StringBuilder sb = new StringBuilder();
if (verRange != null && !verRange.isEmpty()) {
if (verRange.indexOf('[') >= 0 || verRange.indexOf('(') >= 0
|| verRange.indexOf(']') >= 0 || verRange.indexOf(')') >= 0) {
// bounded range
String[] bounds = verRange.substring(1, verRange.length() - 1).split(",");
// check there are two bounds
if (bounds.length == 2) {
sb.append(bounds[0]);
if (verRange.startsWith("[")) {
sb.append(" <= ");
} else if (verRange.startsWith("(")) {
sb.append(" < ");
}
sb.append("Boot version");
if (verRange.endsWith("]")) {
sb.append(" >= ");
} else if (verRange.endsWith(")")) {
sb.append(" > ");
}
sb.append(bounds[1]);
}
} else {
// unbounded range
sb.append("Boot version >= ").append(verRange);
}
}
return sb.toString();
}
void clearFilter() {
filter(null);
}
void filter(String text) {
this.removeAll();
int cg = 1;
for (JLabel lGroup : grpLabels) {
List<JCheckBox> cbList = cbFilter(lGroup.getText(), text);
if (!cbList.isEmpty()) {
this.add(lGroup, constraintsForGroupLabel(cg++ == 0));
int cd = 1;
for (JCheckBox cb : cbList) {
if (cd++ % 2 == 0) {
this.add(cb, constraintsForSecondColumnCheckbox());
} else {
this.add(cb, constraintsForFirstColumnCheckbox());
}
}
}
}
this.revalidate();
this.repaint();
}
private List<JCheckBox> cbFilter(String group, String text) {
ArrayList<JCheckBox> ret = new ArrayList<>();
for (JCheckBox cb : chkBoxesMap.get(group)) {
if (text == null || cb.getText().toLowerCase().contains(text)) {
ret.add(cb);
}
}
return ret;
}
private int computeUnitIncrement() {
System.out.println("com.github.alexfalappa.nbspringboot.projects.initializr.BootDependenciesPanel.computeUnitIncrement()");
final Iterator<List<JCheckBox>> it = chkBoxesMap.values().iterator();
if (it.hasNext()) {
List<JCheckBox> list = it.next();
if (!list.isEmpty()) {
return list.get(0).getPreferredSize().height;
}
}
return getPreferredSize().height / 24;
}
private Integer computeBlockIncrement() {
System.out.println("com.github.alexfalappa.nbspringboot.projects.initializr.BootDependenciesPanel.computeBlockIncrement()");
final Iterator<List<JCheckBox>> it = chkBoxesMap.values().iterator();
if (it.hasNext()) {
List<JCheckBox> list = it.next();
if (!list.isEmpty()) {
return list.get(0).getPreferredSize().height * 4;
}
}
return getPreferredSize().height / 8;
}
}
|
Initializr project: smooth dependencies panel scrolling
|
src/main/java/com/github/alexfalappa/nbspringboot/projects/initializr/BootDependenciesPanel.java
|
Initializr project: smooth dependencies panel scrolling
|
<ide><path>rc/main/java/com/github/alexfalappa/nbspringboot/projects/initializr/BootDependenciesPanel.java
<ide> private boolean initialized = false;
<ide> private final Map<String, List<JCheckBox>> chkBoxesMap = new HashMap<>();
<ide> private final List<JLabel> grpLabels = new ArrayList<>();
<del> private Integer vertUnitIncrement = null;
<del> private Integer vertBlockIncrement = null;
<add> private Integer unitIncrement = null;
<add> private Integer blockIncrement = null;
<ide>
<ide> public BootDependenciesPanel() {
<ide> initComponents();
<ide> }
<ide> initialized = true;
<ide> // force recompute of increments
<del> vertUnitIncrement = null;
<del> vertBlockIncrement = null;
<add> unitIncrement = null;
<add> blockIncrement = null;
<ide> }
<ide>
<ide> public String getSelectedDependenciesString() {
<ide> if (orientation == HORIZONTAL) {
<ide> return getPreferredSize().width / 10;
<ide> } else {
<del> if (vertUnitIncrement == null) {
<del> vertUnitIncrement = computeUnitIncrement();
<del> }
<del> return vertUnitIncrement;
<add> if (unitIncrement == null) {
<add> unitIncrement = computeUnitIncrement();
<add> }
<add> return unitIncrement;
<ide> }
<ide> }
<ide>
<ide> if (orientation == HORIZONTAL) {
<ide> return getPreferredSize().width / 5;
<ide> } else {
<del> if (vertBlockIncrement == null) {
<del> vertBlockIncrement = computeBlockIncrement();
<del> }
<del> return vertUnitIncrement;
<add> if (blockIncrement == null) {
<add> blockIncrement = computeBlockIncrement();
<add> }
<add> return blockIncrement;
<ide> }
<ide> }
<ide>
<ide> }
<ide>
<ide> private int computeUnitIncrement() {
<del> System.out.println("com.github.alexfalappa.nbspringboot.projects.initializr.BootDependenciesPanel.computeUnitIncrement()");
<ide> final Iterator<List<JCheckBox>> it = chkBoxesMap.values().iterator();
<ide> if (it.hasNext()) {
<ide> List<JCheckBox> list = it.next();
<ide> }
<ide>
<ide> private Integer computeBlockIncrement() {
<del> System.out.println("com.github.alexfalappa.nbspringboot.projects.initializr.BootDependenciesPanel.computeBlockIncrement()");
<ide> final Iterator<List<JCheckBox>> it = chkBoxesMap.values().iterator();
<ide> if (it.hasNext()) {
<ide> List<JCheckBox> list = it.next();
<ide> if (!list.isEmpty()) {
<del> return list.get(0).getPreferredSize().height * 4;
<add> return list.get(0).getPreferredSize().height * 5;
<ide> }
<ide> }
<ide> return getPreferredSize().height / 8;
|
|
JavaScript
|
lgpl-2.1
|
1b8373af10ee8f7bed98c7ff6a27a3ab34f3b72f
| 0 |
jasonmunro/cypht,jasonmunro/cypht,jasonmunro/cypht,jasonmunro/cypht
|
/* globals Hm_Ajax,Hm_Message_List,Hm_Utils,Hm_Folders,Hm_Background_Unread,hm_list_path,hm_msg_uid,hm_search_terms,hm_list_parent,hm_page_name,Message_List,Hm_Timer: true */
var smtp_test_action = function(event) {
event.preventDefault();
var form = $(this).parent();
Hm_Ajax.request(
form.serializeArray(),
function(res) {
Hm_Notices.show(res.router_user_msgs);
},
{'smtp_connect': 1}
);
};
var smtp_save_action = function(event) {
event.preventDefault();
var form = $(this).parent();
Hm_Ajax.request(
form.serializeArray(),
function(res) {
Hm_Notices.show(res.router_user_msgs);
if (res.just_saved_credentials) {
form.find('.credentials').attr('disabled', true);
form.find('.save_smtp_connection').hide();
form.find('.smtp_password').val('');
form.find('.smtp_password').attr('placeholder', '[saved]');
form.append('<input type="submit" value="Forget" class="forget_smtp_connection" />');
$('.forget_smtp_connection').on('click', smtp_forget_action);
Hm_Utils.set_unsaved_changes(1);
Hm_Folders.reload_folders(true);
}
},
{'smtp_save': 1}
);
};
var smtp_forget_action = function(event) {
event.preventDefault();
var form = $(this).parent();
Hm_Ajax.request(
form.serializeArray(),
function(res) {
Hm_Notices.show(res.router_user_msgs);
if (res.just_forgot_credentials) {
form.find('.credentials').attr('disabled', false);
form.find('.smtp_password').val('');
form.find('.smtp_password').attr('placeholder', 'Password');
form.append('<input type="submit" value="Save" class="save_smtp_connection" />');
$('.save_smtp_connection').on('click', smtp_save_action);
$('.forget_smtp_connection', form).remove();
Hm_Utils.set_unsaved_changes(1);
Hm_Folders.reload_folders(true);
}
},
{'smtp_forget': 1}
);
};
var smtp_delete_action = function(event) {
event.preventDefault();
var form = $(this).parent();
Hm_Ajax.request(
form.serializeArray(),
function(res) {
Hm_Notices.show(res.router_user_msgs);
if (res.deleted_server_id > -1 ) {
form.parent().remove();
Hm_Utils.set_unsaved_changes(1);
Hm_Folders.reload_folders(true);
}
},
{'smtp_delete': 1}
);
};
var save_compose_state = function() {
var body = $('.compose_body').val();
var subject = $('.compose_subject').val();
var to = $('.compose_to').val();
Hm_Ajax.request(
[{'name': 'hm_ajax_hook', 'value': 'ajax_smtp_save_draft'},
{'name': 'draft_body', 'value': body},
{'name': 'draft_subject', 'value': subject},
{'name': 'draft_to', 'value': to}],
function() { },
[],
true
);
};
var toggle_recip_flds = function() {
var symbol = '+';
if ($('.toggle_recipients').text() == '+') {
symbol = '-';
}
$('.toggle_recipients').text(symbol);
$('.recipient_fields').toggle();
return false;
}
if (hm_page_name() === 'servers') {
$('.test_smtp_connect').on('click', smtp_test_action);
$('.save_smtp_connection').on('click', smtp_save_action);
$('.forget_smtp_connection').on('click', smtp_forget_action);
$('.delete_smtp_connection').on('click', smtp_delete_action);
var dsp = Hm_Utils.get_from_local_storage('.smtp_section');
if (dsp === 'block' || dsp === 'none') {
$('.smtp_section').css('display', dsp);
}
}
var reset_smtp_form = function() {
$('.compose_body').val('');
$('.compose_subject').val('');
$('.compose_to').val('');
$('.ke-content', $('iframe').contents()).html('');
save_compose_state();
};
if (hm_page_name() === 'compose') {
Hm_Timer.add_job(save_compose_state, 30, true);
$('.toggle_recipients').click(function() { return toggle_recip_flds(); });
$('.smtp_reset').click(function() { reset_smtp_form(); });
if ($('.compose_cc').val() || $('.compose_bcc').val()) {
toggle_recip_flds();
}
}
|
modules/smtp/site.js
|
/* globals Hm_Ajax,Hm_Message_List,Hm_Utils,Hm_Folders,Hm_Background_Unread,hm_list_path,hm_msg_uid,hm_search_terms,hm_list_parent,hm_page_name,Message_List,Hm_Timer: true */
var smtp_test_action = function(event) {
event.preventDefault();
var form = $(this).parent();
Hm_Ajax.request(
form.serializeArray(),
function(res) {
Hm_Notices.show(res.router_user_msgs);
},
{'smtp_connect': 1}
);
};
var smtp_save_action = function(event) {
event.preventDefault();
var form = $(this).parent();
Hm_Ajax.request(
form.serializeArray(),
function(res) {
Hm_Notices.show(res.router_user_msgs);
if (res.just_saved_credentials) {
form.find('.credentials').attr('disabled', true);
form.find('.save_smtp_connection').hide();
form.find('.smtp_password').val('');
form.find('.smtp_password').attr('placeholder', '[saved]');
form.append('<input type="submit" value="Forget" class="forget_smtp_connection" />');
$('.forget_smtp_connection').on('click', smtp_forget_action);
Hm_Utils.set_unsaved_changes(1);
reload_folders(true);
}
},
{'smtp_save': 1}
);
};
var smtp_forget_action = function(event) {
event.preventDefault();
var form = $(this).parent();
Hm_Ajax.request(
form.serializeArray(),
function(res) {
Hm_Notices.show(res.router_user_msgs);
if (res.just_forgot_credentials) {
form.find('.credentials').attr('disabled', false);
form.find('.smtp_password').val('');
form.find('.smtp_password').attr('placeholder', 'Password');
form.append('<input type="submit" value="Save" class="save_smtp_connection" />');
$('.save_smtp_connection').on('click', smtp_save_action);
$('.forget_smtp_connection', form).remove();
Hm_Utils.set_unsaved_changes(1);
reload_folders(true);
}
},
{'smtp_forget': 1}
);
};
var smtp_delete_action = function(event) {
event.preventDefault();
var form = $(this).parent();
Hm_Ajax.request(
form.serializeArray(),
function(res) {
Hm_Notices.show(res.router_user_msgs);
if (res.deleted_server_id > -1 ) {
form.parent().remove();
Hm_Utils.set_unsaved_changes(1);
Hm_Folders.reload_folders(true);
}
},
{'smtp_delete': 1}
);
};
var save_compose_state = function() {
var body = $('.compose_body').val();
var subject = $('.compose_subject').val();
var to = $('.compose_to').val();
Hm_Ajax.request(
[{'name': 'hm_ajax_hook', 'value': 'ajax_smtp_save_draft'},
{'name': 'draft_body', 'value': body},
{'name': 'draft_subject', 'value': subject},
{'name': 'draft_to', 'value': to}],
function() { },
[],
true
);
};
var toggle_recip_flds = function() {
var symbol = '+';
if ($('.toggle_recipients').text() == '+') {
symbol = '-';
}
$('.toggle_recipients').text(symbol);
$('.recipient_fields').toggle();
return false;
}
if (hm_page_name() === 'servers') {
$('.test_smtp_connect').on('click', smtp_test_action);
$('.save_smtp_connection').on('click', smtp_save_action);
$('.forget_smtp_connection').on('click', smtp_forget_action);
$('.delete_smtp_connection').on('click', smtp_delete_action);
var dsp = Hm_Utils.get_from_local_storage('.smtp_section');
if (dsp === 'block' || dsp === 'none') {
$('.smtp_section').css('display', dsp);
}
}
var reset_smtp_form = function() {
$('.compose_body').val('');
$('.compose_subject').val('');
$('.compose_to').val('');
$('.ke-content', $('iframe').contents()).html('');
save_compose_state();
};
if (hm_page_name() === 'compose') {
Hm_Timer.add_job(save_compose_state, 30, true);
$('.toggle_recipients').click(function() { return toggle_recip_flds(); });
$('.smtp_reset').click(function() { reset_smtp_form(); });
if ($('.compose_cc').val() || $('.compose_bcc').val()) {
toggle_recip_flds();
}
}
|
fix typo in smtp module set when reloading folders on the server page
|
modules/smtp/site.js
|
fix typo in smtp module set when reloading folders on the server page
|
<ide><path>odules/smtp/site.js
<ide> form.append('<input type="submit" value="Forget" class="forget_smtp_connection" />');
<ide> $('.forget_smtp_connection').on('click', smtp_forget_action);
<ide> Hm_Utils.set_unsaved_changes(1);
<del> reload_folders(true);
<add> Hm_Folders.reload_folders(true);
<ide> }
<ide> },
<ide> {'smtp_save': 1}
<ide> $('.save_smtp_connection').on('click', smtp_save_action);
<ide> $('.forget_smtp_connection', form).remove();
<ide> Hm_Utils.set_unsaved_changes(1);
<del> reload_folders(true);
<add> Hm_Folders.reload_folders(true);
<ide> }
<ide> },
<ide> {'smtp_forget': 1}
|
|
Java
|
apache-2.0
|
488e36c5823f784d607d160c49a657394de700a6
| 0 |
intentionet/batfish,intentionet/batfish,intentionet/batfish,batfish/batfish,dhalperi/batfish,arifogel/batfish,arifogel/batfish,dhalperi/batfish,arifogel/batfish,batfish/batfish,dhalperi/batfish,intentionet/batfish,batfish/batfish,intentionet/batfish
|
package org.batfish.datamodel;
import static com.google.common.base.MoreObjects.firstNonNull;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonValue;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedSet;
import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeSet;
import org.batfish.datamodel.collections.NodeInterfacePair;
/**
* Represents a set of {@link Edge Edges} and provides methods to prune the edges with edge, node,
* and interface blacklists.
*/
public final class Topology implements Serializable {
private static final long serialVersionUID = 1L;
@JsonCreator
private static Topology jacksonCreateTopology(SortedSet<Edge> edges) {
return new Topology(firstNonNull(edges, ImmutableSortedSet.of()));
}
private final SortedSet<Edge> _edges;
// Mapping of interface -> set of all edges whose source or dest is that interface
private final Map<NodeInterfacePair, SortedSet<Edge>> _interfaceEdges;
// Mapping of node -> set of all edges whose source or dest is on that node
private final Map<String, SortedSet<Edge>> _nodeEdges;
public Topology(SortedSet<Edge> edges) {
_edges = new TreeSet<>(edges);
_nodeEdges = new HashMap<>();
_interfaceEdges = new HashMap<>();
rebuildFromEdges();
}
@JsonIgnore
public SortedSet<Edge> getEdges() {
return _edges;
}
@JsonIgnore
public Map<NodeInterfacePair, SortedSet<Edge>> getInterfaceEdges() {
return _interfaceEdges;
}
public Set<NodeInterfacePair> getNeighbors(NodeInterfacePair iface) {
return getInterfaceEdges()
.getOrDefault(iface, ImmutableSortedSet.of())
.stream()
.filter(e -> e.getFirst().equals(iface))
.map(Edge::getSecond)
.collect(ImmutableSet.toImmutableSet());
}
@JsonIgnore
public Map<String, SortedSet<Edge>> getNodeEdges() {
return _nodeEdges;
}
/** Removes the specified blacklists from the topology */
public void prune(
Set<Edge> blacklistEdges,
Set<String> blacklistNodes,
Set<NodeInterfacePair> blacklistInterfaces) {
if (blacklistEdges != null) {
_edges.removeAll(blacklistEdges);
}
if (blacklistNodes != null) {
for (String blacklistNode : blacklistNodes) {
_edges.removeAll(_nodeEdges.getOrDefault(blacklistNode, ImmutableSortedSet.of()));
}
}
if (blacklistInterfaces != null) {
for (NodeInterfacePair blacklistInterface : blacklistInterfaces) {
_edges.removeAll(_interfaceEdges.getOrDefault(blacklistInterface, ImmutableSortedSet.of()));
}
}
rebuildFromEdges();
}
private void rebuildFromEdges() {
_nodeEdges.clear();
_interfaceEdges.clear();
for (Edge edge : _edges) {
String node1 = edge.getNode1();
String node2 = edge.getNode2();
NodeInterfacePair iface1 = edge.getInterface1();
NodeInterfacePair iface2 = edge.getInterface2();
_nodeEdges.computeIfAbsent(node1, k -> new TreeSet<>()).add(edge);
_nodeEdges.computeIfAbsent(node2, k -> new TreeSet<>()).add(edge);
_interfaceEdges.computeIfAbsent(iface1, k -> new TreeSet<>()).add(edge);
_interfaceEdges.computeIfAbsent(iface2, k -> new TreeSet<>()).add(edge);
}
}
@JsonValue
public SortedSet<Edge> sortedEdges() {
return new TreeSet<>(_edges);
}
}
|
projects/batfish-common-protocol/src/main/java/org/batfish/datamodel/Topology.java
|
package org.batfish.datamodel;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonValue;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedSet;
import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeSet;
import org.batfish.datamodel.collections.NodeInterfacePair;
public final class Topology implements Serializable {
private static final long serialVersionUID = 1L;
@JsonCreator
private static Topology jacksonCreateTopology(SortedSet<Edge> edges) {
return new Topology(edges);
}
private final SortedSet<Edge> _edges;
private final Map<NodeInterfacePair, SortedSet<Edge>> _interfaceEdges;
private final Map<String, SortedSet<Edge>> _nodeEdges;
public Topology(SortedSet<Edge> edges) {
_edges = new TreeSet<>(edges);
_nodeEdges = new HashMap<>();
_interfaceEdges = new HashMap<>();
rebuildFromEdges();
}
@JsonIgnore
public SortedSet<Edge> getEdges() {
return _edges;
}
@JsonIgnore
public Map<NodeInterfacePair, SortedSet<Edge>> getInterfaceEdges() {
return _interfaceEdges;
}
public Set<NodeInterfacePair> getNeighbors(NodeInterfacePair iface) {
return getInterfaceEdges()
.getOrDefault(iface, ImmutableSortedSet.of())
.stream()
.filter(e -> e.getFirst().equals(iface))
.map(Edge::getSecond)
.collect(ImmutableSet.toImmutableSet());
}
@JsonIgnore
public Map<String, SortedSet<Edge>> getNodeEdges() {
return _nodeEdges;
}
/** Removes the specified blacklists from the topology */
public void prune(
Set<Edge> blacklistEdges,
Set<String> blacklistNodes,
Set<NodeInterfacePair> blacklistInterfaces) {
if (blacklistEdges != null) {
SortedSet<Edge> edges = getEdges();
edges.removeAll(blacklistEdges);
}
if (blacklistNodes != null) {
for (String blacklistNode : blacklistNodes) {
removeNode(blacklistNode);
}
}
if (blacklistInterfaces != null) {
for (NodeInterfacePair blacklistInterface : blacklistInterfaces) {
removeInterface(blacklistInterface);
}
}
rebuildFromEdges();
}
private void rebuildFromEdges() {
_nodeEdges.clear();
_interfaceEdges.clear();
for (Edge edge : getEdges()) {
String node1 = edge.getNode1();
String node2 = edge.getNode2();
NodeInterfacePair int1 = edge.getInterface1();
NodeInterfacePair int2 = edge.getInterface2();
SortedSet<Edge> node1Edges = _nodeEdges.computeIfAbsent(node1, k -> new TreeSet<>());
node1Edges.add(edge);
SortedSet<Edge> node2Edges = _nodeEdges.computeIfAbsent(node2, k -> new TreeSet<>());
node2Edges.add(edge);
SortedSet<Edge> interface1Edges = _interfaceEdges.computeIfAbsent(int1, k -> new TreeSet<>());
interface1Edges.add(edge);
SortedSet<Edge> interface2Edges = _interfaceEdges.computeIfAbsent(int2, k -> new TreeSet<>());
interface2Edges.add(edge);
}
}
private void removeInterface(NodeInterfacePair iface) {
SortedSet<Edge> interfaceEdges = _interfaceEdges.get(iface);
if (interfaceEdges != null) {
_edges.removeAll(interfaceEdges);
}
}
private void removeNode(String hostname) {
SortedSet<Edge> nodeEdges = _nodeEdges.get(hostname);
if (nodeEdges != null) {
_edges.removeAll(nodeEdges);
}
}
@JsonValue
public SortedSet<Edge> sortedEdges() {
return new TreeSet<>(_edges);
}
}
|
Simplify code and add clarifying comments in Topology (#2483)
* Simplify code and add clarifying comments in Topology
|
projects/batfish-common-protocol/src/main/java/org/batfish/datamodel/Topology.java
|
Simplify code and add clarifying comments in Topology (#2483)
|
<ide><path>rojects/batfish-common-protocol/src/main/java/org/batfish/datamodel/Topology.java
<ide> package org.batfish.datamodel;
<add>
<add>import static com.google.common.base.MoreObjects.firstNonNull;
<ide>
<ide> import com.fasterxml.jackson.annotation.JsonCreator;
<ide> import com.fasterxml.jackson.annotation.JsonIgnore;
<ide> import java.util.TreeSet;
<ide> import org.batfish.datamodel.collections.NodeInterfacePair;
<ide>
<add>/**
<add> * Represents a set of {@link Edge Edges} and provides methods to prune the edges with edge, node,
<add> * and interface blacklists.
<add> */
<ide> public final class Topology implements Serializable {
<ide>
<ide> private static final long serialVersionUID = 1L;
<ide>
<ide> @JsonCreator
<ide> private static Topology jacksonCreateTopology(SortedSet<Edge> edges) {
<del> return new Topology(edges);
<add> return new Topology(firstNonNull(edges, ImmutableSortedSet.of()));
<ide> }
<ide>
<ide> private final SortedSet<Edge> _edges;
<ide>
<add> // Mapping of interface -> set of all edges whose source or dest is that interface
<ide> private final Map<NodeInterfacePair, SortedSet<Edge>> _interfaceEdges;
<ide>
<add> // Mapping of node -> set of all edges whose source or dest is on that node
<ide> private final Map<String, SortedSet<Edge>> _nodeEdges;
<ide>
<ide> public Topology(SortedSet<Edge> edges) {
<ide> Set<String> blacklistNodes,
<ide> Set<NodeInterfacePair> blacklistInterfaces) {
<ide> if (blacklistEdges != null) {
<del> SortedSet<Edge> edges = getEdges();
<del> edges.removeAll(blacklistEdges);
<add> _edges.removeAll(blacklistEdges);
<ide> }
<ide> if (blacklistNodes != null) {
<ide> for (String blacklistNode : blacklistNodes) {
<del> removeNode(blacklistNode);
<add> _edges.removeAll(_nodeEdges.getOrDefault(blacklistNode, ImmutableSortedSet.of()));
<ide> }
<ide> }
<ide> if (blacklistInterfaces != null) {
<ide> for (NodeInterfacePair blacklistInterface : blacklistInterfaces) {
<del> removeInterface(blacklistInterface);
<add> _edges.removeAll(_interfaceEdges.getOrDefault(blacklistInterface, ImmutableSortedSet.of()));
<ide> }
<ide> }
<ide> rebuildFromEdges();
<ide> private void rebuildFromEdges() {
<ide> _nodeEdges.clear();
<ide> _interfaceEdges.clear();
<del> for (Edge edge : getEdges()) {
<add> for (Edge edge : _edges) {
<ide> String node1 = edge.getNode1();
<ide> String node2 = edge.getNode2();
<del> NodeInterfacePair int1 = edge.getInterface1();
<del> NodeInterfacePair int2 = edge.getInterface2();
<add> NodeInterfacePair iface1 = edge.getInterface1();
<add> NodeInterfacePair iface2 = edge.getInterface2();
<ide>
<del> SortedSet<Edge> node1Edges = _nodeEdges.computeIfAbsent(node1, k -> new TreeSet<>());
<del> node1Edges.add(edge);
<del>
<del> SortedSet<Edge> node2Edges = _nodeEdges.computeIfAbsent(node2, k -> new TreeSet<>());
<del> node2Edges.add(edge);
<del>
<del> SortedSet<Edge> interface1Edges = _interfaceEdges.computeIfAbsent(int1, k -> new TreeSet<>());
<del> interface1Edges.add(edge);
<del>
<del> SortedSet<Edge> interface2Edges = _interfaceEdges.computeIfAbsent(int2, k -> new TreeSet<>());
<del> interface2Edges.add(edge);
<del> }
<del> }
<del>
<del> private void removeInterface(NodeInterfacePair iface) {
<del> SortedSet<Edge> interfaceEdges = _interfaceEdges.get(iface);
<del> if (interfaceEdges != null) {
<del> _edges.removeAll(interfaceEdges);
<del> }
<del> }
<del>
<del> private void removeNode(String hostname) {
<del> SortedSet<Edge> nodeEdges = _nodeEdges.get(hostname);
<del> if (nodeEdges != null) {
<del> _edges.removeAll(nodeEdges);
<add> _nodeEdges.computeIfAbsent(node1, k -> new TreeSet<>()).add(edge);
<add> _nodeEdges.computeIfAbsent(node2, k -> new TreeSet<>()).add(edge);
<add> _interfaceEdges.computeIfAbsent(iface1, k -> new TreeSet<>()).add(edge);
<add> _interfaceEdges.computeIfAbsent(iface2, k -> new TreeSet<>()).add(edge);
<ide> }
<ide> }
<ide>
|