Unnamed: 0
int64 0
6.45k
| func
stringlengths 29
253k
| target
class label 2
classes | project
stringlengths 36
167
|
---|---|---|---|
3,580 | public static class Builder extends NumberFieldMapper.Builder<Builder, FloatFieldMapper> {
// Substitute value indexed when a document has no value for this field; null means "skip".
protected Float nullValue = Defaults.NULL_VALUE;
/**
 * Creates a builder for a float field with the given name, starting from a
 * fresh copy of the default field type so per-field changes do not mutate Defaults.
 */
public Builder(String name) {
super(name, new FieldType(Defaults.FIELD_TYPE));
// Self-reference required by the fluent superclass so chained calls return this subtype.
builder = this;
}
/**
 * Sets the value to index in place of a missing field value.
 *
 * @param nullValue substitute for documents that omit this field
 * @return this builder, for chaining
 */
public Builder nullValue(float nullValue) {
this.nullValue = nullValue;
return this;
}
@Override
public FloatFieldMapper build(BuilderContext context) {
// Norms may only stay omitted when there is no index-time boost: a non-default
// boost has to be encoded into the norm, so it forces norms back on.
fieldType.setOmitNorms(fieldType.omitNorms() && boost == 1.0f);
FloatFieldMapper fieldMapper = new FloatFieldMapper(buildNames(context),
precisionStep, boost, fieldType, docValues, nullValue, ignoreMalformed(context), coerce(context), postingsProvider,
docValuesProvider, similarity, normsLoading, fieldDataSettings, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
// Propagate the inherited include_in_all setting onto the finished mapper.
fieldMapper.includeInAll(includeInAll);
return fieldMapper;
}
} | 0true
| src_main_java_org_elasticsearch_index_mapper_core_FloatFieldMapper.java |
3,978 | public class WrapperFilterParser implements FilterParser {
// DSL registration name of this filter: {"wrapper": {"filter": "<base64 source>"}}.
public static final String NAME = "wrapper";
@Inject
public WrapperFilterParser() {
}
@Override
public String[] names() {
// Only one DSL name is registered for this parser.
return new String[]{NAME};
}
@Override
public Filter parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
    XContentParser outerParser = parseContext.parser();
    // Expected shape: {"wrapper": {"filter": "<base64-encoded filter source>"}}.
    XContentParser.Token token = outerParser.nextToken();
    if (token != XContentParser.Token.FIELD_NAME) {
        throw new QueryParsingException(parseContext.index(), "[wrapper] filter malformed");
    }
    String fieldName = outerParser.currentName();
    if (!fieldName.equals("filter")) {
        throw new QueryParsingException(parseContext.index(), "[wrapper] filter malformed");
    }
    outerParser.nextToken();
    byte[] wrappedSource = outerParser.binaryValue();
    XContentParser wrappedParser = XContentFactory.xContent(wrappedSource).createParser(wrappedSource);
    try {
        // Parse the embedded filter source with a fresh context against the same index.
        final QueryParseContext wrappedContext = new QueryParseContext(parseContext.index(), parseContext.indexQueryParser);
        wrappedContext.reset(wrappedParser);
        Filter result = wrappedContext.parseInnerFilter();
        // Advance past the wrapper's value so the outer parse can continue cleanly.
        outerParser.nextToken();
        return result;
    } finally {
        wrappedParser.close();
    }
}
} | 1no label
| src_main_java_org_elasticsearch_index_query_WrapperFilterParser.java |
341 | private final IPartListener2 fLinkWithEditorListener= new IPartListener2() {
// No-op callbacks: this listener only reacts to activation and input changes below.
public void partVisible(IWorkbenchPartReference partRef) {}
public void partBroughtToTop(IWorkbenchPartReference partRef) {}
public void partClosed(IWorkbenchPartReference partRef) {}
public void partDeactivated(IWorkbenchPartReference partRef) {}
public void partHidden(IWorkbenchPartReference partRef) {}
public void partOpened(IWorkbenchPartReference partRef) {}
/** Syncs the explorer when the input of the currently active editor changes. */
public void partInputChanged(IWorkbenchPartReference partRef) {
    IWorkbenchPage activePage = JavaPlugin.getActivePage();
    boolean activeEditorChanged = partRef instanceof IEditorReference
            && activePage != null
            && activePage.getActivePartReference() == partRef;
    if (activeEditorChanged) {
        editorActivated(((IEditorReference) partRef).getEditor(true));
    }
}
/** Syncs the explorer whenever an editor part becomes the active part. */
public void partActivated(IWorkbenchPartReference partRef) {
    if (!(partRef instanceof IEditorReference)) {
        return; // only editor activations are of interest here
    }
    editorActivated(((IEditorReference) partRef).getEditor(true));
}
}; | 0true
| plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_explorer_PackageExplorerPart.java |
1,723 | return new UnmodifiableIterator<T>() {
@Override
public boolean hasNext() {
// Straight delegation: duplicates are only skipped inside next().
return it.hasNext();
}
@Override
public T next() {
// Take the next element, then consume any immediately following elements that
// compare equal, so each distinct value is returned exactly once.
final T ret = it.next();
while (it.hasNext() && comparator.compare(ret, it.peek()) == 0) {
it.next();
}
// Dedup is only complete if the underlying iterator is sorted ascending.
assert !it.hasNext() || comparator.compare(ret, it.peek()) < 0 : "iterator is not sorted: " + ret + " > " + it.peek();
return ret;
}
}; | 0true
| src_main_java_org_elasticsearch_common_collect_Iterators2.java |
1,156 | @Repository("blSecurePaymentInfoDao")
public class SecurePaymentInfoDaoImpl implements SecurePaymentInfoDao {
// Entity manager bound to the dedicated secure persistence unit (sensitive payment data).
@PersistenceContext(unitName = "blSecurePU")
protected EntityManager em;
// Module wired onto every entity so sensitive fields can be encrypted/decrypted.
@Resource(name = "blEncryptionModule")
protected EncryptionModule encryptionModule;
// Factory allowing the concrete entity implementation classes to be overridden by configuration.
@Resource(name = "blEntityConfiguration")
protected EntityConfiguration entityConfiguration;
/**
 * Persists or updates the given secure payment info.
 *
 * @return the managed instance produced by the merge
 */
public Referenced save(Referenced securePaymentInfo) {
    return em.merge(securePaymentInfo);
}

/**
 * Instantiates a configured payment info entity and wires in the encryption
 * module. Shared by the three create* methods, which previously triplicated
 * this logic.
 *
 * @param className fully qualified name of the configured entity
 */
@SuppressWarnings("unchecked")
protected <T extends Referenced> T createPaymentInfo(String className) {
    T response = (T) entityConfiguration.createEntityInstance(className);
    response.setEncryptionModule(encryptionModule);
    return response;
}

public BankAccountPaymentInfo createBankAccountPaymentInfo() {
    return createPaymentInfo("org.broadleafcommerce.core.payment.domain.BankAccountPaymentInfo");
}

public GiftCardPaymentInfo createGiftCardPaymentInfo() {
    return createPaymentInfo("org.broadleafcommerce.core.payment.domain.GiftCardPaymentInfo");
}

public CreditCardPaymentInfo createCreditCardPaymentInfo() {
    return createPaymentInfo("org.broadleafcommerce.core.payment.domain.CreditCardPaymentInfo");
}

/**
 * Runs the given named query with a referenceNumber parameter and returns the
 * first result, or null when nothing matches. Attaches the encryption module
 * to any found entity. Shared by the three find* methods, which previously
 * triplicated this logic.
 */
@SuppressWarnings("unchecked")
protected <T extends Referenced> T findByReferenceNumber(String queryName, String referenceNumber) {
    Query query = em.createNamedQuery(queryName);
    query.setParameter("referenceNumber", referenceNumber);
    List<T> infos = query.getResultList();
    T response = (infos == null || infos.isEmpty()) ? null : infos.get(0);
    if (response != null) {
        response.setEncryptionModule(encryptionModule);
    }
    return response;
}

public BankAccountPaymentInfo findBankAccountInfo(String referenceNumber) {
    return findByReferenceNumber("BC_READ_BANK_ACCOUNT_BY_REFERENCE_NUMBER", referenceNumber);
}

public CreditCardPaymentInfo findCreditCardInfo(String referenceNumber) {
    return findByReferenceNumber("BC_READ_CREDIT_CARD_BY_REFERENCE_NUMBER", referenceNumber);
}

public GiftCardPaymentInfo findGiftCardInfo(String referenceNumber) {
    return findByReferenceNumber("BC_READ_GIFT_CARD_BY_REFERENCE_NUMBER", referenceNumber);
}

/**
 * Removes the given secure payment info, re-attaching a detached instance
 * first so that em.remove operates on a managed entity.
 */
public void delete(Referenced securePaymentInfo) {
    if (!em.contains(securePaymentInfo)) {
        securePaymentInfo = em.find(securePaymentInfo.getClass(), securePaymentInfo.getId());
    }
    em.remove(securePaymentInfo);
}
} | 0true
| core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_payment_dao_SecurePaymentInfoDaoImpl.java |
134 | public class StampedLock implements java.io.Serializable {
/*
* Algorithmic notes:
*
* The design employs elements of Sequence locks
* (as used in linux kernels; see Lameter's
* http://www.lameter.com/gelato2005.pdf
* and elsewhere; see
* Boehm's http://www.hpl.hp.com/techreports/2012/HPL-2012-68.html)
* and Ordered RW locks (see Shirako et al
* http://dl.acm.org/citation.cfm?id=2312015)
*
* Conceptually, the primary state of the lock includes a sequence
* number that is odd when write-locked and even otherwise.
* However, this is offset by a reader count that is non-zero when
* read-locked. The read count is ignored when validating
* "optimistic" seqlock-reader-style stamps. Because we must use
* a small finite number of bits (currently 7) for readers, a
* supplementary reader overflow word is used when the number of
* readers exceeds the count field. We do this by treating the max
* reader count value (RBITS) as a spinlock protecting overflow
* updates.
*
* Waiters use a modified form of CLH lock used in
* AbstractQueuedSynchronizer (see its internal documentation for
* a fuller account), where each node is tagged (field mode) as
* either a reader or writer. Sets of waiting readers are grouped
* (linked) under a common node (field cowait) so act as a single
* node with respect to most CLH mechanics. By virtue of the
* queue structure, wait nodes need not actually carry sequence
* numbers; we know each is greater than its predecessor. This
* simplifies the scheduling policy to a mainly-FIFO scheme that
* incorporates elements of Phase-Fair locks (see Brandenburg &
* Anderson, especially http://www.cs.unc.edu/~bbb/diss/). In
* particular, we use the phase-fair anti-barging rule: If an
* incoming reader arrives while read lock is held but there is a
* queued writer, this incoming reader is queued. (This rule is
* responsible for some of the complexity of method acquireRead,
* but without it, the lock becomes highly unfair.) Method release
* does not (and sometimes cannot) itself wake up cowaiters. This
* is done by the primary thread, but helped by any other threads
* with nothing better to do in methods acquireRead and
* acquireWrite.
*
* These rules apply to threads actually queued. All tryLock forms
* opportunistically try to acquire locks regardless of preference
* rules, and so may "barge" their way in. Randomized spinning is
* used in the acquire methods to reduce (increasingly expensive)
* context switching while also avoiding sustained memory
* thrashing among many threads. We limit spins to the head of
* queue. A thread spin-waits up to SPINS times (where each
* iteration decreases spin count with 50% probability) before
* blocking. If, upon wakening it fails to obtain lock, and is
* still (or becomes) the first waiting thread (which indicates
* that some other thread barged and obtained lock), it escalates
* spins (up to MAX_HEAD_SPINS) to reduce the likelihood of
* continually losing to barging threads.
*
* Nearly all of these mechanics are carried out in methods
* acquireWrite and acquireRead, that, as typical of such code,
* sprawl out because actions and retries rely on consistent sets
* of locally cached reads.
*
* As noted in Boehm's paper (above), sequence validation (mainly
* method validate()) requires stricter ordering rules than apply
* to normal volatile reads (of "state"). In the absence of (but
* continual hope for) explicit JVM support of intrinsics with
* double-sided reordering prohibition, or corresponding fence
* intrinsics, we for now uncomfortably rely on the fact that the
* Unsafe.getXVolatile intrinsic must have this property
* (syntactic volatile reads do not) for internal purposes anyway,
* even though it is not documented.
*
* The memory layout keeps lock state and queue pointers together
* (normally on the same cache line). This usually works well for
* read-mostly loads. In most other cases, the natural tendency of
* adaptive-spin CLH locks to reduce memory contention lessens
* motivation to further spread out contended locations, but might
* be subject to future improvements.
*/
private static final long serialVersionUID = -6001602636862214147L;
/** Number of processors, for spin control */
private static final int NCPU = Runtime.getRuntime().availableProcessors();
/** Maximum number of retries before enqueuing on acquisition */
private static final int SPINS = (NCPU > 1) ? 1 << 6 : 0;
/** Maximum number of retries before blocking at head on acquisition */
private static final int HEAD_SPINS = (NCPU > 1) ? 1 << 10 : 0;
/** Maximum number of retries before re-blocking */
private static final int MAX_HEAD_SPINS = (NCPU > 1) ? 1 << 16 : 0;
/** The period for yielding when waiting for overflow spinlock */
private static final int OVERFLOW_YIELD_RATE = 7; // must be power 2 - 1
/** The number of bits to use for reader count before overflowing */
private static final int LG_READERS = 7;
// Values for lock state and stamp operations
private static final long RUNIT = 1L; // increment for one reader hold
private static final long WBIT = 1L << LG_READERS; // write-lock bit, just above the reader count field
private static final long RBITS = WBIT - 1L; // mask of the in-state reader count
private static final long RFULL = RBITS - 1L; // max in-state readers before the overflow word is used
private static final long ABITS = RBITS | WBIT; // all "acquired" bits: readers plus writer
private static final long SBITS = ~RBITS; // note overlap with ABITS
// Initial value for lock state; avoid failure value zero
private static final long ORIGIN = WBIT << 1;
// Special value from cancelled acquire methods so caller can throw IE
private static final long INTERRUPTED = 1L;
// Values for node status; order matters
private static final int WAITING = -1;
private static final int CANCELLED = 1;
// Modes for nodes (int not boolean to allow arithmetic)
private static final int RMODE = 0;
private static final int WMODE = 1;
/** Wait nodes: CLH-style queue entries, tagged with a reader/writer mode. */
static final class WNode {
volatile WNode prev;
volatile WNode next;
volatile WNode cowait; // list of linked readers grouped under this node
volatile Thread thread; // non-null while possibly parked
volatile int status; // 0, WAITING, or CANCELLED
final int mode; // RMODE or WMODE
WNode(int m, WNode p) { mode = m; prev = p; }
}
/** Head of CLH queue */
private transient volatile WNode whead;
/** Tail (last) of CLH queue */
private transient volatile WNode wtail;
// views, lazily constructed by asReadLock/asWriteLock/asReadWriteLock
transient ReadLockView readLockView;
transient WriteLockView writeLockView;
transient ReadWriteLockView readWriteLockView;
/** Lock sequence/state */
private transient volatile long state;
/** extra reader count when state read count saturated */
private transient int readerOverflow;
/**
 * Creates a new lock, initially in unlocked state.
 */
public StampedLock() {
state = ORIGIN;
}
/**
 * Exclusively acquires the lock, blocking if necessary
 * until available.
 *
 * @return a stamp that can be used to unlock or convert mode
 */
public long writeLock() {
long s, next; // bypass acquireWrite in fully unlocked case only
// Fast path: CAS the write bit in when no reader or writer currently holds the lock.
return ((((s = state) & ABITS) == 0L &&
U.compareAndSwapLong(this, STATE, s, next = s + WBIT)) ?
next : acquireWrite(false, 0L));
}
/**
 * Exclusively acquires the lock if it is immediately available.
 *
 * @return a stamp that can be used to unlock or convert mode,
 * or zero if the lock is not available
 */
public long tryWriteLock() {
long s, next;
// One CAS attempt; zero (never a valid stamp) signals failure.
return ((((s = state) & ABITS) == 0L &&
U.compareAndSwapLong(this, STATE, s, next = s + WBIT)) ?
next : 0L);
}
/**
 * Exclusively acquires the lock if it is available within the
 * given time and the current thread has not been interrupted.
 * Behavior under timeout and interruption matches that specified
 * for method {@link Lock#tryLock(long,TimeUnit)}.
 *
 * @param time the maximum time to wait for the lock
 * @param unit the time unit of the {@code time} argument
 * @return a stamp that can be used to unlock or convert mode,
 * or zero if the lock is not available
 * @throws InterruptedException if the current thread is interrupted
 * before acquiring the lock
 */
public long tryWriteLock(long time, TimeUnit unit)
throws InterruptedException {
long nanos = unit.toNanos(time);
if (!Thread.interrupted()) {
long next, deadline;
if ((next = tryWriteLock()) != 0L)
return next;
if (nanos <= 0L)
return 0L;
if ((deadline = System.nanoTime() + nanos) == 0L)
deadline = 1L; // deadline 0 means "untimed" to acquireWrite, so substitute 1
if ((next = acquireWrite(true, deadline)) != INTERRUPTED)
return next;
}
throw new InterruptedException();
}
/**
 * Exclusively acquires the lock, blocking if necessary
 * until available or the current thread is interrupted.
 * Behavior under interruption matches that specified
 * for method {@link Lock#lockInterruptibly()}.
 *
 * @return a stamp that can be used to unlock or convert mode
 * @throws InterruptedException if the current thread is interrupted
 * before acquiring the lock
 */
public long writeLockInterruptibly() throws InterruptedException {
long next;
if (!Thread.interrupted() &&
(next = acquireWrite(true, 0L)) != INTERRUPTED)
return next;
throw new InterruptedException();
}
/**
 * Non-exclusively acquires the lock, blocking if necessary
 * until available.
 *
 * @return a stamp that can be used to unlock or convert mode
 */
public long readLock() {
long s = state, next; // bypass acquireRead on common uncontended case
// Fast path only when the queue is empty and the reader count is not saturated.
return ((whead == wtail && (s & ABITS) < RFULL &&
U.compareAndSwapLong(this, STATE, s, next = s + RUNIT)) ?
next : acquireRead(false, 0L));
}
/**
 * Non-exclusively acquires the lock if it is immediately available.
 *
 * @return a stamp that can be used to unlock or convert mode,
 * or zero if the lock is not available
 */
public long tryReadLock() {
for (;;) {
long s, m, next;
if ((m = (s = state) & ABITS) == WBIT)
return 0L; // write-locked: fail immediately
else if (m < RFULL) {
if (U.compareAndSwapLong(this, STATE, s, next = s + RUNIT))
return next;
}
else if ((next = tryIncReaderOverflow(s)) != 0L)
return next; // count field saturated: record extra reader in overflow word
}
}
/**
 * Non-exclusively acquires the lock if it is available within the
 * given time and the current thread has not been interrupted.
 * Behavior under timeout and interruption matches that specified
 * for method {@link Lock#tryLock(long,TimeUnit)}.
 *
 * @param time the maximum time to wait for the lock
 * @param unit the time unit of the {@code time} argument
 * @return a stamp that can be used to unlock or convert mode,
 * or zero if the lock is not available
 * @throws InterruptedException if the current thread is interrupted
 * before acquiring the lock
 */
public long tryReadLock(long time, TimeUnit unit)
throws InterruptedException {
long s, m, next, deadline;
long nanos = unit.toNanos(time);
if (!Thread.interrupted()) {
// Opportunistic single attempt before committing to the timed slow path.
if ((m = (s = state) & ABITS) != WBIT) {
if (m < RFULL) {
if (U.compareAndSwapLong(this, STATE, s, next = s + RUNIT))
return next;
}
else if ((next = tryIncReaderOverflow(s)) != 0L)
return next;
}
if (nanos <= 0L)
return 0L;
if ((deadline = System.nanoTime() + nanos) == 0L)
deadline = 1L; // deadline 0 means "untimed" to acquireRead, so substitute 1
if ((next = acquireRead(true, deadline)) != INTERRUPTED)
return next;
}
throw new InterruptedException();
}
/**
 * Non-exclusively acquires the lock, blocking if necessary
 * until available or the current thread is interrupted.
 * Behavior under interruption matches that specified
 * for method {@link Lock#lockInterruptibly()}.
 *
 * @return a stamp that can be used to unlock or convert mode
 * @throws InterruptedException if the current thread is interrupted
 * before acquiring the lock
 */
public long readLockInterruptibly() throws InterruptedException {
long next;
if (!Thread.interrupted() &&
(next = acquireRead(true, 0L)) != INTERRUPTED)
return next;
throw new InterruptedException();
}
/**
 * Returns a stamp that can later be validated, or zero
 * if exclusively locked.
 *
 * @return a stamp, or zero if exclusively locked
 */
public long tryOptimisticRead() {
    long current = state;
    // A write-locked state cannot be read optimistically.
    if ((current & WBIT) != 0L) {
        return 0L;
    }
    // Drop the reader-count bits: optimistic stamps track only the write sequence.
    return current & SBITS;
}
/**
 * Returns true if the lock has not been exclusively acquired
 * since issuance of the given stamp. Always returns false if the
 * stamp is zero. Always returns true if the stamp represents a
 * currently held lock. Invoking this method with a value not
 * obtained from {@link #tryOptimisticRead} or a locking method
 * for this lock has no defined effect or result.
 *
 * @param stamp a stamp
 * @return {@code true} if the lock has not been exclusively acquired
 * since issuance of the given stamp; else false
 */
public boolean validate(long stamp) {
// See above about current use of getLongVolatile here:
// a plain volatile read does not give the double-sided ordering validation needs.
return (stamp & SBITS) == (U.getLongVolatile(this, STATE) & SBITS);
}
/**
 * If the lock state matches the given stamp, releases the
 * exclusive lock.
 *
 * @param stamp a stamp returned by a write-lock operation
 * @throws IllegalMonitorStateException if the stamp does
 * not match the current state of this lock
 */
public void unlockWrite(long stamp) {
WNode h;
if (state != stamp || (stamp & WBIT) == 0L)
throw new IllegalMonitorStateException();
// Adding WBIT clears the write bit and advances the sequence; ORIGIN avoids a zero state.
state = (stamp += WBIT) == 0L ? ORIGIN : stamp;
if ((h = whead) != null && h.status != 0)
release(h); // wake a queued waiter, if any
}
/**
 * If the lock state matches the given stamp, releases the
 * non-exclusive lock.
 *
 * @param stamp a stamp returned by a read-lock operation
 * @throws IllegalMonitorStateException if the stamp does
 * not match the current state of this lock
 */
public void unlockRead(long stamp) {
long s, m; WNode h;
for (;;) {
// Reject stamps from a different lock epoch, non-read stamps, and unlocked states.
if (((s = state) & SBITS) != (stamp & SBITS) ||
(stamp & ABITS) == 0L || (m = s & ABITS) == 0L || m == WBIT)
throw new IllegalMonitorStateException();
if (m < RFULL) {
if (U.compareAndSwapLong(this, STATE, s, s - RUNIT)) {
if (m == RUNIT && (h = whead) != null && h.status != 0)
release(h); // last reader released; wake a waiter
break;
}
}
else if (tryDecReaderOverflow(s) != 0L)
break; // reader count was saturated; decremented via overflow word
}
}
/**
 * If the lock state matches the given stamp, releases the
 * corresponding mode of the lock.
 *
 * @param stamp a stamp returned by a lock operation
 * @throws IllegalMonitorStateException if the stamp does
 * not match the current state of this lock
 */
public void unlock(long stamp) {
long a = stamp & ABITS, m, s; WNode h;
// Retry until the stamp provably matches or mismatches the current state.
while (((s = state) & SBITS) == (stamp & SBITS)) {
if ((m = s & ABITS) == 0L)
break; // nothing held: fall through to error
else if (m == WBIT) {
if (a != m)
break; // stamp is not a write stamp
state = (s += WBIT) == 0L ? ORIGIN : s; // release write: bump sequence, avoid zero
if ((h = whead) != null && h.status != 0)
release(h);
return;
}
else if (a == 0L || a >= WBIT)
break; // stamp is not a read stamp
else if (m < RFULL) {
if (U.compareAndSwapLong(this, STATE, s, s - RUNIT)) {
if (m == RUNIT && (h = whead) != null && h.status != 0)
release(h); // last reader gone; wake a waiter
return;
}
}
else if (tryDecReaderOverflow(s) != 0L)
return; // released via the overflow word
}
throw new IllegalMonitorStateException();
}
/**
 * If the lock state matches the given stamp, performs one of
 * the following actions. If the stamp represents holding a write
 * lock, returns it. Or, if a read lock, if the write lock is
 * available, releases the read lock and returns a write stamp.
 * Or, if an optimistic read, returns a write stamp only if
 * immediately available. This method returns zero in all other
 * cases.
 *
 * @param stamp a stamp
 * @return a valid write stamp, or zero on failure
 */
public long tryConvertToWriteLock(long stamp) {
long a = stamp & ABITS, m, s, next;
while (((s = state) & SBITS) == (stamp & SBITS)) {
if ((m = s & ABITS) == 0L) {
// Unlocked: an optimistic stamp may be upgraded by a direct CAS.
if (a != 0L)
break;
if (U.compareAndSwapLong(this, STATE, s, next = s + WBIT))
return next;
}
else if (m == WBIT) {
if (a != m)
break;
return stamp; // already holds the write lock
}
else if (m == RUNIT && a != 0L) {
// Sole reader holding a read stamp: swap the read hold for the write bit atomically.
if (U.compareAndSwapLong(this, STATE, s,
next = s - RUNIT + WBIT))
return next;
}
else
break; // other readers present: upgrade impossible
}
return 0L;
}
/**
 * If the lock state matches the given stamp, performs one of
 * the following actions. If the stamp represents holding a write
 * lock, releases it and obtains a read lock. Or, if a read lock,
 * returns it. Or, if an optimistic read, acquires a read lock and
 * returns a read stamp only if immediately available. This method
 * returns zero in all other cases.
 *
 * @param stamp a stamp
 * @return a valid read stamp, or zero on failure
 */
public long tryConvertToReadLock(long stamp) {
long a = stamp & ABITS, m, s, next; WNode h;
while (((s = state) & SBITS) == (stamp & SBITS)) {
if ((m = s & ABITS) == 0L) {
// Unlocked: an optimistic stamp may take a read hold directly.
if (a != 0L)
break;
else if (m < RFULL) { // always true here (m == 0); kept for symmetry with the pattern
if (U.compareAndSwapLong(this, STATE, s, next = s + RUNIT))
return next;
}
else if ((next = tryIncReaderOverflow(s)) != 0L)
return next;
}
else if (m == WBIT) {
if (a != m)
break;
// Downgrade: in one write, advance the sequence (releasing write) and add a read hold.
state = next = s + (WBIT + RUNIT);
if ((h = whead) != null && h.status != 0)
release(h);
return next;
}
else if (a != 0L && a < WBIT)
return stamp; // already a read stamp
else
break;
}
return 0L;
}
/**
 * If the lock state matches the given stamp then, if the stamp
 * represents holding a lock, releases it and returns an
 * observation stamp. Or, if an optimistic read, returns it if
 * validated. This method returns zero in all other cases, and so
 * may be useful as a form of "tryUnlock".
 *
 * @param stamp a stamp
 * @return a valid optimistic read stamp, or zero on failure
 */
public long tryConvertToOptimisticRead(long stamp) {
    long a = stamp & ABITS, m, s, next; WNode h;
    for (;;) {
        // Validation requires a volatile-strength read (see class notes on
        // getXVolatile ordering). The previous code discarded this value by
        // immediately re-reading the plain field, defeating that guarantee.
        s = U.getLongVolatile(this, STATE);
        if ((s & SBITS) != (stamp & SBITS))
            break;
        if ((m = s & ABITS) == 0L) {
            if (a != 0L)
                break;
            return s; // already an optimistic stamp, validated by the check above
        }
        else if (m == WBIT) {
            if (a != m)
                break;
            // Release the write lock: bump the sequence (avoiding zero) and wake a waiter.
            state = next = (s += WBIT) == 0L ? ORIGIN : s;
            if ((h = whead) != null && h.status != 0)
                release(h);
            return next;
        }
        else if (a == 0L || a >= WBIT)
            break; // stamp does not represent a read hold
        else if (m < RFULL) {
            // Release one read hold; wake a waiter when this was the last reader.
            if (U.compareAndSwapLong(this, STATE, s, next = s - RUNIT)) {
                if (m == RUNIT && (h = whead) != null && h.status != 0)
                    release(h);
                return next & SBITS;
            }
        }
        else if ((next = tryDecReaderOverflow(s)) != 0L)
            return next & SBITS;
    }
    return 0L;
}
/**
 * Releases the write lock if it is held, without requiring a
 * stamp value. This method may be useful for recovery after
 * errors.
 *
 * @return {@code true} if the lock was held, else false
 */
public boolean tryUnlockWrite() {
long s; WNode h;
if (((s = state) & WBIT) != 0L) {
// Adding WBIT clears the write bit and advances the sequence; ORIGIN avoids zero.
state = (s += WBIT) == 0L ? ORIGIN : s;
if ((h = whead) != null && h.status != 0)
release(h);
return true;
}
return false;
}
/**
 * Releases one hold of the read lock if it is held, without
 * requiring a stamp value. This method may be useful for recovery
 * after errors.
 *
 * @return {@code true} if the read lock was held, else false
 */
public boolean tryUnlockRead() {
long s, m; WNode h;
while ((m = (s = state) & ABITS) != 0L && m < WBIT) {
if (m < RFULL) {
if (U.compareAndSwapLong(this, STATE, s, s - RUNIT)) {
if (m == RUNIT && (h = whead) != null && h.status != 0)
release(h); // last reader released; wake a waiter
return true;
}
}
else if (tryDecReaderOverflow(s) != 0L)
return true; // released via the overflow word
}
return false;
}
// status monitoring methods
/**
 * Returns combined state-held and overflow read count for given
 * state s.
 */
private int getReadLockCount(long s) {
long readers;
// When the in-state count is saturated the true total lives in readerOverflow.
if ((readers = s & RBITS) >= RFULL)
readers = RFULL + readerOverflow;
return (int) readers;
}
/**
 * Returns {@code true} if the lock is currently held exclusively.
 *
 * @return {@code true} if the lock is currently held exclusively
 */
public boolean isWriteLocked() {
return (state & WBIT) != 0L;
}
/**
 * Returns {@code true} if the lock is currently held non-exclusively.
 *
 * @return {@code true} if the lock is currently held non-exclusively
 */
public boolean isReadLocked() {
return (state & RBITS) != 0L;
}
/**
 * Queries the number of read locks held for this lock. This
 * method is designed for use in monitoring system state, not for
 * synchronization control.
 * @return the number of read locks held
 */
public int getReadLockCount() {
return getReadLockCount(state);
}
/**
 * Returns a string identifying this lock, as well as its lock
 * state. The state, in brackets, includes the String {@code
 * "Unlocked"} or the String {@code "Write-locked"} or the String
 * {@code "Read-locks:"} followed by the current number of
 * read-locks held.
 *
 * @return a string identifying this lock, as well as its lock state
 */
public String toString() {
    long snapshot = state; // single read so the description is self-consistent
    String lockState;
    if ((snapshot & ABITS) == 0L) {
        lockState = "[Unlocked]";
    } else if ((snapshot & WBIT) != 0L) {
        lockState = "[Write-locked]";
    } else {
        lockState = "[Read-locks:" + getReadLockCount(snapshot) + "]";
    }
    return super.toString() + lockState;
}
// views
/**
 * Returns a plain {@link Lock} view of this StampedLock in which
 * the {@link Lock#lock} method is mapped to {@link #readLock},
 * and similarly for other methods. The returned Lock does not
 * support a {@link Condition}; method {@link
 * Lock#newCondition()} throws {@code
 * UnsupportedOperationException}.
 *
 * @return the lock
 */
public Lock asReadLock() {
ReadLockView v;
// Lazy init with a benign race: duplicate views are stateless and interchangeable.
return ((v = readLockView) != null ? v :
(readLockView = new ReadLockView()));
}
/**
 * Returns a plain {@link Lock} view of this StampedLock in which
 * the {@link Lock#lock} method is mapped to {@link #writeLock},
 * and similarly for other methods. The returned Lock does not
 * support a {@link Condition}; method {@link
 * Lock#newCondition()} throws {@code
 * UnsupportedOperationException}.
 *
 * @return the lock
 */
public Lock asWriteLock() {
WriteLockView v;
// Same benign lazy-init race as asReadLock.
return ((v = writeLockView) != null ? v :
(writeLockView = new WriteLockView()));
}
/**
 * Returns a {@link ReadWriteLock} view of this StampedLock in
 * which the {@link ReadWriteLock#readLock()} method is mapped to
 * {@link #asReadLock()}, and {@link ReadWriteLock#writeLock()} to
 * {@link #asWriteLock()}.
 *
 * @return the lock
 */
public ReadWriteLock asReadWriteLock() {
ReadWriteLockView v;
return ((v = readWriteLockView) != null ? v :
(readWriteLockView = new ReadWriteLockView()));
}
// view classes
/** Lock view delegating every operation to the stamp-free read-lock methods. */
final class ReadLockView implements Lock {
public void lock() { readLock(); }
public void lockInterruptibly() throws InterruptedException {
readLockInterruptibly();
}
public boolean tryLock() { return tryReadLock() != 0L; }
public boolean tryLock(long time, TimeUnit unit)
throws InterruptedException {
return tryReadLock(time, unit) != 0L;
}
public void unlock() { unstampedUnlockRead(); }
public Condition newCondition() {
throw new UnsupportedOperationException();
}
}
/** Lock view delegating every operation to the stamp-free write-lock methods. */
final class WriteLockView implements Lock {
public void lock() { writeLock(); }
public void lockInterruptibly() throws InterruptedException {
writeLockInterruptibly();
}
public boolean tryLock() { return tryWriteLock() != 0L; }
public boolean tryLock(long time, TimeUnit unit)
throws InterruptedException {
return tryWriteLock(time, unit) != 0L;
}
public void unlock() { unstampedUnlockWrite(); }
public Condition newCondition() {
throw new UnsupportedOperationException();
}
}
/** ReadWriteLock facade combining the two lock views above. */
final class ReadWriteLockView implements ReadWriteLock {
public Lock readLock() { return asReadLock(); }
public Lock writeLock() { return asWriteLock(); }
}
// Unlock methods without stamp argument checks for view classes.
// Needed because view-class lock methods throw away stamps.
final void unstampedUnlockWrite() {
WNode h; long s;
if (((s = state) & WBIT) == 0L)
throw new IllegalMonitorStateException();
// Same release protocol as unlockWrite, minus the stamp match check.
state = (s += WBIT) == 0L ? ORIGIN : s;
if ((h = whead) != null && h.status != 0)
release(h);
}
final void unstampedUnlockRead() {
for (;;) {
long s, m; WNode h;
if ((m = (s = state) & ABITS) == 0L || m >= WBIT)
throw new IllegalMonitorStateException();
else if (m < RFULL) {
if (U.compareAndSwapLong(this, STATE, s, s - RUNIT)) {
if (m == RUNIT && (h = whead) != null && h.status != 0)
release(h); // last reader gone; wake a waiter
break;
}
}
else if (tryDecReaderOverflow(s) != 0L)
break; // released via the overflow word
}
}
private void readObject(java.io.ObjectInputStream s)
throws java.io.IOException, ClassNotFoundException {
s.defaultReadObject();
state = ORIGIN; // reset to unlocked state
}
// internals
/**
 * Tries to increment readerOverflow by first setting state
 * access bits value to RBITS, indicating hold of spinlock,
 * then updating, then releasing.
 *
 * @param s a reader overflow stamp: (s & ABITS) >= RFULL
 * @return new stamp on success, else zero
 */
private long tryIncReaderOverflow(long s) {
// assert (s & ABITS) >= RFULL;
if ((s & ABITS) == RFULL) {
// CAS to RBITS acquires the overflow spinlock; writing s back releases it.
if (U.compareAndSwapLong(this, STATE, s, s | RBITS)) {
++readerOverflow;
state = s;
return s;
}
}
else if ((ThreadLocalRandom.current().nextInt() &
OVERFLOW_YIELD_RATE) == 0)
Thread.yield(); // someone else holds the overflow spinlock; occasionally yield
return 0L;
}
/**
 * Tries to decrement readerOverflow.
 *
 * @param s a reader overflow stamp: (s & ABITS) >= RFULL
 * @return new stamp on success, else zero
 */
private long tryDecReaderOverflow(long s) {
// assert (s & ABITS) >= RFULL;
if ((s & ABITS) == RFULL) {
if (U.compareAndSwapLong(this, STATE, s, s | RBITS)) {
int r; long next;
// Prefer draining the overflow word before touching the in-state count.
if ((r = readerOverflow) > 0) {
readerOverflow = r - 1;
next = s;
}
else
next = s - RUNIT;
state = next; // releases the spinlock
return next;
}
}
else if ((ThreadLocalRandom.current().nextInt() &
OVERFLOW_YIELD_RATE) == 0)
Thread.yield();
return 0L;
}
/**
 * Wakes up the successor of h (normally whead). This is normally
 * just h.next, but may require traversal from wtail if next
 * pointers are lagging. This may fail to wake up an acquiring
 * thread when one or more have been cancelled, but the cancel
 * methods themselves provide extra safeguards to ensure liveness.
 */
private void release(WNode h) {
if (h != null) {
WNode q; Thread w;
U.compareAndSwapInt(h, WSTATUS, WAITING, 0);
// If the next pointer is missing or cancelled, walk back from the tail
// to find the closest non-cancelled successor of h.
if ((q = h.next) == null || q.status == CANCELLED) {
for (WNode t = wtail; t != null && t != h; t = t.prev)
if (t.status <= 0)
q = t;
}
if (q != null && (w = q.thread) != null)
U.unpark(w);
}
}
    /**
     * See above for explanation.
     *
     * @param interruptible true if should check interrupts and if so
     * return INTERRUPTED
     * @param deadline if nonzero, the System.nanoTime value to timeout
     * at (and return zero)
     * @return next state, or INTERRUPTED
     */
    private long acquireWrite(boolean interruptible, long deadline) {
        WNode node = null, p;
        // Phase 1: try to grab the write lock outright; otherwise lazily
        // create a writer node and CAS it onto the tail of the queue.
        for (int spins = -1;;) { // spin while enqueuing
            long m, s, ns;
            if ((m = (s = state) & ABITS) == 0L) {
                // Lock free: try to set the write bit and return the stamp.
                if (U.compareAndSwapLong(this, STATE, s, ns = s + WBIT))
                    return ns;
            }
            else if (spins < 0)
                // Only worth spinning if the holder is a writer and the
                // queue is empty (we would be next anyway).
                spins = (m == WBIT && wtail == whead) ? SPINS : 0;
            else if (spins > 0) {
                // nextInt() >= 0 is a cheap pseudo-random half-rate decrement.
                if (ThreadLocalRandom.current().nextInt() >= 0)
                    --spins;
            }
            else if ((p = wtail) == null) { // initialize queue
                WNode hd = new WNode(WMODE, null);
                if (U.compareAndSwapObject(this, WHEAD, null, hd))
                    wtail = hd;
            }
            else if (node == null)
                node = new WNode(WMODE, p);
            else if (node.prev != p)
                node.prev = p;
            else if (U.compareAndSwapObject(this, WTAIL, p, node)) {
                p.next = node;
                break;
            }
        }
        // Phase 2: enqueued; alternate between spinning at the head and
        // parking until the write bit can be acquired.
        for (int spins = -1;;) {
            WNode h, np, pp; int ps;
            if ((h = whead) == p) {
                // Our predecessor is the head: we are next in line.
                if (spins < 0)
                    spins = HEAD_SPINS;
                else if (spins < MAX_HEAD_SPINS)
                    spins <<= 1;  // exponential backoff-in-reverse: spin longer
                for (int k = spins;;) { // spin at head
                    long s, ns;
                    if (((s = state) & ABITS) == 0L) {
                        if (U.compareAndSwapLong(this, STATE, s,
                                                 ns = s + WBIT)) {
                            // Acquired: install ourselves as the new head.
                            whead = node;
                            node.prev = null;
                            return ns;
                        }
                    }
                    else if (ThreadLocalRandom.current().nextInt() >= 0 &&
                             --k <= 0)
                        break;
                }
            }
            else if (h != null) { // help release stale waiters
                WNode c; Thread w;
                while ((c = h.cowait) != null) {
                    if (U.compareAndSwapObject(h, WCOWAIT, c, c.cowait) &&
                        (w = c.thread) != null)
                        U.unpark(w);
                }
            }
            if (whead == h) {
                if ((np = node.prev) != p) {
                    if (np != null)
                        (p = np).next = node; // stale
                }
                else if ((ps = p.status) == 0)
                    // Mark predecessor so it knows to wake us on release.
                    U.compareAndSwapInt(p, WSTATUS, 0, WAITING);
                else if (ps == CANCELLED) {
                    // Skip over a cancelled predecessor.
                    if ((pp = p.prev) != null) {
                        node.prev = pp;
                        pp.next = node;
                    }
                }
                else {
                    long time; // 0 argument to park means no timeout
                    if (deadline == 0L)
                        time = 0L;
                    else if ((time = deadline - System.nanoTime()) <= 0L)
                        return cancelWaiter(node, node, false);
                    Thread wt = Thread.currentThread();
                    U.putObject(wt, PARKBLOCKER, this);
                    node.thread = wt;
                    // Re-check conditions just before parking to avoid a
                    // missed wakeup; only park if still blocked.
                    if (p.status < 0 && (p != h || (state & ABITS) != 0L) &&
                        whead == h && node.prev == p)
                        U.park(false, time); // emulate LockSupport.park
                    node.thread = null;
                    U.putObject(wt, PARKBLOCKER, null);
                    if (interruptible && Thread.interrupted())
                        return cancelWaiter(node, node, true);
                }
            }
        }
    }
    /**
     * See above for explanation.
     *
     * @param interruptible true if should check interrupts and if so
     * return INTERRUPTED
     * @param deadline if nonzero, the System.nanoTime value to timeout
     * at (and return zero)
     * @return next state, or INTERRUPTED
     */
    private long acquireRead(boolean interruptible, long deadline) {
        WNode node = null, p;
        // Phase 1: fast path when the queue is empty, else enqueue either
        // as a queue node (RMODE) or as a cowaiter on an existing reader.
        for (int spins = -1;;) {
            WNode h;
            if ((h = whead) == (p = wtail)) {
                // Queue empty (or head==tail): spin trying to bump the
                // reader count, falling into overflow handling at RFULL.
                for (long m, s, ns;;) {
                    if ((m = (s = state) & ABITS) < RFULL ?
                        U.compareAndSwapLong(this, STATE, s, ns = s + RUNIT) :
                        (m < WBIT && (ns = tryIncReaderOverflow(s)) != 0L))
                        return ns;
                    else if (m >= WBIT) {
                        // Writer holds the lock; spin a bounded number of
                        // times, re-reading head/tail to detect changes.
                        if (spins > 0) {
                            if (ThreadLocalRandom.current().nextInt() >= 0)
                                --spins;
                        }
                        else {
                            if (spins == 0) {
                                WNode nh = whead, np = wtail;
                                if ((nh == h && np == p) || (h = nh) != (p = np))
                                    break;
                            }
                            spins = SPINS;
                        }
                    }
                }
            }
            if (p == null) { // initialize queue
                WNode hd = new WNode(WMODE, null);
                if (U.compareAndSwapObject(this, WHEAD, null, hd))
                    wtail = hd;
            }
            else if (node == null)
                node = new WNode(RMODE, p);
            else if (h == p || p.mode != RMODE) {
                // Tail is not a reader group: append a new queue node.
                if (node.prev != p)
                    node.prev = p;
                else if (U.compareAndSwapObject(this, WTAIL, p, node)) {
                    p.next = node;
                    break;
                }
            }
            // Tail is a reader: try to join its cowait chain instead of
            // lengthening the queue.
            else if (!U.compareAndSwapObject(p, WCOWAIT,
                                             node.cowait = p.cowait, node))
                node.cowait = null;
            else {
                // Joined as a cowaiter: wait here until the group leader
                // acquires and releases us, or we time out / interrupt.
                for (;;) {
                    WNode pp, c; Thread w;
                    if ((h = whead) != null && (c = h.cowait) != null &&
                        U.compareAndSwapObject(h, WCOWAIT, c, c.cowait) &&
                        (w = c.thread) != null) // help release
                        U.unpark(w);
                    if (h == (pp = p.prev) || h == p || pp == null) {
                        // Group is at (or near) the head: retry read acquire
                        // directly while no writer holds the lock.
                        long m, s, ns;
                        do {
                            if ((m = (s = state) & ABITS) < RFULL ?
                                U.compareAndSwapLong(this, STATE, s,
                                                     ns = s + RUNIT) :
                                (m < WBIT &&
                                 (ns = tryIncReaderOverflow(s)) != 0L))
                                return ns;
                        } while (m < WBIT);
                    }
                    if (whead == h && p.prev == pp) {
                        long time;
                        if (pp == null || h == p || p.status > 0) {
                            node = null; // throw away
                            break;
                        }
                        if (deadline == 0L)
                            time = 0L;
                        else if ((time = deadline - System.nanoTime()) <= 0L)
                            return cancelWaiter(node, p, false);
                        Thread wt = Thread.currentThread();
                        U.putObject(wt, PARKBLOCKER, this);
                        node.thread = wt;
                        // Recheck before parking to avoid missed wakeups.
                        if ((h != pp || (state & ABITS) == WBIT) &&
                            whead == h && p.prev == pp)
                            U.park(false, time);
                        node.thread = null;
                        U.putObject(wt, PARKBLOCKER, null);
                        if (interruptible && Thread.interrupted())
                            return cancelWaiter(node, p, true);
                    }
                }
            }
        }
        // Phase 2: node is enqueued as a group leader; spin at head then
        // park, like acquireWrite, and release cowaiters on success.
        for (int spins = -1;;) {
            WNode h, np, pp; int ps;
            if ((h = whead) == p) {
                if (spins < 0)
                    spins = HEAD_SPINS;
                else if (spins < MAX_HEAD_SPINS)
                    spins <<= 1;
                for (int k = spins;;) { // spin at head
                    long m, s, ns;
                    if ((m = (s = state) & ABITS) < RFULL ?
                        U.compareAndSwapLong(this, STATE, s, ns = s + RUNIT) :
                        (m < WBIT && (ns = tryIncReaderOverflow(s)) != 0L)) {
                        WNode c; Thread w;
                        // Acquired: become head and wake all cowaiting
                        // readers so they can acquire too.
                        whead = node;
                        node.prev = null;
                        while ((c = node.cowait) != null) {
                            if (U.compareAndSwapObject(node, WCOWAIT,
                                                       c, c.cowait) &&
                                (w = c.thread) != null)
                                U.unpark(w);
                        }
                        return ns;
                    }
                    else if (m >= WBIT &&
                             ThreadLocalRandom.current().nextInt() >= 0 && --k <= 0)
                        break;
                }
            }
            else if (h != null) {
                WNode c; Thread w;
                while ((c = h.cowait) != null) {
                    if (U.compareAndSwapObject(h, WCOWAIT, c, c.cowait) &&
                        (w = c.thread) != null)
                        U.unpark(w);
                }
            }
            if (whead == h) {
                if ((np = node.prev) != p) {
                    if (np != null)
                        (p = np).next = node; // stale
                }
                else if ((ps = p.status) == 0)
                    U.compareAndSwapInt(p, WSTATUS, 0, WAITING);
                else if (ps == CANCELLED) {
                    if ((pp = p.prev) != null) {
                        node.prev = pp;
                        pp.next = node;
                    }
                }
                else {
                    long time;
                    if (deadline == 0L)
                        time = 0L;
                    else if ((time = deadline - System.nanoTime()) <= 0L)
                        return cancelWaiter(node, node, false);
                    Thread wt = Thread.currentThread();
                    U.putObject(wt, PARKBLOCKER, this);
                    node.thread = wt;
                    if (p.status < 0 &&
                        (p != h || (state & ABITS) == WBIT) &&
                        whead == h && node.prev == p)
                        U.park(false, time);
                    node.thread = null;
                    U.putObject(wt, PARKBLOCKER, null);
                    if (interruptible && Thread.interrupted())
                        return cancelWaiter(node, node, true);
                }
            }
        }
    }
    /**
     * If node non-null, forces cancel status and unsplices it from
     * queue if possible and wakes up any cowaiters (of the node, or
     * group, as applicable), and in any case helps release current
     * first waiter if lock is free. (Calling with null arguments
     * serves as a conditional form of release, which is not currently
     * needed but may be needed under possible future cancellation
     * policies). This is a variant of cancellation methods in
     * AbstractQueuedSynchronizer (see its detailed explanation in AQS
     * internal documentation).
     *
     * @param node if nonnull, the waiter
     * @param group either node or the group node is cowaiting with
     * @param interrupted if already interrupted
     * @return INTERRUPTED if interrupted or Thread.interrupted, else zero
     */
    private long cancelWaiter(WNode node, WNode group, boolean interrupted) {
        if (node != null && group != null) {
            Thread w;
            node.status = CANCELLED;
            // unsplice cancelled nodes from group
            for (WNode p = group, q; (q = p.cowait) != null;) {
                if (q.status == CANCELLED) {
                    U.compareAndSwapObject(p, WCOWAIT, q, q.cowait);
                    p = group; // restart
                }
                else
                    p = q;
            }
            if (group == node) {
                // node is a group leader: wake remaining cowaiters and then
                // remove node itself from the main queue.
                for (WNode r = group.cowait; r != null; r = r.cowait) {
                    if ((w = r.thread) != null)
                        U.unpark(w); // wake up uncancelled co-waiters
                }
                for (WNode pred = node.prev; pred != null; ) { // unsplice
                    WNode succ, pp; // find valid successor
                    while ((succ = node.next) == null ||
                           succ.status == CANCELLED) {
                        WNode q = null; // find successor the slow way
                        for (WNode t = wtail; t != null && t != node; t = t.prev)
                            if (t.status != CANCELLED)
                                q = t; // don't link if succ cancelled
                        if (succ == q || // ensure accurate successor
                            U.compareAndSwapObject(node, WNEXT,
                                                   succ, succ = q)) {
                            if (succ == null && node == wtail)
                                U.compareAndSwapObject(this, WTAIL, node, pred);
                            break;
                        }
                    }
                    if (pred.next == node) // unsplice pred link
                        U.compareAndSwapObject(pred, WNEXT, node, succ);
                    if (succ != null && (w = succ.thread) != null) {
                        succ.thread = null;
                        U.unpark(w); // wake up succ to observe new pred
                    }
                    if (pred.status != CANCELLED || (pp = pred.prev) == null)
                        break;
                    node.prev = pp; // repeat if new pred wrong/cancelled
                    U.compareAndSwapObject(pp, WNEXT, pred, succ);
                    pred = pp;
                }
            }
        }
        WNode h; // Possibly release first waiter
        while ((h = whead) != null) {
            long s; WNode q; // similar to release() but check eligibility
            if ((q = h.next) == null || q.status == CANCELLED) {
                for (WNode t = wtail; t != null && t != h; t = t.prev)
                    if (t.status <= 0)
                        q = t;
            }
            if (h == whead) {
                if (q != null && h.status == 0 &&
                    ((s = state) & ABITS) != WBIT && // waiter is eligible
                    (s == 0L || q.mode == RMODE))
                    release(h);
                break;
            }
        }
        return (interrupted || Thread.interrupted()) ? INTERRUPTED : 0L;
    }
    // Unsafe mechanics
    // Field offsets used for CAS/volatile access via sun.misc.Unsafe;
    // resolved once at class load. Failure to resolve any of them is
    // unrecoverable, hence the Error thrown from the static block.
    private static final sun.misc.Unsafe U;
    private static final long STATE;
    private static final long WHEAD;
    private static final long WTAIL;
    private static final long WNEXT;
    private static final long WSTATUS;
    private static final long WCOWAIT;
    private static final long PARKBLOCKER;
    static {
        try {
            U = getUnsafe();
            Class<?> k = StampedLock.class;
            Class<?> wk = WNode.class;
            STATE = U.objectFieldOffset
                (k.getDeclaredField("state"));
            WHEAD = U.objectFieldOffset
                (k.getDeclaredField("whead"));
            WTAIL = U.objectFieldOffset
                (k.getDeclaredField("wtail"));
            WSTATUS = U.objectFieldOffset
                (wk.getDeclaredField("status"));
            WNEXT = U.objectFieldOffset
                (wk.getDeclaredField("next"));
            WCOWAIT = U.objectFieldOffset
                (wk.getDeclaredField("cowait"));
            // Thread.parkBlocker offset lets us set the blocker object
            // directly, emulating LockSupport.park(this).
            Class<?> tk = Thread.class;
            PARKBLOCKER = U.objectFieldOffset
                (tk.getDeclaredField("parkBlocker"));
        } catch (Exception e) {
            throw new Error(e);
        }
    }
    /**
     * Returns a sun.misc.Unsafe. Suitable for use in a 3rd party package.
     * Replace with a simple call to Unsafe.getUnsafe when integrating
     * into a jdk.
     *
     * @return a sun.misc.Unsafe
     */
    private static sun.misc.Unsafe getUnsafe() {
        try {
            // Works when this class is on the boot classpath.
            return sun.misc.Unsafe.getUnsafe();
        } catch (SecurityException tryReflectionInstead) {}
        try {
            // Fallback: reflectively scan Unsafe's static fields for the
            // singleton instance (the field name varies across VMs).
            return java.security.AccessController.doPrivileged
            (new java.security.PrivilegedExceptionAction<sun.misc.Unsafe>() {
                public sun.misc.Unsafe run() throws Exception {
                    Class<sun.misc.Unsafe> k = sun.misc.Unsafe.class;
                    for (java.lang.reflect.Field f : k.getDeclaredFields()) {
                        f.setAccessible(true);
                        Object x = f.get(null);
                        if (k.isInstance(x))
                            return k.cast(x);
                    }
                    throw new NoSuchFieldError("the Unsafe");
                }});
        } catch (java.security.PrivilegedActionException e) {
            throw new RuntimeException("Could not initialize intrinsics",
                                       e.getCause());
        }
    }
} | 0true
| src_main_java_jsr166e_StampedLock.java |
866 | public class GetAndAlterOperation extends AbstractAlterOperation {
public GetAndAlterOperation() {
}
public GetAndAlterOperation(String name, Data function) {
super(name, function);
}
@Override
public void run() throws Exception {
NodeEngine nodeEngine = getNodeEngine();
IFunction f = nodeEngine.toObject(function);
ReferenceWrapper reference = getReference();
Object input = nodeEngine.toObject(reference.get());
response = input;
//noinspection unchecked
Object output = f.apply(input);
shouldBackup = !isEquals(input, output);
if (shouldBackup) {
backup = nodeEngine.toData(output);
reference.set(backup);
}
}
@Override
public int getId() {
return AtomicReferenceDataSerializerHook.GET_AND_ALTER;
}
} | 0true
| hazelcast_src_main_java_com_hazelcast_concurrent_atomicreference_operations_GetAndAlterOperation.java |
/**
 * Transport-layer contract for an HTTP server implementation. Exposes the
 * address it is bound to, info/stats views, and accepts the adapter that
 * dispatches incoming requests. Lifecycle (start/stop/close) comes from
 * {@code LifecycleComponent}.
 */
public interface HttpServerTransport extends LifecycleComponent<HttpServerTransport> {

    /** The address(es) this HTTP transport is bound to. */
    BoundTransportAddress boundAddress();

    /** Static information about this HTTP transport. */
    HttpInfo info();

    /** Current statistics for this HTTP transport. */
    HttpStats stats();

    /** Registers the adapter that will receive dispatched HTTP requests. */
    void httpServerAdapter(HttpServerAdapter httpServerAdapter);
}
| src_main_java_org_elasticsearch_http_HttpServerTransport.java |
    /**
     * Thrown when a node pings the master but the master does not know about
     * it. The stack trace is suppressed because this exception is used as a
     * lightweight signal, not as an error to be diagnosed from a trace.
     */
    static class NodeDoesNotExistOnMasterException extends ElasticsearchIllegalStateException {
        @Override
        public Throwable fillInStackTrace() {
            // Intentionally skip (expensive) stack trace capture.
            return null;
        }
    }
| src_main_java_org_elasticsearch_discovery_zen_fd_MasterFaultDetection.java |
/**
 * YAML flavor of the Jackson-based content parser. Jackson's YAML parser
 * implements the same {@code JsonParser} API, so all parsing behavior is
 * inherited; only the reported content type differs.
 */
public class YamlXContentParser extends JsonXContentParser {

    public YamlXContentParser(JsonParser parser) {
        super(parser);
    }

    @Override
    public XContentType contentType() {
        return XContentType.YAML;
    }
}
| src_main_java_org_elasticsearch_common_xcontent_yaml_YamlXContentParser.java |
/**
 * Base class for collection operations that require backups. Backup counts
 * are taken from the collection's configured container, so subclasses only
 * need to supply the backup operation itself.
 */
public abstract class CollectionBackupAwareOperation extends CollectionOperation implements BackupAwareOperation {

    protected CollectionBackupAwareOperation() {
    }

    protected CollectionBackupAwareOperation(String name) {
        super(name);
    }

    /** Number of synchronous backups, as configured for this collection. */
    @Override
    public int getSyncBackupCount() {
        return getOrCreateContainer().getConfig().getBackupCount();
    }

    /** Number of asynchronous backups, as configured for this collection. */
    @Override
    public int getAsyncBackupCount() {
        return getOrCreateContainer().getConfig().getAsyncBackupCount();
    }
}
| hazelcast_src_main_java_com_hazelcast_collection_CollectionBackupAwareOperation.java |
    /**
     * Mapping parameter names accepted by this field type.
     */
    public static class Names {
        public static final String TREE = "tree";
        public static final String TREE_GEOHASH = "geohash";
        public static final String TREE_QUADTREE = "quadtree";
        public static final String TREE_LEVELS = "tree_levels";
        // NOTE(review): the constant name is misspelled ("PRESISION") but is
        // referenced by other code, so it cannot be renamed without a
        // deprecation cycle; the parameter string itself is correct.
        public static final String TREE_PRESISION = "precision";
        public static final String DISTANCE_ERROR_PCT = "distance_error_pct";
        public static final String STRATEGY = "strategy";
    }
| src_main_java_org_elasticsearch_index_mapper_geo_GeoShapeFieldMapper.java |
/**
 * Represents a failure of a search request on a single shard. Carries the
 * shard target (if known), a detailed reason string, an HTTP-style status,
 * and — on the coordinating node only — the original throwable (transient,
 * so it is not serialized over the wire).
 */
public class ShardSearchFailure implements ShardOperationFailedException {

    public static final ShardSearchFailure[] EMPTY_ARRAY = new ShardSearchFailure[0];

    private SearchShardTarget shardTarget;
    private String reason;
    private RestStatus status;
    // Not serialized: only available on the node where the failure occurred.
    private transient Throwable failure;

    private ShardSearchFailure() {
    }

    /** The original throwable, or null when this instance was deserialized. */
    @Nullable
    public Throwable failure() {
        return failure;
    }

    public ShardSearchFailure(Throwable t) {
        this(t, null);
    }

    public ShardSearchFailure(Throwable t, @Nullable SearchShardTarget shardTarget) {
        this.failure = t;
        Throwable actual = ExceptionsHelper.unwrapCause(t);
        // Prefer the shard reported by the exception itself over the
        // explicitly supplied target.
        if (actual != null && actual instanceof SearchException) {
            this.shardTarget = ((SearchException) actual).shard();
        } else if (shardTarget != null) {
            this.shardTarget = shardTarget;
        }
        // Derive the REST status from the exception when possible.
        if (actual != null && actual instanceof ElasticsearchException) {
            status = ((ElasticsearchException) actual).status();
        } else {
            status = RestStatus.INTERNAL_SERVER_ERROR;
        }
        this.reason = ExceptionsHelper.detailedMessage(t);
    }

    public ShardSearchFailure(String reason, SearchShardTarget shardTarget) {
        this(reason, shardTarget, RestStatus.INTERNAL_SERVER_ERROR);
    }

    public ShardSearchFailure(String reason, SearchShardTarget shardTarget, RestStatus status) {
        this.shardTarget = shardTarget;
        this.reason = reason;
        this.status = status;
    }

    /**
     * The search shard target the failure occurred on.
     */
    @Nullable
    public SearchShardTarget shard() {
        return this.shardTarget;
    }

    /** The REST status associated with this failure. */
    public RestStatus status() {
        return this.status;
    }

    /**
     * The index the search failed on.
     */
    @Override
    public String index() {
        if (shardTarget != null) {
            return shardTarget.index();
        }
        return null;
    }

    /**
     * The shard id the search failed on.
     */
    @Override
    public int shardId() {
        if (shardTarget != null) {
            return shardTarget.shardId();
        }
        return -1;
    }

    /**
     * The reason of the failure.
     */
    public String reason() {
        return this.reason;
    }

    @Override
    public String toString() {
        return "shard [" + (shardTarget == null ? "_na" : shardTarget) + "], reason [" + reason + "]";
    }

    /** Deserialization factory; reads one failure from the stream. */
    public static ShardSearchFailure readShardSearchFailure(StreamInput in) throws IOException {
        ShardSearchFailure shardSearchFailure = new ShardSearchFailure();
        shardSearchFailure.readFrom(in);
        return shardSearchFailure;
    }

    // Wire format: [boolean hasTarget][target?][reason][status].
    // readFrom/writeTo must stay in lock-step.
    @Override
    public void readFrom(StreamInput in) throws IOException {
        if (in.readBoolean()) {
            shardTarget = readSearchShardTarget(in);
        }
        reason = in.readString();
        status = RestStatus.readFrom(in);
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        if (shardTarget == null) {
            out.writeBoolean(false);
        } else {
            out.writeBoolean(true);
            shardTarget.writeTo(out);
        }
        out.writeString(reason);
        RestStatus.writeTo(out, status);
    }
}
| src_main_java_org_elasticsearch_action_search_ShardSearchFailure.java |
/**
 * Immutable registration of a {@link TypeConverter} for all types accepted
 * by a type matcher. Part of the Guice SPI element model: it can be visited
 * and re-applied to another {@link Binder}.
 */
public final class TypeConverterBinding implements Element {
    private final Object source;
    private final Matcher<? super TypeLiteral<?>> typeMatcher;
    private final TypeConverter typeConverter;

    TypeConverterBinding(Object source, Matcher<? super TypeLiteral<?>> typeMatcher,
                         TypeConverter typeConverter) {
        // All three components are mandatory.
        this.source = checkNotNull(source, "source");
        this.typeMatcher = checkNotNull(typeMatcher, "typeMatcher");
        this.typeConverter = checkNotNull(typeConverter, "typeConverter");
    }

    /** Where this binding was declared (for error messages). */
    public Object getSource() {
        return source;
    }

    public Matcher<? super TypeLiteral<?>> getTypeMatcher() {
        return typeMatcher;
    }

    public TypeConverter getTypeConverter() {
        return typeConverter;
    }

    public <T> T acceptVisitor(ElementVisitor<T> visitor) {
        return visitor.visit(this);
    }

    /** Replays this registration onto another binder. */
    public void applyTo(Binder binder) {
        binder.withSource(getSource()).convertToTypes(typeMatcher, typeConverter);
    }
}
| src_main_java_org_elasticsearch_common_inject_spi_TypeConverterBinding.java |
    /**
     * The various names of a field: logical name, index name (as stored in
     * the Lucene index), the cleaned index name, the full dotted path, and
     * the path used to extract the value from source. All strings are
     * interned, so identity comparison of names is safe and memory is shared
     * across many mapper instances.
     */
    public static class Names {

        private final String name;

        private final String indexName;

        private final String indexNameClean;

        private final String fullName;

        private final String sourcePath;

        public Names(String name) {
            this(name, name, name, name);
        }

        public Names(String name, String indexName, String indexNameClean, String fullName) {
            this(name, indexName, indexNameClean, fullName, fullName);
        }

        public Names(String name, String indexName, String indexNameClean, String fullName, @Nullable String sourcePath) {
            // Intern all names: they are heavily duplicated across mappers.
            this.name = name.intern();
            this.indexName = indexName.intern();
            this.indexNameClean = indexNameClean.intern();
            this.fullName = fullName.intern();
            // sourcePath defaults to the full dotted path when not given.
            this.sourcePath = sourcePath == null ? this.fullName : sourcePath.intern();
        }

        /**
         * The logical name of the field.
         */
        public String name() {
            return name;
        }

        /**
         * The indexed name of the field. This is the name under which we will
         * store it in the index.
         */
        public String indexName() {
            return indexName;
        }

        /**
         * The cleaned index name, before any "path" modifications performed on it.
         */
        public String indexNameClean() {
            return indexNameClean;
        }

        /**
         * The full name, including dot path.
         */
        public String fullName() {
            return fullName;
        }

        /**
         * The dot path notation to extract the value from source.
         */
        public String sourcePath() {
            return sourcePath;
        }

        /**
         * Creates a new index term based on the provided value.
         */
        public Term createIndexNameTerm(String value) {
            return new Term(indexName, value);
        }

        /**
         * Creates a new index term based on the provided value.
         */
        public Term createIndexNameTerm(BytesRef value) {
            return new Term(indexName, value);
        }
    }
| src_main_java_org_elasticsearch_index_mapper_FieldMapper.java |
/**
 * DAO for CRUD access to {@code SearchIntercept} entities, keyed by search
 * term.
 *
 * @deprecated retained for backward compatibility only; no replacement is
 * visible from this file — consult the search module for the successor API.
 */
@Deprecated
public interface SearchInterceptDao {

    /** Looks up the intercept registered for the given search term, if any. */
    public SearchIntercept findInterceptByTerm(String term);

    public List<SearchIntercept> findAllIntercepts();

    public void createIntercept(SearchIntercept intercept);

    public void updateIntercept(SearchIntercept intercept);

    public void deleteIntercept(SearchIntercept intercept);
}
| core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_search_dao_SearchInterceptDao.java |
/**
 * Collection operation returning the sub-list of a distributed list between
 * the {@code from} and {@code to} indices. The resulting elements are
 * wrapped in a {@link SerializableCollection} as the response.
 */
public class ListSubOperation extends CollectionOperation {

    // Sub-list bounds; serialized with the operation.
    private int from;
    private int to;

    public ListSubOperation() {
    }

    public ListSubOperation(String name, int from, int to) {
        super(name);
        this.from = from;
        this.to = to;
    }

    @Override
    public int getId() {
        return CollectionDataSerializerHook.LIST_SUB;
    }

    @Override
    public void beforeRun() throws Exception {
    }

    @Override
    public void run() throws Exception {
        final List<Data> sub = getOrCreateListContainer().sub(from, to);
        response = new SerializableCollection(sub);
    }

    @Override
    public void afterRun() throws Exception {
    }

    // Wire format: [super fields][from:int][to:int] — read and write must
    // remain in lock-step.
    @Override
    protected void writeInternal(ObjectDataOutput out) throws IOException {
        super.writeInternal(out);
        out.writeInt(from);
        out.writeInt(to);
    }

    @Override
    protected void readInternal(ObjectDataInput in) throws IOException {
        super.readInternal(in);
        from = in.readInt();
        to = in.readInt();
    }
}
| hazelcast_src_main_java_com_hazelcast_collection_list_ListSubOperation.java |
3,071 | public class SnapshotIndexCommits implements Iterable<SnapshotIndexCommit>, Releasable {
private final List<SnapshotIndexCommit> commits;
public SnapshotIndexCommits(List<SnapshotIndexCommit> commits) {
this.commits = commits;
}
public int size() {
return commits.size();
}
@Override
public Iterator<SnapshotIndexCommit> iterator() {
return commits.iterator();
}
public boolean release() {
boolean result = false;
for (SnapshotIndexCommit snapshot : commits) {
result |= snapshot.release();
}
return result;
}
} | 0true
| src_main_java_org_elasticsearch_index_deletionpolicy_SnapshotIndexCommits.java |
/**
 * Unit tests for {@code AbbreviationsManager}: alternative lookup ordering
 * and phrase decomposition against a small fixture of abbreviations.
 */
public class AbbreviationsManagerTest {

    // Shared fixture rebuilt before each test method.
    private Properties defaultProperties;

    @BeforeMethod
    public void init() {
        defaultProperties = new Properties();
        defaultProperties.setProperty("fiber optic", "F/O");
        defaultProperties.setProperty("system", "Sys");
    }

    @Test
    public void testGetAlternatives() {
        AbbreviationsManager manager;
        Properties props;
        List<String> alternatives;

        // Unknown phrase: the only alternative is the phrase itself.
        props = new Properties();
        manager = new AbbreviationsManager(props);
        alternatives = manager.getAlternatives("volts");
        assertEquals(alternatives.size(), 1);
        assertEquals(alternatives.get(0), "volts");

        props = new Properties();
        props.setProperty("Volts", "V"); // Note that lookup should be case insensitive.
        manager = new AbbreviationsManager(props);
        alternatives = manager.getAlternatives("volts");
        assertEquals(alternatives.size(), 2);
        assertEquals(alternatives.get(0), "volts"); // Matches the case of getAbbreviations() argument.
        assertEquals(alternatives.get(1), "V");

        // Multiple alternatives: expect ordering by length, then by the
        // left-to-right order in which equal-length entries were specified.
        props = new Properties();
        props.setProperty("Amperes", "Amps | A | aa | bbbb | a | aaaa ");
        manager = new AbbreviationsManager(props);
        alternatives = manager.getAlternatives("amperes");
        assertEquals(alternatives.size(), 7);
        assertEquals(alternatives.get(0), "amperes"); // Must match in case to getAbbreviations() argument.
        assertEquals(alternatives.get(1), "A");
        assertEquals(alternatives.get(2), "a");
        assertEquals(alternatives.get(3), "aa");
        assertEquals(alternatives.get(4), "Amps"); // same length items are in left to right specified order
        assertEquals(alternatives.get(5), "bbbb");
        assertEquals(alternatives.get(6), "aaaa");
    }

    @Test(dataProvider="getAbbreviationsTests")
    public void testGetAbbreviations(String s, String[] expectedPhrases) {
        AbbreviationsManager manager = new AbbreviationsManager(defaultProperties);
        Abbreviations abbrev = manager.getAbbreviations(s);
        assertEquals(abbrev.getValue(), s);
        assertEquals(abbrev.getPhrases().size(), expectedPhrases.length);
        // Each recognized phrase must expose its alternatives, first of
        // which is always the phrase itself.
        for (int i=0; i<abbrev.getPhrases().size(); ++i) {
            String phrase = abbrev.getPhrases().get(i);
            assertEquals(phrase, expectedPhrases[i]);
            List<String> alternatives = abbrev.getAbbreviations(phrase);
            List<String> expectedAlternatives = manager.getAlternatives(abbrev.getPhrases().get(i));
            assertTrue(alternatives.size() >= 1);
            assertEquals(alternatives.size(), expectedAlternatives.size());
            assertEquals(alternatives.get(0), abbrev.getPhrases().get(i));
        }
    }

    @DataProvider(name="getAbbreviationsTests")
    private Object[][] getGetAbbreviationsTests() {
        return new Object[][] {
            { "System", new String[] { "System" } }, // One word in abbreviations map
            { "MDM", new String[] { "MDM" } }, // One word not in abbreviations map
            { "Fiber Optic", new String[] { "Fiber Optic" } }, // Exact phrase in abbreviations map
            // Some longer tests.
            { "Junk1 Junk2 Junk3", new String[] { "Junk1", "Junk2", "Junk3" } }, // No matches
            { "Fiber Optic MDM System", new String[] { "Fiber Optic", "MDM", "System" } },
        };
    }
}
| tableViews_src_test_java_gov_nasa_arc_mct_abbreviation_impl_AbbreviationsManagerTest.java |
/**
 * Factory for the {@code pattern_replace} char filter. Requires a
 * {@code pattern} setting (a Java regex); {@code replacement} defaults to
 * the empty string, effectively deleting matches.
 */
@AnalysisSettingsRequired
public class PatternReplaceCharFilterFactory extends AbstractCharFilterFactory {

    private final Pattern pattern;
    private final String replacement;

    @Inject
    public PatternReplaceCharFilterFactory(Index index, @IndexSettings Settings indexSettings, @Assisted String name, @Assisted Settings settings) {
        super(index, indexSettings, name);

        if (!Strings.hasLength(settings.get("pattern"))) {
            throw new ElasticsearchIllegalArgumentException("pattern is missing for [" + name + "] char filter of type 'pattern_replace'");
        }
        // Compiled once here; Pattern is thread-safe and reused per create().
        pattern = Pattern.compile(settings.get("pattern"));
        replacement = settings.get("replacement", ""); // when not set or set to "", use "".
    }

    public Pattern getPattern() {
        return pattern;
    }

    public String getReplacement() {
        return replacement;
    }

    @Override
    public Reader create(Reader tokenStream) {
        return new PatternReplaceCharFilter(pattern, replacement, tokenStream);
    }
}
| src_main_java_org_elasticsearch_index_analysis_PatternReplaceCharFilterFactory.java |
403 | public class CreditCardType implements Serializable, BroadleafEnumerationType {
private static final long serialVersionUID = 1L;
private static final Map<String, CreditCardType> TYPES = new LinkedHashMap<String, CreditCardType>();
public static final CreditCardType MASTERCARD = new CreditCardType("MASTERCARD", "Master Card");
public static final CreditCardType VISA = new CreditCardType("VISA", "Visa");
public static final CreditCardType AMEX = new CreditCardType("AMEX", "American Express");
public static final CreditCardType DINERSCLUB_CARTEBLANCHE = new CreditCardType("DINERSCLUB_CARTEBLANCHE", "Diner's Club / Carte Blanche");
public static final CreditCardType DISCOVER = new CreditCardType("DISCOVER", "Discover");
public static final CreditCardType ENROUTE = new CreditCardType("ENROUTE", "En Route");
public static final CreditCardType JCB = new CreditCardType("JCB", "JCB");
public static CreditCardType getInstance(final String type) {
return TYPES.get(type);
}
private String type;
private String friendlyType;
public CreditCardType() {
//do nothing
}
public CreditCardType(final String type, final String friendlyType) {
this.friendlyType = friendlyType;
setType(type);
}
public String getType() {
return type;
}
public String getFriendlyType() {
return friendlyType;
}
private void setType(final String type) {
this.type = type;
if (!TYPES.containsKey(type)){
TYPES.put(type, this);
}
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((type == null) ? 0 : type.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
CreditCardType other = (CreditCardType) obj;
if (type == null) {
if (other.type != null)
return false;
} else if (!type.equals(other.type))
return false;
return true;
}
} | 1no label
| common_src_main_java_org_broadleafcommerce_common_payment_CreditCardType.java |
/**
 * Provider for the built-in Romanian analyzer. Stopwords and stem
 * exclusions are configurable via index settings; defaults come from
 * Lucene's {@code RomanianAnalyzer}.
 */
public class RomanianAnalyzerProvider extends AbstractIndexAnalyzerProvider<RomanianAnalyzer> {

    private final RomanianAnalyzer analyzer;

    @Inject
    public RomanianAnalyzerProvider(Index index, @IndexSettings Settings indexSettings, Environment env, @Assisted String name, @Assisted Settings settings) {
        super(index, indexSettings, name, settings);
        // Built once; the same analyzer instance is returned for every get().
        analyzer = new RomanianAnalyzer(version,
                Analysis.parseStopWords(env, settings, RomanianAnalyzer.getDefaultStopSet(), version),
                Analysis.parseStemExclusion(settings, CharArraySet.EMPTY_SET, version));
    }

    @Override
    public RomanianAnalyzer get() {
        return this.analyzer;
    }
}
| src_main_java_org_elasticsearch_index_analysis_RomanianAnalyzerProvider.java |
1,528 | Collections.sort(units, new Comparator<OJPAPersistenceUnitInfo>() {
@Override
public int compare(OJPAPersistenceUnitInfo p1, OJPAPersistenceUnitInfo p2) {
return Integer.valueOf(p1.getPersistenceUnitName()).compareTo(Integer.valueOf(p2.getPersistenceUnitName()));
}
}); | 0true
| object_src_test_java_com_orientechnologies_orient_object_jpa_parsing_PersistenceXMLParsingTest.java |
    /**
     * Test double of a {@link DistributedObject} that records whether
     * {@link #initialize()} was invoked more than once. A repeated call sets
     * the {@code error} flag and throws, letting the test assert the
     * exactly-once initialization contract.
     */
    private static class TestInitializingObject implements DistributedObject, InitializingObject {

        private final String name;
        // Flips false->true on the first initialize() call.
        private final AtomicBoolean init = new AtomicBoolean(false);
        // Set when initialize() is called a second time.
        private volatile boolean error = false;

        protected TestInitializingObject(final String name) {
            this.name = name;
        }

        @Override
        public void initialize() {
            if (!init.compareAndSet(false, true)) {
                error = true;
                throw new IllegalStateException("InitializingObject must be initialized only once!");
            }
        }

        @Override
        public String getName() {
            return name;
        }

        @Override
        public String getServiceName() {
            return TestInitializingObjectService.NAME;
        }

        @Override
        public Object getId() {
            return getName();
        }

        @Override
        public String getPartitionKey() {
            return getName();
        }

        @Override
        public void destroy() {
        }
    }
| hazelcast_src_test_java_com_hazelcast_core_DistributedObjectTest.java |
5,295 | class InternalOrder extends Terms.Order {
/**
* Order by the (higher) count of each term.
*/
public static final InternalOrder COUNT_DESC = new InternalOrder((byte) 1, "_count", false, new Comparator<Terms.Bucket>() {
@Override
public int compare(Terms.Bucket o1, Terms.Bucket o2) {
int cmp = - Longs.compare(o1.getDocCount(), o2.getDocCount());
if (cmp == 0) {
cmp = o1.compareTerm(o2);
}
return cmp;
}
});
/**
* Order by the (lower) count of each term.
*/
public static final InternalOrder COUNT_ASC = new InternalOrder((byte) 2, "_count", true, new Comparator<Terms.Bucket>() {
@Override
public int compare(Terms.Bucket o1, Terms.Bucket o2) {
int cmp = Longs.compare(o1.getDocCount(), o2.getDocCount());
if (cmp == 0) {
cmp = o1.compareTerm(o2);
}
return cmp;
}
});
/**
* Order by the terms.
*/
public static final InternalOrder TERM_DESC = new InternalOrder((byte) 3, "_term", false, new Comparator<Terms.Bucket>() {
@Override
public int compare(Terms.Bucket o1, Terms.Bucket o2) {
return - o1.compareTerm(o2);
}
});
/**
* Order by the terms.
*/
public static final InternalOrder TERM_ASC = new InternalOrder((byte) 4, "_term", true, new Comparator<Terms.Bucket>() {
@Override
public int compare(Terms.Bucket o1, Terms.Bucket o2) {
return o1.compareTerm(o2);
}
});
final byte id;
final String key;
final boolean asc;
protected final Comparator<Terms.Bucket> comparator;
InternalOrder(byte id, String key, boolean asc, Comparator<Terms.Bucket> comparator) {
this.id = id;
this.key = key;
this.asc = asc;
this.comparator = comparator;
}
byte id() {
return id;
}
String key() {
return key;
}
boolean asc() {
return asc;
}
@Override
protected Comparator<Terms.Bucket> comparator(Aggregator aggregator) {
return comparator;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
return builder.startObject().field(key, asc ? "asc" : "desc").endObject();
}
/**
 * Validates that the given order can be applied to the given terms aggregator.
 * Orders that are not based on a sub-aggregation (count/term) always pass. For
 * aggregation-based orders, the referenced sub-aggregation must exist, must be a
 * metrics aggregation, and — when multi-valued — a metric name must be specified
 * and resolvable.
 *
 * @param order           the order to validate
 * @param termsAggregator the terms aggregator the order will be applied to
 * @return the validated order (unchanged)
 * @throws AggregationExecutionException if the order references a missing or
 *                                       unsuitable sub-aggregation
 */
public static InternalOrder validate(InternalOrder order, Aggregator termsAggregator) {
    if (!(order instanceof Aggregation)) {
        return order; // count/term based orders need no validation
    }
    String aggName = ((Aggregation) order).aggName();
    for (Aggregator aggregator : termsAggregator.subAggregators()) {
        if (!aggregator.name().equals(aggName)) {
            continue;
        }
        // we can only apply order on metrics sub-aggregators
        if (!(aggregator instanceof MetricsAggregator)) {
            // note: fixed duplicated word ("is is") in the original message
            throw new AggregationExecutionException("terms aggregation [" + termsAggregator.name() + "] is configured to order by sub-aggregation ["
                    + aggName + "] which is not a metrics aggregation. Terms aggregation order can only refer to metrics aggregations");
        }
        if (aggregator instanceof MetricsAggregator.MultiValue) {
            String valueName = ((Aggregation) order).metricName();
            if (valueName == null) {
                throw new AggregationExecutionException("terms aggregation [" + termsAggregator.name() + "] is configured with a sub-aggregation order ["
                        + aggName + "] which is a multi-valued aggregation, yet no metric name was specified");
            }
            if (!((MetricsAggregator.MultiValue) aggregator).hasMetric(valueName)) {
                throw new AggregationExecutionException("terms aggregation [" + termsAggregator.name() + "] is configured with a sub-aggregation order ["
                        + aggName + "] and value [" + valueName + "] yet the referred sub aggregator holds no metric that goes by this name");
            }
        }
        // single-valued metrics need no metric name
        // todo we can also choose to be really strict and verify that the user didn't specify a value name and if so fail?
        return order;
    }
    throw new AggregationExecutionException("terms aggregation [" + termsAggregator.name() + "] is configured with a sub-aggregation order ["
            + aggName + "] but no sub aggregation with this name is configured");
}
/**
 * An order based on the value of a (metrics) sub-aggregation. The key is either
 * the sub-aggregation name, or {@code "aggName.metricName"} for multi-valued
 * metrics aggregations.
 */
static class Aggregation extends InternalOrder {

    static final byte ID = 0;

    Aggregation(String key, boolean asc) {
        super(ID, key, asc, new MultiBucketsAggregation.Bucket.SubAggregationComparator<Terms.Bucket>(key, asc));
    }

    Aggregation(String aggName, String metricName, boolean asc) {
        super(ID, key(aggName, metricName), asc, new MultiBucketsAggregation.Bucket.SubAggregationComparator<Terms.Bucket>(aggName, metricName, asc));
    }

    /** Name of the referenced sub-aggregation (the part of the key before the dot). */
    String aggName() {
        int index = key.indexOf('.');
        return index < 0 ? key : key.substring(0, index);
    }

    /** Metric name within the sub-aggregation, or null for single-valued metrics. */
    String metricName() {
        int index = key.indexOf('.');
        return index < 0 ? null : key.substring(index + 1, key.length());
    }

    private static String key(String aggName, String valueName) {
        return (valueName == null) ? aggName : aggName + "." + valueName;
    }

    /**
     * Compares two metric values in the requested direction, pushing NaN values to
     * the bottom regardless of direction (some metrics, e.g. avg/variance, may
     * return NaN).
     *
     * BUG FIX: the original used {@code v == Double.NaN}, which is always false
     * under IEEE-754 (NaN never compares equal to anything, including itself), so
     * the NaN handling never ran. {@link Double#isNaN(double)} must be used.
     */
    static int compareMetric(double v1, double v2, boolean asc) {
        if (Double.isNaN(v1)) {
            return Double.isNaN(v2) ? 0 : 1; // NaN sorts last either way
        }
        if (Double.isNaN(v2)) {
            return -1;
        }
        return asc ? Double.compare(v1, v2) : Double.compare(v2, v1);
    }

    @Override
    protected Comparator<Terms.Bucket> comparator(Aggregator termsAggregator) {
        if (termsAggregator == null) {
            return comparator;
        }
        // Internal Optimization:
        //
        // in this phase, if the order is based on sub-aggregations, we need to use a different comparator
        // to avoid constructing buckets for ordering purposes (we can potentially have a lot of buckets and building
        // them will cause loads of redundant object constructions). The "special" comparators here will fetch the
        // sub aggregation values directly from the sub aggregators bypassing bucket creation. Note that the comparator
        // attached to the order will still be used in the reduce phase of the Aggregation.
        final Aggregator aggregator = subAggregator(aggName(), termsAggregator);
        assert aggregator != null && aggregator instanceof MetricsAggregator : "this should be picked up before the aggregation is executed";
        if (aggregator instanceof MetricsAggregator.MultiValue) {
            final String valueName = metricName();
            assert valueName != null : "this should be picked up before the aggregation is executed";
            return new Comparator<Terms.Bucket>() {
                @Override
                public int compare(Terms.Bucket o1, Terms.Bucket o2) {
                    double v1 = ((MetricsAggregator.MultiValue) aggregator).metric(valueName, ((InternalTerms.Bucket) o1).bucketOrd);
                    double v2 = ((MetricsAggregator.MultiValue) aggregator).metric(valueName, ((InternalTerms.Bucket) o2).bucketOrd);
                    return compareMetric(v1, v2, asc);
                }
            };
        }
        return new Comparator<Terms.Bucket>() {
            @Override
            public int compare(Terms.Bucket o1, Terms.Bucket o2) {
                double v1 = ((MetricsAggregator.SingleValue) aggregator).metric(((InternalTerms.Bucket) o1).bucketOrd);
                double v2 = ((MetricsAggregator.SingleValue) aggregator).metric(((InternalTerms.Bucket) o2).bucketOrd);
                return compareMetric(v1, v2, asc);
            }
        };
    }

    /** Finds the direct sub-aggregator with the given name, or null if absent. */
    private Aggregator subAggregator(String aggName, Aggregator termsAggregator) {
        Aggregator[] subAggregators = termsAggregator.subAggregators();
        for (int i = 0; i < subAggregators.length; i++) {
            if (subAggregators[i].name().equals(aggName)) {
                return subAggregators[i];
            }
        }
        return null;
    }
}
/**
 * Serialization helpers for {@link InternalOrder}: writes/reads the order id and,
 * for aggregation-based orders, the direction, aggregation name and optional
 * metric name.
 */
public static class Streams {

    public static void writeOrder(InternalOrder order, StreamOutput out) throws IOException {
        out.writeByte(order.id());
        if (order instanceof Aggregation) {
            // hoist the repeated cast once instead of re-casting per field
            MultiBucketsAggregation.Bucket.SubAggregationComparator comparator =
                    (MultiBucketsAggregation.Bucket.SubAggregationComparator) order.comparator;
            out.writeBoolean(comparator.asc());
            out.writeString(comparator.aggName());
            String valueName = comparator.valueName();
            if (valueName == null) {
                out.writeBoolean(false);
            } else {
                out.writeBoolean(true);
                out.writeString(valueName);
            }
        }
    }

    public static InternalOrder readOrder(StreamInput in) throws IOException {
        byte id = in.readByte();
        switch (id) {
            case 0:
                // aggregation based order: direction, agg name, optional metric name
                boolean asc = in.readBoolean();
                String aggName = in.readString();
                if (in.readBoolean()) {
                    return new InternalOrder.Aggregation(aggName, in.readString(), asc);
                }
                return new InternalOrder.Aggregation(aggName, asc);
            case 1: return InternalOrder.COUNT_DESC;
            case 2: return InternalOrder.COUNT_ASC;
            case 3: return InternalOrder.TERM_DESC;
            case 4: return InternalOrder.TERM_ASC;
            default:
                throw new RuntimeException("unknown terms order");
        }
    }
}
} | 1no label
| src_main_java_org_elasticsearch_search_aggregations_bucket_terms_InternalOrder.java |
/**
 * JPA entity holding a single name/value attribute attached to a {@link Category}.
 * Cached in the shared "blStandardElements" second-level cache region.
 */
@Entity
@Inheritance(strategy = InheritanceType.JOINED)
@Table(name="BLC_CATEGORY_ATTRIBUTE")
@Cache(usage = CacheConcurrencyStrategy.READ_WRITE, region="blStandardElements")
@AdminPresentationClass(friendlyName = "baseCategoryAttribute")
public class CategoryAttributeImpl implements CategoryAttribute {
    private static final long serialVersionUID = 1L;
    // Surrogate primary key, generated via Broadleaf's table-based id generator.
    @Id
    @GeneratedValue(generator= "CategoryAttributeId")
    @GenericGenerator(
        name="CategoryAttributeId",
        strategy="org.broadleafcommerce.common.persistence.IdOverrideTableGenerator",
        parameters = {
            @Parameter(name="segment_value", value="CategoryAttributeImpl"),
            @Parameter(name="entity_name", value="org.broadleafcommerce.core.catalog.domain.CategoryAttributeImpl")
        }
    )
    @Column(name = "CATEGORY_ATTRIBUTE_ID")
    protected Long id;
    // Attribute name; required, indexed, hidden from the admin UI.
    @Column(name = "NAME", nullable=false)
    @Index(name="CATEGORYATTRIBUTE_NAME_INDEX", columnNames={"NAME"})
    @AdminPresentation(visibility = VisibilityEnum.HIDDEN_ALL)
    protected String name;
    // Attribute value.
    // NOTE(review): the friendlyName/group keys reference ProductAttributeImpl_* —
    // presumably copy-pasted from ProductAttributeImpl; confirm whether dedicated
    // CategoryAttributeImpl_* message keys should exist before changing them.
    @Column(name = "VALUE")
    @AdminPresentation(friendlyName = "ProductAttributeImpl_Attribute_Value", order=2, group = "ProductAttributeImpl_Description", prominent=true)
    protected String value;
    // Whether this attribute participates in search; defaults to false.
    @Column(name = "SEARCHABLE")
    @AdminPresentation(excluded = true)
    protected Boolean searchable = false;
    // Owning category; required.
    @ManyToOne(targetEntity = CategoryImpl.class, optional=false)
    @JoinColumn(name = "CATEGORY_ID")
    @Index(name="CATEGORYATTRIBUTE_INDEX", columnNames={"CATEGORY_ID"})
    protected Category category;
    @Override
    public Long getId() {
        return id;
    }
    @Override
    public void setId(Long id) {
        this.id = id;
    }
    @Override
    public String getValue() {
        return value;
    }
    @Override
    public void setValue(String value) {
        this.value = value;
    }
    @Override
    public Boolean getSearchable() {
        // never return null: a missing flag reads as "not searchable"
        if (searchable == null) {
            return Boolean.FALSE;
        } else {
            return searchable;
        }
    }
    @Override
    public void setSearchable(Boolean searchable) {
        this.searchable = searchable;
    }
    @Override
    public String getName() {
        return name;
    }
    @Override
    public void setName(String name) {
        this.name = name;
    }
    @Override
    public String toString() {
        // NOTE(review): returns the raw value, which may be null.
        return value;
    }
    @Override
    public Category getCategory() {
        return category;
    }
    @Override
    public void setCategory(Category category) {
        this.category = category;
    }
    // hashCode/equals are based on (name, category, value); equals prefers the
    // database id when both sides have one (standard JPA entity pattern).
    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + ((name == null) ? 0 : name.hashCode());
        result = prime * result + ((category == null) ? 0 : category.hashCode());
        result = prime * result + ((value == null) ? 0 : value.hashCode());
        return result;
    }
    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        CategoryAttributeImpl other = (CategoryAttributeImpl) obj;
        if (id != null && other.id != null) {
            return id.equals(other.id);
        }
        if (name == null) {
            if (other.name != null)
                return false;
        } else if (!name.equals(other.name))
            return false;
        if (category == null) {
            if (other.category != null)
                return false;
        } else if (!category.equals(other.category))
            return false;
        if (value == null) {
            if (other.value != null)
                return false;
        } else if (!value.equals(other.value))
            return false;
        return true;
    }
}
| core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_catalog_domain_CategoryAttributeImpl.java |
3,720 | public class MultiFieldTests extends ElasticsearchTestCase {
@Test
public void testMultiField_multiFieldType() throws Exception {
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/test-multi-field-type.json");
testMultiField(mapping);
}
@Test
public void testMultiField_multiFields() throws Exception {
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/test-multi-fields.json");
testMultiField(mapping);
}
private void testMultiField(String mapping) throws Exception {
DocumentMapper docMapper = MapperTestUtils.newParser().parse(mapping);
BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/mapper/multifield/test-data.json"));
Document doc = docMapper.parse(json).rootDoc();
IndexableField f = doc.getField("name");
assertThat(f.name(), equalTo("name"));
assertThat(f.stringValue(), equalTo("some name"));
assertThat(f.fieldType().stored(), equalTo(true));
assertThat(f.fieldType().indexed(), equalTo(true));
f = doc.getField("name.indexed");
assertThat(f.name(), equalTo("name.indexed"));
assertThat(f.stringValue(), equalTo("some name"));
assertThat(f.fieldType().stored(), equalTo(false));
assertThat(f.fieldType().indexed(), equalTo(true));
f = doc.getField("name.not_indexed");
assertThat(f.name(), equalTo("name.not_indexed"));
assertThat(f.stringValue(), equalTo("some name"));
assertThat(f.fieldType().stored(), equalTo(true));
assertThat(f.fieldType().indexed(), equalTo(false));
f = doc.getField("object1.multi1");
assertThat(f.name(), equalTo("object1.multi1"));
f = doc.getField("object1.multi1.string");
assertThat(f.name(), equalTo("object1.multi1.string"));
assertThat(f.stringValue(), equalTo("2010-01-01"));
assertThat(docMapper.mappers().fullName("name").mapper(), notNullValue());
assertThat(docMapper.mappers().fullName("name").mapper(), instanceOf(StringFieldMapper.class));
assertThat(docMapper.mappers().fullName("name").mapper().fieldType().indexed(), equalTo(true));
assertThat(docMapper.mappers().fullName("name").mapper().fieldType().stored(), equalTo(true));
assertThat(docMapper.mappers().fullName("name").mapper().fieldType().tokenized(), equalTo(true));
assertThat(docMapper.mappers().fullName("name.indexed").mapper(), notNullValue());
assertThat(docMapper.mappers().fullName("name.indexed").mapper(), instanceOf(StringFieldMapper.class));
assertThat(docMapper.mappers().fullName("name.indexed").mapper().fieldType().indexed(), equalTo(true));
assertThat(docMapper.mappers().fullName("name.indexed").mapper().fieldType().stored(), equalTo(false));
assertThat(docMapper.mappers().fullName("name.indexed").mapper().fieldType().tokenized(), equalTo(true));
assertThat(docMapper.mappers().fullName("name.not_indexed").mapper(), notNullValue());
assertThat(docMapper.mappers().fullName("name.not_indexed").mapper(), instanceOf(StringFieldMapper.class));
assertThat(docMapper.mappers().fullName("name.not_indexed").mapper().fieldType().indexed(), equalTo(false));
assertThat(docMapper.mappers().fullName("name.not_indexed").mapper().fieldType().stored(), equalTo(true));
assertThat(docMapper.mappers().fullName("name.not_indexed").mapper().fieldType().tokenized(), equalTo(true));
assertThat(docMapper.mappers().fullName("name.test1").mapper(), notNullValue());
assertThat(docMapper.mappers().fullName("name.test1").mapper(), instanceOf(StringFieldMapper.class));
assertThat(docMapper.mappers().fullName("name.test1").mapper().fieldType().indexed(), equalTo(true));
assertThat(docMapper.mappers().fullName("name.test1").mapper().fieldType().stored(), equalTo(true));
assertThat(docMapper.mappers().fullName("name.test1").mapper().fieldType().tokenized(), equalTo(true));
assertThat(docMapper.mappers().fullName("name.test1").mapper().fieldDataType().getLoading(), equalTo(FieldMapper.Loading.EAGER));
assertThat(docMapper.mappers().fullName("name.test2").mapper(), notNullValue());
assertThat(docMapper.mappers().fullName("name.test2").mapper(), instanceOf(TokenCountFieldMapper.class));
assertThat(docMapper.mappers().fullName("name.test2").mapper().fieldType().indexed(), equalTo(true));
assertThat(docMapper.mappers().fullName("name.test2").mapper().fieldType().stored(), equalTo(true));
assertThat(docMapper.mappers().fullName("name.test2").mapper().fieldType().tokenized(), equalTo(false));
assertThat(((TokenCountFieldMapper) docMapper.mappers().fullName("name.test2").mapper()).analyzer(), equalTo("simple"));
assertThat(((TokenCountFieldMapper) docMapper.mappers().fullName("name.test2").mapper()).analyzer(), equalTo("simple"));
assertThat(docMapper.mappers().fullName("object1.multi1").mapper(), notNullValue());
assertThat(docMapper.mappers().fullName("object1.multi1").mapper(), instanceOf(DateFieldMapper.class));
assertThat(docMapper.mappers().fullName("object1.multi1.string").mapper(), notNullValue());
assertThat(docMapper.mappers().fullName("object1.multi1.string").mapper(), instanceOf(StringFieldMapper.class));
assertThat(docMapper.mappers().fullName("object1.multi1.string").mapper().fieldType().indexed(), equalTo(true));
assertThat(docMapper.mappers().fullName("object1.multi1.string").mapper().fieldType().tokenized(), equalTo(false));
}
@Test
public void testBuildThenParse() throws Exception {
DocumentMapperParser mapperParser = MapperTestUtils.newParser();
DocumentMapper builderDocMapper = doc("test", rootObject("person").add(
stringField("name").store(true)
.addMultiField(stringField("indexed").index(true).tokenized(true))
.addMultiField(stringField("not_indexed").index(false).store(true))
)).build(mapperParser);
builderDocMapper.refreshSource();
String builtMapping = builderDocMapper.mappingSource().string();
// System.out.println(builtMapping);
// reparse it
DocumentMapper docMapper = mapperParser.parse(builtMapping);
BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/mapper/multifield/test-data.json"));
Document doc = docMapper.parse(json).rootDoc();
IndexableField f = doc.getField("name");
assertThat(f.name(), equalTo("name"));
assertThat(f.stringValue(), equalTo("some name"));
assertThat(f.fieldType().stored(), equalTo(true));
assertThat(f.fieldType().indexed(), equalTo(true));
f = doc.getField("name.indexed");
assertThat(f.name(), equalTo("name.indexed"));
assertThat(f.stringValue(), equalTo("some name"));
assertThat(f.fieldType().tokenized(), equalTo(true));
assertThat(f.fieldType().stored(), equalTo(false));
assertThat(f.fieldType().indexed(), equalTo(true));
f = doc.getField("name.not_indexed");
assertThat(f.name(), equalTo("name.not_indexed"));
assertThat(f.stringValue(), equalTo("some name"));
assertThat(f.fieldType().stored(), equalTo(true));
assertThat(f.fieldType().indexed(), equalTo(false));
}
@Test
public void testConvertMultiFieldNoDefaultField() throws Exception {
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/test-multi-field-type-no-default-field.json");
DocumentMapper docMapper = MapperTestUtils.newParser().parse(mapping);
BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/mapper/multifield/test-data.json"));
Document doc = docMapper.parse(json).rootDoc();
assertNull(doc.getField("name"));
IndexableField f = doc.getField("name.indexed");
assertThat(f.name(), equalTo("name.indexed"));
assertThat(f.stringValue(), equalTo("some name"));
assertThat(f.fieldType().stored(), equalTo(false));
assertThat(f.fieldType().indexed(), equalTo(true));
f = doc.getField("name.not_indexed");
assertThat(f.name(), equalTo("name.not_indexed"));
assertThat(f.stringValue(), equalTo("some name"));
assertThat(f.fieldType().stored(), equalTo(true));
assertThat(f.fieldType().indexed(), equalTo(false));
assertThat(docMapper.mappers().fullName("name").mapper(), notNullValue());
assertThat(docMapper.mappers().fullName("name").mapper(), instanceOf(StringFieldMapper.class));
assertThat(docMapper.mappers().fullName("name").mapper().fieldType().indexed(), equalTo(false));
assertThat(docMapper.mappers().fullName("name").mapper().fieldType().stored(), equalTo(false));
assertThat(docMapper.mappers().fullName("name").mapper().fieldType().tokenized(), equalTo(true));
assertThat(docMapper.mappers().fullName("name.indexed").mapper(), notNullValue());
assertThat(docMapper.mappers().fullName("name.indexed").mapper(), instanceOf(StringFieldMapper.class));
assertThat(docMapper.mappers().fullName("name.indexed").mapper().fieldType().indexed(), equalTo(true));
assertThat(docMapper.mappers().fullName("name.indexed").mapper().fieldType().stored(), equalTo(false));
assertThat(docMapper.mappers().fullName("name.indexed").mapper().fieldType().tokenized(), equalTo(true));
assertThat(docMapper.mappers().fullName("name.not_indexed").mapper(), notNullValue());
assertThat(docMapper.mappers().fullName("name.not_indexed").mapper(), instanceOf(StringFieldMapper.class));
assertThat(docMapper.mappers().fullName("name.not_indexed").mapper().fieldType().indexed(), equalTo(false));
assertThat(docMapper.mappers().fullName("name.not_indexed").mapper().fieldType().stored(), equalTo(true));
assertThat(docMapper.mappers().fullName("name.not_indexed").mapper().fieldType().tokenized(), equalTo(true));
assertNull(doc.getField("age"));
f = doc.getField("age.not_stored");
assertThat(f.name(), equalTo("age.not_stored"));
assertThat(f.numericValue(), equalTo((Number) 28L));
assertThat(f.fieldType().stored(), equalTo(false));
assertThat(f.fieldType().indexed(), equalTo(true));
f = doc.getField("age.stored");
assertThat(f.name(), equalTo("age.stored"));
assertThat(f.numericValue(), equalTo((Number) 28L));
assertThat(f.fieldType().stored(), equalTo(true));
assertThat(f.fieldType().indexed(), equalTo(true));
assertThat(docMapper.mappers().fullName("age").mapper(), notNullValue());
assertThat(docMapper.mappers().fullName("age").mapper(), instanceOf(LongFieldMapper.class));
assertThat(docMapper.mappers().fullName("age").mapper().fieldType().indexed(), equalTo(false));
assertThat(docMapper.mappers().fullName("age").mapper().fieldType().stored(), equalTo(false));
assertThat(docMapper.mappers().fullName("age").mapper().fieldType().tokenized(), equalTo(false));
assertThat(docMapper.mappers().fullName("age.not_stored").mapper(), notNullValue());
assertThat(docMapper.mappers().fullName("age.not_stored").mapper(), instanceOf(LongFieldMapper.class));
assertThat(docMapper.mappers().fullName("age.not_stored").mapper().fieldType().indexed(), equalTo(true));
assertThat(docMapper.mappers().fullName("age.not_stored").mapper().fieldType().stored(), equalTo(false));
assertThat(docMapper.mappers().fullName("age.not_stored").mapper().fieldType().tokenized(), equalTo(false));
assertThat(docMapper.mappers().fullName("age.stored").mapper(), notNullValue());
assertThat(docMapper.mappers().fullName("age.stored").mapper(), instanceOf(LongFieldMapper.class));
assertThat(docMapper.mappers().fullName("age.stored").mapper().fieldType().indexed(), equalTo(true));
assertThat(docMapper.mappers().fullName("age.stored").mapper().fieldType().stored(), equalTo(true));
assertThat(docMapper.mappers().fullName("age.stored").mapper().fieldType().tokenized(), equalTo(false));
}
@Test
public void testConvertMultiFieldGeoPoint() throws Exception {
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/test-multi-field-type-geo_point.json");
DocumentMapper docMapper = MapperTestUtils.newParser().parse(mapping);
assertThat(docMapper.mappers().fullName("a").mapper(), notNullValue());
assertThat(docMapper.mappers().fullName("a").mapper(), instanceOf(StringFieldMapper.class));
assertThat(docMapper.mappers().fullName("a").mapper().fieldType().indexed(), equalTo(true));
assertThat(docMapper.mappers().fullName("a").mapper().fieldType().stored(), equalTo(false));
assertThat(docMapper.mappers().fullName("a").mapper().fieldType().tokenized(), equalTo(false));
assertThat(docMapper.mappers().fullName("a.b").mapper(), notNullValue());
assertThat(docMapper.mappers().fullName("a.b").mapper(), instanceOf(GeoPointFieldMapper.class));
assertThat(docMapper.mappers().fullName("a.b").mapper().fieldType().indexed(), equalTo(true));
assertThat(docMapper.mappers().fullName("a.b").mapper().fieldType().stored(), equalTo(false));
assertThat(docMapper.mappers().fullName("a.b").mapper().fieldType().tokenized(), equalTo(false));
BytesReference json = jsonBuilder().startObject()
.field("_id", "1")
.field("a", "-1,-1")
.endObject().bytes();
Document doc = docMapper.parse(json).rootDoc();
IndexableField f = doc.getField("a");
assertThat(f, notNullValue());
assertThat(f.name(), equalTo("a"));
assertThat(f.stringValue(), equalTo("-1,-1"));
assertThat(f.fieldType().stored(), equalTo(false));
assertThat(f.fieldType().indexed(), equalTo(true));
f = doc.getField("a.b");
assertThat(f, notNullValue());
assertThat(f.name(), equalTo("a.b"));
assertThat(f.stringValue(), equalTo("-1.0,-1.0"));
assertThat(f.fieldType().stored(), equalTo(false));
assertThat(f.fieldType().indexed(), equalTo(true));
assertThat(docMapper.mappers().fullName("b").mapper(), notNullValue());
assertThat(docMapper.mappers().fullName("b").mapper(), instanceOf(GeoPointFieldMapper.class));
assertThat(docMapper.mappers().fullName("b").mapper().fieldType().indexed(), equalTo(true));
assertThat(docMapper.mappers().fullName("b").mapper().fieldType().stored(), equalTo(false));
assertThat(docMapper.mappers().fullName("b").mapper().fieldType().tokenized(), equalTo(false));
assertThat(docMapper.mappers().fullName("b.a").mapper(), notNullValue());
assertThat(docMapper.mappers().fullName("b.a").mapper(), instanceOf(StringFieldMapper.class));
assertThat(docMapper.mappers().fullName("b.a").mapper().fieldType().indexed(), equalTo(true));
assertThat(docMapper.mappers().fullName("b.a").mapper().fieldType().stored(), equalTo(false));
assertThat(docMapper.mappers().fullName("b.a").mapper().fieldType().tokenized(), equalTo(false));
json = jsonBuilder().startObject()
.field("_id", "1")
.field("b", "-1,-1")
.endObject().bytes();
doc = docMapper.parse(json).rootDoc();
f = doc.getField("b");
assertThat(f, notNullValue());
assertThat(f.name(), equalTo("b"));
assertThat(f.stringValue(), equalTo("-1.0,-1.0"));
assertThat(f.fieldType().stored(), equalTo(false));
assertThat(f.fieldType().indexed(), equalTo(true));
f = doc.getField("b.a");
assertThat(f, notNullValue());
assertThat(f.name(), equalTo("b.a"));
assertThat(f.stringValue(), equalTo("-1,-1"));
assertThat(f.fieldType().stored(), equalTo(false));
assertThat(f.fieldType().indexed(), equalTo(true));
json = jsonBuilder().startObject()
.field("_id", "1")
.startArray("b").startArray().value(-1).value(-1).endArray().startArray().value(-2).value(-2).endArray().endArray()
.endObject().bytes();
doc = docMapper.parse(json).rootDoc();
f = doc.getFields("b")[0];
assertThat(f, notNullValue());
assertThat(f.name(), equalTo("b"));
assertThat(f.stringValue(), equalTo("-1.0,-1.0"));
assertThat(f.fieldType().stored(), equalTo(false));
assertThat(f.fieldType().indexed(), equalTo(true));
f = doc.getFields("b")[1];
assertThat(f, notNullValue());
assertThat(f.name(), equalTo("b"));
assertThat(f.stringValue(), equalTo("-2.0,-2.0"));
assertThat(f.fieldType().stored(), equalTo(false));
assertThat(f.fieldType().indexed(), equalTo(true));
f = doc.getField("b.a");
assertThat(f, notNullValue());
assertThat(f.name(), equalTo("b.a"));
// NOTE: "]" B/c the lat,long aren't specified as a string, we miss the actual values when parsing the multi
// fields. We already skipped over the coordinates values and can't get to the coordinates.
// This happens if coordinates are specified as array and object.
assertThat(f.stringValue(), equalTo("]"));
assertThat(f.fieldType().stored(), equalTo(false));
assertThat(f.fieldType().indexed(), equalTo(true));
}
@Test
public void testConvertMultiFieldCompletion() throws Exception {
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/test-multi-field-type-completion.json");
DocumentMapper docMapper = MapperTestUtils.newParser().parse(mapping);
assertThat(docMapper.mappers().fullName("a").mapper(), notNullValue());
assertThat(docMapper.mappers().fullName("a").mapper(), instanceOf(StringFieldMapper.class));
assertThat(docMapper.mappers().fullName("a").mapper().fieldType().indexed(), equalTo(true));
assertThat(docMapper.mappers().fullName("a").mapper().fieldType().stored(), equalTo(false));
assertThat(docMapper.mappers().fullName("a").mapper().fieldType().tokenized(), equalTo(false));
assertThat(docMapper.mappers().fullName("a.b").mapper(), notNullValue());
assertThat(docMapper.mappers().fullName("a.b").mapper(), instanceOf(CompletionFieldMapper.class));
assertThat(docMapper.mappers().fullName("a.b").mapper().fieldType().indexed(), equalTo(true));
assertThat(docMapper.mappers().fullName("a.b").mapper().fieldType().stored(), equalTo(false));
assertThat(docMapper.mappers().fullName("a.b").mapper().fieldType().tokenized(), equalTo(true));
BytesReference json = jsonBuilder().startObject()
.field("_id", "1")
.field("a", "complete me")
.endObject().bytes();
Document doc = docMapper.parse(json).rootDoc();
IndexableField f = doc.getField("a");
assertThat(f, notNullValue());
assertThat(f.name(), equalTo("a"));
assertThat(f.stringValue(), equalTo("complete me"));
assertThat(f.fieldType().stored(), equalTo(false));
assertThat(f.fieldType().indexed(), equalTo(true));
f = doc.getField("a.b");
assertThat(f, notNullValue());
assertThat(f.name(), equalTo("a.b"));
assertThat(f.stringValue(), equalTo("complete me"));
assertThat(f.fieldType().stored(), equalTo(false));
assertThat(f.fieldType().indexed(), equalTo(true));
assertThat(docMapper.mappers().fullName("b").mapper(), notNullValue());
assertThat(docMapper.mappers().fullName("b").mapper(), instanceOf(CompletionFieldMapper.class));
assertThat(docMapper.mappers().fullName("b").mapper().fieldType().indexed(), equalTo(true));
assertThat(docMapper.mappers().fullName("b").mapper().fieldType().stored(), equalTo(false));
assertThat(docMapper.mappers().fullName("b").mapper().fieldType().tokenized(), equalTo(true));
assertThat(docMapper.mappers().fullName("b.a").mapper(), notNullValue());
assertThat(docMapper.mappers().fullName("b.a").mapper(), instanceOf(StringFieldMapper.class));
assertThat(docMapper.mappers().fullName("b.a").mapper().fieldType().indexed(), equalTo(true));
assertThat(docMapper.mappers().fullName("b.a").mapper().fieldType().stored(), equalTo(false));
assertThat(docMapper.mappers().fullName("b.a").mapper().fieldType().tokenized(), equalTo(false));
json = jsonBuilder().startObject()
.field("_id", "1")
.field("b", "complete me")
.endObject().bytes();
doc = docMapper.parse(json).rootDoc();
f = doc.getField("b");
assertThat(f, notNullValue());
assertThat(f.name(), equalTo("b"));
assertThat(f.stringValue(), equalTo("complete me"));
assertThat(f.fieldType().stored(), equalTo(false));
assertThat(f.fieldType().indexed(), equalTo(true));
f = doc.getField("b.a");
assertThat(f, notNullValue());
assertThat(f.name(), equalTo("b.a"));
assertThat(f.stringValue(), equalTo("complete me"));
assertThat(f.fieldType().stored(), equalTo(false));
assertThat(f.fieldType().indexed(), equalTo(true));
}
} | 0true
| src_test_java_org_elasticsearch_index_mapper_multifield_MultiFieldTests.java |
47 | public class VersionCommand extends AbstractTextCommand {
private static final byte[] VERSION = stringToBytes("VERSION Hazelcast\r\n");
protected VersionCommand(TextCommandType type) {
super(type);
}
public boolean writeTo(ByteBuffer destination) {
destination.put(VERSION);
return true;
}
public boolean readFrom(ByteBuffer source) {
return true;
}
} | 0true
| hazelcast_src_main_java_com_hazelcast_ascii_memcache_VersionCommand.java |
/**
 * Engine-internal extension of the public record API. Exposes identity, version
 * and dirty-flag manipulation that must not be part of the user-facing contract.
 *
 * @param <T> the record content type
 */
public interface ORecordInternal<T> extends ORecord<T>, OSerializableStream {
    /**
     * Internal only. Fills in one shot the record with identity, version and raw
     * buffer content, optionally marking it dirty.
     */
    public ORecordAbstract<?> fill(ORID iRid, ORecordVersion iVersion, byte[] iBuffer, boolean iDirty);
    /**
     * Internal only. Changes the identity of the record to the given cluster
     * id/position pair.
     */
    public ORecordAbstract<?> setIdentity(int iClusterId, OClusterPosition iClusterPosition);
    /**
     * Internal only. Changes the identity of the record.
     */
    public ORecordAbstract<?> setIdentity(ORecordId iIdentity);
    /**
     * Internal only. Clears the dirty status of the record without persisting it.
     */
    public void unsetDirty();
    /**
     * Internal only. Sets the record version number.
     */
    public void setVersion(int iVersion);
    /**
     * Internal only. Returns the record type as a byte identifier.
     */
    public byte getRecordType();
    /**
     * Internal only. Executes a flat copy of the record.
     *
     * @see #copy()
     */
    public <RET extends ORecord<T>> RET flatCopy();
    // Registers a listener notified of record changes.
    public void addListener(final ORecordListener iListener);
    // Removes a previously registered listener.
    public void removeListener(final ORecordListener listener);
}
| core_src_main_java_com_orientechnologies_orient_core_record_ORecordInternal.java |
297 | public class TransportActionNodeProxy<Request extends ActionRequest, Response extends ActionResponse> extends AbstractComponent {
protected final TransportService transportService;
private final GenericAction<Request, Response> action;
private final TransportRequestOptions transportOptions;
@Inject
public TransportActionNodeProxy(Settings settings, GenericAction<Request, Response> action, TransportService transportService) {
super(settings);
this.action = action;
this.transportService = transportService;
this.transportOptions = action.transportOptions(settings);
}
public ActionFuture<Response> execute(DiscoveryNode node, Request request) throws ElasticsearchException {
PlainActionFuture<Response> future = newFuture();
request.listenerThreaded(false);
execute(node, request, future);
return future;
}
public void execute(DiscoveryNode node, final Request request, final ActionListener<Response> listener) {
ActionRequestValidationException validationException = request.validate();
if (validationException != null) {
listener.onFailure(validationException);
return;
}
transportService.sendRequest(node, action.name(), request, transportOptions, new BaseTransportResponseHandler<Response>() {
@Override
public Response newInstance() {
return action.newResponse();
}
@Override
public String executor() {
if (request.listenerThreaded()) {
return ThreadPool.Names.GENERIC;
}
return ThreadPool.Names.SAME;
}
@Override
public void handleResponse(Response response) {
listener.onResponse(response);
}
@Override
public void handleException(TransportException exp) {
listener.onFailure(exp);
}
});
}
} | 1no label
| src_main_java_org_elasticsearch_action_TransportActionNodeProxy.java |
1,130 | /**
 * Convenience base class for SQL method implementations: holds the method
 * name and the inclusive min/max parameter counts, and derives the syntax
 * string, natural ordering and raw-parameter resolution from them.
 */
public abstract class OAbstractSQLMethod implements OSQLMethod {

    // Method name as written in SQL; also the sort key (see compareTo).
    private final String name;
    // Inclusive bounds on the number of parameters accepted.
    private final int minparams;
    private final int maxparams;

    /** Creates a method that takes no parameters. */
    public OAbstractSQLMethod(String name) {
        this(name, 0);
    }

    /** Creates a method that takes exactly {@code nbparams} parameters. */
    public OAbstractSQLMethod(String name, int nbparams) {
        this(name, nbparams, nbparams);
    }

    /** Creates a method that takes between {@code minparams} and {@code maxparams} parameters (inclusive). */
    public OAbstractSQLMethod(String name, int minparams, int maxparams) {
        this.name = name;
        this.minparams = minparams;
        this.maxparams = maxparams;
    }

    @Override
    public String getName() {
        return name;
    }

    /**
     * Builds a usage string of the form {@code <field>.name(param1[, param2])};
     * parameters beyond {@code minparams} are rendered inside square brackets.
     */
    @Override
    public String getSyntax() {
        final StringBuilder sb = new StringBuilder("<field>.");
        sb.append(getName());
        sb.append('(');
        for (int i = 0; i < minparams; i++) {
            if (i != 0) {
                sb.append(", ");
            }
            sb.append("param");
            sb.append(i + 1);
        }
        if (minparams != maxparams) {
            sb.append('[');
            for (int i = minparams; i < maxparams; i++) {
                if (i != 0) {
                    sb.append(", ");
                }
                sb.append("param");
                sb.append(i + 1);
            }
            sb.append(']');
        }
        sb.append(')');
        return sb.toString();
    }

    @Override
    public int getMinParams() {
        return minparams;
    }

    @Override
    public int getMaxParams() {
        return maxparams;
    }

    /**
     * Resolves a raw parameter token: a quoted token ('...' or "...") is
     * returned as a plain string with the surrounding quotes stripped;
     * anything else is treated as a field name and read from the record.
     * Returns null for a null token.
     */
    protected Object getParameterValue(final OIdentifiable iRecord, final String iValue) {
        if (iValue == null) {
            return null;
        }

        if (iValue.charAt(0) == '\'' || iValue.charAt(0) == '"') {
            // GET THE VALUE AS STRING
            return iValue.substring(1, iValue.length() - 1);
        }

        // SEARCH FOR FIELD
        return ((ODocument) iRecord.getRecord()).field(iValue);
    }

    /** Orders methods alphabetically by name. */
    @Override
    public int compareTo(OSQLMethod o) {
        return this.getName().compareTo(o.getName());
    }

    @Override
    public String toString() {
        return name;
    }
} | 1no label
| core_src_main_java_com_orientechnologies_orient_core_sql_method_misc_OAbstractSQLMethod.java |
2,834 | // Character class accepting decimal digits only (delegates to Character.isDigit).
// Enum constant of a CharMatcher-style enum whose declaration is outside this view.
DIGIT {
    @Override
    public boolean isTokenChar(int c) {
        return Character.isDigit(c);
    }
}, | 0true
| src_main_java_org_elasticsearch_index_analysis_CharMatcher.java |
3,641 | /**
 * Builder for a geo_shape field mapper: configures the spatial prefix tree
 * (geohash or quadtree), its precision (explicit levels or meters) and the
 * accepted distance error before constructing the mapper.
 */
public static class Builder extends AbstractFieldMapper.Builder<Builder, GeoShapeFieldMapper> {

    private String tree = Defaults.TREE;
    private String strategyName = Defaults.STRATEGY;
    // 0 = unset; reconciled with precisionInMeters and the per-tree default in build().
    private int treeLevels = 0;
    // -1 = unset; when >= 0, precision expressed as a distance drives the tree depth.
    private double precisionInMeters = -1;
    private double distanceErrorPct = Defaults.DISTANCE_ERROR_PCT;

    private SpatialPrefixTree prefixTree;

    public Builder(String name) {
        super(name, new FieldType(Defaults.FIELD_TYPE));
    }

    /** Selects the prefix tree implementation: geohash or quadtree. */
    public Builder tree(String tree) {
        this.tree = tree;
        return this;
    }

    /** Selects the spatial strategy name used by the mapper. */
    public Builder strategy(String strategy) {
        this.strategyName = strategy;
        return this;
    }

    /** Sets the desired precision as a distance in meters. */
    public Builder treeLevelsByDistance(double meters) {
        this.precisionInMeters = meters;
        return this;
    }

    /** Sets the tree depth explicitly (number of prefix-tree levels). */
    public Builder treeLevels(int treeLevels) {
        this.treeLevels = treeLevels;
        return this;
    }

    /** Sets the acceptable distance error percentage. */
    public Builder distanceErrorPct(double distanceErrorPct) {
        this.distanceErrorPct = distanceErrorPct;
        return this;
    }

    @Override
    public GeoShapeFieldMapper build(BuilderContext context) {
        final FieldMapper.Names names = buildNames(context);

        // getLevels() is defined outside this block; presumably it reconciles
        // explicit levels with meter-based precision and the default -- confirm.
        if (Names.TREE_GEOHASH.equals(tree)) {
            prefixTree = new GeohashPrefixTree(ShapeBuilder.SPATIAL_CONTEXT, getLevels(treeLevels, precisionInMeters, Defaults.GEOHASH_LEVELS, true));
        } else if (Names.TREE_QUADTREE.equals(tree)) {
            prefixTree = new QuadPrefixTree(ShapeBuilder.SPATIAL_CONTEXT, getLevels(treeLevels, precisionInMeters, Defaults.QUADTREE_LEVELS, false));
        } else {
            // Any other tree name is a configuration error.
            throw new ElasticsearchIllegalArgumentException("Unknown prefix tree type [" + tree + "]");
        }

        return new GeoShapeFieldMapper(names, prefixTree, strategyName, distanceErrorPct, fieldType, postingsProvider,
                docValuesProvider, multiFieldsBuilder.build(this, context), copyTo);
    }
} | 0true
| src_main_java_org_elasticsearch_index_mapper_geo_GeoShapeFieldMapper.java |
1,502 | /**
 * Reduce side of the commit-vertices job: for each vertex id, combines the
 * vertex payload with markers produced by the map side and drops edges that
 * point at removed vertices.
 *
 * Holder tags (as handled below -- confirm against the map side): 'k' carries
 * the id of a removed neighbor whose edges must be dropped, 'v' carries the
 * surviving vertex, any other tag carries a vertex being removed (its edges
 * are cleared and its lifecycle set to REMOVED).
 */
public static class Reduce extends Reducer<LongWritable, Holder, NullWritable, FaunusVertex> {

    // When true, removed vertices are still emitted so state can be tracked downstream.
    private boolean trackState;

    @Override
    public void setup(final Reducer.Context context) {
        this.trackState = context.getConfiguration().getBoolean(Tokens.TITAN_HADOOP_PIPELINE_TRACK_STATE, false);
    }

    @Override
    public void reduce(final LongWritable key, final Iterable<Holder> values, final Reducer<LongWritable, Holder, NullWritable, FaunusVertex>.Context context) throws IOException, InterruptedException {
        FaunusVertex vertex = null;

        // Ids of removed vertices this vertex has edges to/from.
        final Set<Long> ids = new HashSet<Long>();
        for (final Holder holder : values) {
            final char tag = holder.getTag();
            if (tag == 'k') {
                ids.add(holder.get().getLongId());
                // todo: once vertex is found, do individual removes to save memory
            } else if (tag == 'v') {
                vertex = (FaunusVertex) holder.get();
            } else {
                // Vertex slated for removal: strip all incident edges and mark it removed.
                vertex = (FaunusVertex) holder.get();
                Iterator<Edge> itty = vertex.getEdges(Direction.BOTH).iterator();
                while (itty.hasNext()) {
                    itty.next();
                    itty.remove();
                }
                vertex.updateLifeCycle(ElementLifeCycle.Event.REMOVED);
            }
        }
        if (null != vertex) {
            if (ids.size() > 0)
                vertex.removeEdgesToFrom(ids);
            if (this.trackState)
                context.write(NullWritable.get(), vertex);
            else if (!vertex.isRemoved())
                context.write(NullWritable.get(), vertex);

            DEFAULT_COMPAT.incrementContextCounter(context, Counters.OUT_EDGES_KEPT, Iterables.size(vertex.getEdges(OUT)));
            DEFAULT_COMPAT.incrementContextCounter(context, Counters.IN_EDGES_KEPT, Iterables.size(vertex.getEdges(IN)));
        }
    }
} | 1no label
| titan-hadoop-parent_titan-hadoop-core_src_main_java_com_thinkaurelius_titan_hadoop_mapreduce_sideeffect_CommitVerticesMapReduce.java |
568 | /**
 * Converts the demo-data import SQL (hibernate.hbm2ddl.import_files) into
 * Oracle-compatible statements: rewrites boolean literals to 1/0, splits
 * multi-row {@code INSERT ... VALUES (a),(b)} into single-row inserts, and
 * wraps raw timestamp string literals in the JDBC {@code {ts '...'}} escape.
 */
public class DemoOracleSingleLineSqlCommandExtractor extends SingleLineSqlCommandExtractor {

    private static final SupportLogger LOGGER = SupportLogManager.getLogger("UserOverride", DemoOracleSingleLineSqlCommandExtractor.class);

    private static final String BOOLEANTRUEMATCH = "(?i)(true)";
    private static final String BOOLEANFALSEMATCH = "(?i)(false)";
    // 'yyyy-MM-dd HH:mm:ss' literal not already preceded by "{ts " (negative lookbehind).
    private static final String TIMESTAMPMATCH = "(?<!\\{ts\\s)('\\d{4}-\\d{2}-\\d{2}\\s\\d{2}:\\d{2}:\\d{2}')";
    // Compiled once: the regex is constant and Pattern compilation is relatively expensive.
    private static final Pattern TIMESTAMP_PATTERN = Pattern.compile(TIMESTAMPMATCH);

    public static final String TRUE = "1";
    public static final String FALSE = "0";

    // Guards the one-time informational log line across repeated invocations.
    protected boolean alreadyRun = false;

    @Override
    public String[] extractCommands(Reader reader) {
        if (!alreadyRun) {
            alreadyRun = true;
            LOGGER.support("Converting hibernate.hbm2ddl.import_files sql statements for compatibility with Oracle");
        }

        String[] statements = super.extractCommands(reader);
        for (int j = 0; j < statements.length; j++) {
            // Oracle has no boolean literal; rewrite true/false to 1/0.
            //try start matches
            statements[j] = statements[j].replaceAll(BOOLEANTRUEMATCH + "\\s*[,]", TRUE + ",");
            statements[j] = statements[j].replaceAll(BOOLEANFALSEMATCH + "\\s*[,]", FALSE + ",");
            //try middle matches
            statements[j] = statements[j].replaceAll("[,]\\s*" + BOOLEANTRUEMATCH + "\\s*[,]", "," + TRUE + ",");
            statements[j] = statements[j].replaceAll("[,]\\s*" + BOOLEANFALSEMATCH + "\\s*[,]", "," + FALSE + ",");
            //try end matches
            statements[j] = statements[j].replaceAll("[,]\\s*" + BOOLEANTRUEMATCH, "," + TRUE);
            statements[j] = statements[j].replaceAll("[,]\\s*" + BOOLEANFALSEMATCH, "," + FALSE);
        }

        //remove Oracle incompatible - multi-row inserts
        List<String> stringList = new ArrayList<String>(Arrays.asList(statements)); //Arrays.asList is immutable
        int j = 0;
        for (String statement : statements) {
            if (statement.matches(".*[)]\\s*[,].*")) {
                // Split "INSERT ... VALUES (a),(b),(c)" into one INSERT per row.
                // NOTE(review): assumes any statement matching the pattern above
                // contains a "VALUES " keyword -- confirm for the demo data set.
                int pos = statement.toUpperCase().indexOf("VALUES ") + "VALUES ".length();
                String prefix = statement.substring(0, pos);
                stringList.remove(j);
                String values = statement.substring(pos, statement.length());
                String[] tokens = values.split("[)]\\s*[,]\\s*[(]");
                String[] newStatements = new String[tokens.length];
                for (int i = 0; i < tokens.length; i++) {
                    // The split consumed ") , (", so restore the parentheses at both ends.
                    String suffix = tokens[i];
                    if (!suffix.startsWith("(")) {
                        suffix = "(" + suffix;
                    }
                    if (!suffix.endsWith(")")) {
                        suffix += ")";
                    }
                    newStatements[i] = prefix + suffix;
                }
                stringList.addAll(j, Arrays.asList(newStatements));
                j += tokens.length;
            } else {
                j++;
            }
        }

        //Address raw string dates, if any, for Oracle.
        // Fixed: a single replaceAll pass wraps every match exactly once. The
        // previous find()/indexOf loop re-located each matched date by value in
        // the already-mutated statement, so a timestamp occurring more than once
        // could be double-wrapped ("{ts {ts '...'}}"). The negative lookbehind in
        // TIMESTAMPMATCH still prevents touching literals already escaped.
        statements = stringList.toArray(new String[stringList.size()]);
        for (int x = 0; x < statements.length; x++) {
            statements[x] = TIMESTAMP_PATTERN.matcher(statements[x]).replaceAll("{ts $1}");
        }

        return statements;
    }
} | 0true
| common_src_main_java_org_broadleafcommerce_common_util_sql_importsql_DemoOracleSingleLineSqlCommandExtractor.java |
3,258 | /**
 * A {@link FieldComparator} that can be wrapped for sorting in the presence of
 * documents without a value: implementations must be able to write the
 * "missing" value into a slot and compare it against the current bottom.
 */
public abstract class NestedWrappableComparator<T> extends FieldComparator<T> {

    /**
     * Assigns the underlying missing value to the specified slot, if the actual implementation supports missing value.
     *
     * @param slot The slot to assign the missing value to.
     */
    public abstract void missing(int slot);

    /**
     * Compares the missing value to the bottom.
     *
     * @return any N &lt; 0 if the bottom value is not competitive with the missing value, any N &gt; 0 if the
     *         bottom value is competitive with the missing value and 0 if they are equal.
     */
    public abstract int compareBottomMissing();
} | 0true
| src_main_java_org_elasticsearch_index_fielddata_fieldcomparator_NestedWrappableComparator.java |
2 | // Privileged lookup of the sun.misc.Unsafe singleton: scans Unsafe's static
// fields for an instance instead of calling Unsafe.getUnsafe(), which rejects
// callers not loaded by the bootstrap class loader. Throws NoSuchFieldError
// if no instance field is found.
(new java.security.PrivilegedExceptionAction<sun.misc.Unsafe>() {
    public sun.misc.Unsafe run() throws Exception {
        Class<sun.misc.Unsafe> k = sun.misc.Unsafe.class;
        for (java.lang.reflect.Field f : k.getDeclaredFields()) {
            f.setAccessible(true);
            Object x = f.get(null);
            if (k.isInstance(x))
                return k.cast(x);
        }
        throw new NoSuchFieldError("the Unsafe");
    }}); | 0true
| src_main_java_jsr166e_CompletableFuture.java |
1,631 | /**
 * Setting validator for byte-size values (e.g. "10mb"): returns the parse
 * error message when the value is malformed, or null when it is valid
 * (a non-null return is the validation failure, per the Validator contract).
 */
public static final Validator BYTES_SIZE = new Validator() {
    @Override
    public String validate(String setting, String value) {
        try {
            parseBytesSizeValue(value);
        } catch (ElasticsearchParseException ex) {
            return ex.getMessage();
        }
        return null;
    }
}; | 0true
| src_main_java_org_elasticsearch_cluster_settings_Validator.java |
1,101 | /**
 * Result of merging an anonymous cart into a customer's cart: the resulting
 * order, the items the merge added and removed, and whether a merge actually
 * took place.
 */
public class MergeCartResponse implements Serializable {

    private static final long serialVersionUID = 1L;

    // The order resulting from the merge.
    private Order order;

    // Fixed: removed the stray empty statements (";;") that followed these initializers.
    private List<OrderItem> addedItems = new ArrayList<OrderItem>();
    private List<OrderItem> removedItems = new ArrayList<OrderItem>();

    // True when a merge was actually performed.
    private boolean merged;

    public Order getOrder() {
        return order;
    }

    public void setOrder(Order order) {
        this.order = order;
    }

    /** @return items added to the cart by the merge (never null by default). */
    public List<OrderItem> getAddedItems() {
        return addedItems;
    }

    public void setAddedItems(List<OrderItem> addedItems) {
        this.addedItems = addedItems;
    }

    /** @return items removed from the cart by the merge (never null by default). */
    public List<OrderItem> getRemovedItems() {
        return removedItems;
    }

    public void setRemovedItems(List<OrderItem> removedItems) {
        this.removedItems = removedItems;
    }

    public boolean isMerged() {
        return merged;
    }

    public void setMerged(boolean merged) {
        this.merged = merged;
    }
} | 0true
| core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_order_service_call_MergeCartResponse.java |
946 | /**
 * Base request for master-node read operations that may optionally be served
 * from the local node's cluster state instead of the elected master.
 */
public abstract class MasterNodeReadOperationRequest<T extends MasterNodeReadOperationRequest> extends MasterNodeOperationRequest<T> {

    protected boolean local = false;

    /** Sets whether the request may be answered locally; returns this for chaining. */
    @SuppressWarnings("unchecked")
    public final T local(boolean local) {
        this.local = local;
        return (T) this;
    }

    public final boolean local() {
        return local;
    }

    /**
     * Reads the local flag
     */
    protected void readLocal(StreamInput in) throws IOException {
        readLocal(in, null);
    }

    /**
     * Reads the local flag if on or after the specified min version or if the version is <code>null</code>.
     */
    protected void readLocal(StreamInput in, Version minVersion) throws IOException {
        // Version gate keeps the wire format compatible with nodes predating the flag.
        if (minVersion == null || in.getVersion().onOrAfter(minVersion)) {
            local = in.readBoolean();
        }
    }

    /**
     * writes the local flag
     */
    protected void writeLocal(StreamOutput out) throws IOException {
        writeLocal(out, null);
    }

    /**
     * writes the local flag if on or after the specified min version or if the version is <code>null</code>.
     */
    protected void writeLocal(StreamOutput out, Version minVersion) throws IOException {
        if (minVersion == null || out.getVersion().onOrAfter(minVersion)) {
            out.writeBoolean(local);
        }
    }
} | 0true
| src_main_java_org_elasticsearch_action_support_master_MasterNodeReadOperationRequest.java |
1,397 | /**
 * JAXB wrapper exposing one map entry as an {@code <element>} node with
 * {@code key} and {@code value} children, used when serializing maps over
 * the API. Field order is significant for the generated XML schema.
 */
@XmlRootElement(name = "element")
@XmlAccessorType(value = XmlAccessType.FIELD)
public class MapElementWrapper {

    @XmlElement
    protected String key;

    @XmlElement
    protected String value;

    /** @return the entry key */
    public String getKey() {
        return this.key;
    }

    /** @return the entry value */
    public String getValue() {
        return this.value;
    }

    /** @param key the entry key */
    public void setKey(String key) {
        this.key = key;
    }

    /** @param value the entry value */
    public void setValue(String value) {
        this.value = value;
    }
} | 0true
| core_broadleaf-framework-web_src_main_java_org_broadleafcommerce_core_web_api_wrapper_MapElementWrapper.java |
489 | /**
 * Client-side cluster listener thread: owns the connection used to receive
 * membership events, loads the initial member list, keeps the shared members
 * reference current, and fires membership/attribute events to registered
 * listeners. On any failure it drops the connection and reconnects (possibly
 * to another member). The latch is released once the first member list has
 * been loaded, or startup failed, unblocking {@link #await()}.
 */
private final class ClusterListenerThread extends Thread {

    private volatile ClientConnection conn;
    // Last known member list, in cluster order; only mutated by this thread.
    private final List<MemberImpl> members = new LinkedList<MemberImpl>();
    private final CountDownLatch latch = new CountDownLatch(1);

    private ClusterListenerThread(ThreadGroup group, String name) {
        super(group, name);
    }

    /** Blocks until the initial member list is available or startup failed. */
    public void await() throws InterruptedException {
        latch.await();
    }

    public void run() {
        while (!Thread.currentThread().isInterrupted()) {
            try {
                if (conn == null) {
                    try {
                        conn = pickConnection();
                    } catch (Exception e) {
                        // No member reachable at all: shut the client down and release waiters.
                        LOGGER.severe("Error while connecting to cluster!", e);
                        client.getLifecycleService().shutdown();
                        latch.countDown();
                        return;
                    }
                }
                getInvocationService().triggerFailedListeners();
                loadInitialMemberList();
                // Blocks for the lifetime of the connection; exits only by throwing.
                listenMembershipEvents();
            } catch (Exception e) {
                if (client.getLifecycleService().isRunning()) {
                    if (LOGGER.isFinestEnabled()) {
                        LOGGER.warning("Error while listening cluster events! -> " + conn, e);
                    } else {
                        LOGGER.warning("Error while listening cluster events! -> " + conn + ", Error: " + e.toString());
                    }
                }
                // Drop the broken connection; the loop will pick a new one after a pause.
                connectionManager.markOwnerConnectionAsClosed();
                IOUtil.closeResource(conn);
                conn = null;
                fireConnectionEvent(true);
            }
            try {
                Thread.sleep(SLEEP_TIME);
            } catch (InterruptedException e) {
                latch.countDown();
                break;
            }
        }
    }

    private ClientInvocationServiceImpl getInvocationService() {
        return (ClientInvocationServiceImpl) client.getInvocationService();
    }

    /** Tries known members first (shuffled for load spreading), then the configured addresses. */
    private ClientConnection pickConnection() throws Exception {
        final List<InetSocketAddress> socketAddresses = new LinkedList<InetSocketAddress>();
        if (!members.isEmpty()) {
            for (MemberImpl member : members) {
                socketAddresses.add(member.getInetSocketAddress());
            }
            Collections.shuffle(socketAddresses);
        }
        socketAddresses.addAll(getConfigAddresses());
        return connectToOne(socketAddresses);
    }

    /**
     * Fetches the full member list, replaces the local view, fires
     * ADDED/REMOVED events for the diff against the previous view, and
     * releases the startup latch.
     */
    private void loadInitialMemberList() throws Exception {
        final SerializationService serializationService = getSerializationService();
        final AddMembershipListenerRequest request = new AddMembershipListenerRequest();
        final SerializableCollection coll = (SerializableCollection) connectionManager.sendAndReceive(request, conn);

        Map<String, MemberImpl> prevMembers = Collections.emptyMap();
        if (!members.isEmpty()) {
            prevMembers = new HashMap<String, MemberImpl>(members.size());
            for (MemberImpl member : members) {
                prevMembers.put(member.getUuid(), member);
            }
            members.clear();
        }
        for (Data data : coll) {
            members.add((MemberImpl) serializationService.toObject(data));
        }
        updateMembersRef();
        LOGGER.info(membersString());
        final List<MembershipEvent> events = new LinkedList<MembershipEvent>();
        final Set<Member> eventMembers = Collections.unmodifiableSet(new LinkedHashSet<Member>(members));
        for (MemberImpl member : members) {
            // Present now but not before -> ADDED.
            final MemberImpl former = prevMembers.remove(member.getUuid());
            if (former == null) {
                events.add(new MembershipEvent(client.getCluster(), member, MembershipEvent.MEMBER_ADDED, eventMembers));
            }
        }
        // Anything left over disappeared from the cluster -> REMOVED.
        for (MemberImpl member : prevMembers.values()) {
            events.add(new MembershipEvent(client.getCluster(), member, MembershipEvent.MEMBER_REMOVED, eventMembers));
        }
        for (MembershipEvent event : events) {
            fireMembershipEvent(event);
        }
        latch.countDown();
    }

    /** Blocks on the listener connection, applying each membership event as it arrives. */
    private void listenMembershipEvents() throws IOException {
        final SerializationService serializationService = getSerializationService();
        while (!Thread.currentThread().isInterrupted()) {
            final Data clientResponseData = conn.read();
            final ClientResponse clientResponse = serializationService.toObject(clientResponseData);
            final Object eventObject = serializationService.toObject(clientResponse.getResponse());
            final ClientMembershipEvent event = (ClientMembershipEvent) eventObject;
            final MemberImpl member = (MemberImpl) event.getMember();
            boolean membersUpdated = false;
            if (event.getEventType() == MembershipEvent.MEMBER_ADDED) {
                members.add(member);
                membersUpdated = true;
            } else if (event.getEventType() == ClientMembershipEvent.MEMBER_REMOVED) {
                members.remove(member);
                membersUpdated = true;
                // getConnectionManager().removeConnectionPool(member.getAddress()); //TODO
            } else if (event.getEventType() == ClientMembershipEvent.MEMBER_ATTRIBUTE_CHANGED) {
                // Attribute changes mutate the matching member in place; the member
                // list itself is unchanged, so no refresh is needed.
                MemberAttributeChange memberAttributeChange = event.getMemberAttributeChange();
                Map<Address, MemberImpl> memberMap = membersRef.get();
                if (memberMap != null) {
                    for (MemberImpl target : memberMap.values()) {
                        if (target.getUuid().equals(memberAttributeChange.getUuid())) {
                            final MemberAttributeOperationType operationType = memberAttributeChange.getOperationType();
                            final String key = memberAttributeChange.getKey();
                            final Object value = memberAttributeChange.getValue();
                            target.updateAttribute(operationType, key, value);
                            MemberAttributeEvent memberAttributeEvent = new MemberAttributeEvent(
                                    client.getCluster(), target, operationType, key, value);
                            fireMemberAttributeEvent(memberAttributeEvent);
                            break;
                        }
                    }
                }
            }
            if (membersUpdated) {
                ((ClientPartitionServiceImpl) client.getClientPartitionService()).refreshPartitions();
                updateMembersRef();
                LOGGER.info(membersString());
                fireMembershipEvent(new MembershipEvent(client.getCluster(), member, event.getEventType(),
                        Collections.unmodifiableSet(new LinkedHashSet<Member>(members))));
            }
        }
    }

    /** Dispatches a membership event asynchronously to all registered listeners. */
    private void fireMembershipEvent(final MembershipEvent event) {
        client.getClientExecutionService().executeInternal(new Runnable() {
            public void run() {
                for (MembershipListener listener : listeners.values()) {
                    if (event.getEventType() == MembershipEvent.MEMBER_ADDED) {
                        listener.memberAdded(event);
                    } else {
                        listener.memberRemoved(event);
                    }
                }
            }
        });
    }

    /** Dispatches an attribute-change event asynchronously to all registered listeners. */
    private void fireMemberAttributeEvent(final MemberAttributeEvent event) {
        client.getClientExecutionService().executeInternal(new Runnable() {
            @Override
            public void run() {
                for (MembershipListener listener : listeners.values()) {
                    listener.memberAttributeChanged(event);
                }
            }
        });
    }

    /** Publishes an immutable address->member snapshot for readers on other threads. */
    private void updateMembersRef() {
        final Map<Address, MemberImpl> map = new LinkedHashMap<Address, MemberImpl>(members.size());
        for (MemberImpl member : members) {
            map.put(member.getAddress(), member);
        }
        membersRef.set(Collections.unmodifiableMap(map));
    }

    /** Interrupts the thread and closes the listener connection. */
    void shutdown() {
        interrupt();
        final ClientConnection c = conn;
        if (c != null) {
            c.close();
        }
    }
} | 1no label
| hazelcast-client_src_main_java_com_hazelcast_client_spi_impl_ClientClusterServiceImpl.java |
298 | /**
 * AST visitor that locates the smallest node whose source range strictly
 * "expands" the selection [startOffset, endOffset): traversal is depth-first
 * and every qualifying node overwrites {@code result}, so the last (deepest)
 * match wins.
 */
private static class EnclosingVisitor extends Visitor implements NaturalVisitor {
    // Deepest node seen so far that strictly contains the selection.
    private Node result;
    private int startOffset;
    private int endOffset;

    private EnclosingVisitor(int startOffset, int endOffset) {
        this.startOffset = startOffset;
        this.endOffset = endOffset;
    }

    // True when the node's range contains the selection and is strictly larger
    // on at least one side (so selecting it would actually grow the selection).
    // Nodes with unknown ranges never qualify.
    private boolean expandsSelection(Node that) {
        Integer nodeStart = that.getStartIndex();
        Integer nodeStop = that.getStopIndex();
        if (nodeStart!=null && nodeStop!=null) {
            return nodeStart<startOffset && nodeStop+1>=endOffset ||
                    nodeStart<=startOffset && nodeStop+1>endOffset;
        }
        else {
            return false;
        }
    }

    // Each override below records the node if it expands the selection, then
    // continues descending so deeper matches replace shallower ones.
    @Override
    public void visit(CompilationUnit that) {
        if (expandsSelection(that)) {
            result = that;
        }
        super.visit(that);
    }

    @Override
    public void visit(Body that) {
        if (expandsSelection(that)) {
            result = that;
        }
        super.visit(that);
    }

    @Override
    public void visit(ArgumentList that) {
        if (expandsSelection(that)) {
            result = that;
        }
        super.visit(that);
    }

    @Override
    public void visit(ParameterList that) {
        if (expandsSelection(that)) {
            result = that;
        }
        super.visit(that);
    }

    @Override
    public void visit(ControlClause that) {
        if (expandsSelection(that)) {
            result = that;
        }
        super.visit(that);
    }

    @Override
    public void visit(ConditionList that) {
        if (expandsSelection(that)) {
            result = that;
        }
        super.visit(that);
    }

    @Override
    public void visit(Condition that) {
        if (expandsSelection(that)) {
            result = that;
        }
        super.visit(that);
    }

    @Override
    public void visit(Type that) {
        if (expandsSelection(that)) {
            result = that;
        }
        super.visit(that);
    }

    @Override
    public void visit(Identifier that) {
        if (expandsSelection(that)) {
            result = that;
        }
        super.visit(that);
    }

    @Override
    public void visit(Term that) {
        if (expandsSelection(that)) {
            result = that;
        }
        super.visit(that);
    }

    @Override
    public void visit(ImportMemberOrTypeList that) {
        if (expandsSelection(that)) {
            result = that;
        }
        super.visit(that);
    }

    @Override
    public void visit(ImportMemberOrType that) {
        if (expandsSelection(that)) {
            result = that;
        }
        super.visit(that);
    }

    @Override
    public void visit(SpecifierOrInitializerExpression that) {
        if (expandsSelection(that)) {
            result = that;
        }
        super.visit(that);
    }

    @Override
    public void visit(Expression that) {
        if (expandsSelection(that)) {
            result = that;
        }
        super.visit(that);
    }

    @Override
    public void visit(StatementOrArgument that) {
        if (expandsSelection(that)) {
            result = that;
        }
        super.visit(that);
    }
} | 0true
| plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_editor_SelectEnclosingAction.java |
/**
 * Helpers for lenient parsing of boolean-like text. The only spellings
 * treated as false are "false", "0", "off" and "no" (case-sensitive);
 * every other non-empty value parses as true.
 */
public class Booleans {

    /**
     * Lenient parse of a character range: returns {@code defaultValue} for a
     * null array or zero length, {@code false} only for the exact texts
     * "0", "no", "off" and "false", and {@code true} for anything else.
     */
    public static boolean parseBoolean(char[] text, int offset, int length, boolean defaultValue) {
        if (text == null || length == 0) {
            return defaultValue;
        }
        switch (new String(text, offset, length)) {
            case "0":
            case "no":
            case "off":
            case "false":
                return false;
            default:
                return true;
        }
    }

    /**
     * returns true if the a sequence of chars is one of "true","false","on","off","yes","no","0","1"
     *
     * @param text sequence to check
     * @param offset offset to start
     * @param length length to check
     */
    public static boolean isBoolean(char[] text, int offset, int length) {
        if (text == null || length == 0) {
            return false;
        }
        switch (new String(text, offset, length)) {
            case "0":
            case "1":
            case "no":
            case "on":
            case "off":
            case "yes":
            case "true":
            case "false":
                return true;
            default:
                return false;
        }
    }

    /** Lenient parse: {@code defaultValue} for null, otherwise false only for the explicit false spellings. */
    public static boolean parseBoolean(String value, boolean defaultValue) {
        return value == null ? defaultValue : !isExplicitFalse(value);
    }

    /** Boxed variant of {@link #parseBoolean(String, boolean)}; preserves a null default. */
    public static Boolean parseBoolean(String value, Boolean defaultValue) {
        if (value == null) {
            return defaultValue;
        }
        return !isExplicitFalse(value);
    }

    /** True only for "false", "0", "off" or "no"; throws NullPointerException on null. */
    public static boolean isExplicitFalse(String value) {
        switch (value) {
            case "false":
            case "0":
            case "off":
            case "no":
                return true;
            default:
                return false;
        }
    }

    /** True only for "true", "1", "on" or "yes"; throws NullPointerException on null. */
    public static boolean isExplicitTrue(String value) {
        switch (value) {
            case "true":
            case "1":
            case "on":
            case "yes":
                return true;
            default:
                return false;
        }
    }
}
| src_main_java_org_elasticsearch_common_Booleans.java |
144 | // Log prune strategy that never removes anything: prune() is a no-op.
// Anonymous body of an enum/constant declared outside this view.
{
    @Override
    public void prune( LogLoader source )
    { // Don't prune logs at all.
    }

    @Override
    public String toString()
    {
        return "NO_PRUNING";
    }
}; | 0true
| community_kernel_src_main_java_org_neo4j_kernel_impl_transaction_xaframework_LogPruneStrategies.java |
603 | /**
 * Internal (engine-facing) contract of an index: extends the public OIndex
 * API with configuration load/store, indexed-cluster bookkeeping,
 * freeze/release support and transaction hooks.
 */
public interface OIndexInternal<T> extends OIndex<T>, Iterable<Entry<Object, T>>, ODatabaseListener {

    // Field names used in the index configuration document.
    public static final String CONFIG_KEYTYPE = "keyType";
    public static final String CONFIG_AUTOMATIC = "automatic";
    public static final String CONFIG_TYPE = "type";
    public static final String ALGORITHM = "algorithm";
    public static final String VALUE_CONTAINER_ALGORITHM = "valueContainerAlgorithm";
    public static final String CONFIG_NAME = "name";
    public static final String INDEX_DEFINITION = "indexDefinition";
    public static final String INDEX_DEFINITION_CLASS = "indexDefinitionClass";

    /**
     * Loads the index giving the configuration.
     *
     * @param iConfig
     *          ODocument instance containing the configuration
     *
     */
    public boolean loadFromConfiguration(ODocument iConfig);

    /**
     * Saves the index configuration to disk.
     *
     * @return The configuration as ODocument instance
     * @see #getConfiguration()
     */
    public ODocument updateConfiguration();

    /**
     * Add given cluster to the list of clusters that should be automatically indexed.
     *
     * @param iClusterName
     *          Cluster to add.
     * @return Current index instance.
     */
    public OIndex<T> addCluster(final String iClusterName);

    /**
     * Remove given cluster from the list of clusters that should be automatically indexed.
     *
     * @param iClusterName
     *          Cluster to remove.
     * @return Current index instance.
     */
    public OIndex<T> removeCluster(final String iClusterName);

    /**
     * Indicates whether given index can be used to calculate result of
     * {@link com.orientechnologies.orient.core.sql.operator.OQueryOperatorEquality} operators.
     *
     * @return {@code true} if given index can be used to calculate result of
     *         {@link com.orientechnologies.orient.core.sql.operator.OQueryOperatorEquality} operators.
     *
     */
    public boolean canBeUsedInEqualityOperators();

    /** @return {@code true} if the index can serve range queries. */
    public boolean hasRangeQuerySupport();

    /**
     * Prohibit index modifications. Only index read commands are allowed after this call.
     *
     * @param throwException
     *          If <code>true</code> {@link com.orientechnologies.common.concur.lock.OModificationOperationProhibitedException}
     *          exception will be thrown in case of write command will be performed.
     */
    public void freeze(boolean throwException);

    /**
     * Allow any index modifications. Is called after {@link #freeze(boolean)} command.
     */
    public void release();

    /**
     * Is used to indicate that several index changes are going to be seen as single unit from users point of view. This command is
     * used with conjunction of {@link #freeze(boolean)} command.
     */
    public void acquireModificationLock();

    /**
     * Is used to indicate that several index changes are going to be seen as single unit from users point of view were completed.
     */
    public void releaseModificationLock();

    /** Extracts name, definition, clusters, type and algorithms from a configuration document. */
    public IndexMetadata loadMetadata(ODocument iConfig);

    /** Marks the index as currently being rebuilt. */
    public void setRebuildingFlag();

    /** Releases resources held by the index. */
    public void close();

    /** @return the indexing algorithm name (see {@link #ALGORITHM}). */
    public String getAlgorithm();

    // Transaction lifecycle hooks, invoked in order:
    // preCommit() -> addTxOperation(...) per change -> commit() -> postCommit().
    public void preCommit();

    void addTxOperation(ODocument operationDocument);

    public void commit();

    public void postCommit();

    /**
     * Immutable value object describing an index: identity (name), definition,
     * indexed clusters and algorithm choices. Note that equals()/hashCode()
     * deliberately ignore {@code valueContainerAlgorithm}.
     */
    public final class IndexMetadata {
        private final String name;
        private final OIndexDefinition indexDefinition;
        private final Set<String> clustersToIndex;
        private final String type;
        private final String algorithm;
        private final String valueContainerAlgorithm;

        public IndexMetadata(String name, OIndexDefinition indexDefinition, Set<String> clustersToIndex, String type, String algorithm,
                String valueContainerAlgorithm) {
            this.name = name;
            this.indexDefinition = indexDefinition;
            this.clustersToIndex = clustersToIndex;
            this.type = type;
            this.algorithm = algorithm;
            this.valueContainerAlgorithm = valueContainerAlgorithm;
        }

        public String getName() {
            return name;
        }

        public OIndexDefinition getIndexDefinition() {
            return indexDefinition;
        }

        public Set<String> getClustersToIndex() {
            return clustersToIndex;
        }

        public String getType() {
            return type;
        }

        public String getAlgorithm() {
            return algorithm;
        }

        @Override
        public boolean equals(Object o) {
            if (this == o)
                return true;
            if (o == null || getClass() != o.getClass())
                return false;

            IndexMetadata that = (IndexMetadata) o;

            if (algorithm != null ? !algorithm.equals(that.algorithm) : that.algorithm != null)
                return false;
            if (!clustersToIndex.equals(that.clustersToIndex))
                return false;
            if (indexDefinition != null ? !indexDefinition.equals(that.indexDefinition) : that.indexDefinition != null)
                return false;
            if (!name.equals(that.name))
                return false;
            if (!type.equals(that.type))
                return false;

            return true;
        }

        @Override
        public int hashCode() {
            int result = name.hashCode();
            result = 31 * result + (indexDefinition != null ? indexDefinition.hashCode() : 0);
            result = 31 * result + clustersToIndex.hashCode();
            result = 31 * result + type.hashCode();
            result = 31 * result + (algorithm != null ? algorithm.hashCode() : 0);
            return result;
        }

        public String getValueContainerAlgorithm() {
            return valueContainerAlgorithm;
        }
    }
} | 0true
| core_src_main_java_com_orientechnologies_orient_core_index_OIndexInternal.java |
48 | // Mutable int holder. NOTE(review): in ConcurrentHashMapV8 this presumably
// carries a per-thread probe hash used to index striped counter cells --
// confirm against the enclosing class.
static final class CounterHashCode {
    int code;
} | 0true
| src_main_java_jsr166e_ConcurrentHashMapV8.java |
1,639 | // Response dispatcher: a dedicated thread draining the node response queue
// and routing each distributed response to the thread waiting on it. Exits on
// interrupt; any other failure is logged and the loop keeps running so one bad
// response cannot kill the dispatcher.
new Thread(new Runnable() {
    @Override
    public void run() {
        while (!Thread.interrupted()) {
            String senderNode = null;
            ODistributedResponse message = null;
            try {
                // Blocks until a response arrives.
                message = nodeResponseQueue.take();
                if (message != null) {
                    senderNode = message.getSenderNodeName();
                    dispatchResponseToThread(message);
                }

            } catch (InterruptedException e) {
                // EXIT CURRENT THREAD
                Thread.interrupted();
                break;
            } catch (Throwable e) {
                ODistributedServerLog.error(this, manager.getLocalNodeName(), senderNode, DIRECTION.IN,
                        "error on reading distributed response", e, message != null ? message.getPayload() : "-");
            }
        }
    }
}).start(); | 1no label
| distributed_src_main_java_com_orientechnologies_orient_server_hazelcast_OHazelcastDistributedMessageService.java |
904 | /**
 * WaitNotifyKey identifying one condition of a distributed lock: the lock
 * name, the partition key of the locked entry and the condition id.
 * <p>
 * NOTE(review): equals()/hashCode() are based on {@code key} and
 * {@code conditionId} only and ignore {@code name} -- confirm that condition
 * ids never collide across lock names.
 */
public final class ConditionKey implements WaitNotifyKey {

    private final String name;
    private final Data key;
    private final String conditionId;

    public ConditionKey(String name, Data key, String conditionId) {
        this.name = name;
        this.key = key;
        this.conditionId = conditionId;
    }

    @Override
    public String getServiceName() {
        return LockServiceImpl.SERVICE_NAME;
    }

    @Override
    public String getObjectName() {
        return name;
    }

    public Data getKey() {
        return key;
    }

    public String getConditionId() {
        return conditionId;
    }

    @Override
    public boolean equals(Object o) {
        if (o == this) {
            return true;
        }
        // Also covers o == null; the class is final, so instanceof is
        // equivalent to the usual getClass() comparison.
        if (!(o instanceof ConditionKey)) {
            return false;
        }
        ConditionKey other = (ConditionKey) o;
        boolean sameKey = (key == null) ? (other.key == null) : key.equals(other.key);
        if (!sameKey) {
            return false;
        }
        return (conditionId == null) ? (other.conditionId == null) : conditionId.equals(other.conditionId);
    }

    @Override
    public int hashCode() {
        int result = (key == null) ? 0 : key.hashCode();
        result = 31 * result + ((conditionId == null) ? 0 : conditionId.hashCode());
        return result;
    }
} | 0true
| hazelcast_src_main_java_com_hazelcast_concurrent_lock_ConditionKey.java |
476 | /**
 * Base class for caching wrappers around a KeyColumnValueStore: mutations must
 * go through {@link #mutateEntries} (routed via CacheTransaction so the cache
 * can be invalidated), while the getSliceNoCache methods bypass the cache and
 * read the backing store directly. Subclasses define storage and invalidation.
 */
public abstract class KCVSCache extends KCVSProxy {

    // Sentinel for "no deletions" in mutateEntries calls.
    public static final List<Entry> NO_DELETIONS = ImmutableList.of();

    // Metrics group name; null disables cache metrics entirely.
    private final String metricsName;
    private final boolean validateKeysOnly = true;

    protected KCVSCache(KeyColumnValueStore store, String metricsName) {
        super(store);
        this.metricsName = metricsName;
    }

    protected boolean hasValidateKeysOnly() {
        return validateKeysOnly;
    }

    /** Increments the given cache-metric counter by {@code by} when metrics are enabled. */
    protected void incActionBy(int by, CacheMetricsAction action, StoreTransaction txh) {
        assert by>=1;
        if (metricsName!=null && txh.getConfiguration().hasGroupName()) {
            MetricManager.INSTANCE.getCounter(txh.getConfiguration().getGroupName(), metricsName, action.getName()).inc(by);
        }
    }

    /** Drops all cached entries. */
    public abstract void clearCache();

    /** Invalidates the cached entries of {@code key}. */
    protected abstract void invalidate(StaticBuffer key, List<CachableStaticBuffer> entries);

    /**
     * Disabled: deletions here carry only column buffers, which is not enough
     * information for cache invalidation -- callers must use mutateEntries().
     */
    @Override
    public void mutate(StaticBuffer key, List<Entry> additions, List<StaticBuffer> deletions, StoreTransaction txh) throws BackendException {
        throw new UnsupportedOperationException("Only supports mutateEntries()");
    }

    /** Routes the mutation through the CacheTransaction, which applies and invalidates. */
    public void mutateEntries(StaticBuffer key, List<Entry> additions, List<Entry> deletions, StoreTransaction txh) throws BackendException {
        assert txh instanceof CacheTransaction;
        ((CacheTransaction) txh).mutate(this, key, additions, deletions);
    }

    /** Unwraps the cache transaction to the underlying store transaction. */
    @Override
    protected final StoreTransaction unwrapTx(StoreTransaction txh) {
        assert txh instanceof CacheTransaction;
        return ((CacheTransaction) txh).getWrappedTransaction();
    }

    /** Reads a slice directly from the backing store, bypassing the cache. */
    public EntryList getSliceNoCache(KeySliceQuery query, StoreTransaction txh) throws BackendException {
        return store.getSlice(query,unwrapTx(txh));
    }

    /** Multi-key variant of {@link #getSliceNoCache(KeySliceQuery, StoreTransaction)}. */
    public Map<StaticBuffer, EntryList> getSliceNoCache(List<StaticBuffer> keys, SliceQuery query, StoreTransaction txh) throws BackendException {
        return store.getSlice(keys,query,unwrapTx(txh));
    }
} | 0true
| titan-core_src_main_java_com_thinkaurelius_titan_diskstorage_keycolumnvalue_cache_KCVSCache.java |
1,870 | // Test fragment: transactional put() must return the previous value within the
// same transaction -- each put observes the value written by the preceding one,
// and repeated puts on a single key keep the transactional map size at 1.
boolean b = h1.executeTransaction(options, new TransactionalTask<Boolean>() {
    public Boolean execute(TransactionalTaskContext context) throws TransactionException {
        final TransactionalMap<Object, Object> txMap = context.getMap("default");
        txMap.put("1", "value1");
        assertEquals("value1", txMap.put("1", "value2"));
        assertEquals("value2", txMap.put("1", "value3"));
        assertEquals("value3", txMap.put("1", "value4"));
        assertEquals("value4", txMap.put("1", "value5"));
        assertEquals("value5", txMap.put("1", "value6"));
        assertEquals(1, txMap.size());
        return true;
    }
}); | 0true
| hazelcast_src_test_java_com_hazelcast_map_MapTransactionTest.java |
675 | // Orders category facets by their configured sequence, ascending.
// NOTE(review): throws NullPointerException if a facet's sequence is null --
// confirm sequence is always populated for persisted facets.
protected static Comparator<CategorySearchFacet> facetPositionComparator = new Comparator<CategorySearchFacet>() {
    @Override
    public int compare(CategorySearchFacet o1, CategorySearchFacet o2) {
        return o1.getSequence().compareTo(o2.getSequence());
    }
}; | 0true
| core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_catalog_domain_CategoryImpl.java |
838 | LINK("Link", 13, new Class<?>[] { Object.class, ORecordId.class }, new Class<?>[] { ORecord.class, ORID.class }) {
}, | 0true
| core_src_main_java_com_orientechnologies_orient_core_metadata_schema_OType.java |
1,673 | @Repository("blAdminUserDao")
public class AdminUserDaoImpl implements AdminUserDao {
@PersistenceContext(unitName = "blPU")
protected EntityManager em;
@Resource(name="blEntityConfiguration")
protected EntityConfiguration entityConfiguration;
public void deleteAdminUser(AdminUser user) {
if (!em.contains(user)) {
user = em.find(entityConfiguration.lookupEntityClass("org.broadleafcommerce.openadmin.server.security.domain.AdminUser", AdminUser.class), user.getId());
}
em.remove(user);
}
public AdminUser readAdminUserById(Long id) {
return em.find(entityConfiguration.lookupEntityClass("org.broadleafcommerce.openadmin.server.security.domain.AdminUser", AdminUser.class), id);
}
public AdminUser saveAdminUser(AdminUser user) {
if (em.contains(user) || user.getId() != null) {
return em.merge(user);
} else {
em.persist(user);
return user;
}
}
public AdminUser readAdminUserByUserName(String userName) {
TypedQuery<AdminUser> query = em.createNamedQuery("BC_READ_ADMIN_USER_BY_USERNAME", AdminUser.class);
query.setHint(QueryHints.HINT_CACHEABLE, true);
query.setParameter("userName", userName);
List<AdminUser> users = query.getResultList();
if (users != null && !users.isEmpty()) {
return users.get(0);
}
return null;
}
public List<AdminUser> readAllAdminUsers() {
TypedQuery<AdminUser> query = em.createNamedQuery("BC_READ_ALL_ADMIN_USERS", AdminUser.class);
query.setHint(QueryHints.HINT_CACHEABLE, true);
return query.getResultList();
}
@Override
public List<AdminUser> readAdminUserByEmail(String emailAddress) {
TypedQuery<AdminUser> query = em.createNamedQuery("BC_READ_ADMIN_USER_BY_EMAIL", AdminUser.class);
query.setParameter("email", emailAddress);
return query.getResultList();
}
} | 0true
| admin_broadleaf-open-admin-platform_src_main_java_org_broadleafcommerce_openadmin_server_security_dao_AdminUserDaoImpl.java |
5,224 | static class Bucket extends InternalHistogram.Bucket implements DateHistogram.Bucket {
private final ValueFormatter formatter;
Bucket(long key, long docCount, InternalAggregations aggregations, ValueFormatter formatter) {
super(key, docCount, aggregations);
this.formatter = formatter;
}
@Override
public String getKey() {
return formatter != null ? formatter.format(key) : DateFieldMapper.Defaults.DATE_TIME_FORMATTER.printer().print(key);
}
@Override
public DateTime getKeyAsDate() {
return new DateTime(key);
}
} | 1no label
| src_main_java_org_elasticsearch_search_aggregations_bucket_histogram_InternalDateHistogram.java |
268 | public class NullEmailServiceImpl implements EmailService {
@Override
public boolean sendTemplateEmail(String emailAddress, EmailInfo emailInfo, HashMap<String, Object> props) {
return true;
}
@Override
public boolean sendTemplateEmail(EmailTarget emailTarget, EmailInfo emailInfo, HashMap<String, Object> props) {
return true;
}
@Override
public boolean sendBasicEmail(EmailInfo emailInfo, EmailTarget emailTarget, HashMap<String, Object> props) {
return true;
}
} | 0true
| common_src_main_java_org_broadleafcommerce_common_email_service_NullEmailServiceImpl.java |
3,449 | public class LocalIndexGateway extends AbstractIndexComponent implements IndexGateway {
@Inject
public LocalIndexGateway(Index index, @IndexSettings Settings indexSettings) {
super(index, indexSettings);
}
@Override
public String type() {
return "local";
}
@Override
public Class<? extends IndexShardGateway> shardGatewayClass() {
return LocalIndexShardGateway.class;
}
@Override
public String toString() {
return "local";
}
@Override
public void close() {
}
} | 0true
| src_main_java_org_elasticsearch_index_gateway_local_LocalIndexGateway.java |
6 | public class OIterableObjectArray<T> implements Iterable<T> {
private final Object object;
private int length;
public OIterableObjectArray(Object o) {
object = o;
length = Array.getLength(o);
}
/**
* Returns an iterator over a set of elements of type T.
*
* @return an Iterator.
*/
public Iterator<T> iterator() {
return new ObjIterator();
}
private class ObjIterator implements Iterator<T> {
private int p = 0;
/**
* Returns <tt>true</tt> if the iteration has more elements. (In other words, returns <tt>true</tt> if <tt>next</tt> would
* return an element rather than throwing an exception.)
*
* @return <tt>true</tt> if the iterator has more elements.
*/
public boolean hasNext() {
return p < length;
}
/**
* Returns the next element in the iteration.
*
* @return the next element in the iteration.
* @throws java.util.NoSuchElementException
* iteration has no more elements.
*/
@SuppressWarnings("unchecked")
public T next() {
if (p < length) {
return (T) Array.get(object, p++);
} else {
throw new NoSuchElementException();
}
}
/**
* Removes from the underlying collection the last element returned by the iterator (optional operation). This method can be
* called only once per call to <tt>next</tt>. The behavior of an iterator is unspecified if the underlying collection is
* modified while the iteration is in progress in any way other than by calling this method.
*
* @throws UnsupportedOperationException
* if the <tt>remove</tt> operation is not supported by this Iterator.
* @throws IllegalStateException
* if the <tt>next</tt> method has not yet been called, or the <tt>remove</tt> method has already been called after
* the last call to the <tt>next</tt> method.
*/
public void remove() {
throw new UnsupportedOperationException();
}
}
} | 0true
| commons_src_main_java_com_orientechnologies_common_collection_OIterableObjectArray.java |
756 | private static class BucketSearchResult {
private final int itemIndex;
private final ArrayList<OBonsaiBucketPointer> path;
private BucketSearchResult(int itemIndex, ArrayList<OBonsaiBucketPointer> path) {
this.itemIndex = itemIndex;
this.path = path;
}
public OBonsaiBucketPointer getLastPathItem() {
return path.get(path.size() - 1);
}
} | 0true
| core_src_main_java_com_orientechnologies_orient_core_index_sbtreebonsai_local_OSBTreeBonsai.java |
1,380 | public static final class OTransactionRecordIndexOperation {
public OTransactionRecordIndexOperation(String index, Object key, OPERATION operation) {
this.index = index;
this.key = key;
this.operation = operation;
}
public String index;
public Object key;
public OPERATION operation;
} | 0true
| core_src_main_java_com_orientechnologies_orient_core_tx_OTransactionRealAbstract.java |
1,832 | public class MapInterceptorContextImpl implements MapInterceptorContext {
private String mapName;
private MapOperationType operationType;
private Data key;
private Object newValue;
private Map.Entry existingEntry;
public MapInterceptorContextImpl(String mapName, MapOperationType operationType, Data key, Object newValue, Map.Entry existingEntry) {
this.mapName = mapName;
this.operationType = operationType;
this.key = key;
this.newValue = newValue;
this.existingEntry = existingEntry;
}
@Override
public String getMapName() {
return mapName;
}
public void setNewValue(Object newValue) {
this.newValue = newValue;
}
@Override
public MapOperationType getOperationType() {
return operationType;
}
@Override
public Data getKey() {
return key;
}
@Override
public Object getNewValue() {
return newValue;
}
@Override
public Map.Entry getExistingEntry() {
return existingEntry;
}
} | 0true
| hazelcast_src_main_java_com_hazelcast_map_MapInterceptorContextImpl.java |
83 | GREATER_THAN {
@Override
public boolean isValidValueType(Class<?> clazz) {
Preconditions.checkNotNull(clazz);
return Comparable.class.isAssignableFrom(clazz);
}
@Override
public boolean isValidCondition(Object condition) {
return condition!=null && condition instanceof Comparable;
}
@Override
public boolean evaluate(Object value, Object condition) {
Integer cmp = AttributeUtil.compare(value,condition);
return cmp!=null?cmp>0:false;
}
@Override
public String toString() {
return ">";
}
@Override
public TitanPredicate negate() {
return LESS_THAN_EQUAL;
}
}, | 0true
| titan-core_src_main_java_com_thinkaurelius_titan_core_attribute_Cmp.java |
3,393 | private static class EventServiceSegment {
final String serviceName;
final ConcurrentMap<String, Collection<Registration>> registrations
= new ConcurrentHashMap<String, Collection<Registration>>();
final ConcurrentMap<String, Registration> registrationIdMap = new ConcurrentHashMap<String, Registration>();
final AtomicInteger totalPublishes = new AtomicInteger();
EventServiceSegment(String serviceName) {
this.serviceName = serviceName;
}
private Collection<Registration> getRegistrations(String topic, boolean forceCreate) {
Collection<Registration> listenerList = registrations.get(topic);
if (listenerList == null && forceCreate) {
return ConcurrencyUtil.getOrPutIfAbsent(registrations, topic, new ConstructorFunction<String, Collection<Registration>>() {
public Collection<Registration> createNew(String key) {
return Collections.newSetFromMap(new ConcurrentHashMap<Registration, Boolean>());
}
});
}
return listenerList;
}
private boolean addRegistration(String topic, Registration registration) {
final Collection<Registration> registrations = getRegistrations(topic, true);
if (registrations.add(registration)) {
registrationIdMap.put(registration.id, registration);
return true;
}
return false;
}
private Registration removeRegistration(String topic, String id) {
final Registration registration = registrationIdMap.remove(id);
if (registration != null) {
final Collection<Registration> all = registrations.get(topic);
if (all != null) {
all.remove(registration);
}
}
return registration;
}
void removeRegistrations(String topic) {
final Collection<Registration> all = registrations.remove(topic);
if (all != null) {
for (Registration reg : all) {
registrationIdMap.remove(reg.getId());
}
}
}
void clear() {
registrations.clear();
registrationIdMap.clear();
}
void onMemberLeft(Address address) {
for (Collection<Registration> all : registrations.values()) {
Iterator<Registration> iter = all.iterator();
while (iter.hasNext()) {
Registration reg = iter.next();
if (address.equals(reg.getSubscriber())) {
iter.remove();
registrationIdMap.remove(reg.id);
}
}
}
}
int incrementPublish() {
return totalPublishes.incrementAndGet();
}
} | 1no label
| hazelcast_src_main_java_com_hazelcast_spi_impl_EventServiceImpl.java |
1,754 | public static interface DistanceBoundingCheck {
boolean isWithin(double targetLatitude, double targetLongitude);
GeoPoint topLeft();
GeoPoint bottomRight();
} | 0true
| src_main_java_org_elasticsearch_common_geo_GeoDistance.java |
386 | public class ClusterUpdateSettingsRequest extends AcknowledgedRequest<ClusterUpdateSettingsRequest> {
private Settings transientSettings = EMPTY_SETTINGS;
private Settings persistentSettings = EMPTY_SETTINGS;
public ClusterUpdateSettingsRequest() {
}
@Override
public ActionRequestValidationException validate() {
ActionRequestValidationException validationException = null;
if (transientSettings.getAsMap().isEmpty() && persistentSettings.getAsMap().isEmpty()) {
validationException = addValidationError("no settings to update", validationException);
}
return validationException;
}
Settings transientSettings() {
return transientSettings;
}
Settings persistentSettings() {
return persistentSettings;
}
/**
* Sets the transient settings to be updated. They will not survive a full cluster restart
*/
public ClusterUpdateSettingsRequest transientSettings(Settings settings) {
this.transientSettings = settings;
return this;
}
/**
* Sets the transient settings to be updated. They will not survive a full cluster restart
*/
public ClusterUpdateSettingsRequest transientSettings(Settings.Builder settings) {
this.transientSettings = settings.build();
return this;
}
/**
* Sets the source containing the transient settings to be updated. They will not survive a full cluster restart
*/
public ClusterUpdateSettingsRequest transientSettings(String source) {
this.transientSettings = ImmutableSettings.settingsBuilder().loadFromSource(source).build();
return this;
}
/**
* Sets the transient settings to be updated. They will not survive a full cluster restart
*/
@SuppressWarnings("unchecked")
public ClusterUpdateSettingsRequest transientSettings(Map source) {
try {
XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
builder.map(source);
transientSettings(builder.string());
} catch (IOException e) {
throw new ElasticsearchGenerationException("Failed to generate [" + source + "]", e);
}
return this;
}
/**
* Sets the persistent settings to be updated. They will get applied cross restarts
*/
public ClusterUpdateSettingsRequest persistentSettings(Settings settings) {
this.persistentSettings = settings;
return this;
}
/**
* Sets the persistent settings to be updated. They will get applied cross restarts
*/
public ClusterUpdateSettingsRequest persistentSettings(Settings.Builder settings) {
this.persistentSettings = settings.build();
return this;
}
/**
* Sets the source containing the persistent settings to be updated. They will get applied cross restarts
*/
public ClusterUpdateSettingsRequest persistentSettings(String source) {
this.persistentSettings = ImmutableSettings.settingsBuilder().loadFromSource(source).build();
return this;
}
/**
* Sets the persistent settings to be updated. They will get applied cross restarts
*/
@SuppressWarnings("unchecked")
public ClusterUpdateSettingsRequest persistentSettings(Map source) {
try {
XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
builder.map(source);
persistentSettings(builder.string());
} catch (IOException e) {
throw new ElasticsearchGenerationException("Failed to generate [" + source + "]", e);
}
return this;
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
transientSettings = readSettingsFromStream(in);
persistentSettings = readSettingsFromStream(in);
readTimeout(in);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
writeSettingsToStream(transientSettings, out);
writeSettingsToStream(persistentSettings, out);
writeTimeout(out);
}
} | 0true
| src_main_java_org_elasticsearch_action_admin_cluster_settings_ClusterUpdateSettingsRequest.java |
976 | public interface OObjectSerializer<LOCAL_TYPE, DB_TYPE> {
public Object serializeFieldValue(Class<?> iClass, LOCAL_TYPE iFieldValue);
public Object unserializeFieldValue(Class<?> iClass, DB_TYPE iFieldValue);
} | 0true
| core_src_main_java_com_orientechnologies_orient_core_serialization_serializer_object_OObjectSerializer.java |
1,154 | @Repository("blPaymentInfoDao")
public class PaymentInfoDaoImpl implements PaymentInfoDao {
@PersistenceContext(unitName = "blPU")
protected EntityManager em;
@Resource(name = "blEntityConfiguration")
protected EntityConfiguration entityConfiguration;
public PaymentInfo save(PaymentInfo paymentInfo) {
return em.merge(paymentInfo);
}
public PaymentResponseItem save(PaymentResponseItem paymentResponseItem) {
return em.merge(paymentResponseItem);
}
public PaymentLog save(PaymentLog log) {
return em.merge(log);
}
public PaymentInfo readPaymentInfoById(Long paymentId) {
return (PaymentInfo) em.find(PaymentInfoImpl.class, paymentId);
}
@SuppressWarnings("unchecked")
public List<PaymentInfo> readPaymentInfosForOrder(Order order) {
Query query = em.createNamedQuery("BC_READ_ORDERS_PAYMENTS_BY_ORDER_ID");
query.setParameter("orderId", order.getId());
return query.getResultList();
}
public PaymentInfo create() {
return ((PaymentInfo) entityConfiguration.createEntityInstance("org.broadleafcommerce.core.payment.domain.PaymentInfo"));
}
public PaymentResponseItem createResponseItem() {
return ((PaymentResponseItem) entityConfiguration.createEntityInstance("org.broadleafcommerce.core.payment.domain.PaymentResponseItem"));
}
public PaymentLog createLog() {
return ((PaymentLog) entityConfiguration.createEntityInstance("org.broadleafcommerce.core.payment.domain.PaymentLog"));
}
public void delete(PaymentInfo paymentInfo) {
if (!em.contains(paymentInfo)) {
paymentInfo = readPaymentInfoById(paymentInfo.getId());
}
em.remove(paymentInfo);
}
} | 0true
| core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_payment_dao_PaymentInfoDaoImpl.java |
1,642 | public class OHazelcastDistributionPartition implements ODistributedPartition {
private final Set<String> nodes = new HashSet<String>(5);
public OHazelcastDistributionPartition(final List<String> nodes) {
for (String n : nodes)
if (!n.equals(ODistributedConfiguration.NEW_NODE_TAG))
this.nodes.add(n);
}
public Set<String> getNodes() {
return nodes;
}
} | 0true
| distributed_src_main_java_com_orientechnologies_orient_server_hazelcast_OHazelcastDistributionPartition.java |
81 | class ChangeReferenceProposal extends CorrectionProposal
implements ICompletionProposalExtension {
private ChangeReferenceProposal(ProblemLocation problem,
String name, String pkg, TextFileChange change) {
super("Change reference to '" + name + "'" + pkg, change,
new Region(problem.getOffset(), name.length()),
MINOR_CHANGE);
}
static void addChangeReferenceProposal(ProblemLocation problem,
Collection<ICompletionProposal> proposals, IFile file,
String brokenName, DeclarationWithProximity dwp, int dist,
Tree.CompilationUnit cu) {
TextFileChange change =
new TextFileChange("Change Reference", file);
change.setEdit(new MultiTextEdit());
IDocument doc = EditorUtil.getDocument(change);
Declaration dec = dwp.getDeclaration();
String pkg = "";
if (dec.isToplevel() &&
!isImported(dec, cu) &&
isInPackage(cu, dec)) {
String pn = dec.getContainer().getQualifiedNameString();
pkg = " in '" + pn + "'";
if (!pn.isEmpty() &&
!pn.equals(Module.LANGUAGE_MODULE_NAME)) {
OccurrenceLocation ol =
getOccurrenceLocation(cu,
Nodes.findNode(cu, problem.getOffset()),
problem.getOffset());
if (ol!=IMPORT) {
List<InsertEdit> ies =
importEdits(cu, singleton(dec),
null, null, doc);
for (InsertEdit ie: ies) {
change.addEdit(ie);
}
}
}
}
change.addEdit(new ReplaceEdit(problem.getOffset(),
brokenName.length(), dwp.getName())); //Note: don't use problem.getLength() because it's wrong from the problem list
proposals.add(new ChangeReferenceProposal(problem,
dwp.getName(), pkg, change));
}
protected static boolean isInPackage(Tree.CompilationUnit cu,
Declaration dec) {
return !dec.getUnit().getPackage()
.equals(cu.getUnit().getPackage());
}
@Override
public void apply(IDocument document, char trigger, int offset) {
apply(document);
}
@Override
public boolean isValidFor(IDocument document, int offset) {
return true;
}
@Override
public char[] getTriggerCharacters() {
return "r".toCharArray();
}
@Override
public int getContextInformationPosition() {
return -1;
}
static void addChangeReferenceProposals(Tree.CompilationUnit cu,
Node node, ProblemLocation problem,
Collection<ICompletionProposal> proposals, IFile file) {
String brokenName = Nodes.getIdentifyingNode(node).getText();
if (brokenName.isEmpty()) return;
for (DeclarationWithProximity dwp:
getProposals(node, node.getScope(), cu).values()) {
if (isUpperCase(dwp.getName().charAt(0))==isUpperCase(brokenName.charAt(0))) {
int dist = getLevenshteinDistance(brokenName, dwp.getName()); //+dwp.getProximity()/3;
//TODO: would it be better to just sort by dist, and
// then select the 3 closest possibilities?
if (dist<=brokenName.length()/3+1) {
addChangeReferenceProposal(problem, proposals, file,
brokenName, dwp, dist, cu);
}
}
}
}
@Override
public StyledString getStyledDisplayString() {
return Highlights.styleProposal(getDisplayString(), true);
}
} | 0true
| plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_correct_ChangeReferenceProposal.java |
1,537 | @SuppressWarnings({ "unchecked" })
public class OObjectCustomSerializerList<TYPE> implements List<TYPE>, OObjectLazyCustomSerializer<List<TYPE>>, Serializable {
private static final long serialVersionUID = -8541477416577361792L;
private ORecord<?> sourceRecord;
private final List<Object> serializedList;
private final ArrayList<Object> list = new ArrayList<Object>();
private boolean converted = false;
private final Class<?> deserializeClass;
public OObjectCustomSerializerList(final Class<?> iDeserializeClass, final ORecord<?> iSourceRecord,
final List<Object> iRecordList) {
this.sourceRecord = iSourceRecord;
this.serializedList = iRecordList;
this.deserializeClass = iDeserializeClass;
for (int i = 0; i < iRecordList.size(); i++) {
list.add(i, null);
}
}
public OObjectCustomSerializerList(final Class<?> iDeserializeClass, final ORecord<?> iSourceRecord,
final List<Object> iRecordList, final Collection<? extends TYPE> iSourceList) {
this.sourceRecord = iSourceRecord;
this.serializedList = iRecordList;
this.deserializeClass = iDeserializeClass;
addAll(iSourceList);
for (int i = iSourceList.size(); i < iRecordList.size(); i++) {
list.add(i, null);
}
}
public Iterator<TYPE> iterator() {
return new OObjectCustomSerializerIterator<TYPE>(deserializeClass, sourceRecord, serializedList.iterator());
}
public boolean contains(final Object o) {
boolean underlyingContains = serializedList.contains(OObjectEntitySerializer.serializeFieldValue(deserializeClass, o));
return underlyingContains || list.contains(o);
}
public boolean add(TYPE element) {
serializedList.add(OObjectEntitySerializer.serializeFieldValue(deserializeClass, element));
return list.add(element);
}
public void add(int index, TYPE element) {
setDirty();
serializedList.add(index, OObjectEntitySerializer.serializeFieldValue(deserializeClass, element));
list.add(index, element);
}
public TYPE get(final int index) {
TYPE o = (TYPE) list.get(index);
if (o == null) {
Object toDeserialize = serializedList.get(index);
o = (TYPE) OObjectEntitySerializer.deserializeFieldValue(deserializeClass, toDeserialize);
list.set(index, o);
}
return o;
}
public int indexOf(final Object o) {
return list.indexOf(o);
}
public int lastIndexOf(final Object o) {
return list.lastIndexOf(o);
}
public Object[] toArray() {
convertAll();
return list.toArray();
}
public <T> T[] toArray(final T[] a) {
convertAll();
return list.toArray(a);
}
public int size() {
return serializedList.size();
}
public boolean isEmpty() {
return serializedList.isEmpty();
}
public boolean remove(Object o) {
setDirty();
int indexOfO = list.indexOf(o);
serializedList.remove(indexOfO);
return list.remove(o);
}
public boolean containsAll(Collection<?> c) {
for (Object o : c) {
if (!contains(o))
return false;
}
return true;
}
public boolean addAll(Collection<? extends TYPE> c) {
boolean dirty = false;
for (TYPE element : c) {
dirty = add(element) || dirty;
}
if (dirty)
setDirty();
return dirty;
}
public boolean addAll(int index, Collection<? extends TYPE> c) {
for (TYPE element : c) {
add(index, element);
index++;
}
if (c.size() > 0)
setDirty();
return c.size() > 0;
}
public boolean removeAll(Collection<?> c) {
boolean dirty = true;
for (Object o : c) {
dirty = dirty || remove(o);
}
if (dirty)
setDirty();
return dirty;
}
public boolean retainAll(Collection<?> c) {
boolean modified = false;
Iterator<TYPE> e = iterator();
while (e.hasNext()) {
if (!c.contains(e.next())) {
remove(e);
modified = true;
}
}
return modified;
}
public void clear() {
setDirty();
serializedList.clear();
list.clear();
}
public TYPE set(int index, TYPE element) {
serializedList.set(index, OObjectEntitySerializer.serializeFieldValue(deserializeClass, element));
return (TYPE) list.set(index, element);
}
public TYPE remove(int index) {
serializedList.remove(index);
return (TYPE) list.remove(index);
}
public ListIterator<TYPE> listIterator() {
return (ListIterator<TYPE>) list.listIterator();
}
public ListIterator<TYPE> listIterator(int index) {
return (ListIterator<TYPE>) list.listIterator(index);
}
public List<TYPE> subList(int fromIndex, int toIndex) {
return (List<TYPE>) list.subList(fromIndex, toIndex);
}
public boolean isConverted() {
return converted;
}
public void detach() {
convertAll();
}
public void detach(boolean nonProxiedInstance) {
convertAll();
}
public void detachAll(boolean nonProxiedInstance) {
convertAll();
}
protected void convertAll() {
if (converted)
return;
for (int i = 0; i < size(); ++i)
convert(i);
converted = true;
}
public void setDirty() {
if (sourceRecord != null)
sourceRecord.setDirty();
}
@Override
public List<TYPE> getNonOrientInstance() {
List<TYPE> list = new ArrayList<TYPE>();
list.addAll(this);
return this;
}
/**
* Convert the item requested.
*
* @param iIndex
* Position of the item to convert
*/
private void convert(final int iIndex) {
if (converted)
return;
Object o = list.get(iIndex);
if (o == null) {
o = serializedList.get(iIndex);
list.set(iIndex, OObjectEntitySerializer.deserializeFieldValue(deserializeClass, o));
}
}
protected boolean indexLoaded(int iIndex) {
return list.get(iIndex) != null;
}
@Override
public String toString() {
return list.toString();
}
@Override
public Object getUnderlying() {
return serializedList;
}
} | 0true
| object_src_main_java_com_orientechnologies_orient_object_serialization_OObjectCustomSerializerList.java |
1,391 | @RunWith(HazelcastSerialClassRunner.class)
@Category(SlowTest.class)
public class LocalRegionFactoryDefaultTest extends RegionFactoryDefaultTest {
@BeforeClass
@AfterClass
public static void killAllHazelcastInstances() throws IOException {
Hazelcast.shutdownAll();
}
protected Properties getCacheProperties() {
Properties props = new Properties();
props.setProperty(Environment.CACHE_REGION_FACTORY, HazelcastLocalCacheRegionFactory.class.getName());
return props;
}
@Test
public void testEntity() {
final HazelcastInstance hz = getHazelcastInstance(sf);
assertNotNull(hz);
final int count = 100;
final int childCount = 3;
insertDummyEntities(count, childCount);
sleep(1);
List<DummyEntity> list = new ArrayList<DummyEntity>(count);
Session session = sf.openSession();
try {
for (int i = 0; i < count; i++) {
DummyEntity e = (DummyEntity) session.get(DummyEntity.class, (long) i);
session.evict(e);
list.add(e);
}
} finally {
session.close();
}
session = sf.openSession();
Transaction tx = session.beginTransaction();
try {
for (DummyEntity dummy : list) {
dummy.setDate(new Date());
session.update(dummy);
}
tx.commit();
} catch (Exception e) {
tx.rollback();
e.printStackTrace();
} finally {
session.close();
}
Statistics stats = sf.getStatistics();
assertEquals((childCount + 1) * count, stats.getEntityInsertCount());
// twice put of entity and properties (on load and update) and once put of collection
assertEquals((childCount + 1) * count * 2 + count, stats.getSecondLevelCachePutCount());
assertEquals(childCount * count, stats.getEntityLoadCount());
assertEquals(count, stats.getSecondLevelCacheHitCount());
// collection cache miss
assertEquals(count, stats.getSecondLevelCacheMissCount());
stats.logSummary();
}
} | 0true
| hazelcast-hibernate_hazelcast-hibernate3_src_test_java_com_hazelcast_hibernate_LocalRegionFactoryDefaultTest.java |
1,171 | class NewUnitWizardPage extends WizardPage {
private String unitName = "";
private IPackageFragmentRoot sourceDir;
private IPackageFragment packageFragment;
private String packageName = "";
private boolean includePreamble = true;
boolean shared = true;
private IStructuredSelection selection;
private IWorkbench workbench;
private Text unitNameText;
NewUnitWizardPage(String title, String description, String icon) {
super(title, title, CeylonPlugin.getInstance()
.getImageRegistry().getDescriptor(icon));
setDescription(description);
}
@Override
public void createControl(Composite parent) {
initializeDialogUnits(parent);
initFromSelection();
Composite composite = new Composite(parent, SWT.NONE);
composite.setFont(parent.getFont());
GridLayout layout = new GridLayout();
layout.numColumns = 4;
composite.setLayout(layout);
createControls(composite);
setControl(composite);
Dialog.applyDialogFont(composite);
setPageComplete(isComplete());
}
void createControls(Composite composite) {
Text name = createNameField(composite);
createDeclarationField(composite);
createSeparator(composite);
Text folder = createFolderField(composite);
createPackageField(composite, folder);
name.forceFocus();
}
void createSeparator(Composite composite) {
Label sep = new Label(composite, SWT.SEPARATOR | SWT.HORIZONTAL);
GridData sgd= new GridData(GridData.HORIZONTAL_ALIGN_FILL);
sgd.horizontalSpan = 4;
sep.setLayoutData(sgd);
}
Text createNameField(Composite composite) {
Label nameLabel = new Label(composite, SWT.LEFT | SWT.WRAP);
nameLabel.setText(getCompilationUnitLabel());
GridData lgd = new GridData(GridData.HORIZONTAL_ALIGN_FILL);
lgd.horizontalSpan = 1;
nameLabel.setLayoutData(lgd);
final Text name = new Text(composite, SWT.SINGLE | SWT.BORDER);
GridData ngd= new GridData(GridData.HORIZONTAL_ALIGN_FILL);
ngd.horizontalSpan = 2;
ngd.grabExcessHorizontalSpace = true;
name.setLayoutData(ngd);
name.setText(unitName);
name.addModifyListener(new ModifyListener() {
@Override
public void modifyText(ModifyEvent e) {
unitName = name.getText();
if (!unitNameIsLegal()) {
setErrorMessage(getIllegalUnitNameMessage());
}
else if (sourceDir==null) {
setErrorMessage(getSelectSourceFolderMessage());
}
else if (!packageNameIsLegal()) {
setErrorMessage(getIllegalPackageNameMessage());
}
else {
setErrorMessage(null);
}
setPageComplete(isComplete());
}
});
unitNameText = name;
new Label(composite, SWT.NONE);
new Label(composite, SWT.NONE);
Button includeHeader = new Button(composite, SWT.CHECK);
includeHeader.setText("Include preamble in 'header.ceylon' in project root");
includeHeader.setSelection(includePreamble);
GridData igd= new GridData(GridData.HORIZONTAL_ALIGN_FILL);
igd.horizontalSpan = 3;
igd.grabExcessHorizontalSpace = true;
includeHeader.setLayoutData(igd);
includeHeader.addSelectionListener(new SelectionListener() {
@Override
public void widgetSelected(SelectionEvent e) {
includePreamble = !includePreamble;
}
@Override
public void widgetDefaultSelected(SelectionEvent e) {}
});
new Label(composite, SWT.NONE);
Link link = new Link(composite, SWT.NONE);
link.setText("<a>Edit 'header.ceylon'...</a>");
GridData kgd = new GridData(GridData.HORIZONTAL_ALIGN_FILL);
kgd.horizontalSpan = 2;
kgd.grabExcessHorizontalSpace = true;
link.setLayoutData(kgd);
link.addSelectionListener(new SelectionListener() {
@Override
public void widgetSelected(SelectionEvent e) {
if (sourceDir==null) {
MessageDialog.openWarning(getShell(), "No Source Folder",
getSelectSourceFolderMessage());
}
else {
EditDialog d = new EditDialog(getShell());
d.setText(readHeader());
if (d.open()==Status.OK) {
saveHeader(d.getText());
}
}
}
@Override
public void widgetDefaultSelected(SelectionEvent e) {}
});
new Label(composite, SWT.NONE);
return name;
}
String getCompilationUnitLabel() {
return "Compilation unit name: ";
}
    /**
     * Builds the "Source folder" row of the wizard page: a label, a text
     * field holding the workspace-relative folder path, a "Browse..." button
     * and a "Create new source folder..." link. Every control keeps
     * {@code sourceDir} and {@code packageFragment} in sync and refreshes the
     * page's error message and completion state.
     *
     * @param composite the parent composite
     * @return the text field displaying the source folder path
     */
    Text createFolderField(Composite composite) {
        Label folderLabel = new Label(composite, SWT.LEFT | SWT.WRAP);
        folderLabel.setText("Source folder: ");
        GridData flgd= new GridData(GridData.HORIZONTAL_ALIGN_FILL);
        flgd.horizontalSpan = 1;
        folderLabel.setLayoutData(flgd);
        final Text folder = new Text(composite, SWT.SINGLE | SWT.BORDER);
        GridData fgd= new GridData(GridData.HORIZONTAL_ALIGN_FILL);
        fgd.horizontalSpan = 2;
        fgd.grabExcessHorizontalSpace = true;
        folder.setLayoutData(fgd);
        if (sourceDir!=null) {
            String folderName = sourceDir.getPath().toPortableString();
            folder.setText(folderName);
        }
        folder.addModifyListener(new ModifyListener() {
            @Override
            public void modifyText(ModifyEvent e) {
                // re-resolve the typed path, then validate folder, package
                // and unit name in priority order
                setSourceDir(folder.getText());
                if (sourceDir!=null && packageNameIsLegal()) {
                    packageFragment = sourceDir.getPackageFragment(packageName);
                }
                if (sourceDir==null) {
                    setErrorMessage(getSelectSourceFolderMessage());
                }
                else if (!packageNameIsLegal()) {
                    setErrorMessage(getIllegalPackageNameMessage());
                }
                else if (!unitNameIsLegal()) {
                    setErrorMessage(getIllegalUnitNameMessage());
                }
                else {
                    setErrorMessage(null);
                }
                setPageComplete(isComplete());
            }
            /**
             * Resolves the given portable path to a package fragment root by
             * scanning every Java project in the workspace; leaves
             * {@code sourceDir} null when nothing matches.
             */
            private void setSourceDir(String folderName) {
                try {
                    sourceDir = null;
                    for (IJavaProject jp: JavaCore.create(ResourcesPlugin.getWorkspace().getRoot())
                            .getJavaProjects()) {
                        for (IPackageFragmentRoot pfr: jp.getPackageFragmentRoots()) {
                            if (pfr.getPath().toPortableString().equals(folderName)) {
                                sourceDir = pfr;
                                return;
                            }
                        }
                    }
                }
                catch (JavaModelException jme) {
                    jme.printStackTrace();
                }
            }
        });
        Button selectFolder = new Button(composite, SWT.PUSH);
        selectFolder.setText("Browse...");
        GridData sfgd= new GridData(GridData.HORIZONTAL_ALIGN_FILL);
        sfgd.horizontalSpan = 1;
        selectFolder.setLayoutData(sfgd);
        selectFolder.addSelectionListener(new SelectionListener() {
            @Override
            public void widgetSelected(SelectionEvent e) {
                // let the user pick an existing source container
                IPackageFragmentRoot pfr = getSourceContainer(getShell(),
                        ResourcesPlugin.getWorkspace().getRoot(), sourceDir);
                if (pfr!=null) {
                    sourceDir = pfr;
                    String folderName = sourceDir.getPath().toPortableString();
                    folder.setText(folderName);
                    packageFragment = sourceDir.getPackageFragment(packageName);
                    setPageComplete(isComplete());
                }
                if (sourceDir==null) {
                    setErrorMessage(getSelectSourceFolderMessage());
                }
                else if (!packageNameIsLegal()) {
                    setErrorMessage(getIllegalPackageNameMessage());
                }
                else if (!unitNameIsLegal()) {
                    setErrorMessage(getIllegalUnitNameMessage());
                }
                else {
                    setErrorMessage(null);
                }
            }
            @Override
            public void widgetDefaultSelected(SelectionEvent e) {}
        });
        new Label(composite, SWT.NONE);
        Link link = new Link(composite, SWT.NONE);
        link.setText("<a>Create new source folder...</a>");
        GridData kgd = new GridData(GridData.HORIZONTAL_ALIGN_FILL);
        kgd.horizontalSpan = 3;
        kgd.grabExcessHorizontalSpace = true;
        link.setLayoutData(kgd);
        link.addSelectionListener(new SelectionListener() {
            @Override
            public void widgetSelected(SelectionEvent e) {
                IPackageFragmentRoot pfr = (IPackageFragmentRoot) openSourceFolderWizard();
                if (pfr!=null) {
                    sourceDir = pfr;
                    String folderName = sourceDir.getPath().toPortableString();
                    folder.setText(folderName);
                    packageFragment = sourceDir.getPackageFragment(packageName);
                    setPageComplete(isComplete());
                }
                if (sourceDir==null) {
                    setErrorMessage(getSelectSourceFolderMessage());
                }
                else if (!packageNameIsLegal()) {
                    setErrorMessage(getIllegalPackageNameMessage());
                }
                else if (!unitNameIsLegal()) {
                    setErrorMessage(getIllegalUnitNameMessage());
                }
                else {
                    setErrorMessage(null);
                }
            }
            @Override
            public void widgetDefaultSelected(SelectionEvent e) {}
        });
        return folder;
    }
    /**
     * Builds the package row (via {@link #createPackageField(Composite)})
     * plus a "Create new Ceylon package with descriptor..." link that opens
     * the New Package wizard and adopts its result.
     *
     * @param composite the parent composite
     * @param folder the source-folder text field, updated when the wizard
     *               creates the package in a different source folder
     * @return the text field holding the package name
     */
    Text createPackageField(Composite composite, final Text folder) {
        final Text pkg = createPackageField(composite);
        new Label(composite, SWT.NONE);
        Link link = new Link(composite, SWT.NONE);
        link.setText("<a>Create new Ceylon package with descriptor...</a>");
        GridData kgd = new GridData(GridData.HORIZONTAL_ALIGN_FILL);
        kgd.horizontalSpan = 3;
        kgd.grabExcessHorizontalSpace = true;
        link.setLayoutData(kgd);
        link.addSelectionListener(new SelectionListener() {
            @Override
            public void widgetSelected(SelectionEvent e) {
                NewPackageWizard wiz = openPackageWizard();
                // NOTE(review): openPackageWizard() can return null (missing
                // descriptor or CoreException), which would NPE here — confirm
                IPackageFragment pfr = wiz.getPackageFragment();
                if (pfr!=null) {
                    sourceDir = wiz.getSourceFolder();
                    String folderName = sourceDir.getPath().toPortableString();
                    folder.setText(folderName);
                    pkg.setText(pfr.getElementName());
                    packageFragment = pfr;
                    setPageComplete(isComplete());
                }
                if (!packageNameIsLegal()) {
                    setErrorMessage(getIllegalPackageNameMessage());
                }
                else if (sourceDir==null) {
                    setErrorMessage(getSelectSourceFolderMessage());
                }
                else if (!unitNameIsLegal()) {
                    setErrorMessage(getIllegalUnitNameMessage());
                }
                else {
                    setErrorMessage(null);
                }
            }
            @Override
            public void widgetDefaultSelected(SelectionEvent e) {}
        });
        return pkg;
    }
    /**
     * Builds the "Package" row: a label, a text field bound to
     * {@code packageName}, and a "Browse..." button opening the package
     * selection dialog. Edits re-validate the page and update
     * {@code packageFragment}.
     *
     * @param composite the parent composite
     * @return the text field holding the package name
     */
    Text createPackageField(Composite composite) {
        Label packageLabel = new Label(composite, SWT.LEFT | SWT.WRAP);
        packageLabel.setText(getPackageLabel());
        GridData plgd= new GridData(GridData.HORIZONTAL_ALIGN_FILL);
        plgd.horizontalSpan = 1;
        packageLabel.setLayoutData(plgd);
        final Text pkg = new Text(composite, SWT.SINGLE | SWT.BORDER);
        GridData pgd= new GridData(GridData.HORIZONTAL_ALIGN_FILL);
        pgd.horizontalSpan = 2;
        pgd.grabExcessHorizontalSpace = true;
        pkg.setLayoutData(pgd);
        pkg.setText(packageName);
        pkg.addModifyListener(new ModifyListener() {
            @Override
            public void modifyText(ModifyEvent e) {
                packageName = pkg.getText();
                if (sourceDir!=null && packageNameIsLegal()) {
                    packageFragment = sourceDir.getPackageFragment(packageName);
                }
                // validation priority: package name, then folder, then unit name
                if (!packageNameIsLegal()) {
                    setErrorMessage(getIllegalPackageNameMessage());
                }
                else if (sourceDir==null) {
                    setErrorMessage(getSelectSourceFolderMessage());
                }
                else if (!unitNameIsLegal()) {
                    setErrorMessage(getIllegalUnitNameMessage());
                }
                else {
                    setErrorMessage(null);
                }
                setPageComplete(isComplete());
            }
        });
        Button selectPackage = new Button(composite, SWT.PUSH);
        selectPackage.setText("Browse...");
        GridData spgd= new GridData(GridData.HORIZONTAL_ALIGN_FILL);
        spgd.horizontalSpan = 1;
        selectPackage.setLayoutData(spgd);
        selectPackage.addSelectionListener(new SelectionListener() {
            @Override
            public void widgetSelected(SelectionEvent e) {
                if (sourceDir==null) {
                    // browsing requires a source folder to list packages from
                    MessageDialog.openWarning(getShell(), "No Source Folder",
                            getSelectSourceFolderMessage());
                }
                else {
                    IPackageFragment result =
                            PackageSelectionDialog.selectPackage(getShell(), sourceDir);
                    if (result!=null) {
                        packageName = result.getElementName();
                        pkg.setText(packageName);
                        if (sourceDir!=null) {
                            packageFragment = sourceDir.getPackageFragment(packageName);
                        }
                        setPageComplete(isComplete());
                    }
                    if (!packageNameIsLegal()) {
                        setErrorMessage(getIllegalPackageNameMessage());
                    }
                    else if (sourceDir==null) {
                        setErrorMessage(getSelectSourceFolderMessage());
                    }
                    else if (!unitNameIsLegal()) {
                        setErrorMessage(getIllegalUnitNameMessage());
                    }
                    else {
                        setErrorMessage(null);
                    }
                }
            }
            @Override
            public void widgetDefaultSelected(SelectionEvent e) {}
        });
        return pkg;
    }
private NewPackageWizard openPackageWizard() {
IWizardDescriptor descriptor =
getWorkbench().getNewWizardRegistry()
.findWizard(PLUGIN_ID + ".newPackageWizard");
if (descriptor!=null) {
try {
NewPackageWizard wizard =
(NewPackageWizard) descriptor.createWizard();
wizard.init(workbench, selection);
WizardDialog wd =
new WizardDialog(Display.getCurrent().getActiveShell(),
wizard);
wd.setTitle(wizard.getWindowTitle());
wd.open();
return wizard;
}
catch (CoreException e) {
e.printStackTrace();
}
}
return null;
}
private IJavaElement openSourceFolderWizard() {
IWizardDescriptor descriptor =
getWorkbench().getNewWizardRegistry()
.findWizard("org.eclipse.jdt.ui.wizards.NewSourceFolderCreationWizard");
if (descriptor!=null) {
try {
NewSourceFolderCreationWizard wizard =
(NewSourceFolderCreationWizard) descriptor.createWizard();
wizard.init(workbench, selection);
WizardDialog wd =
new WizardDialog(Display.getCurrent().getActiveShell(),
wizard);
wd.setTitle(wizard.getWindowTitle());
wd.open();
return wizard.getCreatedElement();
}
catch (CoreException e) {
e.printStackTrace();
}
}
return null;
}
    /**
     * Adds the "Create shared package" checkbox, which toggles the
     * {@code shared} flag.
     */
    void createSharedField(Composite composite) {
        new Label(composite, SWT.NONE);
        Button sharedPackage = new Button(composite, SWT.CHECK);
        sharedPackage.setText(getSharedPackageLabel());
        sharedPackage.setSelection(shared);
        GridData igd= new GridData(GridData.HORIZONTAL_ALIGN_FILL);
        igd.horizontalSpan = 3;
        igd.grabExcessHorizontalSpace = true;
        sharedPackage.setLayoutData(igd);
        sharedPackage.addSelectionListener(new SelectionListener() {
            @Override
            public void widgetSelected(SelectionEvent e) {
                // checkbox starts in sync with 'shared', so flipping the
                // flag mirrors the widget state
                shared = !shared;
            }
            @Override
            public void widgetDefaultSelected(SelectionEvent e) {}
        });
    }
    // no-op here; presumably a hook overridden by subclasses — confirm
    void createDeclarationField(Composite composite) {}
    /** Text for the "shared package" checkbox. */
    String getSharedPackageLabel() {
        return "Create shared package (visible to other modules)";
    }
    /** Label shown next to the package name field. */
    String getPackageLabel() {
        return "Package: ";
    }
    /**
     * Seeds {@code sourceDir}, {@code packageFragment} and
     * {@code packageName} from the current workbench selection: a selected
     * project defaults to its first internal source folder; a selected
     * source folder or package is used directly.
     */
    public void initFromSelection() {
        IJavaElement je = getSelectedJavaElement(selection);
        if (je instanceof IJavaProject) {
            IJavaProject jp = (IJavaProject) je;
            if (jp.isOpen()) {
                //default to the first source dir
                //we find in the selected project
                try {
                    for (IPackageFragmentRoot pfr:
                            jp.getAllPackageFragmentRoots()) {
                        if (!pfr.isExternal() && !pfr.isArchive()) {
                            je = pfr;
                            break;
                        }
                    }
                }
                catch (JavaModelException e) {} //best-effort: fall through with the project
            }
        }
        if (je instanceof IPackageFragmentRoot) {
            sourceDir = (IPackageFragmentRoot) je;
            packageFragment = sourceDir.getPackageFragment("");
            packageName = packageFragment.getElementName();
        }
        else if (je instanceof IPackageFragment) {
            packageFragment = (IPackageFragment) je;
            packageName = packageFragment.getElementName();
            sourceDir = (IPackageFragmentRoot) packageFragment.getAncestor(PACKAGE_FRAGMENT_ROOT);
        }
    }
    /** Stores the workbench and current selection for later use by this page. */
    public void init(IWorkbench workbench, IStructuredSelection selection) {
        this.selection = selection;
        this.workbench = workbench;
    }
    /**
     * The page is complete when the package and unit names are legal, a
     * source folder is selected, and the chosen package fragment actually
     * belongs to that source folder.
     */
    boolean isComplete() {
        // NOTE(review): assumes packageFragment is non-null once a legal
        // package name and source folder exist — confirm
        return packageNameIsLegal() && unitNameIsLegal() &&
                sourceDir!=null &&
                sourceDir.getPackageFragment(packageFragment.getElementName())
                    .equals(packageFragment);
    }
    /**
     * The workspace file that will hold the new compilation unit:
     * {@code <package path>/<unitName>.ceylon}.
     */
    IFile getFile() {
        IPath path = packageFragment.getPath().append(unitName + ".ceylon");
        IProject project = sourceDir.getJavaProject().getProject();
        return project.getFile(path.makeRelativeTo(project.getFullPath()));
    }
    public IPackageFragment getPackageFragment() {
        return packageFragment;
    }
    public IPackageFragmentRoot getSourceDir() {
        return sourceDir;
    }
    String getUnitName() {
        return unitName;
    }
    Text getUnitNameText() {
        return unitNameText;
    }
    public boolean isIncludePreamble() {
        return includePreamble;
    }
    public boolean isShared() {
        return shared;
    }
private String readHeader() {
//TODO: use IRunnableWithProgress
StringBuilder sb = new StringBuilder();
IFile file = getHeaderFile();
if (file.exists() && file.isAccessible()) {
InputStream stream = null;
try {
stream = file.getContents();
BufferedReader reader =
new BufferedReader(new InputStreamReader(stream));
String line;
while ((line = reader.readLine())!=null) {
sb.append(line)
.append(System.lineSeparator());
}
}
catch (Exception ex) {
ex.printStackTrace();
}
finally {
try {
if (stream!=null) stream.close();
}
catch (IOException ioe) {
ioe.printStackTrace();
}
}
}
return sb.toString();
}
private void saveHeader(String header) {
//TODO: use IRunnableWithProgress
IFile file = getHeaderFile();
ByteArrayInputStream stream = null;
try {
if (file.exists()) {
file.delete(true, null);
}
stream = new ByteArrayInputStream(header.getBytes()); //TODO: encoding
file.create(stream, true, null);
}
catch (CoreException e) {
e.printStackTrace();
}
finally {
try {
if (stream!=null) stream.close();
}
catch (IOException e) {
e.printStackTrace();
}
}
}
    /** Handle to the {@code header.ceylon} file at the project root. */
    private IFile getHeaderFile() {
        return sourceDir.getJavaProject().getProject()
                .getFile("header.ceylon");
    }
    /** True when a unit name has been entered and matches the legal pattern. */
    private boolean unitNameIsLegal() {
        return unitName!=null &&
                unitIsNameLegal(unitName);
    }
    /** A legal unit name is one or more word characters or hyphens. */
    boolean unitIsNameLegal(String unitName) {
        return unitName.matches("(\\w|-)+");
    }
    private String getIllegalUnitNameMessage() {
        return "Please enter a legal compilation unit name.";
    }
    private String getSelectSourceFolderMessage() {
        return "Please select a source folder.";
    }
private static final String KEYWORDS;
static {
StringBuilder sb = new StringBuilder();
for (String kw: Escaping.KEYWORDS) {
sb.append(kw).append('|');
}
sb.setLength(sb.length()-1);
KEYWORDS = sb.toString();
}
    /**
     * A legal package name is empty, or a period-separated list of
     * identifiers starting with a lowercase letter or underscore, containing
     * no Ceylon keyword.
     */
    boolean packageNameIsLegal(String packageName) {
        return packageName.isEmpty() ||
                packageName.matches("^[a-z_]\\w*(\\.[a-z_]\\w*)*$") &&
                !packageName.matches(".*\\b("+KEYWORDS+")\\b.*");
    }
    /** True for namespaces reserved by the platform (ceylon, java, javax). */
    private boolean packageNameIsDiscouraged() {
        return packageName.matches("^(ceylon|java|javax)\\b.*");
    }
    private boolean packageNameIsLegal() {
        return packageName!=null &&
                packageNameIsLegal(packageName);
    }
    String getIllegalPackageNameMessage() {
        return "Please enter a legal package name (a period-separated list of all-lowercase identifiers).";
    }
    void setUnitName(String unitName) {
        this.unitName = unitName;
    }
    /**
     * Updates the page's warning message before delegating the completion
     * state to the superclass: warns about discouraged namespaces and about
     * existing files that will not be overwritten.
     */
    @Override
    public void setPageComplete(boolean complete) {
        if (packageNameIsDiscouraged()) {
            setMessage(getDiscouragedNamespaceMessage(), WARNING);
        }
        else {
            setMessage(null);
        }
        if (complete) {
            for (String file: getFileNames()) {
                IPath path = packageFragment.getPath()
                        .append(file).addFileExtension("ceylon");
                if (getWorkspace().getRoot().getFile(path)
                        .exists()) {
                    // only the first clash is reported
                    setMessage("Existing unit will not be overwritten: " +
                            path.toPortableString(),
                            WARNING);
                    break;
                }
            }
        }
        super.setPageComplete(complete);
    }
    /** Warning text naming the reserved top-level namespace that was used. */
    String getDiscouragedNamespaceMessage() {
        return "Discouraged namespace: " + packageName.split("\\.")[0];
    }
    /** Names (without extension) of the files this wizard will create. */
    String[] getFileNames() {
        return new String[] { unitName };
    }
} | 1no label
| plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_wizard_NewUnitWizardPage.java |
3,297 | return new LongValues(true) {
final BytesRef bytes = new BytesRef();
final ByteArrayDataInput in = new ByteArrayDataInput();
long[] longs = new long[8];
int i = Integer.MAX_VALUE;
int valueCount = 0;
@Override
public int setDocument(int docId) {
values.get(docId, bytes);
in.reset(bytes.bytes, bytes.offset, bytes.length);
if (!in.eof()) {
// first value uses vLong on top of zig-zag encoding, then deltas are encoded using vLong
long previousValue = longs[0] = ByteUtils.zigZagDecode(ByteUtils.readVLong(in));
valueCount = 1;
while (!in.eof()) {
longs = ArrayUtil.grow(longs, valueCount + 1);
previousValue = longs[valueCount++] = previousValue + ByteUtils.readVLong(in);
}
} else {
valueCount = 0;
}
i = 0;
return valueCount;
}
@Override
public long nextValue() {
assert i < valueCount;
return longs[i++];
}
}; | 0true
| src_main_java_org_elasticsearch_index_fielddata_plain_BinaryDVNumericAtomicFieldData.java |
844 | INTEGER("Integer", 1, new Class<?>[] { Integer.class, Integer.TYPE }, new Class<?>[] { Integer.class, Number.class }) {
}, | 0true
| core_src_main_java_com_orientechnologies_orient_core_metadata_schema_OType.java |
557 | public class ORecordId implements ORID {
    private static final long serialVersionUID = 247070594054408657L;
    public static final ORecordId EMPTY_RECORD_ID = new ORecordId();
    public static final byte[] EMPTY_RECORD_ID_STREAM = EMPTY_RECORD_ID.toStream();
    // Held as an int to avoid JVM short-arithmetic penalties, but serialized
    // as a short (see PERSISTENT_SIZE and the toStream() methods).
    public int clusterId = CLUSTER_ID_INVALID;
    public OClusterPosition clusterPosition = OClusterPosition.INVALID_POSITION;
    // On-disk footprint: 2-byte cluster id + serialized cluster position.
    public static final int PERSISTENT_SIZE = OBinaryProtocol.SIZE_SHORT
            + OClusterPositionFactory.INSTANCE.getSerializedSize();
    /** Creates an invalid (empty) record id. */
    public ORecordId() {
    }
    public ORecordId(final int iClusterId, final OClusterPosition iPosition) {
        clusterId = iClusterId;
        checkClusterLimits();
        clusterPosition = iPosition;
    }
    public ORecordId(final int iClusterIdId) {
        clusterId = iClusterIdId;
        checkClusterLimits();
    }
    /** Parses a record id from its string form, e.g. {@code "#3:12"}. */
    public ORecordId(final String iRecordId) {
        fromString(iRecordId);
    }
    /**
     * Copy constructor.
     *
     * @param parentRid
     *          Source object
     */
    public ORecordId(final ORID parentRid) {
        clusterId = parentRid.getClusterId();
        clusterPosition = parentRid.getClusterPosition();
    }
    /** Resets this rid to the invalid state. */
    public void reset() {
        clusterId = CLUSTER_ID_INVALID;
        clusterPosition = CLUSTER_POS_INVALID;
    }
    public boolean isValid() {
        return clusterPosition.isValid();
    }
    public boolean isPersistent() {
        return clusterId > -1 && clusterPosition.isPersistent();
    }
    public boolean isNew() {
        return clusterPosition.isNew();
    }
    public boolean isTemporary() {
        return clusterId != -1 && clusterPosition.isTemporary();
    }
    @Override
    public String toString() {
        return generateString(clusterId, clusterPosition);
    }
    /** Appends this rid's string form ({@code #<cluster>:<position>}) to the buffer. */
    public StringBuilder toString(StringBuilder iBuffer) {
        if (iBuffer == null)
            iBuffer = new StringBuilder();
        iBuffer.append(PREFIX);
        iBuffer.append(clusterId);
        iBuffer.append(SEPARATOR);
        iBuffer.append(clusterPosition);
        return iBuffer;
    }
    public static String generateString(final int iClusterId, final OClusterPosition iPosition) {
        final StringBuilder buffer = new StringBuilder(12);
        buffer.append(PREFIX);
        buffer.append(iClusterId);
        buffer.append(SEPARATOR);
        buffer.append(iPosition);
        return buffer.toString();
    }
    // NOTE(review): accepts any OIdentifiable and compares against its
    // identity, so equality with non-ORecordId identifiables may not be
    // symmetric — confirm before relying on it in mixed collections.
    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (!(obj instanceof OIdentifiable))
            return false;
        final ORecordId other = (ORecordId) ((OIdentifiable) obj).getIdentity();
        if (clusterId != other.clusterId)
            return false;
        if (!clusterPosition.equals(other.clusterPosition))
            return false;
        return true;
    }
    @Override
    public int hashCode() {
        int result = clusterId;
        result = 31 * result + clusterPosition.hashCode();
        return result;
    }
    /** Orders by cluster id first, then by cluster position; null sorts first. */
    public int compareTo(final OIdentifiable iOther) {
        if (iOther == this)
            return 0;
        if (iOther == null)
            return 1;
        final int otherClusterId = iOther.getIdentity().getClusterId();
        if (clusterId == otherClusterId) {
            final OClusterPosition otherClusterPos = iOther.getIdentity().getClusterPosition();
            return clusterPosition.compareTo(otherClusterPos);
        } else if (clusterId > otherClusterId)
            return 1;
        return -1;
    }
    public int compare(final OIdentifiable iObj1, final OIdentifiable iObj2) {
        if (iObj1 == iObj2)
            return 0;
        if (iObj1 != null)
            return iObj1.compareTo(iObj2);
        return -1;
    }
    public ORecordId copy() {
        return new ORecordId(clusterId, clusterPosition);
    }
    // Valid range is -2..CLUSTER_MAX because the id is persisted as a short.
    // NOTE(review): "major than" in the message probably means "greater than".
    private void checkClusterLimits() {
        if (clusterId < -2)
            throw new ODatabaseException("RecordId cannot support negative cluster id. You've used: " + clusterId);
        if (clusterId > CLUSTER_MAX)
            throw new ODatabaseException("RecordId cannot support cluster id major than 32767. You've used: " + clusterId);
    }
    public ORecordId fromStream(final InputStream iStream) throws IOException {
        clusterId = OBinaryProtocol.bytes2short(iStream);
        clusterPosition = OClusterPositionFactory.INSTANCE.fromStream(iStream);
        return this;
    }
    public ORecordId fromStream(final OMemoryStream iStream) {
        clusterId = iStream.getAsShort();
        clusterPosition = OClusterPositionFactory.INSTANCE.fromStream(iStream.getAsByteArrayFixed(OClusterPositionFactory.INSTANCE
                .getSerializedSize()));
        return this;
    }
    public ORecordId fromStream(final byte[] iBuffer) {
        if (iBuffer != null) {
            clusterId = OBinaryProtocol.bytes2short(iBuffer, 0);
            clusterPosition = OClusterPositionFactory.INSTANCE.fromStream(iBuffer, OBinaryProtocol.SIZE_SHORT);
        }
        return this;
    }
    public int toStream(final OutputStream iStream) throws IOException {
        final int beginOffset = OBinaryProtocol.short2bytes((short) clusterId, iStream);
        iStream.write(clusterPosition.toStream());
        return beginOffset;
    }
    public int toStream(final OMemoryStream iStream) throws IOException {
        final int beginOffset = OBinaryProtocol.short2bytes((short) clusterId, iStream);
        iStream.write(clusterPosition.toStream());
        return beginOffset;
    }
    /** Serializes as a 2-byte cluster id followed by the cluster position bytes. */
    public byte[] toStream() {
        final int serializedSize = OClusterPositionFactory.INSTANCE.getSerializedSize();
        byte[] buffer = new byte[OBinaryProtocol.SIZE_SHORT + serializedSize];
        OBinaryProtocol.short2bytes((short) clusterId, buffer, 0);
        System.arraycopy(clusterPosition.toStream(), 0, buffer, OBinaryProtocol.SIZE_SHORT, serializedSize);
        return buffer;
    }
    public int getClusterId() {
        return clusterId;
    }
    public OClusterPosition getClusterPosition() {
        return clusterPosition;
    }
    /**
     * Parses a rid of the form {@code #<cluster-id>:<cluster-position>};
     * a null or empty argument resets this rid to the invalid state.
     *
     * @throws IllegalArgumentException if the string is not a well-formed rid
     */
    public void fromString(String iRecordId) {
        if (iRecordId != null)
            iRecordId = iRecordId.trim();
        if (iRecordId == null || iRecordId.isEmpty()) {
            clusterId = CLUSTER_ID_INVALID;
            clusterPosition = CLUSTER_POS_INVALID;
            return;
        }
        if (!OStringSerializerHelper.contains(iRecordId, SEPARATOR))
            throw new IllegalArgumentException("Argument '" + iRecordId
                    + "' is not a RecordId in form of string. Format must be: <cluster-id>:<cluster-position>");
        final List<String> parts = OStringSerializerHelper.split(iRecordId, SEPARATOR, PREFIX);
        if (parts.size() != 2)
            throw new IllegalArgumentException("Argument received '" + iRecordId
                    + "' is not a RecordId in form of string. Format must be: #<cluster-id>:<cluster-position>. Example: #3:12");
        clusterId = Integer.parseInt(parts.get(0));
        checkClusterLimits();
        clusterPosition = OClusterPositionFactory.INSTANCE.valueOf(parts.get(1));
    }
    public void copyFrom(final ORID iSource) {
        if (iSource == null)
            throw new IllegalArgumentException("Source is null");
        clusterId = iSource.getClusterId();
        clusterPosition = iSource.getClusterPosition();
    }
    /** String form of the rid that follows this one in the same cluster. */
    public String next() {
        return generateString(clusterId, clusterPosition.inc());
    }
    @Override
    public ORID nextRid() {
        return new ORecordId(clusterId, clusterPosition.inc());
    }
    public ORID getIdentity() {
        return this;
    }
    /**
     * Loads the record this rid points to from the thread-local database.
     *
     * @return the loaded record, or null if this rid is not valid
     * @throws ODatabaseException if no database is bound to the current thread
     */
    @SuppressWarnings("unchecked")
    public <T extends ORecord<?>> T getRecord() {
        if (!isValid())
            return null;
        final ODatabaseRecord db = ODatabaseRecordThreadLocal.INSTANCE.get();
        if (db == null)
            throw new ODatabaseException(
                    "No database found in current thread local space. If you manually control databases over threads assure to set the current database before to use it by calling: ODatabaseRecordThreadLocal.INSTANCE.set(db);");
        return (T) db.load(this);
    }
} | 0true
| core_src_main_java_com_orientechnologies_orient_core_id_ORecordId.java |
965 | public interface BundleOrderItem extends OrderItem, OrderItemContainer, SkuAccessor {
    List<DiscreteOrderItem> getDiscreteOrderItems();
    void setDiscreteOrderItems(List<DiscreteOrderItem> discreteOrderItems);
    // NOTE(review): presumably the price used as the basis for tax
    // calculation — confirm against the implementation.
    Money getTaxablePrice();
    public List<BundleOrderItemFeePrice> getBundleOrderItemFeePrices();
    public void setBundleOrderItemFeePrices(List<BundleOrderItemFeePrice> bundleOrderItemFeePrices);
    public boolean hasAdjustedItems();
    public Money getBaseRetailPrice();
    public void setBaseRetailPrice(Money baseRetailPrice);
    public Money getBaseSalePrice();
    public void setBaseSalePrice(Money baseSalePrice);
    /**
     * For BundleOrderItem created from a ProductBundle, this will represent the default sku of
     * the product bundle.
     *
     * This can be null for implementations that programmatically create product bundles.
     *
     * @return the default sku, possibly null
     */
    Sku getSku();
    void setSku(Sku sku);
    /**
     * Returns the associated ProductBundle or null if not applicable.
     *
     * If null, then this BundleOrderItem was manually created.
     *
     * @return the product bundle, or null
     */
    ProductBundle getProductBundle();
    /**
     * Sets the ProductBundle associated with this BundleOrderItem.
     *
     * @param bundle the bundle to associate
     */
    void setProductBundle(ProductBundle bundle);
    /**
     * Same as getProductBundle.
     */
    Product getProduct();
    public boolean shouldSumItems();
} | 0true
| core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_order_domain_BundleOrderItem.java |
3,079 | public interface Engine extends IndexShardComponent, CloseableComponent {
    static final String INDEX_CODEC = "index.codec";
    static ByteSizeValue INACTIVE_SHARD_INDEXING_BUFFER = ByteSizeValue.parseBytesSizeValue("500kb");
    /**
     * The default suggested refresh interval, -1 to disable it.
     */
    TimeValue defaultRefreshInterval();
    void enableGcDeletes(boolean enableGcDeletes);
    void updateIndexingBufferSize(ByteSizeValue indexingBufferSize);
    /** Registers a listener notified when the engine fails. */
    void addFailedEngineListener(FailedEngineListener listener);
    /**
     * Starts the Engine.
     * <p/>
     * <p>Note, after the creation and before the call to start, the store might
     * be changed.
     */
    void start() throws EngineException;
    void create(Create create) throws EngineException;
    void index(Index index) throws EngineException;
    void delete(Delete delete) throws EngineException;
    void delete(DeleteByQuery delete) throws EngineException;
    GetResult get(Get get) throws EngineException;
    /**
     * Returns a new searcher instance. The consumer of this
     * API is responsible for releasing the returned searcher in a
     * safe manner, preferably in a try/finally block.
     *
     * @see Searcher#release()
     */
    Searcher acquireSearcher(String source) throws EngineException;
    /**
     * Global stats on segments.
     */
    SegmentsStats segmentsStats();
    /**
     * The list of segments in the engine.
     */
    List<Segment> segments();
    /**
     * Returns <tt>true</tt> if a refresh is really needed.
     */
    boolean refreshNeeded();
    /**
     * Returns <tt>true</tt> if a possible merge is really needed.
     */
    boolean possibleMergeNeeded();
    void maybeMerge() throws EngineException;
    /**
     * Refreshes the engine for new search operations to reflect the latest
     * changes. Pass <tt>true</tt> if the refresh operation should include
     * all the operations performed up to this call.
     */
    void refresh(Refresh refresh) throws EngineException;
    /**
     * Flushes the state of the engine, clearing memory.
     */
    void flush(Flush flush) throws EngineException, FlushNotAllowedEngineException;
    void optimize(Optimize optimize) throws EngineException;
    <T> T snapshot(SnapshotHandler<T> snapshotHandler) throws EngineException;
    /**
     * Snapshots the index and returns a handle to it. Will always try and "commit" the
     * lucene index to make sure we have a "fresh" copy of the files to snapshot.
     */
    SnapshotIndexCommit snapshotIndex() throws EngineException;
    void recover(RecoveryHandler recoveryHandler) throws EngineException;
    /** Callback invoked with the shard id and cause when the engine fails. */
    static interface FailedEngineListener {
        void onFailedEngine(ShardId shardId, Throwable t);
    }
    /**
     * Recovery allow to start the recovery process. It is built of three phases.
     * <p/>
     * <p>The first phase allows to take a snapshot of the master index. Once this
     * is taken, no commit operations are effectively allowed on the index until the recovery
     * phases are through.
     * <p/>
     * <p>The seconds phase takes a snapshot of the current transaction log.
     * <p/>
     * <p>The last phase returns the remaining transaction log. During this phase, no dirty
     * operations are allowed on the index.
     */
    static interface RecoveryHandler {
        void phase1(SnapshotIndexCommit snapshot) throws ElasticsearchException;
        void phase2(Translog.Snapshot snapshot) throws ElasticsearchException;
        void phase3(Translog.Snapshot snapshot) throws ElasticsearchException;
    }
    static interface SnapshotHandler<T> {
        T snapshot(SnapshotIndexCommit snapshotIndexCommit, Translog.Snapshot translogSnapshot) throws EngineException;
    }
    static interface Searcher extends Releasable {
        /**
         * The source that caused this searcher to be acquired.
         */
        String source();
        IndexReader reader();
        IndexSearcher searcher();
    }
    /** A {@link Searcher} over a fixed {@link IndexSearcher}; release is a no-op. */
    static class SimpleSearcher implements Searcher {
        private final String source;
        private final IndexSearcher searcher;
        public SimpleSearcher(String source, IndexSearcher searcher) {
            this.source = source;
            this.searcher = searcher;
        }
        @Override
        public String source() {
            return source;
        }
        @Override
        public IndexReader reader() {
            return searcher.getIndexReader();
        }
        @Override
        public IndexSearcher searcher() {
            return searcher;
        }
        @Override
        public boolean release() throws ElasticsearchException {
            // nothing to release here...
            return true;
        }
    }
    /** Parameters for a refresh request: a diagnostic source and a force flag. */
    static class Refresh {
        private final String source;
        private boolean force = false;
        public Refresh(String source) {
            this.source = source;
        }
        /**
         * Forces calling refresh, overriding the check that dirty operations even happened.
         * Defaults to <tt>false</tt> (note, still lightweight if no refresh is needed).
         */
        public Refresh force(boolean force) {
            this.force = force;
            return this;
        }
        public boolean force() {
            return this.force;
        }
        public String source() {
            return this.source;
        }
        @Override
        public String toString() {
            return "force[" + force + "], source [" + source + "]";
        }
    }
    /** Parameters for a flush request. */
    static class Flush {
        public static enum Type {
            /**
             * A flush that causes a new writer to be created.
             */
            NEW_WRITER,
            /**
             * A flush that just commits the writer, without cleaning the translog.
             */
            COMMIT,
            /**
             * A flush that does a commit, as well as clears the translog.
             */
            COMMIT_TRANSLOG
        }
        private Type type = Type.COMMIT_TRANSLOG;
        private boolean force = false;
        /**
         * Should the flush operation wait if there is an ongoing flush operation.
         */
        private boolean waitIfOngoing = false;
        public Type type() {
            return this.type;
        }
        /**
         * Should a "full" flush be issued, basically cleaning as much memory as possible.
         */
        public Flush type(Type type) {
            this.type = type;
            return this;
        }
        public boolean force() {
            return this.force;
        }
        public Flush force(boolean force) {
            this.force = force;
            return this;
        }
        public boolean waitIfOngoing() {
            return this.waitIfOngoing;
        }
        public Flush waitIfOngoing(boolean waitIfOngoing) {
            this.waitIfOngoing = waitIfOngoing;
            return this;
        }
        @Override
        public String toString() {
            return "type[" + type + "], force[" + force + "]";
        }
    }
    /** Parameters for an optimize (force-merge) request. */
    static class Optimize {
        private boolean waitForMerge = true;
        private int maxNumSegments = -1;
        private boolean onlyExpungeDeletes = false;
        private boolean flush = false;
        public Optimize() {
        }
        public boolean waitForMerge() {
            return waitForMerge;
        }
        public Optimize waitForMerge(boolean waitForMerge) {
            this.waitForMerge = waitForMerge;
            return this;
        }
        public int maxNumSegments() {
            return maxNumSegments;
        }
        public Optimize maxNumSegments(int maxNumSegments) {
            this.maxNumSegments = maxNumSegments;
            return this;
        }
        public boolean onlyExpungeDeletes() {
            return onlyExpungeDeletes;
        }
        public Optimize onlyExpungeDeletes(boolean onlyExpungeDeletes) {
            this.onlyExpungeDeletes = onlyExpungeDeletes;
            return this;
        }
        public boolean flush() {
            return flush;
        }
        public Optimize flush(boolean flush) {
            this.flush = flush;
            return this;
        }
        @Override
        public String toString() {
            return "waitForMerge[" + waitForMerge + "], maxNumSegments[" + maxNumSegments + "], onlyExpungeDeletes[" + onlyExpungeDeletes + "], flush[" + flush + "]";
        }
    }
    /** Common shape of an engine operation: its type and where it originated. */
    static interface Operation {
        static enum Type {
            CREATE,
            INDEX,
            DELETE
        }
        static enum Origin {
            PRIMARY,
            REPLICA,
            RECOVERY
        }
        Type opType();
        Origin origin();
    }
    /** An operation that carries a parsed document to be written. */
    static interface IndexingOperation extends Operation {
        ParsedDocument parsedDoc();
        List<Document> docs();
        DocumentMapper docMapper();
    }
    /**
     * An index "create" operation: carries the parsed document, its uid term,
     * versioning info, origin and timing. Fluent setters return {@code this}.
     */
    static class Create implements IndexingOperation {
        private final DocumentMapper docMapper;
        private final Term uid;
        private final ParsedDocument doc;
        private long version = Versions.MATCH_ANY;
        private VersionType versionType = VersionType.INTERNAL;
        private Origin origin = Origin.PRIMARY;
        private long startTime;
        private long endTime;
        public Create(DocumentMapper docMapper, Term uid, ParsedDocument doc) {
            this.docMapper = docMapper;
            this.uid = uid;
            this.doc = doc;
        }
        @Override
        public DocumentMapper docMapper() {
            return this.docMapper;
        }
        @Override
        public Type opType() {
            return Type.CREATE;
        }
        public Create origin(Origin origin) {
            this.origin = origin;
            return this;
        }
        @Override
        public Origin origin() {
            return this.origin;
        }
        @Override
        public ParsedDocument parsedDoc() {
            return this.doc;
        }
        public Term uid() {
            return this.uid;
        }
        public String type() {
            return this.doc.type();
        }
        public String id() {
            return this.doc.id();
        }
        public String routing() {
            return this.doc.routing();
        }
        public long timestamp() {
            return this.doc.timestamp();
        }
        public long ttl() {
            return this.doc.ttl();
        }
        public long version() {
            return this.version;
        }
        public Create version(long version) {
            this.version = version;
            // keep the document's Lucene version field in sync
            this.doc.version().setLongValue(version);
            return this;
        }
        public VersionType versionType() {
            return this.versionType;
        }
        public Create versionType(VersionType versionType) {
            this.versionType = versionType;
            return this;
        }
        public String parent() {
            return this.doc.parent();
        }
        @Override
        public List<Document> docs() {
            return this.doc.docs();
        }
        public Analyzer analyzer() {
            return this.doc.analyzer();
        }
        public BytesReference source() {
            return this.doc.source();
        }
        public Create startTime(long startTime) {
            this.startTime = startTime;
            return this;
        }
        /**
         * Returns operation start time in nanoseconds.
         */
        public long startTime() {
            return this.startTime;
        }
        public Create endTime(long endTime) {
            this.endTime = endTime;
            return this;
        }
        /**
         * Returns operation end time in nanoseconds.
         */
        public long endTime() {
            return this.endTime;
        }
    }
    /**
     * An "index" operation (create-or-update): carries the parsed document,
     * its uid term, versioning info, origin, timing, and — after execution —
     * whether the document was newly created. Fluent setters return
     * {@code this}.
     */
    static class Index implements IndexingOperation {
        private final DocumentMapper docMapper;
        private final Term uid;
        private final ParsedDocument doc;
        private long version = Versions.MATCH_ANY;
        private VersionType versionType = VersionType.INTERNAL;
        private Origin origin = Origin.PRIMARY;
        private boolean created;
        private long startTime;
        private long endTime;
        public Index(DocumentMapper docMapper, Term uid, ParsedDocument doc) {
            this.docMapper = docMapper;
            this.uid = uid;
            this.doc = doc;
        }
        @Override
        public DocumentMapper docMapper() {
            return this.docMapper;
        }
        @Override
        public Type opType() {
            return Type.INDEX;
        }
        public Index origin(Origin origin) {
            this.origin = origin;
            return this;
        }
        @Override
        public Origin origin() {
            return this.origin;
        }
        public Term uid() {
            return this.uid;
        }
        @Override
        public ParsedDocument parsedDoc() {
            return this.doc;
        }
        public Index version(long version) {
            this.version = version;
            // keep the document's Lucene version field in sync
            doc.version().setLongValue(version);
            return this;
        }
        /**
         * before indexing holds the version requested, after indexing holds the new version of the document.
         */
        public long version() {
            return this.version;
        }
        public Index versionType(VersionType versionType) {
            this.versionType = versionType;
            return this;
        }
        public VersionType versionType() {
            return this.versionType;
        }
        @Override
        public List<Document> docs() {
            return this.doc.docs();
        }
        public Analyzer analyzer() {
            return this.doc.analyzer();
        }
        public String id() {
            return this.doc.id();
        }
        public String type() {
            return this.doc.type();
        }
        public String routing() {
            return this.doc.routing();
        }
        public String parent() {
            return this.doc.parent();
        }
        public long timestamp() {
            return this.doc.timestamp();
        }
        public long ttl() {
            return this.doc.ttl();
        }
        public BytesReference source() {
            return this.doc.source();
        }
        public Index startTime(long startTime) {
            this.startTime = startTime;
            return this;
        }
        /**
         * Returns operation start time in nanoseconds.
         */
        public long startTime() {
            return this.startTime;
        }
        public Index endTime(long endTime) {
            this.endTime = endTime;
            return this;
        }
        /**
         * Returns operation end time in nanoseconds.
         */
        public long endTime() {
            return this.endTime;
        }
        /**
         * @return true if object was created
         */
        public boolean created() {
            return created;
        }
        public void created(boolean created) {
            this.created = created;
        }
    }
/**
 * A delete operation for a single document identified by type, id and uid term.
 * After execution {@link #found()} reports whether a document was actually
 * present and deleted.
 */
static class Delete implements Operation {
    private final String type;
    private final String id;
    private final Term uid;            // uniquely identifies the doc in the index
    private long version = Versions.MATCH_ANY;
    private VersionType versionType = VersionType.INTERNAL;
    private Origin origin = Origin.PRIMARY;
    private boolean found;             // set by the engine after execution
    private long startTime;            // nanoseconds
    private long endTime;              // nanoseconds

    public Delete(String type, String id, Term uid) {
        this.type = type;
        this.id = id;
        this.uid = uid;
    }

    @Override
    public Type opType() {
        return Type.DELETE;
    }

    /** Fluent setter for where the operation originated (primary/replica/recovery). */
    public Delete origin(Origin origin) {
        this.origin = origin;
        return this;
    }

    @Override
    public Origin origin() {
        return this.origin;
    }

    public String type() {
        return this.type;
    }

    public String id() {
        return this.id;
    }

    public Term uid() {
        return this.uid;
    }

    public Delete version(long version) {
        this.version = version;
        return this;
    }

    /**
     * before delete execution this is the version to be deleted. After this is the version of the "delete" transaction record.
     */
    public long version() {
        return this.version;
    }

    public Delete versionType(VersionType versionType) {
        this.versionType = versionType;
        return this;
    }

    public VersionType versionType() {
        return this.versionType;
    }

    /** Whether a live document existed and was deleted (valid after execution). */
    public boolean found() {
        return this.found;
    }

    public Delete found(boolean found) {
        this.found = found;
        return this;
    }

    public Delete startTime(long startTime) {
        this.startTime = startTime;
        return this;
    }

    /**
     * Returns operation start time in nanoseconds.
     */
    public long startTime() {
        return this.startTime;
    }

    public Delete endTime(long endTime) {
        this.endTime = endTime;
        return this;
    }

    /**
     * Returns operation end time in nanoseconds.
     */
    public long endTime() {
        return this.endTime;
    }
}
/**
 * A delete-by-query operation: deletes every document matching {@code query},
 * optionally restricted to the given types and filtered through index aliases.
 * A non-null {@code parentFilter} marks the request as operating on nested docs.
 */
static class DeleteByQuery {
    private final Query query;
    private final BytesReference source;       // original request source, kept for the translog
    private final String[] filteringAliases;   // may be null
    private final Filter aliasFilter;          // may be null
    private final String[] types;
    private final Filter parentFilter;         // non-null => nested documents involved
    private Operation.Origin origin = Operation.Origin.PRIMARY;
    private long startTime;                    // nanoseconds
    private long endTime;                      // nanoseconds

    public DeleteByQuery(Query query, BytesReference source, @Nullable String[] filteringAliases, @Nullable Filter aliasFilter, Filter parentFilter, String... types) {
        this.query = query;
        this.source = source;
        this.types = types;
        this.filteringAliases = filteringAliases;
        this.aliasFilter = aliasFilter;
        this.parentFilter = parentFilter;
    }

    public Query query() {
        return this.query;
    }

    public BytesReference source() {
        return this.source;
    }

    public String[] types() {
        return this.types;
    }

    public String[] filteringAliases() {
        return filteringAliases;
    }

    public Filter aliasFilter() {
        return aliasFilter;
    }

    /** True when a parent filter is present, i.e. nested documents must be handled. */
    public boolean nested() {
        return parentFilter != null;
    }

    public Filter parentFilter() {
        return parentFilter;
    }

    public DeleteByQuery origin(Operation.Origin origin) {
        this.origin = origin;
        return this;
    }

    public Operation.Origin origin() {
        return this.origin;
    }

    public DeleteByQuery startTime(long startTime) {
        this.startTime = startTime;
        return this;
    }

    /**
     * Returns operation start time in nanoseconds.
     */
    public long startTime() {
        return this.startTime;
    }

    public DeleteByQuery endTime(long endTime) {
        this.endTime = endTime;
        return this;
    }

    /**
     * Returns operation end time in nanoseconds.
     */
    public long endTime() {
        return this.endTime;
    }
}
/**
 * A get request for a single document identified by its uid term.
 * {@code realtime} requests a real-time read (not limited to what the last
 * refresh made visible); {@code loadSource} controls whether the _source is
 * fetched (defaults to true).
 */
static class Get {
    private final boolean realtime;
    private final Term uid;
    private boolean loadSource = true;
    private long version;
    private VersionType versionType;

    public Get(boolean realtime, Term uid) {
        this.realtime = realtime;
        this.uid = uid;
    }

    public boolean realtime() {
        return this.realtime;
    }

    public Term uid() {
        return uid;
    }

    public boolean loadSource() {
        return this.loadSource;
    }

    /** Fluent setter: whether to load the document _source. */
    public Get loadSource(boolean loadSource) {
        this.loadSource = loadSource;
        return this;
    }

    public long version() {
        return version;
    }

    public Get version(long version) {
        this.version = version;
        return this;
    }

    public VersionType versionType() {
        return versionType;
    }

    public Get versionType(VersionType versionType) {
        this.versionType = versionType;
        return this;
    }
}
/**
 * Result of a {@link Get}. Comes in two flavors: a translog-backed result
 * (source read from the translog, no searcher held) or an index-backed result
 * (holds a {@link Searcher} that MUST be released via {@link #release()}).
 */
static class GetResult {
    private final boolean exists;
    private final long version;
    private final Translog.Source source;              // non-null only for translog-backed results
    private final Versions.DocIdAndVersion docIdAndVersion; // non-null only for index-backed results
    private final Searcher searcher;                   // non-null only for index-backed results

    /** Shared sentinel for "document not found". */
    public static final GetResult NOT_EXISTS = new GetResult(false, Versions.NOT_FOUND, null);

    /** Translog-backed result: no searcher is held, nothing to release. */
    public GetResult(boolean exists, long version, @Nullable Translog.Source source) {
        this.source = source;
        this.exists = exists;
        this.version = version;
        this.docIdAndVersion = null;
        this.searcher = null;
    }

    /** Index-backed result: takes ownership of the searcher until {@link #release()}. */
    public GetResult(Searcher searcher, Versions.DocIdAndVersion docIdAndVersion) {
        this.exists = true;
        this.source = null;
        this.version = docIdAndVersion.version;
        this.docIdAndVersion = docIdAndVersion;
        this.searcher = searcher;
    }

    public boolean exists() {
        return exists;
    }

    public long version() {
        return this.version;
    }

    @Nullable
    public Translog.Source source() {
        return source;
    }

    public Searcher searcher() {
        return this.searcher;
    }

    public Versions.DocIdAndVersion docIdAndVersion() {
        return docIdAndVersion;
    }

    /** Releases the held searcher, if any. Safe to call on translog-backed results. */
    public void release() {
        if (searcher != null) {
            searcher.release();
        }
    }
}
} | 0true
| src_main_java_org_elasticsearch_index_engine_Engine.java |
1,161 | final class NewModuleWizardPage extends NewUnitWizardPage {
// Module version entered by the user; defaults to "1.0.0".
private String version="1.0.0";

/** Configures the wizard page title, description, icon and default unit name. */
NewModuleWizardPage() {
    super("New Ceylon Module",
            "Create a runnable Ceylon module with module and package descriptors.",
            CEYLON_NEW_MODULE);
    setUnitName("run");
}

/** Module version currently entered in the version field. */
String getVersion() {
    return version;
}
// --- Label overrides: repurpose the generic "new unit" wizard wording for modules. ---

@Override
String getCompilationUnitLabel() {
    return "Runnable compilation unit: ";
}

@Override
String getPackageLabel() {
    return "Module name: ";
}

@Override
String getSharedPackageLabel() {
    return "Create module with shared root package"; // (visible to other modules)
}
/**
 * Builds the page controls. Creation order determines grid layout order,
 * so the version field is inserted right after the module-name field.
 */
@Override
void createControls(Composite composite) {
    Text name = createPackageField(composite);
    createVersionField(composite);
    createSharedField(composite);
    createNameField(composite);
    createSeparator(composite);
    createFolderField(composite);
    // Put the initial focus in the module-name field.
    name.forceFocus();
}

/** Complete only when the base page is complete AND a real module name was given. */
@Override
boolean isComplete() {
    return super.isComplete() &&
            !getPackageFragment().isDefaultPackage();
}
/** A module name must be non-empty on top of the base page's legality rules. */
@Override
boolean packageNameIsLegal(String packageName) {
    return !packageName.isEmpty() &&
            super.packageNameIsLegal(packageName);
}

@Override
String getIllegalPackageNameMessage() {
    return "Please enter a legal module name (a period-separated list of all-lowercase identifiers).";
}

/** Files generated by the wizard: module/package descriptors plus the runnable unit. */
@Override
String[] getFileNames() {
    return new String[] { "module", "package", getUnitName() };
}
/**
 * Adds the "Module version:" label and text field to the grid and keeps the
 * {@link #version} field in sync with what the user types.
 */
void createVersionField(Composite composite) {
    // Label in the first grid column.
    Label label = new Label(composite, SWT.LEFT | SWT.WRAP);
    label.setText("Module version:");
    GridData labelLayout = new GridData(GridData.HORIZONTAL_ALIGN_FILL);
    labelLayout.horizontalSpan = 1;
    label.setLayoutData(labelLayout);

    // Editable text field spanning the remaining two columns.
    final Text versionText = new Text(composite, SWT.SINGLE | SWT.BORDER);
    GridData textLayout = new GridData(GridData.HORIZONTAL_ALIGN_FILL);
    textLayout.horizontalSpan = 2;
    textLayout.grabExcessHorizontalSpace = true;
    versionText.setLayoutData(textLayout);
    versionText.setText(version);

    // Mirror edits into the page state and revalidate completeness.
    versionText.addModifyListener(new ModifyListener() {
        @Override
        public void modifyText(ModifyEvent e) {
            version = versionText.getText();
            setPageComplete(isComplete());
        }
    });

    // Filler cell so subsequent controls start on a fresh grid row.
    new Label(composite, SWT.NONE);
}
} | 1no label
| plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_wizard_NewModuleWizardPage.java |
1,296 | public class DataPropagationTask implements Callable<Void> {
private ODatabaseDocumentTx baseDB;
private ODatabaseDocumentTx testDB; // null when no test database is configured
private long seed;                  // seeds the Random so each task is reproducible

/**
 * Opens a connection to the base database and, if a test database exists,
 * a parallel connection to it. NOTE: the constructor performs I/O (db open).
 */
public DataPropagationTask(long seed) {
    this.seed = seed;
    baseDB = new ODatabaseDocumentTx(baseDocumentTx.getURL());
    baseDB.open("admin", "admin");
    if (testDocumentTx != null) {
        testDB = new ODatabaseDocumentTx(testDocumentTx.getURL());
        testDB.open("admin", "admin");
    }
}
/**
 * Generates a deterministic, seed-driven workload against the base database:
 * creates TestOne documents (with random int/string/set fields), sometimes
 * creates linked TestTwo documents, links slices of them into a map field,
 * and randomly deletes ~20% of the TestOne documents.
 *
 * The exact sequence of Random calls defines the workload, so statement
 * order here must not be changed.
 */
@Override
public Void call() throws Exception {
    Random random = new Random(seed);
    // Bind the base connection to this thread for the duration of the task.
    ODatabaseRecordThreadLocal.INSTANCE.set(baseDB);
    try {
        List<ORID> testTwoList = new ArrayList<ORID>();
        List<ORID> firstDocs = new ArrayList<ORID>();
        OClass classOne = baseDB.getMetadata().getSchema().getClass("TestOne");
        OClass classTwo = baseDB.getMetadata().getSchema().getClass("TestTwo");
        for (int i = 0; i < 10000; i++) {
            ODocument docOne = new ODocument(classOne);
            docOne.field("intProp", random.nextInt());
            // NOTE(review): new String(bytes) uses the platform charset on
            // arbitrary random bytes — intentional here, the content is opaque.
            byte[] stringData = new byte[256];
            random.nextBytes(stringData);
            String stringProp = new String(stringData);
            docOne.field("stringProp", stringProp);
            Set<String> stringSet = new HashSet<String>();
            for (int n = 0; n < 5; n++) {
                stringSet.add("str" + random.nextInt());
            }
            docOne.field("stringSet", stringSet);
            saveDoc(docOne);
            firstDocs.add(docOne.getIdentity());
            // ~50% of iterations also create a TestTwo document.
            if (random.nextBoolean()) {
                ODocument docTwo = new ODocument(classTwo);
                List<String> stringList = new ArrayList<String>();
                for (int n = 0; n < 5; n++) {
                    stringList.add("strnd" + random.nextInt());
                }
                docTwo.field("stringList", stringList);
                saveDoc(docTwo);
                testTwoList.add(docTwo.getIdentity());
            }
            // Link a random slice of existing TestTwo docs into docOne and re-save it.
            if (!testTwoList.isEmpty()) {
                int startIndex = random.nextInt(testTwoList.size());
                int endIndex = random.nextInt(testTwoList.size() - startIndex) + startIndex;
                Map<String, ORID> linkMap = new HashMap<String, ORID>();
                for (int n = startIndex; n < endIndex; n++) {
                    ORID docTwoRid = testTwoList.get(n);
                    linkMap.put(docTwoRid.toString(), docTwoRid);
                }
                docOne.field("linkMap", linkMap);
                saveDoc(docOne);
            }
            // ~20% chance to delete a random previously-created TestOne doc.
            boolean deleteDoc = random.nextDouble() <= 0.2;
            if (deleteDoc) {
                ORID rid = firstDocs.remove(random.nextInt(firstDocs.size()));
                deleteDoc(rid);
            }
        }
    } finally {
        baseDB.close();
        if (testDB != null)
            testDB.close();
    }
    return null;
}
/**
 * Saves the document into the base database and, when a test database is
 * configured, saves an identical copy there and asserts both copies were
 * assigned the same record id (so WAL restore comparisons line up).
 */
private void saveDoc(ODocument document) {
    ODatabaseRecordThreadLocal.INSTANCE.set(baseDB);
    // Copy BEFORE saving so the test copy starts from the same state.
    ODocument testDoc = new ODocument();
    document.copyTo(testDoc);
    document.save();
    if (testDB != null) {
        ODatabaseRecordThreadLocal.INSTANCE.set(testDB);
        testDoc.save();
        Assert.assertEquals(testDoc.getIdentity(), document.getIdentity());
        // Restore the thread-local binding for the caller.
        ODatabaseRecordThreadLocal.INSTANCE.set(baseDB);
    }
}
/**
 * Deletes the record from the base database and mirrors the deletion in the
 * test database, asserting the record existed there first and is gone after.
 */
private void deleteDoc(ORID rid) {
    baseDB.delete(rid);
    if (testDB != null) {
        ODatabaseRecordThreadLocal.INSTANCE.set(testDB);
        Assert.assertNotNull(testDB.load(rid));
        testDB.delete(rid);
        Assert.assertNull(testDB.load(rid));
        // Restore the thread-local binding for the caller.
        ODatabaseRecordThreadLocal.INSTANCE.set(baseDB);
    }
}
} | 0true
| core_src_test_java_com_orientechnologies_orient_core_storage_impl_local_paginated_LocalPaginatedStorageRestoreFromWALAndAddAdditionalRecords.java |
209 | public static abstract class Commit extends LogEntry
{
private final long txId;        // id of the committed transaction
private final long timeWritten; // wall-clock time the entry was written, millis
protected final String name;    // entry kind name used in toString (e.g. 1PC/2PC)

Commit( int identifier, long txId, long timeWritten, String name )
{
    super( identifier );
    this.txId = txId;
    this.timeWritten = timeWritten;
    this.name = name;
}

public long getTxId()
{
    return txId;
}

public long getTimeWritten()
{
    return timeWritten;
}

/** Renders with the default time zone; see {@link #toString(TimeZone)}. */
@Override
public String toString()
{
    return toString( Format.DEFAULT_TIME_ZONE );
}

/** Renders as {@code name[identifier, txId=…, <timestamp in given zone>]}. */
@Override
public String toString( TimeZone timeZone )
{
    return name + "[" + getIdentifier() + ", txId=" + getTxId() + ", " + timestamp( getTimeWritten(), timeZone ) + "]";
}
} | 0true
| community_kernel_src_main_java_org_neo4j_kernel_impl_transaction_xaframework_LogEntry.java |
179 | private static class WoohaaException extends RuntimeException {
} | 0true
| hazelcast-client_src_test_java_com_hazelcast_client_atomiclong_ClientAtomicLongTest.java |
1,175 | public class ItemEvent<E> extends EventObject {
private final E item;                 // the collection item the event is about
private final ItemEventType eventType;
private final Member member;          // cluster member that fired the event

/** Convenience constructor taking the raw event-type id; delegates to the enum form. */
public ItemEvent(String name, int eventType, E item, Member member) {
    this(name, ItemEventType.getByType(eventType), item, member);
}

public ItemEvent(String name, ItemEventType itemEventType, E item, Member member) {
    super(name); // EventObject source is the collection's name
    this.item = item;
    this.eventType = itemEventType;
    this.member = member;
}
/**
 * Returns the event type.
 *
 * @return the event type.
 */
public ItemEventType getEventType() {
    return eventType;
}

/**
 * Returns the item related to the event.
 *
 * @return the item.
 */
public E getItem() {
    return item;
}

/**
 * Returns the member that fired this event.
 *
 * @return the member that fired this event.
 */
public Member getMember() {
    return member;
}
/** Human-readable form listing the event type, item and originating member. */
@Override
public String toString() {
    StringBuilder text = new StringBuilder("ItemEvent{");
    text.append("event=").append(eventType);
    text.append(", item=").append(getItem());
    text.append(", member=").append(getMember());
    text.append("} "); // trailing space preserved from the original format
    return text.toString();
}
} | 1no label
| hazelcast_src_main_java_com_hazelcast_core_ItemEvent.java |
265 | private static class RecordingRelationshipStore extends RelationshipStore
{
// Shared sink; every updateRecord appends a token describing the write.
private final AtomicReference<List<String>> currentRecording;

/** Test double: bypasses real storage (all super args null) and records writes. */
public RecordingRelationshipStore( AtomicReference<List<String>> currentRecording )
{
    super( null, null, null, null, null, null );
    this.currentRecording = currentRecording;
}

/** Records the write instead of persisting it. */
@Override
public void updateRecord(RelationshipRecord record) {
    currentRecording.get().add(commandActionToken(record) + " relationship");
}

// The following lifecycle hooks are stubbed out so the store can be
// constructed without any backing files.
@Override
protected void checkStorage() {
}

@Override
protected void checkVersion() {
}

@Override
protected void loadStorage() {
}
| community_kernel_src_test_java_org_neo4j_kernel_impl_nioneo_xa_WriteTransactionCommandOrderingTest.java |
572 | public class OpenIndexResponse extends AcknowledgedResponse {
// No-arg constructor for deserialization via readFrom.
OpenIndexResponse() {
}

OpenIndexResponse(boolean acknowledged) {
    super(acknowledged);
}

/** Reads the acknowledged flag on top of the base response fields. */
@Override
public void readFrom(StreamInput in) throws IOException {
    super.readFrom(in);
    readAcknowledged(in);
}

/** Writes the acknowledged flag on top of the base response fields. */
@Override
public void writeTo(StreamOutput out) throws IOException {
    super.writeTo(out);
    writeAcknowledged(out);
}
} | 0true
| src_main_java_org_elasticsearch_action_admin_indices_open_OpenIndexResponse.java |
1,942 | CollectionUtil.timSort(result, new Comparator<Message>() {
// Orders messages lexicographically by their source description
// so error reports come out in a deterministic order.
public int compare(Message a, Message b) {
    return a.getSource().compareTo(b.getSource());
}
}); | 0true
| src_main_java_org_elasticsearch_common_inject_internal_Errors.java |
1,978 | @RunWith(HazelcastParallelClassRunner.class)
@Category(QuickTest.class)
public class MapStoreTest extends HazelcastTestSupport {
/**
 * getAll() on a map backed by a MapLoader must be served through a single
 * bulk loadAll(...) call (missing keys simply absent from the result) and
 * must NOT fall back to per-key load(...) calls.
 */
@Test
public void testMapGetAll() throws InterruptedException {
    final Map<String, String> _map = new HashMap<String, String>();
    _map.put("key1", "value1");
    _map.put("key2", "value2");
    _map.put("key3", "value3");
    // Flags flipped by the loader so we can verify WHICH path was taken.
    final AtomicBoolean loadAllCalled = new AtomicBoolean(false);
    final AtomicBoolean loadCalled = new AtomicBoolean(false);
    TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(2);
    Config cfg = new Config();
    MapStoreConfig mapStoreConfig = new MapStoreConfig();
    mapStoreConfig.setEnabled(true);
    mapStoreConfig.setImplementation(new MapLoader<String, String>() {
        public String load(String key) {
            loadCalled.set(true);
            return _map.get(key);
        }

        public Map<String, String> loadAll(Collection<String> keys) {
            loadAllCalled.set(true);
            final HashMap<String, String> temp = new HashMap<String, String>();
            for (String key : keys) {
                temp.put(key, _map.get(key));
            }
            return temp;
        }

        public Set<String> loadAllKeys() {
            return _map.keySet();
        }
    });
    cfg.getMapConfig("testMapGetAll").setMapStoreConfig(mapStoreConfig);
    HazelcastInstance instance1 = nodeFactory.newHazelcastInstance(cfg);
    HazelcastInstance instance2 = nodeFactory.newHazelcastInstance(cfg);
    IMap map = instance1.getMap("testMapGetAll");
    final HashSet<String> keys = new HashSet<String>(3);
    keys.add("key1");
    keys.add("key3");
    keys.add("key4"); // not in the store — must just be missing from the result
    final Map subMap = map.getAll(keys);
    assertEquals(2, subMap.size());
    assertEquals("value1", subMap.get("key1"));
    assertEquals("value3", subMap.get("key3"));
    assertTrue(loadAllCalled.get());
    assertFalse(loadCalled.get());
}
/**
 * With a slow write-behind store, a second wave of updates to the same keys
 * (scheduled in a later second) must still eventually make the stored value
 * equal the in-map value — updates must not be lost or reordered per key.
 */
@Test
public void testSlowStore() throws Exception {
    final TestMapStore store = new WaitingOnFirstTestMapStore();
    Config cfg = new Config();
    MapStoreConfig mapStoreConfig = new MapStoreConfig();
    mapStoreConfig.setEnabled(true);
    mapStoreConfig.setWriteDelaySeconds(1);
    mapStoreConfig.setImplementation(store);
    cfg.getMapConfig("default").setMapStoreConfig(mapStoreConfig);
    HazelcastInstance h1 = createHazelcastInstance(cfg);
    final IMap<Integer, Integer> map = h1.getMap("testSlowStore");
    int count = 1000;
    for (int i = 0; i < count; i++) {
        map.put(i, 1);
    }
    Thread.sleep(2000); // sleep for scheduling following puts to a different second
    for (int i = 0; i < count; i++) {
        map.put(i, 2);
    }
    // The store lags behind; poll until map and store agree for every key.
    for (int i = 0; i < count; i++) {
        final int index = i;
        assertTrueEventually(new AssertTask() {
            @Override
            public void run() throws Exception {
                final Integer valueInMap = map.get(index);
                final Integer valueInStore = (Integer) store.getStore().get(index);
                assertEquals(valueInMap, valueInStore);
            }
        });
    }
}
/**
 * With InitialLoadMode.EAGER, getMap() must block until the whole store
 * content is loaded, so size() is immediately the full store size.
 */
@Test(timeout = 120000)
public void testInitialLoadModeEager() {
    int size = 100000;
    TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(4);
    Config cfg = new Config();
    GroupConfig groupConfig = new GroupConfig("testEager");
    cfg.setGroupConfig(groupConfig);
    MapStoreConfig mapStoreConfig = new MapStoreConfig();
    mapStoreConfig.setEnabled(true);
    mapStoreConfig.setImplementation(new SimpleMapLoader(size, true)); // slow loader
    mapStoreConfig.setInitialLoadMode(MapStoreConfig.InitialLoadMode.EAGER);
    cfg.getMapConfig("testMapInitialLoad").setMapStoreConfig(mapStoreConfig);
    HazelcastInstance instance1 = nodeFactory.newHazelcastInstance(cfg);
    HazelcastInstance instance2 = nodeFactory.newHazelcastInstance(cfg);
    IMap map = instance1.getMap("testMapInitialLoad");
    assertEquals(size, map.size());
}

/**
 * Same eager-load guarantee, but with the second member joining from another
 * thread while the first is already up — both must observe the full size.
 */
@Test(timeout = 120000)
public void testInitialLoadModeEagerMultipleThread() {
    final int instanceCount = 2;
    final int size = 100000;
    final TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(instanceCount);
    final CountDownLatch countDownLatch = new CountDownLatch(instanceCount - 1);
    final Config cfg = new Config();
    GroupConfig groupConfig = new GroupConfig("testEager");
    cfg.setGroupConfig(groupConfig);
    MapStoreConfig mapStoreConfig = new MapStoreConfig();
    mapStoreConfig.setEnabled(true);
    mapStoreConfig.setImplementation(new SimpleMapLoader(size, true)); // slow loader
    mapStoreConfig.setInitialLoadMode(MapStoreConfig.InitialLoadMode.EAGER);
    cfg.getMapConfig("testInitialLoadModeEagerMultipleThread").setMapStoreConfig(mapStoreConfig);
    HazelcastInstance instance1 = nodeFactory.newHazelcastInstance(cfg);
    Runnable runnable = new Runnable() {
        public void run() {
            HazelcastInstance instance2 = nodeFactory.newHazelcastInstance(cfg);
            final IMap<Object, Object> map = instance2.getMap("testInitialLoadModeEagerMultipleThread");
            assertEquals(size, map.size());
            countDownLatch.countDown();
        }
    };
    new Thread(runnable).start();
    assertOpenEventually(countDownLatch, 120);
    IMap map = instance1.getMap("testInitialLoadModeEagerMultipleThread");
    assertEquals(size, map.size());
}
/**
 * Eager initial load must survive a member shutting down mid-load: after
 * instance1 goes away, instance2 must still end up with the full store size.
 * (Method name typo "Stoppig" kept — it is part of the public test name.)
 */
@Test(timeout = 120000)
public void testInitialLoadModeEagerWhileStoppigOneNode() throws InterruptedException {
    final int instanceCount = 2;
    final int size = 100000;
    final TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(instanceCount);
    final CountDownLatch countDownLatch = new CountDownLatch(instanceCount - 1);
    final Config cfg = new Config();
    GroupConfig groupConfig = new GroupConfig("testEager");
    cfg.setGroupConfig(groupConfig);
    MapStoreConfig mapStoreConfig = new MapStoreConfig();
    mapStoreConfig.setEnabled(true);
    mapStoreConfig.setImplementation(new SimpleMapLoader(size, true)); // slow loader
    mapStoreConfig.setInitialLoadMode(MapStoreConfig.InitialLoadMode.EAGER);
    cfg.getMapConfig("testInitialLoadModeEagerWhileStoppigOneNode").setMapStoreConfig(mapStoreConfig);
    final HazelcastInstance instance1 = nodeFactory.newHazelcastInstance(cfg);
    final HazelcastInstance instance2 = nodeFactory.newHazelcastInstance(cfg);
    new Thread(new Runnable() {
        @Override
        public void run() {
            sleepSeconds(3); // let loading get under way before killing a node
            instance1.getLifecycleService().shutdown();
            sleepSeconds(3); // give the cluster time to rebalance
            final IMap<Object, Object> map = instance2.getMap("testInitialLoadModeEagerWhileStoppigOneNode");
            assertEquals(size, map.size());
            countDownLatch.countDown();
        }
    }).start();
    assertOpenEventually(countDownLatch);
    final IMap<Object, Object> map2 = instance2.getMap("testInitialLoadModeEagerWhileStoppigOneNode");
    final int map2Size = map2.size();
    assertEquals(size, map2Size);
}

/**
 * Sanity check of initial loading plus basic CRUD on a loader-backed map:
 * loaded values are visible, a put/remove pair leaves no residue, and a
 * member joining later still sees all loaded entries.
 */
@Test
public void testMapInitialLoad() throws InterruptedException {
    int size = 100000;
    TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(3);
    Config cfg = new Config();
    MapStoreConfig mapStoreConfig = new MapStoreConfig();
    mapStoreConfig.setEnabled(true);
    mapStoreConfig.setImplementation(new SimpleMapLoader(size, true));
    MapConfig mc = cfg.getMapConfig("default");
    mc.setMapStoreConfig(mapStoreConfig);
    HazelcastInstance instance1 = nodeFactory.newHazelcastInstance(cfg);
    HazelcastInstance instance2 = nodeFactory.newHazelcastInstance(cfg);
    IMap map = instance1.getMap("testMapInitialLoad");
    assertEquals(size, map.size());
    for (int i = 0; i < size; i++) {
        assertEquals(i, map.get(i)); // loader maps each key to itself
    }
    // Key `size` was never loaded: put returns null, remove returns the put value.
    assertNull(map.put(size, size));
    assertEquals(size, map.remove(size));
    assertNull(map.get(size));
    HazelcastInstance instance3 = nodeFactory.newHazelcastInstance(cfg);
    for (int i = 0; i < size; i++) {
        assertEquals(i, map.get(i));
    }
}
private class SimpleMapLoader implements MapLoader {
final int size;
final boolean slow;
SimpleMapLoader(int size, boolean slow) {
this.size = size;
this.slow = slow;
}
@Override
public Object load(Object key) {
return null;
}
@Override
public Map loadAll(Collection keys) {
if (slow) {
try {
Thread.sleep(150);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
Map result = new HashMap();
for (Object key : keys) {
result.put(key, key);
}
return result;
}
@Override
public Set loadAllKeys() {
Set keys = new HashSet();
for (int i = 0; i < size; i++) {
keys.add(i);
}
return keys;
}
}
/**
 * Regression for issue 614: after reading values() from a write-behind map,
 * the dirty-entry count must be 0 (reads must not mark entries dirty).
 */
@Test
public void issue614() {
    final ConcurrentMap<Long, String> STORE = new ConcurrentHashMap<Long, String>();
    STORE.put(1l, "Event1");
    STORE.put(2l, "Event2");
    STORE.put(3l, "Event3");
    STORE.put(4l, "Event4");
    STORE.put(5l, "Event5");
    STORE.put(6l, "Event6");
    Config config = new Config();
    config.getMapConfig("map")
            .setMapStoreConfig(new MapStoreConfig()
                    .setWriteDelaySeconds(1)
                    .setImplementation(new SimpleMapStore<Long, String>(STORE)));
    TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(3);
    HazelcastInstance h = nodeFactory.newHazelcastInstance(config);
    IMap map = h.getMap("map");
    // The values() call is the read under test; its result is intentionally unused.
    Collection collection = map.values();
    LocalMapStats localMapStats = map.getLocalMapStats();
    assertEquals(0, localMapStats.getDirtyEntryCount());
}

/**
 * Regression for issue 583: putIfAbsent and both replace variants must go
 * through the MapStore exactly like put does — map and store stay in sync
 * after every mutation, and no-op putIfAbsent/replace leave both untouched.
 */
@Test
public void testIssue583MapReplaceShouldTriggerMapStore() {
    final ConcurrentMap<String, Long> store = new ConcurrentHashMap<String, Long>();
    final MapStore<String, Long> myMapStore = new SimpleMapStore<String, Long>(store);
    Config config = new Config();
    config
            .getMapConfig("myMap")
            .setMapStoreConfig(new MapStoreConfig()
                    .setImplementation(myMapStore));
    TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(3);
    HazelcastInstance hc = nodeFactory.newHazelcastInstance(config);
    IMap<String, Long> myMap = hc.getMap("myMap");
    myMap.put("one", 1L);
    assertEquals(1L, myMap.get("one").longValue());
    assertEquals(1L, store.get("one").longValue());
    myMap.putIfAbsent("two", 2L);
    assertEquals(2L, myMap.get("two").longValue());
    assertEquals(2L, store.get("two").longValue());
    myMap.putIfAbsent("one", 5L); // present — must NOT overwrite
    assertEquals(1L, myMap.get("one").longValue());
    assertEquals(1L, store.get("one").longValue());
    myMap.replace("one", 1L, 111L); // conditional replace, matching old value
    assertEquals(111L, myMap.get("one").longValue());
    assertEquals(111L, store.get("one").longValue());
    myMap.replace("one", 1L); // unconditional replace
    assertEquals(1L, myMap.get("one").longValue());
    assertEquals(1L, store.get("one").longValue());
}
/**
 * Regression for issue 587: verifies which MapStore callbacks (load/store/
 * delete) fire for get/put/remove on a write-through map, using a counting
 * wrapper around SimpleMapStore. Load-count assertions are commented out
 * upstream because load invocation counts are implementation-dependent.
 */
@Test
public void issue587CallMapLoaderDuringRemoval() {
    final AtomicInteger loadCount = new AtomicInteger(0);
    final AtomicInteger storeCount = new AtomicInteger(0);
    final AtomicInteger deleteCount = new AtomicInteger(0);
    // Local subclass that counts each callback before delegating.
    class SimpleMapStore2 extends SimpleMapStore<String, Long> {
        SimpleMapStore2(ConcurrentMap<String, Long> store) {
            super(store);
        }

        public Long load(String key) {
            loadCount.incrementAndGet();
            return super.load(key);
        }

        public void store(String key, Long value) {
            storeCount.incrementAndGet();
            super.store(key, value);
        }

        public void delete(String key) {
            deleteCount.incrementAndGet();
            super.delete(key);
        }
    }
    final ConcurrentMap<String, Long> store = new ConcurrentHashMap<String, Long>();
    final MapStore<String, Long> myMapStore = new SimpleMapStore2(store);
    Config config = new Config();
    config
            .getMapConfig("myMap")
            .setMapStoreConfig(new MapStoreConfig()
                    //.setWriteDelaySeconds(1)
                    .setImplementation(myMapStore));
    TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(3);
    HazelcastInstance hc = nodeFactory.newHazelcastInstance(config);
    store.put("one", 1l);
    store.put("two", 2l);
    assertEquals(0, loadCount.get());
    assertEquals(0, storeCount.get());
    assertEquals(0, deleteCount.get());
    IMap<String, Long> myMap = hc.getMap("myMap");
    assertEquals(1l, myMap.get("one").longValue()); // served via load
    assertEquals(2l, myMap.get("two").longValue());
    // assertEquals(2, loadCount.get());
    assertEquals(0, storeCount.get());
    assertEquals(0, deleteCount.get());
    assertNull(myMap.remove("ten")); // absent key: no store/delete callback
    // assertEquals(3, loadCount.get());
    assertEquals(0, storeCount.get());
    assertEquals(0, deleteCount.get());
    myMap.put("three", 3L);
    myMap.put("four", 4L);
    // assertEquals(5, loadCount.get());
    assertEquals(2, storeCount.get());
    assertEquals(0, deleteCount.get());
    myMap.remove("one"); // present key: exactly one delete callback
    assertEquals(2, storeCount.get());
    assertEquals(1, deleteCount.get());
    // assertEquals(5, loadCount.get());
}
/**
 * Write-behind plus max-idle eviction: entries idle past maxIdleSeconds are
 * evicted from the map, but every value must still have been written through
 * to the store — eviction must not lose pending write-behind data.
 */
@Test
public void testOneMemberWriteBehindWithMaxIdle() throws Exception {
    final TestEventBasedMapStore testMapStore = new TestEventBasedMapStore();
    Config config = newConfig(testMapStore, 5); // 5s write delay
    config.setProperty(GroupProperties.PROP_PARTITION_COUNT, "1");
    config.getMapConfig("default").setMaxIdleSeconds(10);
    HazelcastInstance h1 = createHazelcastInstance(config);
    final IMap map = h1.getMap("default");
    final int total = 10;
    // Map creation triggers the initial key load.
    assertTrueEventually(new AssertTask() {
        @Override
        public void run() throws Exception {
            assertEquals(TestEventBasedMapStore.STORE_EVENTS.LOAD_ALL_KEYS, testMapStore.getEvents().poll());
        }
    });
    for (int i = 0; i < total; i++) {
        map.put(i, "value" + i);
    }
    sleepSeconds(10); // let entries pass the max-idle threshold
    assertTrueEventually(new AssertTask() {
        @Override
        public void run() throws Exception {
            assertEquals(0, map.size()); // everything evicted
        }
    });
    assertEquals(total, testMapStore.getStore().size()); // but nothing lost
}
/**
 * Write-behind interaction with explicit evict()/remove():
 * evicting already-persisted entries must not trigger new store calls or lose
 * data, re-populating and evicting again must store the fresh values, and
 * removing entries must delete them behind.
 *
 * Fix: the two bare {@code await(...)} calls (store latch after the second
 * eviction round, delete latch after clearing) ignored the boolean result,
 * so a timeout would have gone unnoticed — they are now wrapped in
 * {@code assertTrue}, consistent with the earlier awaits in this method.
 */
@Test
public void testOneMemberWriteBehindWithEvictions() throws Exception {
    final String mapName = "testOneMemberWriteBehindWithEvictions";
    final TestEventBasedMapStore testMapStore = new TestEventBasedMapStore();
    testMapStore.loadAllLatch = new CountDownLatch(1);
    final Config config = newConfig(testMapStore, 2); // 2s write delay
    final HazelcastInstance node1 = createHazelcastInstance(config);
    final IMap map = node1.getMap(mapName);
    // check if load all called.
    assertTrue("map store loadAllKeys must be called", testMapStore.loadAllLatch.await(10, TimeUnit.SECONDS));
    // map population count.
    final int populationCount = 100;
    // latch for store & storeAll events.
    testMapStore.storeLatch = new CountDownLatch(populationCount);
    //populate map.
    for (int i = 0; i < populationCount; i++) {
        map.put(i, "value" + i);
    }
    //wait for all store ops.
    assertTrue(testMapStore.storeLatch.await(10, TimeUnit.SECONDS));
    // init before eviction.
    testMapStore.storeLatch = new CountDownLatch(populationCount);
    //evict.
    for (int i = 0; i < populationCount; i++) {
        map.evict(i);
    }
    //expect no store op.
    assertEquals(populationCount, testMapStore.storeLatch.getCount());
    //check store size
    assertEquals(populationCount, testMapStore.getStore().size());
    //check map size
    assertEquals(0, map.size());
    //re-populate map.
    for (int i = 0; i < populationCount; i++) {
        map.put(i, "value" + i);
    }
    //evict again.
    for (int i = 0; i < populationCount; i++) {
        map.evict(i);
    }
    //wait for all store ops (fail loudly on timeout).
    assertTrue(testMapStore.storeLatch.await(10, TimeUnit.SECONDS));
    //check store size
    assertEquals(populationCount, testMapStore.getStore().size());
    //check map size
    assertEquals(0, map.size());
    //re-populate map.
    for (int i = 0; i < populationCount; i++) {
        map.put(i, "value" + i);
    }
    testMapStore.deleteLatch = new CountDownLatch(populationCount);
    //clear map.
    for (int i = 0; i < populationCount; i++) {
        map.remove(i);
    }
    //wait for all delete ops (fail loudly on timeout).
    assertTrue(testMapStore.deleteLatch.await(10, TimeUnit.SECONDS));
    //check map size
    assertEquals(0, map.size());
}
/**
 * Core write-behind semantics on a single member: the store lags the map
 * until flush()/eviction forces the pending write out, and removals are
 * likewise deleted behind after the write delay.
 */
@Test
public void testOneMemberWriteBehind() throws Exception {
    TestMapStore testMapStore = new TestMapStore(1, 1, 1);
    testMapStore.setLoadAllKeys(false);
    Config config = newConfig(testMapStore, 2); // 2s write delay
    TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(3);
    HazelcastInstance h1 = nodeFactory.newHazelcastInstance(config);
    testMapStore.insert("1", "value1"); // pre-seed the store directly
    IMap map = h1.getMap("default");
    assertEquals(0, map.size());
    assertEquals("value1", map.get("1")); // loaded on demand
    assertEquals("value1", map.put("1", "value2"));
    assertEquals("value2", map.get("1"));
    // store should have the old data as we will write-behind
    assertEquals("value1", testMapStore.getStore().get("1"));
    assertEquals(1, map.size());
    map.flush(); // force the pending write out now
    assertTrue(map.evict("1"));
    assertEquals("value2", testMapStore.getStore().get("1"));
    assertEquals(0, map.size());
    assertEquals(1, testMapStore.getStore().size());
    assertEquals("value2", map.get("1")); // re-loaded from the store
    assertEquals(1, map.size());
    map.remove("1");
    // store should have the old data as we will delete-behind
    assertEquals(1, testMapStore.getStore().size());
    assertEquals(0, map.size());
    testMapStore.assertAwait(12); // wait out the delete-behind delay
    assertEquals(0, testMapStore.getStore().size());
}

/**
 * Two write-behind updates to the SAME key in different delay windows must
 * both reach the store, with the later value winning.
 */
@Test
public void testWriteBehindUpdateSameKey() throws Exception {
    final TestMapStore testMapStore = new TestMapStore(2, 0, 0);
    testMapStore.setLoadAllKeys(false);
    Config config = newConfig(testMapStore, 5); // 5s write delay
    TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(2);
    HazelcastInstance h1 = nodeFactory.newHazelcastInstance(config);
    HazelcastInstance h2 = nodeFactory.newHazelcastInstance(config);
    IMap<Object, Object> map = h1.getMap("map");
    map.put("key", "value");
    Thread.sleep(2000); // land the second put in a later scheduling window
    map.put("key", "value2");
    assertTrueEventually(new AssertTask() {
        @Override
        public void run() throws Exception {
            assertEquals("value2", testMapStore.getStore().get("key"));
        }
    });
}
@Test
public void testOneMemberWriteBehindFlush() throws Exception {
TestMapStore testMapStore = new TestMapStore(1, 1, 1);
testMapStore.setLoadAllKeys(false);
Config config = newConfig(testMapStore, 2);
TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(3);
HazelcastInstance h1 = nodeFactory.newHazelcastInstance(config);
IMap map = h1.getMap("default");
assertEquals(0, map.size());
assertEquals(null, map.put("1", "value1"));
assertEquals("value1", map.get("1"));
assertEquals(null, testMapStore.getStore().get("1"));
assertEquals(1, map.size());
map.flush();
assertEquals("value1", testMapStore.getStore().get("1"));
}
/**
 * With write-behind (1s delay), verifies the store observes events in order:
 * LOAD_ALL_KEYS on startup, then LOAD and STORE for a put on a missing key,
 * then DELETE for the remove.
 */
@Test
public void testOneMemberWriteBehind2() throws Exception {
    final TestEventBasedMapStore testMapStore = new TestEventBasedMapStore();
    testMapStore.setLoadAllKeys(false);
    Config config = newConfig(testMapStore, 1);
    HazelcastInstance h1 = createHazelcastInstance(config);
    IMap map = h1.getMap("default");
    assertTrueEventually(new AssertTask() {
        @Override
        public void run() throws Exception {
            assertEquals(TestEventBasedMapStore.STORE_EVENTS.LOAD_ALL_KEYS, testMapStore.getEvents().poll());
        }
    });
    map.put("1", "value1");
    // put on a missing key first triggers a LOAD, then (after the delay) a STORE
    assertTrueEventually(new AssertTask() {
        @Override
        public void run() throws Exception {
            assertEquals(TestEventBasedMapStore.STORE_EVENTS.LOAD, testMapStore.getEvents().poll());
        }
    });
    assertTrueEventually(new AssertTask() {
        @Override
        public void run() throws Exception {
            assertEquals(TestEventBasedMapStore.STORE_EVENTS.STORE, testMapStore.getEvents().poll());
        }
    });
    map.remove("1");
    assertTrueEventually(new AssertTask() {
        @Override
        public void run() throws Exception {
            assertEquals(TestEventBasedMapStore.STORE_EVENTS.DELETE, testMapStore.getEvents().poll());
        }
    });
}
/**
 * With a long write-behind delay (200s), verifies that {@code flush()} persists
 * all dirty entries (puts and, later, removes) and resets the dirty entry count.
 */
@Test
public void testOneMemberFlush() throws Exception {
    TestMapStore testMapStore = new TestMapStore(1, 1, 1);
    testMapStore.setLoadAllKeys(false);
    int size = 100;
    Config config = newConfig(testMapStore, 200);
    TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(3);
    HazelcastInstance h1 = nodeFactory.newHazelcastInstance(config);
    IMap map = h1.getMap("default");
    assertEquals(0, map.size());
    for (int i = 0; i < size; i++) {
        map.put(i, i);
    }
    assertEquals(size, map.size());
    // nothing persisted yet: the 200s delay guarantees entries are still dirty
    assertEquals(0, testMapStore.getStore().size());
    assertEquals(size, map.getLocalMapStats().getDirtyEntryCount());
    map.flush();
    assertEquals(size, testMapStore.getStore().size());
    assertEquals(0, map.getLocalMapStats().getDirtyEntryCount());
    assertEquals(size, map.size());
    for (int i = 0; i < size / 2; i++) {
        map.remove(i);
    }
    assertEquals(size / 2, map.size());
    // removes are delete-behind: store still holds all entries until the next flush
    assertEquals(size, testMapStore.getStore().size());
    map.flush();
    assertEquals(size / 2, testMapStore.getStore().size());
    assertEquals(size / 2, map.size());
}
/**
 * A graceful shutdown must flush all pending write-behind entries to the
 * store and invoke the store's destroy callback exactly once.
 */
@Test
public void testOneMemberFlushOnShutdown() throws Exception {
    TestMapStore testMapStore = new TestMapStore(1, 1, 1);
    testMapStore.setLoadAllKeys(false);
    Config config = newConfig(testMapStore, 200);
    TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(3);
    HazelcastInstance h1 = nodeFactory.newHazelcastInstance(config);
    IMap map1 = h1.getMap("default");
    assertEquals(0, map1.size());
    for (int i = 0; i < 100; i++) {
        map1.put(i, i);
    }
    assertEquals(100, map1.size());
    assertEquals(0, testMapStore.getStore().size());
    h1.getLifecycleService().shutdown();
    assertEquals(100, testMapStore.getStore().size());
    assertEquals(1, testMapStore.getDestroyCount());
}
/**
 * Write-through (delay 0) lifecycle: load-on-get, store-on-put, evict keeps the
 * store intact, remove deletes from the store; also verifies store lifecycle
 * callbacks (init count, map name, owning instance).
 * NOTE(review): despite the name, no index is added in this test — confirm
 * whether an {@code addIndex} call was intended here.
 */
@Test
public void testOneMemberWriteThroughWithIndex() throws Exception {
    TestMapStore testMapStore = new TestMapStore(1, 1, 1);
    testMapStore.setLoadAllKeys(false);
    Config config = newConfig(testMapStore, 0);
    TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(3);
    HazelcastInstance h1 = nodeFactory.newHazelcastInstance(config);
    testMapStore.insert("1", "value1");
    IMap map = h1.getMap("default");
    assertEquals(0, map.size());
    assertTrue(map.tryLock("1", 1, TimeUnit.SECONDS));
    assertEquals("value1", map.get("1"));
    map.unlock("1");
    assertEquals("value1", map.put("1", "value2"));
    assertEquals("value2", map.get("1"));
    // write-through: the store sees the update immediately
    assertEquals("value2", testMapStore.getStore().get("1"));
    assertEquals(1, map.size());
    assertTrue(map.evict("1"));
    assertEquals(0, map.size());
    assertEquals(1, testMapStore.getStore().size());
    assertEquals("value2", map.get("1"));
    assertEquals(1, map.size());
    map.remove("1");
    assertEquals(0, map.size());
    assertEquals(0, testMapStore.getStore().size());
    testMapStore.assertAwait(1);
    assertEquals(1, testMapStore.getInitCount());
    assertEquals("default", testMapStore.getMapName());
    assertEquals(TestUtil.getNode((HazelcastInstanceProxy) h1), TestUtil.getNode(testMapStore.getHazelcastInstance()));
}
/**
 * With write-through and an LRU max-size of {@code size}, putting {@code 2*size}
 * entries must evict from the in-memory map (bounded between size/2 and size)
 * while the store — written through on every put — retains all {@code 2*size} entries.
 */
@Test
public void testOneMemberWriteThroughWithLRU() throws Exception {
    final int size = 10000;
    TestMapStore testMapStore = new TestMapStore(size * 2, 1, 1);
    testMapStore.setLoadAllKeys(false);
    Config config = newConfig(testMapStore, 0);
    // single partition so the per-partition max-size check applies to the whole map
    config.setProperty(GroupProperties.PROP_PARTITION_COUNT, "1");
    MaxSizeConfig maxSizeConfig = new MaxSizeConfig();
    maxSizeConfig.setSize(size);
    MapConfig mapConfig = config.getMapConfig("default");
    mapConfig.setEvictionPolicy(MapConfig.EvictionPolicy.LRU);
    mapConfig.setMaxSizeConfig(maxSizeConfig);
    TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(3);
    HazelcastInstance h1 = nodeFactory.newHazelcastInstance(config);
    IMap map = h1.getMap("default");
    final CountDownLatch countDownLatch = new CountDownLatch(10000);
    map.addEntryListener(new EntryAdapter() {
        @Override
        public void entryEvicted(EntryEvent event) {
            countDownLatch.countDown();
        }
    }, false);
    for (int i = 0; i < size * 2; i++) {
        // trigger eviction.
        if (i == (size * 2) - 1 || i == size) {
            sleepMillis(1001);
        }
        map.put(i, new Employee("joe", i, true, 100.00));
    }
    assertEquals(testMapStore.getStore().size(), size * 2);
    assertOpenEventually(countDownLatch);
    final String msgFailure = String.format("map size: %d put count: %d", map.size(), size);
    assertTrue(msgFailure, map.size() > size / 2);
    assertTrue(msgFailure, map.size() <= size);
    assertEquals(testMapStore.getStore().size(), size * 2);
}
/**
 * Exhaustive single-member write-through contract check: get loads from the
 * store, put/remove/putIfAbsent are reflected in the store synchronously, and
 * evicted entries are transparently reloaded on the next get.
 */
@Test
public void testOneMemberWriteThrough() throws Exception {
    TestMapStore testMapStore = new TestMapStore(1, 1, 1);
    testMapStore.setLoadAllKeys(false);
    Config config = newConfig(testMapStore, 0);
    HazelcastInstance h1 = createHazelcastInstance(config);
    Employee employee = new Employee("joe", 25, true, 100.00);
    Employee newEmployee = new Employee("ali", 26, true, 1000);
    testMapStore.insert("1", employee);
    testMapStore.insert("2", employee);
    testMapStore.insert("3", employee);
    testMapStore.insert("4", employee);
    testMapStore.insert("5", employee);
    testMapStore.insert("6", employee);
    testMapStore.insert("7", employee);
    IMap map = h1.getMap("default");
    map.addIndex("name", false);
    assertEquals(0, map.size());
    assertEquals(employee, map.get("1"));
    assertEquals(employee, testMapStore.getStore().get("1"));
    assertEquals(1, map.size());
    assertEquals(employee, map.put("2", newEmployee));
    assertEquals(newEmployee, testMapStore.getStore().get("2"));
    assertEquals(2, map.size());
    map.remove("1");
    // re-put with a 1s TTL and then without TTL; the second put must win
    map.put("1", employee, 1, TimeUnit.SECONDS);
    map.put("1", employee);
    Thread.sleep(2000);
    assertEquals(employee, testMapStore.getStore().get("1"));
    assertEquals(employee, map.get("1"));
    map.evict("2");
    // evicted entry is reloaded from the store on get
    assertEquals(newEmployee, map.get("2"));
    assertEquals(employee, map.get("3"));
    assertEquals(employee, map.put("3", newEmployee));
    assertEquals(newEmployee, map.get("3"));
    assertEquals(employee, map.remove("4"));
    assertEquals(employee, map.get("5"));
    assertEquals(employee, map.remove("5"));
    // putIfAbsent on an existing store-backed entry returns the stored value unchanged
    assertEquals(employee, map.putIfAbsent("6", newEmployee));
    assertEquals(employee, map.get("6"));
    assertEquals(employee, testMapStore.getStore().get("6"));
    assertTrue(map.containsKey("7"));
    assertEquals(employee, map.get("7"));
    assertNull(map.get("8"));
    assertFalse(map.containsKey("8"));
    assertNull(map.putIfAbsent("8", employee));
    assertEquals(employee, map.get("8"));
    assertEquals(employee, testMapStore.getStore().get("8"));
}
/**
 * Write-through with two members: store contents track the map through get,
 * put and remove, and the store receives the expected total number of callbacks.
 */
@Test
public void testTwoMemberWriteThrough() throws Exception {
    TestMapStore testMapStore = new TestMapStore(1, 1, 1);
    testMapStore.setLoadAllKeys(false);
    Config config = newConfig(testMapStore, 0);
    TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(3);
    HazelcastInstance h1 = nodeFactory.newHazelcastInstance(config);
    HazelcastInstance h2 = nodeFactory.newHazelcastInstance(config);
    Employee employee = new Employee("joe", 25, true, 100.00);
    Employee employee2 = new Employee("jay", 35, false, 100.00);
    testMapStore.insert("1", employee);
    IMap map = h1.getMap("default");
    map.addIndex("name", false);
    assertEquals(0, map.size());
    assertEquals(employee, map.get("1"));
    assertEquals(employee, testMapStore.getStore().get("1"));
    assertEquals(1, map.size());
    map.put("2", employee2);
    assertEquals(employee2, testMapStore.getStore().get("2"));
    assertEquals(2, testMapStore.getStore().size());
    assertEquals(2, map.size());
    map.remove("2");
    assertEquals(1, testMapStore.getStore().size());
    assertEquals(1, map.size());
    testMapStore.assertAwait(10);
    // exact callback tally across both members for the operations above
    assertEquals(6, testMapStore.callCount.get());
}
/**
 * Write-through with two members and 1000 puts: both members see all entries,
 * and the store receives exactly one load and one store per put plus one
 * loadAllKeys per member.
 */
@Test
public void testTwoMemberWriteThrough2() throws Exception {
    TestMapStore testMapStore = new TestMapStore(1000, 0, 0);
    Config config = newConfig(testMapStore, 0);
    TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(3);
    HazelcastInstance h1 = nodeFactory.newHazelcastInstance(config);
    HazelcastInstance h2 = nodeFactory.newHazelcastInstance(config);
    IMap map1 = h1.getMap("default");
    IMap map2 = h2.getMap("default");
    for (int i = 0; i < 1000; i++) {
        map1.put(i, "value" + i);
    }
    assertTrue("store operations could not be done wisely ",
            testMapStore.latchStore.await(30, TimeUnit.SECONDS));
    assertEquals(1000, testMapStore.getStore().size());
    assertEquals(1000, map1.size());
    assertEquals(1000, map2.size());
    testMapStore.assertAwait(10);
    // 1000 put-load 1000 put-store call and 2 loadAllKeys
    assertEquals(2002, testMapStore.callCount.get());
}
/**
 * When both load and store fail, every map operation that touches the store
 * must propagate the failure, the load attempts must still be counted, and a
 * failed put must leave the map empty (no partial write).
 */
@Test
public void testOneMemberWriteThroughFailingStore() throws Exception {
    FailAwareMapStore testMapStore = new FailAwareMapStore();
    testMapStore.setFail(true, true);
    Config config = newConfig(testMapStore, 0);
    TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(3);
    HazelcastInstance h1 = nodeFactory.newHazelcastInstance(config);
    IMap map = h1.getMap("default");
    assertEquals(0, map.size());
    try {
        map.get("1");
        fail("should have thrown exception");
    } catch (Exception e) {
        // expected: load is configured to fail
    }
    assertEquals(1, testMapStore.loads.get());
    try {
        map.get("1");
        fail("should have thrown exception");
    } catch (Exception e) {
        // expected: failed loads are not cached, so a second load is attempted
    }
    assertEquals(2, testMapStore.loads.get());
    try {
        map.put("1", "value");
        fail("should have thrown exception");
    } catch (Exception e) {
        // expected: store is configured to fail
    }
    assertEquals(0, testMapStore.stores.get());
    assertEquals(0, map.size());
}
/**
 * When only the store side fails (loads succeed), a put must propagate the
 * store failure and must not leave a phantom entry in the map.
 */
@Test
public void testOneMemberWriteThroughFailingStore2() throws Exception {
    FailAwareMapStore testMapStore = new FailAwareMapStore();
    testMapStore.setFail(true, false);
    Config config = newConfig(testMapStore, 0);
    TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(3);
    HazelcastInstance h1 = nodeFactory.newHazelcastInstance(config);
    IMap map = h1.getMap("default");
    assertEquals(0, map.size());
    try {
        map.put("1", "value");
        fail("should have thrown exception");
    } catch (Exception e) {
        // expected: store is configured to fail
    }
    assertEquals(0, map.size());
}
// fails randomly
/**
 * Pre-populates the store with 1000 entries and verifies all members see the
 * full key set after initial load, including a member that joins later, and
 * that data survives that member shutting down.
 * NOTE(review): the boolean result of checkIfMapLoaded is ignored below; if
 * loading times out the assertions run anyway — possibly the source of the
 * random failures. Consider asserting the return value.
 */
@Test
public void testGetAllKeys() throws Exception {
    TestEventBasedMapStore testMapStore = new TestEventBasedMapStore();
    Map store = testMapStore.getStore();
    Set keys = new HashSet();
    int size = 1000;
    for (int i = 0; i < size; i++) {
        store.put(i, "value" + i);
        keys.add(i);
    }
    Config config = newConfig(testMapStore, 2);
    TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(3);
    HazelcastInstance h1 = nodeFactory.newHazelcastInstance(config);
    HazelcastInstance h2 = nodeFactory.newHazelcastInstance(config);
    IMap map1 = h1.getMap("default");
    IMap map2 = h2.getMap("default");
    checkIfMapLoaded("default", h1);
    checkIfMapLoaded("default", h2);
    assertEquals("value1", map1.get(1));
    assertEquals("value1", map2.get(1));
    assertEquals(1000, map1.size());
    assertEquals(1000, map2.size());
    HazelcastInstance h3 = nodeFactory.newHazelcastInstance(config);
    IMap map3 = h3.getMap("default");
    checkIfMapLoaded("default", h3);
    assertEquals("value1", map1.get(1));
    assertEquals("value1", map2.get(1));
    assertEquals("value1", map3.get(1));
    assertEquals(1000, map1.size());
    assertEquals(1000, map2.size());
    assertEquals(1000, map3.size());
    h3.shutdown();
    assertEquals("value1", map1.get(1));
    assertEquals("value1", map2.get(1));
    assertEquals(1000, map1.size());
    assertEquals(1000, map2.size());
}
/**
 * Polls the partition record stores of {@code mapName} on the given instance
 * until every existing record store reports that initial loading has finished,
 * or a one-minute deadline passes.
 *
 * @return true if all visible record stores were loaded before the deadline
 */
private boolean checkIfMapLoaded(String mapName, HazelcastInstance instance) throws InterruptedException {
    NodeEngineImpl nodeEngine = TestUtil.getNode(instance).nodeEngine;
    int partitionCount = nodeEngine.getPartitionService().getPartitionCount();
    MapService service = nodeEngine.getService(MapService.SERVICE_NAME);
    final long deadline = System.currentTimeMillis() + TimeUnit.MINUTES.toMillis(1);
    boolean loaded = false;
    while (!loaded) {
        for (int partitionId = 0; partitionId < partitionCount; partitionId++) {
            RecordStore recordStore = service.getPartitionContainer(partitionId).getRecordStore(mapName);
            if (recordStore == null) {
                // record store not created for this partition yet; keep previous result
                continue;
            }
            loaded = recordStore.isLoaded();
            if (!loaded) {
                break;
            }
        }
        if (System.currentTimeMillis() >= deadline) {
            break;
        }
        // brief pause so this polling loop does not spin the CPU
        Thread.sleep(10);
    }
    return loaded;
}
/*
 * Test for Issue 572: IMap.clear() must also delete all entries from the
 * backing map store, not only the in-memory map.
 */
@Test
public void testMapstoreDeleteOnClear() throws Exception {
    Config config = new Config();
    SimpleMapStore store = new SimpleMapStore();
    config.getMapConfig("testMapstoreDeleteOnClear").setMapStoreConfig(new MapStoreConfig().setEnabled(true).setImplementation(store));
    HazelcastInstance hz = createHazelcastInstance(config);
    IMap<Object, Object> map = hz.getMap("testMapstoreDeleteOnClear");
    int size = 10;
    for (int i = 0; i < size; i++) {
        map.put(i, i);
    }
    assertEquals(size, map.size());
    assertEquals(size, store.store.size());
    assertEquals(size, store.loadAllKeys().size());
    map.clear();
    assertEquals(0, map.size());
    // clear() must have propagated deletes to the store
    assertEquals(0, store.loadAllKeys().size());
}
// bug: store is called twice on loadAll
/**
 * Regression test for issue 1070: getting keys across all partitions must not
 * cause the store to be invoked twice for the same key (NoDuplicateMapStore
 * records a failure if it ever sees a duplicate store call).
 */
@Test
public void testIssue1070() throws InterruptedException {
    final NoDuplicateMapStore myMapStore = new NoDuplicateMapStore();
    myMapStore.store.put(1, 2);
    Config config = new Config();
    config
            .getMapConfig("testIssue1070")
            .setMapStoreConfig(new MapStoreConfig()
                    .setImplementation(myMapStore));
    TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(2);
    HazelcastInstance hc = nodeFactory.newHazelcastInstance(config);
    HazelcastInstance hc2 = nodeFactory.newHazelcastInstance(config);
    IMap<Object, Object> map = hc.getMap("testIssue1070");
    // touch one key per partition (default partition count is 271)
    for (int i = 0; i < 271; i++) {
        map.get(i);
    }
    assertFalse(myMapStore.failed);
}
/**
 * Map store that flags a failure (and throws) if {@code store}/{@code storeAll}
 * is ever invoked for a key that is already present in the backing store.
 */
static class NoDuplicateMapStore extends TestMapStore {
    boolean failed = false;

    @Override
    public void store(Object key, Object value) {
        rejectDuplicate(key);
        super.store(key, value);
    }

    @Override
    public void storeAll(Map map) {
        for (Object key : map.keySet()) {
            rejectDuplicate(key);
        }
        super.storeAll(map);
    }

    // marks the test as failed and aborts when a key would be stored twice
    private void rejectDuplicate(Object key) {
        if (store.containsKey(key)) {
            failed = true;
            throw new RuntimeException("duplicate is not allowed");
        }
    }
}
/**
 * Regression test for issue 806: a get that loads null from the store must not
 * cache the absence; once the store gains the key, a later get must return it.
 */
@Test
public void testIssue806CustomTTLForNull() {
    final ConcurrentMap<String, String> store = new ConcurrentHashMap<String, String>();
    final MapStore<String, String> myMapStore = new SimpleMapStore<String, String>(store);
    Config config = new Config();
    config
            .getMapConfig("testIssue806CustomTTLForNull")
            .setMapStoreConfig(new MapStoreConfig()
                    .setImplementation(myMapStore));
    HazelcastInstance hc = createHazelcastInstance(config);
    IMap<Object, Object> map = hc.getMap("testIssue806CustomTTLForNull");
    map.get("key");
    assertNull(map.get("key"));
    // key appears in the store after the null-returning loads
    store.put("key", "value");
    assertEquals("value", map.get("key"));
}
/**
 * Regression test for issue 991: after a loader returned null for a key, a
 * subsequent put must stick — the value must still be readable later and not
 * be shadowed by the earlier null load.
 */
@Test
public void testIssue991EvictedNullIssue() throws InterruptedException {
    MapStoreConfig mapStoreConfig = new MapStoreConfig();
    mapStoreConfig.setEnabled(true);
    // loader that knows no keys and loads nothing
    mapStoreConfig.setImplementation(new MapLoader<String, String>() {
        @Override
        public String load(String key) {
            return null;
        }

        @Override
        public Map<String, String> loadAll(Collection<String> keys) {
            return null;
        }

        @Override
        public Set<String> loadAllKeys() {
            return null;
        }
    });
    Config config = new Config();
    config
            .getMapConfig("testIssue991EvictedNullIssue")
            .setMapStoreConfig(mapStoreConfig);
    HazelcastInstance hc = createHazelcastInstance(config);
    IMap<Object, Object> map = hc.getMap("testIssue991EvictedNullIssue");
    map.get("key");
    assertNull(map.get("key"));
    map.put("key", "value");
    Thread.sleep(2000);
    assertEquals("value", map.get("key"));
}
/**
 * Regression test for issue 1019: a key reported by loadAllKeys() but for
 * which loadAll() returns no value must not surface in the map (no phantom
 * key with a null value).
 */
@Test
public void testIssue1019() throws InterruptedException {
    final String keyWithNullValue = "keyWithNullValue";
    TestEventBasedMapStore testMapStore = new TestEventBasedMapStore() {
        @Override
        public Set loadAllKeys() {
            Set keys = new HashSet(super.loadAllKeys());
            // Include an extra key that will *not* be returned by loadAll().
            keys.add(keyWithNullValue);
            return keys;
        }
    };
    Map mapForStore = new HashMap();
    mapForStore.put("key1", 17);
    mapForStore.put("key2", 37);
    mapForStore.put("key3", 47);
    testMapStore.getStore().putAll(mapForStore);
    Config config = newConfig(testMapStore, 0);
    HazelcastInstance instance = createHazelcastInstance(config);
    IMap map = instance.getMap("default");
    assertEquals(map.keySet(), mapForStore.keySet());
    assertEquals(new HashSet(map.values()), new HashSet(mapForStore.values()));
    assertEquals(map.entrySet(), mapForStore.entrySet());
    // the extra key must be completely invisible
    assertFalse(map.containsKey(keyWithNullValue));
    assertNull(map.get(keyWithNullValue));
}
/**
 * Map store that rewrites the salary of every stored Employee; implementing
 * PostProcessingMapStore makes the mutated value the one kept by the map.
 */
static class ProcessingStore extends MapStoreAdapter<Integer, Employee> implements PostProcessingMapStore {
    @Override
    public void store(Integer key, Employee employee) {
        int derivedSalary = employee.getAge() * 1000;
        employee.setSalary(derivedSalary);
    }
}
/**
 * Regression test for issue 1115: with a PostProcessingMapStore that mutates
 * the value on store, every write path (put on new key, put on existing key,
 * replace, putIfAbsent) must expose the mutated value on subsequent gets.
 */
@Test
public void testIssue1115EnablingMapstoreMutatingValue() throws InterruptedException {
    Config cfg = new Config();
    String mapName = "testIssue1115";
    MapStore mapStore = new ProcessingStore();
    MapStoreConfig mapStoreConfig = new MapStoreConfig();
    mapStoreConfig.setEnabled(true);
    mapStoreConfig.setImplementation(mapStore);
    cfg.getMapConfig(mapName).setMapStoreConfig(mapStoreConfig);
    TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(2);
    HazelcastInstance instance1 = nodeFactory.newHazelcastInstance(cfg);
    HazelcastInstance instance2 = nodeFactory.newHazelcastInstance(cfg);
    IMap<Integer, Employee> map = instance1.getMap(mapName);
    Random random = new Random();
    // testing put with new object
    for (int i = 0; i < 10; i++) {
        Employee emp = new Employee();
        emp.setAge(random.nextInt(20) + 20);
        map.put(i, emp);
    }
    for (int i = 0; i < 10; i++) {
        Employee employee = map.get(i);
        assertEquals(employee.getAge() * 1000, employee.getSalary(), 0);
    }
    // testing put with existing object
    for (int i = 0; i < 10; i++) {
        Employee emp = map.get(i);
        emp.setAge(random.nextInt(20) + 20);
        map.put(i, emp);
    }
    for (int i = 0; i < 10; i++) {
        Employee employee = map.get(i);
        assertEquals(employee.getAge() * 1000, employee.getSalary(), 0);
    }
    // testing put with replace
    for (int i = 0; i < 10; i++) {
        Employee emp = map.get(i);
        emp.setAge(random.nextInt(20) + 20);
        map.replace(i, emp);
    }
    for (int i = 0; i < 10; i++) {
        Employee employee = map.get(i);
        assertEquals(employee.getAge() * 1000, employee.getSalary(), 0);
    }
    // testing put with putIfAbsent
    for (int i = 10; i < 20; i++) {
        Employee emp = new Employee();
        emp.setAge(random.nextInt(20) + 20);
        map.putIfAbsent(i, emp);
    }
    for (int i = 10; i < 20; i++) {
        Employee employee = map.get(i);
        assertEquals(employee.getAge() * 1000, employee.getSalary(), 0);
    }
}
/**
 * Regression test for issue 1110: with a chunked initial load (chunk size 5),
 * every loaded entry must fire an entryAdded event and the map must end up
 * with all loader-provided entries.
 */
@Test
public void testIssue1110() throws InterruptedException {
    final int mapSize = 10;
    final String mapName = "testIssue1110";
    TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(2);
    Config cfg = new Config();
    cfg.setProperty(GroupProperties.PROP_MAP_LOAD_CHUNK_SIZE, "5");
    MapStoreConfig mapStoreConfig = new MapStoreConfig();
    mapStoreConfig.setEnabled(true);
    mapStoreConfig.setImplementation(new SimpleMapLoader(mapSize, false));
    cfg.getMapConfig(mapName).setMapStoreConfig(mapStoreConfig);
    HazelcastInstance instance1 = nodeFactory.newHazelcastInstance(cfg);
    HazelcastInstance instance2 = nodeFactory.newHazelcastInstance(cfg);
    IMap map = instance1.getMap(mapName);
    final CountDownLatch latch = new CountDownLatch(mapSize);
    map.addEntryListener(new EntryAdapter() {
        @Override
        public void entryAdded(EntryEvent event) {
            latch.countDown();
        }
    }, true);
    // create all partition recordstores.
    map.size();
    //wait map load.
    latch.await();
    assertEquals(mapSize, map.size());
}
/**
 * Regression test for issue 1142: a loader whose loadAll() returns null (while
 * loadAllKeys() reports keys) must not break the map — subsequent puts and
 * size() must still work.
 */
@Test
public void testIssue1142ExceptionWhenLoadAllReturnsNull() {
    Config config = new Config();
    String mapname = "testIssue1142ExceptionWhenLoadAllReturnsNull";
    MapStoreConfig mapStoreConfig = new MapStoreConfig();
    mapStoreConfig.setImplementation(new MapStoreAdapter<String, String>() {
        @Override
        public Set<String> loadAllKeys() {
            Set keys = new HashSet();
            keys.add("key");
            return keys;
        }

        public Map loadAll(Collection keys) {
            // deliberately violates the contract by returning null
            return null;
        }
    });
    config.getMapConfig(mapname).setMapStoreConfig(mapStoreConfig);
    TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(2);
    HazelcastInstance instance = nodeFactory.newHazelcastInstance(config);
    HazelcastInstance instance2 = nodeFactory.newHazelcastInstance(config);
    final IMap map = instance.getMap(mapname);
    for (int i = 0; i < 300; i++) {
        map.put(i, i);
    }
    assertEquals(300, map.size());
}
/**
 * Regression test for issue 1085: entries pending in the write-behind queue
 * must still reach the store after a member holding some of them shuts down
 * (backups take over the queued writes).
 */
@Test
public void testIssue1085WriteBehindBackup() throws InterruptedException {
    Config config = new Config();
    String name = "testIssue1085WriteBehindBackup";
    MapConfig writeBehindBackup = config.getMapConfig(name);
    MapStoreConfig mapStoreConfig = new MapStoreConfig();
    mapStoreConfig.setWriteDelaySeconds(5);
    int size = 1000;
    MapStoreWithStoreCount mapStore = new MapStoreWithStoreCount(size, 120);
    mapStoreConfig.setImplementation(mapStore);
    writeBehindBackup.setMapStoreConfig(mapStoreConfig);
    TestHazelcastInstanceFactory factory = createHazelcastInstanceFactory(3);
    HazelcastInstance instance = factory.newHazelcastInstance(config);
    HazelcastInstance instance2 = factory.newHazelcastInstance(config);
    final IMap map = instance.getMap(name);
    for (int i = 0; i < size; i++) {
        map.put(i, i);
    }
    // kill one member before the 5s write delay elapses
    instance2.getLifecycleService().shutdown();
    mapStore.awaitStores();
}
/**
 * Variant of the issue 1085 test with a slow map store: after one of three
 * members shuts down, all queued write-behind entries must still be stored at
 * least the expected number of times.
 */
@Test
public void testIssue1085WriteBehindBackupWithLongRunnigMapStore() throws InterruptedException {
    final String name = randomMapName("testIssue1085WriteBehindBackup");
    final int expectedStoreCount = 3;
    final int nodeCount = 3;
    Config config = new Config();
    config.setProperty(GroupProperties.PROP_MAP_REPLICA_WAIT_SECONDS_FOR_SCHEDULED_OPERATIONS, "30");
    MapConfig writeBehindBackupConfig = config.getMapConfig(name);
    MapStoreConfig mapStoreConfig = new MapStoreConfig();
    mapStoreConfig.setWriteDelaySeconds(5);
    final MapStoreWithStoreCount mapStore = new MapStoreWithStoreCount(expectedStoreCount, 300, 10);
    mapStoreConfig.setImplementation(mapStore);
    writeBehindBackupConfig.setMapStoreConfig(mapStoreConfig);
    // create nodes.
    final TestHazelcastInstanceFactory factory = createHazelcastInstanceFactory(nodeCount);
    HazelcastInstance node1 = factory.newHazelcastInstance(config);
    HazelcastInstance node2 = factory.newHazelcastInstance(config);
    HazelcastInstance node3 = factory.newHazelcastInstance(config);
    // create corresponding keys.
    final String keyOwnedByNode1 = generateKeyOwnedBy(node1);
    final String keyOwnedByNode2 = generateKeyOwnedBy(node2);
    final String keyOwnedByNode3 = generateKeyOwnedBy(node3);
    // put one key value pair per node.
    final IMap map = node1.getMap(name);
    map.put(keyOwnedByNode1, 1);
    map.put(keyOwnedByNode2, 2);
    map.put(keyOwnedByNode3, 3);
    // shutdown node2.
    node2.getLifecycleService().shutdown();
    // wait store ops. finish.
    mapStore.awaitStores();
    // we should reach at least expected store count.
    assertTrue(expectedStoreCount <= mapStore.count.intValue());
}
/**
 * Transactional variant of the issue 1085 test: entries written inside a
 * committed transaction and queued for write-behind must still reach the
 * store after one member shuts down.
 */
@Test
@Category(NightlyTest.class)
public void testIssue1085WriteBehindBackupTransactional() throws InterruptedException {
    Config config = new Config();
    String name = "testIssue1085WriteBehindBackupTransactional";
    MapConfig writeBehindBackup = config.getMapConfig(name);
    MapStoreConfig mapStoreConfig = new MapStoreConfig();
    mapStoreConfig.setWriteDelaySeconds(5);
    int size = 1000;
    MapStoreWithStoreCount mapStore = new MapStoreWithStoreCount(size, 20);
    mapStoreConfig.setImplementation(mapStore);
    writeBehindBackup.setMapStoreConfig(mapStoreConfig);
    TestHazelcastInstanceFactory factory = createHazelcastInstanceFactory(3);
    HazelcastInstance instance = factory.newHazelcastInstance(config);
    HazelcastInstance instance2 = factory.newHazelcastInstance(config);
    final IMap map = instance.getMap(name);
    TransactionContext context = instance.newTransactionContext();
    context.beginTransaction();
    TransactionalMap<Object, Object> tmap = context.getMap(name);
    for (int i = 0; i < size; i++) {
        tmap.put(i, i);
    }
    context.commitTransaction();
    // kill a member while the writes sit in the write-behind queue
    instance2.getLifecycleService().shutdown();
    mapStore.awaitStores();
}
/**
 * Multiple write-behind puts to the same key within the same second must
 * coalesce so that only the latest value per key ends up in the store.
 */
@Test
public void testWriteBehindSameSecondSameKey() throws Exception {
    final TestMapStore testMapStore = new TestMapStore(100, 0, 0); // In some cases 2 store operation may happened
    testMapStore.setLoadAllKeys(false);
    Config config = newConfig(testMapStore, 2);
    HazelcastInstance h1 = createHazelcastInstance(config);
    IMap<Object, Object> map = h1.getMap("testWriteBehindSameSecondSameKey");
    final int size1 = 20;
    final int size2 = 10;
    // rapid updates of a single key, then a batch of distinct keys
    for (int i = 0; i < size1; i++) {
        map.put("key", "value" + i);
    }
    for (int i = 0; i < size2; i++) {
        map.put("key" + i, "value" + i);
    }
    assertTrueEventually(new AssertTask() {
        @Override
        public void run() throws Exception {
            assertEquals("value" + (size1 - 1), testMapStore.getStore().get("key"));
        }
    });
    assertTrueEventually(new AssertTask() {
        @Override
        public void run() throws Exception {
            assertEquals("value" + (size2 - 1), testMapStore.getStore().get("key" + (size2 - 1)));
        }
    });
}
/**
 * Verifies that a map store declared in XML configuration is instantiated and
 * wired: the wrapper loads the keys and values defined by the test store.
 */
@Test
public void testReadingConfiguration() throws Exception {
    String mapName = "mapstore-test";
    InputStream is = getClass().getResourceAsStream("/com/hazelcast/config/hazelcast-mapstore-config.xml");
    XmlConfigBuilder builder = new XmlConfigBuilder(is);
    Config config = builder.build();
    HazelcastInstance hz = createHazelcastInstance(config);
    // reach into internals to get the MapStoreWrapper built from the XML config
    MapProxyImpl map = (MapProxyImpl) hz.getMap(mapName);
    MapService mapService = (MapService) map.getService();
    MapContainer mapContainer = mapService.getMapContainer(mapName);
    MapStoreWrapper mapStoreWrapper = mapContainer.getStore();
    Set keys = mapStoreWrapper.loadAllKeys();
    assertEquals(2, keys.size());
    assertEquals("true", mapStoreWrapper.load("my-prop-1"));
    assertEquals("foo", mapStoreWrapper.load("my-prop-2"));
}
/**
 * An entry processor applied to a map with one backup must invoke the map
 * store exactly once — the backup replica must not trigger a second store call.
 */
@Test
public void testMapStoreNotCalledFromEntryProcessorBackup() throws Exception {
    final String mapName = "testMapStoreNotCalledFromEntryProcessorBackup_" + randomString();
    final int instanceCount = 2;
    Config config = new Config();
    // Configure map with one backup and dummy map store
    MapConfig mapConfig = config.getMapConfig(mapName);
    mapConfig.setBackupCount(1);
    MapStoreConfig mapStoreConfig = new MapStoreConfig();
    MapStoreWithStoreCount mapStore = new MapStoreWithStoreCount(1, 120);
    mapStoreConfig.setImplementation(mapStore);
    mapConfig.setMapStoreConfig(mapStoreConfig);
    TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(instanceCount);
    HazelcastInstance instance1 = nodeFactory.newHazelcastInstance(config);
    HazelcastInstance instance2 = nodeFactory.newHazelcastInstance(config);
    final IMap<String, String> map = instance1.getMap(mapName);
    final String key = "key";
    final String value = "value";
    //executeOnKey
    map.executeOnKey(key, new ValueSetterEntryProcessor(value));
    mapStore.awaitStores();
    assertEquals(value, map.get(key));
    // exactly one store call: owner only, not the backup
    assertEquals(1, mapStore.count.intValue());
}
/**
 * For each key, a put followed shortly by a remove (under write-behind with a
 * 3s delay) must reach the store in that order, leaving the store empty once
 * all stores and deletes have completed.
 */
@Test
public void testMapStoreWriteRemoveOrder() {
    final String mapName = randomMapName("testMapStoreWriteDeleteOrder");
    final int numIterations = 10;
    final int writeDelaySeconds = 3;
    // create map store implementation
    final RecordingMapStore store = new RecordingMapStore(numIterations, numIterations);
    // create hazelcast config
    final Config config = newConfig(mapName, store, writeDelaySeconds);
    // start hazelcast instance
    final HazelcastInstance hzInstance = createHazelcastInstance(config);
    // loop over num iterations
    final IMap<String, String> map = hzInstance.getMap(mapName);
    for (int k = 0; k < numIterations; k++) {
        String key = String.valueOf(k + 10); // 2 digits for sorting in output
        String value = "v:" + key;
        // add entry
        map.put(key, value);
        // sleep 300ms
        sleepMillis(1);
        // remove entry
        map.remove(key);
    }
    // wait for store to finish
    store.awaitStores();
    // wait for remove to finish
    store.awaitRemoves();
    assertEquals(0, store.getStore().keySet().size());
}
/**
 * Alternating put/remove operations on one key under write-behind must be
 * applied to the store in order, ending with exactly one stored entry.
 * At least sleep 1 second between operations so entries can fall into
 * different time slices in
 * {@link com.hazelcast.util.scheduler.SecondsBasedEntryTaskScheduler}.
 */
@Test
@Category(NightlyTest.class)
public void testWriteBehindWriteRemoveOrderOfSameKey() throws Exception {
    final String mapName = randomMapName("_testWriteBehindWriteRemoveOrderOfSameKey_");
    final int iterationCount = 5;
    final int delaySeconds = 1;
    final int putOps = 3;
    final int removeOps = 2;
    final int expectedStoreSizeEventually = 1;
    final RecordingMapStore store = new RecordingMapStore(iterationCount * putOps, iterationCount * removeOps);
    final Config config = newConfig(store, delaySeconds);
    final HazelcastInstance node = createHazelcastInstance(config);
    final IMap<Object, Object> map = node.getMap(mapName);
    for (int i = 0; i < iterationCount; i++) {
        String key = "key";
        String value = "value" + i;
        map.put(key, value);
        sleepMillis(1000);
        map.remove(key);
        sleepMillis(1000);
        map.put(key, value);
        sleepMillis(1000);
        map.remove(key);
        sleepMillis(1000);
        map.put(key, value);
        sleepMillis(1000);
    }
    assertTrueEventually(new AssertTask() {
        @Override
        public void run() throws Exception {
            assertEquals(expectedStoreSizeEventually, store.getStore().size());
        }
    });
}
public static class RecordingMapStore implements MapStore<String, String> {
private static final boolean DEBUG = false;
private final CountDownLatch expectedStore;
private final CountDownLatch expectedRemove;
private final ConcurrentHashMap<String, String> store;
public RecordingMapStore(int expectedStore, int expectedRemove) {
this.expectedStore = new CountDownLatch(expectedStore);
this.expectedRemove = new CountDownLatch(expectedRemove);
this.store = new ConcurrentHashMap<String, String>();
}
public ConcurrentHashMap<String, String> getStore() {
return store;
}
@Override
public String load(String key) {
log("load(" + key + ") called.");
return store.get(key);
}
@Override
public Map<String, String> loadAll(Collection<String> keys) {
if (DEBUG) {
List<String> keysList = new ArrayList<String>(keys);
Collections.sort(keysList);
log("loadAll(" + keysList + ") called.");
}
Map<String, String> result = new HashMap<String, String>();
for (String key : keys) {
String value = store.get(key);
if (value != null) {
result.put(key, value);
}
}
return result;
}
@Override
public Set<String> loadAllKeys() {
log("loadAllKeys() called.");
Set<String> result = new HashSet<String>(store.keySet());
log("loadAllKeys result = " + result);
return result;
}
@Override
public void store(String key, String value) {
log("store(" + key + ") called.");
String valuePrev = store.put(key, value);
expectedStore.countDown();
if (valuePrev != null) {
log("- Unexpected Update (operations reordered?): " + key);
}
}
@Override
public void storeAll(Map<String, String> map) {
if (DEBUG) {
TreeSet<String> setSorted = new TreeSet<String>(map.keySet());
log("storeAll(" + setSorted + ") called.");
}
store.putAll(map);
final int size = map.keySet().size();
for (int i = 0; i < size; i++) {
expectedStore.countDown();
}
}
@Override
public void delete(String key) {
log("delete(" + key + ") called.");
String valuePrev = store.remove(key);
expectedRemove.countDown();
if (valuePrev == null) {
log("- Unnecessary delete (operations reordered?): " + key);
}
}
@Override
public void deleteAll(Collection<String> keys) {
if (DEBUG) {
List<String> keysList = new ArrayList<String>(keys);
Collections.sort(keysList);
log("deleteAll(" + keysList + ") called.");
}
for (String key : keys) {
String valuePrev = store.remove(key);
expectedRemove.countDown();
if (valuePrev == null) {
log("- Unnecessary delete (operations reordered?): " + key);
}
}
}
/** Blocks (with the test-harness timeout) until every expected store() was observed. */
public void awaitStores() {
    assertOpenEventually(expectedStore);
}
/** Blocks (with the test-harness timeout) until every expected delete() was observed. */
public void awaitRemoves() {
    assertOpenEventually(expectedRemove);
}
/** Prints the message to stdout only when DEBUG tracing is enabled. */
private void log(String msg) {
    if (DEBUG) {
        System.out.println(msg);
    }
}
}
private static class ValueSetterEntryProcessor extends AbstractEntryProcessor<String, String> {
private final String value;
ValueSetterEntryProcessor(String value) {
this.value = value;
}
public Object process(Map.Entry entry) {
entry.setValue(value);
return null;
}
}
/** Builds a Config whose "default" map is backed by the given store implementation. */
public static Config newConfig(Object storeImpl, int writeDelaySeconds) {
    return newConfig("default", storeImpl, writeDelaySeconds);
}
/**
 * Builds a Config in which the named map persists through the supplied
 * MapStore implementation with the given write-behind delay (0 = write-through).
 */
public static Config newConfig(String mapName, Object storeImpl, int writeDelaySeconds) {
    MapStoreConfig storeConfig = new MapStoreConfig();
    storeConfig.setImplementation(storeImpl);
    storeConfig.setWriteDelaySeconds(writeDelaySeconds);
    Config config = new XmlConfigBuilder().build();
    config.getMapConfig(mapName).setMapStoreConfig(storeConfig);
    return config;
}
/**
 * MapStoreFactory whose stores are read-only views over the supplied
 * Properties: all write operations are no-ops and loads resolve keys against
 * the Properties object captured at creation time.
 */
public static class BasicMapStoreFactory implements MapStoreFactory<String, String> {
    @Override
    public MapLoader<String, String> newMapStore(String mapName, final Properties properties) {
        return new MapStore<String, String>() {
            @Override
            public void store(String key, String value) {
                // read-only store: writes are intentionally ignored
            }
            @Override
            public void storeAll(Map<String, String> map) {
            }
            @Override
            public void delete(String key) {
            }
            @Override
            public void deleteAll(Collection<String> keys) {
            }
            @Override
            public String load(String key) {
                // key is already a String; the previous key.toString() was redundant
                return properties.getProperty(key);
            }
            @Override
            public Map<String, String> loadAll(Collection<String> keys) {
                Map<String, String> map = new HashMap<String, String>();
                for (String key : keys) {
                    map.put(key, properties.getProperty(key));
                }
                return map;
            }
            @Override
            public Set<String> loadAllKeys() {
                return new HashSet<String>(properties.stringPropertyNames());
            }
        };
    }
}
/**
 * SimpleMapStore that additionally counts stored entries, releases a latch
 * once per expected store, and can artificially delay storeAll() to simulate
 * a slow backing store.
 */
public static class MapStoreWithStoreCount extends SimpleMapStore {
    /** Released once per stored entry; sized with the expected store count. */
    final CountDownLatch latch;
    /** Maximum seconds awaitStores() waits for the latch. */
    final int waitSecond;
    /** Total number of stored entries observed so far. */
    final AtomicInteger count = new AtomicInteger(0);
    /** Artificial delay applied at the start of storeAll(), in seconds. */
    final int sleepStoreAllSeconds;

    public MapStoreWithStoreCount(int expectedStore, int seconds) {
        this(expectedStore, seconds, 0);
    }

    public MapStoreWithStoreCount(int expectedStore, int seconds, int sleepStoreAllSeconds) {
        latch = new CountDownLatch(expectedStore);
        waitSecond = seconds;
        this.sleepStoreAllSeconds = sleepStoreAllSeconds;
    }

    public void awaitStores() {
        assertOpenEventually(latch, waitSecond);
    }

    @Override
    public void store(Object key, Object value) {
        latch.countDown();
        super.store(key, value);
        count.incrementAndGet();
    }

    @Override
    public void storeAll(Map map) {
        if (sleepStoreAllSeconds > 0) {
            try {
                TimeUnit.SECONDS.sleep(sleepStoreAllSeconds);
            } catch (InterruptedException e) {
                // restore the interrupt flag instead of swallowing it
                Thread.currentThread().interrupt();
            }
        }
        // Count once per entry; iterating keySet() only to count was wasteful.
        final int size = map.size();
        for (int i = 0; i < size; i++) {
            latch.countDown();
            count.incrementAndGet();
        }
        super.storeAll(map);
    }
}
/**
 * MapStore test double that records every callback both as an event on a
 * blocking queue (in invocation order) and in per-operation counters, so
 * tests can assert on which callbacks ran and how often.
 */
public static class TestEventBasedMapStore<K, V> implements MapLoaderLifecycleSupport, MapStore<K, V> {

    /** One constant per MapStore/MapLoader callback type. */
    protected enum STORE_EVENTS {
        STORE, STORE_ALL, DELETE, DELETE_ALL, LOAD, LOAD_ALL, LOAD_ALL_KEYS
    }

    // Backing storage of the fake persistence layer.
    protected final Map<K, V> store = new ConcurrentHashMap();
    // Receives one STORE_EVENTS value per callback, in invocation order.
    protected final BlockingQueue events = new LinkedBlockingQueue();
    protected final AtomicInteger storeCount = new AtomicInteger();
    // NOTE(review): storeAllCount is never incremented; storeAll() only bumps callCount.
    protected final AtomicInteger storeAllCount = new AtomicInteger();
    protected final AtomicInteger loadCount = new AtomicInteger();
    // Incremented by every callback regardless of type.
    protected final AtomicInteger callCount = new AtomicInteger();
    protected final AtomicInteger initCount = new AtomicInteger();
    // Captured from init(); lets tests verify lifecycle wiring.
    protected HazelcastInstance hazelcastInstance;
    protected Properties properties;
    protected String mapName;
    // When false, loadAllKeys() returns null, which disables eager preloading.
    protected boolean loadAllKeys = true;
    // Optional latches; when non-null they are counted down by the matching callback.
    protected CountDownLatch storeLatch;
    protected CountDownLatch deleteLatch;
    protected CountDownLatch loadAllLatch;

    /** MapLoaderLifecycleSupport: captures the owning instance, properties and map name. */
    public void init(HazelcastInstance hazelcastInstance, Properties properties, String mapName) {
        this.hazelcastInstance = hazelcastInstance;
        this.properties = properties;
        this.mapName = mapName;
        initCount.incrementAndGet();
    }

    public BlockingQueue getEvents() {
        return events;
    }

    public void destroy() {
    }

    public int getEventCount() {
        return events.size();
    }

    public int getInitCount() {
        return initCount.get();
    }

    public boolean isLoadAllKeys() {
        return loadAllKeys;
    }

    public void setLoadAllKeys(boolean loadAllKeys) {
        this.loadAllKeys = loadAllKeys;
    }

    public HazelcastInstance getHazelcastInstance() {
        return hazelcastInstance;
    }

    public String getMapName() {
        return mapName;
    }

    public Properties getProperties() {
        return properties;
    }

    Map getStore() {
        return store;
    }

    /** Seeds the backing store directly, without recording an event. */
    public void insert(K key, V value) {
        store.put(key, value);
    }

    public void store(K key, V value) {
        store.put(key, value);
        callCount.incrementAndGet();
        storeCount.incrementAndGet();
        if (storeLatch != null) {
            storeLatch.countDown();
        }
        events.offer(STORE_EVENTS.STORE);
    }

    public V load(K key) {
        callCount.incrementAndGet();
        loadCount.incrementAndGet();
        events.offer(STORE_EVENTS.LOAD);
        return store.get(key);
    }

    public void storeAll(Map map) {
        store.putAll(map);
        callCount.incrementAndGet();
        final int size = map.size();
        if (storeLatch != null) {
            // Count down once per stored entry, not once per batch.
            for (int i = 0; i < size; i++) {
                storeLatch.countDown();
            }
        }
        events.offer(STORE_EVENTS.STORE_ALL);
    }

    public void delete(K key) {
        store.remove(key);
        callCount.incrementAndGet();
        if (deleteLatch != null) {
            deleteLatch.countDown();
        }
        events.offer(STORE_EVENTS.DELETE);
    }

    public Set<K> loadAllKeys() {
        if (loadAllLatch != null) {
            loadAllLatch.countDown();
        }
        callCount.incrementAndGet();
        events.offer(STORE_EVENTS.LOAD_ALL_KEYS);
        // Returning null tells the caller there is nothing to preload.
        if (!loadAllKeys) return null;
        return store.keySet();
    }

    public Map loadAll(Collection keys) {
        Map map = new HashMap(keys.size());
        for (Object key : keys) {
            Object value = store.get(key);
            if (value != null) {
                map.put(key, value);
            }
        }
        callCount.incrementAndGet();
        events.offer(STORE_EVENTS.LOAD_ALL);
        return map;
    }

    public void deleteAll(Collection keys) {
        for (Object key : keys) {
            store.remove(key);
        }
        callCount.incrementAndGet();
        if (deleteLatch != null) {
            // One countdown per requested key, even if the key was absent.
            for (int i = 0; i < keys.size(); i++) {
                deleteLatch.countDown();
            }
        }
        events.offer(STORE_EVENTS.DELETE_ALL);
    }
}
/**
 * MapStore test double with switchable failure injection: when storeFail is
 * set, all write-path callbacks (store/storeAll/delete/deleteAll) throw; when
 * loadFail is set, load/loadAll throw. Per-operation counters are updated and
 * listeners are notified in finally blocks, i.e. even when the call fails.
 */
public static class FailAwareMapStore implements MapStore {

    final Map db = new ConcurrentHashMap();

    // Per-operation invocation counters (incremented on success AND failure).
    final AtomicLong deletes = new AtomicLong();
    final AtomicLong deleteAlls = new AtomicLong();
    final AtomicLong stores = new AtomicLong();
    final AtomicLong storeAlls = new AtomicLong();
    final AtomicLong loads = new AtomicLong();
    final AtomicLong loadAlls = new AtomicLong();
    final AtomicLong loadAllKeys = new AtomicLong();

    // Failure switches for the write path and the read path respectively.
    final AtomicBoolean storeFail = new AtomicBoolean(false);
    final AtomicBoolean loadFail = new AtomicBoolean(false);

    // Each registered queue gets an Object offered after write-path (and loadAll) calls.
    final List<BlockingQueue> listeners = new CopyOnWriteArrayList<BlockingQueue>();

    public void addListener(BlockingQueue obj) {
        listeners.add(obj);
    }

    public void notifyListeners() {
        for (BlockingQueue listener : listeners) {
            listener.offer(new Object());
        }
    }

    // NOTE: delete failures are governed by storeFail (the write-path flag), not loadFail.
    public void delete(Object key) {
        try {
            if (storeFail.get()) {
                throw new RuntimeException();
            } else {
                db.remove(key);
            }
        } finally {
            deletes.incrementAndGet();
            notifyListeners();
        }
    }

    /** Arms or disarms the write-path and read-path failure switches. */
    public void setFail(boolean shouldFail, boolean loadFail) {
        this.storeFail.set(shouldFail);
        this.loadFail.set(loadFail);
    }

    public int dbSize() {
        return db.size();
    }

    public boolean dbContainsKey(Object key) {
        return db.containsKey(key);
    }

    public Object dbGet(Object key) {
        return db.get(key);
    }

    public void store(Object key, Object value) {
        try {
            if (storeFail.get()) {
                throw new RuntimeException();
            } else {
                db.put(key, value);
            }
        } finally {
            stores.incrementAndGet();
            notifyListeners();
        }
    }

    // loadAllKeys never fails and does not notify listeners.
    public Set loadAllKeys() {
        try {
            return db.keySet();
        } finally {
            loadAllKeys.incrementAndGet();
        }
    }

    public Object load(Object key) {
        try {
            if (loadFail.get()) {
                throw new RuntimeException();
            } else {
                return db.get(key);
            }
        } finally {
            loads.incrementAndGet();
        }
    }

    public void storeAll(Map map) {
        try {
            if (storeFail.get()) {
                throw new RuntimeException();
            } else {
                db.putAll(map);
            }
        } finally {
            storeAlls.incrementAndGet();
            notifyListeners();
        }
    }

    public Map loadAll(Collection keys) {
        try {
            if (loadFail.get()) {
                throw new RuntimeException();
            } else {
                Map results = new HashMap();
                for (Object key : keys) {
                    Object value = db.get(key);
                    if (value != null) {
                        results.put(key, value);
                    }
                }
                return results;
            }
        } finally {
            loadAlls.incrementAndGet();
            notifyListeners();
        }
    }

    public void deleteAll(Collection keys) {
        try {
            if (storeFail.get()) {
                throw new RuntimeException();
            } else {
                for (Object key : keys) {
                    db.remove(key);
                }
            }
        } finally {
            deleteAlls.incrementAndGet();
            notifyListeners();
        }
    }
}
/**
 * TestMapStore whose very first storeAll() call is delayed by ten seconds,
 * simulating a slow initial flush; all later calls proceed immediately.
 */
public static class WaitingOnFirstTestMapStore extends TestMapStore {
    /** Flag/count of storeAll invocations; only the first one sleeps. */
    private final AtomicInteger count;

    public WaitingOnFirstTestMapStore() {
        super();
        this.count = new AtomicInteger(0);
    }

    @Override
    public void storeAll(Map map) {
        // compareAndSet makes the one-shot check atomic (the previous
        // get()==0 then increment was a check-then-act race).
        if (count.compareAndSet(0, 1)) {
            try {
                Thread.sleep(10000);
            } catch (InterruptedException e) {
                // restore the interrupt flag instead of just printing the trace
                Thread.currentThread().interrupt();
            }
        }
        super.storeAll(map);
    }
}
/**
 * General-purpose MapStore test double. Every callback updates callCount and
 * counts down a dedicated latch sized with the expected number of calls, so
 * tests can await an exact number of store/delete/load/... invocations.
 */
public static class TestMapStore extends MapStoreAdapter implements MapLoaderLifecycleSupport, MapStore {

    final Map store = new ConcurrentHashMap();

    // One latch per callback type, initialized with the expected call counts.
    final CountDownLatch latchStore;
    final CountDownLatch latchStoreAll;
    final CountDownLatch latchDelete;
    final CountDownLatch latchDeleteAll;
    final CountDownLatch latchLoad;
    final CountDownLatch latchLoadAllKeys;
    final CountDownLatch latchLoadAll;
    // Optional latches counted down once per affected key (not per batch).
    CountDownLatch latchStoreOpCount;
    CountDownLatch latchStoreAllOpCount;
    final AtomicInteger callCount = new AtomicInteger();
    final AtomicInteger initCount = new AtomicInteger();
    final AtomicInteger destroyCount = new AtomicInteger();
    private HazelcastInstance hazelcastInstance;
    private Properties properties;
    private String mapName;
    private boolean loadAllKeys = true;

    public TestMapStore() {
        this(0, 0, 0, 0, 0, 0);
    }

    public TestMapStore(int expectedStore, int expectedDelete, int expectedLoad) {
        this(expectedStore, 0, expectedDelete, 0, expectedLoad, 0);
    }

    public TestMapStore(int expectedStore, int expectedStoreAll, int expectedDelete,
                        int expectedDeleteAll, int expectedLoad, int expectedLoadAll) {
        this(expectedStore, expectedStoreAll, expectedDelete, expectedDeleteAll,
                expectedLoad, expectedLoadAll, 0);
    }

    public TestMapStore(int expectedStore, int expectedStoreAll, int expectedDelete,
                        int expectedDeleteAll, int expectedLoad, int expectedLoadAll,
                        int expectedLoadAllKeys) {
        latchStore = new CountDownLatch(expectedStore);
        latchStoreAll = new CountDownLatch(expectedStoreAll);
        latchDelete = new CountDownLatch(expectedDelete);
        latchDeleteAll = new CountDownLatch(expectedDeleteAll);
        latchLoad = new CountDownLatch(expectedLoad);
        latchLoadAll = new CountDownLatch(expectedLoadAll);
        latchLoadAllKeys = new CountDownLatch(expectedLoadAllKeys);
    }

    /** MapLoaderLifecycleSupport: captures the owning instance, properties and map name. */
    public void init(HazelcastInstance hazelcastInstance, Properties properties, String mapName) {
        this.hazelcastInstance = hazelcastInstance;
        this.properties = properties;
        this.mapName = mapName;
        initCount.incrementAndGet();
    }

    public boolean isLoadAllKeys() {
        return loadAllKeys;
    }

    public void setLoadAllKeys(boolean loadAllKeys) {
        this.loadAllKeys = loadAllKeys;
    }

    public void destroy() {
        destroyCount.incrementAndGet();
    }

    public int getInitCount() {
        return initCount.get();
    }

    public int getDestroyCount() {
        return destroyCount.get();
    }

    public HazelcastInstance getHazelcastInstance() {
        return hazelcastInstance;
    }

    public String getMapName() {
        return mapName;
    }

    public Properties getProperties() {
        return properties;
    }

    /**
     * Asserts that every expected callback happened within the given timeout.
     * NOTE(review): latchLoadAllKeys is not awaited here - confirm intended.
     */
    public void assertAwait(int seconds) throws InterruptedException {
        assertTrue("Store remaining: " + latchStore.getCount(), latchStore.await(seconds, TimeUnit.SECONDS));
        assertTrue("Store-all remaining: " + latchStoreAll.getCount(), latchStoreAll.await(seconds, TimeUnit.SECONDS));
        assertTrue("Delete remaining: " + latchDelete.getCount(), latchDelete.await(seconds, TimeUnit.SECONDS));
        assertTrue("Delete-all remaining: " + latchDeleteAll.getCount(), latchDeleteAll.await(seconds, TimeUnit.SECONDS));
        assertTrue("Load remaining: " + latchLoad.getCount(), latchLoad.await(seconds, TimeUnit.SECONDS));
        // message typo fixed: was "Load-al remaining"
        assertTrue("Load-all remaining: " + latchLoadAll.getCount(), latchLoadAll.await(seconds, TimeUnit.SECONDS));
    }

    public Map getStore() {
        return store;
    }

    /** Seeds the backing store directly, without counters or latches. */
    public void insert(Object key, Object value) {
        store.put(key, value);
    }

    public void store(Object key, Object value) {
        store.put(key, value);
        callCount.incrementAndGet();
        latchStore.countDown();
        if (latchStoreOpCount != null) {
            latchStoreOpCount.countDown();
        }
    }

    public Set loadAllKeys() {
        callCount.incrementAndGet();
        latchLoadAllKeys.countDown();
        // Returning null disables eager preloading.
        if (!loadAllKeys) return null;
        return store.keySet();
    }

    public Object load(Object key) {
        callCount.incrementAndGet();
        latchLoad.countDown();
        return store.get(key);
    }

    public void storeAll(Map map) {
        store.putAll(map);
        callCount.incrementAndGet();
        latchStoreAll.countDown();
        if (latchStoreAllOpCount != null) {
            for (int i = 0; i < map.size(); i++) {
                latchStoreAllOpCount.countDown();
            }
        }
    }

    public void delete(Object key) {
        store.remove(key);
        callCount.incrementAndGet();
        latchDelete.countDown();
    }

    public Map loadAll(Collection keys) {
        Map map = new HashMap(keys.size());
        for (Object key : keys) {
            Object value = store.get(key);
            if (value != null) {
                map.put(key, value);
            }
        }
        callCount.incrementAndGet();
        latchLoadAll.countDown();
        return map;
    }

    public void deleteAll(Collection keys) {
        for (Object key : keys) {
            store.remove(key);
        }
        callCount.incrementAndGet();
        latchDeleteAll.countDown();
    }
}
/**
 * Minimal MapStore backed by an in-memory map. The backing map is public so
 * tests can inspect it; loadAllKeys() can be switched off to disable eager
 * preloading.
 */
public static class SimpleMapStore<K, V> extends MapStoreAdapter<K, V> {

    /** Backing map; exposed for direct assertions in tests. */
    public final Map<K, V> store;
    /** When false, loadAllKeys() reports no preloadable keys. */
    private boolean loadAllKeys = true;

    public SimpleMapStore() {
        this(new ConcurrentHashMap<K, V>());
    }

    public SimpleMapStore(final Map<K, V> store) {
        this.store = store;
    }

    @Override
    public void store(final K key, final V value) {
        store.put(key, value);
    }

    @Override
    public V load(final K key) {
        return store.get(key);
    }

    @Override
    public void delete(final K key) {
        store.remove(key);
    }

    @Override
    public void storeAll(final Map<K, V> kvMap) {
        store.putAll(kvMap);
    }

    public Set<K> loadAllKeys() {
        return loadAllKeys ? store.keySet() : null;
    }

    public void setLoadAllKeys(boolean loadAllKeys) {
        this.loadAllKeys = loadAllKeys;
    }
}
} | 0true
| hazelcast_src_test_java_com_hazelcast_map_mapstore_MapStoreTest.java |
/**
 * Transactional collection operation that reserves an "add": the collection
 * container sets the value aside under the given transaction id so the add
 * can later be committed or rolled back. The container's reservation result
 * becomes this operation's response.
 */
public class CollectionReserveAddOperation extends CollectionOperation {

    /** Id of the owning transaction (populated by the constructor or readInternal). */
    String transactionId;

    /** Serialized value whose addition is being reserved. */
    Data value;

    public CollectionReserveAddOperation() {
    }

    public CollectionReserveAddOperation(String name, String transactionId, Data value) {
        super(name);
        this.transactionId = transactionId;
        this.value = value;
    }

    @Override
    public int getId() {
        return CollectionDataSerializerHook.COLLECTION_RESERVE_ADD;
    }

    @Override
    public void beforeRun() throws Exception {
    }

    @Override
    public void run() throws Exception {
        // Container performs the reservation bookkeeping; its return value
        // (presumably a reservation/item id - not visible here) is the response.
        response = getOrCreateContainer().reserveAdd(transactionId, value);
    }

    @Override
    public void afterRun() throws Exception {
    }

    // writeInternal/readInternal must stay exact mirrors of each other.
    @Override
    protected void writeInternal(ObjectDataOutput out) throws IOException {
        super.writeInternal(out);
        out.writeUTF(transactionId);
        out.writeObject(value);
    }

    @Override
    protected void readInternal(ObjectDataInput in) throws IOException {
        super.readInternal(in);
        transactionId = in.readUTF();
        value = in.readObject();
    }
}
| hazelcast_src_main_java_com_hazelcast_collection_txn_CollectionReserveAddOperation.java |
929 | @Service("blOfferTimeZoneProcessor")
public class OfferTimeZoneProcessorImpl implements OfferTimeZoneProcessor {
private static final Log LOG = LogFactory.getLog(OfferTimeZoneProcessorImpl.class);
public TimeZone getTimeZone(Offer offer) {
BroadleafRequestContext brc = BroadleafRequestContext.getBroadleafRequestContext();
return (brc != null) ? brc.getTimeZone() : TimeZone.getDefault();
}
} | 0true
| core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_offer_service_processor_OfferTimeZoneProcessorImpl.java |
/**
 * Extensible type-safe "enum" of payment transaction types (Broadleaf
 * BroadleafEnumerationType pattern). Each instance self-registers in a static
 * registry keyed by its type string, so modules can define additional types at
 * class-load time and resolve them via getInstance(String).
 */
public class TransactionType implements Serializable, BroadleafEnumerationType {

    private static final long serialVersionUID = 1L;

    // LinkedHashMap preserves registration order for predictable iteration.
    private static final Map<String, TransactionType> TYPES = new LinkedHashMap<String, TransactionType>();

    public static final TransactionType AUTHORIZE = new TransactionType("AUTHORIZE", "Authorize");
    public static final TransactionType DEBIT = new TransactionType("DEBIT", "Debit");
    public static final TransactionType AUTHORIZEANDDEBIT = new TransactionType("AUTHORIZEANDDEBIT", "Authorize and Debit");
    public static final TransactionType CREDIT = new TransactionType("CREDIT", "Credit");
    public static final TransactionType VOIDPAYMENT = new TransactionType("VOIDPAYMENT", "Void Payment");
    public static final TransactionType BALANCE = new TransactionType("BALANCE", "Balance");
    public static final TransactionType REVERSEAUTHORIZE = new TransactionType("REVERSEAUTHORIZE", "Reverse Authorize");
    public static final TransactionType PARTIALPAYMENT = new TransactionType("PARTIALPAYMENT", "Partial Payment");

    /** Returns the registered type for the given type string, or null if unknown. */
    public static TransactionType getInstance(final String type) {
        return TYPES.get(type);
    }

    private String type;
    private String friendlyType;

    /** No-arg constructor required for serialization frameworks. */
    public TransactionType() {
        //do nothing
    }

    public TransactionType(final String type, final String friendlyType) {
        this.friendlyType = friendlyType;
        setType(type);
    }

    public String getType() {
        return type;
    }

    public String getFriendlyType() {
        return friendlyType;
    }

    // Registers this instance under its type string; the first registration wins.
    private void setType(final String type) {
        this.type = type;
        if (!TYPES.containsKey(type)) {
            TYPES.put(type, this);
        }
    }

    // equals/hashCode are based solely on the type string.
    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + ((type == null) ? 0 : type.hashCode());
        return result;
    }

    // NOTE(review): the getClass() comparison means a subclass instance is never
    // equal to a TransactionType with the same type string - confirm intended.
    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        TransactionType other = (TransactionType) obj;
        if (type == null) {
            if (other.type != null)
                return false;
        } else if (!type.equals(other.type))
            return false;
        return true;
    }
}
| core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_payment_service_type_TransactionType.java |
941 | private static final class NotFullyAvailableAtTheTimeInputStream extends InputStream {
private int pos = -1;
private int interrupt;
private final byte[] data;
private NotFullyAvailableAtTheTimeInputStream(byte[] data, int interrupt) {
this.data = data;
this.interrupt = interrupt;
assert interrupt < data.length;
}
@Override
public int read() throws IOException {
pos++;
if (pos < interrupt) {
return data[pos];
} else if (pos == interrupt) {
return -1;
} else if (pos <= data.length) {
return data[pos - 1];
} else {
return -1;
}
}
} | 0true
| core_src_test_java_com_orientechnologies_orient_core_record_impl_ORecordBytesTest.java |
1,034 | public class ConfigXmlGenerator {
private static final ILogger logger = Logger.getLogger(ConfigXmlGenerator.class);
private final boolean formatted;
/**
 * Creates a ConfigXmlGenerator that pretty-prints (formats) the generated XML.
 */
public ConfigXmlGenerator() {
    this(true);
}
/**
 * Creates a ConfigXmlGenerator.
 *
 * @param formatted true to pretty-print the generated XML, false to emit it unformatted.
 */
public ConfigXmlGenerator(boolean formatted) {
    this.formatted = formatted;
}
/**
 * Generates the hazelcast XML configuration string for the given Config.
 * NOTE(review): values (names, passwords, addresses, properties) are appended
 * verbatim; nothing is XML-escaped, so values containing markup characters
 * would produce malformed XML - confirm callers guarantee safe values.
 *
 * @param config the configuration; must not be null.
 * @return the XML string (pretty-printed when this generator is formatted).
 */
public String generate(Config config) {
    isNotNull(config,"Config");
    final StringBuilder xml = new StringBuilder();
    xml.append("<hazelcast ")
            .append("xmlns=\"http://www.hazelcast.com/schema/config\"\n")
            .append("xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n")
            .append("xsi:schemaLocation=\"http://www.hazelcast.com/schema/config ")
            .append("http://www.hazelcast.com/schema/config/hazelcast-config-3.3.xsd\">");
    // group credentials
    xml.append("<group>");
    xml.append("<name>").append(config.getGroupConfig().getName()).append("</name>");
    xml.append("<password>").append(config.getGroupConfig().getPassword()).append("</password>");
    xml.append("</group>");
    if (config.getLicenseKey() != null) {
        xml.append("<license-key>").append(config.getLicenseKey()).append("</license-key>");
    }
    // management center
    if (config.getManagementCenterConfig() != null) {
        ManagementCenterConfig mcConfig = config.getManagementCenterConfig();
        xml.append("<management-center enabled=\"").append(mcConfig.isEnabled())
                .append("\" update-interval=\"").append(mcConfig.getUpdateInterval()).append("\">")
                .append(mcConfig.getUrl()).append("</management-center>");
    }
    appendProperties(xml, config.getProperties());
    // WAN replication targets
    final Collection<WanReplicationConfig> wanRepConfigs = config.getWanReplicationConfigs().values();
    for (WanReplicationConfig wan : wanRepConfigs) {
        xml.append("<wan-replication name=\"").append(wan.getName()).append("\">");
        final List<WanTargetClusterConfig> targets = wan.getTargetClusterConfigs();
        for (WanTargetClusterConfig t : targets) {
            xml.append("<target-cluster group-name=\"").append(t.getGroupName())
                    .append("\" group-password=\"").append(t.getGroupPassword()).append("\">");
            xml.append("<replication-impl>").append(t.getReplicationImpl()).append("</replication-impl>");
            xml.append("<end-points>");
            final List<String> eps = t.getEndpoints();
            for (String ep : eps) {
                xml.append("<address>").append(ep).append("</address>");
            }
            xml.append("</end-points>").append("</target-cluster>");
        }
        xml.append("</wan-replication>");
    }
    // network: port, join (multicast/tcp-ip/aws), interfaces, ssl,
    // socket interceptor, symmetric encryption
    final NetworkConfig netCfg = config.getNetworkConfig();
    xml.append("<network>");
    if (netCfg.getPublicAddress() != null) {
        xml.append("<public-address>").append(netCfg.getPublicAddress()).append("</public-address>");
    }
    xml.append("<port port-count=\"").append(netCfg.getPortCount()).append("\" ")
            .append("auto-increment=\"").append(netCfg.isPortAutoIncrement()).append("\">")
            .append(netCfg.getPort()).append("</port>");
    final JoinConfig join = netCfg.getJoin();
    xml.append("<join>");
    final MulticastConfig mcast = join.getMulticastConfig();
    xml.append("<multicast enabled=\"").append(mcast.isEnabled()).append("\">");
    xml.append("<multicast-group>").append(mcast.getMulticastGroup()).append("</multicast-group>");
    xml.append("<multicast-port>").append(mcast.getMulticastPort()).append("</multicast-port>");
    xml.append("<multicast-timeout-seconds>").append(mcast.getMulticastTimeoutSeconds()).append("</multicast-timeout-seconds>");
    xml.append("<multicast-time-to-live>").append(mcast.getMulticastTimeToLive()).append("</multicast-time-to-live>");
    if (!mcast.getTrustedInterfaces().isEmpty()) {
        xml.append("<trusted-interfaces>");
        for (String trustedInterface : mcast.getTrustedInterfaces()) {
            xml.append("<interface>").append(trustedInterface).append("</interface>");
        }
        xml.append("</trusted-interfaces>");
    }
    xml.append("</multicast>");
    final TcpIpConfig tcpCfg = join.getTcpIpConfig();
    xml.append("<tcp-ip enabled=\"").append(tcpCfg.isEnabled()).append("\">");
    final List<String> members = tcpCfg.getMembers();
    for (String m : members) {
        xml.append("<member>").append(m).append("</member>");
    }
    if (tcpCfg.getRequiredMember() != null) {
        xml.append("<required-member>").append(tcpCfg.getRequiredMember()).append("</required-member>");
    }
    xml.append("</tcp-ip>");
    final AwsConfig awsConfig = join.getAwsConfig();
    xml.append("<aws enabled=\"").append(awsConfig.isEnabled()).append("\">");
    xml.append("<access-key>").append(awsConfig.getAccessKey()).append("</access-key>");
    xml.append("<secret-key>").append(awsConfig.getSecretKey()).append("</secret-key>");
    xml.append("<region>").append(awsConfig.getRegion()).append("</region>");
    xml.append("<security-group-name>").append(awsConfig.getSecurityGroupName()).append("</security-group-name>");
    xml.append("<tag-key>").append(awsConfig.getTagKey()).append("</tag-key>");
    xml.append("<tag-value>").append(awsConfig.getTagValue()).append("</tag-value>");
    xml.append("</aws>");
    xml.append("</join>");
    final InterfacesConfig interfaces = netCfg.getInterfaces();
    xml.append("<interfaces enabled=\"").append(interfaces.isEnabled()).append("\">");
    final Collection<String> interfaceList = interfaces.getInterfaces();
    for (String i : interfaceList) {
        xml.append("<interface>").append(i).append("</interface>");
    }
    xml.append("</interfaces>");
    final SSLConfig ssl = netCfg.getSSLConfig();
    xml.append("<ssl enabled=\"").append(ssl != null && ssl.isEnabled()).append("\">");
    if (ssl != null) {
        // Prefer the concrete implementation's class name over the configured name.
        String className = ssl.getFactoryImplementation() != null
                ? ssl.getFactoryImplementation().getClass().getName()
                : ssl.getFactoryClassName();
        xml.append("<factory-class-name>").append(className).append("</factory-class-name>");
        appendProperties(xml, ssl.getProperties());
    }
    xml.append("</ssl>");
    final SocketInterceptorConfig socket = netCfg.getSocketInterceptorConfig();
    xml.append("<socket-interceptor enabled=\"").append(socket != null && socket.isEnabled()).append("\">");
    if (socket != null) {
        String className = socket.getImplementation() != null
                ? socket.getImplementation().getClass().getName() : socket.getClassName();
        xml.append("<class-name>").append(className).append("</class-name>");
        appendProperties(xml, socket.getProperties());
    }
    xml.append("</socket-interceptor>");
    final SymmetricEncryptionConfig sec = netCfg.getSymmetricEncryptionConfig();
    xml.append("<symmetric-encryption enabled=\"").append(sec != null && sec.isEnabled()).append("\">");
    if (sec != null) {
        xml.append("<algorithm>").append(sec.getAlgorithm()).append("</algorithm>");
        xml.append("<salt>").append(sec.getSalt()).append("</salt>");
        xml.append("<password>").append(sec.getPassword()).append("</password>");
        xml.append("<iteration-count>").append(sec.getIterationCount()).append("</iteration-count>");
    }
    xml.append("</symmetric-encryption>");
    xml.append("</network>");
    // partition group
    final PartitionGroupConfig pg = config.getPartitionGroupConfig();
    if (pg != null) {
        xml.append("<partition-group enabled=\"").append(pg.isEnabled())
                .append("\" group-type=\"").append(pg.getGroupType()).append("\" />");
    }
    // executor services
    final Collection<ExecutorConfig> exCfgs = config.getExecutorConfigs().values();
    for (ExecutorConfig ex : exCfgs) {
        xml.append("<executor-service name=\"").append(ex.getName()).append("\">");
        xml.append("<pool-size>").append(ex.getPoolSize()).append("</pool-size>");
        xml.append("<queue-capacity>").append(ex.getQueueCapacity()).append("</queue-capacity>");
        xml.append("</executor-service>");
    }
    // queues
    final Collection<QueueConfig> qCfgs = config.getQueueConfigs().values();
    for (QueueConfig q : qCfgs) {
        xml.append("<queue name=\"").append(q.getName()).append("\">");
        xml.append("<queue-max-size>").append(q.getMaxSize()).append("</queue-max-size>");
        xml.append("<queue-sync-backup-count>").append(q.getBackupCount()).append("</queue-sync-backup-count>");
        xml.append("<queue-async-backup-count>").append(q.getAsyncBackupCount()).append("</queue-async-backup-count>");
        if (!q.getItemListenerConfigs().isEmpty()) {
            xml.append("<item-listeners>");
            for (ItemListenerConfig lc : q.getItemListenerConfigs()) {
                xml.append("<item-listener include-value=\"").append(lc.isIncludeValue()).append("\">");
                xml.append(lc.getClassName());
                xml.append("</item-listener>");
            }
            xml.append("</item-listeners>");
        }
        xml.append("</queue>");
    }
    // maps (store, near-cache, wan ref, indexes, listeners, partition strategy)
    final Collection<MapConfig> mCfgs = config.getMapConfigs().values();
    for (MapConfig m : mCfgs) {
        xml.append("<map name=\"").append(m.getName()).append("\">");
        xml.append("<in-memory-format>").append(m.getInMemoryFormat()).append("</in-memory-format>");
        xml.append("<backup-count>").append(m.getBackupCount()).append("</backup-count>");
        xml.append("<async-backup-count>").append(m.getAsyncBackupCount()).append("</async-backup-count>");
        xml.append("<time-to-live-seconds>").append(m.getTimeToLiveSeconds()).append("</time-to-live-seconds>");
        xml.append("<max-idle-seconds>").append(m.getMaxIdleSeconds()).append("</max-idle-seconds>");
        xml.append("<eviction-policy>").append(m.getEvictionPolicy()).append("</eviction-policy>");
        xml.append("<max-size policy=\"").append(m.getMaxSizeConfig().getMaxSizePolicy()).append("\">").append(m.getMaxSizeConfig().getSize()).append("</max-size>");
        xml.append("<eviction-percentage>").append(m.getEvictionPercentage()).append("</eviction-percentage>");
        xml.append("<merge-policy>").append(m.getMergePolicy()).append("</merge-policy>");
        xml.append("<read-backup-data>").append(m.isReadBackupData()).append("</read-backup-data>");
        xml.append("<statistics-enabled>").append(m.isStatisticsEnabled()).append("</statistics-enabled>");
        if (m.getMapStoreConfig() != null) {
            final MapStoreConfig s = m.getMapStoreConfig();
            xml.append("<map-store enabled=\"").append(s.isEnabled()).append("\">");
            final String clazz = s.getImplementation() != null ? s.getImplementation().getClass().getName() : s.getClassName();
            xml.append("<class-name>").append(clazz).append("</class-name>");
            final String factoryClass = s.getFactoryImplementation() != null
                    ? s.getFactoryImplementation().getClass().getName()
                    : s.getFactoryClassName();
            if (factoryClass != null) {
                xml.append("<factory-class-name>").append(factoryClass).append("</factory-class-name>");
            }
            xml.append("<write-delay-seconds>").append(s.getWriteDelaySeconds()).append("</write-delay-seconds>");
            appendProperties(xml, s.getProperties());
            xml.append("</map-store>");
        }
        if (m.getNearCacheConfig() != null) {
            final NearCacheConfig n = m.getNearCacheConfig();
            xml.append("<near-cache>");
            xml.append("<max-size>").append(n.getMaxSize()).append("</max-size>");
            xml.append("<time-to-live-seconds>").append(n.getTimeToLiveSeconds()).append("</time-to-live-seconds>");
            xml.append("<max-idle-seconds>").append(n.getMaxIdleSeconds()).append("</max-idle-seconds>");
            xml.append("<eviction-policy>").append(n.getEvictionPolicy()).append("</eviction-policy>");
            xml.append("<invalidate-on-change>").append(n.isInvalidateOnChange()).append("</invalidate-on-change>");
            xml.append("<in-memory-format>").append(n.getInMemoryFormat()).append("</in-memory-format>");
            xml.append("</near-cache>");
        }
        if (m.getWanReplicationRef() != null) {
            final WanReplicationRef wan = m.getWanReplicationRef();
            xml.append("<wan-replication-ref name=\"").append(wan.getName()).append("\">");
            xml.append("<merge-policy>").append(wan.getMergePolicy()).append("</merge-policy>");
            xml.append("</wan-replication-ref>");
        }
        if (!m.getMapIndexConfigs().isEmpty()) {
            xml.append("<indexes>");
            for (MapIndexConfig indexCfg : m.getMapIndexConfigs()) {
                xml.append("<index ordered=\"").append(indexCfg.isOrdered()).append("\">");
                xml.append(indexCfg.getAttribute());
                xml.append("</index>");
            }
            xml.append("</indexes>");
        }
        if (!m.getEntryListenerConfigs().isEmpty()) {
            xml.append("<entry-listeners>");
            for (EntryListenerConfig lc : m.getEntryListenerConfigs()) {
                xml.append("<entry-listener include-value=\"").append(lc.isIncludeValue()).append("\" local=\"").append(lc.isLocal()).append("\">");
                final String clazz = lc.getImplementation() != null ? lc.getImplementation().getClass().getName() : lc.getClassName();
                xml.append(clazz);
                xml.append("</entry-listener>");
            }
            xml.append("</entry-listeners>");
        }
        if (m.getPartitioningStrategyConfig() != null) {
            xml.append("<partition-strategy>");
            PartitioningStrategyConfig psc = m.getPartitioningStrategyConfig();
            if (psc.getPartitioningStrategy() != null) {
                xml.append(psc.getPartitioningStrategy().getClass().getName());
            } else {
                xml.append(psc.getPartitioningStrategyClass());
            }
            xml.append("</partition-strategy>");
        }
        xml.append("</map>");
    }
    // multimaps
    final Collection<MultiMapConfig> mmCfgs = config.getMultiMapConfigs().values();
    for (MultiMapConfig mm : mmCfgs) {
        xml.append("<multimap name=\"").append(mm.getName()).append("\">");
        xml.append("<value-collection-type>").append(mm.getValueCollectionType()).append("</value-collection-type>");
        if (!mm.getEntryListenerConfigs().isEmpty()) {
            xml.append("<entry-listeners>");
            for (EntryListenerConfig lc : mm.getEntryListenerConfigs()) {
                xml.append("<entry-listener include-value=\"").append(lc.isIncludeValue()).append("\" local=\"").append(lc.isLocal()).append("\">");
                final String clazz = lc.getImplementation() != null ? lc.getImplementation().getClass().getName() : lc.getClassName();
                xml.append(clazz);
                xml.append("</entry-listener>");
            }
            xml.append("</entry-listeners>");
        }
//        if (mm.getPartitioningStrategyConfig() != null) {
//            xml.append("<partition-strategy>");
//            PartitioningStrategyConfig psc = mm.getPartitioningStrategyConfig();
//            if (psc.getPartitioningStrategy() != null) {
//                xml.append(psc.getPartitioningStrategy().getClass().getName());
//            } else {
//                xml.append(psc.getPartitioningStrategyClass());
//            }
//            xml.append("</partition-strategy>");
//        }
        xml.append("</multimap>");
    }
    // topics
    final Collection<TopicConfig> tCfgs = config.getTopicConfigs().values();
    for (TopicConfig t : tCfgs) {
        xml.append("<topic name=\"").append(t.getName()).append("\">");
        xml.append("<global-ordering-enabled>").append(t.isGlobalOrderingEnabled()).append("</global-ordering-enabled>");
        if (!t.getMessageListenerConfigs().isEmpty()) {
            xml.append("<message-listeners>");
            for (ListenerConfig lc : t.getMessageListenerConfigs()) {
                xml.append("<message-listener>");
                final String clazz = lc.getImplementation() != null ? lc.getImplementation().getClass().getName() : lc.getClassName();
                xml.append(clazz);
                xml.append("</message-listener>");
            }
            xml.append("</message-listeners>");
        }
        xml.append("</topic>");
    }
    // semaphores
    final Collection<SemaphoreConfig> semaphoreCfgs = config.getSemaphoreConfigs();
    for (SemaphoreConfig sc : semaphoreCfgs) {
        xml.append("<semaphore name=\"").append(sc.getName()).append("\">");
        xml.append("<initial-permits>").append(sc.getInitialPermits()).append("</initial-permits>");
        xml.append("<backup-count>").append(sc.getBackupCount()).append("</backup-count>");
        xml.append("<async-backup-count>").append(sc.getAsyncBackupCount()).append("</async-backup-count>");
        xml.append("</semaphore>");
    }
    // instance-wide listeners
    if (!config.getListenerConfigs().isEmpty()) {
        xml.append("<listeners>");
        for (ListenerConfig lc : config.getListenerConfigs()) {
            xml.append("<listener>");
            final String clazz = lc.getImplementation() != null ? lc.getImplementation().getClass().getName() : lc.getClassName();
            xml.append(clazz);
            xml.append("</listener>");
        }
        xml.append("</listeners>");
    }
    xml.append("</hazelcast>");
    return format(xml.toString(), 5);
}
/**
 * Pretty-prints the generated configuration XML via an identity XSLT
 * transform. Returns the input unchanged when formatting is disabled or
 * when the transform fails for any reason (best-effort formatting).
 *
 * @param input  the raw, unformatted XML string
 * @param indent number of spaces per indentation level
 * @return the indented XML, or {@code input} on failure / when disabled
 */
private String format(final String input, int indent) {
    // Pretty-printing is optional; pass the XML through untouched when disabled.
    if (!formatted) {
        return input;
    }
    try {
        final StringWriter writer = new StringWriter();
        final Source source = new StreamSource(new StringReader(input));
        final StreamResult target = new StreamResult(writer);
        final TransformerFactory factory = TransformerFactory.newInstance();
        /* Older versions of Xalan still use this method of setting indent values.
         * Attempt to make this work but don't completely fail if it's a problem.
         */
        try {
            factory.setAttribute("indent-number", indent);
        } catch (IllegalArgumentException e) {
            if (logger.isFinestEnabled()) {
                logger.finest("Failed to set indent-number attribute; cause: " + e.getMessage());
            }
        }
        final Transformer transformer = factory.newTransformer();
        transformer.setOutputProperty(OutputKeys.OMIT_XML_DECLARATION, "yes");
        transformer.setOutputProperty(OutputKeys.ENCODING, "UTF-8");
        transformer.setOutputProperty(OutputKeys.INDENT, "yes");
        /* Newer versions of Xalan will look for a fully-qualified output property in order to specify amount of
         * indentation to use. Attempt to make this work as well but again don't completely fail if it's a problem.
         */
        try {
            transformer.setOutputProperty("{http://xml.apache.org/xslt}indent-amount", Integer.toString(indent));
        } catch (IllegalArgumentException e) {
            if (logger.isFinestEnabled()) {
                logger.finest("Failed to set indent-amount property; cause: " + e.getMessage());
            }
        }
        transformer.transform(source, target);
        return writer.toString();
    } catch (Exception e) {
        // Formatting is best-effort: log the failure and fall back to the raw XML.
        logger.warning(e);
        return input;
    }
}
/**
 * Appends a {@code <properties>} element to the XML buffer, with one
 * {@code <property>} child per entry: the key becomes the {@code name}
 * attribute and the value becomes the element text. Emits nothing when
 * the {@link Properties} instance is empty.
 *
 * NOTE(review): keys and values are written without XML escaping; a value
 * containing '&lt;', '&amp;' or '"' would produce malformed XML — verify
 * callers only supply XML-safe property values.
 *
 * @param xml   the buffer the XML fragment is appended to
 * @param props the properties to serialize
 */
private void appendProperties(StringBuilder xml, Properties props) {
    if (!props.isEmpty()) {
        xml.append("<properties>");
        // Typed enhanced-for replaces the previous raw 'Set keys' local
        // (raw-type warning and a redundant variable). Keys are expected to
        // be strings, so getProperty(key.toString()) resolves each value.
        for (Object key : props.keySet()) {
            xml.append("<property name=\"").append(key).append("\">")
                    .append(props.getProperty(key.toString()))
                    .append("</property>");
        }
        xml.append("</properties>");
    }
}
} | 0true
| hazelcast_src_main_java_com_hazelcast_config_ConfigXmlGenerator.java |
776 | @Deprecated
/**
 * Type-safe, extensible enumeration of product availability statuses
 * (AVAILABLE, UNAVAILABLE, BACKORDERED).
 * Each instance registers itself in the static TYPES map via setType, so
 * getInstance(String) can later resolve an instance from its string key.
 */
public class AvailabilityStatusType implements Serializable, BroadleafEnumerationType {
private static final long serialVersionUID = 1L;
// Registry of all instances created so far, keyed by type string
// (LinkedHashMap preserves registration order).
private static final Map<String, AvailabilityStatusType> TYPES = new LinkedHashMap<String, AvailabilityStatusType>();
public static final AvailabilityStatusType AVAILABLE = new AvailabilityStatusType("AVAILABLE", "Available");
public static final AvailabilityStatusType UNAVAILABLE = new AvailabilityStatusType("UNAVAILABLE", "Unavailable");
public static final AvailabilityStatusType BACKORDERED = new AvailabilityStatusType("BACKORDERED", "Back Ordered");
/** Returns the registered instance for the given type key, or null if none is registered. */
public static AvailabilityStatusType getInstance(final String type) {
return TYPES.get(type);
}
private String type;
private String friendlyType;
/** No-arg constructor (e.g. for serialization frameworks); does not register a key. */
public AvailabilityStatusType() {
//do nothing
}
/** Creates a status with the given unique key and display name, registering it in TYPES. */
public AvailabilityStatusType(final String type, final String friendlyType) {
this.friendlyType = friendlyType;
setType(type);
}
@Override
public String getType() {
return type;
}
@Override
public String getFriendlyType() {
return friendlyType;
}
// Stores the key and registers this instance; duplicate keys are rejected
// so the TYPES registry stays unambiguous.
private void setType(final String type) {
this.type = type;
if (!TYPES.containsKey(type)) {
TYPES.put(type, this);
} else {
throw new RuntimeException("Cannot add the type: (" + type + "). It already exists as a type via " + getInstance(type).getClass().getName());
}
}
// equals/hashCode are based solely on the type key.
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((type == null) ? 0 : type.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
AvailabilityStatusType other = (AvailabilityStatusType) obj;
if (type == null) {
if (other.type != null)
return false;
} else if (!type.equals(other.type))
return false;
return true;
}
} | 1no label
| core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_inventory_service_type_AvailabilityStatusType.java |
138 | static final class WNode {
// Waiter-queue node: prev/next link queued waiters into a doubly-linked
// list, cowait chains readers sharing a slot. All mutable fields are
// volatile for cross-thread visibility. WAITING/CANCELLED and RMODE/WMODE
// are constants declared elsewhere in the enclosing class — not visible here.
volatile WNode prev;
volatile WNode next;
volatile WNode cowait; // list of linked readers
volatile Thread thread; // non-null while possibly parked
volatile int status; // 0, WAITING, or CANCELLED
final int mode; // RMODE or WMODE (immutable after construction)
WNode(int m, WNode p) { mode = m; prev = p; }
} | 0true
| src_main_java_jsr166e_StampedLock.java |
81 | LESS_THAN {
// Enum-constant body implementing the strict "less than" predicate.
@Override
public boolean isValidValueType(Class<?> clazz) {
Preconditions.checkNotNull(clazz);
// Only types with a natural ordering can be range-compared.
return Comparable.class.isAssignableFrom(clazz);
}
@Override
public boolean isValidCondition(Object condition) {
return condition!=null && condition instanceof Comparable;
}
@Override
public boolean evaluate(Object value, Object condition) {
Integer cmp = AttributeUtil.compare(value,condition);
// Incomparable values (compare(...) returned null) evaluate to false.
return cmp!=null?cmp<0:false;
}
@Override
public String toString() {
return "<";
}
@Override
public TitanPredicate negate() {
// Logical negation of "<" is ">=".
return GREATER_THAN_EQUAL;
}
}, | 0true
| titan-core_src_main_java_com_thinkaurelius_titan_core_attribute_Cmp.java |
583 | nodeEngine.getExecutionService().execute(ExecutionService.SYSTEM_EXECUTOR, new Runnable() {
// Asynchronously verifies a suspected member's reachability via ICMP ping;
// removes the member's address from the cluster if all probes fail.
public void run() {
try {
final Address address = memberImpl.getAddress();
logger.warning(thisAddress + " will ping " + address);
// Up to 5 reachability probes before declaring the member unreachable.
for (int i = 0; i < 5; i++) {
try {
// null network interface = probe via any interface;
// icmpTtl / icmpTimeout bound each individual probe.
if (address.getInetAddress().isReachable(null, icmpTtl, icmpTimeout)) {
logger.info(thisAddress + " pings successfully. Target: " + address);
return;
}
} catch (ConnectException ignored) {
// no route to host
// means we cannot connect anymore
}
}
logger.warning(thisAddress + " couldn't ping " + address);
// not reachable.
removeAddress(address);
} catch (Throwable ignored) {
// Best-effort health check: any unexpected failure is deliberately swallowed.
}
}
}); | 0true
| hazelcast_src_main_java_com_hazelcast_cluster_ClusterServiceImpl.java |
1,044 | @RunWith(HazelcastParallelClassRunner.class)
@Category(QuickTest.class)
/**
 * Unit tests for {@link com.hazelcast.config.GroupConfig}: constructors,
 * default name/password values, and the fluent accessors.
 *
 * NOTE(review): assertEquals(expected, actual) would give clearer failure
 * messages than assertTrue(a.equals(b)) throughout this class.
 */
public class GroupConfigTest {
/**
 * Test method for {@link com.hazelcast.config.GroupConfig#GroupConfig()}.
 */
@Test
public void testGroupConfig() {
GroupConfig groupConfig = new GroupConfig();
assertTrue(groupConfig.getName().equals(GroupConfig.DEFAULT_GROUP_NAME));
assertTrue(groupConfig.getPassword().equals(GroupConfig.DEFAULT_GROUP_PASSWORD));
}
/**
 * Test method for {@link com.hazelcast.config.GroupConfig#GroupConfig(java.lang.String)}.
 */
@Test
public void testGroupConfigString() {
GroupConfig groupConfig = new GroupConfig("abc");
assertTrue(groupConfig.getName().equals("abc"));
assertTrue(groupConfig.getPassword().equals(GroupConfig.DEFAULT_GROUP_PASSWORD));
}
/**
 * Test method for {@link com.hazelcast.config.GroupConfig#GroupConfig(java.lang.String, java.lang.String)}.
 */
@Test
public void testGroupConfigStringString() {
GroupConfig groupConfig = new GroupConfig("abc", "def");
assertTrue(groupConfig.getName().equals("abc"));
assertTrue(groupConfig.getPassword().equals("def"));
}
/**
 * Test method for {@link com.hazelcast.config.GroupConfig#getName()}.
 */
@Test
public void testGetName() {
GroupConfig groupConfig = new GroupConfig();
assertTrue(groupConfig.getName().equals(GroupConfig.DEFAULT_GROUP_NAME));
}
/**
 * Test method for {@link com.hazelcast.config.GroupConfig#setName(java.lang.String)}.
 */
@Test
public void testSetName() {
GroupConfig groupConfig = new GroupConfig().setName("abc");
assertTrue(groupConfig.getName().equals("abc"));
}
/**
 * Test method for {@link com.hazelcast.config.GroupConfig#getPassword()}.
 */
@Test
public void testGetPassword() {
GroupConfig groupConfig = new GroupConfig();
assertTrue(groupConfig.getPassword().equals(GroupConfig.DEFAULT_GROUP_PASSWORD));
}
/**
 * Test method for {@link com.hazelcast.config.GroupConfig#setPassword(java.lang.String)}.
 */
@Test
public void testSetPassword() {
GroupConfig groupConfig = new GroupConfig().setPassword("def");
assertTrue(groupConfig.getPassword().equals("def"));
}
} | 0true
| hazelcast_src_test_java_com_hazelcast_config_GroupConfigTest.java |
238 | public interface SystemPropertiesService {
/**
 * CRUD-style service contract for {@code SystemProperty} entities.
 */
/** Persists (creates or updates) the given system property and returns the saved instance. */
public SystemProperty saveSystemProperty(SystemProperty systemProperty);
/** Removes the given system property from the persistent store. */
public void deleteSystemProperty(SystemProperty systemProperty);
/** Returns all system properties; presumably an empty list when none exist — verify implementation. */
public List<SystemProperty> findAllSystemProperties();
/** Returns the property with the given name; presumably null when not found — verify implementation. */
public SystemProperty findSystemPropertyByName(String name);
/**
 * This method should not persist anything to the database. It should simply return the correct implementation of
 * the SystemProperty interface.
 * @return a new, unsaved SystemProperty instance
 */
public SystemProperty createNewSystemProperty();
} | 0true
| common_src_main_java_org_broadleafcommerce_common_config_service_SystemPropertiesService.java |
1,129 | public interface OSQLMethodFactory {
/**
 * Returns true if this factory can create a method with the given name.
 *
 * @param iName method name to look up
 */
boolean hasMethod(String iName);
/**
 * @return Set of supported method names of this factory
 */
Set<String> getMethodNames();
/**
 * Create method for the given name. returned method may be a new instance each time or a constant.
 *
 * @param name
 * @return OSQLMethod : created method
 * @throws OCommandExecutionException
 *           : when method creation fail
 */
OSQLMethod createMethod(String name) throws OCommandExecutionException;
} | 0true
| core_src_main_java_com_orientechnologies_orient_core_sql_method_OSQLMethodFactory.java |
284 | public class RecentFilesPopup extends PopupDialog {
// Popup dialog listing recently opened files with a type-ahead filter.
// Dirty editors' files are floated to the top of the list and prefixed
// with "*". Selection via Enter, mouse click, or double-click opens the
// file in an editor and closes the popup.
// Shared static history of recent files; capped at 10 (see addToHistory).
public static List<IFile> recents = new ArrayList<IFile>();
private Text filterText;
private TableViewer list;
RecentFilesPopup(Shell shell) {
super(shell, SWT.NONE, true, false, false, false, false, "", null);
}
// Builds the filtered table of recent files and wires up keyboard/mouse
// handlers for selection and opening.
@Override
protected Control createDialogArea(Composite parent) {
GridLayout layout = new GridLayout();
layout.marginTop = 0;
layout.marginLeft = 2;
layout.marginRight = 2;
layout.marginBottom = 2;
parent.setLayout(layout);
list = new TableViewer(parent, SWT.NO_TRIM|SWT.SINGLE|SWT.FULL_SELECTION);
// Case-insensitive prefix match against the current filter text.
list.setFilters(new ViewerFilter[] {new ViewerFilter() {
@Override
public boolean select(Viewer viewer, Object parentElement, Object element) {
return ((IFile) element).getName().toLowerCase()
.startsWith(filterText.getText().toLowerCase());
}
}});
// Prefix the label with "*" when the file is open in a dirty editor.
list.setLabelProvider(new StorageLabelProvider() {
@Override
public String getText(Object element) {
for (IEditorPart part: EditorUtil.getDirtyEditors()) {
if (getFile(part.getEditorInput())==element) {
return "*" + super.getText(element);
}
}
return super.getText(element);
}
});
list.setContentProvider(ArrayContentProvider.getInstance());
// NOTE(review): this Cursor is never disposed in the visible code —
// looks like an SWT resource leak; verify against the rest of the file.
list.getTable().setCursor(new Cursor(getShell().getDisplay(), SWT.CURSOR_HAND));
// Hover-to-select: the selection follows the mouse pointer.
list.getTable().addListener(SWT.MouseMove, new Listener() {
@Override
public void handleEvent(Event event) {
Rectangle bounds = event.getBounds();
TableItem item = list.getTable().getItem(new Point(bounds.x, bounds.y));
if (item!=null) {
list.setSelection(new StructuredSelection(item.getData()));
}
}
});
list.getTable().addKeyListener(new KeyListener() {
@Override
public void keyReleased(KeyEvent e) {}
@Override
public void keyPressed(KeyEvent e) {
if (e.keyCode == 0x0D || e.keyCode == SWT.KEYPAD_CR) { // Enter key
go();
}
}
});
list.getTable().addMouseListener(new MouseListener() {
@Override
public void mouseUp(MouseEvent e) {
go();
}
@Override
public void mouseDown(MouseEvent e) {}
@Override
public void mouseDoubleClick(MouseEvent e) {
go();
}
});
// Copy the shared history, then move files open in dirty editors to the front.
List<IFile> files = new ArrayList<IFile>(recents);
for (IEditorPart part: getDirtyEditors()) {
IFile file = getFile(part.getEditorInput());
if (file!=null) {
files.remove(file);
files.add(0, file);
}
}
list.setInput(files);
if (files.isEmpty()) {
filterText.setMessage("no files");
}
else {
list.setSelection(new StructuredSelection(files.get(0)));
}
return list.getControl();
}
// Opens the currently selected file in an editor and closes the popup.
void go() {
StructuredSelection selection = (StructuredSelection) list.getSelection();
IFile file = (IFile) selection.getFirstElement();
try {
IDE.openEditor(getActivePage(), file);
}
catch (PartInitException e) {
e.printStackTrace();
}
close();
}
// The popup's title area is the filter text field itself.
@Override
protected Control createTitleControl(Composite parent) {
filterText= createFilterText(parent);
return filterText;
}
protected Text getFilterText() {
return filterText;
}
// Creates the filter field: Enter opens the selection, arrow keys move
// focus to the table, ESC disposes the popup, and each edit re-filters
// the list and reselects the first match.
protected Text createFilterText(Composite parent) {
filterText= new Text(parent, SWT.NONE);
filterText.setMessage("type filter text");
Dialog.applyDialogFont(filterText);
GridData data= new GridData(GridData.FILL_HORIZONTAL);
data.horizontalAlignment= GridData.FILL;
data.verticalAlignment= GridData.CENTER;
filterText.setLayoutData(data);
filterText.addKeyListener(new KeyListener() {
public void keyPressed(KeyEvent e) {
if (e.keyCode == 0x0D || e.keyCode == SWT.KEYPAD_CR) // Enter key
go();
if (e.keyCode == SWT.ARROW_DOWN)
list.getTable().setFocus();
if (e.keyCode == SWT.ARROW_UP)
list.getTable().setFocus();
if (e.character == 0x1B) // ESC
dispose();
}
public void keyReleased(KeyEvent e) {
// do nothing
}
});
filterText.addModifyListener(new ModifyListener() {
@Override
public void modifyText(ModifyEvent e) {
list.refresh();
Object elem = list.getElementAt(0);
if (elem!=null) {
list.setSelection(new StructuredSelection(elem));
}
}
});
return filterText;
}
public final void dispose() {
close();
}
public void widgetDisposed(DisposeEvent event) {
list = null;
filterText = null;
}
public void setFocus() {
getShell().forceFocus();
filterText.setFocus();
}
// Appends a file to the shared history, skipping duplicates and evicting
// the oldest entry once the history exceeds 10 files.
public static void addToHistory(IFile file) {
if (file!=null) {
if (!recents.contains(file)) {
recents.add(file);
if (recents.size()>10) {
recents.remove(0);
}
}
}
}
} | 0true
| plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_editor_RecentFilesPopup.java |
1,217 | public interface CompositePaymentResponse {
/**
 * Aggregate result of a composite payment operation against an order.
 */
/** Mapping of each payment info to its secure referenced payment data. */
public Map<PaymentInfo, Referenced> getInfos();
/** The order this composite payment applies to. */
public Order getOrder();
/** The underlying payment response returned by the payment workflow. */
public PaymentResponse getPaymentResponse();
} | 0true
| core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_payment_service_workflow_CompositePaymentResponse.java |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.