Unnamed: 0 (int64, 0-6.45k) | func (string, lengths 37-161k) | target (class label, 2 classes) | project (string, lengths 33-167)
---|---|---|---|
78 |
@Entity
@Inheritance(strategy = InheritanceType.JOINED)
@EntityListeners(value = { AdminAuditableListener.class })
@Table(name = "BLC_STATIC_ASSET")
@Cache(usage= CacheConcurrencyStrategy.NONSTRICT_READ_WRITE, region="blCMSElements")
@AdminPresentationOverrides(
{
@AdminPresentationOverride(name="auditable.createdBy.id", value=@AdminPresentation(readOnly = true, visibility = VisibilityEnum.HIDDEN_ALL)),
@AdminPresentationOverride(name="auditable.updatedBy.id", value=@AdminPresentation(readOnly = true, visibility = VisibilityEnum.HIDDEN_ALL)),
@AdminPresentationOverride(name="auditable.createdBy.name", value=@AdminPresentation(readOnly = true, visibility = VisibilityEnum.HIDDEN_ALL)),
@AdminPresentationOverride(name="auditable.updatedBy.name", value=@AdminPresentation(readOnly = true, visibility = VisibilityEnum.HIDDEN_ALL)),
@AdminPresentationOverride(name="auditable.dateCreated", value=@AdminPresentation(readOnly = true, visibility = VisibilityEnum.HIDDEN_ALL)),
@AdminPresentationOverride(name="auditable.dateUpdated", value=@AdminPresentation(readOnly = true, visibility = VisibilityEnum.HIDDEN_ALL)),
@AdminPresentationOverride(name="sandbox", value=@AdminPresentation(excluded = true))
}
)
@AdminPresentationClass(populateToOneFields = PopulateToOneFieldsEnum.TRUE)
public class StaticAssetImpl implements StaticAsset, AdminMainEntity {
private static final long serialVersionUID = 6990685254640110350L;
@Id
@GeneratedValue(generator = "StaticAssetId")
@GenericGenerator(
name="StaticAssetId",
strategy="org.broadleafcommerce.common.persistence.IdOverrideTableGenerator",
parameters = {
@Parameter(name="segment_value", value="StaticAssetImpl"),
@Parameter(name="entity_name", value="org.broadleafcommerce.cms.file.domain.StaticAssetImpl")
}
)
@Column(name = "STATIC_ASSET_ID")
protected Long id;
@Embedded
@AdminPresentation(excluded = true)
protected AdminAuditable auditable = new AdminAuditable();
@Column(name = "NAME", nullable = false)
@AdminPresentation(friendlyName = "StaticAssetImpl_Item_Name",
order = Presentation.FieldOrder.NAME,
requiredOverride = RequiredOverride.NOT_REQUIRED,
gridOrder = Presentation.FieldOrder.NAME,
prominent = true)
protected String name;
/*@ManyToOne(targetEntity = SiteImpl.class)
@JoinColumn(name="SITE_ID")*/
@Transient
@AdminPresentation(excluded = true)
protected Site site;
@Column(name ="FULL_URL", nullable = false)
@AdminPresentation(friendlyName = "StaticAssetImpl_Full_URL",
order = Presentation.FieldOrder.URL,
gridOrder = Presentation.FieldOrder.URL,
requiredOverride = RequiredOverride.REQUIRED,
fieldType = SupportedFieldType.ASSET_URL,
prominent = true)
@Index(name="ASST_FULL_URL_INDX", columnNames={"FULL_URL"})
protected String fullUrl;
@Column(name = "TITLE", nullable = true)
@AdminPresentation(friendlyName = "StaticAssetImpl_Title",
order = Presentation.FieldOrder.TITLE,
translatable = true)
protected String title;
@Column(name = "ALT_TEXT", nullable = true)
@AdminPresentation(friendlyName = "StaticAssetImpl_Alt_Text",
order = Presentation.FieldOrder.ALT_TEXT,
translatable = true)
protected String altText;
@Column(name = "MIME_TYPE")
@AdminPresentation(friendlyName = "StaticAssetImpl_Mime_Type",
order = Presentation.FieldOrder.MIME_TYPE,
tab = Presentation.Tab.Name.File_Details, tabOrder = Presentation.Tab.Order.File_Details,
readOnly = true)
protected String mimeType;
@Column(name = "FILE_SIZE")
@AdminPresentation(friendlyName = "StaticAssetImpl_File_Size_Bytes",
order = Presentation.FieldOrder.FILE_SIZE,
tab = Presentation.Tab.Name.File_Details, tabOrder = Presentation.Tab.Order.File_Details,
readOnly = true)
protected Long fileSize;
@Column(name = "FILE_EXTENSION")
@AdminPresentation(friendlyName = "StaticAssetImpl_File_Extension",
order = Presentation.FieldOrder.FILE_EXTENSION,
tab = Presentation.Tab.Name.File_Details, tabOrder = Presentation.Tab.Order.File_Details,
readOnly = true)
protected String fileExtension;
@ManyToMany(targetEntity = StaticAssetDescriptionImpl.class, cascade = CascadeType.ALL)
@JoinTable(name = "BLC_ASSET_DESC_MAP", joinColumns = @JoinColumn(name = "STATIC_ASSET_ID"),
inverseJoinColumns = @JoinColumn(name = "STATIC_ASSET_DESC_ID"))
@MapKeyColumn(name = "MAP_KEY")
@Cascade(value={org.hibernate.annotations.CascadeType.ALL, org.hibernate.annotations.CascadeType.DELETE_ORPHAN})
@Cache(usage = CacheConcurrencyStrategy.READ_WRITE, region="blCMSElements")
@BatchSize(size = 20)
@AdminPresentationMap(
excluded = true,
tab = Presentation.Tab.Name.Advanced, tabOrder = Presentation.Tab.Order.Advanced,
friendlyName = "assetDescriptionTitle",
keyPropertyFriendlyName = "SkuImpl_Sku_Media_Key",
deleteEntityUponRemove = true,
mapKeyOptionEntityClass = LocaleImpl.class,
mapKeyOptionEntityDisplayField = "friendlyName",
mapKeyOptionEntityValueField = "localeCode"
)
protected Map<String,StaticAssetDescription> contentMessageValues = new HashMap<String,StaticAssetDescription>();
@ManyToOne (targetEntity = SandBoxImpl.class)
@JoinColumn(name = "SANDBOX_ID")
@AdminPresentation(excluded = true)
protected SandBox sandbox;
@ManyToOne(targetEntity = SandBoxImpl.class)
@JoinColumn(name = "ORIG_SANDBOX_ID")
@AdminPresentation(excluded = true)
protected SandBox originalSandBox;
@Column (name = "ARCHIVED_FLAG")
@AdminPresentation(friendlyName = "StaticAssetImpl_Archived_Flag", visibility = VisibilityEnum.HIDDEN_ALL)
@Index(name="ASST_ARCHVD_FLG_INDX", columnNames={"ARCHIVED_FLAG"})
protected Boolean archivedFlag = false;
@Column (name = "DELETED_FLAG")
@AdminPresentation(friendlyName = "StaticAssetImpl_Deleted_Flag", visibility = VisibilityEnum.HIDDEN_ALL)
@Index(name="ASST_DLTD_FLG_INDX", columnNames={"DELETED_FLAG"})
protected Boolean deletedFlag = false;
@Column (name = "LOCKED_FLAG")
@AdminPresentation(friendlyName = "StaticAssetImpl_Is_Locked", visibility = VisibilityEnum.HIDDEN_ALL)
@Index(name="ASST_LCKD_FLG_INDX", columnNames={"LOCKED_FLAG"})
protected Boolean lockedFlag = false;
@Column (name = "ORIG_ASSET_ID")
@AdminPresentation(friendlyName = "StaticAssetImpl_Original_Asset_ID", visibility = VisibilityEnum.HIDDEN_ALL)
@Index(name="ORIG_ASSET_ID_INDX", columnNames={"ORIG_ASSET_ID"})
protected Long originalAssetId;
@Column(name = "STORAGE_TYPE")
@AdminPresentation(excluded = true)
protected String storageType;
@Override
public String getFullUrl() {
return fullUrl;
}
@Override
public void setFullUrl(String fullUrl) {
this.fullUrl = fullUrl;
}
@Override
public String getTitle() {
return title;
}
@Override
public void setTitle(String title) {
this.title = title;
}
@Override
public String getAltText() {
return altText;
}
@Override
public void setAltText(String altText) {
this.altText = altText;
}
@Override
public Long getFileSize() {
return fileSize;
}
@Override
public void setFileSize(Long fileSize) {
this.fileSize = fileSize;
}
@Override
public Map<String, StaticAssetDescription> getContentMessageValues() {
return contentMessageValues;
}
@Override
public void setContentMessageValues(Map<String, StaticAssetDescription> contentMessageValues) {
this.contentMessageValues = contentMessageValues;
}
@Override
public Boolean getArchivedFlag() {
if (archivedFlag == null) {
return Boolean.FALSE;
} else {
return archivedFlag;
}
}
@Override
public void setArchivedFlag(Boolean archivedFlag) {
this.archivedFlag = archivedFlag;
}
@Override
public Long getOriginalAssetId() {
return originalAssetId;
}
@Override
public void setOriginalAssetId(Long originalAssetId) {
this.originalAssetId = originalAssetId;
}
@Override
public SandBox getSandbox() {
return sandbox;
}
@Override
public void setSandbox(SandBox sandbox) {
this.sandbox = sandbox;
}
@Override
public String getMimeType() {
return mimeType;
}
@Override
public void setMimeType(String mimeType) {
this.mimeType = mimeType;
}
@Override
public String getFileExtension() {
return fileExtension;
}
@Override
public void setFileExtension(String fileExtension) {
this.fileExtension = fileExtension;
}
@Override
public SandBox getOriginalSandBox() {
return originalSandBox;
}
@Override
public void setOriginalSandBox(SandBox originalSandBox) {
this.originalSandBox = originalSandBox;
}
@Override
public AdminAuditable getAuditable() {
return auditable;
}
@Override
public void setAuditable(AdminAuditable auditable) {
this.auditable = auditable;
}
@Override
public Boolean getDeletedFlag() {
return deletedFlag;
}
@Override
public void setDeletedFlag(Boolean deletedFlag) {
this.deletedFlag = deletedFlag;
}
@Override
public Long getId() {
return id;
}
@Override
public void setId(Long id) {
this.id = id;
}
@Override
public Boolean getLockedFlag() {
return lockedFlag;
}
@Override
public void setLockedFlag(Boolean lockedFlag) {
this.lockedFlag = lockedFlag;
}
@Override
public String getName() {
return name;
}
@Override
public void setName(String name) {
this.name = name;
}
@Override
public Site getSite() {
return site;
}
@Override
public void setSite(Site site) {
this.site = site;
}
@Override
public StaticAsset cloneEntity() {
StaticAssetImpl asset = new StaticAssetImpl();
asset.name = name;
asset.site = site;
asset.archivedFlag = archivedFlag;
asset.deletedFlag = deletedFlag;
asset.fullUrl = fullUrl;
asset.fileSize = fileSize;
asset.mimeType = mimeType;
asset.sandbox = sandbox;
asset.originalSandBox = originalSandBox;
asset.originalAssetId = originalAssetId;
asset.fileExtension = fileExtension;
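// Deep-copy the locale-keyed descriptions below; the remaining references (site, sandboxes) are shared with the source asset.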
for (String key : contentMessageValues.keySet()) {
StaticAssetDescription oldAssetDescription = contentMessageValues.get(key);
StaticAssetDescription newAssetDescription = oldAssetDescription.cloneEntity();
asset.getContentMessageValues().put(key, newAssetDescription);
}
return asset;
}
@Override
public StorageType getStorageType() {
StorageType st = StorageType.getInstance(storageType);
if (st == null) {
return StorageType.DATABASE;
} else {
return st;
}
}
@Override
public void setStorageType(StorageType storageType) {
this.storageType = storageType.getType();
}
public static class Presentation {
public static class Tab {
public static class Name {
public static final String File_Details = "StaticAssetImpl_FileDetails_Tab";
public static final String Advanced = "StaticAssetImpl_Advanced_Tab";
}
public static class Order {
public static final int File_Details = 2000;
public static final int Advanced = 3000;
}
}
public static class FieldOrder {
// General Fields
public static final int NAME = 1000;
public static final int URL = 2000;
public static final int TITLE = 3000;
public static final int ALT_TEXT = 4000;
public static final int MIME_TYPE = 5000;
public static final int FILE_EXTENSION = 6000;
public static final int FILE_SIZE = 7000;
// Used by subclasses to know where the last field is.
public static final int LAST = 7000;
}
}
@Override
public String getMainEntityName() {
return getName();
}
}
| 0true
|
admin_broadleaf-contentmanagement-module_src_main_java_org_broadleafcommerce_cms_file_domain_StaticAssetImpl.java
|
315 |
public class DynamicResourceIterator extends ArrayList<ResourceInputStream> {
private static final Log LOG = LogFactory.getLog(DynamicResourceIterator.class);
private int position = 0;
private int embeddedInsertPosition = 0;
public ResourceInputStream nextResource() {
ResourceInputStream ris = get(position);
position++;
embeddedInsertPosition = position;
return ris;
}
public int getPosition() {
return position;
}
public void addEmbeddedResource(ResourceInputStream ris) {
if (embeddedInsertPosition == size()) {
add(ris);
} else {
add(embeddedInsertPosition, ris);
}
embeddedInsertPosition++;
}
public boolean hasNext() {
return position < size();
}
@Override
public boolean add(ResourceInputStream resourceInputStream) {
byte[] sourceArray;
try {
sourceArray = buildArrayFromStream(resourceInputStream);
} catch (IOException e) {
throw new RuntimeException(e);
}
ResourceInputStream ris = new ResourceInputStream(new ByteArrayInputStream(sourceArray), null, resourceInputStream.getNames());
return super.add(ris);
}
@Override
public boolean addAll(Collection<? extends ResourceInputStream> c) {
for (ResourceInputStream ris : c) {
if (!add(ris)) {
return false;
}
}
return true;
}
@Override
public void add(int index, ResourceInputStream resourceInputStream) {
byte[] sourceArray;
try {
sourceArray = buildArrayFromStream(resourceInputStream);
} catch (IOException e) {
throw new RuntimeException(e);
}
ResourceInputStream ris = new ResourceInputStream(new ByteArrayInputStream(sourceArray), null, resourceInputStream.getNames());
super.add(index, ris);
}
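// Buffers the entire source stream into memory so the resulting ResourceInputStream can be re-read; the source stream is always closed, even on failure.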
protected byte[] buildArrayFromStream(InputStream source) throws IOException {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
boolean eof = false;
try{
while (!eof) {
int temp = source.read();
if (temp == -1) {
eof = true;
} else {
baos.write(temp);
}
}
} finally {
try{ source.close(); } catch (Throwable e) {
LOG.error("Unable to merge source and patch locations", e);
}
}
return baos.toByteArray();
}
}
| 0true
|
common_src_main_java_org_broadleafcommerce_common_extensibility_context_merge_DynamicResourceIterator.java
|
29 |
public static enum PartialSearchMode {
/**
* Any partially matched key will be used as search result.
*/
NONE,
/**
* The biggest partially matched key will be used as search result.
*/
HIGHEST_BOUNDARY,
/**
* The smallest partially matched key will be used as search result.
*/
LOWEST_BOUNDARY
}
| 0true
|
commons_src_main_java_com_orientechnologies_common_collection_OMVRBTree.java
|
1,439 |
public class LocalRegionCache implements RegionCache {
protected final ITopic<Object> topic;
protected final MessageListener<Object> messageListener;
protected final ConcurrentMap<Object, Value> cache;
protected final Comparator versionComparator;
protected MapConfig config;
public LocalRegionCache(final String name, final HazelcastInstance hazelcastInstance,
final CacheDataDescription metadata) {
try {
config = hazelcastInstance != null ? hazelcastInstance.getConfig().findMapConfig(name) : null;
} catch (UnsupportedOperationException ignored) {
}
versionComparator = metadata != null && metadata.isVersioned() ? metadata.getVersionComparator() : null;
cache = new ConcurrentHashMap<Object, Value>();
messageListener = createMessageListener();
if (hazelcastInstance != null) {
topic = hazelcastInstance.getTopic(name);
topic.addMessageListener(messageListener);
} else {
topic = null;
}
}
public Object get(final Object key) {
final Value value = cache.get(key);
return value != null ? value.getValue() : null;
}
public boolean put(final Object key, final Object value, final Object currentVersion) {
final Value newValue = new Value(currentVersion, value, null, Clock.currentTimeMillis());
cache.put(key, newValue);
return true;
}
public boolean update(final Object key, final Object value, final Object currentVersion,
final Object previousVersion, final SoftLock lock) {
if (lock == LOCK_FAILURE) {
return false;
}
final Value currentValue = cache.get(key);
if (lock == LOCK_SUCCESS) {
if (currentValue != null && currentVersion != null
&& versionComparator.compare(currentVersion, currentValue.getVersion()) < 0) {
return false;
}
}
if (topic != null) {
topic.publish(createMessage(key, value, currentVersion));
}
cache.put(key, new Value(currentVersion, value, lock, Clock.currentTimeMillis()));
return true;
}
protected Object createMessage(final Object key, Object value, final Object currentVersion) {
return new Invalidation(key, currentVersion);
}
protected MessageListener<Object> createMessageListener() {
return new MessageListener<Object>() {
public void onMessage(final Message<Object> message) {
final Invalidation invalidation = (Invalidation) message.getMessageObject();
if (versionComparator != null) {
final Value value = cache.get(invalidation.getKey());
if (value != null) {
Object currentVersion = value.getVersion();
Object newVersion = invalidation.getVersion();
if (versionComparator.compare(newVersion, currentVersion) > 0) {
cache.remove(invalidation.getKey(), value);
}
}
} else {
cache.remove(invalidation.getKey());
}
}
};
}
public boolean remove(final Object key) {
final Value value = cache.remove(key);
if (value != null) {
if (topic != null) {
topic.publish(createMessage(key, null, value.getVersion()));
}
return true;
}
return false;
}
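// Grant the lock when the key is absent or the requested version is not older than the cached value's version; any race or stale version yields LOCK_FAILURE.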
public SoftLock tryLock(final Object key, final Object version) {
final Value value = cache.get(key);
if (value == null) {
if (cache.putIfAbsent(key, new Value(version, null, LOCK_SUCCESS, Clock.currentTimeMillis())) == null) {
return LOCK_SUCCESS;
} else {
return LOCK_FAILURE;
}
} else {
if (version == null || versionComparator.compare(version, value.getVersion()) >= 0) {
if (cache.replace(key, value, value.createLockedValue(LOCK_SUCCESS))) {
return LOCK_SUCCESS;
} else {
return LOCK_FAILURE;
}
} else {
return LOCK_FAILURE;
}
}
}
public void unlock(final Object key, SoftLock lock) {
final Value value = cache.get(key);
if (value != null) {
final SoftLock currentLock = value.getLock();
if (currentLock == lock) {
cache.replace(key, value, value.createUnlockedValue());
}
}
}
public boolean contains(final Object key) {
return cache.containsKey(key);
}
public void clear() {
cache.clear();
}
public long size() {
return cache.size();
}
public long getSizeInMemory() {
return 0;
}
public Map asMap() {
return cache;
}
void cleanup() {
final int maxSize;
final long timeToLive;
if (config != null) {
maxSize = config.getMaxSizeConfig().getSize();
timeToLive = config.getTimeToLiveSeconds() * 1000L;
} else {
maxSize = 100000;
timeToLive = CacheEnvironment.getDefaultCacheTimeoutInMillis();
}
if ((maxSize > 0 && maxSize != Integer.MAX_VALUE) || timeToLive > 0) {
final Iterator<Entry<Object, Value>> iter = cache.entrySet().iterator();
SortedSet<EvictionEntry> entries = null;
final long now = Clock.currentTimeMillis();
while (iter.hasNext()) {
final Entry<Object, Value> e = iter.next();
final Object k = e.getKey();
final Value v = e.getValue();
if (v.getLock() == LOCK_SUCCESS) {
continue;
}
if (v.getCreationTime() + timeToLive < now) {
iter.remove();
} else if (maxSize > 0 && maxSize != Integer.MAX_VALUE) {
if (entries == null) {
entries = new TreeSet<EvictionEntry>();
}
entries.add(new EvictionEntry(k, v));
}
}
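// Evict the overflow plus roughly 20% of maxSize as headroom, removing the oldest unlocked entries first (EvictionEntry orders by creation time).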
final int diff = cache.size() - maxSize;
final int k = diff >= 0 ? (diff + maxSize * 20 / 100) : 0;
if (k > 0 && entries != null) {
int i = 0;
for (EvictionEntry entry : entries) {
if (cache.remove(entry.key, entry.value)) {
if (++i == k) {
break;
}
}
}
}
}
}
private static class EvictionEntry implements Comparable<EvictionEntry> {
final Object key;
final Value value;
private EvictionEntry(final Object key, final Value value) {
this.key = key;
this.value = value;
}
public int compareTo(final EvictionEntry o) {
final long thisVal = this.value.getCreationTime();
final long anotherVal = o.value.getCreationTime();
return (thisVal < anotherVal ? -1 : (thisVal == anotherVal ? 0 : 1));
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
EvictionEntry that = (EvictionEntry) o;
if (key != null ? !key.equals(that.key) : that.key != null) return false;
if (value != null ? !value.equals(that.value) : that.value != null) return false;
return true;
}
@Override
public int hashCode() {
return key != null ? key.hashCode() : 0;
}
}
private static final SoftLock LOCK_SUCCESS = new SoftLock() {
@Override
public String toString() {
return "Lock::Success";
}
};
private static final SoftLock LOCK_FAILURE = new SoftLock() {
@Override
public String toString() {
return "Lock::Failure";
}
};
}
| 1no label
|
hazelcast-hibernate_hazelcast-hibernate3_src_main_java_com_hazelcast_hibernate_local_LocalRegionCache.java
|
57 |
public class AbstractContentService {
private static final Log LOG = LogFactory.getLog(AbstractContentService.class);
public <T, U> List<T> findItems(SandBox sandbox, Criteria c, Class<T> baseClass, Class<U> concreteClass, String originalIdProperty) {
c.add(Restrictions.eq("archivedFlag", false));
if (sandbox == null) {
// Query is hitting the production sandbox for a single site
c.add(Restrictions.isNull("sandbox"));
return (List<T>) c.list();
} else if (SandBoxType.PRODUCTION.equals(sandbox.getSandBoxType())) {
// Query is hitting the production sandbox for a multi-site
c.add(Restrictions.eq("sandbox", sandbox));
return (List<T>) c.list();
} else {
addSandboxCriteria(sandbox, c, concreteClass, originalIdProperty);
return (List<T>) c.list();
}
}
public <T> Long countItems(SandBox sandbox, Criteria c, Class<T> concreteClass, String originalIdProperty) {
c.add(Restrictions.eq("archivedFlag", false));
c.setProjection(Projections.rowCount());
if (sandbox == null) {
// Query is hitting the production sandbox for a single site
c.add(Restrictions.isNull("sandbox"));
return (Long) c.uniqueResult();
} else if (SandBoxType.PRODUCTION.equals(sandbox.getSandBoxType())) {
// Query is hitting the production sandbox for a multi-site
c.add(Restrictions.eq("sandbox", sandbox));
return (Long) c.uniqueResult();
} else {
addSandboxCriteria(sandbox, c, concreteClass, originalIdProperty);
return (Long) c.uniqueResult();
}
}
private <T> void addSandboxCriteria(SandBox sandbox, Criteria c, Class<T> type, String originalIdProperty) {
Criterion originalSandboxExpression = Restrictions.eq("originalSandBox", sandbox);
Criterion currentSandboxExpression = Restrictions.eq("sandbox", sandbox);
Criterion userSandboxExpression = Restrictions.or(currentSandboxExpression, originalSandboxExpression);
Criterion productionSandboxExpression = null;
if (sandbox.getSite() == null || sandbox.getSite().getProductionSandbox() == null) {
productionSandboxExpression = Restrictions.isNull("sandbox");
} else {
productionSandboxExpression = Restrictions.eq("sandbox", sandbox.getSite().getProductionSandbox());
}
if (productionSandboxExpression != null) {
c.add(Restrictions.or(userSandboxExpression, productionSandboxExpression));
} else {
c.add(userSandboxExpression);
}
// Build a sub-query to exclude items from production that are also in my sandbox.
// (e.g. my sandbox always wins even if the items in my sandbox don't match the
// current criteria.)
//
// This subquery prevents the following:
// 1. Duplicate items (one for sbox, one for prod)
// 2. Filter issues where the production item qualifies for the passed in criteria
// but has been modified so that the item in the sandbox no longer does.
// 3. Inverse of #2.
DetachedCriteria existsInSboxCriteria = DetachedCriteria.forClass(type, "sboxItem");
existsInSboxCriteria.add(userSandboxExpression);
existsInSboxCriteria.add(Restrictions.eq("archivedFlag", false));
String outerAlias = c.getAlias();
existsInSboxCriteria.add(Property.forName(outerAlias + ".id").eqProperty("sboxItem."+originalIdProperty));
existsInSboxCriteria.setProjection(Projections.id());
c.add(Subqueries.notExists(existsInSboxCriteria));
}
}
| 0true
|
admin_broadleaf-contentmanagement-module_src_main_java_org_broadleafcommerce_cms_common_AbstractContentService.java
|
1,379 |
public interface Custom {
String type();
interface Factory<T extends Custom> {
String type();
T readFrom(StreamInput in) throws IOException;
void writeTo(T customIndexMetaData, StreamOutput out) throws IOException;
T fromMap(Map<String, Object> map) throws IOException;
T fromXContent(XContentParser parser) throws IOException;
void toXContent(T customIndexMetaData, XContentBuilder builder, ToXContent.Params params) throws IOException;
/**
* Merges from first to second, with first being more important, i.e., if something exists in first and second,
* first will prevail.
*/
T merge(T first, T second);
}
}
| 0true
|
src_main_java_org_elasticsearch_cluster_metadata_IndexMetaData.java
|
206 |
public class CacheFactoryException extends Exception {
private static final long serialVersionUID = 1L;
public CacheFactoryException() {
super();
}
public CacheFactoryException(String message, Throwable cause) {
super(message, cause);
}
public CacheFactoryException(String message) {
super(message);
}
public CacheFactoryException(Throwable cause) {
super(cause);
}
}
| 0true
|
common_src_main_java_org_broadleafcommerce_common_cache_engine_CacheFactoryException.java
|
56 |
@Service("blAssetFormBuilderService")
public class AssetFormBuilderServiceImpl implements AssetFormBuilderService {
@Resource(name = "blFormBuilderService")
protected FormBuilderService formBuilderService;
@Resource(name = "blStaticAssetService")
protected StaticAssetService staticAssetService;
@Resource(name = "blStaticMapNamedOperationComponent")
protected StaticMapNamedOperationComponent operationMap;
@Override
public void addImageThumbnailField(ListGrid listGrid, String urlField) {
listGrid.getHeaderFields().add(new Field()
.withName("thumbnail")
.withFriendlyName("Asset_thumbnail")
.withFieldType(SupportedFieldType.STRING.toString())
.withOrder(Integer.MIN_VALUE)
.withColumnWidth("50px")
.withFilterSortDisabled(true));
for (ListGridRecord record : listGrid.getRecords()) {
// Get the value of the URL
String imageUrl = record.getField(urlField).getValue();
// Prepend the static asset url prefix if necessary
String staticAssetUrlPrefix = staticAssetService.getStaticAssetUrlPrefix();
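// When no prefix is configured the raw URL is used as-is; otherwise the prefix is made absolute and prepended.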
if (staticAssetUrlPrefix != null && !staticAssetUrlPrefix.startsWith("/")) {
staticAssetUrlPrefix = "/" + staticAssetUrlPrefix;
}
if (staticAssetUrlPrefix == null) {
staticAssetUrlPrefix = "";
} else {
imageUrl = staticAssetUrlPrefix + imageUrl;
}
MediaField mf = (MediaField) new MediaField()
.withName("thumbnail")
.withFriendlyName("Asset_thumbnail")
.withFieldType(SupportedFieldType.IMAGE.toString())
.withOrder(Integer.MIN_VALUE)
.withValue(imageUrl);
// Add a hidden field for the large thumbnail path
record.getHiddenFields().add(new Field()
.withName("cmsUrlPrefix")
.withValue(staticAssetUrlPrefix));
record.getHiddenFields().add(new Field()
.withName("thumbnailKey")
.withValue("?smallAdminThumbnail"));
record.getHiddenFields().add(new Field()
.withName("servletContext")
.withValue(BroadleafRequestContext.getBroadleafRequestContext().getRequest().getContextPath()));
// Set the height value on this field
mf.setHeight(operationMap.getNamedOperations().get("smallAdminThumbnail").get("resize-height-amount"));
record.getFields().add(mf);
// Since we've added a new field, we need to clear the cached map to ensure it will display
record.clearFieldMap();
}
}
}
| 0true
|
admin_broadleaf-contentmanagement-module_src_main_java_org_broadleafcommerce_cms_admin_web_service_AssetFormBuilderServiceImpl.java
|
492 |
private static class DocumentRewriter implements FieldRewriter<ODocument> {
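// Rewrites each field of the document and returns it, or null when no field needed rewriting.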
@Override
public ODocument rewriteValue(ODocument documentValue) {
boolean wasRewritten = false;
documentValue.setLazyLoad(false);
for (String fieldName : documentValue.fieldNames()) {
Object fieldValue = documentValue.field(fieldName);
FieldRewriter<Object> fieldRewriter = RewritersFactory.INSTANCE.findRewriter(documentValue, fieldName, fieldValue);
Object newFieldValue = fieldRewriter.rewriteValue(fieldValue);
if (newFieldValue != null) {
documentValue.field(fieldName, newFieldValue);
wasRewritten = true;
}
}
if (wasRewritten)
return documentValue;
return null;
}
}
| 0true
|
core_src_main_java_com_orientechnologies_orient_core_db_tool_ODatabaseImport.java
|
6 |
fBrowser.addProgressListener(new ProgressAdapter() {
@Override
public void completed(ProgressEvent event) {
fCompleted= true;
}
});
| 0true
|
plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_browser_BrowserInformationControl.java
|
6,231 |
public class RestClient implements Closeable {
private static final ESLogger logger = Loggers.getLogger(RestClient.class);
private final RestSpec restSpec;
private final CloseableHttpClient httpClient;
private final String host;
private final int port;
private final String esVersion;
public RestClient(String host, int port, RestSpec restSpec) throws IOException, RestException {
this.restSpec = restSpec;
this.httpClient = createHttpClient();
this.host = host;
this.port = port;
this.esVersion = readVersion();
logger.info("REST client initialized [{}:{}], elasticsearch version: [{}]", host, port, esVersion);
}
private String readVersion() throws IOException, RestException {
//we make a manual call here without using callApi method, mainly because we are initializing
//and the randomized context doesn't exist for the current thread (would be used to choose the method otherwise)
RestApi restApi = restApi("info");
assert restApi.getPaths().size() == 1;
assert restApi.getMethods().size() == 1;
RestResponse restResponse = new RestResponse(httpRequestBuilder()
.path(restApi.getPaths().get(0))
.method(restApi.getMethods().get(0)).execute());
checkStatusCode(restResponse);
Object version = restResponse.evaluate("version.number");
if (version == null) {
throw new RuntimeException("elasticsearch version not found in the response");
}
return version.toString();
}
public String getEsVersion() {
return esVersion;
}
/**
* Calls an api with the provided parameters
* @throws RestException if the obtained status code is non ok, unless the specific error code needs to be ignored
* according to the ignore parameter received as input (which won't get sent to elasticsearch)
*/
public RestResponse callApi(String apiName, String... params) throws IOException, RestException {
if (params.length % 2 != 0) {
throw new IllegalArgumentException("The number of params passed must be even but was [" + params.length + "]");
}
Map<String, String> paramsMap = Maps.newHashMap();
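// params arrive as alternating key/value pairs; i advances twice per iteration (once in the body as the key index, once in the loop header).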
for (int i = 0; i < params.length; i++) {
paramsMap.put(params[i++], params[i]);
}
return callApi(apiName, paramsMap, null);
}
/**
* Calls an api with the provided parameters and body
* @throws RestException if the obtained status code is non ok, unless the specific error code needs to be ignored
* according to the ignore parameter received as input (which won't get sent to elasticsearch)
*/
public RestResponse callApi(String apiName, Map<String, String> params, String body) throws IOException, RestException {
List<Integer> ignores = Lists.newArrayList();
Map<String, String> requestParams = null;
if (params != null) {
//makes a copy of the parameters before modifying them for this specific request
requestParams = Maps.newHashMap(params);
//ignore is a special parameter supported by the clients, shouldn't be sent to es
String ignoreString = requestParams.remove("ignore");
if (Strings.hasLength(ignoreString)) {
try {
ignores.add(Integer.valueOf(ignoreString));
} catch(NumberFormatException e) {
throw new IllegalArgumentException("ignore value should be a number, found [" + ignoreString + "] instead");
}
}
}
HttpRequestBuilder httpRequestBuilder = callApiBuilder(apiName, requestParams, body);
logger.debug("calling api [{}]", apiName);
HttpResponse httpResponse = httpRequestBuilder.execute();
//http HEAD doesn't support response body
// For the few api (exists class of api) that use it we need to accept 404 too
if (!httpResponse.supportsBody()) {
ignores.add(404);
}
RestResponse restResponse = new RestResponse(httpResponse);
checkStatusCode(restResponse, ignores);
return restResponse;
}
private void checkStatusCode(RestResponse restResponse, List<Integer> ignores) throws RestException {
//ignore is a catch within the client, to prevent the client from throwing error if it gets non ok codes back
if (ignores.contains(restResponse.getStatusCode())) {
if (logger.isDebugEnabled()) {
logger.debug("ignored non ok status codes {} as requested", ignores);
}
return;
}
checkStatusCode(restResponse);
}
private void checkStatusCode(RestResponse restResponse) throws RestException {
if (restResponse.isError()) {
throw new RestException("non ok status code [" + restResponse.getStatusCode() + "] returned", restResponse);
}
}
private HttpRequestBuilder callApiBuilder(String apiName, Map<String, String> params, String body) {
//create doesn't exist in the spec but is supported in the clients (index with op_type=create)
boolean indexCreateApi = "create".equals(apiName);
String api = indexCreateApi ? "index" : apiName;
RestApi restApi = restApi(api);
HttpRequestBuilder httpRequestBuilder = httpRequestBuilder();
if (Strings.hasLength(body)) {
if (!restApi.isBodySupported()) {
throw new IllegalArgumentException("body is not supported by [" + restApi.getName() + "] api");
}
httpRequestBuilder.body(body);
} else {
if (restApi.isBodyRequired()) {
throw new IllegalArgumentException("body is required by [" + restApi.getName() + "] api");
}
}
//divide params between ones that go within query string and ones that go within path
Map<String, String> pathParts = Maps.newHashMap();
if (params != null) {
for (Map.Entry<String, String> entry : params.entrySet()) {
if (restApi.getPathParts().contains(entry.getKey())) {
pathParts.put(entry.getKey(), entry.getValue());
} else {
if (!restApi.getParams().contains(entry.getKey())) {
throw new IllegalArgumentException("param [" + entry.getKey() + "] not supported in [" + restApi.getName() + "] api");
}
httpRequestBuilder.addParam(entry.getKey(), entry.getValue());
}
}
}
if (indexCreateApi) {
httpRequestBuilder.addParam("op_type", "create");
}
//the http method is randomized (out of the available ones with the chosen api)
return httpRequestBuilder.method(RandomizedTest.randomFrom(restApi.getSupportedMethods(pathParts.keySet())))
.path(RandomizedTest.randomFrom(restApi.getFinalPaths(pathParts)));
}
private RestApi restApi(String apiName) {
RestApi restApi = restSpec.getApi(apiName);
if (restApi == null) {
throw new IllegalArgumentException("rest api [" + apiName + "] doesn't exist in the rest spec");
}
return restApi;
}
protected HttpRequestBuilder httpRequestBuilder() {
return new HttpRequestBuilder(httpClient).host(host).port(port);
}
protected CloseableHttpClient createHttpClient() {
return HttpClients.createDefault();
}
/**
* Closes the REST client and the underlying http client
*/
public void close() {
try {
httpClient.close();
} catch(IOException e) {
logger.error(e.getMessage(), e);
}
}
}
| 1no label
|
src_test_java_org_elasticsearch_test_rest_client_RestClient.java
|
729 |
loadEntriesMajor(key, inclusive, new RangeResultListener<K, V>() {
@Override
public boolean addResult(Map.Entry<K, V> entry) {
result.add(entry.getValue());
if (maxValuesToFetch > -1 && result.size() >= maxValuesToFetch)
return false;
return true;
}
});
| 1no label
|
core_src_main_java_com_orientechnologies_orient_core_index_sbtree_local_OSBTree.java
|
91 |
public enum Geo implements TitanPredicate {
/**
* Whether the intersection between two geographic regions is non-empty
*/
INTERSECT {
@Override
public boolean evaluate(Object value, Object condition) {
Preconditions.checkArgument(condition instanceof Geoshape);
if (value == null) return false;
Preconditions.checkArgument(value instanceof Geoshape);
return ((Geoshape) value).intersect((Geoshape) condition);
}
@Override
public String toString() {
return "intersect";
}
@Override
public boolean hasNegation() {
return true;
}
@Override
public TitanPredicate negate() {
return DISJOINT;
}
},
/**
* Whether the intersection between two geographic regions is empty
*/
DISJOINT {
@Override
public boolean evaluate(Object value, Object condition) {
Preconditions.checkArgument(condition instanceof Geoshape);
if (value == null) return false;
Preconditions.checkArgument(value instanceof Geoshape);
return ((Geoshape) value).disjoint((Geoshape) condition);
}
@Override
public String toString() {
return "disjoint";
}
@Override
public boolean hasNegation() {
return true;
}
@Override
public TitanPredicate negate() {
return INTERSECT;
}
},
/**
* Whether one geographic region is completely contained within another
*/
WITHIN {
@Override
public boolean evaluate(Object value, Object condition) {
Preconditions.checkArgument(condition instanceof Geoshape);
if (value == null) return false;
Preconditions.checkArgument(value instanceof Geoshape);
return ((Geoshape) value).within((Geoshape) condition);
}
@Override
public String toString() {
return "within";
}
@Override
public boolean hasNegation() {
return false;
}
@Override
public TitanPredicate negate() {
throw new UnsupportedOperationException();
}
};
@Override
public boolean isValidCondition(Object condition) {
return condition != null && condition instanceof Geoshape;
}
@Override
public boolean isValidValueType(Class<?> clazz) {
Preconditions.checkNotNull(clazz);
return clazz.equals(Geoshape.class);
}
@Override
public boolean isQNF() {
return true;
}
}
| 0true
|
titan-core_src_main_java_com_thinkaurelius_titan_core_attribute_Geo.java
|
243 |
service.submitToKeyOwner(runnable, "key", new ExecutionCallback() {
public void onResponse(Object response) {
responseLatch.countDown();
}
public void onFailure(Throwable t) {
}
});
| 0true
|
hazelcast-client_src_test_java_com_hazelcast_client_executor_ClientExecutorServiceSubmitTest.java
|
853 |
public class IsNullRequest extends ReadRequest {
public IsNullRequest() {
}
public IsNullRequest(String name) {
super(name);
}
@Override
protected Operation prepareOperation() {
return new IsNullOperation(name);
}
@Override
public int getClassId() {
return AtomicReferencePortableHook.IS_NULL;
}
}
| 0true
|
hazelcast_src_main_java_com_hazelcast_concurrent_atomicreference_client_IsNullRequest.java
|
321 |
@SuppressWarnings("serial")
public class OStorageEntryConfiguration implements Serializable {
public String name;
public String value;
public OStorageEntryConfiguration() {
}
public OStorageEntryConfiguration(final String iName, final String iValue) {
name = iName;
value = iValue;
}
}
| 0true
|
core_src_main_java_com_orientechnologies_orient_core_config_OStorageEntryConfiguration.java
|
2,053 |
public class GetEntryViewOperation extends KeyBasedMapOperation {
private EntryView<Data, Data> result;
public GetEntryViewOperation(String name, Data dataKey) {
super(name, dataKey);
}
public GetEntryViewOperation() {
}
public void run() {
MapService mapService = getService();
RecordStore recordStore = mapService.getRecordStore(getPartitionId(), name);
Record record = recordStore.getRecord(dataKey);
if (record != null) {
result = mapService.createSimpleEntryView(record.getKey(), mapService.toData(record.getValue()), record);
}
}
@Override
public Object getResponse() {
return result;
}
@Override
public String toString() {
return "GetEntryViewOperation{" +
'}';
}
}
| 1no label
|
hazelcast_src_main_java_com_hazelcast_map_operation_GetEntryViewOperation.java
|
133 |
class InlineDeclarationProposal implements ICompletionProposal,
ICompletionProposalExtension6 {
private final InlineRefactoringAction action;
public InlineDeclarationProposal(CeylonEditor editor) {
action = new InlineRefactoringAction(editor);
}
@Override
public Point getSelection(IDocument doc) {
return null;
}
@Override
public Image getImage() {
return CeylonLabelProvider.COMPOSITE_CHANGE;
}
@Override
public String getDisplayString() {
return "Inline '" + action.currentName() + "'";
}
@Override
public IContextInformation getContextInformation() {
return null;
}
@Override
public String getAdditionalProposalInfo() {
return null;
}
@Override
public void apply(IDocument doc) {
action.run();
}
boolean isEnabled() {
return action.isEnabled();
}
public static void add(Collection<ICompletionProposal> proposals, CeylonEditor editor) {
InlineDeclarationProposal prop = new InlineDeclarationProposal(editor);
if (prop.isEnabled()) {
proposals.add(prop);
}
}
@Override
public StyledString getStyledDisplayString() {
return Highlights.styleProposal(getDisplayString(), false);
}
}
| 0true
|
plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_correct_InlineDeclarationProposal.java
|
1,530 |
public static class Map extends Mapper<NullWritable, FaunusVertex, NullWritable, FaunusVertex> {
private boolean processVertices;
@Override
public void setup(final Mapper.Context context) throws IOException, InterruptedException {
this.processVertices = context.getConfiguration().getBoolean(PROCESS_VERTICES, true);
}
@Override
public void map(final NullWritable key, final FaunusVertex value, final Mapper<NullWritable, FaunusVertex, NullWritable, FaunusVertex>.Context context) throws IOException, InterruptedException {
if (this.processVertices) {
value.clearPaths();
DEFAULT_COMPAT.incrementContextCounter(context, Counters.VERTICES_PROCESSED, 1L);
}
long edgesProcessed = 0;
for (final Edge edge : value.getEdges(Direction.IN)) {
((StandardFaunusEdge) edge).startPath();
edgesProcessed++;
}
DEFAULT_COMPAT.incrementContextCounter(context, Counters.IN_EDGES_PROCESSED, edgesProcessed);
edgesProcessed = 0;
for (final Edge edge : value.getEdges(Direction.OUT)) {
((StandardFaunusEdge) edge).startPath();
edgesProcessed++;
}
DEFAULT_COMPAT.incrementContextCounter(context, Counters.OUT_EDGES_PROCESSED, edgesProcessed);
context.write(NullWritable.get(), value);
}
}
| 1no label
|
titan-hadoop-parent_titan-hadoop-core_src_main_java_com_thinkaurelius_titan_hadoop_mapreduce_transform_EdgesMap.java
|
1,468 |
public class OSQLFunctionDijkstra extends OSQLFunctionPathFinder<Float> {
public static final String NAME = "dijkstra";
private static final Float MIN = new Float(0f);
private String paramWeightFieldName;
public OSQLFunctionDijkstra() {
super(NAME, 3, 4);
}
public Object execute(OIdentifiable iCurrentRecord, Object iCurrentResult, final Object[] iParameters, OCommandContext iContext) {
final OrientBaseGraph graph = OGraphCommandExecutorSQLFactory.getGraph();
final ORecordInternal<?> record = (ORecordInternal<?>) (iCurrentRecord != null ? iCurrentRecord.getRecord() : null);
Object source = iParameters[0];
if (OMultiValue.isMultiValue(source)) {
if (OMultiValue.getSize(source) > 1)
throw new IllegalArgumentException("Only one sourceVertex is allowed");
source = OMultiValue.getFirstValue(source);
}
paramSourceVertex = graph.getVertex((OIdentifiable) OSQLHelper.getValue(source, record, iContext));
Object dest = iParameters[1];
if (OMultiValue.isMultiValue(dest)) {
if (OMultiValue.getSize(dest) > 1)
throw new IllegalArgumentException("Only one destinationVertex is allowed");
dest = OMultiValue.getFirstValue(dest);
}
paramDestinationVertex = graph.getVertex((OIdentifiable) OSQLHelper.getValue(dest, record, iContext));
paramWeightFieldName = (String) OSQLHelper.getValue(iParameters[2], record, iContext);
if (iParameters.length > 3)
paramDirection = Direction.valueOf(iParameters[3].toString().toUpperCase());
return super.execute(iParameters, iContext);
}
public String getSyntax() {
return "Syntax error: dijkstra(<sourceVertex>, <destinationVertex>, <weightEdgeFieldName>, [<direction>])";
}
@Override
protected Float getShortestDistance(final Vertex destination) {
if (destination == null)
return Float.MAX_VALUE;
final Float d = distance.get(destination);
return d == null ? Float.MAX_VALUE : d;
}
@Override
protected Float getMinimumDistance() {
return MIN;
}
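// Reads the edge weight from the configured weight field; a missing edge, missing field, or non-numeric value falls back to MIN (0).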
protected Float getDistance(final Vertex node, final Vertex target) {
final Iterator<Edge> edges = ((OrientVertex) node).getEdges((OrientVertex) target, paramDirection).iterator();
if (edges.hasNext()) {
final Edge e = edges.next();
if (e != null) {
final Object fieldValue = e.getProperty(paramWeightFieldName);
if (fieldValue != null)
if (fieldValue instanceof Float)
return (Float) fieldValue;
else if (fieldValue instanceof Number)
return ((Number) fieldValue).floatValue();
}
}
return MIN;
}
@Override
protected Float sumDistances(final Float iDistance1, final Float iDistance2) {
return iDistance1.floatValue() + iDistance2.floatValue();
}
}
| 1no label
|
graphdb_src_main_java_com_orientechnologies_orient_graph_sql_functions_OSQLFunctionDijkstra.java
|
2,889 |
public final class Predicates {
//we don't want instances.
private Predicates() {
}
public static Predicate instanceOf(final Class klass) {
return new InstanceOfPredicate(klass);
}
private static Comparable readAttribute(Map.Entry entry, String attribute) {
QueryableEntry queryableEntry = (QueryableEntry) entry;
Comparable value = queryableEntry.getAttribute(attribute);
if (value == null) {
return IndexImpl.NULL;
}
return value;
}
public static Predicate and(Predicate x, Predicate y) {
return new AndPredicate(x, y);
}
public static Predicate not(Predicate predicate) {
return new NotPredicate(predicate);
}
public static Predicate or(Predicate x, Predicate y) {
return new OrPredicate(x, y);
}
public static Predicate notEqual(String attribute, Comparable y) {
return new NotEqualPredicate(attribute, y);
}
public static Predicate equal(String attribute, Comparable y) {
return new EqualPredicate(attribute, y);
}
public static Predicate like(String attribute, String pattern) {
return new LikePredicate(attribute, pattern);
}
public static Predicate ilike(String attribute, String pattern) {
return new ILikePredicate(attribute, pattern);
}
public static Predicate regex(String attribute, String pattern) {
return new RegexPredicate(attribute, pattern);
}
public static Predicate greaterThan(String x, Comparable y) {
return new GreaterLessPredicate(x, y, false, false);
}
public static Predicate greaterEqual(String x, Comparable y) {
return new GreaterLessPredicate(x, y, true, false);
}
public static Predicate lessThan(String x, Comparable y) {
return new GreaterLessPredicate(x, y, false, true);
}
public static Predicate lessEqual(String x, Comparable y) {
return new GreaterLessPredicate(x, y, true, true);
}
public static Predicate between(String attribute, Comparable from, Comparable to) {
return new BetweenPredicate(attribute, from, to);
}
public static Predicate in(String attribute, Comparable... values) {
return new InPredicate(attribute, values);
}
public static class BetweenPredicate extends AbstractPredicate {
private Comparable to;
private Comparable from;
public BetweenPredicate() {
}
public BetweenPredicate(String first, Comparable from, Comparable to) {
super(first);
this.from = from;
this.to = to;
}
@Override
public boolean apply(Map.Entry entry) {
Comparable entryValue = readAttribute(entry);
if (entryValue == null) {
return false;
}
Comparable fromConvertedValue = convert(entry, entryValue, from);
Comparable toConvertedValue = convert(entry, entryValue, to);
if (fromConvertedValue == null || toConvertedValue == null) {
return false;
}
return entryValue.compareTo(fromConvertedValue) >= 0 && entryValue.compareTo(toConvertedValue) <= 0;
}
@Override
public Set<QueryableEntry> filter(QueryContext queryContext) {
Index index = getIndex(queryContext);
return index.getSubRecordsBetween(from, to);
}
@Override
public void writeData(ObjectDataOutput out) throws IOException {
super.writeData(out);
out.writeObject(to);
out.writeObject(from);
}
@Override
public void readData(ObjectDataInput in) throws IOException {
super.readData(in);
to = in.readObject();
from = in.readObject();
}
@Override
public String toString() {
return attribute + " BETWEEN " + from + " AND " + to;
}
}
public static class NotPredicate implements Predicate, DataSerializable {
private Predicate predicate;
public NotPredicate(Predicate predicate) {
this.predicate = predicate;
}
public NotPredicate() {
}
@Override
public boolean apply(Map.Entry mapEntry) {
return !predicate.apply(mapEntry);
}
@Override
public void writeData(ObjectDataOutput out) throws IOException {
out.writeObject(predicate);
}
@Override
public void readData(ObjectDataInput in) throws IOException {
predicate = in.readObject();
}
@Override
public String toString() {
return "NOT(" + predicate + ")";
}
}
public static class InPredicate extends AbstractPredicate {
private Comparable[] values;
private volatile Set<Comparable> convertedInValues;
public InPredicate() {
}
public InPredicate(String attribute, Comparable... values) {
super(attribute);
this.values = values;
}
@Override
public boolean apply(Map.Entry entry) {
Comparable entryValue = readAttribute(entry);
Set<Comparable> set = convertedInValues;
if (set == null) {
set = new HashSet<Comparable>(values.length);
for (Comparable value : values) {
set.add(convert(entry, entryValue, value));
}
convertedInValues = set;
}
return entryValue != null && set.contains(entryValue);
}
@Override
public Set<QueryableEntry> filter(QueryContext queryContext) {
Index index = getIndex(queryContext);
if (index != null) {
return index.getRecords(values);
} else {
return null;
}
}
@Override
public void writeData(ObjectDataOutput out) throws IOException {
super.writeData(out);
out.writeInt(values.length);
for (Object value : values) {
out.writeObject(value);
}
}
@Override
public void readData(ObjectDataInput in) throws IOException {
super.readData(in);
int len = in.readInt();
values = new Comparable[len];
for (int i = 0; i < len; i++) {
values[i] = in.readObject();
}
}
@Override
public String toString() {
final StringBuilder sb = new StringBuilder();
sb.append(attribute);
sb.append(" IN (");
for (int i = 0; i < values.length; i++) {
if (i > 0) {
sb.append(",");
}
sb.append(values[i]);
}
sb.append(")");
return sb.toString();
}
}
public static class RegexPredicate implements Predicate, DataSerializable {
private String attribute;
private String regex;
private volatile Pattern pattern;
public RegexPredicate() {
}
public RegexPredicate(String attribute, String regex) {
this.attribute = attribute;
this.regex = regex;
}
@Override
public boolean apply(Map.Entry entry) {
Comparable attribute = readAttribute(entry, this.attribute);
String firstVal = attribute == IndexImpl.NULL ? null : (String) attribute;
if (firstVal == null) {
return (regex == null);
} else if (regex == null) {
return false;
} else {
if (pattern == null) {
pattern = Pattern.compile(regex);
}
Matcher m = pattern.matcher(firstVal);
return m.matches();
}
}
@Override
public void writeData(ObjectDataOutput out) throws IOException {
out.writeUTF(attribute);
out.writeUTF(regex);
}
@Override
public void readData(ObjectDataInput in) throws IOException {
attribute = in.readUTF();
regex = in.readUTF();
}
@Override
public String toString() {
return attribute + " REGEX '" + regex + "'";
}
}
public static class LikePredicate implements Predicate, DataSerializable {
protected String attribute;
protected String second;
private volatile Pattern pattern;
public LikePredicate() {
}
public LikePredicate(String attribute, String second) {
this.attribute = attribute;
this.second = second;
}
@Override
public boolean apply(Map.Entry entry) {
Comparable attribute = readAttribute(entry, this.attribute);
String firstVal = attribute == IndexImpl.NULL ? null : (String) attribute;
if (firstVal == null) {
return (second == null);
} else if (second == null) {
return false;
} else {
if (pattern == null) {
// we quote the input string then escape then replace % and _
// at the end we have a regex pattern look like : \QSOME_STRING\E.*\QSOME_OTHER_STRING\E
final String quoted = Pattern.quote(second);
String regex = quoted
//escaped %
.replaceAll("(?<!\\\\)[%]", "\\\\E.*\\\\Q")
//escaped _
.replaceAll("(?<!\\\\)[_]", "\\\\E.\\\\Q")
//non escaped %
.replaceAll("\\\\%", "%")
//non escaped _
.replaceAll("\\\\_", "_");
int flags = getFlags();
pattern = Pattern.compile(regex, flags);
}
Matcher m = pattern.matcher(firstVal);
return m.matches();
}
}
@Override
public void writeData(ObjectDataOutput out) throws IOException {
out.writeUTF(attribute);
out.writeUTF(second);
}
@Override
public void readData(ObjectDataInput in) throws IOException {
attribute = in.readUTF();
second = in.readUTF();
}
protected int getFlags() {
//no flags
return 0;
}
@Override
public String toString() {
StringBuffer builder = new StringBuffer(attribute)
.append(" LIKE '")
.append(second)
.append("'");
return builder.toString();
}
}
public static class ILikePredicate extends LikePredicate {
public ILikePredicate() {
}
public ILikePredicate(String attribute, String second) {
super(attribute, second);
}
@Override
public String toString() {
StringBuffer builder = new StringBuffer(attribute)
.append(" ILIKE '")
.append(second)
.append("'");
return builder.toString();
}
@Override
protected int getFlags() {
return Pattern.CASE_INSENSITIVE;
}
}
public static class AndPredicate implements IndexAwarePredicate, DataSerializable {
protected Predicate[] predicates;
public AndPredicate() {
}
public AndPredicate(Predicate... predicates) {
this.predicates = predicates;
}
@Override
public Set<QueryableEntry> filter(QueryContext queryContext) {
Set<QueryableEntry> smallestIndexedResult = null;
List<Set<QueryableEntry>> otherIndexedResults = new LinkedList<Set<QueryableEntry>>();
List<Predicate> lsNoIndexPredicates = null;
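// Keep the smallest indexed result set as the base; the remaining indexed results and any non-indexed predicates are applied on top by AndResultSet.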
for (Predicate predicate : predicates) {
boolean indexed = false;
if (predicate instanceof IndexAwarePredicate) {
IndexAwarePredicate iap = (IndexAwarePredicate) predicate;
if (iap.isIndexed(queryContext)) {
indexed = true;
Set<QueryableEntry> s = iap.filter(queryContext);
if (smallestIndexedResult == null) {
smallestIndexedResult = s;
} else if (s.size() < smallestIndexedResult.size()) {
otherIndexedResults.add(smallestIndexedResult);
smallestIndexedResult = s;
} else {
otherIndexedResults.add(s);
}
} else {
if (lsNoIndexPredicates == null) {
lsNoIndexPredicates = new LinkedList<Predicate>();
lsNoIndexPredicates.add(predicate);
}
}
}
if (!indexed) {
if (lsNoIndexPredicates == null) {
lsNoIndexPredicates = new LinkedList<Predicate>();
}
lsNoIndexPredicates.add(predicate);
}
}
if (smallestIndexedResult == null) {
return null;
}
return new AndResultSet(smallestIndexedResult, otherIndexedResults, lsNoIndexPredicates);
}
@Override
public boolean isIndexed(QueryContext queryContext) {
for (Predicate predicate : predicates) {
if (predicate instanceof IndexAwarePredicate) {
IndexAwarePredicate iap = (IndexAwarePredicate) predicate;
if (iap.isIndexed(queryContext)) {
return true;
}
}
}
return false;
}
@Override
public boolean apply(Map.Entry mapEntry) {
for (Predicate predicate : predicates) {
if (!predicate.apply(mapEntry)) {
return false;
}
}
return true;
}
@Override
public String toString() {
final StringBuilder sb = new StringBuilder();
sb.append("(");
int size = predicates.length;
for (int i = 0; i < size; i++) {
if (i > 0) {
sb.append(" AND ");
}
sb.append(predicates[i]);
}
sb.append(")");
return sb.toString();
}
@Override
public void writeData(ObjectDataOutput out) throws IOException {
out.writeInt(predicates.length);
for (Predicate predicate : predicates) {
out.writeObject(predicate);
}
}
@Override
public void readData(ObjectDataInput in) throws IOException {
int size = in.readInt();
predicates = new Predicate[size];
for (int i = 0; i < size; i++) {
predicates[i] = in.readObject();
}
}
}
public static class OrPredicate implements IndexAwarePredicate, DataSerializable {
private Predicate[] predicates;
public OrPredicate() {
}
public OrPredicate(Predicate... predicates) {
this.predicates = predicates;
}
@Override
public Set<QueryableEntry> filter(QueryContext queryContext) {
List<Set<QueryableEntry>> indexedResults = new LinkedList<Set<QueryableEntry>>();
for (Predicate predicate : predicates) {
if (predicate instanceof IndexAwarePredicate) {
IndexAwarePredicate iap = (IndexAwarePredicate) predicate;
if (iap.isIndexed(queryContext)) {
Set<QueryableEntry> s = iap.filter(queryContext);
if (s != null) {
indexedResults.add(s);
}
} else {
return null;
}
}
}
return indexedResults.isEmpty() ? null : new OrResultSet(indexedResults);
}
@Override
public boolean isIndexed(QueryContext queryContext) {
for (Predicate predicate : predicates) {
if (predicate instanceof IndexAwarePredicate) {
IndexAwarePredicate iap = (IndexAwarePredicate) predicate;
if (!iap.isIndexed(queryContext)) {
return false;
}
} else {
return false;
}
}
return true;
}
@Override
public boolean apply(Map.Entry mapEntry) {
for (Predicate predicate : predicates) {
if (predicate.apply(mapEntry)) {
return true;
}
}
return false;
}
@Override
public void writeData(ObjectDataOutput out) throws IOException {
out.writeInt(predicates.length);
for (Predicate predicate : predicates) {
out.writeObject(predicate);
}
}
@Override
public void readData(ObjectDataInput in) throws IOException {
int size = in.readInt();
predicates = new Predicate[size];
for (int i = 0; i < size; i++) {
predicates[i] = in.readObject();
}
}
@Override
public String toString() {
final StringBuilder sb = new StringBuilder();
sb.append("(");
int size = predicates.length;
for (int i = 0; i < size; i++) {
if (i > 0) {
sb.append(" OR ");
}
sb.append(predicates[i]);
}
sb.append(")");
return sb.toString();
}
}
public static class GreaterLessPredicate extends EqualPredicate {
boolean equal;
boolean less;
public GreaterLessPredicate() {
}
public GreaterLessPredicate(String attribute, Comparable value, boolean equal, boolean less) {
super(attribute, value);
this.equal = equal;
this.less = less;
}
@Override
public boolean apply(Map.Entry mapEntry) {
final Comparable entryValue = readAttribute(mapEntry);
final Comparable attributeValue = convert(mapEntry, entryValue, value);
final int result = entryValue.compareTo(attributeValue);
return equal && result == 0 || (less ? (result < 0) : (result > 0));
}
@Override
public Set<QueryableEntry> filter(QueryContext queryContext) {
Index index = getIndex(queryContext);
final ComparisonType comparisonType;
if (less) {
comparisonType = equal ? ComparisonType.LESSER_EQUAL : ComparisonType.LESSER;
} else {
comparisonType = equal ? ComparisonType.GREATER_EQUAL : ComparisonType.GREATER;
}
return index.getSubRecords(comparisonType, value);
}
@Override
public void readData(ObjectDataInput in) throws IOException {
super.readData(in);
equal = in.readBoolean();
less = in.readBoolean();
}
@Override
public void writeData(ObjectDataOutput out) throws IOException {
super.writeData(out);
out.writeBoolean(equal);
out.writeBoolean(less);
}
@Override
public String toString() {
final StringBuilder sb = new StringBuilder();
sb.append(attribute);
sb.append(less ? "<" : ">");
if (equal) {
sb.append("=");
}
sb.append(value);
return sb.toString();
}
}
public static class NotEqualPredicate extends EqualPredicate {
public NotEqualPredicate() {
}
public NotEqualPredicate(String attribute, Comparable value) {
super(attribute, value);
}
@Override
public boolean apply(Map.Entry entry) {
return !super.apply(entry);
}
@Override
public Set<QueryableEntry> filter(QueryContext queryContext) {
Index index = getIndex(queryContext);
if (index != null) {
return index.getSubRecords(ComparisonType.NOT_EQUAL, value);
} else {
return null;
}
}
@Override
public String toString() {
return attribute + " != " + value;
}
}
public static class EqualPredicate extends AbstractPredicate {
protected Comparable value;
public EqualPredicate() {
}
public EqualPredicate(String attribute, Comparable value) {
super(attribute);
this.value = value;
}
@Override
public Set<QueryableEntry> filter(QueryContext queryContext) {
Index index = getIndex(queryContext);
return index.getRecords(value);
}
@Override
public boolean apply(Map.Entry mapEntry) {
Comparable entryValue = readAttribute(mapEntry);
if (entryValue == null) {
return value == null || value == IndexImpl.NULL;
}
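// convert the expected value to the stored attribute's type before the equality check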
value = convert(mapEntry, entryValue, value);
return entryValue.equals(value);
}
@Override
public void writeData(ObjectDataOutput out) throws IOException {
super.writeData(out);
out.writeObject(value);
}
@Override
public void readData(ObjectDataInput in) throws IOException {
super.readData(in);
value = in.readObject();
}
@Override
public String toString() {
return attribute + "=" + value;
}
}
public abstract static class AbstractPredicate implements IndexAwarePredicate, DataSerializable {
protected String attribute;
private transient volatile AttributeType attributeType;
protected AbstractPredicate() {
}
protected AbstractPredicate(String attribute) {
this.attribute = attribute;
}
protected Comparable convert(Map.Entry mapEntry, Comparable entryValue, Comparable attributeValue) {
if (attributeValue == null) {
return null;
}
if (attributeValue instanceof IndexImpl.NullObject) {
return IndexImpl.NULL;
}
AttributeType type = attributeType;
if (type == null) {
QueryableEntry queryableEntry = (QueryableEntry) mapEntry;
type = queryableEntry.getAttributeType(attribute);
attributeType = type;
}
if (type == AttributeType.ENUM) {
// if attribute type is enum, convert given attribute to enum string
return type.getConverter().convert(attributeValue);
} else {
// if given attribute value is already in expected type then there's no need for conversion.
if (entryValue != null && entryValue.getClass().isAssignableFrom(attributeValue.getClass())) {
return attributeValue;
} else if (type != null) {
return type.getConverter().convert(attributeValue);
} else {
throw new QueryException("Unknown attribute type: " + attributeValue.getClass());
}
}
}
@Override
public boolean isIndexed(QueryContext queryContext) {
return getIndex(queryContext) != null;
}
protected Index getIndex(QueryContext queryContext) {
return queryContext.getIndex(attribute);
}
protected Comparable readAttribute(Map.Entry entry) {
QueryableEntry queryableEntry = (QueryableEntry) entry;
Comparable val = queryableEntry.getAttribute(attribute);
if (val != null && val.getClass().isEnum()) {
val = val.toString();
}
return val;
}
@Override
public void writeData(ObjectDataOutput out) throws IOException {
out.writeUTF(attribute);
}
@Override
public void readData(ObjectDataInput in) throws IOException {
attribute = in.readUTF();
}
}
private static class InstanceOfPredicate implements Predicate, DataSerializable {
private Class klass;
public InstanceOfPredicate(Class klass) {
this.klass = klass;
}
@Override
public boolean apply(Map.Entry mapEntry) {
Object value = mapEntry.getValue();
if (value == null) {
return false;
}
return klass.isAssignableFrom(value.getClass());
}
@Override
public void writeData(ObjectDataOutput out) throws IOException {
out.writeUTF(klass.getName());
}
@Override
public void readData(ObjectDataInput in) throws IOException {
String klassName = in.readUTF();
try {
klass = in.getClassLoader().loadClass(klassName);
} catch (ClassNotFoundException e) {
throw new HazelcastSerializationException("Failed to load class: " + klassName, e);
}
}
@Override
public String toString() {
return " instanceOf (" + klass.getName() + ")";
}
}
}
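// Hypothetical usage sketch (not from the original source): it assumes the enclosing outer class
// above is com.hazelcast.query.Predicates, as the file name suggests, and exercises only the public
// constructors and toString() implementations shown above. Evaluation against real map entries is
// performed by the Hazelcast query engine and is not reproduced here.
class PredicatesToStringSketch {
    public static void main(String[] args) {
        com.hazelcast.query.Predicate age =
                new Predicates.GreaterLessPredicate("age", 18, true, false);   // age >= 18
        com.hazelcast.query.Predicate name =
                new Predicates.EqualPredicate("name", "joe");                  // name = joe
        com.hazelcast.query.Predicate active =
                new Predicates.NotEqualPredicate("active", false);             // active != false
        Object composed =
                new Predicates.AndPredicate(age, new Predicates.OrPredicate(name, active));
        // Expected output: (age>=18 AND (name=joe OR active != false))
        System.out.println(composed);
    }
}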
| 1no label
|
hazelcast_src_main_java_com_hazelcast_query_Predicates.java
|
4,189 |
public class BlobStoreIndexShardRepository extends AbstractComponent implements IndexShardRepository {
private BlobStore blobStore;
private BlobPath basePath;
private final String repositoryName;
private ByteSizeValue chunkSize;
private final IndicesService indicesService;
private RateLimiter snapshotRateLimiter;
private RateLimiter restoreRateLimiter;
private RateLimiterListener rateLimiterListener;
private RateLimitingInputStream.Listener snapshotThrottleListener;
private static final String SNAPSHOT_PREFIX = "snapshot-";
@Inject
BlobStoreIndexShardRepository(Settings settings, RepositoryName repositoryName, IndicesService indicesService) {
super(settings);
this.repositoryName = repositoryName.name();
this.indicesService = indicesService;
}
/**
* Called by {@link org.elasticsearch.repositories.blobstore.BlobStoreRepository} on repository startup
*
* @param blobStore blob store
* @param basePath base path to blob store
* @param chunkSize chunk size
*/
public void initialize(BlobStore blobStore, BlobPath basePath, ByteSizeValue chunkSize,
RateLimiter snapshotRateLimiter, RateLimiter restoreRateLimiter,
final RateLimiterListener rateLimiterListener) {
this.blobStore = blobStore;
this.basePath = basePath;
this.chunkSize = chunkSize;
this.snapshotRateLimiter = snapshotRateLimiter;
this.restoreRateLimiter = restoreRateLimiter;
this.rateLimiterListener = rateLimiterListener;
this.snapshotThrottleListener = new RateLimitingInputStream.Listener() {
@Override
public void onPause(long nanos) {
rateLimiterListener.onSnapshotPause(nanos);
}
};
}
/**
* {@inheritDoc}
*/
@Override
public void snapshot(SnapshotId snapshotId, ShardId shardId, SnapshotIndexCommit snapshotIndexCommit, IndexShardSnapshotStatus snapshotStatus) {
SnapshotContext snapshotContext = new SnapshotContext(snapshotId, shardId, snapshotStatus);
snapshotStatus.startTime(System.currentTimeMillis());
try {
snapshotContext.snapshot(snapshotIndexCommit);
snapshotStatus.time(System.currentTimeMillis() - snapshotStatus.startTime());
snapshotStatus.updateStage(IndexShardSnapshotStatus.Stage.DONE);
} catch (Throwable e) {
snapshotStatus.time(System.currentTimeMillis() - snapshotStatus.startTime());
snapshotStatus.updateStage(IndexShardSnapshotStatus.Stage.FAILURE);
if (e instanceof IndexShardSnapshotFailedException) {
throw (IndexShardSnapshotFailedException) e;
} else {
throw new IndexShardSnapshotFailedException(shardId, e.getMessage(), e);
}
}
}
/**
* {@inheritDoc}
*/
@Override
public void restore(SnapshotId snapshotId, ShardId shardId, ShardId snapshotShardId, RecoveryStatus recoveryStatus) {
RestoreContext snapshotContext = new RestoreContext(snapshotId, shardId, snapshotShardId, recoveryStatus);
try {
recoveryStatus.index().startTime(System.currentTimeMillis());
snapshotContext.restore();
recoveryStatus.index().time(System.currentTimeMillis() - recoveryStatus.index().startTime());
} catch (Throwable e) {
throw new IndexShardRestoreFailedException(shardId, "failed to restore snapshot [" + snapshotId.getSnapshot() + "]", e);
}
}
/**
* Delete shard snapshot
*
* @param snapshotId snapshot id
* @param shardId shard id
*/
public void delete(SnapshotId snapshotId, ShardId shardId) {
Context context = new Context(snapshotId, shardId, shardId);
context.delete();
}
@Override
public String toString() {
return "BlobStoreIndexShardRepository[" +
"[" + repositoryName +
"], [" + blobStore + ']' +
']';
}
/**
* Returns shard snapshot metadata file name
*
* @param snapshotId snapshot id
* @return shard snapshot metadata file name
*/
private String snapshotBlobName(SnapshotId snapshotId) {
return SNAPSHOT_PREFIX + snapshotId.getSnapshot();
}
/**
* Serializes snapshot to JSON
*
* @param snapshot snapshot
* @return JSON representation of the snapshot
* @throws IOException
*/
public static byte[] writeSnapshot(BlobStoreIndexShardSnapshot snapshot) throws IOException {
XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON).prettyPrint();
BlobStoreIndexShardSnapshot.toXContent(snapshot, builder, ToXContent.EMPTY_PARAMS);
return builder.bytes().toBytes();
}
/**
* Parses JSON representation of a snapshot
*
* @param data JSON
* @return snapshot
* @throws IOException
*/
public static BlobStoreIndexShardSnapshot readSnapshot(byte[] data) throws IOException {
XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(data);
try {
parser.nextToken();
return BlobStoreIndexShardSnapshot.fromXContent(parser);
} finally {
parser.close();
}
}
/**
* Context for snapshot/restore operations
*/
private class Context {
protected final SnapshotId snapshotId;
protected final ShardId shardId;
protected final ImmutableBlobContainer blobContainer;
public Context(SnapshotId snapshotId, ShardId shardId) {
this(snapshotId, shardId, shardId);
}
public Context(SnapshotId snapshotId, ShardId shardId, ShardId snapshotShardId) {
this.snapshotId = snapshotId;
this.shardId = shardId;
blobContainer = blobStore.immutableBlobContainer(basePath.add("indices").add(snapshotShardId.getIndex()).add(Integer.toString(snapshotShardId.getId())));
}
/**
* Delete shard snapshot
*/
public void delete() {
final ImmutableMap<String, BlobMetaData> blobs;
try {
blobs = blobContainer.listBlobs();
} catch (IOException e) {
throw new IndexShardSnapshotException(shardId, "Failed to list content of gateway", e);
}
BlobStoreIndexShardSnapshots snapshots = buildBlobStoreIndexShardSnapshots(blobs);
String commitPointName = snapshotBlobName(snapshotId);
try {
blobContainer.deleteBlob(commitPointName);
} catch (IOException e) {
logger.debug("[{}] [{}] failed to delete shard snapshot file", shardId, snapshotId);
}
// delete all files that are not referenced by any commit point
// build a new BlobStoreIndexShardSnapshot, that includes this one and all the saved ones
List<BlobStoreIndexShardSnapshot> newSnapshotsList = Lists.newArrayList();
for (BlobStoreIndexShardSnapshot point : snapshots) {
if (!point.snapshot().equals(snapshotId.getSnapshot())) {
newSnapshotsList.add(point);
}
}
cleanup(newSnapshotsList, blobs);
}
/**
* Removes all unreferenced files from the repository
*
* @param snapshots list of active snapshots in the container
* @param blobs list of blobs in the container
*/
protected void cleanup(List<BlobStoreIndexShardSnapshot> snapshots, ImmutableMap<String, BlobMetaData> blobs) {
BlobStoreIndexShardSnapshots newSnapshots = new BlobStoreIndexShardSnapshots(snapshots);
// now go over all the blobs, and if they don't exist in a snapshot, delete them
for (String blobName : blobs.keySet()) {
if (!blobName.startsWith("__")) {
continue;
}
if (newSnapshots.findNameFile(FileInfo.canonicalName(blobName)) == null) {
try {
blobContainer.deleteBlob(blobName);
} catch (IOException e) {
logger.debug("[{}] [{}] error deleting blob [{}] during cleanup", e, snapshotId, shardId, blobName);
}
}
}
}
/**
* Generates blob name
*
* @param generation the blob number
* @return the blob name
*/
protected String fileNameFromGeneration(long generation) {
return "__" + Long.toString(generation, Character.MAX_RADIX);
}
/**
* Finds the next available blob number
*
* @param blobs list of blobs in the repository
* @return next available blob number
*/
protected long findLatestFileNameGeneration(ImmutableMap<String, BlobMetaData> blobs) {
long generation = -1;
for (String name : blobs.keySet()) {
if (!name.startsWith("__")) {
continue;
}
name = FileInfo.canonicalName(name);
try {
long currentGen = Long.parseLong(name.substring(2) /*__*/, Character.MAX_RADIX);
if (currentGen > generation) {
generation = currentGen;
}
} catch (NumberFormatException e) {
logger.warn("file [{}] does not conform to the '__' schema", name);
}
}
return generation;
}
/**
* Loads all available snapshots in the repository
*
* @param blobs list of blobs in repository
* @return BlobStoreIndexShardSnapshots
*/
protected BlobStoreIndexShardSnapshots buildBlobStoreIndexShardSnapshots(ImmutableMap<String, BlobMetaData> blobs) {
List<BlobStoreIndexShardSnapshot> snapshots = Lists.newArrayList();
for (String name : blobs.keySet()) {
if (name.startsWith(SNAPSHOT_PREFIX)) {
try {
snapshots.add(readSnapshot(blobContainer.readBlobFully(name)));
} catch (IOException e) {
logger.warn("failed to read commit point [{}]", e, name);
}
}
}
return new BlobStoreIndexShardSnapshots(snapshots);
}
}
/**
* Context for snapshot operations
*/
private class SnapshotContext extends Context {
private final Store store;
private final IndexShardSnapshotStatus snapshotStatus;
/**
* Constructs new context
*
* @param snapshotId snapshot id
* @param shardId shard to be snapshotted
* @param snapshotStatus snapshot status to report progress
*/
public SnapshotContext(SnapshotId snapshotId, ShardId shardId, IndexShardSnapshotStatus snapshotStatus) {
super(snapshotId, shardId);
store = indicesService.indexServiceSafe(shardId.getIndex()).shardInjectorSafe(shardId.id()).getInstance(Store.class);
this.snapshotStatus = snapshotStatus;
}
/**
* Create snapshot from index commit point
*
* @param snapshotIndexCommit
*/
public void snapshot(SnapshotIndexCommit snapshotIndexCommit) {
logger.debug("[{}] [{}] snapshot to [{}] ...", shardId, snapshotId, repositoryName);
final ImmutableMap<String, BlobMetaData> blobs;
try {
blobs = blobContainer.listBlobs();
} catch (IOException e) {
throw new IndexShardSnapshotFailedException(shardId, "failed to list blobs", e);
}
long generation = findLatestFileNameGeneration(blobs);
BlobStoreIndexShardSnapshots snapshots = buildBlobStoreIndexShardSnapshots(blobs);
snapshotStatus.updateStage(IndexShardSnapshotStatus.Stage.STARTED);
final CountDownLatch indexLatch = new CountDownLatch(snapshotIndexCommit.getFiles().length);
final CopyOnWriteArrayList<Throwable> failures = new CopyOnWriteArrayList<Throwable>();
final List<BlobStoreIndexShardSnapshot.FileInfo> indexCommitPointFiles = newArrayList();
int indexNumberOfFiles = 0;
long indexTotalFilesSize = 0;
for (String fileName : snapshotIndexCommit.getFiles()) {
if (snapshotStatus.aborted()) {
logger.debug("[{}] [{}] Aborted on the file [{}], exiting", shardId, snapshotId, fileName);
throw new IndexShardSnapshotFailedException(shardId, "Aborted");
}
logger.trace("[{}] [{}] Processing [{}]", shardId, snapshotId, fileName);
final StoreFileMetaData md;
try {
md = store.metaData(fileName);
} catch (IOException e) {
throw new IndexShardSnapshotFailedException(shardId, "Failed to get store file metadata", e);
}
boolean snapshotRequired = false;
// TODO: For now segment files are copied on each commit because segment files don't have checksum
// if (snapshot.indexChanged() && fileName.equals(snapshotIndexCommit.getSegmentsFileName())) {
// snapshotRequired = true; // we want to always snapshot the segment file if the index changed
// }
BlobStoreIndexShardSnapshot.FileInfo fileInfo = snapshots.findPhysicalIndexFile(fileName);
if (fileInfo == null || !fileInfo.isSame(md) || !snapshotFileExistsInBlobs(fileInfo, blobs)) {
// commit point file does not exist in any commit point, or has a different length, or does not fully exist in the listed blobs
snapshotRequired = true;
}
if (snapshotRequired) {
indexNumberOfFiles++;
indexTotalFilesSize += md.length();
// create a new FileInfo
try {
BlobStoreIndexShardSnapshot.FileInfo snapshotFileInfo = new BlobStoreIndexShardSnapshot.FileInfo(fileNameFromGeneration(++generation), fileName, md.length(), chunkSize, md.checksum());
indexCommitPointFiles.add(snapshotFileInfo);
snapshotFile(snapshotFileInfo, indexLatch, failures);
} catch (IOException e) {
failures.add(e);
}
} else {
indexCommitPointFiles.add(fileInfo);
indexLatch.countDown();
}
}
snapshotStatus.files(indexNumberOfFiles, indexTotalFilesSize);
snapshotStatus.indexVersion(snapshotIndexCommit.getGeneration());
try {
indexLatch.await();
} catch (InterruptedException e) {
failures.add(e);
Thread.currentThread().interrupt();
}
if (!failures.isEmpty()) {
throw new IndexShardSnapshotFailedException(shardId, "Failed to perform snapshot (index files)", failures.get(0));
}
// now create and write the commit point
snapshotStatus.updateStage(IndexShardSnapshotStatus.Stage.FINALIZE);
String commitPointName = snapshotBlobName(snapshotId);
BlobStoreIndexShardSnapshot snapshot = new BlobStoreIndexShardSnapshot(snapshotId.getSnapshot(), snapshotIndexCommit.getGeneration(), indexCommitPointFiles);
try {
byte[] snapshotData = writeSnapshot(snapshot);
logger.trace("[{}] [{}] writing shard snapshot file", shardId, snapshotId);
blobContainer.writeBlob(commitPointName, new BytesStreamInput(snapshotData, false), snapshotData.length);
} catch (IOException e) {
throw new IndexShardSnapshotFailedException(shardId, "Failed to write commit point", e);
}
// delete all files that are not referenced by any commit point
// build a new BlobStoreIndexShardSnapshot, that includes this one and all the saved ones
List<BlobStoreIndexShardSnapshot> newSnapshotsList = Lists.newArrayList();
newSnapshotsList.add(snapshot);
for (BlobStoreIndexShardSnapshot point : snapshots) {
newSnapshotsList.add(point);
}
cleanup(newSnapshotsList, blobs);
snapshotStatus.updateStage(IndexShardSnapshotStatus.Stage.DONE);
}
/**
* Snapshot individual file
* <p/>
* This method is asynchronous. Upon completion of the operation the latch is counted down and any failures are
* added to the {@code failures} list
*
* @param fileInfo file to be snapshotted
* @param latch latch that should be counted down once the file is snapshotted
* @param failures thread-safe list of failures
* @throws IOException
*/
private void snapshotFile(final BlobStoreIndexShardSnapshot.FileInfo fileInfo, final CountDownLatch latch, final List<Throwable> failures) throws IOException {
final AtomicLong counter = new AtomicLong(fileInfo.numberOfParts());
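// each part of the file is uploaded as a separate blob; the counter tracks outstanding parts so the
// shared latch is released only when the last part finishes (a local failure releases it immediately)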
for (long i = 0; i < fileInfo.numberOfParts(); i++) {
IndexInput indexInput = null;
try {
indexInput = store.openInputRaw(fileInfo.physicalName(), IOContext.READONCE);
indexInput.seek(i * fileInfo.partBytes());
InputStreamIndexInput inputStreamIndexInput = new ThreadSafeInputStreamIndexInput(indexInput, fileInfo.partBytes());
final IndexInput fIndexInput = indexInput;
long size = inputStreamIndexInput.actualSizeToRead();
InputStream inputStream;
if (snapshotRateLimiter != null) {
inputStream = new RateLimitingInputStream(inputStreamIndexInput, snapshotRateLimiter, snapshotThrottleListener);
} else {
inputStream = inputStreamIndexInput;
}
blobContainer.writeBlob(fileInfo.partName(i), inputStream, size, new ImmutableBlobContainer.WriterListener() {
@Override
public void onCompleted() {
IOUtils.closeWhileHandlingException(fIndexInput);
if (counter.decrementAndGet() == 0) {
latch.countDown();
}
}
@Override
public void onFailure(Throwable t) {
IOUtils.closeWhileHandlingException(fIndexInput);
failures.add(t);
if (counter.decrementAndGet() == 0) {
latch.countDown();
}
}
});
} catch (Throwable e) {
IOUtils.closeWhileHandlingException(indexInput);
failures.add(e);
latch.countDown();
}
}
}
/**
* Checks if snapshot file already exists in the list of blobs
*
* @param fileInfo file to check
* @param blobs list of blobs
* @return true if file exists in the list of blobs
*/
private boolean snapshotFileExistsInBlobs(BlobStoreIndexShardSnapshot.FileInfo fileInfo, ImmutableMap<String, BlobMetaData> blobs) {
BlobMetaData blobMetaData = blobs.get(fileInfo.name());
if (blobMetaData != null) {
return blobMetaData.length() == fileInfo.length();
} else if (blobs.containsKey(fileInfo.partName(0))) {
// multi part file sum up the size and check
int part = 0;
long totalSize = 0;
while (true) {
blobMetaData = blobs.get(fileInfo.partName(part++));
if (blobMetaData == null) {
break;
}
totalSize += blobMetaData.length();
}
return totalSize == fileInfo.length();
}
// no file, not exact and not multipart
return false;
}
}
/**
* Context for restore operations
*/
private class RestoreContext extends Context {
private final Store store;
private final RecoveryStatus recoveryStatus;
/**
* Constructs new restore context
*
* @param snapshotId snapshot id
* @param shardId shard to be restored
* @param snapshotShardId shard in the snapshot that data should be restored from
* @param recoveryStatus recovery status to report progress
*/
public RestoreContext(SnapshotId snapshotId, ShardId shardId, ShardId snapshotShardId, RecoveryStatus recoveryStatus) {
super(snapshotId, shardId, snapshotShardId);
store = indicesService.indexServiceSafe(shardId.getIndex()).shardInjectorSafe(shardId.id()).getInstance(Store.class);
this.recoveryStatus = recoveryStatus;
}
/**
* Performs restore operation
*/
public void restore() {
logger.debug("[{}] [{}] restoring to [{}] ...", snapshotId, repositoryName, shardId);
BlobStoreIndexShardSnapshot snapshot;
try {
snapshot = readSnapshot(blobContainer.readBlobFully(snapshotBlobName(snapshotId)));
} catch (IOException ex) {
throw new IndexShardRestoreFailedException(shardId, "failed to read shard snapshot file", ex);
}
recoveryStatus.updateStage(RecoveryStatus.Stage.INDEX);
int numberOfFiles = 0;
long totalSize = 0;
int numberOfReusedFiles = 0;
long reusedTotalSize = 0;
List<FileInfo> filesToRecover = Lists.newArrayList();
for (FileInfo fileInfo : snapshot.indexFiles()) {
String fileName = fileInfo.physicalName();
StoreFileMetaData md = null;
try {
md = store.metaData(fileName);
} catch (IOException e) {
// no file
}
numberOfFiles++;
// we don't compute checksum for segments, so always recover them
if (!fileName.startsWith("segments") && md != null && fileInfo.isSame(md)) {
totalSize += md.length();
numberOfReusedFiles++;
reusedTotalSize += md.length();
if (logger.isTraceEnabled()) {
logger.trace("not_recovering [{}], exists in local store and is same", fileInfo.physicalName());
}
} else {
totalSize += fileInfo.length();
filesToRecover.add(fileInfo);
if (logger.isTraceEnabled()) {
if (md == null) {
logger.trace("recovering [{}], does not exist in local store", fileInfo.physicalName());
} else {
logger.trace("recovering [{}], exists in local store but is different", fileInfo.physicalName());
}
}
}
}
recoveryStatus.index().files(numberOfFiles, totalSize, numberOfReusedFiles, reusedTotalSize);
if (filesToRecover.isEmpty()) {
logger.trace("no files to recover, all exist within the local store");
}
if (logger.isTraceEnabled()) {
logger.trace("[{}] [{}] recovering_files [{}] with total_size [{}], reusing_files [{}] with reused_size [{}]", shardId, snapshotId, numberOfFiles, new ByteSizeValue(totalSize), numberOfReusedFiles, new ByteSizeValue(reusedTotalSize));
}
final CountDownLatch latch = new CountDownLatch(filesToRecover.size());
final CopyOnWriteArrayList<Throwable> failures = new CopyOnWriteArrayList<Throwable>();
for (final FileInfo fileToRecover : filesToRecover) {
logger.trace("[{}] [{}] restoring file [{}]", shardId, snapshotId, fileToRecover.name());
restoreFile(fileToRecover, latch, failures);
}
try {
latch.await();
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
if (!failures.isEmpty()) {
throw new IndexShardRestoreFailedException(shardId, "Failed to recover index", failures.get(0));
}
// read the snapshot data persisted
long version = -1;
try {
if (Lucene.indexExists(store.directory())) {
version = Lucene.readSegmentInfos(store.directory()).getVersion();
}
} catch (IOException e) {
throw new IndexShardRestoreFailedException(shardId, "Failed to fetch index version after copying it over", e);
}
recoveryStatus.index().updateVersion(version);
// now, go over and clean files that are in the store, but were not in the snapshot
try {
for (String storeFile : store.directory().listAll()) {
if (!snapshot.containPhysicalIndexFile(storeFile)) {
try {
store.directory().deleteFile(storeFile);
} catch (IOException e) {
// ignore
}
}
}
} catch (IOException e) {
// ignore
}
}
/**
* Restores a file
* This method is asynchronous. Upon completion of the operation the latch is counted down and any failures are
* added to the {@code failures} list
*
* @param fileInfo file to be restored
* @param latch latch that should be counted down once the file is restored
* @param failures thread-safe list of failures
*/
private void restoreFile(final FileInfo fileInfo, final CountDownLatch latch, final List<Throwable> failures) {
final IndexOutput indexOutput;
try {
// we create an output with no checksum, this is because the pure binary data of the file is not
// the checksum (because of seek). We will create the checksum file once copying is done
indexOutput = store.createOutputRaw(fileInfo.physicalName());
} catch (IOException e) {
failures.add(e);
latch.countDown();
return;
}
String firstFileToRecover = fileInfo.partName(0);
final AtomicInteger partIndex = new AtomicInteger();
blobContainer.readBlob(firstFileToRecover, new BlobContainer.ReadBlobListener() {
@Override
public synchronized void onPartial(byte[] data, int offset, int size) throws IOException {
recoveryStatus.index().addCurrentFilesSize(size);
indexOutput.writeBytes(data, offset, size);
if (restoreRateLimiter != null) {
rateLimiterListener.onRestorePause(restoreRateLimiter.pause(size));
}
}
@Override
public synchronized void onCompleted() {
int part = partIndex.incrementAndGet();
if (part < fileInfo.numberOfParts()) {
String partName = fileInfo.partName(part);
// continue with the new part
blobContainer.readBlob(partName, this);
return;
} else {
// we are done...
try {
indexOutput.close();
// write the checksum
if (fileInfo.checksum() != null) {
store.writeChecksum(fileInfo.physicalName(), fileInfo.checksum());
}
store.directory().sync(Collections.singleton(fileInfo.physicalName()));
} catch (IOException e) {
onFailure(e);
return;
}
}
latch.countDown();
}
@Override
public void onFailure(Throwable t) {
failures.add(t);
latch.countDown();
}
});
}
}
public interface RateLimiterListener {
void onRestorePause(long nanos);
void onSnapshotPause(long nanos);
}
}
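// Self-contained sketch (not part of the repository class above) of the "__<generation>" blob-naming
// scheme used by fileNameFromGeneration() and parsed back by findLatestFileNameGeneration():
// generations are encoded in radix 36 after a "__" prefix. (The real code first strips multi-part
// suffixes via FileInfo.canonicalName, which is not shown here.)
class BlobGenerationNameSketch {
    static String fileNameFromGeneration(long generation) {
        return "__" + Long.toString(generation, Character.MAX_RADIX);
    }
    static long generationFromBlobName(String blobName) {
        // strip the "__" prefix and decode the radix-36 suffix
        return Long.parseLong(blobName.substring(2), Character.MAX_RADIX);
    }
    public static void main(String[] args) {
        String blobName = fileNameFromGeneration(123456);
        System.out.println(blobName);                          // __2n9c
        System.out.println(generationFromBlobName(blobName));  // 123456
    }
}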
| 1no label
|
src_main_java_org_elasticsearch_index_snapshots_blobstore_BlobStoreIndexShardRepository.java
|
189 |
public interface OSizeable {
public int size();
}
| 0true
|
commons_src_main_java_com_orientechnologies_common_util_OSizeable.java
|
396 |
handler = new EventHandler<PortableEntryEvent>() {
public void handle(PortableEntryEvent event) {
cache.remove(event.getKey());
}
@Override
public void onListenerRegister() {
cache.clear();
}
};
| 1no label
|
hazelcast-client_src_main_java_com_hazelcast_client_nearcache_ClientNearCache.java
|
783 |
private class TransportHandler extends BaseTransportRequestHandler<MoreLikeThisRequest> {
@Override
public MoreLikeThisRequest newInstance() {
return new MoreLikeThisRequest();
}
@Override
public void messageReceived(MoreLikeThisRequest request, final TransportChannel channel) throws Exception {
// no need to have a threaded listener since we just send back a response
request.listenerThreaded(false);
execute(request, new ActionListener<SearchResponse>() {
@Override
public void onResponse(SearchResponse result) {
try {
channel.sendResponse(result);
} catch (Throwable e) {
onFailure(e);
}
}
@Override
public void onFailure(Throwable e) {
try {
channel.sendResponse(e);
} catch (Exception e1) {
logger.warn("Failed to send response for get", e1);
}
}
});
}
@Override
public String executor() {
return ThreadPool.Names.SAME;
}
}
| 0true
|
src_main_java_org_elasticsearch_action_mlt_TransportMoreLikeThisAction.java
|
3,541 |
public abstract class AbstractFieldMapper<T> implements FieldMapper<T> {
public static class Defaults {
public static final FieldType FIELD_TYPE = new FieldType();
public static final boolean DOC_VALUES = false;
static {
FIELD_TYPE.setIndexed(true);
FIELD_TYPE.setTokenized(true);
FIELD_TYPE.setStored(false);
FIELD_TYPE.setStoreTermVectors(false);
FIELD_TYPE.setOmitNorms(false);
FIELD_TYPE.setIndexOptions(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS);
FIELD_TYPE.freeze();
}
public static final float BOOST = 1.0f;
public static final ContentPath.Type PATH_TYPE = ContentPath.Type.FULL;
}
public abstract static class Builder<T extends Builder, Y extends AbstractFieldMapper> extends Mapper.Builder<T, Y> {
protected final FieldType fieldType;
protected Boolean docValues;
protected float boost = Defaults.BOOST;
protected boolean omitNormsSet = false;
protected String indexName;
protected NamedAnalyzer indexAnalyzer;
protected NamedAnalyzer searchAnalyzer;
protected Boolean includeInAll;
protected boolean indexOptionsSet = false;
protected PostingsFormatProvider postingsProvider;
protected DocValuesFormatProvider docValuesProvider;
protected SimilarityProvider similarity;
protected Loading normsLoading;
@Nullable
protected Settings fieldDataSettings;
protected final MultiFields.Builder multiFieldsBuilder;
protected CopyTo copyTo;
protected Builder(String name, FieldType fieldType) {
super(name);
this.fieldType = fieldType;
multiFieldsBuilder = new MultiFields.Builder();
}
public T index(boolean index) {
this.fieldType.setIndexed(index);
return builder;
}
public T store(boolean store) {
this.fieldType.setStored(store);
return builder;
}
public T docValues(boolean docValues) {
this.docValues = docValues;
return builder;
}
public T storeTermVectors(boolean termVectors) {
if (termVectors) {
this.fieldType.setStoreTermVectors(termVectors);
} // don't set it to false, it is default and might be flipped by a more specific option
return builder;
}
public T storeTermVectorOffsets(boolean termVectorOffsets) {
if (termVectorOffsets) {
this.fieldType.setStoreTermVectors(termVectorOffsets);
}
this.fieldType.setStoreTermVectorOffsets(termVectorOffsets);
return builder;
}
public T storeTermVectorPositions(boolean termVectorPositions) {
if (termVectorPositions) {
this.fieldType.setStoreTermVectors(termVectorPositions);
}
this.fieldType.setStoreTermVectorPositions(termVectorPositions);
return builder;
}
public T storeTermVectorPayloads(boolean termVectorPayloads) {
if (termVectorPayloads) {
this.fieldType.setStoreTermVectors(termVectorPayloads);
}
this.fieldType.setStoreTermVectorPayloads(termVectorPayloads);
return builder;
}
public T tokenized(boolean tokenized) {
this.fieldType.setTokenized(tokenized);
return builder;
}
public T boost(float boost) {
this.boost = boost;
return builder;
}
public T omitNorms(boolean omitNorms) {
this.fieldType.setOmitNorms(omitNorms);
this.omitNormsSet = true;
return builder;
}
public T indexOptions(IndexOptions indexOptions) {
this.fieldType.setIndexOptions(indexOptions);
this.indexOptionsSet = true;
return builder;
}
public T indexName(String indexName) {
this.indexName = indexName;
return builder;
}
public T indexAnalyzer(NamedAnalyzer indexAnalyzer) {
this.indexAnalyzer = indexAnalyzer;
return builder;
}
public T searchAnalyzer(NamedAnalyzer searchAnalyzer) {
this.searchAnalyzer = searchAnalyzer;
return builder;
}
public T includeInAll(Boolean includeInAll) {
this.includeInAll = includeInAll;
return builder;
}
public T postingsFormat(PostingsFormatProvider postingsFormat) {
this.postingsProvider = postingsFormat;
return builder;
}
public T docValuesFormat(DocValuesFormatProvider docValuesFormat) {
this.docValuesProvider = docValuesFormat;
return builder;
}
public T similarity(SimilarityProvider similarity) {
this.similarity = similarity;
return builder;
}
public T normsLoading(Loading normsLoading) {
this.normsLoading = normsLoading;
return builder;
}
public T fieldDataSettings(Settings settings) {
this.fieldDataSettings = settings;
return builder;
}
public T multiFieldPathType(ContentPath.Type pathType) {
multiFieldsBuilder.pathType(pathType);
return builder;
}
public T addMultiField(Mapper.Builder mapperBuilder) {
multiFieldsBuilder.add(mapperBuilder);
return builder;
}
public T copyTo(CopyTo copyTo) {
this.copyTo = copyTo;
return builder;
}
public Names buildNames(BuilderContext context) {
return new Names(name, buildIndexName(context), indexName == null ? name : indexName, buildFullName(context), context.path().sourcePath());
}
public String buildIndexName(BuilderContext context) {
String actualIndexName = indexName == null ? name : indexName;
return context.path().pathAsText(actualIndexName);
}
public String buildFullName(BuilderContext context) {
return context.path().fullPathAsText(name);
}
}
private static final ThreadLocal<List<Field>> FIELD_LIST = new ThreadLocal<List<Field>>() {
protected List<Field> initialValue() {
return new ArrayList<Field>(2);
}
};
protected final Names names;
protected float boost;
protected final FieldType fieldType;
private final boolean docValues;
protected final NamedAnalyzer indexAnalyzer;
protected NamedAnalyzer searchAnalyzer;
protected PostingsFormatProvider postingsFormat;
protected DocValuesFormatProvider docValuesFormat;
protected final SimilarityProvider similarity;
protected Loading normsLoading;
protected Settings customFieldDataSettings;
protected FieldDataType fieldDataType;
protected final MultiFields multiFields;
protected CopyTo copyTo;
protected AbstractFieldMapper(Names names, float boost, FieldType fieldType, Boolean docValues, NamedAnalyzer indexAnalyzer,
NamedAnalyzer searchAnalyzer, PostingsFormatProvider postingsFormat,
DocValuesFormatProvider docValuesFormat, SimilarityProvider similarity,
Loading normsLoading, @Nullable Settings fieldDataSettings, Settings indexSettings) {
this(names, boost, fieldType, docValues, indexAnalyzer, searchAnalyzer, postingsFormat, docValuesFormat, similarity,
normsLoading, fieldDataSettings, indexSettings, MultiFields.empty(), null);
}
protected AbstractFieldMapper(Names names, float boost, FieldType fieldType, Boolean docValues, NamedAnalyzer indexAnalyzer,
NamedAnalyzer searchAnalyzer, PostingsFormatProvider postingsFormat,
DocValuesFormatProvider docValuesFormat, SimilarityProvider similarity,
Loading normsLoading, @Nullable Settings fieldDataSettings, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
this.names = names;
this.boost = boost;
this.fieldType = fieldType;
this.fieldType.freeze();
// automatically set to keyword analyzer if it's indexed and not analyzed
if (indexAnalyzer == null && !this.fieldType.tokenized() && this.fieldType.indexed()) {
this.indexAnalyzer = Lucene.KEYWORD_ANALYZER;
} else {
this.indexAnalyzer = indexAnalyzer;
}
// automatically set to keyword analyzer if it's indexed and not analyzed
if (searchAnalyzer == null && !this.fieldType.tokenized() && this.fieldType.indexed()) {
this.searchAnalyzer = Lucene.KEYWORD_ANALYZER;
} else {
this.searchAnalyzer = searchAnalyzer;
}
if (postingsFormat == null) {
if (defaultPostingFormat() != null) {
postingsFormat = PostingFormats.getAsProvider(defaultPostingFormat());
}
}
this.postingsFormat = postingsFormat;
this.docValuesFormat = docValuesFormat;
this.similarity = similarity;
this.normsLoading = normsLoading;
this.customFieldDataSettings = fieldDataSettings;
if (fieldDataSettings == null) {
this.fieldDataType = defaultFieldDataType();
} else {
// create a new field data type, with the default settings as well as the "new ones"
this.fieldDataType = new FieldDataType(defaultFieldDataType().getType(),
ImmutableSettings.builder().put(defaultFieldDataType().getSettings()).put(fieldDataSettings)
);
}
if (docValues != null) {
this.docValues = docValues;
} else if (fieldDataType == null) {
this.docValues = false;
} else {
this.docValues = FieldDataType.DOC_VALUES_FORMAT_VALUE.equals(fieldDataType.getFormat(indexSettings));
}
this.multiFields = multiFields;
this.copyTo = copyTo;
}
@Nullable
protected String defaultPostingFormat() {
return null;
}
@Nullable
protected String defaultDocValuesFormat() {
return null;
}
@Override
public String name() {
return names.name();
}
@Override
public Names names() {
return this.names;
}
public abstract FieldType defaultFieldType();
public abstract FieldDataType defaultFieldDataType();
@Override
public final FieldDataType fieldDataType() {
return fieldDataType;
}
@Override
public FieldType fieldType() {
return fieldType;
}
@Override
public float boost() {
return this.boost;
}
@Override
public Analyzer indexAnalyzer() {
return this.indexAnalyzer;
}
@Override
public Analyzer searchAnalyzer() {
return this.searchAnalyzer;
}
@Override
public Analyzer searchQuoteAnalyzer() {
return this.searchAnalyzer;
}
@Override
public SimilarityProvider similarity() {
return similarity;
}
@Override
public CopyTo copyTo() {
return copyTo;
}
@Override
public void parse(ParseContext context) throws IOException {
final List<Field> fields = FIELD_LIST.get();
assert fields.isEmpty();
try {
parseCreateField(context, fields);
for (Field field : fields) {
if (!customBoost()) {
field.setBoost(boost);
}
if (context.listener().beforeFieldAdded(this, field, context)) {
context.doc().add(field);
}
}
} catch (Exception e) {
throw new MapperParsingException("failed to parse [" + names.fullName() + "]", e);
} finally {
fields.clear();
}
multiFields.parse(this, context);
if (copyTo != null) {
copyTo.parse(context);
}
}
/**
* Parse the field value and populate <code>fields</code>.
*/
protected abstract void parseCreateField(ParseContext context, List<Field> fields) throws IOException;
/**
* Derived classes can override it to specify that boost value is set by derived classes.
*/
protected boolean customBoost() {
return false;
}
@Override
public void traverse(FieldMapperListener fieldMapperListener) {
fieldMapperListener.fieldMapper(this);
multiFields.traverse(fieldMapperListener);
}
@Override
public void traverse(ObjectMapperListener objectMapperListener) {
// nothing to do here...
}
@Override
public Object valueForSearch(Object value) {
return value;
}
@Override
public BytesRef indexedValueForSearch(Object value) {
return BytesRefs.toBytesRef(value);
}
@Override
public Query queryStringTermQuery(Term term) {
return null;
}
@Override
public boolean useTermQueryWithQueryString() {
return false;
}
@Override
public Query termQuery(Object value, @Nullable QueryParseContext context) {
return new TermQuery(names().createIndexNameTerm(indexedValueForSearch(value)));
}
@Override
public Filter termFilter(Object value, @Nullable QueryParseContext context) {
return new TermFilter(names().createIndexNameTerm(indexedValueForSearch(value)));
}
@Override
public Filter termsFilter(List values, @Nullable QueryParseContext context) {
BytesRef[] bytesRefs = new BytesRef[values.size()];
for (int i = 0; i < bytesRefs.length; i++) {
bytesRefs[i] = indexedValueForSearch(values.get(i));
}
return new TermsFilter(names.indexName(), bytesRefs);
}
/**
* A terms filter based on the field data cache
*/
@Override
public Filter termsFilter(IndexFieldDataService fieldDataService, List values, @Nullable QueryParseContext context) {
// create with initial size large enough to avoid rehashing
ObjectOpenHashSet<BytesRef> terms =
new ObjectOpenHashSet<BytesRef>((int) (values.size() * (1 + ObjectOpenHashSet.DEFAULT_LOAD_FACTOR)));
for (int i = 0, len = values.size(); i < len; i++) {
terms.add(indexedValueForSearch(values.get(i)));
}
return FieldDataTermsFilter.newBytes(fieldDataService.getForField(this), terms);
}
@Override
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) {
return new TermRangeQuery(names.indexName(),
lowerTerm == null ? null : indexedValueForSearch(lowerTerm),
upperTerm == null ? null : indexedValueForSearch(upperTerm),
includeLower, includeUpper);
}
@Override
public Filter rangeFilter(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) {
return new TermRangeFilter(names.indexName(),
lowerTerm == null ? null : indexedValueForSearch(lowerTerm),
upperTerm == null ? null : indexedValueForSearch(upperTerm),
includeLower, includeUpper);
}
@Override
public Query fuzzyQuery(String value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) {
return new FuzzyQuery(names.createIndexNameTerm(indexedValueForSearch(value)), fuzziness.asDistance(value), prefixLength, maxExpansions, transpositions);
}
@Override
public Query prefixQuery(Object value, @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryParseContext context) {
PrefixQuery query = new PrefixQuery(names().createIndexNameTerm(indexedValueForSearch(value)));
if (method != null) {
query.setRewriteMethod(method);
}
return query;
}
@Override
public Filter prefixFilter(Object value, @Nullable QueryParseContext context) {
return new PrefixFilter(names().createIndexNameTerm(indexedValueForSearch(value)));
}
@Override
public Query regexpQuery(Object value, int flags, @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryParseContext context) {
RegexpQuery query = new RegexpQuery(names().createIndexNameTerm(indexedValueForSearch(value)), flags);
if (method != null) {
query.setRewriteMethod(method);
}
return query;
}
@Override
public Filter regexpFilter(Object value, int flags, @Nullable QueryParseContext parseContext) {
return new RegexpFilter(names().createIndexNameTerm(indexedValueForSearch(value)), flags);
}
@Override
public Filter nullValueFilter() {
return null;
}
@Override
public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
if (!this.getClass().equals(mergeWith.getClass())) {
String mergedType = mergeWith.getClass().getSimpleName();
if (mergeWith instanceof AbstractFieldMapper) {
mergedType = ((AbstractFieldMapper) mergeWith).contentType();
}
mergeContext.addConflict("mapper [" + names.fullName() + "] of different type, current_type [" + contentType() + "], merged_type [" + mergedType + "]");
// different types, return
return;
}
AbstractFieldMapper fieldMergeWith = (AbstractFieldMapper) mergeWith;
if (this.fieldType().indexed() != fieldMergeWith.fieldType().indexed() || this.fieldType().tokenized() != fieldMergeWith.fieldType().tokenized()) {
mergeContext.addConflict("mapper [" + names.fullName() + "] has different index values");
}
if (this.fieldType().stored() != fieldMergeWith.fieldType().stored()) {
mergeContext.addConflict("mapper [" + names.fullName() + "] has different store values");
}
if (!this.hasDocValues() && fieldMergeWith.hasDocValues()) {
// don't add conflict if this mapper has doc values while the mapper to merge doesn't since doc values are implicitly set
// when the doc_values field data format is configured
mergeContext.addConflict("mapper [" + names.fullName() + "] has different " + TypeParsers.DOC_VALUES + " values");
}
if (this.fieldType().omitNorms() != fieldMergeWith.fieldType.omitNorms()) {
mergeContext.addConflict("mapper [" + names.fullName() + "] has different `norms.enabled` values");
}
if (this.fieldType().tokenized() != fieldMergeWith.fieldType().tokenized()) {
mergeContext.addConflict("mapper [" + names.fullName() + "] has different tokenize values");
}
if (this.fieldType().storeTermVectors() != fieldMergeWith.fieldType().storeTermVectors()) {
mergeContext.addConflict("mapper [" + names.fullName() + "] has different store_term_vector values");
}
if (this.fieldType().storeTermVectorOffsets() != fieldMergeWith.fieldType().storeTermVectorOffsets()) {
mergeContext.addConflict("mapper [" + names.fullName() + "] has different store_term_vector_offsets values");
}
if (this.fieldType().storeTermVectorPositions() != fieldMergeWith.fieldType().storeTermVectorPositions()) {
mergeContext.addConflict("mapper [" + names.fullName() + "] has different store_term_vector_positions values");
}
if (this.fieldType().storeTermVectorPayloads() != fieldMergeWith.fieldType().storeTermVectorPayloads()) {
mergeContext.addConflict("mapper [" + names.fullName() + "] has different store_term_vector_payloads values");
}
if (this.indexAnalyzer == null) {
if (fieldMergeWith.indexAnalyzer != null) {
mergeContext.addConflict("mapper [" + names.fullName() + "] has different index_analyzer");
}
} else if (fieldMergeWith.indexAnalyzer == null) {
mergeContext.addConflict("mapper [" + names.fullName() + "] has different index_analyzer");
} else if (!this.indexAnalyzer.name().equals(fieldMergeWith.indexAnalyzer.name())) {
mergeContext.addConflict("mapper [" + names.fullName() + "] has different index_analyzer");
}
if (this.similarity == null) {
if (fieldMergeWith.similarity() != null) {
mergeContext.addConflict("mapper [" + names.fullName() + "] has different similarity");
}
} else if (fieldMergeWith.similarity() == null) {
mergeContext.addConflict("mapper [" + names.fullName() + "] has different similarity");
} else if (!this.similarity().equals(fieldMergeWith.similarity())) {
mergeContext.addConflict("mapper [" + names.fullName() + "] has different similarity");
}
multiFields.merge(mergeWith, mergeContext);
if (!mergeContext.mergeFlags().simulate()) {
// apply changeable values
this.boost = fieldMergeWith.boost;
this.normsLoading = fieldMergeWith.normsLoading;
this.copyTo = fieldMergeWith.copyTo;
if (fieldMergeWith.postingsFormat != null) {
this.postingsFormat = fieldMergeWith.postingsFormat;
}
if (fieldMergeWith.docValuesFormat != null) {
this.docValuesFormat = fieldMergeWith.docValuesFormat;
}
if (fieldMergeWith.searchAnalyzer != null) {
this.searchAnalyzer = fieldMergeWith.searchAnalyzer;
}
if (fieldMergeWith.customFieldDataSettings != null) {
if (!Objects.equal(fieldMergeWith.customFieldDataSettings, this.customFieldDataSettings)) {
this.customFieldDataSettings = fieldMergeWith.customFieldDataSettings;
this.fieldDataType = new FieldDataType(defaultFieldDataType().getType(),
ImmutableSettings.builder().put(defaultFieldDataType().getSettings()).put(this.customFieldDataSettings)
);
}
}
}
}
@Override
public PostingsFormatProvider postingsFormatProvider() {
return postingsFormat;
}
@Override
public DocValuesFormatProvider docValuesFormatProvider() {
return docValuesFormat;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(names.name());
boolean includeDefaults = params.paramAsBoolean("include_defaults", false);
doXContentBody(builder, includeDefaults, params);
return builder.endObject();
}
protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException {
builder.field("type", contentType());
if (includeDefaults || !names.name().equals(names.indexNameClean())) {
builder.field("index_name", names.indexNameClean());
}
if (includeDefaults || boost != 1.0f) {
builder.field("boost", boost);
}
FieldType defaultFieldType = defaultFieldType();
if (includeDefaults || fieldType.indexed() != defaultFieldType.indexed() ||
fieldType.tokenized() != defaultFieldType.tokenized()) {
builder.field("index", indexTokenizeOptionToString(fieldType.indexed(), fieldType.tokenized()));
}
if (includeDefaults || fieldType.stored() != defaultFieldType.stored()) {
builder.field("store", fieldType.stored());
}
if (includeDefaults || hasDocValues() != Defaults.DOC_VALUES) {
builder.field(TypeParsers.DOC_VALUES, docValues);
}
if (includeDefaults || fieldType.storeTermVectors() != defaultFieldType.storeTermVectors()) {
builder.field("term_vector", termVectorOptionsToString(fieldType));
}
if (includeDefaults || fieldType.omitNorms() != defaultFieldType.omitNorms() || normsLoading != null) {
builder.startObject("norms");
if (includeDefaults || fieldType.omitNorms() != defaultFieldType.omitNorms()) {
builder.field("enabled", !fieldType.omitNorms());
}
if (normsLoading != null) {
builder.field(Loading.KEY, normsLoading);
}
builder.endObject();
}
if (includeDefaults || fieldType.indexOptions() != defaultFieldType.indexOptions()) {
builder.field("index_options", indexOptionToString(fieldType.indexOptions()));
}
if (indexAnalyzer == null && searchAnalyzer == null) {
if (includeDefaults) {
builder.field("analyzer", "default");
}
} else if (indexAnalyzer == null) {
// searchAnalyzer != null
if (includeDefaults || (!searchAnalyzer.name().startsWith("_") && !searchAnalyzer.name().equals("default"))) {
builder.field("search_analyzer", searchAnalyzer.name());
}
} else if (searchAnalyzer == null) {
// indexAnalyzer != null
if (includeDefaults || (!indexAnalyzer.name().startsWith("_") && !indexAnalyzer.name().equals("default"))) {
builder.field("index_analyzer", indexAnalyzer.name());
}
} else if (indexAnalyzer.name().equals(searchAnalyzer.name())) {
// indexAnalyzer == searchAnalyzer
if (includeDefaults || (!indexAnalyzer.name().startsWith("_") && !indexAnalyzer.name().equals("default"))) {
builder.field("analyzer", indexAnalyzer.name());
}
} else {
// both are there but different
if (includeDefaults || (!indexAnalyzer.name().startsWith("_") && !indexAnalyzer.name().equals("default"))) {
builder.field("index_analyzer", indexAnalyzer.name());
}
if (includeDefaults || (!searchAnalyzer.name().startsWith("_") && !searchAnalyzer.name().equals("default"))) {
builder.field("search_analyzer", searchAnalyzer.name());
}
}
if (postingsFormat != null) {
if (includeDefaults || !postingsFormat.name().equals(defaultPostingFormat())) {
builder.field("postings_format", postingsFormat.name());
}
} else if (includeDefaults) {
String format = defaultPostingFormat();
if (format == null) {
format = PostingsFormatService.DEFAULT_FORMAT;
}
builder.field("postings_format", format);
}
if (docValuesFormat != null) {
if (includeDefaults || !docValuesFormat.name().equals(defaultDocValuesFormat())) {
builder.field(DOC_VALUES_FORMAT, docValuesFormat.name());
}
} else if (includeDefaults) {
String format = defaultDocValuesFormat();
if (format == null) {
format = DocValuesFormatService.DEFAULT_FORMAT;
}
builder.field(DOC_VALUES_FORMAT, format);
}
if (similarity() != null) {
builder.field("similarity", similarity().name());
} else if (includeDefaults) {
builder.field("similarity", SimilarityLookupService.DEFAULT_SIMILARITY);
}
if (customFieldDataSettings != null) {
builder.field("fielddata", (Map) customFieldDataSettings.getAsMap());
} else if (includeDefaults) {
builder.field("fielddata", (Map) fieldDataType.getSettings().getAsMap());
}
multiFields.toXContent(builder, params);
if (copyTo != null) {
copyTo.toXContent(builder, params);
}
}
protected static String indexOptionToString(IndexOptions indexOption) {
switch (indexOption) {
case DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS:
return TypeParsers.INDEX_OPTIONS_OFFSETS;
case DOCS_AND_FREQS:
return TypeParsers.INDEX_OPTIONS_FREQS;
case DOCS_AND_FREQS_AND_POSITIONS:
return TypeParsers.INDEX_OPTIONS_POSITIONS;
case DOCS_ONLY:
return TypeParsers.INDEX_OPTIONS_DOCS;
default:
throw new ElasticsearchIllegalArgumentException("Unknown IndexOptions [" + indexOption + "]");
}
}
public static String termVectorOptionsToString(FieldType fieldType) {
if (!fieldType.storeTermVectors()) {
return "no";
} else if (!fieldType.storeTermVectorOffsets() && !fieldType.storeTermVectorPositions()) {
return "yes";
} else if (fieldType.storeTermVectorOffsets() && !fieldType.storeTermVectorPositions()) {
return "with_offsets";
} else {
StringBuilder builder = new StringBuilder("with");
if (fieldType.storeTermVectorPositions()) {
builder.append("_positions");
}
if (fieldType.storeTermVectorOffsets()) {
builder.append("_offsets");
}
if (fieldType.storeTermVectorPayloads()) {
builder.append("_payloads");
}
return builder.toString();
}
}
protected static String indexTokenizeOptionToString(boolean indexed, boolean tokenized) {
if (!indexed) {
return "no";
} else if (tokenized) {
return "analyzed";
} else {
return "not_analyzed";
}
}
protected abstract String contentType();
@Override
public void close() {
multiFields.close();
}
@Override
public boolean isNumeric() {
return false;
}
@Override
public boolean isSortable() {
return true;
}
public boolean hasDocValues() {
return docValues;
}
@Override
public Loading normsLoading(Loading defaultLoading) {
return normsLoading == null ? defaultLoading : normsLoading;
}
public static class MultiFields {
public static MultiFields empty() {
return new MultiFields(Defaults.PATH_TYPE, ImmutableOpenMap.<String, Mapper>of());
}
public static class Builder {
private final ImmutableOpenMap.Builder<String, Mapper.Builder> mapperBuilders = ImmutableOpenMap.builder();
private ContentPath.Type pathType = Defaults.PATH_TYPE;
public Builder pathType(ContentPath.Type pathType) {
this.pathType = pathType;
return this;
}
public Builder add(Mapper.Builder builder) {
mapperBuilders.put(builder.name(), builder);
return this;
}
@SuppressWarnings("unchecked")
public MultiFields build(AbstractFieldMapper.Builder mainFieldBuilder, BuilderContext context) {
if (pathType == Defaults.PATH_TYPE && mapperBuilders.isEmpty()) {
return empty();
} else if (mapperBuilders.isEmpty()) {
return new MultiFields(pathType, ImmutableOpenMap.<String, Mapper>of());
} else {
ContentPath.Type origPathType = context.path().pathType();
context.path().pathType(pathType);
context.path().add(mainFieldBuilder.name());
ImmutableOpenMap.Builder mapperBuilders = this.mapperBuilders;
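// reuse the builder map in place, replacing each Mapper.Builder value with the Mapper it builds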
for (ObjectObjectCursor<String, Mapper.Builder> cursor : this.mapperBuilders) {
String key = cursor.key;
Mapper.Builder value = cursor.value;
mapperBuilders.put(key, value.build(context));
}
context.path().remove();
context.path().pathType(origPathType);
ImmutableOpenMap.Builder<String, Mapper> mappers = mapperBuilders.cast();
return new MultiFields(pathType, mappers.build());
}
}
}
private final ContentPath.Type pathType;
private volatile ImmutableOpenMap<String, Mapper> mappers;
public MultiFields(ContentPath.Type pathType, ImmutableOpenMap<String, Mapper> mappers) {
this.pathType = pathType;
this.mappers = mappers;
// we disable inclusion in the _all field for multi-field mappers
for (ObjectCursor<Mapper> cursor : mappers.values()) {
Mapper mapper = cursor.value;
if (mapper instanceof AllFieldMapper.IncludeInAll) {
((AllFieldMapper.IncludeInAll) mapper).unsetIncludeInAll();
}
}
}
public void parse(AbstractFieldMapper mainField, ParseContext context) throws IOException {
if (mappers.isEmpty()) {
return;
}
ContentPath.Type origPathType = context.path().pathType();
context.path().pathType(pathType);
context.path().add(mainField.name());
for (ObjectCursor<Mapper> cursor : mappers.values()) {
cursor.value.parse(context);
}
context.path().remove();
context.path().pathType(origPathType);
}
// No need for locking, because locking is taken care of in ObjectMapper#merge and DocumentMapper#merge
public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
AbstractFieldMapper mergeWithMultiField = (AbstractFieldMapper) mergeWith;
List<FieldMapper> newFieldMappers = null;
ImmutableOpenMap.Builder<String, Mapper> newMappersBuilder = null;
for (ObjectCursor<Mapper> cursor : mergeWithMultiField.multiFields.mappers.values()) {
Mapper mergeWithMapper = cursor.value;
Mapper mergeIntoMapper = mappers.get(mergeWithMapper.name());
if (mergeIntoMapper == null) {
// no mapping, simply add it if not simulating
if (!mergeContext.mergeFlags().simulate()) {
// we disable inclusion in the _all field for multi-field mappers
if (mergeWithMapper instanceof AllFieldMapper.IncludeInAll) {
((AllFieldMapper.IncludeInAll) mergeWithMapper).unsetIncludeInAll();
}
if (newMappersBuilder == null) {
newMappersBuilder = ImmutableOpenMap.builder(mappers);
}
newMappersBuilder.put(mergeWithMapper.name(), mergeWithMapper);
if (mergeWithMapper instanceof AbstractFieldMapper) {
if (newFieldMappers == null) {
newFieldMappers = new ArrayList<FieldMapper>(2);
}
newFieldMappers.add((FieldMapper) mergeWithMapper);
}
}
} else {
mergeIntoMapper.merge(mergeWithMapper, mergeContext);
}
}
// first add all field mappers
if (newFieldMappers != null) {
mergeContext.docMapper().addFieldMappers(newFieldMappers);
}
// now publish mappers
if (newMappersBuilder != null) {
mappers = newMappersBuilder.build();
}
}
public void traverse(FieldMapperListener fieldMapperListener) {
for (ObjectCursor<Mapper> cursor : mappers.values()) {
cursor.value.traverse(fieldMapperListener);
}
}
public void close() {
for (ObjectCursor<Mapper> cursor : mappers.values()) {
cursor.value.close();
}
}
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
if (pathType != Defaults.PATH_TYPE) {
builder.field("path", pathType.name().toLowerCase(Locale.ROOT));
}
if (!mappers.isEmpty()) {
builder.startObject("fields");
for (ObjectCursor<Mapper> cursor : mappers.values()) {
cursor.value.toXContent(builder, params);
}
builder.endObject();
}
return builder;
}
}
/**
     * Represents a list of fields to which the current field should be copied, with an optional boost factor
*/
public static class CopyTo {
private final ImmutableList<String> copyToFields;
private CopyTo(ImmutableList<String> copyToFields) {
this.copyToFields = copyToFields;
}
/**
* Creates instances of the fields that the current field should be copied to
*/
public void parse(ParseContext context) throws IOException {
if (!context.isWithinCopyTo()) {
for (String field : copyToFields) {
parse(field, context);
}
}
}
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
if (!copyToFields.isEmpty()) {
builder.startArray("copy_to");
for (String field : copyToFields) {
builder.value(field);
}
builder.endArray();
}
return builder;
}
public static class Builder {
private final ImmutableList.Builder<String> copyToBuilders = ImmutableList.builder();
public Builder add(String field) {
copyToBuilders.add(field);
return this;
}
public CopyTo build() {
return new CopyTo(copyToBuilders.build());
}
}
public ImmutableList<String> copyToFields() {
return copyToFields;
}
/**
         * Creates a copy of the current field with the given field name and boost
*/
public void parse(String field, ParseContext context) throws IOException {
context.setWithinCopyTo();
FieldMappers mappers = context.docMapper().mappers().indexName(field);
if (mappers != null && !mappers.isEmpty()) {
mappers.mapper().parse(context);
} else {
int posDot = field.lastIndexOf('.');
if (posDot > 0) {
// Compound name
String objectPath = field.substring(0, posDot);
String fieldPath = field.substring(posDot + 1);
ObjectMapper mapper = context.docMapper().objectMappers().get(objectPath);
if (mapper == null) {
//TODO: Create an object dynamically?
throw new MapperParsingException("attempt to copy value to non-existing object [" + field + "]");
}
ContentPath.Type origPathType = context.path().pathType();
context.path().pathType(ContentPath.Type.FULL);
context.path().add(objectPath);
                // We might already be in a dynamically created field, so we need to clear the withinNewMapper flag
                // and then restore it, so that we don't miss new mappers created from copy_to fields
boolean origWithinNewMapper = context.isWithinNewMapper();
context.clearWithinNewMapper();
try {
mapper.parseDynamicValue(context, fieldPath, context.parser().currentToken());
} finally {
if (origWithinNewMapper) {
context.setWithinNewMapper();
} else {
context.clearWithinNewMapper();
}
context.path().remove();
context.path().pathType(origPathType);
}
} else {
                // We might already be in a dynamically created field, so we need to clear the withinNewMapper flag
                // and then restore it, so that we don't miss new mappers created from copy_to fields
boolean origWithinNewMapper = context.isWithinNewMapper();
context.clearWithinNewMapper();
try {
context.docMapper().root().parseDynamicValue(context, field, context.parser().currentToken());
} finally {
if (origWithinNewMapper) {
context.setWithinNewMapper();
} else {
context.clearWithinNewMapper();
}
}
}
}
context.clearWithinCopyTo();
}
}
}
| 1 (no label) | src_main_java_org_elasticsearch_index_mapper_core_AbstractFieldMapper.java |
| 4,246 |
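The CopyTo.parse method above resolves a dotted copy_to target by splitting it at the last '.' into an enclosing object path and a leaf field name before delegating to the owning ObjectMapper. A minimal, self-contained sketch of that split logic (the class and method names below are illustrative, not part of the Elasticsearch API):

// Sketch: split a dotted copy_to target such as "user.address.zip" into the
// enclosing object path and the leaf field, mirroring the lastIndexOf('.')
// logic in CopyTo.parse above. Names are illustrative only.
public final class CopyToPathSplit {
    public static String[] split(String field) {
        int posDot = field.lastIndexOf('.');
        if (posDot > 0) {
            // compound name: everything before the last dot is the object path
            return new String[] { field.substring(0, posDot), field.substring(posDot + 1) };
        }
        // top-level field: no enclosing object
        return new String[] { null, field };
    }

    public static void main(String[] args) {
        String[] parts = split("user.address.zip");
        System.out.println(parts[0] + " / " + parts[1]); // user.address / zip
    }
}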
static class Delete implements Operation {
private Term uid;
private long version;
public Delete() {
}
public Delete(Engine.Delete delete) {
this(delete.uid());
this.version = delete.version();
}
public Delete(Term uid) {
this.uid = uid;
}
@Override
public Type opType() {
return Type.DELETE;
}
@Override
public long estimateSize() {
return ((uid.field().length() + uid.text().length()) * 2) + 20;
}
public Term uid() {
return this.uid;
}
public long version() {
return this.version;
}
@Override
public Source readSource(StreamInput in) throws IOException {
throw new ElasticsearchIllegalStateException("trying to read doc source from delete operation");
}
@Override
public void readFrom(StreamInput in) throws IOException {
int version = in.readVInt(); // version
uid = new Term(in.readString(), in.readString());
if (version >= 1) {
this.version = in.readLong();
}
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeVInt(1); // version
out.writeString(uid.field());
out.writeString(uid.text());
out.writeLong(version);
}
}
| 1 (no label) | src_main_java_org_elasticsearch_index_translog_Translog.java |
| 357 |
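The Translog.Delete operation above writes a small format-version header before its payload, so a reader can tolerate older entries that lack the trailing document version. A hedged sketch of the same version-prefixed pattern using plain java.io streams rather than Elasticsearch's StreamInput/StreamOutput (all names illustrative):

import java.io.*;

// Sketch of a version-prefixed record: the writer emits a format number
// first, and the reader only consumes fields that the recorded format
// guarantees to be present.
public final class VersionedDeleteCodec {
    static void write(DataOutputStream out, String field, String text, long version) throws IOException {
        out.writeInt(1);          // serialization format version
        out.writeUTF(field);      // uid field
        out.writeUTF(text);       // uid text
        out.writeLong(version);   // document version (added in format 1)
    }

    static long read(DataInputStream in) throws IOException {
        int format = in.readInt();
        String field = in.readUTF();
        String text = in.readUTF();
        long version = 0;
        if (format >= 1) {        // only present from format 1 onwards
            version = in.readLong();
        }
        return version;
    }

    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream buf = new ByteArrayOutputStream();
        write(new DataOutputStream(buf), "_uid", "type#1", 7L);
        System.out.println(read(new DataInputStream(new ByteArrayInputStream(buf.toByteArray())))); // 7
    }
}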
@Retention(RetentionPolicy.RUNTIME)
@Target({ElementType.FIELD, ElementType.METHOD})
public @interface NonCopied {
}
| 0 (true) | common_src_main_java_org_broadleafcommerce_common_extensibility_jpa_copy_NonCopied.java |
| 142 |
@Test
public class DoubleSerializerTest {
private static final int FIELD_SIZE = 8;
private static final Double OBJECT = Math.PI;
private ODoubleSerializer doubleSerializer;
byte[] stream = new byte[FIELD_SIZE];
@BeforeClass
public void beforeClass() {
doubleSerializer = new ODoubleSerializer();
}
public void testFieldSize() {
Assert.assertEquals(doubleSerializer.getObjectSize(null), FIELD_SIZE);
}
public void testSerialize() {
doubleSerializer.serialize(OBJECT, stream, 0);
Assert.assertEquals(doubleSerializer.deserialize(stream, 0), OBJECT);
}
public void testSerializeNative() {
doubleSerializer.serializeNative(OBJECT, stream, 0);
Assert.assertEquals(doubleSerializer.deserializeNative(stream, 0), OBJECT);
}
public void testNativeDirectMemoryCompatibility() {
doubleSerializer.serializeNative(OBJECT, stream, 0);
ODirectMemoryPointer pointer = new ODirectMemoryPointer(stream);
try {
Assert.assertEquals(doubleSerializer.deserializeFromDirectMemory(pointer, 0), OBJECT);
} finally {
pointer.free();
}
}
}
| 0 (true) | commons_src_test_java_com_orientechnologies_common_serialization_types_DoubleSerializerTest.java |
| 542 |
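The DoubleSerializerTest above asserts that a double always occupies exactly 8 bytes and round-trips through the serializer. A self-contained sketch of such an 8-byte round-trip via the IEEE-754 bit pattern; OrientDB's ODoubleSerializer may use a different byte order, so treat this as an assumption rather than its actual implementation:

// Sketch of an 8-byte double round-trip, packing the IEEE-754 bits big-endian.
public final class DoubleBytes {
    static void serialize(double value, byte[] stream, int offset) {
        long bits = Double.doubleToLongBits(value);
        for (int i = 0; i < 8; i++) {
            stream[offset + i] = (byte) (bits >>> (56 - 8 * i));
        }
    }

    static double deserialize(byte[] stream, int offset) {
        long bits = 0;
        for (int i = 0; i < 8; i++) {
            bits = (bits << 8) | (stream[offset + i] & 0xFFL);
        }
        return Double.longBitsToDouble(bits);
    }

    public static void main(String[] args) {
        byte[] stream = new byte[8];
        serialize(Math.PI, stream, 0);
        System.out.println(deserialize(stream, 0) == Math.PI); // true
    }
}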
public class DeleteMappingResponse extends AcknowledgedResponse {
DeleteMappingResponse() {
}
DeleteMappingResponse(boolean acknowledged) {
super(acknowledged);
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
readAcknowledged(in);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
writeAcknowledged(out);
}
}
| 0 (true) | src_main_java_org_elasticsearch_action_admin_indices_mapping_delete_DeleteMappingResponse.java |
| 3,118 |
public class SegmentsStats implements Streamable, ToXContent {
private long count;
private long memoryInBytes;
public SegmentsStats() {
}
public void add(long count, long memoryInBytes) {
this.count += count;
this.memoryInBytes += memoryInBytes;
}
public void add(SegmentsStats mergeStats) {
if (mergeStats == null) {
return;
}
this.count += mergeStats.count;
this.memoryInBytes += mergeStats.memoryInBytes;
}
/**
     * The segments count.
*/
public long getCount() {
return this.count;
}
/**
     * Estimation of the memory used by the segments.
*/
public long getMemoryInBytes() {
return this.memoryInBytes;
}
public ByteSizeValue getMemory() {
return new ByteSizeValue(memoryInBytes);
}
public static SegmentsStats readSegmentsStats(StreamInput in) throws IOException {
SegmentsStats stats = new SegmentsStats();
stats.readFrom(in);
return stats;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(Fields.SEGMENTS);
builder.field(Fields.COUNT, count);
builder.byteSizeField(Fields.MEMORY_IN_BYTES, Fields.MEMORY, memoryInBytes);
builder.endObject();
return builder;
}
static final class Fields {
static final XContentBuilderString SEGMENTS = new XContentBuilderString("segments");
static final XContentBuilderString COUNT = new XContentBuilderString("count");
static final XContentBuilderString MEMORY = new XContentBuilderString("memory");
static final XContentBuilderString MEMORY_IN_BYTES = new XContentBuilderString("memory_in_bytes");
}
@Override
public void readFrom(StreamInput in) throws IOException {
count = in.readVLong();
memoryInBytes = in.readLong();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeVLong(count);
out.writeLong(memoryInBytes);
}
}
| 1 (no label) | src_main_java_org_elasticsearch_index_engine_SegmentsStats.java |
| 214 |
interface SocketChannelWrapperFactory {
SocketChannelWrapper wrapSocketChannel(SocketChannel socketChannel, boolean client) throws Exception;
}
| 0 (true) | hazelcast-client_src_main_java_com_hazelcast_client_connection_nio_ClientConnectionManagerImpl.java |
| 173 |
public class SocketSimpleClient implements SimpleClient {
private final Node node;
final Socket socket = new Socket();
final ObjectDataInputStream in;
final ObjectDataOutputStream out;
public SocketSimpleClient(Node node) throws IOException {
this.node = node;
socket.connect(node.address.getInetSocketAddress());
OutputStream outputStream = socket.getOutputStream();
outputStream.write(Protocols.CLIENT_BINARY.getBytes());
outputStream.write(ClientTypes.JAVA.getBytes());
outputStream.flush();
SerializationService ss = getSerializationService();
in = ss.createObjectDataInputStream(new BufferedInputStream(socket.getInputStream()));
out = ss.createObjectDataOutputStream(new BufferedOutputStream(outputStream));
}
public void auth() throws IOException {
AuthenticationRequest auth = new AuthenticationRequest(new UsernamePasswordCredentials("dev", "dev-pass"));
send(auth);
receive();
}
public void send(Object o) throws IOException {
final Data data = getSerializationService().toData(o);
data.writeData(out);
out.flush();
}
public Object receive() throws IOException {
Data responseData = new Data();
responseData.readData(in);
ClientResponse clientResponse = getSerializationService().toObject(responseData);
return getSerializationService().toObject(clientResponse.getResponse());
}
public void close() throws IOException {
socket.close();
}
@Override
public SerializationService getSerializationService() {
return node.getSerializationService();
}
}
| 0 (true) | hazelcast_src_test_java_com_hazelcast_client_SocketSimpleClient.java |
| 106 |
class CreateEnumProposal extends CorrectionProposal {
CreateEnumProposal(String def, String desc, Image image,
int offset, TextFileChange change) {
super(desc, change, new Region(offset, 0), image);
}
static void addCreateEnumProposal(Tree.CompilationUnit cu, Node node,
ProblemLocation problem, Collection<ICompletionProposal> proposals,
IProject project, TypeChecker tc, IFile file) {
Node idn = Nodes.getIdentifyingNode(node);
if (idn==null) return;
String brokenName = idn.getText();
if (brokenName.isEmpty()) return;
Tree.Declaration dec = findDeclaration(cu, node);
if (dec instanceof Tree.ClassDefinition) {
Tree.ClassDefinition cd = (Tree.ClassDefinition) dec;
if (cd.getCaseTypes()!=null) {
if (cd.getCaseTypes().getTypes().contains(node)) {
addCreateEnumProposal(proposals, project,
"class " + brokenName + parameters(cd.getTypeParameterList()) +
parameters(cd.getParameterList()) +
" extends " + cd.getDeclarationModel().getName() +
parameters(cd.getTypeParameterList()) +
arguments(cd.getParameterList()) + " {}",
"class '"+ brokenName + parameters(cd.getTypeParameterList()) +
parameters(cd.getParameterList()) + "'",
CeylonLabelProvider.CLASS, cu, cd);
}
if (cd.getCaseTypes().getBaseMemberExpressions().contains(node)) {
addCreateEnumProposal(proposals, project,
"object " + brokenName +
" extends " + cd.getDeclarationModel().getName() +
parameters(cd.getTypeParameterList()) +
arguments(cd.getParameterList()) + " {}",
"object '"+ brokenName + "'",
ATTRIBUTE, cu, cd);
}
}
}
if (dec instanceof Tree.InterfaceDefinition) {
Tree.InterfaceDefinition cd = (Tree.InterfaceDefinition) dec;
if (cd.getCaseTypes()!=null) {
if (cd.getCaseTypes().getTypes().contains(node)) {
addCreateEnumProposal(proposals, project,
"interface " + brokenName + parameters(cd.getTypeParameterList()) +
" satisfies " + cd.getDeclarationModel().getName() +
parameters(cd.getTypeParameterList()) + " {}",
"interface '"+ brokenName + parameters(cd.getTypeParameterList()) + "'",
INTERFACE, cu, cd);
}
if (cd.getCaseTypes().getBaseMemberExpressions().contains(node)) {
addCreateEnumProposal(proposals, project,
"object " + brokenName +
" satisfies " + cd.getDeclarationModel().getName() +
parameters(cd.getTypeParameterList()) + " {}",
"object '"+ brokenName + "'",
ATTRIBUTE, cu, cd);
}
}
}
}
private static void addCreateEnumProposal(Collection<ICompletionProposal> proposals,
String def, String desc, Image image, PhasedUnit unit,
Tree.Statement statement) {
IFile file = getFile(unit);
TextFileChange change = new TextFileChange("Create Enumerated", file);
IDocument doc = EditorUtil.getDocument(change);
String indent = getIndent(statement, doc);
String s = indent + def + Indents.getDefaultLineDelimiter(doc);
int offset = statement.getStopIndex()+2;
if (offset>doc.getLength()) {
offset = doc.getLength();
s = Indents.getDefaultLineDelimiter(doc) + s;
}
change.setEdit(new InsertEdit(offset, s));
proposals.add(new CreateEnumProposal(def,
"Create enumerated " + desc,
image, offset + def.indexOf("{}")+1, change));
}
private static void addCreateEnumProposal(Collection<ICompletionProposal> proposals,
IProject project, String def, String desc, Image image,
Tree.CompilationUnit cu, Tree.TypeDeclaration cd) {
for (PhasedUnit unit: getUnits(project)) {
if (unit.getUnit().equals(cu.getUnit())) {
addCreateEnumProposal(proposals, def, desc, image, unit, cd);
break;
}
}
}
private static String parameters(Tree.ParameterList pl) {
StringBuilder result = new StringBuilder();
if (pl==null ||
pl.getParameters().isEmpty()) {
result.append("()");
}
else {
result.append("(");
int len = pl.getParameters().size(), i=0;
for (Tree.Parameter p: pl.getParameters()) {
if (p!=null) {
if (p instanceof Tree.ParameterDeclaration) {
Tree.TypedDeclaration td =
((Tree.ParameterDeclaration) p).getTypedDeclaration();
result.append(td.getType().getTypeModel().getProducedTypeName())
.append(" ")
.append(td.getIdentifier().getText());
}
else if (p instanceof Tree.InitializerParameter) {
result.append(p.getParameterModel().getType().getProducedTypeName())
.append(" ")
.append(((Tree.InitializerParameter) p).getIdentifier().getText());
}
//TODO: easy to add back in:
/*if (p instanceof Tree.FunctionalParameterDeclaration) {
Tree.FunctionalParameterDeclaration fp = (Tree.FunctionalParameterDeclaration) p;
for (Tree.ParameterList ipl: fp.getParameterLists()) {
parameters(ipl, label);
}
}*/
}
if (++i<len) result.append(", ");
}
result.append(")");
}
return result.toString();
}
private static String parameters(Tree.TypeParameterList tpl) {
StringBuilder result = new StringBuilder();
if (tpl!=null &&
!tpl.getTypeParameterDeclarations().isEmpty()) {
result.append("<");
int len = tpl.getTypeParameterDeclarations().size(), i=0;
for (Tree.TypeParameterDeclaration p: tpl.getTypeParameterDeclarations()) {
result.append(p.getIdentifier().getText());
if (++i<len) result.append(", ");
}
result.append(">");
}
return result.toString();
}
private static String arguments(Tree.ParameterList pl) {
StringBuilder result = new StringBuilder();
if (pl==null ||
pl.getParameters().isEmpty()) {
result.append("()");
}
else {
result.append("(");
int len = pl.getParameters().size(), i=0;
for (Tree.Parameter p: pl.getParameters()) {
if (p!=null) {
Tree.Identifier id;
if (p instanceof Tree.InitializerParameter) {
id = ((Tree.InitializerParameter) p).getIdentifier();
}
else if (p instanceof Tree.ParameterDeclaration) {
id = ((Tree.ParameterDeclaration) p).getTypedDeclaration().getIdentifier();
}
else {
continue;
}
result.append(id.getText());
//TODO: easy to add back in:
/*if (p instanceof Tree.FunctionalParameterDeclaration) {
Tree.FunctionalParameterDeclaration fp = (Tree.FunctionalParameterDeclaration) p;
for (Tree.ParameterList ipl: fp.getParameterLists()) {
parameters(ipl, label);
}
}*/
}
if (++i<len) result.append(", ");
}
result.append(")");
}
return result.toString();
}
}
| 0 (true) | plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_correct_CreateEnumProposal.java |
| 863 |
public class TransportSearchDfsQueryThenFetchAction extends TransportSearchTypeAction {
@Inject
public TransportSearchDfsQueryThenFetchAction(Settings settings, ThreadPool threadPool, ClusterService clusterService,
SearchServiceTransportAction searchService, SearchPhaseController searchPhaseController) {
super(settings, threadPool, clusterService, searchService, searchPhaseController);
}
@Override
protected void doExecute(SearchRequest searchRequest, ActionListener<SearchResponse> listener) {
new AsyncAction(searchRequest, listener).start();
}
private class AsyncAction extends BaseAsyncAction<DfsSearchResult> {
final AtomicArray<QuerySearchResult> queryResults;
final AtomicArray<FetchSearchResult> fetchResults;
final AtomicArray<IntArrayList> docIdsToLoad;
private AsyncAction(SearchRequest request, ActionListener<SearchResponse> listener) {
super(request, listener);
queryResults = new AtomicArray<QuerySearchResult>(firstResults.length());
fetchResults = new AtomicArray<FetchSearchResult>(firstResults.length());
docIdsToLoad = new AtomicArray<IntArrayList>(firstResults.length());
}
@Override
protected String firstPhaseName() {
return "dfs";
}
@Override
protected void sendExecuteFirstPhase(DiscoveryNode node, ShardSearchRequest request, SearchServiceListener<DfsSearchResult> listener) {
searchService.sendExecuteDfs(node, request, listener);
}
@Override
protected void moveToSecondPhase() {
final AggregatedDfs dfs = searchPhaseController.aggregateDfs(firstResults);
final AtomicInteger counter = new AtomicInteger(firstResults.asList().size());
int localOperations = 0;
for (final AtomicArray.Entry<DfsSearchResult> entry : firstResults.asList()) {
DfsSearchResult dfsResult = entry.value;
DiscoveryNode node = nodes.get(dfsResult.shardTarget().nodeId());
if (node.id().equals(nodes.localNodeId())) {
localOperations++;
} else {
QuerySearchRequest querySearchRequest = new QuerySearchRequest(request, dfsResult.id(), dfs);
executeQuery(entry.index, dfsResult, counter, querySearchRequest, node);
}
}
if (localOperations > 0) {
if (request.operationThreading() == SearchOperationThreading.SINGLE_THREAD) {
threadPool.executor(ThreadPool.Names.SEARCH).execute(new Runnable() {
@Override
public void run() {
for (final AtomicArray.Entry<DfsSearchResult> entry : firstResults.asList()) {
DfsSearchResult dfsResult = entry.value;
DiscoveryNode node = nodes.get(dfsResult.shardTarget().nodeId());
if (node.id().equals(nodes.localNodeId())) {
QuerySearchRequest querySearchRequest = new QuerySearchRequest(request, dfsResult.id(), dfs);
executeQuery(entry.index, dfsResult, counter, querySearchRequest, node);
}
}
}
});
} else {
boolean localAsync = request.operationThreading() == SearchOperationThreading.THREAD_PER_SHARD;
for (final AtomicArray.Entry<DfsSearchResult> entry : firstResults.asList()) {
final DfsSearchResult dfsResult = entry.value;
final DiscoveryNode node = nodes.get(dfsResult.shardTarget().nodeId());
if (node.id().equals(nodes.localNodeId())) {
final QuerySearchRequest querySearchRequest = new QuerySearchRequest(request, dfsResult.id(), dfs);
try {
if (localAsync) {
threadPool.executor(ThreadPool.Names.SEARCH).execute(new Runnable() {
@Override
public void run() {
executeQuery(entry.index, dfsResult, counter, querySearchRequest, node);
}
});
} else {
executeQuery(entry.index, dfsResult, counter, querySearchRequest, node);
}
} catch (Throwable t) {
onQueryFailure(t, querySearchRequest, entry.index, dfsResult, counter);
}
}
}
}
}
}
void executeQuery(final int shardIndex, final DfsSearchResult dfsResult, final AtomicInteger counter, final QuerySearchRequest querySearchRequest, DiscoveryNode node) {
searchService.sendExecuteQuery(node, querySearchRequest, new SearchServiceListener<QuerySearchResult>() {
@Override
public void onResult(QuerySearchResult result) {
result.shardTarget(dfsResult.shardTarget());
queryResults.set(shardIndex, result);
if (counter.decrementAndGet() == 0) {
executeFetchPhase();
}
}
@Override
public void onFailure(Throwable t) {
onQueryFailure(t, querySearchRequest, shardIndex, dfsResult, counter);
}
});
}
void onQueryFailure(Throwable t, QuerySearchRequest querySearchRequest, int shardIndex, DfsSearchResult dfsResult, AtomicInteger counter) {
if (logger.isDebugEnabled()) {
logger.debug("[{}] Failed to execute query phase", t, querySearchRequest.id());
}
this.addShardFailure(shardIndex, dfsResult.shardTarget(), t);
successulOps.decrementAndGet();
if (counter.decrementAndGet() == 0) {
executeFetchPhase();
}
}
void executeFetchPhase() {
try {
innerExecuteFetchPhase();
} catch (Throwable e) {
listener.onFailure(new ReduceSearchPhaseException("query", "", e, buildShardFailures()));
}
}
void innerExecuteFetchPhase() {
sortedShardList = searchPhaseController.sortDocs(queryResults);
searchPhaseController.fillDocIdsToLoad(docIdsToLoad, sortedShardList);
if (docIdsToLoad.asList().isEmpty()) {
finishHim();
return;
}
final AtomicInteger counter = new AtomicInteger(docIdsToLoad.asList().size());
int localOperations = 0;
for (final AtomicArray.Entry<IntArrayList> entry : docIdsToLoad.asList()) {
QuerySearchResult queryResult = queryResults.get(entry.index);
DiscoveryNode node = nodes.get(queryResult.shardTarget().nodeId());
if (node.id().equals(nodes.localNodeId())) {
localOperations++;
} else {
FetchSearchRequest fetchSearchRequest = new FetchSearchRequest(request, queryResult.id(), entry.value);
executeFetch(entry.index, queryResult.shardTarget(), counter, fetchSearchRequest, node);
}
}
if (localOperations > 0) {
if (request.operationThreading() == SearchOperationThreading.SINGLE_THREAD) {
threadPool.executor(ThreadPool.Names.SEARCH).execute(new Runnable() {
@Override
public void run() {
for (final AtomicArray.Entry<IntArrayList> entry : docIdsToLoad.asList()) {
QuerySearchResult queryResult = queryResults.get(entry.index);
DiscoveryNode node = nodes.get(queryResult.shardTarget().nodeId());
if (node.id().equals(nodes.localNodeId())) {
FetchSearchRequest fetchSearchRequest = new FetchSearchRequest(request, queryResult.id(), entry.value);
executeFetch(entry.index, queryResult.shardTarget(), counter, fetchSearchRequest, node);
}
}
}
});
} else {
boolean localAsync = request.operationThreading() == SearchOperationThreading.THREAD_PER_SHARD;
for (final AtomicArray.Entry<IntArrayList> entry : docIdsToLoad.asList()) {
final QuerySearchResult queryResult = queryResults.get(entry.index);
final DiscoveryNode node = nodes.get(queryResult.shardTarget().nodeId());
if (node.id().equals(nodes.localNodeId())) {
final FetchSearchRequest fetchSearchRequest = new FetchSearchRequest(request, queryResult.id(), entry.value);
try {
if (localAsync) {
threadPool.executor(ThreadPool.Names.SEARCH).execute(new Runnable() {
@Override
public void run() {
executeFetch(entry.index, queryResult.shardTarget(), counter, fetchSearchRequest, node);
}
});
} else {
executeFetch(entry.index, queryResult.shardTarget(), counter, fetchSearchRequest, node);
}
} catch (Throwable t) {
onFetchFailure(t, fetchSearchRequest, entry.index, queryResult.shardTarget(), counter);
}
}
}
}
}
}
void executeFetch(final int shardIndex, final SearchShardTarget shardTarget, final AtomicInteger counter, final FetchSearchRequest fetchSearchRequest, DiscoveryNode node) {
searchService.sendExecuteFetch(node, fetchSearchRequest, new SearchServiceListener<FetchSearchResult>() {
@Override
public void onResult(FetchSearchResult result) {
result.shardTarget(shardTarget);
fetchResults.set(shardIndex, result);
if (counter.decrementAndGet() == 0) {
finishHim();
}
}
@Override
public void onFailure(Throwable t) {
onFetchFailure(t, fetchSearchRequest, shardIndex, shardTarget, counter);
}
});
}
void onFetchFailure(Throwable t, FetchSearchRequest fetchSearchRequest, int shardIndex, SearchShardTarget shardTarget, AtomicInteger counter) {
if (logger.isDebugEnabled()) {
logger.debug("[{}] Failed to execute fetch phase", t, fetchSearchRequest.id());
}
this.addShardFailure(shardIndex, shardTarget, t);
successulOps.decrementAndGet();
if (counter.decrementAndGet() == 0) {
finishHim();
}
}
void finishHim() {
try {
innerFinishHim();
} catch (Throwable e) {
ReduceSearchPhaseException failure = new ReduceSearchPhaseException("merge", "", e, buildShardFailures());
if (logger.isDebugEnabled()) {
logger.debug("failed to reduce search", failure);
}
listener.onFailure(failure);
} finally {
releaseIrrelevantSearchContexts(queryResults, docIdsToLoad);
}
}
void innerFinishHim() throws Exception {
final InternalSearchResponse internalResponse = searchPhaseController.merge(sortedShardList, queryResults, fetchResults);
String scrollId = null;
if (request.scroll() != null) {
scrollId = TransportSearchHelper.buildScrollId(request.searchType(), firstResults, null);
}
listener.onResponse(new SearchResponse(internalResponse, scrollId, expectedSuccessfulOps, successulOps.get(), buildTookInMillis(), buildShardFailures()));
}
}
}
| 0 (true) | src_main_java_org_elasticsearch_action_search_type_TransportSearchDfsQueryThenFetchAction.java |
| 801 |
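TransportSearchDfsQueryThenFetchAction above coordinates its phases with a shared AtomicInteger: every per-shard callback, success or failure, decrements it, and the callback that reaches zero starts the next phase exactly once. A minimal sketch of that countdown pattern outside Elasticsearch (all names illustrative):

import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicInteger;

// Sketch of the countdown pattern used above to move between search phases:
// each per-shard callback ends with the same decrement, and whichever thread
// brings the counter to zero triggers the next phase.
public final class PhaseCountdown {
    public static void main(String[] args) throws InterruptedException {
        int shards = 5;
        AtomicInteger counter = new AtomicInteger(shards);
        CountDownLatch done = new CountDownLatch(1);
        Runnable nextPhase = () -> {
            System.out.println("all shard responses in, starting fetch phase");
            done.countDown();
        };
        for (int i = 0; i < shards; i++) {
            final int shard = i;
            new Thread(() -> {
                // onResult and onFailure both funnel into this decrement
                System.out.println("shard " + shard + " answered");
                if (counter.decrementAndGet() == 0) {
                    nextPhase.run();
                }
            }).start();
        }
        done.await();
    }
}

Because failures decrement the same counter as successes, the transition cannot be lost or run twice, regardless of whether callbacks arrive on network threads or inline.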
public class AlterRequest extends AbstractAlterRequest {
public AlterRequest() {
}
public AlterRequest(String name, Data function) {
super(name, function);
}
@Override
protected Operation prepareOperation() {
return new AlterOperation(name, getFunction());
}
@Override
public int getClassId() {
return AtomicLongPortableHook.ALTER;
}
}
| 0 (true) | hazelcast_src_main_java_com_hazelcast_concurrent_atomiclong_client_AlterRequest.java |
| 764 |
class TransportHandler extends BaseTransportRequestHandler<MultiGetRequest> {
@Override
public MultiGetRequest newInstance() {
return new MultiGetRequest();
}
@Override
public void messageReceived(final MultiGetRequest request, final TransportChannel channel) throws Exception {
// no need to use threaded listener, since we just send a response
request.listenerThreaded(false);
execute(request, new ActionListener<MultiGetResponse>() {
@Override
public void onResponse(MultiGetResponse response) {
try {
channel.sendResponse(response);
} catch (Throwable e) {
onFailure(e);
}
}
@Override
public void onFailure(Throwable e) {
try {
channel.sendResponse(e);
} catch (Exception e1) {
logger.warn("Failed to send error response for action [" + MultiGetAction.NAME + "] and request [" + request + "]", e1);
}
}
});
}
@Override
public String executor() {
return ThreadPool.Names.SAME;
}
}
| 0 (true) | src_main_java_org_elasticsearch_action_get_TransportMultiGetAction.java |
| 639 |
public enum Stage {
INIT((byte) 0),
INDEX((byte) 1),
TRANSLOG((byte) 2),
FINALIZE((byte) 3),
DONE((byte) 4);
private final byte value;
Stage(byte value) {
this.value = value;
}
public byte value() {
return value;
}
public static Stage fromValue(byte value) {
if (value == 0) {
return INIT;
} else if (value == 1) {
return INDEX;
} else if (value == 2) {
return TRANSLOG;
} else if (value == 3) {
return FINALIZE;
} else if (value == 4) {
return DONE;
}
throw new ElasticsearchIllegalArgumentException("No stage found for [" + value + ']');
}
}
| 0 (true) | src_main_java_org_elasticsearch_action_admin_indices_status_PeerRecoveryStatus.java |
| 215 |
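Stage.fromValue above maps a byte code back to the enum through an if/else chain. Assuming, as the declarations suggest, that the codes are the contiguous ordinals 0..4, an equivalent lookup can index into values(); this is an alternative sketch, not the Elasticsearch implementation:

// Sketch of an equivalent byte-to-enum lookup, assuming the byte codes are
// the contiguous ordinals 0..4 as declared above.
public enum StageSketch {
    INIT, INDEX, TRANSLOG, FINALIZE, DONE;

    public static StageSketch fromValue(byte value) {
        StageSketch[] all = values();
        if (value < 0 || value >= all.length) {
            throw new IllegalArgumentException("No stage found for [" + value + ']');
        }
        return all[value];
    }

    public static void main(String[] args) {
        System.out.println(fromValue((byte) 2)); // TRANSLOG
    }
}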
XPostingsHighlighter highlighter2 = new XPostingsHighlighter(Integer.MAX_VALUE - 1) {
@Override
protected char getMultiValuedSeparator(String field) {
return HighlightUtils.PARAGRAPH_SEPARATOR;
}
@Override
protected PassageFormatter getFormatter(String field) {
return new CustomPassageFormatter("<b>", "</b>", new DefaultEncoder());
}
};
| 0 (true) | src_test_java_org_apache_lucene_search_postingshighlight_CustomPostingsHighlighterTests.java |
| 255 |
public class EmailTargetImpl implements EmailTarget {
private static final long serialVersionUID = 1L;
protected String[] bccAddresses;
protected String[] ccAddresses;
protected String emailAddress;
/*
* (non-Javadoc)
* @see org.broadleafcommerce.common.email.domain.EmailTarget#getBCCAddresses()
*/
public String[] getBCCAddresses() {
return bccAddresses;
}
/*
* (non-Javadoc)
* @see org.broadleafcommerce.common.email.domain.EmailTarget#getCCAddresses()
*/
public String[] getCCAddresses() {
return ccAddresses;
}
/*
* (non-Javadoc)
* @see org.broadleafcommerce.common.email.domain.EmailTarget#getEmailAddress()
*/
public String getEmailAddress() {
return emailAddress;
}
/*
* (non-Javadoc)
* @see
* org.broadleafcommerce.common.email.domain.EmailTarget#setBCCAddresses(java.lang
* .String[])
*/
public void setBCCAddresses(String[] bccAddresses) {
this.bccAddresses = bccAddresses;
}
/*
* (non-Javadoc)
* @see
* org.broadleafcommerce.common.email.domain.EmailTarget#setCCAddresses(java.lang
* .String[])
*/
public void setCCAddresses(String[] ccAddresses) {
this.ccAddresses = ccAddresses;
}
/*
* (non-Javadoc)
* @see
* org.broadleafcommerce.common.email.domain.EmailTarget#setEmailAddress(java.lang
* .String)
*/
public void setEmailAddress(String emailAddress) {
this.emailAddress = emailAddress;
}
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + Arrays.hashCode(bccAddresses);
result = prime * result + Arrays.hashCode(ccAddresses);
result = prime * result + ((emailAddress == null) ? 0 : emailAddress.hashCode());
return result;
}
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
EmailTargetImpl other = (EmailTargetImpl) obj;
if (!Arrays.equals(bccAddresses, other.bccAddresses))
return false;
if (!Arrays.equals(ccAddresses, other.ccAddresses))
return false;
if (emailAddress == null) {
if (other.emailAddress != null)
return false;
} else if (!emailAddress.equals(other.emailAddress))
return false;
return true;
}
}
| 1 (no label) | common_src_main_java_org_broadleafcommerce_common_email_domain_EmailTargetImpl.java |
| 409 |
public class ClientAtomicReferenceProxy<E> extends ClientProxy implements IAtomicReference<E> {
private final String name;
private volatile Data key;
public ClientAtomicReferenceProxy(String instanceName, String serviceName, String objectId) {
super(instanceName, serviceName, objectId);
this.name = objectId;
}
@Override
public <R> R apply(IFunction<E, R> function) {
isNotNull(function, "function");
return invoke(new ApplyRequest(name, toData(function)));
}
@Override
public void alter(IFunction<E, E> function) {
isNotNull(function, "function");
invoke(new AlterRequest(name, toData(function)));
}
@Override
public E alterAndGet(IFunction<E, E> function) {
isNotNull(function, "function");
return invoke(new AlterAndGetRequest(name, toData(function)));
}
@Override
public E getAndAlter(IFunction<E, E> function) {
isNotNull(function, "function");
return invoke(new GetAndAlterRequest(name, toData(function)));
}
@Override
public boolean compareAndSet(E expect, E update) {
return (Boolean) invoke(new CompareAndSetRequest(name, toData(expect), toData(update)));
}
@Override
public boolean contains(E expected) {
return (Boolean) invoke(new ContainsRequest(name, toData(expected)));
}
@Override
public E get() {
return invoke(new GetRequest(name));
}
@Override
public void set(E newValue) {
invoke(new SetRequest(name, toData(newValue)));
}
@Override
public void clear() {
set(null);
}
@Override
public E getAndSet(E newValue) {
return invoke(new GetAndSetRequest(name, toData(newValue)));
}
@Override
public E setAndGet(E update) {
invoke(new SetRequest(name, toData(update)));
return update;
}
@Override
public boolean isNull() {
return (Boolean) invoke(new IsNullRequest(name));
}
@Override
protected void onDestroy() {
}
protected <T> T invoke(ClientRequest req) {
return super.invoke(req, getKey());
}
private Data getKey() {
if (key == null) {
key = toData(name);
}
return key;
}
@Override
public String toString() {
return "IAtomicReference{" + "name='" + name + '\'' + '}';
}
}
| 1 (no label) | hazelcast-client_src_main_java_com_hazelcast_client_proxy_ClientAtomicReferenceProxy.java |
| 1,224 |
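ClientAtomicReferenceProxy.getKey() above caches the serialized key lazily in a volatile field; the unguarded null check is safe because toData(name) is deterministic, so two racing threads can at worst compute the same value twice. A generic sketch of that benign-race caching idiom (hypothetical names, not the Hazelcast API):

// Sketch of lazy, racy-but-benign caching: the field is volatile, the
// computation is deterministic, and duplicate work under a race is harmless.
public final class LazyKeyCache {
    private final String name;
    private volatile byte[] key;

    public LazyKeyCache(String name) {
        this.name = name;
    }

    byte[] getKey() {
        byte[] k = key;               // single volatile read
        if (k == null) {
            k = serialize(name);      // deterministic, so recomputation is safe
            key = k;
        }
        return k;
    }

    private static byte[] serialize(String s) {
        return s.getBytes(java.nio.charset.StandardCharsets.UTF_8);
    }

    public static void main(String[] args) {
        System.out.println(new LazyKeyCache("counter").getKey().length); // 7
    }
}

The trade-off versus double-checked locking is simplicity: no monitor is needed because callers never observe a partially built value, only null or a fully computed array.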
doublePage = build(type, maxCount(limit, BigArrays.DOUBLE_PAGE_SIZE, doublesWeight, totalWeight), searchThreadPoolSize, availableProcessors, new Recycler.C<double[]>() {
@Override
public double[] newInstance(int sizing) {
return new double[BigArrays.DOUBLE_PAGE_SIZE];
}
@Override
public void clear(double[] value) {}
});
| 0 (true) | src_main_java_org_elasticsearch_cache_recycler_PageCacheRecycler.java |
| 1,039 |
public class GetTermVectorTests extends AbstractTermVectorTests {
@Test
public void testNoSuchDoc() throws Exception {
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type1")
.startObject("properties")
.startObject("field")
.field("type", "string")
.field("term_vector", "with_positions_offsets_payloads")
.endObject()
.endObject()
.endObject().endObject();
ElasticsearchAssertions.assertAcked(prepareCreate("test").addMapping("type1", mapping));
ensureYellow();
client().prepareIndex("test", "type1", "666").setSource("field", "foo bar").execute().actionGet();
refresh();
for (int i = 0; i < 20; i++) {
ActionFuture<TermVectorResponse> termVector = client().termVector(new TermVectorRequest("test", "type1", "" + i));
TermVectorResponse actionGet = termVector.actionGet();
assertThat(actionGet, Matchers.notNullValue());
assertThat(actionGet.isExists(), Matchers.equalTo(false));
}
}
@Test
public void testExistingFieldWithNoTermVectorsNoNPE() throws Exception {
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type1")
.startObject("properties")
.startObject("existingfield")
.field("type", "string")
.field("term_vector", "with_positions_offsets_payloads")
.endObject()
.endObject()
.endObject().endObject();
ElasticsearchAssertions.assertAcked(prepareCreate("test").addMapping("type1", mapping));
ensureYellow();
// when indexing a field that simply has a question mark, the term
// vectors will be null
client().prepareIndex("test", "type1", "0").setSource("existingfield", "?").execute().actionGet();
refresh();
String[] selectedFields = { "existingfield" };
ActionFuture<TermVectorResponse> termVector = client().termVector(
new TermVectorRequest("test", "type1", "0").selectedFields(selectedFields));
// lets see if the null term vectors are caught...
termVector.actionGet();
TermVectorResponse actionGet = termVector.actionGet();
assertThat(actionGet.isExists(), Matchers.equalTo(true));
}
@Test
public void testExistingFieldButNotInDocNPE() throws Exception {
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type1")
.startObject("properties")
.startObject("existingfield")
.field("type", "string")
.field("term_vector", "with_positions_offsets_payloads")
.endObject()
.endObject()
.endObject().endObject();
ElasticsearchAssertions.assertAcked(prepareCreate("test").addMapping("type1", mapping));
ensureYellow();
// when indexing a field that simply has a question mark, the term
// vectors will be null
client().prepareIndex("test", "type1", "0").setSource("anotherexistingfield", 1).execute().actionGet();
refresh();
String[] selectedFields = { "existingfield" };
ActionFuture<TermVectorResponse> termVector = client().termVector(
new TermVectorRequest("test", "type1", "0").selectedFields(selectedFields));
// lets see if the null term vectors are caught...
TermVectorResponse actionGet = termVector.actionGet();
assertThat(actionGet.isExists(), Matchers.equalTo(true));
}
@Test
public void testSimpleTermVectors() throws ElasticsearchException, IOException {
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type1")
.startObject("properties")
.startObject("field")
.field("type", "string")
.field("term_vector", "with_positions_offsets_payloads")
.field("analyzer", "tv_test")
.endObject()
.endObject()
.endObject().endObject();
ElasticsearchAssertions.assertAcked(prepareCreate("test").addMapping("type1", mapping)
.setSettings(ImmutableSettings.settingsBuilder()
.put("index.analysis.analyzer.tv_test.tokenizer", "whitespace")
.putArray("index.analysis.analyzer.tv_test.filter", "type_as_payload", "lowercase")));
ensureYellow();
for (int i = 0; i < 10; i++) {
client().prepareIndex("test", "type1", Integer.toString(i))
.setSource(XContentFactory.jsonBuilder().startObject().field("field", "the quick brown fox jumps over the lazy dog")
// 0the3 4quick9 10brown15 16fox19 20jumps25 26over30
// 31the34 35lazy39 40dog43
.endObject()).execute().actionGet();
refresh();
}
String[] values = {"brown", "dog", "fox", "jumps", "lazy", "over", "quick", "the"};
int[] freq = {1, 1, 1, 1, 1, 1, 1, 2};
int[][] pos = {{2}, {8}, {3}, {4}, {7}, {5}, {1}, {0, 6}};
int[][] startOffset = {{10}, {40}, {16}, {20}, {35}, {26}, {4}, {0, 31}};
int[][] endOffset = {{15}, {43}, {19}, {25}, {39}, {30}, {9}, {3, 34}};
for (int i = 0; i < 10; i++) {
TermVectorRequestBuilder resp = client().prepareTermVector("test", "type1", Integer.toString(i)).setPayloads(true)
.setOffsets(true).setPositions(true).setSelectedFields();
TermVectorResponse response = resp.execute().actionGet();
assertThat("doc id: " + i + " doesn't exists but should", response.isExists(), equalTo(true));
Fields fields = response.getFields();
assertThat(fields.size(), equalTo(1));
Terms terms = fields.terms("field");
assertThat(terms.size(), equalTo(8l));
TermsEnum iterator = terms.iterator(null);
for (int j = 0; j < values.length; j++) {
String string = values[j];
BytesRef next = iterator.next();
assertThat(next, Matchers.notNullValue());
assertThat("expected " + string, string, equalTo(next.utf8ToString()));
assertThat(next, Matchers.notNullValue());
// do not test ttf or doc frequency, because here we have many
// shards and do not know how documents are distributed
DocsAndPositionsEnum docsAndPositions = iterator.docsAndPositions(null, null);
assertThat(docsAndPositions.nextDoc(), equalTo(0));
assertThat(freq[j], equalTo(docsAndPositions.freq()));
int[] termPos = pos[j];
int[] termStartOffset = startOffset[j];
int[] termEndOffset = endOffset[j];
assertThat(termPos.length, equalTo(freq[j]));
assertThat(termStartOffset.length, equalTo(freq[j]));
assertThat(termEndOffset.length, equalTo(freq[j]));
for (int k = 0; k < freq[j]; k++) {
int nextPosition = docsAndPositions.nextPosition();
assertThat("term: " + string, nextPosition, equalTo(termPos[k]));
assertThat("term: " + string, docsAndPositions.startOffset(), equalTo(termStartOffset[k]));
assertThat("term: " + string, docsAndPositions.endOffset(), equalTo(termEndOffset[k]));
assertThat("term: " + string, docsAndPositions.getPayload(), equalTo(new BytesRef("word")));
}
}
assertThat(iterator.next(), Matchers.nullValue());
}
}
@Test
public void testRandomSingleTermVectors() throws ElasticsearchException, IOException {
FieldType ft = new FieldType();
int config = randomInt(6);
boolean storePositions = false;
boolean storeOffsets = false;
boolean storePayloads = false;
boolean storeTermVectors = false;
switch (config) {
case 0: {
// do nothing
}
case 1: {
storeTermVectors = true;
}
case 2: {
storeTermVectors = true;
storePositions = true;
}
case 3: {
storeTermVectors = true;
storeOffsets = true;
}
case 4: {
storeTermVectors = true;
storePositions = true;
storeOffsets = true;
}
case 5: {
storeTermVectors = true;
storePositions = true;
storePayloads = true;
}
case 6: {
storeTermVectors = true;
storePositions = true;
storeOffsets = true;
storePayloads = true;
}
}
ft.setStoreTermVectors(storeTermVectors);
ft.setStoreTermVectorOffsets(storeOffsets);
ft.setStoreTermVectorPayloads(storePayloads);
ft.setStoreTermVectorPositions(storePositions);
String optionString = AbstractFieldMapper.termVectorOptionsToString(ft);
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type1")
.startObject("properties")
.startObject("field")
.field("type", "string")
.field("term_vector", optionString)
.field("analyzer", "tv_test")
.endObject()
.endObject()
.endObject().endObject();
ElasticsearchAssertions.assertAcked(prepareCreate("test").addMapping("type1", mapping)
.setSettings(ImmutableSettings.settingsBuilder()
.put("index.analysis.analyzer.tv_test.tokenizer", "whitespace")
.putArray("index.analysis.analyzer.tv_test.filter", "type_as_payload", "lowercase")));
ensureYellow();
for (int i = 0; i < 10; i++) {
client().prepareIndex("test", "type1", Integer.toString(i))
.setSource(XContentFactory.jsonBuilder().startObject().field("field", "the quick brown fox jumps over the lazy dog")
// 0the3 4quick9 10brown15 16fox19 20jumps25 26over30
// 31the34 35lazy39 40dog43
.endObject()).execute().actionGet();
refresh();
}
String[] values = {"brown", "dog", "fox", "jumps", "lazy", "over", "quick", "the"};
int[] freq = {1, 1, 1, 1, 1, 1, 1, 2};
int[][] pos = {{2}, {8}, {3}, {4}, {7}, {5}, {1}, {0, 6}};
int[][] startOffset = {{10}, {40}, {16}, {20}, {35}, {26}, {4}, {0, 31}};
int[][] endOffset = {{15}, {43}, {19}, {25}, {39}, {30}, {9}, {3, 34}};
boolean isPayloadRequested = randomBoolean();
boolean isOffsetRequested = randomBoolean();
boolean isPositionsRequested = randomBoolean();
String infoString = createInfoString(isPositionsRequested, isOffsetRequested, isPayloadRequested, optionString);
for (int i = 0; i < 10; i++) {
TermVectorRequestBuilder resp = client().prepareTermVector("test", "type1", Integer.toString(i))
.setPayloads(isPayloadRequested).setOffsets(isOffsetRequested).setPositions(isPositionsRequested).setSelectedFields();
TermVectorResponse response = resp.execute().actionGet();
assertThat(infoString + "doc id: " + i + " doesn't exists but should", response.isExists(), equalTo(true));
Fields fields = response.getFields();
assertThat(fields.size(), equalTo(ft.storeTermVectors() ? 1 : 0));
if (ft.storeTermVectors()) {
Terms terms = fields.terms("field");
assertThat(terms.size(), equalTo(8l));
TermsEnum iterator = terms.iterator(null);
for (int j = 0; j < values.length; j++) {
String string = values[j];
BytesRef next = iterator.next();
assertThat(infoString, next, Matchers.notNullValue());
assertThat(infoString + "expected " + string, string, equalTo(next.utf8ToString()));
assertThat(infoString, next, Matchers.notNullValue());
// do not test ttf or doc frequency, because here we have
// many shards and do not know how documents are distributed
DocsAndPositionsEnum docsAndPositions = iterator.docsAndPositions(null, null);
// docs and pos only returns something if positions or
                    // payloads or offsets are stored / requested. Otherwise use
// DocsEnum?
assertThat(infoString, docsAndPositions.nextDoc(), equalTo(0));
assertThat(infoString, freq[j], equalTo(docsAndPositions.freq()));
int[] termPos = pos[j];
int[] termStartOffset = startOffset[j];
int[] termEndOffset = endOffset[j];
if (isPositionsRequested && storePositions) {
assertThat(infoString, termPos.length, equalTo(freq[j]));
}
if (isOffsetRequested && storeOffsets) {
assertThat(termStartOffset.length, equalTo(freq[j]));
assertThat(termEndOffset.length, equalTo(freq[j]));
}
for (int k = 0; k < freq[j]; k++) {
int nextPosition = docsAndPositions.nextPosition();
// only return something useful if requested and stored
if (isPositionsRequested && storePositions) {
assertThat(infoString + "positions for term: " + string, nextPosition, equalTo(termPos[k]));
} else {
assertThat(infoString + "positions for term: ", nextPosition, equalTo(-1));
}
// only return something useful if requested and stored
if (isPayloadRequested && storePayloads) {
assertThat(infoString + "payloads for term: " + string, docsAndPositions.getPayload(), equalTo(new BytesRef(
"word")));
} else {
assertThat(infoString + "payloads for term: " + string, docsAndPositions.getPayload(), equalTo(null));
}
// only return something useful if requested and stored
if (isOffsetRequested && storeOffsets) {
assertThat(infoString + "startOffsets term: " + string, docsAndPositions.startOffset(),
equalTo(termStartOffset[k]));
assertThat(infoString + "endOffsets term: " + string, docsAndPositions.endOffset(), equalTo(termEndOffset[k]));
} else {
assertThat(infoString + "startOffsets term: " + string, docsAndPositions.startOffset(), equalTo(-1));
assertThat(infoString + "endOffsets term: " + string, docsAndPositions.endOffset(), equalTo(-1));
}
}
}
assertThat(iterator.next(), Matchers.nullValue());
}
}
}
private String createInfoString(boolean isPositionsRequested, boolean isOffsetRequested, boolean isPayloadRequested,
String optionString) {
String ret = "Store config: " + optionString + "\n" + "Requested: pos-"
+ (isPositionsRequested ? "yes" : "no") + ", offsets-" + (isOffsetRequested ? "yes" : "no") + ", payload- "
+ (isPayloadRequested ? "yes" : "no") + "\n";
return ret;
}
@Test
public void testDuelESLucene() throws Exception {
TestFieldSetting[] testFieldSettings = getFieldSettings();
createIndexBasedOnFieldSettings(testFieldSettings, -1);
TestDoc[] testDocs = generateTestDocs(5, testFieldSettings);
DirectoryReader directoryReader = indexDocsWithLucene(testDocs);
TestConfig[] testConfigs = generateTestConfigs(20, testDocs, testFieldSettings);
for (TestConfig test : testConfigs) {
try {
TermVectorRequestBuilder request = getRequestForConfig(test);
if (test.expectedException != null) {
assertThrows(request, test.expectedException);
continue;
}
TermVectorResponse response = request.get();
Fields luceneTermVectors = getTermVectorsFromLucene(directoryReader, test.doc);
validateResponse(response, luceneTermVectors, test);
} catch (Throwable t) {
throw new Exception("Test exception while running " + test.toString(), t);
}
}
}
@Test
public void testRandomPayloadWithDelimitedPayloadTokenFilter() throws ElasticsearchException, IOException {
//create the test document
int encoding = randomIntBetween(0, 2);
String encodingString = "";
if (encoding == 0) {
encodingString = "float";
}
if (encoding == 1) {
encodingString = "int";
}
if (encoding == 2) {
encodingString = "identity";
}
String[] tokens = crateRandomTokens();
Map<String, List<BytesRef>> payloads = createPayloads(tokens, encoding);
String delimiter = createRandomDelimiter(tokens);
String queryString = createString(tokens, payloads, encoding, delimiter.charAt(0));
//create the mapping
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
.startObject("field").field("type", "string").field("term_vector", "with_positions_offsets_payloads")
.field("analyzer", "payload_test").endObject().endObject().endObject().endObject();
ElasticsearchAssertions.assertAcked(prepareCreate("test").addMapping("type1", mapping).setSettings(
ImmutableSettings.settingsBuilder().put("index.analysis.analyzer.payload_test.tokenizer", "whitespace")
.putArray("index.analysis.analyzer.payload_test.filter", "my_delimited_payload_filter")
.put("index.analysis.filter.my_delimited_payload_filter.delimiter", delimiter)
.put("index.analysis.filter.my_delimited_payload_filter.encoding", encodingString)
.put("index.analysis.filter.my_delimited_payload_filter.type", "delimited_payload_filter")));
ensureYellow();
client().prepareIndex("test", "type1", Integer.toString(1))
.setSource(XContentFactory.jsonBuilder().startObject().field("field", queryString).endObject()).execute().actionGet();
refresh();
TermVectorRequestBuilder resp = client().prepareTermVector("test", "type1", Integer.toString(1)).setPayloads(true).setOffsets(true)
.setPositions(true).setSelectedFields();
TermVectorResponse response = resp.execute().actionGet();
assertThat("doc id 1 doesn't exists but should", response.isExists(), equalTo(true));
Fields fields = response.getFields();
assertThat(fields.size(), equalTo(1));
Terms terms = fields.terms("field");
TermsEnum iterator = terms.iterator(null);
while (iterator.next() != null) {
String term = iterator.term().utf8ToString();
DocsAndPositionsEnum docsAndPositions = iterator.docsAndPositions(null, null);
assertThat(docsAndPositions.nextDoc(), equalTo(0));
List<BytesRef> curPayloads = payloads.get(term);
assertThat(term, curPayloads, Matchers.notNullValue());
assertNotNull(docsAndPositions);
for (int k = 0; k < docsAndPositions.freq(); k++) {
docsAndPositions.nextPosition();
if (docsAndPositions.getPayload()!=null){
String infoString = "\nterm: " + term + " has payload \n"+ docsAndPositions.getPayload().toString() + "\n but should have payload \n"+curPayloads.get(k).toString();
assertThat(infoString, docsAndPositions.getPayload(), equalTo(curPayloads.get(k)));
} else {
String infoString = "\nterm: " + term + " has no payload but should have payload \n"+curPayloads.get(k).toString();
assertThat(infoString, curPayloads.get(k).length, equalTo(0));
}
}
}
assertThat(iterator.next(), Matchers.nullValue());
}
private String createRandomDelimiter(String[] tokens) {
String delimiter = "";
boolean isTokenOrWhitespace = true;
while(isTokenOrWhitespace) {
isTokenOrWhitespace = false;
delimiter = randomUnicodeOfLength(1);
for(String token:tokens) {
if(token.contains(delimiter)) {
isTokenOrWhitespace = true;
}
}
if(Character.isWhitespace(delimiter.charAt(0))) {
isTokenOrWhitespace = true;
}
}
return delimiter;
}
private String createString(String[] tokens, Map<String, List<BytesRef>> payloads, int encoding, char delimiter) {
String resultString = "";
ObjectIntOpenHashMap<String> payloadCounter = new ObjectIntOpenHashMap<String>();
for (String token : tokens) {
if (!payloadCounter.containsKey(token)) {
payloadCounter.putIfAbsent(token, 0);
} else {
payloadCounter.put(token, payloadCounter.get(token) + 1);
}
resultString = resultString + token;
BytesRef payload = payloads.get(token).get(payloadCounter.get(token));
if (payload.length > 0) {
resultString = resultString + delimiter;
switch (encoding) {
case 0: {
resultString = resultString + Float.toString(PayloadHelper.decodeFloat(payload.bytes, payload.offset));
break;
}
case 1: {
resultString = resultString + Integer.toString(PayloadHelper.decodeInt(payload.bytes, payload.offset));
break;
}
case 2: {
resultString = resultString + payload.utf8ToString();
break;
}
default: {
throw new ElasticsearchException("unsupported encoding type");
}
}
}
resultString = resultString + " ";
}
return resultString;
}
private Map<String, List<BytesRef>> createPayloads(String[] tokens, int encoding) {
Map<String, List<BytesRef>> payloads = new HashMap<String, List<BytesRef>>();
for (String token : tokens) {
if (payloads.get(token) == null) {
payloads.put(token, new ArrayList<BytesRef>());
}
boolean createPayload = randomBoolean();
if (createPayload) {
switch (encoding) {
case 0: {
float theFloat = randomFloat();
payloads.get(token).add(new BytesRef(PayloadHelper.encodeFloat(theFloat)));
break;
}
case 1: {
payloads.get(token).add(new BytesRef(PayloadHelper.encodeInt(randomInt())));
break;
}
case 2: {
String payload = randomUnicodeOfLengthBetween(50, 100);
for (int c = 0; c < payload.length(); c++) {
if (Character.isWhitespace(payload.charAt(c))) {
payload = payload.replace(payload.charAt(c), 'w');
}
}
payloads.get(token).add(new BytesRef(payload));
break;
}
default: {
throw new ElasticsearchException("unsupported encoding type");
}
}
} else {
payloads.get(token).add(new BytesRef());
}
}
return payloads;
}
private String[] crateRandomTokens() {
String[] tokens = { "the", "quick", "brown", "fox" };
int numTokensWithDuplicates = randomIntBetween(3, 15);
String[] finalTokens = new String[numTokensWithDuplicates];
for (int i = 0; i < numTokensWithDuplicates; i++) {
finalTokens[i] = tokens[randomIntBetween(0, tokens.length - 1)];
}
return finalTokens;
}
}
| 0 (true) | src_test_java_org_elasticsearch_action_termvector_GetTermVectorTests.java |
| 1,269 |
nodesService.execute(new TransportClientNodesService.NodeListenerCallback<Response>() {
@Override
public void doWithNode(DiscoveryNode node, ActionListener<Response> listener) throws ElasticsearchException {
proxy.execute(node, request, listener);
}
}, listener);
| 0 (true) | src_main_java_org_elasticsearch_client_transport_support_InternalTransportClient.java |
| 2,619 |
return AccessController.doPrivileged(new PrivilegedAction<Unsafe>() {
@Override
public Unsafe run() {
try {
Class<Unsafe> type = Unsafe.class;
try {
Field field = type.getDeclaredField("theUnsafe");
field.setAccessible(true);
return type.cast(field.get(type));
} catch (Exception e) {
for (Field field : type.getDeclaredFields()) {
if (type.isAssignableFrom(field.getType())) {
field.setAccessible(true);
return type.cast(field.get(type));
}
}
}
} catch (Exception e) {
throw new RuntimeException("Unsafe unavailable", e);
}
throw new RuntimeException("Unsafe unavailable");
}
});
| 1 (no label) | hazelcast_src_main_java_com_hazelcast_nio_UnsafeHelper.java |
| 101 |
public class TestManualAcquireLock extends AbstractNeo4jTestCase
{
private Worker worker;
@Before
public void doBefore() throws Exception
{
worker = new Worker();
}
@After
public void doAfter() throws Exception
{
worker.close();
}
@Test
public void releaseReleaseManually() throws Exception
{
String key = "name";
Node node = getGraphDb().createNode();
Transaction tx = newTransaction();
Worker worker = new Worker();
Lock nodeLock = tx.acquireWriteLock( node );
worker.beginTx();
try
{
worker.setProperty( node, key, "ksjd" );
fail( "Shouldn't be able to grab it" );
}
catch ( Exception e )
{
}
nodeLock.release();
worker.setProperty( node, key, "yo" );
worker.finishTx();
}
@Test
public void canOnlyReleaseOnce() throws Exception
{
Node node = getGraphDb().createNode();
Transaction tx = newTransaction();
Lock nodeLock = tx.acquireWriteLock( node );
nodeLock.release();
try
{
nodeLock.release();
fail( "Shouldn't be able to release more than once" );
}
catch ( IllegalStateException e )
{ // Good
}
}
@Test
public void makeSureNodeStaysLockedEvenAfterManualRelease() throws Exception
{
String key = "name";
Node node = getGraphDb().createNode();
Transaction tx = newTransaction();
Lock nodeLock = tx.acquireWriteLock( node );
node.setProperty( key, "value" );
nodeLock.release();
Worker worker = new Worker();
worker.beginTx();
try
{
worker.setProperty( node, key, "ksjd" );
fail( "Shouldn't be able to grab it" );
}
catch ( Exception e )
{
}
commit();
tx.success();
tx.finish();
worker.finishTx();
}
private class State
{
private final GraphDatabaseService graphDb;
private Transaction tx;
public State( GraphDatabaseService graphDb )
{
this.graphDb = graphDb;
}
}
private class Worker extends OtherThreadExecutor<State>
{
public Worker()
{
super( "other thread", new State( getGraphDb() ) );
}
void beginTx() throws Exception
{
execute( new WorkerCommand<State, Void>()
{
@Override
public Void doWork( State state )
{
state.tx = state.graphDb.beginTx();
return null;
}
} );
}
void finishTx() throws Exception
{
execute( new WorkerCommand<State, Void>()
{
@Override
public Void doWork( State state )
{
state.tx.success();
state.tx.finish();
return null;
}
} );
}
void setProperty( final Node node, final String key, final Object value ) throws Exception
{
execute( new WorkerCommand<State, Object>()
{
@Override
public Object doWork( State state )
{
node.setProperty( key, value );
return null;
}
}, 200, MILLISECONDS );
}
}
}
| 0 (true) | community_kernel_src_test_java_org_neo4j_kernel_impl_transaction_TestManualAcquireLock.java |
| 64 |
static final class KeyIterator<K,V> extends BaseIterator<K,V>
implements Iterator<K>, Enumeration<K> {
KeyIterator(Node<K,V>[] tab, int index, int size, int limit,
ConcurrentHashMapV8<K,V> map) {
super(tab, index, size, limit, map);
}
public final K next() {
Node<K,V> p;
if ((p = next) == null)
throw new NoSuchElementException();
K k = p.key;
lastReturned = p;
advance();
return k;
}
public final K nextElement() { return next(); }
}
| 0 (true) | src_main_java_jsr166e_ConcurrentHashMapV8.java |
| 464 |
public class IndicesAliasesRequest extends AcknowledgedRequest<IndicesAliasesRequest> {
private List<AliasActions> allAliasActions = Lists.newArrayList();
private IndicesOptions indicesOptions = IndicesOptions.fromOptions(false, false, true, false);
public IndicesAliasesRequest() {
}
/*
* Aliases can be added by passing multiple indices to the Request and
* deleted by passing multiple indices and aliases. They are expanded into
* distinct AliasAction instances when the request is processed. This class
     * holds the AliasAction and, in addition, the arrays of alias names and
     * indices that are later used to create the final AliasAction instances.
*/
public static class AliasActions {
private String[] indices = Strings.EMPTY_ARRAY;
private String[] aliases = Strings.EMPTY_ARRAY;
private AliasAction aliasAction;
public AliasActions(AliasAction.Type type, String[] indices, String[] aliases) {
aliasAction = new AliasAction(type);
indices(indices);
aliases(aliases);
}
public AliasActions(AliasAction.Type type, String index, String alias) {
aliasAction = new AliasAction(type);
indices(index);
aliases(alias);
}
AliasActions(AliasAction.Type type, String[] index, String alias) {
aliasAction = new AliasAction(type);
indices(index);
aliases(alias);
}
public AliasActions(AliasAction action) {
this.aliasAction = action;
indices(action.index());
aliases(action.alias());
}
public AliasActions(Type type, String index, String[] aliases) {
aliasAction = new AliasAction(type);
indices(index);
aliases(aliases);
}
public AliasActions() {
}
public AliasActions filter(Map<String, Object> filter) {
aliasAction.filter(filter);
return this;
}
public AliasActions filter(FilterBuilder filter) {
aliasAction.filter(filter);
return this;
}
public Type actionType() {
return aliasAction.actionType();
}
public void routing(String routing) {
aliasAction.routing(routing);
}
public void searchRouting(String searchRouting) {
aliasAction.searchRouting(searchRouting);
}
public void indexRouting(String indexRouting) {
aliasAction.indexRouting(indexRouting);
}
public AliasActions filter(String filter) {
aliasAction.filter(filter);
return this;
}
public void indices(String... indices) {
List<String> finalIndices = new ArrayList<String>();
for (String index : indices) {
if (index != null) {
finalIndices.add(index);
}
}
this.indices = finalIndices.toArray(new String[finalIndices.size()]);
}
public void aliases(String... aliases) {
this.aliases = aliases;
}
public String[] aliases() {
return aliases;
}
public String[] indices() {
return indices;
}
public AliasAction aliasAction() {
return aliasAction;
}
public String[] concreteAliases(MetaData metaData, String concreteIndex) {
if (aliasAction.actionType() == Type.REMOVE) {
//for DELETE we expand the aliases
String[] indexAsArray = {concreteIndex};
ImmutableOpenMap<String, ImmutableList<AliasMetaData>> aliasMetaData = metaData.findAliases(aliases, indexAsArray);
List<String> finalAliases = new ArrayList<String>();
for (ObjectCursor<ImmutableList<AliasMetaData>> curAliases : aliasMetaData.values()) {
for (AliasMetaData aliasMeta: curAliases.value) {
finalAliases.add(aliasMeta.alias());
}
}
return finalAliases.toArray(new String[finalAliases.size()]);
} else {
//for add we just return the current aliases
return aliases;
}
}
public AliasActions readFrom(StreamInput in) throws IOException {
indices = in.readStringArray();
aliases = in.readStringArray();
aliasAction = readAliasAction(in);
return this;
}
public void writeTo(StreamOutput out) throws IOException {
out.writeStringArray(indices);
out.writeStringArray(aliases);
this.aliasAction.writeTo(out);
}
}
/**
* Adds an alias to the index.
* @param alias The alias
* @param indices The indices
*/
public IndicesAliasesRequest addAlias(String alias, String... indices) {
addAliasAction(new AliasActions(AliasAction.Type.ADD, indices, alias));
return this;
}
public void addAliasAction(AliasActions aliasAction) {
allAliasActions.add(aliasAction);
}
public IndicesAliasesRequest addAliasAction(AliasAction action) {
addAliasAction(new AliasActions(action));
return this;
}
/**
* Adds an alias to the index.
* @param alias The alias
* @param filter The filter
* @param indices The indices
*/
public IndicesAliasesRequest addAlias(String alias, Map<String, Object> filter, String... indices) {
addAliasAction(new AliasActions(AliasAction.Type.ADD, indices, alias).filter(filter));
return this;
}
/**
* Adds an alias to the index.
* @param alias The alias
* @param filterBuilder The filter
* @param indices The indices
*/
public IndicesAliasesRequest addAlias(String alias, FilterBuilder filterBuilder, String... indices) {
addAliasAction(new AliasActions(AliasAction.Type.ADD, indices, alias).filter(filterBuilder));
return this;
}
/**
 * Removes an alias from the index.
*
* @param indices The indices
* @param aliases The aliases
*/
public IndicesAliasesRequest removeAlias(String[] indices, String... aliases) {
addAliasAction(new AliasActions(AliasAction.Type.REMOVE, indices, aliases));
return this;
}
/**
 * Removes an alias from the index.
*
* @param index The index
* @param aliases The aliases
*/
public IndicesAliasesRequest removeAlias(String index, String... aliases) {
addAliasAction(new AliasActions(AliasAction.Type.REMOVE, index, aliases));
return this;
}
List<AliasActions> aliasActions() {
return this.allAliasActions;
}
public List<AliasActions> getAliasActions() {
return aliasActions();
}
@Override
public ActionRequestValidationException validate() {
ActionRequestValidationException validationException = null;
if (allAliasActions.isEmpty()) {
return addValidationError("Must specify at least one alias action", validationException);
}
for (AliasActions aliasAction : allAliasActions) {
if (aliasAction.actionType() == AliasAction.Type.ADD) {
if (aliasAction.aliases.length != 1) {
validationException = addValidationError("Alias action [" + aliasAction.actionType().name().toLowerCase(Locale.ENGLISH)
+ "] requires exactly one [alias] to be set", validationException);
}
if (!Strings.hasText(aliasAction.aliases[0])) {
validationException = addValidationError("Alias action [" + aliasAction.actionType().name().toLowerCase(Locale.ENGLISH)
+ "] requires an [alias] to be set", validationException);
}
} else {
if (aliasAction.aliases.length == 0) {
validationException = addValidationError("Alias action [" + aliasAction.actionType().name().toLowerCase(Locale.ENGLISH)
+ "]: aliases may not be empty", validationException);
}
for (String alias : aliasAction.aliases) {
if (!Strings.hasText(alias)) {
validationException = addValidationError("Alias action [" + aliasAction.actionType().name().toLowerCase(Locale.ENGLISH)
+ "]: [alias] may not be empty string", validationException);
}
}
if (CollectionUtils.isEmpty(aliasAction.indices)) {
validationException = addValidationError("Alias action [" + aliasAction.actionType().name().toLowerCase(Locale.ENGLISH)
+ "]: indices may not be empty", validationException);
}
}
if (!CollectionUtils.isEmpty(aliasAction.indices)) {
for (String index : aliasAction.indices) {
if (!Strings.hasText(index)) {
validationException = addValidationError("Alias action [" + aliasAction.actionType().name().toLowerCase(Locale.ENGLISH)
+ "]: [index] may not be empty string", validationException);
}
}
}
}
return validationException;
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
int size = in.readVInt();
for (int i = 0; i < size; i++) {
allAliasActions.add(readAliasActions(in));
}
readTimeout(in);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeVInt(allAliasActions.size());
for (AliasActions aliasAction : allAliasActions) {
aliasAction.writeTo(out);
}
writeTimeout(out);
}
public IndicesOptions indicesOptions() {
return indicesOptions;
}
private AliasActions readAliasActions(StreamInput in) throws IOException {
AliasActions actions = new AliasActions();
return actions.readFrom(in);
}
}
| 0true
|
src_main_java_org_elasticsearch_action_admin_indices_alias_IndicesAliasesRequest.java
|
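Given the addAlias/removeAlias helpers and the validate() rules above, a combined request might be assembled as in the following sketch. The package is inferred from the file path in this row, and the alias names, index names, and filter contents are purely illustrative:
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest;
public class AliasRequestSketch {
    static IndicesAliasesRequest buildRequest() {
        IndicesAliasesRequest request = new IndicesAliasesRequest();
        // ADD actions take exactly one alias (validate() above enforces this) and one or more indices.
        request.addAlias("products-read", "products-2013", "products-2014");
        // A filtered alias; the map is handed through to the underlying AliasAction.
        Map<String, Object> filter = new HashMap<String, Object>();
        filter.put("term", Collections.singletonMap("active", true));
        request.addAlias("products-active", filter, "products-2014");
        // REMOVE actions may name several indices and several aliases at once.
        request.removeAlias(new String[] { "products-2012" }, "products-read");
        return request;
    }
}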
624 |
public class TransportIndicesStatsAction extends TransportBroadcastOperationAction<IndicesStatsRequest, IndicesStatsResponse, TransportIndicesStatsAction.IndexShardStatsRequest, ShardStats> {
private final IndicesService indicesService;
@Inject
public TransportIndicesStatsAction(Settings settings, ThreadPool threadPool, ClusterService clusterService, TransportService transportService,
IndicesService indicesService) {
super(settings, threadPool, clusterService, transportService);
this.indicesService = indicesService;
}
@Override
protected String executor() {
return ThreadPool.Names.MANAGEMENT;
}
@Override
protected String transportAction() {
return IndicesStatsAction.NAME;
}
@Override
protected IndicesStatsRequest newRequest() {
return new IndicesStatsRequest();
}
/**
* Status goes across *all* shards.
*/
@Override
protected GroupShardsIterator shards(ClusterState clusterState, IndicesStatsRequest request, String[] concreteIndices) {
return clusterState.routingTable().allAssignedShardsGrouped(concreteIndices, true);
}
@Override
protected ClusterBlockException checkGlobalBlock(ClusterState state, IndicesStatsRequest request) {
return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA);
}
@Override
protected ClusterBlockException checkRequestBlock(ClusterState state, IndicesStatsRequest request, String[] concreteIndices) {
return state.blocks().indicesBlockedException(ClusterBlockLevel.METADATA, concreteIndices);
}
@Override
protected IndicesStatsResponse newResponse(IndicesStatsRequest request, AtomicReferenceArray shardsResponses, ClusterState clusterState) {
int successfulShards = 0;
int failedShards = 0;
List<ShardOperationFailedException> shardFailures = null;
final List<ShardStats> shards = Lists.newArrayList();
for (int i = 0; i < shardsResponses.length(); i++) {
Object shardResponse = shardsResponses.get(i);
if (shardResponse == null) {
// simply ignore non-active shards
} else if (shardResponse instanceof BroadcastShardOperationFailedException) {
failedShards++;
if (shardFailures == null) {
shardFailures = newArrayList();
}
shardFailures.add(new DefaultShardOperationFailedException((BroadcastShardOperationFailedException) shardResponse));
} else {
shards.add((ShardStats) shardResponse);
successfulShards++;
}
}
return new IndicesStatsResponse(shards.toArray(new ShardStats[shards.size()]), clusterState, shardsResponses.length(), successfulShards, failedShards, shardFailures);
}
@Override
protected IndexShardStatsRequest newShardRequest() {
return new IndexShardStatsRequest();
}
@Override
protected IndexShardStatsRequest newShardRequest(ShardRouting shard, IndicesStatsRequest request) {
return new IndexShardStatsRequest(shard.index(), shard.id(), request);
}
@Override
protected ShardStats newShardResponse() {
return new ShardStats();
}
@Override
protected ShardStats shardOperation(IndexShardStatsRequest request) throws ElasticsearchException {
InternalIndexService indexService = (InternalIndexService) indicesService.indexServiceSafe(request.index());
InternalIndexShard indexShard = (InternalIndexShard) indexService.shardSafe(request.shardId());
CommonStatsFlags flags = new CommonStatsFlags().clear();
if (request.request.docs()) {
flags.set(CommonStatsFlags.Flag.Docs);
}
if (request.request.store()) {
flags.set(CommonStatsFlags.Flag.Store);
}
if (request.request.indexing()) {
flags.set(CommonStatsFlags.Flag.Indexing);
flags.types(request.request.types());
}
if (request.request.get()) {
flags.set(CommonStatsFlags.Flag.Get);
}
if (request.request.search()) {
flags.set(CommonStatsFlags.Flag.Search);
flags.groups(request.request.groups());
}
if (request.request.merge()) {
flags.set(CommonStatsFlags.Flag.Merge);
}
if (request.request.refresh()) {
flags.set(CommonStatsFlags.Flag.Refresh);
}
if (request.request.flush()) {
flags.set(CommonStatsFlags.Flag.Flush);
}
if (request.request.warmer()) {
flags.set(CommonStatsFlags.Flag.Warmer);
}
if (request.request.filterCache()) {
flags.set(CommonStatsFlags.Flag.FilterCache);
}
if (request.request.idCache()) {
flags.set(CommonStatsFlags.Flag.IdCache);
}
if (request.request.fieldData()) {
flags.set(CommonStatsFlags.Flag.FieldData);
flags.fieldDataFields(request.request.fieldDataFields());
}
if (request.request.percolate()) {
flags.set(CommonStatsFlags.Flag.Percolate);
}
if (request.request.segments()) {
flags.set(CommonStatsFlags.Flag.Segments);
}
if (request.request.completion()) {
flags.set(CommonStatsFlags.Flag.Completion);
flags.completionDataFields(request.request.completionFields());
}
if (request.request.translog()) {
flags.set(CommonStatsFlags.Flag.Translog);
}
return new ShardStats(indexShard, flags);
}
public static class IndexShardStatsRequest extends BroadcastShardOperationRequest {
// TODO if there are many indices, the request might hold a large indices array..., we don't really need to serialize it
IndicesStatsRequest request;
IndexShardStatsRequest() {
}
IndexShardStatsRequest(String index, int shardId, IndicesStatsRequest request) {
super(index, shardId, request);
this.request = request;
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
request = new IndicesStatsRequest();
request.readFrom(in);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
request.writeTo(out);
}
}
}
| 1no label
|
src_main_java_org_elasticsearch_action_admin_indices_stats_TransportIndicesStatsAction.java
|
145 |
public class AtomicDouble extends Number implements java.io.Serializable {
private static final long serialVersionUID = -8405198993435143622L;
private transient volatile long value;
/**
* Creates a new {@code AtomicDouble} with the given initial value.
*
* @param initialValue the initial value
*/
public AtomicDouble(double initialValue) {
value = doubleToRawLongBits(initialValue);
}
/**
* Creates a new {@code AtomicDouble} with initial value {@code 0.0}.
*/
public AtomicDouble() {
// assert doubleToRawLongBits(0.0) == 0L;
}
/**
* Gets the current value.
*
* @return the current value
*/
public final double get() {
return longBitsToDouble(value);
}
/**
* Sets to the given value.
*
* @param newValue the new value
*/
public final void set(double newValue) {
long next = doubleToRawLongBits(newValue);
value = next;
}
/**
* Eventually sets to the given value.
*
* @param newValue the new value
*/
public final void lazySet(double newValue) {
long next = doubleToRawLongBits(newValue);
unsafe.putOrderedLong(this, valueOffset, next);
}
/**
* Atomically sets to the given value and returns the old value.
*
* @param newValue the new value
* @return the previous value
*/
public final double getAndSet(double newValue) {
long next = doubleToRawLongBits(newValue);
while (true) {
long current = value;
if (unsafe.compareAndSwapLong(this, valueOffset, current, next))
return longBitsToDouble(current);
}
}
/**
* Atomically sets the value to the given updated value
* if the current value is <a href="#bitEquals">bitwise equal</a>
* to the expected value.
*
* @param expect the expected value
* @param update the new value
* @return {@code true} if successful. False return indicates that
* the actual value was not bitwise equal to the expected value.
*/
public final boolean compareAndSet(double expect, double update) {
return unsafe.compareAndSwapLong(this, valueOffset,
doubleToRawLongBits(expect),
doubleToRawLongBits(update));
}
/**
* Atomically sets the value to the given updated value
* if the current value is <a href="#bitEquals">bitwise equal</a>
* to the expected value.
*
* <p><a
* href="http://download.oracle.com/javase/7/docs/api/java/util/concurrent/atomic/package-summary.html#Spurious">
* May fail spuriously and does not provide ordering guarantees</a>,
* so is only rarely an appropriate alternative to {@code compareAndSet}.
*
* @param expect the expected value
* @param update the new value
* @return {@code true} if successful
*/
public final boolean weakCompareAndSet(double expect, double update) {
return compareAndSet(expect, update);
}
/**
* Atomically adds the given value to the current value.
*
* @param delta the value to add
* @return the previous value
*/
public final double getAndAdd(double delta) {
while (true) {
long current = value;
double currentVal = longBitsToDouble(current);
double nextVal = currentVal + delta;
long next = doubleToRawLongBits(nextVal);
if (unsafe.compareAndSwapLong(this, valueOffset, current, next))
return currentVal;
}
}
/**
* Atomically adds the given value to the current value.
*
* @param delta the value to add
* @return the updated value
*/
public final double addAndGet(double delta) {
while (true) {
long current = value;
double currentVal = longBitsToDouble(current);
double nextVal = currentVal + delta;
long next = doubleToRawLongBits(nextVal);
if (unsafe.compareAndSwapLong(this, valueOffset, current, next))
return nextVal;
}
}
/**
* Returns the String representation of the current value.
* @return the String representation of the current value
*/
public String toString() {
return Double.toString(get());
}
/**
* Returns the value of this {@code AtomicDouble} as an {@code int}
* after a narrowing primitive conversion.
*/
public int intValue() {
return (int) get();
}
/**
* Returns the value of this {@code AtomicDouble} as a {@code long}
* after a narrowing primitive conversion.
*/
public long longValue() {
return (long) get();
}
/**
* Returns the value of this {@code AtomicDouble} as a {@code float}
* after a narrowing primitive conversion.
*/
public float floatValue() {
return (float) get();
}
/**
* Returns the value of this {@code AtomicDouble} as a {@code double}.
*/
public double doubleValue() {
return get();
}
/**
* Saves the state to a stream (that is, serializes it).
*
* @param s the stream
* @throws java.io.IOException if an I/O error occurs
* @serialData The current value is emitted (a {@code double}).
*/
private void writeObject(java.io.ObjectOutputStream s)
throws java.io.IOException {
s.defaultWriteObject();
s.writeDouble(get());
}
/**
* Reconstitutes the instance from a stream (that is, deserializes it).
* @param s the stream
* @throws ClassNotFoundException if the class of a serialized object
* could not be found
* @throws java.io.IOException if an I/O error occurs
*/
private void readObject(java.io.ObjectInputStream s)
throws java.io.IOException, ClassNotFoundException {
s.defaultReadObject();
set(s.readDouble());
}
// Unsafe mechanics
private static final sun.misc.Unsafe unsafe = getUnsafe();
private static final long valueOffset;
static {
try {
valueOffset = unsafe.objectFieldOffset
(AtomicDouble.class.getDeclaredField("value"));
} catch (Exception ex) { throw new Error(ex); }
}
/**
* Returns a sun.misc.Unsafe. Suitable for use in a 3rd party package.
* Replace with a simple call to Unsafe.getUnsafe when integrating
* into a jdk.
*
* @return a sun.misc.Unsafe
*/
private static sun.misc.Unsafe getUnsafe() {
try {
return sun.misc.Unsafe.getUnsafe();
} catch (SecurityException tryReflectionInstead) {}
try {
return java.security.AccessController.doPrivileged
(new java.security.PrivilegedExceptionAction<sun.misc.Unsafe>() {
public sun.misc.Unsafe run() throws Exception {
Class<sun.misc.Unsafe> k = sun.misc.Unsafe.class;
for (java.lang.reflect.Field f : k.getDeclaredFields()) {
f.setAccessible(true);
Object x = f.get(null);
if (k.isInstance(x))
return k.cast(x);
}
throw new NoSuchFieldError("the Unsafe");
}});
} catch (java.security.PrivilegedActionException e) {
throw new RuntimeException("Could not initialize intrinsics",
e.getCause());
}
}
}
| 0true
|
src_main_java_jsr166e_extra_AtomicDouble.java
|
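The compareAndSet documentation above stresses that equality is on the raw double bits rather than numeric ==. A minimal sketch of the usual CAS retry-loop idiom built only on the methods shown; the package is inferred from the file path, and the class and method names here are illustrative:
import jsr166e.extra.AtomicDouble;
public class AtomicDoubleSketch {
    // Atomically scales the stored value by a factor using a read/compute/CAS loop.
    static double scale(AtomicDouble d, double factor) {
        for (;;) {
            double current = d.get();
            double next = current * factor;
            if (d.compareAndSet(current, next)) {
                return next;
            }
            // CAS failed because another thread changed the bits; re-read and retry.
        }
    }
    public static void main(String[] args) {
        AtomicDouble total = new AtomicDouble(10.0);
        total.addAndGet(2.5);                  // total is now 12.5
        System.out.println(scale(total, 2.0)); // prints 25.0 when uncontended
    }
}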
935 |
public class OfferItemRestrictionRuleType implements Serializable, BroadleafEnumerationType {
private static final long serialVersionUID = 1L;
private static final Map<String, OfferItemRestrictionRuleType> TYPES = new LinkedHashMap<String, OfferItemRestrictionRuleType>();
public static final OfferItemRestrictionRuleType NONE = new OfferItemRestrictionRuleType("NONE", "None");
public static final OfferItemRestrictionRuleType QUALIFIER = new OfferItemRestrictionRuleType("QUALIFIER", "Qualifier Only");
public static final OfferItemRestrictionRuleType TARGET = new OfferItemRestrictionRuleType("TARGET", "Target Only");
public static final OfferItemRestrictionRuleType QUALIFIER_TARGET = new OfferItemRestrictionRuleType("QUALIFIER_TARGET", "Qualifier And Target");
public static OfferItemRestrictionRuleType getInstance(final String type) {
return TYPES.get(type);
}
private String type;
private String friendlyType;
public OfferItemRestrictionRuleType() {
//do nothing
}
public OfferItemRestrictionRuleType(final String type, final String friendlyType) {
this.friendlyType = friendlyType;
setType(type);
}
public void setType(final String type) {
this.type = type;
if (!TYPES.containsKey(type)) {
TYPES.put(type, this);
}
}
public String getType() {
return type;
}
public String getFriendlyType() {
return friendlyType;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((type == null) ? 0 : type.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
OfferItemRestrictionRuleType other = (OfferItemRestrictionRuleType) obj;
if (type == null) {
if (other.type != null)
return false;
} else if (!type.equals(other.type))
return false;
return true;
}
}
| 1no label
|
core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_offer_service_type_OfferItemRestrictionRuleType.java
|
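The class above follows the Broadleaf extensible-enum pattern: each constructor call registers the instance in the static TYPES map, and getInstance resolves a stored type string back to the shared constant. A small sketch of that round trip using only the members shown; the package is inferred from the file path:
import org.broadleafcommerce.core.offer.service.type.OfferItemRestrictionRuleType;
public class RuleTypeSketch {
    public static void main(String[] args) {
        // The static constants were registered in TYPES during class initialization,
        // so a persisted type string resolves back to the same instance.
        OfferItemRestrictionRuleType fromDb =
                OfferItemRestrictionRuleType.getInstance("QUALIFIER_TARGET");
        System.out.println(fromDb.getFriendlyType());                                 // Qualifier And Target
        System.out.println(fromDb == OfferItemRestrictionRuleType.QUALIFIER_TARGET);  // true
    }
}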
13 |
result.add(new CompletionProposal(offset, "", null, funtext, funtext) {
@Override
public Point getSelection(IDocument document) {
return new Point(offset + text.indexOf("nothing"), 7);
}
@Override
public Image getImage() {
return CeylonResources.MINOR_CHANGE;
}
});
| 0true
|
plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_complete_CeylonCompletionProcessor.java
|
1,477 |
labels = OMultiValue.array(iParameters, String.class, new OCallable<Object, Object>() {
@Override
public Object call(final Object iArgument) {
return OStringSerializerHelper.getStringContent(iArgument);
}
});
| 1no label
|
graphdb_src_main_java_com_orientechnologies_orient_graph_sql_functions_OSQLFunctionMove.java
|
2,814 |
public final class CheckReplicaVersion extends Operation implements PartitionAwareOperation, MigrationCycleOperation {
private long version;
private boolean returnResponse;
private boolean response;
public CheckReplicaVersion() {
}
public CheckReplicaVersion(long version, boolean returnResponse) {
this.version = version;
this.returnResponse = returnResponse;
}
@Override
public void beforeRun() throws Exception {
}
@Override
public void run() throws Exception {
InternalPartitionServiceImpl partitionService = getService();
int partitionId = getPartitionId();
int replicaIndex = getReplicaIndex();
long[] currentVersions = partitionService.getPartitionReplicaVersions(partitionId);
long currentVersion = currentVersions[replicaIndex - 1];
if (currentVersion == version) {
response = true;
} else {
logBackupVersionMismatch(currentVersion);
partitionService.triggerPartitionReplicaSync(partitionId, replicaIndex);
response = false;
}
}
private void logBackupVersionMismatch(long currentVersion) {
ILogger logger = getLogger();
if (logger.isFinestEnabled()) {
logger.finest("Partition: " + getPartitionId() + " version is not matching to version of the owner -> "
+ currentVersion + " -vs- " + version);
}
}
@Override
public void afterRun() throws Exception {
}
@Override
public boolean returnsResponse() {
return returnResponse;
}
@Override
public Object getResponse() {
return response;
}
@Override
public boolean validatesTarget() {
return false;
}
@Override
public String getServiceName() {
return InternalPartitionService.SERVICE_NAME;
}
@Override
public void logError(Throwable e) {
ReplicaErrorLogger.log(e, getLogger());
}
@Override
protected void writeInternal(ObjectDataOutput out) throws IOException {
out.writeLong(version);
out.writeBoolean(returnResponse);
}
@Override
protected void readInternal(ObjectDataInput in) throws IOException {
version = in.readLong();
returnResponse = in.readBoolean();
}
@Override
public String toString() {
final StringBuilder sb = new StringBuilder();
sb.append("CheckReplicaVersion");
sb.append("{partition=").append(getPartitionId());
sb.append(", replica=").append(getReplicaIndex());
sb.append(", version=").append(version);
sb.append('}');
return sb.toString();
}
}
| 1no label
|
hazelcast_src_main_java_com_hazelcast_partition_impl_CheckReplicaVersion.java
|
1,442 |
public class GoogleAnalyticsTag extends SimpleTagSupport {
private static final Log LOG = LogFactory.getLog(GoogleAnalyticsTag.class);
@Value("${googleAnalytics.webPropertyId}")
private String webPropertyId;
private Order order;
public void setOrder(Order order) {
this.order = order;
}
public void setWebPropertyId(String webPropertyId) {
this.webPropertyId = webPropertyId;
}
@Override
public void doTag() throws JspException, IOException {
JspWriter out = getJspContext().getOut();
if (webPropertyId == null) {
ServletContext sc = ((PageContext) getJspContext()).getServletContext();
ApplicationContext context = WebApplicationContextUtils.getWebApplicationContext(sc);
context.getAutowireCapableBeanFactory().autowireBeanProperties(this, AutowireCapableBeanFactory.AUTOWIRE_BY_NAME, false);
}
if (webPropertyId.equals("UA-XXXXXXX-X")) {
LOG.warn("googleAnalytics.webPropertyId has not been overridden in a custom property file. Please set this in order to properly use the Google Analytics tag");
}
out.println(analytics(webPropertyId, order));
super.doTag();
}
/**
* Documentation for the recommended asynchronous GA tag is at:
* http://code.google.com/apis/analytics/docs/tracking/gaTrackingEcommerce.html
*
* @param webPropertyId - Google Analytics ID
* @param order - optionally track the order submission. This should be included on the
 * page after the order has been successfully submitted. If null, this will just track the current page
* @return the relevant Javascript to render on the page
*/
protected String analytics(String webPropertyId, Order order) {
StringBuffer sb = new StringBuffer();
sb.append("<script type=\"text/javascript\">");
sb.append("var _gaq = _gaq || [];");
sb.append("_gaq.push(['_setAccount', '" + webPropertyId + "']);");
sb.append("_gaq.push(['_trackPageview']);");
if (order != null) {
Address paymentAddress = order.getPaymentInfos().get(0).getAddress();
sb.append("_gaq.push(['_addTrans','" + order.getId() + "'");
sb.append(",'" + order.getName() + "'");
sb.append(",'" + order.getTotal() + "'");
sb.append(",'" + order.getTotalTax() + "'");
sb.append(",'" + order.getTotalShipping() + "'");
sb.append(",'" + paymentAddress.getCity() + "'");
sb.append(",'" + paymentAddress.getState().getName() + "'");
sb.append(",'" + paymentAddress.getCountry().getName() + "'");
sb.append("]);");
for (FulfillmentGroup fulfillmentGroup : order.getFulfillmentGroups()) {
for (FulfillmentGroupItem fulfillmentGroupItem : fulfillmentGroup.getFulfillmentGroupItems()) {
DiscreteOrderItem orderItem = (DiscreteOrderItem) fulfillmentGroupItem.getOrderItem();
sb.append("_gaq.push(['_addItem','" + order.getId() + "'");
sb.append(",'" + orderItem.getSku().getId() + "'");
sb.append(",'" + orderItem.getSku().getName() + "'");
sb.append(",' " + orderItem.getProduct().getDefaultCategory() + "'");
sb.append(",'" + orderItem.getPrice() + "'");
sb.append(",'" + orderItem.getQuantity() + "'");
sb.append("]);");
}
}
sb.append("_gaq.push(['_trackTrans']);");
}
sb.append(" (function() {"
+ "var ga = document.createElement('script'); ga.type = 'text/javascript'; ga.async = true;"
+ "ga.src = ('https:' == document.location.protocol ? 'https://ssl' : 'http://www') + '.google-analytics.com/ga.js';"
+ "var s = document.getElementsByTagName('script')[0]; s.parentNode.insertBefore(ga, s);"
+ "})();");
sb.append("</script>");
return sb.toString();
}
}
| 1no label
|
core_broadleaf-framework-web_src_main_java_org_broadleafcommerce_core_web_catalog_taglib_GoogleAnalyticsTag.java
|
383 |
public class OLazyRecordMultiIterator implements OLazyIterator<OIdentifiable>, OResettable {
final private ORecord<?> sourceRecord;
final private Object[] underlyingSources;
final private Object[] underlyingIterators;
final private boolean convertToRecord;
private int iteratorIndex = 0;
public OLazyRecordMultiIterator(final ORecord<?> iSourceRecord, final Object[] iIterators, final boolean iConvertToRecord) {
this.sourceRecord = iSourceRecord;
this.underlyingSources = iIterators;
this.underlyingIterators = new Object[iIterators.length];
this.convertToRecord = iConvertToRecord;
}
@Override
public void reset() {
iteratorIndex = 0;
for (int i = 0; i < underlyingIterators.length; ++i)
underlyingIterators[i] = null;
}
public OIdentifiable next() {
if (!hasNext())
throw new NoSuchElementException();
final Iterator<OIdentifiable> underlying = getCurrentIterator();
OIdentifiable value = underlying.next();
if (value == null)
return null;
if (value instanceof ORecordId && convertToRecord) {
value = ((ORecordId) value).getRecord();
if (underlying instanceof OLazyIterator<?>)
((OLazyIterator<OIdentifiable>) underlying).update(value);
}
return value;
}
public boolean hasNext() {
final Iterator<OIdentifiable> underlying = getCurrentIterator();
boolean again = underlying.hasNext();
while (!again && iteratorIndex < underlyingIterators.length - 1) {
iteratorIndex++;
again = getCurrentIterator().hasNext();
}
return again;
}
public OIdentifiable update(final OIdentifiable iValue) {
final Iterator<OIdentifiable> underlying = getCurrentIterator();
if (underlying instanceof OLazyIterator) {
final OIdentifiable old = ((OLazyIterator<OIdentifiable>) underlying).update(iValue);
if (sourceRecord != null && !old.equals(iValue))
sourceRecord.setDirty();
return old;
} else
throw new UnsupportedOperationException("Underlying iterator does not support lazy updates (interface OLazyIterator)");
}
public void remove() {
final Iterator<OIdentifiable> underlying = getCurrentIterator();
underlying.remove();
if (sourceRecord != null)
sourceRecord.setDirty();
}
@SuppressWarnings("unchecked")
private Iterator<OIdentifiable> getCurrentIterator() {
if (iteratorIndex > underlyingIterators.length)
throw new NoSuchElementException();
Object next = underlyingIterators[iteratorIndex];
if (next == null) {
// GET THE ITERATOR
if (underlyingSources[iteratorIndex] instanceof OResettable) {
// REUSE IT
((OResettable) underlyingSources[iteratorIndex]).reset();
underlyingIterators[iteratorIndex] = underlyingSources[iteratorIndex];
} else if (underlyingSources[iteratorIndex] instanceof Iterable<?>) {
// CREATE A NEW ONE FROM THE COLLECTION
underlyingIterators[iteratorIndex] = ((Iterable<?>) underlyingSources[iteratorIndex]).iterator();
} else if (underlyingSources[iteratorIndex] instanceof Iterator<?>) {
// COPY IT
underlyingIterators[iteratorIndex] = underlyingSources[iteratorIndex];
} else
throw new IllegalStateException("Unsupported iteration source: " + underlyingSources[iteratorIndex]);
next = underlyingIterators[iteratorIndex];
}
if (next instanceof Iterator<?>)
return (Iterator<OIdentifiable>) next;
return ((Collection<OIdentifiable>) next).iterator();
}
}
| 0true
|
core_src_main_java_com_orientechnologies_orient_core_db_record_OLazyRecordMultiIterator.java
|
873 |
public class FulfillmentGroupOfferPotential {
protected Offer offer;
protected Money totalSavings = new Money(BankersRounding.zeroAmount());
protected int priority;
public Offer getOffer() {
return offer;
}
public void setOffer(Offer offer) {
this.offer = offer;
}
public Money getTotalSavings() {
return totalSavings;
}
public void setTotalSavings(Money totalSavings) {
this.totalSavings = totalSavings;
}
public int getPriority() {
return priority;
}
public void setPriority(int priority) {
this.priority = priority;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((offer == null) ? 0 : offer.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
FulfillmentGroupOfferPotential other = (FulfillmentGroupOfferPotential) obj;
if (offer == null) {
if (other.offer != null) {
return false;
}
} else if (!offer.equals(other.offer)) {
return false;
}
return true;
}
}
| 1no label
|
core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_offer_service_discount_FulfillmentGroupOfferPotential.java
|
526 |
public class DateUtil {
public static boolean isActive(Date startDate, Date endDate, boolean includeTime) {
Long date = SystemTime.asMillis(includeTime);
return !(startDate == null || startDate.getTime() > date || (endDate != null && endDate.getTime() < date));
}
}
| 0true
|
common_src_main_java_org_broadleafcommerce_common_util_DateUtil.java
|
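isActive above treats a null or future startDate as not yet active and a null endDate as open-ended. A short sketch of how it behaves, assuming the package inferred from the file path and that SystemTime reports the current clock when not overridden:
import java.util.Calendar;
import java.util.Date;
import org.broadleafcommerce.common.util.DateUtil;
public class DateUtilSketch {
    public static void main(String[] args) {
        Calendar cal = Calendar.getInstance();
        cal.add(Calendar.DATE, -1);
        Date yesterday = cal.getTime();
        cal.add(Calendar.DATE, 2);
        Date tomorrow = cal.getTime();
        System.out.println(DateUtil.isActive(yesterday, tomorrow, true)); // true: window contains now
        System.out.println(DateUtil.isActive(yesterday, null, true));     // true: no end date means open-ended
        System.out.println(DateUtil.isActive(null, tomorrow, true));      // false: a null start date is treated as not active
        System.out.println(DateUtil.isActive(tomorrow, null, true));      // false: starts in the future
    }
}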
78 |
return new DataSerializableFactory() {
@Override
public IdentifiedDataSerializable create(int typeId) {
switch (typeId) {
case CLIENT_RESPONSE:
return new ClientResponse();
default:
return null;
}
}
};
| 0true
|
hazelcast_src_main_java_com_hazelcast_client_ClientDataSerializerHook.java
|
132 |
private final class InitializerValueProposal
implements ICompletionProposal, ICompletionProposalExtension2 {
private final String text;
private final Image image;
private final int offset;
private InitializerValueProposal(int offset, String text, Image image) {
this.offset = offset;
this.text = text;
this.image = image;
}
protected IRegion getCurrentRegion(IDocument document)
throws BadLocationException {
int start = offset;
int length = 0;
for (int i=offset;
i<document.getLength();
i++) {
char ch = document.getChar(i);
if (Character.isWhitespace(ch) ||
ch==';'||ch==','||ch==')') {
break;
}
length++;
}
return new Region(start, length);
}
@Override
public Image getImage() {
return image;
}
@Override
public Point getSelection(IDocument document) {
return new Point(offset + text.length(), 0);
}
public void apply(IDocument document) {
try {
IRegion region = getCurrentRegion(document);
document.replace(region.getOffset(),
region.getLength(), text);
}
catch (BadLocationException e) {
e.printStackTrace();
}
}
public String getDisplayString() {
return text;
}
public String getAdditionalProposalInfo() {
return null;
}
@Override
public IContextInformation getContextInformation() {
return null;
}
@Override
public void apply(ITextViewer viewer, char trigger,
int stateMask, int offset) {
apply(viewer.getDocument());
}
@Override
public void selected(ITextViewer viewer, boolean smartToggle) {}
@Override
public void unselected(ITextViewer viewer) {}
@Override
public boolean validate(IDocument document, int offset,
DocumentEvent event) {
try {
IRegion region = getCurrentRegion(document);
String prefix = document.get(region.getOffset(),
offset-region.getOffset());
return text.startsWith(prefix);
}
catch (BadLocationException e) {
return false;
}
}
}
| 0true
|
plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_correct_InitializerProposal.java
|
163 |
public class TestStandaloneLogExtractor
{
@Test
public void testRecreateCleanDbFromStandaloneExtractor() throws Exception
{
run( true, 1 );
}
@Test
public void testRecreateUncleanDbFromStandaloneExtractor() throws Exception
{
run( false, 2 );
}
private void run( boolean cleanShutdown, int nr ) throws Exception
{
EphemeralFileSystemAbstraction fileSystem = new EphemeralFileSystemAbstraction();
String storeDir = "source" + nr;
GraphDatabaseAPI db = (GraphDatabaseAPI) new TestGraphDatabaseFactory().
setFileSystem( fileSystem ).
newImpermanentDatabase( storeDir );
createSomeTransactions( db );
DbRepresentation rep = DbRepresentation.of( db );
EphemeralFileSystemAbstraction snapshot;
if ( cleanShutdown )
{
db.shutdown();
snapshot = fileSystem.snapshot();
} else
{
snapshot = fileSystem.snapshot();
db.shutdown();
}
GraphDatabaseAPI newDb = (GraphDatabaseAPI) new TestGraphDatabaseFactory().
setFileSystem( snapshot ).
newImpermanentDatabase( storeDir );
XaDataSource ds = newDb.getDependencyResolver().resolveDependency( XaDataSourceManager.class )
.getNeoStoreDataSource();
LogExtractor extractor = LogExtractor.from( snapshot, new File( storeDir ),
new Monitors().newMonitor( ByteCounterMonitor.class ) );
long expectedTxId = 2;
while ( true )
{
InMemoryLogBuffer buffer = new InMemoryLogBuffer();
long txId = extractor.extractNext( buffer );
assertEquals( expectedTxId++, txId );
/* first tx=2
* 1 tx for relationship type
* 1 tx for property index
* 1 for the first tx
* 5 additional tx + 1 tx for the other property index
* ==> 11
*/
if ( expectedTxId == 11 )
{
expectedTxId = -1;
}
if ( txId == -1 )
{
break;
}
ds.applyCommittedTransaction( txId, buffer );
}
DbRepresentation newRep = DbRepresentation.of( newDb );
newDb.shutdown();
assertEquals( rep, newRep );
fileSystem.shutdown();
}
private void createSomeTransactions( GraphDatabaseAPI db ) throws IOException
{
try ( BatchTransaction tx = beginBatchTx( db ) )
{
Node node = db.createNode();
node.setProperty( "name", "First" );
Node otherNode = db.createNode();
node.createRelationshipTo( otherNode, MyRelTypes.TEST );
tx.intermediaryCommit();
db.getDependencyResolver().resolveDependency( XaDataSourceManager.class )
.getNeoStoreDataSource().rotateLogicalLog();
for ( int i = 0; i < 5; i++ )
{
db.createNode().setProperty( "type", i );
tx.intermediaryCommit();
}
}
}
}
| 0true
|
community_kernel_src_test_java_org_neo4j_kernel_impl_transaction_xaframework_TestStandaloneLogExtractor.java
|
128 |
public class LongAdder extends Striped64 implements Serializable {
private static final long serialVersionUID = 7249069246863182397L;
/**
* Version of plus for use in retryUpdate
*/
final long fn(long v, long x) { return v + x; }
/**
* Creates a new adder with initial sum of zero.
*/
public LongAdder() {
}
/**
* Adds the given value.
*
* @param x the value to add
*/
public void add(long x) {
Cell[] as; long b, v; HashCode hc; Cell a; int n;
if ((as = cells) != null || !casBase(b = base, b + x)) {
boolean uncontended = true;
int h = (hc = threadHashCode.get()).code;
if (as == null || (n = as.length) < 1 ||
(a = as[(n - 1) & h]) == null ||
!(uncontended = a.cas(v = a.value, v + x)))
retryUpdate(x, hc, uncontended);
}
}
/**
* Equivalent to {@code add(1)}.
*/
public void increment() {
add(1L);
}
/**
* Equivalent to {@code add(-1)}.
*/
public void decrement() {
add(-1L);
}
/**
* Returns the current sum. The returned value is <em>NOT</em> an
* atomic snapshot; invocation in the absence of concurrent
* updates returns an accurate result, but concurrent updates that
* occur while the sum is being calculated might not be
* incorporated.
*
* @return the sum
*/
public long sum() {
long sum = base;
Cell[] as = cells;
if (as != null) {
int n = as.length;
for (int i = 0; i < n; ++i) {
Cell a = as[i];
if (a != null)
sum += a.value;
}
}
return sum;
}
/**
* Resets variables maintaining the sum to zero. This method may
* be a useful alternative to creating a new adder, but is only
* effective if there are no concurrent updates. Because this
* method is intrinsically racy, it should only be used when it is
* known that no threads are concurrently updating.
*/
public void reset() {
internalReset(0L);
}
/**
* Equivalent in effect to {@link #sum} followed by {@link
* #reset}. This method may apply for example during quiescent
* points between multithreaded computations. If there are
* updates concurrent with this method, the returned value is
* <em>not</em> guaranteed to be the final value occurring before
* the reset.
*
* @return the sum
*/
public long sumThenReset() {
long sum = base;
Cell[] as = cells;
base = 0L;
if (as != null) {
int n = as.length;
for (int i = 0; i < n; ++i) {
Cell a = as[i];
if (a != null) {
sum += a.value;
a.value = 0L;
}
}
}
return sum;
}
/**
* Returns the String representation of the {@link #sum}.
* @return the String representation of the {@link #sum}
*/
public String toString() {
return Long.toString(sum());
}
/**
* Equivalent to {@link #sum}.
*
* @return the sum
*/
public long longValue() {
return sum();
}
/**
* Returns the {@link #sum} as an {@code int} after a narrowing
* primitive conversion.
*/
public int intValue() {
return (int)sum();
}
/**
* Returns the {@link #sum} as a {@code float}
* after a widening primitive conversion.
*/
public float floatValue() {
return (float)sum();
}
/**
* Returns the {@link #sum} as a {@code double} after a widening
* primitive conversion.
*/
public double doubleValue() {
return (double)sum();
}
private void writeObject(java.io.ObjectOutputStream s)
throws java.io.IOException {
s.defaultWriteObject();
s.writeLong(sum());
}
private void readObject(java.io.ObjectInputStream s)
throws java.io.IOException, ClassNotFoundException {
s.defaultReadObject();
busy = 0;
cells = null;
base = s.readLong();
}
}
| 0true
|
src_main_java_jsr166e_LongAdder.java
|
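The sum() documentation above notes that the returned value is not an atomic snapshot while updates are in flight. A small sketch of the intended usage, counting events from several threads and reading the total only after they finish; the package is inferred from the file path and the thread count is arbitrary:
import jsr166e.LongAdder;
public class LongAdderSketch {
    public static void main(String[] args) throws InterruptedException {
        final LongAdder hits = new LongAdder();
        Thread[] workers = new Thread[4];
        for (int t = 0; t < workers.length; t++) {
            workers[t] = new Thread(new Runnable() {
                public void run() {
                    for (int i = 0; i < 1000; i++) {
                        hits.increment(); // contended updates land on striped cells, not one hot field
                    }
                }
            });
            workers[t].start();
        }
        for (Thread w : workers) {
            w.join();
        }
        // All writers have finished, so sum() is exact here: 4 * 1000.
        System.out.println(hits.sum());
    }
}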
163 |
return executeRead(new Callable<KeyIterator>() {
@Override
public KeyIterator call() throws Exception {
return (storeFeatures.isKeyOrdered())
? edgeStore.getKeys(new KeyRangeQuery(EDGESTORE_MIN_KEY, EDGESTORE_MAX_KEY, sliceQuery), storeTx)
: edgeStore.getKeys(sliceQuery, storeTx);
}
@Override
public String toString() {
return "EdgeStoreKeys";
}
});
| 0true
|
titan-core_src_main_java_com_thinkaurelius_titan_diskstorage_BackendTransaction.java
|
184 |
private class IDStressor implements Runnable {
private final int numRounds;
private final int numPartitions;
private final int maxIterations;
private final IDAuthority authority;
private final List<ConcurrentLinkedQueue<IDBlock>> allocatedBlocks;
private static final long sleepMS = 250L;
private IDStressor(int numRounds, int numPartitions, int maxIterations,
IDAuthority authority, List<ConcurrentLinkedQueue<IDBlock>> ids) {
this.numRounds = numRounds;
this.numPartitions = numPartitions;
this.maxIterations = maxIterations;
this.authority = authority;
this.allocatedBlocks = ids;
}
@Override
public void run() {
try {
runInterruptible();
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
}
private void runInterruptible() throws InterruptedException {
int iterations = 0;
long lastStart[] = new long[numPartitions];
for (int i = 0; i < numPartitions; i++)
lastStart[i] = Long.MIN_VALUE;
for (int j = 0; j < numRounds; j++) {
for (int p = 0; p < numPartitions; p++) {
if (maxIterations < ++iterations) {
throwIterationsExceededException();
}
final IDBlock block = allocate(p);
if (null == block) {
Thread.sleep(sleepMS);
p--;
} else {
allocatedBlocks.get(p).add(block);
if (hasEmptyUid) {
long start = block.getId(0);
Assert.assertTrue("Previous block start "
+ lastStart[p] + " exceeds next block start "
+ start, lastStart[p] <= start);
lastStart[p] = start;
}
}
}
}
}
private IDBlock allocate(int partitionIndex) {
IDBlock block;
try {
block = authority.getIDBlock(partitionIndex,partitionIndex,GET_ID_BLOCK_TIMEOUT);
} catch (BackendException e) {
log.error("Unexpected exception while getting ID block", e);
return null;
}
/*
* This is not guaranteed in the consistentkey implementation.
* Writers of ID block claims in that implementation delete their
* writes if they take too long. A peek can see this short-lived
* block claim even though a subsequent getblock does not.
*/
// Assert.assertTrue(nextId <= block[0]);
if (hasEmptyUid) assertEquals(block.getId(0) + blockSize - 1, block.getId(blockSize - 1));
log.trace("Obtained ID block {}", block);
return block;
}
private boolean throwIterationsExceededException() {
throw new RuntimeException(
"Exceeded maximum ID allocation iteration count ("
+ maxIterations + "); too many timeouts?");
}
}
| 0true
|
titan-test_src_main_java_com_thinkaurelius_titan_diskstorage_IDAuthorityTest.java
|
1,596 |
public class MapStructure implements Serializable, PersistencePerspectiveItem {
private static final long serialVersionUID = 1L;
private String keyClassName;
private String mapKeyValueProperty;
private String keyPropertyName;
private String keyPropertyFriendlyName;
private String valueClassName;
private String mapProperty;
private Boolean deleteValueEntity = Boolean.FALSE;
private String manyToField;
private Boolean mutable = true;
public MapStructure() {
//do nothing - support serialization requirements
}
public MapStructure(String keyClassName, String keyPropertyName, String keyPropertyFriendlyName, String valueClassName,
String mapProperty, Boolean deleteValueEntity, String mapKeyValueProperty) {
if (!keyClassName.equals(String.class.getName())) {
throw new RuntimeException("keyClass of java.lang.String is currently the only type supported");
}
this.keyClassName = keyClassName;
this.valueClassName = valueClassName;
this.mapProperty = mapProperty;
this.keyPropertyName = keyPropertyName;
this.keyPropertyFriendlyName = keyPropertyFriendlyName;
this.deleteValueEntity = deleteValueEntity;
this.mapKeyValueProperty = mapKeyValueProperty;
}
public String getKeyClassName() {
return keyClassName;
}
public void setKeyClassName(String keyClassName) {
if (!keyClassName.equals(String.class.getName())) {
throw new RuntimeException("keyClass of java.lang.String is currently the only type supported");
}
this.keyClassName = keyClassName;
}
public String getValueClassName() {
return valueClassName;
}
public void setValueClassName(String valueClassName) {
this.valueClassName = valueClassName;
}
public String getMapProperty() {
return mapProperty;
}
public void setMapProperty(String mapProperty) {
this.mapProperty = mapProperty;
}
public String getKeyPropertyName() {
return keyPropertyName;
}
public void setKeyPropertyName(String keyPropertyName) {
this.keyPropertyName = keyPropertyName;
}
public String getKeyPropertyFriendlyName() {
return keyPropertyFriendlyName;
}
public void setKeyPropertyFriendlyName(String keyPropertyFriendlyName) {
this.keyPropertyFriendlyName = keyPropertyFriendlyName;
}
public Boolean getDeleteValueEntity() {
return deleteValueEntity;
}
public void setDeleteValueEntity(Boolean deleteValueEntity) {
this.deleteValueEntity = deleteValueEntity;
}
public String getManyToField() {
return manyToField;
}
public void setManyToField(String manyToField) {
this.manyToField = manyToField;
}
public Boolean getMutable() {
return mutable;
}
public void setMutable(Boolean mutable) {
this.mutable = mutable;
}
public String getMapKeyValueProperty() {
return mapKeyValueProperty;
}
public void setMapKeyValueProperty(String mapKeyValueProperty) {
this.mapKeyValueProperty = mapKeyValueProperty;
}
public void accept(PersistencePerspectiveItemVisitor visitor) {
visitor.visit(this);
}
@Override
public PersistencePerspectiveItem clonePersistencePerspectiveItem() {
MapStructure mapStructure = new MapStructure();
mapStructure.keyClassName = keyClassName;
mapStructure.keyPropertyName = keyPropertyName;
mapStructure.keyPropertyFriendlyName = keyPropertyFriendlyName;
mapStructure.valueClassName = valueClassName;
mapStructure.mapProperty = mapProperty;
mapStructure.deleteValueEntity = deleteValueEntity;
mapStructure.manyToField = manyToField;
mapStructure.mutable = mutable;
mapStructure.mapKeyValueProperty = mapKeyValueProperty;
return mapStructure;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (!(o instanceof MapStructure)) return false;
MapStructure that = (MapStructure) o;
if (deleteValueEntity != null ? !deleteValueEntity.equals(that.deleteValueEntity) : that.deleteValueEntity != null)
return false;
if (mapKeyValueProperty != null ? !mapKeyValueProperty.equals(that.mapKeyValueProperty) : that.mapKeyValueProperty != null)
return false;
if (keyClassName != null ? !keyClassName.equals(that.keyClassName) : that.keyClassName != null) return false;
if (keyPropertyFriendlyName != null ? !keyPropertyFriendlyName.equals(that.keyPropertyFriendlyName) : that.keyPropertyFriendlyName != null)
return false;
if (keyPropertyName != null ? !keyPropertyName.equals(that.keyPropertyName) : that.keyPropertyName != null)
return false;
if (mapProperty != null ? !mapProperty.equals(that.mapProperty) : that.mapProperty != null) return false;
if (valueClassName != null ? !valueClassName.equals(that.valueClassName) : that.valueClassName != null)
return false;
if (manyToField != null ? !manyToField.equals(that.manyToField) : that.manyToField != null) return false;
if (mutable != null ? !mutable.equals(that.mutable) : that.mutable != null) return false;
return true;
}
@Override
public int hashCode() {
int result = keyClassName != null ? keyClassName.hashCode() : 0;
result = 31 * result + (keyPropertyName != null ? keyPropertyName.hashCode() : 0);
result = 31 * result + (keyPropertyFriendlyName != null ? keyPropertyFriendlyName.hashCode() : 0);
result = 31 * result + (mapKeyValueProperty != null ? mapKeyValueProperty.hashCode() : 0);
result = 31 * result + (valueClassName != null ? valueClassName.hashCode() : 0);
result = 31 * result + (mapProperty != null ? mapProperty.hashCode() : 0);
result = 31 * result + (deleteValueEntity != null ? deleteValueEntity.hashCode() : 0);
result = 31 * result + (manyToField != null ? manyToField.hashCode() : 0);
result = 31 * result + (mutable != null ? mutable.hashCode() : 0);
return result;
}
}
| 1no label
|
admin_broadleaf-open-admin-platform_src_main_java_org_broadleafcommerce_openadmin_dto_MapStructure.java
|
932 |
public abstract class BroadcastShardOperationResponse extends TransportResponse {
String index;
int shardId;
protected BroadcastShardOperationResponse() {
}
protected BroadcastShardOperationResponse(String index, int shardId) {
this.index = index;
this.shardId = shardId;
}
public String getIndex() {
return this.index;
}
public int getShardId() {
return this.shardId;
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
index = in.readString();
shardId = in.readVInt();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeString(index);
out.writeVInt(shardId);
}
}
| 0true
|
src_main_java_org_elasticsearch_action_support_broadcast_BroadcastShardOperationResponse.java
|
2,362 |
private class PartitionProcessor
implements Runnable {
@Override
public void run() {
KeyValueSource<KeyIn, ValueIn> delegate = keyValueSource;
if (supervisor.getConfiguration().isCommunicateStats()) {
delegate = new KeyValueSourceFacade<KeyIn, ValueIn>(keyValueSource, supervisor);
}
while (true) {
if (cancelled.get()) {
return;
}
Integer partitionId = findNewPartitionProcessing();
if (partitionId == null) {
// Job's done
return;
}
// Migration event occurred, just retry
if (partitionId == -1) {
continue;
}
try {
// This call cannot be delegated
((PartitionIdAware) keyValueSource).setPartitionId(partitionId);
delegate.reset();
if (delegate.open(nodeEngine)) {
DefaultContext<KeyOut, ValueOut> context = supervisor.getOrCreateContext(MapCombineTask.this);
processMapping(partitionId, context, delegate);
delegate.close();
finalizeMapping(partitionId, context);
} else {
// Partition assignment might not be ready yet, postpone the processing and retry later
postponePartitionProcessing(partitionId);
}
} catch (Throwable t) {
handleProcessorThrowable(t);
}
}
}
private Integer findNewPartitionProcessing() {
try {
RequestPartitionResult result = mapReduceService
.processRequest(supervisor.getJobOwner(), new RequestPartitionMapping(name, jobId), name);
// JobSupervisor doesn't exist anymore on jobOwner, job done?
if (result.getResultState() == NO_SUPERVISOR) {
return null;
} else if (result.getResultState() == CHECK_STATE_FAILED) {
// retry
return -1;
} else if (result.getResultState() == NO_MORE_PARTITIONS) {
return null;
} else {
return result.getPartitionId();
}
} catch (Exception e) {
throw new RuntimeException(e);
}
}
}
| 1no label
|
hazelcast_src_main_java_com_hazelcast_mapreduce_impl_task_MapCombineTask.java
|
1,182 |
@Entity
@Inheritance(strategy = InheritanceType.JOINED)
@Table(name = "BLC_PAYMENT_RESPONSE_ITEM")
public class PaymentResponseItemImpl implements PaymentResponseItem {
private static final long serialVersionUID = 1L;
@Id
@GeneratedValue(generator = "PaymentResponseItemId")
@GenericGenerator(
name="PaymentResponseItemId",
strategy="org.broadleafcommerce.common.persistence.IdOverrideTableGenerator",
parameters = {
@Parameter(name="segment_value", value="PaymentResponseItemImpl"),
@Parameter(name="entity_name", value="org.broadleafcommerce.core.payment.domain.PaymentResponseItemImpl")
}
)
@Column(name = "PAYMENT_RESPONSE_ITEM_ID")
protected Long id;
@Column(name = "USER_NAME", nullable=false)
@AdminPresentation(friendlyName = "PaymentResponseItemImpl_User_Name", order = 1, group = "PaymentResponseItemImpl_Payment_Response", readOnly = true)
protected String userName;
@Column(name = "AMOUNT_PAID", precision=19, scale=5)
@AdminPresentation(friendlyName = "PaymentResponseItemImpl_Amount", order = 2, group = "PaymentResponseItemImpl_Payment_Response", readOnly = true, prominent = true, gridOrder = 200, fieldType = SupportedFieldType.MONEY)
protected BigDecimal amountPaid;
@Column(name = "TRANSACTION_AMOUNT", precision=19, scale=5)
@AdminPresentation(friendlyName = "PaymentResponseItemImpl_Transaction_Amount", order = 2, group = "PaymentResponseItemImpl_Payment_Response", readOnly = true)
protected BigDecimal transactionAmount;
@Column(name = "AUTHORIZATION_CODE")
@AdminPresentation(friendlyName = "PaymentResponseItemImpl_Authorization_Code", order = 3, group = "PaymentResponseItemImpl_Payment_Response", readOnly = true)
protected String authorizationCode;
@Column(name = "MIDDLEWARE_RESPONSE_CODE")
@AdminPresentation(friendlyName = "PaymentResponseItemImpl_Middleware_Response_Code", order = 4, group = "PaymentResponseItemImpl_Payment_Response", readOnly = true)
protected String middlewareResponseCode;
@Column(name = "MIDDLEWARE_RESPONSE_TEXT")
@AdminPresentation(friendlyName = "PaymentResponseItemImpl_Middleware_Response_Text", order = 5, group = "PaymentResponseItemImpl_Payment_Response", readOnly = true)
protected String middlewareResponseText;
@Column(name = "PROCESSOR_RESPONSE_CODE")
@AdminPresentation(friendlyName = "PaymentResponseItemImpl_Processor_Response_Code", order = 6, group = "PaymentResponseItemImpl_Payment_Response", readOnly = true)
protected String processorResponseCode;
@Column(name = "PROCESSOR_RESPONSE_TEXT")
@AdminPresentation(friendlyName = "PaymentResponseItemImpl_Processor_Response_Text", order = 7, group = "PaymentResponseItemImpl_Payment_Response", readOnly = true)
protected String processorResponseText;
@Column(name = "IMPLEMENTOR_RESPONSE_CODE")
@AdminPresentation(friendlyName = "PaymentResponseItemImpl_Implementer_Response_Code", order = 8, group = "PaymentResponseItemImpl_Payment_Response", readOnly = true)
protected String implementorResponseCode;
@Column(name = "IMPLEMENTOR_RESPONSE_TEXT")
@AdminPresentation(friendlyName = "PaymentResponseItemImpl_Implementer_Response_Text", order = 9, group = "PaymentResponseItemImpl_Payment_Response", readOnly = true)
protected String implementorResponseText;
@Column(name = "REFERENCE_NUMBER")
@Index(name="PAYRESPONSE_REFERENCE_INDEX", columnNames={"REFERENCE_NUMBER"})
@AdminPresentation(friendlyName = "PaymentResponseItemImpl_Response_Ref_Number", order = 10, group = "PaymentResponseItemImpl_Payment_Response", readOnly = true)
protected String referenceNumber;
@Column(name = "TRANSACTION_SUCCESS")
@AdminPresentation(friendlyName = "PaymentResponseItemImpl_Transaction_Successful", order = 11, group = "PaymentResponseItemImpl_Payment_Response", readOnly = true, prominent = true, gridOrder = 300)
protected Boolean transactionSuccess = false;
@Column(name = "TRANSACTION_TIMESTAMP", nullable=false)
@Temporal(TemporalType.TIMESTAMP)
@AdminPresentation(friendlyName = "PaymentResponseItemImpl_Transaction_Time", order = 12, group = "PaymentResponseItemImpl_Payment_Response", readOnly = true, prominent = true, gridOrder = 100)
protected Date transactionTimestamp;
@Column(name = "TRANSACTION_ID")
@AdminPresentation(friendlyName = "PaymentResponseItemImpl_Transaction_Id", order = 13, group = "PaymentResponseItemImpl_Payment_Response", readOnly = true)
protected String transactionId;
@Column(name = "AVS_CODE")
@AdminPresentation(friendlyName = "PaymentResponseItemImpl_AVS_Code", order = 14, group = "PaymentResponseItemImpl_Payment_Response", readOnly = true)
protected String avsCode;
@Transient
protected String cvvCode;
@Column(name = "REMAINING_BALANCE", precision=19, scale=5)
@AdminPresentation(friendlyName = "PaymentResponseItemImpl_Remaining_Balance", order = 15, group = "PaymentResponseItemImpl_Payment_Response", readOnly = true)
protected BigDecimal remainingBalance;
@Column(name = "TRANSACTION_TYPE", nullable=false)
@Index(name="PAYRESPONSE_TRANTYPE_INDEX", columnNames={"TRANSACTION_TYPE"})
@AdminPresentation(friendlyName = "PaymentResponseItemImpl_Transaction_Type", order = 16, group = "PaymentResponseItemImpl_Payment_Response", readOnly = true, prominent = true, gridOrder = 400)
protected String transactionType;
@ElementCollection
@MapKeyColumn(name="FIELD_NAME")
@Column(name="FIELD_VALUE")
@CollectionTable(name="BLC_PAYMENT_ADDITIONAL_FIELDS", joinColumns=@JoinColumn(name="PAYMENT_RESPONSE_ITEM_ID"))
@BatchSize(size = 50)
protected Map<String, String> additionalFields = new HashMap<String, String>();
@Column(name = "ORDER_PAYMENT_ID")
@Index(name="PAYRESPONSE_ORDERPAYMENT_INDEX", columnNames={"ORDER_PAYMENT_ID"})
@AdminPresentation(excluded = true, readOnly = true)
protected Long paymentInfoId;
@ManyToOne(targetEntity = CustomerImpl.class)
@JoinColumn(name = "CUSTOMER_ID")
@Index(name="PAYRESPONSE_CUSTOMER_INDEX", columnNames={"CUSTOMER_ID"})
protected Customer customer;
@Column(name = "PAYMENT_INFO_REFERENCE_NUMBER")
@Index(name="PAYRESPONSE_REFERENCE_INDEX", columnNames={"PAYMENT_INFO_REFERENCE_NUMBER"})
@AdminPresentation(friendlyName = "PaymentResponseItemImpl_Payment_Ref_Number", order = 17, group = "PaymentResponseItemImpl_Payment_Response", readOnly = true)
protected String paymentInfoReferenceNumber;
@ManyToOne(targetEntity = BroadleafCurrencyImpl.class)
@JoinColumn(name = "CURRENCY_CODE")
@AdminPresentation(friendlyName = "PaymentResponseItemImpl_currency", order = 2, group = "PaymentLogImpl_Payment_Log", readOnly = true)
protected BroadleafCurrency currency;
@ManyToOne(targetEntity = PaymentInfoImpl.class)
@JoinColumn(name = "PAYMENT_INFO_REFERENCE_NUMBER", referencedColumnName = "REFERENCE_NUMBER", insertable = false, updatable = false)
protected PaymentInfo paymentInfo;
@Override
public String getAuthorizationCode() {
return authorizationCode;
}
@Override
public void setAuthorizationCode(String authorizationCode) {
this.authorizationCode = authorizationCode;
}
@Override
public String getMiddlewareResponseCode() {
return middlewareResponseCode;
}
@Override
public void setMiddlewareResponseCode(String middlewareResponseCode) {
this.middlewareResponseCode = middlewareResponseCode;
}
@Override
public String getMiddlewareResponseText() {
return middlewareResponseText;
}
@Override
public void setMiddlewareResponseText(String middlewareResponseText) {
this.middlewareResponseText = middlewareResponseText;
}
@Override
public String getProcessorResponseCode() {
return processorResponseCode;
}
@Override
public void setProcessorResponseCode(String processorResponseCode) {
this.processorResponseCode = processorResponseCode;
}
@Override
public String getProcessorResponseText() {
return processorResponseText;
}
@Override
public void setProcessorResponseText(String processorResponseText) {
this.processorResponseText = processorResponseText;
}
@Override
public String getReferenceNumber() {
return referenceNumber;
}
@Override
public void setReferenceNumber(String referenceNumber) {
this.referenceNumber = referenceNumber;
}
@Override
@Deprecated
public Money getAmountPaid() {
return BroadleafCurrencyUtils.getMoney(amountPaid, getCurrency());
}
@Override
@Deprecated
public void setAmountPaid(Money amountPaid) {
this.amountPaid = Money.toAmount(amountPaid);
}
@Override
public Money getTransactionAmount() {
return BroadleafCurrencyUtils.getMoney(transactionAmount, getCurrency());
}
@Override
public void setTransactionAmount(Money transactionAmount) {
this.transactionAmount = Money.toAmount(transactionAmount);
}
@Override
public Boolean getTransactionSuccess() {
if (transactionSuccess == null) {
return Boolean.FALSE;
} else {
return transactionSuccess;
}
}
@Override
public void setTransactionSuccess(Boolean transactionSuccess) {
this.transactionSuccess = transactionSuccess;
}
@Override
public Date getTransactionTimestamp() {
return transactionTimestamp;
}
@Override
public void setTransactionTimestamp(Date transactionTimestamp) {
this.transactionTimestamp = transactionTimestamp;
}
@Override
public String getImplementorResponseCode() {
return implementorResponseCode;
}
@Override
public void setImplementorResponseCode(String implementorResponseCode) {
this.implementorResponseCode = implementorResponseCode;
}
@Override
public String getImplementorResponseText() {
return implementorResponseText;
}
@Override
public void setImplementorResponseText(String implementorResponseText) {
this.implementorResponseText = implementorResponseText;
}
@Override
public String getTransactionId() {
return transactionId;
}
@Override
public void setTransactionId(String transactionId) {
this.transactionId = transactionId;
}
@Override
public String getAvsCode() {
return avsCode;
}
@Override
public void setAvsCode(String avsCode) {
this.avsCode = avsCode;
}
@Override
public String getCvvCode() {
return cvvCode;
}
@Override
public void setCvvCode(String cvvCode) {
this.cvvCode = cvvCode;
}
@Override
public Money getRemainingBalance() {
return remainingBalance == null ? null : BroadleafCurrencyUtils.getMoney(remainingBalance, getCurrency());
}
@Override
public void setRemainingBalance(Money remainingBalance) {
this.remainingBalance = remainingBalance==null?null:Money.toAmount(remainingBalance);
}
@Override
public TransactionType getTransactionType() {
return TransactionType.getInstance(transactionType);
}
@Override
public void setTransactionType(TransactionType transactionType) {
this.transactionType = transactionType.getType();
}
@Override
public Map<String, String> getAdditionalFields() {
return additionalFields;
}
@Override
public void setAdditionalFields(Map<String, String> additionalFields) {
this.additionalFields = additionalFields;
}
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
@Override
public Long getPaymentInfoId() {
return paymentInfoId;
}
@Override
public void setPaymentInfoId(Long paymentInfoId) {
this.paymentInfoId = paymentInfoId;
}
@Override
public String getUserName() {
return userName;
}
@Override
public void setUserName(String userName) {
this.userName = userName;
}
@Override
public Customer getCustomer() {
return customer;
}
@Override
public void setCustomer(Customer customer) {
this.customer = customer;
}
@Override
public BroadleafCurrency getCurrency() {
return currency;
}
@Override
public void setCurrency(BroadleafCurrency currency) {
this.currency = currency;
}
@Override
public String getPaymentInfoReferenceNumber() {
return paymentInfoReferenceNumber;
}
@Override
public void setPaymentInfoReferenceNumber(String paymentInfoReferenceNumber) {
this.paymentInfoReferenceNumber = paymentInfoReferenceNumber;
}
@Override
public PaymentInfo getPaymentInfo() {
return paymentInfo;
}
@Override
public void setPaymentInfo(PaymentInfo paymentInfo) {
this.paymentInfo = paymentInfo;
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append(PaymentResponseItem.class.getName()).append("\n");
sb.append("auth code: ").append(this.getAuthorizationCode()).append("\n");
sb.append("implementor response code: ").append(this.getImplementorResponseCode()).append("\n");
sb.append("implementor response text: ").append(this.getImplementorResponseText()).append("\n");
sb.append("middleware response code: ").append(this.getMiddlewareResponseCode()).append("\n");
sb.append("middleware response text: ").append(this.getMiddlewareResponseText()).append("\n");
sb.append("processor response code: ").append(this.getProcessorResponseCode()).append("\n");
sb.append("processor response text: ").append(this.getProcessorResponseText()).append("\n");
sb.append("reference number: ").append(this.getReferenceNumber()).append("\n");
sb.append("transaction id: ").append(this.getTransactionId()).append("\n");
sb.append("avs code: ").append(this.getAvsCode()).append("\n");
if (remainingBalance != null) {
sb.append("remaining balance: ").append(this.getRemainingBalance());
}
return sb.toString();
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((transactionId == null) ? 0 : transactionId.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
PaymentResponseItemImpl other = (PaymentResponseItemImpl) obj;
if (id != null && other.id != null) {
return id.equals(other.id);
}
if (transactionId == null) {
if (other.transactionId != null) {
return false;
}
} else if (!transactionId.equals(other.transactionId)) {
return false;
}
return true;
}
}
| 1no label
|
core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_payment_domain_PaymentResponseItemImpl.java
|
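The equals/hashCode pair in PaymentResponseItemImpl above prefers the database id when both sides have one and otherwise falls back to transactionId as a business key, while hashCode is built from transactionId alone. A minimal sketch of how that contract could be exercised in a JUnit test; the test class name, the literal transaction id, and the use of the default constructor are assumptions for illustration:

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

import org.junit.Test;

public class PaymentResponseItemEqualityTest {

    @Test
    public void itemsWithSameTransactionIdAreEqual() {
        // no database ids assigned, so equality falls back to the transaction id
        PaymentResponseItemImpl first = new PaymentResponseItemImpl();
        PaymentResponseItemImpl second = new PaymentResponseItemImpl();
        first.setTransactionId("txn-42");
        second.setTransactionId("txn-42");

        assertTrue(first.equals(second));
        // equal objects must agree on hashCode, which here only uses transactionId
        assertEquals(first.hashCode(), second.hashCode());
    }
}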
1,164 |
transportServiceClient.submitRequest(bigNode, "benchmark", message, options().withType(TransportRequestOptions.Type.BULK), new BaseTransportResponseHandler<BenchmarkMessageResponse>() {
@Override
public BenchmarkMessageResponse newInstance() {
return new BenchmarkMessageResponse();
}
@Override
public String executor() {
return ThreadPool.Names.SAME;
}
@Override
public void handleResponse(BenchmarkMessageResponse response) {
}
@Override
public void handleException(TransportException exp) {
exp.printStackTrace();
}
}).txGet();
| 0true
|
src_test_java_org_elasticsearch_benchmark_transport_BenchmarkNettyLargeMessages.java
|
439 |
@RunWith(HazelcastSerialClassRunner.class)
@Category(SlowTest.class)
public class ClientQueueDisruptionTest {
HazelcastInstance client1;
HazelcastInstance client2;
SimpleClusterUtil cluster;
@Before
public void init(){
Hazelcast.shutdownAll();
cluster = new SimpleClusterUtil("A", 3);
cluster.initCluster();
ClientConfig clientConfig = new ClientConfig();
clientConfig.setGroupConfig(new GroupConfig(cluster.getName()));
client1 = HazelcastClient.newHazelcastClient(clientConfig);
client2 = HazelcastClient.newHazelcastClient(clientConfig);
}
@Test
public void clientsConsume_withNodeTerminate() throws InterruptedException {
final int initial=2000, max = 8000;
for(int i=0; i<initial; i++){
cluster.getRandomNode().getQueue("Q1").offer(i);
cluster.getRandomNode().getQueue("Q2").offer(i);
}
int expect=0;
for(int i=initial; i<max; i++){
if(i==max/2){
cluster.terminateRandomNode();
}
assertTrue(cluster.getRandomNode().getQueue("Q1").offer(i));
assertTrue(cluster.getRandomNode().getQueue("Q2").offer(i));
TestCase.assertEquals( expect, client1.getQueue("Q1").poll() );
TestCase.assertEquals( expect, client2.getQueue("Q2").poll() );
expect++;
}
for(int i=expect; i<max; i++){
TestCase.assertEquals( i, client1.getQueue("Q1").poll() );
TestCase.assertEquals( i, client2.getQueue("Q2").poll() );
}
}
}
| 0true
|
hazelcast-client_src_test_java_com_hazelcast_client_queue_ClientQueueDisruptionTest.java
|
60 |
final Iterator<String> keysToMangle = Iterators.filter(configuration.getKeys(), new Predicate<String>() {
@Override
public boolean apply(String key) {
if (null == key)
return false;
return p.matcher(key).matches();
}
});
| 0true
|
titan-core_src_main_java_com_thinkaurelius_titan_core_TitanFactory.java
|
504 |
public class MyKey
implements Serializable {
}
| 0true
|
hazelcast-client_src_test_java_com_hazelcast_client_standalone_model_MyKey.java
|
6,269 |
public class GreaterThanAssertion extends Assertion {
private static final ESLogger logger = Loggers.getLogger(GreaterThanAssertion.class);
public GreaterThanAssertion(String field, Object expectedValue) {
super(field, expectedValue);
}
@Override
@SuppressWarnings("unchecked")
protected void doAssert(Object actualValue, Object expectedValue) {
logger.trace("assert that [{}] is greater than [{}]", actualValue, expectedValue);
assertThat(actualValue, instanceOf(Comparable.class));
assertThat(expectedValue, instanceOf(Comparable.class));
assertThat(errorMessage(), (Comparable)actualValue, greaterThan((Comparable) expectedValue));
}
private String errorMessage() {
return "field [" + getField() + "] is not greater than [" + getExpectedValue() + "]";
}
}
| 1no label
|
src_test_java_org_elasticsearch_test_rest_section_GreaterThanAssertion.java
|
860 |
public class OSecurityNull implements OSecurity {
@Override
public boolean isAllowed(final Set<OIdentifiable> iAllowAll, final Set<OIdentifiable> iAllowOperation) {
return true;
}
public OUser create() {
return null;
}
public void load() {
}
public OUser getUser(String iUserName) {
return null;
}
public OUser createUser(String iUserName, String iUserPassword, String... iRoles) {
return null;
}
public OUser createUser(String iUserName, String iUserPassword, ORole... iRoles) {
return null;
}
public ORole getRole(String iRoleName) {
return null;
}
public ORole getRole(OIdentifiable iRole) {
return null;
}
public ORole createRole(String iRoleName, ALLOW_MODES iAllowMode) {
return null;
}
public ORole createRole(String iRoleName, ORole iParent, ALLOW_MODES iAllowMode) {
return null;
}
public List<ODocument> getAllUsers() {
return null;
}
public List<ODocument> getAllRoles() {
return null;
}
public OUser authenticate(String iUsername, String iUserPassword) {
return null;
}
public void close() {
}
public OUser repair() {
return null;
}
public boolean dropUser(String iUserName) {
return false;
}
public boolean dropRole(String iRoleName) {
return false;
}
@Override
public OIdentifiable allowUser(ODocument iDocument, String iAllowFieldName, String iUserName) {
return null;
}
@Override
public OIdentifiable allowRole(ODocument iDocument, String iAllowFieldName, String iRoleName) {
return null;
}
@Override
public OIdentifiable allowIdentity(ODocument iDocument, String iAllowFieldName, OIdentifiable iId) {
return null;
}
@Override
public OIdentifiable disallowUser(ODocument iDocument, String iAllowFieldName, String iUserName) {
return null;
}
@Override
public OIdentifiable disallowRole(ODocument iDocument, String iAllowFieldName, String iRoleName) {
return null;
}
@Override
public OIdentifiable disallowIdentity(ODocument iDocument, String iAllowFieldName, OIdentifiable iId) {
return null;
}
@Override
public void createClassTrigger() {
}
}
| 1no label
|
core_src_main_java_com_orientechnologies_orient_core_metadata_security_OSecurityNull.java
|
405 |
class DumMigrationListener implements MigrationListener {
@Override
public void migrationStarted(MigrationEvent migrationEvent) {
}
@Override
public void migrationCompleted(MigrationEvent migrationEvent) {
}
@Override
public void migrationFailed(MigrationEvent migrationEvent) {
}
}
| 0true
|
hazelcast-client_src_test_java_com_hazelcast_client_partitionservice_PartitionServiceProxyTest.java
|
1,243 |
public class CeylonProjectModulesContainer implements IClasspathContainer {
public static final String CONTAINER_ID = PLUGIN_ID + ".cpcontainer.CEYLON_CONTAINER";
private IClasspathEntry[] classpathEntries;
private IPath path;
//private String jdtVersion;
private IJavaProject javaProject;
private Set<String> modulesWithSourcesAlreadySearched = synchronizedSet(new HashSet<String>());
public IJavaProject getJavaProject() {
return javaProject;
}
public IClasspathAttribute[] getAttributes() {
return attributes;
}
/**
* attributes attached to the container but not Ceylon related (Webtools or AspectJ, for instance)
*/
private IClasspathAttribute[] attributes = new IClasspathAttribute[0];
public CeylonProjectModulesContainer(IJavaProject javaProject, IPath path,
IClasspathEntry[] classpathEntries, IClasspathAttribute[] attributes) {
this.path = path;
this.attributes = attributes;
this.classpathEntries = classpathEntries;
this.javaProject = javaProject;
}
public CeylonProjectModulesContainer(IProject project) {
javaProject = JavaCore.create(project);
path = new Path(CeylonProjectModulesContainer.CONTAINER_ID + "/default");
classpathEntries = new IClasspathEntry[0];
attributes = new IClasspathAttribute[0];
}
public CeylonProjectModulesContainer(CeylonProjectModulesContainer cp) {
path = cp.path;
javaProject = cp.javaProject;
classpathEntries = cp.classpathEntries;
attributes = cp.attributes;
modulesWithSourcesAlreadySearched = cp.modulesWithSourcesAlreadySearched;
}
public String getDescription() {
return "Ceylon Project Modules";
}
public int getKind() {
return K_APPLICATION;
}
public IPath getPath() {
return path;
}
public IClasspathEntry[] getClasspathEntries() {
return classpathEntries;
}
public IClasspathEntry addNewClasspathEntryIfNecessary(IPath modulePath) {
synchronized (classpathEntries) {
for (IClasspathEntry cpEntry : classpathEntries) {
if (cpEntry.getPath().equals(modulePath)) {
return null;
}
}
IClasspathEntry newEntry = newLibraryEntry(modulePath, null, null);
IClasspathEntry[] newClasspathEntries = new IClasspathEntry[classpathEntries.length + 1];
if (classpathEntries.length > 0) {
System.arraycopy(classpathEntries, 0, newClasspathEntries, 0, classpathEntries.length);
}
newClasspathEntries[classpathEntries.length] = newEntry;
classpathEntries = newClasspathEntries;
return newEntry;
}
}
/*private static final ISchedulingRule RESOLVE_EVENT_RULE = new ISchedulingRule() {
public boolean contains(ISchedulingRule rule) {
return rule == this;
}
public boolean isConflicting(ISchedulingRule rule) {
return rule == this;
}
};*/
public void runReconfigure() {
modulesWithSourcesAlreadySearched.clear();
Job job = new Job("Resolving dependencies for project " +
getJavaProject().getElementName()) {
@Override
protected IStatus run(IProgressMonitor monitor) {
final IProject project = javaProject.getProject();
try {
final IClasspathEntry[] classpath = constructModifiedClasspath(javaProject);
javaProject.setRawClasspath(classpath, monitor);
boolean changed = resolveClasspath(monitor, false);
if(changed) {
refreshClasspathContainer(monitor);
}
// Rebuild the project :
// - without referenced projects
// - with referencing projects
// - and force the rebuild even if the model is already typechecked
Job job = new BuildProjectAfterClasspathChangeJob("Rebuild of project " +
project.getName(), project, false, true, true);
job.setRule(project.getWorkspace().getRoot());
job.schedule(3000);
job.setPriority(Job.BUILD);
return Status.OK_STATUS;
}
catch (CoreException e) {
e.printStackTrace();
return new Status(IStatus.ERROR, PLUGIN_ID,
"could not resolve dependencies", e);
}
}
};
job.setUser(false);
job.setPriority(Job.BUILD);
job.setRule(getWorkspace().getRoot());
job.schedule();
}
private IClasspathEntry[] constructModifiedClasspath(IJavaProject javaProject)
throws JavaModelException {
IClasspathEntry newEntry = JavaCore.newContainerEntry(path, null,
new IClasspathAttribute[0], false);
IClasspathEntry[] entries = javaProject.getRawClasspath();
List<IClasspathEntry> newEntries = new ArrayList<IClasspathEntry>(asList(entries));
int index = 0;
boolean mustReplace = false;
for (IClasspathEntry entry: newEntries) {
if (entry.getPath().equals(newEntry.getPath()) ) {
mustReplace = true;
break;
}
index++;
}
if (mustReplace) {
newEntries.set(index, newEntry);
}
else {
newEntries.add(newEntry);
}
return (IClasspathEntry[]) newEntries.toArray(new IClasspathEntry[newEntries.size()]);
}
void notifyUpdateClasspathEntries() {
// Changes to resolved classpath are not announced by JDT Core
// and so PackageExplorer does not properly refresh when we update
// the classpath container.
// See https://bugs.eclipse.org/bugs/show_bug.cgi?id=154071
DeltaProcessingState s = JavaModelManager.getJavaModelManager().deltaState;
synchronized (s) {
IElementChangedListener[] listeners = s.elementChangedListeners;
for (int i = 0; i < listeners.length; i++) {
if (listeners[i] instanceof PackageExplorerContentProvider) {
JavaElementDelta delta = new JavaElementDelta(javaProject);
delta.changed(IJavaElementDelta.F_RESOLVED_CLASSPATH_CHANGED);
listeners[i].elementChanged(new ElementChangedEvent(delta,
ElementChangedEvent.POST_CHANGE));
}
}
}
//I've disabled this because I don't really like having it, but
//it does seem to help with the issue of archives appearing
//empty in the package manager
/*try {
javaProject.getProject().refreshLocal(IResource.DEPTH_ONE, null);
}
catch (CoreException e) {
e.printStackTrace();
}*/
}
/**
* Resolves the classpath entries for this container.
* @param monitor
* @param reparse
* @return true if the classpath was changed, false otherwise.
*/
public boolean resolveClasspath(IProgressMonitor monitor, boolean reparse) {
IJavaProject javaProject = getJavaProject();
IProject project = javaProject.getProject();
try {
TypeChecker typeChecker = null;
if (!reparse) {
typeChecker = getProjectTypeChecker(project);
}
IClasspathEntry[] oldEntries = classpathEntries;
if (typeChecker==null) {
IClasspathEntry explodeFolderEntry = null;
if (oldEntries != null) {
for (IClasspathEntry entry : oldEntries) {
if (entry.getPath() != null && entry.getPath().equals(getCeylonClassesOutputFolder(project).getFullPath())) {
explodeFolderEntry = entry;
break;
}
}
}
IClasspathEntry[] resetEntries = explodeFolderEntry == null ?
new IClasspathEntry[] {} :
new IClasspathEntry[] {explodeFolderEntry};
JavaCore.setClasspathContainer(getPath(),
new IJavaProject[]{javaProject},
new IClasspathContainer[]{ new CeylonProjectModulesContainer(javaProject, getPath(), resetEntries, attributes)} , monitor);
typeChecker = parseCeylonModel(project, monitor);
}
IFolder explodedModulesFolder = getCeylonClassesOutputFolder(project);
if (isExplodeModulesEnabled(project)) {
if (!explodedModulesFolder.exists()) {
CoreUtility.createDerivedFolder(explodedModulesFolder, true, true, monitor);
} else {
if (!explodedModulesFolder.isDerived()) {
explodedModulesFolder.setDerived(true, monitor);
}
}
}
else {
if (explodedModulesFolder.exists()) {
explodedModulesFolder.delete(true, monitor);
}
}
final Collection<IClasspathEntry> paths = findModuleArchivePaths(
javaProject, project, typeChecker);
CeylonProjectModulesContainer currentContainer = (CeylonProjectModulesContainer) JavaCore.getClasspathContainer(path, javaProject);
if (oldEntries == null ||
oldEntries != currentContainer.classpathEntries ||
!paths.equals(asList(oldEntries))) {
this.classpathEntries = paths.toArray(new IClasspathEntry[paths.size()]);
return true;
}
}
catch (CoreException e) {
e.printStackTrace();
}
return false;
}
public void refreshClasspathContainer(IProgressMonitor monitor) throws JavaModelException {
IJavaProject javaProject = getJavaProject();
setClasspathContainer(path, new IJavaProject[] { javaProject },
new IClasspathContainer[] {new CeylonProjectModulesContainer(this)}, new SubProgressMonitor(monitor, 1));
JDTModelLoader modelLoader = CeylonBuilder.getProjectModelLoader(javaProject.getProject());
if (modelLoader != null) {
modelLoader.refreshNameEnvironment();
}
//update the package manager UI
new Job("update package manager") {
@Override
protected IStatus run(IProgressMonitor monitor) {
notifyUpdateClasspathEntries();
return Status.OK_STATUS;
}
}.schedule();
}
private Collection<IClasspathEntry> findModuleArchivePaths(
IJavaProject javaProject, IProject project, TypeChecker typeChecker)
throws JavaModelException, CoreException {
final Map<String, IClasspathEntry> paths = new TreeMap<String, IClasspathEntry>();
Context context = typeChecker.getContext();
RepositoryManager provider = context.getRepositoryManager();
Set<Module> modulesToAdd = context.getModules().getListOfModules();
//modulesToAdd.add(projectModules.getLanguageModule());
for (Module module: modulesToAdd) {
JDTModule jdtModule = (JDTModule) module;
String name = module.getNameAsString();
if (name.equals(Module.DEFAULT_MODULE_NAME) ||
JDKUtils.isJDKModule(name) ||
JDKUtils.isOracleJDKModule(name) ||
module.equals(module.getLanguageModule()) ||
isProjectModule(javaProject, module) ||
! module.isAvailable()) {
continue;
}
IPath modulePath = getModuleArchive(provider, jdtModule);
if (modulePath!=null) {
IPath srcPath = null;
for (IProject p: project.getReferencedProjects()) {
if (p.isAccessible()
&& p.getLocation().isPrefixOf(modulePath)) {
//the module belongs to a referenced
//project, so use the project source
srcPath = p.getLocation();
break;
}
}
if (srcPath==null) {
for (IClasspathEntry entry : classpathEntries) {
if (entry.getPath().equals(modulePath)) {
srcPath = entry.getSourceAttachmentPath();
break;
}
}
}
if (srcPath==null &&
!modulesWithSourcesAlreadySearched.contains(module.toString())) {
//otherwise, use the src archive
srcPath = getSourceArchive(provider, jdtModule);
}
modulesWithSourcesAlreadySearched.add(module.toString());
IClasspathEntry newEntry = newLibraryEntry(modulePath, srcPath, null);
paths.put(newEntry.toString(), newEntry);
}
else {
// FIXME: ideally we should find the module.java file and put the marker there, but
// I've no idea how to find it and which import is the cause of the import problem
// as it could be transitive
IMarker marker = project.createMarker(IJavaModelMarker.BUILDPATH_PROBLEM_MARKER);
marker.setAttribute(IMarker.MESSAGE, "no module archive found for classpath container: " +
module.getNameAsString() + "/" + module.getVersion());
marker.setAttribute(IMarker.PRIORITY, IMarker.PRIORITY_HIGH);
marker.setAttribute(IMarker.SEVERITY, IMarker.SEVERITY_ERROR);
}
}
if (isExplodeModulesEnabled(project)) {
IClasspathEntry newEntry = newLibraryEntry(getCeylonClassesOutputFolder(project).getFullPath(),
project.getFullPath(), null, false);
paths.put(newEntry.toString(), newEntry);
}
return asList(paths.values().toArray(new IClasspathEntry[paths.size()]));
}
public static File getSourceArtifact(RepositoryManager provider,
JDTModule module) {
String sourceArchivePath = module.getSourceArchivePath();
if (sourceArchivePath == null) {
return null;
}
File sourceArchive = new File(sourceArchivePath);
if (sourceArchive.exists()) {
return sourceArchive;
}
// BEWARE : here the request to the provider is done in 2 steps, because if
// we do this in a single step, the Aether repo might return the .jar
// archive as a default result when not finding it with the .src extension.
// In this case it will not try the second extension (-sources.jar).
String suffix = module.getArtifactType().equals(ArtifactResultType.MAVEN) ?
ArtifactContext.MAVEN_SRC : ArtifactContext.SRC;
ArtifactContext ctx = new ArtifactContext(module.getNameAsString(),
module.getVersion(), suffix);
File srcArtifact = provider.getArtifact(ctx);
if (srcArtifact!=null) {
if (srcArtifact.getPath().endsWith(suffix)) {
return srcArtifact;
}
}
return null;
}
public static IPath getSourceArchive(RepositoryManager provider,
JDTModule module) {
File srcArtifact = getSourceArtifact(provider, module);
if (srcArtifact!=null) {
return new Path(srcArtifact.getPath());
}
return null;
}
public static File getModuleArtifact(RepositoryManager provider,
JDTModule module) {
File moduleFile = module.getArtifact();
if (moduleFile == null) {
return null;
}
if (moduleFile.exists()) {
return moduleFile;
}
// Shouldn't need to execute this anymore!
// We already retrieved this information in the ModuleVisitor.
// This should be a performance gain.
ArtifactContext ctx = new ArtifactContext(module.getNameAsString(),
module.getVersion(), ArtifactContext.CAR);
// try first with .car
File moduleArtifact = provider.getArtifact(ctx);
if (moduleArtifact==null){
// try with .jar
ctx = new ArtifactContext(module.getNameAsString(),
module.getVersion(), ArtifactContext.JAR);
moduleArtifact = provider.getArtifact(ctx);
}
return moduleArtifact;
}
public static IPath getModuleArchive(RepositoryManager provider,
JDTModule module) {
File moduleArtifact = getModuleArtifact(provider, module);
if (moduleArtifact!=null) {
return new Path(moduleArtifact.getPath());
}
return null;
}
public static boolean isProjectModule(IJavaProject javaProject, Module module)
throws JavaModelException {
boolean isSource=false;
for (IPackageFragmentRoot s: javaProject.getPackageFragmentRoots()) {
if (s.exists()
&& javaProject.isOnClasspath(s)
&& s.getKind()==IPackageFragmentRoot.K_SOURCE
&& s.getPackageFragment(module.getNameAsString()).exists()) {
isSource=true;
break;
}
}
return isSource;
}
}
| 1no label
|
plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_core_classpath_CeylonProjectModulesContainer.java
|
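The two-step lookup in getSourceArtifact above (first the .src extension, then letting Aether fall back to -sources.jar) feeds the static helpers getModuleArchive and getSourceArchive. A hedged usage sketch of turning a resolved module into a JDT library entry with attached sources; the variable wiring is assumed, only the helper and JavaCore calls come from the class above:

// repositoryManager and jdtModule are assumed to be already resolved by the caller
IPath modulePath = CeylonProjectModulesContainer.getModuleArchive(repositoryManager, jdtModule);
if (modulePath != null) {
    // may be null if no source archive exists; JDT accepts a null source attachment
    IPath sourcePath = CeylonProjectModulesContainer.getSourceArchive(repositoryManager, jdtModule);
    IClasspathEntry entry = JavaCore.newLibraryEntry(modulePath, sourcePath, null);
}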
438 |
public enum UnspecifiedBooleanType {
TRUE,FALSE,UNSPECIFIED
}
| 0true
|
common_src_main_java_org_broadleafcommerce_common_presentation_client_UnspecifiedBooleanType.java
|
776 |
public class CollectionRollbackOperation extends CollectionBackupAwareOperation {
private long itemId;
private boolean removeOperation;
public CollectionRollbackOperation() {
}
public CollectionRollbackOperation(String name, long itemId, boolean removeOperation) {
super(name);
this.itemId = itemId;
this.removeOperation = removeOperation;
}
@Override
public boolean shouldBackup() {
return true;
}
@Override
public Operation getBackupOperation() {
return new CollectionRollbackBackupOperation(name, itemId, removeOperation);
}
@Override
public int getId() {
return CollectionDataSerializerHook.COLLECTION_ROLLBACK;
}
@Override
public void beforeRun() throws Exception {
}
@Override
public void run() throws Exception {
if (removeOperation) {
getOrCreateContainer().rollbackRemove(itemId);
} else {
getOrCreateContainer().rollbackAdd(itemId);
}
}
@Override
public void afterRun() throws Exception {
}
@Override
protected void writeInternal(ObjectDataOutput out) throws IOException {
super.writeInternal(out);
out.writeLong(itemId);
out.writeBoolean(removeOperation);
}
@Override
protected void readInternal(ObjectDataInput in) throws IOException {
super.readInternal(in);
itemId = in.readLong();
removeOperation = in.readBoolean();
}
}
| 0true
|
hazelcast_src_main_java_com_hazelcast_collection_txn_CollectionRollbackOperation.java
|
125 |
public interface PageService {
/**
* Returns the page with the passed in id.
*
* @param pageId - The id of the page.
* @return The associated page.
*/
public Page findPageById(Long pageId);
/**
* Returns the page template with the passed in id.
*
* @param id - the id of the page template
* @return The associated page template.
*/
public PageTemplate findPageTemplateById(Long id);
/**
* Saves the given {@link PageTemplate}
*
* @param template the {@link PageTemplate} to save
* @return the database-saved {@link PageTemplate}
*/
public PageTemplate savePageTemplate(PageTemplate template);
/**
* Returns the page fields associated with the passed in page id.
* This is preferred over direct access from Page so that the
* two items can be cached distinctly.
*
* @param pageId - The id of the page.
* @return The page fields associated with the given page id.
*/
public Map<String,PageField> findPageFieldsByPageId(Long pageId);
/**
* This method is intended to be called from within the CMS
* admin only.
*
* Adds the passed in page to the DB.
*
* Creates a sandbox/site if one doesn't already exist.
*/
public Page addPage(Page page, SandBox destinationSandbox);
/**
* This method is intended to be called from within the CMS
* admin only.
*
* Updates the page according to the following rules:
*
* 1. If sandbox has changed from null to a value
* This means that the user is editing an item in production and
* the edit is taking place in a sandbox.
*
* Clone the page and add it to the new sandbox and set the cloned
* page's originalPageId to the id of the page being updated.
*
* 2. If the sandbox has changed from one value to another
* This means that the user is moving the item from one sandbox
* to another.
*
* Update the siteId for the page to the one associated with the
* new sandbox
*
* 3. If the sandbox has changed from a value to null
* This means that the item is moving from the sandbox to production.
*
* If the page has an originalPageId, then update that page by
* setting its archived flag to true.
*
* Then, update the siteId of the page being updated to be the
* siteId of the original page.
*
* 4. If the sandbox is the same then just update the page.
*/
public Page updatePage(Page page, SandBox sandbox);
/**
* Looks up the page from the backend datastore. Processes the page's fields to
* fix the URL if the site has overridden the URL for images. If secure is true
* and images are being overridden, the system will use https.
*
* @param currentSandbox - current sandbox
* @param locale - current locale
* @param uri - the URI to return a page for
* @param ruleDTOs - ruleDTOs that are used as the data to process page rules
* @param secure - set to true if current request is over HTTPS
* @return
*/
public PageDTO findPageByURI(SandBox currentSandbox, Locale locale, String uri, Map<String,Object> ruleDTOs, boolean secure);
/**
* If deleting an item where page.originalPageId != null
* then the item is deleted from the database.
*
* If the originalPageId is null, then this method marks
* the item as deleted within the passed in sandbox.
*
* @param page
* @param destinationSandbox
*/
public void deletePage(Page page, SandBox destinationSandbox);
public List<Page> findPages(SandBox sandBox, Criteria criteria);
/**
* Returns all pages, regardless of any sandbox they are a part of
* @return all {@link Page}s configured in the system
*/
public List<Page> readAllPages();
/**
* Returns all page templates, regardless of any sandbox they are a part of
* @return all {@link PageTemplate}s configured in the system
*/
public List<PageTemplate> readAllPageTemplates();
public Long countPages(SandBox sandBox, Criteria criteria);
/**
* Call to evict both secure and non-secure pages matching
* the passed in key.
*
* @param baseKey
*/
public void removePageFromCache(String baseKey);
public List<ArchivedPagePublisher> getArchivedPageListeners();
public void setArchivedPageListeners(List<ArchivedPagePublisher> archivedPageListeners);
public boolean isAutomaticallyApproveAndPromotePages();
public void setAutomaticallyApproveAndPromotePages(boolean automaticallyApproveAndPromotePages);
}
| 0true
|
admin_broadleaf-contentmanagement-module_src_main_java_org_broadleafcommerce_cms_page_service_PageService.java
|
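The updatePage javadoc above spells out four sandbox transition rules. A rough sketch of a caller driving rule 1 (edit a production page inside a user sandbox) followed by rule 3 (promote to production); the class, method, and the package names in the imports are assumptions based on the file path above, not code from the module:

import org.broadleafcommerce.cms.page.domain.Page;
import org.broadleafcommerce.cms.page.service.PageService;
import org.broadleafcommerce.common.sandbox.domain.SandBox;

public class PageUpdateFlowExample {

    public Page editAndPromote(PageService pageService, Long pageId, SandBox userSandbox) {
        Page page = pageService.findPageById(pageId);

        // rule 1: editing a production item in a sandbox clones it and records
        // the original page id on the clone
        Page sandboxCopy = pageService.updatePage(page, userSandbox);

        // rule 3: a null sandbox promotes the item to production, archiving the
        // original page the clone was created from
        return pageService.updatePage(sandboxCopy, null);
    }
}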
1,322 |
return new PortableFactory() {
@Override
public Portable create(int classId) {
switch (classId) {
case IS_SHUTDOWN_REQUEST:
return new IsShutdownRequest();
case CANCELLATION_REQUEST:
return new CancellationRequest();
case TARGET_CALLABLE_REQUEST:
return new TargetCallableRequest();
case PARTITION_CALLABLE_REQUEST:
return new PartitionCallableRequest();
default:
return null;
}
}
};
| 1no label
|
hazelcast_src_main_java_com_hazelcast_executor_ExecutorPortableHook.java
|
223 |
public class OrientShutdownHook extends Thread {
protected OrientShutdownHook() {
Runtime.getRuntime().addShutdownHook(this);
}
/**
* Shutdown Orient engine.
*/
@Override
public void run() {
Orient.instance().shutdown();
}
public void cancel() {
try {
Runtime.getRuntime().removeShutdownHook(this);
} catch (IllegalStateException e) {
}
}
}
| 0true
|
core_src_main_java_com_orientechnologies_orient_core_OrientShutdownHook.java
|
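OrientShutdownHook registers itself with the JVM in its constructor and, in cancel(), swallows the IllegalStateException that removeShutdownHook throws once shutdown has already begun. A minimal stand-alone version of the same pattern using only the JDK; the class name and the Runnable-based cleanup are illustrative:

public class ResourceShutdownHook extends Thread {

    private final Runnable cleanup;

    public ResourceShutdownHook(Runnable cleanup) {
        this.cleanup = cleanup;
        // register with the JVM, mirroring OrientShutdownHook's constructor
        Runtime.getRuntime().addShutdownHook(this);
    }

    @Override
    public void run() {
        cleanup.run();
    }

    public void cancel() {
        try {
            Runtime.getRuntime().removeShutdownHook(this);
        } catch (IllegalStateException e) {
            // shutdown already in progress; nothing to unregister
        }
    }
}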
566 |
private class SqlFileFilter implements FilenameFilter {
@Override
public boolean accept(File dir, String name) {
return name.endsWith(".sql");
}
}
| 0true
|
common_src_main_java_org_broadleafcommerce_common_util_sql_HibernateToolTask.java
|
698 |
public class BulkRequestTests extends ElasticsearchTestCase {
@Test
public void testSimpleBulk1() throws Exception {
String bulkAction = copyToStringFromClasspath("/org/elasticsearch/action/bulk/simple-bulk.json");
// translate Windows line endings (\r\n) to standard ones (\n)
if (Constants.WINDOWS) {
bulkAction = Strings.replace(bulkAction, "\r\n", "\n");
}
BulkRequest bulkRequest = new BulkRequest();
bulkRequest.add(bulkAction.getBytes(Charsets.UTF_8), 0, bulkAction.length(), true, null, null);
assertThat(bulkRequest.numberOfActions(), equalTo(3));
assertThat(((IndexRequest) bulkRequest.requests().get(0)).source().toBytes(), equalTo(new BytesArray("{ \"field1\" : \"value1\" }").toBytes()));
assertThat(bulkRequest.requests().get(1), instanceOf(DeleteRequest.class));
assertThat(((IndexRequest) bulkRequest.requests().get(2)).source().toBytes(), equalTo(new BytesArray("{ \"field1\" : \"value3\" }").toBytes()));
}
@Test
public void testSimpleBulk2() throws Exception {
String bulkAction = copyToStringFromClasspath("/org/elasticsearch/action/bulk/simple-bulk2.json");
BulkRequest bulkRequest = new BulkRequest();
bulkRequest.add(bulkAction.getBytes(Charsets.UTF_8), 0, bulkAction.length(), true, null, null);
assertThat(bulkRequest.numberOfActions(), equalTo(3));
}
@Test
public void testSimpleBulk3() throws Exception {
String bulkAction = copyToStringFromClasspath("/org/elasticsearch/action/bulk/simple-bulk3.json");
BulkRequest bulkRequest = new BulkRequest();
bulkRequest.add(bulkAction.getBytes(Charsets.UTF_8), 0, bulkAction.length(), true, null, null);
assertThat(bulkRequest.numberOfActions(), equalTo(3));
}
@Test
public void testSimpleBulk4() throws Exception {
String bulkAction = copyToStringFromClasspath("/org/elasticsearch/action/bulk/simple-bulk4.json");
BulkRequest bulkRequest = new BulkRequest();
bulkRequest.add(bulkAction.getBytes(Charsets.UTF_8), 0, bulkAction.length(), true, null, null);
assertThat(bulkRequest.numberOfActions(), equalTo(4));
assertThat(((UpdateRequest) bulkRequest.requests().get(0)).id(), equalTo("1"));
assertThat(((UpdateRequest) bulkRequest.requests().get(0)).retryOnConflict(), equalTo(2));
assertThat(((UpdateRequest) bulkRequest.requests().get(0)).doc().source().toUtf8(), equalTo("{\"field\":\"value\"}"));
assertThat(((UpdateRequest) bulkRequest.requests().get(1)).id(), equalTo("0"));
assertThat(((UpdateRequest) bulkRequest.requests().get(1)).type(), equalTo("type1"));
assertThat(((UpdateRequest) bulkRequest.requests().get(1)).index(), equalTo("index1"));
assertThat(((UpdateRequest) bulkRequest.requests().get(1)).script(), equalTo("counter += param1"));
assertThat(((UpdateRequest) bulkRequest.requests().get(1)).scriptLang(), equalTo("js"));
assertThat(((UpdateRequest) bulkRequest.requests().get(1)).scriptParams().size(), equalTo(1));
assertThat(((Integer) ((UpdateRequest) bulkRequest.requests().get(1)).scriptParams().get("param1")), equalTo(1));
assertThat(((UpdateRequest) bulkRequest.requests().get(1)).upsertRequest().source().toUtf8(), equalTo("{\"counter\":1}"));
}
@Test
public void testBulkAllowExplicitIndex() throws Exception {
String bulkAction = copyToStringFromClasspath("/org/elasticsearch/action/bulk/simple-bulk.json");
try {
new BulkRequest().add(new BytesArray(bulkAction.getBytes(Charsets.UTF_8)), true, null, null, false);
fail();
} catch (Exception e) {
}
bulkAction = copyToStringFromClasspath("/org/elasticsearch/action/bulk/simple-bulk5.json");
new BulkRequest().add(new BytesArray(bulkAction.getBytes(Charsets.UTF_8)), true, "test", null, false);
}
}
| 0true
|
src_test_java_org_elasticsearch_action_bulk_BulkRequestTests.java
|
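The bulk fixtures loaded from the classpath above are newline-delimited action/source pairs. A hedged reconstruction of what a three-action payload like simple-bulk.json typically contains; the index, type, and id values are invented, and only the two index sources plus the delete in the middle follow the assertions in testSimpleBulk1:

// fragment: assumes the same BulkRequest and Charsets imports as the test class above
String bulkPayload =
        "{ \"index\" : { \"_index\" : \"test\", \"_type\" : \"type1\", \"_id\" : \"1\" } }\n" +
        "{ \"field1\" : \"value1\" }\n" +
        "{ \"delete\" : { \"_index\" : \"test\", \"_type\" : \"type1\", \"_id\" : \"2\" } }\n" +
        "{ \"index\" : { \"_index\" : \"test\", \"_type\" : \"type1\", \"_id\" : \"3\" } }\n" +
        "{ \"field1\" : \"value3\" }\n";

// same parsing entry point the tests call above
BulkRequest bulkRequest = new BulkRequest();
bulkRequest.add(bulkPayload.getBytes(Charsets.UTF_8), 0, bulkPayload.length(), true, null, null);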
2,591 |
public class MasterFaultDetection extends AbstractComponent {
public static interface Listener {
void onMasterFailure(DiscoveryNode masterNode, String reason);
void onDisconnectedFromMaster();
}
private final ThreadPool threadPool;
private final TransportService transportService;
private final DiscoveryNodesProvider nodesProvider;
private final CopyOnWriteArrayList<Listener> listeners = new CopyOnWriteArrayList<Listener>();
private final boolean connectOnNetworkDisconnect;
private final TimeValue pingInterval;
private final TimeValue pingRetryTimeout;
private final int pingRetryCount;
// used mainly for testing, should always be true
private final boolean registerConnectionListener;
private final FDConnectionListener connectionListener;
private volatile MasterPinger masterPinger;
private final Object masterNodeMutex = new Object();
private volatile DiscoveryNode masterNode;
private volatile int retryCount;
private final AtomicBoolean notifiedMasterFailure = new AtomicBoolean();
public MasterFaultDetection(Settings settings, ThreadPool threadPool, TransportService transportService, DiscoveryNodesProvider nodesProvider) {
super(settings);
this.threadPool = threadPool;
this.transportService = transportService;
this.nodesProvider = nodesProvider;
this.connectOnNetworkDisconnect = componentSettings.getAsBoolean("connect_on_network_disconnect", true);
this.pingInterval = componentSettings.getAsTime("ping_interval", timeValueSeconds(1));
this.pingRetryTimeout = componentSettings.getAsTime("ping_timeout", timeValueSeconds(30));
this.pingRetryCount = componentSettings.getAsInt("ping_retries", 3);
this.registerConnectionListener = componentSettings.getAsBoolean("register_connection_listener", true);
logger.debug("[master] uses ping_interval [{}], ping_timeout [{}], ping_retries [{}]", pingInterval, pingRetryTimeout, pingRetryCount);
this.connectionListener = new FDConnectionListener();
if (registerConnectionListener) {
transportService.addConnectionListener(connectionListener);
}
transportService.registerHandler(MasterPingRequestHandler.ACTION, new MasterPingRequestHandler());
}
public DiscoveryNode masterNode() {
return this.masterNode;
}
public void addListener(Listener listener) {
listeners.add(listener);
}
public void removeListener(Listener listener) {
listeners.remove(listener);
}
public void restart(DiscoveryNode masterNode, String reason) {
synchronized (masterNodeMutex) {
if (logger.isDebugEnabled()) {
logger.debug("[master] restarting fault detection against master [{}], reason [{}]", masterNode, reason);
}
innerStop();
innerStart(masterNode);
}
}
public void start(final DiscoveryNode masterNode, String reason) {
synchronized (masterNodeMutex) {
if (logger.isDebugEnabled()) {
logger.debug("[master] starting fault detection against master [{}], reason [{}]", masterNode, reason);
}
innerStart(masterNode);
}
}
private void innerStart(final DiscoveryNode masterNode) {
this.masterNode = masterNode;
this.retryCount = 0;
this.notifiedMasterFailure.set(false);
// try and connect to make sure we are connected
try {
transportService.connectToNode(masterNode);
} catch (final Exception e) {
// notify master failure (which stops also) and bail..
notifyMasterFailure(masterNode, "failed to perform initial connect [" + e.getMessage() + "]");
return;
}
if (masterPinger != null) {
masterPinger.stop();
}
this.masterPinger = new MasterPinger();
// start the ping process
threadPool.schedule(pingInterval, ThreadPool.Names.SAME, masterPinger);
}
public void stop(String reason) {
synchronized (masterNodeMutex) {
if (masterNode != null) {
if (logger.isDebugEnabled()) {
logger.debug("[master] stopping fault detection against master [{}], reason [{}]", masterNode, reason);
}
}
innerStop();
}
}
private void innerStop() {
// also will stop the next ping schedule
this.retryCount = 0;
if (masterPinger != null) {
masterPinger.stop();
masterPinger = null;
}
this.masterNode = null;
}
public void close() {
stop("closing");
this.listeners.clear();
transportService.removeConnectionListener(connectionListener);
transportService.removeHandler(MasterPingRequestHandler.ACTION);
}
private void handleTransportDisconnect(DiscoveryNode node) {
synchronized (masterNodeMutex) {
if (!node.equals(this.masterNode)) {
return;
}
if (connectOnNetworkDisconnect) {
try {
transportService.connectToNode(node);
// if all is well, make sure we restart the pinger
if (masterPinger != null) {
masterPinger.stop();
}
this.masterPinger = new MasterPinger();
threadPool.schedule(pingInterval, ThreadPool.Names.SAME, masterPinger);
} catch (Exception e) {
logger.trace("[master] [{}] transport disconnected (with verified connect)", masterNode);
notifyMasterFailure(masterNode, "transport disconnected (with verified connect)");
}
} else {
logger.trace("[master] [{}] transport disconnected", node);
notifyMasterFailure(node, "transport disconnected");
}
}
}
private void notifyDisconnectedFromMaster() {
threadPool.generic().execute(new Runnable() {
@Override
public void run() {
for (Listener listener : listeners) {
listener.onDisconnectedFromMaster();
}
}
});
}
private void notifyMasterFailure(final DiscoveryNode masterNode, final String reason) {
if (notifiedMasterFailure.compareAndSet(false, true)) {
threadPool.generic().execute(new Runnable() {
@Override
public void run() {
for (Listener listener : listeners) {
listener.onMasterFailure(masterNode, reason);
}
}
});
stop("master failure, " + reason);
}
}
private class FDConnectionListener implements TransportConnectionListener {
@Override
public void onNodeConnected(DiscoveryNode node) {
}
@Override
public void onNodeDisconnected(DiscoveryNode node) {
handleTransportDisconnect(node);
}
}
private class MasterPinger implements Runnable {
private volatile boolean running = true;
public void stop() {
this.running = false;
}
@Override
public void run() {
if (!running) {
// return and don't spawn...
return;
}
final DiscoveryNode masterToPing = masterNode;
if (masterToPing == null) {
// master is null, should not happen, but we are still running, so reschedule
threadPool.schedule(pingInterval, ThreadPool.Names.SAME, MasterPinger.this);
return;
}
transportService.sendRequest(masterToPing, MasterPingRequestHandler.ACTION, new MasterPingRequest(nodesProvider.nodes().localNode().id(), masterToPing.id()), options().withType(TransportRequestOptions.Type.PING).withTimeout(pingRetryTimeout),
new BaseTransportResponseHandler<MasterPingResponseResponse>() {
@Override
public MasterPingResponseResponse newInstance() {
return new MasterPingResponseResponse();
}
@Override
public void handleResponse(MasterPingResponseResponse response) {
if (!running) {
return;
}
// reset the counter, we got a good result
MasterFaultDetection.this.retryCount = 0;
// check if the master node did not get switched on us..., if it did, we simply return with no reschedule
if (masterToPing.equals(MasterFaultDetection.this.masterNode())) {
if (!response.connectedToMaster) {
logger.trace("[master] [{}] does not have us registered with it...", masterToPing);
notifyDisconnectedFromMaster();
}
// we don't stop on disconnection from master, we keep pinging it
threadPool.schedule(pingInterval, ThreadPool.Names.SAME, MasterPinger.this);
}
}
@Override
public void handleException(TransportException exp) {
if (!running) {
return;
}
if (exp instanceof ConnectTransportException) {
// ignore this one, we already handle it by registering a connection listener
return;
}
synchronized (masterNodeMutex) {
// check if the master node did not get switched on us...
if (masterToPing.equals(MasterFaultDetection.this.masterNode())) {
if (exp.getCause() instanceof NoLongerMasterException) {
logger.debug("[master] pinging a master {} that is no longer a master", masterNode);
notifyMasterFailure(masterToPing, "no longer master");
return;
} else if (exp.getCause() instanceof NotMasterException) {
logger.debug("[master] pinging a master {} that is not the master", masterNode);
notifyMasterFailure(masterToPing, "not master");
return;
} else if (exp.getCause() instanceof NodeDoesNotExistOnMasterException) {
logger.debug("[master] pinging a master {} but we do not exists on it, act as if its master failure", masterNode);
notifyMasterFailure(masterToPing, "do not exists on master, act as master failure");
return;
}
int retryCount = ++MasterFaultDetection.this.retryCount;
logger.trace("[master] failed to ping [{}], retry [{}] out of [{}]", exp, masterNode, retryCount, pingRetryCount);
if (retryCount >= pingRetryCount) {
logger.debug("[master] failed to ping [{}], tried [{}] times, each with maximum [{}] timeout", masterNode, pingRetryCount, pingRetryTimeout);
// not good, failure
notifyMasterFailure(masterToPing, "failed to ping, tried [" + pingRetryCount + "] times, each with maximum [" + pingRetryTimeout + "] timeout");
} else {
// resend the request, not reschedule, rely on send timeout
transportService.sendRequest(masterToPing, MasterPingRequestHandler.ACTION, new MasterPingRequest(nodesProvider.nodes().localNode().id(), masterToPing.id()), options().withType(TransportRequestOptions.Type.PING).withTimeout(pingRetryTimeout), this);
}
}
}
}
@Override
public String executor() {
return ThreadPool.Names.SAME;
}
});
}
}
static class NoLongerMasterException extends ElasticsearchIllegalStateException {
@Override
public Throwable fillInStackTrace() {
return null;
}
}
static class NotMasterException extends ElasticsearchIllegalStateException {
@Override
public Throwable fillInStackTrace() {
return null;
}
}
static class NodeDoesNotExistOnMasterException extends ElasticsearchIllegalStateException {
@Override
public Throwable fillInStackTrace() {
return null;
}
}
private class MasterPingRequestHandler extends BaseTransportRequestHandler<MasterPingRequest> {
public static final String ACTION = "discovery/zen/fd/masterPing";
@Override
public MasterPingRequest newInstance() {
return new MasterPingRequest();
}
@Override
public void messageReceived(MasterPingRequest request, TransportChannel channel) throws Exception {
DiscoveryNodes nodes = nodesProvider.nodes();
// check that we really are the master that the sender thinks we are
// this can happen if the master got "kill -9" and then another node started using the same port
if (!request.masterNodeId.equals(nodes.localNodeId())) {
throw new NotMasterException();
}
// if we are no longer master, fail...
if (!nodes.localNodeMaster()) {
throw new NoLongerMasterException();
}
if (!nodes.nodeExists(request.nodeId)) {
throw new NodeDoesNotExistOnMasterException();
}
// send a response, and note if we are connected to the master or not
channel.sendResponse(new MasterPingResponseResponse(nodes.nodeExists(request.nodeId)));
}
@Override
public String executor() {
return ThreadPool.Names.SAME;
}
}
private static class MasterPingRequest extends TransportRequest {
private String nodeId;
private String masterNodeId;
private MasterPingRequest() {
}
private MasterPingRequest(String nodeId, String masterNodeId) {
this.nodeId = nodeId;
this.masterNodeId = masterNodeId;
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
nodeId = in.readString();
masterNodeId = in.readString();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeString(nodeId);
out.writeString(masterNodeId);
}
}
private static class MasterPingResponseResponse extends TransportResponse {
private boolean connectedToMaster;
private MasterPingResponseResponse() {
}
private MasterPingResponseResponse(boolean connectedToMaster) {
this.connectedToMaster = connectedToMaster;
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
connectedToMaster = in.readBoolean();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeBoolean(connectedToMaster);
}
}
}
| 1no label
|
src_main_java_org_elasticsearch_discovery_zen_fd_MasterFaultDetection.java
|
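MasterFaultDetection's MasterPinger reschedules itself after every successful ping and only declares failure after pingRetryCount consecutive errors. A stripped-down sketch of that retry-counting loop with a plain ScheduledExecutorService; the Ping and FailureListener callbacks and the class name are invented for illustration:

import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

public class SimpleMasterPinger {

    /** Invented callback: returns true if the master answered the ping. */
    public interface Ping {
        boolean ping();
    }

    /** Invented callback: invoked once after too many consecutive failures. */
    public interface FailureListener {
        void onMasterFailure(String reason);
    }

    private final ScheduledExecutorService scheduler = Executors.newSingleThreadScheduledExecutor();
    private final Ping ping;
    private final FailureListener listener;
    private final long intervalMillis;
    private final int maxRetries;
    private int retryCount;
    private volatile boolean running = true;

    public SimpleMasterPinger(Ping ping, FailureListener listener, long intervalMillis, int maxRetries) {
        this.ping = ping;
        this.listener = listener;
        this.intervalMillis = intervalMillis;
        this.maxRetries = maxRetries;
    }

    public void start() {
        scheduler.schedule(new Runnable() {
            @Override
            public void run() {
                if (!running) {
                    return;
                }
                if (ping.ping()) {
                    // a good answer resets the counter, like MasterFaultDetection.this.retryCount = 0
                    retryCount = 0;
                } else if (++retryCount >= maxRetries) {
                    // give up after maxRetries consecutive misses and notify once
                    running = false;
                    listener.onMasterFailure("failed to ping master " + retryCount + " times");
                    return;
                }
                // reschedule ourselves, mirroring MasterPinger's self-scheduling
                scheduler.schedule(this, intervalMillis, TimeUnit.MILLISECONDS);
            }
        }, intervalMillis, TimeUnit.MILLISECONDS);
    }

    public void stop() {
        running = false;
        scheduler.shutdownNow();
    }
}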
1,478 |
public class RoutingNodesIterator implements Iterator<RoutingNode>, Iterable<MutableShardRouting> {
private RoutingNode current;
private final Iterator<RoutingNode> delegate;
public RoutingNodesIterator(Iterator<RoutingNode> iterator) {
delegate = iterator;
}
@Override
public boolean hasNext() {
return delegate.hasNext();
}
@Override
public RoutingNode next() {
return current = delegate.next();
}
public RoutingNodeIterator nodeShards() {
return new RoutingNodeIterator(current);
}
@Override
public void remove() {
delegate.remove();
}
@Override
public Iterator<MutableShardRouting> iterator() {
return nodeShards();
}
}
| 0true
|
src_main_java_org_elasticsearch_cluster_routing_RoutingNodes.java
|
292 |
new Thread(new Runnable() {
public void run() {
try {
latch.await(30, TimeUnit.SECONDS);
Thread.sleep(5000);
} catch (InterruptedException e) {
e.printStackTrace();
}
lock.destroy();
}
}).start();
| 0true
|
hazelcast-client_src_test_java_com_hazelcast_client_lock_ClientConditionTest.java
|
5,159 |
public abstract class InternalSingleBucketAggregation extends InternalAggregation implements SingleBucketAggregation {
protected long docCount;
protected InternalAggregations aggregations;
protected InternalSingleBucketAggregation() {} // for serialization
/**
* Creates a single bucket aggregation.
*
* @param name The aggregation name.
* @param docCount The document count in the single bucket.
* @param aggregations The already built sub-aggregations that are associated with the bucket.
*/
protected InternalSingleBucketAggregation(String name, long docCount, InternalAggregations aggregations) {
super(name);
this.docCount = docCount;
this.aggregations = aggregations;
}
@Override
public long getDocCount() {
return docCount;
}
@Override
public InternalAggregations getAggregations() {
return aggregations;
}
@Override
public InternalAggregation reduce(ReduceContext reduceContext) {
List<InternalAggregation> aggregations = reduceContext.aggregations();
if (aggregations.size() == 1) {
InternalSingleBucketAggregation reduced = ((InternalSingleBucketAggregation) aggregations.get(0));
reduced.aggregations.reduce(reduceContext.cacheRecycler());
return reduced;
}
InternalSingleBucketAggregation reduced = null;
List<InternalAggregations> subAggregationsList = new ArrayList<InternalAggregations>(aggregations.size());
for (InternalAggregation aggregation : aggregations) {
if (reduced == null) {
reduced = (InternalSingleBucketAggregation) aggregation;
} else {
this.docCount += ((InternalSingleBucketAggregation) aggregation).docCount;
}
subAggregationsList.add(((InternalSingleBucketAggregation) aggregation).aggregations);
}
reduced.aggregations = InternalAggregations.reduce(subAggregationsList, reduceContext.cacheRecycler());
return reduced;
}
@Override
public void readFrom(StreamInput in) throws IOException {
name = in.readString();
docCount = in.readVLong();
aggregations = InternalAggregations.readAggregations(in);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(name);
out.writeVLong(docCount);
aggregations.writeTo(out);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(name);
builder.field(CommonFields.DOC_COUNT, docCount);
aggregations.toXContentInternal(builder, params);
return builder.endObject();
}
}
| 1no label
|
src_main_java_org_elasticsearch_search_aggregations_bucket_InternalSingleBucketAggregation.java
|
475 |
makeDbCall(databaseDocumentTxTwo, new ODocumentHelper.ODbRelatedCall<Object>() {
public Object call() {
doc2.reset();
doc2.fromStream(buffer2.buffer);
return null;
}
});
| 0true
|
core_src_main_java_com_orientechnologies_orient_core_db_tool_ODatabaseCompare.java
|
3,931 |
public class RangeFilterParser implements FilterParser {
public static final String NAME = "range";
@Inject
public RangeFilterParser() {
}
@Override
public String[] names() {
return new String[]{NAME};
}
@Override
public Filter parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
XContentParser parser = parseContext.parser();
Boolean cache = null;
CacheKeyFilter.Key cacheKey = null;
String fieldName = null;
Object from = null;
Object to = null;
boolean includeLower = true;
boolean includeUpper = true;
String execution = "index";
String filterName = null;
String currentFieldName = null;
XContentParser.Token token;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.START_OBJECT) {
fieldName = currentFieldName;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else {
if ("from".equals(currentFieldName)) {
from = parser.objectBytes();
} else if ("to".equals(currentFieldName)) {
to = parser.objectBytes();
} else if ("include_lower".equals(currentFieldName) || "includeLower".equals(currentFieldName)) {
includeLower = parser.booleanValue();
} else if ("include_upper".equals(currentFieldName) || "includeUpper".equals(currentFieldName)) {
includeUpper = parser.booleanValue();
} else if ("gt".equals(currentFieldName)) {
from = parser.objectBytes();
includeLower = false;
} else if ("gte".equals(currentFieldName) || "ge".equals(currentFieldName)) {
from = parser.objectBytes();
includeLower = true;
} else if ("lt".equals(currentFieldName)) {
to = parser.objectBytes();
includeUpper = false;
} else if ("lte".equals(currentFieldName) || "le".equals(currentFieldName)) {
to = parser.objectBytes();
includeUpper = true;
} else {
throw new QueryParsingException(parseContext.index(), "[range] filter does not support [" + currentFieldName + "]");
}
}
}
} else if (token.isValue()) {
if ("_name".equals(currentFieldName)) {
filterName = parser.text();
} else if ("_cache".equals(currentFieldName)) {
cache = parser.booleanValue();
} else if ("_cache_key".equals(currentFieldName) || "_cacheKey".equals(currentFieldName)) {
cacheKey = new CacheKeyFilter.Key(parser.text());
} else if ("execution".equals(currentFieldName)) {
execution = parser.text();
} else {
throw new QueryParsingException(parseContext.index(), "[range] filter does not support [" + currentFieldName + "]");
}
}
}
if (fieldName == null) {
throw new QueryParsingException(parseContext.index(), "[range] filter no field specified for range filter");
}
Filter filter = null;
MapperService.SmartNameFieldMappers smartNameFieldMappers = parseContext.smartFieldMappers(fieldName);
if (smartNameFieldMappers != null) {
if (smartNameFieldMappers.hasMapper()) {
boolean explicitlyCached = cache != null && cache;
if (execution.equals("index")) {
if (cache == null) {
cache = true;
}
FieldMapper mapper = smartNameFieldMappers.mapper();
if (mapper instanceof DateFieldMapper) {
filter = ((DateFieldMapper) mapper).rangeFilter(from, to, includeLower, includeUpper, parseContext, explicitlyCached);
} else {
filter = mapper.rangeFilter(from, to, includeLower, includeUpper, parseContext);
}
} else if ("fielddata".equals(execution)) {
if (cache == null) {
cache = false;
}
FieldMapper mapper = smartNameFieldMappers.mapper();
if (!(mapper instanceof NumberFieldMapper)) {
throw new QueryParsingException(parseContext.index(), "[range] filter field [" + fieldName + "] is not a numeric type");
}
if (mapper instanceof DateFieldMapper) {
filter = ((DateFieldMapper) mapper).rangeFilter(parseContext.fieldData(), from, to, includeLower, includeUpper, parseContext, explicitlyCached);
} else {
filter = ((NumberFieldMapper) mapper).rangeFilter(parseContext.fieldData(), from, to, includeLower, includeUpper, parseContext);
}
} else {
throw new QueryParsingException(parseContext.index(), "[range] filter doesn't support [" + execution + "] execution");
}
}
}
if (filter == null) {
if (cache == null) {
cache = true;
}
filter = new TermRangeFilter(fieldName, BytesRefs.toBytesRef(from), BytesRefs.toBytesRef(to), includeLower, includeUpper);
}
if (cache) {
filter = parseContext.cacheFilter(filter, cacheKey);
}
filter = wrapSmartNameFilter(filter, smartNameFieldMappers, parseContext);
if (filterName != null) {
parseContext.addNamedFilter(filterName, filter);
}
return filter;
}
}
| 1no label
|
src_main_java_org_elasticsearch_index_query_RangeFilterParser.java
|
392 |
public class ClusterSearchShardsAction extends ClusterAction<ClusterSearchShardsRequest, ClusterSearchShardsResponse, ClusterSearchShardsRequestBuilder> {
public static final ClusterSearchShardsAction INSTANCE = new ClusterSearchShardsAction();
public static final String NAME = "cluster/shards/search_shards";
private ClusterSearchShardsAction() {
super(NAME);
}
@Override
public ClusterSearchShardsResponse newResponse() {
return new ClusterSearchShardsResponse();
}
@Override
public ClusterSearchShardsRequestBuilder newRequestBuilder(ClusterAdminClient client) {
return new ClusterSearchShardsRequestBuilder(client);
}
}
| 0true
|
src_main_java_org_elasticsearch_action_admin_cluster_shards_ClusterSearchShardsAction.java
|
373 |
public static class TestCollator
implements Collator<Map.Entry<String, Integer>, Integer> {
@Override
public Integer collate(Iterable<Map.Entry<String, Integer>> values) {
int sum = 0;
for (Map.Entry<String, Integer> entry : values) {
sum += entry.getValue();
}
return sum;
}
}
| 0true
|
hazelcast-client_src_test_java_com_hazelcast_client_mapreduce_DistributedMapperClientMapReduceTest.java
|
570 |
trackedMap.addChangeListener(new OMultiValueChangeListener<Object, String>() {
public void onAfterRecordChanged(final OMultiValueChangeEvent<Object, String> event) {
firedEvents.add(event);
}
});
| 0true
|
core_src_test_java_com_orientechnologies_orient_core_index_OCompositeIndexDefinitionTest.java
|
76 |
public class ThreadAssociatedWithOtherTransactionException extends IllegalStateException
{
public ThreadAssociatedWithOtherTransactionException( Thread thread, Transaction alreadyAssociatedTx,
Transaction tx )
{
super( "Thread '" + thread.getName() + "' tried to resume " + tx + ", but had already " +
alreadyAssociatedTx + " associated" );
}
}
| 0true
|
community_kernel_src_main_java_org_neo4j_kernel_impl_transaction_ThreadAssociatedWithOtherTransactionException.java
|
739 |
public class CollectionSizeRequest extends CollectionRequest {
public CollectionSizeRequest() {
}
public CollectionSizeRequest(String name) {
super(name);
}
@Override
protected Operation prepareOperation() {
return new CollectionSizeOperation(name);
}
@Override
public int getClassId() {
return CollectionPortableHook.COLLECTION_SIZE;
}
@Override
public String getRequiredAction() {
return ActionConstants.ACTION_READ;
}
}
| 0true
|
hazelcast_src_main_java_com_hazelcast_collection_client_CollectionSizeRequest.java
|
476 |
public class GetAliasesRequest extends MasterNodeReadOperationRequest<GetAliasesRequest> {
private String[] indices = Strings.EMPTY_ARRAY;
private String[] aliases = Strings.EMPTY_ARRAY;
private IndicesOptions indicesOptions = IndicesOptions.strict();
public GetAliasesRequest(String[] aliases) {
this.aliases = aliases;
}
public GetAliasesRequest(String alias) {
this.aliases = new String[]{alias};
}
public GetAliasesRequest() {
}
public GetAliasesRequest indices(String... indices) {
this.indices = indices;
return this;
}
public GetAliasesRequest aliases(String... aliases) {
this.aliases = aliases;
return this;
}
public GetAliasesRequest indicesOptions(IndicesOptions indicesOptions) {
this.indicesOptions = indicesOptions;
return this;
}
public String[] indices() {
return indices;
}
public String[] aliases() {
return aliases;
}
public IndicesOptions indicesOptions() {
return indicesOptions;
}
@Override
public ActionRequestValidationException validate() {
return null;
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
indices = in.readStringArray();
aliases = in.readStringArray();
indicesOptions = IndicesOptions.readIndicesOptions(in);
readLocal(in, Version.V_1_0_0_RC2);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeStringArray(indices);
out.writeStringArray(aliases);
indicesOptions.writeIndicesOptions(out);
writeLocal(out, Version.V_1_0_0_RC2);
}
}
| 0true
|
src_main_java_org_elasticsearch_action_admin_indices_alias_get_GetAliasesRequest.java
|
1,467 |
public class FindRefinementsVisitor extends Visitor implements NaturalVisitor {
private final Declaration declaration;
private Set<Tree.StatementOrArgument> declarationNodes =
new HashSet<Tree.StatementOrArgument>();
public FindRefinementsVisitor(Declaration declaration) {
this.declaration = declaration;
}
public Set<Tree.StatementOrArgument> getDeclarationNodes() {
return declarationNodes;
}
protected boolean isRefinement(Declaration dec) {
return dec!=null && dec.refines(declaration) ||
dec instanceof Setter && ((Setter)dec).getGetter()
.refines(declaration);
}
@Override
public void visit(Tree.SpecifierStatement that) {
if (that.getRefinement() &&
isRefinement(that.getDeclaration())) {
declarationNodes.add(that);
}
super.visit(that);
}
@Override
public void visit(Tree.Declaration that) {
if (isRefinement(that.getDeclarationModel())) {
declarationNodes.add(that);
}
super.visit(that);
}
}
| 1no label
|
plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_util_FindRefinementsVisitor.java
|
157 |
public abstract class AbstractStructuredContentRuleProcessor implements StructuredContentRuleProcessor {
private static final Log LOG = LogFactory.getLog(AbstractStructuredContentRuleProcessor.class);
private Map expressionCache = Collections.synchronizedMap(new LRUMap(1000));
private ParserContext parserContext;
private Map<String, String> contextClassNames = new HashMap<String, String> ();
/**
* Having a parser context that imports the classes speeds MVEL by up to 60%.
* @return
*/
protected ParserContext getParserContext() {
if (parserContext == null) {
parserContext = new ParserContext();
parserContext.addImport("MVEL", MVEL.class);
parserContext.addImport("MvelHelper", MvelHelper.class);
/* Getting errors when the following is in place.
for (String key : contextClassNames.keySet()) {
String className = contextClassNames.get(key);
try {
Class c = Class.forName(className);
parserContext.addImport(key, c);
} catch (ClassNotFoundException e) {
LOG.error("Error resolving classname while setting up MVEL context, rule processing based on the key " + key + " will not be optimized", e);
}
} */
}
return parserContext;
}
/**
* Helpful method for processing a boolean MVEL expression and associated arguments.
*
* Caches the expression in an LRUCache.
* @param expression
* @param vars
* @return the result of the expression
*/
protected Boolean executeExpression(String expression, Map<String, Object> vars) {
Serializable exp = (Serializable) expressionCache.get(expression);
vars.put("MVEL", MVEL.class);
if (exp == null) {
try {
exp = MVEL.compileExpression(expression, getParserContext());
} catch (CompileException ce) {
LOG.warn("Compile exception processing phrase: " + expression,ce);
return Boolean.FALSE;
}
expressionCache.put(expression, exp);
}
try {
return (Boolean) MVEL.executeExpression(exp, vars);
} catch (Exception e) {
LOG.error(e);
}
return false;
}
/**
* List of class names to add to the MVEL ParserContext.
*
* @return
* @see {@link ParserContext}
*/
public Map<String, String> getContextClassNames() {
return contextClassNames;
}
/**
* List of class names to add to the MVEL ParserContext.
*
* @return
* @see {@link ParserContext}
*/
public void setContextClassNames(Map<String, String> contextClassNames) {
this.contextClassNames = contextClassNames;
}
}
| 0true
|
admin_broadleaf-contentmanagement-module_src_main_java_org_broadleafcommerce_cms_structure_service_AbstractStructuredContentRuleProcessor.java
|
517 |
public class OFastConcurrentModificationException extends OConcurrentModificationException {
private static final long serialVersionUID = 1L;
private static final OGlobalConfiguration CONFIG = OGlobalConfiguration.DB_MVCC_THROWFAST;
private static final boolean ENABLED = CONFIG.getValueAsBoolean();
private static final String MESSAGE = "This is a fast-thrown exception. Disable " + CONFIG.getKey() + " to see full exception stacktrace and message.";
private static final OFastConcurrentModificationException INSTANCE = new OFastConcurrentModificationException();
public OFastConcurrentModificationException() {
}
public static boolean enabled() {
return ENABLED;
}
public static OFastConcurrentModificationException instance() {
return INSTANCE;
}
public String getMessage() {
return MESSAGE;
}
}
| 0true
|
core_src_main_java_com_orientechnologies_orient_core_exception_OFastConcurrentModificationException.java
|
175 |
.setCallback(new Callback<Object>() {
public void notify(Object object) {
endpoint.sendResponse(filter(object), getCallId());
}
});
| 0true
|
hazelcast_src_main_java_com_hazelcast_client_TargetClientRequest.java
|
323 |
@RunWith(HazelcastParallelClassRunner.class)
@Category(QuickTest.class)
public class ClientMapTest {
static HazelcastInstance client;
static HazelcastInstance server;
static TestMapStore flushMapStore = new TestMapStore();
static TestMapStore transientMapStore = new TestMapStore();
@BeforeClass
public static void init() {
Config config = new Config();
config.getMapConfig("flushMap").
setMapStoreConfig(new MapStoreConfig()
.setWriteDelaySeconds(1000)
.setImplementation(flushMapStore));
config.getMapConfig("putTransientMap").
setMapStoreConfig(new MapStoreConfig()
.setWriteDelaySeconds(1000)
.setImplementation(transientMapStore));
server = Hazelcast.newHazelcastInstance(config);
client = HazelcastClient.newHazelcastClient(null);
}
public IMap createMap() {
return client.getMap(randomString());
}
@AfterClass
public static void destroy() {
client.shutdown();
Hazelcast.shutdownAll();
}
@Test
public void testIssue537() throws InterruptedException {
final CountDownLatch latch = new CountDownLatch(2);
final CountDownLatch nullLatch = new CountDownLatch(2);
final IMap map = createMap();
final EntryListener listener = new EntryAdapter() {
public void entryAdded(EntryEvent event) {
latch.countDown();
}
public void entryEvicted(EntryEvent event) {
final Object value = event.getValue();
final Object oldValue = event.getOldValue();
if (value != null) {
nullLatch.countDown();
}
if (oldValue != null) {
nullLatch.countDown();
}
latch.countDown();
}
};
final String id = map.addEntryListener(listener, true);
map.put("key1", new GenericEvent("value1"), 2, TimeUnit.SECONDS);
assertTrue(latch.await(10, TimeUnit.SECONDS));
assertTrue(nullLatch.await(1, TimeUnit.SECONDS));
map.removeEntryListener(id);
map.put("key2", new GenericEvent("value2"));
assertEquals(1, map.size());
}
@Test
public void testContains() throws Exception {
final IMap map = createMap();
fillMap(map);
assertFalse(map.containsKey("key10"));
assertTrue(map.containsKey("key1"));
assertFalse(map.containsValue("value10"));
assertTrue(map.containsValue("value1"));
}
@Test
public void testGet() {
final IMap map = createMap();
fillMap(map);
for (int i = 0; i < 10; i++) {
Object o = map.get("key" + i);
assertEquals("value" + i, o);
}
}
@Test
public void testRemoveAndDelete() {
final IMap map = createMap();
fillMap(map);
assertNull(map.remove("key10"));
map.delete("key9");
assertEquals(9, map.size());
for (int i = 0; i < 9; i++) {
Object o = map.remove("key" + i);
assertEquals("value" + i, o);
}
assertEquals(0, map.size());
}
@Test
public void testRemoveIfSame() {
final IMap map = createMap();
fillMap(map);
assertFalse(map.remove("key2", "value"));
assertEquals(10, map.size());
assertTrue(map.remove("key2", "value2"));
assertEquals(9, map.size());
}
@Test
public void testFlush() throws InterruptedException {
flushMapStore.latch = new CountDownLatch(1);
IMap<Object, Object> map = client.getMap("flushMap");
map.put(1l, "value");
map.flush();
assertOpenEventually(flushMapStore.latch, 5);
}
@Test
public void testGetAllPutAll() {
final IMap map = createMap();
Map mm = new HashMap();
for (int i = 0; i < 100; i++) {
mm.put(i, i);
}
map.putAll(mm);
assertEquals(map.size(), 100);
for (int i = 0; i < 100; i++) {
assertEquals(map.get(i), i);
}
Set ss = new HashSet();
ss.add(1);
ss.add(3);
Map m2 = map.getAll(ss);
assertEquals(m2.size(), 2);
assertEquals(m2.get(1), 1);
assertEquals(m2.get(3), 3);
}
@Test
public void testAsyncGet() throws Exception {
final IMap map = createMap();
fillMap(map);
Future f = map.getAsync("key1");
Object o = f.get();
assertEquals("value1", o);
}
@Test
public void testAsyncPut() throws Exception {
final IMap map = createMap();
fillMap(map);
Future f = map.putAsync("key3", "value");
Object o = f.get();
assertEquals("value3", o);
assertEquals("value", map.get("key3"));
}
@Test
public void testAsyncPutWithTtl() throws Exception {
final IMap map = createMap();
final CountDownLatch latch = new CountDownLatch(1);
map.addEntryListener(new EntryAdapter<String, String>() {
public void entryEvicted(EntryEvent<String, String> event) {
latch.countDown();
}
}, true);
Future<String> f1 = map.putAsync("key", "value1", 3, TimeUnit.SECONDS);
String f1Val = f1.get();
assertNull(f1Val);
assertEquals("value1", map.get("key"));
assertTrue(latch.await(10, TimeUnit.SECONDS));
assertNull(map.get("key"));
}
@Test
public void testAsyncRemove() throws Exception {
final IMap map = createMap();
fillMap(map);
Future f = map.removeAsync("key4");
Object o = f.get();
assertEquals("value4", o);
assertEquals(9, map.size());
}
@Test
public void testTryPutRemove() throws Exception {
final IMap map = createMap();
assertTrue(map.tryPut("key1", "value1", 1, TimeUnit.SECONDS));
assertTrue(map.tryPut("key2", "value2", 1, TimeUnit.SECONDS));
map.lock("key1");
map.lock("key2");
final CountDownLatch latch = new CountDownLatch(2);
new Thread() {
public void run() {
boolean result = map.tryPut("key1", "value3", 1, TimeUnit.SECONDS);
if (!result) {
latch.countDown();
}
}
}.start();
new Thread() {
public void run() {
boolean result = map.tryRemove("key2", 1, TimeUnit.SECONDS);
if (!result) {
latch.countDown();
}
}
}.start();
assertTrue(latch.await(20, TimeUnit.SECONDS));
assertEquals("value1", map.get("key1"));
assertEquals("value2", map.get("key2"));
map.forceUnlock("key1");
map.forceUnlock("key2");
}
@Test
public void testPutTtl() throws Exception {
final IMap map = createMap();
map.put("key1", "value1", 1, TimeUnit.SECONDS);
assertNotNull(map.get("key1"));
Thread.sleep(2000);
assertNull(map.get("key1"));
}
@Test
public void testPutIfAbsent() throws Exception {
final IMap map = createMap();
assertNull(map.putIfAbsent("key1", "value1"));
assertEquals("value1", map.putIfAbsent("key1", "value3"));
}
@Test
public void testPutIfAbsentTtl() throws Exception {
final IMap map = createMap();
assertNull(map.putIfAbsent("key1", "value1", 1, TimeUnit.SECONDS));
assertEquals("value1", map.putIfAbsent("key1", "value3", 1, TimeUnit.SECONDS));
Thread.sleep(6000);
assertNull(map.putIfAbsent("key1", "value3", 1, TimeUnit.SECONDS));
assertEquals("value3", map.putIfAbsent("key1", "value4", 1, TimeUnit.SECONDS));
}
@Test
public void testSet() throws Exception {
final IMap map = createMap();
map.set("key1", "value1");
assertEquals("value1", map.get("key1"));
map.set("key1", "value2");
assertEquals("value2", map.get("key1"));
map.set("key1", "value3", 1, TimeUnit.SECONDS);
assertEquals("value3", map.get("key1"));
Thread.sleep(2000);
assertNull(map.get("key1"));
}
@Test
public void testPutTransient() throws InterruptedException {
transientMapStore.latch = new CountDownLatch(1);
IMap<Object, Object> map = client.getMap("putTransientMap");
map.putTransient(3l, "value1", 100, TimeUnit.SECONDS);
map.flush();
assertFalse(transientMapStore.latch.await(5, TimeUnit.SECONDS));
}
@Test
public void testLock() throws Exception {
final IMap map = createMap();
map.put("key1", "value1");
assertEquals("value1", map.get("key1"));
map.lock("key1");
final CountDownLatch latch = new CountDownLatch(1);
new Thread() {
public void run() {
map.tryPut("key1", "value2", 1, TimeUnit.SECONDS);
latch.countDown();
}
}.start();
assertTrue(latch.await(5, TimeUnit.SECONDS));
assertEquals("value1", map.get("key1"));
map.forceUnlock("key1");
}
@Test
public void testLockTtl() throws Exception {
final IMap map = createMap();
map.put("key1", "value1");
assertEquals("value1", map.get("key1"));
map.lock("key1", 2, TimeUnit.SECONDS);
final CountDownLatch latch = new CountDownLatch(1);
new Thread() {
public void run() {
map.tryPut("key1", "value2", 5, TimeUnit.SECONDS);
latch.countDown();
}
}.start();
assertTrue(latch.await(10, TimeUnit.SECONDS));
assertFalse(map.isLocked("key1"));
assertEquals("value2", map.get("key1"));
map.forceUnlock("key1");
}
@Test
public void testLockTtl2() throws Exception {
final IMap map = createMap();
map.lock("key1", 3, TimeUnit.SECONDS);
final CountDownLatch latch = new CountDownLatch(2);
new Thread() {
public void run() {
if (!map.tryLock("key1")) {
latch.countDown();
}
try {
if (map.tryLock("key1", 5, TimeUnit.SECONDS)) {
latch.countDown();
}
} catch (InterruptedException e) {
e.printStackTrace();
}
}
}.start();
assertTrue(latch.await(10, TimeUnit.SECONDS));
map.forceUnlock("key1");
}
@Test
public void testTryLock() throws Exception {
final IMap map = createMap();
final IMap tempMap = map;
assertTrue(tempMap.tryLock("key1", 2, TimeUnit.SECONDS));
final CountDownLatch latch = new CountDownLatch(1);
new Thread() {
public void run() {
try {
if (!tempMap.tryLock("key1", 2, TimeUnit.SECONDS)) {
latch.countDown();
}
} catch (InterruptedException e) {
e.printStackTrace();
}
}
}.start();
assertTrue(latch.await(100, TimeUnit.SECONDS));
assertTrue(tempMap.isLocked("key1"));
final CountDownLatch latch2 = new CountDownLatch(1);
new Thread() {
public void run() {
try {
if (tempMap.tryLock("key1", 20, TimeUnit.SECONDS)) {
latch2.countDown();
}
} catch (InterruptedException e) {
e.printStackTrace();
}
}
}.start();
Thread.sleep(1000);
tempMap.unlock("key1");
assertTrue(latch2.await(100, TimeUnit.SECONDS));
assertTrue(tempMap.isLocked("key1"));
tempMap.forceUnlock("key1");
}
@Test
public void testForceUnlock() throws Exception {
final IMap map = createMap();
map.lock("key1");
final CountDownLatch latch = new CountDownLatch(1);
new Thread() {
public void run() {
map.forceUnlock("key1");
latch.countDown();
}
}.start();
assertTrue(latch.await(100, TimeUnit.SECONDS));
assertFalse(map.isLocked("key1"));
}
@Test
public void testValues() {
final IMap map = createMap();
fillMap(map);
final Collection values = map.values(new SqlPredicate("this == value1"));
assertEquals(1, values.size());
assertEquals("value1", values.iterator().next());
}
@Test
public void testReplace() throws Exception {
final IMap map = createMap();
assertNull(map.replace("key1", "value1"));
map.put("key1", "value1");
assertEquals("value1", map.replace("key1", "value2"));
assertEquals("value2", map.get("key1"));
assertFalse(map.replace("key1", "value1", "value3"));
assertEquals("value2", map.get("key1"));
assertTrue(map.replace("key1", "value2", "value3"));
assertEquals("value3", map.get("key1"));
}
@Test
public void testSubmitToKey() throws Exception {
final IMap map = createMap();
map.put(1, 1);
Future f = map.submitToKey(1, new IncrementorEntryProcessor());
assertEquals(2, f.get());
assertEquals(2, map.get(1));
}
@Test
public void testSubmitToNonExistentKey() throws Exception {
final IMap map = createMap();
Future f = map.submitToKey(11, new IncrementorEntryProcessor());
assertEquals(1, f.get());
assertEquals(1, map.get(11));
}
@Test
public void testSubmitToKeyWithCallback() throws Exception {
final IMap map = createMap();
map.put(1, 1);
final CountDownLatch latch = new CountDownLatch(1);
ExecutionCallback executionCallback = new ExecutionCallback() {
@Override
public void onResponse(Object response) {
latch.countDown();
}
@Override
public void onFailure(Throwable t) {
}
};
map.submitToKey(1, new IncrementorEntryProcessor(), executionCallback);
assertTrue(latch.await(5, TimeUnit.SECONDS));
assertEquals(2, map.get(1));
}
@Test
public void testListener() throws InterruptedException {
final IMap map = createMap();
final CountDownLatch latch1Add = new CountDownLatch(5);
final CountDownLatch latch1Remove = new CountDownLatch(2);
final CountDownLatch latch2Add = new CountDownLatch(1);
final CountDownLatch latch2Remove = new CountDownLatch(1);
EntryListener listener1 = new EntryAdapter() {
public void entryAdded(EntryEvent event) {
latch1Add.countDown();
}
public void entryRemoved(EntryEvent event) {
latch1Remove.countDown();
}
};
EntryListener listener2 = new EntryAdapter() {
public void entryAdded(EntryEvent event) {
latch2Add.countDown();
}
public void entryRemoved(EntryEvent event) {
latch2Remove.countDown();
}
};
map.addEntryListener(listener1, false);
map.addEntryListener(listener2, "key3", true);
Thread.sleep(1000);
map.put("key1", "value1");
map.put("key2", "value2");
map.put("key3", "value3");
map.put("key4", "value4");
map.put("key5", "value5");
map.remove("key1");
map.remove("key3");
assertTrue(latch1Add.await(10, TimeUnit.SECONDS));
assertTrue(latch1Remove.await(10, TimeUnit.SECONDS));
assertTrue(latch2Add.await(5, TimeUnit.SECONDS));
assertTrue(latch2Remove.await(5, TimeUnit.SECONDS));
}
@Test
public void testPredicateListenerWithPortableKey() throws InterruptedException {
final IMap tradeMap = createMap();
final CountDownLatch countDownLatch = new CountDownLatch(1);
final AtomicInteger atomicInteger = new AtomicInteger(0);
EntryListener listener = new EntryAdapter() {
@Override
public void entryAdded(EntryEvent event) {
atomicInteger.incrementAndGet();
countDownLatch.countDown();
}
};
AuthenticationRequest key = new AuthenticationRequest(new UsernamePasswordCredentials("a", "b"));
tradeMap.addEntryListener(listener, key, true);
AuthenticationRequest key2 = new AuthenticationRequest(new UsernamePasswordCredentials("a", "c"));
tradeMap.put(key2, 1);
assertFalse(countDownLatch.await(5, TimeUnit.SECONDS));
assertEquals(0, atomicInteger.get());
}
@Test
public void testBasicPredicate() {
final IMap map = createMap();
fillMap(map);
final Collection collection = map.values(new SqlPredicate("this == value1"));
assertEquals("value1", collection.iterator().next());
final Set set = map.keySet(new SqlPredicate("this == value1"));
assertEquals("key1", set.iterator().next());
final Set<Map.Entry<String, String>> set1 = map.entrySet(new SqlPredicate("this == value1"));
assertEquals("key1", set1.iterator().next().getKey());
assertEquals("value1", set1.iterator().next().getValue());
}
private void fillMap(IMap map) {
for (int i = 0; i < 10; i++) {
map.put("key" + i, "value" + i);
}
}
/**
* Issue #923
*/
@Test
public void testPartitionAwareKey() {
String name = randomString();
PartitionAwareKey key = new PartitionAwareKey("key", "123");
String value = "value";
IMap<Object, Object> map1 = server.getMap(name);
map1.put(key, value);
assertEquals(value, map1.get(key));
IMap<Object, Object> map2 = client.getMap(name);
assertEquals(value, map2.get(key));
}
private static class PartitionAwareKey implements PartitionAware, Serializable {
private final String key;
private final String pk;
private PartitionAwareKey(String key, String pk) {
this.key = key;
this.pk = pk;
}
@Override
public Object getPartitionKey() {
return pk;
}
}
@Test
public void testExecuteOnKeys() throws Exception {
String name = randomString();
IMap<Integer, Integer> map = client.getMap(name);
IMap<Integer, Integer> map2 = client.getMap(name);
for (int i = 0; i < 10; i++) {
map.put(i, 0);
}
Set keys = new HashSet();
keys.add(1);
keys.add(4);
keys.add(7);
keys.add(9);
final Map<Integer, Object> resultMap = map2.executeOnKeys(keys, new IncrementorEntryProcessor());
assertEquals(1, resultMap.get(1));
assertEquals(1, resultMap.get(4));
assertEquals(1, resultMap.get(7));
assertEquals(1, resultMap.get(9));
assertEquals(1, (int) map.get(1));
assertEquals(0, (int) map.get(2));
assertEquals(0, (int) map.get(3));
assertEquals(1, (int) map.get(4));
assertEquals(0, (int) map.get(5));
assertEquals(0, (int) map.get(6));
assertEquals(1, (int) map.get(7));
assertEquals(0, (int) map.get(8));
assertEquals(1, (int) map.get(9));
}
/**
* Issue #996
*/
@Test
public void testEntryListener() throws InterruptedException {
final CountDownLatch gateAdd = new CountDownLatch(2);
final CountDownLatch gateRemove = new CountDownLatch(1);
final CountDownLatch gateEvict = new CountDownLatch(1);
final CountDownLatch gateUpdate = new CountDownLatch(1);
final String mapName = randomString();
final IMap<Object, Object> serverMap = server.getMap(mapName);
serverMap.put(3, new Deal(3));
final IMap<Object, Object> clientMap = client.getMap(mapName);
assertEquals(1, clientMap.size());
final EntryListener listener = new EntListener(gateAdd, gateRemove, gateEvict, gateUpdate);
clientMap.addEntryListener(listener, new SqlPredicate("id=1"), 2, true);
clientMap.put(2, new Deal(1));
clientMap.put(2, new Deal(1));
clientMap.remove(2);
clientMap.put(2, new Deal(1));
clientMap.evict(2);
assertTrue(gateAdd.await(10, TimeUnit.SECONDS));
assertTrue(gateRemove.await(10, TimeUnit.SECONDS));
assertTrue(gateEvict.await(10, TimeUnit.SECONDS));
assertTrue(gateUpdate.await(10, TimeUnit.SECONDS));
}
static class EntListener implements EntryListener<Integer, Deal>, Serializable {
private final CountDownLatch _gateAdd;
private final CountDownLatch _gateRemove;
private final CountDownLatch _gateEvict;
private final CountDownLatch _gateUpdate;
EntListener(CountDownLatch gateAdd, CountDownLatch gateRemove, CountDownLatch gateEvict, CountDownLatch gateUpdate) {
_gateAdd = gateAdd;
_gateRemove = gateRemove;
_gateEvict = gateEvict;
_gateUpdate = gateUpdate;
}
@Override
public void entryAdded(EntryEvent<Integer, Deal> arg0) {
_gateAdd.countDown();
}
@Override
public void entryEvicted(EntryEvent<Integer, Deal> arg0) {
_gateEvict.countDown();
}
@Override
public void entryRemoved(EntryEvent<Integer, Deal> arg0) {
_gateRemove.countDown();
}
@Override
public void entryUpdated(EntryEvent<Integer, Deal> arg0) {
_gateUpdate.countDown();
}
}
static class Deal implements Serializable {
Integer id;
Deal(Integer id) {
this.id = id;
}
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
}
private static class IncrementorEntryProcessor extends AbstractEntryProcessor implements DataSerializable {
IncrementorEntryProcessor() {
super(true);
}
public Object process(Map.Entry entry) {
Integer value = (Integer) entry.getValue();
if (value == null) {
value = 0;
}
if (value == -1) {
entry.setValue(null);
return null;
}
value++;
entry.setValue(value);
return value;
}
@Override
public void writeData(ObjectDataOutput out) throws IOException {
}
@Override
public void readData(ObjectDataInput in) throws IOException {
}
public void processBackup(Map.Entry entry) {
entry.setValue((Integer) entry.getValue() + 1);
}
}
@Test
public void testMapStatistics() throws Exception {
String name = randomString();
final LocalMapStats localMapStats = server.getMap(name).getLocalMapStats();
final IMap map = client.getMap(name);
final int operationCount = 1000;
for (int i = 0; i < operationCount; i++) {
map.put(i, i);
map.get(i);
map.remove(i);
}
assertEquals("put count", operationCount, localMapStats.getPutOperationCount());
assertEquals("get count", operationCount, localMapStats.getGetOperationCount());
assertEquals("remove count", operationCount, localMapStats.getRemoveOperationCount());
assertTrue("put latency", 0 < localMapStats.getTotalPutLatency());
assertTrue("get latency", 0 < localMapStats.getTotalGetLatency());
assertTrue("remove latency", 0 < localMapStats.getTotalRemoveLatency());
}
static class TestMapStore extends MapStoreAdapter<Long, String> {
public volatile CountDownLatch latch;
@Override
public void store(Long key, String value) {
if (latch != null) {
latch.countDown();
}
}
@Override
public void storeAll(Map<Long, String> map) {
if (latch != null) {
latch.countDown();
}
}
@Override
public void deleteAll(Collection<Long> keys) {
if (latch != null) {
latch.countDown();
}
}
@Override
public void delete(Long key) {
if (latch != null) {
latch.countDown();
}
}
}
}
| 0true
|
hazelcast-client_src_test_java_com_hazelcast_client_map_ClientMapTest.java
|