Unnamed: 0
int64 0
6.45k
| func
stringlengths 37
143k
| target
class label 2
classes | project
stringlengths 33
157
|
---|---|---|---|
5,295 |
/**
 * Defines how terms-aggregation buckets are ordered: by doc count, by term, or by the
 * value of a metrics sub-aggregation. Each order carries a serialization id (see
 * {@link Streams}), the JSON key it is rendered under, the sort direction and the
 * comparator implementing it.
 */
class InternalOrder extends Terms.Order {

    /**
     * Order by the (higher) count of each term.
     */
    public static final InternalOrder COUNT_DESC = new InternalOrder((byte) 1, "_count", false, new Comparator<Terms.Bucket>() {
        @Override
        public int compare(Terms.Bucket o1, Terms.Bucket o2) {
            int cmp = - Longs.compare(o1.getDocCount(), o2.getDocCount());
            if (cmp == 0) {
                // tie-break on the term itself so the ordering is total and deterministic
                cmp = o1.compareTerm(o2);
            }
            return cmp;
        }
    });

    /**
     * Order by the (lower) count of each term.
     */
    public static final InternalOrder COUNT_ASC = new InternalOrder((byte) 2, "_count", true, new Comparator<Terms.Bucket>() {
        @Override
        public int compare(Terms.Bucket o1, Terms.Bucket o2) {
            int cmp = Longs.compare(o1.getDocCount(), o2.getDocCount());
            if (cmp == 0) {
                cmp = o1.compareTerm(o2);
            }
            return cmp;
        }
    });

    /**
     * Order by the terms (descending).
     */
    public static final InternalOrder TERM_DESC = new InternalOrder((byte) 3, "_term", false, new Comparator<Terms.Bucket>() {
        @Override
        public int compare(Terms.Bucket o1, Terms.Bucket o2) {
            return - o1.compareTerm(o2);
        }
    });

    /**
     * Order by the terms (ascending).
     */
    public static final InternalOrder TERM_ASC = new InternalOrder((byte) 4, "_term", true, new Comparator<Terms.Bucket>() {
        @Override
        public int compare(Terms.Bucket o1, Terms.Bucket o2) {
            return o1.compareTerm(o2);
        }
    });

    final byte id;      // wire id used by Streams.writeOrder/readOrder
    final String key;   // key this order is rendered under in toXContent
    final boolean asc;
    protected final Comparator<Terms.Bucket> comparator;

    InternalOrder(byte id, String key, boolean asc, Comparator<Terms.Bucket> comparator) {
        this.id = id;
        this.key = key;
        this.asc = asc;
        this.comparator = comparator;
    }

    byte id() {
        return id;
    }

    String key() {
        return key;
    }

    boolean asc() {
        return asc;
    }

    @Override
    protected Comparator<Terms.Bucket> comparator(Aggregator aggregator) {
        return comparator;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        return builder.startObject().field(key, asc ? "asc" : "desc").endObject();
    }

    /**
     * Validates a sub-aggregation-based order against the terms aggregator it is attached to:
     * the referenced sub-aggregation must exist and be a metrics aggregation, and for
     * multi-valued metrics a metric name must be given and resolvable.
     *
     * @throws AggregationExecutionException if any of the above constraints is violated
     */
    public static InternalOrder validate(InternalOrder order, Aggregator termsAggregator) {
        if (!(order instanceof Aggregation)) {
            // count/term based orders need no validation
            return order;
        }
        String aggName = ((Aggregation) order).aggName();
        Aggregator[] subAggregators = termsAggregator.subAggregators();
        for (int i = 0; i < subAggregators.length; i++) {
            Aggregator aggregator = subAggregators[i];
            if (aggregator.name().equals(aggName)) {
                // we can only apply order on metrics sub-aggregators
                if (!(aggregator instanceof MetricsAggregator)) {
                    throw new AggregationExecutionException("terms aggregation [" + termsAggregator.name() + "] is configured to order by sub-aggregation ["
                            + aggName + "] which is not a metrics aggregation. Terms aggregation order can only refer to metrics aggregations");
                }
                if (aggregator instanceof MetricsAggregator.MultiValue) {
                    String valueName = ((Aggregation) order).metricName();
                    if (valueName == null) {
                        throw new AggregationExecutionException("terms aggregation [" + termsAggregator.name() + "] is configured with a sub-aggregation order ["
                                + aggName + "] which is a multi-valued aggregation, yet no metric name was specified");
                    }
                    if (!((MetricsAggregator.MultiValue) aggregator).hasMetric(valueName)) {
                        throw new AggregationExecutionException("terms aggregation [" + termsAggregator.name() + "] is configured with a sub-aggregation order ["
                                + aggName + "] and value [" + valueName + "] yet the referred sub aggregator holds no metric that goes by this name");
                    }
                    return order;
                }
                // aggregator must be of a single value type
                // todo we can also choose to be really strict and verify that the user didn't specify a value name and if so fail?
                return order;
            }
        }
        throw new AggregationExecutionException("terms aggregation [" + termsAggregator.name() + "] is configured with a sub-aggregation order ["
                + aggName + "] but no sub aggregation with this name is configured");
    }

    /**
     * An order based on a sub-aggregation value. The key is either {@code aggName} or
     * {@code aggName.metricName} for multi-valued metrics.
     */
    static class Aggregation extends InternalOrder {

        static final byte ID = 0;

        Aggregation(String key, boolean asc) {
            super(ID, key, asc, new MultiBucketsAggregation.Bucket.SubAggregationComparator<Terms.Bucket>(key, asc));
        }

        Aggregation(String aggName, String metricName, boolean asc) {
            super(ID, key(aggName, metricName), asc, new MultiBucketsAggregation.Bucket.SubAggregationComparator<Terms.Bucket>(aggName, metricName, asc));
        }

        String aggName() {
            int index = key.indexOf('.');
            return index < 0 ? key : key.substring(0, index);
        }

        String metricName() {
            int index = key.indexOf('.');
            return index < 0 ? null : key.substring(index + 1, key.length());
        }

        private static String key(String aggName, String valueName) {
            return (valueName == null) ? aggName : aggName + "." + valueName;
        }

        @Override
        protected Comparator<Terms.Bucket> comparator(Aggregator termsAggregator) {
            if (termsAggregator == null) {
                return comparator;
            }

            // Internal Optimization:
            //
            // in this phase, if the order is based on sub-aggregations, we need to use a different comparator
            // to avoid constructing buckets for ordering purposes (we can potentially have a lot of buckets and building
            // them will cause loads of redundant object constructions). The "special" comparators here will fetch the
            // sub aggregation values directly from the sub aggregators bypassing bucket creation. Note that the comparator
            // attached to the order will still be used in the reduce phase of the Aggregation.
            final Aggregator aggregator = subAggregator(aggName(), termsAggregator);
            assert aggregator != null && aggregator instanceof MetricsAggregator : "this should be picked up before the aggregation is executed";
            if (aggregator instanceof MetricsAggregator.MultiValue) {
                final String valueName = metricName();
                assert valueName != null : "this should be picked up before the aggregation is executed";
                return new Comparator<Terms.Bucket>() {
                    @Override
                    public int compare(Terms.Bucket o1, Terms.Bucket o2) {
                        double v1 = ((MetricsAggregator.MultiValue) aggregator).metric(valueName, ((InternalTerms.Bucket) o1).bucketOrd);
                        double v2 = ((MetricsAggregator.MultiValue) aggregator).metric(valueName, ((InternalTerms.Bucket) o2).bucketOrd);
                        // some metrics may return NaN (eg. avg, variance, etc...) in which case we'd like to push all of those to
                        // the bottom
                        // BUGFIX: `v1 == Double.NaN` is always false (NaN never compares equal); use Double.isNaN
                        if (Double.isNaN(v1)) {
                            return asc ? 1 : -1;
                        }
                        return asc ? Double.compare(v1, v2) : Double.compare(v2, v1);
                    }
                };
            }
            return new Comparator<Terms.Bucket>() {
                @Override
                public int compare(Terms.Bucket o1, Terms.Bucket o2) {
                    double v1 = ((MetricsAggregator.SingleValue) aggregator).metric(((InternalTerms.Bucket) o1).bucketOrd);
                    double v2 = ((MetricsAggregator.SingleValue) aggregator).metric(((InternalTerms.Bucket) o2).bucketOrd);
                    // some metrics may return NaN (eg. avg, variance, etc...) in which case we'd like to push all of those to
                    // the bottom
                    // BUGFIX: `v1 == Double.NaN` is always false (NaN never compares equal); use Double.isNaN
                    if (Double.isNaN(v1)) {
                        return asc ? 1 : -1;
                    }
                    return asc ? Double.compare(v1, v2) : Double.compare(v2, v1);
                }
            };
        }

        private Aggregator subAggregator(String aggName, Aggregator termsAggregator) {
            Aggregator[] subAggregators = termsAggregator.subAggregators();
            for (int i = 0; i < subAggregators.length; i++) {
                if (subAggregators[i].name().equals(aggName)) {
                    return subAggregators[i];
                }
            }
            return null;
        }
    }

    /**
     * Wire serialization helpers for orders. The four built-in orders are encoded by id
     * alone; aggregation-based orders additionally carry direction, key and optional
     * metric name.
     */
    public static class Streams {

        public static void writeOrder(InternalOrder order, StreamOutput out) throws IOException {
            out.writeByte(order.id());
            if (order instanceof Aggregation) {
                out.writeBoolean(((MultiBucketsAggregation.Bucket.SubAggregationComparator) order.comparator).asc());
                out.writeString(((MultiBucketsAggregation.Bucket.SubAggregationComparator) order.comparator).aggName());
                boolean hasValueName = ((MultiBucketsAggregation.Bucket.SubAggregationComparator) order.comparator).valueName() != null;
                out.writeBoolean(hasValueName);
                if (hasValueName) {
                    out.writeString(((MultiBucketsAggregation.Bucket.SubAggregationComparator) order.comparator).valueName());
                }
            }
        }

        public static InternalOrder readOrder(StreamInput in) throws IOException {
            byte id = in.readByte();
            switch (id) {
                case 1: return InternalOrder.COUNT_DESC;
                case 2: return InternalOrder.COUNT_ASC;
                case 3: return InternalOrder.TERM_DESC;
                case 4: return InternalOrder.TERM_ASC;
                case 0:
                    boolean asc = in.readBoolean();
                    String key = in.readString();
                    if (in.readBoolean()) {
                        return new InternalOrder.Aggregation(key, in.readString(), asc);
                    }
                    return new InternalOrder.Aggregation(key, asc);
                default:
                    throw new RuntimeException("unknown terms order");
            }
        }
    }
}
| 1no label
|
src_main_java_org_elasticsearch_search_aggregations_bucket_terms_InternalOrder.java
|
76 |
/**
 * Exposes the protected lock primitives of {@link OSharedResourceAbstract} with
 * public visibility, so external collaborators can lock/unlock the resource
 * through the {@link OSharedResource} interface. All methods delegate directly
 * to the superclass.
 */
public class OSharedResourceExternal extends OSharedResourceAbstract implements OSharedResource {

  /** Acquires the shared (read) lock. */
  @Override
  public void acquireSharedLock() {
    super.acquireSharedLock();
  }

  /** Releases the shared (read) lock. */
  @Override
  public void releaseSharedLock() {
    super.releaseSharedLock();
  }

  /** Acquires the exclusive (write) lock. */
  @Override
  public void acquireExclusiveLock() {
    super.acquireExclusiveLock();
  }

  /** Releases the exclusive (write) lock. */
  @Override
  public void releaseExclusiveLock() {
    super.releaseExclusiveLock();
  }
}
| 0true
|
commons_src_main_java_com_orientechnologies_common_concur_resource_OSharedResourceExternal.java
|
47 |
@Component("blSandBoxItemCustomPersistenceHandler")
public class SandBoxItemCustomPersistenceHandler extends CustomPersistenceHandlerAdapter {

    private final Log LOG = LogFactory.getLog(SandBoxItemCustomPersistenceHandler.class);

    @Resource(name="blSandBoxService")
    protected SandBoxService sandBoxService;

    @Resource(name="blAdminSecurityService")
    protected AdminSecurityService adminSecurityService;

    @Resource(name="blAdminSecurityRemoteService")
    protected SecurityVerifier adminRemoteSecurityService;

    @Override
    public Boolean willHandleSecurity(PersistencePackage persistencePackage) {
        return true;
    }

    /**
     * Handles only SandBoxItem requests whose fifth custom-criteria entry is "standard".
     * BUGFIX: guard against a missing/short customCriteria array (and a null element) —
     * the previous direct {@code getCustomCriteria()[4]} access could throw
     * ArrayIndexOutOfBoundsException/NullPointerException on malformed packages.
     */
    @Override
    public Boolean canHandleFetch(PersistencePackage persistencePackage) {
        String ceilingEntityFullyQualifiedClassname = persistencePackage.getCeilingEntityFullyQualifiedClassname();
        boolean isSandboxItem = SandBoxItem.class.getName().equals(ceilingEntityFullyQualifiedClassname);
        if (isSandboxItem) {
            String[] criteria = persistencePackage.getCustomCriteria();
            return criteria != null && criteria.length == 5 && "standard".equals(criteria[4]);
        }
        return false;
    }

    @Override
    public Boolean canHandleAdd(PersistencePackage persistencePackage) {
        return canHandleFetch(persistencePackage);
    }

    @Override
    public Boolean canHandleRemove(PersistencePackage persistencePackage) {
        return canHandleFetch(persistencePackage);
    }

    @Override
    public Boolean canHandleUpdate(PersistencePackage persistencePackage) {
        return canHandleFetch(persistencePackage);
    }

    /**
     * Loads the SandBoxItems with the given ids that belong to the given sandbox
     * (either as their original or current sandbox).
     *
     * @throws IllegalArgumentException if {@code ids} is empty
     */
    protected List<SandBoxItem> retrieveSandBoxItems(List<Long> ids, DynamicEntityDao dynamicEntityDao, SandBox mySandBox) {
        if (CollectionUtils.isEmpty(ids)) {
            throw new IllegalArgumentException("The passed in ids parameter is empty");
        }

        //declare SandBoxItemImpl explicitly, as we do not want to retrieve other polymorphic types (e.g. WorkflowSandBoxItemImpl)
        Criteria criteria = dynamicEntityDao.createCriteria(SandBoxItemImpl.class);
        criteria.add(Restrictions.in("id", ids));
        criteria.add(Restrictions.or(Restrictions.eq("originalSandBoxId", mySandBox.getId()), Restrictions.eq("sandBoxId", mySandBox.getId())));
        return criteria.list();
    }

    /**
     * Executes a sandbox operation (promote/revert/reject, all or selected) encoded in the
     * custom criteria — [moduleKey, operation, csv target ids, comment, "standard"] — then
     * returns the current, non-archived items of the resulting sandbox.
     */
    @Override
    public DynamicResultSet fetch(PersistencePackage persistencePackage, CriteriaTransferObject cto, DynamicEntityDao dynamicEntityDao, RecordHelper helper) throws ServiceException {
        String ceilingEntityFullyQualifiedClassname = persistencePackage.getCeilingEntityFullyQualifiedClassname();
        String[] customCriteria = persistencePackage.getCustomCriteria();
        if (ArrayUtils.isEmpty(customCriteria) || customCriteria.length != 5) {
            ServiceException e = new ServiceException("Invalid request for entity: " + ceilingEntityFullyQualifiedClassname);
            LOG.error("Invalid request for entity: " + ceilingEntityFullyQualifiedClassname, e);
            throw e;
        }
        AdminUser adminUser = adminRemoteSecurityService.getPersistentAdminUser();
        if (adminUser == null) {
            ServiceException e = new ServiceException("Unable to determine current user logged in status");
            throw e;
        }
        try {
            String moduleKey = customCriteria[0];
            String operation = customCriteria[1];
            // parse the csv id list, silently skipping non-numeric entries (best-effort by design)
            List<Long> targets = new ArrayList<Long>();
            if (!StringUtils.isEmpty(customCriteria[2])) {
                String[] parts = customCriteria[2].split(",");
                for (String part : parts) {
                    try {
                        targets.add(Long.valueOf(part));
                    } catch (NumberFormatException e) {
                        //do nothing
                    }
                }
            }
            String comment = customCriteria[3];
            String requiredPermission;
            if (moduleKey.equals("userSandBox")) {
                requiredPermission = "PERMISSION_ALL_USER_SANDBOX";
            } else {
                requiredPermission = "PERMISSION_ALL_APPROVER_SANDBOX";
            }
            boolean allowOperation = false;
            for (AdminRole role : adminUser.getAllRoles()) {
                for (AdminPermission permission : role.getAllPermissions()) {
                    if (permission.getName().equals(requiredPermission)) {
                        allowOperation = true;
                        break;
                    }
                }
            }
            if (!allowOperation) {
                ServiceException e = new ServiceException("Current user does not have permission to perform operation");
                LOG.error("Current user does not have permission to perform operation", e);
                throw e;
            }
            SandBox originalSandBox;
            SandBox currentSandBox;
            if (moduleKey.equals("userSandBox")) {
                currentSandBox = sandBoxService.retrieveUserSandBox(null, adminUser);
                originalSandBox = currentSandBox;
            } else {
                originalSandBox = sandBoxService.retrieveUserSandBox(null, adminUser);
                currentSandBox = sandBoxService.retrieveApprovalSandBox(originalSandBox);
            }
            if (operation.equals("promoteAll")) {
                sandBoxService.promoteAllSandBoxItems(currentSandBox, comment);
            } else if (operation.equals("promoteSelected")) {
                List<SandBoxItem> items = retrieveSandBoxItems(targets, dynamicEntityDao, currentSandBox);
                sandBoxService.promoteSelectedItems(currentSandBox, comment, items);
            } else if (operation.equals("revertRejectAll")) {
                if (moduleKey.equals("userSandBox")) {
                    sandBoxService.revertAllSandBoxItems(originalSandBox, currentSandBox);
                } else {
                    sandBoxService.rejectAllSandBoxItems(originalSandBox, currentSandBox, comment);
                }
            } else if (operation.equals("revertRejectSelected")) {
                List<SandBoxItem> items = retrieveSandBoxItems(targets, dynamicEntityDao, currentSandBox);
                if (moduleKey.equals("userSandBox")) {
                    sandBoxService.revertSelectedSandBoxItems(currentSandBox, items);
                } else {
                    sandBoxService.rejectSelectedSandBoxItems(currentSandBox, comment, items);
                }
            }
            PersistencePerspective persistencePerspective = persistencePackage.getPersistencePerspective();
            Map<String, FieldMetadata> originalProps = helper.getSimpleMergedProperties(SandBoxItem.class.getName(), persistencePerspective);
            cto.get("sandBoxId").setFilterValue(currentSandBox.getId().toString());
            cto.get("archivedFlag").setFilterValue(Boolean.FALSE.toString());
            List<FilterMapping> filterMappings = helper.getFilterMappings(persistencePerspective, cto, SandBoxItem.class.getName(), originalProps);
            //declare SandBoxItemImpl explicitly, as we do not want to retrieve other polymorphic types (e.g. WorkflowSandBoxItemImpl)
            List<Serializable> records = helper.getPersistentRecords(SandBoxItem.class.getName(), filterMappings, cto.getFirstResult(), cto.getMaxResults());
            Entity[] results = helper.getRecords(originalProps, records);
            int totalRecords = helper.getTotalRecords(SandBoxItem.class.getName(), filterMappings);
            DynamicResultSet response = new DynamicResultSet(results, totalRecords);
            return response;
        } catch (Exception e) {
            throw new ServiceException("Unable to execute persistence activity for entity: "+ceilingEntityFullyQualifiedClassname, e);
        }
    }
}
| 0true
|
admin_broadleaf-contentmanagement-module_src_main_java_org_broadleafcommerce_cms_admin_server_handler_SandBoxItemCustomPersistenceHandler.java
|
1,487 |
@SuppressWarnings("unchecked")
public class OObjectDatabaseTx extends ODatabasePojoAbstract<Object> implements ODatabaseObject, OUserObject2RecordHandler {
public static final String TYPE = "object";
protected ODictionary<Object> dictionary;
protected OEntityManager entityManager;
protected boolean saveOnlyDirty;
protected boolean lazyLoading;
protected boolean automaticSchemaGeneration;
protected OMetadataObject metadata;
/**
 * Creates an object database bound to the given URL, wrapping a new underlying
 * document database and registering this instance as its owner.
 *
 * @param iURL database URL (e.g. "plocal:...", "remote:...")
 */
public OObjectDatabaseTx(final String iURL) {
  super(new ODatabaseDocumentTx(iURL));
  underlying.setDatabaseOwner(this);
  init();
}
/** Creates a new POJO of the given class (by its simple name) with no constructor arguments. */
public <T> T newInstance(final Class<T> iType) {
  return (T) newInstance(iType.getSimpleName(), null, new Object[0]);
}

/** Creates a new POJO of the given class (by its simple name), passing iArgs to the entity constructor. */
public <T> T newInstance(final Class<T> iType, Object... iArgs) {
  return (T) newInstance(iType.getSimpleName(), null, iArgs);
}

/** Creates a new POJO by schema class name with no constructor arguments. */
public <RET> RET newInstance(String iClassName) {
  return (RET) newInstance(iClassName, null, new Object[0]);
}
/**
 * Opens the database with the given credentials, registers the built-in security
 * classes (OUser, ORole) as entities, and binds the object-level metadata wrapper.
 */
@Override
public <THISDB extends ODatabase> THISDB open(String iUserName, String iUserPassword) {
  super.open(iUserName, iUserPassword);
  entityManager.registerEntityClass(OUser.class);
  entityManager.registerEntityClass(ORole.class);
  metadata = new OMetadataObject(underlying.getMetadata());
  return (THISDB) this;
}
/**
 * Returns the object-level metadata wrapper, lazily creating it from the
 * underlying document database's metadata on first access.
 */
@Override
public OMetadataObject getMetadata() {
  checkOpeness();
  OMetadataObject result = metadata;
  if (result == null) {
    result = new OMetadataObject(underlying.getMetadata());
    metadata = result;
  }
  return result;
}
/**
 * Create a new POJO by its class name. Assure to have called the registerEntityClasses() declaring the packages that are part of
 * entity classes.
 *
 * @param iClassName       schema class name of the entity to create
 * @param iEnclosingClass  enclosing instance for non-static inner entity classes (may be null)
 * @param iArgs            arguments forwarded to the entity constructor
 * @return the proxied entity instance, or null if creation failed
 * @see OEntityManager#registerEntityClasses(String)
 */
public <RET extends Object> RET newInstance(final String iClassName, final Object iEnclosingClass, Object... iArgs) {
  checkSecurity(ODatabaseSecurityResources.CLASS, ORole.PERMISSION_CREATE, iClassName);
  try {
    Class<?> entityClass = entityManager.getEntityClass(iClassName);
    if (entityClass != null) {
      // bind a freshly-created underlying document to a proxied entity instance
      RET enhanced = (RET) OObjectEntityEnhancer.getInstance().getProxiedInstance(entityManager.getEntityClass(iClassName),
          iEnclosingClass, underlying.newInstance(iClassName), null, iArgs);
      return (RET) enhanced;
    } else {
      throw new OSerializationException("Type " + iClassName
          + " cannot be serialized because is not part of registered entities. To fix this error register this class");
    }
  } catch (Exception e) {
    // NOTE(review): OLogManager.error(..., ODatabaseException.class) presumably rethrows as ODatabaseException,
    // making the `return null` below unreachable in practice — confirm against OLogManager
    OLogManager.instance().error(this, "Error on creating object of class " + iClassName, e, ODatabaseException.class);
  }
  return null;
}
/**
 * Create a new POJO by its class name. Assure to have called the registerEntityClasses() declaring the packages that are part of
 * entity classes.
 *
 * @param iClassName       schema class name of the entity to create
 * @param iEnclosingClass  enclosing instance for non-static inner entity classes (may be null)
 * @param iDocument        existing underlying document to bind the new proxy to
 * @param iArgs            arguments forwarded to the entity constructor
 * @return the proxied entity instance, or null if creation failed
 * @see OEntityManager#registerEntityClasses(String)
 */
public <RET extends Object> RET newInstance(final String iClassName, final Object iEnclosingClass, ODocument iDocument,
    Object... iArgs) {
  checkSecurity(ODatabaseSecurityResources.CLASS, ORole.PERMISSION_CREATE, iClassName);
  try {
    Class<?> entityClass = entityManager.getEntityClass(iClassName);
    if (entityClass != null) {
      // unlike the other overload, this one wraps the supplied document rather than a new one
      RET enhanced = (RET) OObjectEntityEnhancer.getInstance().getProxiedInstance(entityManager.getEntityClass(iClassName),
          iEnclosingClass, iDocument, null, iArgs);
      return (RET) enhanced;
    } else {
      throw new OSerializationException("Type " + iClassName
          + " cannot be serialized because is not part of registered entities. To fix this error register this class");
    }
  } catch (Exception e) {
    // NOTE(review): OLogManager.error(..., ODatabaseException.class) presumably rethrows — confirm
    OLogManager.instance().error(this, "Error on creating object of class " + iClassName, e, ODatabaseException.class);
  }
  return null;
}
/** Browses all instances of the given class, including subclasses. */
public <RET> OObjectIteratorClass<RET> browseClass(final Class<RET> iClusterClass) {
  return browseClass(iClusterClass, true);
}

/** Browses all instances of the given class; iPolymorphic controls inclusion of subclasses. Returns null for a null class. */
public <RET> OObjectIteratorClass<RET> browseClass(final Class<RET> iClusterClass, final boolean iPolymorphic) {
  if (iClusterClass == null)
    return null;
  return browseClass(iClusterClass.getSimpleName(), iPolymorphic);
}

/** Browses all instances of the named schema class, including subclasses. */
public <RET> OObjectIteratorClass<RET> browseClass(final String iClassName) {
  return browseClass(iClassName, true);
}

/** Browses all instances of the named schema class after checking read permission. */
public <RET> OObjectIteratorClass<RET> browseClass(final String iClassName, final boolean iPolymorphic) {
  checkOpeness();
  checkSecurity(ODatabaseSecurityResources.CLASS, ORole.PERMISSION_READ, iClassName);
  return new OObjectIteratorClass<RET>(this, (ODatabaseRecordAbstract) getUnderlying().getUnderlying(), iClassName, iPolymorphic);
}
/** Browses all records stored in the named cluster after checking read permission. */
public <RET> OObjectIteratorCluster<RET> browseCluster(final String iClusterName) {
  checkOpeness();
  checkSecurity(ODatabaseSecurityResources.CLUSTER, ORole.PERMISSION_READ, iClusterName);
  return (OObjectIteratorCluster<RET>) new OObjectIteratorCluster<Object>(this, (ODatabaseRecordAbstract) getUnderlying()
      .getUnderlying(), getClusterIdByName(iClusterName));
}
/** Loads the given POJO's current state from the database (no fetch plan). */
public <RET> RET load(final Object iPojo) {
  return (RET) load(iPojo, null);
}

/** Reloads the POJO from the database, ignoring the cache. */
public <RET> RET reload(final Object iPojo) {
  return (RET) reload(iPojo, null, true);
}

/** Reloads the POJO from the database with explicit cache behavior. */
public <RET> RET reload(final Object iPojo, final boolean iIgnoreCache) {
  return (RET) reload(iPojo, null, iIgnoreCache);
}

/**
 * Reloads the POJO from its associated document using the given fetch plan,
 * then re-materializes the document state back into the POJO.
 * Returns null if iPojo is null.
 */
public <RET> RET reload(Object iPojo, final String iFetchPlan, final boolean iIgnoreCache) {
  checkOpeness();
  if (iPojo == null)
    return null;

  // GET THE ASSOCIATED DOCUMENT
  final ODocument record = getRecordByUserObject(iPojo, true);
  underlying.reload(record, iFetchPlan, iIgnoreCache);

  iPojo = stream2pojo(record, iPojo, iFetchPlan, true);
  return (RET) iPojo;
}

/** Loads the POJO with the given fetch plan, using the cache. */
public <RET> RET load(final Object iPojo, final String iFetchPlan) {
  return (RET) load(iPojo, iFetchPlan, false);
}
/** Attaches a detached POJO back to this database (re-binds its proxy/document state). */
@Override
public void attach(final Object iPojo) {
  OObjectEntitySerializer.attach(iPojo, this);
}

/** Attaches the POJO and immediately persists it; returns the saved (proxied) instance. */
public <RET> RET attachAndSave(final Object iPojo) {
  attach(iPojo);
  return (RET) save(iPojo);
}
/**
 * Method that detaches all fields contained in the document to the given object. It returns by default a proxied instance. To get
 * a detached non proxied instance @see {@link OObjectEntitySerializer.detach(T o, ODatabaseObject db, boolean
 * returnNonProxiedInstance)}
 *
 * @param <RET>
 * @param iPojo
 *          :- the object to detach
 * @return the detached object
 */
@Override
public <RET> RET detach(final Object iPojo) {
  return (RET) OObjectEntitySerializer.detach(iPojo, this);
}

/**
 * Method that detaches all fields contained in the document to the given object.
 *
 * @param <RET>
 * @param iPojo
 *          :- the object to detach
 * @param returnNonProxiedInstance
 *          :- defines if the return object will be a proxied instance or not. If set to TRUE and the object does not contains @Id
 *          and @Version fields it could procude data replication
 * @return the object serialized or with detached data
 */
public <RET> RET detach(final Object iPojo, boolean returnNonProxiedInstance) {
  return (RET) OObjectEntitySerializer.detach(iPojo, this, returnNonProxiedInstance);
}

/**
 * Method that detaches all fields contained in the document to the given object and recursively all object tree. This may throw a
 * {@link StackOverflowError} with big objects tree. To avoid it set the stack size with -Xss java option
 *
 * @param <RET>
 * @param iPojo
 *          :- the object to detach
 * @param returnNonProxiedInstance
 *          :- defines if the return object will be a proxied instance or not. If set to TRUE and the object does not contains @Id
 *          and @Version fields it could procude data replication
 * @return the object serialized or with detached data
 */
public <RET> RET detachAll(final Object iPojo, boolean returnNonProxiedInstance) {
  return (RET) OObjectEntitySerializer.detachAll(iPojo, this, returnNonProxiedInstance);
}
/** Loads the POJO with fetch plan and cache control; does not load tombstones. */
public <RET> RET load(final Object iPojo, final String iFetchPlan, final boolean iIgnoreCache) {
  return (RET) load(iPojo, iFetchPlan, iIgnoreCache, false);
}

/**
 * Loads the document associated with the POJO and re-materializes its state into
 * the POJO. The record is flagged UNMARSHALLING for the duration of the load so
 * proxy handlers don't treat intermediate field writes as user changes, and is
 * restored to LOADED afterwards. Returns null if iPojo is null.
 */
@Override
public <RET> RET load(Object iPojo, String iFetchPlan, boolean iIgnoreCache, boolean loadTombstone) {
  checkOpeness();
  if (iPojo == null)
    return null;

  // GET THE ASSOCIATED DOCUMENT
  ODocument record = getRecordByUserObject(iPojo, true);
  try {
    record.setInternalStatus(ORecordElement.STATUS.UNMARSHALLING);

    record = underlying.load(record, iFetchPlan, iIgnoreCache, loadTombstone);

    return (RET) stream2pojo(record, iPojo, iFetchPlan);
  } finally {
    record.setInternalStatus(ORecordElement.STATUS.LOADED);
  }
}
/** Loads the record with the given RID (no fetch plan). */
public <RET> RET load(final ORID iRecordId) {
  return (RET) load(iRecordId, null);
}

/** Loads the record with the given RID using the given fetch plan. */
public <RET> RET load(final ORID iRecordId, final String iFetchPlan) {
  return (RET) load(iRecordId, iFetchPlan, false);
}

/** Loads the record with the given RID; does not load tombstones. */
public <RET> RET load(final ORID iRecordId, final String iFetchPlan, final boolean iIgnoreCache) {
  return (RET) load(iRecordId, iFetchPlan, iIgnoreCache, false);
}

/**
 * Loads the document with the given RID and wraps it in a proxied entity instance.
 * Returns null when the RID is null or the record does not exist.
 */
@Override
public <RET> RET load(ORID iRecordId, String iFetchPlan, boolean iIgnoreCache, boolean loadTombstone) {
  checkOpeness();
  if (iRecordId == null)
    return null;

  // GET THE ASSOCIATED DOCUMENT
  final ODocument record = (ODocument) underlying.load(iRecordId, iFetchPlan, iIgnoreCache, loadTombstone);
  if (record == null)
    return null;

  return (RET) OObjectEntityEnhancer.getInstance().getProxiedInstance(record.getClassName(), entityManager, record, null);
}
/**
 * Saves an object to the databasein synchronous mode . First checks if the object is new or not. In case it's new a new ODocument
 * is created and bound to the object, otherwise the ODocument is retrieved and updated. The object is introspected using the Java
 * Reflection to extract the field values. <br/>
 * If a multi value (array, collection or map of objects) is passed, then each single object is stored separately.
 */
public <RET> RET save(final Object iContent) {
  return (RET) save(iContent, (String) null, OPERATION_MODE.SYNCHRONOUS, false, null, null);
}
/**
 * Saves an object to the database specifying the mode. First checks if the object is new or not. In case it's new a new ODocument
 * is created and bound to the object, otherwise the ODocument is retrieved and updated. The object is introspected using the Java
 * Reflection to extract the field values. <br/>
 * If a multi value (array, collection or map of objects) is passed, then each single object is stored separately.
 */
public <RET> RET save(final Object iContent, OPERATION_MODE iMode, boolean iForceCreate,
    final ORecordCallback<? extends Number> iRecordCreatedCallback, ORecordCallback<ORecordVersion> iRecordUpdatedCallback) {
  // BUGFIX: forward the caller's iForceCreate — it was previously ignored and a hard-coded `false` was passed
  return (RET) save(iContent, null, iMode, iForceCreate, iRecordCreatedCallback, iRecordUpdatedCallback);
}
/**
 * Saves an object in synchronous mode to the database forcing a record cluster where to store it. First checks if the object is
 * new or not. In case it's new a new ODocument is created and bound to the object, otherwise the ODocument is retrieved and
 * updated. The object is introspected using the Java Reflection to extract the field values. <br/>
 * If a multi value (array, collection or map of objects) is passed, then each single object is stored separately.
 *
 * Before to use the specified cluster a check is made to know if is allowed and figures in the configured and the record is valid
 * following the constraints declared in the schema.
 *
 * @see ORecordSchemaAware#validate()
 */
public <RET> RET save(final Object iPojo, final String iClusterName) {
  return (RET) save(iPojo, iClusterName, OPERATION_MODE.SYNCHRONOUS, false, null, null);
}
/**
 * Pushes the POJO's current state as a replica update on the underlying database.
 * The record is flagged MARSHALLING while serializing (and restored to LOADED in
 * the finally), the proxy's loaded-field map is refreshed and the user object is
 * re-registered since the record id may have changed.
 */
@Override
public boolean updatedReplica(Object iPojo) {
  OSerializationThreadLocal.INSTANCE.get().clear();

  // GET THE ASSOCIATED DOCUMENT
  final Object proxiedObject = OObjectEntitySerializer.serializeObject(iPojo, this);
  final ODocument record = getRecordByUserObject(proxiedObject, true);
  boolean result;
  try {
    record.setInternalStatus(com.orientechnologies.orient.core.db.record.ORecordElement.STATUS.MARSHALLING);
    result = underlying.updatedReplica(record);

    ((OObjectProxyMethodHandler) ((ProxyObject) proxiedObject).getHandler()).updateLoadedFieldMap(proxiedObject);
    // RE-REGISTER FOR NEW RECORDS SINCE THE ID HAS CHANGED
    registerUserObject(proxiedObject, record);
  } finally {
    record.setInternalStatus(com.orientechnologies.orient.core.db.record.ORecordElement.STATUS.LOADED);
  }

  return result;
}
/**
 * Saves an object to the database forcing a record cluster where to store it. First checks if the object is new or not. In case
 * it's new a new ODocument is created and bound to the object, otherwise the ODocument is retrieved and updated. The object is
 * introspected using the Java Reflection to extract the field values. <br/>
 * If a multi value (array, collection or map of objects) is passed, then each single object is stored separately.
 *
 * Before to use the specified cluster a check is made to know if is allowed and figures in the configured and the record is valid
 * following the constraints declared in the schema.
 *
 * @see ORecordSchemaAware#validate()
 */
public <RET> RET save(final Object iPojo, final String iClusterName, OPERATION_MODE iMode, boolean iForceCreate,
    final ORecordCallback<? extends Number> iRecordCreatedCallback, ORecordCallback<ORecordVersion> iRecordUpdatedCallback) {
  checkOpeness();
  if (iPojo == null)
    return (RET) iPojo;
  else if (OMultiValue.isMultiValue(iPojo)) {
    // MULTI VALUE OBJECT: STORE SINGLE POJOS
    for (Object pojo : OMultiValue.getMultiValueIterable(iPojo)) {
      save(pojo, iClusterName);
    }
    return (RET) iPojo;
  } else {
    OSerializationThreadLocal.INSTANCE.get().clear();

    // GET THE ASSOCIATED DOCUMENT
    final Object proxiedObject = OObjectEntitySerializer.serializeObject(iPojo, this);
    final ODocument record = getRecordByUserObject(proxiedObject, true);
    try {
      record.setInternalStatus(ORecordElement.STATUS.MARSHALLING);

      // skip the write entirely when only-dirty saving is on and nothing changed
      if (!saveOnlyDirty || record.isDirty()) {
        // REGISTER BEFORE TO SERIALIZE TO AVOID PROBLEMS WITH CIRCULAR DEPENDENCY
        // registerUserObject(iPojo, record);
        deleteOrphans((((OObjectProxyMethodHandler) ((ProxyObject) proxiedObject).getHandler())));

        ODocument savedRecord = underlying.save(record, iClusterName, iMode, iForceCreate, iRecordCreatedCallback,
            iRecordUpdatedCallback);

        ((OObjectProxyMethodHandler) ((ProxyObject) proxiedObject).getHandler()).setDoc(savedRecord);
        ((OObjectProxyMethodHandler) ((ProxyObject) proxiedObject).getHandler()).updateLoadedFieldMap(proxiedObject);
        // RE-REGISTER FOR NEW RECORDS SINCE THE ID HAS CHANGED
        registerUserObject(proxiedObject, record);
      }
    } finally {
      record.setInternalStatus(ORecordElement.STATUS.LOADED);
    }
    return (RET) proxiedObject;
  }
}
/**
 * Deletes the POJO's record, first cascading the delete to fields marked for
 * cascade-delete. If the POJO has no registered record, its RID is recovered
 * from the detached object (failing with OObjectNotDetachedException if absent).
 * Outside a transaction the POJO is also unregistered from the identity maps.
 */
public ODatabaseObject delete(final Object iPojo) {
  checkOpeness();

  if (iPojo == null)
    return this;

  ODocument record = getRecordByUserObject(iPojo, false);
  if (record == null) {
    final ORecordId rid = OObjectSerializerHelper.getObjectID(this, iPojo);
    if (rid == null)
      throw new OObjectNotDetachedException("Cannot retrieve the object's ID for '" + iPojo + "' because has not been detached");

    record = (ODocument) underlying.load(rid);
  }
  deleteCascade(record);

  underlying.delete(record);

  if (getTransaction() instanceof OTransactionNoTx)
    unregisterPojo(iPojo, record);

  return this;
}
/**
 * Deletes the record with the given RID (cascading first). Non-document records
 * are silently ignored. Outside a transaction the associated POJO, if any, is
 * unregistered as well.
 */
@Override
public ODatabaseObject delete(final ORID iRID) {
  checkOpeness();

  if (iRID == null)
    return this;

  final ORecordInternal<?> record = iRID.getRecord();
  if (record instanceof ODocument) {
    Object iPojo = getUserObjectByRecord(record, null);

    deleteCascade((ODocument) record);

    underlying.delete(record);

    if (getTransaction() instanceof OTransactionNoTx)
      unregisterPojo(iPojo, (ODocument) record);

  }
  return this;
}
/** Deletes the record with the given RID at the given version (tombstone allowed). */
@Override
public ODatabaseObject delete(final ORID iRID, final ORecordVersion iVersion) {
  deleteRecord(iRID, iVersion, false);
  return this;
}

/** Physically removes the record with the given RID at the given version (no tombstone). */
@Override
public ODatabaseComplex<Object> cleanOutRecord(ORID iRID, ORecordVersion iVersion) {
  deleteRecord(iRID, iVersion, true);
  return this;
}
/**
 * Shared implementation for versioned delete/clean-out: cascades the delete, then
 * either cleans the record out or deletes it depending on prohibitTombstones, and
 * unregisters the POJO when outside a transaction.
 *
 * NOTE(review): the return value looks inverted — true for a null RID (no-op) and
 * false after a successful delete. Callers here ignore it; confirm before relying on it.
 */
private boolean deleteRecord(ORID iRID, ORecordVersion iVersion, boolean prohibitTombstones) {
  checkOpeness();

  if (iRID == null)
    return true;

  ODocument record = iRID.getRecord();
  if (record != null) {
    Object iPojo = getUserObjectByRecord(record, null);

    deleteCascade(record);

    if (prohibitTombstones)
      underlying.cleanOutRecord(iRID, iVersion);
    else
      underlying.delete(iRID, iVersion);

    if (getTransaction() instanceof OTransactionNoTx)
      unregisterPojo(iPojo, record);

  }
  return false;
}
/**
 * For each field of the record's class that is marked for cascade-delete, deletes
 * the referenced record(s): a single link, a collection of links, or a link map.
 * Null records and classes with no cascade fields are no-ops.
 */
protected void deleteCascade(final ODocument record) {
  if (record == null)
    return;
  List<String> toDeleteCascade = OObjectEntitySerializer.getCascadeDeleteFields(record.getClassName());
  if (toDeleteCascade == null)
    return;
  for (String field : toDeleteCascade) {
    final Object toDelete = record.field(field);
    if (toDelete instanceof OIdentifiable) {
      // instanceof already guarantees non-null: the redundant inner null check was removed
      delete(((OIdentifiable) toDelete).getIdentity());
    } else if (toDelete instanceof Collection) {
      for (OIdentifiable cascadeRecord : ((Collection<OIdentifiable>) toDelete)) {
        if (cascadeRecord != null)
          delete(cascadeRecord.getIdentity());
      }
    } else if (toDelete instanceof Map) {
      for (OIdentifiable cascadeRecord : ((Map<Object, OIdentifiable>) toDelete).values()) {
        if (cascadeRecord != null)
          delete(cascadeRecord.getIdentity());
      }
    }
  }
}
/** Returns the number of records of the given class, delegating to the underlying document DB. */
public long countClass(final String iClassName) {
checkOpeness();
return underlying.countClass(iClassName);
}
/**
 * Returns the number of records of the given class. Note: uses the class'
 * simple name, so two classes with the same simple name in different packages
 * map to the same schema class.
 */
public long countClass(final Class<?> iClass) {
checkOpeness();
return underlying.countClass(iClass.getSimpleName());
}
/**
 * Returns the database dictionary, lazily wrapping the underlying document
 * dictionary's index on first access.
 * NOTE(review): the lazy init is not synchronized — assumes single-threaded
 * database instances; verify if shared across threads.
 */
public ODictionary<Object> getDictionary() {
checkOpeness();
if (dictionary == null)
dictionary = new ODictionaryWrapper(this, underlying.getDictionary().getIndex());
return dictionary;
}
/**
 * Commits the current transaction directly on the underlying record database
 * (bypassing the document layer), then unregisters pojos for deleted documents.
 * The transaction is always closed, even when the commit throws.
 */
@Override
public ODatabasePojoAbstract<Object> commit() {
try {
// BY PASS DOCUMENT DB
((ODatabaseRecordTx) underlying.getUnderlying()).commit();
if (getTransaction().getAllRecordEntries() != null) {
// UPDATE ID & VERSION FOR ALL THE RECORDS
// NOTE(review): pojo stays null for the whole loop; unregisterPojo is a
// no-op in this class, so this only matters for subclasses that override it.
Object pojo = null;
for (ORecordOperation entry : getTransaction().getAllRecordEntries()) {
switch (entry.type) {
case ORecordOperation.CREATED:
case ORecordOperation.UPDATED:
break;
case ORecordOperation.DELETED:
final ORecordInternal<?> rec = entry.getRecord();
if (rec instanceof ODocument)
unregisterPojo(pojo, (ODocument) rec);
break;
}
}
}
} finally {
getTransaction().close();
}
return this;
}
/**
 * Rolls back the current transaction directly on the underlying record
 * database (bypassing the document layer). The transaction is always closed,
 * even when the rollback throws.
 * <p>
 * FIX: the original implementation also copied the CREATED entries out of
 * {@code getTransaction().getCurrentRecordEntries()} into a local list that was
 * never read — that dead code has been removed; behavior is unchanged.
 */
@Override
public ODatabasePojoAbstract<Object> rollback() {
  try {
    // BY PASS DOCUMENT DB
    ((ODatabaseRecordTx) underlying.getUnderlying()).rollback();
  } finally {
    getTransaction().close();
  }
  return this;
}
/** Returns the entity manager bound to this database's URL (set up in init()). */
public OEntityManager getEntityManager() {
return entityManager;
}
/** Returns the wrapped document database this object database delegates to. */
@Override
public ODatabaseDocument getUnderlying() {
return underlying;
}
/**
 * Returns the version number of the object. Version starts from 0 assigned on creation.
 * Falls back to the serializer helper when no managed record exists for the pojo.
 *
 * @param iPojo
 *          User object
 * @return the record version of the bound document, or the version stored on
 *         the pojo itself when it is not bound to a record
 */
@Override
public ORecordVersion getVersion(final Object iPojo) {
checkOpeness();
final ODocument record = getRecordByUserObject(iPojo, false);
if (record != null)
return record.getRecordVersion();
return OObjectSerializerHelper.getObjectVersion(iPojo);
}
/**
 * Returns the object unique identity.
 * Resolution order: the pojo itself if it is OIdentifiable, then its bound
 * document, then the id stored on the pojo by the serializer helper.
 *
 * @param iPojo
 *          User object
 * @return the record id, or whatever the serializer helper resolves (may be
 *         null for an unbound pojo — verify with callers)
 */
@Override
public ORID getIdentity(final Object iPojo) {
checkOpeness();
if (iPojo instanceof OIdentifiable)
return ((OIdentifiable) iPojo).getIdentity();
final ODocument record = getRecordByUserObject(iPojo, false);
if (record != null)
return record.getIdentity();
return OObjectSerializerHelper.getObjectID(this, iPojo);
}
/** When true, only pojos marked dirty are serialized on save (default from OGlobalConfiguration, see init()). */
public boolean isSaveOnlyDirty() {
return saveOnlyDirty;
}
/** Enables/disables saving only dirty pojos. */
public void setSaveOnlyDirty(boolean saveOnlyDirty) {
this.saveOnlyDirty = saveOnlyDirty;
}
/** When true, schema classes are generated automatically for registered entities. */
public boolean isAutomaticSchemaGeneration() {
return automaticSchemaGeneration;
}
/** Enables/disables automatic schema generation. */
public void setAutomaticSchemaGeneration(boolean automaticSchemaGeneration) {
this.automaticSchemaGeneration = automaticSchemaGeneration;
}
/**
 * Creates a new empty document instance.
 * NOTE(review): returns a raw ODocument, not a proxied pojo — confirm callers
 * expect the document form from this overload.
 */
public Object newInstance() {
checkOpeness();
return new ODocument();
}
/**
 * Security-check overloads, all delegating to the underlying document DB.
 * NOTE(review): the first overload takes a byte operation while the other two
 * take int — inherited from the delegate's API.
 */
public <DBTYPE extends ODatabase> DBTYPE checkSecurity(final String iResource, final byte iOperation) {
return (DBTYPE) underlying.checkSecurity(iResource, iOperation);
}
/** Checks security for one resource-specific object. */
public <DBTYPE extends ODatabase> DBTYPE checkSecurity(final String iResource, final int iOperation, Object iResourceSpecific) {
return (DBTYPE) underlying.checkSecurity(iResource, iOperation, iResourceSpecific);
}
/** Checks security for multiple resource-specific objects. */
public <DBTYPE extends ODatabase> DBTYPE checkSecurity(final String iResource, final int iOperation, Object... iResourcesSpecific) {
return (DBTYPE) underlying.checkSecurity(iResource, iOperation, iResourcesSpecific);
}
/**
 * Serializes a pojo into a document. Proxied pojos short-circuit: their
 * handler already holds the backing document, which is returned as-is.
 * Non-proxied pojos go through the full serializer helper.
 */
@Override
public ODocument pojo2Stream(final Object iPojo, final ODocument iRecord) {
if (iPojo instanceof ProxyObject) {
return ((OObjectProxyMethodHandler) ((ProxyObject) iPojo).getHandler()).getDoc();
}
// Schema class is looked up by simple name — same caveat as countClass(Class).
return OObjectSerializerHelper.toStream(iPojo, iRecord, getEntityManager(),
getMetadata().getSchema().getClass(iPojo.getClass().getSimpleName()), this, this, saveOnlyDirty);
}
/** Converts a document into a (proxied) pojo without forcing a reload. */
@Override
public Object stream2pojo(ODocument iRecord, final Object iPojo, final String iFetchPlan) {
return stream2pojo(iRecord, iPojo, iFetchPlan, false);
}
/**
 * Converts a document into a (proxied) pojo. When {@code iReload} is true an
 * existing proxy is re-pointed at the (re)loaded record; otherwise an existing
 * proxy is returned unchanged and a non-proxy pojo is re-proxied.
 *
 * @param iRecord source document; loaded first if its status is NOT_LOADED
 * @param iPojo existing pojo to reuse, may be null
 * @param iFetchPlan currently unused by this implementation
 * @param iReload true to refresh the pojo's backing document
 * @return a proxied pojo bound to the record
 */
public Object stream2pojo(ODocument iRecord, final Object iPojo, final String iFetchPlan, boolean iReload) {
  if (iRecord.getInternalStatus() == ORecordElement.STATUS.NOT_LOADED)
    iRecord = (ODocument) iRecord.load();
  if (iReload) {
    if (iPojo != null) {
      if (iPojo instanceof Proxy) {
        // Re-point the existing proxy at the freshly loaded document.
        ((OObjectProxyMethodHandler) ((ProxyObject) iPojo).getHandler()).setDoc(iRecord);
        ((OObjectProxyMethodHandler) ((ProxyObject) iPojo).getHandler()).updateLoadedFieldMap(iPojo);
        return iPojo;
      } else
        return OObjectEntityEnhancer.getInstance().getProxiedInstance(iPojo.getClass(), iRecord);
    } else
      return OObjectEntityEnhancer.getInstance().getProxiedInstance(iRecord.getClassName(), entityManager, iRecord, null);
  } else if (iPojo == null)
    // FIX: the original fell through to iPojo.getClass() here and threw a
    // NullPointerException when called with a null pojo and iReload == false;
    // build the proxy from the record instead, mirroring the reload branch.
    return OObjectEntityEnhancer.getInstance().getProxiedInstance(iRecord.getClassName(), entityManager, iRecord, null);
  else if (!(iPojo instanceof Proxy))
    return OObjectEntityEnhancer.getInstance().getProxiedInstance(iPojo.getClass(), iRecord);
  else
    return iPojo;
}
/** Whether linked records are loaded lazily (default true, set in init()). */
public boolean isLazyLoading() {
return lazyLoading;
}
/** Enables/disables lazy loading of linked records. */
public void setLazyLoading(final boolean lazyLoading) {
this.lazyLoading = lazyLoading;
}
/** Returns the constant database type identifier. */
public String getType() {
return TYPE;
}
/**
 * Returns the document backing the given pojo. A proxy yields its stored
 * document directly; any other pojo is serialized on the spot.
 * NOTE(review): the iCreateIfNotAvailable flag is ignored by this
 * implementation — non-proxy pojos are always serialized.
 */
@Override
public ODocument getRecordByUserObject(Object iPojo, boolean iCreateIfNotAvailable) {
if (iPojo instanceof Proxy)
return OObjectEntitySerializer.getDocument((Proxy) iPojo);
return OObjectEntitySerializer.getDocument((Proxy) OObjectEntitySerializer.serializeObject(iPojo, this));
}
/**
 * Builds a proxied pojo for the given record.
 * NOTE(review): both iFetchPlan and iCreate are ignored by this implementation;
 * a fresh proxy is created on every call.
 */
@Override
public Object getUserObjectByRecord(final OIdentifiable iRecord, final String iFetchPlan, final boolean iCreate) {
final ODocument document = iRecord.getRecord();
return OObjectEntityEnhancer.getInstance().getProxiedInstance(document.getClassName(), getEntityManager(), document, null);
}
// Intentional no-ops: this proxy-based implementation keeps no user-object
// registry (presumably the proxy handler tracks the document itself — verify
// against the superclass contract before overriding).
@Override
public void registerUserObject(final Object iObject, final ORecordInternal<?> iRecord) {
}
public void registerUserObjectAfterLinkSave(ORecordInternal<?> iRecord) {
}
@Override
public void unregisterPojo(final Object iObject, final ODocument iRecord) {
}
/** Registers a custom method filter used when proxying instances of the given class. */
public void registerClassMethodFilter(Class<?> iClass, OObjectMethodFilter iMethodFilter) {
OObjectEntityEnhancer.getInstance().registerClassMethodFilter(iClass, iMethodFilter);
}
/** Removes a previously registered method filter for the given class. */
public void deregisterClassMethodFilter(final Class<?> iClass) {
OObjectEntityEnhancer.getInstance().deregisterClassMethodFilter(iClass);
}
/**
 * Initializes the object database: binds the entity manager for this URL,
 * installs the entity class handler, applies the save-only-dirty default,
 * registers serializers, enables lazy loading and — when the DB is open —
 * registers the built-in security entity classes.
 */
protected void init() {
entityManager = OEntityManager.getEntityManagerByDatabaseURL(getURL());
entityManager.setClassHandler(OObjectEntityClassHandler.getInstance());
saveOnlyDirty = OGlobalConfiguration.OBJECT_SAVE_ONLY_DIRTY.getValueAsBoolean();
OObjectSerializerHelper.register();
lazyLoading = true;
// Register security entities only once (guarded by the OUser lookup).
if (!isClosed() && entityManager.getEntityClass(OUser.class.getSimpleName()) == null) {
entityManager.registerEntityClass(OUser.class);
entityManager.registerEntityClass(ORole.class);
}
}
/**
 * Deletes every orphan record tracked by the given proxy handler (cascading
 * over configured cascade-delete fields), then empties the handler's orphan set.
 *
 * @param handler proxy method handler whose orphan RIDs are purged
 */
protected void deleteOrphans(final OObjectProxyMethodHandler handler) {
  for (final ORID orphanRid : handler.getOrphans()) {
    final ODocument orphanDoc = orphanRid.getRecord();
    deleteCascade(orphanDoc);
    underlying.delete(orphanDoc);
  }
  handler.getOrphans().clear();
}
}
| 1no label
|
object_src_main_java_com_orientechnologies_orient_object_db_OObjectDatabaseTx.java
|
4,446 |
return (FD) cache.get(key, new Callable<AtomicFieldData>() {
@Override
public AtomicFieldData call() throws Exception {
SegmentReaderUtils.registerCoreListener(context.reader(), IndexFieldCache.this);
AtomicFieldData fieldData = indexFieldData.loadDirect(context);
if (indexService != null) {
ShardId shardId = ShardUtils.extractShardId(context.reader());
if (shardId != null) {
IndexShard shard = indexService.shard(shardId.id());
if (shard != null) {
key.listener = shard.fieldData();
}
}
}
if (key.listener != null) {
key.listener.onLoad(fieldNames, fieldDataType, fieldData);
}
return fieldData;
}
});
| 1no label
|
src_main_java_org_elasticsearch_indices_fielddata_cache_IndicesFieldDataCache.java
|
161 |
/** Forward iterator: supplies the head node and successor traversal to AbstractItr. */
private class Itr extends AbstractItr {
Node<E> startNode() { return first(); }
Node<E> nextNode(Node<E> p) { return succ(p); }
}
| 0true
|
src_main_java_jsr166y_ConcurrentLinkedDeque.java
|
4,478 |
pool.execute(new Runnable() {
@Override
public void run() {
IndexInput indexInput = null;
try {
final int BUFFER_SIZE = (int) recoverySettings.fileChunkSize().bytes();
byte[] buf = new byte[BUFFER_SIZE];
StoreFileMetaData md = shard.store().metaData(name);
// TODO: maybe use IOContext.READONCE?
indexInput = shard.store().openInputRaw(name, IOContext.READ);
boolean shouldCompressRequest = recoverySettings.compress();
if (CompressorFactory.isCompressed(indexInput)) {
shouldCompressRequest = false;
}
long len = indexInput.length();
long readCount = 0;
while (readCount < len) {
if (shard.state() == IndexShardState.CLOSED) { // check if the shard got closed on us
throw new IndexShardClosedException(shard.shardId());
}
int toRead = readCount + BUFFER_SIZE > len ? (int) (len - readCount) : BUFFER_SIZE;
long position = indexInput.getFilePointer();
if (recoverySettings.rateLimiter() != null) {
recoverySettings.rateLimiter().pause(toRead);
}
indexInput.readBytes(buf, 0, toRead, false);
BytesArray content = new BytesArray(buf, 0, toRead);
transportService.submitRequest(request.targetNode(), RecoveryTarget.Actions.FILE_CHUNK, new RecoveryFileChunkRequest(request.recoveryId(), request.shardId(), name, position, len, md.checksum(), content),
TransportRequestOptions.options().withCompress(shouldCompressRequest).withType(TransportRequestOptions.Type.RECOVERY).withTimeout(internalActionTimeout), EmptyTransportResponseHandler.INSTANCE_SAME).txGet();
readCount += toRead;
}
} catch (Throwable e) {
lastException.set(e);
} finally {
IOUtils.closeWhileHandlingException(indexInput);
latch.countDown();
}
}
});
| 1no label
|
src_main_java_org_elasticsearch_indices_recovery_RecoverySource.java
|
1,862 |
@Controller("blAdminBasicEntityController")
@RequestMapping("/{sectionKey:.+}")
public class AdminBasicEntityController extends AdminAbstractController {
protected static final Log LOG = LogFactory.getLog(AdminBasicEntityController.class);
// ******************************************
// REQUEST-MAPPING BOUND CONTROLLER METHODS *
// ******************************************
/**
 * Renders the main entity listing for the specified class, which is based on the current sectionKey with some optional
 * criteria.
 *
 * @param request
 * @param response
 * @param model
 * @param pathVars
 * @param requestParams a Map of property name -> list criteria values
 * @return the return view path
 * @throws Exception
 */
@RequestMapping(value = "", method = RequestMethod.GET)
public String viewEntityList(HttpServletRequest request, HttpServletResponse response, Model model,
        @PathVariable Map<String, String> pathVars,
        @RequestParam MultiValueMap<String, String> requestParams) throws Exception {
    String sectionKey = getSectionKey(pathVars);
    String sectionClassName = getClassNameForSection(sectionKey);
    PersistencePackageRequest ppr = getSectionPersistencePackageRequest(sectionClassName, requestParams);
    ClassMetadata cmd = service.getClassMetadata(ppr);
    DynamicResultSet drs = service.getRecords(ppr);
    ListGrid listGrid = formService.buildMainListGrid(drs, cmd, sectionKey);
    List<EntityFormAction> mainActions = new ArrayList<EntityFormAction>();
    addAddActionIfAllowed(sectionClassName, cmd, mainActions);
    // FIX: guard against a grid with no header fields — the original called
    // iterator().next() unconditionally, throwing NoSuchElementException for an
    // entity whose list grid exposes no columns.
    if (!listGrid.getHeaderFields().isEmpty()) {
        Field firstField = listGrid.getHeaderFields().iterator().next();
        if (requestParams.containsKey(firstField.getName())) {
            model.addAttribute("mainSearchTerm", requestParams.get(firstField.getName()).get(0));
        }
    }
    model.addAttribute("entityFriendlyName", cmd.getPolymorphicEntities().getFriendlyName());
    model.addAttribute("currentUrl", request.getRequestURL().toString());
    model.addAttribute("listGrid", listGrid);
    model.addAttribute("mainActions", mainActions);
    model.addAttribute("viewType", "entityList");
    setModelAttributes(model, sectionKey);
    return "modules/defaultContainer";
}
/**
 * Adds the "Add" button to the main entity form if the current user has
 * permissions to create new instances of the entity and at least one field in
 * the entity is not marked read only.
 *
 * @param sectionClassName fully qualified class name for the section
 * @param cmd class metadata whose properties are inspected
 * @param mainActions action list the Add action is appended to
 */
protected void addAddActionIfAllowed(String sectionClassName, ClassMetadata cmd, List<EntityFormAction> mainActions) {
    // If the user does not have create permissions, we will not add the "Add New" button
    boolean canCreate = true;
    try {
        adminRemoteSecurityService.securityCheck(sectionClassName, EntityOperationType.ADD);
    } catch (ServiceException e) {
        if (e instanceof SecurityServiceException) {
            canCreate = false;
        }
    }
    if (canCreate) {
        // Suppress the Add action when every basic field is read only (which is
        // also the case when there are no basic fields at all).
        boolean hasWritableField = false;
        for (Property property : cmd.getProperties()) {
            if (property.getMetadata() instanceof BasicFieldMetadata) {
                Boolean readOnly = ((BasicFieldMetadata) property.getMetadata()).getReadOnly();
                if (readOnly == null || !readOnly) {
                    hasWritableField = true;
                    break;
                }
            }
        }
        if (!hasWritableField) {
            canCreate = false;
        }
    }
    if (canCreate) {
        mainActions.add(DefaultMainActions.ADD);
    }
    mainEntityActionsExtensionManager.modifyMainActions(cmd, mainActions);
}
/**
 * Renders the modal form that is used to add a new parent level entity. Note that this form cannot render any
 * subcollections as operations on those collections require the parent level entity to first be saved and have
 * and id. Once the entity is initially saved, we will redirect the user to the normal manage entity screen where
 * they can then perform operations on sub collections.
 *
 * @param request
 * @param response
 * @param model
 * @param pathVars
 * @param entityType optional URL-encoded concrete type; blank triggers polymorphic type resolution
 * @return the return view path
 * @throws Exception
 */
@RequestMapping(value = "/add", method = RequestMethod.GET)
public String viewAddEntityForm(HttpServletRequest request, HttpServletResponse response, Model model,
@PathVariable Map<String, String> pathVars,
@RequestParam(defaultValue = "") String entityType) throws Exception {
String sectionKey = getSectionKey(pathVars);
String sectionClassName = getClassNameForSection(sectionKey);
ClassMetadata cmd = service.getClassMetadata(getSectionPersistencePackageRequest(sectionClassName));
// If the entity type isn't specified, we need to determine if there are various polymorphic types for this entity.
if (StringUtils.isBlank(entityType)) {
if (cmd.getPolymorphicEntities().getChildren().length == 0) {
entityType = cmd.getPolymorphicEntities().getFullyQualifiedClassname();
} else {
entityType = getDefaultEntityType();
}
} else {
entityType = URLDecoder.decode(entityType, "UTF-8");
}
// If we still don't have a type selected, that means that there were indeed multiple possible types and we
// will be allowing the user to pick his desired type.
if (StringUtils.isBlank(entityType)) {
List<ClassTree> entityTypes = getAddEntityTypes(cmd.getPolymorphicEntities());
model.addAttribute("entityTypes", entityTypes);
model.addAttribute("viewType", "modal/entityTypeSelection");
// Strip the context path so the modal posts back to a relative URI.
String requestUri = request.getRequestURI();
if (!request.getContextPath().equals("/") && requestUri.startsWith(request.getContextPath())) {
requestUri = requestUri.substring(request.getContextPath().length() + 1, requestUri.length());
}
model.addAttribute("currentUri", requestUri);
} else {
EntityForm entityForm = formService.createEntityForm(cmd);
// We need to make sure that the ceiling entity is set to the interface and the specific entity type
// is set to the type we're going to be creating.
entityForm.setCeilingEntityClassname(cmd.getCeilingType());
entityForm.setEntityType(entityType);
// When we initially build the class metadata (and thus, the entity form), we had all of the possible
// polymorphic fields built out. Now that we have a concrete entity type to render, we can remove the
// fields that are not applicable for this given entity type.
formService.removeNonApplicableFields(cmd, entityForm, entityType);
model.addAttribute("entityForm", entityForm);
model.addAttribute("viewType", "modal/entityAdd");
}
model.addAttribute("entityFriendlyName", cmd.getPolymorphicEntities().getFriendlyName());
model.addAttribute("currentUrl", request.getRequestURL().toString());
model.addAttribute("modalHeaderType", "addEntity");
setModelAttributes(model, sectionKey);
return "modules/modalContainer";
}
/**
 * Processes the request to add a new entity. If successful, returns a redirect to the newly created entity.
 * On validation failure the add modal is re-rendered with the submitted values and error markers.
 *
 * @param request
 * @param response
 * @param model
 * @param pathVars
 * @param entityForm
 * @param result
 * @return the return view path
 * @throws Exception
 */
@RequestMapping(value = "/add", method = RequestMethod.POST)
public String addEntity(HttpServletRequest request, HttpServletResponse response, Model model,
@PathVariable Map<String, String> pathVars,
@ModelAttribute(value="entityForm") EntityForm entityForm, BindingResult result) throws Exception {
String sectionKey = getSectionKey(pathVars);
extractDynamicFormFields(entityForm);
// The entity is persisted before validation; on error the modal re-renders —
// NOTE(review): verify downstream that an invalid-but-persisted entity is rolled back.
Entity entity = service.addEntity(entityForm, getSectionCustomCriteria());
entityFormValidator.validate(entityForm, entity, result);
if (result.hasErrors()) {
String sectionClassName = getClassNameForSection(sectionKey);
ClassMetadata cmd = service.getClassMetadata(getSectionPersistencePackageRequest(sectionClassName));
// Rebuild the form from the returned entity so errors line up with fields.
entityForm.clearFieldsMap();
formService.populateEntityForm(cmd, entity, entityForm);
formService.removeNonApplicableFields(cmd, entityForm, entityForm.getEntityType());
model.addAttribute("viewType", "modal/entityAdd");
model.addAttribute("currentUrl", request.getRequestURL().toString());
model.addAttribute("modalHeaderType", "addEntity");
setModelAttributes(model, sectionKey);
return "modules/modalContainer";
}
// Note that AJAX Redirects need the context path prepended to them
return "ajaxredirect:" + getContextPath(request) + sectionKey + "/" + entity.getPMap().get("id").getValue();
}
/**
 * Renders the main entity form for the specified entity. The form is made read
 * only when no field is editable, when the user lacks UPDATE permission, or
 * when the request is an AJAX (modal) view.
 *
 * @param request
 * @param response
 * @param model
 * @param pathVars
 * @param id primary key of the entity to render
 * @return the return view path
 * @throws Exception
 */
@RequestMapping(value = "/{id}", method = RequestMethod.GET)
public String viewEntityForm(HttpServletRequest request, HttpServletResponse response, Model model,
        @PathVariable Map<String, String> pathVars,
        @PathVariable("id") String id) throws Exception {
    String sectionKey = getSectionKey(pathVars);
    String sectionClassName = getClassNameForSection(sectionKey);
    PersistencePackageRequest ppr = getSectionPersistencePackageRequest(sectionClassName);
    ClassMetadata cmd = service.getClassMetadata(ppr);
    Entity entity = service.getRecord(ppr, id, cmd, false);
    Map<String, DynamicResultSet> subRecordsMap = service.getRecordsForAllSubCollections(ppr, entity);
    EntityForm entityForm = formService.createEntityForm(cmd, entity, subRecordsMap);
    model.addAttribute("entity", entity);
    model.addAttribute("entityForm", entityForm);
    model.addAttribute("currentUrl", request.getRequestURL().toString());
    setModelAttributes(model, sectionKey);
    // The form is editable only if at least one basic field is writable or one
    // collection is mutable.
    boolean readable = false;
    for (Property property : cmd.getProperties()) {
        FieldMetadata fieldMetadata = property.getMetadata();
        if (fieldMetadata instanceof BasicFieldMetadata) {
            // FIX: getReadOnly() returns a Boolean that may be null (it is
            // null-checked elsewhere in this class); the original unboxed it
            // directly and could throw a NullPointerException. Treat null as
            // "not read only", consistent with addAddActionIfAllowed.
            Boolean readOnly = ((BasicFieldMetadata) fieldMetadata).getReadOnly();
            if (readOnly == null || !readOnly) {
                readable = true;
                break;
            }
        } else {
            if (((CollectionMetadata) fieldMetadata).isMutable()) {
                readable = true;
                break;
            }
        }
    }
    if (!readable) {
        entityForm.setReadOnly();
    }
    // If the user does not have edit permissions, we will go ahead and make the form read only to prevent confusion
    try {
        adminRemoteSecurityService.securityCheck(sectionClassName, EntityOperationType.UPDATE);
    } catch (ServiceException e) {
        if (e instanceof SecurityServiceException) {
            entityForm.setReadOnly();
        }
    }
    if (isAjaxRequest(request)) {
        entityForm.setReadOnly();
        model.addAttribute("viewType", "modal/entityView");
        model.addAttribute("modalHeaderType", "viewEntity");
        return "modules/modalContainer";
    } else {
        model.addAttribute("viewType", "entityEdit");
        return "modules/defaultContainer";
    }
}
/**
 * Attempts to save the given entity. If validation is unsuccessful, it will re-render the entity form with
 * error fields highlighted. On a successful save, it will refresh the entity page.
 *
 * @param request
 * @param response
 * @param model
 * @param pathVars
 * @param id
 * @param entityForm
 * @param result
 * @param ra flash attributes carrier for the post-save redirect message
 * @return the return view path
 * @throws Exception
 */
@RequestMapping(value = "/{id}", method = RequestMethod.POST)
public String saveEntity(HttpServletRequest request, HttpServletResponse response, Model model,
@PathVariable Map<String, String> pathVars,
@PathVariable(value="id") String id,
@ModelAttribute(value="entityForm") EntityForm entityForm, BindingResult result,
RedirectAttributes ra) throws Exception {
String sectionKey = getSectionKey(pathVars);
String sectionClassName = getClassNameForSection(sectionKey);
PersistencePackageRequest ppr = getSectionPersistencePackageRequest(sectionClassName);
extractDynamicFormFields(entityForm);
// NOTE(review): the update is executed before validation; verify downstream
// that a validation failure does not leave the entity partially updated.
Entity entity = service.updateEntity(entityForm, getSectionCustomCriteria());
entityFormValidator.validate(entityForm, entity, result);
if (result.hasErrors()) {
model.addAttribute("headerFlash", "save.unsuccessful");
model.addAttribute("headerFlashAlert", true);
// Rebuild the form (with subcollections) so errors line up with fields.
Map<String, DynamicResultSet> subRecordsMap = service.getRecordsForAllSubCollections(ppr, entity);
ClassMetadata cmd = service.getClassMetadata(ppr);
entityForm.clearFieldsMap();
formService.populateEntityForm(cmd, entity, subRecordsMap, entityForm);
model.addAttribute("entity", entity);
model.addAttribute("currentUrl", request.getRequestURL().toString());
setModelAttributes(model, sectionKey);
if (isAjaxRequest(request)) {
entityForm.setReadOnly();
model.addAttribute("viewType", "modal/entityView");
model.addAttribute("modalHeaderType", "viewEntity");
return "modules/modalContainer";
} else {
model.addAttribute("viewType", "entityEdit");
return "modules/defaultContainer";
}
}
ra.addFlashAttribute("headerFlash", "save.successful");
return "redirect:/" + sectionKey + "/" + id;
}
/**
 * Attempts to remove the given entity. A constraint violation (the entity is
 * still referenced) redirects back to the entity page with a flash error;
 * other service failures are rethrown.
 *
 * @param request
 * @param response
 * @param model
 * @param pathVars
 * @param id
 * @return the return view path
 * @throws Exception
 */
@RequestMapping(value = "/{id}/delete", method = RequestMethod.POST)
public String removeEntity(HttpServletRequest request, HttpServletResponse response, Model model,
@PathVariable Map<String, String> pathVars,
@PathVariable(value="id") String id,
@ModelAttribute(value="entityForm") EntityForm entityForm, BindingResult result) throws Exception {
String sectionKey = getSectionKey(pathVars);
try {
service.removeEntity(entityForm, getSectionCustomCriteria());
} catch (ServiceException e) {
if (e.containsCause(ConstraintViolationException.class)) {
// Create a flash attribute for the unsuccessful delete
FlashMap fm = new FlashMap();
fm.put("headerFlash", "delete.unsuccessful");
fm.put("headerFlashAlert", true);
request.setAttribute(DispatcherServlet.OUTPUT_FLASH_MAP_ATTRIBUTE, fm);
// Make sure we have this error show up in our logs
LOG.error("Could not delete record", e);
// Refresh the page
return "redirect:/" + sectionKey + "/" + id;
}
throw e;
}
return "redirect:/" + sectionKey;
}
/**
 * Shows the modal dialog that is used to select a "to-one" collection item. For example, this could be used to show
 * a list of categories for the ManyToOne field "defaultCategory" in Product.
 *
 * @param request
 * @param response
 * @param model
 * @param pathVars
 * @param owningClass
 * @param collectionField
 * @return the return view path
 * @throws Exception
 */
@RequestMapping(value = "/{owningClass:.*}/{collectionField:.*}/select", method = RequestMethod.GET)
public String showSelectCollectionItem(HttpServletRequest request, HttpServletResponse response, Model model,
@PathVariable Map<String, String> pathVars,
@PathVariable(value = "owningClass") String owningClass,
@PathVariable(value="collectionField") String collectionField,
@RequestParam MultiValueMap<String, String> requestParams) throws Exception {
PersistencePackageRequest ppr = getSectionPersistencePackageRequest(owningClass, requestParams);
ClassMetadata mainMetadata = service.getClassMetadata(ppr);
Property collectionProperty = mainMetadata.getPMap().get(collectionField);
FieldMetadata md = collectionProperty.getMetadata();
// Re-target the request at the collection's own metadata, carrying over
// filtering and paging from the query string.
ppr = PersistencePackageRequest.fromMetadata(md);
ppr.addFilterAndSortCriteria(getCriteria(requestParams));
ppr.setStartIndex(getStartIndex(requestParams));
ppr.setMaxIndex(getMaxIndex(requestParams));
// Only basic (to-one lookup) fields render a selectable grid here.
if (md instanceof BasicFieldMetadata) {
DynamicResultSet drs = service.getRecords(ppr);
ListGrid listGrid = formService.buildCollectionListGrid(null, drs, collectionProperty, owningClass);
model.addAttribute("listGrid", listGrid);
model.addAttribute("viewType", "modal/simpleSelectEntity");
}
model.addAttribute("currentUrl", request.getRequestURL().toString());
model.addAttribute("modalHeaderType", "selectCollectionItem");
model.addAttribute("collectionProperty", collectionProperty);
setModelAttributes(model, owningClass);
return "modules/modalContainer";
}
@RequestMapping(value = "/{collectionField:.*}/details", method = RequestMethod.GET)
public @ResponseBody Map<String, String> getCollectionValueDetails(HttpServletRequest request, HttpServletResponse response, Model model,
@PathVariable Map<String, String> pathVars,
@PathVariable(value="collectionField") String collectionField,
@RequestParam String ids,
@RequestParam MultiValueMap<String, String> requestParams) throws Exception {
String sectionKey = getSectionKey(pathVars);
String sectionClassName = getClassNameForSection(sectionKey);
PersistencePackageRequest ppr = getSectionPersistencePackageRequest(sectionClassName, requestParams);
ClassMetadata mainMetadata = service.getClassMetadata(ppr);
Property collectionProperty = mainMetadata.getPMap().get(collectionField);
FieldMetadata md = collectionProperty.getMetadata();
ppr = PersistencePackageRequest.fromMetadata(md);
ppr.setStartIndex(getStartIndex(requestParams));
ppr.setMaxIndex(getMaxIndex(requestParams));
if (md instanceof BasicFieldMetadata) {
String idProp = ((BasicFieldMetadata) md).getForeignKeyProperty();
String displayProp = ((BasicFieldMetadata) md).getForeignKeyDisplayValueProperty();
List<String> filterValues = Arrays.asList(ids.split(FILTER_VALUE_SEPARATOR_REGEX));
ppr.addFilterAndSortCriteria(new FilterAndSortCriteria(idProp, filterValues));
DynamicResultSet drs = service.getRecords(ppr);
Map<String, String> returnMap = new HashMap<String, String>();
for (Entity e : drs.getRecords()) {
String id = e.getPMap().get(idProp).getValue();
String disp = e.getPMap().get(displayProp).getDisplayValue();
if (StringUtils.isBlank(disp)) {
disp = e.getPMap().get(displayProp).getValue();
}
returnMap.put(id, disp);
}
return returnMap;
}
return null;
}
/**
 * Shows the modal popup for the current selected "to-one" field. For instance, if you are viewing a list of products
 * then this method is invoked when a user clicks on the name of the default category field.
 *
 * @param request
 * @param response
 * @param model
 * @param pathVars
 * @param collectionField
 * @param id
 * @return the return view path
 * @throws Exception
 */
@RequestMapping(value = "/{collectionField:.*}/{id}/view", method = RequestMethod.GET)
public String viewCollectionItemDetails(HttpServletRequest request, HttpServletResponse response, Model model,
@PathVariable Map<String, String> pathVars,
@PathVariable(value="collectionField") String collectionField,
@PathVariable(value="id") String id) throws Exception {
String sectionKey = getSectionKey(pathVars);
String mainClassName = getClassNameForSection(sectionKey);
ClassMetadata mainMetadata = service.getClassMetadata(getSectionPersistencePackageRequest(mainClassName));
Property collectionProperty = mainMetadata.getPMap().get(collectionField);
BasicFieldMetadata md = (BasicFieldMetadata) collectionProperty.getMetadata();
// Delegate to the target entity's own section so the standard entity form renders.
AdminSection section = adminNavigationService.findAdminSectionByClass(md.getForeignKeyClass());
String sectionUrlKey = (section.getUrl().startsWith("/")) ? section.getUrl().substring(1) : section.getUrl();
Map<String, String> varsForField = new HashMap<String, String>();
varsForField.put("sectionKey", sectionUrlKey);
return viewEntityForm(request, response, model, varsForField, id);
}
/**
 * Returns the records for a given collectionField filtered by a particular criteria.
 * Renders a standalone list grid fragment intended to replace the currently
 * visible grid on the client.
 *
 * @param request
 * @param response
 * @param model
 * @param pathVars
 * @param id owning entity id
 * @param collectionField
 * @param requestParams filter/sort/paging criteria
 * @return the return view path
 * @throws Exception
 */
@RequestMapping(value = "/{id}/{collectionField:.*}", method = RequestMethod.GET)
public String getCollectionFieldRecords(HttpServletRequest request, HttpServletResponse response, Model model,
@PathVariable Map<String, String> pathVars,
@PathVariable(value="id") String id,
@PathVariable(value="collectionField") String collectionField,
@RequestParam MultiValueMap<String, String> requestParams) throws Exception {
String sectionKey = getSectionKey(pathVars);
String mainClassName = getClassNameForSection(sectionKey);
PersistencePackageRequest ppr = getSectionPersistencePackageRequest(mainClassName, requestParams);
ClassMetadata mainMetadata = service.getClassMetadata(ppr);
Property collectionProperty = mainMetadata.getPMap().get(collectionField);
// A fresh request (without the grid criteria) is used to load the owning entity.
ppr = getSectionPersistencePackageRequest(mainClassName);
Entity entity = service.getRecord(ppr, id, mainMetadata, false);
// Next, we must get the new list grid that represents this collection
ListGrid listGrid = getCollectionListGrid(mainMetadata, entity, collectionProperty, requestParams, sectionKey);
model.addAttribute("listGrid", listGrid);
model.addAttribute("currentParams", new ObjectMapper().writeValueAsString(requestParams));
// We return the new list grid so that it can replace the currently visible one
setModelAttributes(model, sectionKey);
return "views/standaloneListGrid";
}
/**
* Shows the modal dialog that is used to add an item to a given collection. There are several possible outcomes
* of this call depending on the type of the specified collection field.
*
* <ul>
* <li>
* <b>Basic Collection (Persist)</b> - Renders a blank form for the specified target entity so that the user may
* enter information and associate the record with this collection. Used by fields such as ProductAttribute.
* </li>
* <li>
* <b>Basic Collection (Lookup)</b> - Renders a list grid that allows the user to click on an entity and select it.
* Used by fields such as "allParentCategories".
* </li>
* <li>
* <b>Adorned Collection (without form)</b> - Renders a list grid that allows the user to click on an entity and
* select it. The view rendered by this is identical to basic collection (lookup), but will perform the operation
* on an adorned field, which may carry extra meta-information about the created relationship, such as order.
* </li>
* <li>
* <b>Adorned Collection (with form)</b> - Renders a list grid that allows the user to click on an entity and
* select it. Once the user selects the entity, he will be presented with an empty form based on the specified
* "maintainedAdornedTargetFields" for this field. Used by fields such as "crossSellProducts", which in addition
* to linking an entity, provide extra fields, such as a promotional message.
* </li>
* <li>
* <b>Map Collection</b> - Renders a form for the target entity that has an additional key field. This field is
* populated either from the configured map keys, or as a result of a lookup in the case of a key based on another
* entity. Used by fields such as the mediaMap on a Sku.
* </li>
*
* @param request
* @param response
* @param model
* @param sectionKey
* @param id
* @param collectionField
* @param requestParams
* @return the return view path
* @throws Exception
*/
@RequestMapping(value = "/{id}/{collectionField:.*}/add", method = RequestMethod.GET)
public String showAddCollectionItem(HttpServletRequest request, HttpServletResponse response, Model model,
        @PathVariable Map<String, String> pathVars,
        @PathVariable(value = "id") String id,
        @PathVariable(value = "collectionField") String collectionField,
        @RequestParam MultiValueMap<String, String> requestParams) throws Exception {
    // Resolve the admin section and the metadata of the collection field being added to.
    String sectionKey = getSectionKey(pathVars);
    String mainClassName = getClassNameForSection(sectionKey);
    ClassMetadata mainMetadata = service.getClassMetadata(getSectionPersistencePackageRequest(mainClassName));
    Property collectionProperty = mainMetadata.getPMap().get(collectionField);
    FieldMetadata md = collectionProperty.getMetadata();
    PersistencePackageRequest ppr = PersistencePackageRequest.fromMetadata(md)
            .withFilterAndSortCriteria(getCriteria(requestParams))
            .withStartIndex(getStartIndex(requestParams))
            .withMaxIndex(getMaxIndex(requestParams));
    if (md instanceof BasicCollectionMetadata) {
        BasicCollectionMetadata fmd = (BasicCollectionMetadata) md;
        if (fmd.getAddMethodType().equals(AddMethodType.PERSIST)) {
            ClassMetadata cmd = service.getClassMetadata(ppr);
            // If the entity type isn't specified, we need to determine if there are various polymorphic types
            // for this entity.
            String entityType = null;
            if (requestParams.containsKey("entityType")) {
                entityType = requestParams.get("entityType").get(0);
            }
            if (StringUtils.isBlank(entityType)) {
                // No explicit type requested: use the single concrete type when the hierarchy has no
                // children, otherwise fall back to the configured default (which may still be blank).
                if (cmd.getPolymorphicEntities().getChildren().length == 0) {
                    entityType = cmd.getPolymorphicEntities().getFullyQualifiedClassname();
                } else {
                    entityType = getDefaultEntityType();
                }
            } else {
                entityType = URLDecoder.decode(entityType, "UTF-8");
            }
            if (StringUtils.isBlank(entityType)) {
                // Still ambiguous: render a modal that asks the user to pick one of the polymorphic types.
                List<ClassTree> entityTypes = getAddEntityTypes(cmd.getPolymorphicEntities());
                model.addAttribute("entityTypes", entityTypes);
                model.addAttribute("viewType", "modal/entityTypeSelection");
                model.addAttribute("entityFriendlyName", cmd.getPolymorphicEntities().getFriendlyName());
                // Strip the servlet context path so the modal posts back to a context-relative URI.
                String requestUri = request.getRequestURI();
                if (!request.getContextPath().equals("/") && requestUri.startsWith(request.getContextPath())) {
                    requestUri = requestUri.substring(request.getContextPath().length() + 1, requestUri.length());
                }
                model.addAttribute("currentUri", requestUri);
                model.addAttribute("modalHeaderType", "addEntity");
                setModelAttributes(model, sectionKey);
                return "modules/modalContainer";
            } else {
                ppr = ppr.withCeilingEntityClassname(entityType);
            }
        }
    }
    //service.getContextSpecificRelationshipId(mainMetadata, entity, prefix);
    // Echo the request parameters into the model so the modal can round-trip them on submit.
    model.addAttribute("currentParams", new ObjectMapper().writeValueAsString(requestParams));
    return buildAddCollectionItemModel(request, response, model, id, collectionField, sectionKey, collectionProperty, md, ppr, null, null);
}
/**
* Adds the requested collection item
*
* @param request
* @param response
* @param model
* @param pathVars
* @param id
* @param collectionField
* @param entityForm
* @return the return view path
* @throws Exception
*/
@RequestMapping(value = "/{id}/{collectionField:.*}/add", method = RequestMethod.POST)
public String addCollectionItem(HttpServletRequest request, HttpServletResponse response, Model model,
        @PathVariable Map<String, String> pathVars,
        @PathVariable(value="id") String id,
        @PathVariable(value="collectionField") String collectionField,
        @ModelAttribute(value="entityForm") EntityForm entityForm, BindingResult result) throws Exception {
    // Resolve the section plus metadata for the owning entity and the targeted collection field.
    String sectionKey = getSectionKey(pathVars);
    String mainClassName = getClassNameForSection(sectionKey);
    ClassMetadata mainMetadata = service.getClassMetadata(getSectionPersistencePackageRequest(mainClassName));
    Property collectionProperty = mainMetadata.getPMap().get(collectionField);
    PersistencePackageRequest ppr = getSectionPersistencePackageRequest(mainClassName);
    Entity entity = service.getRecord(ppr, id, mainMetadata, false);

    // Persist the new collection member first, then validate what came back.
    Entity savedEntity = service.addSubCollectionEntity(entityForm, mainMetadata, collectionProperty, entity);
    entityFormValidator.validate(entityForm, savedEntity, result);
    if (result.hasErrors()) {
        // Validation failed: re-render the add modal with the submitted values and the errors attached.
        FieldMetadata md = collectionProperty.getMetadata();
        return buildAddCollectionItemModel(request, response, model, id, collectionField, sectionKey,
                collectionProperty, md, PersistencePackageRequest.fromMetadata(md), entityForm, savedEntity);
    }

    // Success: return a refreshed list grid so the client can swap out the one currently on screen.
    model.addAttribute("listGrid", getCollectionListGrid(mainMetadata, entity, collectionProperty, null, sectionKey));
    setModelAttributes(model, sectionKey);
    return "views/standaloneListGrid";
}
/**
* Builds out all of the model information needed for showing the add modal for collection items on both the initial GET
* as well as after a POST with validation errors
*
* @param request
* @param model
* @param id
* @param collectionField
* @param sectionKey
* @param collectionProperty
* @param md
* @param ppr
* @return the appropriate view to display for the modal
* @see {@link #addCollectionItem(HttpServletRequest, HttpServletResponse, Model, Map, String, String, EntityForm, BindingResult)}
* @see {@link #showAddCollectionItem(HttpServletRequest, HttpServletResponse, Model, Map, String, String, MultiValueMap)}
* @throws ServiceException
*/
protected String buildAddCollectionItemModel(HttpServletRequest request, HttpServletResponse response,
        Model model,
        String id,
        String collectionField,
        String sectionKey,
        Property collectionProperty,
        FieldMetadata md, PersistencePackageRequest ppr, EntityForm entityForm, Entity entity) throws ServiceException {
    // A non-null entityForm means we are re-rendering after a validation failure; clear its field
    // map so it can be rebuilt from metadata and repopulated with the submitted values below.
    if (entityForm != null) {
        entityForm.clearFieldsMap();
    }
    if (md instanceof BasicCollectionMetadata) {
        BasicCollectionMetadata fmd = (BasicCollectionMetadata) md;
        // When adding items to basic collections, we will sometimes show a form to persist a new record
        // and sometimes show a list grid to allow the user to associate an existing record.
        if (fmd.getAddMethodType().equals(AddMethodType.PERSIST)) {
            ClassMetadata collectionMetadata = service.getClassMetadata(ppr);
            if (entityForm == null) {
                entityForm = formService.createEntityForm(collectionMetadata);
                entityForm.setCeilingEntityClassname(ppr.getCeilingEntityClassname());
                entityForm.setEntityType(ppr.getCeilingEntityClassname());
            } else {
                // Rebuild the form structure, then restore the user's previously submitted values.
                formService.populateEntityForm(collectionMetadata, entityForm);
                formService.populateEntityFormFieldValues(collectionMetadata, entity, entityForm);
            }
            formService.removeNonApplicableFields(collectionMetadata, entityForm, ppr.getCeilingEntityClassname());
            // NOTE(review): return value ignored -- this call appears to have no effect; confirm whether
            // it was meant to force initialization of the first tab's visibility.
            entityForm.getTabs().iterator().next().getIsVisible();
            model.addAttribute("entityForm", entityForm);
            model.addAttribute("viewType", "modal/simpleAddEntity");
        } else {
            // LOOKUP-style add: show a grid of candidate records the user can associate.
            DynamicResultSet drs = service.getRecords(ppr);
            ListGrid listGrid = formService.buildCollectionListGrid(id, drs, collectionProperty, sectionKey);
            listGrid.setPathOverride(request.getRequestURL().toString());
            model.addAttribute("listGrid", listGrid);
            model.addAttribute("viewType", "modal/simpleSelectEntity");
        }
    } else if (md instanceof AdornedTargetCollectionMetadata) {
        AdornedTargetCollectionMetadata fmd = (AdornedTargetCollectionMetadata) md;
        // Even though this field represents an adorned target collection, the list we want to show in the modal
        // is the standard list grid for the target entity of this field
        ppr.setOperationTypesOverride(null);
        ppr.setType(PersistencePackageRequest.Type.STANDARD);
        ClassMetadata collectionMetadata = service.getClassMetadata(ppr);
        DynamicResultSet drs = service.getRecords(ppr);
        ListGrid listGrid = formService.buildMainListGrid(drs, collectionMetadata, sectionKey);
        listGrid.setSubCollectionFieldName(collectionField);
        listGrid.setPathOverride(request.getRequestURL().toString());
        listGrid.setFriendlyName(collectionMetadata.getPolymorphicEntities().getFriendlyName());
        if (entityForm == null) {
            entityForm = formService.buildAdornedListForm(fmd, ppr.getAdornedList(), id);
        } else {
            formService.buildAdornedListForm(fmd, ppr.getAdornedList(), id, entityForm);
            formService.populateEntityFormFieldValues(collectionMetadata, entity, entityForm);
        }
        // If the adorned form exposes any visible maintained field, upgrade the modal to the
        // grid-plus-form variant; otherwise a plain selection grid suffices.
        listGrid.setListGridType(ListGrid.Type.ADORNED);
        for (Entry<String, Field> entry : entityForm.getFields().entrySet()) {
            if (entry.getValue().getIsVisible()) {
                listGrid.setListGridType(ListGrid.Type.ADORNED_WITH_FORM);
                break;
            }
        }
        model.addAttribute("listGrid", listGrid);
        model.addAttribute("entityForm", entityForm);
        model.addAttribute("viewType", "modal/adornedSelectEntity");
    } else if (md instanceof MapMetadata) {
        MapMetadata fmd = (MapMetadata) md;
        ClassMetadata collectionMetadata = service.getClassMetadata(ppr);
        if (entityForm == null) {
            entityForm = formService.buildMapForm(fmd, ppr.getMapStructure(), collectionMetadata, id);
        } else {
            formService.buildMapForm(fmd, ppr.getMapStructure(), collectionMetadata, id, entityForm);
            formService.populateEntityFormFieldValues(collectionMetadata, entity, entityForm);
        }
        model.addAttribute("entityForm", entityForm);
        model.addAttribute("viewType", "modal/mapAddEntity");
    }
    // Attributes shared by every modal variant.
    model.addAttribute("currentUrl", request.getRequestURL().toString());
    model.addAttribute("modalHeaderType", "addCollectionItem");
    model.addAttribute("collectionProperty", collectionProperty);
    setModelAttributes(model, sectionKey);
    return "modules/modalContainer";
}
/**
* Shows the appropriate modal dialog to edit the selected collection item
*
* @param request
* @param response
* @param model
* @param pathVars
* @param id
* @param collectionField
* @param collectionItemId
* @return the return view path
* @throws Exception
*/
@RequestMapping(value = "/{id}/{collectionField:.*}/{collectionItemId}", method = RequestMethod.GET)
public String showUpdateCollectionItem(HttpServletRequest request, HttpServletResponse response, Model model,
        @PathVariable Map<String, String> pathVars,
        @PathVariable(value="id") String id,
        @PathVariable(value="collectionField") String collectionField,
        @PathVariable(value="collectionItemId") String collectionItemId) throws Exception {
    // Delegate to the shared edit/view routine, flagging the modal header as an update.
    final String modalHeaderType = "updateCollectionItem";
    return showViewUpdateCollection(request, model, pathVars, id, collectionField, collectionItemId, modalHeaderType);
}
/**
* Shows the appropriate modal dialog to view the selected collection item. This will display the modal as readonly
*
* @param request
* @param response
* @param model
* @param pathVars
* @param id
* @param collectionField
* @param collectionItemId
* @return the return view path
* @throws Exception
*/
@RequestMapping(value = "/{id}/{collectionField:.*}/{collectionItemId}/view", method = RequestMethod.GET)
public String showViewCollectionItem(HttpServletRequest request, HttpServletResponse response, Model model,
        @PathVariable Map<String, String> pathVars,
        @PathVariable(value="id") String id,
        @PathVariable(value="collectionField") String collectionField,
        @PathVariable(value="collectionItemId") String collectionItemId) throws Exception {
    String returnPath = showViewUpdateCollection(request, model, pathVars, id, collectionField, collectionItemId,
            "viewCollectionItem");

    // Since this is a read-only view, actions don't make sense in this context.
    // showViewUpdateCollection only adds "entityForm" for the collection types it recognizes, so
    // guard against a missing form instead of failing with a NullPointerException.
    EntityForm ef = (EntityForm) model.asMap().get("entityForm");
    if (ef != null) {
        ef.removeAllActions();
    }
    return returnPath;
}
protected String showViewUpdateCollection(HttpServletRequest request, Model model, Map<String, String> pathVars,
        String id, String collectionField, String collectionItemId, String modalHeaderType) throws ServiceException {
    // Convenience overload: no pre-built form or entity, so the full variant resolves both itself.
    EntityForm noForm = null;
    Entity noEntity = null;
    return showViewUpdateCollection(request, model, pathVars, id, collectionField, collectionItemId, modalHeaderType, noForm, noEntity);
}
/**
* Shows the view and populates the model for updating a collection item. You can also pass in an entityform and entity
* which are optional. If they are not passed in then they are automatically looked up
*
* @param request
* @param model
* @param pathVars
* @param id
* @param collectionField
* @param collectionItemId
* @param modalHeaderType
* @param ef
* @param entity
* @return
* @throws ServiceException
*/
protected String showViewUpdateCollection(HttpServletRequest request, Model model, Map<String, String> pathVars,
        String id, String collectionField, String collectionItemId, String modalHeaderType, EntityForm entityForm, Entity entity) throws ServiceException {
    // Resolve the section, the parent record, and the metadata of the collection member being shown.
    String sectionKey = getSectionKey(pathVars);
    String mainClassName = getClassNameForSection(sectionKey);
    ClassMetadata mainMetadata = service.getClassMetadata(getSectionPersistencePackageRequest(mainClassName));
    Property collectionProperty = mainMetadata.getPMap().get(collectionField);
    FieldMetadata md = collectionProperty.getMetadata();
    PersistencePackageRequest ppr = getSectionPersistencePackageRequest(mainClassName);
    Entity parentEntity = service.getRecord(ppr, id, mainMetadata, false);
    // From here on the request targets the collection member, not the parent record.
    ppr = PersistencePackageRequest.fromMetadata(md);
    if (md instanceof BasicCollectionMetadata &&
            ((BasicCollectionMetadata) md).getAddMethodType().equals(AddMethodType.PERSIST)) {
        BasicCollectionMetadata fmd = (BasicCollectionMetadata) md;
        ClassMetadata collectionMetadata = service.getClassMetadata(ppr);
        if (entity == null) {
            entity = service.getRecord(ppr, collectionItemId, collectionMetadata, true);
        }
        // Include the member's own sub-collections so they render as inline grids on the form.
        Map<String, DynamicResultSet> subRecordsMap = service.getRecordsForAllSubCollections(ppr, entity);
        if (entityForm == null) {
            entityForm = formService.createEntityForm(collectionMetadata, entity, subRecordsMap);
        } else {
            entityForm.clearFieldsMap();
            formService.populateEntityForm(collectionMetadata, entity, subRecordsMap, entityForm);
            //remove all the actions since we're not trying to redisplay them on the form
            entityForm.removeAllActions();
        }
        entityForm.removeAction(DefaultEntityFormActions.DELETE);
        model.addAttribute("entityForm", entityForm);
        model.addAttribute("viewType", "modal/simpleEditEntity");
    } else if (md instanceof AdornedTargetCollectionMetadata &&
            ((AdornedTargetCollectionMetadata) md).getMaintainedAdornedTargetFields().length > 0) {
        AdornedTargetCollectionMetadata fmd = (AdornedTargetCollectionMetadata) md;
        if (entity == null) {
            entity = service.getAdvancedCollectionRecord(mainMetadata, parentEntity, collectionProperty,
                    collectionItemId);
        }
        boolean populateTypeAndId = true;
        if (entityForm == null) {
            entityForm = formService.buildAdornedListForm(fmd, ppr.getAdornedList(), id);
        } else {
            entityForm.clearFieldsMap();
            // Preserve the submitted entity type across the rebuild of the adorned form.
            String entityType = entityForm.getEntityType();
            formService.buildAdornedListForm(fmd, ppr.getAdornedList(), id, entityForm);
            entityForm.setEntityType(entityType);
            populateTypeAndId = false;
        }
        ClassMetadata cmd = service.getClassMetadata(ppr);
        // Maintained fields that are themselves collections get rendered as inline grids on the form.
        for (String field : fmd.getMaintainedAdornedTargetFields()) {
            Property p = cmd.getPMap().get(field);
            if (p != null && p.getMetadata() instanceof AdornedTargetCollectionMetadata) {
                // Because we're dealing with a nested adorned target collection, this particular request must act
                // directly on the first adorned target collection. Because of this, we need the actual id property
                // from the entity that models the adorned target relationship, and not the id of the target object.
                Property alternateIdProperty = entity.getPMap().get(BasicPersistenceModule.ALTERNATE_ID_PROPERTY);
                DynamicResultSet drs = service.getRecordsForCollection(cmd, entity, p, null, null, null, alternateIdProperty.getValue());
                ListGrid listGrid = formService.buildCollectionListGrid(alternateIdProperty.getValue(), drs, p, ppr.getAdornedList().getAdornedTargetEntityClassname());
                listGrid.setListGridType(ListGrid.Type.INLINE);
                listGrid.getToolbarActions().add(DefaultListGridActions.ADD);
                entityForm.addListGrid(listGrid, EntityForm.DEFAULT_TAB_NAME, EntityForm.DEFAULT_TAB_ORDER);
            } else if (p != null && p.getMetadata() instanceof MapMetadata) {
                // See above comment for AdornedTargetCollectionMetadata
                MapMetadata mmd = (MapMetadata) p.getMetadata();
                Property alternateIdProperty = entity.getPMap().get(BasicPersistenceModule.ALTERNATE_ID_PROPERTY);
                DynamicResultSet drs = service.getRecordsForCollection(cmd, entity, p, null, null, null,
                        alternateIdProperty.getValue());
                ListGrid listGrid = formService.buildCollectionListGrid(alternateIdProperty.getValue(), drs, p,
                        mmd.getTargetClass());
                listGrid.setListGridType(ListGrid.Type.INLINE);
                listGrid.getToolbarActions().add(DefaultListGridActions.ADD);
                entityForm.addListGrid(listGrid, EntityForm.DEFAULT_TAB_NAME, EntityForm.DEFAULT_TAB_ORDER);
            }
        }
        formService.populateEntityFormFields(entityForm, entity, populateTypeAndId, populateTypeAndId);
        formService.populateAdornedEntityFormFields(entityForm, entity, ppr.getAdornedList());
        // Hide the save button when the adorned form has nothing the user can actually edit.
        boolean atLeastOneBasicField = false;
        for (Entry<String, Field> entry : entityForm.getFields().entrySet()) {
            if (entry.getValue().getIsVisible()) {
                atLeastOneBasicField = true;
                break;
            }
        }
        if (!atLeastOneBasicField) {
            entityForm.removeAction(DefaultEntityFormActions.SAVE);
        }
        model.addAttribute("entityForm", entityForm);
        model.addAttribute("viewType", "modal/adornedEditEntity");
    } else if (md instanceof MapMetadata) {
        MapMetadata fmd = (MapMetadata) md;
        ClassMetadata collectionMetadata = service.getClassMetadata(ppr);
        if (entity == null) {
            entity = service.getAdvancedCollectionRecord(mainMetadata, parentEntity, collectionProperty,
                    collectionItemId);
        }
        boolean populateTypeAndId = true;
        if (entityForm == null) {
            entityForm = formService.buildMapForm(fmd, ppr.getMapStructure(), collectionMetadata, id);
        } else {
            //save off the prior key before clearing out the fields map as it will not appear
            //back on the saved entity
            String priorKey = entityForm.getFields().get("priorKey").getValue();
            entityForm.clearFieldsMap();
            formService.buildMapForm(fmd, ppr.getMapStructure(), collectionMetadata, id, entityForm);
            entityForm.getFields().get("priorKey").setValue(priorKey);
            populateTypeAndId = false;
        }
        formService.populateEntityFormFields(entityForm, entity, populateTypeAndId, populateTypeAndId);
        formService.populateMapEntityFormFields(entityForm, entity);
        model.addAttribute("entityForm", entityForm);
        model.addAttribute("viewType", "modal/mapEditEntity");
    }
    // NOTE(review): when md matches none of the branches above, no "entityForm" attribute is added;
    // callers that read it back from the model should guard against null.
    model.addAttribute("currentUrl", request.getRequestURL().toString());
    model.addAttribute("modalHeaderType", modalHeaderType);
    model.addAttribute("collectionProperty", collectionProperty);
    setModelAttributes(model, sectionKey);
    return "modules/modalContainer";
}
/**
* Updates the specified collection item
*
* @param request
* @param response
* @param model
* @param pathVars
* @param id
* @param collectionField
* @param entityForm
* @return the return view path
* @throws Exception
*/
@RequestMapping(value = "/{id}/{collectionField:.*}/{collectionItemId}", method = RequestMethod.POST)
public String updateCollectionItem(HttpServletRequest request, HttpServletResponse response, Model model,
        @PathVariable Map<String, String> pathVars,
        @PathVariable(value="id") String id,
        @PathVariable(value="collectionField") String collectionField,
        @PathVariable(value="collectionItemId") String collectionItemId,
        @ModelAttribute(value="entityForm") EntityForm entityForm, BindingResult result) throws Exception {
    // Resolve the section plus metadata for the owning entity and the collection being updated.
    String sectionKey = getSectionKey(pathVars);
    String mainClassName = getClassNameForSection(sectionKey);
    ClassMetadata mainMetadata = service.getClassMetadata(getSectionPersistencePackageRequest(mainClassName));
    Property collectionProperty = mainMetadata.getPMap().get(collectionField);
    Entity entity = service.getRecord(getSectionPersistencePackageRequest(mainClassName), id, mainMetadata, false);

    // Apply the submitted form to the collection member, then validate the saved result.
    Entity savedEntity = service.updateSubCollectionEntity(entityForm, mainMetadata, collectionProperty, entity, collectionItemId);
    entityFormValidator.validate(entityForm, savedEntity, result);
    if (result.hasErrors()) {
        // Validation failed: re-render the edit modal with the submitted values and errors attached.
        return showViewUpdateCollection(request, model, pathVars, id, collectionField, collectionItemId,
                "updateCollectionItem", entityForm, savedEntity);
    }

    // Success: return a refreshed list grid to replace the one currently on screen.
    model.addAttribute("listGrid", getCollectionListGrid(mainMetadata, entity, collectionProperty, null, sectionKey));
    setModelAttributes(model, sectionKey);
    return "views/standaloneListGrid";
}
/**
* Updates the given collection item's sequence. This should only be triggered for adorned target collections
* where a sort field is specified -- any other invocation is incorrect and will result in an exception.
*
* @param request
* @param response
* @param model
* @param pathVars
* @param id
* @param collectionField
* @param collectionItemId
* @return an object explaining the state of the operation
* @throws Exception
*/
@RequestMapping(value = "/{id}/{collectionField:.*}/{collectionItemId}/sequence", method = RequestMethod.POST)
public @ResponseBody Map<String, Object> updateCollectionItemSequence(HttpServletRequest request,
        HttpServletResponse response, Model model,
        @PathVariable Map<String, String> pathVars,
        @PathVariable(value="id") String id,
        @PathVariable(value="collectionField") String collectionField,
        @PathVariable(value="collectionItemId") String collectionItemId,
        @RequestParam(value="newSequence") String newSequence) throws Exception {
    // Resolve the section, the parent record, and the metadata of the collection being resequenced.
    String sectionKey = getSectionKey(pathVars);
    String mainClassName = getClassNameForSection(sectionKey);
    ClassMetadata mainMetadata = service.getClassMetadata(getSectionPersistencePackageRequest(mainClassName));
    Property collectionProperty = mainMetadata.getPMap().get(collectionField);
    FieldMetadata md = collectionProperty.getMetadata();
    PersistencePackageRequest ppr = getSectionPersistencePackageRequest(mainClassName);
    Entity parentEntity = service.getRecord(ppr, id, mainMetadata, false);
    ppr = PersistencePackageRequest.fromMetadata(md);
    if (md instanceof AdornedTargetCollectionMetadata) {
        AdornedTargetCollectionMetadata fmd = (AdornedTargetCollectionMetadata) md;
        AdornedTargetList atl = ppr.getAdornedList();
        // Sequencing only makes sense when the adorned target list declares a sort field; fail fast with a
        // descriptive message instead of a NullPointerException when looking up the (missing) sort field.
        if (atl == null || StringUtils.isBlank(atl.getSortField())) {
            throw new UnsupportedOperationException("Cannot handle sequencing for an adorned target collection " +
                    "that does not specify a sort field.");
        }
        // Get an entity form for the entity
        EntityForm entityForm = formService.buildAdornedListForm(fmd, atl, id);
        Entity entity = service.getAdvancedCollectionRecord(mainMetadata, parentEntity, collectionProperty,
                collectionItemId);
        formService.populateEntityFormFields(entityForm, entity);
        formService.populateAdornedEntityFormFields(entityForm, entity, atl);
        // Set the new sequence (note that it will come in 0-indexed but the persistence module expects 1-indexed)
        int sequenceValue = Integer.parseInt(newSequence) + 1;
        Field field = entityForm.findField(atl.getSortField());
        field.setValue(String.valueOf(sequenceValue));
        Map<String, Object> responseMap = new HashMap<String, Object>();
        service.updateSubCollectionEntity(entityForm, mainMetadata, collectionProperty, entity, collectionItemId);
        responseMap.put("status", "ok");
        responseMap.put("field", collectionField);
        return responseMap;
    } else {
        throw new UnsupportedOperationException("Cannot handle sequencing for non adorned target collection fields.");
    }
}
/**
* Removes the requested collection item
*
* Note that the request must contain a parameter called "key" when attempting to remove a collection item from a
* map collection.
*
* @param request
* @param response
* @param model
* @param pathVars
* @param id
* @param collectionField
* @param collectionItemId
* @return the return view path
* @throws Exception
*/
@RequestMapping(value = "/{id}/{collectionField:.*}/{collectionItemId}/delete", method = RequestMethod.POST)
public String removeCollectionItem(HttpServletRequest request, HttpServletResponse response, Model model,
        @PathVariable Map<String, String> pathVars,
        @PathVariable(value="id") String id,
        @PathVariable(value="collectionField") String collectionField,
        @PathVariable(value="collectionItemId") String collectionItemId) throws Exception {
    // Resolve the section plus metadata for the owning entity and the targeted collection field.
    String sectionKey = getSectionKey(pathVars);
    String mainClassName = getClassNameForSection(sectionKey);
    ClassMetadata mainMetadata = service.getClassMetadata(getSectionPersistencePackageRequest(mainClassName));
    Property collectionProperty = mainMetadata.getPMap().get(collectionField);
    // Map collections additionally identify the entry to remove via the "key" request parameter.
    String priorKey = request.getParameter("key");
    Entity entity = service.getRecord(getSectionPersistencePackageRequest(mainClassName), id, mainMetadata, false);

    // Remove the member, then return a refreshed grid to replace the one currently on screen.
    service.removeSubCollectionEntity(mainMetadata, collectionProperty, entity, collectionItemId, priorKey);
    model.addAttribute("listGrid", getCollectionListGrid(mainMetadata, entity, collectionProperty, null, sectionKey));
    setModelAttributes(model, sectionKey);
    return "views/standaloneListGrid";
}
// *********************************
// ADDITIONAL SPRING-BOUND METHODS *
// *********************************
/**
* Invoked on every request to provide the ability to register specific binders for Spring's binding process.
* By default, we register a binder that treats empty Strings as null and a Boolean editor that supports either true
* or false. If the value is passed in as null, it will treat it as false.
*
* @param binder
*/
@InitBinder
public void initBinder(WebDataBinder binder) {
    // Booleans accept "true"/"false"; a missing/null value is coerced rather than rejected.
    binder.registerCustomEditor(Boolean.class, new NonNullBooleanEditor());
    // Strings are trimmed, and empty strings become null.
    binder.registerCustomEditor(String.class, new StringTrimmerEditor(true));
}
}
| 1no label
|
admin_broadleaf-open-admin-platform_src_main_java_org_broadleafcommerce_openadmin_web_controller_entity_AdminBasicEntityController.java
|
676 |
/**
 * Fluent builder for a {@link GetWarmersRequest} targeting the given indices.
 */
public class GetWarmersRequestBuilder extends ClusterInfoRequestBuilder<GetWarmersRequest, GetWarmersResponse, GetWarmersRequestBuilder> {

    public GetWarmersRequestBuilder(InternalGenericClient client, String... indices) {
        super(client, new GetWarmersRequest().indices(indices));
    }

    /** Replaces the set of warmer names to fetch. */
    public GetWarmersRequestBuilder setWarmers(String... warmers) {
        this.request.warmers(warmers);
        return this;
    }

    /** Appends additional warmer names to those already requested. */
    public GetWarmersRequestBuilder addWarmers(String... warmers) {
        String[] combined = ObjectArrays.concat(this.request.warmers(), warmers, String.class);
        this.request.warmers(combined);
        return this;
    }

    @Override
    protected void doExecute(ActionListener<GetWarmersResponse> listener) {
        ((IndicesAdminClient) this.client).getWarmers(this.request, listener);
    }
}
| 0true
|
src_main_java_org_elasticsearch_action_admin_indices_warmer_get_GetWarmersRequestBuilder.java
|
2,575 |
// The elected master failed: submit an urgent cluster-state update that either elects a new
// master (possibly this node) or forces this node to rejoin the cluster.
clusterService.submitStateUpdateTask("zen-disco-master_failed (" + masterNode + ")", Priority.URGENT, new ProcessedClusterStateUpdateTask() {
    @Override
    public ClusterState execute(ClusterState currentState) {
        if (!masterNode.id().equals(currentState.nodes().masterNodeId())) {
            // master got switched on us, no need to send anything
            return currentState;
        }
        DiscoveryNodes discoveryNodes = DiscoveryNodes.builder(currentState.nodes())
                // make sure the old master node, which has failed, is not part of the nodes we publish
                .remove(masterNode.id())
                .masterNodeId(null).build();
        if (!electMaster.hasEnoughMasterNodes(discoveryNodes)) {
            // Too few master-eligible nodes remain: rejoin instead of electing from a minority.
            return rejoin(ClusterState.builder(currentState).nodes(discoveryNodes).build(), "not enough master nodes after master left (reason = " + reason + ")");
        }
        final DiscoveryNode electedMaster = electMaster.electMaster(discoveryNodes); // elect master
        if (localNode.equals(electedMaster)) {
            // This node won the election: switch fault detection from master-watching to node-watching
            // and publish a state naming ourselves master.
            master = true;
            masterFD.stop("got elected as new master since master left (reason = " + reason + ")");
            nodesFD.start();
            discoveryNodes = DiscoveryNodes.builder(discoveryNodes).masterNodeId(localNode.id()).build();
            latestDiscoNodes = discoveryNodes;
            return ClusterState.builder(currentState).nodes(latestDiscoNodes).build();
        } else {
            nodesFD.stop();
            if (electedMaster != null) {
                // Another node won: follow it and restart master fault detection against it.
                discoveryNodes = DiscoveryNodes.builder(discoveryNodes).masterNodeId(electedMaster.id()).build();
                masterFD.restart(electedMaster, "possible elected master since master left (reason = " + reason + ")");
                latestDiscoNodes = discoveryNodes;
                return ClusterState.builder(currentState)
                        .nodes(latestDiscoNodes)
                        .build();
            } else {
                return rejoin(ClusterState.builder(currentState).nodes(discoveryNodes).build(), "master_left and no other node elected to become master");
            }
        }
    }
    @Override
    public void onFailure(String source, Throwable t) {
        logger.error("unexpected failure during [{}]", t, source);
    }
    @Override
    public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) {
        // Once the new state has been applied, notify anyone waiting for an initial cluster state.
        sendInitialStateEventIfNeeded();
    }
});
| 1no label
|
src_main_java_org_elasticsearch_discovery_zen_ZenDiscovery.java
|
249 |
/**
 * Null-object {@link OCache} implementation: stores nothing, reports itself disabled,
 * and answers every query with null, zero, or an empty collection.
 */
public class OEmptyCache implements OCache {

  @Override
  public void startup() {
    // no-op: nothing to initialize
  }

  @Override
  public void shutdown() {
    // no-op: nothing to tear down
  }

  @Override
  public boolean isEnabled() {
    return false; // permanently disabled
  }

  @Override
  public boolean enable() {
    return false; // cannot be enabled
  }

  @Override
  public boolean disable() {
    return false; // already (and always) disabled
  }

  @Override
  public ORecordInternal<?> get(ORID id) {
    return null; // never holds a record
  }

  @Override
  public ORecordInternal<?> put(ORecordInternal<?> record) {
    return null; // record is silently discarded
  }

  @Override
  public ORecordInternal<?> remove(ORID id) {
    return null; // nothing to evict
  }

  @Override
  public void clear() {
    // no-op: nothing to clear
  }

  @Override
  public int size() {
    return 0;
  }

  @Override
  public int limit() {
    return 0;
  }

  @Override
  public Collection<ORID> keys() {
    return Collections.emptyList();
  }

  @Override
  public void lock(ORID id) {
    // no-op: there is no shared state to protect
  }

  @Override
  public void unlock(ORID id) {
    // no-op: lock() never acquires anything
  }
}
| 0true
|
core_src_main_java_com_orientechnologies_orient_core_cache_OEmptyCache.java
|
1,321 |
/**
 * Registers the portable serialization factory for the executor service's request classes.
 */
public final class ExecutorPortableHook implements PortableHook {

    public static final int F_ID = FactoryIdHelper.getFactoryId(FactoryIdHelper.EXECUTOR_PORTABLE_FACTORY, -13);

    public static final int IS_SHUTDOWN_REQUEST = 1;
    public static final int CANCELLATION_REQUEST = 2;
    public static final int TARGET_CALLABLE_REQUEST = 3;
    public static final int PARTITION_CALLABLE_REQUEST = 4;

    @Override
    public int getFactoryId() {
        return F_ID;
    }

    @Override
    public PortableFactory createFactory() {
        // Maps each executor class id onto a freshly constructed, empty Portable instance.
        return new PortableFactory() {
            @Override
            public Portable create(int typeId) {
                if (typeId == IS_SHUTDOWN_REQUEST) {
                    return new IsShutdownRequest();
                }
                if (typeId == CANCELLATION_REQUEST) {
                    return new CancellationRequest();
                }
                if (typeId == TARGET_CALLABLE_REQUEST) {
                    return new TargetCallableRequest();
                }
                if (typeId == PARTITION_CALLABLE_REQUEST) {
                    return new PartitionCallableRequest();
                }
                return null; // unknown class id
            }
        };
    }

    @Override
    public Collection<ClassDefinition> getBuiltinDefinitions() {
        return null; // no pre-built class definitions for this factory
    }
}
| 1no label
|
hazelcast_src_main_java_com_hazelcast_executor_ExecutorPortableHook.java
|
4,223 |
public abstract class FsDirectoryService extends AbstractIndexShardComponent implements DirectoryService, StoreRateLimiting.Listener, StoreRateLimiting.Provider {
// The owning index store, narrowed to FsIndexStore so shard filesystem locations are available.
protected final FsIndexStore indexStore;
// Counter whose count() is exposed via throttleTimeInNanos(); incremented elsewhere -- presumably
// by the StoreRateLimiting.Listener callback this class implements (confirm against full class).
private final CounterMetric rateLimitingTimeInNanos = new CounterMetric();

/**
 * Creates the directory service for a single shard.
 *
 * @param shardId       the shard this directory service serves
 * @param indexSettings index-level settings, forwarded to the superclass
 * @param indexStore    must be an {@link FsIndexStore}; a ClassCastException is thrown otherwise
 */
public FsDirectoryService(ShardId shardId, @IndexSettings Settings indexSettings, IndexStore indexStore) {
    super(shardId, indexSettings);
    this.indexStore = (FsIndexStore) indexStore;
}
@Override
public final long throttleTimeInNanos() {
    // Expose the accumulated rate-limiting counter for this shard's store.
    return this.rateLimitingTimeInNanos.count();
}
@Override
public final StoreRateLimiting rateLimiting() {
    // Rate limiting configuration is owned by the index store, not by this service.
    return this.indexStore.rateLimiting();
}
protected final LockFactory buildLockFactory() throws IOException {
    // Resolve the configured lock implementation; "fs_lock" is the legacy setting name.
    final String fsLock = componentSettings.get("lock", componentSettings.get("fs_lock", "native"));
    if (fsLock.equals("native")) {
        // TODO LUCENE MONITOR: this is not needed in next Lucene version
        return new NativeFSLockFactory();
    }
    if (fsLock.equals("simple")) {
        return new SimpleFSLockFactory();
    }
    // "none" -- and any unrecognized value -- means no locking at all.
    return NoLockFactory.getNoLockFactory();
}
@Override
public final void renameFile(Directory dir, String from, String to) throws IOException {
    final FSDirectory fsDirectory = DirectoryUtils.getLeaf(dir, FSDirectory.class);
    if (fsDirectory == null) {
        throw new ElasticsearchIllegalArgumentException("Can not rename file on non-filesystem based directory ");
    }
    File directory = fsDirectory.getDirectory();
    File old = new File(directory, from);
    File nu = new File(directory, to);
    // The destination must be removed before the rename can succeed.
    if (nu.exists() && !nu.delete()) {
        throw new IOException("Cannot delete " + nu);
    }
    if (!old.exists()) {
        throw new FileNotFoundException("Can't rename from [" + from + "] to [" + to + "], from does not exists");
    }
    // File.renameTo can fail transiently; retry a few times, pausing briefly between attempts.
    boolean renamed = false;
    for (int attempt = 0; attempt < 3 && !renamed; attempt++) {
        if (old.renameTo(nu)) {
            renamed = true;
        } else {
            try {
                Thread.sleep(100);
            } catch (InterruptedException e) {
                throw new InterruptedIOException(e.getMessage());
            }
        }
    }
    if (!renamed) {
        throw new IOException("Failed to rename, from [" + from + "], to [" + to + "]");
    }
}
@Override
public final void fullDelete(Directory dir) throws IOException {
final FSDirectory fsDirectory = DirectoryUtils.getLeaf(dir, FSDirectory.class);
if (fsDirectory == null) {
throw new ElasticsearchIllegalArgumentException("Can not fully delete on non-filesystem based directory");
}
FileSystemUtils.deleteRecursively(fsDirectory.getDirectory());
// if we are the last ones, delete also the actual index
String[] list = fsDirectory.getDirectory().getParentFile().list();
if (list == null || list.length == 0) {
FileSystemUtils.deleteRecursively(fsDirectory.getDirectory().getParentFile());
}
}
@Override
public Directory[] build() throws IOException {
File[] locations = indexStore.shardIndexLocations(shardId);
Directory[] dirs = new Directory[locations.length];
for (int i = 0; i < dirs.length; i++) {
FileSystemUtils.mkdirs(locations[i]);
FSDirectory wrapped = newFSDirectory(locations[i], buildLockFactory());
dirs[i] = new RateLimitedFSDirectory(wrapped, this, this) ;
}
return dirs;
}
protected abstract FSDirectory newFSDirectory(File location, LockFactory lockFactory) throws IOException;
@Override
public final void onPause(long nanos) {
rateLimitingTimeInNanos.inc(nanos);
}
}
| 1no label
|
src_main_java_org_elasticsearch_index_store_fs_FsDirectoryService.java
|
194 |
/**
 * Round-trip tests for {@code OJNADirectMemory}: every primitive accessor,
 * byte-array transfer (with and without offsets) and data-move operation is
 * written to freshly allocated native memory and read back for comparison.
 */
@Test
public class JNADirectMemoryTest {

    public void testLong() {
        final ODirectMemory memory = new OJNADirectMemory();
        final long expected = new Random().nextLong();
        final long ptr = memory.allocate(OLongSerializer.LONG_SIZE);
        memory.setLong(ptr, expected);
        Assert.assertEquals(memory.getLong(ptr), expected);
        memory.free(ptr);
    }

    public void testInt() {
        final ODirectMemory memory = new OJNADirectMemory();
        final int expected = new Random().nextInt();
        final long ptr = memory.allocate(OIntegerSerializer.INT_SIZE);
        memory.setInt(ptr, expected);
        Assert.assertEquals(memory.getInt(ptr), expected);
        memory.free(ptr);
    }

    public void testChar() {
        final ODirectMemory memory = new OJNADirectMemory();
        final char expected = (char) new Random().nextInt();
        final long ptr = memory.allocate(OCharSerializer.CHAR_SIZE);
        memory.setChar(ptr, expected);
        Assert.assertEquals(memory.getChar(ptr), expected);
        memory.free(ptr);
    }

    public void testByte() {
        final ODirectMemory memory = new OJNADirectMemory();
        final byte[] expected = new byte[1];
        new Random().nextBytes(expected);
        final long ptr = memory.allocate(1);
        memory.setByte(ptr, expected[0]);
        Assert.assertEquals(memory.getByte(ptr), expected[0]);
        memory.free(ptr);
    }

    public void testAllocateBytes() {
        final ODirectMemory memory = new OJNADirectMemory();
        final byte[] data = new byte[256];
        new Random().nextBytes(data);
        // allocate(byte[]) both reserves memory and copies the content in
        final long ptr = memory.allocate(data);
        Assert.assertEquals(memory.get(ptr, data.length), data);
        memory.free(ptr);
    }

    public void testBytesWithoutOffset() {
        final ODirectMemory memory = new OJNADirectMemory();
        final byte[] data = new byte[256];
        new Random().nextBytes(data);
        final long ptr = memory.allocate(data.length);
        memory.set(ptr, data, 0, data.length);
        Assert.assertEquals(memory.get(ptr, data.length), data);
        Assert.assertEquals(memory.get(ptr, data.length / 2), Arrays.copyOf(data, data.length / 2));
        // read the first half of the native buffer into the second half of 'actual'
        final byte[] actual = new byte[data.length];
        memory.get(ptr, actual, data.length / 2, data.length / 2);
        final byte[] expected = new byte[data.length];
        System.arraycopy(data, 0, expected, expected.length / 2, expected.length / 2);
        Assert.assertEquals(actual, expected);
        memory.free(ptr);
    }

    public void testBytesWithOffset() {
        final ODirectMemory memory = new OJNADirectMemory();
        final byte[] data = new byte[256];
        new Random().nextBytes(data);
        final long ptr = memory.allocate(data.length);
        // only the second half of 'data' is written to native memory
        memory.set(ptr, data, data.length / 2, data.length / 2);
        Assert.assertEquals(memory.get(ptr, data.length / 2), Arrays.copyOfRange(data, data.length / 2, data.length));
        memory.free(ptr);
    }

    public void testCopyData() {
        final ODirectMemory memory = new OJNADirectMemory();
        final byte[] data = new byte[256];
        new Random().nextBytes(data);
        final long ptr = memory.allocate(data.length);
        memory.set(ptr, data, 0, data.length);
        memory.moveData(ptr, ptr + data.length / 2, data.length / 2);
        // mirror the native move on the heap copy and compare
        System.arraycopy(data, 0, data, data.length / 2, data.length / 2);
        Assert.assertEquals(data, memory.get(ptr, data.length));
        memory.free(ptr);
    }

    public void testCopyDataOverlap() {
        final ODirectMemory memory = new OJNADirectMemory();
        final byte[] data = new byte[256];
        new Random().nextBytes(data);
        final long ptr = memory.allocate(data.length);
        memory.set(ptr, data, 0, data.length);
        // source and destination ranges overlap by all but one byte
        memory.moveData(ptr, ptr + 1, data.length / 3);
        System.arraycopy(data, 0, data, 1, data.length / 3);
        Assert.assertEquals(data, memory.get(ptr, data.length));
        memory.free(ptr);
    }

    public void testCopyDataOverlapInterval() {
        final ODirectMemory memory = new OJNADirectMemory();
        final byte[] data = new byte[256];
        new Random().nextBytes(data);
        final long ptr = memory.allocate(data.length);
        memory.set(ptr, data, 0, data.length);
        // overlapping move with both endpoints inside the buffer
        memory.moveData(ptr + 2, ptr + 5, data.length / 3);
        System.arraycopy(data, 2, data, 5, data.length / 3);
        Assert.assertEquals(data, memory.get(ptr, data.length));
        memory.free(ptr);
    }
}
| 0true
|
nativeos_src_test_java_com_orientechnologies_nio_JNADirectMemoryTest.java
|
1,546 |
/**
 * Mapper that emits the value of a configured property ("key") for every
 * element (vertex or out-edge) that has traversal paths, once per path.
 * The full graph is also forwarded unchanged to the GRAPH side output.
 */
public static class Map extends Mapper<NullWritable, FaunusVertex, NullWritable, WritableComparable> {
// Name of the element property whose value is emitted.
private String key;
// True when the configured element class is Vertex; otherwise edges are processed.
private boolean isVertex;
// Converts raw property values into the configured WritableComparable type.
private WritableHandler handler;
private SafeMapperOutputs outputs;
@Override
public void setup(final Mapper.Context context) throws IOException, InterruptedException {
this.isVertex = context.getConfiguration().getClass(CLASS, Element.class, Element.class).equals(Vertex.class);
this.key = context.getConfiguration().get(KEY);
this.handler = new WritableHandler(context.getConfiguration().getClass(TYPE, Text.class, WritableComparable.class));
this.outputs = new SafeMapperOutputs(context);
}
@Override
public void map(final NullWritable key, final FaunusVertex value, final Mapper<NullWritable, FaunusVertex, NullWritable, WritableComparable>.Context context) throws IOException, InterruptedException {
// Always pass the vertex through so the graph output stays complete.
this.outputs.write(Tokens.GRAPH, NullWritable.get(), value);
if (this.isVertex) {
if (value.hasPaths()) {
WritableComparable writable = this.handler.set(ElementPicker.getProperty(value, this.key));
// Emit once per path so downstream counts reflect traversal multiplicity.
for (int i = 0; i < value.pathCount(); i++) {
this.outputs.write(Tokens.SIDEEFFECT, NullWritable.get(), writable);
}
DEFAULT_COMPAT.incrementContextCounter(context, Counters.VERTICES_PROCESSED, 1L);
}
} else {
long edgesProcessed = 0;
for (final Edge e : value.getEdges(Direction.OUT)) {
final StandardFaunusEdge edge = (StandardFaunusEdge) e;
if (edge.hasPaths()) {
WritableComparable writable = this.handler.set(ElementPicker.getProperty(edge, this.key));
for (int i = 0; i < edge.pathCount(); i++) {
this.outputs.write(Tokens.SIDEEFFECT, NullWritable.get(), writable);
}
edgesProcessed++;
}
}
DEFAULT_COMPAT.incrementContextCounter(context, Counters.OUT_EDGES_PROCESSED, edgesProcessed);
}
}
@Override
public void cleanup(final Mapper<NullWritable, FaunusVertex, NullWritable, WritableComparable>.Context context) throws IOException, InterruptedException {
// Flush and close the multi-output writers.
this.outputs.close();
}
}
| 1no label
|
titan-hadoop-parent_titan-hadoop-core_src_main_java_com_thinkaurelius_titan_hadoop_mapreduce_transform_PropertyMap.java
|
94 |
/**
 * Geo predicate: true when the value shape lies within the condition shape.
 * The condition must be a Geoshape; a null value evaluates to false.
 */
WITHIN {
@Override
public boolean evaluate(Object value, Object condition) {
Preconditions.checkArgument(condition instanceof Geoshape);
if (value == null) return false;
Preconditions.checkArgument(value instanceof Geoshape);
return ((Geoshape) value).within((Geoshape) condition);
}
@Override
public String toString() {
return "within";
}
@Override
public boolean hasNegation() {
// "not within" is not representable, so negate() is unsupported below.
return false;
}
@Override
public TitanPredicate negate() {
throw new UnsupportedOperationException();
}
};
| 0true
|
titan-core_src_main_java_com_thinkaurelius_titan_core_attribute_Geo.java
|
1,766 |
/**
 * Type-safe enumeration of the entity manager targets used by the admin
 * persistence layer (sandbox, stage and production datasources).
 */
public class TargetModeType implements Serializable {

    private static final long serialVersionUID = 1L;

    // Registry of every instance created so far, keyed by its type string.
    private static final Map<String, TargetModeType> TYPES = new HashMap<String, TargetModeType>();

    public static final TargetModeType SANDBOX = new TargetModeType("sandbox", "entity manager target for the sandbox datasource");
    public static final TargetModeType STAGE = new TargetModeType("stage", "entity manager target for the stage datasource");
    public static final TargetModeType PRODUCTION = new TargetModeType("production", "entity manager target for the production datasource");

    /** Returns the previously registered instance for the type string, or null. */
    public static TargetModeType getInstance(final String type) {
        return TYPES.get(type);
    }

    private String type;
    private String friendlyType;

    public TargetModeType() {
        //do nothing
    }

    public TargetModeType(final String type, final String friendlyType) {
        this.friendlyType = friendlyType;
        setType(type);
    }

    public String getType() {
        return type;
    }

    public String getFriendlyType() {
        return friendlyType;
    }

    // Registers this instance under its type string; the first registration wins.
    private void setType(final String type) {
        this.type = type;
        if (!TYPES.containsKey(type)) {
            TYPES.put(type, this);
        }
    }

    @Override
    public int hashCode() {
        // Equivalent to the classic 31 * 1 + hash(type) formula.
        return 31 + (type == null ? 0 : type.hashCode());
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        final TargetModeType that = (TargetModeType) obj;
        return type == null ? that.type == null : type.equals(that.type);
    }
}
| 1no label
|
admin_broadleaf-open-admin-platform_src_main_java_org_broadleafcommerce_openadmin_server_service_persistence_TargetModeType.java
|
48 |
/**
 * General-purpose comparator: delegates to {@link Comparable} when the first
 * object implements it, otherwise looks up a class-specific comparator from
 * {@code OComparatorFactory}.
 */
public class ODefaultComparator implements Comparator<Object> {
  public static final ODefaultComparator INSTANCE = new ODefaultComparator();

  /**
   * Compares the two objects.
   *
   * @throws IllegalStateException if the first object is neither Comparable
   *           nor has a registered comparator for its class
   */
  @Override
  @SuppressWarnings("unchecked")
  public int compare(final Object objectOne, final Object objectTwo) {
    if (objectOne instanceof Comparable)
      return ((Comparable<Object>) objectOne).compareTo(objectTwo);

    final Comparator<?> comparator = OComparatorFactory.INSTANCE.getComparator(objectOne.getClass());
    if (comparator != null)
      return ((Comparator<Object>) comparator).compare(objectOne, objectTwo);

    // BUGFIX: message previously read "Object of classX" (missing space).
    throw new IllegalStateException("Object of class " + objectOne.getClass().getName() + " can not be compared");
  }
}
| 0true
|
commons_src_main_java_com_orientechnologies_common_comparator_ODefaultComparator.java
|
1,013 |
/**
 * Order item representing gift wrap applied to one or more other order items.
 * The wrapped items are tracked via a lazy one-to-many association.
 */
@Entity
@Inheritance(strategy = InheritanceType.JOINED)
@Table(name = "BLC_GIFTWRAP_ORDER_ITEM")
@Cache(usage=CacheConcurrencyStrategy.NONSTRICT_READ_WRITE, region="blOrderElements")
@AdminPresentationClass(friendlyName = "GiftWrapOrderItemImpl_giftWrapOrderItem")
public class GiftWrapOrderItemImpl extends DiscreteOrderItemImpl implements GiftWrapOrderItem {

    private static final long serialVersionUID = 1L;

    // Items contained inside this gift wrap.
    @OneToMany(fetch = FetchType.LAZY, mappedBy = "giftWrapOrderItem", targetEntity = OrderItemImpl.class,
            cascade = {CascadeType.MERGE, CascadeType.PERSIST})
    @Cache(usage=CacheConcurrencyStrategy.NONSTRICT_READ_WRITE, region="blOrderElements")
    @AdminPresentationCollection(friendlyName="OrderItemImpl_Price_Details",
            tab = OrderItemImpl.Presentation.Tab.Name.Advanced, tabOrder = OrderItemImpl.Presentation.Tab.Order.Advanced)
    protected List<OrderItem> wrappedItems = new ArrayList<OrderItem>();

    public List<OrderItem> getWrappedItems() {
        return wrappedItems;
    }

    public void setWrappedItems(List<OrderItem> wrappedItems) {
        this.wrappedItems = wrappedItems;
    }

    @Override
    public OrderItem clone() {
        GiftWrapOrderItem orderItem = (GiftWrapOrderItem) super.clone();
        // copy the wrapped item references into the clone's own list
        if (orderItem != null && wrappedItems != null) {
            orderItem.getWrappedItems().addAll(wrappedItems);
        }
        return orderItem;
    }

    @Override
    public int hashCode() {
        // BUGFIX: the multiplier was previously super.hashCode() instead of the
        // conventional prime 31, which defeated the prime-multiplier pattern.
        final int prime = 31;
        int result = super.hashCode();
        result = prime * result + ((wrappedItems == null) ? 0 : wrappedItems.hashCode());
        return result;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // super.equals also rejects null; the duplicated second check was removed
        if (!super.equals(obj)) {
            return false;
        }
        if (getClass() != obj.getClass()) {
            return false;
        }
        GiftWrapOrderItemImpl other = (GiftWrapOrderItemImpl) obj;
        // persisted entities compare by id alone
        if (id != null && other.id != null) {
            return id.equals(other.id);
        }
        if (wrappedItems == null) {
            return other.wrappedItems == null;
        }
        return wrappedItems.equals(other.wrappedItems);
    }
}
| 1no label
|
core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_order_domain_GiftWrapOrderItemImpl.java
|
259 |
/**
 * Basic {@link OCommandContext} implementation: a hierarchical (parent/child)
 * variable store with optional metric recording and command-timeout tracking.
 */
public class OBasicCommandContext implements OCommandContext {
public static final String EXECUTION_BEGUN = "EXECUTION_BEGUN";
public static final String TIMEOUT_MS = "TIMEOUT_MS";
// NOTE(review): the value spelling "STARTEGY" looks like a typo, but it may be
// used as a lookup key elsewhere — confirm before changing it.
public static final String TIMEOUT_STRATEGY = "TIMEOUT_STARTEGY";
// When true, updateMetric() accumulates metric values into the variable map.
protected boolean recordMetrics = false;
protected OCommandContext parent;
protected OCommandContext child;
// Lazily created by init(); holds this context's own variables.
protected Map<String, Object> variables;
// MANAGES THE TIMEOUT
private long executionStartedOn;
private long timeoutMs;
private com.orientechnologies.orient.core.command.OCommandContext.TIMEOUT_STRATEGY timeoutStrategy;
public OBasicCommandContext() {
}
public Object getVariable(String iName) {
return getVariable(iName, null);
}
/**
 * Resolves a variable, optionally navigating the "$parent"/"$root" scopes and
 * nested field paths separated by "." or "[". Returns iDefault when the
 * resolved value is null.
 */
public Object getVariable(String iName, final Object iDefault) {
if (iName == null)
return iDefault;
Object result = null;
// strip the optional leading '$'
if (iName.startsWith("$"))
iName = iName.substring(1);
int pos = OStringSerializerHelper.getLowerIndexOf(iName, 0, ".", "[");
String firstPart;
String lastPart;
if (pos > -1) {
firstPart = iName.substring(0, pos);
if (iName.charAt(pos) == '.')
pos++;
lastPart = iName.substring(pos);
if (firstPart.equalsIgnoreCase("PARENT") && parent != null) {
// UP TO THE PARENT
if (lastPart.startsWith("$"))
result = parent.getVariable(lastPart.substring(1));
else
result = ODocumentHelper.getFieldValue(parent, lastPart);
return result != null ? result : iDefault;
} else if (firstPart.equalsIgnoreCase("ROOT")) {
// walk up to the top-most context, then resolve the rest of the path there
OCommandContext p = this;
while (p.getParent() != null)
p = p.getParent();
if (lastPart.startsWith("$"))
result = p.getVariable(lastPart.substring(1));
else
result = ODocumentHelper.getFieldValue(p, lastPart, this);
return result != null ? result : iDefault;
}
} else {
firstPart = iName;
lastPart = null;
}
if (firstPart.equalsIgnoreCase("CONTEXT"))
result = getVariables();
else if (firstPart.equalsIgnoreCase("PARENT"))
result = parent;
else if (firstPart.equalsIgnoreCase("ROOT")) {
OCommandContext p = this;
while (p.getParent() != null)
p = p.getParent();
result = p;
} else {
// look in this context first, then delegate to the child context
if (variables != null && variables.containsKey(firstPart))
result = variables.get(firstPart);
else if (child != null)
result = child.getVariable(firstPart);
}
// resolve any remaining nested path against the found value
if (pos > -1)
result = ODocumentHelper.getFieldValue(result, lastPart, this);
return result != null ? result : iDefault;
}
/**
 * Sets a variable. Dotted/indexed names are routed to the nested context
 * identified by the prefix; plain names are stored in this context.
 */
public OCommandContext setVariable(String iName, final Object iValue) {
if (iName == null)
return null;
if (iName.startsWith("$"))
iName = iName.substring(1);
init();
int pos = OStringSerializerHelper.getHigherIndexOf(iName, 0, ".", "[");
if (pos > -1) {
Object nested = getVariable(iName.substring(0, pos));
if (nested != null && nested instanceof OCommandContext)
((OCommandContext) nested).setVariable(iName.substring(pos + 1), iValue);
} else
variables.put(iName, iValue);
return this;
}
/**
 * Adds iValue to the named metric stored in the variable map.
 * Returns -1 when metric recording is disabled.
 */
public long updateMetric(final String iName, final long iValue) {
if (!recordMetrics)
return -1;
init();
Long value = (Long) variables.get(iName);
if (value == null)
value = iValue;
else
value = new Long(value.longValue() + iValue);
variables.put(iName, value);
return value.longValue();
}
/**
 * Returns a read-only map with all the variables.
 */
public Map<String, Object> getVariables() {
final HashMap<String, Object> map = new HashMap<String, Object>();
// this context's own entries take precedence over the child's
if (child != null)
map.putAll(child.getVariables());
if (variables != null)
map.putAll(variables);
return map;
}
/**
 * Set the inherited context avoiding to copy all the values every time.
 *
 * @return
 */
public OCommandContext setChild(final OCommandContext iContext) {
if (iContext == null) {
if (child != null) {
// REMOVE IT
child.setParent(null);
child = null;
}
} else if (child != iContext) {
// ADD IT
child = iContext;
iContext.setParent(this);
}
return this;
}
public OCommandContext getParent() {
return parent;
}
/** Links this context under iParentContext, keeping both sides consistent. */
public OCommandContext setParent(final OCommandContext iParentContext) {
if (parent != iParentContext) {
parent = iParentContext;
if (parent != null)
parent.setChild(this);
}
return this;
}
@Override
public String toString() {
return getVariables().toString();
}
// Lazily allocates the variable map.
private void init() {
if (variables == null)
variables = new HashMap<String, Object>();
}
public boolean isRecordingMetrics() {
return recordMetrics;
}
public OCommandContext setRecordingMetrics(final boolean recordMetrics) {
this.recordMetrics = recordMetrics;
return this;
}
/** Arms the timeout clock; a non-positive iTimeout disables timeout checking. */
@Override
public void beginExecution(final long iTimeout, final TIMEOUT_STRATEGY iStrategy) {
if (iTimeout > 0) {
executionStartedOn = System.currentTimeMillis();
timeoutMs = iTimeout;
timeoutStrategy = iStrategy;
}
}
/**
 * Returns false (RETURN strategy) or throws OTimeoutException (EXCEPTION
 * strategy) once the armed timeout has elapsed; true otherwise.
 */
public boolean checkTimeout() {
if (timeoutMs > 0) {
if (System.currentTimeMillis() - executionStartedOn > timeoutMs) {
// TIMEOUT!
switch (timeoutStrategy) {
case RETURN:
return false;
case EXCEPTION:
throw new OTimeoutException("Command execution timeout exceed (" + timeoutMs + "ms)");
}
}
}
return true;
}
}
| 0true
|
core_src_main_java_com_orientechnologies_orient_core_command_OBasicCommandContext.java
|
2,350 |
/**
 * Default per-partition map/combine context. Emitted key/value pairs are fed
 * into per-key {@link Combiner}s (created on demand from the configured
 * {@link CombinerFactory}, or a list-collecting default) and periodically
 * drained in chunks by the owning {@link MapCombineTask}.
 */
public class DefaultContext<KeyIn, ValueIn>
implements Context<KeyIn, ValueIn> {
private final Map<KeyIn, Combiner<KeyIn, ValueIn, ?>> combiners = new HashMap<KeyIn, Combiner<KeyIn, ValueIn, ?>>();
private final CombinerFactory<KeyIn, ValueIn, ?> combinerFactory;
private final MapCombineTask mapCombineTask;
// Number of values emitted since the last chunk was requested.
private final AtomicInteger collected = new AtomicInteger(0);
private volatile int partitionId;
protected DefaultContext(CombinerFactory<KeyIn, ValueIn, ?> combinerFactory, MapCombineTask mapCombineTask) {
this.mapCombineTask = mapCombineTask;
// fall back to a combiner that simply collects emitted values into lists
this.combinerFactory = combinerFactory != null ? combinerFactory : new CollectingCombinerFactory<KeyIn, ValueIn>();
}
public void setPartitionId(int partitionId) {
this.partitionId = partitionId;
}
@Override
public void emit(KeyIn key, ValueIn value) {
Combiner<KeyIn, ValueIn, ?> combiner = getOrCreateCombiner(key);
combiner.combine(key, value);
collected.incrementAndGet();
// notify the task so it can flush a chunk once enough values piled up
mapCombineTask.onEmit(this, partitionId);
}
/** Drains each combiner's current chunk and resets the collected counter. */
public <Chunk> Map<KeyIn, Chunk> requestChunk() {
Map<KeyIn, Chunk> chunkMap = new HashMap<KeyIn, Chunk>(combiners.size());
for (Map.Entry<KeyIn, Combiner<KeyIn, ValueIn, ?>> entry : combiners.entrySet()) {
Chunk chunk = (Chunk) entry.getValue().finalizeChunk();
chunkMap.put(entry.getKey(), chunk);
}
collected.set(0);
return chunkMap;
}
public int getCollected() {
return collected.get();
}
/** Finalizes every combiner and returns the final chunk of data. */
public <Chunk> Map<KeyIn, Chunk> finish() {
for (Combiner<KeyIn, ValueIn, ?> combiner : combiners.values()) {
combiner.finalizeCombine();
}
return requestChunk();
}
// Returns the combiner for the key, creating and initializing it on first use.
private Combiner<KeyIn, ValueIn, ?> getOrCreateCombiner(KeyIn key) {
Combiner<KeyIn, ValueIn, ?> combiner = combiners.get(key);
if (combiner == null) {
combiner = combinerFactory.newCombiner(key);
combiners.put(key, combiner);
combiner.beginCombine();
}
return combiner;
}
/**
 * This {@link com.hazelcast.mapreduce.CombinerFactory} implementation is used
 * if no specific CombinerFactory was set in the configuration of the job to
 * do mapper aside combining of the emitted values.<br/>
 *
 * @param <KeyIn> type of the key
 * @param <ValueIn> type of the value
 */
private static class CollectingCombinerFactory<KeyIn, ValueIn>
implements CombinerFactory<KeyIn, ValueIn, List<ValueIn>> {
@Override
public Combiner<KeyIn, ValueIn, List<ValueIn>> newCombiner(KeyIn key) {
return new Combiner<KeyIn, ValueIn, List<ValueIn>>() {
private final List<ValueIn> values = new ArrayList<ValueIn>();
@Override
public void combine(KeyIn key, ValueIn value) {
values.add(value);
}
@Override
public List<ValueIn> finalizeChunk() {
// snapshot and clear so the next chunk starts empty
List<ValueIn> values = new ArrayList<ValueIn>(this.values);
this.values.clear();
return values;
}
};
}
}
}
| 1no label
|
hazelcast_src_main_java_com_hazelcast_mapreduce_impl_task_DefaultContext.java
|
273 |
/**
 * Convenience base class for command requests. Holds the state shared by every
 * request type: result/progress listeners, row limit, timeout, parameters,
 * fetch plan, cache-usage flag and the execution context.
 */
@SuppressWarnings("serial")
public abstract class OCommandRequestAbstract implements OCommandRequestInternal {
  protected OCommandResultListener resultListener;
  protected OProgressListener progressListener;
  protected int limit = -1;
  protected long timeoutMs = OGlobalConfiguration.COMMAND_TIMEOUT.getValueAsLong();
  protected TIMEOUT_STRATEGY timeoutStrategy = TIMEOUT_STRATEGY.EXCEPTION;
  protected Map<Object, Object> parameters;
  protected String fetchPlan = null;
  protected boolean useCache = false;
  protected OCommandContext context;

  protected OCommandRequestAbstract() {
  }

  public OCommandResultListener getResultListener() {
    return resultListener;
  }

  public void setResultListener(OCommandResultListener listener) {
    resultListener = listener;
  }

  public Map<Object, Object> getParameters() {
    return parameters;
  }

  /** Stores the arguments as the request parameters; no-op for empty input. */
  protected void setParameters(final Object... args) {
    if (args != null && args.length > 0) {
      parameters = convertToParameters(args);
    }
  }

  /**
   * Normalizes positional arguments into a parameter map. A single Map
   * argument is used as-is; otherwise each argument is keyed by its position,
   * with valid identifiable objects reduced to their record id.
   */
  @SuppressWarnings("unchecked")
  protected Map<Object, Object> convertToParameters(final Object... args) {
    if (args.length == 1 && args[0] instanceof Map) {
      return (Map<Object, Object>) args[0];
    }

    final Map<Object, Object> result = new HashMap<Object, Object>(args.length);
    for (int i = 0; i < args.length; ++i) {
      Object value = args[i];
      if (value instanceof OIdentifiable && ((OIdentifiable) value).getIdentity().isValid()) {
        // keep only the record id, not the whole record
        value = ((OIdentifiable) value).getIdentity();
      }
      result.put(i, value);
    }
    return result;
  }

  public OProgressListener getProgressListener() {
    return progressListener;
  }

  public OCommandRequestAbstract setProgressListener(OProgressListener listener) {
    this.progressListener = listener;
    return this;
  }

  public void reset() {
  }

  public int getLimit() {
    return limit;
  }

  public OCommandRequestAbstract setLimit(final int maxResults) {
    this.limit = maxResults;
    return this;
  }

  public String getFetchPlan() {
    return fetchPlan;
  }

  @SuppressWarnings("unchecked")
  public <RET extends OCommandRequest> RET setFetchPlan(String plan) {
    this.fetchPlan = plan;
    return (RET) this;
  }

  public boolean isUseCache() {
    return useCache;
  }

  public void setUseCache(boolean useCache) {
    this.useCache = useCache;
  }

  @Override
  public OCommandContext getContext() {
    // lazily create a default context when none was injected
    if (context == null) {
      context = new OBasicCommandContext();
    }
    return context;
  }

  public OCommandRequestAbstract setContext(final OCommandContext ctx) {
    context = ctx;
    return this;
  }

  public long getTimeoutTime() {
    return timeoutMs;
  }

  public void setTimeout(final long timeout, TIMEOUT_STRATEGY strategy) {
    this.timeoutMs = timeout;
    this.timeoutStrategy = strategy;
  }

  public TIMEOUT_STRATEGY getTimeoutStrategy() {
    return timeoutStrategy;
  }
}
| 0true
|
core_src_main_java_com_orientechnologies_orient_core_command_OCommandRequestAbstract.java
|
1,623 |
@Component("blDynamicEntityDao")
@Scope("prototype")
public class DynamicEntityDaoImpl implements DynamicEntityDao {
private static final Log LOG = LogFactory.getLog(DynamicEntityDaoImpl.class);
// Bounded LRU cache of merged field metadata, keyed by entity/config cache key.
protected static final Map<String,Map<String, FieldMetadata>> METADATA_CACHE = new LRUMap<String, Map<String, FieldMetadata>>(100, 1000);
/*
* This is the same as POLYMORPHIC_ENTITY_CACHE, except that it does not contain classes that are abstract or have been marked for exclusion
* from polymorphism
*/
protected EntityManager standardEntityManager;
@Resource(name="blMetadata")
protected Metadata metadata;
@Resource(name="blEJB3ConfigurationDao")
protected EJB3ConfigurationDao ejb3ConfigurationDao;
@Resource(name="blEntityConfiguration")
protected EntityConfiguration entityConfiguration;
@Resource(name="blMetadataProviders")
protected List<FieldMetadataProvider> fieldMetadataProviders = new ArrayList<FieldMetadataProvider>();
@Resource(name= "blDefaultFieldMetadataProvider")
protected FieldMetadataProvider defaultFieldMetadataProvider;
@Resource(name="blAppConfigurationRemoteService")
protected AppConfigurationService appConfigurationRemoteService;
protected DynamicDaoHelper dynamicDaoHelper = new DynamicDaoHelperImpl();
// Metadata cache TTL in ms: < 0 = cache forever, 0 = disabled, > 0 = flush after TTL (see useCache()).
@Value("${cache.entity.dao.metadata.ttl}")
protected int cacheEntityMetaDataTtl;
// Timestamp of the last TTL-driven cache flush.
protected long lastCacheFlushTime = System.currentTimeMillis();
@Override
public Criteria createCriteria(Class<?> entityClass) {
// Drops down to the underlying Hibernate session to build a Criteria.
return ((HibernateEntityManager) getStandardEntityManager()).getSession().createCriteria(entityClass);
}
@Override
public Serializable persist(Serializable entity) {
// Persist and flush immediately, returning the managed entity.
standardEntityManager.persist(entity);
standardEntityManager.flush();
return entity;
}
@Override
public Serializable merge(Serializable entity) {
// Merge and flush immediately, returning the managed copy (not the argument).
Serializable response = standardEntityManager.merge(entity);
standardEntityManager.flush();
return response;
}
@Override
public void flush() {
// Pushes pending changes in the persistence context to the database.
standardEntityManager.flush();
}
@Override
public void detach(Serializable entity) {
// Removes the entity from the persistence context without touching the database.
standardEntityManager.detach(entity);
}
@Override
public void refresh(Serializable entity) {
// Reloads the entity state from the database, discarding in-memory changes.
standardEntityManager.refresh(entity);
}
@Override
public Serializable retrieve(Class<?> entityClass, Object primaryKey) {
// Simple primary-key lookup; returns null when no row matches.
return (Serializable) standardEntityManager.find(entityClass, primaryKey);
}
@Override
public void remove(Serializable entity) {
boolean isArchivable = Status.class.isAssignableFrom(entity.getClass());
if (isArchivable) {
// Archivable entities are soft-deleted by flipping the archived flag.
((Status) entity).setArchived('Y');
merge(entity);
} else {
// Everything else is physically removed and flushed immediately.
standardEntityManager.remove(entity);
standardEntityManager.flush();
}
}
@Override
public void clear() {
// Detaches every entity currently managed by the persistence context.
standardEntityManager.clear();
}
@Override
public PersistentClass getPersistentClass(String targetClassName) {
// Looks up the Hibernate mapping metadata for the given class name.
return ejb3ConfigurationDao.getConfiguration().getClassMapping(targetClassName);
}
/**
 * Decides whether the metadata caches may be used, flushing them first when
 * the configured TTL has elapsed. TTL semantics: negative = cache forever,
 * zero = caching disabled, positive = flush after that many milliseconds.
 */
protected boolean useCache() {
if (cacheEntityMetaDataTtl < 0) {
return true;
}
if (cacheEntityMetaDataTtl == 0) {
return false;
} else {
if ((System.currentTimeMillis() - lastCacheFlushTime) > cacheEntityMetaDataTtl) {
lastCacheFlushTime = System.currentTimeMillis();
METADATA_CACHE.clear();
DynamicDaoHelperImpl.POLYMORPHIC_ENTITY_CACHE.clear();
DynamicDaoHelperImpl.POLYMORPHIC_ENTITY_CACHE_WO_EXCLUSIONS.clear();
return true; // cache is empty
} else {
return true;
}
}
}
@Override
public Class<?>[] getAllPolymorphicEntitiesFromCeiling(Class<?> ceilingClass) {
// Convenience overload: include unqualified polymorphic entities by default.
return getAllPolymorphicEntitiesFromCeiling(ceilingClass, true);
}
/* (non-Javadoc)
* @see org.broadleafcommerce.openadmin.server.dao.DynamicEntityDao#getAllPolymorphicEntitiesFromCeiling(java.lang.Class)
*/
@Override
public Class<?>[] getAllPolymorphicEntitiesFromCeiling(Class<?> ceilingClass, boolean includeUnqualifiedPolymorphicEntities) {
// Delegates to the helper, honoring the TTL-driven cache policy.
return dynamicDaoHelper.getAllPolymorphicEntitiesFromCeiling(ceilingClass, getSessionFactory(),
includeUnqualifiedPolymorphicEntities, useCache());
}
// Delegates entity ordering (relative to the ceiling class) to the helper.
public Class<?>[] sortEntities(Class<?> ceilingClass, List<Class<?>> entities) {
return dynamicDaoHelper.sortEntities(ceilingClass, entities);
}
/**
 * Recursively inserts clazz into the class tree as a child of the node that
 * represents its direct superclass; no-op when the node is clazz itself.
 */
protected void addClassToTree(Class<?> clazz, ClassTree tree) {
Class<?> testClass;
try {
testClass = Class.forName(tree.getFullyQualifiedClassname());
} catch (ClassNotFoundException e) {
throw new RuntimeException(e);
}
if (clazz.equals(testClass)) {
return;
}
if (clazz.getSuperclass().equals(testClass)) {
ClassTree myTree = new ClassTree(clazz.getName(), isExcludeClassFromPolymorphism(clazz));
createClassTreeFromAnnotation(clazz, myTree);
tree.setChildren((ClassTree[]) ArrayUtils.add(tree.getChildren(), myTree));
} else {
// not a direct subclass of this node; try each subtree
for (ClassTree child : tree.getChildren()) {
addClassToTree(clazz, child);
}
}
}
// Copies the friendly name from an @AdminPresentationClass annotation, if any,
// onto the tree node.
protected void createClassTreeFromAnnotation(Class<?> clazz, ClassTree myTree) {
AdminPresentationClass classPresentation = clazz.getAnnotation(AdminPresentationClass.class);
if (classPresentation != null) {
String friendlyName = classPresentation.friendlyName();
if (!StringUtils.isEmpty(friendlyName)) {
myTree.setFriendlyName(friendlyName);
}
}
}
/**
 * Builds a ClassTree from the polymorphic classes (ordered most-derived
 * first, root last). When one of the classes declares a ceilingDisplayEntity,
 * the array is truncated so that class becomes the effective root.
 */
@Override
public ClassTree getClassTree(Class<?>[] polymorphicClasses) {
String ceilingClass = null;
for (Class<?> clazz : polymorphicClasses) {
AdminPresentationClass classPresentation = clazz.getAnnotation(AdminPresentationClass.class);
if (classPresentation != null) {
String ceilingEntity = classPresentation.ceilingDisplayEntity();
if (!StringUtils.isEmpty(ceilingEntity)) {
ceilingClass = ceilingEntity;
break;
}
}
}
if (ceilingClass != null) {
int pos = -1;
int j = 0;
for (Class<?> clazz : polymorphicClasses) {
if (clazz.getName().equals(ceilingClass)) {
pos = j;
break;
}
j++;
}
if (pos >= 0) {
// truncate the array at the declared ceiling (j == pos here)
Class<?>[] temp = new Class<?>[pos + 1];
System.arraycopy(polymorphicClasses, 0, temp, 0, j + 1);
polymorphicClasses = temp;
}
}
ClassTree classTree = null;
if (!ArrayUtils.isEmpty(polymorphicClasses)) {
// the last element is the least-derived class and becomes the tree root
Class<?> topClass = polymorphicClasses[polymorphicClasses.length-1];
classTree = new ClassTree(topClass.getName(), isExcludeClassFromPolymorphism(topClass));
createClassTreeFromAnnotation(topClass, classTree);
for (int j=polymorphicClasses.length-1; j >= 0; j--) {
addClassToTree(polymorphicClasses[j], classTree);
}
classTree.finalizeStructure(1);
}
return classTree;
}
@Override
public ClassTree getClassTreeFromCeiling(Class<?> ceilingClass) {
// Resolve the polymorphic set for the ceiling, then build the tree from it.
Class<?>[] sortedEntities = getAllPolymorphicEntitiesFromCeiling(ceilingClass);
return getClassTree(sortedEntities);
}
/**
 * Returns the merged field metadata for an entity. When the entity resolves
 * to a polymorphic hierarchy, the full merge pipeline is used; otherwise the
 * class is introspected field-by-field through the metadata providers.
 */
@Override
public Map<String, FieldMetadata> getSimpleMergedProperties(String entityName, PersistencePerspective persistencePerspective) {
Class<?>[] entityClasses;
try {
entityClasses = getAllPolymorphicEntitiesFromCeiling(Class.forName(entityName));
} catch (ClassNotFoundException e) {
throw new RuntimeException(e);
}
if (!ArrayUtils.isEmpty(entityClasses)) {
// normal case: run the full merge across the polymorphic hierarchy
return getMergedProperties(
entityName,
entityClasses,
(ForeignKey) persistencePerspective.getPersistencePerspectiveItems().get(PersistencePerspectiveItemType.FOREIGNKEY),
persistencePerspective.getAdditionalNonPersistentProperties(),
persistencePerspective.getAdditionalForeignKeys(),
MergedPropertyType.PRIMARY,
persistencePerspective.getPopulateToOneFields(),
persistencePerspective.getIncludeFields(),
persistencePerspective.getExcludeFields(),
persistencePerspective.getConfigurationKey(),
""
);
} else {
// fallback: not a known polymorphic entity — introspect the class directly
Map<String, FieldMetadata> mergedProperties = new HashMap<String, FieldMetadata>();
Class<?> targetClass;
try {
targetClass = Class.forName(entityName);
} catch (ClassNotFoundException e) {
throw new RuntimeException(e);
}
Map<String, FieldMetadata> attributesMap = metadata.getFieldPresentationAttributes(null, targetClass, this, "");
for (String property : attributesMap.keySet()) {
FieldMetadata presentationAttribute = attributesMap.get(property);
if (!presentationAttribute.getExcluded()) {
Field field = FieldManager.getSingleField(targetClass, property);
if (!Modifier.isStatic(field.getModifiers())) {
// let each configured provider take a crack at the field
boolean handled = false;
for (FieldMetadataProvider provider : fieldMetadataProviders) {
FieldProviderResponse response = provider.addMetadataFromFieldType(
new AddMetadataFromFieldTypeRequest(field, targetClass, null, new ForeignKey[]{},
MergedPropertyType.PRIMARY, null, null, "",
property, null, false, 0, attributesMap, presentationAttribute,
((BasicFieldMetadata) presentationAttribute).getExplicitFieldType(), field.getType(), this),
mergedProperties);
if (FieldProviderResponse.NOT_HANDLED != response) {
handled = true;
}
if (FieldProviderResponse.HANDLED_BREAK == response) {
break;
}
}
if (!handled) {
//this provider is not included in the provider list on purpose - it is designed to handle basic
//AdminPresentation fields, and those fields not admin presentation annotated at all
defaultFieldMetadataProvider.addMetadataFromFieldType(
new AddMetadataFromFieldTypeRequest(field, targetClass, null, new ForeignKey[]{},
MergedPropertyType.PRIMARY, null, null, "", property,
null, false, 0, attributesMap, presentationAttribute, ((BasicFieldMetadata) presentationAttribute).getExplicitFieldType(),
field.getType(), this), mergedProperties);
}
}
}
}
return mergedProperties;
}
}
@Override
/**
 * Produces the merged property metadata for a set of polymorphic entity classes, then prunes
 * properties flagged as excluded and finally lets every {@link FieldMetadataProvider} contribute
 * late-stage (non-cacheable) metadata.
 *
 * @param ceilingEntityFullyQualifiedClassname top of the inheritance hierarchy being inspected
 * @param entities all polymorphic implementations to merge
 * @param foreignField primary foreign key, may be null
 * @param additionalNonPersistentProperties property names to expose that Hibernate does not manage
 * @param additionalForeignFields extra foreign keys, may be null/empty
 * @param mergedPropertyType category of merge being performed
 * @param populateManyToOneFields whether to traverse into to-one associations
 * @param includeFields explicit whitelist (wins over excludeFields)
 * @param excludeFields explicit blacklist
 * @param configurationKey override configuration discriminator
 * @param prefix dot-notation prefix applied to nested properties
 * @return property name to metadata, with excluded entries removed
 */
public Map<String, FieldMetadata> getMergedProperties(
    String ceilingEntityFullyQualifiedClassname,
    Class<?>[] entities,
    ForeignKey foreignField,
    String[] additionalNonPersistentProperties,
    ForeignKey[] additionalForeignFields,
    MergedPropertyType mergedPropertyType,
    Boolean populateManyToOneFields,
    String[] includeFields,
    String[] excludeFields,
    String configurationKey,
    String prefix
) {
    Map<String, FieldMetadata> mergedProperties = getMergedPropertiesRecursively(
        ceilingEntityFullyQualifiedClassname,
        entities,
        foreignField,
        additionalNonPersistentProperties,
        additionalForeignFields,
        mergedPropertyType,
        populateManyToOneFields,
        includeFields,
        excludeFields,
        configurationKey,
        new ArrayList<Class<?>>(),
        prefix,
        false
    );
    // Collect keys first to avoid ConcurrentModificationException while removing
    final List<String> removeKeys = new ArrayList<String>();
    for (final String key : mergedProperties.keySet()) {
        if (mergedProperties.get(key).getExcluded() != null && mergedProperties.get(key).getExcluded()) {
            removeKeys.add(key);
        }
    }
    for (String removeKey : removeKeys) {
        mergedProperties.remove(removeKey);
    }
    // Allow field metadata providers to contribute additional fields here. These latestage handlers take place
    // after any cached lookups occur, and are ideal for adding in dynamic properties that are not globally cacheable
    // like properties gleaned from reflection typically are.
    Set<String> keys = new HashSet<String>(mergedProperties.keySet());
    for (Class<?> targetClass : entities) {
        for (String key : keys) {
            LateStageAddMetadataRequest amr = new LateStageAddMetadataRequest(key, null, targetClass, this, "");
            // Same provider-chain protocol as elsewhere: any non-NOT_HANDLED response marks the key handled,
            // HANDLED_BREAK stops the chain, and the default provider is the fallback.
            boolean foundOneOrMoreHandlers = false;
            for (FieldMetadataProvider fieldMetadataProvider : fieldMetadataProviders) {
                FieldProviderResponse response = fieldMetadataProvider.lateStageAddMetadata(amr, mergedProperties);
                if (FieldProviderResponse.NOT_HANDLED != response) {
                    foundOneOrMoreHandlers = true;
                }
                if (FieldProviderResponse.HANDLED_BREAK == response) {
                    break;
                }
            }
            if (!foundOneOrMoreHandlers) {
                defaultFieldMetadataProvider.lateStageAddMetadata(amr, mergedProperties);
            }
        }
    }
    return mergedProperties;
}
/**
 * Recursive core of the merged-properties build. Wraps the reflective property discovery in a
 * {@link PropertyBuilder} so that {@code metadata.overrideMetadata} can decide when/how to invoke it
 * (e.g. with an overridden populate-to-one setting), then applies include/exclude filtering and
 * foreign-key precedence to the result.
 *
 * @param parentClasses ancestry of classes already visited — used to stop infinite recursion into
 *        self-referencing to-one associations (see testPropertyRecursion)
 * @param isParentExcluded when true, discovered properties start out excluded
 */
protected Map<String, FieldMetadata> getMergedPropertiesRecursively(
    final String ceilingEntityFullyQualifiedClassname,
    final Class<?>[] entities,
    final ForeignKey foreignField,
    final String[] additionalNonPersistentProperties,
    final ForeignKey[] additionalForeignFields,
    final MergedPropertyType mergedPropertyType,
    final Boolean populateManyToOneFields,
    final String[] includeFields,
    final String[] excludeFields,
    final String configurationKey,
    final List<Class<?>> parentClasses,
    final String prefix,
    final Boolean isParentExcluded
) {
    PropertyBuilder propertyBuilder = new PropertyBuilder() {
        @Override
        public Map<String, FieldMetadata> execute(Boolean overridePopulateManyToOne) {
            Map<String, FieldMetadata> mergedProperties = new HashMap<String, FieldMetadata>();
            // The override (when supplied by the caller of the builder) wins over the method argument
            Boolean classAnnotatedPopulateManyToOneFields;
            if (overridePopulateManyToOne != null) {
                classAnnotatedPopulateManyToOneFields = overridePopulateManyToOne;
            } else {
                classAnnotatedPopulateManyToOneFields = populateManyToOneFields;
            }
            buildPropertiesFromPolymorphicEntities(
                entities,
                foreignField,
                additionalNonPersistentProperties,
                additionalForeignFields,
                mergedPropertyType,
                classAnnotatedPopulateManyToOneFields,
                includeFields,
                excludeFields,
                configurationKey,
                ceilingEntityFullyQualifiedClassname,
                mergedProperties,
                parentClasses,
                prefix,
                isParentExcluded
            );
            return mergedProperties;
        }
    };
    Map<String, FieldMetadata> mergedProperties = metadata.overrideMetadata(entities, propertyBuilder, prefix, isParentExcluded, ceilingEntityFullyQualifiedClassname, configurationKey, this);
    // Post-processing: includes/excludes first, then foreign keys un-exclude themselves last
    applyIncludesAndExcludes(includeFields, excludeFields, prefix, isParentExcluded, mergedProperties);
    applyForeignKeyPrecedence(foreignField, additionalForeignFields, mergedProperties);
    return mergedProperties;
}
/**
 * Guarantees that any property serving as a foreign key (either the primary {@code foreignField}
 * or one of the {@code additionalForeignFields}) is never excluded, regardless of what the
 * include/exclude processing decided — the admin platform cannot function without its keys.
 *
 * @param foreignField primary foreign key, may be null
 * @param additionalForeignFields extra foreign keys, may be null/empty
 * @param mergedProperties metadata map mutated in place
 */
protected void applyForeignKeyPrecedence(ForeignKey foreignField, ForeignKey[] additionalForeignFields, Map<String, FieldMetadata> mergedProperties) {
    // Iterate entries directly (avoids a second map lookup per key) and avoid the original local
    // named "metadata", which shadowed this DAO's "metadata" field.
    for (Map.Entry<String, FieldMetadata> entry : mergedProperties.entrySet()) {
        String key = entry.getKey();
        boolean isForeign = foreignField != null && foreignField.getManyToField().equals(key);
        if (!isForeign && !ArrayUtils.isEmpty(additionalForeignFields)) {
            for (ForeignKey foreignKey : additionalForeignFields) {
                if (foreignKey.getManyToField().equals(key)) {
                    isForeign = true;
                    break;
                }
            }
        }
        if (isForeign) {
            // Foreign keys always win over any previously applied exclusion
            entry.getValue().setExcluded(false);
        }
    }
}
/**
 * Applies the explicit include/exclude lists to the merged properties. Includes take precedence:
 * when {@code includeFields} is non-empty, {@code excludeFields} is ignored entirely. A field
 * matches a list entry when its prefixed key equals the entry or lives underneath it
 * (entry + "."). Fields are only un-excluded when the parent itself is not excluded.
 *
 * @param includeFields whitelist; non-matching fields are excluded
 * @param excludeFields blacklist; matching fields are excluded
 * @param prefix dot-notation prefix prepended to each key before matching
 * @param isParentExcluded when true, never flips a field back to visible
 * @param mergedProperties metadata map mutated in place
 */
protected void applyIncludesAndExcludes(String[] includeFields, String[] excludeFields, String prefix, Boolean isParentExcluded, Map<String, FieldMetadata> mergedProperties) {
    // Fixes vs. original: single map lookup per key (entrySet), no local shadowing of the
    // "metadata" field, and debug-string concatenation guarded by isDebugEnabled().
    //check includes
    if (!ArrayUtils.isEmpty(includeFields)) {
        for (String include : includeFields) {
            for (Map.Entry<String, FieldMetadata> entry : mergedProperties.entrySet()) {
                String key = entry.getKey();
                FieldMetadata fieldMetadata = entry.getValue();
                String testKey = prefix + key;
                if (!(testKey.startsWith(include + ".") || testKey.equals(include))) {
                    if (LOG.isDebugEnabled()) {
                        LOG.debug("applyIncludesAndExcludes:Excluding " + key + " because this field did not appear in the explicit includeFields list");
                    }
                    fieldMetadata.setExcluded(true);
                } else if (!isParentExcluded) {
                    if (LOG.isDebugEnabled()) {
                        LOG.debug("applyIncludesAndExcludes:Showing " + key + " because this field appears in the explicit includeFields list");
                    }
                    fieldMetadata.setExcluded(false);
                }
            }
        }
    } else if (!ArrayUtils.isEmpty(excludeFields)) {
        //check excludes
        for (String exclude : excludeFields) {
            for (Map.Entry<String, FieldMetadata> entry : mergedProperties.entrySet()) {
                String key = entry.getKey();
                FieldMetadata fieldMetadata = entry.getValue();
                String testKey = prefix + key;
                if (testKey.startsWith(exclude + ".") || testKey.equals(exclude)) {
                    if (LOG.isDebugEnabled()) {
                        LOG.debug("applyIncludesAndExcludes:Excluding " + key + " because this field appears in the explicit excludeFields list");
                    }
                    fieldMetadata.setExcluded(true);
                } else if (!isParentExcluded) {
                    if (LOG.isDebugEnabled()) {
                        LOG.debug("applyIncludesAndExcludes:Showing " + key + " because this field did not appear in the explicit excludeFields list");
                    }
                    fieldMetadata.setExcluded(false);
                }
            }
        }
    }
}
/**
 * Left-pads {@code s} with the given pad character until the result is at least {@code length}
 * characters long. Inputs already at or beyond the requested length come back unchanged in content.
 *
 * @param s the string to pad
 * @param length the minimum desired length
 * @param pad the character to prepend
 * @return the padded string
 */
protected String pad(String s, int length, char pad) {
    StringBuilder padded = new StringBuilder(Math.max(length, s.length()));
    for (int remaining = length - s.length(); remaining > 0; remaining--) {
        padded.append(pad);
    }
    return padded.append(s).toString();
}
/**
 * Computes a stable 32-character hex cache key for an entity class plus all the parameters that
 * influence its merged-property result. MD5 is used purely as a compact mixing function for an
 * in-memory cache key — this is not a security context, so MD5's weaknesses are irrelevant here.
 *
 * @return the MD5 digest of the concatenated inputs, zero-padded to 32 hex characters
 */
protected String getCacheKey(ForeignKey foreignField, String[] additionalNonPersistentProperties, ForeignKey[] additionalForeignFields, MergedPropertyType mergedPropertyType, Boolean populateManyToOneFields, Class<?> clazz, String configurationKey, Boolean isParentExcluded) {
    StringBuilder sb = new StringBuilder(150);
    sb.append(clazz.hashCode());
    sb.append(foreignField == null ? "" : foreignField.toString());
    sb.append(configurationKey);
    sb.append(isParentExcluded);
    if (additionalNonPersistentProperties != null) {
        for (String prop : additionalNonPersistentProperties) {
            sb.append(prop);
        }
    }
    if (additionalForeignFields != null) {
        for (ForeignKey key : additionalForeignFields) {
            sb.append(key.toString());
        }
    }
    sb.append(mergedPropertyType);
    sb.append(populateManyToOneFields);
    String digest;
    try {
        MessageDigest md = MessageDigest.getInstance("MD5");
        // Fix: the original used getBytes() with the platform-default charset, making the key
        // encoding-dependent; pin UTF-8 explicitly (Charset overload avoids a checked exception).
        byte[] messageDigest = md.digest(sb.toString().getBytes(java.nio.charset.Charset.forName("UTF-8")));
        BigInteger number = new BigInteger(1, messageDigest);
        digest = number.toString(16);
    } catch (NoSuchAlgorithmException e) {
        // MD5 is mandated by the JCA spec for every JVM, so this is effectively unreachable
        throw new RuntimeException(e);
    }
    // BigInteger drops leading zero nibbles — restore them so keys are uniformly 32 chars
    return pad(digest, 32, '0');
}
/**
 * Discovers (or fetches from cache) the property metadata for each polymorphic entity class and
 * merges it into {@code mergedProperties}. Results are cached per-class under a key derived from
 * every influencing parameter (see getCacheKey); cached entries are defensively cloned before being
 * handed to callers so downstream mutation cannot poison the cache.
 */
protected void buildPropertiesFromPolymorphicEntities(
    Class<?>[] entities,
    ForeignKey foreignField,
    String[] additionalNonPersistentProperties,
    ForeignKey[] additionalForeignFields,
    MergedPropertyType mergedPropertyType,
    Boolean populateManyToOneFields,
    String[] includeFields,
    String[] excludeFields,
    String configurationKey,
    String ceilingEntityFullyQualifiedClassname,
    Map<String, FieldMetadata> mergedProperties,
    List<Class<?>> parentClasses,
    String prefix,
    Boolean isParentExcluded
) {
    for (Class<?> clazz : entities) {
        String cacheKey = getCacheKey(foreignField, additionalNonPersistentProperties, additionalForeignFields, mergedPropertyType, populateManyToOneFields, clazz, configurationKey, isParentExcluded);
        Map<String, FieldMetadata> cacheData = null;
        // Global lock serializes cache read/compute/write so concurrent callers do not duplicate work
        synchronized(DynamicDaoHelperImpl.LOCK_OBJECT) {
            if (useCache()) {
                cacheData = METADATA_CACHE.get(cacheKey);
            }
            if (cacheData == null) {
                Map<String, FieldMetadata> props = getPropertiesForEntityClass(
                    clazz,
                    foreignField,
                    additionalNonPersistentProperties,
                    additionalForeignFields,
                    mergedPropertyType,
                    populateManyToOneFields,
                    includeFields,
                    excludeFields,
                    configurationKey,
                    ceilingEntityFullyQualifiedClassname,
                    parentClasses,
                    prefix,
                    isParentExcluded
                );
                //first check all the properties currently in there to see if my entity inherits from them
                for (Class<?> clazz2 : entities) {
                    if (!clazz2.getName().equals(clazz.getName())) {
                        for (Map.Entry<String, FieldMetadata> entry : props.entrySet()) {
                            FieldMetadata metadata = entry.getValue();
                            try {
                                if (Class.forName(metadata.getInheritedFromType()).isAssignableFrom(clazz2)) {
                                    // This property is declared on an ancestor of clazz2 too — record the extra availability
                                    String[] both = (String[]) ArrayUtils.addAll(metadata.getAvailableToTypes(), new String[]{clazz2.getName()});
                                    metadata.setAvailableToTypes(both);
                                }
                            } catch (ClassNotFoundException e) {
                                throw new RuntimeException(e);
                            }
                        }
                    }
                }
                // NOTE(review): cached unconditionally, even when useCache() is false — presumably
                // intentional (warm the cache for later callers); confirm.
                METADATA_CACHE.put(cacheKey, props);
                cacheData = props;
            }
        }
        //clone the metadata before passing to the system
        Map<String, FieldMetadata> clonedCache = new HashMap<String, FieldMetadata>(cacheData.size());
        for (Map.Entry<String, FieldMetadata> entry : cacheData.entrySet()) {
            clonedCache.put(entry.getKey(), entry.getValue().cloneFieldMetadata());
        }
        mergedProperties.putAll(clonedCache);
    }
}
/**
 * Collects every declared field of {@code targetClass} and of all of its superclasses (up to and
 * including {@link Object}). Subclass fields appear before superclass fields in the result.
 *
 * @param targetClass the class whose field hierarchy to walk; must not be null
 * @return all declared fields across the inheritance chain
 */
@Override
public Field[] getAllFields(Class<?> targetClass) {
    List<Field> discovered = new ArrayList<Field>();
    Class<?> current = targetClass;
    do {
        discovered.addAll(Arrays.asList(current.getDeclaredFields()));
        current = current.getSuperclass();
    } while (current != null);
    return discovered.toArray(new Field[discovered.size()]);
}
/**
 * Builds a single-entry metadata map for a "primitive" (simple value) property such as a String,
 * Boolean, Date, Money, integral or decimal type. The resulting metadata is marked visible,
 * required and unique with generous default length/scale/precision.
 *
 * @param propertyName key under which the metadata is registered
 * @param friendlyPropertyName display name for the admin UI
 * @param targetClass the value type of the property
 * @param parentClass owning class passed through to metadata construction
 * @param mergedPropertyType category of merge being performed
 * @return map containing exactly one entry for {@code propertyName}
 * @throws IllegalArgumentException if {@code targetClass} is not one of the supported simple types
 *         (the original code fell through and threw an uninformative NullPointerException)
 */
@Override
public Map<String, FieldMetadata> getPropertiesForPrimitiveClass(
    String propertyName,
    String friendlyPropertyName,
    Class<?> targetClass,
    Class<?> parentClass,
    MergedPropertyType mergedPropertyType
) {
    Map<String, FieldMetadata> fields = new HashMap<String, FieldMetadata>();
    BasicFieldMetadata presentationAttribute = new BasicFieldMetadata();
    presentationAttribute.setFriendlyName(friendlyPropertyName);
    // Every supported branch set the same visibility, so hoist it out of the type dispatch
    presentationAttribute.setVisibility(VisibilityEnum.VISIBLE_ALL);
    SupportedFieldType fieldType;
    if (String.class.isAssignableFrom(targetClass)) {
        fieldType = SupportedFieldType.STRING;
    } else if (Boolean.class.isAssignableFrom(targetClass)) {
        fieldType = SupportedFieldType.BOOLEAN;
    } else if (Date.class.isAssignableFrom(targetClass)) {
        fieldType = SupportedFieldType.DATE;
    } else if (Money.class.isAssignableFrom(targetClass)) {
        fieldType = SupportedFieldType.MONEY;
    } else if (Byte.class.isAssignableFrom(targetClass)
            || Integer.class.isAssignableFrom(targetClass)
            || Long.class.isAssignableFrom(targetClass)
            || Short.class.isAssignableFrom(targetClass)) {
        fieldType = SupportedFieldType.INTEGER;
    } else if (Double.class.isAssignableFrom(targetClass)
            || BigDecimal.class.isAssignableFrom(targetClass)) {
        fieldType = SupportedFieldType.DECIMAL;
    } else {
        // NOTE: Character, Float and primitive classes were never supported here; fail fast with
        // context instead of the opaque NPE the original produced.
        throw new IllegalArgumentException("Unsupported primitive class for property (" + propertyName
                + "): " + targetClass.getName());
    }
    presentationAttribute.setExplicitFieldType(fieldType);
    BasicFieldMetadata fieldMetadata = (BasicFieldMetadata) metadata.getFieldMetadata(
            "", propertyName, null, fieldType, null, parentClass, presentationAttribute, mergedPropertyType, this);
    // Defaults applied uniformly to all primitive properties (single lookup/cast vs. six in the original)
    fieldMetadata.setLength(255);
    fieldMetadata.setForeignKeyCollection(false);
    fieldMetadata.setRequired(true);
    fieldMetadata.setUnique(true);
    fieldMetadata.setScale(100);
    fieldMetadata.setPrecision(100);
    fields.put(propertyName, fieldMetadata);
    return fields;
}
@Override
// Delegates to the helper to unwrap the Hibernate SessionFactory from the standard EntityManager
public SessionFactory getSessionFactory() {
    return dynamicDaoHelper.getSessionFactory((HibernateEntityManager) standardEntityManager);
}
@Override
// Returns Hibernate identifier info for the class; downstream code reads the "name" and "type" keys
public Map<String, Object> getIdMetadata(Class<?> entityClass) {
    return dynamicDaoHelper.getIdMetadata(entityClass, (HibernateEntityManager) standardEntityManager);
}
@Override
// Delegates to the helper for the mapped (non-id) property names of the entity
public List<String> getPropertyNames(Class<?> entityClass) {
    return dynamicDaoHelper.getPropertyNames(entityClass, (HibernateEntityManager) standardEntityManager);
}
@Override
// Delegates to the helper for the Hibernate Types parallel to getPropertyNames(..)
public List<Type> getPropertyTypes(Class<?> entityClass) {
    return dynamicDaoHelper.getPropertyTypes(entityClass, (HibernateEntityManager) standardEntityManager);
}
/**
 * Builds the complete metadata map for one entity class: presentation attributes, the Hibernate
 * mapped properties (plus the id property), and any requested additional non-persistent properties
 * that resolve to a real field or getter on one of the polymorphic types.
 *
 * @throws IllegalArgumentException if any mapped property name contains a '.' (incompatible with
 *         the dot-notation addressing used throughout this DAO)
 */
protected Map<String, FieldMetadata> getPropertiesForEntityClass(
    Class<?> targetClass,
    ForeignKey foreignField,
    String[] additionalNonPersistentProperties,
    ForeignKey[] additionalForeignFields,
    MergedPropertyType mergedPropertyType,
    Boolean populateManyToOneFields,
    String[] includeFields,
    String[] excludeFields,
    String configurationKey,
    String ceilingEntityFullyQualifiedClassname,
    List<Class<?>> parentClasses,
    String prefix,
    Boolean isParentExcluded
) {
    Map<String, FieldMetadata> presentationAttributes = metadata.getFieldPresentationAttributes(null, targetClass, this, "");
    if (isParentExcluded) {
        // Exclusion is inherited: start every child property excluded
        for (String key : presentationAttributes.keySet()) {
            LOG.debug("getPropertiesForEntityClass:Excluding " + key + " because parent is excluded.");
            presentationAttributes.get(key).setExcluded(true);
        }
    }
    // idMetadata is a raw map from the Hibernate helper; contract: "name" -> String, "type" -> Type
    Map idMetadata = getIdMetadata(targetClass);
    Map<String, FieldMetadata> fields = new HashMap<String, FieldMetadata>();
    String idProperty = (String) idMetadata.get("name");
    List<String> propertyNames = getPropertyNames(targetClass);
    // The id is not part of getPropertyNames; append it (and its type below, keeping lists parallel)
    propertyNames.add(idProperty);
    Type idType = (Type) idMetadata.get("type");
    List<Type> propertyTypes = getPropertyTypes(targetClass);
    propertyTypes.add(idType);
    PersistentClass persistentClass = getPersistentClass(targetClass.getName());
    Iterator testIter = persistentClass.getPropertyIterator();
    List<Property> propertyList = new ArrayList<Property>();
    //check the properties for problems
    while(testIter.hasNext()) {
        Property property = (Property) testIter.next();
        if (property.getName().contains(".")) {
            throw new IllegalArgumentException("Properties from entities that utilize a period character ('.') in their name are incompatible with this system. The property name in question is: (" + property.getName() + ") from the class: (" + targetClass.getName() + ")");
        }
        propertyList.add(property);
    }
    buildProperties(
        targetClass,
        foreignField,
        additionalForeignFields,
        additionalNonPersistentProperties,
        mergedPropertyType,
        presentationAttributes,
        propertyList,
        fields,
        propertyNames,
        propertyTypes,
        idProperty,
        populateManyToOneFields,
        includeFields,
        excludeFields,
        configurationKey,
        ceilingEntityFullyQualifiedClassname,
        parentClasses,
        prefix,
        isParentExcluded
    )
    // Non-persistent properties are exposed as hidden String fields
    ;
    BasicFieldMetadata presentationAttribute = new BasicFieldMetadata();
    presentationAttribute.setExplicitFieldType(SupportedFieldType.STRING);
    presentationAttribute.setVisibility(VisibilityEnum.HIDDEN_ALL);
    if (!ArrayUtils.isEmpty(additionalNonPersistentProperties)) {
        Class<?>[] entities = getAllPolymorphicEntitiesFromCeiling(targetClass);
        for (String additionalNonPersistentProperty : additionalNonPersistentProperties) {
            if (StringUtils.isEmpty(prefix) || (!StringUtils.isEmpty(prefix) && additionalNonPersistentProperty.startsWith(prefix))) {
                String myAdditionalNonPersistentProperty = additionalNonPersistentProperty;
                //get final property if this is a dot delimited property
                int finalDotPos = additionalNonPersistentProperty.lastIndexOf('.');
                if (finalDotPos >= 0) {
                    myAdditionalNonPersistentProperty = myAdditionalNonPersistentProperty.substring(finalDotPos + 1, myAdditionalNonPersistentProperty.length());
                }
                //check all the polymorphic types on this target class to see if the end property exists
                Field testField = null;
                Method testMethod = null;
                for (Class<?> clazz : entities) {
                    try {
                        testMethod = clazz.getMethod(myAdditionalNonPersistentProperty);
                        if (testMethod != null) {
                            break;
                        }
                    } catch (NoSuchMethodException e) {
                        //do nothing - method does not exist
                    }
                    testField = getFieldManager().getField(clazz, myAdditionalNonPersistentProperty);
                    if (testField != null) {
                        break;
                    }
                }
                //if the property exists, add it to the metadata for this class
                if (testField != null || testMethod != null) {
                    fields.put(additionalNonPersistentProperty, metadata.getFieldMetadata(prefix, additionalNonPersistentProperty, propertyList, SupportedFieldType.STRING, null, targetClass, presentationAttribute, mergedPropertyType, this));
                }
            }
        }
    }
    return fields;
}
/**
 * Walks the parallel propertyNames/propertyTypes lists and produces metadata for every property
 * that is a simple (non-any, non-collection) type, a foreign key, or has an explicit presentation
 * attribute. Each candidate is first offered to the registered FieldMetadataProviders; unhandled
 * properties fall through to buildBasicProperty.
 */
protected void buildProperties(
    Class<?> targetClass,
    ForeignKey foreignField,
    ForeignKey[] additionalForeignFields,
    String[] additionalNonPersistentProperties,
    MergedPropertyType mergedPropertyType,
    Map<String, FieldMetadata> presentationAttributes,
    List<Property> componentProperties,
    Map<String, FieldMetadata> fields,
    List<String> propertyNames,
    List<Type> propertyTypes,
    String idProperty,
    Boolean populateManyToOneFields,
    String[] includeFields,
    String[] excludeFields,
    String configurationKey,
    String ceilingEntityFullyQualifiedClassname,
    List<Class<?>> parentClasses,
    String prefix,
    Boolean isParentExcluded
) {
    // j indexes propertyTypes in lockstep with the propertyNames iteration below
    int j = 0;
    // Comparator that treats "foo" and "foo---map-field---bar" style keys as equal so that
    // binarySearch matches map-field variants of a presentation key.
    // NOTE(review): presentationKeyList is sorted with natural ordering but searched with this
    // comparator — they agree only because the map-field separator extends equality; confirm.
    Comparator<String> propertyComparator = new Comparator<String>() {
        @Override
        public int compare(String o1, String o2) {
            //check for property name equality and for map field properties
            if (o1.equals(o2) || o1.startsWith(o2 + FieldManager.MAPFIELDSEPARATOR) || o2.startsWith(o1 + FieldManager.MAPFIELDSEPARATOR)) {
                return 0;
            }
            return o1.compareTo(o2);
        }
    };
    List<String> presentationKeyList = new ArrayList<String>(presentationAttributes.keySet());
    Collections.sort(presentationKeyList);
    for (String propertyName : propertyNames) {
        final Type type = propertyTypes.get(j);
        boolean isPropertyForeignKey = testForeignProperty(foreignField, prefix, propertyName);
        int additionalForeignKeyIndexPosition = findAdditionalForeignKeyIndex(additionalForeignFields, prefix, propertyName);
        j++;
        Field myField = getFieldManager().getField(targetClass, propertyName);
        if (myField == null) {
            //try to get the field with the prefix - needed for advanced collections that appear in @Embedded classes
            myField = getFieldManager().getField(targetClass, prefix + propertyName);
        }
        // Precedence note: '&&' binds tighter than '||' — this reads as
        // (simple type) OR (foreign key) OR (additional foreign key) OR (has presentation attribute)
        if (
            !type.isAnyType() && !type.isCollectionType() ||
            isPropertyForeignKey ||
            additionalForeignKeyIndexPosition >= 0 ||
            Collections.binarySearch(presentationKeyList, propertyName, propertyComparator) >= 0
        ) {
            if (myField != null) {
                boolean handled = false;
                for (FieldMetadataProvider provider : fieldMetadataProviders) {
                    FieldMetadata presentationAttribute = presentationAttributes.get(propertyName);
                    if (presentationAttribute != null) {
                        setExcludedBasedOnShowIfProperty(presentationAttribute);
                    }
                    FieldProviderResponse response = provider.addMetadataFromFieldType(
                            new AddMetadataFromFieldTypeRequest(myField, targetClass, foreignField, additionalForeignFields,
                                    mergedPropertyType, componentProperties, idProperty, prefix,
                                    propertyName, type, isPropertyForeignKey, additionalForeignKeyIndexPosition,
                                    presentationAttributes, presentationAttribute, null, type.getReturnedClass(), this), fields);
                    if (FieldProviderResponse.NOT_HANDLED != response) {
                        handled = true;
                    }
                    if (FieldProviderResponse.HANDLED_BREAK == response) {
                        break;
                    }
                }
                if (!handled) {
                    // Fallback for properties no provider claimed
                    buildBasicProperty(myField, targetClass, foreignField, additionalForeignFields,
                            additionalNonPersistentProperties, mergedPropertyType, presentationAttributes,
                            componentProperties, fields, idProperty, populateManyToOneFields, includeFields,
                            excludeFields, configurationKey, ceilingEntityFullyQualifiedClassname, parentClasses,
                            prefix, isParentExcluded, propertyName, type, isPropertyForeignKey, additionalForeignKeyIndexPosition);
                }
            }
        }
    }
}
/**
 * Decides whether a property should be included in the result set, after first applying any
 * show-if-property exclusion. A null attribute is always included; otherwise the property is
 * included only when neither it nor its children are flagged excluded.
 *
 * @param presentationAttribute metadata to test, may be null
 * @return true when the property should be included
 */
public Boolean testPropertyInclusion(FieldMetadata presentationAttribute) {
    setExcludedBasedOnShowIfProperty(presentationAttribute);
    if (presentationAttribute == null) {
        return true;
    }
    boolean selfExcluded = presentationAttribute.getExcluded() != null && presentationAttribute.getExcluded();
    boolean childrenExcluded = presentationAttribute.getChildrenExcluded() != null && presentationAttribute.getChildrenExcluded();
    return !(selfExcluded || childrenExcluded);
}
/**
 * Excludes the given metadata when its configured show-if property resolves to {@code false}.
 *
 * @param fieldMetadata metadata to inspect, may be null
 * @return false when the field was excluded by its show-if property; true otherwise
 */
protected boolean setExcludedBasedOnShowIfProperty(FieldMetadata fieldMetadata) {
    if (fieldMetadata == null) {
        return true;
    }
    String showIfProperty = fieldMetadata.getShowIfProperty();
    if (showIfProperty == null || showIfProperty.equals("")) {
        return true;
    }
    // Fix: resolve the flag once — the original invoked the (potentially remote) configuration
    // lookup twice for the same property.
    Boolean shouldShow = appConfigurationRemoteService.getBooleanPropertyValue(showIfProperty);
    if (shouldShow != null && !shouldShow) {
        //do not include this in the display if it returns false.
        fieldMetadata.setExcluded(true);
        return false;
    }
    return true;
}
/**
 * Guards against infinite recursion when traversing to-one associations: returns false (i.e.
 * "do not include") when the resolved field's type is assignment-compatible with the current
 * target class or any ancestor already on the traversal path. Only applies when a prefix is
 * present (top-level properties cannot recurse).
 *
 * @return true when the property is safe to include
 */
protected Boolean testPropertyRecursion(String prefix, List<Class<?>> parentClasses, String propertyName, Class<?> targetClass,
                    String ceilingEntityFullyQualifiedClassname) {
    Boolean includeField = true;
    if (!StringUtils.isEmpty(prefix)) {
        Field testField = getFieldManager().getField(targetClass, propertyName);
        if (testField == null) {
            // The field may live on a sibling polymorphic type, or only be reachable via the prefix
            Class<?>[] entities;
            try {
                entities = getAllPolymorphicEntitiesFromCeiling(Class.forName(ceilingEntityFullyQualifiedClassname));
            } catch (ClassNotFoundException e) {
                throw new RuntimeException(e);
            }
            for (Class<?> clazz : entities) {
                testField = getFieldManager().getField(clazz, propertyName);
                if (testField != null) {
                    break;
                }
            }
            String testProperty = prefix + propertyName;
            if (testField == null) {
                testField = getFieldManager().getField(targetClass, testProperty);
            }
            if (testField == null) {
                for (Class<?> clazz : entities) {
                    testField = getFieldManager().getField(clazz, testProperty);
                    if (testField != null) {
                        break;
                    }
                }
            }
        }
        if (testField != null) {
            Class<?> testType = testField.getType();
            // Assignability in either direction counts as a cycle back into the hierarchy
            for (Class<?> parentClass : parentClasses) {
                if (parentClass.isAssignableFrom(testType) || testType.isAssignableFrom(parentClass)) {
                    includeField = false;
                    break;
                }
            }
            if (includeField && (targetClass.isAssignableFrom(testType) || testType.isAssignableFrom(targetClass))) {
                includeField = false;
            }
        }
    }
    return includeField;
}
/**
 * Default metadata construction for a single property that no FieldMetadataProvider handled.
 * Component (embedded) types and to-one entity types are expanded recursively; everything else
 * (plus any property that is a declared foreign key) is delegated to the default provider.
 */
protected void buildBasicProperty(
    Field field,
    Class<?> targetClass,
    ForeignKey foreignField,
    ForeignKey[] additionalForeignFields,
    String[] additionalNonPersistentProperties,
    MergedPropertyType mergedPropertyType,
    Map<String, FieldMetadata> presentationAttributes,
    List<Property> componentProperties,
    Map<String, FieldMetadata> fields,
    String idProperty,
    Boolean populateManyToOneFields,
    String[] includeFields,
    String[] excludeFields,
    String configurationKey,
    String ceilingEntityFullyQualifiedClassname,
    List<Class<?>> parentClasses,
    String prefix,
    Boolean isParentExcluded,
    String propertyName,
    Type type,
    boolean propertyForeignKey,
    int additionalForeignKeyIndexPosition) {
    FieldMetadata presentationAttribute = presentationAttributes.get(propertyName);
    // Exclusion propagates down; inclusion testing may itself exclude via show-if properties
    Boolean amIExcluded = isParentExcluded || !testPropertyInclusion(presentationAttribute);
    Boolean includeField = testPropertyRecursion(prefix, parentClasses, propertyName, targetClass,
            ceilingEntityFullyQualifiedClassname);
    SupportedFieldType explicitType = null;
    if (presentationAttribute != null && presentationAttribute instanceof BasicFieldMetadata) {
        explicitType = ((BasicFieldMetadata) presentationAttribute).getExplicitFieldType();
    }
    Class<?> returnedClass = type.getReturnedClass();
    // Labeled block: component/entity expansion jumps past the other expansion, then both fall
    // through to the final default-provider step below.
    checkProp: {
        if (type.isComponentType() && includeField) {
            // @Embedded component — expand its sub-properties under this property's name
            buildComponentProperties(
                targetClass,
                foreignField,
                additionalForeignFields,
                additionalNonPersistentProperties,
                mergedPropertyType,
                fields,
                idProperty,
                populateManyToOneFields,
                includeFields,
                excludeFields,
                configurationKey,
                ceilingEntityFullyQualifiedClassname,
                propertyName,
                type,
                returnedClass,
                parentClasses,
                amIExcluded,
                prefix
            );
            break checkProp;
        }
        /*
         * Currently we do not support ManyToOne fields whose class type is the same
         * as the target type, since this forms an infinite loop and will cause a stack overflow.
         */
        if (
            type.isEntityType() &&
            !returnedClass.isAssignableFrom(targetClass) &&
            populateManyToOneFields &&
            includeField
        ) {
            // to-one association — recurse into the associated entity's properties
            buildEntityProperties(
                fields,
                foreignField,
                additionalForeignFields,
                additionalNonPersistentProperties,
                populateManyToOneFields,
                includeFields,
                excludeFields,
                configurationKey,
                ceilingEntityFullyQualifiedClassname,
                propertyName,
                returnedClass,
                targetClass,
                parentClasses,
                prefix,
                amIExcluded
            );
            break checkProp;
        }
    }
    //Don't include this property if it failed manyToOne inclusion and is not a specified foreign key
    if (includeField || propertyForeignKey || additionalForeignKeyIndexPosition >= 0) {
        defaultFieldMetadataProvider.addMetadataFromFieldType(
                new AddMetadataFromFieldTypeRequest(field, targetClass, foreignField, additionalForeignFields,
                        mergedPropertyType, componentProperties, idProperty, prefix, propertyName, type,
                        propertyForeignKey, additionalForeignKeyIndexPosition, presentationAttributes,
                        presentationAttribute, explicitType, returnedClass, this), fields);
    }
}
/**
 * Reports whether the (prefixed) property is the many-to field of the primary foreign key.
 *
 * @param foreignField the configured foreign key, may be null
 * @param prefix dot-notation prefix of the current traversal
 * @param propertyName the unprefixed property name
 * @return true when the property matches the foreign key's many-to field
 */
protected boolean testForeignProperty(ForeignKey foreignField, String prefix, String propertyName) {
    return foreignField != null && foreignField.getManyToField().equals(prefix + propertyName);
}
/**
 * Returns the index within {@code additionalForeignFields} whose many-to field equals
 * {@code prefix + propertyName}, or -1 when absent (callers only test {@code >= 0}).
 * <p>
 * Fix: the original used {@link java.util.Arrays#binarySearch}, whose contract requires the array
 * to be sorted by the supplied comparator — no caller sorts it, so results were undefined for
 * unsorted input. A linear scan always yields the correct index and the arrays are tiny.
 */
protected int findAdditionalForeignKeyIndex(ForeignKey[] additionalForeignFields, String prefix, String propertyName) {
    if (additionalForeignFields != null) {
        String targetFieldName = prefix + propertyName;
        for (int i = 0; i < additionalForeignFields.length; i++) {
            if (targetFieldName.equals(additionalForeignFields[i].getManyToField())) {
                return i;
            }
        }
    }
    return -1;
}
/**
 * Recursively expands a to-one association: merges the associated entity's (polymorphic)
 * properties under {@code propertyName + '.'}, stamps each discovered metadata with the owning
 * type, and copies the results into {@code fields}.
 *
 * @param parentClasses traversal ancestry; extended with {@code targetClass} for cycle detection
 */
protected void buildEntityProperties(
    Map<String, FieldMetadata> fields,
    ForeignKey foreignField,
    ForeignKey[] additionalForeignFields,
    String[] additionalNonPersistentProperties,
    Boolean populateManyToOneFields,
    String[] includeFields,
    String[] excludeFields,
    String configurationKey,
    String ceilingEntityFullyQualifiedClassname,
    String propertyName,
    Class<?> returnedClass,
    Class<?> targetClass,
    List<Class<?>> parentClasses,
    String prefix,
    Boolean isParentExcluded
) {
    Class<?>[] polymorphicEntities = getAllPolymorphicEntitiesFromCeiling(returnedClass);
    // Copy the ancestry (callers keep their own list unchanged) and record this class on the path
    List<Class<?>> clonedParentClasses = new ArrayList<Class<?>>(parentClasses);
    clonedParentClasses.add(targetClass);
    Map<String, FieldMetadata> newFields = getMergedPropertiesRecursively(
        ceilingEntityFullyQualifiedClassname,
        polymorphicEntities,
        foreignField,
        additionalNonPersistentProperties,
        additionalForeignFields,
        MergedPropertyType.PRIMARY,
        populateManyToOneFields,
        includeFields,
        excludeFields,
        configurationKey,
        clonedParentClasses,
        prefix + propertyName + '.',
        isParentExcluded
    );
    for (FieldMetadata newMetadata : newFields.values()) {
        newMetadata.setInheritedFromType(targetClass.getName());
        newMetadata.setAvailableToTypes(new String[]{targetClass.getName()});
    }
    // Re-key every discovered property under this association's name
    Map<String, FieldMetadata> convertedFields = new HashMap<String, FieldMetadata>(newFields.size());
    for (Map.Entry<String, FieldMetadata> entry : newFields.entrySet()) {
        convertedFields.put(propertyName + '.' + entry.getKey(), entry.getValue());
    }
    fields.putAll(convertedFields);
}
/**
 * Expands an {@code @Embedded} component property: discovers the component's sub-properties via
 * its Hibernate ComponentType, builds metadata for them with a {@code propertyName + "."} prefix,
 * and merges the re-keyed results into {@code fields}.
 */
protected void buildComponentProperties(
    Class<?> targetClass,
    ForeignKey foreignField,
    ForeignKey[] additionalForeignFields,
    String[] additionalNonPersistentProperties,
    MergedPropertyType mergedPropertyType,
    Map<String, FieldMetadata> fields,
    String idProperty,
    Boolean populateManyToOneFields,
    String[] includeFields,
    String[] excludeFields,
    String configurationKey,
    String ceilingEntityFullyQualifiedClassname,
    String propertyName,
    Type type,
    Class<?> returnedClass,
    List<Class<?>> parentClasses,
    Boolean isParentExcluded,
    String prefix
) {
    // Parallel arrays from Hibernate: names and types of the component's sub-properties
    String[] componentProperties = ((ComponentType) type).getPropertyNames();
    List<String> componentPropertyNames = Arrays.asList(componentProperties);
    Type[] componentTypes = ((ComponentType) type).getSubtypes();
    List<Type> componentPropertyTypes = Arrays.asList(componentTypes);
    String tempPrefix = "";
    int pos = prefix.indexOf(".");
    if (pos > 0 && pos < prefix.length()-1) {
        //only use part of the prefix if it's more than one layer deep
        tempPrefix = prefix.substring(pos + 1, prefix.length());
    }
    Map<String, FieldMetadata> componentPresentationAttributes = metadata.getFieldPresentationAttributes(targetClass, returnedClass, this, tempPrefix + propertyName + ".");
    if (isParentExcluded) {
        // Exclusion propagates into embedded components
        for (String key : componentPresentationAttributes.keySet()) {
            LOG.debug("buildComponentProperties:Excluding " + key + " because the parent was excluded");
            componentPresentationAttributes.get(key).setExcluded(true);
        }
    }
    PersistentClass persistentClass = getPersistentClass(targetClass.getName());
    Property property;
    try {
        property = persistentClass.getProperty(propertyName);
    } catch (MappingException e) {
        // Embedded-in-embedded case: the property is only addressable with the prefix
        property = persistentClass.getProperty(prefix + propertyName);
    }
    Iterator componentPropertyIterator = ((org.hibernate.mapping.Component) property.getValue()).getPropertyIterator();
    List<Property> componentPropertyList = new ArrayList<Property>();
    while(componentPropertyIterator.hasNext()) {
        componentPropertyList.add((Property) componentPropertyIterator.next());
    }
    Map<String, FieldMetadata> newFields = new HashMap<String, FieldMetadata>();
    buildProperties(
        targetClass,
        foreignField,
        additionalForeignFields,
        additionalNonPersistentProperties,
        mergedPropertyType,
        componentPresentationAttributes,
        componentPropertyList,
        newFields,
        componentPropertyNames,
        componentPropertyTypes,
        idProperty,
        populateManyToOneFields,
        includeFields,
        excludeFields,
        configurationKey,
        ceilingEntityFullyQualifiedClassname,
        parentClasses,
        propertyName + ".",
        isParentExcluded
    );
    // Re-key the component's sub-properties under the component property's name
    Map<String, FieldMetadata> convertedFields = new HashMap<String, FieldMetadata>();
    for (String key : newFields.keySet()) {
        convertedFields.put(propertyName + "." + key, newFields.get(key));
    }
    fields.putAll(convertedFields);
}
@Override
public EntityManager getStandardEntityManager() {
return standardEntityManager;
}
@Override
public void setStandardEntityManager(EntityManager entityManager) {
this.standardEntityManager = entityManager;
}
public EJB3ConfigurationDao getEjb3ConfigurationDao() {
return ejb3ConfigurationDao;
}
/** Injects the EJB3 configuration DAO. */
public void setEjb3ConfigurationDao(EJB3ConfigurationDao ejb3ConfigurationDao) {
    this.ejb3ConfigurationDao = ejb3ConfigurationDao;
}
/**
 * Builds a fresh {@code FieldManager} on every call (not cached), bound to this DAO and the
 * shared entity configuration.
 */
@Override
public FieldManager getFieldManager() {
    return new FieldManager(entityConfiguration, this);
}
/** Returns the Broadleaf entity configuration used to resolve entity implementations. */
@Override
public EntityConfiguration getEntityConfiguration() {
    return entityConfiguration;
}
/** Injects the entity configuration. */
@Override
public void setEntityConfiguration(EntityConfiguration entityConfiguration) {
    this.entityConfiguration = entityConfiguration;
}
/** Returns the helper that resolves admin presentation metadata for fields. */
@Override
public Metadata getMetadata() {
    return metadata;
}
/** Injects the metadata helper. */
@Override
public void setMetadata(Metadata metadata) {
    this.metadata = metadata;
}
/** Returns the ordered list of providers consulted when building field metadata. */
public List<FieldMetadataProvider> getFieldMetadataProviders() {
    return fieldMetadataProviders;
}
/** Injects the list of field metadata providers. */
public void setFieldMetadataProviders(List<FieldMetadataProvider> fieldMetadataProviders) {
    this.fieldMetadataProviders = fieldMetadataProviders;
}
/** Returns the fallback provider used when no specialized provider handles a field. */
@Override
public FieldMetadataProvider getDefaultFieldMetadataProvider() {
    return defaultFieldMetadataProvider;
}
/** Injects the fallback field metadata provider. */
public void setDefaultFieldMetadataProvider(FieldMetadataProvider defaultFieldMetadataProvider) {
    this.defaultFieldMetadataProvider = defaultFieldMetadataProvider;
}
/** Delegates to the helper to decide whether {@code clazz} is excluded from polymorphic type lists. */
protected boolean isExcludeClassFromPolymorphism(Class<?> clazz) {
    return dynamicDaoHelper.isExcludeClassFromPolymorphism(clazz);
}
/** Returns the helper that encapsulates common dynamic-DAO reflection logic. */
public DynamicDaoHelper getDynamicDaoHelper() {
    return dynamicDaoHelper;
}
/** Injects the dynamic-DAO helper. */
public void setDynamicDaoHelper(DynamicDaoHelper dynamicDaoHelper) {
    this.dynamicDaoHelper = dynamicDaoHelper;
}
}
| 1no label
|
admin_broadleaf-open-admin-platform_src_main_java_org_broadleafcommerce_openadmin_server_dao_DynamicEntityDaoImpl.java
|
106 |
/**
 * Presentation ordering weights for admin field groups on this entity.
 * Larger values sort later.
 */
public static class Order {
    public static final int Basic = 1000;
    public static final int Page = 2000;
    // NOTE(review): same weight as Basic — presumably intentional (groups tie and keep
    // declaration order); confirm before changing.
    public static final int Rules = 1000;
}
| 0true
|
admin_broadleaf-contentmanagement-module_src_main_java_org_broadleafcommerce_cms_page_domain_PageImpl.java
|
1,612 |
/**
 * Connects this member to the Hazelcast Management Center (self-hosted or hosted/SaaS).
 * Two background threads are owned here: {@link StateSendThread} periodically POSTs the member
 * state to the center's collector URL, and {@link TaskPollThread} polls the center for console
 * tasks to execute and posts back their responses.
 * Lifecycle is guarded by {@code isRunning} (compareAndSet makes start/shutdown idempotent).
 */
public class ManagementCenterService {

    // Ensures the hosted-management-center registration banner is logged at most once per JVM.
    private final static AtomicBoolean DISPLAYED_HOSTED_MANAGEMENT_CENTER_INFO = new AtomicBoolean(false);

    public static final int HTTP_SUCCESS = 200;

    private final HazelcastInstanceImpl instance;
    private final TaskPollThread taskPollThread;
    private final StateSendThread stateSendThread;
    private final ILogger logger;
    private final ConsoleCommandHandler commandHandler;
    private final ManagementCenterConfig managementCenterConfig;
    private final SerializationService serializationService;
    private final ManagementCenterIdentifier identifier;
    private final AtomicBoolean isRunning = new AtomicBoolean(false);
    private final String clusterId;
    private final String securityToken;

    // May be re-assigned at runtime via updateManagementCenterUrl(); hence volatile.
    private volatile String managementCenterUrl;
    private volatile boolean urlChanged = false;
    // Set by signalVersionMismatch(); both worker threads back off for a minute when true.
    private volatile boolean versionMismatch = false;

    public ManagementCenterService(HazelcastInstanceImpl instance) {
        this.instance = instance;
        logger = instance.node.getLogger(ManagementCenterService.class);
        managementCenterConfig = getManagementCenterConfig();
        securityToken = managementCenterConfig.getSecurityToken();
        managementCenterUrl = getManagementCenterUrl();
        clusterId = getClusterId();
        commandHandler = new ConsoleCommandHandler(instance);
        taskPollThread = new TaskPollThread();
        stateSendThread = new StateSendThread();
        serializationService = instance.node.getSerializationService();
        identifier = newManagementCenterIdentifier();
        registerListeners();
        logHostedManagementCenterMessages();
    }

    // Logs either the login URL (token already present) or the registration URL (first use).
    private void logHostedManagementCenterMessages() {
        if (isHostedManagementCenterEnabled()) {
            if (isSecurityTokenAvailable()) {
                logHostedManagementCenterLoginUrl();
            } else {
                logHostedManagementCenterRegisterUrl();
            }
        }
    }

    private boolean isSecurityTokenAvailable() {
        return !isNullOrEmpty(managementCenterConfig.getSecurityToken());
    }

    private String getManagementCenterUrl() {
        if (isHostedManagementCenterEnabled()) {
            return getHostedManagementCenterUrl();
        } else {
            return managementCenterConfig.getUrl();
        }
    }

    // Hosted mode applies only when the feature flag is on AND no explicit URL is configured.
    private boolean isHostedManagementCenterEnabled() {
        if (!getGroupProperties().HOSTED_MANAGEMENT_ENABLED.getBoolean()) {
            return false;
        }
        return isNullOrEmpty(managementCenterConfig.getUrl());
    }

    private GroupProperties getGroupProperties() {
        return instance.node.getGroupProperties();
    }

    private String getHostedManagementCenterUrl() {
        return getGroupProperties().HOSTED_MANAGEMENT_URL.getString();
    }

    private void registerListeners() {
        if(!managementCenterConfig.isEnabled()){
            return;
        }
        instance.getLifecycleService().addLifecycleListener(new LifecycleListenerImpl());
        instance.getCluster().addMembershipListener(new MemberListenerImpl());
    }

    private void logHostedManagementCenterLoginUrl() {
        if (managementCenterConfig.isEnabled()) {
            logger.info("======================================================");
            logger.info("You can access your Hazelcast instance at:");
            logger.info(getHostedManagementCenterUrl() + "/start.do?clusterid=" + clusterId);
            logger.info("======================================================");
        } else {
            logger.info("======================================================");
            logger.info("To see your application on the Hosted Management Center, " +
                    "you need to enable the ManagementCenterConfig.");
            logger.info("======================================================");
        }
    }

    private void logHostedManagementCenterRegisterUrl() {
        //we only want to display the page for hosted management registration once. We don't want to pollute
        //the logfile.
        if (!DISPLAYED_HOSTED_MANAGEMENT_CENTER_INFO.compareAndSet(false, true)) {
            return;
        }
        logger.info("======================================================");
        logger.info("Manage your Hazelcast cluster with the Management Center SaaS Application");
        logger.info("To register, copy/paste the following url in your browser and follow the instructions.");
        logger.info(getHostedManagementCenterUrl() + "/register.jsp");
        logger.info("======================================================");
    }

    // Explicitly configured id wins; otherwise a cluster-wide id is generated only in hosted mode.
    private String getClusterId() {
        String clusterId = managementCenterConfig.getClusterId();
        if(!isNullOrEmpty(clusterId)){
            return clusterId;
        }
        if (!isHostedManagementCenterEnabled()) {
            return null;
        }
        return newClusterId();
    }

    // Generates the id once per cluster via an IAtomicReference; losers of the CAS adopt the winner's id.
    private String newClusterId() {
        IAtomicReference<String> clusterIdReference = instance.getAtomicReference("___clusterIdGenerator");
        String id = clusterIdReference.get();
        if (id == null) {
            id = UUID.randomUUID().toString().replace("-", "");
            if (!clusterIdReference.compareAndSet(null, id)) {
                id = clusterIdReference.get();
            }
        }
        return id;
    }

    private ManagementCenterConfig getManagementCenterConfig() {
        ManagementCenterConfig config = instance.node.config.getManagementCenterConfig();
        if (config == null) {
            throw new IllegalStateException("ManagementCenterConfig can't be null!");
        }
        return config;
    }

    private ManagementCenterIdentifier newManagementCenterIdentifier() {
        Address address = instance.node.address;
        String groupName = instance.getConfig().getGroupConfig().getName();
        String version = instance.node.getBuildInfo().getVersion();
        return new ManagementCenterIdentifier(version, groupName, address.getHost() + ":" + address.getPort());
    }

    // Normalizes the base URL to always end with a slash so path suffixes can be appended safely.
    private static String cleanupUrl(String url) {
        if (url == null) {
            return null;
        }
        return url.endsWith("/") ? url : url + '/';
    }

    /** Starts both worker threads; no-op if no URL is configured or already started. */
    public void start() {
        if (managementCenterUrl == null) {
            logger.warning("Can't start Hazelcast Management Center Service: web-server URL is null!");
            return;
        }
        if (!isRunning.compareAndSet(false, true)) {
            //it is already started
            return;
        }
        taskPollThread.start();
        stateSendThread.start();
        logger.info("Hazelcast will connect to Hazelcast Management Center on address: \n" + managementCenterUrl);
    }

    /** Stops the service by interrupting both worker threads; idempotent. */
    public void shutdown() {
        if (!isRunning.compareAndSet(true, false)) {
            //it is already shutdown.
            return;
        }
        logger.info("Shutting down Hazelcast Management Center Service");
        try {
            interruptThread(stateSendThread);
            interruptThread(taskPollThread);
        } catch (Throwable ignored) {
        }
    }

    /**
     * Broadcasts a new Management Center URL to every member after verifying group credentials.
     * Returns raw HTTP response bytes: 403 on bad credentials, 204 on success, 500 on error.
     */
    public byte[] clusterWideUpdateManagementCenterUrl(String groupName, String groupPass, String newUrl) {
        try {
            GroupConfig groupConfig = instance.getConfig().getGroupConfig();
            if (!(groupConfig.getName().equals(groupName) && groupConfig.getPassword().equals(groupPass))) {
                return HttpCommand.RES_403;
            }
            final Collection<MemberImpl> memberList = instance.node.clusterService.getMemberList();
            for (MemberImpl member : memberList) {
                send(member.getAddress(), new UpdateManagementCenterUrlOperation(newUrl));
            }
            return HttpCommand.RES_204;
        } catch (Throwable throwable) {
            logger.warning("New Management Center url cannot be assigned.", throwable);
            return HttpCommand.RES_500;
        }
    }

    /** Applies a new URL on this member, starting the service if it was not running yet. */
    public void updateManagementCenterUrl(String newUrl) {
        if (newUrl == null) {
            return;
        }
        if (newUrl.equals(managementCenterUrl)) {
            return;
        }
        managementCenterUrl = newUrl;
        if (!isRunning()) {
            start();
        }
        urlChanged = true;
        logger.info("Management Center URL has changed. " +
                "Hazelcast will connect to Management Center on address: \n" + managementCenterUrl);
    }

    private void interruptThread(Thread t) {
        if (t != null) {
            t.interrupt();
        }
    }

    /** Marks a client/center version mismatch; worker threads sleep a minute before retrying. */
    public void signalVersionMismatch() {
        versionMismatch = true;
    }

    /** Invokes {@code operation} on the given address, returning the result or a stack trace string. */
    public Object callOnAddress(Address address, Operation operation) {
        //todo: why are we always executing on the mapservice??
        OperationService operationService = instance.node.nodeEngine.getOperationService();
        Future future = operationService.invokeOnTarget(MapService.SERVICE_NAME, operation, address);
        try {
            return future.get();
        } catch (Throwable t) {
            StringWriter s = new StringWriter();
            t.printStackTrace(new PrintWriter(s));
            return s.toString();
        }
    }

    public Object callOnMember(Member member, Operation operation) {
        Address address = ((MemberImpl) member).getAddress();
        return callOnAddress(address, operation);
    }

    /** Fire-and-forget invocation of {@code operation} on {@code address}. */
    public void send(Address address, Operation operation) {
        //todo: clean up needed.
        OperationService operationService = instance.node.nodeEngine.getOperationService();
        operationService.createInvocationBuilder(MapService.SERVICE_NAME, operation, address).invoke();
    }

    public HazelcastInstanceImpl getHazelcastInstance() {
        return instance;
    }

    public ConsoleCommandHandler getCommandHandler() {
        return commandHandler;
    }

    private boolean isRunning() {
        return isRunning.get();
    }

    private void post(HttpURLConnection connection) throws IOException {
        //we need to call 'getResponseCode'. If we don't the data placed in the outputstream, will not be send to the
        //managementcenter. For more information see:
        //http://stackoverflow.com/questions/4844535/why-do-you-have-to-call-urlconnectiongetinputstream-to-be-able-to-write-out-to
        int responseCode = connection.getResponseCode();
        if (responseCode != HTTP_SUCCESS) {
            logger.warning("Failed to send response, responseCode:" + responseCode + " url:" + connection.getURL());
        }
    }

    // Backs off for one minute after a detected version mismatch, then clears the flag.
    private void sleepOnVersionMismatch() throws InterruptedException {
        if (versionMismatch) {
            Thread.sleep(1000 * 60);
            versionMismatch = false;
        }
    }

    /** Periodically serializes the member state and POSTs it to the center's collector.do endpoint. */
    private class StateSendThread extends Thread {
        private final TimedMemberStateFactory timedMemberStateFactory;
        private final int updateIntervalMs;

        private StateSendThread() {
            super(instance.getThreadGroup(), instance.node.getThreadNamePrefix("MC.State.Sender"));
            timedMemberStateFactory = new TimedMemberStateFactory(instance);
            updateIntervalMs = calcUpdateInterval();
        }

        // Configured interval is in seconds; defaults to 5000 ms when unset/non-positive.
        private int calcUpdateInterval() {
            int updateInterval = managementCenterConfig.getUpdateInterval();
            return updateInterval > 0 ? updateInterval * 1000 : 5000;
        }

        @Override
        public void run() {
            try {
                while (isRunning()) {
                    sleepOnVersionMismatch();
                    sendState();
                    sleep();
                }
            } catch (Throwable throwable) {
                inspectOutputMemoryError(throwable);
                logger.warning("Hazelcast Management Center Service will be shutdown due to exception.", throwable);
                shutdown();
            }
        }

        private void sleep() throws InterruptedException {
            Thread.sleep(updateIntervalMs);
        }

        // ConnectException is expected when the center is down, so it is logged quietly.
        private void sendState() throws InterruptedException, MalformedURLException {
            URL url = newCollectorUrl();
            try {
                //todo: does the connection not need to be closed?
                HttpURLConnection connection = openConnection(url);
                OutputStream outputStream = connection.getOutputStream();
                try {
                    identifier.write(outputStream);
                    ObjectDataOutputStream out = serializationService.createObjectDataOutputStream(outputStream);
                    TimedMemberState timedMemberState = timedMemberStateFactory.createTimedMemberState();
                    timedMemberState.writeData(out);
                    outputStream.flush();
                    post(connection);
                } finally {
                    closeResource(outputStream);
                }
            } catch (ConnectException e) {
                if (logger.isFinestEnabled()) {
                    logger.finest(e);
                } else {
                    logger.info("Failed to connect to:" + url);
                }
            } catch (Exception e) {
                logger.warning(e);
            }
        }

        private HttpURLConnection openConnection(URL url) throws IOException {
            if (logger.isFinestEnabled()) {
                logger.finest("Opening collector connection:" + url);
            }
            HttpURLConnection connection = (HttpURLConnection) url.openConnection();
            connection.setDoOutput(true);
            connection.setRequestMethod("POST");
            connection.setConnectTimeout(5000);
            connection.setReadTimeout(5000);
            return connection;
        }

        // Appends clusterid and/or securitytoken query parameters when present.
        private URL newCollectorUrl() throws MalformedURLException {
            String url = cleanupUrl(managementCenterUrl) + "collector.do";
            if (clusterId != null) {
                url += "?clusterid=" + clusterId;
            }
            if (securityToken != null) {
                if (clusterId == null) {
                    url += "?securitytoken=" + securityToken;
                } else {
                    url += "&securitytoken=" + securityToken;
                }
            }
            return new URL(url);
        }
    }

    /** Polls getTask.do for console requests, executes them, and posts responses to putResponse.do. */
    private class TaskPollThread extends Thread {
        // Maps a request-type id to the ConsoleRequest class that handles it.
        private final Map<Integer, Class<? extends ConsoleRequest>> consoleRequests =
                new HashMap<Integer, Class<? extends ConsoleRequest>>();
        private final Random rand = new Random();

        TaskPollThread() {
            super(instance.node.threadGroup, instance.node.getThreadNamePrefix("MC.Task.Poller"));
            register(new RuntimeStateRequest());
            register(new ThreadDumpRequest());
            register(new ExecuteScriptRequest());
            register(new EvictLocalMapRequest());
            register(new ConsoleCommandRequest());
            register(new MapConfigRequest());
            register(new MemberConfigRequest());
            register(new ClusterPropsRequest());
            register(new GetLogsRequest());
            register(new RunGcRequest());
            register(new GetMemberSystemPropertiesRequest());
            register(new GetMapEntryRequest());
            register(new VersionMismatchLogRequest());
            register(new ShutdownMemberRequest());
            register(new GetSystemWarningsRequest());
        }

        public void register(ConsoleRequest consoleRequest) {
            consoleRequests.put(consoleRequest.getType(), consoleRequest.getClass());
        }

        public void processTaskAndPostResponse(int taskId, ConsoleRequest task) {
            try {
                //todo: don't we need to close this connection?
                HttpURLConnection connection = openPostResponseConnection();
                OutputStream outputStream = connection.getOutputStream();
                try {
                    identifier.write(outputStream);
                    ObjectDataOutputStream out = serializationService.createObjectDataOutputStream(outputStream);
                    out.writeInt(taskId);
                    out.writeInt(task.getType());
                    task.writeResponse(ManagementCenterService.this, out);
                    out.flush();
                    post(connection);
                } finally {
                    closeResource(outputStream);
                }
            } catch (Exception e) {
                logger.warning("Failed process task:" + task, e);
            }
        }

        private HttpURLConnection openPostResponseConnection() throws IOException {
            URL url = newPostResponseUrl();
            if (logger.isFinestEnabled()) {
                logger.finest("Opening sendResponse connection:" + url);
            }
            HttpURLConnection connection = (HttpURLConnection) url.openConnection();
            connection.setDoOutput(true);
            connection.setRequestMethod("POST");
            connection.setConnectTimeout(2000);
            connection.setReadTimeout(2000);
            return connection;
        }

        private URL newPostResponseUrl() throws MalformedURLException {
            return new URL(cleanupUrl(managementCenterUrl) + "putResponse.do");
        }

        @Override
        public void run() {
            try {
                while (isRunning()) {
                    sleepOnVersionMismatch();
                    processTask();
                    sleep();
                }
            } catch (Throwable throwable) {
                inspectOutputMemoryError(throwable);
                logger.warning("Problem on Hazelcast Management Center Service while polling for a task.", throwable);
            }
        }

        private void sleep() throws InterruptedException {
            //todo: magic numbers are no good.
            //todo: why the random part
            //todo: we want configurable frequency for task polling
            Thread.sleep(700 + rand.nextInt(300));
        }

        // A taskId <= 0 means "no task pending" and the poll cycle simply returns.
        private void processTask() {
            ObjectDataInputStream inputStream = null;
            try {
                //todo: don't we need to close the connection?
                inputStream = openTaskInputStream();
                int taskId = inputStream.readInt();
                if (taskId <= 0) {
                    return;
                }
                ConsoleRequest task = newTask(inputStream);
                processTaskAndPostResponse(taskId, task);
            } catch (Exception e) {
                //todo: even if there is an internal error with the task, we don't see it. That is kinda shitty
                logger.finest(e);
            } finally {
                IOUtil.closeResource(inputStream);
            }
        }

        private ObjectDataInputStream openTaskInputStream() throws IOException {
            URLConnection connection = openGetTaskConnection();
            InputStream inputStream = connection.getInputStream();
            return serializationService.createObjectDataInputStream(inputStream);
        }

        // Instantiates the registered ConsoleRequest subtype for the wire request-type id.
        private ConsoleRequest newTask(ObjectDataInputStream inputStream)
                throws InstantiationException, IllegalAccessException, IOException {
            int requestType = inputStream.readInt();
            Class<? extends ConsoleRequest> requestClass = consoleRequests.get(requestType);
            if (requestClass == null) {
                throw new RuntimeException("Failed to find a request for requestType:" + requestType);
            }
            ConsoleRequest task = requestClass.newInstance();
            task.readData(inputStream);
            return task;
        }

        private URLConnection openGetTaskConnection() throws IOException {
            URL url = newGetTaskUrl();
            if (logger.isFinestEnabled()) {
                logger.finest("Opening getTask connection:" + url);
            }
            URLConnection connection = url.openConnection();
            //todo: why do we set this property if the connection is not going to be re-used?
            connection.setRequestProperty("Connection", "keep-alive");
            return connection;
        }

        private URL newGetTaskUrl() throws MalformedURLException {
            GroupConfig groupConfig = instance.getConfig().getGroupConfig();
            Address localAddress = ((MemberImpl) instance.node.getClusterService().getLocalMember()).getAddress();
            String urlString = cleanupUrl(managementCenterUrl) + "getTask.do?member=" + localAddress.getHost()
                    + ":" + localAddress.getPort() + "&cluster=" + groupConfig.getName();
            if (clusterId != null) {
                urlString += "&clusterid=" + clusterId;
            }
            if (securityToken != null) {
                urlString += "&securitytoken=" + securityToken;
            }
            return new URL(urlString);
        }
    }

    // Starts the service once the instance reaches STARTED state.
    private class LifecycleListenerImpl implements LifecycleListener {
        @Override
        public void stateChanged(final LifecycleEvent event) {
            if (event.getState() == LifecycleState.STARTED) {
                try {
                    start();
                } catch (Exception e) {
                    logger.severe("ManagementCenterService could not be started!", e);
                }
            }
        }
    }

    // If the URL changed at runtime, the master pushes the new URL to newly joined members.
    public class MemberListenerImpl implements MembershipListener {
        @Override
        public void memberAdded(MembershipEvent membershipEvent) {
            try {
                Member member = membershipEvent.getMember();
                if (member != null && instance.node.isMaster() && urlChanged) {
                    Operation operation = new UpdateManagementCenterUrlOperation(managementCenterUrl);
                    callOnMember(member, operation);
                }
            } catch (Exception e) {
                logger.warning("Web server url cannot be send to the newly joined member", e);
            }
        }

        @Override
        public void memberRemoved(MembershipEvent membershipEvent) {
        }

        @Override
        public void memberAttributeChanged(MemberAttributeEvent memberAttributeEvent) {
        }
    }
}
| 1no label
|
hazelcast_src_main_java_com_hazelcast_management_ManagementCenterService.java
|
190 |
// Per-thread factory: each thread lazily receives its own ThreadLocalRandom instance on first
// access. NOTE(review): the field this expression initializes (presumably a static ThreadLocal)
// lies outside this view — confirm against the full source.
new ThreadLocal<ThreadLocalRandom>() {
    protected ThreadLocalRandom initialValue() {
        return new ThreadLocalRandom();
    }
};
| 0true
|
src_main_java_jsr166y_ThreadLocalRandom.java
|
54 |
/**
 * Text-protocol command for an HTTP GET request. {@link #readFrom(ByteBuffer)} scans the incoming
 * bytes for the blank line that terminates the HTTP header section.
 */
public class HttpGetCommand extends HttpCommand {

    // True while the most recently consumed meaningful character was a newline
    // (carriage returns are transparent), i.e. the next '\n' would form a blank line.
    boolean nextLine;

    public HttpGetCommand(String uri) {
        super(TextCommandType.HTTP_GET, uri);
    }

    /**
     * Consumes bytes from {@code cb} until either the buffer is drained (returns {@code false},
     * state is kept for the next chunk) or an empty line is seen (returns {@code true}).
     */
    public boolean readFrom(ByteBuffer cb) {
        while (cb.hasRemaining()) {
            final char ch = (char) cb.get();
            switch (ch) {
                case '\n':
                    if (nextLine) {
                        return true;
                    }
                    nextLine = true;
                    break;
                case '\r':
                    // CR is ignored so both "\n\n" and "\r\n\r\n" terminate the headers.
                    break;
                default:
                    nextLine = false;
            }
        }
        return false;
    }
}
| 0true
|
hazelcast_src_main_java_com_hazelcast_ascii_rest_HttpGetCommand.java
|
3,431 |
/**
 * Static factory for {@link ResponseHandler} implementations used by the operation system:
 * a shared no-op handler, an error-logging handler, and a remote handler that sends a
 * {@code NormalResponse} back to the caller exactly once.
 */
public final class ResponseHandlerFactory {

    // Shared stateless singleton; safe to reuse across operations.
    private static final NoResponseHandler NO_RESPONSE_HANDLER = new NoResponseHandler();

    public static void setRemoteResponseHandler(NodeEngine nodeEngine, Operation op) {
        op.setResponseHandler(createRemoteResponseHandler(nodeEngine, op));
    }

    /**
     * Returns a remote handler for {@code op}. A call-id of 0 means no caller is waiting,
     * which is only legal for operations that do not return a response.
     */
    public static ResponseHandler createRemoteResponseHandler(NodeEngine nodeEngine, Operation op) {
        if (op.getCallId() == 0) {
            if (op.returnsResponse()) {
                throw new HazelcastException("Op: " + op.getClass().getName() + " can not return response without call-id!");
            }
            return NO_RESPONSE_HANDLER;
        }
        return new RemoteInvocationResponseHandler(nodeEngine, op);
    }

    public static ResponseHandler createEmptyResponseHandler() {
        return NO_RESPONSE_HANDLER;
    }

    // Discards every response.
    private static class NoResponseHandler implements ResponseHandler {
        @Override
        public void sendResponse(final Object obj) {
        }

        @Override
        public boolean isLocal() {
            return false;
        }
    }

    public static ResponseHandler createErrorLoggingResponseHandler(ILogger logger) {
        return new ErrorLoggingResponseHandler(logger);
    }

    // Logs Throwable responses at SEVERE; silently drops everything else.
    private static class ErrorLoggingResponseHandler implements ResponseHandler {
        private final ILogger logger;

        private ErrorLoggingResponseHandler(ILogger logger) {
            this.logger = logger;
        }

        @Override
        public void sendResponse(final Object obj) {
            if (obj instanceof Throwable) {
                Throwable t = (Throwable) obj;
                logger.severe(t);
            }
        }

        @Override
        public boolean isLocal() {
            return true;
        }
    }

    // Ships the response back over the wire to the operation's caller; enforces single-send.
    private static class RemoteInvocationResponseHandler implements ResponseHandler {
        private final NodeEngine nodeEngine;
        private final Operation op;
        // Guards against double responses for the same invocation.
        private final AtomicBoolean sent = new AtomicBoolean(false);

        private RemoteInvocationResponseHandler(NodeEngine nodeEngine, Operation op) {
            this.nodeEngine = nodeEngine;
            this.op = op;
        }

        @Override
        public void sendResponse(Object obj) {
            long callId = op.getCallId();
            Connection conn = op.getConnection();
            if (!sent.compareAndSet(false, true)) {
                throw new ResponseAlreadySentException("NormalResponse already sent for call: " + callId
                        + " to " + conn.getEndPoint() + ", current-response: " + obj);
            }
            NormalResponse response;
            if (!(obj instanceof NormalResponse)) {
                response = new NormalResponse(obj, op.getCallId(), 0, op.isUrgent());
            } else {
                response = (NormalResponse) obj;
            }
            nodeEngine.getOperationService().send(response, op.getCallerAddress());
        }

        @Override
        public boolean isLocal() {
            return false;
        }
    }

    // Utility class; not instantiable.
    private ResponseHandlerFactory() {
    }
}
| 1no label
|
hazelcast_src_main_java_com_hazelcast_spi_impl_ResponseHandlerFactory.java
|
4,276 |
public class SimpleFsTranslogFile implements FsTranslogFile {
private final long id;
private final ShardId shardId;
private final RafReference raf;
private final AtomicInteger operationCounter = new AtomicInteger();
private final AtomicLong lastPosition = new AtomicLong(0);
private final AtomicLong lastWrittenPosition = new AtomicLong(0);
private volatile long lastSyncPosition = 0;
public SimpleFsTranslogFile(ShardId shardId, long id, RafReference raf) throws IOException {
this.shardId = shardId;
this.id = id;
this.raf = raf;
raf.raf().setLength(0);
}
public long id() {
return this.id;
}
public int estimatedNumberOfOperations() {
return operationCounter.get();
}
public long translogSizeInBytes() {
return lastWrittenPosition.get();
}
public Translog.Location add(byte[] data, int from, int size) throws IOException {
long position = lastPosition.getAndAdd(size);
raf.channel().write(ByteBuffer.wrap(data, from, size), position);
lastWrittenPosition.getAndAdd(size);
operationCounter.incrementAndGet();
return new Translog.Location(id, position, size);
}
public byte[] read(Translog.Location location) throws IOException {
ByteBuffer buffer = ByteBuffer.allocate(location.size);
raf.channel().read(buffer, location.translogLocation);
return buffer.array();
}
public void close(boolean delete) {
sync();
raf.decreaseRefCount(delete);
}
/**
* Returns a snapshot on this file, <tt>null</tt> if it failed to snapshot.
*/
public FsChannelSnapshot snapshot() throws TranslogException {
try {
if (!raf.increaseRefCount()) {
return null;
}
return new FsChannelSnapshot(this.id, raf, lastWrittenPosition.get(), operationCounter.get());
} catch (Exception e) {
throw new TranslogException(shardId, "Failed to snapshot", e);
}
}
@Override
public boolean syncNeeded() {
return lastWrittenPosition.get() != lastSyncPosition;
}
public void sync() {
try {
// check if we really need to sync here...
long last = lastWrittenPosition.get();
if (last == lastSyncPosition) {
return;
}
lastSyncPosition = last;
raf.channel().force(false);
} catch (Exception e) {
// ignore
}
}
@Override
public void reuse(FsTranslogFile other) {
// nothing to do there
}
@Override
public void updateBufferSize(int bufferSize) throws TranslogException {
// nothing to do here...
}
}
| 1no label
|
src_main_java_org_elasticsearch_index_translog_fs_SimpleFsTranslogFile.java
|
2,110 |
public class MapProxyImpl<K, V> extends MapProxySupport implements IMap<K, V>, InitializingObject {
/** Delegates construction to {@code MapProxySupport}; no extra state is kept in this proxy. */
public MapProxyImpl(final String name, final MapService mapService, final NodeEngine nodeEngine) {
    super(name, mapService, nodeEngine);
}
/** Returns the deserialized value for {@code k}, or null if absent; rejects null keys. */
@Override
public V get(Object k) {
    if (k == null) {
        throw new NullPointerException(NULL_KEY_IS_NOT_ALLOWED);
    }
    MapService service = getService();
    // Partition-aware serialization routes the lookup to the owning partition.
    Data key = service.toData(k, partitionStrategy);
    return (V) service.toObject(getInternal(key));
}
/** Puts with no explicit TTL (delegates to the TTL overload with -1). */
@Override
public V put(final K k, final V v) {
    return put(k, v, -1, null);
}
/** Puts {@code k -> v} with the given TTL; returns the previous value (deserialized). */
@Override
public V put(final K k, final V v, final long ttl, final TimeUnit timeunit) {
    if (k == null) {
        throw new NullPointerException(NULL_KEY_IS_NOT_ALLOWED);
    }
    if (v == null) {
        throw new NullPointerException(NULL_VALUE_IS_NOT_ALLOWED);
    }
    MapService service = getService();
    final Data key = service.toData(k, partitionStrategy);
    final Data value = service.toData(v);
    final Data result = putInternal(key, value, ttl, timeunit);
    return (V) service.toObject(result);
}
/** Attempts the put within the given timeout; returns whether it succeeded. */
@Override
public boolean tryPut(final K k, final V v, final long timeout, final TimeUnit timeunit) {
    if (k == null) {
        throw new NullPointerException(NULL_KEY_IS_NOT_ALLOWED);
    }
    if (v == null) {
        throw new NullPointerException(NULL_VALUE_IS_NOT_ALLOWED);
    }
    MapService service = getService();
    final Data key = service.toData(k, partitionStrategy);
    final Data value = service.toData(v);
    return tryPutInternal(key, value, timeout, timeunit);
}
/** putIfAbsent with no explicit TTL (delegates with -1). */
@Override
public V putIfAbsent(final K k, final V v) {
    return putIfAbsent(k, v, -1, null);
}
/** Stores {@code v} only if no value is mapped; returns the existing value (deserialized) or null. */
@Override
public V putIfAbsent(final K k, final V v, final long ttl, final TimeUnit timeunit) {
    if (k == null) {
        throw new NullPointerException(NULL_KEY_IS_NOT_ALLOWED);
    }
    if (v == null) {
        throw new NullPointerException(NULL_VALUE_IS_NOT_ALLOWED);
    }
    MapService service = getService();
    final Data key = service.toData(k, partitionStrategy);
    final Data value = service.toData(v);
    final Data result = putIfAbsentInternal(key, value, ttl, timeunit);
    return (V) service.toObject(result);
}
/** Transient put (delegated to putTransientInternal); no value is returned. */
@Override
public void putTransient(final K k, final V v, final long ttl, final TimeUnit timeunit) {
    if (k == null) {
        throw new NullPointerException(NULL_KEY_IS_NOT_ALLOWED);
    }
    if (v == null) {
        throw new NullPointerException(NULL_VALUE_IS_NOT_ALLOWED);
    }
    MapService service = getService();
    final Data key = service.toData(k, partitionStrategy);
    final Data value = service.toData(v);
    putTransientInternal(key, value, ttl, timeunit);
}
/** Compare-and-replace: swaps {@code o} for {@code v} only if {@code o} is currently mapped. */
@Override
public boolean replace(final K k, final V o, final V v) {
    if (k == null) {
        throw new NullPointerException(NULL_KEY_IS_NOT_ALLOWED);
    }
    if (o == null) {
        throw new NullPointerException(NULL_VALUE_IS_NOT_ALLOWED);
    }
    if (v == null) {
        throw new NullPointerException(NULL_VALUE_IS_NOT_ALLOWED);
    }
    MapService service = getService();
    final Data key = service.toData(k, partitionStrategy);
    final Data oldValue = service.toData(o);
    final Data value = service.toData(v);
    return replaceInternal(key, oldValue, value);
}
/** Replaces the mapping only if one exists; returns the previous value (deserialized) or null. */
@Override
public V replace(final K k, final V v) {
    if (k == null) {
        throw new NullPointerException(NULL_KEY_IS_NOT_ALLOWED);
    }
    if (v == null) {
        throw new NullPointerException(NULL_VALUE_IS_NOT_ALLOWED);
    }
    MapService service = getService();
    final Data key = service.toData(k, partitionStrategy);
    final Data value = service.toData(v);
    return (V) service.toObject(replaceInternal(key, value));
}
/** Sets the value with no explicit TTL; unlike {@code put}, no previous value is returned. */
@Override
public void set(K key, V value) {
    set(key, value, -1, TimeUnit.MILLISECONDS);
}
/** Sets {@code k -> v} with the given TTL via setInternal; void to avoid the old-value round trip. */
@Override
public void set(final K k, final V v, final long ttl, final TimeUnit timeunit) {
    if (k == null) {
        throw new NullPointerException(NULL_KEY_IS_NOT_ALLOWED);
    }
    if (v == null) {
        throw new NullPointerException(NULL_VALUE_IS_NOT_ALLOWED);
    }
    MapService service = getService();
    final Data key = service.toData(k, partitionStrategy);
    final Data value = service.toData(v);
    setInternal(key, value, ttl, timeunit);
}
/** Removes the mapping for {@code k}; returns the removed value (deserialized) or null. */
@Override
public V remove(Object k) {
    if (k == null) {
        throw new NullPointerException(NULL_KEY_IS_NOT_ALLOWED);
    }
    MapService service = getService();
    final Data key = service.toData(k, partitionStrategy);
    final Data result = removeInternal(key);
    return (V) service.toObject(result);
}
/** Conditional remove: removes only if {@code k} is currently mapped to {@code v}. */
@Override
public boolean remove(final Object k, final Object v) {
    if (k == null) {
        throw new NullPointerException(NULL_KEY_IS_NOT_ALLOWED);
    }
    if (v == null) {
        throw new NullPointerException(NULL_VALUE_IS_NOT_ALLOWED);
    }
    MapService service = getService();
    final Data key = service.toData(k, partitionStrategy);
    final Data value = service.toData(v);
    return removeInternal(key, value);
}
/** Removes the mapping without returning the old value (cheaper than {@code remove}). */
@Override
public void delete(Object k) {
    if (k == null) {
        throw new NullPointerException(NULL_KEY_IS_NOT_ALLOWED);
    }
    final Data key = getService().toData(k, partitionStrategy);
    deleteInternal(key);
}
/** Returns whether a mapping exists for {@code k}; rejects null keys. */
@Override
public boolean containsKey(Object k) {
    if (k == null) {
        throw new NullPointerException(NULL_KEY_IS_NOT_ALLOWED);
    }
    Data key = getService().toData(k, partitionStrategy);
    return containsKeyInternal(key);
}
/** Returns whether any entry maps to {@code v}; rejects null values. */
@Override
public boolean containsValue(final Object v) {
    if (v == null) {
        throw new NullPointerException(NULL_VALUE_IS_NOT_ALLOWED);
    }
    Data value = getService().toData(v);
    return containsValueInternal(value);
}
/** Acquires the distributed lock for {@code key} (blocking, no lease). */
@Override
public void lock(final K key) {
    if (key == null) {
        throw new NullPointerException(NULL_KEY_IS_NOT_ALLOWED);
    }
    NodeEngine nodeEngine = getNodeEngine();
    Data k = getService().toData(key, partitionStrategy);
    lockSupport.lock(nodeEngine, k);
}
/** Acquires the lock for {@code key} with an auto-release lease; leaseTime must be positive. */
@Override
public void lock(final Object key, final long leaseTime, final TimeUnit timeUnit) {
    if (key == null) {
        throw new NullPointerException(NULL_KEY_IS_NOT_ALLOWED);
    }
    shouldBePositive(leaseTime, "leaseTime");
    Data k = getService().toData(key, partitionStrategy);
    lockSupport.lock(getNodeEngine(), k, timeUnit.toMillis(leaseTime));
}
/** Releases the distributed lock for {@code key}. */
@Override
public void unlock(final K key) {
    if (key == null) {
        throw new NullPointerException(NULL_KEY_IS_NOT_ALLOWED);
    }
    NodeEngine nodeEngine = getNodeEngine();
    Data k = getService().toData(key, partitionStrategy);
    lockSupport.unlock(nodeEngine, k);
}
/** Attempts removal within the timeout; returns whether the entry was removed. */
@Override
public boolean tryRemove(final K key, final long timeout, final TimeUnit timeunit) {
    if (key == null) {
        throw new NullPointerException(NULL_KEY_IS_NOT_ALLOWED);
    }
    Data k = getService().toData(key, partitionStrategy);
    return tryRemoveInternal(k, timeout, timeunit);
}
/** Asynchronous get; the returned future deserializes the result lazily on retrieval. */
@Override
public Future<V> getAsync(final K k) {
    if (k == null) {
        throw new NullPointerException(NULL_KEY_IS_NOT_ALLOWED);
    }
    Data key = getService().toData(k, partitionStrategy);
    NodeEngine nodeEngine = getNodeEngine();
    return new DelegatingFuture<V>(getAsyncInternal(key), nodeEngine.getSerializationService());
}
/** Returns whether {@code k} is currently locked (by any caller). */
@Override
public boolean isLocked(final K k) {
    if (k == null) {
        throw new NullPointerException(NULL_KEY_IS_NOT_ALLOWED);
    }
    Data key = getService().toData(k, partitionStrategy);
    NodeEngine nodeEngine = getNodeEngine();
    return lockSupport.isLocked(nodeEngine, key);
}
/** Asynchronous put with no explicit TTL (delegates with -1). */
@Override
public Future putAsync(final K key, final V value) {
    return putAsync(key, value, -1, null);
}
/** Asynchronous put with TTL; the future yields the previous value, deserialized lazily. */
@Override
public ICompletableFuture putAsync(final K key, final V value, final long ttl, final TimeUnit timeunit) {
    if (key == null) {
        throw new NullPointerException(NULL_KEY_IS_NOT_ALLOWED);
    }
    if (value == null) {
        throw new NullPointerException(NULL_VALUE_IS_NOT_ALLOWED);
    }
    MapService service = getService();
    Data k = service.toData(key, partitionStrategy);
    Data v = service.toData(value);
    return new DelegatingFuture<V>(putAsyncInternal(k, v, ttl, timeunit),
            getNodeEngine().getSerializationService());
}
/**
 * Asynchronously removes the entry for the given key; the returned future
 * yields the previous (deserialized) value.
 *
 * @throws NullPointerException if {@code key} is null
 */
@Override
public ICompletableFuture removeAsync(final K key) {
    if (key == null) {
        throw new NullPointerException(NULL_KEY_IS_NOT_ALLOWED);
    }
    final Data keyData = getService().toData(key, partitionStrategy);
    return new DelegatingFuture<V>(removeAsyncInternal(keyData), getNodeEngine().getSerializationService());
}
/**
 * Bulk-fetches the entries for the given keys.
 * Each key is serialized with the partitioning strategy before the internal
 * bulk fetch is issued.
 *
 * @throws NullPointerException if {@code keys} or any contained key is null
 */
@Override
public Map<K, V> getAll(final Set<K> keys) {
    // Explicit guard: the previous code NPE'd on keys.size() with no message.
    if (keys == null) {
        throw new NullPointerException(NULL_KEY_IS_NOT_ALLOWED);
    }
    MapService service = getService();
    // Parameterized (the original used a raw 'new HashSet(...)') and presized.
    Set<Data> ks = new HashSet<Data>(keys.size());
    for (K key : keys) {
        if (key == null) {
            throw new NullPointerException(NULL_KEY_IS_NOT_ALLOWED);
        }
        ks.add(service.toData(key, partitionStrategy));
    }
    return (Map<K, V>) getAllObjectInternal(ks);
}
/**
 * Copies all entries of the given map into this map.
 * Null-key/null-value validation is deferred to putAllInternal().
 */
@Override
public void putAll(final Map<? extends K, ? extends V> m) {
    // Note, putAllInternal() will take care of the null key/value checks.
    putAllInternal(m);
}
/**
 * Attempts to acquire the lock for the given key without waiting.
 *
 * @return true if the lock was acquired
 * @throws NullPointerException if {@code key} is null
 */
@Override
public boolean tryLock(final K key) {
    if (key == null) {
        throw new NullPointerException(NULL_KEY_IS_NOT_ALLOWED);
    }
    final Data keyData = getService().toData(key, partitionStrategy);
    return lockSupport.tryLock(getNodeEngine(), keyData);
}
/**
 * Attempts to acquire the lock for the given key, waiting up to the given time.
 *
 * @return true if the lock was acquired within the wait time
 * @throws NullPointerException if {@code key} is null
 * @throws InterruptedException if the waiting thread is interrupted
 */
@Override
public boolean tryLock(final K key, final long time, final TimeUnit timeunit) throws InterruptedException {
    if (key == null) {
        throw new NullPointerException(NULL_KEY_IS_NOT_ALLOWED);
    }
    final Data keyData = getService().toData(key, partitionStrategy);
    return lockSupport.tryLock(getNodeEngine(), keyData, time, timeunit);
}
/**
 * Forcibly releases the lock on the given key regardless of the current owner.
 *
 * @throws NullPointerException if {@code key} is null
 */
@Override
public void forceUnlock(final K key) {
    if (key == null) {
        throw new NullPointerException(NULL_KEY_IS_NOT_ALLOWED);
    }
    final Data keyData = getService().toData(key, partitionStrategy);
    lockSupport.forceUnlock(getNodeEngine(), keyData);
}
/**
 * Registers a map interceptor and returns its registration id, which can later
 * be passed to removeInterceptor(String).
 *
 * @throws NullPointerException if {@code interceptor} is null
 */
@Override
public String addInterceptor(MapInterceptor interceptor) {
    if (interceptor == null) {
        throw new NullPointerException("Interceptor should not be null!");
    }
    return addMapInterceptorInternal(interceptor);
}
/**
 * Removes a previously registered map interceptor by its registration id.
 *
 * @throws NullPointerException if {@code id} is null
 */
@Override
public void removeInterceptor(String id) {
    if (id == null) {
        throw new NullPointerException("Interceptor id should not be null!");
    }
    removeMapInterceptorInternal(id);
}
/**
 * Registers a listener for locally-owned entries matching the given predicate.
 * A null key is passed internally, meaning "all keys".
 *
 * @throws NullPointerException if {@code listener} or {@code predicate} is null
 */
@Override
public String addLocalEntryListener(EntryListener<K, V> listener, Predicate<K, V> predicate, boolean includeValue) {
    if (listener == null) {
        throw new NullPointerException("Listener should not be null!");
    }
    if (predicate == null) {
        throw new NullPointerException("Predicate should not be null!");
    }
    return addLocalEntryListenerInternal(listener, predicate, null, includeValue);
}
/**
 * Registers a listener for a specific locally-owned key, filtered by the
 * given predicate.
 *
 * @throws NullPointerException if {@code listener} or {@code predicate} is null
 */
@Override
public String addLocalEntryListener(EntryListener<K, V> listener, Predicate<K, V> predicate, K key, boolean includeValue) {
    if (listener == null) {
        throw new NullPointerException("Listener should not be null!");
    }
    if (predicate == null) {
        throw new NullPointerException("Predicate should not be null!");
    }
    final Data keyData = getService().toData(key, partitionStrategy);
    return addLocalEntryListenerInternal(listener, predicate, keyData, includeValue);
}
/**
 * Registers an entry listener for all keys of the map.
 *
 * @throws NullPointerException if {@code listener} is null
 */
@Override
public String addEntryListener(final EntryListener listener, final boolean includeValue) {
    if (listener == null) {
        throw new NullPointerException("Listener should not be null!");
    }
    return addEntryListenerInternal(listener, null, includeValue);
}
/**
 * Registers an entry listener for a specific key.
 *
 * @throws NullPointerException if {@code listener} is null
 */
@Override
public String addEntryListener(final EntryListener<K, V> listener, final K key, final boolean includeValue) {
    if (listener == null) {
        throw new NullPointerException("Listener should not be null!");
    }
    final Data keyData = getService().toData(key, partitionStrategy);
    return addEntryListenerInternal(listener, keyData, includeValue);
}
/**
 * Registers an entry listener for a specific key, filtered by the given predicate.
 *
 * @throws NullPointerException if {@code listener} or {@code predicate} is null
 */
@Override
public String addEntryListener(
        EntryListener<K, V> listener, Predicate<K, V> predicate, K key, boolean includeValue) {
    if (listener == null) {
        throw new NullPointerException("Listener should not be null!");
    }
    if (predicate == null) {
        throw new NullPointerException("Predicate should not be null!");
    }
    final Data keyData = getService().toData(key, partitionStrategy);
    return addEntryListenerInternal(listener, predicate, keyData, includeValue);
}
/**
 * Registers an entry listener for all keys, filtered by the given predicate.
 * A null key is passed internally, meaning "all keys".
 *
 * @throws NullPointerException if {@code listener} or {@code predicate} is null
 */
@Override
public String addEntryListener(EntryListener<K, V> listener, Predicate<K, V> predicate, boolean includeValue) {
    if (listener == null) {
        throw new NullPointerException("Listener should not be null!");
    }
    if (predicate == null) {
        throw new NullPointerException("Predicate should not be null!");
    }
    return addEntryListenerInternal(listener, predicate, null, includeValue);
}
/**
 * Removes a previously registered entry listener by its registration id.
 *
 * @return true if a listener with the given id existed and was removed
 * @throws NullPointerException if {@code id} is null
 */
@Override
public boolean removeEntryListener(String id) {
    if (id == null) {
        throw new NullPointerException("Listener id should not be null!");
    }
    return removeEntryListenerInternal(id);
}
/**
 * Returns an entry view (value plus metadata) for the given key, or null when
 * no entry exists.
 *
 * @throws NullPointerException if {@code key} is null
 */
@Override
public EntryView<K, V> getEntryView(K key) {
    if (key == null) {
        throw new NullPointerException(NULL_KEY_IS_NOT_ALLOWED);
    }
    SimpleEntryView<K, V> entryViewInternal =
            (SimpleEntryView) getEntryViewInternal(getService().toData(key, partitionStrategy));
    if (entryViewInternal == null) {
        return null;
    }
    // The internal view carries the serialized value; swap in the caller's key
    // object and the deserialized value before handing the view back.
    Data value = (Data) entryViewInternal.getValue();
    entryViewInternal.setKey(key);
    entryViewInternal.setValue((V) getService().toObject(value));
    return entryViewInternal;
}
/**
 * Evicts the entry for the given key from the map (does not fire a remove).
 *
 * @return true if an entry was evicted
 * @throws NullPointerException if {@code key} is null
 */
@Override
public boolean evict(final Object key) {
    if (key == null) {
        throw new NullPointerException(NULL_KEY_IS_NOT_ALLOWED);
    }
    final Data keyData = getService().toData(key, partitionStrategy);
    return evictInternal(keyData);
}
/**
 * Removes all entries from the map via the internal cluster-wide clear.
 */
@Override
public void clear() {
    clearInternal();
}
/**
 * Returns a snapshot of all keys in the map, deserialized to their object form.
 * The returned set is a detached copy; mutations do not affect the map.
 */
@Override
public Set<K> keySet() {
    Set<Data> dataSet = keySetInternal();
    // Hoist the service lookup out of the loop and presize the result set
    // (mirrors localKeySet(); the original re-looked-up the service per key
    // and let the HashSet grow incrementally).
    MapService service = getService();
    HashSet<K> keySet = new HashSet<K>(dataSet.size());
    for (Data data : dataSet) {
        keySet.add((K) service.toObject(data));
    }
    return keySet;
}
/**
 * Returns a snapshot of all values in the map, deserialized to their object form.
 * The returned collection is a detached copy; mutations do not affect the map.
 */
@Override
public Collection<V> values() {
    Collection<Data> dataSet = valuesInternal();
    // Hoist the service lookup and presize the list (the original re-looked-up
    // the service per value and let the ArrayList grow incrementally).
    MapService service = getService();
    Collection<V> valueSet = new ArrayList<V>(dataSet.size());
    for (Data data : dataSet) {
        valueSet.add((V) service.toObject(data));
    }
    return valueSet;
}
/**
 * Returns a snapshot of all entries in the map as immutable key/value pairs,
 * deserialized to their object form.
 */
@Override
public Set entrySet() {
    Set<Entry<Data, Data>> entries = entrySetInternal();
    // Hoist the service lookup and presize the set (the original re-looked-up
    // the service twice per entry and let the HashSet grow incrementally).
    MapService service = getService();
    Set<Entry<K, V>> resultSet = new HashSet<Entry<K, V>>(entries.size());
    for (Entry<Data, Data> entry : entries) {
        resultSet.add(new AbstractMap.SimpleImmutableEntry((K) service.toObject(entry.getKey()),
                (V) service.toObject(entry.getValue())));
    }
    return resultSet;
}
/**
 * Returns the keys of all entries matching the given predicate.
 *
 * @throws NullPointerException if {@code predicate} is null
 */
@Override
public Set<K> keySet(final Predicate predicate) {
    if (predicate == null) {
        throw new NullPointerException("Predicate should not be null!");
    }
    return query(predicate, IterationType.KEY, false);
}
/**
 * Returns the entries matching the given predicate.
 *
 * @throws NullPointerException if {@code predicate} is null
 */
@Override
public Set entrySet(final Predicate predicate) {
    if (predicate == null) {
        throw new NullPointerException("Predicate should not be null!");
    }
    return query(predicate, IterationType.ENTRY, false);
}
/**
 * Returns the values of all entries matching the given predicate.
 *
 * @throws NullPointerException if {@code predicate} is null
 */
@Override
public Collection<V> values(final Predicate predicate) {
    if (predicate == null) {
        throw new NullPointerException("Predicate should not be null!");
    }
    return query(predicate, IterationType.VALUE, false);
}
/**
 * Returns the keys of the entries owned by this member, deserialized to their
 * object form.
 */
@Override
public Set<K> localKeySet() {
    final Set<Data> dataKeys = localKeySetInternal();
    final Set<K> result = new HashSet<K>(dataKeys.size());
    for (Data data : dataKeys) {
        result.add((K) getService().toObject(data));
    }
    return result;
}
/**
 * Returns the locally-owned keys matching the given predicate.
 *
 * @throws NullPointerException if {@code predicate} is null
 */
@Override
public Set<K> localKeySet(final Predicate predicate) {
    if (predicate == null) {
        throw new NullPointerException("Predicate should not be null!");
    }
    return queryLocal(predicate, IterationType.KEY, false);
}
/**
 * Runs the given entry processor on the entry for the given key and returns
 * the deserialized result.
 *
 * @throws NullPointerException if {@code key} is null
 */
@Override
public Object executeOnKey(K key, EntryProcessor entryProcessor) {
    if (key == null) {
        throw new NullPointerException(NULL_KEY_IS_NOT_ALLOWED);
    }
    final MapService service = getService();
    final Data keyData = service.toData(key, partitionStrategy);
    return service.toObject(executeOnKeyInternal(keyData, entryProcessor));
}
/**
 * Runs the given entry processor on each of the given keys and returns the
 * per-key results.
 *
 * @throws NullPointerException if {@code keys} is null or empty
 */
@Override
public Map<K, Object> executeOnKeys(Set<K> keys, EntryProcessor entryProcessor) {
    // NOTE(review): an empty key set is rejected with the same NPE as a null
    // one; an IllegalArgumentException might be clearer — confirm before changing.
    if (keys == null || keys.size() == 0) {
        throw new NullPointerException(NULL_KEY_IS_NOT_ALLOWED);
    }
    final MapService service = getService();
    final Set<Data> dataKeys = new HashSet<Data>(keys.size());
    for (K key : keys) {
        dataKeys.add(service.toData(key, partitionStrategy));
    }
    return executeOnKeysInternal(dataKeys, entryProcessor);
}
/**
 * Asynchronously runs the given entry processor on the entry for the given key,
 * notifying the supplied callback with the result.
 *
 * @throws NullPointerException if {@code key} is null
 */
@Override
public void submitToKey(K key, EntryProcessor entryProcessor, ExecutionCallback callback) {
    if (key == null) {
        throw new NullPointerException(NULL_KEY_IS_NOT_ALLOWED);
    }
    final Data keyData = getService().toData(key, partitionStrategy);
    executeOnKeyInternal(keyData, entryProcessor, callback);
}
/**
 * Asynchronously runs the given entry processor on the entry for the given key;
 * the returned future deserializes the result on access.
 *
 * @throws NullPointerException if {@code key} is null
 */
@Override
public ICompletableFuture submitToKey(K key, EntryProcessor entryProcessor) {
    if (key == null) {
        throw new NullPointerException(NULL_KEY_IS_NOT_ALLOWED);
    }
    final MapService service = getService();
    final Data keyData = service.toData(key, partitionStrategy);
    final ICompletableFuture future = executeOnKeyInternal(keyData, entryProcessor, null);
    return new DelegatingFuture(future, service.getSerializationService());
}
/**
 * Invokes the given operation on the owner of the given partition, blocks for
 * the response, and returns it deserialized — rethrowing any Throwable the
 * remote side produced instead of returning it.
 */
protected Object invoke(Operation operation, int partitionId) throws Throwable {
    final NodeEngine nodeEngine = getNodeEngine();
    final Future future = nodeEngine.getOperationService()
            .invokeOnPartition(SERVICE_NAME, operation, partitionId);
    final Object result = getService().toObject(future.get());
    if (result instanceof Throwable) {
        throw (Throwable) result;
    }
    return result;
}
/**
 * Returns a short textual description of this proxy: {@code IMap{name='...'}}.
 */
@Override
public String toString() {
    // Plain concatenation produces the same "IMap{name='...'}" string the
    // previous StringBuilder version built.
    return "IMap{name='" + name + "'}";
}
}
| 1no label
|
hazelcast_src_main_java_com_hazelcast_map_proxy_MapProxyImpl.java
|
612 |
new OIndexEngine.ValuesResultListener() {
@Override
public boolean addResult(OIdentifiable identifiable) {
return resultListener.addResult(identifiable);
}
});
| 1no label
|
core_src_main_java_com_orientechnologies_orient_core_index_OIndexMultiValues.java
|
4,126 |
/**
 * Per-shard search statistics service: tracks query/fetch phase timings and
 * in-flight counts, both in aggregate and per stats group, plus the number of
 * open search contexts, and forwards phase timings to the slow-log service.
 */
public class ShardSearchService extends AbstractIndexShardComponent {
    private final ShardSlowLogSearchService slowLogSearchService;
    // Aggregate stats across all groups.
    private final StatsHolder totalStats = new StatsHolder();
    // Number of currently open search contexts on this shard.
    private final CounterMetric openContexts = new CounterMetric();
    // Copy-on-write map of per-group stats: replaced wholesale under the "this"
    // lock, read lock-free (hence volatile + immutable snapshot).
    private volatile Map<String, StatsHolder> groupsStats = ImmutableMap.of();
    @Inject
    public ShardSearchService(ShardId shardId, @IndexSettings Settings indexSettings, ShardSlowLogSearchService slowLogSearchService) {
        super(shardId, indexSettings);
        this.slowLogSearchService = slowLogSearchService;
    }
    /**
     * Returns the stats, including group specific stats. If the groups are null/0 length, then nothing
     * is returned for them. If they are set, then only groups provided will be returned, or
     * <tt>_all</tt> for all groups.
     */
    public SearchStats stats(String... groups) {
        SearchStats.Stats total = totalStats.stats();
        Map<String, SearchStats.Stats> groupsSt = null;
        if (groups != null && groups.length > 0) {
            if (groups.length == 1 && groups[0].equals("_all")) {
                // "_all": snapshot every known group.
                groupsSt = new HashMap<String, SearchStats.Stats>(groupsStats.size());
                for (Map.Entry<String, StatsHolder> entry : groupsStats.entrySet()) {
                    groupsSt.put(entry.getKey(), entry.getValue().stats());
                }
            } else {
                // Only the requested groups; unknown names are silently skipped.
                groupsSt = new HashMap<String, SearchStats.Stats>(groups.length);
                for (String group : groups) {
                    StatsHolder statsHolder = groupsStats.get(group);
                    if (statsHolder != null) {
                        groupsSt.put(group, statsHolder.stats());
                    }
                }
            }
        }
        return new SearchStats(total, openContexts.count(), groupsSt);
    }
    // Query phase lifecycle: "pre" bumps the in-flight gauge, "failed" undoes
    // it, and the completion callback records the elapsed time.
    public void onPreQueryPhase(SearchContext searchContext) {
        totalStats.queryCurrent.inc();
        if (searchContext.groupStats() != null) {
            for (int i = 0; i < searchContext.groupStats().size(); i++) {
                groupStats(searchContext.groupStats().get(i)).queryCurrent.inc();
            }
        }
    }
    public void onFailedQueryPhase(SearchContext searchContext) {
        totalStats.queryCurrent.dec();
        if (searchContext.groupStats() != null) {
            for (int i = 0; i < searchContext.groupStats().size(); i++) {
                groupStats(searchContext.groupStats().get(i)).queryCurrent.dec();
            }
        }
    }
    public void onQueryPhase(SearchContext searchContext, long tookInNanos) {
        totalStats.queryMetric.inc(tookInNanos);
        totalStats.queryCurrent.dec();
        if (searchContext.groupStats() != null) {
            for (int i = 0; i < searchContext.groupStats().size(); i++) {
                StatsHolder statsHolder = groupStats(searchContext.groupStats().get(i));
                statsHolder.queryMetric.inc(tookInNanos);
                statsHolder.queryCurrent.dec();
            }
        }
        slowLogSearchService.onQueryPhase(searchContext, tookInNanos);
    }
    // Fetch phase lifecycle: mirrors the query phase callbacks above.
    public void onPreFetchPhase(SearchContext searchContext) {
        totalStats.fetchCurrent.inc();
        if (searchContext.groupStats() != null) {
            for (int i = 0; i < searchContext.groupStats().size(); i++) {
                groupStats(searchContext.groupStats().get(i)).fetchCurrent.inc();
            }
        }
    }
    public void onFailedFetchPhase(SearchContext searchContext) {
        totalStats.fetchCurrent.dec();
        if (searchContext.groupStats() != null) {
            for (int i = 0; i < searchContext.groupStats().size(); i++) {
                groupStats(searchContext.groupStats().get(i)).fetchCurrent.dec();
            }
        }
    }
    public void onFetchPhase(SearchContext searchContext, long tookInNanos) {
        totalStats.fetchMetric.inc(tookInNanos);
        totalStats.fetchCurrent.dec();
        if (searchContext.groupStats() != null) {
            for (int i = 0; i < searchContext.groupStats().size(); i++) {
                StatsHolder statsHolder = groupStats(searchContext.groupStats().get(i));
                statsHolder.fetchMetric.inc(tookInNanos);
                statsHolder.fetchCurrent.dec();
            }
        }
        slowLogSearchService.onFetchPhase(searchContext, tookInNanos);
    }
    public void clear() {
        totalStats.clear();
        synchronized (this) {
            if (!groupsStats.isEmpty()) {
                // Keep only groups with in-flight phases (their gauges must
                // stay alive so the matching "on...Phase" dec() balances out);
                // everything else is dropped from the published snapshot.
                MapBuilder<String, StatsHolder> typesStatsBuilder = MapBuilder.newMapBuilder();
                for (Map.Entry<String, StatsHolder> typeStats : groupsStats.entrySet()) {
                    if (typeStats.getValue().totalCurrent() > 0) {
                        typeStats.getValue().clear();
                        typesStatsBuilder.put(typeStats.getKey(), typeStats.getValue());
                    }
                }
                groupsStats = typesStatsBuilder.immutableMap();
            }
        }
    }
    // Double-checked lookup: fast path reads the volatile map without locking;
    // on miss, recheck under the lock and publish an extended immutable copy.
    private StatsHolder groupStats(String group) {
        StatsHolder stats = groupsStats.get(group);
        if (stats == null) {
            synchronized (this) {
                stats = groupsStats.get(group);
                if (stats == null) {
                    stats = new StatsHolder();
                    groupsStats = MapBuilder.newMapBuilder(groupsStats).put(group, stats).immutableMap();
                }
            }
        }
        return stats;
    }
    public void onNewContext(SearchContext context) {
        openContexts.inc();
    }
    public void onFreeContext(SearchContext context) {
        openContexts.dec();
    }
    // Mutable accumulator for one stats group (or the shard-wide total).
    static class StatsHolder {
        public final MeanMetric queryMetric = new MeanMetric();
        public final MeanMetric fetchMetric = new MeanMetric();
        public final CounterMetric queryCurrent = new CounterMetric();
        public final CounterMetric fetchCurrent = new CounterMetric();
        public SearchStats.Stats stats() {
            return new SearchStats.Stats(
                    queryMetric.count(), TimeUnit.NANOSECONDS.toMillis(queryMetric.sum()), queryCurrent.count(),
                    fetchMetric.count(), TimeUnit.NANOSECONDS.toMillis(fetchMetric.sum()), fetchCurrent.count());
        }
        public long totalCurrent() {
            return queryCurrent.count() + fetchCurrent.count();
        }
        // Resets timings only; the "current" gauges are preserved so in-flight
        // phases can still decrement them.
        public void clear() {
            queryMetric.clear();
            fetchMetric.clear();
        }
    }
}
| 1no label
|
src_main_java_org_elasticsearch_index_search_stats_ShardSearchService.java
|
1,833 |
@Component("blMatchesFieldValidator")
public class MatchesFieldValidator extends ValidationConfigurationBasedPropertyValidator {

    /**
     * Validates that {@code value} equals the submitted value of a companion
     * property on the same entity. The companion property's name is read from
     * the "otherField" entry of the validation configuration.
     *
     * @return true if the two values are equal (null-safe via StringUtils)
     */
    @Override
    public boolean validateInternal(Entity entity,
                                    Serializable instance,
                                    Map<String, FieldMetadata> entityFieldMetadata,
                                    Map<String, String> validationConfiguration,
                                    BasicFieldMetadata propertyMetadata,
                                    String propertyName,
                                    String value) {
        String otherField = validationConfiguration.get("otherField");
        // Guard against a misconfigured or absent companion property: the
        // previous implementation threw a NullPointerException when
        // "otherField" was not present in the entity's property map.
        if (entity.getPMap().get(otherField) == null) {
            return false;
        }
        return StringUtils.equals(entity.getPMap().get(otherField).getValue(), value);
    }
}
| 1no label
|
admin_broadleaf-open-admin-platform_src_main_java_org_broadleafcommerce_openadmin_server_service_persistence_validation_MatchesFieldValidator.java
|
597 |
/**
 * An extensible type-safe enumeration of service statuses (UP, DOWN, PAUSED).
 * Instances self-register in a static registry keyed by their type string,
 * so additional statuses can be declared by simply constructing them.
 */
public class ServiceStatusType implements Serializable, BroadleafEnumerationType {
    private static final long serialVersionUID = 1L;
    // Registry of all instances keyed by type string; populated from setType()
    // during construction. LinkedHashMap preserves declaration order.
    private static final Map<String, ServiceStatusType> TYPES = new LinkedHashMap<String, ServiceStatusType>();
    public static final ServiceStatusType UP = new ServiceStatusType("UP", "Up");
    public static final ServiceStatusType DOWN = new ServiceStatusType("DOWN", "Down");
    public static final ServiceStatusType PAUSED = new ServiceStatusType("PAUSED", "Paused");
    // Looks up an instance by its type string, or null if unknown.
    public static ServiceStatusType getInstance(final String type) {
        return TYPES.get(type);
    }
    private String type;
    private String friendlyType;
    public ServiceStatusType() {
        //do nothing
    }
    public ServiceStatusType(final String type, final String friendlyType) {
        this.friendlyType = friendlyType;
        setType(type);
    }
    public String getType() {
        return type;
    }
    public String getFriendlyType() {
        return friendlyType;
    }
    // Registers this instance under its type string; duplicate registration is
    // treated as a programming error.
    private void setType(final String type) {
        this.type = type;
        if (!TYPES.containsKey(type)){
            TYPES.put(type, this);
        } else {
            throw new RuntimeException("Cannot add the type: (" + type + "). It already exists as a type via " + getInstance(type).getClass().getName());
        }
    }
    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + ((type == null) ? 0 : type.hashCode());
        return result;
    }
    // Equality is based solely on the type string (exact-class comparison).
    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        ServiceStatusType other = (ServiceStatusType) obj;
        if (type == null) {
            if (other.type != null)
                return false;
        } else if (!type.equals(other.type))
            return false;
        return true;
    }
}
| 1no label
|
common_src_main_java_org_broadleafcommerce_common_vendor_service_type_ServiceStatusType.java
|
675 |
/**
 * Request to retrieve index warmers, optionally filtered by warmer name.
 * An empty {@code warmers} array (the default) means "all warmers".
 */
public class GetWarmersRequest extends ClusterInfoRequest<GetWarmersRequest> {
    private String[] warmers = Strings.EMPTY_ARRAY;
    // Sets the warmer names to fetch; returns this for call chaining.
    public GetWarmersRequest warmers(String[] warmers) {
        this.warmers = warmers;
        return this;
    }
    public String[] warmers() {
        return warmers;
    }
    @Override
    public ActionRequestValidationException validate() {
        // No request-level constraints to validate.
        return null;
    }
    @Override
    public void readFrom(StreamInput in) throws IOException {
        super.readFrom(in);
        warmers = in.readStringArray();
    }
    @Override
    public void writeTo(StreamOutput out) throws IOException {
        super.writeTo(out);
        out.writeStringArray(warmers);
    }
}
| 0true
|
src_main_java_org_elasticsearch_action_admin_indices_warmer_get_GetWarmersRequest.java
|
1,781 |
@Component("blMapStructurePersistenceModule")
@Scope("prototype")
public class MapStructurePersistenceModule extends BasicPersistenceModule {
private static final Log LOG = LogFactory.getLog(MapStructurePersistenceModule.class);
/**
 * This module only handles MAP-structured persistence operations.
 */
@Override
public boolean isCompatible(OperationType operationType) {
    return OperationType.MAP.equals(operationType);
}
/**
 * Appends the map-structure key and value properties (when present in the
 * merged property set) to the supplied property list.
 */
@Override
public void extractProperties(Class<?>[] inheritanceLine, Map<MergedPropertyType, Map<String, FieldMetadata>> mergedProperties, List<Property> properties) throws NumberFormatException {
    final Map<String, FieldMetadata> keyProperties = mergedProperties.get(MergedPropertyType.MAPSTRUCTUREKEY);
    if (keyProperties != null) {
        extractPropertiesFromMetadata(inheritanceLine, keyProperties, properties, false, MergedPropertyType.MAPSTRUCTUREKEY);
    }
    final Map<String, FieldMetadata> valueProperties = mergedProperties.get(MergedPropertyType.MAPSTRUCTUREVALUE);
    if (valueProperties != null) {
        extractPropertiesFromMetadata(inheritanceLine, valueProperties, properties, false, MergedPropertyType.MAPSTRUCTUREVALUE);
    }
}
/**
 * Converts the entries of the entity's map property into admin {@link Entity}
 * records: one record per map entry, with the key rendered as a string
 * property and the value expanded either as a single simple-value property or
 * as the full set of persistent-entity properties.
 *
 * Entries whose key matches a "map field" (declared via the
 * mapProperty + MAPFIELDSEPARATOR naming convention) are skipped.
 */
protected Entity[] getMapRecords(Serializable record, MapStructure mapStructure, Map<String, FieldMetadata> ceilingMergedProperties, Map<String, FieldMetadata> valueMergedProperties, Property symbolicIdProperty) throws IllegalAccessException, InvocationTargetException, NoSuchMethodException, SecurityException, IllegalArgumentException, ClassNotFoundException, NoSuchFieldException {
    //compile a list of mapKeys that were used as mapFields
    List<String> mapFieldKeys = new ArrayList<String>();
    String mapProperty = mapStructure.getMapProperty();
    for (Map.Entry<String, FieldMetadata> entry : ceilingMergedProperties.entrySet()) {
        if (entry.getKey().startsWith(mapProperty + FieldManager.MAPFIELDSEPARATOR)) {
            mapFieldKeys.add(entry.getKey().substring(entry.getKey().indexOf(FieldManager.MAPFIELDSEPARATOR) + FieldManager.MAPFIELDSEPARATOR.length(), entry.getKey().length()));
        }
    }
    Collections.sort(mapFieldKeys);
    FieldManager fieldManager = getFieldManager();
    Map map;
    try {
        map = (Map) fieldManager.getFieldValue(record, mapProperty);
    } catch (FieldNotAvailableException e) {
        throw new IllegalArgumentException(e);
    }
    List<Entity> entities = new ArrayList<Entity>(map.size());
    for (Object key : map.keySet()) {
        // Skip entries managed as map fields rather than ordinary map entries.
        if (key instanceof String && mapFieldKeys.contains(key)) {
            continue;
        }
        Entity entityItem = new Entity();
        entityItem.setType(new String[]{record.getClass().getName()});
        entities.add(entityItem);
        List<Property> props = new ArrayList<Property>();
        Property propertyItem = new Property();
        propertyItem.setName(mapStructure.getKeyPropertyName());
        props.add(propertyItem);
        // Render the map key as a string, using the configured date/decimal
        // formatters for temporal and numeric key types.
        String strVal;
        if (Date.class.isAssignableFrom(key.getClass())) {
            strVal = getSimpleDateFormatter().format((Date) key);
        } else if (Timestamp.class.isAssignableFrom(key.getClass())) {
            // NOTE(review): java.sql.Timestamp extends java.util.Date, so
            // Timestamp keys are already caught by the Date branch above and
            // this branch appears unreachable — confirm before relying on it.
            strVal = getSimpleDateFormatter().format(new Date(((Timestamp) key).getTime()));
        } else if (Calendar.class.isAssignableFrom(key.getClass())) {
            strVal = getSimpleDateFormatter().format(((Calendar) key).getTime());
        } else if (Double.class.isAssignableFrom(key.getClass())) {
            strVal = getDecimalFormatter().format(key);
        } else if (BigDecimal.class.isAssignableFrom(key.getClass())) {
            strVal = getDecimalFormatter().format(key);
        } else {
            strVal = key.toString();
        }
        propertyItem.setValue(strVal);
        // A null persistent class means the map value is a simple (primitive)
        // value; otherwise expand the persistent entity's properties.
        PersistentClass persistentClass = persistenceManager.getDynamicEntityDao().getPersistentClass(mapStructure.getValueClassName());
        if (persistentClass == null) {
            Property temp = new Property();
            temp.setName(((SimpleValueMapStructure) mapStructure).getValuePropertyName());
            temp.setValue(String.valueOf(map.get(key)));
            props.add(temp);
        } else {
            extractPropertiesFromPersistentEntity(valueMergedProperties, (Serializable) map.get(key), props);
        }
        if (symbolicIdProperty != null) {
            props.add(symbolicIdProperty);
        }
        Property[] properties = new Property[props.size()];
        properties = props.toArray(properties);
        entityItem.setProperties(properties);
    }
    return entities.toArray(new Entity[entities.size()]);
}
/**
 * Resolves and registers the merged property metadata for a map structure's
 * key and value sides. For each side, a registered Hibernate persistent class
 * yields full merged entity properties; otherwise the side is treated as a
 * primitive and a single synthetic property is generated.
 *
 * @throws ServiceException wrapping any resolution failure
 */
@Override
public void updateMergedProperties(PersistencePackage persistencePackage, Map<MergedPropertyType, Map<String, FieldMetadata>> allMergedProperties) throws ServiceException {
    String ceilingEntityFullyQualifiedClassname = persistencePackage.getCeilingEntityFullyQualifiedClassname();
    try {
        PersistencePerspective persistencePerspective = persistencePackage.getPersistencePerspective();
        MapStructure mapStructure = (MapStructure) persistencePerspective.getPersistencePerspectiveItems().get(PersistencePerspectiveItemType.MAPSTRUCTURE);
        if (mapStructure != null) {
            // Key side: primitive key types get a single synthetic property.
            PersistentClass persistentClass = persistenceManager.getDynamicEntityDao().getPersistentClass(mapStructure.getKeyClassName());
            Map<String, FieldMetadata> keyMergedProperties;
            if (persistentClass == null) {
                keyMergedProperties = persistenceManager.getDynamicEntityDao().getPropertiesForPrimitiveClass(
                        mapStructure.getKeyPropertyName(),
                        mapStructure.getKeyPropertyFriendlyName(),
                        Class.forName(mapStructure.getKeyClassName()),
                        Class.forName(ceilingEntityFullyQualifiedClassname),
                        MergedPropertyType.MAPSTRUCTUREKEY
                );
            } else {
                keyMergedProperties = persistenceManager.getDynamicEntityDao().getMergedProperties(
                        mapStructure.getKeyClassName(),
                        new Class[]{Class.forName(mapStructure.getKeyClassName())},
                        null,
                        new String[]{},
                        new ForeignKey[]{},
                        MergedPropertyType.MAPSTRUCTUREKEY,
                        persistencePerspective.getPopulateToOneFields(),
                        persistencePerspective.getIncludeFields(),
                        persistencePerspective.getExcludeFields(),
                        persistencePerspective.getConfigurationKey(),
                        ""
                );
            }
            allMergedProperties.put(MergedPropertyType.MAPSTRUCTUREKEY, keyMergedProperties);
            // Value side: a non-persistent value class is only legal for
            // SimpleValueMapStructure configurations.
            persistentClass = persistenceManager.getDynamicEntityDao().getPersistentClass(mapStructure.getValueClassName());
            Map<String, FieldMetadata> valueMergedProperties;
            if (persistentClass == null) {
                if (!SimpleValueMapStructure.class.isAssignableFrom(mapStructure.getClass())) {
                    throw new IllegalStateException("The map structure was determined to not be a simple value, but the system was unable to identify the entity designated for the map structure value(" + mapStructure.getValueClassName() + ")");
                }
                valueMergedProperties = persistenceManager.getDynamicEntityDao().getPropertiesForPrimitiveClass(
                        ((SimpleValueMapStructure) mapStructure).getValuePropertyName(),
                        ((SimpleValueMapStructure) mapStructure).getValuePropertyFriendlyName(),
                        Class.forName(mapStructure.getValueClassName()),
                        Class.forName(ceilingEntityFullyQualifiedClassname),
                        MergedPropertyType.MAPSTRUCTUREVALUE
                );
            } else {
                valueMergedProperties = persistenceManager.getDynamicEntityDao().getMergedProperties(
                        mapStructure.getValueClassName(),
                        new Class[]{Class.forName(mapStructure.getValueClassName())},
                        null,
                        new String[]{},
                        new ForeignKey[]{},
                        MergedPropertyType.MAPSTRUCTUREVALUE,
                        persistencePerspective.getPopulateToOneFields(),
                        persistencePerspective.getIncludeFields(),
                        persistencePerspective.getExcludeFields(),
                        persistencePerspective.getConfigurationKey(),
                        ""
                );
            }
            allMergedProperties.put(MergedPropertyType.MAPSTRUCTUREVALUE, valueMergedProperties);
            //clear out all but the primary key field from the owning entity
            //            Iterator<Map.Entry<String, FieldMetadata>> itr = allMergedProperties.get(MergedPropertyType.PRIMARY).entrySet().iterator();
            //            while (itr.hasNext()) {
            //                Map.Entry<String, FieldMetadata> entry = itr.next();
            //                if (!(entry.getValue() instanceof BasicFieldMetadata) || !SupportedFieldType.ID.equals(((BasicFieldMetadata) entry.getValue()).getFieldType())) {
            //                    itr.remove();
            //                }
            //            }
        }
    } catch (Exception e) {
        throw new ServiceException("Unable to fetch results for " + ceilingEntityFullyQualifiedClassname, e);
    }
}
/**
 * Adds a new entry to the entity's map property: validates the submitted key,
 * resolves the owning entity instance, builds the value (either by persisting
 * a new value entity or converting a simple value), stores it in the map, and
 * returns the resulting map record as an admin {@link Entity}.
 *
 * @throws SecurityServiceException if the map structure is not mutable
 * @throws ServiceException on reserved key names or any persistence failure
 */
@Override
public Entity add(PersistencePackage persistencePackage) throws ServiceException {
    String[] customCriteria = persistencePackage.getCustomCriteria();
    if (customCriteria != null && customCriteria.length > 0) {
        LOG.warn("custom persistence handlers and custom criteria not supported for add types other than BASIC");
    }
    PersistencePerspective persistencePerspective = persistencePackage.getPersistencePerspective();
    Entity entity = persistencePackage.getEntity();
    MapStructure mapStructure = (MapStructure) persistencePerspective.getPersistencePerspectiveItems().get(PersistencePerspectiveItemType.MAPSTRUCTURE);
    if (!mapStructure.getMutable()) {
        throw new SecurityServiceException("Field not mutable");
    }
    try {
        Map<String, FieldMetadata> ceilingMergedProperties = getSimpleMergedProperties(entity.getType()[0],
                persistencePerspective);
        String mapKey = entity.findProperty(mapStructure.getKeyPropertyName()).getValue();
        // Validation errors are accumulated on the entity rather than thrown.
        if (StringUtils.isEmpty(mapKey)) {
            entity.addValidationError(mapStructure.getKeyPropertyName(), RequiredPropertyValidator.ERROR_MESSAGE);
            LOG.debug("No key property passed in for map, failing validation");
        }
        // Keys colliding with declared map fields are reserved.
        if (ceilingMergedProperties.containsKey(mapStructure.getMapProperty() + FieldManager.MAPFIELDSEPARATOR + mapKey)) {
            throw new ServiceException("\"" + mapKey + "\" is a reserved property name.");
        }
        // "symbolicId" carries the id of the owning entity instance.
        Serializable instance = persistenceManager.getDynamicEntityDao().retrieve(Class.forName(entity.getType()
                [0]), Long.valueOf(entity.findProperty("symbolicId").getValue()));
        Assert.isTrue(instance != null, "Entity not found");
        FieldManager fieldManager = getFieldManager();
        Map map = (Map) fieldManager.getFieldValue(instance, mapStructure.getMapProperty());
        if (map.containsKey(mapKey)) {
            entity.addValidationError(mapStructure.getKeyPropertyName(), "keyExistsValidationError");
        }
        // Resolve value-side metadata (persistent entity vs. simple value).
        PersistentClass persistentClass = persistenceManager.getDynamicEntityDao().getPersistentClass(mapStructure.getValueClassName());
        Map<String, FieldMetadata> valueUnfilteredMergedProperties;
        if (persistentClass == null) {
            valueUnfilteredMergedProperties = persistenceManager.getDynamicEntityDao().getPropertiesForPrimitiveClass(
                    ((SimpleValueMapStructure) mapStructure).getValuePropertyName(),
                    ((SimpleValueMapStructure) mapStructure).getValuePropertyFriendlyName(),
                    Class.forName(mapStructure.getValueClassName()),
                    Class.forName(entity.getType()[0]),
                    MergedPropertyType.MAPSTRUCTUREVALUE
            );
        } else {
            valueUnfilteredMergedProperties = persistenceManager.getDynamicEntityDao().getMergedProperties(
                    mapStructure.getValueClassName(),
                    new Class[]{Class.forName(mapStructure.getValueClassName())},
                    null,
                    new String[]{},
                    new ForeignKey[]{},
                    MergedPropertyType.MAPSTRUCTUREVALUE,
                    persistencePerspective.getPopulateToOneFields(),
                    persistencePerspective.getIncludeFields(),
                    persistencePerspective.getExcludeFields(),
                    persistencePerspective.getConfigurationKey(),
                    ""
            );
        }
        Map<String, FieldMetadata> valueMergedProperties = filterOutCollectionMetadata(valueUnfilteredMergedProperties);
        if (persistentClass != null) {
            // Entity-valued map: instantiate, populate, and persist the value.
            Serializable valueInstance = (Serializable) Class.forName(mapStructure.getValueClassName()).newInstance();
            valueInstance = createPopulatedInstance(valueInstance, entity, valueMergedProperties, false);
            if (valueInstance instanceof ValueAssignable) {
                //This is likely a OneToMany map (see productAttributes) whose map key is actually the name field from
                //the mapped entity.
                ((ValueAssignable) valueInstance).setName(entity.findProperty(mapStructure.getKeyPropertyName()).getValue());
            }
            if (mapStructure.getManyToField() != null) {
                //Need to fulfill a bi-directional association back to the parent entity
                fieldManager.setFieldValue(valueInstance, mapStructure.getManyToField(), instance);
            }
            valueInstance = persistenceManager.getDynamicEntityDao().persist(valueInstance);
            /*
             * TODO this map manipulation code currently assumes the key value is a String. This should be widened to accept
             * additional types of primitive objects.
             */
            map.put(mapKey, valueInstance);
        } else {
            // Simple-valued map: convert the submitted string to its declared type.
            String propertyName = ((SimpleValueMapStructure) mapStructure).getValuePropertyName();
            String value = entity.findProperty(propertyName).getValue();
            Object convertedPrimitive = convertPrimitiveBasedOnType(propertyName, value, valueMergedProperties);
            map.put(mapKey, convertedPrimitive);
        }
        // Re-read the map records and return the one matching the added key.
        Entity[] responses = getMapRecords(instance, mapStructure, ceilingMergedProperties, valueMergedProperties, entity.findProperty("symbolicId"));
        for (Entity response : responses) {
            if (response.findProperty(mapStructure.getKeyPropertyName()).getValue().equals(persistencePackage.getEntity().findProperty(mapStructure.getKeyPropertyName()).getValue())) {
                return response;
            }
        }
        return responses[0];
    } catch (Exception e) {
        throw new ServiceException("Problem updating entity : " + e.getMessage(), e);
    }
}
/**
 * Converts the string form of a simple map value into the Java type declared
 * by the corresponding field metadata (boolean, date, decimal, money,
 * integer), defaulting to the raw string for any other field type.
 *
 * @throws ParseException if a DATE value does not match the configured format
 */
protected Object convertPrimitiveBasedOnType(String valuePropertyName, String value, Map<String, FieldMetadata> valueMergedProperties) throws ParseException {
    switch(((BasicFieldMetadata) valueMergedProperties.get(valuePropertyName)).getFieldType()) {
        case BOOLEAN :
            return Boolean.parseBoolean(value);
        case DATE :
            return getSimpleDateFormatter().parse(value);
        case DECIMAL :
            // String-based BigDecimal construction preserves the exact scale.
            return new BigDecimal(value);
        case MONEY :
            return new Money(value);
        case INTEGER :
            return Integer.parseInt(value);
        default :
            return value;
    }
}
/**
 * Updates a single entry in the map structure of an owning entity. The entry to replace is
 * identified by the inbound "priorKey" property; the (possibly changed) key comes from the
 * map structure's key property. Handles both entity-valued maps (persistentClass != null)
 * and simple/primitive-valued maps. Validation failures are recorded on the entity rather
 * than aborting immediately so all errors can be reported together.
 *
 * @param persistencePackage descriptor carrying the entity payload and the MAPSTRUCTURE perspective item
 * @return the Entity record representing the updated map entry, or the first fetched record when
 *         no record matches the submitted key exactly
 * @throws ServiceException wrapping any underlying failure; SecurityServiceException when the map
 *         field is declared immutable
 */
@Override
public Entity update(PersistencePackage persistencePackage) throws ServiceException {
String[] customCriteria = persistencePackage.getCustomCriteria();
// Custom criteria are ignored for map-structure updates; warn so callers notice.
if (customCriteria != null && customCriteria.length > 0) {
LOG.warn("custom persistence handlers and custom criteria not supported for update types other than BASIC");
}
PersistencePerspective persistencePerspective = persistencePackage.getPersistencePerspective();
Entity entity = persistencePackage.getEntity();
MapStructure mapStructure = (MapStructure) persistencePerspective.getPersistencePerspectiveItems().get(PersistencePerspectiveItemType.MAPSTRUCTURE);
if (!mapStructure.getMutable()) {
throw new SecurityServiceException("Field not mutable");
}
try {
Map<String, FieldMetadata> ceilingMergedProperties = getSimpleMergedProperties(entity.getType()[0],
persistencePerspective);
String mapKey = entity.findProperty(mapStructure.getKeyPropertyName()).getValue();
// Reject keys that collide with a map-field property name reserved on the ceiling entity.
if (ceilingMergedProperties.containsKey(mapStructure.getMapProperty() + FieldManager.MAPFIELDSEPARATOR + mapKey)) {
throw new ServiceException("\"" + mapKey + "\" is a reserved property name.");
}
// Load the owning (parent) entity via its symbolic id, then reflectively grab its map field.
Serializable instance = persistenceManager.getDynamicEntityDao().retrieve(Class.forName(entity.getType()[0]), Long.valueOf(entity.findProperty("symbolicId").getValue()));
Assert.isTrue(instance != null, "Entity not found");
FieldManager fieldManager = getFieldManager();
Map map = (Map) fieldManager.getFieldValue(instance, mapStructure.getMapProperty());
// A null PersistentClass means the map values are simple/primitive, not mapped entities.
PersistentClass persistentClass = persistenceManager.getDynamicEntityDao().getPersistentClass(mapStructure.getValueClassName());
Map<String, FieldMetadata> valueUnfilteredMergedProperties;
if (persistentClass == null) {
// Primitive-valued map: synthesize metadata for the single value property.
valueUnfilteredMergedProperties = persistenceManager.getDynamicEntityDao().getPropertiesForPrimitiveClass(
((SimpleValueMapStructure) mapStructure).getValuePropertyName(),
((SimpleValueMapStructure) mapStructure).getValuePropertyFriendlyName(),
Class.forName(mapStructure.getValueClassName()),
Class.forName(entity.getType()[0]),
MergedPropertyType.MAPSTRUCTUREVALUE
);
} else {
// Entity-valued map: merge properties from the value entity class itself.
valueUnfilteredMergedProperties = persistenceManager.getDynamicEntityDao().getMergedProperties(
mapStructure.getValueClassName(),
new Class[]{Class.forName(mapStructure.getValueClassName())},
null,
new String[]{},
new ForeignKey[]{},
MergedPropertyType.MAPSTRUCTUREVALUE,
persistencePerspective.getPopulateToOneFields(),
persistencePerspective.getIncludeFields(),
persistencePerspective.getExcludeFields(),
persistencePerspective.getConfigurationKey(),
""
);
}
Map<String, FieldMetadata> valueMergedProperties = filterOutCollectionMetadata(valueUnfilteredMergedProperties);
// Record a validation error (rather than throwing) when no key was supplied.
if (StringUtils.isEmpty(mapKey)) {
entity.addValidationError(mapStructure.getKeyPropertyName(), RequiredPropertyValidator.ERROR_MESSAGE);
LOG.debug("No key property passed in for map, failing validation");
}
if (persistentClass != null) {
// Entity-valued path: the existing value instance is located under the prior key.
Serializable valueInstance = (Serializable) map.get(entity.findProperty("priorKey").getValue());
// Renaming onto a key already occupied by a different value is a validation error.
if (map.get(mapKey) != null && !map.get(mapKey).equals(valueInstance)) {
entity.addValidationError(mapStructure.getKeyPropertyName(), "keyExistsValidationError");
}
if (StringUtils.isNotBlank(mapStructure.getMapKeyValueProperty())) {
// Mirror the inbound "key" property onto the configured key-value property of the value entity.
Property p = entity.findProperty("key");
Property newP = new Property();
newP.setName(mapStructure.getMapKeyValueProperty());
newP.setValue(p.getValue());
newP.setIsDirty(p.getIsDirty());
entity.addProperty(newP);
}
//allow validation on other properties in order to show key validation errors along with all the other properties
//validation errors
valueInstance = createPopulatedInstance(valueInstance, entity, valueMergedProperties, false);
if (StringUtils.isNotEmpty(mapKey) && !entity.isValidationFailure()) {
// If the key changed, drop the old entry before inserting under the new key.
if (!entity.findProperty("priorKey").getValue().equals(mapKey)) {
map.remove(entity.findProperty("priorKey").getValue());
}
/*
 * TODO this map manipulation code currently assumes the key value is a String. This should be widened to accept
 * additional types of primitive objects.
 */
map.put(entity.findProperty(mapStructure.getKeyPropertyName()).getValue(), valueInstance);
}
} else {
// Primitive-valued path: store the raw property value directly under the key.
if (StringUtils.isNotEmpty(mapKey) && !entity.isValidationFailure()) {
map.put(entity.findProperty(mapStructure.getKeyPropertyName()).getValue(), entity.findProperty(((SimpleValueMapStructure) mapStructure).getValuePropertyName()).getValue());
}
}
instance = persistenceManager.getDynamicEntityDao().merge(instance);
// Re-read the map records and return the one matching the submitted key, if present.
Entity[] responses = getMapRecords(instance, mapStructure, ceilingMergedProperties, valueMergedProperties, entity.findProperty("symbolicId"));
for (Entity response : responses) {
if (response.findProperty(mapStructure.getKeyPropertyName()).getValue().equals(persistencePackage.getEntity().findProperty(mapStructure.getKeyPropertyName()).getValue())) {
return response;
}
}
return responses[0];
} catch (Exception e) {
throw new ServiceException("Problem updating entity : " + e.getMessage(), e);
}
}
/**
 * Removes the map entry identified by the inbound "priorKey" property from the owning
 * entity's map field. When the map structure is configured with deleteValueEntity=true,
 * the removed value entity is also deleted from the persistence layer.
 *
 * @param persistencePackage descriptor carrying the entity payload and the MAPSTRUCTURE perspective item
 * @throws ServiceException wrapping any underlying failure; SecurityServiceException when the map
 *         field is declared immutable
 */
@Override
public void remove(PersistencePackage persistencePackage) throws ServiceException {
    String[] customCriteria = persistencePackage.getCustomCriteria();
    // Custom criteria are ignored for map-structure removals; warn so callers notice.
    if (customCriteria != null && customCriteria.length > 0) {
        LOG.warn("custom persistence handlers and custom criteria not supported for remove types other than BASIC");
    }
    PersistencePerspective persistencePerspective = persistencePackage.getPersistencePerspective();
    Entity entity = persistencePackage.getEntity();
    MapStructure mapStructure = (MapStructure) persistencePerspective.getPersistencePerspectiveItems().get(PersistencePerspectiveItemType.MAPSTRUCTURE);
    if (!mapStructure.getMutable()) {
        throw new SecurityServiceException("Field not mutable");
    }
    try {
        Map<String, FieldMetadata> ceilingMergedProperties = getSimpleMergedProperties(entity.getType()[0],
                persistencePerspective);
        String mapKey = entity.findProperty(mapStructure.getKeyPropertyName()).getValue();
        // Reject keys that collide with a map-field property name reserved on the ceiling entity.
        if (ceilingMergedProperties.containsKey(mapStructure.getMapProperty() + FieldManager.MAPFIELDSEPARATOR + mapKey)) {
            throw new ServiceException("\"" + mapKey + "\" is a reserved property name.");
        }
        // Load the owning (parent) entity via its symbolic id, then reflectively grab its map field.
        Serializable instance = persistenceManager.getDynamicEntityDao().retrieve(Class.forName(entity.getType()[0]), Long.valueOf(entity.findProperty("symbolicId").getValue()));
        Assert.isTrue(instance != null, "Entity not found");
        FieldManager fieldManager = getFieldManager();
        Map map = (Map) fieldManager.getFieldValue(instance, mapStructure.getMapProperty());
        Object value = map.remove(entity.findProperty("priorKey").getValue());
        // Map.remove returns null when no entry existed for the prior key; only cascade the
        // delete when a value was actually removed, otherwise the DAO would be handed null.
        if (value != null && mapStructure.getDeleteValueEntity()) {
            persistenceManager.getDynamicEntityDao().remove((Serializable) value);
        }
    } catch (Exception e) {
        throw new ServiceException("Problem removing entity : " + e.getMessage(), e);
    }
}
/**
 * Fetches the map records for a single owning entity. The query built from the criteria
 * transfer object must resolve to exactly one parent entity; its map field is then expanded
 * into one Entity record per map entry. Handles both entity-valued and primitive-valued maps.
 *
 * @param persistencePackage descriptor carrying the ceiling/fetch type and the MAPSTRUCTURE perspective item
 * @param cto criteria used to locate the single owning entity
 * @return a DynamicResultSet whose payload contains one record per map entry
 * @throws ServiceException when the query matches more than one parent entity or any underlying failure occurs
 */
@Override
public DynamicResultSet fetch(PersistencePackage persistencePackage, CriteriaTransferObject cto) throws ServiceException {
Entity[] payload;
int totalRecords;
String ceilingEntityFullyQualifiedClassname = persistencePackage.getCeilingEntityFullyQualifiedClassname();
// Default the fetch type to the ceiling type when the caller did not specify one.
if (StringUtils.isEmpty(persistencePackage.getFetchTypeFullyQualifiedClassname())) {
persistencePackage.setFetchTypeFullyQualifiedClassname(ceilingEntityFullyQualifiedClassname);
}
try {
PersistencePerspective persistencePerspective = persistencePackage.getPersistencePerspective();
Class<?>[] entities = persistenceManager.getPolymorphicEntities(ceilingEntityFullyQualifiedClassname);
// Merged property metadata for the owning (parent) entity, used to build filter mappings.
Map<String, FieldMetadata> mergedProperties = persistenceManager.getDynamicEntityDao().getMergedProperties(
ceilingEntityFullyQualifiedClassname,
entities,
(ForeignKey) persistencePerspective.getPersistencePerspectiveItems().get(PersistencePerspectiveItemType.FOREIGNKEY),
persistencePerspective.getAdditionalNonPersistentProperties(),
persistencePerspective.getAdditionalForeignKeys(),
MergedPropertyType.PRIMARY,
persistencePerspective.getPopulateToOneFields(),
persistencePerspective.getIncludeFields(),
persistencePerspective.getExcludeFields(),
persistencePerspective.getConfigurationKey(),
""
);
MapStructure mapStructure = (MapStructure) persistencePerspective.getPersistencePerspectiveItems().get(PersistencePerspectiveItemType.MAPSTRUCTURE);
// A null PersistentClass means the map values are simple/primitive, not mapped entities.
PersistentClass persistentClass = persistenceManager.getDynamicEntityDao().getPersistentClass(mapStructure.getValueClassName());
Map<String, FieldMetadata> valueUnfilteredMergedProperties;
if (persistentClass == null) {
// Primitive-valued map: synthesize metadata for the single value property.
valueUnfilteredMergedProperties = persistenceManager.getDynamicEntityDao().getPropertiesForPrimitiveClass(
((SimpleValueMapStructure) mapStructure).getValuePropertyName(),
((SimpleValueMapStructure) mapStructure).getValuePropertyFriendlyName(),
Class.forName(mapStructure.getValueClassName()),
Class.forName(ceilingEntityFullyQualifiedClassname),
MergedPropertyType.MAPSTRUCTUREVALUE
);
} else {
// Entity-valued map: merge properties from the value entity class itself.
valueUnfilteredMergedProperties = persistenceManager.getDynamicEntityDao().getMergedProperties(
mapStructure.getValueClassName(),
new Class[]{Class.forName(mapStructure.getValueClassName())},
null,
new String[]{},
new ForeignKey[]{},
MergedPropertyType.MAPSTRUCTUREVALUE,
false,
new String[]{},
new String[]{},
null,
""
);
}
Map<String, FieldMetadata> valueMergedProperties = filterOutCollectionMetadata(valueUnfilteredMergedProperties);
List<FilterMapping> filterMappings = getFilterMappings(persistencePerspective, cto, persistencePackage
.getFetchTypeFullyQualifiedClassname(), mergedProperties);
totalRecords = getTotalRecords(persistencePackage.getFetchTypeFullyQualifiedClassname(), filterMappings);
// A map structure belongs to exactly one parent; ambiguous queries are rejected.
if (totalRecords > 1) {
throw new ServiceException("Queries to retrieve an entity containing a MapStructure must return only 1 entity. Your query returned ("+totalRecords+") values.");
}
List<Serializable> records = getPersistentRecords(persistencePackage.getFetchTypeFullyQualifiedClassname(), filterMappings, cto.getFirstResult(), cto.getMaxResults());
Map<String, FieldMetadata> ceilingMergedProperties = getSimpleMergedProperties(ceilingEntityFullyQualifiedClassname,
persistencePerspective);
// Expand the single parent's map entries into individual Entity records.
payload = getMapRecords(records.get(0), mapStructure, ceilingMergedProperties, valueMergedProperties, null);
} catch (Exception e) {
throw new ServiceException("Unable to fetch results for " + ceilingEntityFullyQualifiedClassname, e);
}
DynamicResultSet results = new DynamicResultSet(null, payload, payload.length);
return results;
}
}
| 1no label
|
admin_broadleaf-open-admin-platform_src_main_java_org_broadleafcommerce_openadmin_server_service_persistence_module_MapStructurePersistenceModule.java
|
615 |
/**
 * Aggregated statistics for all copies (primary and replicas) of a single shard. Wraps an
 * array of per-copy ShardStats and lazily derives combined totals. Supports the project's
 * Streamable wire protocol via readFrom/writeTo.
 */
public class IndexShardStats implements Iterable<ShardStats>, Streamable {
// Identifier of the shard these stats describe.
private ShardId shardId;
// One ShardStats entry per shard copy (primary + replicas).
private ShardStats[] shards;
// No-arg constructor reserved for deserialization via readIndexShardStats.
private IndexShardStats() {}
public IndexShardStats(ShardId shardId, ShardStats[] shards) {
this.shardId = shardId;
this.shards = shards;
}
public ShardId getShardId() {
return this.shardId;
}
public ShardStats[] getShards() {
return shards;
}
// Returns the stats for the shard copy at the given array position.
public ShardStats getAt(int position) {
return shards[position];
}
@Override
public Iterator<ShardStats> iterator() {
return Iterators.forArray(shards);
}
// Lazily computed sum over all copies; NOTE(review): not synchronized — concurrent first
// calls may each compute the total, which appears to be a benign race. Confirm callers.
private CommonStats total = null;
public CommonStats getTotal() {
if (total != null) {
return total;
}
CommonStats stats = new CommonStats();
for (ShardStats shard : shards) {
stats.add(shard.getStats());
}
total = stats;
return stats;
}
// Lazily computed sum over primary copies only; same unsynchronized caching as getTotal.
private CommonStats primary = null;
public CommonStats getPrimary() {
if (primary != null) {
return primary;
}
CommonStats stats = new CommonStats();
for (ShardStats shard : shards) {
if (shard.getShardRouting().primary()) {
stats.add(shard.getStats());
}
}
primary = stats;
return stats;
}
// Deserializes shardId followed by a vInt-prefixed array of ShardStats; must mirror writeTo.
@Override
public void readFrom(StreamInput in) throws IOException {
shardId = ShardId.readShardId(in);
int shardSize = in.readVInt();
shards = new ShardStats[shardSize];
for (int i = 0; i < shardSize; i++) {
shards[i] = ShardStats.readShardStats(in);
}
}
// Serializes in the exact order expected by readFrom.
@Override
public void writeTo(StreamOutput out) throws IOException {
shardId.writeTo(out);
out.writeVInt(shards.length);
for (ShardStats stats : shards) {
stats.writeTo(out);
}
}
// Factory used by the wire protocol: construct empty, then populate from the stream.
public static IndexShardStats readIndexShardStats(StreamInput in) throws IOException {
IndexShardStats indexShardStats = new IndexShardStats();
indexShardStats.readFrom(in);
return indexShardStats;
}
}
| 0true
|
src_main_java_org_elasticsearch_action_admin_indices_stats_IndexShardStats.java
|
5,221 |
/**
 * Parses the JSON body of a "histogram" aggregation into a HistogramAggregator.Factory.
 * Recognized keys: field, script, lang, format, params (object), interval (required, > 0),
 * min_doc_count/minDocCount, keyed, script_values_sorted, and order (object of
 * name -> "asc"|"desc"). Unknown keys raise SearchParseException.
 */
public class HistogramParser implements Aggregator.Parser {
@Override
public String type() {
return InternalHistogram.TYPE.name();
}
@Override
public AggregatorFactory parse(String aggregationName, XContentParser parser, SearchContext context) throws IOException {
ValuesSourceConfig<NumericValuesSource> config = new ValuesSourceConfig<NumericValuesSource>(NumericValuesSource.class);
// Defaults: no field/script, buckets need at least 1 doc, ordered by key ascending.
String field = null;
String script = null;
String scriptLang = null;
Map<String, Object> scriptParams = null;
boolean keyed = false;
long minDocCount = 1;
InternalOrder order = (InternalOrder) InternalOrder.KEY_ASC;
// -1 sentinel: "interval" is required and validated after the parse loop.
long interval = -1;
boolean assumeSorted = false;
String format = null;
XContentParser.Token token;
String currentFieldName = null;
// Token-driven state machine: remember the current field name, then dispatch on value type.
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.VALUE_STRING) {
if ("field".equals(currentFieldName)) {
field = parser.text();
} else if ("script".equals(currentFieldName)) {
script = parser.text();
} else if ("lang".equals(currentFieldName)) {
scriptLang = parser.text();
} else if ("format".equals(currentFieldName)) {
format = parser.text();
} else {
throw new SearchParseException(context, "Unknown key for a " + token + " in aggregation [" + aggregationName + "]: [" + currentFieldName + "].");
}
} else if (token == XContentParser.Token.VALUE_NUMBER) {
if ("interval".equals(currentFieldName)) {
interval = parser.longValue();
} else if ("min_doc_count".equals(currentFieldName) || "minDocCount".equals(currentFieldName)) {
minDocCount = parser.longValue();
} else {
throw new SearchParseException(context, "Unknown key for a " + token + " in aggregation [" + aggregationName + "]: [" + currentFieldName + "].");
}
} else if (token == XContentParser.Token.VALUE_BOOLEAN) {
if ("keyed".equals(currentFieldName)) {
keyed = parser.booleanValue();
} else if ("script_values_sorted".equals(currentFieldName)) {
assumeSorted = parser.booleanValue();
} else {
throw new SearchParseException(context, "Unknown key for a " + token + " in aggregation [" + aggregationName + "]: [" + currentFieldName + "].");
}
} else if (token == XContentParser.Token.START_OBJECT) {
if ("params".equals(currentFieldName)) {
scriptParams = parser.map();
} else if ("order".equals(currentFieldName)) {
// Nested object: each field is an order key with an "asc"/"desc" value.
// NOTE(review): this inner loop reuses currentFieldName from the outer loop —
// appears intentional since only the inner object's fields are read here.
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.VALUE_STRING) {
String dir = parser.text();
boolean asc = "asc".equals(dir);
if (!asc && !"desc".equals(dir)) {
throw new SearchParseException(context, "Unknown order direction [" + dir + "] in aggregation [" + aggregationName + "]. Should be either [asc] or [desc]");
}
order = resolveOrder(currentFieldName, asc);
}
}
} else {
throw new SearchParseException(context, "Unknown key for a " + token + " in aggregation [" + aggregationName + "]: [" + currentFieldName + "].");
}
} else {
throw new SearchParseException(context, "Unexpected token " + token + " in aggregation [" + aggregationName + "].");
}
}
// "interval" is mandatory; the -1 sentinel means it was never supplied.
if (interval < 0) {
throw new SearchParseException(context, "Missing required field [interval] for histogram aggregation [" + aggregationName + "]");
}
Rounding rounding = new Rounding.Interval(interval);
if (script != null) {
config.script(context.scriptService().search(context.lookup(), scriptLang, script, scriptParams));
}
if (!assumeSorted) {
// we need values to be sorted and unique for efficiency
config.ensureSorted(true);
}
// No field: pure script-based (or unconfigured) aggregation.
if (field == null) {
return new HistogramAggregator.Factory(aggregationName, config, rounding, order, keyed, minDocCount, InternalHistogram.FACTORY);
}
FieldMapper<?> mapper = context.smartNameFieldMapper(field);
// Unmapped field: still build a factory so the agg returns empty rather than failing.
if (mapper == null) {
config.unmapped(true);
return new HistogramAggregator.Factory(aggregationName, config, rounding, order, keyed, minDocCount, InternalHistogram.FACTORY);
}
IndexFieldData<?> indexFieldData = context.fieldData().getForField(mapper);
config.fieldContext(new FieldContext(field, indexFieldData));
if (format != null) {
config.formatter(new ValueFormatter.Number.Pattern(format));
}
return new HistogramAggregator.Factory(aggregationName, config, rounding, order, keyed, minDocCount, InternalHistogram.FACTORY);
}
/**
 * Maps an order key to an InternalOrder: "_key"/"_count" are built-ins; any other key is
 * treated as a sub-aggregation name, optionally suffixed with ".metric" after a dot.
 */
static InternalOrder resolveOrder(String key, boolean asc) {
if ("_key".equals(key)) {
return (InternalOrder) (asc ? InternalOrder.KEY_ASC : InternalOrder.KEY_DESC);
}
if ("_count".equals(key)) {
return (InternalOrder) (asc ? InternalOrder.COUNT_ASC : InternalOrder.COUNT_DESC);
}
int i = key.indexOf('.');
if (i < 0) {
return new InternalOrder.Aggregation(key, null, asc);
}
return new InternalOrder.Aggregation(key.substring(0, i), key.substring(i + 1), asc);
}
}
| 1no label
|
src_main_java_org_elasticsearch_search_aggregations_bucket_histogram_HistogramParser.java
|
73 |
/**
 * Fluent builder for configuring and starting a TitanTransaction. All configuration methods
 * return this builder for chaining; {@link #start()} opens the transaction.
 */
public interface TransactionBuilder {
/**
 * Makes the transaction read only. Any writes will cause an exception.
 * Read-only transactions do not have to maintain certain data structures and can hence be more efficient.
 *
 * @return this builder, for chaining
 */
public TransactionBuilder readOnly();
/**
 * Enabling batch loading disables a number of consistency checks inside Titan to speed up the ingestion of
 * data under the assumptions that inconsistencies are resolved prior to loading.
 *
 * @return this builder, for chaining
 */
public TransactionBuilder enableBatchLoading();
/**
 * Disables batch loading by ensuring that consistency checks are applied in this transaction. This allows
 * an individual transaction to use consistency checks when the graph as a whole is configured to not use them,
 * which is useful when defining schema elements in a graph with batch-loading enabled.
 *
 * @return this builder, for chaining
 */
public TransactionBuilder disableBatchLoading();
/**
 * Configures the size of the internal caches used in the transaction.
 *
 * @param size cache size for this transaction
 * @return this builder, for chaining
 */
public TransactionBuilder setVertexCacheSize(int size);
/**
 * Configures the initial size of the map of modified vertices held by this
 * transaction. This is a performance hint, not a hard upper bound. The map
 * will grow if the transaction ends up modifying more vertices than
 * expected.
 *
 * @param size initial size of the transaction's dirty vertex collection
 * @return this builder, for chaining
 */
public TransactionBuilder setDirtyVertexSize(int size);
/**
 * Enables/disables checks that verify that each vertex actually exists in the underlying data store when it is retrieved.
 * This might be useful to address common data degradation issues but has adverse impacts on performance due to
 * repeated existence checks.
 * <p/>
 * Note, that these checks apply to vertex retrievals inside the query execution engine and not to vertex ids provided
 * by the user.
 *
 * @param enabled whether internal vertex existence checks are performed
 * @return this builder, for chaining
 */
public TransactionBuilder checkInternalVertexExistence(boolean enabled);
/**
 * Enables/disables checking whether the vertex with a user provided id indeed exists. If the user is absolutely sure
 * that the vertices for the ids provided in this transaction exist in the underlying data store, then disabling the
 * vertex existence check will improve performance because it eliminates a database call.
 * However, if a provided vertex id does not exist in the database and checking is disabled, Titan will assume it
 * exists which can lead to data and query inconsistencies.
 *
 * @param enabled whether user-supplied vertex ids are verified against the data store
 * @return this builder, for chaining
 */
public TransactionBuilder checkExternalVertexExistence(boolean enabled);
/**
 * Enables/disables consistency checking and locking for this transaction. Disabling consistency checks improves
 * performance but requires that the user ensures consistency at the application level. Use with great care.
 *
 * @param enabled whether consistency checking and locking are applied
 * @return this builder, for chaining
 */
public TransactionBuilder consistencyChecks(boolean enabled);
/**
 * Sets the timestamp for this transaction. The transaction will be recorded
 * with this timestamp in those storage backends where the timestamp is
 * recorded.
 *
 * @param timestampSinceEpoch
 *            number of units elapsed since the UNIX Epoch, that is,
 *            00:00:00 UTC, Thursday, 1 January 1970
 * @param unit
 *            units of the {@code timestampSinceEpoch} argument
 * @return this builder, for chaining
 */
public TransactionBuilder setCommitTime(long timestampSinceEpoch, TimeUnit unit);
/**
 * Sets the group name for this transaction, which both enables Metrics collection for the
 * transaction and provides a way to gather reporting on multiple transactions as one group.
 * The group name is used as the prefix for all of this transaction's metric names.
 * <p>
 * If null, Metrics collection is totally disabled for this transaction.
 * <p>
 * If empty, Metrics collection is enabled, but there will be no prefix.
 * Where the default setting would generate metrics names in the form
 * "prefix.x.y.z", this transaction will instead use metric names in the
 * form "x.y.z".
 * <p>
 * If nonempty, Metrics collection is enabled and the prefix will be used
 * for all of this transaction's measurements.
 * <p>
 * Note: setting this to a non-null value only partially overrides
 * {@link GraphDatabaseConfiguration#BASIC_METRICS} = false in the graph
 * database configuration. When Metrics are disabled at the graph level and
 * enabled at the transaction level, storage backend timings and counters
 * will remain disabled.
 * <p>
 * The default value is
 * {@link GraphDatabaseConfiguration#METRICS_PREFIX_DEFAULT}.
 *
 * @param name
 *            Metric name prefix for this transaction
 * @return this builder, for chaining
 */
public TransactionBuilder setGroupName(String name);
/**
 * Name of the log to be used for logging the mutations in this transaction. If no log identifier is set,
 * then this transaction will not be logged.
 *
 * @param logName identifier of the mutation log
 * @return this builder, for chaining
 */
public TransactionBuilder setLogIdentifier(String logName);
/**
 * Configures this transaction such that queries against partitioned vertices are
 * restricted to the given partitions.
 *
 * @param partitions partition ids this transaction's queries are restricted to
 * @return this builder, for chaining
 */
public TransactionBuilder setRestrictedPartitions(int[] partitions);
/**
 * Configures a custom option on this transaction which will be passed through to the storage and indexing backends.
 *
 * @param k option key
 * @param v option value
 * @return this builder, for chaining
 */
public TransactionBuilder setCustomOption(String k, Object v);
/**
 * Starts and returns the transaction build by this builder
 *
 * @return A new transaction configured according to this builder
 */
public TitanTransaction start();
}
| 0true
|
titan-core_src_main_java_com_thinkaurelius_titan_core_TransactionBuilder.java
|
1,468 |
/**
 * SQL function "dijkstra": computes the shortest weighted path between two vertices using
 * Dijkstra's algorithm, with edge weights read from a named edge property. Delegates the
 * actual path search to OSQLFunctionPathFinder; this class supplies Float-based distance
 * arithmetic and parameter handling.
 */
public class OSQLFunctionDijkstra extends OSQLFunctionPathFinder<Float> {
    public static final String NAME = "dijkstra";
    // Zero distance; Float.valueOf reuses the cached boxed instance instead of the
    // deprecated boxing constructor new Float(0f).
    private static final Float MIN = Float.valueOf(0f);

    // Name of the edge property holding the weight (third function argument).
    private String paramWeightFieldName;

    public OSQLFunctionDijkstra() {
        // Function takes 3 mandatory parameters plus an optional direction.
        super(NAME, 3, 4);
    }

    /**
     * Resolves the source vertex, destination vertex, weight field name and optional
     * direction from the function arguments, then runs the inherited path search.
     */
    public Object execute(OIdentifiable iCurrentRecord, Object iCurrentResult, final Object[] iParameters, OCommandContext iContext) {
        final OrientBaseGraph graph = OGraphCommandExecutorSQLFactory.getGraph();
        final ORecordInternal<?> record = (ORecordInternal<?>) (iCurrentRecord != null ? iCurrentRecord.getRecord() : null);

        Object source = iParameters[0];
        if (OMultiValue.isMultiValue(source)) {
            // Collections are accepted but must contain exactly one source vertex.
            if (OMultiValue.getSize(source) > 1)
                throw new IllegalArgumentException("Only one sourceVertex is allowed");
            source = OMultiValue.getFirstValue(source);
        }
        paramSourceVertex = graph.getVertex((OIdentifiable) OSQLHelper.getValue(source, record, iContext));

        Object dest = iParameters[1];
        if (OMultiValue.isMultiValue(dest)) {
            // Same single-element rule for the destination.
            if (OMultiValue.getSize(dest) > 1)
                throw new IllegalArgumentException("Only one destinationVertex is allowed");
            dest = OMultiValue.getFirstValue(dest);
        }
        paramDestinationVertex = graph.getVertex((OIdentifiable) OSQLHelper.getValue(dest, record, iContext));

        paramWeightFieldName = (String) OSQLHelper.getValue(iParameters[2], record, iContext);
        if (iParameters.length > 3)
            paramDirection = Direction.valueOf(iParameters[3].toString().toUpperCase());

        return super.execute(iParameters, iContext);
    }

    public String getSyntax() {
        return "Syntax error: dijkstra(<sourceVertex>, <destinationVertex>, <weightEdgeFieldName>, [<direction>])";
    }

    /**
     * Returns the currently known distance to the destination, treating unknown or
     * unreachable vertices as Float.MAX_VALUE (i.e. "infinity").
     */
    @Override
    protected Float getShortestDistance(final Vertex destination) {
        if (destination == null)
            return Float.MAX_VALUE;
        final Float d = distance.get(destination);
        return d == null ? Float.MAX_VALUE : d;
    }

    @Override
    protected Float getMinimumDistance() {
        return MIN;
    }

    /**
     * Reads the weight of the first edge between node and target from the configured
     * weight property; missing edges or non-numeric/absent values count as zero.
     */
    protected Float getDistance(final Vertex node, final Vertex target) {
        final Iterator<Edge> edges = ((OrientVertex) node).getEdges((OrientVertex) target, paramDirection).iterator();
        if (edges.hasNext()) {
            final Edge e = edges.next();
            if (e != null) {
                final Object fieldValue = e.getProperty(paramWeightFieldName);
                if (fieldValue != null)
                    if (fieldValue instanceof Float)
                        return (Float) fieldValue;
                    else if (fieldValue instanceof Number)
                        return ((Number) fieldValue).floatValue();
            }
        }
        return MIN;
    }

    @Override
    protected Float sumDistances(final Float iDistance1, final Float iDistance2) {
        return iDistance1.floatValue() + iDistance2.floatValue();
    }
}
| 1no label
|
graphdb_src_main_java_com_orientechnologies_orient_graph_sql_functions_OSQLFunctionDijkstra.java
|
1,444 |
/**
 * Hadoop Mapper that writes the incoming (IN-direction) edges of each FaunusVertex into a
 * target TitanGraph. A graph transaction spans the whole map task: edges are created or
 * deleted per vertex in map(), and the transaction is committed (or rolled back on failure)
 * in cleanup(). An optional loader script can override how edges are materialized.
 */
public static class EdgeMap extends Mapper<NullWritable, FaunusVertex, NullWritable, FaunusVertex> {
// Target graph, opened in setup() and shut down in cleanup().
private TitanGraph graph;
// Whether element state (new/deleted) is tracked through the pipeline.
private boolean trackState;
private ModifiableHadoopConfiguration faunusConf;
// Optional user-supplied script controlling edge creation; null when not configured.
private LoaderScriptWrapper loaderScript;
@Override
public void setup(final Mapper.Context context) throws IOException, InterruptedException {
faunusConf = ModifiableHadoopConfiguration.of(DEFAULT_COMPAT.getContextConfiguration(context));
graph = TitanGraphOutputMapReduce.generateGraph(faunusConf);
trackState = context.getConfiguration().getBoolean(Tokens.TITAN_HADOOP_PIPELINE_TRACK_STATE, false);
// Check whether a script is defined in the config
if (faunusConf.has(OUTPUT_LOADER_SCRIPT_FILE)) {
Path scriptPath = new Path(faunusConf.get(OUTPUT_LOADER_SCRIPT_FILE));
FileSystem scriptFS = FileSystem.get(DEFAULT_COMPAT.getJobContextConfiguration(context));
loaderScript = new LoaderScriptWrapper(scriptFS, scriptPath);
}
}
@Override
public void map(final NullWritable key, final FaunusVertex value, final Mapper<NullWritable, FaunusVertex, NullWritable, FaunusVertex>.Context context) throws IOException, InterruptedException {
try {
// Process every incoming edge of this vertex against the target graph.
for (final TitanEdge edge : value.query().queryAll().direction(IN).titanEdges()) {
this.getCreateOrDeleteEdge(value, (StandardFaunusEdge)edge, context);
}
} catch (final Exception e) {
// Any failure aborts the whole task's transaction and is surfaced as an IOException.
graph.rollback();
DEFAULT_COMPAT.incrementContextCounter(context, Counters.FAILED_TRANSACTIONS, 1L);
throw new IOException(e.getMessage(), e);
}
}
@Override
public void cleanup(final Mapper<NullWritable, FaunusVertex, NullWritable, FaunusVertex>.Context context) throws IOException, InterruptedException {
try {
// Commit the task-scoped transaction covering all map() calls.
graph.commit();
DEFAULT_COMPAT.incrementContextCounter(context, Counters.SUCCESSFUL_TRANSACTIONS, 1L);
} catch (Exception e) {
LOGGER.error("Could not commit transaction during Reduce.cleanup(): ", e);
graph.rollback();
DEFAULT_COMPAT.incrementContextCounter(context, Counters.FAILED_TRANSACTIONS, 1L);
throw new IOException(e.getMessage(), e);
}
graph.shutdown();
}
/**
 * Creates or deletes the Titan edge corresponding to the given Faunus edge. When a loader
 * script with an edge method is configured, the script builds the edge and its properties
 * are synchronized; otherwise the default create-or-delete relation logic is used.
 */
public TitanEdge getCreateOrDeleteEdge(final FaunusVertex faunusVertex, final StandardFaunusEdge faunusEdge, final Mapper<NullWritable, FaunusVertex, NullWritable, FaunusVertex>.Context context) throws InterruptedException {
final Direction dir = IN;
// Resolve the Titan-side vertex via the TITAN_ID property stamped on the Faunus vertex.
final TitanVertex titanVertex = (TitanVertex) this.graph.getVertex(faunusVertex.getProperty(TITAN_ID));
if (null != loaderScript && loaderScript.hasEdgeMethod()) {
TitanEdge te = loaderScript.getEdge(faunusEdge, titanVertex, getOtherTitanVertex(faunusVertex, faunusEdge, dir.opposite(), graph), graph, context);
synchronizeRelationProperties(graph, faunusEdge, te, context);
return te;
} else {
return (TitanEdge) getCreateOrDeleteRelation(graph, trackState, dir, faunusVertex, titanVertex, faunusEdge, context);
}
}
}
| 1no label
|
titan-hadoop-parent_titan-hadoop-core_src_main_java_com_thinkaurelius_titan_hadoop_formats_util_TitanGraphOutputMapReduce.java
|
1,471 |
/**
 * SQL function "in": traverses from a record to its adjacent vertices over incoming edges,
 * optionally restricted by edge labels. All traversal mechanics live in OSQLFunctionMove;
 * this subclass only fixes the direction to IN.
 */
public class OSQLFunctionIn extends OSQLFunctionMove {
public static final String NAME = "in";
public OSQLFunctionIn() {
// 0..-1: accepts any number of optional label arguments.
super(NAME, 0, -1);
}
@Override
protected Object move(final OrientBaseGraph graph, final OIdentifiable iRecord, final String[] iLabels) {
// Vertex-to-vertex traversal over incoming edges with the given labels.
return v2v(graph, iRecord, Direction.IN, iLabels);
}
}
| 1no label
|
graphdb_src_main_java_com_orientechnologies_orient_graph_sql_functions_OSQLFunctionIn.java
|
263 |
/**
 * Marker-style interface for commands that can declare whether they should be replicated
 * across a distributed cluster.
 */
public interface OCommandDistributedReplicateRequest {
// True when executing this command must be replicated to other nodes.
public boolean isReplicated();
}
| 0true
|
core_src_main_java_com_orientechnologies_orient_core_command_OCommandDistributedReplicateRequest.java
|
288 |
@Repository("blDataDrivenEnumerationDao")
public class DataDrivenEnumerationDaoImpl implements DataDrivenEnumerationDao {
@PersistenceContext(unitName = "blPU")
protected EntityManager em;
@Resource(name = "blEntityConfiguration")
protected EntityConfiguration entityConfiguration;
@Override
public DataDrivenEnumeration readEnumByKey(String enumKey) {
TypedQuery<DataDrivenEnumeration> query = new TypedQueryBuilder<DataDrivenEnumeration>(DataDrivenEnumeration.class, "dde")
.addRestriction("dde.key", "=", enumKey)
.toQuery(em);
return query.getSingleResult();
}
@Override
public DataDrivenEnumerationValue readEnumValueByKey(String enumKey, String enumValueKey) {
TypedQuery<DataDrivenEnumerationValue> query =
new TypedQueryBuilder<DataDrivenEnumerationValue>(DataDrivenEnumerationValue.class, "ddev")
.addRestriction("ddev.type.key", "=", enumKey)
.addRestriction("ddev.key", "=", enumValueKey)
.toQuery(em);
return query.getSingleResult();
}
}
| 0true
|
common_src_main_java_org_broadleafcommerce_common_enumeration_dao_DataDrivenEnumerationDaoImpl.java
|
391 |
// Spawn a competing thread that attempts to acquire the multimap lock on `key`,
// waiting up to 4 seconds; the latch is counted down only when the lock is obtained,
// letting the test assert the acquisition happened.
new Thread() {
public void run() {
try {
if (mm.tryLock(key, 4, TimeUnit.SECONDS)) {
tryLockSuccess.countDown();
}
} catch (InterruptedException e) {
// An interrupt during the timed wait is a test failure, not an expected path.
fail(e.getMessage());
}
}
}.start();
| 0true
|
hazelcast-client_src_test_java_com_hazelcast_client_multimap_ClientMultiMapLockTest.java
|
660 |
@Repository("blProductDao")
public class ProductDaoImpl implements ProductDao {
@PersistenceContext(unitName="blPU")
protected EntityManager em;
@Resource(name="blEntityConfiguration")
protected EntityConfiguration entityConfiguration;
protected Long currentDateResolution = 10000L;
protected Date cachedDate = SystemTime.asDate();
/**
 * Persists the given product and returns the managed instance produced by the merge.
 */
@Override
public Product save(Product product) {
return em.merge(product);
}
/**
 * Loads a product by primary key, or null when no product exists for the id.
 */
@Override
public Product readProductById(Long productId) {
return em.find(ProductImpl.class, productId);
}
/**
 * Loads all products whose ids appear in the given list using a cached criteria query.
 * NOTE(review): returns null (not an empty list) for a null or empty input — callers
 * must null-check; confirm before changing to an empty-list contract.
 */
@Override
public List<Product> readProductsByIds(List<Long> productIds) {
if (productIds == null || productIds.size() == 0) {
return null;
}
// Set up the criteria query that specifies we want to return Products
CriteriaBuilder builder = em.getCriteriaBuilder();
CriteriaQuery<Product> criteria = builder.createQuery(Product.class);
Root<ProductImpl> product = criteria.from(ProductImpl.class);
criteria.select(product);
// We only want results that match the product IDs
criteria.where(product.get("id").as(Long.class).in(productIds));
TypedQuery<Product> query = em.createQuery(criteria);
// Enable the second-level query cache under the shared Catalog region.
query.setHint(QueryHints.HINT_CACHEABLE, true);
query.setHint(QueryHints.HINT_CACHE_REGION, "query.Catalog");
return query.getResultList();
}
/**
 * Finds products whose name starts with the given search string (a '%' wildcard is
 * appended, making this a prefix match). Results are served from the query cache.
 */
@Override
public List<Product> readProductsByName(String searchName) {
query.setParameter("name", searchName + '%');
query.setHint(QueryHints.HINT_CACHEABLE, true);
query.setHint(QueryHints.HINT_CACHE_REGION, "query.Catalog");
return query.getResultList();
}
@Override
public List<Product> readProductsByName(@Nonnull String searchName, @Nonnull int limit, @Nonnull int offset) {
TypedQuery<Product> query = em.createNamedQuery("BC_READ_PRODUCTS_BY_NAME", Product.class);
query.setParameter("name", searchName + '%');
query.setFirstResult(offset);
query.setMaxResults(limit);
query.setHint(QueryHints.HINT_CACHEABLE, true);
query.setHint(QueryHints.HINT_CACHE_REGION, "query.Catalog");
return query.getResultList();
}
protected Date getCurrentDateAfterFactoringInDateResolution() {
Date returnDate = SystemTime.getCurrentDateWithinTimeResolution(cachedDate, currentDateResolution);
if (returnDate != cachedDate) {
if (SystemTime.shouldCacheDate()) {
cachedDate = returnDate;
}
}
return returnDate;
}
@Override
public List<Product> readActiveProductsByCategory(Long categoryId) {
Date currentDate = getCurrentDateAfterFactoringInDateResolution();
return readActiveProductsByCategoryInternal(categoryId, currentDate);
}
@Override
@Deprecated
public List<Product> readActiveProductsByCategory(Long categoryId, Date currentDate) {
return readActiveProductsByCategoryInternal(categoryId, currentDate);
}
protected List<Product> readActiveProductsByCategoryInternal(Long categoryId, Date currentDate) {
TypedQuery<Product> query = em.createNamedQuery("BC_READ_ACTIVE_PRODUCTS_BY_CATEGORY", Product.class);
query.setParameter("categoryId", categoryId);
query.setParameter("currentDate", currentDate);
query.setHint(QueryHints.HINT_CACHEABLE, true);
query.setHint(QueryHints.HINT_CACHE_REGION, "query.Catalog");
return query.getResultList();
}
@Override
public List<Product> readFilteredActiveProductsByQuery(String query, ProductSearchCriteria searchCriteria) {
Date currentDate = getCurrentDateAfterFactoringInDateResolution();
return readFilteredActiveProductsByQueryInternal(query, currentDate, searchCriteria);
}
@Override
@Deprecated
public List<Product> readFilteredActiveProductsByQuery(String query, Date currentDate, ProductSearchCriteria searchCriteria) {
return readFilteredActiveProductsByQueryInternal(query, currentDate, searchCriteria);
}
protected List<Product> readFilteredActiveProductsByQueryInternal(String query, Date currentDate, ProductSearchCriteria searchCriteria) {
// Set up the criteria query that specifies we want to return Products
CriteriaBuilder builder = em.getCriteriaBuilder();
CriteriaQuery<Product> criteria = builder.createQuery(Product.class);
// The root of our search is Product since we are searching
Root<ProductImpl> product = criteria.from(ProductImpl.class);
// We also want to filter on attributes from sku and productAttributes
Join<Product, Sku> sku = product.join("defaultSku");
// Product objects are what we want back
criteria.select(product);
// We only want results that match the search query
List<Predicate> restrictions = new ArrayList<Predicate>();
String lq = query.toLowerCase();
restrictions.add(
builder.or(
builder.like(builder.lower(sku.get("name").as(String.class)), '%' + lq + '%'),
builder.like(builder.lower(sku.get("longDescription").as(String.class)), '%' + lq + '%')
)
);
attachProductSearchCriteria(searchCriteria, product, sku, restrictions);
attachActiveRestriction(currentDate, product, sku, restrictions);
attachOrderBy(searchCriteria, product, sku, criteria);
// Execute the query with the restrictions
criteria.where(restrictions.toArray(new Predicate[restrictions.size()]));
TypedQuery<Product> typedQuery = em.createQuery(criteria);
//don't cache - not really practical for open ended search
return typedQuery.getResultList();
}
@Override
public List<Product> readFilteredActiveProductsByCategory(Long categoryId, ProductSearchCriteria searchCriteria) {
Date currentDate = getCurrentDateAfterFactoringInDateResolution();
return readFilteredActiveProductsByCategoryInternal(categoryId, currentDate, searchCriteria);
}
@Override
@Deprecated
public List<Product> readFilteredActiveProductsByCategory(Long categoryId, Date currentDate,
ProductSearchCriteria searchCriteria) {
return readFilteredActiveProductsByCategoryInternal(categoryId, currentDate, searchCriteria);
}
protected List<Product> readFilteredActiveProductsByCategoryInternal(Long categoryId, Date currentDate,
ProductSearchCriteria searchCriteria) {
// Set up the criteria query that specifies we want to return Products
CriteriaBuilder builder = em.getCriteriaBuilder();
CriteriaQuery<Product> criteria = builder.createQuery(Product.class);
Root<ProductImpl> product = criteria.from(ProductImpl.class);
// We want to filter on attributes from product and sku
Join<Product, Sku> sku = product.join("defaultSku");
ListJoin<Product, CategoryProductXref> categoryXref = product.joinList("allParentCategoryXrefs");
// Product objects are what we want back
criteria.select(product);
// We only want results from the selected category
List<Predicate> restrictions = new ArrayList<Predicate>();
restrictions.add(builder.equal(categoryXref.get("categoryProductXref").get("category").get("id"), categoryId));
attachProductSearchCriteria(searchCriteria, product, sku, restrictions);
attachActiveRestriction(currentDate, product, sku, restrictions);
attachOrderBy(searchCriteria, product, sku, criteria);
// Execute the query with the restrictions
criteria.where(restrictions.toArray(new Predicate[restrictions.size()]));
TypedQuery<Product> typedQuery = em.createQuery(criteria);
//don't cache - not really practical for open ended search
return typedQuery.getResultList();
}
protected void attachActiveRestriction(Date currentDate, Path<? extends Product> product,
Path<? extends Sku> sku, List<Predicate> restrictions) {
CriteriaBuilder builder = em.getCriteriaBuilder();
// Add the product archived status flag restriction
restrictions.add(builder.or(
builder.isNull(product.get("archiveStatus").get("archived")),
builder.equal(product.get("archiveStatus").get("archived"), 'N')));
// Add the active start/end date restrictions
restrictions.add(builder.lessThan(sku.get("activeStartDate").as(Date.class), currentDate));
restrictions.add(builder.or(
builder.isNull(sku.get("activeEndDate")),
builder.greaterThan(sku.get("activeEndDate").as(Date.class), currentDate)));
}
protected void attachOrderBy(ProductSearchCriteria searchCriteria,
From<?, ? extends Product> product, Path<? extends Sku> sku, CriteriaQuery<?> criteria) {
if (StringUtils.isNotBlank(searchCriteria.getSortQuery())) {
CriteriaBuilder builder = em.getCriteriaBuilder();
List<Order> sorts = new ArrayList<Order>();
String sortQueries = searchCriteria.getSortQuery();
for (String sortQuery : sortQueries.split(",")) {
String[] sort = sortQuery.split(" ");
if (sort.length == 2) {
String key = sort[0];
boolean asc = sort[1].toLowerCase().contains("asc");
// Determine whether we should use the product path or the sku path
Path<?> pathToUse;
if (key.contains("defaultSku.")) {
pathToUse = sku;
key = key.substring("defaultSku.".length());
} else if (key.contains("product.")) {
pathToUse = product;
key = key.substring("product.".length());
} else {
// We don't know which path this facet is built on - resolves previous bug that attempted
// to attach search facet to any query parameter
continue;
}
if (asc) {
sorts.add(builder.asc(pathToUse.get(key)));
} else {
sorts.add(builder.desc(pathToUse.get(key)));
}
}
}
criteria.orderBy(sorts.toArray(new Order[sorts.size()]));
}
}
protected void attachProductSearchCriteria(ProductSearchCriteria searchCriteria,
From<?, ? extends Product> product, From<?, ? extends Sku> sku, List<Predicate> restrictions) {
CriteriaBuilder builder = em.getCriteriaBuilder();
// Build out the filter criteria from the users request
for (Entry<String, String[]> entry : searchCriteria.getFilterCriteria().entrySet()) {
String key = entry.getKey();
List<String> eqValues = new ArrayList<String>();
List<String[]> rangeValues = new ArrayList<String[]>();
// Determine which path is the appropriate one to use
Path<?> pathToUse;
if (key.contains("defaultSku.")) {
pathToUse = sku;
key = key.substring("defaultSku.".length());
} else if (key.contains("productAttributes.")) {
pathToUse = product.join("productAttributes");
key = key.substring("productAttributes.".length());
restrictions.add(builder.equal(pathToUse.get("name").as(String.class), key));
key = "value";
} else if (key.contains("product.")) {
pathToUse = product;
key = key.substring("product.".length());
} else {
// We don't know which path this facet is built on - resolves previous bug that attempted
// to attach search facet to any query parameter
continue;
}
// Values can be equality checks (ie manufacturer=Dave's) or range checks, which take the form
// key=range[minRange:maxRange]. Figure out what type of check this is
for (String value : entry.getValue()) {
if (value.contains("range[")) {
String[] rangeValue = new String[] {
value.substring(value.indexOf("[") + 1, value.indexOf(":")),
value.substring(value.indexOf(":") + 1, value.indexOf("]"))
};
rangeValues.add(rangeValue);
} else {
eqValues.add(value);
}
}
// Add the equality range restriction with the "in" builder. That means that the query string
// ?manufacturer=Dave&manufacturer=Bob would match either Dave or Bob
if (eqValues.size() > 0) {
restrictions.add(pathToUse.get(key).in(eqValues));
}
// If we have any range restrictions, we need to build those too. Ranges are also "or"ed together,
// such that specifying range[0:5] and range[10:null] for the same field would match items
// that were valued between 0 and 5 OR over 10 for that field
List<Predicate> rangeRestrictions = new ArrayList<Predicate>();
for (String[] range : rangeValues) {
BigDecimal min = new BigDecimal(range[0]);
BigDecimal max = null;
if (range[1] != null && !range[1].equals("null")) {
max = new BigDecimal(range[1]);
}
Predicate minRange = builder.greaterThan(pathToUse.get(key).as(BigDecimal.class), min);
Predicate maxRange = null;
if (max != null) {
maxRange = builder.lessThan(pathToUse.get(key).as(BigDecimal.class), max);
rangeRestrictions.add(builder.and(minRange, maxRange));
} else {
rangeRestrictions.add(minRange);
}
}
if (rangeRestrictions.size() > 0) {
restrictions.add(builder.or(rangeRestrictions.toArray(new Predicate[rangeRestrictions.size()])));
}
}
}
@Override
public List<Product> readActiveProductsByCategory(Long categoryId, int limit, int offset) {
Date currentDate = getCurrentDateAfterFactoringInDateResolution();
return readActiveProductsByCategoryInternal(categoryId, currentDate, limit, offset);
}
@Override
@Deprecated
public List<Product> readActiveProductsByCategory(Long categoryId, Date currentDate, int limit, int offset) {
return readActiveProductsByCategoryInternal(categoryId, currentDate, limit, offset);
}
public List<Product> readActiveProductsByCategoryInternal(Long categoryId, Date currentDate, int limit, int offset) {
TypedQuery<Product> query = em.createNamedQuery("BC_READ_ACTIVE_PRODUCTS_BY_CATEGORY", Product.class);
query.setParameter("categoryId", categoryId);
query.setParameter("currentDate", currentDate);
query.setFirstResult(offset);
query.setMaxResults(limit);
query.setHint(QueryHints.HINT_CACHEABLE, true);
query.setHint(QueryHints.HINT_CACHE_REGION, "query.Catalog");
return query.getResultList();
}
@Override
public List<Product> readProductsByCategory(Long categoryId) {
TypedQuery<Product> query = em.createNamedQuery("BC_READ_PRODUCTS_BY_CATEGORY", Product.class);
query.setParameter("categoryId", categoryId);
query.setHint(QueryHints.HINT_CACHEABLE, true);
query.setHint(QueryHints.HINT_CACHE_REGION, "query.Catalog");
return query.getResultList();
}
@Override
public List<Product> readProductsByCategory(Long categoryId, int limit, int offset) {
TypedQuery<Product> query = em.createNamedQuery("BC_READ_PRODUCTS_BY_CATEGORY", Product.class);
query.setParameter("categoryId", categoryId);
query.setFirstResult(offset);
query.setMaxResults(limit);
query.setHint(QueryHints.HINT_CACHEABLE, true);
query.setHint(QueryHints.HINT_CACHE_REGION, "query.Catalog");
return query.getResultList();
}
@Override
public void delete(Product product){
((Status) product).setArchived('Y');
em.merge(product);
}
@Override
public Product create(ProductType productType) {
return (Product) entityConfiguration.createEntityInstance(productType.getType());
}
@Override
public List<ProductBundle> readAutomaticProductBundles() {
Date currentDate = getCurrentDateAfterFactoringInDateResolution();
TypedQuery<ProductBundle> query = em.createNamedQuery("BC_READ_AUTOMATIC_PRODUCT_BUNDLES", ProductBundle.class);
query.setParameter("currentDate", currentDate);
query.setParameter("autoBundle", Boolean.TRUE);
query.setHint(QueryHints.HINT_CACHEABLE, true);
query.setHint(QueryHints.HINT_CACHE_REGION, "query.Catalog");
return query.getResultList();
}
@Override
public Long getCurrentDateResolution() {
return currentDateResolution;
}
@Override
public void setCurrentDateResolution(Long currentDateResolution) {
this.currentDateResolution = currentDateResolution;
}
@Override
public List<Product> findProductByURI(String uri) {
String urlKey = uri.substring(uri.lastIndexOf('/'));
Query query;
query = em.createNamedQuery("BC_READ_PRODUCTS_BY_OUTGOING_URL");
query.setParameter("url", uri);
query.setParameter("urlKey", urlKey);
query.setParameter("currentDate", getCurrentDateAfterFactoringInDateResolution());
query.setHint(QueryHints.HINT_CACHEABLE, true);
query.setHint(QueryHints.HINT_CACHE_REGION, "query.Catalog");
@SuppressWarnings("unchecked")
List<Product> results = (List<Product>) query.getResultList();
return results;
}
@Override
public List<Product> readAllActiveProducts(int page, int pageSize) {
Date currentDate = getCurrentDateAfterFactoringInDateResolution();
return readAllActiveProductsInternal(page, pageSize, currentDate);
}
@Override
@Deprecated
public List<Product> readAllActiveProducts(int page, int pageSize, Date currentDate) {
return readAllActiveProductsInternal(page, pageSize, currentDate);
}
protected List<Product> readAllActiveProductsInternal(int page, int pageSize, Date currentDate) {
CriteriaQuery<Product> criteria = getCriteriaForActiveProducts(currentDate);
int firstResult = page * pageSize;
TypedQuery<Product> query = em.createQuery(criteria);
query.setHint(QueryHints.HINT_CACHEABLE, true);
query.setHint(QueryHints.HINT_CACHE_REGION, "query.Catalog");
return query.setFirstResult(firstResult).setMaxResults(pageSize).getResultList();
}
@Override
public List<Product> readAllActiveProducts() {
Date currentDate = getCurrentDateAfterFactoringInDateResolution();
return readAllActiveProductsInternal(currentDate);
}
@Override
@Deprecated
public List<Product> readAllActiveProducts(Date currentDate) {
return readAllActiveProductsInternal(currentDate);
}
protected List<Product> readAllActiveProductsInternal(Date currentDate) {
CriteriaQuery<Product> criteria = getCriteriaForActiveProducts(currentDate);
TypedQuery<Product> query = em.createQuery(criteria);
query.setHint(QueryHints.HINT_CACHEABLE, true);
query.setHint(QueryHints.HINT_CACHE_REGION, "query.Catalog");
return query.getResultList();
}
@Override
public Long readCountAllActiveProducts() {
Date currentDate = getCurrentDateAfterFactoringInDateResolution();
return readCountAllActiveProductsInternal(currentDate);
}
@Override
@Deprecated
public Long readCountAllActiveProducts(Date currentDate) {
return readCountAllActiveProductsInternal(currentDate);
}
protected Long readCountAllActiveProductsInternal(Date currentDate) {
// Set up the criteria query that specifies we want to return a Long
CriteriaBuilder builder = em.getCriteriaBuilder();
CriteriaQuery<Long> criteria = builder.createQuery(Long.class);
// The root of our search is Product
Root<ProductImpl> product = criteria.from(ProductImpl.class);
// We need to filter on active date on the sku
Join<Product, Sku> sku = product.join("defaultSku");
// We want the count of products
criteria.select(builder.count(product));
// Ensure the product is currently active
List<Predicate> restrictions = new ArrayList<Predicate>();
attachActiveRestriction(currentDate, product, sku, restrictions);
// Add the restrictions to the criteria query
criteria.where(restrictions.toArray(new Predicate[restrictions.size()]));
TypedQuery<Long> query = em.createQuery(criteria);
query.setHint(QueryHints.HINT_CACHEABLE, true);
query.setHint(QueryHints.HINT_CACHE_REGION, "query.Catalog");
return query.getSingleResult();
}
protected CriteriaQuery<Product> getCriteriaForActiveProducts(Date currentDate) {
// Set up the criteria query that specifies we want to return Products
CriteriaBuilder builder = em.getCriteriaBuilder();
CriteriaQuery<Product> criteria = builder.createQuery(Product.class);
// The root of our search is Product
Root<ProductImpl> product = criteria.from(ProductImpl.class);
// We need to filter on active date on the sku
Join<Product, Sku> sku = product.join("defaultSku");
product.fetch("defaultSku");
// Product objects are what we want back
criteria.select(product);
// Ensure the product is currently active
List<Predicate> restrictions = new ArrayList<Predicate>();
attachActiveRestriction(currentDate, product, sku, restrictions);
// Add the restrictions to the criteria query
criteria.where(restrictions.toArray(new Predicate[restrictions.size()]));
return criteria;
}
}
| 1no label
|
core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_catalog_dao_ProductDaoImpl.java
|
41 |
@SuppressWarnings("unchecked")
public class OMultiValue {
/**
* Checks if a class is a multi-value type.
*
* @param iType
* Class to check
* @return true if it's an array, a collection or a map, otherwise false
*/
public static boolean isMultiValue(final Class<?> iType) {
return (iType.isArray() || Collection.class.isAssignableFrom(iType) || Map.class.isAssignableFrom(iType) || OMultiCollectionIterator.class
.isAssignableFrom(iType));
}
/**
* Checks if the object is a multi-value type.
*
* @param iObject
* Object to check
* @return true if it's an array, a collection or a map, otherwise false
*/
public static boolean isMultiValue(final Object iObject) {
return iObject == null ? false : isMultiValue(iObject.getClass());
}
public static boolean isIterable(final Object iObject) {
return iObject == null ? false : iObject instanceof Iterable<?> ? true : iObject instanceof Iterator<?>;
}
/**
* Returns the size of the multi-value object
*
* @param iObject
* Multi-value object (array, collection or map)
* @return the size of the multi value object
*/
public static int getSize(final Object iObject) {
if (iObject == null)
return 0;
if (iObject instanceof OSizeable)
return ((OSizeable) iObject).size();
if (!isMultiValue(iObject))
return 0;
if (iObject instanceof Collection<?>)
return ((Collection<Object>) iObject).size();
if (iObject instanceof Map<?, ?>)
return ((Map<?, Object>) iObject).size();
if (iObject.getClass().isArray())
return Array.getLength(iObject);
return 0;
}
/**
* Returns the first item of the Multi-value object (array, collection or map)
*
* @param iObject
* Multi-value object (array, collection or map)
* @return The first item if any
*/
public static Object getFirstValue(final Object iObject) {
if (iObject == null)
return null;
if (!isMultiValue(iObject) || getSize(iObject) == 0)
return null;
try {
if (iObject instanceof List<?>)
return ((List<Object>) iObject).get(0);
else if (iObject instanceof Collection<?>)
return ((Collection<Object>) iObject).iterator().next();
else if (iObject instanceof Map<?, ?>)
return ((Map<?, Object>) iObject).values().iterator().next();
else if (iObject.getClass().isArray())
return Array.get(iObject, 0);
} catch (Exception e) {
// IGNORE IT
OLogManager.instance().debug(iObject, "Error on reading the first item of the Multi-value field '%s'", iObject);
}
return null;
}
/**
* Returns the last item of the Multi-value object (array, collection or map)
*
* @param iObject
* Multi-value object (array, collection or map)
* @return The last item if any
*/
public static Object getLastValue(final Object iObject) {
if (iObject == null)
return null;
if (!isMultiValue(iObject))
return null;
try {
if (iObject instanceof List<?>)
return ((List<Object>) iObject).get(((List<Object>) iObject).size() - 1);
else if (iObject instanceof Collection<?>) {
Object last = null;
for (Object o : (Collection<Object>) iObject)
last = o;
return last;
} else if (iObject instanceof Map<?, ?>) {
Object last = null;
for (Object o : ((Map<?, Object>) iObject).values())
last = o;
return last;
} else if (iObject.getClass().isArray())
return Array.get(iObject, Array.getLength(iObject) - 1);
} catch (Exception e) {
// IGNORE IT
OLogManager.instance().debug(iObject, "Error on reading the last item of the Multi-value field '%s'", iObject);
}
return null;
}
/**
* Returns the iIndex item of the Multi-value object (array, collection or map)
*
* @param iObject
* Multi-value object (array, collection or map)
* @param iIndex
* integer as the position requested
* @return The first item if any
*/
public static Object getValue(final Object iObject, final int iIndex) {
if (iObject == null)
return null;
if (!isMultiValue(iObject))
return null;
if (iIndex > getSize(iObject))
return null;
try {
if (iObject instanceof List<?>)
return ((List<?>) iObject).get(iIndex);
else if (iObject instanceof Set<?>) {
int i = 0;
for (Object o : ((Set<?>) iObject)) {
if (i++ == iIndex) {
return o;
}
}
} else if (iObject instanceof Map<?, ?>) {
int i = 0;
for (Object o : ((Map<?, ?>) iObject).values()) {
if (i++ == iIndex) {
return o;
}
}
} else if (iObject.getClass().isArray())
return Array.get(iObject, iIndex);
} catch (Exception e) {
// IGNORE IT
OLogManager.instance().debug(iObject, "Error on reading the first item of the Multi-value field '%s'", iObject);
}
return null;
}
/**
* Returns an Iterable<Object> object to browse the multi-value instance (array, collection or map)
*
* @param iObject
* Multi-value object (array, collection or map)
*/
public static Iterable<Object> getMultiValueIterable(final Object iObject) {
if (iObject == null)
return null;
if (iObject instanceof Iterable<?>)
return (Iterable<Object>) iObject;
else if (iObject instanceof Collection<?>)
return ((Collection<Object>) iObject);
else if (iObject instanceof Map<?, ?>)
return ((Map<?, Object>) iObject).values();
else if (iObject.getClass().isArray())
return new OIterableObjectArray<Object>(iObject);
else if (iObject instanceof Iterator<?>) {
final List<Object> temp = new ArrayList<Object>();
for (Iterator<Object> it = (Iterator<Object>) iObject; it.hasNext();)
temp.add(it.next());
return temp;
}
return null;
}
/**
* Returns an Iterator<Object> object to browse the multi-value instance (array, collection or map)
*
* @param iObject
* Multi-value object (array, collection or map)
*/
public static Iterator<Object> getMultiValueIterator(final Object iObject) {
if (iObject == null)
return null;
if (iObject instanceof Iterator<?>)
return (Iterator<Object>) iObject;
if (!isMultiValue(iObject))
return null;
if (iObject instanceof Collection<?>)
return ((Collection<Object>) iObject).iterator();
if (iObject instanceof Map<?, ?>)
return ((Map<?, Object>) iObject).values().iterator();
if (iObject.getClass().isArray())
return new OIterableObjectArray<Object>(iObject).iterator();
return new OIterableObject<Object>(iObject);
}
/**
* Returns a stringified version of the multi-value object.
*
* @param iObject
* Multi-value object (array, collection or map)
* @return a stringified version of the multi-value object.
*/
public static String toString(final Object iObject) {
final StringBuilder sb = new StringBuilder();
if (iObject instanceof Collection<?>) {
final Collection<Object> coll = (Collection<Object>) iObject;
sb.append('[');
for (final Iterator<Object> it = coll.iterator(); it.hasNext();) {
try {
Object e = it.next();
sb.append(e == iObject ? "(this Collection)" : e);
if (it.hasNext())
sb.append(", ");
} catch (NoSuchElementException ex) {
// IGNORE THIS
}
}
return sb.append(']').toString();
} else if (iObject instanceof Map<?, ?>) {
final Map<String, Object> map = (Map<String, Object>) iObject;
Entry<String, Object> e;
sb.append('{');
for (final Iterator<Entry<String, Object>> it = map.entrySet().iterator(); it.hasNext();) {
try {
e = it.next();
sb.append(e.getKey());
sb.append(":");
sb.append(e.getValue() == iObject ? "(this Map)" : e.getValue());
if (it.hasNext())
sb.append(", ");
} catch (NoSuchElementException ex) {
// IGNORE THIS
}
}
return sb.append('}').toString();
}
return iObject.toString();
}
/**
* Utility function that add a value to the main object. It takes care about collections/array and single values.
*
* @param iObject
* MultiValue where to add value(s)
* @param iToAdd
* Single value, array of values or collections of values. Map are not supported.
* @return
*/
public static Object add(final Object iObject, final Object iToAdd) {
if (iObject != null) {
if (iObject instanceof Collection<?>) {
// COLLECTION - ?
final Collection<Object> coll = (Collection<Object>) iObject;
if (iToAdd instanceof Collection<?>) {
// COLLECTION - COLLECTION
for (Object o : (Collection<Object>) iToAdd) {
if (isMultiValue(o))
add(coll, o);
else
coll.add(o);
}
}
else if (iToAdd != null && iToAdd.getClass().isArray()) {
// ARRAY - COLLECTION
for (int i = 0; i < Array.getLength(iToAdd); ++i) {
Object o = Array.get(iToAdd, i);
if (isMultiValue(o))
add(coll, o);
else
coll.add(o);
}
} else if (iToAdd instanceof Map<?, ?>) {
// MAP
for (Entry<Object, Object> entry : ((Map<Object, Object>) iToAdd).entrySet())
coll.add(entry.getValue());
} else if (iToAdd instanceof Iterable<?>) {
// ITERABLE
for (Object o : (Iterable<?>) iToAdd)
coll.add(o);
} else if (iToAdd instanceof Iterator<?>) {
// ITERATOR
for (Iterator<?> it = (Iterator<?>) iToAdd; it.hasNext();)
coll.add(it.next());
} else
coll.add(iToAdd);
} else if (iObject.getClass().isArray()) {
// ARRAY - ?
final Object[] copy;
if (iToAdd instanceof Collection<?>) {
// ARRAY - COLLECTION
final int tot = Array.getLength(iObject) + ((Collection<Object>) iToAdd).size();
copy = Arrays.copyOf((Object[]) iObject, tot);
final Iterator<Object> it = ((Collection<Object>) iToAdd).iterator();
for (int i = Array.getLength(iObject); i < tot; ++i)
copy[i] = it.next();
} else if (iToAdd != null && iToAdd.getClass().isArray()) {
// ARRAY - ARRAY
final int tot = Array.getLength(iObject) + Array.getLength(iToAdd);
copy = Arrays.copyOf((Object[]) iObject, tot);
System.arraycopy(iToAdd, 0, iObject, Array.getLength(iObject), Array.getLength(iToAdd));
} else {
copy = Arrays.copyOf((Object[]) iObject, Array.getLength(iObject) + 1);
copy[copy.length - 1] = iToAdd;
}
return copy;
} else
throw new IllegalArgumentException("Object " + iObject + " is not a multi value");
}
return iObject;
}
/**
* Utility function that remove a value from the main object. It takes care about collections/array and single values.
*
* @param iObject
* MultiValue where to add value(s)
* @param iToRemove
* Single value, array of values or collections of values. Map are not supported.
* @param iAllOccurrences
* True if the all occurrences must be removed or false of only the first one (Like java.util.Collection.remove())
* @return
*/
public static Object remove(Object iObject, Object iToRemove, final boolean iAllOccurrences) {
if (iObject != null) {
if (iObject instanceof OMultiCollectionIterator<?>) {
final Collection<Object> list = new LinkedList<Object>();
for (Object o : ((OMultiCollectionIterator<?>) iObject))
list.add(o);
iObject = list;
}
if (iToRemove instanceof OMultiCollectionIterator<?>) {
// TRANSFORM IN SET ONCE TO OPTIMIZE LOOPS DURING REMOVE
final Set<Object> set = new HashSet<Object>();
for (Object o : ((OMultiCollectionIterator<?>) iToRemove))
set.add(o);
iToRemove = set;
}
if (iObject instanceof Collection<?>) {
// COLLECTION - ?
final Collection<Object> coll = (Collection<Object>) iObject;
if (iToRemove instanceof Collection<?>) {
// COLLECTION - COLLECTION
for (Object o : (Collection<Object>) iToRemove) {
if (isMultiValue(o))
remove(coll, o, iAllOccurrences);
else
coll.remove(o);
}
}
else if (iToRemove != null && iToRemove.getClass().isArray()) {
// ARRAY - COLLECTION
for (int i = 0; i < Array.getLength(iToRemove); ++i) {
Object o = Array.get(iToRemove, i);
if (isMultiValue(o))
remove(coll, o, iAllOccurrences);
else
coll.remove(o);
}
} else if (iToRemove instanceof Map<?, ?>) {
// MAP
for (Entry<Object, Object> entry : ((Map<Object, Object>) iToRemove).entrySet())
coll.remove(entry.getKey());
} else if (iToRemove instanceof Iterator<?>) {
// ITERATOR
if (iToRemove instanceof OMultiCollectionIterator<?>)
((OMultiCollectionIterator<?>) iToRemove).reset();
if (iAllOccurrences) {
OMultiCollectionIterator<?> it = (OMultiCollectionIterator<?>) iToRemove;
batchRemove(coll, it);
} else {
for (Iterator<?> it = (Iterator<?>) iToRemove; it.hasNext();) {
final Object itemToRemove = it.next();
while (coll.remove(itemToRemove))
if (!iAllOccurrences)
// REMOVE ONLY THE FIRST ITEM
break;
// REMOVE ALL THE ITEM
}
}
} else
coll.remove(iToRemove);
} else if (iObject.getClass().isArray()) {
// ARRAY - ?
final Object[] copy;
if (iToRemove instanceof Collection<?>) {
// ARRAY - COLLECTION
final int sourceTot = Array.getLength(iObject);
final int tot = sourceTot - ((Collection<Object>) iToRemove).size();
copy = new Object[tot];
int k = 0;
for (int i = 0; i < sourceTot; ++i) {
Object o = Array.get(iObject, i);
if (o != null) {
boolean found = false;
for (Object toRemove : (Collection<Object>) iToRemove) {
if (o.equals(toRemove)) {
// SKIP
found = true;
break;
}
}
if (!found)
copy[k++] = o;
}
}
} else if (iToRemove != null && iToRemove.getClass().isArray()) {
throw new UnsupportedOperationException("Cannot execute remove() against an array");
} else {
throw new UnsupportedOperationException("Cannot execute remove() against an array");
}
return copy;
} else
throw new IllegalArgumentException("Object " + iObject + " is not a multi value");
}
return iObject;
}
private static void batchRemove(Collection<Object> coll, Iterator<?> it) {
int approximateRemainingSize;
if (it instanceof OSizeable) {
approximateRemainingSize = ((OSizeable) it).size();
} else {
approximateRemainingSize = -1;
}
while (it.hasNext()) {
Set batch = prepareBatch(it, approximateRemainingSize);
coll.removeAll(batch);
approximateRemainingSize -= batch.size();
}
}
private static Set prepareBatch(Iterator<?> it, int approximateRemainingSize) {
final HashSet batch;
if (approximateRemainingSize > -1) {
if (approximateRemainingSize > 10000)
batch = new HashSet(13400);
else
batch = new HashSet((int) (approximateRemainingSize / 0.75));
} else {
batch = new HashSet();
}
int count = 0;
while (count < 10000 && it.hasNext()) {
batch.add(it.next());
count++;
}
return batch;
}
public static Object[] array(final Object iValue) {
return array(iValue, Object.class);
}
public static <T> T[] array(final Object iValue, final Class<? extends T> iClass) {
return array(iValue, iClass, null);
}
public static <T> T[] array(final Object iValue, final Class<? extends T> iClass, final OCallable<Object, Object> iCallback) {
if (iValue == null)
return null;
final T[] result;
if (isMultiValue(iValue)) {
// CREATE STATIC ARRAY AND FILL IT
result = (T[]) Array.newInstance(iClass, getSize(iValue));
int i = 0;
for (Iterator<T> it = (Iterator<T>) getMultiValueIterator(iValue); it.hasNext(); ++i)
result[i] = (T) convert(it.next(), iCallback);
} else if (isIterable(iValue)) {
// SIZE UNKNOWN: USE A LIST AS TEMPORARY OBJECT
final List<T> temp = new ArrayList<T>();
for (Iterator<T> it = (Iterator<T>) getMultiValueIterator(iValue); it.hasNext();)
temp.add((T) convert(it.next(), iCallback));
if (iClass.equals(Object.class))
result = (T[]) temp.toArray();
else
// CONVERT THEM
result = temp.toArray((T[]) Array.newInstance(iClass, getSize(iValue)));
} else {
result = (T[]) Array.newInstance(iClass, 1);
result[0] = (T) (T) convert(iValue, iCallback);
}
return result;
}
public static Object convert(final Object iObject, final OCallable<Object, Object> iCallback) {
return iCallback != null ? iCallback.call(iObject) : iObject;
}
/**
 * Set-style equality for two collections: same size and mutual containment.
 * Note: for collections with duplicate elements this is weaker than element-by-element
 * equality (e.g. [a,a,b] vs [a,b,b] compare equal), matching the original contract.
 *
 * @param col1 first collection; may be null
 * @param col2 second collection; may be null
 * @return true when both are the same reference, both null, or mutually containing
 *         with equal sizes; false otherwise (including exactly one null argument)
 */
public static boolean equals(final Collection<Object> col1, final Collection<Object> col2) {
if (col1 == col2)
return true;
// FIX: the original threw NullPointerException on any null argument
if (col1 == null || col2 == null)
return false;
if (col1.size() != col2.size())
return false;
return col1.containsAll(col2) && col2.containsAll(col1);
}
}
| 0true
|
commons_src_main_java_com_orientechnologies_common_collection_OMultiValue.java
|
196 |
/**
 * Constant holder for the audit embeddable's name, referenced from mapping
 * annotations elsewhere in the codebase.
 */
public static class Name {
/** Name under which the audit embeddable is registered. */
public static final String Audit = "Auditable_Audit";
private Name() {
// constant holder - not instantiable
}
}
| 0true
|
common_src_main_java_org_broadleafcommerce_common_audit_Auditable.java
|
281 |
// Builds the outgoing MIME message from the EmailTarget/EmailInfo entries in props.
MimeMessagePreparator preparator = new MimeMessagePreparator() {
public void prepare(MimeMessage mimeMessage) throws Exception {
EmailTarget emailUser = (EmailTarget) props.get(EmailPropertyType.USER.getType());
EmailInfo info = (EmailInfo) props.get(EmailPropertyType.INFO.getType());
// multipart only when attachments are present; note this check treats null as "none"
MimeMessageHelper message = new MimeMessageHelper(mimeMessage, (info.getAttachments() != null && info.getAttachments().size() > 0));
message.setTo(emailUser.getEmailAddress());
message.setFrom(info.getFromAddress());
message.setSubject(info.getSubject());
if (emailUser.getBCCAddresses() != null && emailUser.getBCCAddresses().length > 0) {
message.setBcc(emailUser.getBCCAddresses());
}
if (emailUser.getCCAddresses() != null && emailUser.getCCAddresses().length > 0) {
message.setCc(emailUser.getCCAddresses());
}
String messageBody = info.getMessageBody();
if (messageBody == null) {
messageBody = buildMessageBody(info, props);
}
// second argument: treat the body as HTML
message.setText(messageBody, true);
// FIX: guard against a null attachment list. The multipart flag above already
// tolerates null, but the original loop would throw NullPointerException on it.
if (info.getAttachments() != null) {
for (Attachment attachment : info.getAttachments()) {
ByteArrayDataSource dataSource = new ByteArrayDataSource(attachment.getData(), attachment.getMimeType());
message.addAttachment(attachment.getFilename(), dataSource);
}
}
}
};
| 1no label
|
common_src_main_java_org_broadleafcommerce_common_email_service_message_MessageCreator.java
|
276 |
/**
 * A command request carried as plain text (e.g. a SQL statement), in addition to
 * the internal request contract and stream serializability it inherits.
 */
public interface OCommandRequestText extends OCommandRequestInternal, OSerializableStream {
/** @return the textual form of the command. */
public String getText();
/**
 * Sets the textual form of the command.
 *
 * @param iText the command text
 * @return this request, for call chaining
 */
public OCommandRequestText setText(String iText);
}
| 0true
|
core_src_main_java_com_orientechnologies_orient_core_command_OCommandRequestText.java
|
638 |
/**
 * Base class for Hazelcast distributed collection proxies. Every operation is
 * serialized to {@code Data} and executed on the single partition that owns this
 * collection's name, so the node-side collection container sees a consistent view.
 * Subclasses supply the concrete {@link CollectionConfig} via {@link #getConfig}.
 */
public abstract class AbstractCollectionProxyImpl<S extends RemoteService, E> extends AbstractDistributedObject<S>
implements InitializingObject {
// distributed name of this collection; also determines the owning partition
protected final String name;
// partition that hosts all operations for this collection
protected final int partitionId;
protected AbstractCollectionProxyImpl(String name, NodeEngine nodeEngine, S service) {
super(nodeEngine, service);
this.name = name;
// all operations are routed to the partition derived from the collection name
this.partitionId = nodeEngine.getPartitionService().getPartitionId(getNameAsPartitionAwareData());
}
/**
 * Registers the item listeners declared in configuration. Listeners given only by
 * class name are instantiated via the config class loader; instantiation failures
 * are rethrown as runtime exceptions.
 */
@Override
public void initialize() {
final NodeEngine nodeEngine = getNodeEngine();
CollectionConfig config = getConfig(nodeEngine);
final List<ItemListenerConfig> itemListenerConfigs = config.getItemListenerConfigs();
for (ItemListenerConfig itemListenerConfig : itemListenerConfigs) {
ItemListener listener = itemListenerConfig.getImplementation();
if (listener == null && itemListenerConfig.getClassName() != null) {
try {
listener = ClassLoaderUtil.newInstance(nodeEngine.getConfigClassLoader(), itemListenerConfig.getClassName());
} catch (Exception e) {
throw ExceptionUtil.rethrow(e);
}
}
if (listener != null) {
// inject the HazelcastInstance before the listener can receive events
if (listener instanceof HazelcastInstanceAware) {
((HazelcastInstanceAware) listener).setHazelcastInstance(nodeEngine.getHazelcastInstance());
}
addItemListener(listener, itemListenerConfig.isIncludeValue());
}
}
}
/** Returns the collection configuration for this proxy's concrete type. */
protected abstract CollectionConfig getConfig(NodeEngine nodeEngine);
@Override
public String getName() {
return name;
}
/** Adds the element via a partition operation; null elements are rejected. */
public boolean add(E e) {
throwExceptionIfNull(e);
final Data value = getNodeEngine().toData(e);
final CollectionAddOperation operation = new CollectionAddOperation(name, value);
final Boolean result = invoke(operation);
return result;
}
/** Removes one occurrence of the element via a partition operation. */
public boolean remove(Object o) {
throwExceptionIfNull(o);
final Data value = getNodeEngine().toData(o);
final CollectionRemoveOperation operation = new CollectionRemoveOperation(name, value);
final Boolean result = invoke(operation);
return result;
}
/** Returns the current size as reported by the owning partition. */
public int size() {
final CollectionSizeOperation operation = new CollectionSizeOperation(name);
final Integer result = invoke(operation);
return result;
}
public boolean isEmpty() {
return size() == 0;
}
/** Containment check; the element is compared in serialized (Data) form. */
public boolean contains(Object o) {
throwExceptionIfNull(o);
Set<Data> valueSet = new HashSet<Data>(1);
valueSet.add(getNodeEngine().toData(o));
final CollectionContainsOperation operation = new CollectionContainsOperation(name, valueSet);
final Boolean result = invoke(operation);
return result;
}
/** Bulk containment check; every element (and the collection itself) must be non-null. */
public boolean containsAll(Collection<?> c) {
throwExceptionIfNull(c);
Set<Data> valueSet = new HashSet<Data>(c.size());
final NodeEngine nodeEngine = getNodeEngine();
for (Object o : c) {
throwExceptionIfNull(o);
valueSet.add(nodeEngine.toData(o));
}
final CollectionContainsOperation operation = new CollectionContainsOperation(name, valueSet);
final Boolean result = invoke(operation);
return result;
}
/**
 * Adds all elements in one partition operation. A List is used (not a Set) so
 * duplicates and ordering in the input are preserved on the wire.
 */
public boolean addAll(Collection<? extends E> c) {
throwExceptionIfNull(c);
List<Data> valueList = new ArrayList<Data>(c.size());
final NodeEngine nodeEngine = getNodeEngine();
for (E e : c) {
throwExceptionIfNull(e);
valueList.add(nodeEngine.toData(e));
}
final CollectionAddAllOperation operation = new CollectionAddAllOperation(name, valueList);
final Boolean result = invoke(operation);
return result;
}
public boolean retainAll(Collection<?> c) {
// retain == compare-and-remove keeping matching elements
return compareAndRemove(true, c);
}
public boolean removeAll(Collection<?> c) {
// removeAll == compare-and-remove dropping matching elements
return compareAndRemove(false, c);
}
/** Shared implementation of retainAll/removeAll; {@code retain} selects the mode. */
private boolean compareAndRemove(boolean retain, Collection<?> c) {
throwExceptionIfNull(c);
Set<Data> valueSet = new HashSet<Data>(c.size());
final NodeEngine nodeEngine = getNodeEngine();
for (Object o : c) {
throwExceptionIfNull(o);
valueSet.add(nodeEngine.toData(o));
}
final CollectionCompareAndRemoveOperation operation = new CollectionCompareAndRemoveOperation(name, retain, valueSet);
final Boolean result = invoke(operation);
return result;
}
public void clear() {
final CollectionClearOperation operation = new CollectionClearOperation(name);
invoke(operation);
}
/**
 * Iterates over a point-in-time snapshot fetched by {@link #getAll()}; the
 * iterator does not reflect concurrent modifications and its remove() does not
 * affect the distributed collection.
 */
public Iterator<E> iterator() {
return getAll().iterator();
}
public Object[] toArray() {
return getAll().toArray();
}
public <T> T[] toArray(T[] a) {
return getAll().toArray(a);
}
/** Fetches and deserializes the whole collection from the owning partition. */
private Collection<E> getAll() {
final CollectionGetAllOperation operation = new CollectionGetAllOperation(name);
final SerializableCollection result = invoke(operation);
final Collection<Data> collection = result.getCollection();
final List<E> list = new ArrayList<E>(collection.size());
final NodeEngine nodeEngine = getNodeEngine();
for (Data data : collection) {
list.add(nodeEngine.<E>toObject(data));
}
return list;
}
/**
 * Registers an item listener and returns the registration id needed to remove it.
 *
 * @param includeValue whether delivered events should carry the item value
 */
public String addItemListener(ItemListener<E> listener, boolean includeValue) {
final EventService eventService = getNodeEngine().getEventService();
final CollectionEventFilter filter = new CollectionEventFilter(includeValue);
final EventRegistration registration = eventService.registerListener(getServiceName(), name, filter, listener);
return registration.getId();
}
/** Deregisters a listener by the id returned from {@link #addItemListener}. */
public boolean removeItemListener(String registrationId) {
EventService eventService = getNodeEngine().getEventService();
return eventService.deregisterListener(getServiceName(), name, registrationId);
}
/**
 * Invokes the operation on this collection's partition, blocks for the result,
 * and deserializes it. All failures (including InterruptedException) are rethrown
 * as unchecked via ExceptionUtil.
 */
protected <T> T invoke(CollectionOperation operation) {
final NodeEngine nodeEngine = getNodeEngine();
try {
Future f = nodeEngine.getOperationService().invokeOnPartition(getServiceName(), operation, partitionId);
return nodeEngine.toObject(f.get());
} catch (Throwable throwable) {
throw ExceptionUtil.rethrow(throwable);
}
}
/** Null-rejection used by all mutating/query methods (collections are null-hostile). */
protected void throwExceptionIfNull(Object o) {
if (o == null) {
throw new NullPointerException("Object is null");
}
}
}
| 1no label
|
hazelcast_src_main_java_com_hazelcast_collection_AbstractCollectionProxyImpl.java
|
2,927 |
/**
 * Factory-of-factories for pre-built tokenizers. Hands out the cached
 * current-version factory, or a version-specific one when the index was
 * created under an older Lucene/Elasticsearch version.
 */
public class PreBuiltTokenizerFactoryFactory implements TokenizerFactoryFactory {
private final TokenizerFactory tokenizerFactory;
public PreBuiltTokenizerFactoryFactory(TokenizerFactory tokenizerFactory) {
this.tokenizerFactory = tokenizerFactory;
}
@Override
public TokenizerFactory create(String name, Settings settings) {
Version indexVersion = settings.getAsVersion(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT);
if (Version.CURRENT.equals(indexVersion)) {
// index created on the current version: reuse the cached factory
return tokenizerFactory;
}
// older index: resolve the tokenizer variant matching its creation version
return PreBuiltTokenizers.valueOf(name.toUpperCase(Locale.ROOT)).getTokenizerFactory(indexVersion);
}
}
| 1no label
|
src_main_java_org_elasticsearch_index_analysis_PreBuiltTokenizerFactoryFactory.java
|
77 |
@SuppressWarnings("serial")
/**
 * ForkJoin task that maps each (key, value) pair through {@code transformer}
 * (null results are skipped) and folds the non-null results with {@code reducer}.
 * Part of ConcurrentHashMapV8's bulk-operation machinery: the table is split
 * recursively into right-hand subtasks, each leaf reduces its slice, and results
 * are merged bottom-up via the CountedCompleter completion chain.
 */
static final class MapReduceMappingsTask<K,V,U>
extends BulkTask<K,V,U> {
final BiFun<? super K, ? super V, ? extends U> transformer;
final BiFun<? super U, ? super U, ? extends U> reducer;
U result;
// 'rights' links the subtasks this task forked; 'nextRight' chains siblings
MapReduceMappingsTask<K,V,U> rights, nextRight;
MapReduceMappingsTask
(BulkTask<K,V,?> p, int b, int i, int f, Node<K,V>[] t,
MapReduceMappingsTask<K,V,U> nextRight,
BiFun<? super K, ? super V, ? extends U> transformer,
BiFun<? super U, ? super U, ? extends U> reducer) {
super(p, b, i, f, t); this.nextRight = nextRight;
this.transformer = transformer;
this.reducer = reducer;
}
public final U getRawResult() { return result; }
public final void compute() {
final BiFun<? super K, ? super V, ? extends U> transformer;
final BiFun<? super U, ? super U, ? extends U> reducer;
if ((transformer = this.transformer) != null &&
(reducer = this.reducer) != null) {
// split: while batch budget remains, fork the upper half as a new task
for (int i = baseIndex, f, h; batch > 0 &&
(h = ((f = baseLimit) + i) >>> 1) > i;) {
addToPendingCount(1);
(rights = new MapReduceMappingsTask<K,V,U>
(this, batch >>>= 1, baseLimit = h, f, tab,
rights, transformer, reducer)).fork();
}
// leaf work: transform and reduce this task's remaining slice
U r = null;
for (Node<K,V> p; (p = advance()) != null; ) {
U u;
if ((u = transformer.apply(p.key, p.val)) != null)
r = (r == null) ? u : reducer.apply(r, u);
}
result = r;
// merge: walk the completion chain, folding each finished subtask's
// result into its parent (null results are simply skipped)
CountedCompleter<?> c;
for (c = firstComplete(); c != null; c = c.nextComplete()) {
@SuppressWarnings("unchecked") MapReduceMappingsTask<K,V,U>
t = (MapReduceMappingsTask<K,V,U>)c,
s = t.rights;
while (s != null) {
U tr, sr;
if ((sr = s.result) != null)
t.result = (((tr = t.result) == null) ? sr :
reducer.apply(tr, sr));
// unlink so completed subtasks can be garbage collected
s = t.rights = s.nextRight;
}
}
}
}
}
| 0true
|
src_main_java_jsr166e_ConcurrentHashMapV8.java
|
28 |
/**
 * Shared Blueprints test base for the Cassandra-backed Titan stores.
 * Concrete subclasses only have to provide the backend configuration.
 */
public abstract class AbstractCassandraBlueprintsTest extends TitanBlueprintsTest {
/** Supplies the storage configuration used to open every test graph. */
protected abstract WriteConfiguration getGraphConfig();
@Override
public TitanGraph openGraph(String uid) {
// the uid is ignored: each call opens against the same backend configuration
return TitanFactory.open(getGraphConfig());
}
@Override
public boolean supportsMultipleGraphs() {
return false;
}
@Override
public void beforeSuite() {
// no suite-level setup required for the Cassandra backends
}
}
| 0true
|
titan-cassandra_src_test_java_com_thinkaurelius_titan_blueprints_AbstractCassandraBlueprintsTest.java
|
6 |
public class StateHandlingStatementOperations implements
KeyReadOperations,
KeyWriteOperations,
EntityOperations,
SchemaReadOperations,
SchemaWriteOperations
{
private final StoreReadLayer storeLayer;
private final LegacyPropertyTrackers legacyPropertyTrackers;
private final ConstraintIndexCreator constraintIndexCreator;
public StateHandlingStatementOperations(
StoreReadLayer storeLayer, LegacyPropertyTrackers propertyTrackers,
ConstraintIndexCreator constraintIndexCreator )
{
this.storeLayer = storeLayer;
this.legacyPropertyTrackers = propertyTrackers;
this.constraintIndexCreator = constraintIndexCreator;
}
@Override
public void nodeDelete( KernelStatement state, long nodeId )
{
legacyPropertyTrackers.nodeDelete( nodeId );
state.txState().nodeDoDelete( nodeId );
}
@Override
public void relationshipDelete( KernelStatement state, long relationshipId )
{
legacyPropertyTrackers.relationshipDelete( relationshipId );
state.txState().relationshipDoDelete( relationshipId );
}
@Override
public boolean nodeHasLabel( KernelStatement state, long nodeId, int labelId ) throws EntityNotFoundException
{
if ( state.hasTxStateWithChanges() )
{
if ( state.txState().nodeIsDeletedInThisTx( nodeId ) )
{
return false;
}
if ( state.txState().nodeIsAddedInThisTx( nodeId ) )
{
TxState.UpdateTriState labelState = state.txState().labelState( nodeId, labelId );
return labelState.isTouched() && labelState.isAdded();
}
TxState.UpdateTriState labelState = state.txState().labelState( nodeId, labelId );
if ( labelState.isTouched() )
{
return labelState.isAdded();
}
}
return storeLayer.nodeHasLabel( state, nodeId, labelId );
}
@Override
public PrimitiveIntIterator nodeGetLabels( KernelStatement state, long nodeId ) throws EntityNotFoundException
{
if ( state.hasTxStateWithChanges() )
{
if ( state.txState().nodeIsDeletedInThisTx( nodeId ) )
{
return IteratorUtil.emptyPrimitiveIntIterator();
}
if ( state.txState().nodeIsAddedInThisTx( nodeId ) )
{
return toPrimitiveIntIterator(
state.txState().nodeStateLabelDiffSets( nodeId ).getAdded().iterator() );
}
return state.txState().nodeStateLabelDiffSets( nodeId ).applyPrimitiveIntIterator(
storeLayer.nodeGetLabels( state, nodeId ) );
}
return storeLayer.nodeGetLabels( state, nodeId );
}
@Override
public boolean nodeAddLabel( KernelStatement state, long nodeId, int labelId ) throws EntityNotFoundException
{
if ( nodeHasLabel( state, nodeId, labelId ) )
{
// Label is already in state or in store, no-op
return false;
}
state.txState().nodeDoAddLabel( labelId, nodeId );
return true;
}
@Override
public boolean nodeRemoveLabel( KernelStatement state, long nodeId, int labelId ) throws EntityNotFoundException
{
if ( !nodeHasLabel( state, nodeId, labelId ) )
{
// Label does not exist in state nor in store, no-op
return false;
}
state.txState().nodeDoRemoveLabel( labelId, nodeId );
return true;
}
@Override
public PrimitiveLongIterator nodesGetForLabel( KernelStatement state, int labelId )
{
if ( state.hasTxStateWithChanges() )
{
PrimitiveLongIterator wLabelChanges =
state.txState().nodesWithLabelChanged( labelId ).applyPrimitiveLongIterator(
storeLayer.nodesGetForLabel( state, labelId ) );
return state.txState().nodesDeletedInTx().applyPrimitiveLongIterator( wLabelChanges );
}
return storeLayer.nodesGetForLabel( state, labelId );
}
@Override
public IndexDescriptor indexCreate( KernelStatement state, int labelId, int propertyKey )
{
IndexDescriptor rule = new IndexDescriptor( labelId, propertyKey );
state.txState().indexRuleDoAdd( rule );
return rule;
}
@Override
public void indexDrop( KernelStatement state, IndexDescriptor descriptor ) throws DropIndexFailureException
{
state.txState().indexDoDrop( descriptor );
}
@Override
public void uniqueIndexDrop( KernelStatement state, IndexDescriptor descriptor ) throws DropIndexFailureException
{
state.txState().constraintIndexDoDrop( descriptor );
}
@Override
public UniquenessConstraint uniquenessConstraintCreate( KernelStatement state, int labelId, int propertyKeyId )
throws CreateConstraintFailureException
{
UniquenessConstraint constraint = new UniquenessConstraint( labelId, propertyKeyId );
try
{
IndexDescriptor index = new IndexDescriptor( labelId, propertyKeyId );
if ( state.txState().constraintIndexDoUnRemove( index ) ) // ..., DROP, *CREATE*
{ // creation is undoing a drop
state.txState().constraintIndexDiffSetsByLabel( labelId ).unRemove( index );
if ( !state.txState().constraintDoUnRemove( constraint ) ) // CREATE, ..., DROP, *CREATE*
{ // ... the drop we are undoing did itself undo a prior create...
state.txState().constraintsChangesForLabel( labelId ).unRemove( constraint );
state.txState().constraintDoAdd(
constraint, state.txState().indexCreatedForConstraint( constraint ) );
}
}
else // *CREATE*
{ // create from scratch
for ( Iterator<UniquenessConstraint> it = storeLayer.constraintsGetForLabelAndPropertyKey(
state, labelId, propertyKeyId ); it.hasNext(); )
{
if ( it.next().equals( labelId, propertyKeyId ) )
{
return constraint;
}
}
long indexId = constraintIndexCreator.createUniquenessConstraintIndex(
state, this, labelId, propertyKeyId );
state.txState().constraintDoAdd( constraint, indexId );
}
return constraint;
}
catch ( TransactionalException | ConstraintVerificationFailedKernelException | DropIndexFailureException e )
{
throw new CreateConstraintFailureException( constraint, e );
}
}
@Override
public Iterator<UniquenessConstraint> constraintsGetForLabelAndPropertyKey( KernelStatement state,
int labelId, int propertyKeyId )
{
return applyConstraintsDiff( state, storeLayer.constraintsGetForLabelAndPropertyKey(
state, labelId, propertyKeyId ), labelId, propertyKeyId );
}
@Override
public Iterator<UniquenessConstraint> constraintsGetForLabel( KernelStatement state, int labelId )
{
return applyConstraintsDiff( state, storeLayer.constraintsGetForLabel( state, labelId ), labelId );
}
@Override
public Iterator<UniquenessConstraint> constraintsGetAll( KernelStatement state )
{
return applyConstraintsDiff( state, storeLayer.constraintsGetAll( state ) );
}
private Iterator<UniquenessConstraint> applyConstraintsDiff( KernelStatement state,
Iterator<UniquenessConstraint> constraints, int labelId, int propertyKeyId )
{
if ( state.hasTxStateWithChanges() )
{
DiffSets<UniquenessConstraint> diff =
state.txState().constraintsChangesForLabelAndProperty( labelId, propertyKeyId );
if ( diff != null )
{
return diff.apply( constraints );
}
}
return constraints;
}
private Iterator<UniquenessConstraint> applyConstraintsDiff( KernelStatement state,
Iterator<UniquenessConstraint> constraints, int labelId )
{
if ( state.hasTxStateWithChanges() )
{
DiffSets<UniquenessConstraint> diff = state.txState().constraintsChangesForLabel( labelId );
if ( diff != null )
{
return diff.apply( constraints );
}
}
return constraints;
}
private Iterator<UniquenessConstraint> applyConstraintsDiff( KernelStatement state,
Iterator<UniquenessConstraint> constraints )
{
if ( state.hasTxStateWithChanges() )
{
DiffSets<UniquenessConstraint> diff = state.txState().constraintsChanges();
if ( diff != null )
{
return diff.apply( constraints );
}
}
return constraints;
}
@Override
public void constraintDrop( KernelStatement state, UniquenessConstraint constraint )
{
state.txState().constraintDoDrop( constraint );
}
@Override
public IndexDescriptor indexesGetForLabelAndPropertyKey( KernelStatement state, int labelId, int propertyKey )
throws SchemaRuleNotFoundException
{
Iterable<IndexDescriptor> committedRules;
try
{
committedRules = option( storeLayer.indexesGetForLabelAndPropertyKey( state, labelId,
propertyKey ) );
}
catch ( SchemaRuleNotFoundException e )
{
committedRules = emptyList();
}
DiffSets<IndexDescriptor> ruleDiffSet = state.txState().indexDiffSetsByLabel( labelId );
Iterator<IndexDescriptor> rules =
state.hasTxStateWithChanges() ? ruleDiffSet.apply( committedRules.iterator() ) : committedRules
.iterator();
IndexDescriptor single = singleOrNull( rules );
if ( single == null )
{
throw new SchemaRuleNotFoundException( "Index rule for label:" + labelId + " and property:" +
propertyKey + " not found" );
}
return single;
}
@Override
public InternalIndexState indexGetState( KernelStatement state, IndexDescriptor descriptor )
throws IndexNotFoundKernelException
{
// If index is in our state, then return populating
if ( state.hasTxStateWithChanges() )
{
if ( checkIndexState( descriptor, state.txState().indexDiffSetsByLabel( descriptor.getLabelId() ) ) )
{
return InternalIndexState.POPULATING;
}
if ( checkIndexState( descriptor, state.txState().constraintIndexDiffSetsByLabel( descriptor.getLabelId()
) ) )
{
return InternalIndexState.POPULATING;
}
}
return storeLayer.indexGetState( state, descriptor );
}
private boolean checkIndexState( IndexDescriptor indexRule, DiffSets<IndexDescriptor> diffSet )
throws IndexNotFoundKernelException
{
if ( diffSet.isAdded( indexRule ) )
{
return true;
}
if ( diffSet.isRemoved( indexRule ) )
{
throw new IndexNotFoundKernelException( String.format( "Index for label id %d on property id %d has been " +
"dropped in this transaction.",
indexRule.getLabelId(),
indexRule.getPropertyKeyId() ) );
}
return false;
}
@Override
public Iterator<IndexDescriptor> indexesGetForLabel( KernelStatement state, int labelId )
{
if ( state.hasTxStateWithChanges() )
{
return state.txState().indexDiffSetsByLabel( labelId )
.apply( storeLayer.indexesGetForLabel( state, labelId ) );
}
return storeLayer.indexesGetForLabel( state, labelId );
}
@Override
public Iterator<IndexDescriptor> indexesGetAll( KernelStatement state )
{
if ( state.hasTxStateWithChanges() )
{
return state.txState().indexChanges().apply( storeLayer.indexesGetAll( state ) );
}
return storeLayer.indexesGetAll( state );
}
@Override
public Iterator<IndexDescriptor> uniqueIndexesGetForLabel( KernelStatement state, int labelId )
{
if ( state.hasTxStateWithChanges() )
{
return state.txState().constraintIndexDiffSetsByLabel( labelId )
.apply( storeLayer.uniqueIndexesGetForLabel( state, labelId ) );
}
return storeLayer.uniqueIndexesGetForLabel( state, labelId );
}
@Override
public Iterator<IndexDescriptor> uniqueIndexesGetAll( KernelStatement state )
{
if ( state.hasTxStateWithChanges() )
{
return state.txState().constraintIndexChanges()
.apply( storeLayer.uniqueIndexesGetAll( state ) );
}
return storeLayer.uniqueIndexesGetAll( state );
}
@Override
public long nodeGetUniqueFromIndexLookup(
KernelStatement state,
IndexDescriptor index,
Object value )
throws IndexNotFoundKernelException, IndexBrokenKernelException
{
PrimitiveLongIterator committed = storeLayer.nodeGetUniqueFromIndexLookup( state, index, value );
PrimitiveLongIterator exactMatches = filterExactIndexMatches( state, index, value, committed );
PrimitiveLongIterator changeFilteredMatches = filterIndexStateChanges( state, index, value, exactMatches );
return single( changeFilteredMatches, NO_SUCH_NODE );
}
@Override
public PrimitiveLongIterator nodesGetFromIndexLookup( KernelStatement state, IndexDescriptor index, final Object value )
throws IndexNotFoundKernelException
{
PrimitiveLongIterator committed = storeLayer.nodesGetFromIndexLookup( state, index, value );
PrimitiveLongIterator exactMatches = filterExactIndexMatches( state, index, value, committed );
PrimitiveLongIterator changeFilteredMatches = filterIndexStateChanges( state, index, value, exactMatches );
return changeFilteredMatches;
}
private PrimitiveLongIterator filterExactIndexMatches(
KernelStatement state,
IndexDescriptor index,
Object value,
PrimitiveLongIterator committed )
{
if ( isNumberOrArray( value ) )
{
return filter( exactMatch( state, index.getPropertyKeyId(), value ), committed );
}
return committed;
}
private boolean isNumberOrArray( Object value )
{
return value instanceof Number || value.getClass().isArray();
}
private PrimitiveLongPredicate exactMatch(
final KernelStatement state,
final int propertyKeyId,
final Object value )
{
return new PrimitiveLongPredicate()
{
@Override
public boolean accept( long nodeId )
{
try
{
return nodeGetProperty( state, nodeId, propertyKeyId ).valueEquals( value );
}
catch ( EntityNotFoundException e )
{
throw new ThisShouldNotHappenError( "Chris", "An index claims a node by id " + nodeId +
" has the value. However, it looks like that node does not exist.", e);
}
}
};
}
private PrimitiveLongIterator filterIndexStateChanges(
KernelStatement state,
IndexDescriptor index,
Object value,
PrimitiveLongIterator nodeIds )
{
if ( state.hasTxStateWithChanges() )
{
DiffSets<Long> labelPropertyChanges = nodesWithLabelAndPropertyDiffSet( state, index, value );
DiffSets<Long> deletionChanges = state.txState().nodesDeletedInTx();
// Apply to actual index lookup
return deletionChanges.applyPrimitiveLongIterator(
labelPropertyChanges.applyPrimitiveLongIterator( nodeIds ) );
}
return nodeIds;
}
@Override
public Property nodeSetProperty( KernelStatement state, long nodeId, DefinedProperty property )
throws EntityNotFoundException
{
Property existingProperty = nodeGetProperty( state, nodeId, property.propertyKeyId() );
if ( !existingProperty.isDefined() )
{
legacyPropertyTrackers.nodeAddStoreProperty( nodeId, property );
state.neoStoreTransaction.nodeAddProperty( nodeId, property.propertyKeyId(), property.value() );
}
else
{
legacyPropertyTrackers.nodeChangeStoreProperty( nodeId, (DefinedProperty) existingProperty, property );
state.neoStoreTransaction.nodeChangeProperty( nodeId, property.propertyKeyId(), property.value() );
}
state.txState().nodeDoReplaceProperty( nodeId, existingProperty, property );
return existingProperty;
}
@Override
public Property relationshipSetProperty( KernelStatement state, long relationshipId, DefinedProperty property )
throws EntityNotFoundException
{
Property existingProperty = relationshipGetProperty( state, relationshipId, property.propertyKeyId() );
if ( !existingProperty.isDefined() )
{
legacyPropertyTrackers.relationshipAddStoreProperty( relationshipId, property );
state.neoStoreTransaction.relAddProperty( relationshipId, property.propertyKeyId(), property.value() );
}
else
{
legacyPropertyTrackers.relationshipChangeStoreProperty( relationshipId, (DefinedProperty)
existingProperty, property );
state.neoStoreTransaction.relChangeProperty( relationshipId, property.propertyKeyId(), property.value() );
}
state.txState().relationshipDoReplaceProperty( relationshipId, existingProperty, property );
return existingProperty;
}
@Override
public Property graphSetProperty( KernelStatement state, DefinedProperty property )
{
Property existingProperty = graphGetProperty( state, property.propertyKeyId() );
if ( !existingProperty.isDefined() )
{
state.neoStoreTransaction.graphAddProperty( property.propertyKeyId(), property.value() );
}
else
{
state.neoStoreTransaction.graphChangeProperty( property.propertyKeyId(), property.value() );
}
state.txState().graphDoReplaceProperty( existingProperty, property );
return existingProperty;
}
@Override
public Property nodeRemoveProperty( KernelStatement state, long nodeId, int propertyKeyId )
throws EntityNotFoundException
{
Property existingProperty = nodeGetProperty( state, nodeId, propertyKeyId );
if ( existingProperty.isDefined() )
{
legacyPropertyTrackers.nodeRemoveStoreProperty( nodeId, (DefinedProperty) existingProperty );
state.neoStoreTransaction.nodeRemoveProperty( nodeId, propertyKeyId );
}
state.txState().nodeDoRemoveProperty( nodeId, existingProperty );
return existingProperty;
}
@Override
public Property relationshipRemoveProperty( KernelStatement state, long relationshipId, int propertyKeyId )
throws EntityNotFoundException
{
Property existingProperty = relationshipGetProperty( state, relationshipId, propertyKeyId );
if ( existingProperty.isDefined() )
{
legacyPropertyTrackers.relationshipRemoveStoreProperty( relationshipId, (DefinedProperty)
existingProperty );
state.neoStoreTransaction.relRemoveProperty( relationshipId, propertyKeyId );
}
state.txState().relationshipDoRemoveProperty( relationshipId, existingProperty );
return existingProperty;
}
@Override
public Property graphRemoveProperty( KernelStatement state, int propertyKeyId )
{
Property existingProperty = graphGetProperty( state, propertyKeyId );
if ( existingProperty.isDefined() )
{
state.neoStoreTransaction.graphRemoveProperty( propertyKeyId );
}
state.txState().graphDoRemoveProperty( existingProperty );
return existingProperty;
}
@Override
public PrimitiveLongIterator nodeGetPropertyKeys( KernelStatement state, long nodeId ) throws EntityNotFoundException
{
if ( state.hasTxStateWithChanges() )
{
return new PropertyKeyIdIterator( nodeGetAllProperties( state, nodeId ) );
}
return storeLayer.nodeGetPropertyKeys( state, nodeId );
}
@Override
public Property nodeGetProperty( KernelStatement state, long nodeId, int propertyKeyId )
throws EntityNotFoundException
{
if ( state.hasTxStateWithChanges() )
{
Iterator<DefinedProperty> properties = nodeGetAllProperties( state, nodeId );
while ( properties.hasNext() )
{
Property property = properties.next();
if ( property.propertyKeyId() == propertyKeyId )
{
return property;
}
}
return Property.noNodeProperty( nodeId, propertyKeyId );
}
return storeLayer.nodeGetProperty( state, nodeId, propertyKeyId );
}
@Override
public Iterator<DefinedProperty> nodeGetAllProperties( KernelStatement state, long nodeId )
throws EntityNotFoundException
{
if ( state.hasTxStateWithChanges() )
{
if ( state.txState().nodeIsAddedInThisTx( nodeId ) )
{
return state.txState().nodePropertyDiffSets( nodeId ).getAdded().iterator();
}
if ( state.txState().nodeIsDeletedInThisTx( nodeId ) )
{
// TODO Throw IllegalStateException to conform with beans API. We may want to introduce
// EntityDeletedException instead and use it instead of returning empty values in similar places
throw new IllegalStateException( "Node " + nodeId + " has been deleted" );
}
return state.txState().nodePropertyDiffSets( nodeId )
.apply( storeLayer.nodeGetAllProperties( state, nodeId ) );
}
return storeLayer.nodeGetAllProperties( state, nodeId );
}
@Override
public PrimitiveLongIterator relationshipGetPropertyKeys( KernelStatement state, long relationshipId )
throws EntityNotFoundException
{
if ( state.hasTxStateWithChanges() )
{
return new PropertyKeyIdIterator( relationshipGetAllProperties( state, relationshipId ) );
}
return storeLayer.relationshipGetPropertyKeys( state, relationshipId );
}
@Override
public Property relationshipGetProperty( KernelStatement state, long relationshipId, int propertyKeyId )
throws EntityNotFoundException
{
if ( state.hasTxStateWithChanges() )
{
Iterator<DefinedProperty> properties = relationshipGetAllProperties( state, relationshipId );
while ( properties.hasNext() )
{
Property property = properties.next();
if ( property.propertyKeyId() == propertyKeyId )
{
return property;
}
}
return Property.noRelationshipProperty( relationshipId, propertyKeyId );
}
return storeLayer.relationshipGetProperty( state, relationshipId, propertyKeyId );
}
@Override
public Iterator<DefinedProperty> relationshipGetAllProperties( KernelStatement state, long relationshipId )
throws EntityNotFoundException
{
if ( state.hasTxStateWithChanges() )
{
if ( state.txState().relationshipIsAddedInThisTx( relationshipId ) )
{
return state.txState().relationshipPropertyDiffSets( relationshipId ).getAdded().iterator();
}
if ( state.txState().relationshipIsDeletedInThisTx( relationshipId ) )
{
// TODO Throw IllegalStateException to conform with beans API. We may want to introduce
// EntityDeletedException instead and use it instead of returning empty values in similar places
throw new IllegalStateException( "Relationship " + relationshipId + " has been deleted" );
}
return state.txState().relationshipPropertyDiffSets( relationshipId )
.apply( storeLayer.relationshipGetAllProperties( state, relationshipId ) );
}
else
{
return storeLayer.relationshipGetAllProperties( state, relationshipId );
}
}
@Override
public PrimitiveLongIterator graphGetPropertyKeys( KernelStatement state )
{
if ( state.hasTxStateWithChanges() )
{
return new PropertyKeyIdIterator( graphGetAllProperties( state ) );
}
return storeLayer.graphGetPropertyKeys( state );
}
@Override
public Property graphGetProperty( KernelStatement state, int propertyKeyId )
{
Iterator<DefinedProperty> properties = graphGetAllProperties( state );
while ( properties.hasNext() )
{
Property property = properties.next();
if ( property.propertyKeyId() == propertyKeyId )
{
return property;
}
}
return Property.noGraphProperty( propertyKeyId );
}
@Override
public Iterator<DefinedProperty> graphGetAllProperties( KernelStatement state )
{
if ( state.hasTxStateWithChanges() )
{
return state.txState().graphPropertyDiffSets().apply( storeLayer.graphGetAllProperties( state ) );
}
return storeLayer.graphGetAllProperties( state );
}
/**
 * Computes the transaction-local diff of nodes matching
 * (label, propertyKey = value) for the given index, in three steps:
 * <ol>
 * <li>nodes whose property changed to/from the sought value, filtered
 *     down to those carrying the label,</li>
 * <li>plus newly labeled nodes that already had the correct property value,</li>
 * <li>minus de-labeled nodes that had the correct property value before.</li>
 * </ol>
 */
private DiffSets<Long> nodesWithLabelAndPropertyDiffSet( KernelStatement state, IndexDescriptor index, Object value )
{
    TxState txState = state.txState();
    int labelId = index.getLabelId();
    int propertyKeyId = index.getPropertyKeyId();

    // Start with nodes where the given property has changed
    DiffSets<Long> diff = txState.nodesWithChangedProperty( propertyKeyId, value );

    // Ensure remaining nodes have the correct label
    diff = diff.filter( new HasLabelFilter( state, labelId ) );

    // Include newly labeled nodes that already had the correct property
    HasPropertyFilter hasPropertyFilter = new HasPropertyFilter( state, propertyKeyId, value );
    Iterator<Long> addedNodesWithLabel = txState.nodesWithLabelAdded( labelId ).iterator();
    diff.addAll( filter( hasPropertyFilter, addedNodesWithLabel ) );

    // Remove de-labeled nodes that had the correct value before
    // (reuse the labelId extracted above for consistency with the other lookups)
    Set<Long> removedNodesWithLabel = txState.nodesWithLabelChanged( labelId ).getRemoved();
    diff.removeAll( filter( hasPropertyFilter, removedNodesWithLabel.iterator() ) );
    return diff;
}
private long nodeIfNotDeleted( long nodeId, TxState txState )
{
    // Map nodes deleted in this transaction to the NO_SUCH_NODE sentinel.
    if ( txState.nodeIsDeletedInThisTx( nodeId ) )
    {
        return NO_SUCH_NODE;
    }
    return nodeId;
}
/**
 * Predicate accepting a node iff it is not deleted in this transaction and
 * carries the expected value for the configured property key.
 */
private class HasPropertyFilter implements Predicate<Long>
{
    private final KernelStatement statement;
    private final int keyId;
    private final Object expectedValue;

    public HasPropertyFilter( KernelStatement state, int propertyKeyId, Object value )
    {
        this.statement = state;
        this.keyId = propertyKeyId;
        this.expectedValue = value;
    }

    @Override
    public boolean accept( Long nodeId )
    {
        try
        {
            // Nodes deleted by this transaction can never match.
            if ( statement.hasTxStateWithChanges() && statement.txState().nodeIsDeletedInThisTx( nodeId ) )
            {
                return false;
            }
            Property property = nodeGetProperty( statement, nodeId, keyId );
            return property.isDefined() && property.valueEquals( expectedValue );
        }
        catch ( EntityNotFoundException e )
        {
            // A node that cannot be found trivially fails the predicate.
            return false;
        }
    }
}
/**
 * Predicate accepting a node iff it carries the configured label.
 */
private class HasLabelFilter implements Predicate<Long>
{
    private final KernelStatement statement;
    private final int expectedLabelId;

    public HasLabelFilter( KernelStatement state, int labelId )
    {
        this.statement = state;
        this.expectedLabelId = labelId;
    }

    @Override
    public boolean accept( Long nodeId )
    {
        try
        {
            return nodeHasLabel( statement, nodeId, expectedLabelId );
        }
        catch ( EntityNotFoundException e )
        {
            // Missing nodes cannot carry the label.
            return false;
        }
    }
}
//
// Methods that delegate directly to storage
//
// These operations have no transaction-local component, so each call is
// forwarded verbatim to the underlying store layer. Note that several of
// the token operations ignore the passed-in statement entirely.
//
@Override
public Long indexGetOwningUniquenessConstraintId( KernelStatement state, IndexDescriptor index )
        throws SchemaRuleNotFoundException
{
    return storeLayer.indexGetOwningUniquenessConstraintId( state, index );
}
@Override
public long indexGetCommittedId( KernelStatement state, IndexDescriptor index, SchemaStorage.IndexRuleKind kind )
        throws SchemaRuleNotFoundException
{
    return storeLayer.indexGetCommittedId( state, index, kind );
}
@Override
public String indexGetFailure( Statement state, IndexDescriptor descriptor )
        throws IndexNotFoundKernelException
{
    return storeLayer.indexGetFailure( state, descriptor );
}
@Override
public int labelGetForName( Statement state, String labelName )
{
    return storeLayer.labelGetForName( labelName );
}
@Override
public String labelGetName( Statement state, int labelId ) throws LabelNotFoundKernelException
{
    return storeLayer.labelGetName( labelId );
}
@Override
public int propertyKeyGetForName( Statement state, String propertyKeyName )
{
    return storeLayer.propertyKeyGetForName( propertyKeyName );
}
@Override
public String propertyKeyGetName( Statement state, int propertyKeyId ) throws PropertyKeyIdNotFoundKernelException
{
    return storeLayer.propertyKeyGetName( propertyKeyId );
}
@Override
public Iterator<Token> propertyKeyGetAllTokens( Statement state )
{
    return storeLayer.propertyKeyGetAllTokens();
}
@Override
public Iterator<Token> labelsGetAllTokens( Statement state )
{
    return storeLayer.labelsGetAllTokens();
}
@Override
public int relationshipTypeGetForName( Statement state, String relationshipTypeName )
{
    return storeLayer.relationshipTypeGetForName( relationshipTypeName );
}
@Override
public String relationshipTypeGetName( Statement state, int relationshipTypeId ) throws
        RelationshipTypeIdNotFoundKernelException
{
    return storeLayer.relationshipTypeGetName( relationshipTypeId );
}
@Override
public int labelGetOrCreateForName( Statement state, String labelName ) throws IllegalTokenNameException,
        TooManyLabelsException
{
    return storeLayer.labelGetOrCreateForName( labelName );
}
@Override
public int propertyKeyGetOrCreateForName( Statement state, String propertyKeyName ) throws IllegalTokenNameException
{
    return storeLayer.propertyKeyGetOrCreateForName( propertyKeyName );
}
@Override
public int relationshipTypeGetOrCreateForName( Statement state, String relationshipTypeName ) throws IllegalTokenNameException
{
    return storeLayer.relationshipTypeGetOrCreateForName( relationshipTypeName );
}
}
| 1no label
|
community_kernel_src_main_java_org_neo4j_kernel_impl_api_StateHandlingStatementOperations.java
|
68 |
/**
 * An exported Map.Entry view of a mapping. Mutations through
 * {@link #setValue} write through to the backing map.
 */
static final class MapEntry<K,V> implements Map.Entry<K,V> {
    final K key; // non-null
    V val;       // non-null
    final ConcurrentHashMapV8<K,V> map;
    MapEntry(K key, V val, ConcurrentHashMapV8<K,V> map) {
        this.key = key;
        this.val = val;
        this.map = map;
    }
    public K getKey() { return key; }
    public V getValue() { return val; }
    public int hashCode() { return key.hashCode() ^ val.hashCode(); }
    public String toString() { return key + "=" + val; }
    public boolean equals(Object o) {
        if (!(o instanceof Map.Entry))
            return false;
        Map.Entry<?,?> e = (Map.Entry<?,?>) o;
        Object k = e.getKey();
        if (k == null)
            return false;
        if (k != key && !k.equals(key))
            return false;
        Object v = e.getValue();
        return v != null && (v == val || v.equals(val));
    }
    /**
     * Sets our entry's value and writes through to the map. The
     * value returned is the locally cached previous value; because
     * asynchronous changes to the map are not tracked, the map's
     * actual "previous" mapping may have differed (or been removed,
     * in which case the put re-establishes it). No stronger
     * guarantee is made.
     */
    public V setValue(V value) {
        if (value == null) throw new NullPointerException();
        V previous = val;
        val = value;
        map.put(key, value);
        return previous;
    }
}
| 0true
|
src_main_java_jsr166e_ConcurrentHashMapV8.java
|
2,893 |
/**
 * Predicate matching entries whose attribute value equals the configured
 * value. Supports index-backed filtering and custom serialization.
 */
public static class EqualPredicate extends AbstractPredicate {
    // The expected value; may be rewritten by apply() after type conversion.
    protected Comparable value;
    // Required for deserialization via readData().
    public EqualPredicate() {
    }
    public EqualPredicate(String attribute, Comparable value) {
        super(attribute);
        this.value = value;
    }
    /**
     * Index-backed filtering: fetches the records with the sought value
     * directly from the attribute's index.
     */
    @Override
    public Set<QueryableEntry> filter(QueryContext queryContext) {
        Index index = getIndex(queryContext);
        return index.getRecords(value);
    }
    @Override
    public boolean apply(Map.Entry mapEntry) {
        Comparable entryValue = readAttribute(mapEntry);
        if (entryValue == null) {
            // A null attribute matches only a null (or NULL-marker) expected value.
            return value == null || value == IndexImpl.NULL;
        }
        // NOTE(review): convert() overwrites the shared 'value' field based on
        // this entry's attribute type — presumably a one-time normalization,
        // but it mutates predicate state from apply(); confirm thread-safety.
        value = convert(mapEntry, entryValue, value);
        return entryValue.equals(value);
    }
    @Override
    public void writeData(ObjectDataOutput out) throws IOException {
        super.writeData(out);
        out.writeObject(value);
    }
    @Override
    public void readData(ObjectDataInput in) throws IOException {
        super.readData(in);
        value = in.readObject();
    }
    @Override
    public String toString() {
        return attribute + "=" + value;
    }
}
| 1no label
|
hazelcast_src_main_java_com_hazelcast_query_Predicates.java
|
5,209 |
/**
 * Fluent builder for the date histogram aggregation. Collects interval,
 * ordering, min doc count, time-zone adjustments and offsets, then writes
 * only the explicitly-configured values to XContent.
 */
public class DateHistogramBuilder extends ValuesSourceAggregationBuilder<DateHistogramBuilder> {
    // Either a Number (interval in millis) or a DateHistogram.Interval expression.
    private Object interval;
    private Histogram.Order order;
    private Long minDocCount;
    private String preZone;
    private String postZone;
    private boolean preZoneAdjustLargeInterval;
    long preOffset = 0;
    long postOffset = 0;
    float factor = 1.0f;
    public DateHistogramBuilder(String name) {
        super(name, InternalDateHistogram.TYPE.name());
    }
    /** Sets the bucket interval in milliseconds. */
    public DateHistogramBuilder interval(long interval) {
        this.interval = interval;
        return this;
    }
    /** Sets the bucket interval as a date-interval expression. */
    public DateHistogramBuilder interval(DateHistogram.Interval interval) {
        this.interval = interval;
        return this;
    }
    /** Sets the order in which buckets are returned. */
    public DateHistogramBuilder order(DateHistogram.Order order) {
        this.order = order;
        return this;
    }
    /** Sets the minimum number of documents a bucket must have to be returned. */
    public DateHistogramBuilder minDocCount(long minDocCount) {
        this.minDocCount = minDocCount;
        return this;
    }
    public DateHistogramBuilder preZone(String preZone) {
        this.preZone = preZone;
        return this;
    }
    public DateHistogramBuilder postZone(String postZone) {
        this.postZone = postZone;
        return this;
    }
    public DateHistogramBuilder preZoneAdjustLargeInterval(boolean preZoneAdjustLargeInterval) {
        this.preZoneAdjustLargeInterval = preZoneAdjustLargeInterval;
        return this;
    }
    public DateHistogramBuilder preOffset(long preOffset) {
        this.preOffset = preOffset;
        return this;
    }
    public DateHistogramBuilder postOffset(long postOffset) {
        this.postOffset = postOffset;
        return this;
    }
    public DateHistogramBuilder factor(float factor) {
        this.factor = factor;
        return this;
    }
    /**
     * Serializes the configured fields. Fields left at their defaults are
     * omitted; a missing interval is an error.
     */
    @Override
    protected XContentBuilder doInternalXContent(XContentBuilder builder, Params params) throws IOException {
        if (interval == null) {
            throw new SearchSourceBuilderException("[interval] must be defined for histogram aggregation [" + name + "]");
        }
        if (interval instanceof Number) {
            // Normalize a numeric interval (millis) to its time-value string form.
            interval = TimeValue.timeValueMillis(((Number) interval).longValue()).toString();
        }
        builder.field("interval", interval);
        if (minDocCount != null) {
            builder.field("min_doc_count", minDocCount);
        }
        if (order != null) {
            builder.field("order");
            order.toXContent(builder, params);
        }
        if (preZone != null) {
            builder.field("pre_zone", preZone);
        }
        if (postZone != null) {
            builder.field("post_zone", postZone);
        }
        if (preZoneAdjustLargeInterval) {
            builder.field("pre_zone_adjust_large_interval", true);
        }
        if (preOffset != 0) {
            builder.field("pre_offset", preOffset);
        }
        if (postOffset != 0) {
            builder.field("post_offset", postOffset);
        }
        if (factor != 1.0f) {
            builder.field("factor", factor);
        }
        return builder;
    }
}
| 1no label
|
src_main_java_org_elasticsearch_search_aggregations_bucket_histogram_DateHistogramBuilder.java
|
218 |
/**
 * Non-blocking write handler for a client connection. Outbound
 * {@link SocketWritable}s are queued and drained into a single byte buffer
 * which is flushed to the socket channel from the selector thread.
 */
public class ClientWriteHandler extends ClientAbstractSelectionHandler implements Runnable {
    // Pending writables, produced by arbitrary threads, consumed on the selector thread.
    private final Queue<SocketWritable> writeQueue = new ConcurrentLinkedQueue<SocketWritable>();
    // True when no wake-up task for this handler is currently scheduled on the selector.
    private final AtomicBoolean informSelector = new AtomicBoolean(true);
    private final ByteBuffer buffer;
    // True when the handler found nothing to write on its last pass.
    private boolean ready;
    // The writable currently being serialized into the buffer (may span multiple passes).
    private SocketWritable lastWritable;
    // Timestamp of the most recent handle() invocation; volatile for monitoring reads.
    private volatile long lastHandle;
    //        private boolean initialized = false;
    public ClientWriteHandler(ClientConnection connection, IOSelector ioSelector, int bufferSize) {
        super(connection, ioSelector);
        buffer = ByteBuffer.allocate(bufferSize);
    }
    /**
     * Drains queued writables into the buffer and flushes the buffer to the
     * channel. Runs on the selector thread.
     */
    @Override
    public void handle() {
        lastHandle = Clock.currentTimeMillis();
        if (!connection.live()) {
            return;
        }
        //        if (!initialized) {
        //        initialized = true;
        //        buffer.put(Protocols.CLIENT_BINARY.getBytes());
        //        buffer.put(ClientTypes.JAVA.getBytes());
        //        registerWrite();
        //        }
        // Nothing in flight, nothing queued, nothing buffered: mark idle and bail.
        if (lastWritable == null && (lastWritable = poll()) == null && buffer.position() == 0) {
            ready = true;
            return;
        }
        try {
            // Serialize as many writables as fit into the buffer.
            while (buffer.hasRemaining() && lastWritable != null) {
                boolean complete = lastWritable.writeTo(buffer);
                if (complete) {
                    lastWritable = poll();
                } else {
                    break;
                }
            }
            if (buffer.position() > 0) {
                buffer.flip();
                try {
                    socketChannel.write(buffer);
                } catch (Exception e) {
                    lastWritable = null;
                    handleSocketException(e);
                    return;
                }
                // Keep unflushed bytes for the next pass; otherwise reset.
                if (buffer.hasRemaining()) {
                    buffer.compact();
                } else {
                    buffer.clear();
                }
            }
        } catch (Throwable t) {
            logger.severe("Fatal Error at WriteHandler for endPoint: " + connection.getEndPoint(), t);
        } finally {
            ready = false;
            registerWrite();
        }
    }
    /**
     * Queues a writable and, if no wake-up is already pending, schedules this
     * handler on the selector. May be called from any thread.
     */
    public void enqueueSocketWritable(SocketWritable socketWritable) {
        writeQueue.offer(socketWritable);
        if (informSelector.compareAndSet(true, false)) {
            // we don't have to call wake up if this WriteHandler is
            // already in the task queue.
            // we can have a counter to check this later on.
            // for now, wake up regardless.
            ioSelector.addTask(this);
            ioSelector.wakeup();
        }
    }
    private SocketWritable poll() {
        return writeQueue.poll();
    }
    /**
     * Selector-thread task entry: re-arm the wake-up flag and either handle
     * immediately (if idle) or re-register interest in OP_WRITE.
     */
    @Override
    public void run() {
        informSelector.set(true);
        if (ready) {
            handle();
        } else {
            registerWrite();
        }
        ready = false;
    }
    private void registerWrite() {
        registerOp(SelectionKey.OP_WRITE);
    }
    /** Discards all pending writables. */
    @Override
    public void shutdown() {
        writeQueue.clear();
        while (poll() != null) {
        }
    }
    long getLastHandle() {
        return lastHandle;
    }
}
| 0true
|
hazelcast-client_src_main_java_com_hazelcast_client_connection_nio_ClientWriteHandler.java
|
77 |
/**
 * JPA entity for a static asset description (short and long text), stored in
 * BLC_STATIC_ASSET_DESC with admin auditing and second-level caching.
 */
@Entity
@Inheritance(strategy = InheritanceType.JOINED)
@Table(name = "BLC_STATIC_ASSET_DESC")
@EntityListeners(value = { AdminAuditableListener.class })
@Cache(usage= CacheConcurrencyStrategy.NONSTRICT_READ_WRITE, region="blCMSElements")
public class StaticAssetDescriptionImpl implements StaticAssetDescription {
    private static final long serialVersionUID = 1L;
    // Table-generated surrogate key.
    @Id
    @GeneratedValue(generator = "StaticAssetDescriptionId")
    @GenericGenerator(
        name="StaticAssetDescriptionId",
        strategy="org.broadleafcommerce.common.persistence.IdOverrideTableGenerator",
        parameters = {
            @Parameter(name="segment_value", value="StaticAssetDescriptionImpl"),
            @Parameter(name="entity_name", value="org.broadleafcommerce.cms.file.domain.StaticAssetDescriptionImpl")
        }
    )
    @Column(name = "STATIC_ASSET_DESC_ID")
    protected Long id;
    // Created/updated audit fields, populated by AdminAuditableListener.
    @Embedded
    @AdminPresentation(excluded = true)
    protected AdminAuditable auditable = new AdminAuditable();
    @Column (name = "DESCRIPTION")
    @AdminPresentation(friendlyName = "StaticAssetDescriptionImpl_Description", prominent = true)
    protected String description;
    @Column (name = "LONG_DESCRIPTION")
    @AdminPresentation(friendlyName = "StaticAssetDescriptionImpl_Long_Description", largeEntry = true, visibility = VisibilityEnum.GRID_HIDDEN)
    protected String longDescription;
    @Override
    public Long getId() {
        return id;
    }
    @Override
    public void setId(Long id) {
        this.id = id;
    }
    @Override
    public String getDescription() {
        return description;
    }
    @Override
    public void setDescription(String description) {
        this.description = description;
    }
    @Override
    public String getLongDescription() {
        return longDescription;
    }
    @Override
    public void setLongDescription(String longDescription) {
        this.longDescription = longDescription;
    }
    /**
     * Creates a detached copy carrying only the two description fields.
     * The id and auditable fields are intentionally left unset so the clone
     * persists as a new row.
     */
    @Override
    public StaticAssetDescription cloneEntity() {
        StaticAssetDescriptionImpl newAssetDescription = new StaticAssetDescriptionImpl();
        newAssetDescription.description = description;
        newAssetDescription.longDescription = longDescription;
        return newAssetDescription;
    }
    @Override
    public AdminAuditable getAuditable() {
        return auditable;
    }
    @Override
    public void setAuditable(AdminAuditable auditable) {
        this.auditable = auditable;
    }
}
| 0true
|
admin_broadleaf-contentmanagement-module_src_main_java_org_broadleafcommerce_cms_file_domain_StaticAssetDescriptionImpl.java
|
3,669 |
/**
 * Mapper for the internal {@code _index} field. Disabled by default; when
 * enabled it indexes the owning index's name into each document during the
 * pre-parse phase.
 */
public class IndexFieldMapper extends AbstractFieldMapper<String> implements InternalMapper, RootMapper {
    public static final String NAME = "_index";
    public static final String CONTENT_TYPE = "_index";
    /** Default field type: indexed, not tokenized, not stored, norms omitted. */
    public static class Defaults extends AbstractFieldMapper.Defaults {
        public static final String NAME = IndexFieldMapper.NAME;
        public static final String INDEX_NAME = IndexFieldMapper.NAME;
        public static final FieldType FIELD_TYPE = new FieldType(AbstractFieldMapper.Defaults.FIELD_TYPE);
        static {
            FIELD_TYPE.setIndexed(true);
            FIELD_TYPE.setTokenized(false);
            FIELD_TYPE.setStored(false);
            FIELD_TYPE.setOmitNorms(true);
            FIELD_TYPE.setIndexOptions(IndexOptions.DOCS_ONLY);
            FIELD_TYPE.freeze();
        }
        public static final EnabledAttributeMapper ENABLED_STATE = EnabledAttributeMapper.DISABLED;
    }
    public static class Builder extends AbstractFieldMapper.Builder<Builder, IndexFieldMapper> {
        private EnabledAttributeMapper enabledState = EnabledAttributeMapper.UNSET_DISABLED;
        public Builder() {
            super(Defaults.NAME, new FieldType(Defaults.FIELD_TYPE));
            indexName = Defaults.INDEX_NAME;
        }
        public Builder enabled(EnabledAttributeMapper enabledState) {
            this.enabledState = enabledState;
            return this;
        }
        @Override
        public IndexFieldMapper build(BuilderContext context) {
            return new IndexFieldMapper(name, indexName, boost, fieldType, docValues, enabledState, postingsProvider, docValuesProvider, fieldDataSettings, context.indexSettings());
        }
    }
    /** Parses the {@code _index} mapping definition; only "enabled" is recognized here. */
    public static class TypeParser implements Mapper.TypeParser {
        @Override
        public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
            IndexFieldMapper.Builder builder = MapperBuilders.index();
            parseField(builder, builder.name, node, parserContext);
            for (Map.Entry<String, Object> entry : node.entrySet()) {
                String fieldName = Strings.toUnderscoreCase(entry.getKey());
                Object fieldNode = entry.getValue();
                if (fieldName.equals("enabled")) {
                    EnabledAttributeMapper mapper = nodeBooleanValue(fieldNode) ? EnabledAttributeMapper.ENABLED : EnabledAttributeMapper.DISABLED;
                    builder.enabled(mapper);
                }
            }
            return builder;
        }
    }
    // Mutable: may be overwritten by merge() when the incoming mapping sets it.
    private EnabledAttributeMapper enabledState;
    public IndexFieldMapper() {
        this(Defaults.NAME, Defaults.INDEX_NAME);
    }
    protected IndexFieldMapper(String name, String indexName) {
        this(name, indexName, Defaults.BOOST, new FieldType(Defaults.FIELD_TYPE), null, Defaults.ENABLED_STATE, null, null, null, ImmutableSettings.EMPTY);
    }
    public IndexFieldMapper(String name, String indexName, float boost, FieldType fieldType, Boolean docValues, EnabledAttributeMapper enabledState,
                            PostingsFormatProvider postingsProvider, DocValuesFormatProvider docValuesProvider, @Nullable Settings fieldDataSettings, Settings indexSettings) {
        super(new Names(name, indexName, indexName, name), boost, fieldType, docValues, Lucene.KEYWORD_ANALYZER,
                Lucene.KEYWORD_ANALYZER, postingsProvider, docValuesProvider, null, null, fieldDataSettings, indexSettings);
        this.enabledState = enabledState;
    }
    public boolean enabled() {
        return this.enabledState.enabled;
    }
    @Override
    public FieldType defaultFieldType() {
        return Defaults.FIELD_TYPE;
    }
    @Override
    public FieldDataType defaultFieldDataType() {
        return new FieldDataType("string");
    }
    @Override
    public boolean hasDocValues() {
        return false;
    }
    /** Extracts this field's value from an already-built Lucene document, or null. */
    public String value(Document document) {
        Field field = (Field) document.getField(names.indexName());
        return field == null ? null : value(field);
    }
    @Override
    public String value(Object value) {
        if (value == null) {
            return null;
        }
        return value.toString();
    }
    @Override
    public void preParse(ParseContext context) throws IOException {
        // we pre parse it and not in parse, since its not part of the root object
        super.parse(context);
    }
    @Override
    public void postParse(ParseContext context) throws IOException {
    }
    // Intentionally a no-op: the field is added in preParse instead.
    @Override
    public void parse(ParseContext context) throws IOException {
    }
    @Override
    public void validate(ParseContext context) throws MapperParsingException {
    }
    @Override
    public boolean includeInObject() {
        return false;
    }
    @Override
    protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
        if (!enabledState.enabled) {
            return;
        }
        fields.add(new Field(names.indexName(), context.index(), fieldType));
    }
    @Override
    protected String contentType() {
        return CONTENT_TYPE;
    }
    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        boolean includeDefaults = params.paramAsBoolean("include_defaults", false);
        // if all defaults, no need to write it at all
        if (!includeDefaults && fieldType().stored() == Defaults.FIELD_TYPE.stored() && enabledState == Defaults.ENABLED_STATE) {
            return builder;
        }
        builder.startObject(CONTENT_TYPE);
        // NOTE(review): '&&' binds tighter than '||', so this reads as
        // includeDefaults || (storedDiffers && enabled) — confirm the grouping
        // is intentional and parentheses were not meant around the '||'.
        if (includeDefaults || fieldType().stored() != Defaults.FIELD_TYPE.stored() && enabledState.enabled) {
            builder.field("store", fieldType().stored());
        }
        if (includeDefaults || enabledState != Defaults.ENABLED_STATE) {
            builder.field("enabled", enabledState.enabled);
        }
        if (customFieldDataSettings != null) {
            builder.field("fielddata", (Map) customFieldDataSettings.getAsMap());
        } else if (includeDefaults) {
            builder.field("fielddata", (Map) fieldDataType.getSettings().getAsMap());
        }
        builder.endObject();
        return builder;
    }
    @Override
    public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
        IndexFieldMapper indexFieldMapperMergeWith = (IndexFieldMapper) mergeWith;
        if (!mergeContext.mergeFlags().simulate()) {
            // Only adopt the incoming enabled state when it differs and was explicitly set.
            if (indexFieldMapperMergeWith.enabledState != enabledState && !indexFieldMapperMergeWith.enabledState.unset()) {
                this.enabledState = indexFieldMapperMergeWith.enabledState;
            }
        }
    }
}
| 1no label
|
src_main_java_org_elasticsearch_index_mapper_internal_IndexFieldMapper.java
|
152 |
/**
 * Serializable transfer object for structured content. Besides the typed
 * fields, selected setters mirror their value into the {@code values} map so
 * callers can address fields generically by name.
 */
public class StructuredContentDTO implements Serializable {

    private static final long serialVersionUID = 1L;

    protected Long id;
    protected String contentName;
    protected String contentType;
    protected String localeCode;
    protected Integer priority;
    protected Long sandboxId;
    // Holds heterogeneous values (String names/types/locale, Integer priority),
    // so the value type is Object, not String. The raw getter/setter signatures
    // are kept for backward compatibility.
    protected Map values = new HashMap<String, Object>();
    protected String ruleExpression;
    protected List<ItemCriteriaDTO> itemCriteriaDTOList;

    public Long getId() {
        return id;
    }

    public void setId(Long id) {
        this.id = id;
    }

    public String getContentName() {
        return contentName;
    }

    // Mirrors the value into the generic map as well as the typed field.
    public void setContentName(String contentName) {
        values.put("contentName", contentName);
        this.contentName = contentName;
    }

    public String getContentType() {
        return contentType;
    }

    public void setContentType(String contentType) {
        values.put("contentType", contentType);
        this.contentType = contentType;
    }

    public String getLocaleCode() {
        return localeCode;
    }

    public void setLocaleCode(String localeCode) {
        values.put("localeCode", localeCode);
        this.localeCode = localeCode;
    }

    public Integer getPriority() {
        return priority;
    }

    public void setPriority(Integer priority) {
        values.put("priority", priority);
        this.priority = priority;
    }

    public Long getSandboxId() {
        return sandboxId;
    }

    // Note: sandboxId is intentionally not mirrored into the values map.
    public void setSandboxId(Long sandboxId) {
        this.sandboxId = sandboxId;
    }

    public Map getValues() {
        return values;
    }

    public void setValues(Map values) {
        this.values = values;
    }

    public String getRuleExpression() {
        return ruleExpression;
    }

    public void setRuleExpression(String ruleExpression) {
        this.ruleExpression = ruleExpression;
    }

    public List<ItemCriteriaDTO> getItemCriteriaDTOList() {
        return itemCriteriaDTOList;
    }

    public void setItemCriteriaDTOList(List<ItemCriteriaDTO> itemCriteriaDTOList) {
        this.itemCriteriaDTOList = itemCriteriaDTOList;
    }
}
| 0true
|
admin_broadleaf-contentmanagement-module_src_main_java_org_broadleafcommerce_cms_structure_dto_StructuredContentDTO.java
|
2,034 |
/**
 * Partition-aware operation that adds (or fetches) an index on the given map
 * attribute and back-fills it from the partition's existing records.
 */
public class AddIndexOperation extends AbstractNamedOperation implements PartitionAwareOperation {
    // Attribute to index.
    String attributeName;
    // Whether the index keeps entries ordered (supports range queries).
    boolean ordered;
    public AddIndexOperation(String name, String attributeName, boolean ordered) {
        super(name);
        this.attributeName = attributeName;
        this.ordered = ordered;
    }
    // Required for deserialization via readInternal().
    public AddIndexOperation() {
    }
    @Override
    public void run() throws Exception {
        MapService mapService = getService();
        MapContainer mapContainer = mapService.getMapContainer(name);
        RecordStore rs = mapService.getPartitionContainer(getPartitionId()).getRecordStore(name);
        Map<Data, Record> records = rs.getReadonlyRecordMap();
        IndexService indexService = mapContainer.getIndexService();
        SerializationService ss = getNodeEngine().getSerializationService();
        Index index = indexService.addOrGetIndex(attributeName, ordered);
        // Populate the new index from every record already in this partition.
        for (Record record : records.values()) {
            Data key = record.getKey();
            Object value = record.getValue();
            index.saveEntryIndex(new QueryEntry(ss, key, key, value));
        }
    }
    @Override
    public Object getResponse() {
        return Boolean.TRUE;
    }
    @Override
    protected void writeInternal(ObjectDataOutput out) throws IOException {
        super.writeInternal(out);
        out.writeUTF(attributeName);
        out.writeBoolean(ordered);
    }
    @Override
    protected void readInternal(ObjectDataInput in) throws IOException {
        super.readInternal(in);
        attributeName = in.readUTF();
        ordered = in.readBoolean();
    }
}
| 1no label
|
hazelcast_src_main_java_com_hazelcast_map_operation_AddIndexOperation.java
|
461 |
/**
 * Minifies JS/CSS resources with the YUI compressor. On any minification
 * failure the original bytes are returned unchanged, so a broken resource
 * never breaks the page.
 */
@Service("blResourceMinificationService")
public class ResourceMinificationServiceImpl implements ResourceMinificationService {
    protected static final Log LOG = LogFactory.getLog(ResourceMinificationServiceImpl.class);

    @Value("${minify.enabled}")
    protected boolean enabled;

    @Value("${minify.linebreak}")
    protected int linebreak;

    @Value("${minify.munge}")
    protected boolean munge;

    @Value("${minify.verbose}")
    protected boolean verbose;

    @Value("${minify.preserveAllSemiColons}")
    protected boolean preserveAllSemiColons;

    @Value("${minify.disableOptimizations}")
    protected boolean disableOptimizations;

    /**
     * Minifies the given resource bytes.
     *
     * @param filename used only to detect the resource type (.js or .css)
     * @param bytes the raw resource content
     * @return the minified bytes, or the original bytes when minification is
     *         disabled or fails
     * @throws IllegalArgumentException when the filename is neither .js nor .css
     */
    @Override
    public byte[] minify(String filename, byte[] bytes) {
        if (!enabled) {
            return bytes;
        }
        String type = null;
        if (filename.endsWith(".js")) {
            type = "js";
        } else if (filename.endsWith(".css")) {
            type = "css";
        }
        if (!"js".equals(type) && !"css".equals(type)) {
            throw new IllegalArgumentException("Can only minify js or css resources");
        }
        byte[] minifiedBytes;
        // Input streams to read the bytes.
        // NOTE(review): InputStreamReader/OutputStreamWriter use the platform
        // default charset here — confirm resources are always in that charset.
        ByteArrayInputStream bais = new ByteArrayInputStream(bytes);
        InputStreamReader isr = new InputStreamReader(bais);
        BufferedReader in = new BufferedReader(isr);
        // Output streams to save the modified bytes
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        OutputStreamWriter osw = new OutputStreamWriter(baos);
        BufferedWriter out = new BufferedWriter(osw);
        try {
            if ("js".equals(type)) {
                JavaScriptCompressor jsc = new JavaScriptCompressor(in, getLogBasedErrorReporter());
                jsc.compress(out, linebreak, true, verbose, preserveAllSemiColons, disableOptimizations);
            } else if ("css".equals(type)) {
                CssCompressor cssc = new CssCompressor(in);
                cssc.compress(out, 100);
            }
            out.flush();
            minifiedBytes = baos.toByteArray();
        } catch (Exception e) { // Catch everything - on a runtime exception, we still want to return the unminified bytes
            LOG.warn("Could not minify resources, returned unminified bytes", e);
            return bytes;
        } finally {
            // Close each stream independently and quietly: a close failure must
            // not mask the result or prevent the unminified-bytes fallback
            // above (previously an IOException here was rethrown as a
            // RuntimeException, defeating that contract), and a failure
            // closing 'in' must not leave 'out' unclosed.
            try {
                in.close();
            } catch (IOException e) {
                LOG.warn("Unable to close input stream used for minification", e);
            }
            try {
                out.close();
            } catch (IOException e) {
                LOG.warn("Unable to close output stream used for minification", e);
            }
        }
        return minifiedBytes;
    }

    /**
     * Adapts the compressor's ErrorReporter callbacks onto this class's
     * commons-logging Log.
     */
    protected ErrorReporter getLogBasedErrorReporter() {
        return new ErrorReporter() {
            @Override
            public void warning(String message, String sourceName, int line, String lineSource, int lineOffset) {
                // A negative line means "no position information available".
                if (line < 0) {
                    LOG.warn(message);
                } else {
                    LOG.warn(line + ':' + lineOffset + ':' + message);
                }
            }

            @Override
            public void error(String message, String sourceName, int line, String lineSource, int lineOffset) {
                if (line < 0) {
                    LOG.error(message);
                } else {
                    LOG.error(line + ':' + lineOffset + ':' + message);
                }
            }

            @Override
            public EvaluatorException runtimeError(String message, String sourceName, int line, String lineSource,
                    int lineOffset) {
                error(message, sourceName, line, lineSource, lineOffset);
                return new EvaluatorException(message);
            }
        };
    }
}
| 1no label
|
common_src_main_java_org_broadleafcommerce_common_resource_service_ResourceMinificationServiceImpl.java
|
1,167 |
public abstract class OQueryOperator {
public static enum ORDER {
/**
* Used when order compared to other operator can not be evaluated or has no consequences.
*/
UNKNOWNED,
/**
* Used when this operator must be before the other one
*/
BEFORE,
/**
* Used when this operator must be after the other one
*/
AFTER,
/**
* Used when this operator is equal the other one
*/
EQUAL
}
public static enum INDEX_OPERATION_TYPE {
GET, COUNT
}
/**
* Default operator order. can be used by additional operator to locate themself relatively to default ones.
* <p/>
* WARNING: ORDER IS IMPORTANT TO AVOID SUB-STRING LIKE "IS" and AND "INSTANCEOF": INSTANCEOF MUST BE PLACED BEFORE! AND ALSO FOR
* PERFORMANCE (MOST USED BEFORE)
*/
protected static final Class<?>[] DEFAULT_OPERATORS_ORDER = { OQueryOperatorEquals.class, OQueryOperatorAnd.class,
OQueryOperatorOr.class, OQueryOperatorNotEquals.class, OQueryOperatorNot.class, OQueryOperatorMinorEquals.class,
OQueryOperatorMinor.class, OQueryOperatorMajorEquals.class, OQueryOperatorContainsAll.class, OQueryOperatorMajor.class,
OQueryOperatorLike.class, OQueryOperatorMatches.class, OQueryOperatorInstanceof.class, OQueryOperatorIs.class,
OQueryOperatorIn.class, OQueryOperatorContainsKey.class, OQueryOperatorContainsValue.class, OQueryOperatorContainsText.class,
OQueryOperatorContains.class, OQueryOperatorTraverse.class, OQueryOperatorBetween.class, OQueryOperatorPlus.class,
OQueryOperatorMinus.class, OQueryOperatorMultiply.class, OQueryOperatorDivide.class, OQueryOperatorMod.class };
public final String keyword;
public final int precedence;
public final int expectedRightWords;
public final boolean unary;
public final boolean expectsParameters;
protected OQueryOperator(final String iKeyword, final int iPrecedence, final boolean iUnary) {
this(iKeyword, iPrecedence, iUnary, 1, false);
}
protected OQueryOperator(final String iKeyword, final int iPrecedence, final boolean iUnary, final int iExpectedRightWords) {
this(iKeyword, iPrecedence, iUnary, iExpectedRightWords, false);
}
protected OQueryOperator(final String iKeyword, final int iPrecedence, final boolean iUnary, final int iExpectedRightWords,
final boolean iExpectsParameters) {
keyword = iKeyword;
precedence = iPrecedence;
unary = iUnary;
expectedRightWords = iExpectedRightWords;
expectsParameters = iExpectsParameters;
}
public abstract Object evaluateRecord(final OIdentifiable iRecord, ODocument iCurrentResult,
final OSQLFilterCondition iCondition, final Object iLeft, final Object iRight, OCommandContext iContext);
/**
* Returns hint how index can be used to calculate result of operator execution.
*
* @param iLeft
* Value of left query parameter.
* @param iRight
* Value of right query parameter.
* @return Hint how index can be used to calculate result of operator execution.
*/
public abstract OIndexReuseType getIndexReuseType(Object iLeft, Object iRight);
/**
* Performs index query to calculate result of execution of given operator.
* <p/>
* Query that should be executed can be presented like: [[property0 = keyParam0] and [property1 = keyParam1] and] propertyN
* operator keyParamN.
* <p/>
* It is supped that index which passed in as parameter is used to index properties listed above and responsibility of given
* method execute query using given parameters.
* <p/>
* Multiple parameters are passed in to implement composite indexes support.
*
*
*
*
* @param iContext
* TODO
* @param index
* Instance of index that will be used to calculate result of operator execution.
* @param iOperationType
* TODO
* @param keyParams
* Parameters of query is used to calculate query result.
* @param resultListener
* @param fetchLimit
* @return Result of execution of given operator or {@code null} if given index can not be used to calculate operator result.
*/
public Object executeIndexQuery(OCommandContext iContext, OIndex<?> index, INDEX_OPERATION_TYPE iOperationType,
final List<Object> keyParams, final IndexResultListener resultListener, int fetchLimit) {
return null;
}
@Override
public String toString() {
return keyword;
}
/**
* Default State-less implementation: does not save parameters and just return itself
*
* @param iParams
* @return
*/
public OQueryOperator configure(final List<String> iParams) {
return this;
}
public String getSyntax() {
return "<left> " + keyword + " <right>";
}
public abstract ORID getBeginRidRange(final Object iLeft, final Object iRight);
public abstract ORID getEndRidRange(final Object iLeft, final Object iRight);
public boolean isUnary() {
return unary;
}
/**
* Check priority of this operator compare to given operator.
*
* @param other
* @return ORDER place of this operator compared to given operator
*/
public ORDER compare(OQueryOperator other) {
final Class<?> thisClass = this.getClass();
final Class<?> otherClass = other.getClass();
int thisPosition = -1;
int otherPosition = -1;
for (int i = 0; i < DEFAULT_OPERATORS_ORDER.length; i++) {
// subclass of default operators inherit their parent ordering
final Class<?> clazz = DEFAULT_OPERATORS_ORDER[i];
if (clazz.isAssignableFrom(thisClass)) {
thisPosition = i;
}
if (clazz.isAssignableFrom(otherClass)) {
otherPosition = i;
}
}
if (thisPosition == -1 || otherPosition == -1) {
// can not decide which comes first
return ORDER.UNKNOWNED;
}
if (thisPosition > otherPosition) {
return ORDER.AFTER;
} else if (thisPosition < otherPosition) {
return ORDER.BEFORE;
}
return ORDER.EQUAL;
}
/**
 * Records index-usage metrics for this query both on the command context and on the global
 * Orient profiler (per-database counters). Composite-index counters additionally record how
 * many of the index's parameters were actually bound by the query.
 */
protected void updateProfiler(final OCommandContext iContext, final OIndex<?> index, final List<Object> keyParams,
      final OIndexDefinition indexDefinition) {
    // NOTE(review): this context metric is bumped for every index use, not only composite
    // indexes, despite its name — confirm whether that is intentional.
    if (iContext.isRecordingMetrics())
      iContext.updateMetric("compositeIndexUsed", +1);

    final OProfilerMBean profiler = Orient.instance().getProfiler();
    if (profiler.isRecording()) {
      profiler.updateCounter(profiler.getDatabaseMetric(index.getDatabaseName(), "query.indexUsed"), "Used index in query", +1);

      int params = indexDefinition.getParamCount();
      if (params > 1) {
        // composite index: track both total composite usage and the (declared, bound) arity pair
        final String profiler_prefix = profiler.getDatabaseMetric(index.getDatabaseName(), "query.compositeIndexUsed");
        profiler.updateCounter(profiler_prefix, "Used composite index in query", +1);
        profiler.updateCounter(profiler_prefix + "." + params, "Used composite index in query with " + params + " params", +1);
        profiler.updateCounter(profiler_prefix + "." + params + '.' + keyParams.size(), "Used composite index in query with "
            + params + " params and " + keyParams.size() + " keys", +1);
      }
    }
  }
/**
 * Listener used by index-backed operator execution; extends the generic index values
 * listener with access to the accumulated result object.
 */
public interface IndexResultListener extends OIndex.IndexValuesResultListener {
    Object getResult();
  }
}
| 1no label
|
core_src_main_java_com_orientechnologies_orient_core_sql_operator_OQueryOperator.java
|
59 |
/**
 * Entry point for opening {@link TitanGraph} instances, either from a properties file,
 * a {@code backend:arg} shorthand string, or an explicit configuration object. Also exposes
 * transaction-log processing and transaction-recovery helpers.
 */
public class TitanFactory {

    private static final Logger log =
            LoggerFactory.getLogger(TitanFactory.class);

    /**
     * Opens a {@link TitanGraph} database.
     * <p/>
     * If the argument points to a configuration file, the configuration file is loaded to configure the Titan graph
     * If the string argument is a configuration short-cut, then the short-cut is parsed and used to configure the returned Titan graph.
     * <p />
     * A configuration short-cut is of the form:
     * [STORAGE_BACKEND_NAME]:[DIRECTORY_OR_HOST]
     *
     * @param shortcutOrFile Configuration file name or configuration short-cut
     * @return Titan graph database configured according to the provided configuration
     * @see <a href="https://github.com/thinkaurelius/titan/wiki/Graph-Configuration">Graph Configuration Wiki</a>
     */
    public static TitanGraph open(String shortcutOrFile) {
        return open(getLocalConfiguration(shortcutOrFile));
    }

    /**
     * Opens a {@link TitanGraph} database configured according to the provided configuration.
     *
     * @param configuration Configuration for the graph database
     * @return Titan graph database
     * @see <a href="https://github.com/thinkaurelius/titan/wiki/Graph-Configuration">Graph Configuration Wiki</a>
     */
    public static TitanGraph open(Configuration configuration) {
        return open(new CommonsConfiguration(configuration));
    }

    /**
     * Opens a {@link TitanGraph} database configured according to the provided configuration.
     *
     * @param configuration Configuration for the graph database
     * @return Titan graph database
     */
    public static TitanGraph open(BasicConfiguration configuration) {
        return open(configuration.getConfiguration());
    }

    /**
     * Opens a {@link TitanGraph} database configured according to the provided configuration.
     *
     * @param configuration Configuration for the graph database
     * @return Titan graph database
     */
    public static TitanGraph open(ReadConfiguration configuration) {
        return new StandardTitanGraph(new GraphDatabaseConfiguration(configuration));
    }

    /**
     * Returns a {@link Builder} that allows to set the configuration options for opening a Titan graph database.
     * <p />
     * In the builder, the configuration options for the graph can be set individually. Once all options are configured,
     * the graph can be opened with {@link com.thinkaurelius.titan.core.TitanFactory.Builder#open()}.
     *
     * @return a fresh configuration builder
     */
    public static Builder build() {
        return new Builder();
    }

    //--------------------- BUILDER -------------------------------------------

    /** Fluent builder over a modifiable graph configuration; obtain via {@link TitanFactory#build()}. */
    public static class Builder extends UserModifiableConfiguration {

        private Builder() {
            super(GraphDatabaseConfiguration.buildConfiguration());
        }

        /**
         * Configures the provided configuration path to the given value.
         *
         * @param path configuration option path
         * @param value value to assign
         * @return this builder, for chaining
         */
        public Builder set(String path, Object value) {
            super.set(path, value);
            return this;
        }

        /**
         * Opens a Titan graph with the previously configured options.
         *
         * @return the opened graph database
         */
        public TitanGraph open() {
            return TitanFactory.open(super.getConfiguration());
        }
    }

    /**
     * Returns a {@link com.thinkaurelius.titan.core.log.LogProcessorFramework} for processing transaction log entries
     * against the provided graph instance.
     *
     * @param graph graph whose transaction log should be processed
     * @return a log processor framework bound to the graph
     */
    public static LogProcessorFramework openTransactionLog(TitanGraph graph) {
        return new StandardLogProcessorFramework((StandardTitanGraph)graph);
    }

    /**
     * Returns a {@link TransactionRecovery} process for recovering partially failed transactions. The recovery process
     * will start processing the write-ahead transaction log at the specified transaction time.
     *
     * @param graph graph to recover
     * @param sinceEpoch transaction time from which to start recovery
     * @param unit time unit of {@code sinceEpoch}
     * @return the started recovery process
     */
    public static TransactionRecovery startTransactionRecovery(TitanGraph graph, long sinceEpoch, TimeUnit unit) {
        return new StandardTransactionLogProcessor((StandardTitanGraph)graph, new StandardTimestamp(sinceEpoch,unit));
    }

    //###################################
    //          HELPER METHODS
    //###################################

    /**
     * Resolves a user-supplied string into a configuration: if it names an existing file the
     * file is loaded; otherwise it is parsed as a {@code backend[:argument]} shorthand.
     */
    private static ReadConfiguration getLocalConfiguration(String shortcutOrFile) {
        File file = new File(shortcutOrFile);
        if (file.exists()) return getLocalConfiguration(file);
        else {
            // shorthand form: everything before ':' is the backend name, the rest its argument
            int pos = shortcutOrFile.indexOf(':');
            if (pos<0) pos = shortcutOrFile.length();
            String backend = shortcutOrFile.substring(0,pos);
            Preconditions.checkArgument(Backend.REGISTERED_STORAGE_MANAGERS_SHORTHAND.containsKey(backend.toLowerCase()), "Backend shorthand unknown: %s", backend);
            String secondArg = null;
            if (pos+1<shortcutOrFile.length()) secondArg = shortcutOrFile.substring(pos + 1).trim();
            BaseConfiguration config = new BaseConfiguration();
            ModifiableConfiguration writeConfig = new ModifiableConfiguration(ROOT_NS,new CommonsConfiguration(config), BasicConfiguration.Restriction.NONE);
            writeConfig.set(STORAGE_BACKEND,backend);
            // the registered option tells us how the backend expects its argument (if any)
            ConfigOption option = Backend.REGISTERED_STORAGE_MANAGERS_SHORTHAND.get(backend.toLowerCase());
            if (option==null) {
                Preconditions.checkArgument(secondArg==null);
            } else if (option==STORAGE_DIRECTORY || option==STORAGE_CONF_FILE) {
                Preconditions.checkArgument(StringUtils.isNotBlank(secondArg),"Need to provide additional argument to initialize storage backend");
                writeConfig.set(option,getAbsolutePath(secondArg));
            } else if (option==STORAGE_HOSTS) {
                Preconditions.checkArgument(StringUtils.isNotBlank(secondArg),"Need to provide additional argument to initialize storage backend");
                writeConfig.set(option,new String[]{secondArg});
            } else throw new IllegalArgumentException("Invalid configuration option for backend "+option);
            return new CommonsConfiguration(config);
        }
    }

    /**
     * Load a properties file containing a Titan graph configuration.
     * <p/>
     * <ol>
     * <li>Load the file contents into a {@link org.apache.commons.configuration.PropertiesConfiguration}</li>
     * <li>For each key that points to a configuration object that is either a directory
     * or local file, check
     * whether the associated value is a non-null, non-absolute path. If so,
     * then prepend the absolute path of the parent directory of the provided configuration {@code file}.
     * This has the effect of making non-absolute backend
     * paths relative to the config file's directory rather than the JVM's
     * working directory.
     * <li>Return the {@link ReadConfiguration} for the prepared configuration file</li>
     * </ol>
     * <p/>
     *
     * @param file A properties file to load
     * @return A configuration derived from {@code file}
     */
    @SuppressWarnings("unchecked")
    private static ReadConfiguration getLocalConfiguration(File file) {
        Preconditions.checkArgument(file != null && file.exists() && file.isFile() && file.canRead(),
                "Need to specify a readable configuration file, but was given: %s", file.toString());

        try {
            PropertiesConfiguration configuration = new PropertiesConfiguration(file);

            final File tmpParent = file.getParentFile();
            final File configParent;

            if (null == tmpParent) {
                /*
                 * null usually means we were given a Titan config file path
                 * string like "foo.properties" that refers to the current
                 * working directory of the process.
                 */
                configParent = new File(System.getProperty("user.dir"));
            } else {
                configParent = tmpParent;
            }

            Preconditions.checkNotNull(configParent);
            Preconditions.checkArgument(configParent.isDirectory());

            // TODO this mangling logic is a relic from the hardcoded string days; it should be deleted and rewritten as a setting on ConfigOption
            // Matches storage/index keys whose values are directories or config files,
            // i.e. exactly the keys whose relative paths must be anchored to the config file.
            final Pattern p = Pattern.compile("(" +
                    Pattern.quote(STORAGE_NS.getName()) + "\\..*" +
                            "(" + Pattern.quote(STORAGE_DIRECTORY.getName()) + "|" +
                                  Pattern.quote(STORAGE_CONF_FILE.getName()) + ")"
                    + "|" +
                    Pattern.quote(INDEX_NS.getName()) + "\\..*" +
                            "(" + Pattern.quote(INDEX_DIRECTORY.getName()) + "|" +
                                  Pattern.quote(INDEX_CONF_FILE.getName()) + ")"
            + ")");

            final Iterator<String> keysToMangle = Iterators.filter(configuration.getKeys(), new Predicate<String>() {
                @Override
                public boolean apply(String key) {
                    if (null == key)
                        return false;
                    return p.matcher(key).matches();
                }
            });

            while (keysToMangle.hasNext()) {
                String k = keysToMangle.next();
                Preconditions.checkNotNull(k);
                String s = configuration.getString(k);
                Preconditions.checkArgument(StringUtils.isNotBlank(s),"Invalid Configuration: key %s has null empty value",k);
                configuration.setProperty(k,getAbsolutePath(configParent,s));
            }
            return new CommonsConfiguration(configuration);
        } catch (ConfigurationException e) {
            throw new IllegalArgumentException("Could not load configuration at: " + file, e);
        }
    }

    /** Anchors a relative path to the JVM's working directory; absolute paths pass through. */
    private static final String getAbsolutePath(String file) {
        return getAbsolutePath(new File(System.getProperty("user.dir")), file);
    }

    /** Anchors a relative path to {@code configParent}; absolute paths pass through unchanged. */
    private static final String getAbsolutePath(final File configParent, String file) {
        File storedir = new File(file);
        if (!storedir.isAbsolute()) {
            String newFile = configParent.getAbsolutePath() + File.separator + file;
            log.debug("Overwrote relative path: was {}, now {}", file, newFile);
            return newFile;
        } else {
            log.debug("Loaded absolute path for key: {}", file);
            return file;
        }
    }
}
| 1no label
|
titan-core_src_main_java_com_thinkaurelius_titan_core_TitanFactory.java
|
1,070 |
/**
 * Transport action implementing the update API. An update is prepared via {@link UpdateHelper}
 * and then translated into an index, upsert-index, or delete operation (or a no-op), each of
 * which is delegated to the corresponding transport action. Version conflicts are retried up
 * to {@code retryOnConflict} times, and the target index may be auto-created first.
 */
public class TransportUpdateAction extends TransportInstanceSingleOperationAction<UpdateRequest, UpdateResponse> {

    private final TransportDeleteAction deleteAction;
    private final TransportIndexAction indexAction;
    private final AutoCreateIndex autoCreateIndex;
    private final TransportCreateIndexAction createIndexAction;
    private final UpdateHelper updateHelper;

    @Inject
    public TransportUpdateAction(Settings settings, ThreadPool threadPool, ClusterService clusterService, TransportService transportService,
                                 TransportIndexAction indexAction, TransportDeleteAction deleteAction, TransportCreateIndexAction createIndexAction,
                                 UpdateHelper updateHelper) {
        super(settings, threadPool, clusterService, transportService);
        this.indexAction = indexAction;
        this.deleteAction = deleteAction;
        this.createIndexAction = createIndexAction;
        this.updateHelper = updateHelper;
        this.autoCreateIndex = new AutoCreateIndex(settings);
    }

    @Override
    protected String transportAction() {
        return UpdateAction.NAME;
    }

    @Override
    protected String executor() {
        // updates run on the index thread pool since they ultimately perform index/delete work
        return ThreadPool.Names.INDEX;
    }

    @Override
    protected UpdateRequest newRequest() {
        return new UpdateRequest();
    }

    @Override
    protected UpdateResponse newResponse() {
        return new UpdateResponse();
    }

    @Override
    protected ClusterBlockException checkGlobalBlock(ClusterState state, UpdateRequest request) {
        return state.blocks().globalBlockedException(ClusterBlockLevel.WRITE);
    }

    @Override
    protected ClusterBlockException checkRequestBlock(ClusterState state, UpdateRequest request) {
        return state.blocks().indexBlockedException(ClusterBlockLevel.WRITE, request.index());
    }

    @Override
    protected boolean retryOnFailure(Throwable e) {
        return TransportActions.isShardNotAvailableException(e);
    }

    @Override
    protected boolean resolveRequest(ClusterState state, UpdateRequest request, ActionListener<UpdateResponse> listener) {
        // resolve alias -> concrete index and apply alias routing before anything else
        MetaData metaData = clusterService.state().metaData();
        String aliasOrIndex = request.index();
        request.routing((metaData.resolveIndexRouting(request.routing(), aliasOrIndex)));
        request.index(metaData.concreteIndex(request.index()));

        // Fail fast on the node that received the request, rather than failing when translating on the index or delete request.
        if (request.routing() == null && state.getMetaData().routingRequired(request.index(), request.type())) {
            throw new RoutingMissingException(request.index(), request.type(), request.id());
        }

        return true;
    }

    @Override
    protected void doExecute(final UpdateRequest request, final ActionListener<UpdateResponse> listener) {
        // if we don't have a master, we don't have metadata, that's fine, let it find a master using create index API
        if (autoCreateIndex.shouldAutoCreate(request.index(), clusterService.state())) {
            request.beforeLocalFork(); // we fork on another thread...
            createIndexAction.execute(new CreateIndexRequest(request.index()).cause("auto(update api)").masterNodeTimeout(request.timeout()), new ActionListener<CreateIndexResponse>() {
                @Override
                public void onResponse(CreateIndexResponse result) {
                    innerExecute(request, listener);
                }

                @Override
                public void onFailure(Throwable e) {
                    if (ExceptionsHelper.unwrapCause(e) instanceof IndexAlreadyExistsException) {
                        // we have the index, do it
                        try {
                            innerExecute(request, listener);
                        } catch (Throwable e1) {
                            listener.onFailure(e1);
                        }
                    } else {
                        listener.onFailure(e);
                    }
                }
            });
        } else {
            innerExecute(request, listener);
        }
    }

    // continues the standard single-instance operation flow once the index is known to exist
    private void innerExecute(final UpdateRequest request, final ActionListener<UpdateResponse> listener) {
        super.doExecute(request, listener);
    }

    @Override
    protected ShardIterator shards(ClusterState clusterState, UpdateRequest request) throws ElasticsearchException {
        if (request.shardId() != -1) {
            return clusterState.routingTable().index(request.index()).shard(request.shardId()).primaryShardIt();
        }
        ShardIterator shardIterator = clusterService.operationRouting()
                .indexShards(clusterService.state(), request.index(), request.type(), request.id(), request.routing());
        ShardRouting shard;
        // updates must execute on the primary shard only
        while ((shard = shardIterator.nextOrNull()) != null) {
            if (shard.primary()) {
                return new PlainShardIterator(shardIterator.shardId(), ImmutableList.of(shard));
            }
        }
        return new PlainShardIterator(shardIterator.shardId(), ImmutableList.<ShardRouting>of());
    }

    @Override
    protected void shardOperation(final UpdateRequest request, final ActionListener<UpdateResponse> listener) throws ElasticsearchException {
        shardOperation(request, listener, 0);
    }

    /**
     * Prepares the update and dispatches the resulting operation. On version conflict the whole
     * prepare+execute cycle is re-run (on a fresh thread) until {@code retryOnConflict} is exhausted.
     */
    protected void shardOperation(final UpdateRequest request, final ActionListener<UpdateResponse> listener, final int retryCount) throws ElasticsearchException {
        final UpdateHelper.Result result = updateHelper.prepare(request);
        switch (result.operation()) {
            case UPSERT:
                IndexRequest upsertRequest = result.action();
                // we fetch it from the index request so we don't generate the bytes twice, its already done in the index request
                final BytesReference upsertSourceBytes = upsertRequest.source();
                indexAction.execute(upsertRequest, new ActionListener<IndexResponse>() {
                    @Override
                    public void onResponse(IndexResponse response) {
                        UpdateResponse update = new UpdateResponse(response.getIndex(), response.getType(), response.getId(), response.getVersion(), response.isCreated());
                        if (request.fields() != null && request.fields().length > 0) {
                            Tuple<XContentType, Map<String, Object>> sourceAndContent = XContentHelper.convertToMap(upsertSourceBytes, true);
                            update.setGetResult(updateHelper.extractGetResult(request, response.getVersion(), sourceAndContent.v2(), sourceAndContent.v1(), upsertSourceBytes));
                        } else {
                            update.setGetResult(null);
                        }
                        listener.onResponse(update);
                    }

                    @Override
                    public void onFailure(Throwable e) {
                        e = ExceptionsHelper.unwrapCause(e);
                        // upserts may also race with a concurrent create, hence DocumentAlreadyExists is retried too
                        if (e instanceof VersionConflictEngineException || e instanceof DocumentAlreadyExistsException) {
                            if (retryCount < request.retryOnConflict()) {
                                threadPool.executor(executor()).execute(new Runnable() {
                                    @Override
                                    public void run() {
                                        shardOperation(request, listener, retryCount + 1);
                                    }
                                });
                                return;
                            }
                        }
                        listener.onFailure(e);
                    }
                });
                break;
            case INDEX:
                IndexRequest indexRequest = result.action();
                // we fetch it from the index request so we don't generate the bytes twice, its already done in the index request
                final BytesReference indexSourceBytes = indexRequest.source();
                indexAction.execute(indexRequest, new ActionListener<IndexResponse>() {
                    @Override
                    public void onResponse(IndexResponse response) {
                        UpdateResponse update = new UpdateResponse(response.getIndex(), response.getType(), response.getId(), response.getVersion(), response.isCreated());
                        update.setGetResult(updateHelper.extractGetResult(request, response.getVersion(), result.updatedSourceAsMap(), result.updateSourceContentType(), indexSourceBytes));
                        listener.onResponse(update);
                    }

                    @Override
                    public void onFailure(Throwable e) {
                        e = ExceptionsHelper.unwrapCause(e);
                        if (e instanceof VersionConflictEngineException) {
                            if (retryCount < request.retryOnConflict()) {
                                threadPool.executor(executor()).execute(new Runnable() {
                                    @Override
                                    public void run() {
                                        shardOperation(request, listener, retryCount + 1);
                                    }
                                });
                                return;
                            }
                        }
                        listener.onFailure(e);
                    }
                });
                break;
            case DELETE:
                DeleteRequest deleteRequest = result.action();
                deleteAction.execute(deleteRequest, new ActionListener<DeleteResponse>() {
                    @Override
                    public void onResponse(DeleteResponse response) {
                        UpdateResponse update = new UpdateResponse(response.getIndex(), response.getType(), response.getId(), response.getVersion(), false);
                        update.setGetResult(updateHelper.extractGetResult(request, response.getVersion(), result.updatedSourceAsMap(), result.updateSourceContentType(), null));
                        listener.onResponse(update);
                    }

                    @Override
                    public void onFailure(Throwable e) {
                        e = ExceptionsHelper.unwrapCause(e);
                        if (e instanceof VersionConflictEngineException) {
                            if (retryCount < request.retryOnConflict()) {
                                threadPool.executor(executor()).execute(new Runnable() {
                                    @Override
                                    public void run() {
                                        shardOperation(request, listener, retryCount + 1);
                                    }
                                });
                                return;
                            }
                        }
                        listener.onFailure(e);
                    }
                });
                break;
            case NONE:
                // script decided nothing should change: respond without touching the shard
                UpdateResponse update = result.action();
                listener.onResponse(update);
                break;
            default:
                throw new ElasticsearchIllegalStateException("Illegal operation " + result.operation());
        }
    }
}
| 1no label
|
src_main_java_org_elasticsearch_action_update_TransportUpdateAction.java
|
626 |
/**
 * Controller that issues an AJAX-style redirect to a URL previously stored on the session
 * (under the {@code BLC_REDIRECT_URL} attribute), falling back to the application's context
 * path when no target was recorded.
 */
public class BroadleafRedirectController {

    /**
     * Resolves the redirect target and returns it as an {@code ajaxredirect:} view name.
     *
     * @return "ajaxredirect:" followed by the stored URL, or the context path when none is stored
     */
    public String redirect(HttpServletRequest request, HttpServletResponse response, Model model) {
        Object storedTarget = request.getSession().getAttribute("BLC_REDIRECT_URL");
        String target = (storedTarget == null) ? request.getContextPath() : (String) storedTarget;
        return "ajaxredirect:" + target;
    }
}
| 1no label
|
common_src_main_java_org_broadleafcommerce_common_web_controller_BroadleafRedirectController.java
|
66 |
{
// anonymous FilenameFilter: keep only files whose name starts with the captured `prefix`;
// the containing directory argument is intentionally ignored
@Override
public boolean accept( File dir, String fileName )
{
return fileName.startsWith( prefix );
}
} );
| 0true
|
community_kernel_src_test_java_org_neo4j_kernel_impl_transaction_TestXaFramework.java
|
4,031 |
/**
 * Builds Lucene queries for the match family of queries: the field's analyzer tokenizes the
 * input value and the resulting terms are combined into a boolean, phrase, or phrase-prefix
 * query. Supports fuzziness, common-terms cutoff, lenient parsing, and configurable behavior
 * for analyzers that produce zero tokens.
 */
public class MatchQuery {

    /** How the analyzed terms are combined. */
    public static enum Type {
        BOOLEAN,
        PHRASE,
        PHRASE_PREFIX
    }

    /** What to return when analysis yields no tokens: match nothing or match everything. */
    public static enum ZeroTermsQuery {
        NONE,
        ALL
    }

    protected final QueryParseContext parseContext;

    protected String analyzer;

    protected BooleanClause.Occur occur = BooleanClause.Occur.SHOULD;

    protected boolean enablePositionIncrements = true;

    protected int phraseSlop = 0;

    protected Fuzziness fuzziness = null;

    protected int fuzzyPrefixLength = FuzzyQuery.defaultPrefixLength;

    protected int maxExpansions = FuzzyQuery.defaultMaxExpansions;

    //LUCENE 4 UPGRADE we need a default value for this!
    protected boolean transpositions = false;

    protected MultiTermQuery.RewriteMethod rewriteMethod;

    protected MultiTermQuery.RewriteMethod fuzzyRewriteMethod;

    protected boolean lenient;

    protected ZeroTermsQuery zeroTermsQuery = ZeroTermsQuery.NONE;

    // when set, single-position boolean matches use an ExtendedCommonTermsQuery with this cutoff
    protected Float commonTermsCutoff = null;

    public MatchQuery(QueryParseContext parseContext) {
        this.parseContext = parseContext;
    }

    public void setAnalyzer(String analyzer) {
        this.analyzer = analyzer;
    }

    public void setOccur(BooleanClause.Occur occur) {
        this.occur = occur;
    }

    public void setCommonTermsCutoff(float cutoff) {
        this.commonTermsCutoff = Float.valueOf(cutoff);
    }

    public void setEnablePositionIncrements(boolean enablePositionIncrements) {
        this.enablePositionIncrements = enablePositionIncrements;
    }

    public void setPhraseSlop(int phraseSlop) {
        this.phraseSlop = phraseSlop;
    }

    public void setFuzziness(Fuzziness fuzziness) {
        this.fuzziness = fuzziness;
    }

    public void setFuzzyPrefixLength(int fuzzyPrefixLength) {
        this.fuzzyPrefixLength = fuzzyPrefixLength;
    }

    public void setMaxExpansions(int maxExpansions) {
        this.maxExpansions = maxExpansions;
    }

    public void setTranspositions(boolean transpositions) {
        this.transpositions = transpositions;
    }

    public void setRewriteMethod(MultiTermQuery.RewriteMethod rewriteMethod) {
        this.rewriteMethod = rewriteMethod;
    }

    public void setFuzzyRewriteMethod(MultiTermQuery.RewriteMethod fuzzyRewriteMethod) {
        this.fuzzyRewriteMethod = fuzzyRewriteMethod;
    }

    public void setLenient(boolean lenient) {
        this.lenient = lenient;
    }

    public void setZeroTermsQuery(ZeroTermsQuery zeroTermsQuery) {
        this.zeroTermsQuery = zeroTermsQuery;
    }

    /**
     * Analyzes {@code value} against {@code fieldName} and builds the query of the requested
     * {@code type}. Fields whose mappers ask for term queries bypass analysis entirely.
     *
     * @throws IOException if the analyzer's token stream fails
     */
    public Query parse(Type type, String fieldName, Object value) throws IOException {
        FieldMapper mapper = null;
        final String field;
        MapperService.SmartNameFieldMappers smartNameFieldMappers = parseContext.smartFieldMappers(fieldName);
        if (smartNameFieldMappers != null && smartNameFieldMappers.hasMapper()) {
            mapper = smartNameFieldMappers.mapper();
            field = mapper.names().indexName();
        } else {
            field = fieldName;
        }

        // some mappers (e.g. non-text fields) want the raw value as a term query, no analysis
        if (mapper != null && mapper.useTermQueryWithQueryString()) {
            if (smartNameFieldMappers.explicitTypeInNameWithDocMapper()) {
                String[] previousTypes = QueryParseContext.setTypesWithPrevious(new String[]{smartNameFieldMappers.docMapper().type()});
                try {
                    return wrapSmartNameQuery(mapper.termQuery(value, parseContext), smartNameFieldMappers, parseContext);
                } catch (RuntimeException e) {
                    // lenient mode swallows per-field parse failures
                    if (lenient) {
                        return null;
                    }
                    throw e;
                } finally {
                    QueryParseContext.setTypes(previousTypes);
                }
            } else {
                try {
                    return wrapSmartNameQuery(mapper.termQuery(value, parseContext), smartNameFieldMappers, parseContext);
                } catch (RuntimeException e) {
                    if (lenient) {
                        return null;
                    }
                    throw e;
                }
            }
        }

        // resolve the analyzer: explicit setting wins, then field, then smart-name, then default
        Analyzer analyzer = null;
        if (this.analyzer == null) {
            if (mapper != null) {
                analyzer = mapper.searchAnalyzer();
            }
            if (analyzer == null && smartNameFieldMappers != null) {
                analyzer = smartNameFieldMappers.searchAnalyzer();
            }
            if (analyzer == null) {
                analyzer = parseContext.mapperService().searchAnalyzer();
            }
        } else {
            analyzer = parseContext.mapperService().analysisService().analyzer(this.analyzer);
            if (analyzer == null) {
                throw new ElasticsearchIllegalArgumentException("No analyzer found for [" + this.analyzer + "]");
            }
        }

        // Logic similar to QueryParser#getFieldQuery
        // first pass: count tokens and positions (also detects stacked tokens, e.g. synonyms)
        final TokenStream source = analyzer.tokenStream(field, value.toString());
        source.reset();
        int numTokens = 0;
        int positionCount = 0;
        boolean severalTokensAtSamePosition = false;

        final CachingTokenFilter buffer = new CachingTokenFilter(source);
        buffer.reset();
        final CharTermAttribute termAtt = buffer.addAttribute(CharTermAttribute.class);
        final PositionIncrementAttribute posIncrAtt = buffer.addAttribute(PositionIncrementAttribute.class);
        boolean hasMoreTokens = buffer.incrementToken();
        while (hasMoreTokens) {
            numTokens++;
            int positionIncrement = posIncrAtt.getPositionIncrement();
            if (positionIncrement != 0) {
                positionCount += positionIncrement;
            } else {
                severalTokensAtSamePosition = true;
            }
            hasMoreTokens = buffer.incrementToken();
        }
        // rewind the buffer stream
        buffer.reset();

        source.close();

        if (numTokens == 0) {
            return zeroTermsQuery();
        } else if (type == Type.BOOLEAN) {
            if (numTokens == 1) {
                boolean hasNext = buffer.incrementToken();
                assert hasNext;
                final Query q = newTermQuery(mapper, new Term(field, termToByteRef(termAtt)));
                return wrapSmartNameQuery(q, smartNameFieldMappers, parseContext);
            }
            if (commonTermsCutoff != null) {
                ExtendedCommonTermsQuery q = new ExtendedCommonTermsQuery(occur, occur, commonTermsCutoff, positionCount == 1);
                for (int i = 0; i < numTokens; i++) {
                    boolean hasNext = buffer.incrementToken();
                    assert hasNext;
                    q.add(new Term(field, termToByteRef(termAtt)));
                }
                return wrapSmartNameQuery(q, smartNameFieldMappers, parseContext);
            }
            if (severalTokensAtSamePosition && occur == Occur.MUST) {
                // stacked tokens under MUST: same-position terms become a SHOULD sub-query so
                // any one synonym satisfies the required clause
                BooleanQuery q = new BooleanQuery(positionCount == 1);
                Query currentQuery = null;
                for (int i = 0; i < numTokens; i++) {
                    boolean hasNext = buffer.incrementToken();
                    assert hasNext;
                    if (posIncrAtt != null && posIncrAtt.getPositionIncrement() == 0) {
                        if (!(currentQuery instanceof BooleanQuery)) {
                            Query t = currentQuery;
                            currentQuery = new BooleanQuery(true);
                            ((BooleanQuery)currentQuery).add(t, BooleanClause.Occur.SHOULD);
                        }
                        ((BooleanQuery)currentQuery).add(newTermQuery(mapper, new Term(field, termToByteRef(termAtt))), BooleanClause.Occur.SHOULD);
                    } else {
                        if (currentQuery != null) {
                            q.add(currentQuery, occur);
                        }
                        currentQuery = newTermQuery(mapper, new Term(field, termToByteRef(termAtt)));
                    }
                }
                q.add(currentQuery, occur);
                return wrapSmartNameQuery(q, smartNameFieldMappers, parseContext);
            } else {
                BooleanQuery q = new BooleanQuery(positionCount == 1);
                for (int i = 0; i < numTokens; i++) {
                    boolean hasNext = buffer.incrementToken();
                    assert hasNext;
                    final Query currentQuery = newTermQuery(mapper, new Term(field, termToByteRef(termAtt)));
                    q.add(currentQuery, occur);
                }
                return wrapSmartNameQuery(q, smartNameFieldMappers, parseContext);
            }
        } else if (type == Type.PHRASE) {
            if (severalTokensAtSamePosition) {
                // stacked tokens need a MultiPhraseQuery so alternatives share a position
                final MultiPhraseQuery mpq = new MultiPhraseQuery();
                mpq.setSlop(phraseSlop);
                final List<Term> multiTerms = new ArrayList<Term>();
                int position = -1;
                for (int i = 0; i < numTokens; i++) {
                    int positionIncrement = 1;
                    boolean hasNext = buffer.incrementToken();
                    assert hasNext;
                    positionIncrement = posIncrAtt.getPositionIncrement();

                    if (positionIncrement > 0 && multiTerms.size() > 0) {
                        if (enablePositionIncrements) {
                            mpq.add(multiTerms.toArray(new Term[multiTerms.size()]), position);
                        } else {
                            mpq.add(multiTerms.toArray(new Term[multiTerms.size()]));
                        }
                        multiTerms.clear();
                    }
                    position += positionIncrement;
                    //LUCENE 4 UPGRADE instead of string term we can convert directly from utf-16 to utf-8
                    multiTerms.add(new Term(field, termToByteRef(termAtt)));
                }
                if (enablePositionIncrements) {
                    mpq.add(multiTerms.toArray(new Term[multiTerms.size()]), position);
                } else {
                    mpq.add(multiTerms.toArray(new Term[multiTerms.size()]));
                }
                return wrapSmartNameQuery(mpq, smartNameFieldMappers, parseContext);
            } else {
                PhraseQuery pq = new PhraseQuery();
                pq.setSlop(phraseSlop);
                int position = -1;
                for (int i = 0; i < numTokens; i++) {
                    int positionIncrement = 1;
                    boolean hasNext = buffer.incrementToken();
                    assert hasNext;
                    positionIncrement = posIncrAtt.getPositionIncrement();

                    if (enablePositionIncrements) {
                        position += positionIncrement;
                        //LUCENE 4 UPGRADE instead of string term we can convert directly from utf-16 to utf-8
                        pq.add(new Term(field, termToByteRef(termAtt)), position);
                    } else {
                        pq.add(new Term(field, termToByteRef(termAtt)));
                    }
                }
                return wrapSmartNameQuery(pq, smartNameFieldMappers, parseContext);
            }
        } else if (type == Type.PHRASE_PREFIX) {
            MultiPhrasePrefixQuery mpq = new MultiPhrasePrefixQuery();
            mpq.setSlop(phraseSlop);
            mpq.setMaxExpansions(maxExpansions);
            List<Term> multiTerms = new ArrayList<Term>();
            int position = -1;
            for (int i = 0; i < numTokens; i++) {
                int positionIncrement = 1;
                boolean hasNext = buffer.incrementToken();
                assert hasNext;
                positionIncrement = posIncrAtt.getPositionIncrement();

                if (positionIncrement > 0 && multiTerms.size() > 0) {
                    if (enablePositionIncrements) {
                        mpq.add(multiTerms.toArray(new Term[multiTerms.size()]), position);
                    } else {
                        mpq.add(multiTerms.toArray(new Term[multiTerms.size()]));
                    }
                    multiTerms.clear();
                }
                position += positionIncrement;
                multiTerms.add(new Term(field, termToByteRef(termAtt)));
            }
            if (enablePositionIncrements) {
                mpq.add(multiTerms.toArray(new Term[multiTerms.size()]), position);
            } else {
                mpq.add(multiTerms.toArray(new Term[multiTerms.size()]));
            }
            return wrapSmartNameQuery(mpq, smartNameFieldMappers, parseContext);
        }
        throw new ElasticsearchIllegalStateException("No type found for [" + type + "]");
    }

    /**
     * Builds the per-term query. With fuzziness set, the field mapper's (type-aware) fuzzy
     * query is preferred; otherwise a plain {@link FuzzyQuery} is built from the term. Without
     * fuzziness, the mapper may still supply a specialized term query.
     */
    private Query newTermQuery(@Nullable FieldMapper mapper, Term term) {
        if (fuzziness != null) {
            if (mapper != null) {
                Query query = mapper.fuzzyQuery(term.text(), fuzziness, fuzzyPrefixLength, maxExpansions, transpositions);
                if (query instanceof FuzzyQuery) {
                    QueryParsers.setRewriteMethod((FuzzyQuery) query, fuzzyRewriteMethod);
                }
                // use the mapper-built query; falling through would discard the field-type-aware
                // fuzzy handling and always build a generic FuzzyQuery
                return query;
            }
            int edits = fuzziness.asDistance(term.text());
            FuzzyQuery query = new FuzzyQuery(term, edits, fuzzyPrefixLength, maxExpansions, transpositions);
            QueryParsers.setRewriteMethod(query, rewriteMethod);
            return query;
        }
        if (mapper != null) {
            Query termQuery = mapper.queryStringTermQuery(term);
            if (termQuery != null) {
                return termQuery;
            }
        }
        return new TermQuery(term);
    }

    /** Encodes the current term attribute's UTF-16 buffer as UTF-8 bytes for a {@link Term}. */
    private static BytesRef termToByteRef(CharTermAttribute attr) {
        final BytesRef ref = new BytesRef();
        UnicodeUtil.UTF16toUTF8(attr.buffer(), 0, attr.length(), ref);
        return ref;
    }

    /** Query returned when analysis produced no tokens, per the configured {@link ZeroTermsQuery}. */
    protected Query zeroTermsQuery() {
        return zeroTermsQuery == ZeroTermsQuery.NONE ? Queries.newMatchNoDocsQuery() : Queries.newMatchAllQuery();
    }
}
| 1no label
|
src_main_java_org_elasticsearch_index_search_MatchQuery.java
|
338 |
/**
 * Per-node response for the nodes-restart action. Carries no payload beyond the
 * node identity inherited from {@link NodeOperationResponse}.
 */
public static class NodeRestartResponse extends NodeOperationResponse {

    NodeRestartResponse() {
    }

    public NodeRestartResponse(DiscoveryNode node) {
        super(node);
    }

    /** Deserializes a response instance from the given stream. */
    public static NodeRestartResponse readNodeRestartResponse(StreamInput in) throws IOException {
        NodeRestartResponse response = new NodeRestartResponse();
        response.readFrom(in);
        return response;
    }
}
| 0true
|
src_main_java_org_elasticsearch_action_admin_cluster_node_restart_NodesRestartResponse.java
|
1,594 |
/**
 * Describes a foreign-key relationship used by the admin persistence layer: the many-to-one
 * field, the target entity class, and how lookups against it are restricted and displayed.
 */
public class ForeignKey implements Serializable, PersistencePerspectiveItem {

    private static final long serialVersionUID = 1L;

    // name of the many-to-one field on the owning entity
    private String manyToField;
    // field on the owning entity this key originated from
    private String originatingField;
    // fully qualified class name of the target (foreign) entity
    private String foreignKeyClass;
    // currently selected key value, if any
    private String currentValue;
    // admin datasource used to resolve lookups for this key
    private String dataSourceName;
    // how lookup queries are restricted (defaults to equality on the id)
    private ForeignKeyRestrictionType restrictionType = ForeignKeyRestrictionType.ID_EQ;
    // property on the target entity shown to the user (defaults to "name")
    private String displayValueProperty = "name";
    // whether the relationship may be edited in the admin UI
    private Boolean mutable = true;
public ForeignKey() {
        //do nothing
    }

    // telescoping convenience constructors; all delegate to the full form below
    public ForeignKey(String manyToField, String foreignKeyClass) {
        this(manyToField, foreignKeyClass, null);
    }

    public ForeignKey(String manyToField, String foreignKeyClass, String dataSourceName) {
        this(manyToField, foreignKeyClass, dataSourceName, ForeignKeyRestrictionType.ID_EQ);
    }

    public ForeignKey(String manyToField, String foreignKeyClass, String dataSourceName, ForeignKeyRestrictionType restrictionType) {
        this(manyToField, foreignKeyClass, dataSourceName, restrictionType, "name");
    }

    /**
     * Full constructor.
     *
     * @param manyToField name of the many-to-one field on the owning entity
     * @param foreignKeyClass fully qualified class name of the target entity
     * @param dataSourceName admin datasource used for lookups (may be null)
     * @param restrictionType how lookup queries are restricted
     * @param displayValueProperty property on the target entity shown to the user
     */
    public ForeignKey(String manyToField, String foreignKeyClass, String dataSourceName, ForeignKeyRestrictionType restrictionType, String displayValueProperty) {
        this.manyToField = manyToField;
        this.foreignKeyClass = foreignKeyClass;
        this.dataSourceName = dataSourceName;
        this.restrictionType = restrictionType;
        this.displayValueProperty = displayValueProperty;
    }
// ------------------------------------------------------------------
    // Bean-style accessors; no additional logic beyond get/set.
    // ------------------------------------------------------------------

    public String getManyToField() {
        return manyToField;
    }

    public void setManyToField(String manyToField) {
        this.manyToField = manyToField;
    }

    public String getForeignKeyClass() {
        return foreignKeyClass;
    }

    public void setForeignKeyClass(String foreignKeyClass) {
        this.foreignKeyClass = foreignKeyClass;
    }

    public String getCurrentValue() {
        return currentValue;
    }

    public void setCurrentValue(String currentValue) {
        this.currentValue = currentValue;
    }

    public String getDataSourceName() {
        return dataSourceName;
    }

    public void setDataSourceName(String dataSourceName) {
        this.dataSourceName = dataSourceName;
    }

    public ForeignKeyRestrictionType getRestrictionType() {
        return restrictionType;
    }

    public void setRestrictionType(ForeignKeyRestrictionType restrictionType) {
        this.restrictionType = restrictionType;
    }

    public String getDisplayValueProperty() {
        return displayValueProperty;
    }

    public void setDisplayValueProperty(String displayValueProperty) {
        this.displayValueProperty = displayValueProperty;
    }

    public Boolean getMutable() {
        return mutable;
    }

    public void setMutable(Boolean mutable) {
        this.mutable = mutable;
    }

    public String getOriginatingField() {
        return originatingField;
    }

    public void setOriginatingField(String originatingField) {
        this.originatingField = originatingField;
    }
// Visitor-pattern entry point: dispatches to the visitor's ForeignKey overload.
public void accept(PersistencePerspectiveItemVisitor visitor) {
    visitor.visit(this);
}

// NOTE(review): concatenates fields with no delimiter; presumably used as an
// identity/cache key rather than for display — confirm before changing format.
// Note that 'mutable' is intentionally (or not — verify) excluded here while
// it IS part of equals()/hashCode().
public String toString() {
    StringBuilder sb = new StringBuilder();
    sb.append(manyToField);
    sb.append(foreignKeyClass);
    sb.append(currentValue);
    sb.append(dataSourceName);
    sb.append(restrictionType);
    sb.append(displayValueProperty);
    sb.append(originatingField);
    return sb.toString();
}

/**
 * Shallow field-by-field copy; all fields are immutable types (String,
 * Boolean, enum) so a shallow copy is a full copy.
 */
public ForeignKey cloneForeignKey() {
    ForeignKey foreignKey = new ForeignKey();
    foreignKey.manyToField = manyToField;
    foreignKey.foreignKeyClass = foreignKeyClass;
    foreignKey.currentValue = currentValue;
    foreignKey.dataSourceName = dataSourceName;
    foreignKey.restrictionType = restrictionType;
    foreignKey.displayValueProperty = displayValueProperty;
    foreignKey.mutable = mutable;
    foreignKey.originatingField = originatingField;
    return foreignKey;
}

// Delegates to cloneForeignKey() to satisfy the PersistencePerspectiveItem contract.
@Override
public PersistencePerspectiveItem clonePersistencePerspectiveItem() {
    return cloneForeignKey();
}
// Null-safe field-by-field equality over all eight fields; restrictionType is
// an enum, so reference comparison is sufficient for it.
@Override
public boolean equals(Object o) {
    if (this == o) return true;
    if (!(o instanceof ForeignKey)) return false;
    ForeignKey that = (ForeignKey) o;
    if (currentValue != null ? !currentValue.equals(that.currentValue) : that.currentValue != null) return false;
    if (dataSourceName != null ? !dataSourceName.equals(that.dataSourceName) : that.dataSourceName != null)
        return false;
    if (displayValueProperty != null ? !displayValueProperty.equals(that.displayValueProperty) : that
            .displayValueProperty != null)
        return false;
    if (foreignKeyClass != null ? !foreignKeyClass.equals(that.foreignKeyClass) : that.foreignKeyClass != null)
        return false;
    if (manyToField != null ? !manyToField.equals(that.manyToField) : that.manyToField != null) return false;
    if (mutable != null ? !mutable.equals(that.mutable) : that.mutable != null) return false;
    if (originatingField != null ? !originatingField.equals(that.originatingField) : that.originatingField != null)
        return false;
    if (restrictionType != that.restrictionType) return false;
    return true;
}

// Standard 31-multiplier hash over the same fields as equals(), keeping the
// equals/hashCode contract consistent.
@Override
public int hashCode() {
    int result = manyToField != null ? manyToField.hashCode() : 0;
    result = 31 * result + (originatingField != null ? originatingField.hashCode() : 0);
    result = 31 * result + (foreignKeyClass != null ? foreignKeyClass.hashCode() : 0);
    result = 31 * result + (currentValue != null ? currentValue.hashCode() : 0);
    result = 31 * result + (dataSourceName != null ? dataSourceName.hashCode() : 0);
    result = 31 * result + (restrictionType != null ? restrictionType.hashCode() : 0);
    result = 31 * result + (displayValueProperty != null ? displayValueProperty.hashCode() : 0);
    result = 31 * result + (mutable != null ? mutable.hashCode() : 0);
    return result;
}
}
| 1no label
|
admin_broadleaf-open-admin-platform_src_main_java_org_broadleafcommerce_openadmin_dto_ForeignKey.java
|
1,072 |
// Run the upsert as a plain index request; on success translate the
// IndexResponse into an UpdateResponse for the original update caller.
indexAction.execute(upsertRequest, new ActionListener<IndexResponse>() {
    @Override
    public void onResponse(IndexResponse response) {
        UpdateResponse update = new UpdateResponse(response.getIndex(), response.getType(), response.getId(), response.getVersion(), response.isCreated());
        if (request.fields() != null && request.fields().length > 0) {
            // Caller asked for fields back: re-parse the upsert source so the
            // requested fields can be extracted into the get result.
            Tuple<XContentType, Map<String, Object>> sourceAndContent = XContentHelper.convertToMap(upsertSourceBytes, true);
            update.setGetResult(updateHelper.extractGetResult(request, response.getVersion(), sourceAndContent.v2(), sourceAndContent.v1(), upsertSourceBytes));
        } else {
            update.setGetResult(null);
        }
        listener.onResponse(update);
    }

    @Override
    public void onFailure(Throwable e) {
        e = ExceptionsHelper.unwrapCause(e);
        // Version conflicts / doc-already-exists are retryable: re-run the whole
        // shard operation (on the executor, not this callback thread) until the
        // configured retryOnConflict budget is exhausted.
        if (e instanceof VersionConflictEngineException || e instanceof DocumentAlreadyExistsException) {
            if (retryCount < request.retryOnConflict()) {
                threadPool.executor(executor()).execute(new Runnable() {
                    @Override
                    public void run() {
                        shardOperation(request, listener, retryCount + 1);
                    }
                });
                return;
            }
        }
        listener.onFailure(e);
    }
});
| 1no label
|
src_main_java_org_elasticsearch_action_update_TransportUpdateAction.java
|
454 |
/**
 * Per-node transport request that wraps the top-level {@code ClusterStatsRequest}
 * so it can be fanned out to individual nodes.
 */
static class ClusterStatsNodeRequest extends NodeOperationRequest {

    ClusterStatsRequest request;

    // No-arg constructor required for transport-layer deserialization.
    ClusterStatsNodeRequest() {
    }

    ClusterStatsNodeRequest(String nodeId, ClusterStatsRequest request) {
        super(request, nodeId);
        this.request = request;
    }

    // readFrom/writeTo must stay symmetric: superclass fields first, then the
    // wrapped request.
    @Override
    public void readFrom(StreamInput in) throws IOException {
        super.readFrom(in);
        request = new ClusterStatsRequest();
        request.readFrom(in);
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        super.writeTo(out);
        request.writeTo(out);
    }
}
| 0true
|
src_main_java_org_elasticsearch_action_admin_cluster_stats_TransportClusterStatsAction.java
|
197 |
/**
 * Singleton factory that selects the native C-library binding once at class
 * load time: the AIX-specific binding on AIX, the general binding elsewhere.
 */
public class OCLibraryFactory {
    public static final OCLibraryFactory INSTANCE = new OCLibraryFactory();

    /** Chosen once, in the static initializer below, and never changed. */
    private static final CLibrary C_LIBRARY;

    static {
        C_LIBRARY = Platform.isAIX() ? new AIXCLibrary() : new GeneralCLibrary();
    }

    /** @return the platform-appropriate C library binding (always the same instance). */
    public CLibrary library() {
        return C_LIBRARY;
    }
}
| 0true
|
nativeos_src_main_java_com_orientechnologies_nio_OCLibraryFactory.java
|
389 |
/**
 * Master-node action behind the cluster update-settings API. Validates the
 * requested transient/persistent settings against the registered dynamic
 * settings, applies the accepted ones to the cluster metadata, keeps the
 * cluster read-only block in sync, and finally triggers a reroute so
 * allocation-affecting changes take effect.
 */
public class TransportClusterUpdateSettingsAction extends TransportMasterNodeOperationAction<ClusterUpdateSettingsRequest, ClusterUpdateSettingsResponse> {

    private final AllocationService allocationService;

    private final DynamicSettings dynamicSettings;

    @Inject
    public TransportClusterUpdateSettingsAction(Settings settings, TransportService transportService, ClusterService clusterService, ThreadPool threadPool,
                                                AllocationService allocationService, @ClusterDynamicSettings DynamicSettings dynamicSettings) {
        super(settings, transportService, clusterService, threadPool);
        this.allocationService = allocationService;
        this.dynamicSettings = dynamicSettings;
    }

    @Override
    protected String executor() {
        // The real work happens inside submitted cluster-state update tasks,
        // so no dedicated thread pool is needed here.
        return ThreadPool.Names.SAME;
    }

    @Override
    protected String transportAction() {
        return ClusterUpdateSettingsAction.NAME;
    }

    @Override
    protected ClusterUpdateSettingsRequest newRequest() {
        return new ClusterUpdateSettingsRequest();
    }

    @Override
    protected ClusterUpdateSettingsResponse newResponse() {
        return new ClusterUpdateSettingsResponse();
    }

    @Override
    protected void masterOperation(final ClusterUpdateSettingsRequest request, final ClusterState state, final ActionListener<ClusterUpdateSettingsResponse> listener) throws ElasticsearchException {
        // These builders accumulate only the settings that passed validation,
        // so the response can echo back exactly what was applied.
        final ImmutableSettings.Builder transientUpdates = ImmutableSettings.settingsBuilder();
        final ImmutableSettings.Builder persistentUpdates = ImmutableSettings.settingsBuilder();
        clusterService.submitStateUpdateTask("cluster_update_settings", Priority.URGENT, new AckedClusterStateUpdateTask() {

            // Set by execute() when at least one setting was accepted; decides
            // whether a follow-up reroute is needed before responding.
            private volatile boolean changed = false;

            @Override
            public boolean mustAck(DiscoveryNode discoveryNode) {
                return true;
            }

            @Override
            public void onAllNodesAcked(@Nullable Throwable t) {
                if (changed) {
                    reroute(true);
                } else {
                    listener.onResponse(new ClusterUpdateSettingsResponse(true, transientUpdates.build(), persistentUpdates.build()));
                }
            }

            @Override
            public void onAckTimeout() {
                if (changed) {
                    reroute(false);
                } else {
                    listener.onResponse(new ClusterUpdateSettingsResponse(false, transientUpdates.build(), persistentUpdates.build()));
                }
            }

            // Second cluster-state task: reroute after the settings change.
            // The listener is only notified once this task completes.
            private void reroute(final boolean updateSettingsAcked) {
                clusterService.submitStateUpdateTask("reroute_after_cluster_update_settings", Priority.URGENT, new AckedClusterStateUpdateTask() {

                    @Override
                    public boolean mustAck(DiscoveryNode discoveryNode) {
                        //we wait for the reroute ack only if the update settings was acknowledged
                        return updateSettingsAcked;
                    }

                    @Override
                    public void onAllNodesAcked(@Nullable Throwable t) {
                        //we return when the cluster reroute is acked (the acknowledged flag depends on whether the update settings was acknowledged)
                        listener.onResponse(new ClusterUpdateSettingsResponse(updateSettingsAcked, transientUpdates.build(), persistentUpdates.build()));
                    }

                    @Override
                    public void onAckTimeout() {
                        //we return when the cluster reroute ack times out (acknowledged false)
                        listener.onResponse(new ClusterUpdateSettingsResponse(false, transientUpdates.build(), persistentUpdates.build()));
                    }

                    @Override
                    public TimeValue ackTimeout() {
                        return request.timeout();
                    }

                    @Override
                    public TimeValue timeout() {
                        return request.masterNodeTimeout();
                    }

                    @Override
                    public void onFailure(String source, Throwable t) {
                        //if the reroute fails we only log
                        logger.debug("failed to perform [{}]", t, source);
                    }

                    @Override
                    public ClusterState execute(final ClusterState currentState) {
                        // now, reroute in case things that require it changed (e.g. number of replicas)
                        RoutingAllocation.Result routingResult = allocationService.reroute(currentState);
                        if (!routingResult.changed()) {
                            return currentState;
                        }
                        return ClusterState.builder(currentState).routingResult(routingResult).build();
                    }

                    @Override
                    public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) {
                    }
                });
            }

            @Override
            public TimeValue ackTimeout() {
                return request.timeout();
            }

            @Override
            public TimeValue timeout() {
                return request.masterNodeTimeout();
            }

            @Override
            public void onFailure(String source, Throwable t) {
                logger.debug("failed to perform [{}]", t, source);
                listener.onFailure(t);
            }

            @Override
            public ClusterState execute(final ClusterState currentState) {
                // Validate and collect transient settings. Unknown or invalid
                // settings are logged and skipped — never fatal. "logger.*"
                // keys bypass the dynamic-setting registry check.
                ImmutableSettings.Builder transientSettings = ImmutableSettings.settingsBuilder();
                transientSettings.put(currentState.metaData().transientSettings());
                for (Map.Entry<String, String> entry : request.transientSettings().getAsMap().entrySet()) {
                    if (dynamicSettings.hasDynamicSetting(entry.getKey()) || entry.getKey().startsWith("logger.")) {
                        String error = dynamicSettings.validateDynamicSetting(entry.getKey(), entry.getValue());
                        if (error == null) {
                            transientSettings.put(entry.getKey(), entry.getValue());
                            transientUpdates.put(entry.getKey(), entry.getValue());
                            changed = true;
                        } else {
                            logger.warn("ignoring transient setting [{}], [{}]", entry.getKey(), error);
                        }
                    } else {
                        logger.warn("ignoring transient setting [{}], not dynamically updateable", entry.getKey());
                    }
                }
                // Same validation pass for persistent settings.
                ImmutableSettings.Builder persistentSettings = ImmutableSettings.settingsBuilder();
                persistentSettings.put(currentState.metaData().persistentSettings());
                for (Map.Entry<String, String> entry : request.persistentSettings().getAsMap().entrySet()) {
                    if (dynamicSettings.hasDynamicSetting(entry.getKey()) || entry.getKey().startsWith("logger.")) {
                        String error = dynamicSettings.validateDynamicSetting(entry.getKey(), entry.getValue());
                        if (error == null) {
                            persistentSettings.put(entry.getKey(), entry.getValue());
                            persistentUpdates.put(entry.getKey(), entry.getValue());
                            changed = true;
                        } else {
                            logger.warn("ignoring persistent setting [{}], [{}]", entry.getKey(), error);
                        }
                    } else {
                        logger.warn("ignoring persistent setting [{}], not dynamically updateable", entry.getKey());
                    }
                }
                if (!changed) {
                    return currentState;
                }
                MetaData.Builder metaData = MetaData.builder(currentState.metaData())
                        .persistentSettings(persistentSettings.build())
                        .transientSettings(transientSettings.build());
                // Keep the cluster-wide read-only block consistent with the
                // (possibly just-changed) read-only setting.
                ClusterBlocks.Builder blocks = ClusterBlocks.builder().blocks(currentState.blocks());
                boolean updatedReadOnly = metaData.persistentSettings().getAsBoolean(MetaData.SETTING_READ_ONLY, false) || metaData.transientSettings().getAsBoolean(MetaData.SETTING_READ_ONLY, false);
                if (updatedReadOnly) {
                    blocks.addGlobalBlock(MetaData.CLUSTER_READ_ONLY_BLOCK);
                } else {
                    blocks.removeGlobalBlock(MetaData.CLUSTER_READ_ONLY_BLOCK);
                }
                return builder(currentState).metaData(metaData).blocks(blocks).build();
            }

            @Override
            public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) {
            }
        });
    }
}
| 1no label
|
src_main_java_org_elasticsearch_action_admin_cluster_settings_TransportClusterUpdateSettingsAction.java
|
315 |
// Fire the (blocking) tryPut on a separate thread so the test thread can keep
// running; the latch signals once tryPut has returned (success or timeout).
new Thread() {
    public void run() {
        map.tryPut(key, newValue, 8, TimeUnit.SECONDS);
        tryPutReturned.countDown();
    }
}.start();
| 0true
|
hazelcast-client_src_test_java_com_hazelcast_client_map_ClientMapLockTest.java
|
3,863 |
/**
 * Parses the {@code has_parent} query: matches child documents whose parent
 * (of the configured {@code parent_type}) matches the inner query.
 */
public class HasParentQueryParser implements QueryParser {

    public static final String NAME = "has_parent";

    @Inject
    public HasParentQueryParser() {
    }

    @Override
    public String[] names() {
        // Registered under both underscore and camel-case spellings.
        return new String[]{NAME, Strings.toCamelCase(NAME)};
    }

    @Override
    public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
        XContentParser parser = parseContext.parser();

        Query innerQuery = null;
        boolean queryFound = false;
        float boost = 1.0f;
        String parentType = null;
        boolean score = false;
        String queryName = null;

        // Walk the object token by token, picking up the inner query and the
        // scalar options.
        String currentFieldName = null;
        XContentParser.Token token;
        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
            if (token == XContentParser.Token.FIELD_NAME) {
                currentFieldName = parser.currentName();
            } else if (token == XContentParser.Token.START_OBJECT) {
                if ("query".equals(currentFieldName)) {
                    // TODO handle `query` element before `type` element...
                    // The inner query must be parsed in the context of the
                    // parent type; save/restore the type context around it.
                    String[] origTypes = QueryParseContext.setTypesWithPrevious(parentType == null ? null : new String[]{parentType});
                    try {
                        innerQuery = parseContext.parseInnerQuery();
                        queryFound = true;
                    } finally {
                        QueryParseContext.setTypes(origTypes);
                    }
                } else {
                    throw new QueryParsingException(parseContext.index(), "[has_parent] query does not support [" + currentFieldName + "]");
                }
            } else if (token.isValue()) {
                if ("type".equals(currentFieldName) || "parent_type".equals(currentFieldName) || "parentType".equals(currentFieldName)) {
                    parentType = parser.text();
                } else if ("_scope".equals(currentFieldName)) {
                    throw new QueryParsingException(parseContext.index(), "the [_scope] support in [has_parent] query has been removed, use a filter as a facet_filter in the relevant global facet");
                } else if ("score_type".equals(currentFieldName) || "scoreType".equals(currentFieldName)) {
                    String scoreTypeValue = parser.text();
                    if ("score".equals(scoreTypeValue)) {
                        score = true;
                    } else if ("none".equals(scoreTypeValue)) {
                        score = false;
                    }
                } else if ("score_mode".equals(currentFieldName) || "scoreMode".equals(currentFieldName)) {
                    String scoreModeValue = parser.text();
                    if ("score".equals(scoreModeValue)) {
                        score = true;
                    } else if ("none".equals(scoreModeValue)) {
                        score = false;
                    }
                } else if ("boost".equals(currentFieldName)) {
                    boost = parser.floatValue();
                } else if ("_name".equals(currentFieldName)) {
                    queryName = parser.text();
                } else {
                    throw new QueryParsingException(parseContext.index(), "[has_parent] query does not support [" + currentFieldName + "]");
                }
            }
        }
        if (!queryFound) {
            throw new QueryParsingException(parseContext.index(), "[has_parent] query requires 'query' field");
        }
        if (innerQuery == null) {
            // parseInnerQuery() can legitimately yield null (e.g. empty query)
            return null;
        }

        if (parentType == null) {
            throw new QueryParsingException(parseContext.index(), "[has_parent] query requires 'parent_type' field");
        }

        DocumentMapper parentDocMapper = parseContext.mapperService().documentMapper(parentType);
        if (parentDocMapper == null) {
            throw new QueryParsingException(parseContext.index(), "[has_parent] query configured 'parent_type' [" + parentType + "] is not a valid type");
        }

        // NOTE(review): boost is applied to innerQuery here AND to the final
        // wrapping query below — confirm the double application is intended.
        innerQuery.setBoost(boost);
        // wrap the query with type query
        innerQuery = new XFilteredQuery(innerQuery, parseContext.cacheFilter(parentDocMapper.typeFilter(), null));

        // Collect every top-level parent type in the mapping (types that are
        // parents but not themselves children), so documents of those types
        // can be excluded when building the children filter below.
        Set<String> parentTypes = new HashSet<String>(5);
        parentTypes.add(parentType);
        for (DocumentMapper documentMapper : parseContext.mapperService()) {
            ParentFieldMapper parentFieldMapper = documentMapper.parentFieldMapper();
            if (parentFieldMapper.active()) {
                DocumentMapper parentTypeDocumentMapper = parseContext.mapperService().documentMapper(parentFieldMapper.type());
                if (parentTypeDocumentMapper == null) {
                    // Only add this, if this parentFieldMapper (also a parent) isn't a child of another parent.
                    parentTypes.add(parentFieldMapper.type());
                }
            }
        }

        Filter parentFilter;
        if (parentTypes.size() == 1) {
            DocumentMapper documentMapper = parseContext.mapperService().documentMapper(parentTypes.iterator().next());
            parentFilter = parseContext.cacheFilter(documentMapper.typeFilter(), null);
        } else {
            XBooleanFilter parentsFilter = new XBooleanFilter();
            for (String parentTypeStr : parentTypes) {
                DocumentMapper documentMapper = parseContext.mapperService().documentMapper(parentTypeStr);
                Filter filter = parseContext.cacheFilter(documentMapper.typeFilter(), null);
                parentsFilter.add(filter, BooleanClause.Occur.SHOULD);
            }
            parentFilter = parentsFilter;
        }
        // "children" = everything that is not a parent type.
        Filter childrenFilter = parseContext.cacheFilter(new NotFilter(parentFilter), null);

        // delete_by_query cannot use the scoring variant; it wraps the query
        // in a constant-score filter instead.
        boolean deleteByQuery = "delete_by_query".equals(SearchContext.current().source());
        Query query;
        if (!deleteByQuery && score) {
            query = new ParentQuery(innerQuery, parentType, childrenFilter);
        } else {
            query = new ParentConstantScoreQuery(innerQuery, parentType, childrenFilter);
            if (deleteByQuery) {
                query = new XConstantScoreQuery(new DeleteByQueryWrappingFilter(query));
            }
        }
        query.setBoost(boost);
        if (queryName != null) {
            parseContext.addNamedFilter(queryName, new CustomQueryWrappingFilter(query));
        }
        return query;
    }
}
| 1no label
|
src_main_java_org_elasticsearch_index_query_HasParentQueryParser.java
|
5,307 |
/**
 * Stream registered with {@code AggregationStreams} to deserialize
 * {@code LongTerms} aggregation results off the wire.
 */
public static AggregationStreams.Stream STREAM = new AggregationStreams.Stream() {
    @Override
    public LongTerms readResult(StreamInput in) throws IOException {
        LongTerms buckets = new LongTerms();
        buckets.readFrom(in);
        return buckets;
    }
};
| 1no label
|
src_main_java_org_elasticsearch_search_aggregations_bucket_terms_LongTerms.java
|
953 |
// Wait (bounded by the request's master-node timeout) for a master change and
// then retry the operation. Each terminal path removes the listener first so
// it fires at most once.
clusterService.add(request.masterNodeTimeout(), new TimeoutClusterStateListener() {
    @Override
    public void postAdded() {
        // The master may have changed between the snapshot and the moment the
        // listener was registered — re-check and retry immediately if so.
        ClusterState clusterStateV2 = clusterService.state();
        if (!clusterState.nodes().masterNodeId().equals(clusterStateV2.nodes().masterNodeId())) {
            // master changes while adding the listener, try here
            clusterService.remove(this);
            innerExecute(request, listener, false);
        }
    }

    @Override
    public void onClose() {
        clusterService.remove(this);
        listener.onFailure(new NodeClosedException(clusterService.localNode()));
    }

    @Override
    public void onTimeout(TimeValue timeout) {
        clusterService.remove(this);
        listener.onFailure(new MasterNotDiscoveredException());
    }

    @Override
    public void clusterChanged(ClusterChangedEvent event) {
        if (event.nodesDelta().masterNodeChanged()) {
            clusterService.remove(this);
            innerExecute(request, listener, false);
        }
    }
});
| 1no label
|
src_main_java_org_elasticsearch_action_support_master_TransportMasterNodeOperationAction.java
|
20 |
/**
 * Memcache text-protocol "delete" command. Holds the key, the expiration and
 * the noreply flag parsed from the command line, and buffers the response
 * bytes to be written back to the client.
 */
public class DeleteCommand extends AbstractTextCommand {

    ByteBuffer response;
    private final String key;
    private final int expiration;
    private final boolean noreply;

    public DeleteCommand(String key, int expiration, boolean noreply) {
        super(TextCommandType.DELETE);
        this.key = key;
        this.expiration = expiration;
        this.noreply = noreply;
    }

    /** The delete command carries no payload beyond its command line. */
    public boolean readFrom(ByteBuffer source) {
        return true;
    }

    /** Installs the raw response bytes to send back to the client. */
    public void setResponse(byte[] value) {
        this.response = ByteBuffer.wrap(value);
    }

    /**
     * Copies as much of the response as fits into {@code destination}.
     *
     * @return true once the whole response has been written
     */
    public boolean writeTo(ByteBuffer destination) {
        if (response == null) {
            // No explicit response was set; fall back to the default STORED reply.
            response = ByteBuffer.wrap(STORED);
        }
        while (destination.hasRemaining() && response.hasRemaining()) {
            destination.put(response.get());
        }
        return !response.hasRemaining();
    }

    /** A reply is suppressed only when the client sent "noreply". */
    public boolean shouldReply() {
        return !noreply;
    }

    public int getExpiration() {
        return expiration;
    }

    public String getKey() {
        return key;
    }

    @Override
    public String toString() {
        return new StringBuilder("DeleteCommand [").append(type).append("]{")
                .append("key='").append(key).append('\'')
                .append(", expiration=").append(expiration)
                .append(", noreply=").append(noreply).append('}')
                .append(super.toString())
                .toString();
    }
}
| 0true
|
hazelcast_src_main_java_com_hazelcast_ascii_memcache_DeleteCommand.java
|
181 |
/**
 * Generic single-method callback: computes a result of type {@code RET} from
 * an argument of type {@code PAR}.
 *
 * @param <RET> result type
 * @param <PAR> argument type
 */
public interface OCallable<RET, PAR> {
    public RET call(PAR iArgument);
}
| 0true
|
commons_src_main_java_com_orientechnologies_common_util_OCallable.java
|
113 |
/**
 * Log formatter that renders a record either as a single formatted line
 * (timestamp, level, message, requester) or, when a throwable is attached,
 * as the message followed by every throwable in the cause chain with its
 * own stack trace.
 */
public class OLogFormatter extends Formatter {

  // DateFormat is not thread-safe; all access is guarded by synchronizing
  // on this instance (see customFormatMessage).
  private static final DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss:SSS");

  /**
   * The end-of-line character for this platform.
   */
  private static final String EOL = System.getProperty("line.separator");

  @Override
  public String format(final LogRecord record) {
    if (record.getThrown() == null) {
      return customFormatMessage(record);
    }

    // FORMAT THE STACK TRACE
    final StringBuilder buffer = new StringBuilder();
    buffer.append(record.getMessage());

    Throwable current = record.getThrown();
    while (current != null) {
      buffer.append(EOL).append(current.getMessage());
      // Bug fix: print the stack trace of the throwable currently being
      // visited — previously record.getThrown().getStackTrace() repeated the
      // top-level trace for every link of the cause chain.
      for (StackTraceElement stackTraceElement : current.getStackTrace()) {
        buffer.append(EOL).append("-> ");
        buffer.append(stackTraceElement.toString());
      }
      current = current.getCause();
    }

    return buffer.toString();
  }

  /**
   * Renders a record without a throwable: newline, timestamp, 4-letter level,
   * the (optionally printf-formatted) message and, when available, the simple
   * name of the logger in brackets.
   */
  private String customFormatMessage(final LogRecord iRecord) {
    Level iLevel = iRecord.getLevel();
    String iMessage = iRecord.getMessage();
    Object[] iAdditionalArgs = iRecord.getParameters();
    String iRequester = getSourceClassSimpleName(iRecord.getLoggerName());

    final StringBuilder buffer = new StringBuilder();
    buffer.append(EOL);
    synchronized (dateFormat) {
      buffer.append(dateFormat.format(new Date()));
    }
    buffer.append(' ');
    buffer.append(iLevel.getName().substring(0, 4));
    buffer.append(' ');

    // FORMAT THE MESSAGE
    try {
      if (iAdditionalArgs != null)
        buffer.append(String.format(iMessage, iAdditionalArgs));
      else
        buffer.append(iMessage);
    } catch (Exception e) {
      // Malformed format string: fall back to the raw message.
      buffer.append(iMessage);
    }

    if (iRequester != null) {
      buffer.append(" [");
      buffer.append(iRequester);
      buffer.append(']');
    }
    return buffer.toString();
  }

  /**
   * @return the simple class name portion of a dotted logger name, or null
   *         when no logger name was recorded (robustness fix: previously a
   *         null name caused a NullPointerException).
   */
  private String getSourceClassSimpleName(final String iSourceClassName) {
    if (iSourceClassName == null)
      return null;
    return iSourceClassName.substring(iSourceClassName.lastIndexOf(".") + 1);
  }
}
| 0true
|
commons_src_main_java_com_orientechnologies_common_log_OLogFormatter.java
|
219 |
// Client-side integration test for ICountDownLatch: trySetCount semantics and
// await() behaviour while another thread counts down.
@RunWith(HazelcastSerialClassRunner.class)
@Category(QuickTest.class)
public class ClientCountDownLatchTest {

    static final String name = "test";

    static HazelcastInstance hz;
    static ICountDownLatch l;

    @Before
    public void init() {
        // One member plus one client connected to it.
        Hazelcast.newHazelcastInstance();
        hz = HazelcastClient.newHazelcastClient();
        l = hz.getCountDownLatch(name);
    }

    @After
    public void stop() {
        hz.shutdown();
        Hazelcast.shutdownAll();
    }

    @Test
    public void testLatch() throws Exception {
        // trySetCount succeeds only while the latch is unset.
        assertTrue(l.trySetCount(20));
        assertFalse(l.trySetCount(10));
        assertEquals(20, l.getCount());
        // Background thread counts down 20 times, ~60ms apart (~1.2s total).
        // NOTE(review): the 1s/5s await bounds make this timing-sensitive and
        // potentially flaky on slow machines — confirm acceptable.
        new Thread() {
            public void run() {
                for (int i = 0; i < 20; i++) {
                    l.countDown();
                    try {
                        Thread.sleep(60);
                    } catch (InterruptedException e) {
                        e.printStackTrace();
                    }
                }
            }
        }.start();
        assertFalse(l.await(1, TimeUnit.SECONDS));
        assertTrue(l.await(5, TimeUnit.SECONDS));
    }
}
| 0true
|
hazelcast-client_src_test_java_com_hazelcast_client_countdownlatch_ClientCountDownLatchTest.java
|
3,266 |
/**
 * Security permission for Hazelcast topics. Extends the base instance
 * permission (CREATE/DESTROY bits) with PUBLISH and LISTEN actions.
 */
public class TopicPermission extends InstancePermission {

    // Bit values continue after CREATE/DESTROY defined in InstancePermission.
    private static final int PUBLISH = 0x4;
    private static final int LISTEN = 0x8;
    private static final int ALL = CREATE | DESTROY | LISTEN | PUBLISH;

    public TopicPermission(String name, String... actions) {
        super(name, actions);
    }

    /**
     * Folds the action names into a single bit mask; the "all" action
     * short-circuits to every bit. Unknown action names are ignored.
     */
    @Override
    protected int initMask(String[] actions) {
        int mask = NONE;
        for (String action : actions) {
            if (ActionConstants.ACTION_ALL.equals(action)) {
                return ALL;
            }
            if (ActionConstants.ACTION_CREATE.equals(action)) {
                mask |= CREATE;
            } else if (ActionConstants.ACTION_PUBLISH.equals(action)) {
                mask |= PUBLISH;
            } else if (ActionConstants.ACTION_DESTROY.equals(action)) {
                mask |= DESTROY;
            } else if (ActionConstants.ACTION_LISTEN.equals(action)) {
                mask |= LISTEN;
            }
        }
        return mask;
    }
}
| 1no label
|
hazelcast_src_main_java_com_hazelcast_security_permission_TopicPermission.java
|
62 |
/**
 * One entry of the transaction log: a record type (TX_START, BRANCH_ADD,
 * MARK_COMMIT or TX_DONE), the global/branch transaction ids and a sequence
 * number. Immutable after construction.
 */
public static class Record
{
    private byte type = 0;
    private byte globalId[] = null;
    private byte branchId[] = null;
    private int seqNr = -1;

    Record( byte type, byte globalId[], byte branchId[], int seqNr )
    {
        // Valid record types are 1..4 (see typeName()).
        if ( type < 1 || type > 4 )
        {
            throw new IllegalArgumentException( "Illegal type: " + type );
        }
        this.type = type;
        this.globalId = globalId;
        this.branchId = branchId;
        this.seqNr = seqNr;
    }

    public byte getType()
    {
        return type;
    }

    public byte[] getGlobalId()
    {
        return globalId;
    }

    public byte[] getBranchId()
    {
        return branchId;
    }

    public int getSequenceNumber()
    {
        return seqNr;
    }

    @Override
    public String toString()
    {
        XidImpl xid = new XidImpl( globalId, branchId == null ? new byte[0] : branchId );
        // 1 byte for the type marker plus the serialized size of each id.
        int size = 1 + sizeOf( globalId ) + sizeOf( branchId );
        return "TxLogRecord[" + typeName() + "," + xid + "," + seqNr + "," + size + "]";
    }

    private int sizeOf( byte[] id )
    {
        // If id is null it means this record type doesn't have it. TX_START/MARK_COMMIT/TX_DONE
        // only has the global id, whereas BRANCH_ADD has got both the global and branch ids.
        if ( id == null )
        {
            return 0;
        }
        // The length of the array (1 byte) + the actual array
        return 1 + id.length;
    }

    // Human-readable name for the record type, for toString()/diagnostics.
    String typeName()
    {
        switch ( type )
        {
        case TX_START:
            return "TX_START";
        case BRANCH_ADD:
            return "BRANCH_ADD";
        case MARK_COMMIT:
            return "MARK_COMMIT";
        case TX_DONE:
            return "TX_DONE";
        default:
            return "<unknown type>";
        }
    }
}
| 0true
|
community_kernel_src_main_java_org_neo4j_kernel_impl_transaction_TxLog.java
|
6 |
/**
 * Completion action for runAfterBoth-style combinators: completes {@code dst}
 * (with null, or the first exception found) once BOTH source futures have a
 * result.
 */
static final class AndCompletion extends Completion {
    final CompletableFuture<?> src;
    final CompletableFuture<?> snd;
    final CompletableFuture<Void> dst;

    AndCompletion(CompletableFuture<?> src,
                  CompletableFuture<?> snd,
                  CompletableFuture<Void> dst) {
        this.src = src; this.snd = snd; this.dst = dst;
    }

    public final void run() {
        final CompletableFuture<?> a;
        final CompletableFuture<?> b;
        final CompletableFuture<Void> dst;
        Object r, s; Throwable ex;
        // Fire only when both results are present, and claim this completion
        // exactly once via the 0->1 CAS (presumably on the Completion's own
        // atomic state — confirm against the enclosing class).
        if ((dst = this.dst) != null &&
            (a = this.src) != null &&
            (r = a.result) != null &&
            (b = this.snd) != null &&
            (s = b.result) != null &&
            compareAndSet(0, 1)) {
            // AltResult wraps a null result or an exception; prefer the first
            // source's exception, then the second's.
            if (r instanceof AltResult)
                ex = ((AltResult)r).ex;
            else
                ex = null;
            if (ex == null && (s instanceof AltResult))
                ex = ((AltResult)s).ex;
            dst.internalComplete(null, ex);
        }
    }

    private static final long serialVersionUID = 5232453952276885070L;
}
| 0true
|
src_main_java_jsr166e_CompletableFuture.java
|
4,485 |
// Kick off the recovery on the generic thread pool so the calling thread is
// not blocked; the status is registered before doRecovery so it can be
// looked up (e.g. for cancellation) while recovery runs.
threadPool.generic().execute(new Runnable() {
    @Override
    public void run() {
        // create a new recovery status, and process...
        RecoveryStatus recoveryStatus = new RecoveryStatus(request.recoveryId(), indexShard);
        onGoingRecoveries.put(recoveryStatus.recoveryId, recoveryStatus);
        doRecovery(request, recoveryStatus, listener);
    }
});
| 1no label
|
src_main_java_org_elasticsearch_indices_recovery_RecoveryTarget.java
|
2,668 |
/**
 * Writes a Portable object to a stream according to its ClassDefinition.
 * Layout: a 4-byte final offset at {@code begin}, then a table of 4-byte
 * field offsets (one per field plus one for raw data) at {@code offset},
 * then the field values; each write records its position in the table via
 * setPosition(). Fields must be written exactly once; after
 * getRawDataOutput() no more named fields may be written.
 */
public class DefaultPortableWriter implements PortableWriter {

    private final PortableSerializer serializer;
    private final ClassDefinition cd;
    private final BufferObjectDataOutput out;
    private final int begin;
    private final int offset;
    // Tracks which fields were already written, to reject duplicates.
    private final Set<String> writtenFields;
    // Once true, only raw (unnamed) data may be appended.
    private boolean raw;

    public DefaultPortableWriter(PortableSerializer serializer, BufferObjectDataOutput out, ClassDefinition cd)
            throws IOException {
        this.serializer = serializer;
        this.out = out;
        this.cd = cd;
        this.writtenFields = new HashSet<String>(cd.getFieldCount());
        this.begin = out.position();

        // room for final offset
        out.writeZeroBytes(4);

        this.offset = out.position();
        // one additional for raw data
        final int fieldIndexesLength = (cd.getFieldCount() + 1) * 4;
        out.writeZeroBytes(fieldIndexesLength);
    }

    public int getVersion() {
        return cd.getVersion();
    }

    public void writeInt(String fieldName, int value) throws IOException {
        setPosition(fieldName);
        out.writeInt(value);
    }

    public void writeLong(String fieldName, long value) throws IOException {
        setPosition(fieldName);
        out.writeLong(value);
    }

    public void writeUTF(String fieldName, String str) throws IOException {
        setPosition(fieldName);
        out.writeUTF(str);
    }

    public void writeBoolean(String fieldName, boolean value) throws IOException {
        setPosition(fieldName);
        out.writeBoolean(value);
    }

    public void writeByte(String fieldName, byte value) throws IOException {
        setPosition(fieldName);
        out.writeByte(value);
    }

    public void writeChar(String fieldName, int value) throws IOException {
        setPosition(fieldName);
        out.writeChar(value);
    }

    public void writeDouble(String fieldName, double value) throws IOException {
        setPosition(fieldName);
        out.writeDouble(value);
    }

    public void writeFloat(String fieldName, float value) throws IOException {
        setPosition(fieldName);
        out.writeFloat(value);
    }

    public void writeShort(String fieldName, short value) throws IOException {
        setPosition(fieldName);
        out.writeShort(value);
    }

    // Nested portable: a null marker byte, then the serialized value if present.
    public void writePortable(String fieldName, Portable portable) throws IOException {
        setPosition(fieldName);
        final boolean NULL = portable == null;
        out.writeBoolean(NULL);
        if (!NULL) {
            serializer.write(out, portable);
        }
    }

    public void writeNullPortable(String fieldName, int factoryId, int classId) throws IOException {
        setPosition(fieldName);
        final boolean NULL = true;
        out.writeBoolean(NULL);
    }

    public void writeByteArray(String fieldName, byte[] values) throws IOException {
        setPosition(fieldName);
        IOUtil.writeByteArray(out, values);
    }

    public void writeCharArray(String fieldName, char[] values) throws IOException {
        setPosition(fieldName);
        out.writeCharArray(values);
    }

    public void writeIntArray(String fieldName, int[] values) throws IOException {
        setPosition(fieldName);
        out.writeIntArray(values);
    }

    public void writeLongArray(String fieldName, long[] values) throws IOException {
        setPosition(fieldName);
        out.writeLongArray(values);
    }

    public void writeDoubleArray(String fieldName, double[] values) throws IOException {
        setPosition(fieldName);
        out.writeDoubleArray(values);
    }

    public void writeFloatArray(String fieldName, float[] values) throws IOException {
        setPosition(fieldName);
        out.writeFloatArray(values);
    }

    public void writeShortArray(String fieldName, short[] values) throws IOException {
        setPosition(fieldName);
        out.writeShortArray(values);
    }

    // Array of portables: a length prefix, then a per-element offset table
    // (back-patched as each element is serialized), then the elements.
    public void writePortableArray(String fieldName, Portable[] portables) throws IOException {
        setPosition(fieldName);
        final int len = portables == null ? 0 : portables.length;
        out.writeInt(len);
        if (len > 0) {
            final int offset = out.position();
            out.writeZeroBytes(len * 4);
            for (int i = 0; i < portables.length; i++) {
                out.writeInt(offset + i * 4, out.position());
                final Portable portable = portables[i];
                serializer.write(out, portable);
            }
        }
    }

    /**
     * Validates the field against the class definition, rejects duplicates
     * and post-raw writes, and records the current stream position in the
     * field-offset table.
     */
    private void setPosition(String fieldName) throws IOException {
        if (raw) {
            throw new HazelcastSerializationException("Cannot write Portable fields after getRawDataOutput() is called!");
        }
        FieldDefinition fd = cd.get(fieldName);
        if (fd == null) {
            throw new HazelcastSerializationException("Invalid field name: '" + fieldName
                    + "' for ClassDefinition {id: " + cd.getClassId() + ", version: " + cd.getVersion() + "}");
        }
        if (writtenFields.add(fieldName)) {
            int pos = out.position();
            int index = fd.getIndex();
            out.writeInt(offset + index * 4, pos);
        } else {
            throw new HazelcastSerializationException("Field '" + fieldName + "' has already been written!");
        }
    }

    // Switches to raw mode: records the raw-data start position in the last
    // slot of the offset table, then hands out the underlying output.
    public ObjectDataOutput getRawDataOutput() throws IOException {
        if (!raw) {
            int pos = out.position();
            // last index
            int index = cd.getFieldCount();
            out.writeInt(offset + index * 4, pos);
        }
        raw = true;
        return out;
    }

    void end() throws IOException {
        // write final offset
        out.writeInt(begin, out.position());
    }
}
| 1no label
|
hazelcast_src_main_java_com_hazelcast_nio_serialization_DefaultPortableWriter.java
|
178 |
// Test helper: a function that always throws, used to verify exception
// propagation through IAtomicLong.apply/alter.
private static class FailingFunction implements IFunction<Long, Long> {
    @Override
    public Long apply(Long input) {
        throw new WoohaaException();
    }
}
| 0true
|
hazelcast-client_src_test_java_com_hazelcast_client_atomiclong_ClientAtomicLongTest.java
|
169 |
/**
 * Standalone load driver for the Hazelcast client (deliberately excluded from
 * the JUnit run). Boots two embedded members plus one client, then hammers the
 * "default" IMap from {@link #THREAD_COUNT} worker threads with a configurable
 * get/put/remove mix, printing throughput statistics every
 * {@link #STATS_SECONDS} seconds. Runs until the process is killed.
 */
@Ignore("not a JUnit test")
public class SimpleMapTestFromClient {

    static {
        // Force fast, local-only networking so the embedded members start
        // quickly and deterministically on a developer machine.
        System.setProperty(GroupProperties.PROP_WAIT_SECONDS_BEFORE_JOIN, "0");
        System.setProperty("java.net.preferIPv4Stack", "true");
        System.setProperty("hazelcast.local.localAddress", "127.0.0.1");
        System.setProperty("hazelcast.version.check.enabled", "false");
        System.setProperty("hazelcast.socket.bind.any", "false");
        // To isolate concurrent runs on a shared network, set a randomized
        // "hazelcast.multicast.group" (e.g. "224.x.y.z") here.
        // (Previous dead code that computed unused random octets was removed.)
    }

    public static int THREAD_COUNT = 40;
    public static int ENTRY_COUNT = 10 * 1000;
    public static int VALUE_SIZE = 1000;
    public static final int STATS_SECONDS = 10;
    public static int GET_PERCENTAGE = 40;
    public static int PUT_PERCENTAGE = 40;

    public static void main(String[] args) {
        final ClientConfig clientConfig = new ClientConfig();
        // The member instances are intentionally kept in locals: they anchor
        // the embedded cluster for the lifetime of the test process.
        final HazelcastInstance instance1 = Hazelcast.newHazelcastInstance();
        final HazelcastInstance instance2 = Hazelcast.newHazelcastInstance();
        final HazelcastInstance client = HazelcastClient.newHazelcastClient(clientConfig);
        final Stats stats = new Stats();
        parseArgs(args);
        printTestParameters();
        startWorkers(client, stats);
        startStatsPrinter(client, stats);
    }

    /**
     * Applies single-letter-prefixed CLI overrides (e.g. "t200 v130 p10 g85");
     * prints usage help when no arguments were given.
     */
    private static void parseArgs(String[] args) {
        if (args != null && args.length > 0) {
            for (String arg : args) {
                arg = arg.trim();
                if (arg.startsWith("t")) {
                    THREAD_COUNT = Integer.parseInt(arg.substring(1));
                } else if (arg.startsWith("c")) {
                    ENTRY_COUNT = Integer.parseInt(arg.substring(1));
                } else if (arg.startsWith("v")) {
                    VALUE_SIZE = Integer.parseInt(arg.substring(1));
                } else if (arg.startsWith("g")) {
                    GET_PERCENTAGE = Integer.parseInt(arg.substring(1));
                } else if (arg.startsWith("p")) {
                    PUT_PERCENTAGE = Integer.parseInt(arg.substring(1));
                }
            }
        } else {
            System.out.println("Help: sh test.sh t200 v130 p10 g85 ");
            System.out.println("      // means 200 threads, value-size 130 bytes, 10% put, 85% get");
            System.out.println();
        }
    }

    /** Echoes the effective configuration so a run's parameters are in its log. */
    private static void printTestParameters() {
        System.out.println("Starting Test with ");
        System.out.println("      Thread Count: " + THREAD_COUNT);
        System.out.println("       Entry Count: " + ENTRY_COUNT);
        System.out.println("        Value Size: " + VALUE_SIZE);
        System.out.println("    Get Percentage: " + GET_PERCENTAGE);
        System.out.println("    Put Percentage: " + PUT_PERCENTAGE);
        System.out.println(" Remove Percentage: " + (100 - (PUT_PERCENTAGE + GET_PERCENTAGE)));
    }

    /**
     * Spawns {@link #THREAD_COUNT} workers that loop forever over the map,
     * choosing get/put/remove per iteration according to the configured mix.
     * The pool is never shut down by design — the test runs until killed.
     */
    private static void startWorkers(final HazelcastInstance client, final Stats stats) {
        ExecutorService es = Executors.newFixedThreadPool(THREAD_COUNT);
        for (int i = 0; i < THREAD_COUNT; i++) {
            es.submit(new Runnable() {
                public void run() {
                    IMap<String, byte[]> map = client.getMap("default");
                    while (true) {
                        int key = (int) (Math.random() * ENTRY_COUNT);
                        int operation = ((int) (Math.random() * 100));
                        if (operation < GET_PERCENTAGE) {
                            map.get(String.valueOf(key));
                            stats.gets.incrementAndGet();
                        } else if (operation < GET_PERCENTAGE + PUT_PERCENTAGE) {
                            map.put(String.valueOf(key), new byte[VALUE_SIZE]);
                            stats.puts.incrementAndGet();
                        } else {
                            map.remove(String.valueOf(key));
                            stats.removes.incrementAndGet();
                        }
                    }
                }
            });
        }
    }

    /**
     * Starts a single background thread that snapshots-and-resets the counters
     * every {@link #STATS_SECONDS} seconds and prints cluster size plus
     * operations-per-second.
     */
    private static void startStatsPrinter(final HazelcastInstance client, final Stats stats) {
        Executors.newSingleThreadExecutor().submit(new Runnable() {
            public void run() {
                while (true) {
                    try {
                        Thread.sleep(STATS_SECONDS * 1000);
                        System.out.println("cluster size:"
                                + client.getCluster().getMembers().size());
                        Stats currentStats = stats.getAndReset();
                        System.out.println(currentStats);
                        System.out.println("Operations per Second : " + currentStats.total()
                                / STATS_SECONDS);
                    } catch (Exception e) {
                        e.printStackTrace();
                    }
                }
            }
        });
    }

    /** Thread-safe operation counters with an atomic snapshot-and-reset. */
    public static class Stats {
        public AtomicLong puts = new AtomicLong();
        public AtomicLong gets = new AtomicLong();
        public AtomicLong removes = new AtomicLong();

        /**
         * Returns a snapshot of the current counters and zeroes them. Each
         * counter is swapped individually, so the snapshot is only
         * approximately consistent across the three counters — fine for
         * throughput reporting.
         */
        public Stats getAndReset() {
            long putsNow = puts.getAndSet(0);
            long getsNow = gets.getAndSet(0);
            long removesNow = removes.getAndSet(0);
            Stats newOne = new Stats();
            newOne.puts.set(putsNow);
            newOne.gets.set(getsNow);
            newOne.removes.set(removesNow);
            return newOne;
        }

        /** Sum of all operations recorded in this snapshot. */
        public long total() {
            return puts.get() + gets.get() + removes.get();
        }

        public String toString() {
            return "total= " + total() + ", gets:" + gets.get() + ", puts: " + puts.get() + ", removes:" + removes.get();
        }
    }
}
| 0true
|
hazelcast-client_src_test_java_com_hazelcast_client_SimpleMapTestFromClient.java
|
76 |
/**
 * Client-to-member request asking the member to create (initialize) the
 * distributed-object proxy identified by a service name and an object name.
 * The request is retryable and requires the CREATE action permission on the
 * target object.
 */
public class ClientCreateRequest extends CallableClientRequest implements Portable, RetryableRequest, SecureRequest {

    private String name;
    private String serviceName;

    /** Required for Portable deserialization. */
    public ClientCreateRequest() {
    }

    public ClientCreateRequest(String name, String serviceName) {
        this.name = name;
        this.serviceName = serviceName;
    }

    @Override
    public Object call() throws Exception {
        // Trigger proxy creation on this member; there is no payload to return.
        clientEngine.getProxyService().initializeDistributedObject(serviceName, name);
        return null;
    }

    @Override
    public String getServiceName() {
        return serviceName;
    }

    @Override
    public int getFactoryId() {
        return ClientPortableHook.ID;
    }

    @Override
    public int getClassId() {
        return ClientPortableHook.CREATE_PROXY;
    }

    @Override
    public void write(PortableWriter writer) throws IOException {
        // Field order must mirror read().
        writer.writeUTF("n", name);
        writer.writeUTF("s", serviceName);
    }

    @Override
    public void read(PortableReader reader) throws IOException {
        name = reader.readUTF("n");
        serviceName = reader.readUTF("s");
    }

    @Override
    public Permission getRequiredPermission() {
        return ActionConstants.getPermission(name, serviceName, ActionConstants.ACTION_CREATE);
    }
}
| 0true
|
hazelcast_src_main_java_com_hazelcast_client_ClientCreateRequest.java
|
400 |
/**
 * Fluent builder for a create-snapshot request: configures the target
 * repository/snapshot name, the indices to include, snapshot behaviour flags
 * and repository-specific settings, then submits via the cluster admin client.
 */
public class CreateSnapshotRequestBuilder extends MasterNodeOperationRequestBuilder<CreateSnapshotRequest, CreateSnapshotResponse, CreateSnapshotRequestBuilder> {
    /**
     * Constructs a new create snapshot request builder
     *
     * @param clusterAdminClient cluster admin client
     */
    public CreateSnapshotRequestBuilder(ClusterAdminClient clusterAdminClient) {
        super((InternalClusterAdminClient) clusterAdminClient, new CreateSnapshotRequest());
    }
    /**
     * Constructs a new create snapshot request builder with specified repository and snapshot names
     *
     * @param clusterAdminClient cluster admin client
     * @param repository repository name
     * @param snapshot snapshot name
     */
    public CreateSnapshotRequestBuilder(ClusterAdminClient clusterAdminClient, String repository, String snapshot) {
        super((InternalClusterAdminClient) clusterAdminClient, new CreateSnapshotRequest(repository, snapshot));
    }
    /**
     * Sets the snapshot name
     *
     * @param snapshot snapshot name
     * @return this builder
     */
    public CreateSnapshotRequestBuilder setSnapshot(String snapshot) {
        request.snapshot(snapshot);
        return this;
    }
    /**
     * Sets the repository name
     *
     * @param repository repository name
     * @return this builder
     */
    public CreateSnapshotRequestBuilder setRepository(String repository) {
        request.repository(repository);
        return this;
    }
    /**
     * Sets a list of indices that should be included into the snapshot
     * <p/>
     * The list of indices supports multi-index syntax. For example: "+test*" ,"-test42" will index all indices with
     * prefix "test" except index "test42". Aliases are supported. An empty list or {"_all"} will snapshot all open
     * indices in the cluster.
     *
     * @param indices list of indices to include in the snapshot; supports multi-index syntax as described above
     * @return this builder
     */
    public CreateSnapshotRequestBuilder setIndices(String... indices) {
        request.indices(indices);
        return this;
    }
    /**
     * Specifies the indices options. Like what type of requested indices to ignore. For example indices that don't exist.
     *
     * @param indicesOptions the desired behaviour regarding indices options
     * @return this builder
     */
    public CreateSnapshotRequestBuilder setIndicesOptions(IndicesOptions indicesOptions) {
        request.indicesOptions(indicesOptions);
        return this;
    }
    /**
     * If set to true the request should wait for the snapshot completion before returning.
     *
     * @param waitForCompletion true if the request should block until the snapshot completes
     * @return this builder
     */
    public CreateSnapshotRequestBuilder setWaitForCompletion(boolean waitForCompletion) {
        request.waitForCompletion(waitForCompletion);
        return this;
    }
    /**
     * If set to true the request should snapshot indices with unavailable shards
     *
     * @param partial true if request should snapshot indices with unavailable shards
     * @return this builder
     */
    public CreateSnapshotRequestBuilder setPartial(boolean partial) {
        request.partial(partial);
        return this;
    }
    /**
     * Sets repository-specific snapshot settings.
     * <p/>
     * See repository documentation for more information.
     *
     * @param settings repository-specific snapshot settings
     * @return this builder
     */
    public CreateSnapshotRequestBuilder setSettings(Settings settings) {
        request.settings(settings);
        return this;
    }
    /**
     * Sets repository-specific snapshot settings.
     * <p/>
     * See repository documentation for more information.
     *
     * @param settings repository-specific snapshot settings
     * @return this builder
     */
    public CreateSnapshotRequestBuilder setSettings(Settings.Builder settings) {
        request.settings(settings);
        return this;
    }
    /**
     * Sets repository-specific snapshot settings in YAML, JSON or properties format
     * <p/>
     * See repository documentation for more information.
     *
     * @param source repository-specific snapshot settings
     * @return this builder
     */
    public CreateSnapshotRequestBuilder setSettings(String source) {
        request.settings(source);
        return this;
    }
    /**
     * Sets repository-specific snapshot settings.
     * <p/>
     * See repository documentation for more information.
     *
     * @param settings repository-specific snapshot settings
     * @return this builder
     */
    public CreateSnapshotRequestBuilder setSettings(Map<String, Object> settings) {
        request.settings(settings);
        return this;
    }
    /**
     * Set to true if snapshot should include global cluster state
     *
     * @param includeGlobalState true if snapshot should include global cluster state
     * @return this builder
     */
    public CreateSnapshotRequestBuilder setIncludeGlobalState(boolean includeGlobalState) {
        request.includeGlobalState(includeGlobalState);
        return this;
    }
    @Override
    protected void doExecute(ActionListener<CreateSnapshotResponse> listener) {
        ((ClusterAdminClient) client).createSnapshot(request, listener);
    }
}
| 0true
|
src_main_java_org_elasticsearch_action_admin_cluster_snapshots_create_CreateSnapshotRequestBuilder.java
|
91 |
/**
 * Stores and serves static assets (images, files) from either the database or
 * the local file system, optionally transforming them (resize etc.) via
 * {@code ArtifactService} and caching the results as files on disk. File
 * locations are derived from an MD5 hash of the asset URL, sharded into a
 * configurable number of directory levels, with optional per-site prefixes in
 * multi-tenant deployments.
 */
@Service("blStaticAssetStorageService")
public class StaticAssetStorageServiceImpl implements StaticAssetStorageService {
    @Value("${asset.server.file.system.path}")
    protected String assetFileSystemPath;
    @Value("${asset.server.file.classpath.directory}")
    protected String assetFileClasspathDirectory;
    @Value("${asset.server.max.generated.file.system.directories}")
    protected int assetServerMaxGeneratedDirectories;
    @Value("${asset.server.max.uploadable.file.size}")
    protected long maxUploadableFileSize;
    // NOTE(review): 8096 is likely a typo for 8192 (8 KiB); harmless but odd.
    @Value("${asset.server.file.buffer.size}")
    protected int fileBufferSize = 8096;
    private static final Log LOG = LogFactory.getLog(StaticAssetStorageServiceImpl.class);
    private static final String DEFAULT_STORAGE_DIRECTORY = System.getProperty("java.io.tmpdir");
    protected String cacheDirectory;
    @Resource(name="blStaticAssetService")
    protected StaticAssetService staticAssetService;
    @Resource(name="blArtifactService")
    protected ArtifactService artifactService;
    @Resource(name="blStaticAssetStorageDao")
    protected StaticAssetStorageDao staticAssetStorageDao;
    @Resource(name="blNamedOperationManager")
    protected NamedOperationManager namedOperationManager;
    /**
     * Looks up the asset by URL in the given sandbox, falling back to the
     * production sandbox (null) when not found in a non-null sandbox.
     */
    protected StaticAsset findStaticAsset(String fullUrl, SandBox sandBox) {
        StaticAsset staticAsset = staticAssetService.findStaticAssetByFullUrl(fullUrl, sandBox);
        if (staticAsset == null && sandBox != null) {
            staticAsset = staticAssetService.findStaticAssetByFullUrl(fullUrl, null);
        }
        return staticAsset;
    }
    /**
     * Removes trailing "/" and ensures that there is a beginning "/"
     * @param path
     * @return
     */
    protected String fixPath(String path) {
        if (!path.startsWith("/")) {
            path = "/" + path;
        }
        if (path.endsWith("/")) {
            path = path.substring(0, path.length() - 1);
        }
        return path;
    }
    @Override
    public String generateStorageFileName(StaticAsset staticAsset, boolean useSharedFile) {
        return generateStorageFileName(staticAsset.getFullUrl(), useSharedFile);
    }
    /**
     * Returns the baseDirectory for writing and reading files as the property assetFileSystemPath if it
     * exists or java.tmp.io if that property has not been set.
     */
    protected String getBaseDirectory() {
        if (assetFileSystemPath != null && !"".equals(assetFileSystemPath.trim())) {
            return assetFileSystemPath;
        } else {
            return DEFAULT_STORAGE_DIRECTORY;
        }
    }
    /**
     * Builds the on-disk path for an asset: base dir, optional site shard
     * (skipped when useSharedFile is true), up to four two-hex-char hash
     * directories, then the original file name (or the bare URL if it has
     * no "/" separator).
     */
    @Override
    public String generateStorageFileName(String fullUrl, boolean useSharedFile) {
        String baseDirectory = getBaseDirectory();
        StringBuilder fileName = new StringBuilder(fixPath(baseDirectory));
        BroadleafRequestContext brc = BroadleafRequestContext.getBroadleafRequestContext();
        if (brc != null) {
            Site site = brc.getSite();
            if (site != null && !useSharedFile) {
                // Shard per-site assets under a two-char prefix of the site hash.
                String siteDirectory = "/site-" + site.getId();
                String siteHash = DigestUtils.md5Hex(siteDirectory);
                fileName = fileName.append("/").append(siteHash.substring(0, 2)).append(siteDirectory);
            }
        }
        // Create directories based on hash
        String fileHash = DigestUtils.md5Hex(fullUrl);
        for (int i = 0; i < assetServerMaxGeneratedDirectories; i++) {
            if (i == 4) {
                // Hard cap at 4 levels regardless of configuration.
                // NOTE(review): message is missing a space before the value.
                LOG.warn("Property assetServerMaxGeneratedDirectories set to high, ignoring values past 4 - value set to" +
                        assetServerMaxGeneratedDirectories);
                break;
            }
            fileName = fileName.append("/").append(fileHash.substring(i * 2, (i + 1) * 2));
        }
        int pos = fullUrl.lastIndexOf("/");
        if (pos >= 0) {
            // Use the fileName as specified if possible.
            fileName = fileName.append(fullUrl.substring(pos));
        } else {
            // Just use the hash since we didn't find a filename for this one.
            fileName = fileName.append("/").append(fullUrl);
        }
        return fileName.toString();
    }
    // True only for streams sourced from the classpath (shared across sites).
    protected boolean shouldUseSharedFile(InputStream is) {
        return (is != null && is instanceof GloballySharedInputStream);
    }
    /**
     * Resolves the asset for the URL, materializes (and caches) a file-system
     * copy — applying any named transformation operations — and returns a map
     * with keys "cacheFilePath" and "mimeType" for the servlet layer.
     *
     * @throws AssetNotFoundException when no asset matches the URL
     */
    @Transactional("blTransactionManagerAssetStorageInfo")
    @Override
    public Map<String, String> getCacheFileModel(String fullUrl, SandBox sandBox, Map<String, String> parameterMap) throws Exception {
        StaticAsset staticAsset = findStaticAsset(fullUrl, sandBox);
        if (staticAsset == null) {
            if (sandBox == null) {
                throw new AssetNotFoundException("Unable to find an asset for the url (" + fullUrl + ") using the production sandBox.");
            } else {
                throw new AssetNotFoundException("Unable to find an asset for the url (" + fullUrl + ") using the sandBox id (" + sandBox.getId() + "), or the production sandBox.");
            }
        }
        String mimeType = staticAsset.getMimeType();
        //extract the values for any named parameters
        Map<String, String> convertedParameters = namedOperationManager.manageNamedParameters(parameterMap);
        String returnFilePath = null;
        if (StorageType.FILESYSTEM.equals(staticAsset.getStorageType()) && convertedParameters.isEmpty()) {
            // Untransformed filesystem asset: serve directly, or cache a copy
            // when the source lives on the classpath.
            InputStream classPathInputStream = getResourceFromClasspath(staticAsset);
            if (classPathInputStream != null) {
                // Create a file system cache file representing this file.
                String cacheName = constructCacheFileName(staticAsset, convertedParameters, true);
                File cacheFile = new File(cacheName);
                if (!cacheFile.exists()) {
                    createCacheFile(classPathInputStream, cacheFile);
                }
                returnFilePath = cacheFile.getAbsolutePath();
            } else {
                returnFilePath = generateStorageFileName(staticAsset.getFullUrl(), false);
            }
        } else {
            String sharedCacheName = constructCacheFileName(staticAsset, convertedParameters, true);
            File cacheFile = new File(sharedCacheName);
            // See if the shared file exists. This is primarily to support a multi-tenant
            // implementation that shares assets across the tenants. If not, check for the
            // site specific file.
            if (!cacheFile.exists()) {
                String cacheName = constructCacheFileName(staticAsset, convertedParameters, false);
                cacheFile = new File(cacheName);
            }
            if (!cacheFile.exists()) {
                // NOTE(review): concurrent requests for the same uncached asset
                // may race here and write the cache file twice — presumably
                // benign since the content is identical, but worth confirming.
                InputStream original = findInputStreamForStaticAsset(staticAsset);
                boolean useSharedFile = shouldUseSharedFile(original);
                if (!convertedParameters.isEmpty()) {
                    Operation[] operations = artifactService.buildOperations(convertedParameters, original, staticAsset.getMimeType());
                    InputStream converted = artifactService.convert(original, operations, staticAsset.getMimeType());
                    createCacheFile(converted, cacheFile);
                    // Transformed GIFs are re-encoded as PNG downstream.
                    if ("image/gif".equals(mimeType)) {
                        mimeType = "image/png";
                    }
                } else {
                    // Only the target path differs between these branches:
                    // shared (classpath) sources cache under the shared name.
                    if (useSharedFile) {
                        cacheFile = new File(sharedCacheName);
                        createCacheFile(original, cacheFile);
                    } else {
                        createCacheFile(original, cacheFile);
                    }
                }
            }
            returnFilePath = cacheFile.getAbsolutePath();
        }
        Map<String, String> model = new HashMap<String, String>(2);
        model.put("cacheFilePath", returnFilePath);
        model.put("mimeType", mimeType);
        return model;
    }
    /**
     * Opens a stream over the asset's bytes: classpath resource if configured,
     * else the database blob (fully buffered into memory), else the
     * site-specific file on disk.
     */
    protected InputStream findInputStreamForStaticAsset(StaticAsset staticAsset) throws SQLException, IOException {
        InputStream classPathInputStream = getResourceFromClasspath(staticAsset);
        if (classPathInputStream != null) {
            return classPathInputStream;
        }
        if (StorageType.DATABASE.equals(staticAsset.getStorageType())) {
            StaticAssetStorage storage = readStaticAssetStorageByStaticAssetId(staticAsset.getId());
            //there are filter operations to perform on the asset
            // NOTE(review): the whole blob is buffered in memory; large assets
            // could pressure the heap — confirm expected asset sizes.
            ByteArrayOutputStream baos = new ByteArrayOutputStream();
            InputStream is = null;
            try {
                is = storage.getFileData().getBinaryStream();
                byte[] buffer = new byte[fileBufferSize];
                int bytesRead;
                while ((bytesRead = is.read(buffer)) != -1) {
                    baos.write(buffer, 0, bytesRead);
                }
                baos.flush();
            } finally {
                if (is != null) {
                    try {
                        is.close();
                    } catch (Throwable e) {
                        //do nothing
                    }
                }
            }
            InputStream original = new ByteArrayInputStream(baos.toByteArray());
            return original;
        } else if (StorageType.FILESYSTEM.equals(staticAsset.getStorageType())) {
            FileInputStream assetFile = new FileInputStream(generateStorageFileName(staticAsset.getFullUrl(), false));
            BufferedInputStream bufferedStream = new BufferedInputStream(assetFile);
            bufferedStream.mark(0);
            return bufferedStream;
        } else {
            throw new IllegalArgumentException("Unknown storage type while trying to read static asset.");
        }
    }
    /**
     * Returns a classpath-backed stream for the asset when an asset classpath
     * directory is configured and the resource exists; null otherwise.
     * Lookup errors are logged and treated as "not found".
     */
    protected InputStream getResourceFromClasspath(StaticAsset staticAsset) {
        if (assetFileClasspathDirectory != null && !"".equals(assetFileClasspathDirectory)) {
            try {
                ClassPathResource resource = new ClassPathResource(assetFileClasspathDirectory + staticAsset.getFullUrl());
                if (resource.exists()) {
                    InputStream assetFile = resource.getInputStream();
                    BufferedInputStream bufferedStream = new BufferedInputStream(assetFile);
                    // Wrapping the buffered input stream with a globally shared stream allows us to
                    // vary the way the file names are generated on the file system.
                    // This benefits us (mainly in our demo site but their could be other uses) when we
                    // have assets that are shared across sites that we also need to resize.
                    GloballySharedInputStream globallySharedStream = new GloballySharedInputStream(bufferedStream);
                    globallySharedStream.mark(0);
                    return globallySharedStream;
                } else {
                    return null;
                }
            } catch (Exception e) {
                LOG.error("Error getting resource from classpath", e);
            }
        }
        return null;
    }
    @Transactional("blTransactionManagerAssetStorageInfo")
    @Override
    public StaticAssetStorage findStaticAssetStorageById(Long id) {
        return staticAssetStorageDao.readStaticAssetStorageById(id);
    }
    @Transactional("blTransactionManagerAssetStorageInfo")
    @Override
    public StaticAssetStorage create() {
        return staticAssetStorageDao.create();
    }
    @Transactional("blTransactionManagerAssetStorageInfo")
    @Override
    public StaticAssetStorage readStaticAssetStorageByStaticAssetId(Long id) {
        return staticAssetStorageDao.readStaticAssetStorageByStaticAssetId(id);
    }
    @Transactional("blTransactionManagerAssetStorageInfo")
    @Override
    public StaticAssetStorage save(StaticAssetStorage assetStorage) {
        return staticAssetStorageDao.save(assetStorage);
    }
    @Transactional("blTransactionManagerAssetStorageInfo")
    @Override
    public void delete(StaticAssetStorage assetStorage) {
        staticAssetStorageDao.delete(assetStorage);
    }
    @Transactional("blTransactionManagerAssetStorageInfo")
    @Override
    public Blob createBlob(MultipartFile uploadedFile) throws IOException {
        return staticAssetStorageDao.createBlob(uploadedFile);
    }
    /**
     * Copies the stream to the cache file, creating parent directories as
     * needed.
     * NOTE(review): copies one byte at a time despite the configured buffer
     * size, and never closes {@code is}; the output stream's close failure is
     * silently swallowed. Flagged for a future behavioral fix.
     */
    protected void createCacheFile(InputStream is, File cacheFile) throws SQLException, IOException {
        if (!cacheFile.getParentFile().exists()) {
            if (!cacheFile.getParentFile().mkdirs()) {
                throw new RuntimeException("Unable to create middle directories for file: " + cacheFile.getAbsolutePath());
            }
        }
        BufferedOutputStream bos = new BufferedOutputStream(new FileOutputStream(cacheFile));
        try {
            boolean eof = false;
            int temp;
            while (!eof) {
                temp = is.read();
                if (temp < 0) {
                    eof = true;
                } else {
                    bos.write(temp);
                }
            }
        } finally {
            try {
                bos.flush();
                bos.close();
            } catch (Throwable e) {
                //do nothing
            }
        }
    }
    /**
     * Builds a file system path for the passed in static asset and paramaterMap.
     *
     * If in a multi-site implementation, the system will also prefix the filepath with a site-identifier
     * unless the useSharedFile parameter is set to true.
     *
     * The cache key portion is an MD5 over the asset's last-modified timestamp
     * plus the transformation parameters, so a changed asset or different
     * parameters yield a distinct cache file.
     *
     * @param staticAsset
     * @param parameterMap
     * @param useSharedFile
     * @return
     */
    protected String constructCacheFileName(StaticAsset staticAsset, Map<String, String> parameterMap, boolean useSharedFile) {
        String fileName = generateStorageFileName(staticAsset, useSharedFile);
        StringBuilder sb = new StringBuilder(200);
        sb.append(fileName.substring(0, fileName.lastIndexOf('.')));
        sb.append("---");
        StringBuilder sb2 = new StringBuilder(200);
        SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd-HH-mm-ss");
        if (staticAsset.getAuditable() != null) {
            sb2.append(format.format(staticAsset.getAuditable().getDateUpdated() == null ? staticAsset.getAuditable().getDateCreated() : staticAsset.getAuditable().getDateUpdated()));
        }
        for (Map.Entry<String, String> entry : parameterMap.entrySet()) {
            sb2.append('-');
            sb2.append(entry.getKey());
            sb2.append('-');
            sb2.append(entry.getValue());
        }
        String digest;
        try {
            MessageDigest md = MessageDigest.getInstance("MD5");
            byte[] messageDigest = md.digest(sb2.toString().getBytes());
            BigInteger number = new BigInteger(1,messageDigest);
            digest = number.toString(16);
        } catch(NoSuchAlgorithmException e) {
            throw new RuntimeException(e);
        }
        // BigInteger drops leading zeros, so left-pad to the full 32 hex chars.
        sb.append(pad(digest, 32, '0'));
        sb.append(fileName.substring(fileName.lastIndexOf('.')));
        return sb.toString();
    }
    /** Left-pads {@code s} with {@code pad} until it reaches {@code length}. */
    protected String pad(String s, int length, char pad) {
        StringBuilder buffer = new StringBuilder(s);
        while (buffer.length() < length) {
            buffer.insert(0, pad);
        }
        return buffer.toString();
    }
    /**
     * Persists an uploaded file either as a database blob or onto the file
     * system (written to a temp file first, then renamed into place), enforcing
     * the configured maximum upload size.
     * NOTE(review): the uploaded file's {@code input} stream is never closed,
     * and an oversized upload surfaces as a plain IOException to the caller.
     */
    @Transactional("blTransactionManagerAssetStorageInfo")
    @Override
    public void createStaticAssetStorageFromFile(MultipartFile file, StaticAsset staticAsset) throws IOException {
        if (StorageType.DATABASE.equals(staticAsset.getStorageType())) {
            StaticAssetStorage storage = staticAssetStorageDao.create();
            storage.setStaticAssetId(staticAsset.getId());
            Blob uploadBlob = staticAssetStorageDao.createBlob(file);
            storage.setFileData(uploadBlob);
            staticAssetStorageDao.save(storage);
        } else if (StorageType.FILESYSTEM.equals(staticAsset.getStorageType())) {
            InputStream input = file.getInputStream();
            byte[] buffer = new byte[fileBufferSize];
            String destFileName = generateStorageFileName(staticAsset.getFullUrl(), false);
            // Write to a UUID-named temp file in the destination directory so
            // the final rename is atomic on the same filesystem.
            String tempFileName = destFileName.substring(0, destFileName.lastIndexOf("/") + 1) + UUID.randomUUID().toString();
            File tmpFile = new File(tempFileName);
            if (!tmpFile.getParentFile().exists()) {
                if (!tmpFile.getParentFile().mkdirs()) {
                    throw new RuntimeException("Unable to create parent directories for file: " + destFileName);
                }
            }
            OutputStream output = new FileOutputStream(tmpFile);
            boolean deleteFile = false;
            try {
                int bytesRead;
                int totalBytesRead = 0;
                while ((bytesRead = input.read(buffer)) != -1) {
                    totalBytesRead += bytesRead;
                    if (totalBytesRead > maxUploadableFileSize) {
                        deleteFile = true;
                        throw new IOException("Maximum Upload File Size Exceeded");
                    }
                    output.write(buffer, 0, bytesRead);
                }
            } finally {
                output.close();
                if (deleteFile && tmpFile.exists()) {
                    tmpFile.delete();
                }
            }
            File newFile = new File(destFileName);
            // A failed rename is tolerated if the destination already exists
            // (another request won the race); otherwise it is fatal.
            if (!tmpFile.renameTo(newFile)) {
                if (!newFile.exists()) {
                    throw new RuntimeException("Unable to rename temp file to create file named: " + destFileName);
                }
            }
        }
    }
}
| 1no label
|
admin_broadleaf-contentmanagement-module_src_main_java_org_broadleafcommerce_cms_file_service_StaticAssetStorageServiceImpl.java
|
9 |
{
    // Baseline: the newest committed tx id, captured lazily on the first call.
    private Long highest;
    @Override
    public boolean reached( File file, long version, LogLoader source )
    {
        // Here we know that the log version exists (checked in AbstractPruneStrategy#prune)
        long tx = source.getFirstCommittedTxId( version );
        if ( highest == null )
        {
            // First invocation: establish the baseline and never prune yet.
            highest = source.getLastCommittedTxId();
            return false;
        }
        // Prune this log once its first tx trails the baseline by at least
        // maxTransactionCount transactions.
        return highest-tx >= maxTransactionCount;
    }
};
| 1no label
|
community_kernel_src_main_java_org_neo4j_kernel_impl_transaction_xaframework_LogPruneStrategies.java
|
69 |
    /**
     * Fork/join task that applies {@code transformer} to every map entry in
     * its sub-range and folds the non-null results together with
     * {@code reducer}. Results are combined bottom-up through the
     * CountedCompleter completion chain via the {@code rights}/{@code nextRight}
     * sibling links.
     */
    @SuppressWarnings("serial")
    static final class MapReduceEntriesTask<K,V,U>
        extends BulkTask<K,V,U> {
        final Fun<Map.Entry<K,V>, ? extends U> transformer;
        final BiFun<? super U, ? super U, ? extends U> reducer;
        U result;
        MapReduceEntriesTask<K,V,U> rights, nextRight;
        MapReduceEntriesTask
            (BulkTask<K,V,?> p, int b, int i, int f, Node<K,V>[] t,
             MapReduceEntriesTask<K,V,U> nextRight,
             Fun<Map.Entry<K,V>, ? extends U> transformer,
             BiFun<? super U, ? super U, ? extends U> reducer) {
            super(p, b, i, f, t); this.nextRight = nextRight;
            this.transformer = transformer;
            this.reducer = reducer;
        }
        public final U getRawResult() { return result; }
        public final void compute() {
            final Fun<Map.Entry<K,V>, ? extends U> transformer;
            final BiFun<? super U, ? super U, ? extends U> reducer;
            if ((transformer = this.transformer) != null &&
                (reducer = this.reducer) != null) {
                // Repeatedly split off the right half of the range as a forked
                // subtask while the batch budget allows.
                for (int i = baseIndex, f, h; batch > 0 &&
                         (h = ((f = baseLimit) + i) >>> 1) > i;) {
                    addToPendingCount(1);
                    (rights = new MapReduceEntriesTask<K,V,U>
                     (this, batch >>>= 1, baseLimit = h, f, tab,
                      rights, transformer, reducer)).fork();
                }
                // Locally map-and-reduce the remaining (left) portion,
                // skipping entries the transformer maps to null.
                U r = null;
                for (Node<K,V> p; (p = advance()) != null; ) {
                    U u;
                    if ((u = transformer.apply(p)) != null)
                        r = (r == null) ? u : reducer.apply(r, u);
                }
                result = r;
                // On completion, merge each finished right-sibling's result
                // into its parent, walking up the completion chain.
                CountedCompleter<?> c;
                for (c = firstComplete(); c != null; c = c.nextComplete()) {
                    @SuppressWarnings("unchecked") MapReduceEntriesTask<K,V,U>
                        t = (MapReduceEntriesTask<K,V,U>)c,
                        s = t.rights;
                    while (s != null) {
                        U tr, sr;
                        if ((sr = s.result) != null)
                            t.result = (((tr = t.result) == null) ? sr :
                                        reducer.apply(tr, sr));
                        s = t.rights = s.nextRight;
                    }
                }
            }
        }
    }
| 0true
|
src_main_java_jsr166e_ConcurrentHashMapV8.java
|
3,257 |
/**
 * Permission guarding IList operations. Each action maps to a distinct bit in
 * the permission mask; CREATE and DESTROY bits are inherited from
 * {@link InstancePermission}.
 */
public class ListPermission extends InstancePermission {

    // Single-bit action flags. NOTE: these were previously declared as 0x16
    // and 0x32 (a hex/decimal mix-up for 16 and 32): 0x16 = 0b10110 silently
    // overlapped the DESTROY (0x2) and ADD (0x4) bits, so granting REMOVE
    // implicitly granted those actions too. 0x10 and 0x20 are the intended
    // non-overlapping bits.
    private static final int ADD = 0x4;
    private static final int READ = 0x8;
    private static final int REMOVE = 0x10;
    private static final int LISTEN = 0x20;
    private static final int ALL = ADD | REMOVE | READ | CREATE | DESTROY | LISTEN;

    public ListPermission(String name, String... actions) {
        super(name, actions);
    }

    /**
     * Translates action names into the combined bit mask; "all" short-circuits
     * to every supported action. Unknown action names are ignored.
     */
    @Override
    protected int initMask(String[] actions) {
        int mask = NONE;
        for (String action : actions) {
            if (ActionConstants.ACTION_ALL.equals(action)) {
                return ALL;
            }
            if (ActionConstants.ACTION_CREATE.equals(action)) {
                mask |= CREATE;
            } else if (ActionConstants.ACTION_ADD.equals(action)) {
                mask |= ADD;
            } else if (ActionConstants.ACTION_REMOVE.equals(action)) {
                mask |= REMOVE;
            } else if (ActionConstants.ACTION_READ.equals(action)) {
                mask |= READ;
            } else if (ActionConstants.ACTION_DESTROY.equals(action)) {
                mask |= DESTROY;
            } else if (ActionConstants.ACTION_LISTEN.equals(action)) {
                mask |= LISTEN;
            }
        }
        return mask;
    }
}
| 1no label
|
hazelcast_src_main_java_com_hazelcast_security_permission_ListPermission.java
|
902 |
@SuppressWarnings({ "unchecked", "serial" })
public abstract class ORecordSchemaAwareAbstract<T> extends ORecordAbstract<T> implements ORecordSchemaAware<T> {
protected OClass _clazz;
public ORecordSchemaAwareAbstract() {
}
/**
* Validates the record following the declared constraints defined in schema such as mandatory, notNull, min, max, regexp, etc. If
* the schema is not defined for the current class or there are not constraints then the validation is ignored.
*
* @see OProperty
* @throws OValidationException
* if the document breaks some validation constraints defined in the schema
*/
  public void validate() throws OValidationException {
    // Validation can be disabled database-wide; skip entirely in that case.
    if (ODatabaseRecordThreadLocal.INSTANCE.isDefined() && !getDatabase().isValidationEnabled())
      return;
    // Make sure the record is loaded and its fields are unmarshalled before
    // inspecting them.
    checkForLoading();
    checkForFields();
    if (_clazz != null) {
      if (_clazz.isStrictMode()) {
        // CHECK IF ALL FIELDS ARE DEFINED
        for (String f : fieldNames()) {
          if (_clazz.getProperty(f) == null)
            throw new OValidationException("Found additional field '" + f + "'. It cannot be added because the schema class '"
                + _clazz.getName() + "' is defined as STRICT");
        }
      }
      // Apply per-property constraints (mandatory, notNull, min/max, regexp, ...).
      for (OProperty p : _clazz.properties()) {
        validateField(this, p);
      }
    }
  }
  /**
   * Returns the schema class, lazily unmarshalling only the "@class" field
   * when the class has not been resolved yet.
   */
  public OClass getSchemaClass() {
    if (_clazz == null) {
      // DESERIALIZE ONLY IF THE CLASS IS NOT SETTED: THIS PREVENT TO
      // UNMARSHALL THE RECORD EVEN IF SETTED BY fromString()
      checkForLoading();
      checkForFields("@class");
    }
    return _clazz;
  }
  /**
   * Returns the schema class name, resolving it lazily from the serialized
   * record when needed; null when the record has no class.
   */
  public String getClassName() {
    if (_clazz != null)
      return _clazz.getName();
    // CLASS NOT FOUND: CHECK IF NEED LOADING AND UNMARSHALLING
    checkForLoading();
    checkForFields("@class");
    return _clazz != null ? _clazz.getName() : null;
  }
  /**
   * Sets the schema class by name, creating the class in the schema when it
   * does not exist yet; a null name clears the class.
   */
  public void setClassName(final String iClassName) {
    if (iClassName == null) {
      _clazz = null;
      return;
    }
    setClass(getDatabase().getMetadata().getSchema().getOrCreateClass(iClassName));
  }
  /**
   * Like {@link #setClassName(String)} but never creates the class: a missing
   * class simply resolves to null.
   */
  public void setClassNameIfExists(final String iClassName) {
    if (iClassName == null) {
      _clazz = null;
      return;
    }
    setClass(getDatabase().getMetadata().getSchema().getClass(iClassName));
  }
  /** Resets the record state and clears the resolved schema class. */
  @Override
  public ORecordSchemaAwareAbstract<T> reset() {
    super.reset();
    _clazz = null;
    return this;
  }
  /** Serializes the full record (no delta). */
  public byte[] toStream() {
    return toStream(false);
  }
  /**
   * Serializes the record, reusing the cached source bytes when present;
   * notifies MARSHALL listeners either way.
   */
  public byte[] toStream(final boolean iOnlyDelta) {
    if (_source == null)
      _source = _recordFormat.toStream(this, iOnlyDelta);
    invokeListenerEvent(ORecordListener.EVENT.MARSHALL);
    return _source;
  }
  /** Deletion is not supported at this abstraction level. */
  public void remove() {
    throw new UnsupportedOperationException();
  }
  /**
   * Ensures the requested fields are unmarshalled when the record is loaded
   * but still empty; returns false only when lazy population fails.
   */
  protected boolean checkForFields(final String... iFields) {
    if (_status == ORecordElement.STATUS.LOADED && fields() == 0)
      // POPULATE FIELDS LAZY
      return deserializeFields(iFields);
    return true;
  }
  /**
   * Unmarshals the given fields (or all, per the record format) from the
   * cached source bytes; returns false when there is no source to read.
   */
  public boolean deserializeFields(final String... iFields) {
    if (_source == null)
      return false;
    _status = ORecordElement.STATUS.UNMARSHALLING;
    _recordFormat.fromStream(_source, this, iFields);
    _status = ORecordElement.STATUS.LOADED;
    return true;
  }
  /** Assigns the schema class, rejecting abstract classes. */
  protected void setClass(final OClass iClass) {
    if (iClass != null && iClass.isAbstract())
      throw new OSchemaException("Cannot create a document of an abstract class");
    _clazz = iClass;
  }
protected void checkFieldAccess(final int iIndex) {
if (iIndex < 0 || iIndex >= fields())
throw new IndexOutOfBoundsException("Index " + iIndex + " is outside the range allowed: 0-" + fields());
}
public static void validateField(ORecordSchemaAwareAbstract<?> iRecord, OProperty p) throws OValidationException {
final Object fieldValue;
if (iRecord.containsField(p.getName())) {
if (iRecord instanceof ODocument)
// AVOID CONVERSIONS: FASTER!
fieldValue = ((ODocument) iRecord).rawField(p.getName());
else
fieldValue = iRecord.field(p.getName());
if (p.isNotNull() && fieldValue == null)
// NULLITY
throw new OValidationException("The field '" + p.getFullName() + "' cannot be null");
if (fieldValue != null && p.getRegexp() != null) {
// REGEXP
if (!fieldValue.toString().matches(p.getRegexp()))
throw new OValidationException("The field '" + p.getFullName() + "' does not match the regular expression '"
+ p.getRegexp() + "'. Field value is: " + fieldValue);
}
} else {
if (p.isMandatory())
throw new OValidationException("The field '" + p.getFullName() + "' is mandatory");
fieldValue = null;
}
final OType type = p.getType();
if (fieldValue != null && type != null) {
// CHECK TYPE
switch (type) {
case LINK:
validateLink(p, fieldValue);
break;
case LINKLIST:
if (!(fieldValue instanceof List))
throw new OValidationException("The field '" + p.getFullName()
+ "' has been declared as LINKLIST but an incompatible type is used. Value: " + fieldValue);
if (p.getLinkedClass() != null)
for (Object item : ((List<?>) fieldValue))
validateLink(p, item);
break;
case LINKSET:
if (!(fieldValue instanceof Set))
throw new OValidationException("The field '" + p.getFullName()
+ "' has been declared as LINKSET but an incompatible type is used. Value: " + fieldValue);
if (p.getLinkedClass() != null)
for (Object item : ((Set<?>) fieldValue))
validateLink(p, item);
break;
case LINKMAP:
if (!(fieldValue instanceof Map))
throw new OValidationException("The field '" + p.getFullName()
+ "' has been declared as LINKMAP but an incompatible type is used. Value: " + fieldValue);
if (p.getLinkedClass() != null)
for (Entry<?, ?> entry : ((Map<?, ?>) fieldValue).entrySet())
validateLink(p, entry.getValue());
break;
case EMBEDDED:
validateEmbedded(p, fieldValue);
break;
case EMBEDDEDLIST:
if (!(fieldValue instanceof List))
throw new OValidationException("The field '" + p.getFullName()
+ "' has been declared as EMBEDDEDLIST but an incompatible type is used. Value: " + fieldValue);
if (p.getLinkedClass() != null) {
for (Object item : ((List<?>) fieldValue))
validateEmbedded(p, item);
} else if (p.getLinkedType() != null) {
for (Object item : ((List<?>) fieldValue))
validateType(p, item);
}
break;
case EMBEDDEDSET:
if (!(fieldValue instanceof Set))
throw new OValidationException("The field '" + p.getFullName()
+ "' has been declared as EMBEDDEDSET but an incompatible type is used. Value: " + fieldValue);
if (p.getLinkedClass() != null) {
for (Object item : ((Set<?>) fieldValue))
validateEmbedded(p, item);
} else if (p.getLinkedType() != null) {
for (Object item : ((Set<?>) fieldValue))
validateType(p, item);
}
break;
case EMBEDDEDMAP:
if (!(fieldValue instanceof Map))
throw new OValidationException("The field '" + p.getFullName()
+ "' has been declared as EMBEDDEDMAP but an incompatible type is used. Value: " + fieldValue);
if (p.getLinkedClass() != null) {
for (Entry<?, ?> entry : ((Map<?, ?>) fieldValue).entrySet())
validateEmbedded(p, entry.getValue());
} else if (p.getLinkedType() != null) {
for (Entry<?, ?> entry : ((Map<?, ?>) fieldValue).entrySet())
validateType(p, entry.getValue());
}
break;
}
}
if (p.getMin() != null) {
// MIN
final String min = p.getMin();
if (p.getType().equals(OType.STRING) && (fieldValue != null && ((String) fieldValue).length() < Integer.parseInt(min)))
throw new OValidationException("The field '" + p.getFullName() + "' contains fewer characters than " + min + " requested");
else if (p.getType().equals(OType.BINARY) && (fieldValue != null && ((byte[]) fieldValue).length < Integer.parseInt(min)))
throw new OValidationException("The field '" + p.getFullName() + "' contains fewer bytes than " + min + " requested");
else if (p.getType().equals(OType.INTEGER) && (fieldValue != null && type.asInt(fieldValue) < Integer.parseInt(min)))
throw new OValidationException("The field '" + p.getFullName() + "' is less than " + min);
else if (p.getType().equals(OType.LONG) && (fieldValue != null && type.asLong(fieldValue) < Long.parseLong(min)))
throw new OValidationException("The field '" + p.getFullName() + "' is less than " + min);
else if (p.getType().equals(OType.FLOAT) && (fieldValue != null && type.asFloat(fieldValue) < Float.parseFloat(min)))
throw new OValidationException("The field '" + p.getFullName() + "' is less than " + min);
else if (p.getType().equals(OType.DOUBLE) && (fieldValue != null && type.asDouble(fieldValue) < Double.parseDouble(min)))
throw new OValidationException("The field '" + p.getFullName() + "' is less than " + min);
else if (p.getType().equals(OType.DATE)) {
try {
if (fieldValue != null
&& ((Date) fieldValue).before(iRecord.getDatabase().getStorage().getConfiguration().getDateFormatInstance()
.parse(min)))
throw new OValidationException("The field '" + p.getFullName() + "' contains the date " + fieldValue
+ " which precedes the first acceptable date (" + min + ")");
} catch (ParseException e) {
}
} else if (p.getType().equals(OType.DATETIME)) {
try {
if (fieldValue != null
&& ((Date) fieldValue).before(iRecord.getDatabase().getStorage().getConfiguration().getDateTimeFormatInstance()
.parse(min)))
throw new OValidationException("The field '" + p.getFullName() + "' contains the datetime " + fieldValue
+ " which precedes the first acceptable datetime (" + min + ")");
} catch (ParseException e) {
}
} else if ((p.getType().equals(OType.EMBEDDEDLIST) || p.getType().equals(OType.EMBEDDEDSET)
|| p.getType().equals(OType.LINKLIST) || p.getType().equals(OType.LINKSET))
&& (fieldValue != null && ((Collection<?>) fieldValue).size() < Integer.parseInt(min)))
throw new OValidationException("The field '" + p.getFullName() + "' contains fewer items than " + min + " requested");
}
if (p.getMax() != null) {
// MAX
final String max = p.getMax();
if (p.getType().equals(OType.STRING) && (fieldValue != null && ((String) fieldValue).length() > Integer.parseInt(max)))
throw new OValidationException("The field '" + p.getFullName() + "' contains more characters than " + max + " requested");
else if (p.getType().equals(OType.BINARY) && (fieldValue != null && ((byte[]) fieldValue).length > Integer.parseInt(max)))
throw new OValidationException("The field '" + p.getFullName() + "' contains more bytes than " + max + " requested");
else if (p.getType().equals(OType.INTEGER) && (fieldValue != null && type.asInt(fieldValue) > Integer.parseInt(max)))
throw new OValidationException("The field '" + p.getFullName() + "' is greater than " + max);
else if (p.getType().equals(OType.LONG) && (fieldValue != null && type.asLong(fieldValue) > Long.parseLong(max)))
throw new OValidationException("The field '" + p.getFullName() + "' is greater than " + max);
else if (p.getType().equals(OType.FLOAT) && (fieldValue != null && type.asFloat(fieldValue) > Float.parseFloat(max)))
throw new OValidationException("The field '" + p.getFullName() + "' is greater than " + max);
else if (p.getType().equals(OType.DOUBLE) && (fieldValue != null && type.asDouble(fieldValue) > Double.parseDouble(max)))
throw new OValidationException("The field '" + p.getFullName() + "' is greater than " + max);
else if (p.getType().equals(OType.DATE)) {
try {
if (fieldValue != null
&& ((Date) fieldValue).before(iRecord.getDatabase().getStorage().getConfiguration().getDateFormatInstance()
.parse(max)))
throw new OValidationException("The field '" + p.getFullName() + "' contains the date " + fieldValue
+ " which is after the last acceptable date (" + max + ")");
} catch (ParseException e) {
}
} else if (p.getType().equals(OType.DATETIME)) {
try {
if (fieldValue != null
&& ((Date) fieldValue).before(iRecord.getDatabase().getStorage().getConfiguration().getDateTimeFormatInstance()
.parse(max)))
throw new OValidationException("The field '" + p.getFullName() + "' contains the datetime " + fieldValue
+ " which is after the last acceptable datetime (" + max + ")");
} catch (ParseException e) {
}
} else if ((p.getType().equals(OType.EMBEDDEDLIST) || p.getType().equals(OType.EMBEDDEDSET)
|| p.getType().equals(OType.LINKLIST) || p.getType().equals(OType.LINKSET))
&& (fieldValue != null && ((Collection<?>) fieldValue).size() > Integer.parseInt(max)))
throw new OValidationException("The field '" + p.getFullName() + "' contains more items than " + max + " requested");
}
if (p.isReadonly() && iRecord instanceof ODocument && !iRecord.getRecordVersion().isTombstone()) {
for (String f : ((ODocument) iRecord).getDirtyFields())
if (f.equals(p.getName())) {
// check if the field is actually changed by equal.
// this is due to a limitation in the merge algorithm used server side marking all non simple fields as dirty
Object orgVal = ((ODocument) iRecord).getOriginalValue(f);
boolean simple = fieldValue != null ? OType.isSimpleType(fieldValue) : OType.isSimpleType(orgVal);
if ((simple) || (fieldValue != null && orgVal == null) || (fieldValue == null && orgVal != null)
|| (!fieldValue.equals(orgVal)))
throw new OValidationException("The field '" + p.getFullName()
+ "' is immutable and cannot be altered. Field value is: " + ((ODocument) iRecord).field(f));
}
}
}
protected static void validateType(final OProperty p, final Object value) {
if (value != null)
if (OType.convert(value, p.getLinkedType().getDefaultJavaType()) == null)
throw new OValidationException("The field '" + p.getFullName() + "' has been declared as " + p.getType() + " of type '"
+ p.getLinkedType() + "' but the value is " + value);
}
protected static void validateLink(final OProperty p, final Object fieldValue) {
if (fieldValue == null)
throw new OValidationException("The field '" + p.getFullName() + "' has been declared as " + p.getType()
+ " but contains a null record (probably a deleted record?)");
final ORecord<?> linkedRecord;
if (fieldValue instanceof OIdentifiable)
linkedRecord = ((OIdentifiable) fieldValue).getRecord();
else if (fieldValue instanceof String)
linkedRecord = new ORecordId((String) fieldValue).getRecord();
else
throw new OValidationException("The field '" + p.getFullName() + "' has been declared as " + p.getType()
+ " but the value is not a record or a record-id");
if (linkedRecord != null && p.getLinkedClass() != null) {
if (!(linkedRecord instanceof ODocument))
throw new OValidationException("The field '" + p.getFullName() + "' has been declared as " + p.getType() + " of type '"
+ p.getLinkedClass() + "' but the value is the record " + linkedRecord.getIdentity() + " that is not a document");
final ODocument doc = (ODocument) linkedRecord;
// AT THIS POINT CHECK THE CLASS ONLY IF != NULL BECAUSE IN CASE OF GRAPHS THE RECORD COULD BE PARTIAL
if (doc.getSchemaClass() != null && !p.getLinkedClass().isSuperClassOf(doc.getSchemaClass()))
throw new OValidationException("The field '" + p.getFullName() + "' has been declared as " + p.getType() + " of type '"
+ p.getLinkedClass().getName() + "' but the value is the document " + linkedRecord.getIdentity() + " of class '"
+ doc.getSchemaClass() + "'");
}
}
protected static void validateEmbedded(final OProperty p, final Object fieldValue) {
if (fieldValue instanceof ORecordId)
throw new OValidationException("The field '" + p.getFullName() + "' has been declared as " + p.getType()
+ " but the value is the RecordID " + fieldValue);
else if (fieldValue instanceof OIdentifiable) {
if (((OIdentifiable) fieldValue).getIdentity().isValid())
throw new OValidationException("The field '" + p.getFullName() + "' has been declared as " + p.getType()
+ " but the value is a document with the valid RecordID " + fieldValue);
final OClass embeddedClass = p.getLinkedClass();
if (embeddedClass != null) {
final ORecord<?> rec = ((OIdentifiable) fieldValue).getRecord();
if (!(rec instanceof ODocument))
throw new OValidationException("The field '" + p.getFullName() + "' has been declared as " + p.getType()
+ " with linked class '" + embeddedClass + "' but the record was not a document");
final ODocument doc = (ODocument) rec;
if (doc.getSchemaClass() == null)
throw new OValidationException("The field '" + p.getFullName() + "' has been declared as " + p.getType()
+ " with linked class '" + embeddedClass + "' but the record has no class");
if (!(doc.getSchemaClass().isSubClassOf(embeddedClass)))
throw new OValidationException("The field '" + p.getFullName() + "' has been declared as " + p.getType()
+ " with linked class '" + embeddedClass + "' but the record is of class '" + doc.getSchemaClass().getName()
+ "' that is not a subclass of that");
}
} else
throw new OValidationException("The field '" + p.getFullName() + "' has been declared as " + p.getType()
+ " but an incompatible type is used. Value: " + fieldValue);
}
}
| 1no label
|
core_src_main_java_com_orientechnologies_orient_core_record_ORecordSchemaAwareAbstract.java
|
332 |
/**
 * Node-value merge handler that joins and splits values on a comma.
 */
public class CommaDelimitedNodeValueMerge extends NodeValueMerge {

    /** Token inserted between merged node values. */
    protected static final String COMMA = ",";

    /**
     * @return the comma used to join node values during a merge
     */
    @Override
    public String getDelimiter() {
        return COMMA;
    }

    /**
     * @return the pattern used to split pre-merged values; a bare comma is a
     *         literal in regex syntax, so the delimiter itself is reused
     */
    @Override
    public String getRegEx() {
        return getDelimiter();
    }
}
| 0true
|
common_src_main_java_org_broadleafcommerce_common_extensibility_context_merge_handlers_CommaDelimitedNodeValueMerge.java
|
1,653 |
/**
 * Utility holder for picking a random node name from a line-oriented source.
 * Not instantiable; all members are static.
 */
public abstract class Names {

    /**
     * Returns a uniformly random line from the resource at the given URL, or
     * {@code null} when the resource is empty or cannot be read.
     * <p>
     * The resource is read twice: once to count the lines, once to skip to the
     * randomly chosen one.
     *
     * @param nodeNames URL of a UTF-8 text resource with one candidate name per line
     * @return a random line, or {@code null} on empty input or I/O failure
     */
    public static String randomNodeName(URL nodeNames) {
        BufferedReader reader = null;
        try {
            // First pass: count the available names.
            reader = new BufferedReader(new InputStreamReader(nodeNames.openStream(), "UTF-8"));
            int numberOfNames = 0;
            while (reader.readLine() != null) {
                numberOfNames++;
            }
            reader.close();

            // FIX: an empty resource previously reached ThreadLocalRandom.nextInt(0), which
            // throws IllegalArgumentException and escaped the IOException-only catch below.
            if (numberOfNames == 0) {
                return null;
            }

            // Second pass: skip a uniformly random number of lines, return the next one.
            // (The old "% numberOfNames" after nextInt(numberOfNames) was a no-op and is dropped.)
            reader = new BufferedReader(new InputStreamReader(nodeNames.openStream(), "UTF-8"));
            int number = ThreadLocalRandom.current().nextInt(numberOfNames);
            for (int i = 0; i < number; i++) {
                reader.readLine();
            }
            return reader.readLine();
        } catch (IOException e) {
            return null;
        } finally {
            try {
                if (reader != null) {
                    reader.close();
                }
            } catch (IOException e) {
                // best-effort close; the result (or null) has already been determined
            }
        }
    }

    /**
     * Returns a name from a UTF-8 stream whose first line holds the count of the
     * names that follow, or {@code null} when the stream is null, malformed, or
     * unreadable. The stream is always closed.
     *
     * @param nodeNames stream with a numeric header line followed by one name per line; may be null
     * @return a name from the stream, or {@code null} on any failure
     */
    public static String randomNodeName(InputStream nodeNames) {
        if (nodeNames == null) {
            return null;
        }
        try {
            BufferedReader reader = new BufferedReader(new InputStreamReader(nodeNames, "UTF-8"));
            // The first line is expected to hold the number of names that follow.
            int numberOfNames = Integer.parseInt(reader.readLine());
            // NOTE(review): the "- 2" (per the original comment, to exclude the header and last
            // line) skews selection heavily toward the first name; preserved as-is because the
            // exact stream format and callers are not visible from here — confirm before changing.
            int number = ((ThreadLocalRandom.current().nextInt(numberOfNames)) % numberOfNames) - 2;
            for (int i = 0; i < number; i++) {
                reader.readLine();
            }
            return reader.readLine();
        } catch (Exception e) {
            // Covers malformed header (NumberFormatException), empty/short stream, and I/O errors.
            return null;
        } finally {
            try {
                nodeNames.close();
            } catch (IOException e) {
                // ignore: nothing sensible to do on close failure
            }
        }
    }

    private Names() {
        // Static utility holder; never instantiated.
    }
}
| 1no label
|
src_main_java_org_elasticsearch_common_Names.java
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.