Dataset schema (one row per code snippet):
- `Unnamed: 0` — int64 row index (~6,450 rows)
- `func` — string, the source-code snippet (length 29 to ~253k chars)
- `target` — class label (2 classes: `0true` / `1no label`)
- `project` — string, path of the originating source file (length 36 to 167 chars)

Rows below are rendered as: index | func | target | project
---|---|---|---
3,199 | public class IndexFieldDataModule extends AbstractModule {
private final Settings settings;
public IndexFieldDataModule(Settings settings) {
this.settings = settings;
}
@Override
protected void configure() {
bind(IndexFieldDataService.class).asEagerSingleton();
}
} | 0true
| src_main_java_org_elasticsearch_index_fielddata_IndexFieldDataModule.java |
// Benchmark switch: issues the same terms / terms-stats request either through
// the legacy facet API or through the aggregation API so the two code paths
// can be timed against each other.
private enum Method {
    FACET {
        @Override
        SearchRequestBuilder addTermsAgg(SearchRequestBuilder builder, String name, String field, String executionHint) {
            // Legacy facet module: terms facet keyed on `field`.
            return builder.addFacet(termsFacet(name).field(field).executionHint(executionHint));
        }
        @Override
        SearchRequestBuilder addTermsStatsAgg(SearchRequestBuilder builder, String name, String keyField, String valueField) {
            // Legacy facet module: stats of valueField per keyField term.
            return builder.addFacet(termsStatsFacet(name).keyField(keyField).valueField(valueField));
        }
    },
    AGGREGATION {
        @Override
        SearchRequestBuilder addTermsAgg(SearchRequestBuilder builder, String name, String field, String executionHint) {
            return builder.addAggregation(AggregationBuilders.terms(name).executionHint(executionHint).field(field));
        }
        @Override
        SearchRequestBuilder addTermsStatsAgg(SearchRequestBuilder builder, String name, String keyField, String valueField) {
            // Aggregation equivalent of a terms-stats facet: terms agg with a
            // nested "stats" sub-aggregation over valueField.
            return builder.addAggregation(AggregationBuilders.terms(name).field(keyField).subAggregation(AggregationBuilders.stats("stats").field(valueField)));
        }
    };
    // Adds a terms facet/aggregation named `name` over `field` to the request.
    abstract SearchRequestBuilder addTermsAgg(SearchRequestBuilder builder, String name, String field, String executionHint);
    // Adds a terms-stats style facet/aggregation (stats of valueField grouped by keyField).
    abstract SearchRequestBuilder addTermsStatsAgg(SearchRequestBuilder builder, String name, String keyField, String valueField);
}
| src_test_java_org_elasticsearch_benchmark_search_aggregations_TermsAggregationSearchBenchmark.java |
528 | public class OTransactionAbortedException extends OTransactionException {
private static final long serialVersionUID = 2347493191705052402L;
public OTransactionAbortedException(String message, Throwable cause) {
super(message, cause);
}
public OTransactionAbortedException(String message) {
super(message);
}
} | 0true
| core_src_main_java_com_orientechnologies_orient_core_exception_OTransactionAbortedException.java |
2,038 | private interface Factory<M extends Member & AnnotatedElement> {
Factory<Field> FIELDS = new Factory<Field>() {
public Field[] getMembers(Class<?> type) {
return type.getDeclaredFields();
}
public InjectionPoint create(TypeLiteral<?> typeLiteral, Field member, Errors errors) {
return new InjectionPoint(typeLiteral, member);
}
};
Factory<Method> METHODS = new Factory<Method>() {
public Method[] getMembers(Class<?> type) {
return type.getDeclaredMethods();
}
public InjectionPoint create(TypeLiteral<?> typeLiteral, Method member, Errors errors) {
checkForMisplacedBindingAnnotations(member, errors);
return new InjectionPoint(typeLiteral, member);
}
};
M[] getMembers(Class<?> type);
InjectionPoint create(TypeLiteral<?> typeLiteral, M member, Errors errors);
} | 0true
| src_main_java_org_elasticsearch_common_inject_spi_InjectionPoint.java |
/**
 * Operation sent to the member that owns a pending invocation to ask whether
 * that invocation (identified by its call id) is still running there.
 */
public class IsStillExecutingOperation extends AbstractOperation {
    // Call id of the operation whose liveness is being checked.
    private long operationCallId;
    IsStillExecutingOperation() {
        // for deserialization only
    }
    IsStillExecutingOperation(long operationCallId) {
        this.operationCallId = operationCallId;
    }
    @Override
    public void run() throws Exception {
        NodeEngineImpl nodeEngine = (NodeEngineImpl) getNodeEngine();
        BasicOperationService operationService = (BasicOperationService) nodeEngine.operationService;
        // Ask the local operation service whether the caller's invocation is still executing here.
        boolean executing = operationService.isOperationExecuting(getCallerAddress(), getCallerUuid(), operationCallId);
        getResponseHandler().sendResponse(executing);
    }
    @Override
    public boolean returnsResponse() {
        // The response is pushed explicitly from run() via the response handler,
        // so the framework must not send an automatic one.
        return false;
    }
    @Override
    protected void readInternal(ObjectDataInput in) throws IOException {
        // Field order must mirror writeInternal exactly.
        super.readInternal(in);
        operationCallId = in.readLong();
    }
    @Override
    protected void writeInternal(ObjectDataOutput out) throws IOException {
        super.writeInternal(out);
        out.writeLong(operationCallId);
    }
}
| hazelcast_src_main_java_com_hazelcast_spi_impl_IsStillExecutingOperation.java |
// Iterator view over this node's shards that keeps the enclosing
// RoutingNodes bookkeeping consistent when a shard is removed.
return new Iterator<MutableShardRouting>() {
    // Shard returned by the most recent next(); remove() operates on it.
    private MutableShardRouting current;
    @Override
    public boolean hasNext() {
        return iterator.hasNext();
    }
    @Override
    public MutableShardRouting next() {
        return current = iterator.next();
    }
    @Override
    public void remove() {
        iterator.remove();
        // Keep the primary-shard counter in sync when a primary is removed.
        if (current.primary()) {
            primaries--;
        }
        // NOTE(review): transactionId++ presumably invalidates other
        // views/iterators of the routing table — confirm against the
        // enclosing RoutingNodes class.
        transactionId++;
    }
};
| src_main_java_org_elasticsearch_cluster_routing_RoutingNodes.java |
// Sends a query-by-id request to `node` and forwards the outcome
// (response or transport failure) to `listener`.
transportService.sendRequest(node, SearchQueryByIdTransportHandler.ACTION, request, new BaseTransportResponseHandler<QuerySearchResult>() {
    @Override
    public QuerySearchResult newInstance() {
        // Fresh instance for the transport layer to deserialize the response into.
        return new QuerySearchResult();
    }
    @Override
    public void handleResponse(QuerySearchResult response) {
        listener.onResult(response);
    }
    @Override
    public void handleException(TransportException exp) {
        listener.onFailure(exp);
    }
    @Override
    public String executor() {
        // Callbacks only delegate to the listener; run them on the calling
        // (transport) thread rather than dispatching to a pool.
        return ThreadPool.Names.SAME;
    }
});
| src_main_java_org_elasticsearch_search_action_SearchServiceTransportAction.java |
/**
 * A binding to a single, user-supplied object instance.
 *
 * @param <T> the bound type
 */
public interface InstanceBinding<T> extends Binding<T>, HasDependencies {
    /**
     * Returns the user-supplied instance.
     */
    T getInstance();
    /**
     * Returns the field and method injection points of the instance, injected at injector-creation
     * time only.
     *
     * @return a possibly empty set
     */
    Set<InjectionPoint> getInjectionPoints();
}
| src_main_java_org_elasticsearch_common_inject_spi_InstanceBinding.java |
2,838 | public final class Builder {
private final Set<CharMatcher> matchers;
Builder() {
matchers = new HashSet<CharMatcher>();
}
public Builder or(CharMatcher matcher) {
matchers.add(matcher);
return this;
}
public CharMatcher build() {
switch (matchers.size()) {
case 0:
return new CharMatcher() {
@Override
public boolean isTokenChar(int c) {
return false;
}
};
case 1:
return matchers.iterator().next();
default:
return new CharMatcher() {
@Override
public boolean isTokenChar(int c) {
for (CharMatcher matcher : matchers) {
if (matcher.isTokenChar(c)) {
return true;
}
}
return false;
}
};
}
}
} | 0true
| src_main_java_org_elasticsearch_index_analysis_CharMatcher.java |
/**
 * Factory for a key that is linked to another (target) key. The target's
 * factory is resolved lazily at injector-creation time via {@link #notify},
 * which lets linked bindings refer to bindings that are processed later.
 */
class FactoryProxy<T> implements InternalFactory<T>, BindingProcessor.CreationListener {
    private final InjectorImpl injector;
    // Key this proxy is bound under.
    private final Key<T> key;
    // Key this proxy delegates to.
    private final Key<? extends T> targetKey;
    // Binding source, used to attribute errors to the user's configuration.
    private final Object source;
    // Resolved in notify(); null until then.
    private InternalFactory<? extends T> targetFactory;
    FactoryProxy(InjectorImpl injector, Key<T> key, Key<? extends T> targetKey, Object source) {
        this.injector = injector;
        this.key = key;
        this.targetKey = targetKey;
        this.source = source;
    }
    public void notify(final Errors errors) {
        try {
            targetFactory = injector.getInternalFactory(targetKey, errors.withSource(source));
        } catch (ErrorsException e) {
            // Record and continue; injector creation aggregates all errors.
            errors.merge(e.getErrors());
        }
    }
    public T get(Errors errors, InternalContext context, Dependency<?> dependency)
            throws ErrorsException {
        return targetFactory.get(errors.withSource(targetKey), context, dependency);
    }
    @Override
    public String toString() {
        return new ToStringBuilder(FactoryProxy.class)
                .add("key", key)
                .add("provider", targetFactory)
                .toString();
    }
}
| src_main_java_org_elasticsearch_common_inject_FactoryProxy.java |
1,470 | public class BroadleafOrderHistoryController extends AbstractAccountController {
protected static String orderHistoryView = "account/orderHistory";
protected static String orderDetailsView = "account/partials/orderDetails";
protected static String orderDetailsRedirectView = "account/partials/orderDetails";
public String viewOrderHistory(HttpServletRequest request, Model model) {
List<Order> orders = orderService.findOrdersForCustomer(CustomerState.getCustomer(), OrderStatus.SUBMITTED);
model.addAttribute("orders", orders);
return getOrderHistoryView();
}
public String viewOrderDetails(HttpServletRequest request, Model model, String orderNumber) {
Order order = orderService.findOrderByOrderNumber(orderNumber);
if (order == null) {
throw new IllegalArgumentException("The orderNumber provided is not valid");
}
model.addAttribute("order", order);
return isAjaxRequest(request) ? getOrderDetailsView() : getOrderDetailsRedirectView();
}
public String getOrderHistoryView() {
return orderHistoryView;
}
public String getOrderDetailsView() {
return orderDetailsView;
}
public String getOrderDetailsRedirectView() {
return orderDetailsRedirectView;
}
} | 0true
| core_broadleaf-framework-web_src_main_java_org_broadleafcommerce_core_web_controller_account_BroadleafOrderHistoryController.java |
// Registers the factory for COLLECTION_ADD_ALL so deserialization can
// instantiate the matching operation without reflection.
constructors[COLLECTION_ADD_ALL] = new ConstructorFunction<Integer, IdentifiedDataSerializable>() {
    public IdentifiedDataSerializable createNew(Integer arg) {
        return new CollectionAddAllOperation();
    }
};
| hazelcast_src_main_java_com_hazelcast_collection_CollectionDataSerializerHook.java |
// NOTE(review): anonymous subclass — presumably works around DocumentCommand's
// non-public constructor to create an empty command for the test; confirm.
DocumentCommand cmd= new DocumentCommand() { };
| plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_editor_AutoEditTest.java |
92 | private static class GenericEvent implements Serializable {
private static final long serialVersionUID = -933111044641052844L;
private int userId;
public GenericEvent(int userId) {
this.setUserId(userId);
}
public int getUserId() {
return userId;
}
public void setUserId(int userId) {
this.userId = userId;
}
} | 0true
| hazelcast-client_src_test_java_com_hazelcast_client_ClientEntryListenerDisconnectTest.java |
/**
 * {@code QueryScorer} that understands Elasticsearch's custom query wrappers
 * (function-score and filtered queries) so the highlighter can extract terms
 * from the underlying sub-query instead of skipping the wrapper.
 */
public final class CustomQueryScorer extends QueryScorer {
    public CustomQueryScorer(Query query, IndexReader reader, String field,
                             String defaultField) {
        super(query, reader, field, defaultField);
    }
    public CustomQueryScorer(Query query, IndexReader reader, String field) {
        super(query, reader, field);
    }
    public CustomQueryScorer(Query query, String field, String defaultField) {
        super(query, field, defaultField);
    }
    public CustomQueryScorer(Query query, String field) {
        super(query, field);
    }
    public CustomQueryScorer(Query query) {
        super(query);
    }
    public CustomQueryScorer(WeightedSpanTerm[] weightedTerms) {
        super(weightedTerms);
    }
    @Override
    protected WeightedSpanTermExtractor newTermExtractor(String defaultField) {
        // Plug in the extractor that can see through ES wrapper queries.
        return defaultField == null ? new CustomWeightedSpanTermExtractor()
                : new CustomWeightedSpanTermExtractor(defaultField);
    }
    private static class CustomWeightedSpanTermExtractor extends WeightedSpanTermExtractor {
        public CustomWeightedSpanTermExtractor() {
            super();
        }
        public CustomWeightedSpanTermExtractor(String defaultField) {
            super(defaultField);
        }
        @Override
        protected void extractUnknownQuery(Query query,
                                           Map<String, WeightedSpanTerm> terms) throws IOException {
            // Unwrap each known ES wrapper and recurse into the wrapped query;
            // unknown query types fall through with no terms extracted.
            if (query instanceof FunctionScoreQuery) {
                query = ((FunctionScoreQuery) query).getSubQuery();
                extract(query, terms);
            } else if (query instanceof FiltersFunctionScoreQuery) {
                query = ((FiltersFunctionScoreQuery) query).getSubQuery();
                extract(query, terms);
            } else if (query instanceof XFilteredQuery) {
                query = ((XFilteredQuery) query).getQuery();
                extract(query, terms);
            }
        }
    }
}
| src_main_java_org_elasticsearch_search_highlight_CustomQueryScorer.java |
/**
 * JAXB wrapper exposing an order {@link Adjustment} through the REST API.
 */
@XmlRootElement(name = "adjustment")
@XmlAccessorType(value = XmlAccessType.FIELD)
public class AdjustmentWrapper extends BaseWrapper implements APIWrapper<Adjustment> {
    @XmlElement
    protected Long id;
    @XmlElement
    protected Long offerid;
    @XmlElement
    protected String reason;
    @XmlElement
    protected String marketingMessage;
    @XmlElement
    protected Money adjustmentValue;
    @XmlElement
    protected String discountType;
    @XmlElement
    protected BigDecimal discountAmount;
    /**
     * Populates this wrapper from the given adjustment. No-op when {@code model} is null.
     */
    public void wrapDetails(Adjustment model, HttpServletRequest request) {
        if (model == null) {
            return;
        }
        this.id = model.getId();
        this.reason = model.getReason();
        Offer offer = model.getOffer();
        if (offer != null) {
            if (model.getReason() == null) {
                // Fall back to a generic reason when an offer-backed adjustment
                // carries no explicit reason of its own.
                this.reason = "OFFER";
            }
            this.offerid = offer.getId();
            this.marketingMessage = offer.getMarketingMessage();
            this.discountType = offer.getDiscountType().getType();
            this.discountAmount = offer.getValue();
        }
        this.adjustmentValue = model.getValue();
    }
    @Override
    public void wrapSummary(Adjustment model, HttpServletRequest request) {
        // Summary and detail representations are identical for adjustments.
        wrapDetails(model, request);
    }
}
| core_broadleaf-framework-web_src_main_java_org_broadleafcommerce_core_web_api_wrapper_AdjustmentWrapper.java |
// Collects values from the range scan, stopping once the optional fetch
// limit has been reached (maxValuesToFetch <= -1 means "no limit").
loadEntriesMinor(key, inclusive, new RangeResultListener<K, V>() {
    @Override
    public boolean addResult(Map.Entry<K, V> entry) {
        result.add(entry.getValue());
        // Returning false aborts the scan once enough values are collected.
        return maxValuesToFetch <= -1 || result.size() < maxValuesToFetch;
    }
});
| core_src_main_java_com_orientechnologies_orient_core_index_sbtreebonsai_local_OSBTreeBonsai.java |
1,178 | public final class LifecycleEvent {
/**
* lifecycle states
*/
public enum LifecycleState {
STARTING,
STARTED,
SHUTTING_DOWN,
SHUTDOWN,
MERGING,
MERGED,
CLIENT_CONNECTED,
CLIENT_DISCONNECTED
}
final LifecycleState state;
public LifecycleEvent(LifecycleState state) {
this.state = state;
}
public LifecycleState getState() {
return state;
}
@Override
public boolean equals(final Object o) {
if (this == o) return true;
if (!(o instanceof LifecycleEvent)) return false;
final LifecycleEvent that = (LifecycleEvent) o;
if (state != that.state) return false;
return true;
}
@Override
public int hashCode() {
return state != null ? state.hashCode() : 0;
}
@Override
public String toString() {
return "LifecycleEvent [state=" + state + "]";
}
} | 0true
| hazelcast_src_main_java_com_hazelcast_core_LifecycleEvent.java |
/**
 * JPA entity linking a {@link Customer} to a named {@link Address}; the
 * (CUSTOMER_ID, ADDRESS_NAME) pair is unique per the table constraint below.
 */
@Entity
@EntityListeners(value = { TemporalTimestampListener.class })
@Inheritance(strategy = InheritanceType.JOINED)
@Table(name = "BLC_CUSTOMER_ADDRESS", uniqueConstraints = @UniqueConstraint(name = "CSTMR_ADDR_UNIQUE_CNSTRNT", columnNames = { "CUSTOMER_ID", "ADDRESS_NAME" }))
@AdminPresentationMergeOverrides(
    {
        @AdminPresentationMergeOverride(name = "address.firstName", mergeEntries =
            @AdminPresentationMergeEntry(propertyType = PropertyType.AdminPresentation.EXCLUDED, booleanOverrideValue = true)),
        @AdminPresentationMergeOverride(name = "address.lastName", mergeEntries =
            @AdminPresentationMergeEntry(propertyType = PropertyType.AdminPresentation.EXCLUDED, booleanOverrideValue = true)),
        @AdminPresentationMergeOverride(name = "address.addressLine1", mergeEntries =
            @AdminPresentationMergeEntry(propertyType = PropertyType.AdminPresentation.PROMINENT, booleanOverrideValue = true))
    }
)
@AdminPresentationClass(populateToOneFields = PopulateToOneFieldsEnum.TRUE)
public class CustomerAddressImpl implements CustomerAddress {
    private static final long serialVersionUID = 1L;
    @Id
    @GeneratedValue(generator = "CustomerAddressId")
    @GenericGenerator(
        name="CustomerAddressId",
        strategy="org.broadleafcommerce.common.persistence.IdOverrideTableGenerator",
        parameters = {
            @Parameter(name="segment_value", value="CustomerAddressImpl"),
            @Parameter(name="entity_name", value="org.broadleafcommerce.profile.core.domain.CustomerAddressImpl")
        }
    )
    @Column(name = "CUSTOMER_ADDRESS_ID")
    protected Long id;
    // User-chosen label for the address (e.g. "Home"); unique per customer.
    @Column(name = "ADDRESS_NAME")
    @AdminPresentation(friendlyName = "CustomerAddressImpl_Address_Name", order=1,
        group = "CustomerAddressImpl_Identification", groupOrder = 1, prominent = true, gridOrder = 1)
    protected String addressName;
    @ManyToOne(cascade = {CascadeType.PERSIST, CascadeType.MERGE}, targetEntity = CustomerImpl.class, optional=false)
    @JoinColumn(name = "CUSTOMER_ID")
    @AdminPresentation(excluded = true, visibility = VisibilityEnum.HIDDEN_ALL)
    protected Customer customer;
    @ManyToOne(cascade = CascadeType.ALL, targetEntity = AddressImpl.class, optional=false)
    @JoinColumn(name = "ADDRESS_ID")
    @Index(name="CUSTOMERADDRESS_ADDRESS_INDEX", columnNames={"ADDRESS_ID"})
    protected Address address;
    @Override
    public Long getId() {
        return id;
    }
    @Override
    public void setId(Long id) {
        this.id = id;
    }
    @Override
    public String getAddressName() {
        return addressName;
    }
    @Override
    public void setAddressName(String addressName) {
        this.addressName = addressName;
    }
    @Override
    public Customer getCustomer() {
        return customer;
    }
    @Override
    public void setCustomer(Customer customer) {
        this.customer = customer;
    }
    @Override
    public Address getAddress() {
        return address;
    }
    @Override
    public void setAddress(Address address) {
        this.address = address;
    }
    @Override
    public String toString() {
        // Fall back to a name/first-line summary when no label was provided.
        return (addressName == null)
                ? address.getFirstName() + " - " + address.getAddressLine1()
                : addressName;
    }
    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + ((address == null) ? 0 : address.hashCode());
        result = prime * result + ((addressName == null) ? 0 : addressName.hashCode());
        result = prime * result + ((customer == null) ? 0 : customer.hashCode());
        return result;
    }
    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null) {
            return false;
        }
        if (getClass() != obj.getClass()) {
            return false;
        }
        CustomerAddressImpl other = (CustomerAddressImpl) obj;
        // Persisted entities compare by database identity alone.
        if (id != null && other.id != null) {
            return id.equals(other.id);
        }
        // Transient entities compare field-by-field.
        if (address == null) {
            if (other.address != null) {
                return false;
            }
        } else if (!address.equals(other.address)) {
            return false;
        }
        if (addressName == null) {
            if (other.addressName != null) {
                return false;
            }
        } else if (!addressName.equals(other.addressName)) {
            return false;
        }
        if (customer == null) {
            if (other.customer != null) {
                return false;
            }
        } else if (!customer.equals(other.customer)) {
            return false;
        }
        return true;
    }
}
| core_broadleaf-profile_src_main_java_org_broadleafcommerce_profile_core_domain_CustomerAddressImpl.java |
/**
 * Base class for client requests whose result is produced synchronously by
 * {@link #call()}; the outcome (return value, or the thrown exception itself)
 * is sent back to the calling client as the response.
 */
public abstract class CallableClientRequest extends ClientRequest implements Callable {
    @Override
    final void process() throws Exception {
        ClientEndpoint endpoint = getEndpoint();
        try {
            Object result = call();
            endpoint.sendResponse(result, getCallId());
        } catch (Exception e) {
            // Log locally, then ship the exception to the client rather than
            // leaving its invocation hanging.
            clientEngine.getLogger(getClass()).warning(e);
            endpoint.sendResponse(e, getCallId());
        }
    }
}
| hazelcast_src_main_java_com_hazelcast_client_CallableClientRequest.java |
/**
 * Base class for field-data implementations backed by Lucene doc values.
 * Doc values are managed by Lucene on disk, so the cache-clearing hooks are
 * intentionally no-ops.
 */
public abstract class DocValuesIndexFieldData {
    protected final Index index;
    protected final Names fieldNames;
    public DocValuesIndexFieldData(Index index, Names fieldNames) {
        super();
        this.index = index;
        this.fieldNames = fieldNames;
    }
    public final Names getFieldNames() {
        return fieldNames;
    }
    public final void clear() {
        // can't do
    }
    public final void clear(IndexReader reader) {
        // can't do
    }
    public final Index index() {
        return index;
    }
    /** Builder that selects a concrete doc-values implementation per field. */
    public static class Builder implements IndexFieldData.Builder {
        // Fields always served from binary doc values.
        private static final Set<String> BINARY_INDEX_FIELD_NAMES = ImmutableSet.of(UidFieldMapper.NAME, IdFieldMapper.NAME);
        // Fields always served from native numeric doc values.
        private static final Set<String> NUMERIC_INDEX_FIELD_NAMES = ImmutableSet.of(TimestampFieldMapper.NAME);
        private NumericType numericType;
        public Builder numericType(NumericType type) {
            this.numericType = type;
            return this;
        }
        @Override
        public IndexFieldData<?> build(Index index, Settings indexSettings, FieldMapper<?> mapper, IndexFieldDataCache cache,
                                       CircuitBreakerService breakerService) {
            // Ignore Circuit Breaker
            final FieldMapper.Names fieldNames = mapper.names();
            final Settings fdSettings = mapper.fieldDataType().getSettings();
            final Map<String, Settings> filter = fdSettings.getGroups("filter");
            if (filter != null && !filter.isEmpty()) {
                throw new ElasticsearchIllegalArgumentException("Doc values field data doesn't support filters [" + fieldNames.name() + "]");
            }
            if (BINARY_INDEX_FIELD_NAMES.contains(fieldNames.indexName())) {
                assert numericType == null;
                return new BinaryDVIndexFieldData(index, fieldNames);
            } else if (NUMERIC_INDEX_FIELD_NAMES.contains(fieldNames.indexName())) {
                // NOTE(review): this assert NPEs (under -ea) if numericType was never
                // set — presumably the timestamp mapper always configures one; confirm.
                assert !numericType.isFloatingPoint();
                return new NumericDVIndexFieldData(index, fieldNames);
            } else if (numericType != null) {
                return new BinaryDVNumericIndexFieldData(index, fieldNames, numericType);
            } else {
                return new SortedSetDVBytesIndexFieldData(index, fieldNames);
            }
        }
    }
}
| src_main_java_org_elasticsearch_index_fielddata_plain_DocValuesIndexFieldData.java |
/**
 * EasyMock IAnswer that supplies a fresh OrderItemPriceDetailImpl on each call,
 * so mocked factory methods hand out independent instances.
 */
public class OrderItemPriceDetailAnswer implements IAnswer<OrderItemPriceDetail> {
    @Override
    public OrderItemPriceDetail answer() throws Throwable {
        return new OrderItemPriceDetailImpl();
    }
}
| core_broadleaf-framework_src_test_java_org_broadleafcommerce_core_offer_service_OfferServiceTest.java |
1,339 | public class SolrSearchServiceImpl implements SearchService, DisposableBean {
private static final Log LOG = LogFactory.getLog(SolrSearchServiceImpl.class);
@Resource(name = "blProductDao")
protected ProductDao productDao;
@Resource(name = "blFieldDao")
protected FieldDao fieldDao;
@Resource(name = "blSearchFacetDao")
protected SearchFacetDao searchFacetDao;
@Resource(name = "blSolrHelperService")
protected SolrHelperService shs;
@Resource(name = "blSolrIndexService")
protected SolrIndexService solrIndexService;
@Resource(name = "blSolrSearchServiceExtensionManager")
protected SolrSearchServiceExtensionManager extensionManager;
/**
 * Builds an embedded 2-core Solr setup rooted at the given solrhome path.
 * The literal value "solrhome" resolves to a per-user temp directory
 * (overridable via -Dtmpdir.solrhome).
 */
public SolrSearchServiceImpl(String solrServer) throws IOException, ParserConfigurationException, SAXException {
    if ("solrhome".equals(solrServer)) {
        final String baseTempPath = System.getProperty("java.io.tmpdir");
        File tempDir = new File(baseTempPath + File.separator + System.getProperty("user.name") + File.separator + "solrhome");
        if (System.getProperty("tmpdir.solrhome") != null) {
            //allow for an override of tmpdir
            tempDir = new File(System.getProperty("tmpdir.solrhome"));
        }
        if (!tempDir.exists()) {
            tempDir.mkdirs();
        }
        solrServer = tempDir.getAbsolutePath();
    }
    File solrXml = new File(new File(solrServer), "solr.xml");
    if (!solrXml.exists()) {
        // Seed a fresh solrhome with the default configuration from the classpath.
        copyConfigToSolrHome(this.getClass().getResourceAsStream("/solr-default.xml"), solrXml);
    }
    LOG.debug(String.format("Using [%s] as solrhome", solrServer));
    LOG.debug(String.format("Using [%s] as solr.xml", solrXml.getAbsoluteFile()));
    if (LOG.isTraceEnabled()) {
        LOG.trace("Contents of solr.xml:");
        BufferedReader br = null;
        try {
            br = new BufferedReader(new FileReader(solrXml));
            String line;
            while ((line = br.readLine()) != null) {
                LOG.trace(line);
            }
        } finally {
            if (br != null) {
                try {
                    br.close();
                } catch (Throwable e) {
                    //do nothing
                }
            }
        }
        LOG.trace("Done printing solr.xml");
    }
    // One container hosts both cores: one serves live traffic, the other reindexing.
    CoreContainer coreContainer = CoreContainer.createAndLoad(solrServer, solrXml);
    EmbeddedSolrServer primaryServer = new EmbeddedSolrServer(coreContainer, SolrContext.PRIMARY);
    EmbeddedSolrServer reindexServer = new EmbeddedSolrServer(coreContainer, SolrContext.REINDEX);
    SolrContext.setPrimaryServer(primaryServer);
    SolrContext.setReindexServer(reindexServer);
}
/**
 * Copies the given configuration stream to {@code destFile} (truncating any
 * existing content), e.g. to seed a fresh solrhome with the default solr.xml.
 *
 * @param configIs source stream; closed by this method
 * @param destFile destination file, overwritten if present
 * @throws IOException if reading or writing fails
 */
public void copyConfigToSolrHome(InputStream configIs, File destFile) throws IOException {
    BufferedInputStream bis = null;
    BufferedOutputStream bos = null;
    try {
        bis = new BufferedInputStream(configIs);
        bos = new BufferedOutputStream(new FileOutputStream(destFile, false));
        // Chunked copy instead of the previous byte-at-a-time loop.
        byte[] buffer = new byte[8192];
        int read;
        while ((read = bis.read(buffer)) != -1) {
            bos.write(buffer, 0, read);
        }
        bos.flush();
    } finally {
        if (bis != null) {
            try {
                bis.close();
            } catch (Throwable e) {
                //do nothing
            }
        }
        if (bos != null) {
            try {
                bos.close();
            } catch (Throwable e) {
                //do nothing
            }
        }
    }
}
/** Uses an externally managed (e.g. standalone) server as the primary core only. */
public SolrSearchServiceImpl(SolrServer solrServer) {
    SolrContext.setPrimaryServer(solrServer);
}
/**
 * This constructor serves to mimic the one below this, which takes in two {@link SolrServer} arguments.
 * By having this and then simply disregarding the second parameter, we can more easily support 2-core
 * Solr configurations that use embedded/standalone per environment.
 *
 * @param solrServer
 * @param reindexServer ignored; both embedded cores are configured from {@code solrServer}
 * @throws SAXException
 * @throws ParserConfigurationException
 * @throws IOException
 */
public SolrSearchServiceImpl(String solrServer, String reindexServer) throws IOException, ParserConfigurationException, SAXException {
    this(solrServer);
}
/** Uses externally managed servers for both the live and the reindex core. */
public SolrSearchServiceImpl(SolrServer solrServer, SolrServer reindexServer) {
    SolrContext.setPrimaryServer(solrServer);
    SolrContext.setReindexServer(reindexServer);
}
@Override
public void rebuildIndex() throws ServiceException, IOException {
    // Full reindex is delegated to the dedicated index service.
    solrIndexService.rebuildIndex();
}
@Override
public void destroy() throws Exception {
    // Only embedded servers are owned by this bean; standalone servers are
    // managed (and shut down) externally.
    if (SolrContext.getServer() instanceof EmbeddedSolrServer) {
        ((EmbeddedSolrServer) SolrContext.getServer()).shutdown();
    }
}
@Override
public ProductSearchResult findExplicitProductsByCategory(Category category, ProductSearchCriteria searchCriteria)
        throws ServiceException {
    // Only products explicitly assigned to this category (uses the explicit-category field).
    List<SearchFacetDTO> facets = getCategoryFacets(category);
    String query = shs.getExplicitCategoryFieldName() + ":" + category.getId();
    return findProducts("*:*", facets, searchCriteria, shs.getCategorySortFieldName(category) + " asc", query);
}
@Override
public ProductSearchResult findProductsByCategory(Category category, ProductSearchCriteria searchCriteria)
        throws ServiceException {
    List<SearchFacetDTO> facets = getCategoryFacets(category);
    String query = shs.getCategoryFieldName() + ":" + category.getId();
    return findProducts("*:*", facets, searchCriteria, shs.getCategorySortFieldName(category) + " asc", query);
}
@Override
public ProductSearchResult findProductsByQuery(String query, ProductSearchCriteria searchCriteria)
        throws ServiceException {
    List<SearchFacetDTO> facets = getSearchFacets();
    // sanitizeQuery presumably escapes user-entered text before it is embedded
    // in the Solr query string — see that helper.
    query = "(" + sanitizeQuery(query) + ")";
    return findProducts(query, facets, searchCriteria, null);
}
@Override
public ProductSearchResult findProductsByCategoryAndQuery(Category category, String query,
        ProductSearchCriteria searchCriteria) throws ServiceException {
    List<SearchFacetDTO> facets = getSearchFacets();
    // Category restriction applied as a filter query alongside the text search.
    String catFq = shs.getCategoryFieldName() + ":" + category.getId();
    query = "(" + sanitizeQuery(query) + ")";
    return findProducts(query, facets, searchCriteria, null, catFq);
}
/**
 * Returns the prefix (e.g. "en_") for locale-specific index fields, or the
 * empty string when no request context / locale is available.
 */
public String getLocalePrefix() {
    BroadleafRequestContext context = BroadleafRequestContext.getBroadleafRequestContext();
    if (context == null) {
        return "";
    }
    Locale locale = context.getLocale();
    if (locale == null) {
        return "";
    }
    return locale.getLocaleCode() + "_";
}
// Builds the space-separated list of searchable product fields used as the
// edismax "qf" parameter.
protected String buildQueryFieldsString() {
    StringBuilder queryBuilder = new StringBuilder();
    List<Field> fields = fieldDao.readAllProductFields();
    for (Field currentField : fields) {
        if (currentField.getSearchable()) {
            appendFieldToQuery(queryBuilder, currentField);
        }
    }
    return queryBuilder.toString();
}
// Appends every searchable index variant of the field (a field may be indexed
// under several types), each followed by a separating space.
protected void appendFieldToQuery(StringBuilder queryBuilder, Field currentField) {
    List<FieldType> searchableFieldTypes = shs.getSearchableFieldTypes(currentField);
    for (FieldType currentType : searchableFieldTypes) {
        queryBuilder.append(shs.getPropertyNameForFieldSearchable(currentField, currentType)).append(" ");
    }
}
/**
 * @deprecated in favor of the other findProducts() method
 */
protected ProductSearchResult findProducts(String qualifiedSolrQuery, List<SearchFacetDTO> facets,
        ProductSearchCriteria searchCriteria, String defaultSort) throws ServiceException {
    return findProducts(qualifiedSolrQuery, facets, searchCriteria, defaultSort, null);
}
/**
 * Given a qualified solr query string (such as "category:2002"), actually performs a solr search. It will
 * take into considering the search criteria to build out facets / pagination / sorting.
 *
 * @param qualifiedSolrQuery
 * @param facets
 * @param searchCriteria
 * @param defaultSort sort applied when the criteria carry no sort of their own
 * @param filterQueries optional extra Solr filter queries (fq)
 * @return the ProductSearchResult of the search
 * @throws ServiceException
 */
protected ProductSearchResult findProducts(String qualifiedSolrQuery, List<SearchFacetDTO> facets,
        ProductSearchCriteria searchCriteria, String defaultSort, String... filterQueries) throws ServiceException {
    Map<String, SearchFacetDTO> namedFacetMap = getNamedFacetMap(facets, searchCriteria);
    // Build the basic query: fetch only product ids; pagination is 1-based.
    SolrQuery solrQuery = new SolrQuery()
            .setQuery(qualifiedSolrQuery)
            .setFields(shs.getProductIdFieldName())
            .setRows(searchCriteria.getPageSize())
            .setStart((searchCriteria.getPage() - 1) * searchCriteria.getPageSize());
    if (filterQueries != null) {
        solrQuery.setFilterQueries(filterQueries);
    }
    // Always restrict results to the current site namespace.
    solrQuery.addFilterQuery(shs.getNamespaceFieldName() + ":" + shs.getCurrentNamespace());
    solrQuery.set("defType", "edismax");
    solrQuery.set("qf", buildQueryFieldsString());
    // Attach additional restrictions
    attachSortClause(solrQuery, searchCriteria, defaultSort);
    attachActiveFacetFilters(solrQuery, namedFacetMap, searchCriteria);
    attachFacets(solrQuery, namedFacetMap);
    // Let extension modules tweak the final query before it is sent.
    extensionManager.getProxy().modifySolrQuery(solrQuery, qualifiedSolrQuery, facets,
            searchCriteria, defaultSort);
    if (LOG.isTraceEnabled()) {
        try {
            LOG.trace(URLDecoder.decode(solrQuery.toString(), "UTF-8"));
        } catch (Exception e) {
            LOG.trace("Couldn't UTF-8 URL Decode: " + solrQuery.toString());
        }
    }
    // Query solr
    QueryResponse response;
    try {
        response = SolrContext.getServer().query(solrQuery);
        if (LOG.isTraceEnabled()) {
            LOG.trace(response.toString());
            for (SolrDocument doc : response.getResults()) {
                LOG.trace(doc);
            }
        }
    } catch (SolrServerException e) {
        throw new ServiceException("Could not perform search", e);
    }
    // Get the facets
    setFacetResults(namedFacetMap, response);
    sortFacetResults(namedFacetMap);
    // Get the products
    List<Product> products = getProducts(response);
    ProductSearchResult result = new ProductSearchResult();
    result.setFacets(facets);
    result.setProducts(products);
    setPagingAttributes(result, response, searchCriteria);
    return result;
}
@Override
public List<SearchFacetDTO> getSearchFacets() {
    // Site-wide facets.
    return buildSearchFacetDTOs(searchFacetDao.readAllSearchFacets());
}
@Override
public List<SearchFacetDTO> getCategoryFacets(Category category) {
    // Facets configured on the category, including those accumulated from ancestors.
    List<CategorySearchFacet> categorySearchFacets = category.getCumulativeSearchFacets();
    List<SearchFacet> searchFacets = new ArrayList<SearchFacet>();
    for (CategorySearchFacet categorySearchFacet : categorySearchFacets) {
        searchFacets.add(categorySearchFacet.getSearchFacet());
    }
    return buildSearchFacetDTOs(searchFacets);
}
/**
 * Sets up the sorting criteria. This will support sorting by multiple fields at a time,
 * given as a comma-separated list of "field [asc|desc]" entries; the direction
 * defaults to ascending when omitted.
 *
 * @param query
 * @param searchCriteria
 * @param defaultSort applied when the criteria carry no sort query
 */
protected void attachSortClause(SolrQuery query, ProductSearchCriteria searchCriteria, String defaultSort) {
    Map<String, String> solrFieldKeyMap = getSolrFieldKeyMap(searchCriteria);
    String sortQuery = searchCriteria.getSortQuery();
    if (StringUtils.isBlank(sortQuery)) {
        sortQuery = defaultSort;
    }
    if (StringUtils.isNotBlank(sortQuery)) {
        String[] sortFields = sortQuery.split(",");
        for (String sortField : sortFields) {
            // Trim so entries like "name asc, price desc" parse correctly.
            String[] parts = sortField.trim().split(" ");
            String field = parts[0];
            if (solrFieldKeyMap.containsKey(field)) {
                field = solrFieldKeyMap.get(field);
            }
            // Guard against a missing direction token: previously "field" without
            // " asc"/" desc" threw ArrayIndexOutOfBoundsException; default to asc.
            ORDER order = (parts.length > 1 && "desc".equals(parts[1])) ? ORDER.desc : ORDER.asc;
            if (field != null) {
                query.addSortField(field, order);
            }
        }
    }
}
/**
 * Restricts the query by adding active facet filters.
 *
 * @param query
 * @param namedFacetMap
 * @param searchCriteria
 */
protected void attachActiveFacetFilters(SolrQuery query, Map<String, SearchFacetDTO> namedFacetMap,
        ProductSearchCriteria searchCriteria) {
    for (Entry<String, String[]> entry : searchCriteria.getFilterCriteria().entrySet()) {
        String solrKey = null;
        // Resolve the request parameter (field abbreviation) back to its Solr field name
        // and mark that facet active.
        for (Entry<String, SearchFacetDTO> dtoEntry : namedFacetMap.entrySet()) {
            if (dtoEntry.getValue().getFacet().getField().getAbbreviation().equals(entry.getKey())) {
                solrKey = dtoEntry.getKey();
                dtoEntry.getValue().setActive(true);
            }
        }
        if (solrKey != null) {
            String solrTag = getSolrFieldTag(shs.getGlobalFacetTagField(), "tag");
            // Work on a copy so the original criteria values are not rewritten.
            String[] selectedValues = entry.getValue().clone();
            for (int i = 0; i < selectedValues.length; i++) {
                if (selectedValues[i].contains("range[")) {
                    // Translate "range[lo:hi]" into a Solr range clause; "null" upper
                    // bound becomes the open-ended wildcard "*".
                    String rangeValue = selectedValues[i].substring(selectedValues[i].indexOf('[') + 1,
                            selectedValues[i].indexOf(']'));
                    String[] rangeValues = StringUtils.split(rangeValue, ':');
                    if (rangeValues[1].equals("null")) {
                        rangeValues[1] = "*";
                    }
                    selectedValues[i] = solrKey + ":[" + rangeValues[0] + " TO " + rangeValues[1] + "]";
                } else {
                    selectedValues[i] = solrKey + ":\"" + selectedValues[i] + "\"";
                }
            }
            // Multiple selected values of one facet are OR-ed together.
            String valueString = StringUtils.join(selectedValues, " OR ");
            StringBuilder sb = new StringBuilder();
            sb.append(solrTag).append("(").append(valueString).append(")");
            query.addFilterQuery(sb.toString());
        }
    }
}
/**
 * Notifies solr about which facets you want it to determine results and counts for
 *
 * @param query
 * @param namedFacetMap
 */
protected void attachFacets(SolrQuery query, Map<String, SearchFacetDTO> namedFacetMap) {
    query.setFacet(true);
    for (Entry<String, SearchFacetDTO> entry : namedFacetMap.entrySet()) {
        SearchFacetDTO dto = entry.getValue();
        // Active facets are tagged so their own filter can be excluded from their counts.
        String facetTagField = entry.getValue().isActive() ? shs.getGlobalFacetTagField() : entry.getKey();
        // Clone the list - we don't want to remove these facets from the DB
        List<SearchFacetRange> facetRanges = new ArrayList<SearchFacetRange>(dto.getFacet().getSearchFacetRanges());
        if (extensionManager != null) {
            extensionManager.getProxy().filterSearchFacetRanges(dto, facetRanges);
        }
        if (facetRanges != null && facetRanges.size() > 0) {
            // Ranged facet: one facet.query per configured range.
            for (SearchFacetRange range : facetRanges) {
                query.addFacetQuery(getSolrTaggedFieldString(entry.getKey(), facetTagField, "ex", range));
            }
        } else {
            query.addFacetField(getSolrTaggedFieldString(entry.getKey(), facetTagField, "ex", null));
        }
    }
}
/**
 * Builds out the DTOs for facet results from the search. This will then be used by the view layer to
 * display which values are available given the current constraints as well as the count of the values.
 *
 * @param namedFacetMap map of fully qualified Solr index field key to facet DTO; result values are
 *                      appended to the matching DTO in place
 * @param response the Solr query response carrying facet field and facet query counts
 */
protected void setFacetResults(Map<String, SearchFacetDTO> namedFacetMap, QueryResponse response) {
    // Plain (non-ranged) facets come back as facet fields keyed directly by the index field name
    if (response.getFacetFields() != null) {
        for (FacetField facet : response.getFacetFields()) {
            String facetFieldName = facet.getName();
            SearchFacetDTO facetDTO = namedFacetMap.get(facetFieldName);
            for (Count value : facet.getValues()) {
                SearchFacetResultDTO resultDTO = new SearchFacetResultDTO();
                resultDTO.setFacet(facetDTO.getFacet());
                resultDTO.setQuantity(new Long(value.getCount()).intValue());
                resultDTO.setValue(value.getName());
                facetDTO.getFacetValues().add(resultDTO);
            }
        }
    }
    // Ranged facets come back as facet queries keyed by the query string we built, e.g.
    // "{!ex=tag}field:[min TO max]" - parse the field name and bounds back out of the key
    if (response.getFacetQuery() != null) {
        for (Entry<String, Integer> entry : response.getFacetQuery().entrySet()) {
            String key = entry.getKey();
            // Strip the leading "{!...}" local params to recover the index field name
            String facetFieldName = key.substring(key.indexOf("}") + 1, key.indexOf(':'));
            SearchFacetDTO facetDTO = namedFacetMap.get(facetFieldName);
            String minValue = key.substring(key.indexOf("[") + 1, key.indexOf(" TO"));
            String maxValue = key.substring(key.indexOf(" TO ") + 4, key.indexOf("]"));
            // "*" marks an open-ended maximum; surface it as a null max value
            if (maxValue.equals("*")) {
                maxValue = null;
            }
            SearchFacetResultDTO resultDTO = new SearchFacetResultDTO();
            resultDTO.setFacet(facetDTO.getFacet());
            resultDTO.setQuantity(entry.getValue());
            resultDTO.setMinValue(new BigDecimal(minValue));
            resultDTO.setMaxValue(maxValue == null ? null : new BigDecimal(maxValue));
            facetDTO.getFacetValues().add(resultDTO);
        }
    }
}
/**
 * Sorts the result values of every facet in the given map, in place. Ordering uses the natural
 * ordering of each result's value attribute when both sides have one, falling back to the natural
 * ordering of minValue (used by ranged facets). Results with neither attribute compare as equal.
 * Override this method to customize facet sorting for your given needs.
 *
 * @param namedFacetMap map of Solr index field key to facet DTO whose values are sorted in place
 */
protected void sortFacetResults(Map<String, SearchFacetDTO> namedFacetMap) {
    Comparator<SearchFacetResultDTO> byValueThenMinValue = new Comparator<SearchFacetResultDTO>() {
        public int compare(SearchFacetResultDTO left, SearchFacetResultDTO right) {
            if (left.getValue() != null && right.getValue() != null) {
                return left.getValue().compareTo(right.getValue());
            }
            if (left.getMinValue() != null && right.getMinValue() != null) {
                return left.getMinValue().compareTo(right.getMinValue());
            }
            // Neither attribute is present on both sides - no basis for comparison
            return 0;
        }
    };
    for (SearchFacetDTO dto : namedFacetMap.values()) {
        Collections.sort(dto.getFacetValues(), byValueThenMinValue);
    }
}
/**
 * Sets the total results, the current page, and the page size on the ProductSearchResult. The
 * total comes from the Solr response; page and page size are copied from the search criteria
 * for ease of use by the caller.
 *
 * @param result the result object to populate
 * @param response the Solr response providing the total hit count
 * @param searchCriteria the criteria the search was executed with
 */
public void setPagingAttributes(ProductSearchResult result, QueryResponse response,
        ProductSearchCriteria searchCriteria) {
    long numFound = response.getResults().getNumFound();
    result.setTotalResults(Long.valueOf(numFound).intValue());
    result.setPage(searchCriteria.getPage());
    result.setPageSize(searchCriteria.getPageSize());
}
/**
 * Given a list of product IDs from solr, this method will look up the IDs via the productDao and
 * build out actual Product instances. The returned list is sorted by the order of the IDs in the
 * Solr response so that Solr-side sorting is preserved in the UI.
 *
 * @param response the Solr response containing product id documents
 * @return the actual Product instances as a result of the search
 */
protected List<Product> getProducts(QueryResponse response) {
    List<Long> productIds = new ArrayList<Long>();
    SolrDocumentList docs = response.getResults();
    for (SolrDocument doc : docs) {
        productIds.add((Long) doc.getFieldValue(shs.getProductIdFieldName()));
    }
    List<Product> products = productDao.readProductsByIds(productIds);
    if (products != null) {
        // Precompute each id's position so the comparator is O(1) per call instead of the
        // O(n) List.indexOf previously used (which made the sort O(n^2 log n)). Mirrors
        // indexOf semantics: first occurrence wins for duplicate ids.
        final Map<Long, Integer> idPositions = new HashMap<Long, Integer>(productIds.size());
        for (int i = 0; i < productIds.size(); i++) {
            if (!idPositions.containsKey(productIds.get(i))) {
                idPositions.put(productIds.get(i), Integer.valueOf(i));
            }
        }
        Collections.sort(products, new Comparator<Product>() {
            public int compare(Product o1, Product o2) {
                Integer p1 = idPositions.get(o1.getId());
                Integer p2 = idPositions.get(o2.getId());
                // Mirror List.indexOf semantics for an id that was not in the Solr results
                if (p1 == null) {
                    p1 = Integer.valueOf(-1);
                }
                if (p2 == null) {
                    p2 = Integer.valueOf(-1);
                }
                return p1.compareTo(p2);
            }
        });
    }
    return products;
}
/**
 * Creates the wrapper DTO around each SearchFacet whose required-facet conditions are satisfied
 * by the current request parameters. Each DTO is created with showQuantity enabled.
 *
 * @param searchFacets the candidate facets
 * @return the wrapper DTOs for the available facets
 */
protected List<SearchFacetDTO> buildSearchFacetDTOs(List<SearchFacet> searchFacets) {
    Map<String, String[]> requestParameters = BroadleafRequestContext.getRequestParameterMap();
    List<SearchFacetDTO> dtos = new ArrayList<SearchFacetDTO>();
    for (SearchFacet searchFacet : searchFacets) {
        // Only expose facets whose dependencies are met by the current request
        if (!facetIsAvailable(searchFacet, requestParameters)) {
            continue;
        }
        SearchFacetDTO dto = new SearchFacetDTO();
        dto.setFacet(searchFacet);
        dto.setShowQuantity(true);
        dtos.add(dto);
    }
    return dtos;
}
/**
 * Checks to see if the requiredFacets condition for a given facet is met.
 *
 * <p>When the facet requires all dependent facets, every required facet's field abbreviation must
 * appear as a key in the request parameters; otherwise a single match suffices.
 *
 * @param facet the facet whose availability is being evaluated
 * @param params the current request parameters (abbreviation -> values)
 * @return whether or not the facet is available
 */
protected boolean facetIsAvailable(SearchFacet facet, Map<String, String[]> params) {
    // Facets with no dependencies are always available
    if (CollectionUtils.isEmpty(facet.getRequiredFacets())) {
        return true;
    }
    // Dependencies exist but nothing at all is active in the request - cannot be satisfied
    if (MapUtils.isEmpty(params)) {
        return false;
    }
    int requiredMatches = facet.getRequiresAllDependentFacets() ? facet.getRequiredFacets().size() : 1;
    int matches = 0;
    for (RequiredFacet requiredFacet : facet.getRequiredFacets()) {
        String requiredKey = requiredFacet.getRequiredFacet().getField().getAbbreviation();
        if (params.containsKey(requiredKey)) {
            matches++;
            if (matches == requiredMatches) {
                return true;
            }
        }
    }
    return matches == requiredMatches;
}
/**
* Perform any necessary query sanitation here. For example, we disallow open and close parentheses, colons, and we also
* ensure that quotes are actual quotes (") and not the URL encoding (") so that Solr is able to properly handle
* the user's intent.
*
* @param query
* @return the sanitized query
*/
protected String sanitizeQuery(String query) {
return query.replace("(", "").replace("%28", "")
.replace(")", "").replace("%29", "")
.replace(":", "").replace("%3A", "").replace("%3a", "")
.replace(""", "\""); // Allow quotes in the query for more finely tuned matches
}
/**
 * Returns a field string. Given indexField = a and a non-null range, produces "a:[minVal TO maxVal]";
 * with a null range, just "a". A null range maximum is rendered as the open-ended "*".
 *
 * @param indexField the Solr index field name
 * @param range the optional range to append
 * @return the composed field string
 */
protected String getSolrFieldString(String indexField, SearchFacetRange range) {
    if (range == null) {
        return indexField;
    }
    String min = range.getMinValue().toPlainString();
    String max = (range.getMaxValue() == null) ? "*" : range.getMaxValue().toPlainString();
    return indexField + ":[" + min + " TO " + max + "]";
}
/**
 * Returns a fully composed solr field string. Given indexField = a, tag = ex, and a non-null range,
 * produces: {!ex=a}a:[minVal TO maxVal]
 *
 * @param indexField the Solr index field name
 * @param tagField the field referenced inside the local-param tag
 * @param tag the local-param name (e.g. "ex" or "tag")
 * @param range the optional range to append
 * @return the tagged field string
 */
protected String getSolrTaggedFieldString(String indexField, String tagField, String tag, SearchFacetRange range) {
    StringBuilder tagged = new StringBuilder();
    tagged.append(getSolrFieldTag(tagField, tag));
    tagged.append(getSolrFieldString(indexField, range));
    return tagged.toString();
}
/**
 * Returns a solr local-param tag. Given tagField = a, tag = ex, produces: {!ex=a}
 * Returns the empty string when the tag name is blank.
 *
 * @param tagField the field to reference in the tag
 * @param tag the local-param name
 * @return the tag string, or "" for a blank tag
 */
protected String getSolrFieldTag(String tagField, String tag) {
    if (StringUtils.isBlank(tag)) {
        return "";
    }
    return "{!" + tag + "=" + tagField + "}";
}
/**
 * Builds a lookup from the fully qualified solr index field key (as derived by
 * {@code getSolrFieldKey}) to the facet DTO itself.
 *
 * @param facets the facet DTOs to key
 * @param searchCriteria passed through in case it is needed to determine each field key
 * @return a map of fully qualified solr index field key to the searchFacetDTO object
 */
protected Map<String, SearchFacetDTO> getNamedFacetMap(List<SearchFacetDTO> facets,
        final ProductSearchCriteria searchCriteria) {
    return BLCMapUtils.keyedMap(facets, new TypedClosure<String, SearchFacetDTO>() {
        public String getKey(SearchFacetDTO facet) {
            return getSolrFieldKey(facet.getFacet().getField(), searchCriteria);
        }
    });
}
/**
 * This method will be used to map a field abbreviation to the appropriate solr index field to use. Typically,
 * this default implementation that maps to the facet field type will be sufficient. However, there may be
 * cases where you would want to use a different solr index depending on other currently active facets. In that
 * case, you would associate that mapping here. For example, for the "price" abbreviation, we would generally
 * want to use "defaultSku.retailPrice_td". However, if a secondary facet on item condition is selected (such
 * as "refurbished", we may want to index "price" to "refurbishedSku.retailPrice_td". That mapping occurs here.
 *
 * @param field the field whose solr index key is being resolved
 * @param searchCriteria the searchCriteria in case it is needed to determine the field key
 * @return the solr field index key to use
 */
protected String getSolrFieldKey(Field field, ProductSearchCriteria searchCriteria) {
    return shs.getPropertyNameForFieldFacet(field);
}
/**
 * Builds the abbreviation-to-index-key lookup for all product fields, using
 * {@code getSolrFieldKey} to derive each fully qualified solr index field key.
 *
 * @param searchCriteria passed through in case it is needed to determine each field key
 * @return a map of abbreviated key to fully qualified solr index field key for all product fields
 */
protected Map<String, String> getSolrFieldKeyMap(ProductSearchCriteria searchCriteria) {
    Map<String, String> abbreviationToSolrKey = new HashMap<String, String>();
    for (Field productField : fieldDao.readAllProductFields()) {
        abbreviationToSolrKey.put(productField.getAbbreviation(), getSolrFieldKey(productField, searchCriteria));
    }
    return abbreviationToSolrKey;
}
} | 1no label
| core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_search_service_solr_SolrSearchServiceImpl.java |
2,152 | public class SimpleAllTests extends ElasticsearchTestCase {
@Test
public void testBoostOnEagerTokenizer() throws Exception {
    // Verifies that per-field boosts survive tokenization: each emitted token should carry its
    // source field's boost encoded as a 4-byte float payload (no/empty payload means boost 1.0).
    AllEntries allEntries = new AllEntries();
    allEntries.addText("field1", "all", 2.0f);
    allEntries.addText("field2", "your", 1.0f);
    allEntries.addText("field1", "boosts", 0.5f);
    allEntries.reset();
    // whitespace analyzer's tokenizer reads characters eagerly on the contrary to the standard tokenizer
    final TokenStream ts = AllTokenStream.allTokenStream("any", allEntries, new WhitespaceAnalyzer(Lucene.VERSION));
    final CharTermAttribute termAtt = ts.addAttribute(CharTermAttribute.class);
    final PayloadAttribute payloadAtt = ts.addAttribute(PayloadAttribute.class);
    ts.reset();
    for (int i = 0; i < 3; ++i) {
        assertTrue(ts.incrementToken());
        final String term;
        final float boost;
        // Expected term/boost pairs, in the order the text was added above
        switch (i) {
        case 0:
            term = "all";
            boost = 2;
            break;
        case 1:
            term = "your";
            boost = 1;
            break;
        case 2:
            term = "boosts";
            boost = 0.5f;
            break;
        default:
            throw new AssertionError();
        }
        assertEquals(term, termAtt.toString());
        final BytesRef payload = payloadAtt.getPayload();
        if (payload == null || payload.length == 0) {
            // Absent payload implies the default boost of 1.0
            assertEquals(boost, 1f, 0.001f);
        } else {
            assertEquals(4, payload.length);
            final float b = PayloadHelper.decodeFloat(payload.bytes, payload.offset);
            assertEquals(boost, b, 0.001f);
        }
    }
    // Exactly three tokens expected
    assertFalse(ts.incrementToken());
}
/**
 * Verifies that reading the concatenated _all text through buffers of every size from 1 to 29
 * always reproduces the full "something else" content, covering reads that split entries.
 */
@Test
public void testAllEntriesRead() throws Exception {
    AllEntries allEntries = new AllEntries();
    allEntries.addText("field1", "something", 1.0f);
    allEntries.addText("field2", "else", 1.0f);
    for (int bufferSize = 1; bufferSize < 30; bufferSize++) {
        allEntries.reset();
        char[] buffer = new char[bufferSize];
        String value = slurpToString(allEntries, buffer);
        assertThat("failed for " + bufferSize, value, equalTo("something else"));
    }
}
/**
 * Drains the given AllEntries reader into a String, reading through the supplied buffer
 * until end-of-stream (read returns -1).
 */
private String slurpToString(AllEntries allEntries, char[] data) throws IOException {
    StringBuilder result = new StringBuilder();
    for (int read = allEntries.read(data, 0, data.length); read != -1; read = allEntries.read(data, 0, data.length)) {
        result.append(data, 0, read);
    }
    return result.toString();
}
/**
 * Asserts that the score reported by {@code IndexSearcher#explain} for the given hit matches
 * the score the search itself returned, within a small float tolerance.
 */
private void assertExplanationScore(IndexSearcher searcher, Query query, ScoreDoc scoreDoc) throws IOException {
    final Explanation expl = searcher.explain(query, scoreDoc.doc);
    assertEquals(scoreDoc.score, expl.getValue(), 0.00001f);
}
@Test
public void testSimpleAllNoBoost() throws Exception {
    // Two docs with the same terms in swapped fields and no boosts: searches on either term
    // should return both docs in index order, with explain() agreeing with the hit scores.
    Directory dir = new RAMDirectory();
    IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(Lucene.VERSION, Lucene.STANDARD_ANALYZER));
    Document doc = new Document();
    doc.add(new Field("_id", "1", StoredField.TYPE));
    AllEntries allEntries = new AllEntries();
    allEntries.addText("field1", "something", 1.0f);
    allEntries.addText("field2", "else", 1.0f);
    allEntries.reset();
    doc.add(new TextField("_all", AllTokenStream.allTokenStream("_all", allEntries, Lucene.STANDARD_ANALYZER)));
    indexWriter.addDocument(doc);
    doc = new Document();
    doc.add(new Field("_id", "2", StoredField.TYPE));
    allEntries = new AllEntries();
    allEntries.addText("field1", "else", 1.0f);
    allEntries.addText("field2", "something", 1.0f);
    allEntries.reset();
    doc.add(new TextField("_all", AllTokenStream.allTokenStream("_all", allEntries, Lucene.STANDARD_ANALYZER)));
    indexWriter.addDocument(doc);
    IndexReader reader = DirectoryReader.open(indexWriter, true);
    IndexSearcher searcher = new IndexSearcher(reader);
    Query query = new AllTermQuery(new Term("_all", "else"));
    TopDocs docs = searcher.search(query, 10);
    assertThat(docs.totalHits, equalTo(2));
    assertThat(docs.scoreDocs[0].doc, equalTo(0));
    assertExplanationScore(searcher, query, docs.scoreDocs[0]);
    assertThat(docs.scoreDocs[1].doc, equalTo(1));
    assertExplanationScore(searcher, query, docs.scoreDocs[1]);
    query = new AllTermQuery(new Term("_all", "something"));
    docs = searcher.search(query, 10);
    assertThat(docs.totalHits, equalTo(2));
    assertThat(docs.scoreDocs[0].doc, equalTo(0));
    assertExplanationScore(searcher, query, docs.scoreDocs[0]);
    assertThat(docs.scoreDocs[1].doc, equalTo(1));
    assertExplanationScore(searcher, query, docs.scoreDocs[1]);
    indexWriter.close();
}
@Test
public void testSimpleAllWithBoost() throws Exception {
    // Same setup as testSimpleAllNoBoost, except doc 2's "else" entry carries a 2.0 boost:
    // searching "else" must rank doc 2 first, while "something" keeps index order.
    Directory dir = new RAMDirectory();
    IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(Lucene.VERSION, Lucene.STANDARD_ANALYZER));
    Document doc = new Document();
    doc.add(new Field("_id", "1", StoredField.TYPE));
    AllEntries allEntries = new AllEntries();
    allEntries.addText("field1", "something", 1.0f);
    allEntries.addText("field2", "else", 1.0f);
    allEntries.reset();
    doc.add(new TextField("_all", AllTokenStream.allTokenStream("_all", allEntries, Lucene.STANDARD_ANALYZER)));
    indexWriter.addDocument(doc);
    doc = new Document();
    doc.add(new Field("_id", "2", StoredField.TYPE));
    allEntries = new AllEntries();
    allEntries.addText("field1", "else", 2.0f);
    allEntries.addText("field2", "something", 1.0f);
    allEntries.reset();
    doc.add(new TextField("_all", AllTokenStream.allTokenStream("_all", allEntries, Lucene.STANDARD_ANALYZER)));
    indexWriter.addDocument(doc);
    IndexReader reader = DirectoryReader.open(indexWriter, true);
    IndexSearcher searcher = new IndexSearcher(reader);
    // this one is boosted. so the second doc is more relevant
    Query query = new AllTermQuery(new Term("_all", "else"));
    TopDocs docs = searcher.search(query, 10);
    assertThat(docs.totalHits, equalTo(2));
    assertThat(docs.scoreDocs[0].doc, equalTo(1));
    assertExplanationScore(searcher, query, docs.scoreDocs[0]);
    assertThat(docs.scoreDocs[1].doc, equalTo(0));
    assertExplanationScore(searcher, query, docs.scoreDocs[1]);
    query = new AllTermQuery(new Term("_all", "something"));
    docs = searcher.search(query, 10);
    assertThat(docs.totalHits, equalTo(2));
    assertThat(docs.scoreDocs[0].doc, equalTo(0));
    assertExplanationScore(searcher, query, docs.scoreDocs[0]);
    assertThat(docs.scoreDocs[1].doc, equalTo(1));
    assertExplanationScore(searcher, query, docs.scoreDocs[1]);
    indexWriter.close();
}
@Test
public void testMultipleTokensAllNoBoost() throws Exception {
    // Multi-token entries without boosts: every individual token from either field must match
    // both docs, returned in index order.
    Directory dir = new RAMDirectory();
    IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(Lucene.VERSION, Lucene.STANDARD_ANALYZER));
    Document doc = new Document();
    doc.add(new Field("_id", "1", StoredField.TYPE));
    AllEntries allEntries = new AllEntries();
    allEntries.addText("field1", "something moo", 1.0f);
    allEntries.addText("field2", "else koo", 1.0f);
    allEntries.reset();
    doc.add(new TextField("_all", AllTokenStream.allTokenStream("_all", allEntries, Lucene.STANDARD_ANALYZER)));
    indexWriter.addDocument(doc);
    doc = new Document();
    doc.add(new Field("_id", "2", StoredField.TYPE));
    allEntries = new AllEntries();
    allEntries.addText("field1", "else koo", 1.0f);
    allEntries.addText("field2", "something moo", 1.0f);
    allEntries.reset();
    doc.add(new TextField("_all", AllTokenStream.allTokenStream("_all", allEntries, Lucene.STANDARD_ANALYZER)));
    indexWriter.addDocument(doc);
    IndexReader reader = DirectoryReader.open(indexWriter, true);
    IndexSearcher searcher = new IndexSearcher(reader);
    TopDocs docs = searcher.search(new AllTermQuery(new Term("_all", "else")), 10);
    assertThat(docs.totalHits, equalTo(2));
    assertThat(docs.scoreDocs[0].doc, equalTo(0));
    assertThat(docs.scoreDocs[1].doc, equalTo(1));
    docs = searcher.search(new AllTermQuery(new Term("_all", "koo")), 10);
    assertThat(docs.totalHits, equalTo(2));
    assertThat(docs.scoreDocs[0].doc, equalTo(0));
    assertThat(docs.scoreDocs[1].doc, equalTo(1));
    docs = searcher.search(new AllTermQuery(new Term("_all", "something")), 10);
    assertThat(docs.totalHits, equalTo(2));
    assertThat(docs.scoreDocs[0].doc, equalTo(0));
    assertThat(docs.scoreDocs[1].doc, equalTo(1));
    docs = searcher.search(new AllTermQuery(new Term("_all", "moo")), 10);
    assertThat(docs.totalHits, equalTo(2));
    assertThat(docs.scoreDocs[0].doc, equalTo(0));
    assertThat(docs.scoreDocs[1].doc, equalTo(1));
    indexWriter.close();
}
@Test
public void testMultipleTokensAllWithBoost() throws Exception {
    // Multi-token entries where doc 2's "else koo" entry is boosted 2.0: both of that entry's
    // tokens ("else", "koo") must rank doc 2 first, while unboosted terms keep index order.
    Directory dir = new RAMDirectory();
    IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(Lucene.VERSION, Lucene.STANDARD_ANALYZER));
    Document doc = new Document();
    doc.add(new Field("_id", "1", StoredField.TYPE));
    AllEntries allEntries = new AllEntries();
    allEntries.addText("field1", "something moo", 1.0f);
    allEntries.addText("field2", "else koo", 1.0f);
    allEntries.reset();
    doc.add(new TextField("_all", AllTokenStream.allTokenStream("_all", allEntries, Lucene.STANDARD_ANALYZER)));
    indexWriter.addDocument(doc);
    doc = new Document();
    doc.add(new Field("_id", "2", StoredField.TYPE));
    allEntries = new AllEntries();
    allEntries.addText("field1", "else koo", 2.0f);
    allEntries.addText("field2", "something moo", 1.0f);
    allEntries.reset();
    doc.add(new TextField("_all", AllTokenStream.allTokenStream("_all", allEntries, Lucene.STANDARD_ANALYZER)));
    indexWriter.addDocument(doc);
    IndexReader reader = DirectoryReader.open(indexWriter, true);
    IndexSearcher searcher = new IndexSearcher(reader);
    TopDocs docs = searcher.search(new AllTermQuery(new Term("_all", "else")), 10);
    assertThat(docs.totalHits, equalTo(2));
    assertThat(docs.scoreDocs[0].doc, equalTo(1));
    assertThat(docs.scoreDocs[1].doc, equalTo(0));
    docs = searcher.search(new AllTermQuery(new Term("_all", "koo")), 10);
    assertThat(docs.totalHits, equalTo(2));
    assertThat(docs.scoreDocs[0].doc, equalTo(1));
    assertThat(docs.scoreDocs[1].doc, equalTo(0));
    docs = searcher.search(new AllTermQuery(new Term("_all", "something")), 10);
    assertThat(docs.totalHits, equalTo(2));
    assertThat(docs.scoreDocs[0].doc, equalTo(0));
    assertThat(docs.scoreDocs[1].doc, equalTo(1));
    docs = searcher.search(new AllTermQuery(new Term("_all", "moo")), 10);
    assertThat(docs.totalHits, equalTo(2));
    assertThat(docs.scoreDocs[0].doc, equalTo(0));
    assertThat(docs.scoreDocs[1].doc, equalTo(1));
    indexWriter.close();
}
/**
 * Verifies that a document whose _all stream produces no tokens (empty AllEntries) is still
 * indexed and findable via MatchAllDocsQuery.
 */
@Test
public void testNoTokensWithKeywordAnalyzer() throws Exception {
    Directory dir = new RAMDirectory();
    IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(Lucene.VERSION, Lucene.KEYWORD_ANALYZER));
    Document doc = new Document();
    doc.add(new Field("_id", "1", StoredField.TYPE));
    AllEntries allEntries = new AllEntries();
    allEntries.reset();
    doc.add(new TextField("_all", AllTokenStream.allTokenStream("_all", allEntries, Lucene.KEYWORD_ANALYZER)));
    indexWriter.addDocument(doc);
    IndexReader reader = DirectoryReader.open(indexWriter, true);
    IndexSearcher searcher = new IndexSearcher(reader);
    TopDocs docs = searcher.search(new MatchAllDocsQuery(), 10);
    assertThat(docs.totalHits, equalTo(1));
    assertThat(docs.scoreDocs[0].doc, equalTo(0));
    // Close the writer like every sibling test does; previously it was leaked
    indexWriter.close();
}
} | 0true
| src_test_java_org_elasticsearch_common_lucene_all_SimpleAllTests.java |
1,465 | public final class EntityRegionAccessStrategyAdapter implements EntityRegionAccessStrategy {
// Adapts Hibernate's EntityRegionAccessStrategy SPI onto a Hazelcast-backed AccessDelegate.
// Every operation below is a verbatim one-line delegation; this class adds no behavior.
private final AccessDelegate<? extends HazelcastEntityRegion> delegate;

/**
 * @param delegate the Hazelcast access delegate that performs the actual cache work
 */
public EntityRegionAccessStrategyAdapter(final AccessDelegate<? extends HazelcastEntityRegion> delegate) {
    this.delegate = delegate;
}

public boolean afterInsert(final Object key, final Object value, final Object version) throws CacheException {
    return delegate.afterInsert(key, value, version);
}

public boolean afterUpdate(final Object key, final Object value, final Object currentVersion,
        final Object previousVersion, final SoftLock lock) throws CacheException {
    return delegate.afterUpdate(key, value, currentVersion, previousVersion, lock);
}

public void evict(final Object key) throws CacheException {
    delegate.evict(key);
}

public void evictAll() throws CacheException {
    delegate.evictAll();
}

public Object get(final Object key, final long txTimestamp) throws CacheException {
    return delegate.get(key, txTimestamp);
}

// The region exposed to Hibernate is the delegate's underlying Hazelcast region
public EntityRegion getRegion() {
    return delegate.getHazelcastRegion();
}

public boolean insert(final Object key, final Object value, final Object version) throws CacheException {
    return delegate.insert(key, value, version);
}

public SoftLock lockItem(final Object key, final Object version) throws CacheException {
    return delegate.lockItem(key, version);
}

public SoftLock lockRegion() throws CacheException {
    return delegate.lockRegion();
}

public boolean putFromLoad(final Object key, final Object value, final long txTimestamp, final Object version)
        throws CacheException {
    return delegate.putFromLoad(key, value, txTimestamp, version);
}

public boolean putFromLoad(final Object key, final Object value, final long txTimestamp, final Object version,
        final boolean minimalPutOverride) throws CacheException {
    return delegate.putFromLoad(key, value, txTimestamp, version, minimalPutOverride);
}

public void remove(final Object key) throws CacheException {
    delegate.remove(key);
}

public void removeAll() throws CacheException {
    delegate.removeAll();
}

public void unlockItem(final Object key, final SoftLock lock) throws CacheException {
    delegate.unlockItem(key, lock);
}

public void unlockRegion(final SoftLock lock) throws CacheException {
    delegate.unlockRegion(lock);
}

public boolean update(final Object key, final Object value, final Object currentVersion,
        final Object previousVersion) throws CacheException {
    return delegate.update(key, value, currentVersion, previousVersion);
}
} | 0true
| hazelcast-hibernate_hazelcast-hibernate3_src_main_java_com_hazelcast_hibernate_region_EntityRegionAccessStrategyAdapter.java |
/**
 * AST visitor that locates the named-argument list containing a given character offset.
 * Because visiting continues after a match (super.visit), the innermost enclosing
 * NamedArgumentList wins.
 */
private static class FindNamedArgumentsVisitor
        extends Visitor
        implements NaturalVisitor {
    Tree.NamedArgumentList argumentList;
    // Character offset within the source being searched
    int offset;

    private Tree.NamedArgumentList getArgumentList() {
        return argumentList;
    }

    private FindNamedArgumentsVisitor(int offset) {
        this.offset = offset;
    }

    @Override
    public void visit(Tree.NamedArgumentList that) {
        // stopIndex is inclusive, so +1 also accepts the position just past the node
        if (offset>=that.getStartIndex() &&
            offset<=that.getStopIndex()+1) {
            argumentList = that;
        }
        super.visit(that);
    }
}
| plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_correct_ConvertToPositionalArgumentsProposal.java |
93 | private static class ResourceElement
{
private Xid xid = null;
private XAResource resource = null;
private int status;
ResourceElement( Xid xid, XAResource resource )
{
this.xid = xid;
this.resource = resource;
status = RS_ENLISTED;
}
Xid getXid()
{
return xid;
}
XAResource getResource()
{
return resource;
}
int getStatus()
{
return status;
}
void setStatus( int status )
{
this.status = status;
}
@Override
public String toString()
{
String statusString;
switch ( status )
{
case RS_ENLISTED:
statusString = "ENLISTED";
break;
case RS_DELISTED:
statusString = "DELISTED";
break;
case RS_SUSPENDED:
statusString = "SUSPENDED";
break;
case RS_READONLY:
statusString = "READONLY";
break;
default:
statusString = "UNKNOWN";
}
return "Xid[" + xid + "] XAResource[" + resource + "] Status["
+ statusString + "]";
}
} | 0true
| community_kernel_src_main_java_org_neo4j_kernel_impl_transaction_ReadOnlyTransactionImpl.java |
1,053 | return new Terms() {
@Override
public TermsEnum iterator(TermsEnum reuse) throws IOException {
// convert bytes ref for the terms to actual data
return new TermsEnum() {
int currentTerm = 0;
int freq = 0;
int docFreq = -1;
long totalTermFrequency = -1;
int[] positions = new int[1];
int[] startOffsets = new int[1];
int[] endOffsets = new int[1];
BytesRef[] payloads = new BytesRef[1];
final BytesRef spare = new BytesRef();
@Override
public BytesRef next() throws IOException {
if (currentTerm++ < numTerms) {
// term string. first the size...
int termVectorSize = perFieldTermVectorInput.readVInt();
spare.grow(termVectorSize);
// ...then the value.
perFieldTermVectorInput.readBytes(spare.bytes, 0, termVectorSize);
spare.length = termVectorSize;
if (hasTermStatistic) {
docFreq = readPotentiallyNegativeVInt(perFieldTermVectorInput);
totalTermFrequency = readPotentiallyNegativeVLong(perFieldTermVectorInput);
}
freq = readPotentiallyNegativeVInt(perFieldTermVectorInput);
// grow the arrays to read the values. this is just
// for performance reasons. Re-use memory instead of
// realloc.
growBuffers();
// finally, read the values into the arrays
// curentPosition etc. so that we can just iterate
// later
writeInfos(perFieldTermVectorInput);
return spare;
} else {
return null;
}
}
private void writeInfos(final BytesStreamInput input) throws IOException {
for (int i = 0; i < freq; i++) {
if (hasPositions) {
positions[i] = input.readVInt();
}
if (hasOffsets) {
startOffsets[i] = input.readVInt();
endOffsets[i] = input.readVInt();
}
if (hasPayloads) {
int payloadLength = input.readVInt();
if (payloads[i] == null) {
payloads[i] = new BytesRef(payloadLength);
} else {
payloads[i].grow(payloadLength);
}
input.readBytes(payloads[i].bytes, 0, payloadLength);
payloads[i].length = payloadLength;
payloads[i].offset = 0;
}
}
}
private void growBuffers() {
if (hasPositions) {
positions = grow(positions, freq);
}
if (hasOffsets) {
startOffsets = grow(startOffsets, freq);
endOffsets = grow(endOffsets, freq);
}
if (hasPayloads) {
if (payloads.length < freq) {
final BytesRef[] newArray = new BytesRef[ArrayUtil.oversize(freq, RamUsageEstimator.NUM_BYTES_OBJECT_REF)];
System.arraycopy(payloads, 0, newArray, 0, payloads.length);
payloads = newArray;
}
}
}
@Override
public Comparator<BytesRef> getComparator() {
return BytesRef.getUTF8SortedAsUnicodeComparator();
}
@Override
public SeekStatus seekCeil(BytesRef text) throws IOException {
throw new UnsupportedOperationException();
}
@Override
public void seekExact(long ord) throws IOException {
throw new UnsupportedOperationException("Seek is not supported");
}
@Override
public BytesRef term() throws IOException {
return spare;
}
@Override
public long ord() throws IOException {
throw new UnsupportedOperationException("ordinals are not supported");
}
@Override
public int docFreq() throws IOException {
return docFreq;
}
@Override
public long totalTermFreq() throws IOException {
return totalTermFrequency;
}
@Override
public DocsEnum docs(Bits liveDocs, DocsEnum reuse, int flags) throws IOException {
return docsAndPositions(liveDocs, reuse instanceof DocsAndPositionsEnum ? (DocsAndPositionsEnum) reuse : null, 0);
}
@Override
public DocsAndPositionsEnum docsAndPositions(Bits liveDocs, DocsAndPositionsEnum reuse, int flags) throws IOException {
final TermVectorsDocsAndPosEnum retVal = (reuse instanceof TermVectorsDocsAndPosEnum ? (TermVectorsDocsAndPosEnum) reuse
: new TermVectorsDocsAndPosEnum());
return retVal.reset(hasPositions ? positions : null, hasOffsets ? startOffsets : null, hasOffsets ? endOffsets
: null, hasPayloads ? payloads : null, freq);
}
};
}
@Override
public Comparator<BytesRef> getComparator() {
return BytesRef.getUTF8SortedAsUnicodeComparator();
}
@Override
public long size() throws IOException {
return numTerms;
}
@Override
public long getSumTotalTermFreq() throws IOException {
return sumTotalTermFreq;
}
@Override
public long getSumDocFreq() throws IOException {
return sumDocFreq;
}
@Override
public int getDocCount() throws IOException {
return docCount;
}
@Override
public boolean hasFreqs() {
return true;
}
@Override
public boolean hasOffsets() {
return hasOffsets;
}
@Override
public boolean hasPositions() {
return hasPositions;
}
@Override
public boolean hasPayloads() {
return hasPayloads;
}
}; | 0true
| src_main_java_org_elasticsearch_action_termvector_TermVectorFields.java |
/**
 * Fluent request object describing an index template to store: its name, match pattern
 * ("template"), ordering priority, settings, mappings and custom metadata. The cause records
 * why the put was issued; create=true means fail if the template already exists.
 */
public static class PutRequest {
    final String name;
    final String cause;
    boolean create;
    int order;
    // Index name pattern this template applies to
    String template;
    Settings settings = ImmutableSettings.Builder.EMPTY_SETTINGS;
    // type name -> mapping source
    Map<String, String> mappings = Maps.newHashMap();
    Map<String, IndexMetaData.Custom> customs = Maps.newHashMap();
    TimeValue masterTimeout = MasterNodeOperationRequest.DEFAULT_MASTER_NODE_TIMEOUT;

    public PutRequest(String cause, String name) {
        this.cause = cause;
        this.name = name;
    }

    public PutRequest order(int order) {
        this.order = order;
        return this;
    }

    public PutRequest template(String template) {
        this.template = template;
        return this;
    }

    public PutRequest create(boolean create) {
        this.create = create;
        return this;
    }

    public PutRequest settings(Settings settings) {
        this.settings = settings;
        return this;
    }

    public PutRequest mappings(Map<String, String> mappings) {
        this.mappings.putAll(mappings);
        return this;
    }

    public PutRequest customs(Map<String, IndexMetaData.Custom> customs) {
        this.customs.putAll(customs);
        return this;
    }

    public PutRequest putMapping(String mappingType, String mappingSource) {
        mappings.put(mappingType, mappingSource);
        return this;
    }

    public PutRequest masterTimeout(TimeValue masterTimeout) {
        this.masterTimeout = masterTimeout;
        return this;
    }
}
| src_main_java_org_elasticsearch_cluster_metadata_MetaDataIndexTemplateService.java |
/** The four CRUD operation kinds. */
public enum OPERATION {
    CREATE, READ, UPDATE, DELETE
}
| core_src_main_java_com_orientechnologies_orient_core_storage_ORecordCallback.java |
1,712 | runnable = new Runnable() { public void run() { map.evict(null); } }; | 0true
| hazelcast_src_test_java_com_hazelcast_map_BasicMapTest.java |
/**
 * Fixed-size bit set backed by a long[] with an incrementally maintained count of set bits.
 * Bit i lives in word {@code data[i >> 6]} at position {@code i & 63} (the shift operand of
 * {@code 1L << index} only uses the low 6 bits of the index).
 */
static class BitArray {
    final long[] data;
    // Number of set bits, kept in sync by set(); recomputed in the long[] constructor
    int bitCount;

    BitArray(long bits) {
        // Round up to a whole number of 64-bit words
        this(new long[Ints.checkedCast(LongMath.divide(bits, 64, RoundingMode.CEILING))]);
    }

    // Used by serialization
    BitArray(long[] data) {
        this.data = data;
        int bitCount = 0;
        for (long value : data) {
            bitCount += Long.bitCount(value);
        }
        this.bitCount = bitCount;
    }

    /**
     * Returns true if the bit changed value.
     */
    boolean set(int index) {
        if (!get(index)) {
            data[index >> 6] |= (1L << index);
            bitCount++;
            return true;
        }
        return false;
    }

    boolean get(int index) {
        return (data[index >> 6] & (1L << index)) != 0;
    }

    /**
     * Number of bits
     */
    int size() {
        return data.length * Long.SIZE;
    }

    /**
     * Number of set bits (1s)
     */
    int bitCount() {
        return bitCount;
    }

    BitArray copy() {
        return new BitArray(data.clone());
    }

    // Equality/hashing are defined on the word array only; bitCount is derived state
    @Override
    public boolean equals(Object o) {
        if (o instanceof BitArray) {
            BitArray bitArray = (BitArray) o;
            return Arrays.equals(data, bitArray.data);
        }
        return false;
    }

    @Override
    public int hashCode() {
        return Arrays.hashCode(data);
    }
}
| src_main_java_org_elasticsearch_common_util_BloomFilter.java |
32 | static final class ParameterContextInformation
implements IContextInformation {
private final Declaration declaration;
private final ProducedReference producedReference;
private final ParameterList parameterList;
private final int argumentListOffset;
private final Unit unit;
private final boolean includeDefaulted;
// private final boolean inLinkedMode;
private final boolean namedInvocation;
private ParameterContextInformation(Declaration declaration,
ProducedReference producedReference, Unit unit,
ParameterList parameterList, int argumentListOffset,
boolean includeDefaulted, boolean namedInvocation) {
// boolean inLinkedMode
this.declaration = declaration;
this.producedReference = producedReference;
this.unit = unit;
this.parameterList = parameterList;
this.argumentListOffset = argumentListOffset;
this.includeDefaulted = includeDefaulted;
// this.inLinkedMode = inLinkedMode;
this.namedInvocation = namedInvocation;
}
@Override
public String getContextDisplayString() {
return "Parameters of '" + declaration.getName() + "'";
}
@Override
public Image getImage() {
return getImageForDeclaration(declaration);
}
@Override
public String getInformationDisplayString() {
List<Parameter> ps = getParameters(parameterList,
includeDefaulted, namedInvocation);
if (ps.isEmpty()) {
return "no parameters";
}
StringBuilder result = new StringBuilder();
for (Parameter p: ps) {
boolean isListedValues = namedInvocation &&
p==ps.get(ps.size()-1) &&
p.getModel() instanceof Value &&
p.getType()!=null &&
unit.isIterableParameterType(p.getType());
if (includeDefaulted || !p.isDefaulted() ||
isListedValues) {
if (producedReference==null) {
result.append(p.getName());
}
else {
ProducedTypedReference pr =
producedReference.getTypedParameter(p);
appendParameterContextInfo(result, pr, p, unit,
namedInvocation, isListedValues);
}
if (!isListedValues) {
result.append(namedInvocation ? "; " : ", ");
}
}
}
if (!namedInvocation && result.length()>0) {
result.setLength(result.length()-2);
}
return result.toString();
}
@Override
public boolean equals(Object that) {
if (that instanceof ParameterContextInformation) {
return ((ParameterContextInformation) that).declaration
.equals(declaration);
}
else {
return false;
}
}
int getArgumentListOffset() {
return argumentListOffset;
}
} | 0true
| plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_complete_InvocationCompletionProposal.java |
1,024 | public abstract class SingleShardOperationRequestBuilder<Request extends SingleShardOperationRequest<Request>, Response extends ActionResponse, RequestBuilder extends SingleShardOperationRequestBuilder<Request, Response, RequestBuilder>>
extends ActionRequestBuilder<Request, Response, RequestBuilder> {
protected SingleShardOperationRequestBuilder(InternalGenericClient client, Request request) {
super(client, request);
}
/**
* Sets the index.
*/
@SuppressWarnings("unchecked")
public final RequestBuilder setIndex(String index) {
request.index(index);
return (RequestBuilder) this;
}
/**
* Controls if the operation will be executed on a separate thread when executed locally.
*/
@SuppressWarnings("unchecked")
public final RequestBuilder setOperationThreaded(boolean threadedOperation) {
request.operationThreaded(threadedOperation);
return (RequestBuilder) this;
}
} | 0true
| src_main_java_org_elasticsearch_action_support_single_shard_SingleShardOperationRequestBuilder.java |
1,024 | public abstract class AbstractXmlConfigHelper {
private final static ILogger logger = Logger.getLogger(AbstractXmlConfigHelper.class);
protected boolean domLevel3 = true;
public static class IterableNodeList implements Iterable<Node> {
private final NodeList parent;
private final int maximum;
private final short nodeType;
public IterableNodeList(final Node node) {
this(node.getChildNodes());
}
public IterableNodeList(final NodeList list) {
this(list, (short) 0);
}
public IterableNodeList(final Node node, short nodeType) {
this(node.getChildNodes(), nodeType);
}
public IterableNodeList(final NodeList parent, short nodeType) {
this.parent = parent;
this.nodeType = nodeType;
this.maximum = parent.getLength();
}
public Iterator<Node> iterator() {
return new Iterator<Node>() {
private int index = 0;
private Node next;
private boolean findNext() {
next = null;
for (; index < maximum; index++) {
final Node item = parent.item(index);
if (nodeType == 0 || item.getNodeType() == nodeType) {
next = item;
return true;
}
}
return false;
}
public boolean hasNext() {
return findNext();
}
public Node next() {
if (findNext()) {
index++;
return next;
}
throw new NoSuchElementException();
}
public void remove() {
throw new UnsupportedOperationException();
}
};
}
}
protected String xmlToJavaName(final String name) {
final StringBuilder builder = new StringBuilder();
final char[] charArray = name.toCharArray();
boolean dash = false;
final StringBuilder token = new StringBuilder();
for (char aCharArray : charArray) {
if (aCharArray == '-') {
appendToken(builder, token);
dash = true;
continue;
}
token.append(dash ? Character.toUpperCase(aCharArray) : aCharArray);
dash = false;
}
appendToken(builder, token);
return builder.toString();
}
protected void appendToken(final StringBuilder builder, final StringBuilder token) {
String string = token.toString();
if ("Jvm".equals(string)) {
string = "JVM";
}
builder.append(string);
token.setLength(0);
}
protected String getTextContent(final Node node) {
if (node != null) {
final String text;
if (domLevel3) {
text = node.getTextContent();
} else {
text = getTextContentOld(node);
}
return text != null ? text.trim() : "";
}
return "";
}
private String getTextContentOld(final Node node) {
final Node child = node.getFirstChild();
if (child != null) {
final Node next = child.getNextSibling();
if (next == null) {
return hasTextContent(child) ? child.getNodeValue() : "";
}
final StringBuilder buf = new StringBuilder();
appendTextContents(node, buf);
return buf.toString();
}
return "";
}
private void appendTextContents(final Node node, final StringBuilder buf) {
Node child = node.getFirstChild();
while (child != null) {
if (hasTextContent(child)) {
buf.append(child.getNodeValue());
}
child = child.getNextSibling();
}
}
protected final boolean hasTextContent(final Node node) {
final short nodeType = node.getNodeType();
return nodeType != Node.COMMENT_NODE && nodeType != Node.PROCESSING_INSTRUCTION_NODE;
}
public final String cleanNodeName(final Node node) {
return cleanNodeName(node.getNodeName());
}
public static String cleanNodeName(final String nodeName) {
String name = nodeName;
if (name != null) {
name = nodeName.replaceAll("\\w+:", "").toLowerCase();
}
return name;
}
protected boolean checkTrue(final String value) {
return "true".equalsIgnoreCase(value) ||
"yes".equalsIgnoreCase(value) ||
"on".equalsIgnoreCase(value);
}
protected int getIntegerValue(final String parameterName, final String value, final int defaultValue) {
try {
return Integer.parseInt(value);
} catch (final Exception e) {
logger.info( parameterName + " parameter value, [" + value
+ "], is not a proper integer. Default value, [" + defaultValue + "], will be used!");
logger.warning(e);
return defaultValue;
}
}
protected String getAttribute(org.w3c.dom.Node node, String attName) {
final Node attNode = node.getAttributes().getNamedItem(attName);
if (attNode == null)
return null;
return getTextContent(attNode);
}
protected SocketInterceptorConfig parseSocketInterceptorConfig(final org.w3c.dom.Node node) {
SocketInterceptorConfig socketInterceptorConfig = new SocketInterceptorConfig();
final NamedNodeMap atts = node.getAttributes();
final Node enabledNode = atts.getNamedItem("enabled");
final boolean enabled = enabledNode != null && checkTrue(getTextContent(enabledNode).trim());
socketInterceptorConfig.setEnabled(enabled);
for (org.w3c.dom.Node n : new IterableNodeList(node.getChildNodes())) {
final String nodeName = cleanNodeName(n.getNodeName());
if ("class-name".equals(nodeName)) {
socketInterceptorConfig.setClassName(getTextContent(n).trim());
} else if ("properties".equals(nodeName)) {
fillProperties(n, socketInterceptorConfig.getProperties());
}
}
return socketInterceptorConfig;
}
protected void fillProperties(final org.w3c.dom.Node node, Properties properties) {
if (properties == null) return;
for (org.w3c.dom.Node n : new IterableNodeList(node.getChildNodes())) {
if (n.getNodeType() == org.w3c.dom.Node.TEXT_NODE || n.getNodeType() == org.w3c.dom.Node.COMMENT_NODE) {
continue;
}
final String name = cleanNodeName(n.getNodeName());
final String propertyName;
if ("property".equals(name)) {
propertyName = getTextContent(n.getAttributes().getNamedItem("name")).trim();
} else {
// old way - probably should be deprecated
propertyName = name;
}
final String value = getTextContent(n).trim();
properties.setProperty(propertyName, value);
}
}
protected SerializationConfig parseSerialization(final Node node) {
SerializationConfig serializationConfig = new SerializationConfig();
for (org.w3c.dom.Node child : new IterableNodeList(node.getChildNodes())) {
final String name = cleanNodeName(child);
if ("portable-version".equals(name)) {
String value = getTextContent(child);
serializationConfig.setPortableVersion(getIntegerValue(name, value, 0));
} else if ("check-class-def-errors".equals(name)) {
String value = getTextContent(child);
serializationConfig.setCheckClassDefErrors(checkTrue(value));
} else if ("use-native-byte-order".equals(name)) {
serializationConfig.setUseNativeByteOrder(checkTrue(getTextContent(child)));
} else if ("byte-order".equals(name)) {
String value = getTextContent(child);
ByteOrder byteOrder = null;
if (ByteOrder.BIG_ENDIAN.toString().equals(value)) {
byteOrder = ByteOrder.BIG_ENDIAN;
} else if (ByteOrder.LITTLE_ENDIAN.toString().equals(value)) {
byteOrder = ByteOrder.LITTLE_ENDIAN;
}
serializationConfig.setByteOrder(byteOrder != null ? byteOrder : ByteOrder.BIG_ENDIAN);
} else if ("enable-compression".equals(name)) {
serializationConfig.setEnableCompression(checkTrue(getTextContent(child)));
} else if ("enable-shared-object".equals(name)) {
serializationConfig.setEnableSharedObject(checkTrue(getTextContent(child)));
} else if ("allow-unsafe".equals(name)) {
serializationConfig.setAllowUnsafe(checkTrue(getTextContent(child)));
} else if ("data-serializable-factories".equals(name)) {
fillDataSerializableFactories(child, serializationConfig);
} else if ("portable-factories".equals(name)) {
fillPortableFactories(child, serializationConfig);
} else if ("serializers".equals(name)) {
fillSerializers(child, serializationConfig);
}
}
return serializationConfig;
}
protected void fillDataSerializableFactories(Node node, SerializationConfig serializationConfig) {
for (org.w3c.dom.Node child : new IterableNodeList(node.getChildNodes())) {
final String name = cleanNodeName(child);
if ("data-serializable-factory".equals(name)) {
final String value = getTextContent(child);
final Node factoryIdNode = child.getAttributes().getNamedItem("factory-id");
if (factoryIdNode == null) {
throw new IllegalArgumentException("'factory-id' attribute of 'data-serializable-factory' is required!");
}
int factoryId = Integer.parseInt(getTextContent(factoryIdNode));
serializationConfig.addDataSerializableFactoryClass(factoryId, value);
}
}
}
protected void fillPortableFactories(Node node, SerializationConfig serializationConfig) {
for (org.w3c.dom.Node child : new IterableNodeList(node.getChildNodes())) {
final String name = cleanNodeName(child);
if ("portable-factory".equals(name)) {
final String value = getTextContent(child);
final Node factoryIdNode = child.getAttributes().getNamedItem("factory-id");
if (factoryIdNode == null) {
throw new IllegalArgumentException("'factory-id' attribute of 'portable-factory' is required!");
}
int factoryId = Integer.parseInt(getTextContent(factoryIdNode));
serializationConfig.addPortableFactoryClass(factoryId, value);
}
}
}
protected void fillSerializers(final Node node, SerializationConfig serializationConfig) {
for (org.w3c.dom.Node child : new IterableNodeList(node.getChildNodes())) {
final String name = cleanNodeName(child);
final String value = getTextContent(child);
if ("serializer".equals(name)) {
SerializerConfig serializerConfig = new SerializerConfig();
serializerConfig.setClassName(value);
final String typeClassName = getAttribute(child, "type-class");
serializerConfig.setTypeClassName(typeClassName);
serializationConfig.addSerializerConfig(serializerConfig);
} else if ("global-serializer".equals(name)) {
GlobalSerializerConfig globalSerializerConfig = new GlobalSerializerConfig();
globalSerializerConfig.setClassName(value);
serializationConfig.setGlobalSerializerConfig(globalSerializerConfig);
}
}
}
} | 0true
| hazelcast_src_main_java_com_hazelcast_config_AbstractXmlConfigHelper.java |
1,320 | return new DataSerializableFactory() {
@Override
public IdentifiedDataSerializable create(int typeId) {
switch (typeId) {
case CALLABLE_TASK:
return new CallableTaskOperation();
case MEMBER_CALLABLE_TASK:
return new MemberCallableTaskOperation();
case RUNNABLE_ADAPTER:
return new RunnableAdapter();
default:
return null;
}
}
}; | 0true
| hazelcast_src_main_java_com_hazelcast_executor_ExecutorDataSerializerHook.java |
707 | "Maximum memory used by Disk Cache", METRIC_TYPE.SIZE, new OProfilerHookValue() {
@Override
public Object getValue() {
return maxSize * pageSize;
}
}, profiler.getDatabaseMetric(null, "diskCache.maxMemory")); | 0true
| core_src_main_java_com_orientechnologies_orient_core_index_hashindex_local_cache_OReadWriteDiskCache.java |
1,087 | public class OSQLFilterItemParameter implements OSQLFilterItem {
private final String name;
private Object value = NOT_SETTED;
private static final String NOT_SETTED = "?";
public OSQLFilterItemParameter(final String iName) {
this.name = iName;
}
public Object getValue(final OIdentifiable iRecord, OCommandContext iContetx) {
return value;
}
@Override
public String toString() {
if (value == NOT_SETTED)
return name.equals("?") ? "?" : ":" + name;
else
return value == null ? "null" : value.toString();
}
public String getName() {
return name;
}
public void setValue(Object value) {
this.value = value;
}
} | 0true
| core_src_main_java_com_orientechnologies_orient_core_sql_filter_OSQLFilterItemParameter.java |
3,409 | public static enum Type {
GENERATED,
SAVED
} | 0true
| src_main_java_org_elasticsearch_index_gateway_CommitPoint.java |
1 | public class CompletableFuture<T> implements Future<T> {
// jsr166e nested interfaces
/** Interface describing a void action of one argument */
public interface Action<A> { void accept(A a); }
/** Interface describing a void action of two arguments */
public interface BiAction<A,B> { void accept(A a, B b); }
/** Interface describing a function of one argument */
public interface Fun<A,T> { T apply(A a); }
/** Interface describing a function of two arguments */
public interface BiFun<A,B,T> { T apply(A a, B b); }
/** Interface describing a function of no arguments */
public interface Generator<T> { T get(); }
/*
* Overview:
*
* 1. Non-nullness of field result (set via CAS) indicates done.
* An AltResult is used to box null as a result, as well as to
* hold exceptions. Using a single field makes completion fast
* and simple to detect and trigger, at the expense of a lot of
* encoding and decoding that infiltrates many methods. One minor
* simplification relies on the (static) NIL (to box null results)
* being the only AltResult with a null exception field, so we
* don't usually need explicit comparisons with NIL. The CF
* exception propagation mechanics surrounding decoding rely on
* unchecked casts of decoded results really being unchecked,
* where user type errors are caught at point of use, as is
* currently the case in Java. These are highlighted by using
* SuppressWarnings-annotated temporaries.
*
* 2. Waiters are held in a Treiber stack similar to the one used
* in FutureTask, Phaser, and SynchronousQueue. See their
* internal documentation for algorithmic details.
*
* 3. Completions are also kept in a list/stack, and pulled off
* and run when completion is triggered. (We could even use the
* same stack as for waiters, but would give up the potential
* parallelism obtained because woken waiters help release/run
* others -- see method postComplete). Because post-processing
* may race with direct calls, class Completion opportunistically
* extends AtomicInteger so callers can claim the action via
* compareAndSet(0, 1). The Completion.run methods are all
* written a boringly similar uniform way (that sometimes includes
* unnecessary-looking checks, kept to maintain uniformity).
* There are enough dimensions upon which they differ that
* attempts to factor commonalities while maintaining efficiency
* require more lines of code than they would save.
*
* 4. The exported then/and/or methods do support a bit of
* factoring (see doThenApply etc). They must cope with the
* intrinsic races surrounding addition of a dependent action
* versus performing the action directly because the task is
* already complete. For example, a CF may not be complete upon
* entry, so a dependent completion is added, but by the time it
* is added, the target CF is complete, so must be directly
* executed. This is all done while avoiding unnecessary object
* construction in safe-bypass cases.
*/
// preliminaries
static final class AltResult {
final Throwable ex; // null only for NIL
AltResult(Throwable ex) { this.ex = ex; }
}
static final AltResult NIL = new AltResult(null);
// Fields
volatile Object result; // Either the result or boxed AltResult
volatile WaitNode waiters; // Treiber stack of threads blocked on get()
volatile CompletionNode completions; // list (Treiber stack) of completions
// Basic utilities for triggering and processing completions
/**
* Removes and signals all waiting threads and runs all completions.
*/
final void postComplete() {
WaitNode q; Thread t;
while ((q = waiters) != null) {
if (UNSAFE.compareAndSwapObject(this, WAITERS, q, q.next) &&
(t = q.thread) != null) {
q.thread = null;
LockSupport.unpark(t);
}
}
CompletionNode h; Completion c;
while ((h = completions) != null) {
if (UNSAFE.compareAndSwapObject(this, COMPLETIONS, h, h.next) &&
(c = h.completion) != null)
c.run();
}
}
/**
* Triggers completion with the encoding of the given arguments:
* if the exception is non-null, encodes it as a wrapped
* CompletionException unless it is one already. Otherwise uses
* the given result, boxed as NIL if null.
*/
final void internalComplete(T v, Throwable ex) {
if (result == null)
UNSAFE.compareAndSwapObject
(this, RESULT, null,
(ex == null) ? (v == null) ? NIL : v :
new AltResult((ex instanceof CompletionException) ? ex :
new CompletionException(ex)));
postComplete(); // help out even if not triggered
}
/**
* If triggered, helps release and/or process completions.
*/
final void helpPostComplete() {
if (result != null)
postComplete();
}
/* ------------- waiting for completions -------------- */
/** Number of processors, for spin control */
static final int NCPU = Runtime.getRuntime().availableProcessors();
/**
* Heuristic spin value for waitingGet() before blocking on
* multiprocessors
*/
static final int SPINS = (NCPU > 1) ? 1 << 8 : 0;
/**
* Linked nodes to record waiting threads in a Treiber stack. See
* other classes such as Phaser and SynchronousQueue for more
* detailed explanation. This class implements ManagedBlocker to
* avoid starvation when blocking actions pile up in
* ForkJoinPools.
*/
static final class WaitNode implements ForkJoinPool.ManagedBlocker {
long nanos; // wait time if timed
final long deadline; // non-zero if timed
volatile int interruptControl; // > 0: interruptible, < 0: interrupted
volatile Thread thread;
volatile WaitNode next;
WaitNode(boolean interruptible, long nanos, long deadline) {
this.thread = Thread.currentThread();
this.interruptControl = interruptible ? 1 : 0;
this.nanos = nanos;
this.deadline = deadline;
}
public boolean isReleasable() {
if (thread == null)
return true;
if (Thread.interrupted()) {
int i = interruptControl;
interruptControl = -1;
if (i > 0)
return true;
}
if (deadline != 0L &&
(nanos <= 0L || (nanos = deadline - System.nanoTime()) <= 0L)) {
thread = null;
return true;
}
return false;
}
public boolean block() {
if (isReleasable())
return true;
else if (deadline == 0L)
LockSupport.park(this);
else if (nanos > 0L)
LockSupport.parkNanos(this, nanos);
return isReleasable();
}
}
/**
* Returns raw result after waiting, or null if interruptible and
* interrupted.
*/
private Object waitingGet(boolean interruptible) {
WaitNode q = null;
boolean queued = false;
int spins = SPINS;
for (Object r;;) {
if ((r = result) != null) {
if (q != null) { // suppress unpark
q.thread = null;
if (q.interruptControl < 0) {
if (interruptible) {
removeWaiter(q);
return null;
}
Thread.currentThread().interrupt();
}
}
postComplete(); // help release others
return r;
}
else if (spins > 0) {
int rnd = ThreadLocalRandom.current().nextInt();
if (rnd >= 0)
--spins;
}
else if (q == null)
q = new WaitNode(interruptible, 0L, 0L);
else if (!queued)
queued = UNSAFE.compareAndSwapObject(this, WAITERS,
q.next = waiters, q);
else if (interruptible && q.interruptControl < 0) {
removeWaiter(q);
return null;
}
else if (q.thread != null && result == null) {
try {
ForkJoinPool.managedBlock(q);
} catch (InterruptedException ex) {
q.interruptControl = -1;
}
}
}
}
/**
* Awaits completion or aborts on interrupt or timeout.
*
* @param nanos time to wait
* @return raw result
*/
private Object timedAwaitDone(long nanos)
throws InterruptedException, TimeoutException {
WaitNode q = null;
boolean queued = false;
for (Object r;;) {
if ((r = result) != null) {
if (q != null) {
q.thread = null;
if (q.interruptControl < 0) {
removeWaiter(q);
throw new InterruptedException();
}
}
postComplete();
return r;
}
else if (q == null) {
if (nanos <= 0L)
throw new TimeoutException();
long d = System.nanoTime() + nanos;
q = new WaitNode(true, nanos, d == 0L ? 1L : d); // avoid 0
}
else if (!queued)
queued = UNSAFE.compareAndSwapObject(this, WAITERS,
q.next = waiters, q);
else if (q.interruptControl < 0) {
removeWaiter(q);
throw new InterruptedException();
}
else if (q.nanos <= 0L) {
if (result == null) {
removeWaiter(q);
throw new TimeoutException();
}
}
else if (q.thread != null && result == null) {
try {
ForkJoinPool.managedBlock(q);
} catch (InterruptedException ex) {
q.interruptControl = -1;
}
}
}
}
/**
* Tries to unlink a timed-out or interrupted wait node to avoid
* accumulating garbage. Internal nodes are simply unspliced
* without CAS since it is harmless if they are traversed anyway
* by releasers. To avoid effects of unsplicing from already
* removed nodes, the list is retraversed in case of an apparent
* race. This is slow when there are a lot of nodes, but we don't
* expect lists to be long enough to outweigh higher-overhead
* schemes.
*/
private void removeWaiter(WaitNode node) {
if (node != null) {
node.thread = null;
retry:
for (;;) { // restart on removeWaiter race
for (WaitNode pred = null, q = waiters, s; q != null; q = s) {
s = q.next;
if (q.thread != null)
pred = q;
else if (pred != null) {
pred.next = s;
if (pred.thread == null) // check for race
continue retry;
}
else if (!UNSAFE.compareAndSwapObject(this, WAITERS, q, s))
continue retry;
}
break;
}
}
}
/* ------------- Async tasks -------------- */
/**
* A marker interface identifying asynchronous tasks produced by
* {@code async} methods. This may be useful for monitoring,
* debugging, and tracking asynchronous activities.
*
* @since 1.8
*/
public static interface AsynchronousCompletionTask {
}
/** Base class can act as either FJ or plain Runnable */
abstract static class Async extends ForkJoinTask<Void>
implements Runnable, AsynchronousCompletionTask {
public final Void getRawResult() { return null; }
public final void setRawResult(Void v) { }
public final void run() { exec(); }
}
static final class AsyncRun extends Async {
final Runnable fn;
final CompletableFuture<Void> dst;
AsyncRun(Runnable fn, CompletableFuture<Void> dst) {
this.fn = fn; this.dst = dst;
}
public final boolean exec() {
CompletableFuture<Void> d; Throwable ex;
if ((d = this.dst) != null && d.result == null) {
try {
fn.run();
ex = null;
} catch (Throwable rex) {
ex = rex;
}
d.internalComplete(null, ex);
}
return true;
}
private static final long serialVersionUID = 5232453952276885070L;
}
static final class AsyncSupply<U> extends Async {
final Generator<U> fn;
final CompletableFuture<U> dst;
AsyncSupply(Generator<U> fn, CompletableFuture<U> dst) {
this.fn = fn; this.dst = dst;
}
public final boolean exec() {
CompletableFuture<U> d; U u; Throwable ex;
if ((d = this.dst) != null && d.result == null) {
try {
u = fn.get();
ex = null;
} catch (Throwable rex) {
ex = rex;
u = null;
}
d.internalComplete(u, ex);
}
return true;
}
private static final long serialVersionUID = 5232453952276885070L;
}
static final class AsyncApply<T,U> extends Async {
final T arg;
final Fun<? super T,? extends U> fn;
final CompletableFuture<U> dst;
AsyncApply(T arg, Fun<? super T,? extends U> fn,
CompletableFuture<U> dst) {
this.arg = arg; this.fn = fn; this.dst = dst;
}
public final boolean exec() {
CompletableFuture<U> d; U u; Throwable ex;
if ((d = this.dst) != null && d.result == null) {
try {
u = fn.apply(arg);
ex = null;
} catch (Throwable rex) {
ex = rex;
u = null;
}
d.internalComplete(u, ex);
}
return true;
}
private static final long serialVersionUID = 5232453952276885070L;
}
static final class AsyncCombine<T,U,V> extends Async {
final T arg1;
final U arg2;
final BiFun<? super T,? super U,? extends V> fn;
final CompletableFuture<V> dst;
AsyncCombine(T arg1, U arg2,
BiFun<? super T,? super U,? extends V> fn,
CompletableFuture<V> dst) {
this.arg1 = arg1; this.arg2 = arg2; this.fn = fn; this.dst = dst;
}
public final boolean exec() {
CompletableFuture<V> d; V v; Throwable ex;
if ((d = this.dst) != null && d.result == null) {
try {
v = fn.apply(arg1, arg2);
ex = null;
} catch (Throwable rex) {
ex = rex;
v = null;
}
d.internalComplete(v, ex);
}
return true;
}
private static final long serialVersionUID = 5232453952276885070L;
}
static final class AsyncAccept<T> extends Async {
final T arg;
final Action<? super T> fn;
final CompletableFuture<Void> dst;
AsyncAccept(T arg, Action<? super T> fn,
CompletableFuture<Void> dst) {
this.arg = arg; this.fn = fn; this.dst = dst;
}
public final boolean exec() {
CompletableFuture<Void> d; Throwable ex;
if ((d = this.dst) != null && d.result == null) {
try {
fn.accept(arg);
ex = null;
} catch (Throwable rex) {
ex = rex;
}
d.internalComplete(null, ex);
}
return true;
}
private static final long serialVersionUID = 5232453952276885070L;
}
static final class AsyncAcceptBoth<T,U> extends Async {
final T arg1;
final U arg2;
final BiAction<? super T,? super U> fn;
final CompletableFuture<Void> dst;
AsyncAcceptBoth(T arg1, U arg2,
BiAction<? super T,? super U> fn,
CompletableFuture<Void> dst) {
this.arg1 = arg1; this.arg2 = arg2; this.fn = fn; this.dst = dst;
}
public final boolean exec() {
CompletableFuture<Void> d; Throwable ex;
if ((d = this.dst) != null && d.result == null) {
try {
fn.accept(arg1, arg2);
ex = null;
} catch (Throwable rex) {
ex = rex;
}
d.internalComplete(null, ex);
}
return true;
}
private static final long serialVersionUID = 5232453952276885070L;
}
static final class AsyncCompose<T,U> extends Async {
final T arg;
final Fun<? super T, CompletableFuture<U>> fn;
final CompletableFuture<U> dst;
AsyncCompose(T arg,
Fun<? super T, CompletableFuture<U>> fn,
CompletableFuture<U> dst) {
this.arg = arg; this.fn = fn; this.dst = dst;
}
public final boolean exec() {
CompletableFuture<U> d, fr; U u; Throwable ex;
if ((d = this.dst) != null && d.result == null) {
try {
fr = fn.apply(arg);
ex = (fr == null) ? new NullPointerException() : null;
} catch (Throwable rex) {
ex = rex;
fr = null;
}
if (ex != null)
u = null;
else {
Object r = fr.result;
if (r == null)
r = fr.waitingGet(false);
if (r instanceof AltResult) {
ex = ((AltResult)r).ex;
u = null;
}
else {
@SuppressWarnings("unchecked") U ur = (U) r;
u = ur;
}
}
d.internalComplete(u, ex);
}
return true;
}
private static final long serialVersionUID = 5232453952276885070L;
}
/* ------------- Completions -------------- */
/**
* Simple linked list nodes to record completions, used in
* basically the same way as WaitNodes. (We separate nodes from
* the Completions themselves mainly because for the And and Or
* methods, the same Completion object resides in two lists.)
*/
static final class CompletionNode {
final Completion completion;
volatile CompletionNode next;
CompletionNode(Completion completion) { this.completion = completion; }
}
// Opportunistically subclass AtomicInteger to use compareAndSet to claim.
abstract static class Completion extends AtomicInteger implements Runnable {
}
static final class ThenApply<T,U> extends Completion {
final CompletableFuture<? extends T> src;
final Fun<? super T,? extends U> fn;
final CompletableFuture<U> dst;
final Executor executor;
ThenApply(CompletableFuture<? extends T> src,
Fun<? super T,? extends U> fn,
CompletableFuture<U> dst,
Executor executor) {
this.src = src; this.fn = fn; this.dst = dst;
this.executor = executor;
}
public final void run() {
final CompletableFuture<? extends T> a;
final Fun<? super T,? extends U> fn;
final CompletableFuture<U> dst;
Object r; T t; Throwable ex;
if ((dst = this.dst) != null &&
(fn = this.fn) != null &&
(a = this.src) != null &&
(r = a.result) != null &&
compareAndSet(0, 1)) {
if (r instanceof AltResult) {
ex = ((AltResult)r).ex;
t = null;
}
else {
ex = null;
@SuppressWarnings("unchecked") T tr = (T) r;
t = tr;
}
Executor e = executor;
U u = null;
if (ex == null) {
try {
if (e != null)
e.execute(new AsyncApply<T,U>(t, fn, dst));
else
u = fn.apply(t);
} catch (Throwable rex) {
ex = rex;
}
}
if (e == null || ex != null)
dst.internalComplete(u, ex);
}
}
private static final long serialVersionUID = 5232453952276885070L;
}
static final class ThenAccept<T> extends Completion {
final CompletableFuture<? extends T> src;
final Action<? super T> fn;
final CompletableFuture<Void> dst;
final Executor executor;
ThenAccept(CompletableFuture<? extends T> src,
Action<? super T> fn,
CompletableFuture<Void> dst,
Executor executor) {
this.src = src; this.fn = fn; this.dst = dst;
this.executor = executor;
}
public final void run() {
final CompletableFuture<? extends T> a;
final Action<? super T> fn;
final CompletableFuture<Void> dst;
Object r; T t; Throwable ex;
if ((dst = this.dst) != null &&
(fn = this.fn) != null &&
(a = this.src) != null &&
(r = a.result) != null &&
compareAndSet(0, 1)) {
if (r instanceof AltResult) {
ex = ((AltResult)r).ex;
t = null;
}
else {
ex = null;
@SuppressWarnings("unchecked") T tr = (T) r;
t = tr;
}
Executor e = executor;
if (ex == null) {
try {
if (e != null)
e.execute(new AsyncAccept<T>(t, fn, dst));
else
fn.accept(t);
} catch (Throwable rex) {
ex = rex;
}
}
if (e == null || ex != null)
dst.internalComplete(null, ex);
}
}
private static final long serialVersionUID = 5232453952276885070L;
}
/**
 * Completion action: when {@code src} completes, runs {@code fn}
 * (ignoring src's value) and then completes {@code dst}.
 */
static final class ThenRun extends Completion {
    final CompletableFuture<?> src;
    final Runnable fn;
    final CompletableFuture<Void> dst;
    final Executor executor;     // null => run synchronously
    ThenRun(CompletableFuture<?> src,
            Runnable fn,
            CompletableFuture<Void> dst,
            Executor executor) {
        this.src = src; this.fn = fn; this.dst = dst;
        this.executor = executor;
    }
    public final void run() {
        final CompletableFuture<?> a;
        final Runnable fn;
        final CompletableFuture<Void> dst;
        Object r; Throwable ex;
        // Fire only once: source must be complete, and the CAS 0->1
        // claims this completion against concurrent triggers.
        if ((dst = this.dst) != null &&
            (fn = this.fn) != null &&
            (a = this.src) != null &&
            (r = a.result) != null &&
            compareAndSet(0, 1)) {
            if (r instanceof AltResult)
                ex = ((AltResult)r).ex;   // propagate source exception, skip fn
            else
                ex = null;
            Executor e = executor;
            if (ex == null) {
                try {
                    if (e != null)
                        e.execute(new AsyncRun(fn, dst));  // task completes dst
                    else
                        fn.run();
                } catch (Throwable rex) {
                    ex = rex;
                }
            }
            if (e == null || ex != null)
                dst.internalComplete(null, ex);
        }
    }
    private static final long serialVersionUID = 5232453952276885070L;
}
/**
 * Completion action for {@code thenCombine}: fires once both
 * {@code src} and {@code snd} are complete, applies {@code fn} to
 * their two results, and completes {@code dst} with the outcome.
 * If either source completed exceptionally, the first exception
 * found (src checked before snd) propagates and fn is skipped.
 */
static final class ThenCombine<T,U,V> extends Completion {
    final CompletableFuture<? extends T> src;
    final CompletableFuture<? extends U> snd;
    final BiFun<? super T,? super U,? extends V> fn;
    final CompletableFuture<V> dst;
    final Executor executor;    // null => run synchronously
    ThenCombine(CompletableFuture<? extends T> src,
                CompletableFuture<? extends U> snd,
                BiFun<? super T,? super U,? extends V> fn,
                CompletableFuture<V> dst,
                Executor executor) {
        this.src = src; this.snd = snd;
        this.fn = fn; this.dst = dst;
        this.executor = executor;
    }
    public final void run() {
        final CompletableFuture<? extends T> a;
        final CompletableFuture<? extends U> b;
        final BiFun<? super T,? super U,? extends V> fn;
        final CompletableFuture<V> dst;
        Object r, s; T t; U u; Throwable ex;
        // Requires BOTH results to be non-null (both complete); the CAS
        // claims the action exactly once across the two trigger paths.
        if ((dst = this.dst) != null &&
            (fn = this.fn) != null &&
            (a = this.src) != null &&
            (r = a.result) != null &&
            (b = this.snd) != null &&
            (s = b.result) != null &&
            compareAndSet(0, 1)) {
            if (r instanceof AltResult) {
                ex = ((AltResult)r).ex;
                t = null;
            }
            else {
                ex = null;
                @SuppressWarnings("unchecked") T tr = (T) r;
                t = tr;
            }
            if (ex != null)
                u = null;    // src's exception wins; don't decode snd
            else if (s instanceof AltResult) {
                ex = ((AltResult)s).ex;
                u = null;
            }
            else {
                @SuppressWarnings("unchecked") U us = (U) s;
                u = us;
            }
            Executor e = executor;
            V v = null;
            if (ex == null) {
                try {
                    if (e != null)
                        e.execute(new AsyncCombine<T,U,V>(t, u, fn, dst)); // task completes dst
                    else
                        v = fn.apply(t, u);
                } catch (Throwable rex) {
                    ex = rex;
                }
            }
            if (e == null || ex != null)
                dst.internalComplete(v, ex);
        }
    }
    private static final long serialVersionUID = 5232453952276885070L;
}
/**
 * Completion action for {@code thenAcceptBoth}: fires once both
 * sources are complete, invokes {@code fn.accept} with both results,
 * and completes {@code dst} (with null). The first exception found
 * (src before snd) propagates and fn is skipped.
 */
static final class ThenAcceptBoth<T,U> extends Completion {
    final CompletableFuture<? extends T> src;
    final CompletableFuture<? extends U> snd;
    final BiAction<? super T,? super U> fn;
    final CompletableFuture<Void> dst;
    final Executor executor;    // null => run synchronously
    ThenAcceptBoth(CompletableFuture<? extends T> src,
                   CompletableFuture<? extends U> snd,
                   BiAction<? super T,? super U> fn,
                   CompletableFuture<Void> dst,
                   Executor executor) {
        this.src = src; this.snd = snd;
        this.fn = fn; this.dst = dst;
        this.executor = executor;
    }
    public final void run() {
        final CompletableFuture<? extends T> a;
        final CompletableFuture<? extends U> b;
        final BiAction<? super T,? super U> fn;
        final CompletableFuture<Void> dst;
        Object r, s; T t; U u; Throwable ex;
        // Both results must be present; CAS 0->1 claims at-most-once.
        if ((dst = this.dst) != null &&
            (fn = this.fn) != null &&
            (a = this.src) != null &&
            (r = a.result) != null &&
            (b = this.snd) != null &&
            (s = b.result) != null &&
            compareAndSet(0, 1)) {
            if (r instanceof AltResult) {
                ex = ((AltResult)r).ex;
                t = null;
            }
            else {
                ex = null;
                @SuppressWarnings("unchecked") T tr = (T) r;
                t = tr;
            }
            if (ex != null)
                u = null;    // src's exception wins
            else if (s instanceof AltResult) {
                ex = ((AltResult)s).ex;
                u = null;
            }
            else {
                @SuppressWarnings("unchecked") U us = (U) s;
                u = us;
            }
            Executor e = executor;
            if (ex == null) {
                try {
                    if (e != null)
                        e.execute(new AsyncAcceptBoth<T,U>(t, u, fn, dst)); // task completes dst
                    else
                        fn.accept(t, u);
                } catch (Throwable rex) {
                    ex = rex;
                }
            }
            if (e == null || ex != null)
                dst.internalComplete(null, ex);
        }
    }
    private static final long serialVersionUID = 5232453952276885070L;
}
/**
 * Completion action for {@code runAfterBoth}: fires once both sources
 * are complete, runs {@code fn} (ignoring values), then completes
 * {@code dst}. An exception from either source (src checked first)
 * propagates and fn is skipped.
 */
static final class RunAfterBoth extends Completion {
    final CompletableFuture<?> src;
    final CompletableFuture<?> snd;
    final Runnable fn;
    final CompletableFuture<Void> dst;
    final Executor executor;    // null => run synchronously
    RunAfterBoth(CompletableFuture<?> src,
                 CompletableFuture<?> snd,
                 Runnable fn,
                 CompletableFuture<Void> dst,
                 Executor executor) {
        this.src = src; this.snd = snd;
        this.fn = fn; this.dst = dst;
        this.executor = executor;
    }
    public final void run() {
        final CompletableFuture<?> a;
        final CompletableFuture<?> b;
        final Runnable fn;
        final CompletableFuture<Void> dst;
        Object r, s; Throwable ex;
        // Both must be complete; CAS claims the action exactly once.
        if ((dst = this.dst) != null &&
            (fn = this.fn) != null &&
            (a = this.src) != null &&
            (r = a.result) != null &&
            (b = this.snd) != null &&
            (s = b.result) != null &&
            compareAndSet(0, 1)) {
            if (r instanceof AltResult)
                ex = ((AltResult)r).ex;
            else
                ex = null;
            if (ex == null && (s instanceof AltResult))
                ex = ((AltResult)s).ex;
            Executor e = executor;
            if (ex == null) {
                try {
                    if (e != null)
                        e.execute(new AsyncRun(fn, dst));  // task completes dst
                    else
                        fn.run();
                } catch (Throwable rex) {
                    ex = rex;
                }
            }
            if (e == null || ex != null)
                dst.internalComplete(null, ex);
        }
    }
    private static final long serialVersionUID = 5232453952276885070L;
}
/**
 * Internal "and" combinator: completes {@code dst} (with null) once
 * both {@code src} and {@code snd} are complete, propagating the
 * first exception found (src checked before snd). No user function
 * is involved; used to build conjunction trees.
 */
static final class AndCompletion extends Completion {
    final CompletableFuture<?> src;
    final CompletableFuture<?> snd;
    final CompletableFuture<Void> dst;
    AndCompletion(CompletableFuture<?> src,
                  CompletableFuture<?> snd,
                  CompletableFuture<Void> dst) {
        this.src = src; this.snd = snd; this.dst = dst;
    }
    public final void run() {
        final CompletableFuture<?> a;
        final CompletableFuture<?> b;
        final CompletableFuture<Void> dst;
        Object r, s; Throwable ex;
        // Both results must be non-null; CAS 0->1 fires at most once.
        if ((dst = this.dst) != null &&
            (a = this.src) != null &&
            (r = a.result) != null &&
            (b = this.snd) != null &&
            (s = b.result) != null &&
            compareAndSet(0, 1)) {
            if (r instanceof AltResult)
                ex = ((AltResult)r).ex;
            else
                ex = null;
            if (ex == null && (s instanceof AltResult))
                ex = ((AltResult)s).ex;
            dst.internalComplete(null, ex);
        }
    }
    private static final long serialVersionUID = 5232453952276885070L;
}
/**
 * Completion action for {@code applyToEither}: fires when EITHER
 * source completes (note the disjunctive guard below), applies
 * {@code fn} to that first-available result, and completes {@code dst}.
 */
static final class ApplyToEither<T,U> extends Completion {
    final CompletableFuture<? extends T> src;
    final CompletableFuture<? extends T> snd;
    final Fun<? super T,? extends U> fn;
    final CompletableFuture<U> dst;
    final Executor executor;    // null => run synchronously
    ApplyToEither(CompletableFuture<? extends T> src,
                  CompletableFuture<? extends T> snd,
                  Fun<? super T,? extends U> fn,
                  CompletableFuture<U> dst,
                  Executor executor) {
        this.src = src; this.snd = snd;
        this.fn = fn; this.dst = dst;
        this.executor = executor;
    }
    public final void run() {
        final CompletableFuture<? extends T> a;
        final CompletableFuture<? extends T> b;
        final Fun<? super T,? extends U> fn;
        final CompletableFuture<U> dst;
        Object r; T t; Throwable ex;
        // Either source's non-null result suffices; the CAS ensures only
        // the first trigger runs even if both sources complete.
        if ((dst = this.dst) != null &&
            (fn = this.fn) != null &&
            (((a = this.src) != null && (r = a.result) != null) ||
             ((b = this.snd) != null && (r = b.result) != null)) &&
            compareAndSet(0, 1)) {
            if (r instanceof AltResult) {
                ex = ((AltResult)r).ex;
                t = null;
            }
            else {
                ex = null;
                @SuppressWarnings("unchecked") T tr = (T) r;
                t = tr;
            }
            Executor e = executor;
            U u = null;
            if (ex == null) {
                try {
                    if (e != null)
                        e.execute(new AsyncApply<T,U>(t, fn, dst)); // task completes dst
                    else
                        u = fn.apply(t);
                } catch (Throwable rex) {
                    ex = rex;
                }
            }
            if (e == null || ex != null)
                dst.internalComplete(u, ex);
        }
    }
    private static final long serialVersionUID = 5232453952276885070L;
}
/**
 * Completion action for {@code acceptEither}: fires when either
 * source completes, invokes {@code fn.accept} with that result,
 * then completes {@code dst} (with null).
 */
static final class AcceptEither<T> extends Completion {
    final CompletableFuture<? extends T> src;
    final CompletableFuture<? extends T> snd;
    final Action<? super T> fn;
    final CompletableFuture<Void> dst;
    final Executor executor;    // null => run synchronously
    AcceptEither(CompletableFuture<? extends T> src,
                 CompletableFuture<? extends T> snd,
                 Action<? super T> fn,
                 CompletableFuture<Void> dst,
                 Executor executor) {
        this.src = src; this.snd = snd;
        this.fn = fn; this.dst = dst;
        this.executor = executor;
    }
    public final void run() {
        final CompletableFuture<? extends T> a;
        final CompletableFuture<? extends T> b;
        final Action<? super T> fn;
        final CompletableFuture<Void> dst;
        Object r; T t; Throwable ex;
        // First source to show a non-null result wins; CAS claims once.
        if ((dst = this.dst) != null &&
            (fn = this.fn) != null &&
            (((a = this.src) != null && (r = a.result) != null) ||
             ((b = this.snd) != null && (r = b.result) != null)) &&
            compareAndSet(0, 1)) {
            if (r instanceof AltResult) {
                ex = ((AltResult)r).ex;
                t = null;
            }
            else {
                ex = null;
                @SuppressWarnings("unchecked") T tr = (T) r;
                t = tr;
            }
            Executor e = executor;
            if (ex == null) {
                try {
                    if (e != null)
                        e.execute(new AsyncAccept<T>(t, fn, dst)); // task completes dst
                    else
                        fn.accept(t);
                } catch (Throwable rex) {
                    ex = rex;
                }
            }
            if (e == null || ex != null)
                dst.internalComplete(null, ex);
        }
    }
    private static final long serialVersionUID = 5232453952276885070L;
}
/**
 * Completion action for {@code runAfterEither}: fires when either
 * source completes, runs {@code fn} (ignoring values), then
 * completes {@code dst}.
 */
static final class RunAfterEither extends Completion {
    final CompletableFuture<?> src;
    final CompletableFuture<?> snd;
    final Runnable fn;
    final CompletableFuture<Void> dst;
    final Executor executor;    // null => run synchronously
    RunAfterEither(CompletableFuture<?> src,
                   CompletableFuture<?> snd,
                   Runnable fn,
                   CompletableFuture<Void> dst,
                   Executor executor) {
        this.src = src; this.snd = snd;
        this.fn = fn; this.dst = dst;
        this.executor = executor;
    }
    public final void run() {
        final CompletableFuture<?> a;
        final CompletableFuture<?> b;
        final Runnable fn;
        final CompletableFuture<Void> dst;
        Object r; Throwable ex;
        // First completed source triggers; CAS claims at-most-once.
        if ((dst = this.dst) != null &&
            (fn = this.fn) != null &&
            (((a = this.src) != null && (r = a.result) != null) ||
             ((b = this.snd) != null && (r = b.result) != null)) &&
            compareAndSet(0, 1)) {
            if (r instanceof AltResult)
                ex = ((AltResult)r).ex;
            else
                ex = null;
            Executor e = executor;
            if (ex == null) {
                try {
                    if (e != null)
                        e.execute(new AsyncRun(fn, dst));  // task completes dst
                    else
                        fn.run();
                } catch (Throwable rex) {
                    ex = rex;
                }
            }
            if (e == null || ex != null)
                dst.internalComplete(null, ex);
        }
    }
    private static final long serialVersionUID = 5232453952276885070L;
}
/**
 * Internal "or" combinator: completes {@code dst} with the raw result
 * of whichever source completes first (value or exception). No user
 * function is involved; used to build disjunction trees.
 */
static final class OrCompletion extends Completion {
    final CompletableFuture<?> src;
    final CompletableFuture<?> snd;
    final CompletableFuture<Object> dst;
    OrCompletion(CompletableFuture<?> src,
                 CompletableFuture<?> snd,
                 CompletableFuture<Object> dst) {
        this.src = src; this.snd = snd; this.dst = dst;
    }
    public final void run() {
        final CompletableFuture<?> a;
        final CompletableFuture<?> b;
        final CompletableFuture<Object> dst;
        Object r, t; Throwable ex;
        // Either source suffices; CAS 0->1 fires at most once.
        if ((dst = this.dst) != null &&
            (((a = this.src) != null && (r = a.result) != null) ||
             ((b = this.snd) != null && (r = b.result) != null)) &&
            compareAndSet(0, 1)) {
            if (r instanceof AltResult) {
                ex = ((AltResult)r).ex;
                t = null;
            }
            else {
                ex = null;
                t = r;   // pass the winning value through unchanged
            }
            dst.internalComplete(t, ex);
        }
    }
    private static final long serialVersionUID = 5232453952276885070L;
}
/**
 * Completion action for {@code exceptionally}: if {@code src}
 * completed exceptionally, applies {@code fn} to the exception and
 * completes {@code dst} with the handler's result (or with the
 * handler's own exception); otherwise passes the normal result
 * through to {@code dst} untouched.
 */
static final class ExceptionCompletion<T> extends Completion {
    final CompletableFuture<? extends T> src;
    final Fun<? super Throwable, ? extends T> fn;
    final CompletableFuture<T> dst;
    ExceptionCompletion(CompletableFuture<? extends T> src,
                        Fun<? super Throwable, ? extends T> fn,
                        CompletableFuture<T> dst) {
        this.src = src; this.fn = fn; this.dst = dst;
    }
    public final void run() {
        final CompletableFuture<? extends T> a;
        final Fun<? super Throwable, ? extends T> fn;
        final CompletableFuture<T> dst;
        Object r; T t = null; Throwable ex, dx = null;
        // Source complete + CAS claim => at-most-once execution.
        if ((dst = this.dst) != null &&
            (fn = this.fn) != null &&
            (a = this.src) != null &&
            (r = a.result) != null &&
            compareAndSet(0, 1)) {
            // Handler runs only for a genuine exception; an AltResult
            // with a null ex encodes a normal null value.
            if ((r instanceof AltResult) &&
                (ex = ((AltResult)r).ex) != null) {
                try {
                    t = fn.apply(ex);
                } catch (Throwable rex) {
                    dx = rex;   // handler itself failed
                }
            }
            else {
                @SuppressWarnings("unchecked") T tr = (T) r;
                t = tr;
            }
            dst.internalComplete(t, dx);
        }
    }
    private static final long serialVersionUID = 5232453952276885070L;
}
/**
 * Completion action that copies {@code src}'s outcome (value or
 * exception) into {@code dst}. Used to link a composed future's
 * result back to the caller-visible destination.
 */
static final class ThenCopy<T> extends Completion {
    final CompletableFuture<?> src;
    final CompletableFuture<T> dst;
    ThenCopy(CompletableFuture<?> src,
             CompletableFuture<T> dst) {
        this.src = src; this.dst = dst;
    }
    public final void run() {
        final CompletableFuture<?> a;
        final CompletableFuture<T> dst;
        Object r; T t; Throwable ex;
        // Source complete + CAS claim => copy happens at most once.
        if ((dst = this.dst) != null &&
            (a = this.src) != null &&
            (r = a.result) != null &&
            compareAndSet(0, 1)) {
            if (r instanceof AltResult) {
                ex = ((AltResult)r).ex;
                t = null;
            }
            else {
                ex = null;
                @SuppressWarnings("unchecked") T tr = (T) r;
                t = tr;
            }
            dst.internalComplete(t, ex);
        }
    }
    private static final long serialVersionUID = 5232453952276885070L;
}
// version of ThenCopy for CompletableFuture<Void> dst
/**
 * Variant of ThenCopy for a {@code CompletableFuture<Void>}
 * destination: propagates only completion (and any exception) from
 * {@code src} to {@code dst}, discarding the value.
 */
static final class ThenPropagate extends Completion {
    final CompletableFuture<?> src;
    final CompletableFuture<Void> dst;
    ThenPropagate(CompletableFuture<?> src,
                  CompletableFuture<Void> dst) {
        this.src = src; this.dst = dst;
    }
    public final void run() {
        final CompletableFuture<?> a;
        final CompletableFuture<Void> dst;
        Object r; Throwable ex;
        // Source complete + CAS claim => fires at most once.
        if ((dst = this.dst) != null &&
            (a = this.src) != null &&
            (r = a.result) != null &&
            compareAndSet(0, 1)) {
            if (r instanceof AltResult)
                ex = ((AltResult)r).ex;
            else
                ex = null;
            dst.internalComplete(null, ex);
        }
    }
    private static final long serialVersionUID = 5232453952276885070L;
}
/**
 * Completion action for {@code handle}: ALWAYS invokes {@code fn}
 * with the (value, exception) pair — exactly one of which is
 * meaningful — and completes {@code dst} with the handler's result,
 * or with the handler's own exception if it throws.
 */
static final class HandleCompletion<T,U> extends Completion {
    final CompletableFuture<? extends T> src;
    final BiFun<? super T, Throwable, ? extends U> fn;
    final CompletableFuture<U> dst;
    HandleCompletion(CompletableFuture<? extends T> src,
                     BiFun<? super T, Throwable, ? extends U> fn,
                     CompletableFuture<U> dst) {
        this.src = src; this.fn = fn; this.dst = dst;
    }
    public final void run() {
        final CompletableFuture<? extends T> a;
        final BiFun<? super T, Throwable, ? extends U> fn;
        final CompletableFuture<U> dst;
        Object r; T t; Throwable ex;
        // Source complete + CAS claim => handler runs at most once.
        if ((dst = this.dst) != null &&
            (fn = this.fn) != null &&
            (a = this.src) != null &&
            (r = a.result) != null &&
            compareAndSet(0, 1)) {
            if (r instanceof AltResult) {
                ex = ((AltResult)r).ex;
                t = null;
            }
            else {
                ex = null;
                @SuppressWarnings("unchecked") T tr = (T) r;
                t = tr;
            }
            U u = null; Throwable dx = null;
            try {
                u = fn.apply(t, ex);   // handler sees both value and exception
            } catch (Throwable rex) {
                dx = rex;
            }
            dst.internalComplete(u, dx);
        }
    }
    private static final long serialVersionUID = 5232453952276885070L;
}
/**
 * Completion action for {@code thenCompose}: applies {@code fn} to
 * src's result to obtain an intermediate future {@code c}, then
 * arranges for c's outcome to complete {@code dst} — either by
 * copying immediately if c is already done, or by pushing a
 * {@link ThenCopy} onto c's completion stack.
 */
static final class ThenCompose<T,U> extends Completion {
    final CompletableFuture<? extends T> src;
    final Fun<? super T, CompletableFuture<U>> fn;
    final CompletableFuture<U> dst;
    final Executor executor;    // null => run synchronously
    ThenCompose(CompletableFuture<? extends T> src,
                Fun<? super T, CompletableFuture<U>> fn,
                CompletableFuture<U> dst,
                Executor executor) {
        this.src = src; this.fn = fn; this.dst = dst;
        this.executor = executor;
    }
    public final void run() {
        final CompletableFuture<? extends T> a;
        final Fun<? super T, CompletableFuture<U>> fn;
        final CompletableFuture<U> dst;
        Object r; T t; Throwable ex; Executor e;
        // Source complete + CAS claim => composition runs at most once.
        if ((dst = this.dst) != null &&
            (fn = this.fn) != null &&
            (a = this.src) != null &&
            (r = a.result) != null &&
            compareAndSet(0, 1)) {
            if (r instanceof AltResult) {
                ex = ((AltResult)r).ex;
                t = null;
            }
            else {
                ex = null;
                @SuppressWarnings("unchecked") T tr = (T) r;
                t = tr;
            }
            CompletableFuture<U> c = null;
            U u = null;
            boolean complete = false;
            if (ex == null) {
                if ((e = executor) != null)
                    e.execute(new AsyncCompose<T,U>(t, fn, dst)); // async path completes dst
                else {
                    try {
                        // A null future from fn is a user error: fail dst with NPE.
                        if ((c = fn.apply(t)) == null)
                            ex = new NullPointerException();
                    } catch (Throwable rex) {
                        ex = rex;
                    }
                }
            }
            if (c != null) {
                ThenCopy<U> d = null;
                Object s;
                if ((s = c.result) == null) {
                    // c not yet done: push a ThenCopy onto c's completion
                    // stack via CAS, re-checking c.result to avoid missing
                    // a completion that races with the push.
                    CompletionNode p = new CompletionNode
                        (d = new ThenCopy<U>(c, dst));
                    while ((s = c.result) == null) {
                        if (UNSAFE.compareAndSwapObject
                            (c, COMPLETIONS, p.next = c.completions, p))
                            break;
                    }
                }
                // If c is done and we (not the pushed node) win the claim,
                // copy its outcome into dst right here.
                if (s != null && (d == null || d.compareAndSet(0, 1))) {
                    complete = true;
                    if (s instanceof AltResult) {
                        ex = ((AltResult)s).ex;  // no rewrap
                        u = null;
                    }
                    else {
                        @SuppressWarnings("unchecked") U us = (U) s;
                        u = us;
                    }
                }
            }
            if (complete || ex != null)
                dst.internalComplete(u, ex);
            if (c != null)
                c.helpPostComplete();   // help drain c's completion stack
        }
    }
    private static final long serialVersionUID = 5232453952276885070L;
}
// public methods
/**
* Creates a new incomplete CompletableFuture.
*/
public CompletableFuture() {
    // Intentionally empty: result stays null (= incomplete) until a
    // completion method CASes it in; the completion stack starts empty.
}
/**
* Returns a new CompletableFuture that is asynchronously completed
* by a task running in the {@link ForkJoinPool#commonPool()} with
* the value obtained by calling the given Generator.
*
* @param supplier a function returning the value to be used
* to complete the returned CompletableFuture
* @param <U> the function's return type
* @return the new CompletableFuture
*/
public static <U> CompletableFuture<U> supplyAsync(Generator<U> supplier) {
    if (supplier == null)
        throw new NullPointerException();
    CompletableFuture<U> future = new CompletableFuture<U>();
    // Submit as a ForkJoinTask so the common pool runs it directly
    // rather than wrapping it as a plain Runnable.
    ForkJoinTask<?> task = new AsyncSupply<U>(supplier, future);
    ForkJoinPool.commonPool().execute(task);
    return future;
}
/**
* Returns a new CompletableFuture that is asynchronously completed
* by a task running in the given executor with the value obtained
* by calling the given Generator.
*
* @param supplier a function returning the value to be used
* to complete the returned CompletableFuture
* @param executor the executor to use for asynchronous execution
* @param <U> the function's return type
* @return the new CompletableFuture
*/
public static <U> CompletableFuture<U> supplyAsync(Generator<U> supplier,
                                                   Executor executor) {
    // Both arguments are mandatory.
    if (supplier == null || executor == null)
        throw new NullPointerException();
    CompletableFuture<U> future = new CompletableFuture<U>();
    executor.execute(new AsyncSupply<U>(supplier, future));
    return future;
}
/**
* Returns a new CompletableFuture that is asynchronously completed
* by a task running in the {@link ForkJoinPool#commonPool()} after
* it runs the given action.
*
* @param runnable the action to run before completing the
* returned CompletableFuture
* @return the new CompletableFuture
*/
public static CompletableFuture<Void> runAsync(Runnable runnable) {
    if (runnable == null)
        throw new NullPointerException();
    CompletableFuture<Void> future = new CompletableFuture<Void>();
    // Submit as a ForkJoinTask so the common pool runs it directly
    // rather than wrapping it as a plain Runnable.
    ForkJoinTask<?> task = new AsyncRun(runnable, future);
    ForkJoinPool.commonPool().execute(task);
    return future;
}
/**
* Returns a new CompletableFuture that is asynchronously completed
* by a task running in the given executor after it runs the given
* action.
*
* @param runnable the action to run before completing the
* returned CompletableFuture
* @param executor the executor to use for asynchronous execution
* @return the new CompletableFuture
*/
public static CompletableFuture<Void> runAsync(Runnable runnable,
                                               Executor executor) {
    // Both arguments are mandatory.
    if (runnable == null || executor == null)
        throw new NullPointerException();
    CompletableFuture<Void> future = new CompletableFuture<Void>();
    executor.execute(new AsyncRun(runnable, future));
    return future;
}
/**
* Returns a new CompletableFuture that is already completed with
* the given value.
*
* @param value the value
* @param <U> the type of the value
* @return the completed CompletableFuture
*/
public static <U> CompletableFuture<U> completedFuture(U value) {
    CompletableFuture<U> done = new CompletableFuture<U>();
    // NIL is the internal sentinel for a successful null result;
    // writing result directly marks the future as already complete.
    if (value == null)
        done.result = NIL;
    else
        done.result = value;
    return done;
}
/**
* Returns {@code true} if completed in any fashion: normally,
* exceptionally, or via cancellation.
*
* @return {@code true} if completed
*/
public boolean isDone() {
    // A non-null result field marks completion of any kind
    // (normal, exceptional, or cancelled).
    Object r = result;
    return r != null;
}
/**
* Waits if necessary for this future to complete, and then
* returns its result.
*
* @return the result value
* @throws CancellationException if this future was cancelled
* @throws ExecutionException if this future completed exceptionally
* @throws InterruptedException if the current thread was interrupted
* while waiting
*/
public T get() throws InterruptedException, ExecutionException {
    Object r; Throwable ex, cause;
    // waitingGet(true) blocks until complete and returns null only if
    // the wait was interrupted, which we translate to the checked
    // InterruptedException required by the Future contract.
    if ((r = result) == null && (r = waitingGet(true)) == null)
        throw new InterruptedException();
    if (!(r instanceof AltResult)) {
        @SuppressWarnings("unchecked") T tr = (T) r;
        return tr;
    }
    // AltResult with a null ex encodes a successful null value.
    if ((ex = ((AltResult)r).ex) == null)
        return null;
    if (ex instanceof CancellationException)
        throw (CancellationException)ex;
    // Unwrap CompletionException so callers see the original cause
    // inside the ExecutionException, per Future semantics.
    if ((ex instanceof CompletionException) &&
        (cause = ex.getCause()) != null)
        ex = cause;
    throw new ExecutionException(ex);
}
/**
* Waits if necessary for at most the given time for this future
* to complete, and then returns its result, if available.
*
* @param timeout the maximum time to wait
* @param unit the time unit of the timeout argument
* @return the result value
* @throws CancellationException if this future was cancelled
* @throws ExecutionException if this future completed exceptionally
* @throws InterruptedException if the current thread was interrupted
* while waiting
* @throws TimeoutException if the wait timed out
*/
public T get(long timeout, TimeUnit unit)
    throws InterruptedException, ExecutionException, TimeoutException {
    Object r; Throwable ex, cause;
    long nanos = unit.toNanos(timeout);
    // Check interrupt status up front, before any waiting.
    if (Thread.interrupted())
        throw new InterruptedException();
    // timedAwaitDone either returns a non-null result or throws
    // TimeoutException / InterruptedException itself.
    if ((r = result) == null)
        r = timedAwaitDone(nanos);
    if (!(r instanceof AltResult)) {
        @SuppressWarnings("unchecked") T tr = (T) r;
        return tr;
    }
    // AltResult with a null ex encodes a successful null value.
    if ((ex = ((AltResult)r).ex) == null)
        return null;
    if (ex instanceof CancellationException)
        throw (CancellationException)ex;
    // Unwrap CompletionException so callers see the original cause
    // inside the ExecutionException, per Future semantics.
    if ((ex instanceof CompletionException) &&
        (cause = ex.getCause()) != null)
        ex = cause;
    throw new ExecutionException(ex);
}
/**
* Returns the result value when complete, or throws an
* (unchecked) exception if completed exceptionally. To better
* conform with the use of common functional forms, if a
* computation involved in the completion of this
* CompletableFuture threw an exception, this method throws an
* (unchecked) {@link CompletionException} with the underlying
* exception as its cause.
*
* @return the result value
* @throws CancellationException if the computation was cancelled
* @throws CompletionException if this future completed
* exceptionally or a completion computation threw an exception
*/
public T join() {
    Object r; Throwable ex;
    // waitingGet(false) blocks uninterruptibly until complete.
    if ((r = result) == null)
        r = waitingGet(false);
    if (!(r instanceof AltResult)) {
        @SuppressWarnings("unchecked") T tr = (T) r;
        return tr;
    }
    // AltResult with a null ex encodes a successful null value.
    if ((ex = ((AltResult)r).ex) == null)
        return null;
    if (ex instanceof CancellationException)
        throw (CancellationException)ex;
    // Unlike get(), join() rethrows unchecked: an existing
    // CompletionException as-is, anything else wrapped in one.
    if (ex instanceof CompletionException)
        throw (CompletionException)ex;
    throw new CompletionException(ex);
}
/**
* Returns the result value (or throws any encountered exception)
* if completed, else returns the given valueIfAbsent.
*
* @param valueIfAbsent the value to return if not completed
* @return the result value, if completed, else the given valueIfAbsent
* @throws CancellationException if the computation was cancelled
* @throws CompletionException if this future completed
* exceptionally or a completion computation threw an exception
*/
public T getNow(T valueIfAbsent) {
    Object r = result;
    if (r == null)
        return valueIfAbsent;        // not yet completed
    if (r instanceof AltResult) {
        Throwable ex = ((AltResult)r).ex;
        if (ex == null)
            return null;             // completed normally with null
        if (ex instanceof CancellationException)
            throw (CancellationException)ex;
        // Rethrow unchecked: reuse an existing CompletionException,
        // otherwise wrap the cause in a fresh one.
        if (ex instanceof CompletionException)
            throw (CompletionException)ex;
        throw new CompletionException(ex);
    }
    @SuppressWarnings("unchecked") T value = (T) r;
    return value;
}
/**
* If not already completed, sets the value returned by {@link
* #get()} and related methods to the given value.
*
* @param value the result value
* @return {@code true} if this invocation caused this CompletableFuture
* to transition to a completed state, else {@code false}
*/
public boolean complete(T value) {
    // Single CAS from null installs the result; NIL stands in for a
    // null value so a non-null result always means "complete".
    boolean triggered = result == null &&
        UNSAFE.compareAndSwapObject(this, RESULT, null,
                                    value == null ? NIL : value);
    // Always run postComplete: even if our CAS lost, we help drain
    // any queued completion actions.
    postComplete();
    return triggered;
}
/**
* If not already completed, causes invocations of {@link #get()}
* and related methods to throw the given exception.
*
* @param ex the exception
* @return {@code true} if this invocation caused this CompletableFuture
* to transition to a completed state, else {@code false}
*/
public boolean completeExceptionally(Throwable ex) {
    if (ex == null) throw new NullPointerException();
    // Install the exception wrapped in an AltResult via a single CAS.
    boolean triggered = result == null &&
        UNSAFE.compareAndSwapObject(this, RESULT, null, new AltResult(ex));
    // Always run postComplete: even if our CAS lost, we help drain
    // any queued completion actions.
    postComplete();
    return triggered;
}
/**
* Returns a new CompletableFuture that is completed
* when this CompletableFuture completes, with the result of the
* given function of this CompletableFuture's result.
*
* <p>If this CompletableFuture completes exceptionally, or the
* supplied function throws an exception, then the returned
* CompletableFuture completes exceptionally with a
* CompletionException holding the exception as its cause.
*
* @param fn the function to use to compute the value of
* the returned CompletableFuture
* @return the new CompletableFuture
*/
public <U> CompletableFuture<U> thenApply(Fun<? super T,? extends U> fn) {
    // null executor => fn runs synchronously in the completing thread
    // (or in the caller's thread if already complete).
    return doThenApply(fn, null);
}
/**
* Returns a new CompletableFuture that is asynchronously completed
* when this CompletableFuture completes, with the result of the
* given function of this CompletableFuture's result from a
* task running in the {@link ForkJoinPool#commonPool()}.
*
* <p>If this CompletableFuture completes exceptionally, or the
* supplied function throws an exception, then the returned
* CompletableFuture completes exceptionally with a
* CompletionException holding the exception as its cause.
*
* @param fn the function to use to compute the value of
* the returned CompletableFuture
* @return the new CompletableFuture
*/
public <U> CompletableFuture<U> thenApplyAsync
    (Fun<? super T,? extends U> fn) {
    // Async variant: fn runs in the ForkJoin common pool.
    return doThenApply(fn, ForkJoinPool.commonPool());
}
/**
* Returns a new CompletableFuture that is asynchronously completed
* when this CompletableFuture completes, with the result of the
* given function of this CompletableFuture's result from a
* task running in the given executor.
*
* <p>If this CompletableFuture completes exceptionally, or the
* supplied function throws an exception, then the returned
* CompletableFuture completes exceptionally with a
* CompletionException holding the exception as its cause.
*
* @param fn the function to use to compute the value of
* the returned CompletableFuture
* @param executor the executor to use for asynchronous execution
* @return the new CompletableFuture
*/
public <U> CompletableFuture<U> thenApplyAsync
    (Fun<? super T,? extends U> fn,
     Executor executor) {
    // Reject a null executor here; doThenApply checks fn.
    if (executor == null) throw new NullPointerException();
    return doThenApply(fn, executor);
}
/**
 * Shared implementation of thenApply / thenApplyAsync: creates dst,
 * and either queues a ThenApply completion (if this future is still
 * pending) or applies fn immediately (if already complete).
 * {@code e == null} means synchronous execution.
 */
private <U> CompletableFuture<U> doThenApply
    (Fun<? super T,? extends U> fn,
     Executor e) {
    if (fn == null) throw new NullPointerException();
    CompletableFuture<U> dst = new CompletableFuture<U>();
    ThenApply<T,U> d = null;
    Object r;
    if ((r = result) == null) {
        // Not complete yet: CAS-push a completion node onto the stack,
        // re-reading result each iteration so a racing completion
        // cannot be missed.
        CompletionNode p = new CompletionNode
            (d = new ThenApply<T,U>(this, fn, dst, e));
        while ((r = result) == null) {
            if (UNSAFE.compareAndSwapObject
                (this, COMPLETIONS, p.next = completions, p))
                break;
        }
    }
    // Already complete (or completed during the push): run inline if
    // we win the claim CAS on the queued node (d == null means no
    // node was queued, so we always run).
    if (r != null && (d == null || d.compareAndSet(0, 1))) {
        T t; Throwable ex;
        if (r instanceof AltResult) {
            ex = ((AltResult)r).ex;
            t = null;
        }
        else {
            ex = null;
            @SuppressWarnings("unchecked") T tr = (T) r;
            t = tr;
        }
        U u = null;
        if (ex == null) {
            try {
                if (e != null)
                    e.execute(new AsyncApply<T,U>(t, fn, dst)); // task completes dst
                else
                    u = fn.apply(t);
            } catch (Throwable rex) {
                ex = rex;
            }
        }
        if (e == null || ex != null)
            dst.internalComplete(u, ex);
    }
    helpPostComplete();   // help drain this future's completion stack
    return dst;
}
/**
* Returns a new CompletableFuture that is completed
* when this CompletableFuture completes, after performing the given
* action with this CompletableFuture's result.
*
* <p>If this CompletableFuture completes exceptionally, or the
* supplied action throws an exception, then the returned
* CompletableFuture completes exceptionally with a
* CompletionException holding the exception as its cause.
*
* @param block the action to perform before completing the
* returned CompletableFuture
* @return the new CompletableFuture
*/
public CompletableFuture<Void> thenAccept(Action<? super T> block) {
    // null executor => block runs synchronously.
    return doThenAccept(block, null);
}
/**
* Returns a new CompletableFuture that is asynchronously completed
* when this CompletableFuture completes, after performing the given
* action with this CompletableFuture's result from a task running
* in the {@link ForkJoinPool#commonPool()}.
*
* <p>If this CompletableFuture completes exceptionally, or the
* supplied action throws an exception, then the returned
* CompletableFuture completes exceptionally with a
* CompletionException holding the exception as its cause.
*
* @param block the action to perform before completing the
* returned CompletableFuture
* @return the new CompletableFuture
*/
public CompletableFuture<Void> thenAcceptAsync(Action<? super T> block) {
    // Async variant: block runs in the ForkJoin common pool.
    return doThenAccept(block, ForkJoinPool.commonPool());
}
/**
* Returns a new CompletableFuture that is asynchronously completed
* when this CompletableFuture completes, after performing the given
* action with this CompletableFuture's result from a task running
* in the given executor.
*
* <p>If this CompletableFuture completes exceptionally, or the
* supplied action throws an exception, then the returned
* CompletableFuture completes exceptionally with a
* CompletionException holding the exception as its cause.
*
* @param block the action to perform before completing the
* returned CompletableFuture
* @param executor the executor to use for asynchronous execution
* @return the new CompletableFuture
*/
public CompletableFuture<Void> thenAcceptAsync(Action<? super T> block,
                                               Executor executor) {
    // Reject a null executor here; doThenAccept checks the action.
    if (executor == null) throw new NullPointerException();
    return doThenAccept(block, executor);
}
/**
 * Shared implementation of thenAccept / thenAcceptAsync: queues a
 * ThenAccept completion if this future is pending, or invokes fn
 * immediately if already complete. {@code e == null} means
 * synchronous execution.
 */
private CompletableFuture<Void> doThenAccept(Action<? super T> fn,
                                             Executor e) {
    if (fn == null) throw new NullPointerException();
    CompletableFuture<Void> dst = new CompletableFuture<Void>();
    ThenAccept<T> d = null;
    Object r;
    if ((r = result) == null) {
        // CAS-push a completion node, re-checking result each time to
        // avoid missing a racing completion.
        CompletionNode p = new CompletionNode
            (d = new ThenAccept<T>(this, fn, dst, e));
        while ((r = result) == null) {
            if (UNSAFE.compareAndSwapObject
                (this, COMPLETIONS, p.next = completions, p))
                break;
        }
    }
    // Already complete: run inline if we win the claim on the node.
    if (r != null && (d == null || d.compareAndSet(0, 1))) {
        T t; Throwable ex;
        if (r instanceof AltResult) {
            ex = ((AltResult)r).ex;
            t = null;
        }
        else {
            ex = null;
            @SuppressWarnings("unchecked") T tr = (T) r;
            t = tr;
        }
        if (ex == null) {
            try {
                if (e != null)
                    e.execute(new AsyncAccept<T>(t, fn, dst)); // task completes dst
                else
                    fn.accept(t);
            } catch (Throwable rex) {
                ex = rex;
            }
        }
        if (e == null || ex != null)
            dst.internalComplete(null, ex);
    }
    helpPostComplete();   // help drain this future's completion stack
    return dst;
}
/**
* Returns a new CompletableFuture that is completed
* when this CompletableFuture completes, after performing the given
* action.
*
* <p>If this CompletableFuture completes exceptionally, or the
* supplied action throws an exception, then the returned
* CompletableFuture completes exceptionally with a
* CompletionException holding the exception as its cause.
*
* @param action the action to perform before completing the
* returned CompletableFuture
* @return the new CompletableFuture
*/
public CompletableFuture<Void> thenRun(Runnable action) {
    // null executor => action runs synchronously.
    return doThenRun(action, null);
}
/**
* Returns a new CompletableFuture that is asynchronously completed
* when this CompletableFuture completes, after performing the given
* action from a task running in the {@link ForkJoinPool#commonPool()}.
*
* <p>If this CompletableFuture completes exceptionally, or the
* supplied action throws an exception, then the returned
* CompletableFuture completes exceptionally with a
* CompletionException holding the exception as its cause.
*
* @param action the action to perform before completing the
* returned CompletableFuture
* @return the new CompletableFuture
*/
public CompletableFuture<Void> thenRunAsync(Runnable action) {
    // Async variant: action runs in the ForkJoin common pool.
    return doThenRun(action, ForkJoinPool.commonPool());
}
/**
* Returns a new CompletableFuture that is asynchronously completed
* when this CompletableFuture completes, after performing the given
* action from a task running in the given executor.
*
* <p>If this CompletableFuture completes exceptionally, or the
* supplied action throws an exception, then the returned
* CompletableFuture completes exceptionally with a
* CompletionException holding the exception as its cause.
*
* @param action the action to perform before completing the
* returned CompletableFuture
* @param executor the executor to use for asynchronous execution
* @return the new CompletableFuture
*/
public CompletableFuture<Void> thenRunAsync(Runnable action,
                                            Executor executor) {
    // Reject a null executor here; doThenRun checks the action.
    if (executor == null) throw new NullPointerException();
    return doThenRun(action, executor);
}
    /**
     * Shared implementation of the {@code thenRun*} methods: arranges for
     * {@code action} to run after this future completes — synchronously
     * when {@code e} is null, otherwise dispatched to executor {@code e}.
     *
     * @param action the action to run (must be non-null)
     * @param e the executor, or null for synchronous execution
     * @return the dependent future, completed after the action runs
     */
    private CompletableFuture<Void> doThenRun(Runnable action,
                                              Executor e) {
        if (action == null) throw new NullPointerException();
        CompletableFuture<Void> dst = new CompletableFuture<Void>();
        ThenRun d = null;
        Object r;
        if ((r = result) == null) {
            // Not yet complete: CAS a node onto the Treiber stack of
            // dependent completions, re-reading result on every attempt
            // so a racing completion is never missed.
            CompletionNode p = new CompletionNode
                (d = new ThenRun(this, action, dst, e));
            while ((r = result) == null) {
                if (UNSAFE.compareAndSwapObject
                    (this, COMPLETIONS, p.next = completions, p))
                    break;
            }
        }
        // If the result is already available, run the action here —
        // but only if this call (not the registered completion node)
        // wins the 0->1 trigger CAS.
        if (r != null && (d == null || d.compareAndSet(0, 1))) {
            Throwable ex;
            if (r instanceof AltResult)
                ex = ((AltResult)r).ex;
            else
                ex = null;
            if (ex == null) {
                try {
                    if (e != null)
                        e.execute(new AsyncRun(action, dst));
                    else
                        action.run();
                } catch (Throwable rex) {
                    ex = rex;
                }
            }
            // An async task completes dst itself unless dispatch failed;
            // the synchronous path always completes dst here.
            if (e == null || ex != null)
                dst.internalComplete(null, ex);
        }
        helpPostComplete();
        return dst;
    }
/**
* Returns a new CompletableFuture that is completed
* when both this and the other given CompletableFuture complete,
* with the result of the given function of the results of the two
* CompletableFutures.
*
* <p>If this and/or the other CompletableFuture complete
* exceptionally, or the supplied function throws an exception,
* then the returned CompletableFuture completes exceptionally
* with a CompletionException holding the exception as its cause.
*
* @param other the other CompletableFuture
* @param fn the function to use to compute the value of
* the returned CompletableFuture
* @return the new CompletableFuture
*/
public <U,V> CompletableFuture<V> thenCombine
(CompletableFuture<? extends U> other,
BiFun<? super T,? super U,? extends V> fn) {
return doThenCombine(other, fn, null);
}
/**
* Returns a new CompletableFuture that is asynchronously completed
* when both this and the other given CompletableFuture complete,
* with the result of the given function of the results of the two
* CompletableFutures from a task running in the
* {@link ForkJoinPool#commonPool()}.
*
* <p>If this and/or the other CompletableFuture complete
* exceptionally, or the supplied function throws an exception,
* then the returned CompletableFuture completes exceptionally
* with a CompletionException holding the exception as its cause.
*
* @param other the other CompletableFuture
* @param fn the function to use to compute the value of
* the returned CompletableFuture
* @return the new CompletableFuture
*/
public <U,V> CompletableFuture<V> thenCombineAsync
(CompletableFuture<? extends U> other,
BiFun<? super T,? super U,? extends V> fn) {
return doThenCombine(other, fn, ForkJoinPool.commonPool());
}
/**
* Returns a new CompletableFuture that is asynchronously completed
* when both this and the other given CompletableFuture complete,
* with the result of the given function of the results of the two
* CompletableFutures from a task running in the given executor.
*
* <p>If this and/or the other CompletableFuture complete
* exceptionally, or the supplied function throws an exception,
* then the returned CompletableFuture completes exceptionally
* with a CompletionException holding the exception as its cause.
*
* @param other the other CompletableFuture
* @param fn the function to use to compute the value of
* the returned CompletableFuture
* @param executor the executor to use for asynchronous execution
* @return the new CompletableFuture
*/
public <U,V> CompletableFuture<V> thenCombineAsync
(CompletableFuture<? extends U> other,
BiFun<? super T,? super U,? extends V> fn,
Executor executor) {
if (executor == null) throw new NullPointerException();
return doThenCombine(other, fn, executor);
}
    /**
     * Shared implementation of the {@code thenCombine*} methods:
     * completes the returned future with {@code fn} applied to both
     * sources' results — synchronously when {@code e} is null, otherwise
     * on executor {@code e}.
     *
     * @param other the second source future (must be non-null)
     * @param fn the combining function (must be non-null)
     * @param e the executor, or null for synchronous execution
     * @return the dependent future
     */
    private <U,V> CompletableFuture<V> doThenCombine
        (CompletableFuture<? extends U> other,
         BiFun<? super T,? super U,? extends V> fn,
         Executor e) {
        if (other == null || fn == null) throw new NullPointerException();
        CompletableFuture<V> dst = new CompletableFuture<V>();
        ThenCombine<T,U,V> d = null;
        Object r, s = null;
        if ((r = result) == null || (s = other.result) == null) {
            // At least one source is incomplete: push the same shared
            // completion onto this future's stack (node p) first and
            // then, if still needed, onto the other's (node q).
            d = new ThenCombine<T,U,V>(this, other, fn, dst, e);
            CompletionNode q = null, p = new CompletionNode(d);
            while ((r == null && (r = result) == null) ||
                   (s == null && (s = other.result) == null)) {
                if (q != null) {
                    // Already registered on this; try to register on other
                    // unless it has completed in the meantime.
                    if (s != null ||
                        UNSAFE.compareAndSwapObject
                        (other, COMPLETIONS, q.next = other.completions, q))
                        break;
                }
                else if (r != null ||
                         UNSAFE.compareAndSwapObject
                         (this, COMPLETIONS, p.next = completions, p)) {
                    if (s != null)
                        break;
                    q = new CompletionNode(d);
                }
            }
        }
        // Both results present: apply fn here only if this call (not the
        // registered completion) wins the 0->1 trigger CAS.
        if (r != null && s != null && (d == null || d.compareAndSet(0, 1))) {
            T t; U u; Throwable ex;
            if (r instanceof AltResult) {
                ex = ((AltResult)r).ex;
                t = null;
            }
            else {
                ex = null;
                @SuppressWarnings("unchecked") T tr = (T) r;
                t = tr;
            }
            // The first source's exception, if any, wins; only then is
            // the second source's result unpacked.
            if (ex != null)
                u = null;
            else if (s instanceof AltResult) {
                ex = ((AltResult)s).ex;
                u = null;
            }
            else {
                @SuppressWarnings("unchecked") U us = (U) s;
                u = us;
            }
            V v = null;
            if (ex == null) {
                try {
                    if (e != null)
                        e.execute(new AsyncCombine<T,U,V>(t, u, fn, dst));
                    else
                        v = fn.apply(t, u);
                } catch (Throwable rex) {
                    ex = rex;
                }
            }
            // Async tasks complete dst themselves unless dispatch failed.
            if (e == null || ex != null)
                dst.internalComplete(v, ex);
        }
        helpPostComplete();
        other.helpPostComplete();
        return dst;
    }
/**
* Returns a new CompletableFuture that is completed
* when both this and the other given CompletableFuture complete,
* after performing the given action with the results of the two
* CompletableFutures.
*
* <p>If this and/or the other CompletableFuture complete
* exceptionally, or the supplied action throws an exception,
* then the returned CompletableFuture completes exceptionally
* with a CompletionException holding the exception as its cause.
*
* @param other the other CompletableFuture
* @param block the action to perform before completing the
* returned CompletableFuture
* @return the new CompletableFuture
*/
public <U> CompletableFuture<Void> thenAcceptBoth
(CompletableFuture<? extends U> other,
BiAction<? super T, ? super U> block) {
return doThenAcceptBoth(other, block, null);
}
/**
* Returns a new CompletableFuture that is asynchronously completed
* when both this and the other given CompletableFuture complete,
* after performing the given action with the results of the two
* CompletableFutures from a task running in the {@link
* ForkJoinPool#commonPool()}.
*
* <p>If this and/or the other CompletableFuture complete
* exceptionally, or the supplied action throws an exception,
* then the returned CompletableFuture completes exceptionally
* with a CompletionException holding the exception as its cause.
*
* @param other the other CompletableFuture
* @param block the action to perform before completing the
* returned CompletableFuture
* @return the new CompletableFuture
*/
public <U> CompletableFuture<Void> thenAcceptBothAsync
(CompletableFuture<? extends U> other,
BiAction<? super T, ? super U> block) {
return doThenAcceptBoth(other, block, ForkJoinPool.commonPool());
}
/**
* Returns a new CompletableFuture that is asynchronously completed
* when both this and the other given CompletableFuture complete,
* after performing the given action with the results of the two
* CompletableFutures from a task running in the given executor.
*
* <p>If this and/or the other CompletableFuture complete
* exceptionally, or the supplied action throws an exception,
* then the returned CompletableFuture completes exceptionally
* with a CompletionException holding the exception as its cause.
*
* @param other the other CompletableFuture
* @param block the action to perform before completing the
* returned CompletableFuture
* @param executor the executor to use for asynchronous execution
* @return the new CompletableFuture
*/
public <U> CompletableFuture<Void> thenAcceptBothAsync
(CompletableFuture<? extends U> other,
BiAction<? super T, ? super U> block,
Executor executor) {
if (executor == null) throw new NullPointerException();
return doThenAcceptBoth(other, block, executor);
}
    /**
     * Shared implementation of the {@code thenAcceptBoth*} methods:
     * invokes {@code fn} with both sources' results and then completes
     * the returned future — synchronously when {@code e} is null,
     * otherwise on executor {@code e}.
     *
     * @param other the second source future (must be non-null)
     * @param fn the consuming action (must be non-null)
     * @param e the executor, or null for synchronous execution
     * @return the dependent future
     */
    private <U> CompletableFuture<Void> doThenAcceptBoth
        (CompletableFuture<? extends U> other,
         BiAction<? super T,? super U> fn,
         Executor e) {
        if (other == null || fn == null) throw new NullPointerException();
        CompletableFuture<Void> dst = new CompletableFuture<Void>();
        ThenAcceptBoth<T,U> d = null;
        Object r, s = null;
        if ((r = result) == null || (s = other.result) == null) {
            // At least one source is incomplete: push the same shared
            // completion onto this future's stack (node p), then onto
            // the other's (node q) if it is still incomplete.
            d = new ThenAcceptBoth<T,U>(this, other, fn, dst, e);
            CompletionNode q = null, p = new CompletionNode(d);
            while ((r == null && (r = result) == null) ||
                   (s == null && (s = other.result) == null)) {
                if (q != null) {
                    // Registered on this already; try the other's stack
                    // unless it completed in the meantime.
                    if (s != null ||
                        UNSAFE.compareAndSwapObject
                        (other, COMPLETIONS, q.next = other.completions, q))
                        break;
                }
                else if (r != null ||
                         UNSAFE.compareAndSwapObject
                         (this, COMPLETIONS, p.next = completions, p)) {
                    if (s != null)
                        break;
                    q = new CompletionNode(d);
                }
            }
        }
        // Both results present: act here only if this call (not the
        // registered completion) wins the 0->1 trigger CAS.
        if (r != null && s != null && (d == null || d.compareAndSet(0, 1))) {
            T t; U u; Throwable ex;
            if (r instanceof AltResult) {
                ex = ((AltResult)r).ex;
                t = null;
            }
            else {
                ex = null;
                @SuppressWarnings("unchecked") T tr = (T) r;
                t = tr;
            }
            // The first source's exception, if any, wins; only then is
            // the second source's result unpacked.
            if (ex != null)
                u = null;
            else if (s instanceof AltResult) {
                ex = ((AltResult)s).ex;
                u = null;
            }
            else {
                @SuppressWarnings("unchecked") U us = (U) s;
                u = us;
            }
            if (ex == null) {
                try {
                    if (e != null)
                        e.execute(new AsyncAcceptBoth<T,U>(t, u, fn, dst));
                    else
                        fn.accept(t, u);
                } catch (Throwable rex) {
                    ex = rex;
                }
            }
            // Async tasks complete dst themselves unless dispatch failed.
            if (e == null || ex != null)
                dst.internalComplete(null, ex);
        }
        helpPostComplete();
        other.helpPostComplete();
        return dst;
    }
/**
* Returns a new CompletableFuture that is completed
* when both this and the other given CompletableFuture complete,
* after performing the given action.
*
* <p>If this and/or the other CompletableFuture complete
* exceptionally, or the supplied action throws an exception,
* then the returned CompletableFuture completes exceptionally
* with a CompletionException holding the exception as its cause.
*
* @param other the other CompletableFuture
* @param action the action to perform before completing the
* returned CompletableFuture
* @return the new CompletableFuture
*/
public CompletableFuture<Void> runAfterBoth(CompletableFuture<?> other,
Runnable action) {
return doRunAfterBoth(other, action, null);
}
/**
* Returns a new CompletableFuture that is asynchronously completed
* when both this and the other given CompletableFuture complete,
* after performing the given action from a task running in the
* {@link ForkJoinPool#commonPool()}.
*
* <p>If this and/or the other CompletableFuture complete
* exceptionally, or the supplied action throws an exception,
* then the returned CompletableFuture completes exceptionally
* with a CompletionException holding the exception as its cause.
*
* @param other the other CompletableFuture
* @param action the action to perform before completing the
* returned CompletableFuture
* @return the new CompletableFuture
*/
public CompletableFuture<Void> runAfterBothAsync(CompletableFuture<?> other,
Runnable action) {
return doRunAfterBoth(other, action, ForkJoinPool.commonPool());
}
/**
* Returns a new CompletableFuture that is asynchronously completed
* when both this and the other given CompletableFuture complete,
* after performing the given action from a task running in the
* given executor.
*
* <p>If this and/or the other CompletableFuture complete
* exceptionally, or the supplied action throws an exception,
* then the returned CompletableFuture completes exceptionally
* with a CompletionException holding the exception as its cause.
*
* @param other the other CompletableFuture
* @param action the action to perform before completing the
* returned CompletableFuture
* @param executor the executor to use for asynchronous execution
* @return the new CompletableFuture
*/
public CompletableFuture<Void> runAfterBothAsync(CompletableFuture<?> other,
Runnable action,
Executor executor) {
if (executor == null) throw new NullPointerException();
return doRunAfterBoth(other, action, executor);
}
    /**
     * Shared implementation of the {@code runAfterBoth*} methods: runs
     * {@code action} after both sources complete — synchronously when
     * {@code e} is null, otherwise on executor {@code e}.
     *
     * @param other the second source future (must be non-null)
     * @param action the action to run (must be non-null)
     * @param e the executor, or null for synchronous execution
     * @return the dependent future
     */
    private CompletableFuture<Void> doRunAfterBoth(CompletableFuture<?> other,
                                                   Runnable action,
                                                   Executor e) {
        if (other == null || action == null) throw new NullPointerException();
        CompletableFuture<Void> dst = new CompletableFuture<Void>();
        RunAfterBoth d = null;
        Object r, s = null;
        if ((r = result) == null || (s = other.result) == null) {
            // At least one source is incomplete: push the same shared
            // completion onto this future's stack (node p), then onto
            // the other's (node q) if it is still incomplete.
            d = new RunAfterBoth(this, other, action, dst, e);
            CompletionNode q = null, p = new CompletionNode(d);
            while ((r == null && (r = result) == null) ||
                   (s == null && (s = other.result) == null)) {
                if (q != null) {
                    // Registered on this already; try the other's stack
                    // unless it completed in the meantime.
                    if (s != null ||
                        UNSAFE.compareAndSwapObject
                        (other, COMPLETIONS, q.next = other.completions, q))
                        break;
                }
                else if (r != null ||
                         UNSAFE.compareAndSwapObject
                         (this, COMPLETIONS, p.next = completions, p)) {
                    if (s != null)
                        break;
                    q = new CompletionNode(d);
                }
            }
        }
        // Both results present: run here only if this call (not the
        // registered completion) wins the 0->1 trigger CAS.
        if (r != null && s != null && (d == null || d.compareAndSet(0, 1))) {
            Throwable ex;
            if (r instanceof AltResult)
                ex = ((AltResult)r).ex;
            else
                ex = null;
            // The first source's exception, if any, takes precedence.
            if (ex == null && (s instanceof AltResult))
                ex = ((AltResult)s).ex;
            if (ex == null) {
                try {
                    if (e != null)
                        e.execute(new AsyncRun(action, dst));
                    else
                        action.run();
                } catch (Throwable rex) {
                    ex = rex;
                }
            }
            // Async tasks complete dst themselves unless dispatch failed.
            if (e == null || ex != null)
                dst.internalComplete(null, ex);
        }
        helpPostComplete();
        other.helpPostComplete();
        return dst;
    }
/**
* Returns a new CompletableFuture that is completed
* when either this or the other given CompletableFuture completes,
* with the result of the given function of either this or the other
* CompletableFuture's result.
*
* <p>If this and/or the other CompletableFuture complete
* exceptionally, then the returned CompletableFuture may also do so,
* with a CompletionException holding one of these exceptions as its
* cause. No guarantees are made about which result or exception is
* used in the returned CompletableFuture. If the supplied function
* throws an exception, then the returned CompletableFuture completes
* exceptionally with a CompletionException holding the exception as
* its cause.
*
* @param other the other CompletableFuture
* @param fn the function to use to compute the value of
* the returned CompletableFuture
* @return the new CompletableFuture
*/
public <U> CompletableFuture<U> applyToEither
(CompletableFuture<? extends T> other,
Fun<? super T, U> fn) {
return doApplyToEither(other, fn, null);
}
/**
* Returns a new CompletableFuture that is asynchronously completed
* when either this or the other given CompletableFuture completes,
* with the result of the given function of either this or the other
* CompletableFuture's result from a task running in the
* {@link ForkJoinPool#commonPool()}.
*
* <p>If this and/or the other CompletableFuture complete
* exceptionally, then the returned CompletableFuture may also do so,
* with a CompletionException holding one of these exceptions as its
* cause. No guarantees are made about which result or exception is
* used in the returned CompletableFuture. If the supplied function
* throws an exception, then the returned CompletableFuture completes
* exceptionally with a CompletionException holding the exception as
* its cause.
*
* @param other the other CompletableFuture
* @param fn the function to use to compute the value of
* the returned CompletableFuture
* @return the new CompletableFuture
*/
public <U> CompletableFuture<U> applyToEitherAsync
(CompletableFuture<? extends T> other,
Fun<? super T, U> fn) {
return doApplyToEither(other, fn, ForkJoinPool.commonPool());
}
/**
* Returns a new CompletableFuture that is asynchronously completed
* when either this or the other given CompletableFuture completes,
* with the result of the given function of either this or the other
* CompletableFuture's result from a task running in the
* given executor.
*
* <p>If this and/or the other CompletableFuture complete
* exceptionally, then the returned CompletableFuture may also do so,
* with a CompletionException holding one of these exceptions as its
* cause. No guarantees are made about which result or exception is
* used in the returned CompletableFuture. If the supplied function
* throws an exception, then the returned CompletableFuture completes
* exceptionally with a CompletionException holding the exception as
* its cause.
*
* @param other the other CompletableFuture
* @param fn the function to use to compute the value of
* the returned CompletableFuture
* @param executor the executor to use for asynchronous execution
* @return the new CompletableFuture
*/
public <U> CompletableFuture<U> applyToEitherAsync
(CompletableFuture<? extends T> other,
Fun<? super T, U> fn,
Executor executor) {
if (executor == null) throw new NullPointerException();
return doApplyToEither(other, fn, executor);
}
    /**
     * Shared implementation of the {@code applyToEither*} methods:
     * completes the returned future with {@code fn} applied to the first
     * available source result — synchronously when {@code e} is null,
     * otherwise on executor {@code e}.
     *
     * @param other the alternative source future (must be non-null)
     * @param fn the mapping function (must be non-null)
     * @param e the executor, or null for synchronous execution
     * @return the dependent future
     */
    private <U> CompletableFuture<U> doApplyToEither
        (CompletableFuture<? extends T> other,
         Fun<? super T, U> fn,
         Executor e) {
        if (other == null || fn == null) throw new NullPointerException();
        CompletableFuture<U> dst = new CompletableFuture<U>();
        ApplyToEither<T,U> d = null;
        Object r;
        if ((r = result) == null && (r = other.result) == null) {
            // Neither source complete: register the same shared
            // completion on this future's stack first, then on the
            // other's, re-checking both results throughout.
            d = new ApplyToEither<T,U>(this, other, fn, dst, e);
            CompletionNode q = null, p = new CompletionNode(d);
            while ((r = result) == null && (r = other.result) == null) {
                if (q != null) {
                    if (UNSAFE.compareAndSwapObject
                        (other, COMPLETIONS, q.next = other.completions, q))
                        break;
                }
                else if (UNSAFE.compareAndSwapObject
                         (this, COMPLETIONS, p.next = completions, p))
                    q = new CompletionNode(d);
            }
        }
        // Some result arrived: apply fn here only if this call (not the
        // registered completion) wins the 0->1 trigger CAS.
        if (r != null && (d == null || d.compareAndSet(0, 1))) {
            T t; Throwable ex;
            if (r instanceof AltResult) {
                ex = ((AltResult)r).ex;
                t = null;
            }
            else {
                ex = null;
                @SuppressWarnings("unchecked") T tr = (T) r;
                t = tr;
            }
            U u = null;
            if (ex == null) {
                try {
                    if (e != null)
                        e.execute(new AsyncApply<T,U>(t, fn, dst));
                    else
                        u = fn.apply(t);
                } catch (Throwable rex) {
                    ex = rex;
                }
            }
            // Async tasks complete dst themselves unless dispatch failed.
            if (e == null || ex != null)
                dst.internalComplete(u, ex);
        }
        helpPostComplete();
        other.helpPostComplete();
        return dst;
    }
/**
* Returns a new CompletableFuture that is completed
* when either this or the other given CompletableFuture completes,
* after performing the given action with the result of either this
* or the other CompletableFuture's result.
*
* <p>If this and/or the other CompletableFuture complete
* exceptionally, then the returned CompletableFuture may also do so,
* with a CompletionException holding one of these exceptions as its
* cause. No guarantees are made about which result or exception is
* used in the returned CompletableFuture. If the supplied action
* throws an exception, then the returned CompletableFuture completes
* exceptionally with a CompletionException holding the exception as
* its cause.
*
* @param other the other CompletableFuture
* @param block the action to perform before completing the
* returned CompletableFuture
* @return the new CompletableFuture
*/
public CompletableFuture<Void> acceptEither
(CompletableFuture<? extends T> other,
Action<? super T> block) {
return doAcceptEither(other, block, null);
}
/**
* Returns a new CompletableFuture that is asynchronously completed
* when either this or the other given CompletableFuture completes,
* after performing the given action with the result of either this
* or the other CompletableFuture's result from a task running in
* the {@link ForkJoinPool#commonPool()}.
*
* <p>If this and/or the other CompletableFuture complete
* exceptionally, then the returned CompletableFuture may also do so,
* with a CompletionException holding one of these exceptions as its
* cause. No guarantees are made about which result or exception is
* used in the returned CompletableFuture. If the supplied action
* throws an exception, then the returned CompletableFuture completes
* exceptionally with a CompletionException holding the exception as
* its cause.
*
* @param other the other CompletableFuture
* @param block the action to perform before completing the
* returned CompletableFuture
* @return the new CompletableFuture
*/
public CompletableFuture<Void> acceptEitherAsync
(CompletableFuture<? extends T> other,
Action<? super T> block) {
return doAcceptEither(other, block, ForkJoinPool.commonPool());
}
/**
* Returns a new CompletableFuture that is asynchronously completed
* when either this or the other given CompletableFuture completes,
* after performing the given action with the result of either this
* or the other CompletableFuture's result from a task running in
* the given executor.
*
* <p>If this and/or the other CompletableFuture complete
* exceptionally, then the returned CompletableFuture may also do so,
* with a CompletionException holding one of these exceptions as its
* cause. No guarantees are made about which result or exception is
* used in the returned CompletableFuture. If the supplied action
* throws an exception, then the returned CompletableFuture completes
* exceptionally with a CompletionException holding the exception as
* its cause.
*
* @param other the other CompletableFuture
* @param block the action to perform before completing the
* returned CompletableFuture
* @param executor the executor to use for asynchronous execution
* @return the new CompletableFuture
*/
public CompletableFuture<Void> acceptEitherAsync
(CompletableFuture<? extends T> other,
Action<? super T> block,
Executor executor) {
if (executor == null) throw new NullPointerException();
return doAcceptEither(other, block, executor);
}
    /**
     * Shared implementation of the {@code acceptEither*} methods: invokes
     * {@code fn} with the first available source result and completes the
     * returned future — synchronously when {@code e} is null, otherwise
     * on executor {@code e}.
     *
     * @param other the alternative source future (must be non-null)
     * @param fn the consuming action (must be non-null)
     * @param e the executor, or null for synchronous execution
     * @return the dependent future
     */
    private CompletableFuture<Void> doAcceptEither
        (CompletableFuture<? extends T> other,
         Action<? super T> fn,
         Executor e) {
        if (other == null || fn == null) throw new NullPointerException();
        CompletableFuture<Void> dst = new CompletableFuture<Void>();
        AcceptEither<T> d = null;
        Object r;
        if ((r = result) == null && (r = other.result) == null) {
            // Neither source complete: register the same shared
            // completion on this future's stack first, then on the
            // other's, re-checking both results throughout.
            d = new AcceptEither<T>(this, other, fn, dst, e);
            CompletionNode q = null, p = new CompletionNode(d);
            while ((r = result) == null && (r = other.result) == null) {
                if (q != null) {
                    if (UNSAFE.compareAndSwapObject
                        (other, COMPLETIONS, q.next = other.completions, q))
                        break;
                }
                else if (UNSAFE.compareAndSwapObject
                         (this, COMPLETIONS, p.next = completions, p))
                    q = new CompletionNode(d);
            }
        }
        // Some result arrived: act here only if this call (not the
        // registered completion) wins the 0->1 trigger CAS.
        if (r != null && (d == null || d.compareAndSet(0, 1))) {
            T t; Throwable ex;
            if (r instanceof AltResult) {
                ex = ((AltResult)r).ex;
                t = null;
            }
            else {
                ex = null;
                @SuppressWarnings("unchecked") T tr = (T) r;
                t = tr;
            }
            if (ex == null) {
                try {
                    if (e != null)
                        e.execute(new AsyncAccept<T>(t, fn, dst));
                    else
                        fn.accept(t);
                } catch (Throwable rex) {
                    ex = rex;
                }
            }
            // Async tasks complete dst themselves unless dispatch failed.
            if (e == null || ex != null)
                dst.internalComplete(null, ex);
        }
        helpPostComplete();
        other.helpPostComplete();
        return dst;
    }
/**
* Returns a new CompletableFuture that is completed
* when either this or the other given CompletableFuture completes,
* after performing the given action.
*
* <p>If this and/or the other CompletableFuture complete
* exceptionally, then the returned CompletableFuture may also do so,
* with a CompletionException holding one of these exceptions as its
* cause. No guarantees are made about which result or exception is
* used in the returned CompletableFuture. If the supplied action
* throws an exception, then the returned CompletableFuture completes
* exceptionally with a CompletionException holding the exception as
* its cause.
*
* @param other the other CompletableFuture
* @param action the action to perform before completing the
* returned CompletableFuture
* @return the new CompletableFuture
*/
public CompletableFuture<Void> runAfterEither(CompletableFuture<?> other,
Runnable action) {
return doRunAfterEither(other, action, null);
}
/**
* Returns a new CompletableFuture that is asynchronously completed
* when either this or the other given CompletableFuture completes,
* after performing the given action from a task running in the
* {@link ForkJoinPool#commonPool()}.
*
* <p>If this and/or the other CompletableFuture complete
* exceptionally, then the returned CompletableFuture may also do so,
* with a CompletionException holding one of these exceptions as its
* cause. No guarantees are made about which result or exception is
* used in the returned CompletableFuture. If the supplied action
* throws an exception, then the returned CompletableFuture completes
* exceptionally with a CompletionException holding the exception as
* its cause.
*
* @param other the other CompletableFuture
* @param action the action to perform before completing the
* returned CompletableFuture
* @return the new CompletableFuture
*/
public CompletableFuture<Void> runAfterEitherAsync
(CompletableFuture<?> other,
Runnable action) {
return doRunAfterEither(other, action, ForkJoinPool.commonPool());
}
/**
* Returns a new CompletableFuture that is asynchronously completed
* when either this or the other given CompletableFuture completes,
* after performing the given action from a task running in the
* given executor.
*
* <p>If this and/or the other CompletableFuture complete
* exceptionally, then the returned CompletableFuture may also do so,
* with a CompletionException holding one of these exceptions as its
* cause. No guarantees are made about which result or exception is
* used in the returned CompletableFuture. If the supplied action
* throws an exception, then the returned CompletableFuture completes
* exceptionally with a CompletionException holding the exception as
* its cause.
*
* @param other the other CompletableFuture
* @param action the action to perform before completing the
* returned CompletableFuture
* @param executor the executor to use for asynchronous execution
* @return the new CompletableFuture
*/
public CompletableFuture<Void> runAfterEitherAsync
(CompletableFuture<?> other,
Runnable action,
Executor executor) {
if (executor == null) throw new NullPointerException();
return doRunAfterEither(other, action, executor);
}
    /**
     * Shared implementation of the {@code runAfterEither*} methods: runs
     * {@code action} once the first source completes — synchronously when
     * {@code e} is null, otherwise on executor {@code e}.
     *
     * @param other the alternative source future (must be non-null)
     * @param action the action to run (must be non-null)
     * @param e the executor, or null for synchronous execution
     * @return the dependent future
     */
    private CompletableFuture<Void> doRunAfterEither
        (CompletableFuture<?> other,
         Runnable action,
         Executor e) {
        if (other == null || action == null) throw new NullPointerException();
        CompletableFuture<Void> dst = new CompletableFuture<Void>();
        RunAfterEither d = null;
        Object r;
        if ((r = result) == null && (r = other.result) == null) {
            // Neither source complete: register the same shared
            // completion on this future's stack first, then on the
            // other's, re-checking both results throughout.
            d = new RunAfterEither(this, other, action, dst, e);
            CompletionNode q = null, p = new CompletionNode(d);
            while ((r = result) == null && (r = other.result) == null) {
                if (q != null) {
                    if (UNSAFE.compareAndSwapObject
                        (other, COMPLETIONS, q.next = other.completions, q))
                        break;
                }
                else if (UNSAFE.compareAndSwapObject
                         (this, COMPLETIONS, p.next = completions, p))
                    q = new CompletionNode(d);
            }
        }
        // Some result arrived: run here only if this call (not the
        // registered completion) wins the 0->1 trigger CAS.
        if (r != null && (d == null || d.compareAndSet(0, 1))) {
            Throwable ex;
            if (r instanceof AltResult)
                ex = ((AltResult)r).ex;
            else
                ex = null;
            if (ex == null) {
                try {
                    if (e != null)
                        e.execute(new AsyncRun(action, dst));
                    else
                        action.run();
                } catch (Throwable rex) {
                    ex = rex;
                }
            }
            // Async tasks complete dst themselves unless dispatch failed.
            if (e == null || ex != null)
                dst.internalComplete(null, ex);
        }
        helpPostComplete();
        other.helpPostComplete();
        return dst;
    }
/**
* Returns a CompletableFuture that upon completion, has the same
* value as produced by the given function of the result of this
* CompletableFuture.
*
* <p>If this CompletableFuture completes exceptionally, then the
* returned CompletableFuture also does so, with a
* CompletionException holding this exception as its cause.
* Similarly, if the computed CompletableFuture completes
* exceptionally, then so does the returned CompletableFuture.
*
* @param fn the function returning a new CompletableFuture
* @return the CompletableFuture
*/
public <U> CompletableFuture<U> thenCompose
(Fun<? super T, CompletableFuture<U>> fn) {
return doThenCompose(fn, null);
}
/**
* Returns a CompletableFuture that upon completion, has the same
* value as that produced asynchronously using the {@link
* ForkJoinPool#commonPool()} by the given function of the result
* of this CompletableFuture.
*
* <p>If this CompletableFuture completes exceptionally, then the
* returned CompletableFuture also does so, with a
* CompletionException holding this exception as its cause.
* Similarly, if the computed CompletableFuture completes
* exceptionally, then so does the returned CompletableFuture.
*
* @param fn the function returning a new CompletableFuture
* @return the CompletableFuture
*/
public <U> CompletableFuture<U> thenComposeAsync
(Fun<? super T, CompletableFuture<U>> fn) {
return doThenCompose(fn, ForkJoinPool.commonPool());
}
/**
* Returns a CompletableFuture that upon completion, has the same
* value as that produced asynchronously using the given executor
* by the given function of this CompletableFuture.
*
* <p>If this CompletableFuture completes exceptionally, then the
* returned CompletableFuture also does so, with a
* CompletionException holding this exception as its cause.
* Similarly, if the computed CompletableFuture completes
* exceptionally, then so does the returned CompletableFuture.
*
* @param fn the function returning a new CompletableFuture
* @param executor the executor to use for asynchronous execution
* @return the CompletableFuture
*/
public <U> CompletableFuture<U> thenComposeAsync
(Fun<? super T, CompletableFuture<U>> fn,
Executor executor) {
if (executor == null) throw new NullPointerException();
return doThenCompose(fn, executor);
}
    /**
     * Shared implementation of the {@code thenCompose*} methods.
     * Unlike the other {@code do*} helpers, {@code dst} is created
     * lazily: on the synchronous already-completed path the future
     * returned by {@code fn} itself becomes the returned future.
     *
     * @param fn the composing function (must be non-null; must not
     *           return null, which is reported as NullPointerException)
     * @param e the executor, or null for synchronous execution
     * @return the composed future
     */
    private <U> CompletableFuture<U> doThenCompose
        (Fun<? super T, CompletableFuture<U>> fn,
         Executor e) {
        if (fn == null) throw new NullPointerException();
        CompletableFuture<U> dst = null;
        ThenCompose<T,U> d = null;
        Object r;
        if ((r = result) == null) {
            // Not yet complete: create dst now and register on the
            // completion stack, re-checking result on each CAS attempt.
            dst = new CompletableFuture<U>();
            CompletionNode p = new CompletionNode
                (d = new ThenCompose<T,U>(this, fn, dst, e));
            while ((r = result) == null) {
                if (UNSAFE.compareAndSwapObject
                    (this, COMPLETIONS, p.next = completions, p))
                    break;
            }
        }
        // Result available: compose here only if this call (not the
        // registered completion) wins the 0->1 trigger CAS.
        if (r != null && (d == null || d.compareAndSet(0, 1))) {
            T t; Throwable ex;
            if (r instanceof AltResult) {
                ex = ((AltResult)r).ex;
                t = null;
            }
            else {
                ex = null;
                @SuppressWarnings("unchecked") T tr = (T) r;
                t = tr;
            }
            if (ex == null) {
                if (e != null) {
                    if (dst == null)
                        dst = new CompletableFuture<U>();
                    e.execute(new AsyncCompose<T,U>(t, fn, dst));
                }
                else {
                    // Synchronous path: fn's future IS the result; a
                    // null return is surfaced as NullPointerException.
                    try {
                        if ((dst = fn.apply(t)) == null)
                            ex = new NullPointerException();
                    } catch (Throwable rex) {
                        ex = rex;
                    }
                }
            }
            // Ensure a non-null dst exists to carry any exception.
            if (dst == null)
                dst = new CompletableFuture<U>();
            if (ex != null)
                dst.internalComplete(null, ex);
        }
        helpPostComplete();
        dst.helpPostComplete();
        return dst;
    }
    /**
     * Returns a new CompletableFuture that is completed when this
     * CompletableFuture completes, with the result of the given
     * function of the exception triggering this CompletableFuture's
     * completion when it completes exceptionally; otherwise, if this
     * CompletableFuture completes normally, then the returned
     * CompletableFuture also completes normally with the same value.
     *
     * @param fn the function to use to compute the value of the
     * returned CompletableFuture if this CompletableFuture completed
     * exceptionally
     * @return the new CompletableFuture
     */
    public CompletableFuture<T> exceptionally
        (Fun<Throwable, ? extends T> fn) {
        if (fn == null) throw new NullPointerException();
        CompletableFuture<T> dst = new CompletableFuture<T>();
        ExceptionCompletion<T> d = null;
        Object r;
        if ((r = result) == null) {
            // Not yet complete: register on the completion stack,
            // re-checking result on each CAS attempt.
            CompletionNode p =
                new CompletionNode(d = new ExceptionCompletion<T>(this, fn, dst));
            while ((r = result) == null) {
                if (UNSAFE.compareAndSwapObject(this, COMPLETIONS,
                                                p.next = completions, p))
                    break;
            }
        }
        // Result available: handle here only if this call (not the
        // registered completion) wins the 0->1 trigger CAS.
        if (r != null && (d == null || d.compareAndSet(0, 1))) {
            T t = null; Throwable ex, dx = null;
            if (r instanceof AltResult) {
                if ((ex = ((AltResult)r).ex) != null) {
                    // Exceptional source: map the exception to a value;
                    // if fn itself throws, that exception propagates
                    // instead (via dx).
                    try {
                        t = fn.apply(ex);
                    } catch (Throwable rex) {
                        dx = rex;
                    }
                }
            }
            else {
                // Normal completion passes through unchanged; fn is
                // not invoked.
                @SuppressWarnings("unchecked") T tr = (T) r;
                t = tr;
            }
            dst.internalComplete(t, dx);
        }
        helpPostComplete();
        return dst;
    }
    /**
     * Returns a new CompletableFuture that is completed when this
     * CompletableFuture completes, with the result of the given
     * function of the result and exception of this CompletableFuture's
     * completion. The given function is invoked with the result (or
     * {@code null} if none) and the exception (or {@code null} if none)
     * of this CompletableFuture when complete.
     *
     * @param fn the function to use to compute the value of the
     * returned CompletableFuture
     * @return the new CompletableFuture
     */
    public <U> CompletableFuture<U> handle
        (BiFun<? super T, Throwable, ? extends U> fn) {
        if (fn == null) throw new NullPointerException();
        CompletableFuture<U> dst = new CompletableFuture<U>();
        HandleCompletion<T,U> d = null;
        Object r;
        if ((r = result) == null) {
            // Not yet complete: register on the completion stack,
            // re-checking result on each CAS attempt.
            CompletionNode p =
                new CompletionNode(d = new HandleCompletion<T,U>(this, fn, dst));
            while ((r = result) == null) {
                if (UNSAFE.compareAndSwapObject(this, COMPLETIONS,
                                                p.next = completions, p))
                    break;
            }
        }
        // Result available: handle here only if this call (not the
        // registered completion) wins the 0->1 trigger CAS.
        if (r != null && (d == null || d.compareAndSet(0, 1))) {
            T t; Throwable ex;
            if (r instanceof AltResult) {
                ex = ((AltResult)r).ex;
                t = null;
            }
            else {
                ex = null;
                @SuppressWarnings("unchecked") T tr = (T) r;
                t = tr;
            }
            // Unlike exceptionally(), fn is always invoked, receiving
            // exactly one non-null of (t, ex); only an exception thrown
            // by fn itself completes dst exceptionally.
            U u; Throwable dx;
            try {
                u = fn.apply(t, ex);
                dx = null;
            } catch (Throwable rex) {
                dx = rex;
                u = null;
            }
            dst.internalComplete(u, dx);
        }
        helpPostComplete();
        return dst;
    }
/* ------------- Arbitrary-arity constructions -------------- */
/*
* The basic plan of attack is to recursively form binary
* completion trees of elements. This can be overkill for small
* sets, but scales nicely. The And/All vs Or/Any forms use the
* same idea, but details differ.
*/
/**
* Returns a new CompletableFuture that is completed when all of
* the given CompletableFutures complete. If any of the given
* CompletableFutures complete exceptionally, then the returned
* CompletableFuture also does so, with a CompletionException
* holding this exception as its cause. Otherwise, the results,
* if any, of the given CompletableFutures are not reflected in
* the returned CompletableFuture, but may be obtained by
* inspecting them individually. If no CompletableFutures are
* provided, returns a CompletableFuture completed with the value
* {@code null}.
*
* <p>Among the applications of this method is to await completion
* of a set of independent CompletableFutures before continuing a
* program, as in: {@code CompletableFuture.allOf(c1, c2,
* c3).join();}.
*
* @param cfs the CompletableFutures
* @return a new CompletableFuture that is completed when all of the
* given CompletableFutures complete
* @throws NullPointerException if the array or any of its elements are
* {@code null}
*/
public static CompletableFuture<Void> allOf(CompletableFuture<?>... cfs) {
int len = cfs.length; // Directly handle empty and singleton cases
if (len > 1)
return allTree(cfs, 0, len - 1);
else {
CompletableFuture<Void> dst = new CompletableFuture<Void>();
CompletableFuture<?> f;
if (len == 0)
dst.result = NIL;
else if ((f = cfs[0]) == null)
throw new NullPointerException();
else {
ThenPropagate d = null;
CompletionNode p = null;
Object r;
while ((r = f.result) == null) {
if (d == null)
d = new ThenPropagate(f, dst);
else if (p == null)
p = new CompletionNode(d);
else if (UNSAFE.compareAndSwapObject
(f, COMPLETIONS, p.next = f.completions, p))
break;
}
if (r != null && (d == null || d.compareAndSet(0, 1)))
dst.internalComplete(null, (r instanceof AltResult) ?
((AltResult)r).ex : null);
f.helpPostComplete();
}
return dst;
}
}
/**
* Recursively constructs an And'ed tree of CompletableFutures.
* Called only when array known to have at least two elements.
*/
private static CompletableFuture<Void> allTree(CompletableFuture<?>[] cfs,
int lo, int hi) {
CompletableFuture<?> fst, snd;
int mid = (lo + hi) >>> 1;
if ((fst = (lo == mid ? cfs[lo] : allTree(cfs, lo, mid))) == null ||
(snd = (hi == mid+1 ? cfs[hi] : allTree(cfs, mid+1, hi))) == null)
throw new NullPointerException();
CompletableFuture<Void> dst = new CompletableFuture<Void>();
AndCompletion d = null;
CompletionNode p = null, q = null;
Object r = null, s = null;
while ((r = fst.result) == null || (s = snd.result) == null) {
if (d == null)
d = new AndCompletion(fst, snd, dst);
else if (p == null)
p = new CompletionNode(d);
else if (q == null) {
if (UNSAFE.compareAndSwapObject
(fst, COMPLETIONS, p.next = fst.completions, p))
q = new CompletionNode(d);
}
else if (UNSAFE.compareAndSwapObject
(snd, COMPLETIONS, q.next = snd.completions, q))
break;
}
if ((r != null || (r = fst.result) != null) &&
(s != null || (s = snd.result) != null) &&
(d == null || d.compareAndSet(0, 1))) {
Throwable ex;
if (r instanceof AltResult)
ex = ((AltResult)r).ex;
else
ex = null;
if (ex == null && (s instanceof AltResult))
ex = ((AltResult)s).ex;
dst.internalComplete(null, ex);
}
fst.helpPostComplete();
snd.helpPostComplete();
return dst;
}
/**
* Returns a new CompletableFuture that is completed when any of
* the given CompletableFutures complete, with the same result.
* Otherwise, if it completed exceptionally, the returned
* CompletableFuture also does so, with a CompletionException
* holding this exception as its cause. If no CompletableFutures
* are provided, returns an incomplete CompletableFuture.
*
* @param cfs the CompletableFutures
* @return a new CompletableFuture that is completed with the
* result or exception of any of the given CompletableFutures when
* one completes
* @throws NullPointerException if the array or any of its elements are
* {@code null}
*/
public static CompletableFuture<Object> anyOf(CompletableFuture<?>... cfs) {
int len = cfs.length; // Same idea as allOf
if (len > 1)
return anyTree(cfs, 0, len - 1);
else {
CompletableFuture<Object> dst = new CompletableFuture<Object>();
CompletableFuture<?> f;
if (len == 0)
; // skip
else if ((f = cfs[0]) == null)
throw new NullPointerException();
else {
ThenCopy<Object> d = null;
CompletionNode p = null;
Object r;
while ((r = f.result) == null) {
if (d == null)
d = new ThenCopy<Object>(f, dst);
else if (p == null)
p = new CompletionNode(d);
else if (UNSAFE.compareAndSwapObject
(f, COMPLETIONS, p.next = f.completions, p))
break;
}
if (r != null && (d == null || d.compareAndSet(0, 1))) {
Throwable ex; Object t;
if (r instanceof AltResult) {
ex = ((AltResult)r).ex;
t = null;
}
else {
ex = null;
t = r;
}
dst.internalComplete(t, ex);
}
f.helpPostComplete();
}
return dst;
}
}
/**
* Recursively constructs an Or'ed tree of CompletableFutures.
*/
private static CompletableFuture<Object> anyTree(CompletableFuture<?>[] cfs,
int lo, int hi) {
CompletableFuture<?> fst, snd;
int mid = (lo + hi) >>> 1;
if ((fst = (lo == mid ? cfs[lo] : anyTree(cfs, lo, mid))) == null ||
(snd = (hi == mid+1 ? cfs[hi] : anyTree(cfs, mid+1, hi))) == null)
throw new NullPointerException();
CompletableFuture<Object> dst = new CompletableFuture<Object>();
OrCompletion d = null;
CompletionNode p = null, q = null;
Object r;
while ((r = fst.result) == null && (r = snd.result) == null) {
if (d == null)
d = new OrCompletion(fst, snd, dst);
else if (p == null)
p = new CompletionNode(d);
else if (q == null) {
if (UNSAFE.compareAndSwapObject
(fst, COMPLETIONS, p.next = fst.completions, p))
q = new CompletionNode(d);
}
else if (UNSAFE.compareAndSwapObject
(snd, COMPLETIONS, q.next = snd.completions, q))
break;
}
if ((r != null || (r = fst.result) != null ||
(r = snd.result) != null) &&
(d == null || d.compareAndSet(0, 1))) {
Throwable ex; Object t;
if (r instanceof AltResult) {
ex = ((AltResult)r).ex;
t = null;
}
else {
ex = null;
t = r;
}
dst.internalComplete(t, ex);
}
fst.helpPostComplete();
snd.helpPostComplete();
return dst;
}
/* ------------- Control and status methods -------------- */
/**
* If not already completed, completes this CompletableFuture with
* a {@link CancellationException}. Dependent CompletableFutures
* that have not already completed will also complete
* exceptionally, with a {@link CompletionException} caused by
* this {@code CancellationException}.
*
* @param mayInterruptIfRunning this value has no effect in this
* implementation because interrupts are not used to control
* processing.
*
* @return {@code true} if this task is now cancelled
*/
public boolean cancel(boolean mayInterruptIfRunning) {
boolean cancelled = (result == null) &&
UNSAFE.compareAndSwapObject
(this, RESULT, null, new AltResult(new CancellationException()));
postComplete();
return cancelled || isCancelled();
}
/**
* Returns {@code true} if this CompletableFuture was cancelled
* before it completed normally.
*
* @return {@code true} if this CompletableFuture was cancelled
* before it completed normally
*/
public boolean isCancelled() {
Object r;
return ((r = result) instanceof AltResult) &&
(((AltResult)r).ex instanceof CancellationException);
}
/**
* Forcibly sets or resets the value subsequently returned by
* method {@link #get()} and related methods, whether or not
* already completed. This method is designed for use only in
* error recovery actions, and even in such situations may result
* in ongoing dependent completions using established versus
* overwritten outcomes.
*
* @param value the completion value
*/
public void obtrudeValue(T value) {
result = (value == null) ? NIL : value;
postComplete();
}
/**
* Forcibly causes subsequent invocations of method {@link #get()}
* and related methods to throw the given exception, whether or
* not already completed. This method is designed for use only in
* recovery actions, and even in such situations may result in
* ongoing dependent completions using established versus
* overwritten outcomes.
*
* @param ex the exception
*/
public void obtrudeException(Throwable ex) {
if (ex == null) throw new NullPointerException();
result = new AltResult(ex);
postComplete();
}
/**
* Returns the estimated number of CompletableFutures whose
* completions are awaiting completion of this CompletableFuture.
* This method is designed for use in monitoring system state, not
* for synchronization control.
*
* @return the number of dependent CompletableFutures
*/
public int getNumberOfDependents() {
int count = 0;
for (CompletionNode p = completions; p != null; p = p.next)
++count;
return count;
}
/**
* Returns a string identifying this CompletableFuture, as well as
* its completion state. The state, in brackets, contains the
* String {@code "Completed Normally"} or the String {@code
* "Completed Exceptionally"}, or the String {@code "Not
* completed"} followed by the number of CompletableFutures
* dependent upon its completion, if any.
*
* @return a string identifying this CompletableFuture, as well as its state
*/
public String toString() {
Object r = result;
int count;
return super.toString() +
((r == null) ?
(((count = getNumberOfDependents()) == 0) ?
"[Not completed]" :
"[Not completed, " + count + " dependents]") :
(((r instanceof AltResult) && ((AltResult)r).ex != null) ?
"[Completed exceptionally]" :
"[Completed normally]"));
}
// Unsafe mechanics
private static final sun.misc.Unsafe UNSAFE;
private static final long RESULT;
private static final long WAITERS;
private static final long COMPLETIONS;
static {
try {
UNSAFE = getUnsafe();
Class<?> k = CompletableFuture.class;
RESULT = UNSAFE.objectFieldOffset
(k.getDeclaredField("result"));
WAITERS = UNSAFE.objectFieldOffset
(k.getDeclaredField("waiters"));
COMPLETIONS = UNSAFE.objectFieldOffset
(k.getDeclaredField("completions"));
} catch (Exception e) {
throw new Error(e);
}
}
/**
* Returns a sun.misc.Unsafe. Suitable for use in a 3rd party package.
* Replace with a simple call to Unsafe.getUnsafe when integrating
* into a jdk.
*
* @return a sun.misc.Unsafe
*/
private static sun.misc.Unsafe getUnsafe() {
try {
return sun.misc.Unsafe.getUnsafe();
} catch (SecurityException tryReflectionInstead) {}
try {
return java.security.AccessController.doPrivileged
(new java.security.PrivilegedExceptionAction<sun.misc.Unsafe>() {
public sun.misc.Unsafe run() throws Exception {
Class<sun.misc.Unsafe> k = sun.misc.Unsafe.class;
for (java.lang.reflect.Field f : k.getDeclaredFields()) {
f.setAccessible(true);
Object x = f.get(null);
if (k.isInstance(x))
return k.cast(x);
}
throw new NoSuchFieldError("the Unsafe");
}});
} catch (java.security.PrivilegedActionException e) {
throw new RuntimeException("Could not initialize intrinsics",
e.getCause());
}
}
} | 0true
| src_main_java_jsr166e_CompletableFuture.java |
1,914 | public final class SizeEstimators {
private SizeEstimators() {
}
public static SizeEstimator createMapSizeEstimator() {
return new MapSizeEstimator();
}
public static SizeEstimator createNearCacheSizeEstimator() {
return new NearCacheSizeEstimator();
}
} | 0true
| hazelcast_src_main_java_com_hazelcast_map_SizeEstimators.java |
1,326 | createSingleNodeExecutorService("testIssue292").submit(new MemberCheck(), new ExecutionCallback<Member>() {
public void onResponse(Member response) {
qResponse.offer(response);
}
public void onFailure(Throwable t) {
}
}); | 0true
| hazelcast_src_test_java_com_hazelcast_executor_ExecutorServiceTest.java |
556 | public abstract class AbstractLoadBalancer implements LoadBalancer, MembershipListener {
private final AtomicReference<Member[]> membersRef = new AtomicReference(new Member[]{});
private volatile Cluster clusterRef;
@Override
public final void init(Cluster cluster, ClientConfig config) {
this.clusterRef = cluster;
setMembersRef();
cluster.addMembershipListener(this);
}
private void setMembersRef() {
Cluster cluster = clusterRef;
if (cluster != null) {
Set<Member> memberSet = cluster.getMembers();
Member[] members = memberSet.toArray(new Member[memberSet.size()]);
membersRef.set(members);
}
}
protected Member[] getMembers() {
return membersRef.get();
}
@Override
public final void memberAdded(MembershipEvent membershipEvent) {
setMembersRef();
}
@Override
public final void memberRemoved(MembershipEvent membershipEvent) {
setMembersRef();
}
@Override
public void memberAttributeChanged(MemberAttributeEvent memberAttributeEvent) {
}
} | 0true
| hazelcast-client_src_main_java_com_hazelcast_client_util_AbstractLoadBalancer.java |
261 | public interface OCommandContext {
public enum TIMEOUT_STRATEGY {
RETURN, EXCEPTION
}
public Object getVariable(String iName);
public Object getVariable(String iName, Object iDefaultValue);
public OCommandContext setVariable(final String iName, final Object iValue);
public Map<String, Object> getVariables();
public OCommandContext getParent();
public OCommandContext setParent(OCommandContext iParentContext);
public OCommandContext setChild(OCommandContext context);
/**
* Updates a counter. Used to record metrics.
*
* @param iName
* Metric's name
* @param iValue
* delta to add or subtract
* @return
*/
public long updateMetric(String iName, long iValue);
public boolean isRecordingMetrics();
public OCommandContext setRecordingMetrics(boolean recordMetrics);
public void beginExecution(long timeoutMs, TIMEOUT_STRATEGY iStrategy);
public boolean checkTimeout();
} | 0true
| core_src_main_java_com_orientechnologies_orient_core_command_OCommandContext.java |
1,419 | public static interface RemoveListener {
void onResponse(RemoveResponse response);
void onFailure(Throwable t);
} | 0true
| src_main_java_org_elasticsearch_cluster_metadata_MetaDataIndexTemplateService.java |
283 | TSSLTransportFactory.TSSLTransportParameters params = new TSSLTransportFactory.TSSLTransportParameters() {{
setTrustStore(cfg.sslTruststoreLocation, cfg.sslTruststorePassword);
}}; | 0true
| titan-cassandra_src_main_java_com_thinkaurelius_titan_diskstorage_cassandra_thrift_thriftpool_CTConnectionFactory.java |
1,397 | public class VersionAwareMapMergePolicy implements MapMergePolicy {
public Object merge(String mapName, EntryView mergingEntry, EntryView existingEntry) {
final Object existingValue = existingEntry != null ? existingEntry.getValue() : null;
final Object mergingValue = mergingEntry.getValue();
if (existingValue != null && existingValue instanceof CacheEntry
&& mergingValue != null && mergingValue instanceof CacheEntry) {
final CacheEntry existingCacheEntry = (CacheEntry) existingValue;
final CacheEntry mergingCacheEntry = (CacheEntry) mergingValue;
final Object mergingVersionObject = mergingCacheEntry.getVersion();
final Object existingVersionObject = existingCacheEntry.getVersion();
if (mergingVersionObject != null && existingVersionObject != null
&& mergingVersionObject instanceof Comparable && existingVersionObject instanceof Comparable) {
final Comparable mergingVersion = (Comparable) mergingVersionObject;
final Comparable existingVersion = (Comparable) existingVersionObject;
if (mergingVersion.compareTo(existingVersion) > 0) {
return mergingValue;
} else {
return existingValue;
}
}
}
return mergingValue;
}
@Override
public void writeData(ObjectDataOutput out) throws IOException {
}
@Override
public void readData(ObjectDataInput in) throws IOException {
}
} | 0true
| hazelcast-hibernate_hazelcast-hibernate3_src_main_java_com_hazelcast_hibernate_VersionAwareMapMergePolicy.java |
140 | abstract class Striped64 extends Number {
/*
* This class maintains a lazily-initialized table of atomically
* updated variables, plus an extra "base" field. The table size
* is a power of two. Indexing uses masked per-thread hash codes.
* Nearly all declarations in this class are package-private,
* accessed directly by subclasses.
*
* Table entries are of class Cell; a variant of AtomicLong padded
* to reduce cache contention on most processors. Padding is
* overkill for most Atomics because they are usually irregularly
* scattered in memory and thus don't interfere much with each
* other. But Atomic objects residing in arrays will tend to be
* placed adjacent to each other, and so will most often share
* cache lines (with a huge negative performance impact) without
* this precaution.
*
* In part because Cells are relatively large, we avoid creating
* them until they are needed. When there is no contention, all
* updates are made to the base field. Upon first contention (a
* failed CAS on base update), the table is initialized to size 2.
* The table size is doubled upon further contention until
* reaching the nearest power of two greater than or equal to the
* number of CPUS. Table slots remain empty (null) until they are
* needed.
*
* A single spinlock ("busy") is used for initializing and
* resizing the table, as well as populating slots with new Cells.
* There is no need for a blocking lock; when the lock is not
* available, threads try other slots (or the base). During these
* retries, there is increased contention and reduced locality,
* which is still better than alternatives.
*
* Per-thread hash codes are initialized to random values.
* Contention and/or table collisions are indicated by failed
* CASes when performing an update operation (see method
* retryUpdate). Upon a collision, if the table size is less than
* the capacity, it is doubled in size unless some other thread
* holds the lock. If a hashed slot is empty, and lock is
* available, a new Cell is created. Otherwise, if the slot
* exists, a CAS is tried. Retries proceed by "double hashing",
* using a secondary hash (Marsaglia XorShift) to try to find a
* free slot.
*
* The table size is capped because, when there are more threads
* than CPUs, supposing that each thread were bound to a CPU,
* there would exist a perfect hash function mapping threads to
* slots that eliminates collisions. When we reach capacity, we
* search for this mapping by randomly varying the hash codes of
* colliding threads. Because search is random, and collisions
* only become known via CAS failures, convergence can be slow,
* and because threads are typically not bound to CPUS forever,
* may not occur at all. However, despite these limitations,
* observed contention rates are typically low in these cases.
*
* It is possible for a Cell to become unused when threads that
* once hashed to it terminate, as well as in the case where
* doubling the table causes no thread to hash to it under
* expanded mask. We do not try to detect or remove such cells,
* under the assumption that for long-running instances, observed
* contention levels will recur, so the cells will eventually be
* needed again; and for short-lived ones, it does not matter.
*/
/**
* Padded variant of AtomicLong supporting only raw accesses plus CAS.
* The value field is placed between pads, hoping that the JVM doesn't
* reorder them.
*
* JVM intrinsics note: It would be possible to use a release-only
* form of CAS here, if it were provided.
*/
static final class Cell {
volatile long p0, p1, p2, p3, p4, p5, p6;
volatile long value;
volatile long q0, q1, q2, q3, q4, q5, q6;
Cell(long x) { value = x; }
final boolean cas(long cmp, long val) {
return UNSAFE.compareAndSwapLong(this, valueOffset, cmp, val);
}
// Unsafe mechanics
private static final sun.misc.Unsafe UNSAFE;
private static final long valueOffset;
static {
try {
UNSAFE = getUnsafe();
Class<?> ak = Cell.class;
valueOffset = UNSAFE.objectFieldOffset
(ak.getDeclaredField("value"));
} catch (Exception e) {
throw new Error(e);
}
}
}
/**
* Holder for the thread-local hash code. The code is initially
* random, but may be set to a different value upon collisions.
*/
static final class HashCode {
static final Random rng = new Random();
int code;
HashCode() {
int h = rng.nextInt(); // Avoid zero to allow xorShift rehash
code = (h == 0) ? 1 : h;
}
}
/**
* The corresponding ThreadLocal class
*/
static final class ThreadHashCode extends ThreadLocal<HashCode> {
public HashCode initialValue() { return new HashCode(); }
}
/**
* Static per-thread hash codes. Shared across all instances to
* reduce ThreadLocal pollution and because adjustments due to
* collisions in one table are likely to be appropriate for
* others.
*/
static final ThreadHashCode threadHashCode = new ThreadHashCode();
/** Number of CPUS, to place bound on table size */
static final int NCPU = Runtime.getRuntime().availableProcessors();
/**
* Table of cells. When non-null, size is a power of 2.
*/
transient volatile Cell[] cells;
/**
* Base value, used mainly when there is no contention, but also as
* a fallback during table initialization races. Updated via CAS.
*/
transient volatile long base;
/**
* Spinlock (locked via CAS) used when resizing and/or creating Cells.
*/
transient volatile int busy;
/**
* Package-private default constructor
*/
Striped64() {
}
/**
* CASes the base field.
*/
final boolean casBase(long cmp, long val) {
return UNSAFE.compareAndSwapLong(this, baseOffset, cmp, val);
}
/**
* CASes the busy field from 0 to 1 to acquire lock.
*/
final boolean casBusy() {
return UNSAFE.compareAndSwapInt(this, busyOffset, 0, 1);
}
/**
* Computes the function of current and new value. Subclasses
* should open-code this update function for most uses, but the
* virtualized form is needed within retryUpdate.
*
* @param currentValue the current value (of either base or a cell)
* @param newValue the argument from a user update call
* @return result of the update function
*/
abstract long fn(long currentValue, long newValue);
/**
* Handles cases of updates involving initialization, resizing,
* creating new Cells, and/or contention. See above for
* explanation. This method suffers the usual non-modularity
* problems of optimistic retry code, relying on rechecked sets of
* reads.
*
* @param x the value
* @param hc the hash code holder
* @param wasUncontended false if CAS failed before call
*/
final void retryUpdate(long x, HashCode hc, boolean wasUncontended) {
int h = hc.code;
boolean collide = false; // True if last slot nonempty
for (;;) {
Cell[] as; Cell a; int n; long v;
if ((as = cells) != null && (n = as.length) > 0) {
if ((a = as[(n - 1) & h]) == null) {
if (busy == 0) { // Try to attach new Cell
Cell r = new Cell(x); // Optimistically create
if (busy == 0 && casBusy()) {
boolean created = false;
try { // Recheck under lock
Cell[] rs; int m, j;
if ((rs = cells) != null &&
(m = rs.length) > 0 &&
rs[j = (m - 1) & h] == null) {
rs[j] = r;
created = true;
}
} finally {
busy = 0;
}
if (created)
break;
continue; // Slot is now non-empty
}
}
collide = false;
}
else if (!wasUncontended) // CAS already known to fail
wasUncontended = true; // Continue after rehash
else if (a.cas(v = a.value, fn(v, x)))
break;
else if (n >= NCPU || cells != as)
collide = false; // At max size or stale
else if (!collide)
collide = true;
else if (busy == 0 && casBusy()) {
try {
if (cells == as) { // Expand table unless stale
Cell[] rs = new Cell[n << 1];
for (int i = 0; i < n; ++i)
rs[i] = as[i];
cells = rs;
}
} finally {
busy = 0;
}
collide = false;
continue; // Retry with expanded table
}
h ^= h << 13; // Rehash
h ^= h >>> 17;
h ^= h << 5;
}
else if (busy == 0 && cells == as && casBusy()) {
boolean init = false;
try { // Initialize table
if (cells == as) {
Cell[] rs = new Cell[2];
rs[h & 1] = new Cell(x);
cells = rs;
init = true;
}
} finally {
busy = 0;
}
if (init)
break;
}
else if (casBase(v = base, fn(v, x)))
break; // Fall back on using base
}
hc.code = h; // Record index for next time
}
/**
* Sets base and all cells to the given value.
*/
final void internalReset(long initialValue) {
Cell[] as = cells;
base = initialValue;
if (as != null) {
int n = as.length;
for (int i = 0; i < n; ++i) {
Cell a = as[i];
if (a != null)
a.value = initialValue;
}
}
}
// Unsafe mechanics
private static final sun.misc.Unsafe UNSAFE;
private static final long baseOffset;
private static final long busyOffset;
static {
try {
UNSAFE = getUnsafe();
Class<?> sk = Striped64.class;
baseOffset = UNSAFE.objectFieldOffset
(sk.getDeclaredField("base"));
busyOffset = UNSAFE.objectFieldOffset
(sk.getDeclaredField("busy"));
} catch (Exception e) {
throw new Error(e);
}
}
/**
* Returns a sun.misc.Unsafe. Suitable for use in a 3rd party package.
* Replace with a simple call to Unsafe.getUnsafe when integrating
* into a jdk.
*
* @return a sun.misc.Unsafe
*/
private static sun.misc.Unsafe getUnsafe() {
try {
return sun.misc.Unsafe.getUnsafe();
} catch (SecurityException tryReflectionInstead) {}
try {
return java.security.AccessController.doPrivileged
(new java.security.PrivilegedExceptionAction<sun.misc.Unsafe>() {
public sun.misc.Unsafe run() throws Exception {
Class<sun.misc.Unsafe> k = sun.misc.Unsafe.class;
for (java.lang.reflect.Field f : k.getDeclaredFields()) {
f.setAccessible(true);
Object x = f.get(null);
if (k.isInstance(x))
return k.cast(x);
}
throw new NoSuchFieldError("the Unsafe");
}});
} catch (java.security.PrivilegedActionException e) {
throw new RuntimeException("Could not initialize intrinsics",
e.getCause());
}
}
} | 0true
| src_main_java_jsr166e_Striped64.java |
4,063 | public class ParentConstantScoreQuery extends Query {
private final Query originalParentQuery;
private final String parentType;
private final Filter childrenFilter;
private Query rewrittenParentQuery;
private IndexReader rewriteIndexReader;
public ParentConstantScoreQuery(Query parentQuery, String parentType, Filter childrenFilter) {
this.originalParentQuery = parentQuery;
this.parentType = parentType;
this.childrenFilter = childrenFilter;
}
@Override
// See TopChildrenQuery#rewrite
public Query rewrite(IndexReader reader) throws IOException {
if (rewrittenParentQuery == null) {
rewrittenParentQuery = originalParentQuery.rewrite(reader);
rewriteIndexReader = reader;
}
return this;
}
@Override
public void extractTerms(Set<Term> terms) {
rewrittenParentQuery.extractTerms(terms);
}
@Override
public Weight createWeight(IndexSearcher searcher) throws IOException {
SearchContext searchContext = SearchContext.current();
searchContext.idCache().refresh(searcher.getTopReaderContext().leaves());
Recycler.V<ObjectOpenHashSet<HashedBytesArray>> parents = searchContext.cacheRecycler().hashSet(-1);
ParentUidsCollector collector = new ParentUidsCollector(parents.v(), searchContext, parentType);
final Query parentQuery;
if (rewrittenParentQuery != null) {
parentQuery = rewrittenParentQuery;
} else {
assert rewriteIndexReader == searcher.getIndexReader();
parentQuery = rewrittenParentQuery = originalParentQuery.rewrite(searcher.getIndexReader());
}
IndexSearcher indexSearcher = new IndexSearcher(searcher.getIndexReader());
indexSearcher.setSimilarity(searcher.getSimilarity());
indexSearcher.search(parentQuery, collector);
if (parents.v().isEmpty()) {
return Queries.newMatchNoDocsQuery().createWeight(searcher);
}
ChildrenWeight childrenWeight = new ChildrenWeight(childrenFilter, searchContext, parents);
searchContext.addReleasable(childrenWeight);
return childrenWeight;
}
private final class ChildrenWeight extends Weight implements Releasable {
private final Filter childrenFilter;
private final SearchContext searchContext;
private final Recycler.V<ObjectOpenHashSet<HashedBytesArray>> parents;
private float queryNorm;
private float queryWeight;
private ChildrenWeight(Filter childrenFilter, SearchContext searchContext, Recycler.V<ObjectOpenHashSet<HashedBytesArray>> parents) {
this.childrenFilter = new ApplyAcceptedDocsFilter(childrenFilter);
this.searchContext = searchContext;
this.parents = parents;
}
@Override
public Explanation explain(AtomicReaderContext context, int doc) throws IOException {
return new Explanation(getBoost(), "not implemented yet...");
}
@Override
public Query getQuery() {
return ParentConstantScoreQuery.this;
}
@Override
public float getValueForNormalization() throws IOException {
queryWeight = getBoost();
return queryWeight * queryWeight;
}
@Override
public void normalize(float norm, float topLevelBoost) {
this.queryNorm = norm * topLevelBoost;
queryWeight *= this.queryNorm;
}
@Override
public Scorer scorer(AtomicReaderContext context, boolean scoreDocsInOrder, boolean topScorer, Bits acceptDocs) throws IOException {
DocIdSet childrenDocIdSet = childrenFilter.getDocIdSet(context, acceptDocs);
if (DocIdSets.isEmpty(childrenDocIdSet)) {
return null;
}
IdReaderTypeCache idReaderTypeCache = searchContext.idCache().reader(context.reader()).type(parentType);
if (idReaderTypeCache != null) {
DocIdSetIterator innerIterator = childrenDocIdSet.iterator();
if (innerIterator != null) {
ChildrenDocIdIterator childrenDocIdIterator = new ChildrenDocIdIterator(innerIterator, parents.v(), idReaderTypeCache);
return ConstantScorer.create(childrenDocIdIterator, this, queryWeight);
}
}
return null;
}
@Override
public boolean release() throws ElasticsearchException {
Releasables.release(parents);
return true;
}
private final class ChildrenDocIdIterator extends FilteredDocIdSetIterator {
private final ObjectOpenHashSet<HashedBytesArray> parents;
private final IdReaderTypeCache idReaderTypeCache;
ChildrenDocIdIterator(DocIdSetIterator innerIterator, ObjectOpenHashSet<HashedBytesArray> parents, IdReaderTypeCache idReaderTypeCache) {
super(innerIterator);
this.parents = parents;
this.idReaderTypeCache = idReaderTypeCache;
}
@Override
protected boolean match(int doc) {
return parents.contains(idReaderTypeCache.parentIdByDoc(doc));
}
}
}
private final static class ParentUidsCollector extends NoopCollector {
private final ObjectOpenHashSet<HashedBytesArray> collectedUids;
private final SearchContext context;
private final String parentType;
private IdReaderTypeCache typeCache;
ParentUidsCollector(ObjectOpenHashSet<HashedBytesArray> collectedUids, SearchContext context, String parentType) {
this.collectedUids = collectedUids;
this.context = context;
this.parentType = parentType;
}
public void collect(int doc) throws IOException {
// It can happen that for particular segment no document exist for an specific type. This prevents NPE
if (typeCache != null) {
collectedUids.add(typeCache.idByDoc(doc));
}
}
@Override
public void setNextReader(AtomicReaderContext readerContext) throws IOException {
typeCache = context.idCache().reader(readerContext.reader()).type(parentType);
}
}
@Override
public int hashCode() {
int result = originalParentQuery.hashCode();
result = 31 * result + parentType.hashCode();
result = 31 * result + Float.floatToIntBits(getBoost());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null || obj.getClass() != this.getClass()) {
return false;
}
ParentConstantScoreQuery that = (ParentConstantScoreQuery) obj;
if (!originalParentQuery.equals(that.originalParentQuery)) {
return false;
}
if (!parentType.equals(that.parentType)) {
return false;
}
if (this.getBoost() != that.getBoost()) {
return false;
}
return true;
}
@Override
public String toString(String field) {
    // e.g. parent_filter[myParentType](wrapped query)
    return "parent_filter[" + parentType + "](" + originalParentQuery + ')';
}
} | 1no label
| src_main_java_org_elasticsearch_index_search_child_ParentConstantScoreQuery.java |
170 | public abstract class SpeedTestThread extends Thread implements SpeedTest {
protected SpeedTestData data;
protected SpeedTestMultiThreads owner;
protected SpeedTestThread() {
data = new SpeedTestData();
}
protected SpeedTestThread(long iCycles) {
data = new SpeedTestData(iCycles);
}
public void setCycles(long iCycles) {
data.cycles = iCycles;
}
public void setOwner(SpeedTestMultiThreads iOwner) {
owner = iOwner;
}
@Override
public void run() {
data.printResults = false;
data.go(this);
}
public void init() throws Exception {
}
public void deinit() throws Exception {
}
public void afterCycle() throws Exception {
}
public void beforeCycle() throws Exception {
}
} | 0true
| commons_src_test_java_com_orientechnologies_common_test_SpeedTestThread.java |
1,666 | public static class Cell {
public final Object value;
public final Map<String, String> attr;
public Cell(Object value, Cell other) {
this.value = value;
this.attr = other.attr;
}
public Cell(Object value) {
this.value = value;
this.attr = new HashMap<String, String>();
}
public Cell(Object value, Map<String, String> attr) {
this.value = value;
this.attr = attr;
}
} | 0true
| src_main_java_org_elasticsearch_common_Table.java |
1,607 | public class SimpleValueMapStructure extends MapStructure {
private static final long serialVersionUID = 1L;
private String valuePropertyName;
private String valuePropertyFriendlyName;
public SimpleValueMapStructure() {
super();
}
/**
* @param keyClassName
* @param keyPropertyName
* @param keyPropertyFriendlyName
* @param valueClassName
* @param mapProperty
*/
public SimpleValueMapStructure(String keyClassName, String keyPropertyName, String keyPropertyFriendlyName, String valueClassName, String valuePropertyName, String valuePropertyFriendlyName, String mapProperty, String mapKeyValueProperty) {
super(keyClassName, keyPropertyName, keyPropertyFriendlyName, valueClassName, mapProperty, false, mapKeyValueProperty);
this.valuePropertyFriendlyName = valuePropertyFriendlyName;
this.valuePropertyName = valuePropertyName;
}
public String getValuePropertyName() {
return valuePropertyName;
}
public void setValuePropertyName(String valuePropertyName) {
this.valuePropertyName = valuePropertyName;
}
public String getValuePropertyFriendlyName() {
return valuePropertyFriendlyName;
}
public void setValuePropertyFriendlyName(String valuePropertyFriendlyName) {
this.valuePropertyFriendlyName = valuePropertyFriendlyName;
}
public void accept(PersistencePerspectiveItemVisitor visitor) {
visitor.visit(this);
}
@Override
public PersistencePerspectiveItem clonePersistencePerspectiveItem() {
SimpleValueMapStructure mapStructure = new SimpleValueMapStructure();
mapStructure.setKeyClassName(getKeyClassName());
mapStructure.setKeyPropertyName(getKeyPropertyName());
mapStructure.setValuePropertyFriendlyName(getKeyPropertyFriendlyName());
mapStructure.setValueClassName(getValueClassName());
mapStructure.setMapProperty(getMapProperty());
mapStructure.setDeleteValueEntity(getDeleteValueEntity());
mapStructure.valuePropertyName = valuePropertyName;
mapStructure.valuePropertyFriendlyName = valuePropertyFriendlyName;
return mapStructure;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (!(o instanceof SimpleValueMapStructure)) return false;
if (!super.equals(o)) return false;
SimpleValueMapStructure that = (SimpleValueMapStructure) o;
if (valuePropertyFriendlyName != null ? !valuePropertyFriendlyName.equals(that.valuePropertyFriendlyName) : that.valuePropertyFriendlyName != null)
return false;
if (valuePropertyName != null ? !valuePropertyName.equals(that.valuePropertyName) : that.valuePropertyName != null)
return false;
return true;
}
@Override
public int hashCode() {
int result = super.hashCode();
result = 31 * result + (valuePropertyName != null ? valuePropertyName.hashCode() : 0);
result = 31 * result + (valuePropertyFriendlyName != null ? valuePropertyFriendlyName.hashCode() : 0);
return result;
}
} | 1no label
| admin_broadleaf-open-admin-platform_src_main_java_org_broadleafcommerce_openadmin_dto_SimpleValueMapStructure.java |
1,538 | private final Predicate<MutableShardRouting> assignedFilter = new Predicate<MutableShardRouting>() {
@Override
public boolean apply(MutableShardRouting input) {
return input.assignedToNode();
}
}; | 0true
| src_main_java_org_elasticsearch_cluster_routing_allocation_allocator_BalancedShardsAllocator.java |
349 | public interface ODatabaseSchemaAware<T extends Object> extends ODatabaseComplex<T> {
/**
* Creates a new entity instance. Each database implementation will return the right type.
*
* @return The new instance.
*/
public <RET extends Object> RET newInstance(String iClassName);
/**
* Counts the entities contained in the specified class.
*
* @param iClassName
* Class name
* @return Total entities
*/
public long countClass(String iClassName);
} | 0true
| core_src_main_java_com_orientechnologies_orient_core_db_ODatabaseSchemaAware.java |
/**
 * Null-object {@link OSecurity} implementation: every permission check
 * succeeds ({@code isAllowed} always returns {@code true}) and every lookup,
 * creation, allow/disallow or authentication call returns {@code null}
 * ({@code false} for the drop methods). Used when security is disabled.
 */
public class OSecurityNull implements OSecurity {

    // Authorization always granted, regardless of the allow sets.
    @Override
    public boolean isAllowed(final Set<OIdentifiable> iAllowAll, final Set<OIdentifiable> iAllowOperation) {
        return true;
    }

    public OUser create() {
        return null;
    }

    public void load() {
    }

    // User/role lookup and creation: all no-ops returning null.

    public OUser getUser(String iUserName) {
        return null;
    }

    public OUser createUser(String iUserName, String iUserPassword, String... iRoles) {
        return null;
    }

    public OUser createUser(String iUserName, String iUserPassword, ORole... iRoles) {
        return null;
    }

    public ORole getRole(String iRoleName) {
        return null;
    }

    public ORole getRole(OIdentifiable iRole) {
        return null;
    }

    public ORole createRole(String iRoleName, ALLOW_MODES iAllowMode) {
        return null;
    }

    public ORole createRole(String iRoleName, ORole iParent, ALLOW_MODES iAllowMode) {
        return null;
    }

    public List<ODocument> getAllUsers() {
        return null;
    }

    public List<ODocument> getAllRoles() {
        return null;
    }

    // Authentication "succeeds" trivially by returning null — callers must
    // tolerate a null user when this implementation is active.
    public OUser authenticate(String iUsername, String iUserPassword) {
        return null;
    }

    public void close() {
    }

    public OUser repair() {
        return null;
    }

    public boolean dropUser(String iUserName) {
        return false;
    }

    public boolean dropRole(String iRoleName) {
        return false;
    }

    // Allow/disallow mutations: all no-ops returning null.

    @Override
    public OIdentifiable allowUser(ODocument iDocument, String iAllowFieldName, String iUserName) {
        return null;
    }

    @Override
    public OIdentifiable allowRole(ODocument iDocument, String iAllowFieldName, String iRoleName) {
        return null;
    }

    @Override
    public OIdentifiable allowIdentity(ODocument iDocument, String iAllowFieldName, OIdentifiable iId) {
        return null;
    }

    @Override
    public OIdentifiable disallowUser(ODocument iDocument, String iAllowFieldName, String iUserName) {
        return null;
    }

    @Override
    public OIdentifiable disallowRole(ODocument iDocument, String iAllowFieldName, String iRoleName) {
        return null;
    }

    @Override
    public OIdentifiable disallowIdentity(ODocument iDocument, String iAllowFieldName, OIdentifiable iId) {
        return null;
    }

    @Override
    public void createClassTrigger() {
    }
}
| core_src_main_java_com_orientechnologies_orient_core_metadata_security_OSecurityNull.java |
/**
 * Strategy for selecting which cluster members should participate in an
 * operation.
 */
public interface MemberSelector {

    /**
     * Decides whether the given member should be part of an operation.
     *
     * @param member the member instance to decide on
     * @return true if the member should take part in the operation, false otherwise
     */
    boolean select(Member member);
}
| hazelcast_src_main_java_com_hazelcast_core_MemberSelector.java |
2,588 | public class ZenDiscoveryModule extends AbstractModule {
private final List<Class<? extends UnicastHostsProvider>> unicastHostProviders = Lists.newArrayList();
/**
* Adds a custom unicast hosts provider to build a dynamic list of unicast hosts list when doing unicast discovery.
*/
public ZenDiscoveryModule addUnicastHostProvider(Class<? extends UnicastHostsProvider> unicastHostProvider) {
unicastHostProviders.add(unicastHostProvider);
return this;
}
@Override
protected void configure() {
bind(ZenPingService.class).asEagerSingleton();
Multibinder<UnicastHostsProvider> unicastHostsProviderMultibinder = Multibinder.newSetBinder(binder(), UnicastHostsProvider.class);
for (Class<? extends UnicastHostsProvider> unicastHostProvider : unicastHostProviders) {
unicastHostsProviderMultibinder.addBinding().to(unicastHostProvider);
}
bindDiscovery();
}
protected void bindDiscovery() {
bind(Discovery.class).to(ZenDiscovery.class).asEagerSingleton();
}
} | 1no label
| src_main_java_org_elasticsearch_discovery_zen_ZenDiscoveryModule.java |
383 | BackendOperation.execute(new BackendOperation.Transactional<Boolean>() {
@Override
public Boolean call(StoreTransaction txh) throws BackendException {
idStore.mutate(partitionKey, KeyColumnValueStore.NO_ADDITIONS, Arrays.asList(finalTarget), txh);
return true;
}
}, new BackendOperation.TransactionalProvider() { //Use normal consistency level for these non-critical delete operations | 0true
| titan-core_src_main_java_com_thinkaurelius_titan_diskstorage_idmanagement_ConsistentKeyIDAuthority.java |
1,281 | class TheNode {
final int entryCount;
final int threadCount;
final int valueSize;
final int nodeId;
final long createTime;
final ExecutorService es;
final ExecutorService esStats;
final HazelcastInstance hazelcast;
volatile boolean running = true;
TheNode(int nodeId, int entryCount, int threadCount, int valueSize) {
this.entryCount = entryCount;
this.threadCount = threadCount;
this.valueSize = valueSize;
this.nodeId = nodeId;
es = Executors.newFixedThreadPool(threadCount);
Config cfg = new XmlConfigBuilder().build();
hazelcast = Hazelcast.newHazelcastInstance(cfg);
esStats = Executors.newSingleThreadExecutor();
createTime = System.currentTimeMillis();
}
public void stop() {
try {
running = false;
es.shutdown();
es.awaitTermination(10, TimeUnit.SECONDS);
esStats.shutdown();
hazelcast.getLifecycleService().shutdown();
} catch (Throwable t) {
t.printStackTrace();
}
}
public void start() {
final Stats stats = new Stats();
for (int i = 0; i < threadCount; i++) {
es.submit(new Runnable() {
public void run() {
Map<String, byte[]> map = hazelcast.getMap("default");
while (running) {
try {
int key = (int) (random.nextFloat() * entryCount);
int operation = random(10);
if (operation < 4) {
map.put(String.valueOf(key), new byte[valueSize]);
stats.mapPuts.incrementAndGet();
} else if (operation < 8) {
map.get(String.valueOf(key));
stats.mapGets.incrementAndGet();
} else {
map.remove(String.valueOf(key));
stats.mapRemoves.incrementAndGet();
}
} catch (HazelcastInstanceNotActiveException ignored) {
throw new RuntimeException(ignored);
} catch (Throwable e) {
throw new RuntimeException(e);
}
}
}
});
}
esStats.submit(new Runnable() {
public void run() {
final ILogger logger = hazelcast.getLoggingService().getLogger(hazelcast.getName());
while (running) {
try {
Thread.sleep(STATS_SECONDS * 1000);
int clusterSize = hazelcast.getCluster().getMembers().size();
Stats currentStats = stats.getAndReset();
logger.info("Cluster size: " + clusterSize + ", Operations per Second: "
+ (currentStats.total() / STATS_SECONDS));
} catch (HazelcastInstanceNotActiveException e) {
throw new RuntimeException(e);
} catch (Exception e) {
throw new RuntimeException(e);
}
}
}
});
}
@Override
public String toString() {
return "TheNode{"
+ "nodeId=" + nodeId
+ ", entryCount=" + entryCount
+ ", threadCount=" + threadCount
+ ", valueSize=" + valueSize
+ ", liveSeconds=" + ((System.currentTimeMillis() - createTime) / 1000)
+ ", running=" + running + '}';
}
} | 0true
| hazelcast_src_main_java_com_hazelcast_examples_LongRunningTest.java |
/**
 * State of a distributed semaphore partition: the number of available permits
 * plus, per caller (member UUID), how many permits that caller currently
 * holds, so they can be released automatically if the member leaves.
 */
public class Permit implements DataSerializable {

    public static final int INITIAL_CAPACITY = 10;

    // Permits currently available for acquisition.
    private int available;
    private int partitionId;
    // caller UUID -> number of permits currently held (attached) by that caller.
    private Map<String, Integer> attachMap;
    private int backupCount;
    private int asyncBackupCount;
    // True once the permit count has been explicitly set or first mutated.
    // NOTE(review): not written/read by writeData/readData — confirm whether
    // replicas are expected to re-derive it (see setInitialized()).
    private boolean initialized;

    public Permit() {
    }

    public Permit(int partitionId, SemaphoreConfig config) {
        this.partitionId = partitionId;
        this.backupCount = config.getBackupCount();
        this.asyncBackupCount = config.getAsyncBackupCount();
        this.available = config.getInitialPermits();
        this.attachMap = new HashMap<String, Integer>(INITIAL_CAPACITY);
    }

    // Records permitCount additional permits as held by the caller.
    private void attach(String caller, int permitCount) {
        Integer attached = attachMap.get(caller);
        if (attached == null) {
            attached = 0;
        }
        attachMap.put(caller, attached + permitCount);
    }

    // Subtracts permitCount from the caller's held total, removing the entry
    // entirely once it drops to zero or below.
    private void detach(String caller, int permitCount) {
        Integer attached = attachMap.get(caller);
        if (attached == null) {
            return;
        }
        attached -= permitCount;
        if (attached <= 0) {
            attachMap.remove(caller);
        } else {
            attachMap.put(caller, attached);
        }
    }

    /**
     * Releases all permits held by a member that left the cluster.
     *
     * @return true if the member actually held any permits
     */
    public boolean memberRemoved(String caller) {
        Integer attached = attachMap.remove(caller);
        if (attached != null) {
            available += attached;
            return true;
        }
        return false;
    }

    /**
     * Sets the initial permit count; only succeeds while the semaphore is
     * still uninitialized and holds no permits.
     */
    public boolean init(int permitCount) {
        if (initialized || available != 0) {
            return false;
        }
        available = permitCount;
        initialized = true;
        return true;
    }

    public int getAvailable() {
        return available;
    }

    // True if acquiring permitCount permits would not go negative.
    public boolean isAvailable(int permitCount) {
        return available - permitCount >= 0;
    }

    /**
     * Acquires permitCount permits for the caller, if available.
     *
     * @return true on success, false when not enough permits are available
     */
    public boolean acquire(int permitCount, String caller) {
        if (isAvailable(permitCount)) {
            available -= permitCount;
            attach(caller, permitCount);
            initialized = true;
            return true;
        }
        return false;
    }

    /**
     * Acquires all currently available permits for the caller.
     *
     * @return the number of permits drained (possibly 0)
     */
    public int drain(String caller) {
        int drain = available;
        available = 0;
        if (drain > 0) {
            initialized = true;
            attach(caller, drain);
        }
        return drain;
    }

    /**
     * Permanently removes up to permitCount permits (floored at zero).
     *
     * @return true if the available count actually changed
     */
    public boolean reduce(int permitCount) {
        if (available == 0 || permitCount == 0) {
            return false;
        }
        available -= permitCount;
        if (available < 0) {
            available = 0;
        }
        return true;
    }

    // Returns permitCount permits and detaches them from the caller.
    public void release(int permitCount, String caller) {
        available += permitCount;
        initialized = true;
        detach(caller, permitCount);
    }

    public int getPartitionId() {
        return partitionId;
    }

    public int getSyncBackupCount() {
        return backupCount;
    }

    public int getAsyncBackupCount() {
        return asyncBackupCount;
    }

    public void setInitialized() {
        this.initialized = true;
    }

    // Serialization format: available, partitionId, backupCount,
    // asyncBackupCount, then the attach map as (size, [utf key, int value]...).
    // readData must mirror this order exactly.
    @Override
    public void writeData(ObjectDataOutput out) throws IOException {
        out.writeInt(available);
        out.writeInt(partitionId);
        out.writeInt(backupCount);
        out.writeInt(asyncBackupCount);
        out.writeInt(attachMap.size());
        for (Map.Entry<String, Integer> entry : attachMap.entrySet()) {
            out.writeUTF(entry.getKey());
            out.writeInt(entry.getValue());
        }
    }

    @Override
    public void readData(ObjectDataInput in) throws IOException {
        available = in.readInt();
        partitionId = in.readInt();
        backupCount = in.readInt();
        asyncBackupCount = in.readInt();
        int size = in.readInt();
        attachMap = new HashMap<String, Integer>(size);
        for (int i = 0; i < size; i++) {
            String caller = in.readUTF();
            Integer val = in.readInt();
            attachMap.put(caller, val);
        }
    }

    @Override
    public String toString() {
        final StringBuilder sb = new StringBuilder();
        sb.append("Permit");
        sb.append("{available=").append(available);
        sb.append(", partitionId=").append(partitionId);
        sb.append(", backupCount=").append(backupCount);
        sb.append(", asyncBackupCount=").append(asyncBackupCount);
        sb.append('}');
        sb.append("\n");
        for (Map.Entry<String, Integer> entry : attachMap.entrySet()) {
            sb.append("{caller=").append(entry.getKey());
            sb.append(", attached=").append(entry.getValue());
            sb.append("} ");
        }
        return sb.toString();
    }

    public int getTotalBackupCount() {
        return backupCount + asyncBackupCount;
    }
}
| hazelcast_src_main_java_com_hazelcast_concurrent_semaphore_Permit.java |
1,768 | @Component("blAdornedTargetListPersistenceModule")
@Scope("prototype")
public class AdornedTargetListPersistenceModule extends BasicPersistenceModule {
private static final Log LOG = LogFactory.getLog(AdornedTargetListPersistenceModule.class);
@Override
public boolean isCompatible(OperationType operationType) {
    // This module only handles adorned-target-list operations.
    return OperationType.ADORNEDTARGETLIST.equals(operationType);
}
@Override
public void extractProperties(Class<?>[] inheritanceLine, Map<MergedPropertyType, Map<String, FieldMetadata>> mergedProperties, List<Property> properties) throws NumberFormatException {
    // Only the adorned-target-list slice of the merged properties is relevant here.
    Map<String, FieldMetadata> adornedProperties = mergedProperties.get(MergedPropertyType.ADORNEDTARGETLIST);
    if (adornedProperties != null) {
        extractPropertiesFromMetadata(inheritanceLine, adornedProperties, properties, true, MergedPropertyType.ADORNEDTARGETLIST);
    }
}
/**
 * Builds the standard filter mappings for the given ceiling entity by
 * delegating to the generic filter-mapping construction.
 */
public List<FilterMapping> getBasicFilterMappings(PersistencePerspective persistencePerspective,
        CriteriaTransferObject cto, Map<String, FieldMetadata> mergedProperties,
        String cefqcn) {
    return getFilterMappings(persistencePerspective, cto, cefqcn, mergedProperties);
}
/**
 * Builds the filter mappings for an adorned target collection: the generic
 * mappings plus two equality restrictions — one on the linked (owning) side id
 * and one on the target side id of the adorned (join) entity.
 */
public List<FilterMapping> getAdornedTargetFilterMappings(PersistencePerspective persistencePerspective,
        CriteriaTransferObject cto, Map<String, FieldMetadata> mergedProperties,
        AdornedTargetList adornedTargetList) throws ClassNotFoundException {
    List<FilterMapping> filterMappings = getFilterMappings(persistencePerspective, cto, adornedTargetList.
            getAdornedTargetEntityClassname(), mergedProperties);
    // Restrict on the linked (owning) entity id.
    FilterMapping filterMapping = new FilterMapping()
        .withFieldPath(new FieldPath()
            .withTargetProperty(adornedTargetList.getLinkedObjectPath() + "." + adornedTargetList.getLinkedIdProperty()))
        .withFilterValues(cto.get(adornedTargetList.getCollectionFieldName()).getFilterValues())
        .withRestriction(buildIdEqualityRestriction());
    filterMappings.add(filterMapping);
    // Restrict on the target entity id.
    FilterMapping filterMapping2 = new FilterMapping()
        .withFieldPath(new FieldPath()
            .withTargetProperty(adornedTargetList.getTargetObjectPath() + "." +
                    adornedTargetList.getTargetIdProperty()))
        .withFilterValues(cto.get(adornedTargetList.getCollectionFieldName() + "Target").getFilterValues())
        .withRestriction(buildIdEqualityRestriction());
    filterMappings.add(filterMapping2);
    return filterMappings;
}

/**
 * Restriction comparing an id path for equality with a single direct value,
 * parsing the value as a Long unless the id property is a String. Extracted
 * because the identical anonymous provider was previously duplicated twice.
 */
private Restriction buildIdEqualityRestriction() {
    return new Restriction()
        .withPredicateProvider(new PredicateProvider<Serializable, String>() {
            @Override
            public Predicate buildPredicate(CriteriaBuilder builder, FieldPathBuilder fieldPathBuilder, From root,
                                            String ceilingEntity, String fullPropertyName, Path<Serializable> explicitPath,
                                            List<String> directValues) {
                if (String.class.isAssignableFrom(explicitPath.getJavaType())) {
                    return builder.equal(explicitPath, directValues.get(0));
                }
                return builder.equal(explicitPath, Long.parseLong(directValues.get(0)));
            }
        });
}
/**
 * Builds a new instance of the adorned (join) entity and populates its linked
 * (owning) and target id properties from the submitted entity, verifying that
 * both referenced records actually exist before returning.
 *
 * NOTE(review): the linked id is always parsed as a Long while the target id
 * honors String ids — confirm String-keyed linked entities are unsupported by design.
 */
protected Serializable createPopulatedAdornedTargetInstance(AdornedTargetList adornedTargetList, Entity entity) throws InstantiationException, IllegalAccessException, ClassNotFoundException, NumberFormatException, InvocationTargetException, NoSuchMethodException, FieldNotAvailableException {
    // Instantiate the polymorphic type when one was requested, otherwise the base adorned entity class.
    Serializable instance = (Serializable) Class.forName(StringUtils.isEmpty(adornedTargetList
            .getAdornedTargetEntityPolymorphicType())? adornedTargetList.getAdornedTargetEntityClassname(): adornedTargetList.getAdornedTargetEntityPolymorphicType()).newInstance();
    String targetPath = adornedTargetList.getTargetObjectPath() + "." + adornedTargetList.getTargetIdProperty();
    String linkedPath = adornedTargetList.getLinkedObjectPath() + "." + adornedTargetList.getLinkedIdProperty();
    getFieldManager().setFieldValue(instance, linkedPath, Long.valueOf(entity.findProperty(linkedPath).getValue()));

    // Sanity check: the linked (owning) record must exist.
    Object test1 = getFieldManager().getFieldValue(instance, adornedTargetList.getLinkedObjectPath());
    Object test1PersistedObject = persistenceManager.getDynamicEntityDao().retrieve(test1.getClass(), Long.valueOf(entity.findProperty(linkedPath).getValue()));
    Assert.isTrue(test1PersistedObject != null, "Entity not found");

    // The target id property may be a String or a numeric type.
    Class<?> type = getFieldManager().getField(instance.getClass(), targetPath).getType();
    if (String.class.isAssignableFrom(type)) {
        getFieldManager().setFieldValue(instance, targetPath, entity.findProperty(targetPath).getValue());
    } else {
        getFieldManager().setFieldValue(instance, targetPath, Long.valueOf(entity.findProperty(targetPath).getValue()));
    }

    // Sanity check: the target record must exist.
    Object test2 = getFieldManager().getFieldValue(instance, adornedTargetList.getTargetObjectPath());
    Object test2PersistedObject;
    if (String.class.isAssignableFrom(type)) {
        test2PersistedObject = persistenceManager.getDynamicEntityDao().retrieve(test2.getClass(), entity.findProperty(targetPath).getValue());
    } else {
        test2PersistedObject = persistenceManager.getDynamicEntityDao().retrieve(test2.getClass(), Long.valueOf(entity.findProperty(targetPath).getValue()));
    }
    Assert.isTrue(test2PersistedObject != null, "Entity not found");

    return instance;
}
/**
 * Adds the adorned-target-list slice of merged metadata for the join entity,
 * if the persistence perspective declares an {@link AdornedTargetList}. The
 * join entity's internal id property is stripped before publishing.
 *
 * @throws ServiceException wrapping any failure while resolving metadata
 */
@Override
public void updateMergedProperties(PersistencePackage persistencePackage, Map<MergedPropertyType, Map<String, FieldMetadata>> allMergedProperties) throws ServiceException {
    String ceilingEntityFullyQualifiedClassname = persistencePackage.getCeilingEntityFullyQualifiedClassname();
    try {
        PersistencePerspective persistencePerspective = persistencePackage.getPersistencePerspective();
        AdornedTargetList adornedTargetList = (AdornedTargetList) persistencePerspective.getPersistencePerspectiveItems().get(PersistencePerspectiveItemType.ADORNEDTARGETLIST);
        if (adornedTargetList != null) {
            Class<?>[] entities = persistenceManager.getPolymorphicEntities(adornedTargetList.getAdornedTargetEntityClassname());
            Map<String, FieldMetadata> joinMergedProperties = persistenceManager.getDynamicEntityDao().getMergedProperties(
                adornedTargetList.getAdornedTargetEntityClassname(),
                entities,
                null,
                new String[]{},
                new ForeignKey[]{},
                MergedPropertyType.ADORNEDTARGETLIST,
                persistencePerspective.getPopulateToOneFields(),
                persistencePerspective.getIncludeFields(),
                persistencePerspective.getExcludeFields(),
                persistencePerspective.getConfigurationKey(),
                ""
            );
            // Find and remove the join entity's own id property: it is internal
            // and should not surface in the adorned-target property list.
            String idProp = null;
            for (String key : joinMergedProperties.keySet()) {
                if (joinMergedProperties.get(key) instanceof BasicFieldMetadata && ((BasicFieldMetadata) joinMergedProperties.get(key)).getFieldType() == SupportedFieldType.ID) {
                    idProp = key;
                    break;
                }
            }
            if (idProp != null) {
                joinMergedProperties.remove(idProp);
            }
            allMergedProperties.put(MergedPropertyType.ADORNEDTARGETLIST, joinMergedProperties);
        }
    } catch (Exception e) {
        throw new ServiceException("Unable to fetch results for " + ceilingEntityFullyQualifiedClassname, e);
    }
}
/**
 * Adds a record to an adorned target collection. If the link between the
 * owning (linked) entity and the target entity already exists it is returned
 * as-is; otherwise a new join record is created, optionally appended at the
 * end of the sort order, persisted, and re-fetched for the response.
 *
 * @throws ServiceException wrapping any failure, or a SecurityServiceException
 *         when the adorned target list is not mutable
 */
@Override
public Entity add(PersistencePackage persistencePackage) throws ServiceException {
    String[] customCriteria = persistencePackage.getCustomCriteria();
    if (customCriteria != null && customCriteria.length > 0) {
        LOG.warn("custom persistence handlers and custom criteria not supported for add types other than BASIC");
    }
    PersistencePerspective persistencePerspective = persistencePackage.getPersistencePerspective();
    String ceilingEntityFullyQualifiedClassname = persistencePackage.getCeilingEntityFullyQualifiedClassname();
    Entity entity = persistencePackage.getEntity();
    AdornedTargetList adornedTargetList = (AdornedTargetList) persistencePerspective.getPersistencePerspectiveItems().get(PersistencePerspectiveItemType.ADORNEDTARGETLIST);
    if (!adornedTargetList.getMutable()) {
        throw new SecurityServiceException("Field is not mutable");
    }
    Entity payload;
    try {
        // Merged metadata for the ceiling entity (shapes the returned record)...
        Class<?>[] entities = persistenceManager.getPolymorphicEntities(ceilingEntityFullyQualifiedClassname);
        Map<String, FieldMetadata> mergedPropertiesTarget = persistenceManager.getDynamicEntityDao().getMergedProperties(
                ceilingEntityFullyQualifiedClassname,
                entities,
                null,
                persistencePerspective.getAdditionalNonPersistentProperties(),
                persistencePerspective.getAdditionalForeignKeys(),
                MergedPropertyType.PRIMARY,
                persistencePerspective.getPopulateToOneFields(),
                persistencePerspective.getIncludeFields(),
                persistencePerspective.getExcludeFields(),
                persistencePerspective.getConfigurationKey(),
                ""
        );
        // ...and for the adorned (join) entity itself.
        Class<?>[] entities2 = persistenceManager.getPolymorphicEntities(adornedTargetList.getAdornedTargetEntityClassname());
        Map<String, FieldMetadata> mergedProperties = persistenceManager.getDynamicEntityDao().getMergedProperties(
                adornedTargetList.getAdornedTargetEntityClassname(),
                entities2,
                null,
                new String[]{},
                new ForeignKey[]{},
                MergedPropertyType.ADORNEDTARGETLIST,
                false,
                new String[]{},
                new String[]{},
                null,
                ""
        );
        // Build a query to detect whether this link already exists; an inverse
        // relationship swaps which side is "linked" vs "target".
        CriteriaTransferObject ctoInserted = new CriteriaTransferObject();
        FilterAndSortCriteria filterCriteriaInsertedLinked = ctoInserted.get(adornedTargetList.getCollectionFieldName());
        String linkedPath;
        String targetPath;
        if (adornedTargetList.getInverse()) {
            linkedPath = adornedTargetList.getTargetObjectPath() + "." + adornedTargetList.getTargetIdProperty();
            targetPath = adornedTargetList.getLinkedObjectPath() + "." + adornedTargetList.getLinkedIdProperty();
        } else {
            targetPath = adornedTargetList.getTargetObjectPath() + "." + adornedTargetList.getTargetIdProperty();
            linkedPath = adornedTargetList.getLinkedObjectPath() + "." + adornedTargetList.getLinkedIdProperty();
        }
        filterCriteriaInsertedLinked.setFilterValue(entity.findProperty(adornedTargetList.getInverse() ? targetPath : linkedPath).getValue());
        FilterAndSortCriteria filterCriteriaInsertedTarget = ctoInserted.get(adornedTargetList.getCollectionFieldName() + "Target");
        filterCriteriaInsertedTarget.setFilterValue(entity.findProperty(adornedTargetList.getInverse() ? linkedPath : targetPath).getValue());
        List<FilterMapping> filterMappingsInserted = getAdornedTargetFilterMappings(persistencePerspective, ctoInserted, mergedProperties, adornedTargetList);
        List<Serializable> recordsInserted = getPersistentRecords(adornedTargetList.getAdornedTargetEntityClassname(), filterMappingsInserted, ctoInserted.getFirstResult(), ctoInserted.getMaxResults());
        if (recordsInserted.size() > 0) {
            // The relationship already exists: return it rather than duplicating.
            payload = getRecords(mergedPropertiesTarget, recordsInserted, mergedProperties, adornedTargetList.getTargetObjectPath())[0];
        } else {
            // Build and populate the new join record from the submitted entity.
            Serializable instance = createPopulatedAdornedTargetInstance(adornedTargetList, entity);
            instance = createPopulatedInstance(instance, entity, mergedProperties, false, persistencePackage.isValidateUnsubmittedProperties());
            instance = createPopulatedInstance(instance, entity, mergedPropertiesTarget, false, persistencePackage.isValidateUnsubmittedProperties());
            FieldManager fieldManager = getFieldManager();
            // Clear any id carried over so the persistence provider assigns a fresh one.
            if (fieldManager.getField(instance.getClass(), "id") != null) {
                fieldManager.setFieldValue(instance, "id", null);
            }
            if (adornedTargetList.getSortField() != null) {
                // Append at the end of the sort order: sort value = current count + 1.
                CriteriaTransferObject cto = new CriteriaTransferObject();
                FilterAndSortCriteria filterCriteria = cto.get(adornedTargetList.getCollectionFieldName());
                filterCriteria.setFilterValue(entity.findProperty(adornedTargetList.getInverse() ? targetPath : linkedPath).getValue());
                FilterAndSortCriteria sortCriteria = cto.get(adornedTargetList.getSortField());
                sortCriteria.setSortAscending(adornedTargetList.getSortAscending());
                List<FilterMapping> filterMappings = getAdornedTargetFilterMappings(persistencePerspective, cto,
                        mergedProperties, adornedTargetList);
                int totalRecords = getTotalRecords(adornedTargetList.getAdornedTargetEntityClassname(), filterMappings);
                fieldManager.setFieldValue(instance, adornedTargetList.getSortField(), Long.valueOf(totalRecords + 1));
            }
            instance = persistenceManager.getDynamicEntityDao().merge(instance);
            persistenceManager.getDynamicEntityDao().clear();
            // Re-fetch so the returned entity reflects the persisted state.
            List<Serializable> recordsInserted2 = getPersistentRecords(adornedTargetList.getAdornedTargetEntityClassname(), filterMappingsInserted, ctoInserted.getFirstResult(), ctoInserted.getMaxResults());
            payload = getRecords(mergedPropertiesTarget, recordsInserted2, mergedProperties, adornedTargetList.getTargetObjectPath())[0];
        }
    } catch (Exception e) {
        throw new ServiceException("Problem adding new entity : " + e.getMessage(), e);
    }
    return payload;
}
/**
 * Updates a record in an adorned target collection. When the sort field value
 * changed, the record is moved to the requested position and the whole list is
 * re-sequenced (1-based) before the record is populated, merged and returned.
 *
 * @throws ServiceException wrapping any failure, or a SecurityServiceException
 *         when the adorned target list is not mutable
 */
@Override
public Entity update(PersistencePackage persistencePackage) throws ServiceException {
    String[] customCriteria = persistencePackage.getCustomCriteria();
    if (customCriteria != null && customCriteria.length > 0) {
        LOG.warn("custom persistence handlers and custom criteria not supported for update types other than BASIC");
    }
    PersistencePerspective persistencePerspective = persistencePackage.getPersistencePerspective();
    Entity entity = persistencePackage.getEntity();
    AdornedTargetList adornedTargetList = (AdornedTargetList) persistencePerspective.getPersistencePerspectiveItems().get(PersistencePerspectiveItemType.ADORNEDTARGETLIST);
    if (!adornedTargetList.getMutable()) {
        throw new SecurityServiceException("Field is not mutable");
    }
    try {
        // Locate the record being updated (and its position) within the collection.
        AdornedTargetRetrieval adornedTargetRetrieval = new AdornedTargetRetrieval(persistencePackage, entity, adornedTargetList).invokeForUpdate();
        List<Serializable> records = adornedTargetRetrieval.getRecords();

        Assert.isTrue(!CollectionUtils.isEmpty(records), "Entity not found");

        int index = adornedTargetRetrieval.getIndex();
        Map<String, FieldMetadata> mergedProperties = adornedTargetRetrieval.getMergedProperties();
        FieldManager fieldManager = getFieldManager();

        Serializable myRecord;
        if (adornedTargetList.getSortField() != null && entity.findProperty(adornedTargetList.getSortField()).getValue() != null) {
            myRecord = records.get(index);

            Integer requestedSequence = Integer.valueOf(entity.findProperty(adornedTargetList.getSortField()).getValue());
            Integer previousSequence = Integer.parseInt(String.valueOf(getFieldManager().getFieldValue(myRecord, adornedTargetList.getSortField())));

            if (!previousSequence.equals(requestedSequence)) {
                // Sequence has changed. Rebalance the list
                myRecord = records.remove(index);
                myRecord = createPopulatedInstance(myRecord, entity, mergedProperties, false);
                if (CollectionUtils.isEmpty(records)) {
                    records.add(myRecord);
                } else {
                    records.add(requestedSequence - 1, myRecord);
                }
                // Re-sequence every record, 1-based, in the new order.
                index = 1;
                for (Serializable record : records) {
                    fieldManager.setFieldValue(record, adornedTargetList.getSortField(), Long.valueOf(index));
                    index++;
                }
            }
        } else {
            myRecord = records.get(index);
        }

        // Merged metadata for the ceiling entity, used to shape the response.
        String ceilingEntityFullyQualifiedClassname = persistencePackage.getCeilingEntityFullyQualifiedClassname();
        Class<?>[] entities = persistenceManager.getPolymorphicEntities(ceilingEntityFullyQualifiedClassname);
        Map<String, FieldMetadata> mergedPropertiesTarget = persistenceManager.getDynamicEntityDao().getMergedProperties(
                ceilingEntityFullyQualifiedClassname,
                entities,
                null,
                persistencePerspective.getAdditionalNonPersistentProperties(),
                persistencePerspective.getAdditionalForeignKeys(),
                MergedPropertyType.PRIMARY,
                persistencePerspective.getPopulateToOneFields(),
                persistencePerspective.getIncludeFields(),
                persistencePerspective.getExcludeFields(),
                persistencePerspective.getConfigurationKey(),
                ""
        );
        myRecord = createPopulatedInstance(myRecord, entity, mergedProperties, false);
        myRecord = persistenceManager.getDynamicEntityDao().merge(myRecord);
        List<Serializable> myList = new ArrayList<Serializable>();
        myList.add(myRecord);
        Entity[] payload = getRecords(mergedPropertiesTarget, myList, mergedProperties, adornedTargetList.getTargetObjectPath());
        entity = payload[0];
        return entity;
    } catch (Exception e) {
        throw new ServiceException("Problem updating entity : " + e.getMessage(), e);
    }
}
@Override
public void remove(PersistencePackage persistencePackage) throws ServiceException {
    // Removes one adorned-target (join) row, located by the pair of ids carried on the
    // incoming Entity: the linked-object id and the target-object id.
    String[] customCriteria = persistencePackage.getCustomCriteria();
    if (customCriteria != null && customCriteria.length > 0) {
        // Custom criteria are ignored by this module; warn so callers know they had no effect.
        LOG.warn("custom persistence handlers and custom criteria not supported for remove types other than BASIC");
    }
    PersistencePerspective persistencePerspective = persistencePackage.getPersistencePerspective();
    Entity entity = persistencePackage.getEntity();
    try {
        AdornedTargetList adornedTargetList = (AdornedTargetList) persistencePerspective.getPersistencePerspectiveItems().get(PersistencePerspectiveItemType.ADORNEDTARGETLIST);
        if (!adornedTargetList.getMutable()) {
            // Read-only collections may not be modified through the admin.
            throw new SecurityServiceException("Field is not mutable");
        }
        Class<?>[] entities = persistenceManager.getPolymorphicEntities(adornedTargetList.getAdornedTargetEntityClassname());
        Map<String, FieldMetadata> mergedProperties = persistenceManager.getDynamicEntityDao().getMergedProperties(
            adornedTargetList.getAdornedTargetEntityClassname(),
            entities,
            null,
            new String[]{},
            new ForeignKey[]{},
            MergedPropertyType.ADORNEDTARGETLIST,
            false,
            new String[]{},
            new String[]{},
            null,
            ""
        );
        // Build filter criteria matching both sides of the join so exactly the
        // requested association row is fetched.
        CriteriaTransferObject ctoInserted = new CriteriaTransferObject();
        FilterAndSortCriteria filterCriteriaInsertedLinked = ctoInserted.get(adornedTargetList.getCollectionFieldName());
        filterCriteriaInsertedLinked.setFilterValue(entity.findProperty(adornedTargetList.getLinkedObjectPath() + "." + adornedTargetList.getLinkedIdProperty()).getValue());
        FilterAndSortCriteria filterCriteriaInsertedTarget = ctoInserted.get(adornedTargetList.getCollectionFieldName() + "Target");
        filterCriteriaInsertedTarget.setFilterValue(entity.findProperty(adornedTargetList.getTargetObjectPath() + "." + adornedTargetList.getTargetIdProperty()).getValue());
        List<FilterMapping> filterMappings = getAdornedTargetFilterMappings(persistencePerspective, ctoInserted, mergedProperties, adornedTargetList);
        List<Serializable> recordsInserted = getPersistentRecords(adornedTargetList.getAdornedTargetEntityClassname(), filterMappings, ctoInserted.getFirstResult(), ctoInserted.getMaxResults());
        Assert.isTrue(!CollectionUtils.isEmpty(recordsInserted), "Entity not found");
        // Only the first matching row is removed; presumably the (linked id, target id)
        // pair is unique for the association — TODO confirm.
        persistenceManager.getDynamicEntityDao().remove(recordsInserted.get(0));
    } catch (Exception e) {
        throw new ServiceException("Problem removing entity : " + e.getMessage(), e);
    }
}
@Override
public DynamicResultSet fetch(PersistencePackage persistencePackage, CriteriaTransferObject cto) throws ServiceException {
    // Fetches a page of adorned-target records and converts them into Entity DTOs
    // expressed in terms of the target object's (ceiling entity) properties.
    PersistencePerspective persistencePerspective = persistencePackage.getPersistencePerspective();
    String ceilingEntityFullyQualifiedClassname = persistencePackage.getCeilingEntityFullyQualifiedClassname();
    AdornedTargetList adornedTargetList = (AdornedTargetList) persistencePerspective.getPersistencePerspectiveItems().get(PersistencePerspectiveItemType.ADORNEDTARGETLIST);
    Entity[] payload;
    int totalRecords;
    try {
        Class<?>[] entities = persistenceManager.getPolymorphicEntities(ceilingEntityFullyQualifiedClassname);
        // Merged metadata for the target (ceiling) entity, used to render the records.
        Map<String, FieldMetadata> mergedPropertiesTarget = persistenceManager.getDynamicEntityDao().getMergedProperties(
            ceilingEntityFullyQualifiedClassname,
            entities,
            null,
            persistencePerspective.getAdditionalNonPersistentProperties(),
            persistencePerspective.getAdditionalForeignKeys(),
            MergedPropertyType.PRIMARY,
            persistencePerspective.getPopulateToOneFields(),
            persistencePerspective.getIncludeFields(),
            persistencePerspective.getExcludeFields(),
            persistencePerspective.getConfigurationKey(),
            ""
        );
        // Delegate criteria construction and record retrieval to the shared helper.
        AdornedTargetRetrieval adornedTargetRetrieval = new AdornedTargetRetrieval(persistencePackage, adornedTargetList, cto).invokeForFetch();
        List<Serializable> records = adornedTargetRetrieval.getRecords();
        Map<String, FieldMetadata> mergedProperties = adornedTargetRetrieval.getMergedProperties();
        payload = getRecords(mergedPropertiesTarget, records, mergedProperties, adornedTargetList.getTargetObjectPath());
        // Total count uses the same filters as the page query so paging stays consistent.
        totalRecords = getTotalRecords(adornedTargetList.getAdornedTargetEntityClassname(), adornedTargetRetrieval.getFilterMappings());
    } catch (Exception e) {
        throw new ServiceException("Unable to fetch results for " + adornedTargetList.getAdornedTargetEntityClassname(), e);
    }
    DynamicResultSet results = new DynamicResultSet(null, payload, totalRecords);
    return results;
}
/**
 * Helper that builds filter criteria for an adorned-target collection and loads the
 * matching persistent records. Shared by the update path (invokeForUpdate, which also
 * locates the index of the edited row) and the fetch path (invokeForFetch).
 */
public class AdornedTargetRetrieval {
    private PersistencePackage persistencePackage;
    private PersistencePerspective persistencePerspective;
    // Only populated when constructed from the update path.
    private Entity entity;
    private AdornedTargetList adornedTargetList;
    // Merged metadata of the adorned-target (join) entity, populated by invokeInternal().
    private Map<String, FieldMetadata> mergedProperties;
    // Records matching the criteria, populated by invokeInternal().
    private List<Serializable> records;
    // Position of the edited record within 'records' (update path only).
    private int index;
    // Filter mappings used for both the page query and the total-count query.
    private List<FilterMapping> filterMappings;
    private CriteriaTransferObject cto;
    // This constructor is used by the update method
    public AdornedTargetRetrieval(PersistencePackage persistencePackage, Entity entity, AdornedTargetList adornedTargetList) {
        this(persistencePackage, adornedTargetList, new CriteriaTransferObject());
        this.entity = entity;
    }
    // This constructor is used by the fetch method
    public AdornedTargetRetrieval(PersistencePackage persistencePackage, AdornedTargetList adornedTargetList, CriteriaTransferObject cto) {
        this.persistencePackage = persistencePackage;
        this.persistencePerspective = persistencePackage.getPersistencePerspective();
        this.adornedTargetList = adornedTargetList;
        this.cto = cto;
    }
    public Map<String, FieldMetadata> getMergedProperties() {
        return mergedProperties;
    }
    public List<Serializable> getRecords() {
        return records;
    }
    public int getIndex() {
        return index;
    }
    public List<FilterMapping> getFilterMappings() {
        return filterMappings;
    }
    public AdornedTargetRetrieval invokeForFetch() throws ClassNotFoundException, NoSuchMethodException, IllegalAccessException, InvocationTargetException, FieldNotAvailableException, NoSuchFieldException {
        invokeInternal();
        return this;
    }
    public AdornedTargetRetrieval invokeForUpdate() throws ClassNotFoundException, NoSuchMethodException, IllegalAccessException, InvocationTargetException, FieldNotAvailableException, NoSuchFieldException {
        // Restrict the query to rows belonging to the linked (owning) object.
        FilterAndSortCriteria filterCriteria = cto.get(adornedTargetList.getCollectionFieldName());
        filterCriteria.setFilterValue(entity.findProperty(adornedTargetList.getLinkedObjectPath() + "." + adornedTargetList.getLinkedIdProperty()).getValue());
        invokeInternal();
        // Walk the loaded records to find the position of the one being updated,
        // identified by its target id. If no match is found, index ends up equal to
        // records.size() — callers appear to rely on a match existing; TODO confirm.
        index = 0;
        Long myEntityId = Long.valueOf(entity.findProperty(adornedTargetList.getTargetObjectPath() + "." + adornedTargetList.getTargetIdProperty()).getValue());
        FieldManager fieldManager = getFieldManager();
        for (Serializable record : records) {
            Long targetId = (Long) fieldManager.getFieldValue(record, adornedTargetList.getTargetObjectPath() + "." + adornedTargetList.getTargetIdProperty());
            if (myEntityId.equals(targetId)) {
                break;
            }
            index++;
        }
        return this;
    }
    private void invokeInternal() throws ClassNotFoundException {
        // Apply the configured sort direction to the collection's sort field, if any.
        if (adornedTargetList.getSortField() != null) {
            FilterAndSortCriteria sortCriteria = cto.get(adornedTargetList.getSortField());
            sortCriteria.setSortAscending(adornedTargetList.getSortAscending());
        }
        Class<?>[] entities = persistenceManager.getPolymorphicEntities(adornedTargetList
            .getAdornedTargetEntityClassname());
        // Metadata for the join entity itself.
        mergedProperties = persistenceManager.getDynamicEntityDao().getMergedProperties(
            adornedTargetList.getAdornedTargetEntityClassname(),
            entities,
            null,
            new String[]{},
            new ForeignKey[]{},
            MergedPropertyType.ADORNEDTARGETLIST,
            persistencePerspective.getPopulateToOneFields(),
            persistencePerspective.getIncludeFields(),
            persistencePerspective.getExcludeFields(),
            persistencePerspective.getConfigurationKey(),
            ""
        );
        filterMappings = getAdornedTargetFilterMappings(persistencePerspective, cto, mergedProperties, adornedTargetList);
        String ceilingEntityFullyQualifiedClassname = persistencePackage.getCeilingEntityFullyQualifiedClassname();
        Class<?>[] entities2 = persistenceManager.getPolymorphicEntities(ceilingEntityFullyQualifiedClassname);
        // Metadata for the target (ceiling) entity, needed so criteria expressed against
        // target properties can be translated below.
        Map<String, FieldMetadata> mergedPropertiesTarget = persistenceManager.getDynamicEntityDao()
            .getMergedProperties(
                ceilingEntityFullyQualifiedClassname,
                entities2,
                null,
                persistencePerspective.getAdditionalNonPersistentProperties(),
                persistencePerspective.getAdditionalForeignKeys(),
                MergedPropertyType.PRIMARY,
                persistencePerspective.getPopulateToOneFields(),
                persistencePerspective.getIncludeFields(),
                persistencePerspective.getExcludeFields(),
                persistencePerspective.getConfigurationKey(),
                ""
            );
        // We need to make sure that the target merged properties have the target object path prefix
        Map<String, FieldMetadata> convertedMergedPropertiesTarget = new HashMap<String, FieldMetadata>();
        String prefix = adornedTargetList.getTargetObjectPath();
        for (Entry<String, FieldMetadata> entry : mergedPropertiesTarget.entrySet()) {
            convertedMergedPropertiesTarget.put(prefix + "." + entry.getKey(), entry.getValue());
        }
        // We also need to make sure that the cto filter and sort criteria have the prefix
        Map<String, FilterAndSortCriteria> convertedCto = new HashMap<String, FilterAndSortCriteria>();
        for (Entry<String, FilterAndSortCriteria> entry : cto.getCriteriaMap().entrySet()) {
            if (adornedTargetList.getSortField() != null && entry.getKey().equals(adornedTargetList.getSortField())) {
                // The sort field lives on the join entity, not the target, so it keeps its key.
                convertedCto.put(entry.getKey(), entry.getValue());
            } else {
                convertedCto.put(prefix + "." + entry.getKey(), entry.getValue());
            }
        }
        // NOTE(review): this mutates the caller-supplied cto in place.
        cto.setCriteriaMap(convertedCto);
        List<FilterMapping> filterMappings2 = getBasicFilterMappings(persistencePerspective, cto, convertedMergedPropertiesTarget, ceilingEntityFullyQualifiedClassname);
        for (FilterMapping fm : filterMappings2) {
            fm.setInheritedFromClass(entities[0]);
        }
        filterMappings.addAll(filterMappings2);
        records = getPersistentRecords(adornedTargetList.getAdornedTargetEntityClassname(), filterMappings, cto.getFirstResult(), cto.getMaxResults());
    }
}
} | 1no label
| admin_broadleaf-open-admin-platform_src_main_java_org_broadleafcommerce_openadmin_server_service_persistence_module_AdornedTargetListPersistenceModule.java |
32 | public abstract class InMemoryBlueprintsTest extends TitanBlueprintsTest {
/**
 * Runs the Blueprints GraphTestSuite against the in-memory graph, excluding tests
 * that require persistence or a specific string representation, and reports timing.
 */
public void testGraphTestSuite() throws Exception {
    this.stopWatch();
    // Excluded tests presumably depend on durable storage, which the in-memory
    // backend does not provide — TODO confirm.
    doTestSuite(new GraphTestSuite(this), ImmutableSet.of("testStringRepresentation","testDataTypeValidationOnProperties","testGraphDataPersists"));
    BaseTest.printTestPerformance("GraphTestSuite", this.stopWatch());
}
///=========================== DEFAULT ===========
@Override
public void cleanUp() throws BackendException {
    // No-op: in-memory graphs leave nothing behind to clean up.
}
@Override
public boolean supportsMultipleGraphs() {
    // Named multi-graph instances are not supported by this in-memory harness.
    return false;
}
@Override
public void beforeSuite() {
    // No suite-level setup needed for the in-memory backend.
}
@Override
public void afterSuite() {
    // No suite-level teardown needed for the in-memory backend.
}
@Override
public Graph generateGraph() {
    // Hand back a fresh in-memory Titan graph supplied by the test setup helper.
    return StorageSetup.getInMemoryGraph();
}
@Override
public Graph generateGraph(String graphDirectoryName) {
    // Directory-backed graphs make no sense for a purely in-memory backend.
    throw new UnsupportedOperationException();
}
} | 0true
| titan-test_src_test_java_com_thinkaurelius_titan_blueprints_InMemoryBlueprintsTest.java |
2,310 | public static class Streams {
/**
 * Serializes a Rounding: a one-byte type id followed by the implementation's own
 * payload. Must stay symmetric with {@code read} below (id byte first).
 */
public static void write(Rounding rounding, StreamOutput out) throws IOException {
    out.writeByte(rounding.id());
    rounding.writeTo(out);
}
/**
 * Deserializes a Rounding written by {@code write}: reads the one-byte type id,
 * instantiates the matching concrete class, then lets it read its own payload.
 *
 * @throws ElasticsearchException if the id does not match any known implementation
 */
public static Rounding read(StreamInput in) throws IOException {
    Rounding rounding = null;
    byte id = in.readByte();
    switch (id) {
        case Interval.ID: rounding = new Interval(); break;
        case TimeZoneRounding.TimeTimeZoneRoundingFloor.ID: rounding = new TimeZoneRounding.TimeTimeZoneRoundingFloor(); break;
        case TimeZoneRounding.UTCTimeZoneRoundingFloor.ID: rounding = new TimeZoneRounding.UTCTimeZoneRoundingFloor(); break;
        case TimeZoneRounding.DayTimeZoneRoundingFloor.ID: rounding = new TimeZoneRounding.DayTimeZoneRoundingFloor(); break;
        case TimeZoneRounding.UTCIntervalTimeZoneRounding.ID: rounding = new TimeZoneRounding.UTCIntervalTimeZoneRounding(); break;
        case TimeZoneRounding.TimeIntervalTimeZoneRounding.ID: rounding = new TimeZoneRounding.TimeIntervalTimeZoneRounding(); break;
        case TimeZoneRounding.DayIntervalTimeZoneRounding.ID: rounding = new TimeZoneRounding.DayIntervalTimeZoneRounding(); break;
        case TimeZoneRounding.FactorTimeZoneRounding.ID: rounding = new TimeZoneRounding.FactorTimeZoneRounding(); break;
        case TimeZoneRounding.PrePostTimeZoneRounding.ID: rounding = new TimeZoneRounding.PrePostTimeZoneRounding(); break;
        default: throw new ElasticsearchException("unknown rounding id [" + id + "]");
    }
    rounding.readFrom(in);
    return rounding;
}
} | 0true
| src_main_java_org_elasticsearch_common_rounding_Rounding.java |
342 | return Lists.newArrayList(Iterables.filter(entries.keySet(),new Predicate<String>() {
@Override
public boolean apply(@Nullable String s) {
assert s!=null;
return StringUtils.isBlank(prefix) || s.startsWith(prefix);
}
})); | 0true
| titan-core_src_main_java_com_thinkaurelius_titan_diskstorage_configuration_backend_KCVSConfiguration.java |
2,622 | class ValidateJoinRequest extends TransportRequest {
ClusterState clusterState;
ValidateJoinRequest() {
}
ValidateJoinRequest(ClusterState clusterState) {
this.clusterState = clusterState;
}
@Override
public void readFrom(StreamInput in) throws IOException {
    super.readFrom(in);
    // Must mirror writeTo(): the cluster state is read after the superclass header.
    // nodesProvider is presumably a field of the enclosing class — supplies the local
    // node needed to rebuild the state; TODO confirm against surrounding context.
    clusterState = ClusterState.Builder.readFrom(in, nodesProvider.nodes().localNode());
}
@Override
public void writeTo(StreamOutput out) throws IOException {
    super.writeTo(out);
    // Must mirror readFrom(): the cluster state follows the superclass header.
    ClusterState.Builder.writeTo(clusterState, out);
}
} | 0true
| src_main_java_org_elasticsearch_discovery_zen_membership_MembershipAction.java |
2,110 | public class MapProxyImpl<K, V> extends MapProxySupport implements IMap<K, V>, InitializingObject {
public MapProxyImpl(final String name, final MapService mapService, final NodeEngine nodeEngine) {
super(name, mapService, nodeEngine);
}
@Override
public V get(Object k) {
    // Reject null keys, serialize the key with the map's partitioning strategy,
    // delegate to the internal get, then deserialize the stored value (null-safe).
    if (k == null) {
        throw new NullPointerException(NULL_KEY_IS_NOT_ALLOWED);
    }
    final MapService mapService = getService();
    final Data keyData = mapService.toData(k, partitionStrategy);
    final Object rawValue = getInternal(keyData);
    return (V) mapService.toObject(rawValue);
}
@Override
public V put(final K k, final V v) {
return put(k, v, -1, null);
}
@Override
public V put(final K k, final V v, final long ttl, final TimeUnit timeunit) {
if (k == null) {
throw new NullPointerException(NULL_KEY_IS_NOT_ALLOWED);
}
if (v == null) {
throw new NullPointerException(NULL_VALUE_IS_NOT_ALLOWED);
}
MapService service = getService();
final Data key = service.toData(k, partitionStrategy);
final Data value = service.toData(v);
final Data result = putInternal(key, value, ttl, timeunit);
return (V) service.toObject(result);
}
@Override
public boolean tryPut(final K k, final V v, final long timeout, final TimeUnit timeunit) {
if (k == null) {
throw new NullPointerException(NULL_KEY_IS_NOT_ALLOWED);
}
if (v == null) {
throw new NullPointerException(NULL_VALUE_IS_NOT_ALLOWED);
}
MapService service = getService();
final Data key = service.toData(k, partitionStrategy);
final Data value = service.toData(v);
return tryPutInternal(key, value, timeout, timeunit);
}
@Override
public V putIfAbsent(final K k, final V v) {
return putIfAbsent(k, v, -1, null);
}
@Override
public V putIfAbsent(final K k, final V v, final long ttl, final TimeUnit timeunit) {
if (k == null) {
throw new NullPointerException(NULL_KEY_IS_NOT_ALLOWED);
}
if (v == null) {
throw new NullPointerException(NULL_VALUE_IS_NOT_ALLOWED);
}
MapService service = getService();
final Data key = service.toData(k, partitionStrategy);
final Data value = service.toData(v);
final Data result = putIfAbsentInternal(key, value, ttl, timeunit);
return (V) service.toObject(result);
}
@Override
public void putTransient(final K k, final V v, final long ttl, final TimeUnit timeunit) {
if (k == null) {
throw new NullPointerException(NULL_KEY_IS_NOT_ALLOWED);
}
if (v == null) {
throw new NullPointerException(NULL_VALUE_IS_NOT_ALLOWED);
}
MapService service = getService();
final Data key = service.toData(k, partitionStrategy);
final Data value = service.toData(v);
putTransientInternal(key, value, ttl, timeunit);
}
@Override
public boolean replace(final K k, final V o, final V v) {
if (k == null) {
throw new NullPointerException(NULL_KEY_IS_NOT_ALLOWED);
}
if (o == null) {
throw new NullPointerException(NULL_VALUE_IS_NOT_ALLOWED);
}
if (v == null) {
throw new NullPointerException(NULL_VALUE_IS_NOT_ALLOWED);
}
MapService service = getService();
final Data key = service.toData(k, partitionStrategy);
final Data oldValue = service.toData(o);
final Data value = service.toData(v);
return replaceInternal(key, oldValue, value);
}
@Override
public V replace(final K k, final V v) {
if (k == null) {
throw new NullPointerException(NULL_KEY_IS_NOT_ALLOWED);
}
if (v == null) {
throw new NullPointerException(NULL_VALUE_IS_NOT_ALLOWED);
}
MapService service = getService();
final Data key = service.toData(k, partitionStrategy);
final Data value = service.toData(v);
return (V) service.toObject(replaceInternal(key, value));
}
@Override
public void set(K key, V value) {
set(key, value, -1, TimeUnit.MILLISECONDS);
}
@Override
public void set(final K k, final V v, final long ttl, final TimeUnit timeunit) {
if (k == null) {
throw new NullPointerException(NULL_KEY_IS_NOT_ALLOWED);
}
if (v == null) {
throw new NullPointerException(NULL_VALUE_IS_NOT_ALLOWED);
}
MapService service = getService();
final Data key = service.toData(k, partitionStrategy);
final Data value = service.toData(v);
setInternal(key, value, ttl, timeunit);
}
@Override
public V remove(Object k) {
if (k == null) {
throw new NullPointerException(NULL_KEY_IS_NOT_ALLOWED);
}
MapService service = getService();
final Data key = service.toData(k, partitionStrategy);
final Data result = removeInternal(key);
return (V) service.toObject(result);
}
@Override
public boolean remove(final Object k, final Object v) {
if (k == null) {
throw new NullPointerException(NULL_KEY_IS_NOT_ALLOWED);
}
if (v == null) {
throw new NullPointerException(NULL_VALUE_IS_NOT_ALLOWED);
}
MapService service = getService();
final Data key = service.toData(k, partitionStrategy);
final Data value = service.toData(v);
return removeInternal(key, value);
}
@Override
public void delete(Object k) {
if (k == null) {
throw new NullPointerException(NULL_KEY_IS_NOT_ALLOWED);
}
final Data key = getService().toData(k, partitionStrategy);
deleteInternal(key);
}
@Override
public boolean containsKey(Object k) {
if (k == null) {
throw new NullPointerException(NULL_KEY_IS_NOT_ALLOWED);
}
Data key = getService().toData(k, partitionStrategy);
return containsKeyInternal(key);
}
@Override
public boolean containsValue(final Object v) {
if (v == null) {
throw new NullPointerException(NULL_VALUE_IS_NOT_ALLOWED);
}
Data value = getService().toData(v);
return containsValueInternal(value);
}
@Override
public void lock(final K key) {
if (key == null) {
throw new NullPointerException(NULL_KEY_IS_NOT_ALLOWED);
}
NodeEngine nodeEngine = getNodeEngine();
Data k = getService().toData(key, partitionStrategy);
lockSupport.lock(nodeEngine, k);
}
@Override
public void lock(final Object key, final long leaseTime, final TimeUnit timeUnit) {
if (key == null) {
throw new NullPointerException(NULL_KEY_IS_NOT_ALLOWED);
}
shouldBePositive(leaseTime, "leaseTime");
Data k = getService().toData(key, partitionStrategy);
lockSupport.lock(getNodeEngine(), k, timeUnit.toMillis(leaseTime));
}
@Override
public void unlock(final K key) {
if (key == null) {
throw new NullPointerException(NULL_KEY_IS_NOT_ALLOWED);
}
NodeEngine nodeEngine = getNodeEngine();
Data k = getService().toData(key, partitionStrategy);
lockSupport.unlock(nodeEngine, k);
}
@Override
public boolean tryRemove(final K key, final long timeout, final TimeUnit timeunit) {
if (key == null) {
throw new NullPointerException(NULL_KEY_IS_NOT_ALLOWED);
}
Data k = getService().toData(key, partitionStrategy);
return tryRemoveInternal(k, timeout, timeunit);
}
@Override
public Future<V> getAsync(final K k) {
if (k == null) {
throw new NullPointerException(NULL_KEY_IS_NOT_ALLOWED);
}
Data key = getService().toData(k, partitionStrategy);
NodeEngine nodeEngine = getNodeEngine();
return new DelegatingFuture<V>(getAsyncInternal(key), nodeEngine.getSerializationService());
}
@Override
public boolean isLocked(final K k) {
if (k == null) {
throw new NullPointerException(NULL_KEY_IS_NOT_ALLOWED);
}
Data key = getService().toData(k, partitionStrategy);
NodeEngine nodeEngine = getNodeEngine();
return lockSupport.isLocked(nodeEngine, key);
}
@Override
public Future putAsync(final K key, final V value) {
return putAsync(key, value, -1, null);
}
@Override
public ICompletableFuture putAsync(final K key, final V value, final long ttl, final TimeUnit timeunit) {
if (key == null) {
throw new NullPointerException(NULL_KEY_IS_NOT_ALLOWED);
}
if (value == null) {
throw new NullPointerException(NULL_VALUE_IS_NOT_ALLOWED);
}
MapService service = getService();
Data k = service.toData(key, partitionStrategy);
Data v = service.toData(value);
return new DelegatingFuture<V>(putAsyncInternal(k, v, ttl, timeunit),
getNodeEngine().getSerializationService());
}
@Override
public ICompletableFuture removeAsync(final K key) {
if (key == null) {
throw new NullPointerException(NULL_KEY_IS_NOT_ALLOWED);
}
Data k = getService().toData(key, partitionStrategy);
return new DelegatingFuture<V>(removeAsyncInternal(k), getNodeEngine().getSerializationService());
}
/**
 * Returns the entries for the given keys in one bulk operation.
 * Every key is serialized with the configured partitioning strategy before the
 * internal bulk fetch.
 *
 * @throws NullPointerException if any key in the set is null (a null set itself
 *         also fails fast with an NPE at {@code keys.size()})
 */
@Override
public Map<K, V> getAll(final Set<K> keys) {
    // Fix: the original used the raw type "new HashSet(keys.size())", which compiled
    // with an unchecked warning; parameterize it. Pre-sizing avoids rehashing.
    Set<Data> ks = new HashSet<Data>(keys.size());
    MapService service = getService();
    for (K key : keys) {
        if (key == null) {
            throw new NullPointerException(NULL_KEY_IS_NOT_ALLOWED);
        }
        Data k = service.toData(key, partitionStrategy);
        ks.add(k);
    }
    return (Map<K, V>) getAllObjectInternal(ks);
}
@Override
public void putAll(final Map<? extends K, ? extends V> m) {
// Note, putAllInternal() will take care of the null key/value checks.
putAllInternal(m);
}
@Override
public boolean tryLock(final K key) {
if (key == null) {
throw new NullPointerException(NULL_KEY_IS_NOT_ALLOWED);
}
final NodeEngine nodeEngine = getNodeEngine();
Data k = getService().toData(key, partitionStrategy);
return lockSupport.tryLock(nodeEngine, k);
}
@Override
public boolean tryLock(final K key, final long time, final TimeUnit timeunit) throws InterruptedException {
if (key == null) {
throw new NullPointerException(NULL_KEY_IS_NOT_ALLOWED);
}
final NodeEngine nodeEngine = getNodeEngine();
Data k = getService().toData(key, partitionStrategy);
return lockSupport.tryLock(nodeEngine, k, time, timeunit);
}
@Override
public void forceUnlock(final K key) {
if (key == null) {
throw new NullPointerException(NULL_KEY_IS_NOT_ALLOWED);
}
final NodeEngine nodeEngine = getNodeEngine();
Data k = getService().toData(key, partitionStrategy);
lockSupport.forceUnlock(nodeEngine, k);
}
@Override
public String addInterceptor(MapInterceptor interceptor) {
if (interceptor == null) {
throw new NullPointerException("Interceptor should not be null!");
}
return addMapInterceptorInternal(interceptor);
}
@Override
public void removeInterceptor(String id) {
if (id == null) {
throw new NullPointerException("Interceptor id should not be null!");
}
removeMapInterceptorInternal(id);
}
@Override
public String addLocalEntryListener(EntryListener<K, V> listener, Predicate<K, V> predicate, boolean includeValue) {
if (listener == null) {
throw new NullPointerException("Listener should not be null!");
}
if (predicate == null) {
throw new NullPointerException("Predicate should not be null!");
}
return addLocalEntryListenerInternal(listener, predicate, null, includeValue);
}
@Override
public String addLocalEntryListener(EntryListener<K, V> listener, Predicate<K, V> predicate, K key, boolean includeValue) {
if (listener == null) {
throw new NullPointerException("Listener should not be null!");
}
if (predicate == null) {
throw new NullPointerException("Predicate should not be null!");
}
return addLocalEntryListenerInternal(listener, predicate, getService().toData(key, partitionStrategy), includeValue);
}
@Override
public String addEntryListener(final EntryListener listener, final boolean includeValue) {
if (listener == null) {
throw new NullPointerException("Listener should not be null!");
}
return addEntryListenerInternal(listener, null, includeValue);
}
@Override
public String addEntryListener(final EntryListener<K, V> listener, final K key, final boolean includeValue) {
if (listener == null) {
throw new NullPointerException("Listener should not be null!");
}
return addEntryListenerInternal(listener, getService().toData(key, partitionStrategy), includeValue);
}
@Override
public String addEntryListener(
EntryListener<K, V> listener, Predicate<K, V> predicate, K key, boolean includeValue) {
if (listener == null) {
throw new NullPointerException("Listener should not be null!");
}
if (predicate == null) {
throw new NullPointerException("Predicate should not be null!");
}
return addEntryListenerInternal(listener, predicate, getService().toData(key, partitionStrategy), includeValue);
}
@Override
public String addEntryListener(EntryListener<K, V> listener, Predicate<K, V> predicate, boolean includeValue) {
if (listener == null) {
throw new NullPointerException("Listener should not be null!");
}
if (predicate == null) {
throw new NullPointerException("Predicate should not be null!");
}
return addEntryListenerInternal(listener, predicate, null, includeValue);
}
@Override
public boolean removeEntryListener(String id) {
if (id == null) {
throw new NullPointerException("Listener id should not be null!");
}
return removeEntryListenerInternal(id);
}
@Override
public EntryView<K, V> getEntryView(K key) {
if (key == null) {
throw new NullPointerException(NULL_KEY_IS_NOT_ALLOWED);
}
SimpleEntryView<K, V> entryViewInternal =
(SimpleEntryView) getEntryViewInternal(getService().toData(key, partitionStrategy));
if (entryViewInternal == null) {
return null;
}
Data value = (Data) entryViewInternal.getValue();
entryViewInternal.setKey(key);
entryViewInternal.setValue((V) getService().toObject(value));
return entryViewInternal;
}
@Override
public boolean evict(final Object key) {
if (key == null) {
throw new NullPointerException(NULL_KEY_IS_NOT_ALLOWED);
}
return evictInternal(getService().toData(key, partitionStrategy));
}
@Override
public void clear() {
clearInternal();
}
@Override
public Set<K> keySet() {
Set<Data> dataSet = keySetInternal();
HashSet<K> keySet = new HashSet<K>();
for (Data data : dataSet) {
keySet.add((K) getService().toObject(data));
}
return keySet;
}
@Override
public Collection<V> values() {
Collection<Data> dataSet = valuesInternal();
Collection<V> valueSet = new ArrayList<V>();
for (Data data : dataSet) {
valueSet.add((V) getService().toObject(data));
}
return valueSet;
}
/**
 * Returns a snapshot of all entries, deserializing each key and value.
 * The result is an independent HashSet; mutating it does not affect the map.
 */
@Override
public Set entrySet() {
    Set<Entry<Data, Data>> entries = entrySetInternal();
    Set<Entry<K, V>> resultSet = new HashSet<Entry<K, V>>();
    for (Entry<Data, Data> entry : entries) {
        // Fix: the original constructed a raw SimpleImmutableEntry (unchecked warning);
        // parameterize it with <K, V>. The entry itself stays immutable.
        resultSet.add(new AbstractMap.SimpleImmutableEntry<K, V>(
                (K) getService().toObject(entry.getKey()),
                (V) getService().toObject(entry.getValue())));
    }
    return resultSet;
}
@Override
public Set<K> keySet(final Predicate predicate) {
if (predicate == null) {
throw new NullPointerException("Predicate should not be null!");
}
return query(predicate, IterationType.KEY, false);
}
@Override
public Set entrySet(final Predicate predicate) {
if (predicate == null) {
throw new NullPointerException("Predicate should not be null!");
}
return query(predicate, IterationType.ENTRY, false);
}
@Override
public Collection<V> values(final Predicate predicate) {
if (predicate == null) {
throw new NullPointerException("Predicate should not be null!");
}
return query(predicate, IterationType.VALUE, false);
}
@Override
public Set<K> localKeySet() {
final Set<Data> dataSet = localKeySetInternal();
final Set<K> keySet = new HashSet<K>(dataSet.size());
for (Data data : dataSet) {
keySet.add((K) getService().toObject(data));
}
return keySet;
}
@Override
public Set<K> localKeySet(final Predicate predicate) {
if (predicate == null) {
throw new NullPointerException("Predicate should not be null!");
}
return queryLocal(predicate, IterationType.KEY, false);
}
@Override
public Object executeOnKey(K key, EntryProcessor entryProcessor) {
if (key == null) {
throw new NullPointerException(NULL_KEY_IS_NOT_ALLOWED);
}
MapService service = getService();
return service.toObject(executeOnKeyInternal(service.toData(key, partitionStrategy), entryProcessor));
}
/**
 * Applies the entry processor to each of the given keys and returns the per-key results.
 *
 * @throws NullPointerException if {@code keys} is null or empty (exception type kept
 *         for backward compatibility with existing callers)
 */
@Override
public Map<K, Object> executeOnKeys(Set<K> keys, EntryProcessor entryProcessor) {
    // Fix: use isEmpty() instead of size() == 0, and stop reporting "null key is not
    // allowed" when the actual problem is an empty key set.
    if (keys == null || keys.isEmpty()) {
        throw new NullPointerException(keys == null
                ? NULL_KEY_IS_NOT_ALLOWED
                : "Empty keys set is not allowed!");
    }
    MapService service = getService();
    Set<Data> dataKeys = new HashSet<Data>(keys.size());
    for (K key : keys) {
        dataKeys.add(service.toData(key, partitionStrategy));
    }
    return executeOnKeysInternal(dataKeys, entryProcessor);
}
@Override
public void submitToKey(K key, EntryProcessor entryProcessor, ExecutionCallback callback) {
if (key == null) {
throw new NullPointerException(NULL_KEY_IS_NOT_ALLOWED);
}
MapService service = getService();
Data keyData = service.toData(key, partitionStrategy);
executeOnKeyInternal(keyData,entryProcessor,callback);
}
@Override
public ICompletableFuture submitToKey(K key, EntryProcessor entryProcessor) {
if (key == null) {
throw new NullPointerException(NULL_KEY_IS_NOT_ALLOWED);
}
MapService service = getService();
Data keyData = service.toData(key, partitionStrategy);
ICompletableFuture f = executeOnKeyInternal(keyData,entryProcessor,null);
return new DelegatingFuture(f,service.getSerializationService());
}
/**
 * Invokes an operation on the given partition, blocks for its result, and
 * deserializes it. If the remote side returned a Throwable, it is rethrown here
 * rather than returned to the caller.
 */
protected Object invoke(Operation operation, int partitionId) throws Throwable {
    NodeEngine nodeEngine = getNodeEngine();
    Future f = nodeEngine.getOperationService().invokeOnPartition(SERVICE_NAME, operation, partitionId);
    Object response = f.get();
    Object returnObj = getService().toObject(response);
    if (returnObj instanceof Throwable) {
        // Remote failures travel back as serialized Throwables; surface them as exceptions.
        throw (Throwable) returnObj;
    }
    return returnObj;
}
@Override
public String toString() {
    // Same rendering as before: IMap{name='<map name>'}
    return "IMap" + "{name='" + name + '\'' + '}';
}
} | 1no label
| hazelcast_src_main_java_com_hazelcast_map_proxy_MapProxyImpl.java |
3,172 | private static class Preprocessor {
// Decodes the BytesRef as UTF-8 text; subclasses may presumably override to
// preprocess differently — TODO confirm from surrounding usages.
public String toString(BytesRef ref) {
    return ref.utf8ToString();
}
// Default ordering: delegate to BytesRef's own byte-wise comparison.
public int compare(BytesRef a, BytesRef b) {
    return a.compareTo(b);
}
} | 0true
| src_test_java_org_elasticsearch_index_fielddata_DuelFieldDataTests.java |
1,000 | public class DrainRequest extends SemaphoreRequest {
public DrainRequest() {
}
public DrainRequest(String name) {
super(name, -1);
}
@Override
protected Operation prepareOperation() {
    // Builds the server-side operation that drains all available permits.
    return new DrainOperation(name);
}
@Override
public int getClassId() {
    // Portable-serialization class id registered in SemaphorePortableHook.
    return SemaphorePortableHook.DRAIN;
}
@Override
public Permission getRequiredPermission() {
    // Draining consumes permits, so it requires the acquire action on this semaphore.
    return new SemaphorePermission(name, ActionConstants.ACTION_ACQUIRE);
}
} | 0true
| hazelcast_src_main_java_com_hazelcast_concurrent_semaphore_client_DrainRequest.java |
563 | public class PutMappingRequest extends AcknowledgedRequest<PutMappingRequest> {
private static ObjectOpenHashSet<String> RESERVED_FIELDS = ObjectOpenHashSet.from(
"_uid", "_id", "_type", "_source", "_all", "_analyzer", "_boost", "_parent", "_routing", "_index",
"_size", "_timestamp", "_ttl"
);
private String[] indices;
private IndicesOptions indicesOptions = IndicesOptions.fromOptions(false, false, true, true);
private String type;
private String source;
private boolean ignoreConflicts = false;
PutMappingRequest() {
}
/**
* Constructs a new put mapping request against one or more indices. If nothing is set then
* it will be executed against all indices.
*/
public PutMappingRequest(String... indices) {
this.indices = indices;
}
@Override
public ActionRequestValidationException validate() {
    // Both the mapping type and the mapping source are mandatory; collect all
    // missing-field errors into one validation exception (null when valid).
    ActionRequestValidationException validationException = null;
    if (type == null) {
        validationException = addValidationError("mapping type is missing", validationException);
    }
    if (source == null) {
        validationException = addValidationError("mapping source is missing", validationException);
    }
    return validationException;
}
/**
* Sets the indices this put mapping operation will execute on.
*/
public PutMappingRequest indices(String[] indices) {
this.indices = indices;
return this;
}
/**
* The indices the mappings will be put.
*/
public String[] indices() {
return indices;
}
public IndicesOptions indicesOptions() {
return indicesOptions;
}
public PutMappingRequest indicesOptions(IndicesOptions indicesOptions) {
this.indicesOptions = indicesOptions;
return this;
}
/**
* The mapping type.
*/
public String type() {
return type;
}
/**
* The type of the mappings.
*/
public PutMappingRequest type(String type) {
this.type = type;
return this;
}
/**
* The mapping source definition.
*/
public String source() {
return source;
}
/**
* A specialized simplified mapping source method, takes the form of simple properties definition:
* ("field1", "type=string,store=true").
*
* Also supports metadata mapping fields such as `_all` and `_parent` as property definition, these metadata
* mapping fields will automatically be put on the top level mapping object.
*/
public PutMappingRequest source(Object... source) {
return source(buildFromSimplifiedDef(type, source));
}
public static XContentBuilder buildFromSimplifiedDef(String type, Object... source) {
try {
XContentBuilder builder = XContentFactory.jsonBuilder();
builder.startObject();
if (type != null) {
builder.startObject(type);
}
for (int i = 0; i < source.length; i++) {
String fieldName = source[i++].toString();
if (RESERVED_FIELDS.contains(fieldName)) {
builder.startObject(fieldName);
String[] s1 = Strings.splitStringByCommaToArray(source[i].toString());
for (String s : s1) {
String[] s2 = Strings.split(s, "=");
if (s2.length != 2) {
throw new ElasticsearchIllegalArgumentException("malformed " + s);
}
builder.field(s2[0], s2[1]);
}
builder.endObject();
}
}
builder.startObject("properties");
for (int i = 0; i < source.length; i++) {
String fieldName = source[i++].toString();
if (RESERVED_FIELDS.contains(fieldName)) {
continue;
}
builder.startObject(fieldName);
String[] s1 = Strings.splitStringByCommaToArray(source[i].toString());
for (String s : s1) {
String[] s2 = Strings.split(s, "=");
if (s2.length != 2) {
throw new ElasticsearchIllegalArgumentException("malformed " + s);
}
builder.field(s2[0], s2[1]);
}
builder.endObject();
}
builder.endObject();
if (type != null) {
builder.endObject();
}
builder.endObject();
return builder;
} catch (Exception e) {
throw new ElasticsearchIllegalArgumentException("failed to generate simplified mapping definition", e);
}
}
/**
* The mapping source definition.
*/
public PutMappingRequest source(XContentBuilder mappingBuilder) {
try {
return source(mappingBuilder.string());
} catch (IOException e) {
throw new ElasticsearchIllegalArgumentException("Failed to build json for mapping request", e);
}
}
/**
* The mapping source definition.
*/
@SuppressWarnings("unchecked")
public PutMappingRequest source(Map mappingSource) {
try {
XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
builder.map(mappingSource);
return source(builder.string());
} catch (IOException e) {
throw new ElasticsearchGenerationException("Failed to generate [" + mappingSource + "]", e);
}
}
/**
* The mapping source definition.
*/
public PutMappingRequest source(String mappingSource) {
this.source = mappingSource;
return this;
}
/**
* If there is already a mapping definition registered against the type, then it will be merged. If there are
* elements that can't be merged are detected, the request will be rejected unless the
* {@link #ignoreConflicts(boolean)} is set. In such a case, the duplicate mappings will be rejected.
*/
public boolean ignoreConflicts() {
return ignoreConflicts;
}
/**
* If there is already a mapping definition registered against the type, then it will be merged. If there are
* elements that can't be merged are detected, the request will be rejected unless the
* {@link #ignoreConflicts(boolean)} is set. In such a case, the duplicate mappings will be rejected.
*/
public PutMappingRequest ignoreConflicts(boolean ignoreDuplicates) {
this.ignoreConflicts = ignoreDuplicates;
return this;
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
indices = in.readStringArray();
indicesOptions = IndicesOptions.readIndicesOptions(in);
type = in.readOptionalString();
source = in.readString();
readTimeout(in);
ignoreConflicts = in.readBoolean();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeStringArrayNullable(indices);
indicesOptions.writeIndicesOptions(out);
out.writeOptionalString(type);
out.writeString(source);
writeTimeout(out);
out.writeBoolean(ignoreConflicts);
}
} | 0true
| src_main_java_org_elasticsearch_action_admin_indices_mapping_put_PutMappingRequest.java |
791 | /**
 * Contract for offers that carry quantity tiers and a time-zone strategy in
 * addition to the base offer behavior.
 */
public interface AdvancedOffer {

    // The tiers attached to this offer.
    List<OfferTier> getOfferTiers();

    void setOfferTiers(List<OfferTier> offerTiers);

    // Whether tier-based amounts should be applied for this offer.
    boolean isTieredOffer();

    void setTieredOffer(boolean isTieredOffer);

    // Time zone strategy used when evaluating this offer.
    public OfferTimeZoneType getOfferTimeZoneType();

    public void setOfferTimeZoneType(OfferTimeZoneType offerTimeZoneType);
} | 0true
| core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_offer_domain_AdvancedOffer.java |
2,082 | /**
 * A character-stream reader over a {@link String}, analogous to
 * {@link java.io.StringReader} but also usable as a {@link CharSequence}
 * through its superclass.
 */
public class FastStringReader extends CharSequenceReader {

    private String str;
    private int length;     // set to -1 once the stream has been closed
    private int next = 0;   // index of the next character to read
    private int mark = 0;   // position remembered by mark()

    /**
     * Creates a new string reader.
     *
     * @param s String providing the character stream.
     */
    public FastStringReader(String s) {
        this.str = s;
        this.length = s.length();
    }

    /**
     * Check to make sure that the stream has not been closed
     */
    private void ensureOpen() throws IOException {
        if (length == -1)
            throw new IOException("Stream closed");
    }

    @Override
    public int length() {
        return length;
    }

    @Override
    public char charAt(int index) {
        return str.charAt(index);
    }

    @Override
    public CharSequence subSequence(int start, int end) {
        return str.subSequence(start, end);
    }

    /**
     * Reads a single character.
     *
     * @return The character read, or -1 if the end of the stream has been
     *         reached
     * @throws IOException If an I/O error occurs
     */
    @Override
    public int read() throws IOException {
        ensureOpen();
        if (next >= length)
            return -1;
        return str.charAt(next++);
    }

    /**
     * Reads characters into a portion of an array.
     *
     * @param cbuf Destination buffer
     * @param off  Offset at which to start writing characters
     * @param len  Maximum number of characters to read
     * @return The number of characters read, or -1 if the end of the
     *         stream has been reached
     * @throws IOException               If an I/O error occurs
     * @throws IndexOutOfBoundsException If {@code off} and {@code len} do not
     *                                   denote a valid range in {@code cbuf}
     */
    @Override
    public int read(char cbuf[], int off, int len) throws IOException {
        ensureOpen();
        // Validate the requested range up front, mirroring the contract of
        // java.io.StringReader; the previous version accepted invalid offsets
        // and only failed later inside String.getChars.
        if ((off < 0) || (off > cbuf.length) || (len < 0) ||
                ((off + len) > cbuf.length) || ((off + len) < 0)) {
            throw new IndexOutOfBoundsException();
        }
        if (len == 0) {
            return 0;
        }
        if (next >= length)
            return -1;
        int n = Math.min(length - next, len);
        str.getChars(next, next + n, cbuf, off);
        next += n;
        return n;
    }

    /**
     * Skips the specified number of characters in the stream. Returns
     * the number of characters that were skipped.
     * <p/>
     * <p>The <code>ns</code> parameter may be negative, even though the
     * <code>skip</code> method of the {@link Reader} superclass throws
     * an exception in this case. Negative values of <code>ns</code> cause the
     * stream to skip backwards. Negative return values indicate a skip
     * backwards. It is not possible to skip backwards past the beginning of
     * the string.
     * <p/>
     * <p>If the entire string has been read or skipped, then this method has
     * no effect and always returns 0.
     *
     * @throws IOException If an I/O error occurs
     */
    @Override
    public long skip(long ns) throws IOException {
        ensureOpen();
        if (next >= length)
            return 0;
        // Bound skip by beginning and end of the source
        long n = Math.min(length - next, ns);
        n = Math.max(-next, n);
        next += n;
        return n;
    }

    /**
     * Tells whether this stream is ready to be read.
     *
     * @return True if the next read() is guaranteed not to block for input
     * @throws IOException If the stream is closed
     */
    @Override
    public boolean ready() throws IOException {
        ensureOpen();
        return true;
    }

    /**
     * Tells whether this stream supports the mark() operation, which it does.
     */
    @Override
    public boolean markSupported() {
        return true;
    }

    /**
     * Marks the present position in the stream. Subsequent calls to reset()
     * will reposition the stream to this point.
     *
     * @param readAheadLimit Limit on the number of characters that may be
     *                       read while still preserving the mark. Because
     *                       the stream's input comes from a string, there
     *                       is no actual limit, so this argument must not
     *                       be negative, but is otherwise ignored.
     * @throws IllegalArgumentException If readAheadLimit is < 0
     * @throws IOException              If an I/O error occurs
     */
    @Override
    public void mark(int readAheadLimit) throws IOException {
        if (readAheadLimit < 0) {
            throw new IllegalArgumentException("Read-ahead limit < 0");
        }
        ensureOpen();
        mark = next;
    }

    /**
     * Resets the stream to the most recent mark, or to the beginning of the
     * string if it has never been marked.
     *
     * @throws IOException If an I/O error occurs
     */
    @Override
    public void reset() throws IOException {
        ensureOpen();
        next = mark;
    }

    /**
     * Closes the stream and releases any system resources associated with
     * it. Once the stream has been closed, further read(),
     * ready(), mark(), or reset() invocations will throw an IOException.
     * Closing a previously closed stream has no effect.
     */
    @Override
    public void close() {
        length = -1;
    }

    @Override
    public String toString() {
        return str;
    }
} | 0true
| src_main_java_org_elasticsearch_common_io_FastStringReader.java |
2,333 | /**
 * Binding module that exposes the node-level {@link Settings} instance and an
 * eagerly instantiated {@link SettingsFilter}.
 */
public class SettingsModule extends AbstractModule {

    private final Settings settings;

    public SettingsModule(Settings settings) {
        this.settings = settings;
    }

    @Override
    protected void configure() {
        // Bind the concrete settings instance so components can inject it;
        // SettingsFilter is eager so it exists before anything needs it.
        bind(Settings.class).toInstance(settings);
        bind(SettingsFilter.class).asEagerSingleton();
    }
} | 0true
| src_main_java_org_elasticsearch_common_settings_SettingsModule.java |
1,302 | /**
 * Standalone entry point that boots an OrientDB server with all record caches
 * disabled and a small disk cache, then stays alive until killed externally
 * (the enclosing crash-restore test terminates the process forcibly).
 */
public static final class RemoteDBRunner {
    public static void main(String[] args) throws Exception {
        // Disable both cache levels so reads and writes exercise the disk
        // cache, which is what this crash-restore scenario targets.
        OGlobalConfiguration.CACHE_LEVEL1_ENABLED.setValue(false);
        OGlobalConfiguration.CACHE_LEVEL1_SIZE.setValue(0);
        OGlobalConfiguration.CACHE_LEVEL2_ENABLED.setValue(false);
        OGlobalConfiguration.CACHE_LEVEL2_SIZE.setValue(0);
        OGlobalConfiguration.DISK_CACHE_SIZE.setValue(512);

        OServer server = OServerMain.create();
        server.startup(RemoteDBRunner.class
                .getResourceAsStream("/com/orientechnologies/orient/core/storage/impl/local/paginated/db-create-big-records-config.xml"));
        server.activate();

        // Park this thread forever instead of the previous "while (true) ;"
        // busy-wait, which pinned a CPU core at 100% for the whole test run.
        // Joining on the current thread never returns because the thread
        // cannot terminate while it is waiting on itself.
        Thread.currentThread().join();
    }
} | 0true
| server_src_test_java_com_orientechnologies_orient_core_storage_impl_local_paginated_LocalPaginatedStorageSmallCacheBigRecordsCrashRestore.java |
1,508 | /**
 * Test entity verifying that the record version can be mapped onto a
 * {@code String} field annotated with {@link OVersion}.
 */
public static class EntityStringVersion {
    // Record identity, populated by the object mapper.
    @OId
    private ORID rid;
    // Record version serialized as a string, managed by the object mapper.
    @OVersion
    private String version;

    public EntityStringVersion() {
    }

    public ORID getRid() {
        return rid;
    }

    public String getVersion() {
        return version;
    }
} | 0true
| object_src_test_java_com_orientechnologies_orient_object_enhancement_OVersionSerializationTest.java |
1,996 | /**
 * JPA entity mapping a security role onto the {@code BLC_ROLE} table.
 */
@Entity
@EntityListeners(value = { TemporalTimestampListener.class })
@Inheritance(strategy = InheritanceType.JOINED)
@Table(name = "BLC_ROLE")
public class RoleImpl implements Role {

    private static final long serialVersionUID = 1L;

    // Primary key produced by a table generator that supports explicit id
    // overrides (useful during data import).
    @Id
    @GeneratedValue(generator = "RoleId")
    @GenericGenerator(
        name="RoleId",
        strategy="org.broadleafcommerce.common.persistence.IdOverrideTableGenerator",
        parameters = {
            @Parameter(name="segment_value", value="RoleImpl"),
            @Parameter(name="entity_name", value="org.broadleafcommerce.profile.core.domain.RoleImpl")
        }
    )
    @Column(name = "ROLE_ID")
    protected Long id;

    // Role name, indexed for lookup by name.
    @Column(name = "ROLE_NAME", nullable = false)
    @Index(name="ROLE_NAME_INDEX", columnNames={"ROLE_NAME"})
    protected String roleName;

    @Override
    public Long getId() {
        return id;
    }

    @Override
    public void setId(Long id) {
        this.id = id;
    }

    @Override
    public String getRoleName() {
        return roleName;
    }

    @Override
    public void setRoleName(String roleName) {
        this.roleName = roleName;
    }

    // NOTE(review): hashCode is based only on roleName while equals may decide
    // purely on id. Two instances equal by id but with different role names
    // would report different hash codes, violating the equals/hashCode
    // contract for hash-based collections — confirm whether that combination
    // can occur before relying on these methods in sets/maps.
    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + ((roleName == null) ? 0 : roleName.hashCode());
        return result;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        RoleImpl other = (RoleImpl) obj;
        // Persisted entities compare by database identity when both have one.
        if (id != null && other.id != null) {
            return id.equals(other.id);
        }
        // Transient instances fall back to comparing role names.
        if (roleName == null) {
            if (other.roleName != null)
                return false;
        } else if (!roleName.equals(other.roleName))
            return false;
        return true;
    }
} | 1no label
| core_broadleaf-profile_src_main_java_org_broadleafcommerce_profile_core_domain_RoleImpl.java |
1,474 | /**
 * Backing form bean for the "change password" page: holds the customer's
 * current password plus the new password and its confirmation.
 */
public class ChangePasswordForm implements Serializable {

    private static final long serialVersionUID = 1L;

    // Plain-text values bound from the submitted form.
    private String currentPassword;
    private String newPassword;
    private String newPasswordConfirm;

    public String getCurrentPassword() {
        return currentPassword;
    }

    public void setCurrentPassword(String currentPassword) {
        this.currentPassword = currentPassword;
    }

    public String getNewPassword() {
        return newPassword;
    }

    public void setNewPassword(String newPassword) {
        this.newPassword = newPassword;
    }

    public String getNewPasswordConfirm() {
        return newPasswordConfirm;
    }

    public void setNewPasswordConfirm(String newPasswordConfirm) {
        this.newPasswordConfirm = newPasswordConfirm;
    }
} | 0true
| core_broadleaf-framework-web_src_main_java_org_broadleafcommerce_core_web_controller_account_ChangePasswordForm.java |
1,831 | /**
 * A reference to an instance registered for member injection at
 * injector-creation time. {@link #get} is reentrant: on the creating thread
 * it performs any pending injection itself; other threads wait until
 * injector creation has finished.
 */
private class InjectableReference<T> implements Initializable<T> {
    private final InjectorImpl injector;
    private final T instance;
    private final Object source;
    private MembersInjectorImpl<T> membersInjector;

    public InjectableReference(InjectorImpl injector, T instance, Object source) {
        this.injector = injector;
        this.instance = checkNotNull(instance, "instance");
        this.source = checkNotNull(source, "source");
    }

    /**
     * Resolves the members injector for the instance's concrete type,
     * reporting any problems against {@code source}.
     */
    public void validate(Errors errors) throws ErrorsException {
        @SuppressWarnings("unchecked") // the type of 'T' is a TypeLiteral<T>
        TypeLiteral<T> type = TypeLiteral.get((Class<T>) instance.getClass());
        membersInjector = injector.membersInjectorStore.get(type, errors.withSource(source));
    }

    /**
     * Reentrant. If {@code instance} was registered for injection at injector-creation time, this
     * method will ensure that all its members have been injected before returning.
     */
    public T get(Errors errors) throws ErrorsException {
        if (ready.getCount() == 0) {
            return instance;
        }

        // just wait for everything to be injected by another thread
        if (Thread.currentThread() != creatingThread) {
            try {
                ready.await();
                return instance;
            } catch (InterruptedException e) {
                // Give up, since we don't know if our injection is ready.
                // Restore the interrupt status (previously swallowed) so
                // callers up the stack can still observe the interruption.
                Thread.currentThread().interrupt();
                throw new RuntimeException(e);
            }
        }

        // toInject needs injection, do it right away. we only do this once, even if it fails
        if (pendingInjection.remove(instance) != null) {
            membersInjector.injectAndNotify(instance, errors.withSource(source));
        }

        return instance;
    }

    @Override
    public String toString() {
        return instance.toString();
    }
} | 0true
| src_main_java_org_elasticsearch_common_inject_Initializer.java |
1,585 | /**
 * A response to a distributed request: carries the payload plus the names of
 * the executing and sending nodes and the id of the originating request.
 */
public interface ODistributedResponse {
    // Name of the node that executed the request.
    String getExecutorNodeName();

    // Name of the node that sent the request.
    String getSenderNodeName();

    // The response body; the concrete type depends on the executed task.
    Object getPayload();

    // Correlation id matching the originating request.
    long getRequestId();

    ODistributedResponse setExecutorNodeName(String iExecutor);

    ODistributedResponse setPayload(Object iPayload);

    // True when the request was executed on this very node.
    boolean isExecutedOnLocalNode();
} | 0true
| server_src_main_java_com_orientechnologies_orient_server_distributed_ODistributedResponse.java |
223 | /**
 * {@link LineReader} implementation reading lines from standard input.
 */
static class DefaultLineReader implements LineReader {
    // The platform default charset is used deliberately for this interactive
    // console tool, hence the FindBugs suppression.
    @edu.umd.cs.findbugs.annotations.SuppressWarnings("DM_DEFAULT_ENCODING")
    BufferedReader in = new BufferedReader(new InputStreamReader(System.in));

    public String readLine() throws Exception {
        return in.readLine();
    }
} | 0true
| hazelcast-client_src_main_java_com_hazelcast_client_examples_ClientTestApp.java |
496 | /**
 * Rewrites every RID contained in a link-set. Returns a rebuilt set when at
 * least one member changed, otherwise {@code null} to signal that the
 * original set should be kept as-is.
 */
private static class LinkSetRewriter implements FieldRewriter<OMVRBTreeRIDSet> {
    @Override
    public OMVRBTreeRIDSet rewriteValue(OMVRBTreeRIDSet setValue) {
        setValue.setAutoConvertToRecord(false);

        final OMVRBTreeRIDSet rebuilt = new OMVRBTreeRIDSet();
        rebuilt.setAutoConvertToRecord(false);

        boolean changed = false;
        for (OIdentifiable member : setValue) {
            final ORID rid = member.getIdentity();
            final FieldRewriter<ORID> ridRewriter = RewritersFactory.INSTANCE.findRewriter(null, null, rid);
            final ORID rewritten = ridRewriter.rewriteValue(rid);

            if (rewritten == null) {
                // Unchanged member: keep the original reference.
                rebuilt.add(member);
            } else {
                rebuilt.add(rewritten);
                changed = true;
            }
        }

        if (changed)
            return rebuilt;

        rebuilt.clear();
        return null;
    }
} | 0true
| core_src_main_java_com_orientechnologies_orient_core_db_tool_ODatabaseImport.java |
1,492 | // Converts the submitted country abbreviation on the "address.country" form
// field into a Country entity via the country service.
binder.registerCustomEditor(Country.class, "address.country", new PropertyEditorSupport() {
    @Override
    public void setAsText(String text) {
        // NOTE(review): findCountryByAbbreviation may yield null for an
        // unknown code, leaving the bound country null — confirm downstream
        // validation handles that case.
        Country country = countryService.findCountryByAbbreviation(text);
        setValue(country);
    }
}); | 0true
| core_broadleaf-framework-web_src_main_java_org_broadleafcommerce_core_web_controller_checkout_BroadleafCheckoutController.java |
1,167 | public interface IMap<K, V> extends ConcurrentMap<K, V>, BaseMap<K, V> {
/**
* {@inheritDoc}
* <p/>
* <p><b>Warning:</b></p>
* <p> ˆ
* This method uses <tt>hashCode</tt> and <tt>equals</tt> of binary form of
* the <tt>key</tt>, not the actual implementations of <tt>hashCode</tt> and <tt>equals</tt>
* defined in <tt>key</tt>'s class.
* </p>
*
* @throws NullPointerException if the specified key is null
*/
boolean containsKey(Object key);
/**
* {@inheritDoc}
*
* @throws NullPointerException if the specified value is null
*/
boolean containsValue(Object value);
/**
* {@inheritDoc}
* <p/>
* <p><b>Warning:</b></p>
* <p>
* This method returns a clone of original value, modifying the returned value does not change
* the actual value in the map. One should put modified value back to make changes visible to all nodes.
* <pre>
* V value = map.get(key);
* value.updateSomeProperty();
* map.put(key, value);
* </pre>
* </p>
* <p/>
* <p><b>Warning-2:</b></p>
* This method uses <tt>hashCode</tt> and <tt>equals</tt> of binary form of
* the <tt>key</tt>, not the actual implementations of <tt>hashCode</tt> and <tt>equals</tt>
* defined in <tt>key</tt>'s class.
* <p/>
*
* @throws NullPointerException if the specified key is null
*/
V get(Object key);
/**
* {@inheritDoc}
* <p/>
* <p><b>Warning:</b></p>
* <p>
* This method returns a clone of previous value, not the original (identically equal) value
* previously put into map.
* </p>
* <p/>
* <p><b>Warning-2:</b></p>
* This method uses <tt>hashCode</tt> and <tt>equals</tt> of binary form of
* the <tt>key</tt>, not the actual implementations of <tt>hashCode</tt> and <tt>equals</tt>
* defined in <tt>key</tt>'s class.
*
* @throws NullPointerException if the specified key or value is null
*/
V put(K key, V value);
/**
* {@inheritDoc}
* <p/>
* <p><b>Warning:</b></p>
* <p>
* This method uses <tt>hashCode</tt> and <tt>equals</tt> of binary form of
* the <tt>key</tt>, not the actual implementations of <tt>hashCode</tt> and <tt>equals</tt>
* defined in <tt>key</tt>'s class.
* </p>
* <p/>
* <p><b>Warning-2:</b></p>
* <p>
* This method returns a clone of previous value, not the original (identically equal) value
* previously put into map.
* </p>
*
* @throws NullPointerException if the specified key is null
*/
V remove(Object key);
/**
* {@inheritDoc}
* <p/>
* <p><b>Warning:</b></p>
* This method uses <tt>hashCode</tt> and <tt>equals</tt> of binary form of
* the <tt>key</tt>, not the actual implementations of <tt>hashCode</tt> and <tt>equals</tt>
* defined in <tt>key</tt>'s class.
*
* @throws NullPointerException if the specified key or value is null
*/
boolean remove(Object key, Object value);
/**
* Removes the mapping for a key from this map if it is present
* (optional operation).
* <p/>
* <p>Differently from {@link #remove(Object)}; this operation does not return
* removed value to avoid serialization cost of returned value.
* <p/>
* If the removed value will not be used, delete operation
* should be preferred over remove operation for a better performance.
* <p/>
* <p>The map will not contain a mapping for the specified key once the
* call returns.
*
* @param key key whose mapping is to be removed from the map
* @throws ClassCastException if the key is of an inappropriate type for
* this map (optional)
* @throws NullPointerException if the specified key is null
*/
void delete(Object key);
/**
* If this map has a MapStore this method flushes
* all the local dirty entries by calling MapStore.storeAll() and/or MapStore.deleteAll()
*/
void flush();
/**
* Returns the entries for the given keys.
* <p/>
* <p><b>Warning:</b></p>
* The returned map is <b>NOT</b> backed by the original map,
* so changes to the original map are <b>NOT</b> reflected in the returned map, and vice-versa.
* <p/>
* <p><b>Warning-2:</b></p>
* This method uses <tt>hashCode</tt> and <tt>equals</tt> of binary form of
* the <tt>keys</tt>, not the actual implementations of <tt>hashCode</tt> and <tt>equals</tt>
* defined in <tt>key</tt>'s class.
*
* @param keys keys to get
* @return map of entries
* @throws NullPointerException if any of the specified keys are null
*/
Map<K, V> getAll(Set<K> keys);
/**
* Asynchronously gets the given key.
* <code>
* Future future = map.getAsync(key);
* // do some other stuff, when ready get the result
* Object value = future.get();
* </code>
* Future.get() will block until the actual map.get() completes.
* If the application requires timely response,
* then Future.get(timeout, timeunit) can be used.
* <code>
* try{
* Future future = map.getAsync(key);
* Object value = future.get(40, TimeUnit.MILLISECOND);
* }catch (TimeoutException t) {
* // time wasn't enough
* }
* </code>
* ExecutionException is never thrown.
* <p/>
* <p><b>Warning:</b></p>
* This method uses <tt>hashCode</tt> and <tt>equals</tt> of binary form of
* the <tt>key</tt>, not the actual implementations of <tt>hashCode</tt> and <tt>equals</tt>
* defined in <tt>key</tt>'s class.
*
* @param key the key of the map entry
* @return Future from which the value of the key can be retrieved.
* @throws NullPointerException if the specified key is null
* @see java.util.concurrent.Future
*/
Future<V> getAsync(K key);
/**
* Asynchronously puts the given key and value.
* <code>
* Future future = map.putAsync(key, value);
* // do some other stuff, when ready get the result
* Object oldValue = future.get();
* </code>
* Future.get() will block until the actual map.get() completes.
* If the application requires timely response,
* then Future.get(timeout, timeunit) can be used.
* <code>
* try{
* Future future = map.putAsync(key, newValue);
* Object oldValue = future.get(40, TimeUnit.MILLISECOND);
* }catch (TimeoutException t) {
* // time wasn't enough
* }
* </code>
* ExecutionException is never thrown.
* <p/>
* <p><b>Warning:</b></p>
* This method uses <tt>hashCode</tt> and <tt>equals</tt> of binary form of
* the <tt>key</tt>, not the actual implementations of <tt>hashCode</tt> and <tt>equals</tt>
* defined in <tt>key</tt>'s class.
*
* @param key the key of the map entry
* @param value the new value of the map entry
* @return Future from which the old value of the key can be retrieved.
* @throws NullPointerException if the specified key or value is null
* @see java.util.concurrent.Future
*/
Future<V> putAsync(K key, V value);
/**
* Asynchronously puts the given key and value into this map with a given ttl (time to live) value.
* Entry will expire and get evicted after the ttl. If ttl is 0, then
* the entry lives forever.
* <code>
* Future future = map.putAsync(key, value, ttl, timeunit);
* // do some other stuff, when ready get the result
* Object oldValue = future.get();
* </code>
* Future.get() will block until the actual map.get() completes.
* If the application requires timely response,
* then Future.get(timeout, timeunit) can be used.
* <code>
* try{
* Future future = map.putAsync(key, newValue, ttl, timeunit);
* Object oldValue = future.get(40, TimeUnit.MILLISECOND);
* }catch (TimeoutException t) {
* // time wasn't enough
* }
* </code>
* ExecutionException is never thrown.
* <p/>
* <p><b>Warning:</b></p>
* This method uses <tt>hashCode</tt> and <tt>equals</tt> of binary form of
* the <tt>key</tt>, not the actual implementations of <tt>hashCode</tt> and <tt>equals</tt>
* defined in <tt>key</tt>'s class.
*
* @param key the key of the map entry
* @param value the new value of the map entry
* @param ttl maximum time for this entry to stay in the map
* 0 means infinite.
* @param timeunit time unit for the ttl
* @return Future from which the old value of the key can be retrieved.
* @throws NullPointerException if the specified key or value is null
* @see java.util.concurrent.Future
*/
Future<V> putAsync(K key, V value, long ttl, TimeUnit timeunit);
/**
* Asynchronously removes the given key.
* <p/>
* <p><b>Warning:</b></p>
* This method uses <tt>hashCode</tt> and <tt>equals</tt> of binary form of
* the <tt>key</tt>, not the actual implementations of <tt>hashCode</tt> and <tt>equals</tt>
* defined in <tt>key</tt>'s class.
*
* @param key The key of the map entry to remove.
* @return A {@link java.util.concurrent.Future} from which the value
* removed from the map can be retrieved.
* @throws NullPointerException if the specified key is null
*/
Future<V> removeAsync(K key);
/**
* Tries to remove the entry with the given key from this map
* within specified timeout value. If the key is already locked by another
* thread and/or member, then this operation will wait timeout
* amount for acquiring the lock.
* <p/>
* <p><b>Warning:</b></p>
* This method uses <tt>hashCode</tt> and <tt>equals</tt> of binary form of
* the <tt>key</tt>, not the actual implementations of <tt>hashCode</tt> and <tt>equals</tt>
* defined in <tt>key</tt>'s class.
* <p/>
* <p><b>Warning-2:</b></p>
* <p>
* This method returns a clone of previous value, not the original (identically equal) value
* previously put into map.
* </p>
*
* @param key key of the entry
* @param timeout maximum time to wait for acquiring the lock
* for the key
* @param timeunit time unit for the timeout
* @return <tt>true</tt> if the remove is successful, <tt>false</tt>
* otherwise.
* @throws NullPointerException if the specified key is null
*/
boolean tryRemove(K key, long timeout, TimeUnit timeunit);
/**
* Tries to put the given key, value into this map within specified
* timeout value. If this method returns false, it means that
* the caller thread couldn't acquire the lock for the key within
* timeout duration, thus put operation is not successful.
* <p/>
* <p><b>Warning:</b></p>
* This method uses <tt>hashCode</tt> and <tt>equals</tt> of binary form of
* the <tt>key</tt>, not the actual implementations of <tt>hashCode</tt> and <tt>equals</tt>
* defined in <tt>key</tt>'s class.
*
* @param key key of the entry
* @param value value of the entry
* @param timeout maximum time to wait
* @param timeunit time unit for the timeout
* @return <tt>true</tt> if the put is successful, <tt>false</tt> otherwise.
* @throws NullPointerException if the specified key or value is null
*/
boolean tryPut(K key, V value, long timeout, TimeUnit timeunit);
/**
* Puts an entry into this map with a given ttl (time to live) value.
* Entry will expire and get evicted after the ttl. If ttl is 0, then
* the entry lives forever.
* <p/>
* <p><b>Warning:</b></p>
* This method uses <tt>hashCode</tt> and <tt>equals</tt> of binary form of
* the <tt>key</tt>, not the actual implementations of <tt>hashCode</tt> and <tt>equals</tt>
* defined in <tt>key</tt>'s class.
* <p/>
* <p><b>Warning-2:</b></p>
* <p>
* This method returns a clone of previous value, not the original (identically equal) value
* previously put into map.
* </p>
*
* @param key key of the entry
* @param value value of the entry
* @param ttl maximum time for this entry to stay in the map
* 0 means infinite.
* @param timeunit time unit for the ttl
* @return old value of the entry
* @throws NullPointerException if the specified key or value is null
*/
V put(K key, V value, long ttl, TimeUnit timeunit);
/**
* Same as {@link #put(K, V, long, TimeUnit)} but MapStore, if defined,
* will not be called to store/persist the entry. If ttl is 0, then
* the entry lives forever.
* <p/>
* <p><b>Warning:</b></p>
* This method uses <tt>hashCode</tt> and <tt>equals</tt> of binary form of
* the <tt>key</tt>, not the actual implementations of <tt>hashCode</tt> and <tt>equals</tt>
* defined in <tt>key</tt>'s class.
*
* @param key key of the entry
* @param value value of the entry
* @param ttl maximum time for this entry to stay in the map.
* 0 means infinite.
* @param timeunit time unit for the ttl
* @throws NullPointerException if the specified key or value is null
*/
void putTransient(K key, V value, long ttl, TimeUnit timeunit);
/**
* {@inheritDoc}
* <p/>
* <p><b>Note:</b></p>
* This method uses <tt>hashCode</tt> and <tt>equals</tt> of the binary form of
* the <tt>key</tt>, not the actual implementations of <tt>hashCode</tt> and <tt>equals</tt>
* defined in <tt>key</tt>'s class.
* <p>
* Also, this method returns a clone of the previous value, not the original (identically equal) value
* previously put into the map.
* </p>
*
* @throws NullPointerException if the specified key or value is null
* @return a clone of the previous value
*/
V putIfAbsent(K key, V value);
/**
* Puts an entry into this map with a given ttl (time to live) value
* if the specified key is not already associated with a value.
* Entry will expire and get evicted after the ttl.
* <p/>
* <p><b>Warning:</b></p>
* This method uses <tt>hashCode</tt> and <tt>equals</tt> of binary form of
* the <tt>key</tt>, not the actual implementations of <tt>hashCode</tt> and <tt>equals</tt>
* defined in <tt>key</tt>'s class.
* <p/>
* <p><b>Warning-2:</b></p>
* <p>
* This method returns a clone of previous value, not the original (identically equal) value
* previously put into map.
* </p>
*
* @param key key of the entry
* @param value value of the entry
* @param ttl maximum time for this entry to stay in the map
* @param timeunit time unit for the ttl
* @return old value of the entry
* @throws NullPointerException if the specified key or value is null
*/
V putIfAbsent(K key, V value, long ttl, TimeUnit timeunit);
/**
* {@inheritDoc}
* <p/>
* <p><b>Warning:</b></p>
* This method uses <tt>hashCode</tt> and <tt>equals</tt> of binary form of
* the <tt>key</tt>, not the actual implementations of <tt>hashCode</tt> and <tt>equals</tt>
* defined in <tt>key</tt>'s class.
*
* @throws NullPointerException if any of the specified parameters are null
*/
boolean replace(K key, V oldValue, V newValue);
/**
* {@inheritDoc}
* <p/>
* <p><b>Warning:</b></p>
* This method uses <tt>hashCode</tt> and <tt>equals</tt> of binary form of
* the <tt>key</tt>, not the actual implementations of <tt>hashCode</tt> and <tt>equals</tt>
* defined in <tt>key</tt>'s class.
* <p/>
* <p><b>Warning-2:</b></p>
* <p>
* This method returns a clone of previous value, not the original (identically equal) value
* previously put into map.
* </p>
*
* @throws NullPointerException if the specified key or value is null
*/
V replace(K key, V value);
/**
* Puts an entry into this map.
* Similar to put operation except that set
* doesn't return the old value which is more efficient.
* <p/>
* <p><b>Warning:</b></p>
* This method uses <tt>hashCode</tt> and <tt>equals</tt> of binary form of
* the <tt>key</tt>, not the actual implementations of <tt>hashCode</tt> and <tt>equals</tt>
* defined in <tt>key</tt>'s class.
*
* @param key key of the entry
* @param value value of the entry
* @throws NullPointerException if the specified key or value is null
*/
void set(K key, V value);
/**
 * Puts an entry into this map with a given ttl (time to live) value.
 * Entry will expire and get evicted after the ttl. If ttl is 0, then
 * the entry lives forever. Similar to put operation except that set
 * doesn't return the old value which is more efficient.
 * <p/>
 * <p><b>Warning:</b></p>
 * This method uses <tt>hashCode</tt> and <tt>equals</tt> of binary form of
 * the <tt>key</tt>, not the actual implementations of <tt>hashCode</tt> and <tt>equals</tt>
 * defined in <tt>key</tt>'s class.
 *
 * @param key      key of the entry
 * @param value    value of the entry
 * @param ttl      maximum time for this entry to stay in the map
 *                 0 means infinite.
 * @param timeunit time unit for the ttl
 * @throws NullPointerException if the specified key or value is null
 */
void set(K key, V value, long ttl, TimeUnit timeunit);
/**
* Acquires the lock for the specified key.
* <p>If the lock is not available then
* the current thread becomes disabled for thread scheduling
* purposes and lies dormant until the lock has been acquired.
* <p/>
* You get a lock whether the value is present in the map or not. Other
* threads (possibly on other systems) would block on their invoke of
* <code>lock()</code> until the non-existent key is unlocked. If the lock
* holder introduces the key to the map, the <code>put()</code> operation
* is not blocked. If a thread not holding a lock on the non-existent key
* tries to introduce the key while a lock exists on the non-existent key,
* the <code>put()</code> operation blocks until it is unlocked.
* <p/>
* Scope of the lock is this map only.
* Acquired lock is only for the key in this map.
* <p/>
* Locks are re-entrant so if the key is locked N times then
* it should be unlocked N times before another thread can acquire it.
* <p/>
* There is no lock timeout on this method. Locks will be held infinitely.
* <p><b>Warning:</b></p>
* This method uses <tt>hashCode</tt> and <tt>equals</tt> of binary form of
* the <tt>key</tt>, not the actual implementations of <tt>hashCode</tt> and <tt>equals</tt>
* defined in <tt>key</tt>'s class.
*
* @param key key to lock.
* @throws NullPointerException if the specified key is null
*/
void lock(K key);
/**
* Acquires the lock for the specified key for the specified lease time.
* <p>After lease time, lock will be released..
* <p/>
* <p>If the lock is not available then
* the current thread becomes disabled for thread scheduling
* purposes and lies dormant until the lock has been acquired.
* <p/>
* Scope of the lock is this map only.
* Acquired lock is only for the key in this map.
* <p/>
* Locks are re-entrant so if the key is locked N times then
* it should be unlocked N times before another thread can acquire it.
* <p/>
* <p><b>Warning:</b></p>
* This method uses <tt>hashCode</tt> and <tt>equals</tt> of binary form of
* the <tt>key</tt>, not the actual implementations of <tt>hashCode</tt> and <tt>equals</tt>
* defined in <tt>key</tt>'s class.
*
* @param key key to lock.
* @param leaseTime time to wait before releasing the lock.
* @param timeUnit unit of time to specify lease time.
* @throws NullPointerException if the specified key is null
*/
void lock(K key, long leaseTime, TimeUnit timeUnit);
/**
* Checks the lock for the specified key.
* <p>If the lock is acquired then returns true, else false.
* <p/>
* <p><b>Warning:</b></p>
* This method uses <tt>hashCode</tt> and <tt>equals</tt> of binary form of
* the <tt>key</tt>, not the actual implementations of <tt>hashCode</tt> and <tt>equals</tt>
* defined in <tt>key</tt>'s class.
*
* @param key key to lock to be checked.
* @return <tt>true</tt> if lock is acquired, <tt>false</tt> otherwise.
* @throws NullPointerException if the specified key is null
*/
boolean isLocked(K key);
/**
* Tries to acquire the lock for the specified key.
* <p>If the lock is not available then the current thread
* doesn't wait and returns false immediately.
* <p/>
* <p><b>Warning:</b></p>
* This method uses <tt>hashCode</tt> and <tt>equals</tt> of binary form of
* the <tt>key</tt>, not the actual implementations of <tt>hashCode</tt> and <tt>equals</tt>
* defined in <tt>key</tt>'s class.
*
* @param key key to lock.
* @return <tt>true</tt> if lock is acquired, <tt>false</tt> otherwise.
* @throws NullPointerException if the specified key is null
*/
boolean tryLock(K key);
/**
* Tries to acquire the lock for the specified key.
* <p>If the lock is not available then
* the current thread becomes disabled for thread scheduling
* purposes and lies dormant until one of two things happens:
* <ul>
* <li>The lock is acquired by the current thread; or
* <li>The specified waiting time elapses
* </ul>
* <p/>
* <p><b>Warning:</b></p>
* This method uses <tt>hashCode</tt> and <tt>equals</tt> of binary form of
* the <tt>key</tt>, not the actual implementations of <tt>hashCode</tt> and <tt>equals</tt>
* defined in <tt>key</tt>'s class.
*
* @param key key to lock in this map
* @param time maximum time to wait for the lock
* @param timeunit time unit of the <tt>time</tt> argument.
* @return <tt>true</tt> if the lock was acquired and <tt>false</tt>
* if the waiting time elapsed before the lock was acquired.
* @throws NullPointerException if the specified key is null
*/
boolean tryLock(K key, long time, TimeUnit timeunit) throws InterruptedException;
/**
* Releases the lock for the specified key. It never blocks and
* returns immediately.
* <p/>
* <p>If the current thread is the holder of this lock then the hold
* count is decremented. If the hold count is now zero then the lock
* is released. If the current thread is not the holder of this
* lock then {@link IllegalMonitorStateException} is thrown.
* <p/>
* <p><b>Warning:</b></p>
* This method uses <tt>hashCode</tt> and <tt>equals</tt> of binary form of
* the <tt>key</tt>, not the actual implementations of <tt>hashCode</tt> and <tt>equals</tt>
* defined in <tt>key</tt>'s class.
*
* @param key key to lock.
* @throws NullPointerException if the specified key is null
* @throws IllegalMonitorStateException if the current thread does not hold this lock
*/
void unlock(K key);
/**
* Releases the lock for the specified key regardless of the lock owner.
* It always successfully unlocks the key, never blocks
* and returns immediately.
* <p/>
* <p><b>Warning:</b></p>
* This method uses <tt>hashCode</tt> and <tt>equals</tt> of binary form of
* the <tt>key</tt>, not the actual implementations of <tt>hashCode</tt> and <tt>equals</tt>
* defined in <tt>key</tt>'s class.
*
* @param key key to lock.
* @throws NullPointerException if the specified key is null
*/
void forceUnlock(K key);
/**
* Adds a local entry listener for this map. Added listener will be only
* listening for the events (add/remove/update/evict) of the locally owned entries.
* <p/>
* Note that entries in distributed map are partitioned across
* the cluster members; each member owns and manages the some portion of the
* entries. Owned entries are called local entries. This
* listener will be listening for the events of local entries. Let's say
* your cluster has member1 and member2. On member2 you added a local listener and from
* member1, you call <code>map.put(key2, value2)</code>.
* If the key2 is owned by member2 then the local listener will be
* notified for the add/update event. Also note that entries can migrate to
* other nodes for load balancing and/or membership change.
*
* @param listener entry listener
* @see #localKeySet()
*/
String addLocalEntryListener(EntryListener<K, V> listener);
/**
 * Adds a local entry listener for this map. Added listener will be only
 * listening for the events (add/remove/update/evict) of the locally owned entries.
 * Listener will get notified for map add/remove/update/evict events filtered by given predicate.
 *
 * @param listener     entry listener
 * @param predicate    predicate for filtering entries
 * @param includeValue <tt>true</tt> if <tt>EntryEvent</tt> should
 *                     contain the value.
 * @return registration id of the added listener, which can be used to
 *         remove the listener
 */
String addLocalEntryListener(EntryListener<K, V> listener, Predicate<K, V> predicate, boolean includeValue);
/**
 * Adds a local entry listener for this map. Added listener will be only
 * listening for the events (add/remove/update/evict) of the locally owned entries.
 * Listener will get notified for map add/remove/update/evict events of the
 * given key, filtered by given predicate.
 *
 * @param listener     entry listener
 * @param predicate    predicate for filtering entries
 * @param key          key to listen
 * @param includeValue <tt>true</tt> if <tt>EntryEvent</tt> should
 *                     contain the value.
 * @return registration id of the added listener, which can be used to
 *         remove the listener
 */
String addLocalEntryListener(EntryListener<K, V> listener, Predicate<K, V> predicate, K key, boolean includeValue);
/**
* Adds an interceptor for this map. Added interceptor will intercept operations
* and execute user defined methods and will cancel operations if user defined method throw exception.
* <p/>
*
* @param interceptor map interceptor
* @return id of registered interceptor
*/
String addInterceptor(MapInterceptor interceptor);
/**
* Removes the given interceptor for this map. So it will not intercept operations anymore.
* <p/>
*
* @param id registration id of map interceptor
*/
void removeInterceptor(String id);
/**
* Adds an entry listener for this map. Listener will get notified
* for all map add/remove/update/evict events.
*
* @param listener entry listener
* @param includeValue <tt>true</tt> if <tt>EntryEvent</tt> should
* contain the value.
*/
String addEntryListener(EntryListener<K, V> listener, boolean includeValue);
/**
* Removes the specified entry listener
* Returns silently if there is no such listener added before.
*
* @param id id of registered listener
* @return true if registration is removed, false otherwise
*/
boolean removeEntryListener(String id);
/**
* Adds the specified entry listener for the specified key.
* The listener will get notified for all
* add/remove/update/evict events of the specified key only.
* <p/>
* <p><b>Warning:</b></p>
* This method uses <tt>hashCode</tt> and <tt>equals</tt> of binary form of
* the <tt>key</tt>, not the actual implementations of <tt>hashCode</tt> and <tt>equals</tt>
* defined in <tt>key</tt>'s class.
*
* @param listener entry listener
* @param key key to listen
* @param includeValue <tt>true</tt> if <tt>EntryEvent</tt> should
* contain the value.
* @throws NullPointerException if the specified key is null
*/
String addEntryListener(EntryListener<K, V> listener, K key, boolean includeValue);
/**
* Adds an continuous entry listener for this map. Listener will get notified
* for map add/remove/update/evict events filtered by given predicate.
*
* @param listener entry listener
* @param predicate predicate for filtering entries
* @param includeValue <tt>true</tt> if <tt>EntryEvent</tt> should
* contain the value.
*/
String addEntryListener(EntryListener<K, V> listener, Predicate<K, V> predicate, boolean includeValue);
/**
* Adds an continuous entry listener for this map. Listener will get notified
* for map add/remove/update/evict events filtered by given predicate.
*
* @param listener entry listener
* @param predicate predicate for filtering entries
* @param key key to listen
* @param includeValue <tt>true</tt> if <tt>EntryEvent</tt> should
* contain the value.
*/
String addEntryListener(EntryListener<K, V> listener, Predicate<K, V> predicate, K key, boolean includeValue);
/**
* Returns the <tt>EntryView</tt> for the specified key.
* <p/>
* <p><b>Warning:</b></p>
* <p>
* This method returns a clone of original mapping, modifying the returned value does not change
* the actual value in the map. One should put modified value back to make changes visible to all nodes.
* </p>
* <p/>
* <p><b>Warning-2:</b></p>
* This method uses <tt>hashCode</tt> and <tt>equals</tt> of binary form of
* the <tt>key</tt>, not the actual implementations of <tt>hashCode</tt> and <tt>equals</tt>
* defined in <tt>key</tt>'s class.
*
* @param key key of the entry
* @return <tt>EntryView</tt> of the specified key
* @throws NullPointerException if the specified key is null
* @see EntryView
*/
EntryView<K, V> getEntryView(K key);
/**
* Evicts the specified key from this map. If
* a <tt>MapStore</tt> defined for this map, then the entry is not
* deleted from the underlying <tt>MapStore</tt>, evict only removes
* the entry from the memory.
* <p/>
* <p><b>Warning:</b></p>
* This method uses <tt>hashCode</tt> and <tt>equals</tt> of binary form of
* the <tt>key</tt>, not the actual implementations of <tt>hashCode</tt> and <tt>equals</tt>
* defined in <tt>key</tt>'s class.
*
* @param key key to evict
* @return <tt>true</tt> if the key is evicted, <tt>false</tt> otherwise.
* @throws NullPointerException if the specified key is null
*/
boolean evict(K key);
/**
* Returns a set clone of the keys contained in this map.
* The set is <b>NOT</b> backed by the map,
* so changes to the map are <b>NOT</b> reflected in the set, and vice-versa.
*
* @return a set clone of the keys contained in this map
*/
Set<K> keySet();
/**
* Returns a collection clone of the values contained in this map.
* The collection is <b>NOT</b> backed by the map,
* so changes to the map are <b>NOT</b> reflected in the collection, and vice-versa.
*
* @return a collection clone of the values contained in this map
*/
Collection<V> values();
/**
* Returns a {@link Set} clone of the mappings contained in this map.
* The set is <b>NOT</b> backed by the map,
* so changes to the map are <b>NOT</b> reflected in the set, and vice-versa.
*
* @return a set clone of the keys mappings in this map
*/
Set<Map.Entry<K, V>> entrySet();
/**
* Queries the map based on the specified predicate and
* returns the keys of matching entries.
* <p/>
* Specified predicate runs on all members in parallel.
* <p/>
* <p><b>Warning:</b></p>
* The set is <b>NOT</b> backed by the map,
* so changes to the map are <b>NOT</b> reflected in the set, and vice-versa.
*
* @param predicate query criteria
* @return result key set of the query
*/
Set<K> keySet(Predicate predicate);
/**
* Queries the map based on the specified predicate and
* returns the matching entries.
* <p/>
* Specified predicate runs on all members in parallel.
* <p/>
* <p><b>Warning:</b></p>
* The set is <b>NOT</b> backed by the map,
* so changes to the map are <b>NOT</b> reflected in the set, and vice-versa.
*
* @param predicate query criteria
* @return result entry set of the query
*/
Set<Map.Entry<K, V>> entrySet(Predicate predicate);
/**
* Queries the map based on the specified predicate and
* returns the values of matching entries.
* <p/>
* Specified predicate runs on all members in parallel.
* <p/>
* <p><b>Warning:</b></p>
* The collection is <b>NOT</b> backed by the map,
* so changes to the map are <b>NOT</b> reflected in the collection, and vice-versa.
*
* @param predicate query criteria
* @return result value collection of the query
*/
Collection<V> values(Predicate predicate);
/**
* Returns the locally owned set of keys.
* <p/>
* Each key in this map is owned and managed by a specific
* member in the cluster.
* <p/>
* Note that ownership of these keys might change over time
* so that key ownerships can be almost evenly distributed
* in the cluster.
* <p/>
* <p><b>Warning:</b></p>
* The set is <b>NOT</b> backed by the map,
* so changes to the map are <b>NOT</b> reflected in the set, and vice-versa.
*
* @return locally owned keys.
*/
Set<K> localKeySet();
/**
* Returns the keys of matching locally owned entries.
* <p/>
* Each key in this map is owned and managed by a specific
* member in the cluster.
* <p/>
* Note that ownership of these keys might change over time
* so that key ownerships can be almost evenly distributed
* in the cluster.
* <p/>
* <p><b>Warning:</b></p>
* The set is <b>NOT</b> backed by the map,
* so changes to the map are <b>NOT</b> reflected in the set, and vice-versa.
*
* @param predicate query criteria
* @return keys of matching locally owned entries.
*/
Set<K> localKeySet(Predicate predicate);
/**
* Adds an index to this map for the specified entries so
* that queries can run faster.
* <p/>
* Let's say your map values are Employee objects.
* <pre>
* public class Employee implements Serializable {
* private boolean active = false;
* private int age;
* private String name = null;
* // other fields.
*
* // getters setter
*
* }
* </pre>
* <p/>
* If you are querying your values mostly based on age and active then
* you should consider indexing these fields.
* <pre>
* IMap imap = Hazelcast.getMap("employees");
* imap.addIndex("age", true); // ordered, since we have ranged queries for this field
* imap.addIndex("active", false); // not ordered, because boolean field cannot have range
* </pre>
* <p/>
* Index attribute should either have a getter method or be public.
* You should also make sure to add the indexes before adding
* entries to this map.
*
* @param attribute attribute of value
* @param ordered <tt>true</tt> if index should be ordered,
* <tt>false</tt> otherwise.
*/
void addIndex(String attribute, boolean ordered);
/**
* Returns LocalMapStats for this map.
* LocalMapStats is the statistics for the local portion of this
* distributed map and contains information such as ownedEntryCount
* backupEntryCount, lastUpdateTime, lockedEntryCount.
* <p/>
* Since this stats are only for the local portion of this map, if you
* need the cluster-wide MapStats then you need to get the LocalMapStats
* from all members of the cluster and combine them.
*
* @return this map's local statistics.
*/
LocalMapStats getLocalMapStats();
/**
 * Applies the user defined EntryProcessor to the entry mapped by the key.
 * Returns the object which is the result of the process() method of EntryProcessor.
 * <p/>
 *
 * @param key            key of the entry to process
 * @param entryProcessor processor to apply to the entry
 * @return result of entry process.
 * @throws NullPointerException if the specified key is null
 */
Object executeOnKey(K key, EntryProcessor entryProcessor);
/**
 * Applies the user defined EntryProcessor to the entries mapped by the collection of keys.
 * Returns the results mapped by each key in the collection.
 * <p/>
 *
 * @param keys           keys of the entries to process
 * @param entryProcessor processor to apply to each entry
 * @return result of entry process.
 * @throws NullPointerException if the specified key is null
 */
Map<K, Object> executeOnKeys(Set<K> keys, EntryProcessor entryProcessor);
/**
* Applies the user defined EntryProcessor to the entry mapped by the key with
* specified ExecutionCallback to listen event status and returns immediately.
*
* @param key key to be processed
* @param entryProcessor processor to process the key
* @param callback to listen whether operation is finished or not
*/
void submitToKey(K key, EntryProcessor entryProcessor, ExecutionCallback callback);
/**
* Applies the user defined EntryProcessor to the entry mapped by the key.
* Returns immediately with a Future representing that task.
* <p/>
* EntryProcessor is not cancellable, so calling Future.cancel() method won't cancel the operation of EntryProcessor.
*
* @param key key to be processed
* @param entryProcessor processor to process the key
* @return Future from which the result of the operation can be retrieved.
* @see java.util.concurrent.Future
*/
Future submitToKey(K key, EntryProcessor entryProcessor);
/**
* Applies the user defined EntryProcessor to the all entries in the map.
* Returns the results mapped by each key in the map.
* <p/>
*/
Map<K, Object> executeOnEntries(EntryProcessor entryProcessor);
/**
* Applies the user defined EntryProcessor to the entries in the map which satisfies provided predicate.
* Returns the results mapped by each key in the map.
* <p/>
*/
Map<K, Object> executeOnEntries(EntryProcessor entryProcessor, Predicate predicate);
} | 0true
| hazelcast_src_main_java_com_hazelcast_core_IMap.java |
154 | public class TransactionMonitorTest
{
@Test
public void shouldCountCommittedTransactions() throws Exception
{
GraphDatabaseService db = new TestGraphDatabaseFactory().newImpermanentDatabase();
Monitors monitors = ((GraphDatabaseAPI) db).getDependencyResolver().resolveDependency( Monitors.class );
EideticTransactionMonitor monitor = new EideticTransactionMonitor();
monitors.addMonitorListener( monitor, XaResourceManager.class.getName(), NeoStoreXaDataSource.DEFAULT_DATA_SOURCE_NAME );
Transaction tx = db.beginTx();
db.createNode();
tx.success();
tx.finish();
assertEquals( 1, monitor.getCommitCount() );
assertEquals( 0, monitor.getInjectOnePhaseCommitCount() );
assertEquals( 0, monitor.getInjectTwoPhaseCommitCount() );
}
@Test
public void shouldNotCountRolledBackTransactions() throws Exception
{
GraphDatabaseService db = new TestGraphDatabaseFactory().newImpermanentDatabase();
Monitors monitors = ((GraphDatabaseAPI) db).getDependencyResolver().resolveDependency( Monitors.class );
EideticTransactionMonitor monitor = new EideticTransactionMonitor();
monitors.addMonitorListener( monitor, XaResourceManager.class.getName(), NeoStoreXaDataSource.DEFAULT_DATA_SOURCE_NAME );
Transaction tx = db.beginTx();
db.createNode();
tx.failure();
tx.finish();
assertEquals( 0, monitor.getCommitCount() );
assertEquals( 0, monitor.getInjectOnePhaseCommitCount() );
assertEquals( 0, monitor.getInjectTwoPhaseCommitCount() );
}
} | 0true
| community_kernel_src_test_java_org_neo4j_kernel_impl_transaction_xaframework_TransactionMonitorTest.java |
/**
 * Tests for the {@code _index} field mapper: enabled/disabled behavior,
 * the default, and that merging mappings can disable the field.
 */
public class IndexTypeMapperTests extends ElasticsearchTestCase {

    // _index explicitly enabled and stored: the mapper must report both flags
    // and parsed documents must carry the index name under "_index".
    @Test
    public void simpleIndexMapperTests() throws Exception {
        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("_index").field("enabled", true).field("store", "yes").endObject()
                .endObject().endObject().string();
        DocumentMapper docMapper = MapperTestUtils.newParser().parse(mapping);
        IndexFieldMapper indexMapper = docMapper.rootMapper(IndexFieldMapper.class);
        assertThat(indexMapper.enabled(), equalTo(true));
        assertThat(indexMapper.fieldType().stored(), equalTo(true));
        assertThat(docMapper.mappers().indexName("_index").mapper(), instanceOf(IndexFieldMapper.class));
        ParsedDocument doc = docMapper.parse("type", "1", XContentFactory.jsonBuilder()
                .startObject()
                .field("field", "value")
                .endObject()
                .bytes());
        // NOTE(review): the expected index name "test" presumably comes from
        // MapperTestUtils.newParser() — confirm if that helper changes.
        assertThat(doc.rootDoc().get("_index"), equalTo("test"));
        assertThat(doc.rootDoc().get("field"), equalTo("value"));
    }

    // _index explicitly disabled: stored flag is still honored, but no
    // "_index" entry appears in parsed documents.
    @Test
    public void explicitDisabledIndexMapperTests() throws Exception {
        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("_index").field("enabled", false).field("store", "yes").endObject()
                .endObject().endObject().string();
        DocumentMapper docMapper = MapperTestUtils.newParser().parse(mapping);
        IndexFieldMapper indexMapper = docMapper.rootMapper(IndexFieldMapper.class);
        assertThat(indexMapper.enabled(), equalTo(false));
        assertThat(indexMapper.fieldType().stored(), equalTo(true));
        ParsedDocument doc = docMapper.parse("type", "1", XContentFactory.jsonBuilder()
                .startObject()
                .field("field", "value")
                .endObject()
                .bytes());
        assertThat(doc.rootDoc().get("_index"), nullValue());
        assertThat(doc.rootDoc().get("field"), equalTo("value"));
    }

    // No _index configuration at all: the field defaults to disabled and
    // not stored.
    @Test
    public void defaultDisabledIndexMapperTests() throws Exception {
        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                .endObject().endObject().string();
        DocumentMapper docMapper = MapperTestUtils.newParser().parse(mapping);
        IndexFieldMapper indexMapper = docMapper.rootMapper(IndexFieldMapper.class);
        assertThat(indexMapper.enabled(), equalTo(false));
        assertThat(indexMapper.fieldType().stored(), equalTo(false));
        ParsedDocument doc = docMapper.parse("type", "1", XContentFactory.jsonBuilder()
                .startObject()
                .field("field", "value")
                .endObject()
                .bytes());
        assertThat(doc.rootDoc().get("_index"), nullValue());
        assertThat(doc.rootDoc().get("field"), equalTo("value"));
    }

    // Merging a mapping with _index disabled into one with it enabled
    // (simulate=false applies the merge) must turn the field off.
    @Test
    public void testThatMergingFieldMappingAllowsDisabling() throws Exception {
        String mappingWithIndexEnabled = XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("_index").field("enabled", true).field("store", "yes").endObject()
                .endObject().endObject().string();
        DocumentMapper mapperEnabled = MapperTestUtils.newParser().parse(mappingWithIndexEnabled);
        String mappingWithIndexDisabled = XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("_index").field("enabled", false).field("store", "yes").endObject()
                .endObject().endObject().string();
        DocumentMapper mapperDisabled = MapperTestUtils.newParser().parse(mappingWithIndexDisabled);
        mapperEnabled.merge(mapperDisabled, DocumentMapper.MergeFlags.mergeFlags().simulate(false));
        assertThat(mapperEnabled.IndexFieldMapper().enabled(), is(false));
    }
}
| src_test_java_org_elasticsearch_index_mapper_index_IndexTypeMapperTests.java |
/**
 * Base class for per-node responses to node-level operations. Carries the
 * {@link DiscoveryNode} the response originated from, in addition to whatever
 * payload subclasses add.
 */
public abstract class NodeOperationResponse extends TransportResponse {

    // Node the response was produced on; only unset while deserializing.
    private DiscoveryNode node;

    protected NodeOperationResponse() {
    }

    protected NodeOperationResponse(DiscoveryNode node) {
        assert node != null;
        this.node = node;
    }

    /**
     * The node this information relates to.
     */
    public DiscoveryNode getNode() {
        return node;
    }

    @Override
    public void readFrom(StreamInput in) throws IOException {
        super.readFrom(in);
        // Read order must mirror writeTo exactly.
        node = DiscoveryNode.readNode(in);
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        super.writeTo(out);
        node.writeTo(out);
    }
}
| src_main_java_org_elasticsearch_action_support_nodes_NodeOperationResponse.java |
25 | private class HAMClusterListener extends ClusterListener.Adapter
{
@Override
public void enteredCluster( ClusterConfiguration configuration )
{
Map<InstanceId, ClusterMember> newMembers = new HashMap<InstanceId, ClusterMember>();
for ( InstanceId memberClusterUri : configuration.getMembers().keySet() )
newMembers.put( memberClusterUri, new ClusterMember( memberClusterUri ) );
members.clear();
members.putAll( newMembers );
}
@Override
public void leftCluster()
{
members.clear();
}
@Override
public void joinedCluster( InstanceId member, URI memberUri )
{
members.put( member, new ClusterMember( member ) );
}
@Override
public void leftCluster( InstanceId member )
{
members.remove( member );
}
} | 1no label
| enterprise_ha_src_main_java_org_neo4j_kernel_ha_cluster_member_ClusterMembers.java |
// Run the action and relay the outcome back over the transport channel.
execute(request, new ActionListener<Response>() {
    @Override
    public void onResponse(Response result) {
        try {
            channel.sendResponse(result);
        } catch (Throwable e) {
            // Sending the success response failed; report it as a failure
            // so the caller still gets an answer.
            onFailure(e);
        }
    }

    @Override
    public void onFailure(Throwable e) {
        try {
            channel.sendResponse(e);
        } catch (Exception e1) {
            // Could not even deliver the error response; log and give up.
            logger.warn("Failed to send error response for action [" + transportAction() + "] and request [" + request + "]", e1);
        }
    }
});
| src_main_java_org_elasticsearch_action_support_replication_TransportIndexReplicationOperationAction.java |
// Roll this transaction back while holding the storage lock, so concurrent
// storage operations do not interleave with the rollback.
// NOTE(review): the trailing boolean presumably selects exclusive locking —
// confirm against the callInLock signature.
database.getStorage().callInLock(new Callable<Void>() {
    public Void call() throws Exception {
        database.getStorage().rollback(OTransactionOptimistic.this);
        return null;
    }
}, true);
| core_src_main_java_com_orientechnologies_orient_core_tx_OTransactionOptimistic.java |
/**
 * Per-shard result of a validate-query request: whether the query was valid
 * on that shard, an optional explanation, and the error message (if any).
 */
class ShardValidateQueryResponse extends BroadcastShardOperationResponse {

    private boolean valid;
    private String explanation;
    private String error;

    ShardValidateQueryResponse() {
    }

    public ShardValidateQueryResponse(String index, int shardId, boolean valid, String explanation, String error) {
        super(index, shardId);
        this.valid = valid;
        this.explanation = explanation;
        this.error = error;
    }

    /** Whether the query validated successfully on this shard. */
    public boolean isValid() {
        return this.valid;
    }

    /** Explanation of the query, or null if none was produced. */
    public String getExplanation() {
        return explanation;
    }

    /** The validation error message, or null if the query was valid. */
    public String getError() {
        return error;
    }

    @Override
    public void readFrom(StreamInput in) throws IOException {
        super.readFrom(in);
        // Read order must mirror writeTo exactly.
        valid = in.readBoolean();
        explanation = in.readOptionalString();
        error = in.readOptionalString();
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        super.writeTo(out);
        out.writeBoolean(valid);
        out.writeOptionalString(explanation);
        out.writeOptionalString(error);
    }
}
| src_main_java_org_elasticsearch_action_admin_indices_validate_query_ShardValidateQueryResponse.java |
/**
 * Fluent builder for the indices refresh action; delegates execution to the
 * indices admin client.
 */
public class RefreshRequestBuilder extends BroadcastOperationRequestBuilder<RefreshRequest, RefreshResponse, RefreshRequestBuilder> {

    public RefreshRequestBuilder(IndicesAdminClient indicesClient) {
        super((InternalIndicesAdminClient) indicesClient, new RefreshRequest());
    }

    /**
     * Forces calling refresh, overriding the check that dirty operations even happened. Defaults
     * to true (note, still lightweight if no refresh is needed).
     */
    public RefreshRequestBuilder setForce(boolean force) {
        request.force(force);
        return this;
    }

    @Override
    protected void doExecute(ActionListener<RefreshResponse> listener) {
        // Hand the built request off to the indices admin client.
        ((IndicesAdminClient) client).refresh(request, listener);
    }
}
| src_main_java_org_elasticsearch_action_admin_indices_refresh_RefreshRequestBuilder.java |
26 | public class MVELToDataWrapperTranslatorTest extends TestCase {
private OrderItemFieldServiceImpl orderItemFieldService;
private CustomerFieldServiceImpl customerFieldService;
private OrderFieldServiceImpl orderFieldService;
private FulfillmentGroupFieldServiceImpl fulfillmentGroupFieldService;
@Override
protected void setUp() {
    // Create and initialize one field service per rule-builder context
    // (order item, customer, order, fulfillment group); init() completes
    // each service's setup before the tests use them.
    orderItemFieldService = new OrderItemFieldServiceImpl();
    orderItemFieldService.init();
    customerFieldService = new CustomerFieldServiceImpl();
    customerFieldService.init();
    orderFieldService = new OrderFieldServiceImpl();
    orderFieldService.init();
    fulfillmentGroupFieldService = new FulfillmentGroupFieldServiceImpl();
    fulfillmentGroupFieldService.init();
}
/**
 * Tests the creation of a DataWrapper given an mvel/quantity property.
 * The MVEL uppercase-comparison expression should translate into a single
 * case-insensitive-equals (IEQUALS) expression on "category.name".
 *
 * @throws MVELTranslationException if the MVEL expression cannot be translated
 */
public void testCreateRuleData() throws MVELTranslationException {
    MVELToDataWrapperTranslator translator = new MVELToDataWrapperTranslator();
    // Entity with three properties: the MVEL rule, a quantity, and an id.
    Property[] properties = new Property[3];
    Property mvelProperty = new Property();
    mvelProperty.setName("orderItemMatchRule");
    mvelProperty.setValue("MVEL.eval(\"toUpperCase()\",discreteOrderItem.?category.?name)==MVEL.eval(\"toUpperCase()\",\"merchandise\")");
    Property quantityProperty = new Property();
    quantityProperty.setName("quantity");
    quantityProperty.setValue("1");
    Property idProperty = new Property();
    idProperty.setName("id");
    idProperty.setValue("100");
    properties[0] = mvelProperty;
    properties[1] = quantityProperty;
    properties[2] = idProperty;
    Entity[] entities = new Entity[1];
    Entity entity = new Entity();
    entity.setProperties(properties);
    entities[0] = entity;
    DataWrapper dataWrapper = translator.createRuleData(entities, "orderItemMatchRule", "quantity", "id", orderItemFieldService);
    // Expect one rule with quantity 1 containing a single IEQUALS expression.
    assert(dataWrapper.getData().size() == 1);
    assert(dataWrapper.getData().get(0).getQuantity() == 1);
    assert(dataWrapper.getData().get(0).getGroups().size()==1);
    assert(dataWrapper.getData().get(0).getGroups().get(0) instanceof ExpressionDTO);
    ExpressionDTO exp = (ExpressionDTO) dataWrapper.getData().get(0).getGroups().get(0);
    assert(exp.getName().equals("category.name"));
    assert(exp.getOperator().equals(BLCOperator.IEQUALS.name()));
    assert(exp.getValue().equals("merchandise"));
}
/**
 * A two-clause customer rule joined by AND should translate into one AND
 * group with a NOT_EQUAL_FIELD expression (field-to-field comparison) and
 * an EQUALS expression.
 */
public void testCustomerQualificationDataWrapper() throws MVELTranslationException {
    MVELToDataWrapperTranslator translator = new MVELToDataWrapperTranslator();
    Property[] properties = new Property[1];
    Property mvelProperty = new Property();
    mvelProperty.setName("matchRule");
    mvelProperty.setValue("customer.emailAddress!=customer.username&&customer.deactivated==true");
    properties[0] = mvelProperty;
    Entity[] entities = new Entity[1];
    Entity entity = new Entity();
    entity.setProperties(properties);
    entities[0] = entity;
    // No quantity/id properties for customer qualification rules.
    DataWrapper dataWrapper = translator.createRuleData(entities, "matchRule", null, null, customerFieldService);
    assert(dataWrapper.getData().size() == 1);
    assert(dataWrapper.getData().get(0).getQuantity() == null);
    assert(dataWrapper.getData().get(0).getGroupOperator().equals(BLCOperator.AND.name()));
    assert(dataWrapper.getData().get(0).getGroups().size()==2);
    // First clause: emailAddress compared against another field (username).
    assert(dataWrapper.getData().get(0).getGroups().get(0) instanceof ExpressionDTO);
    ExpressionDTO e1 = (ExpressionDTO) dataWrapper.getData().get(0).getGroups().get(0);
    assert(e1.getName().equals("emailAddress"));
    assert(e1.getOperator().equals(BLCOperator.NOT_EQUAL_FIELD.name()));
    assert(e1.getValue().equals("username"));
    // Second clause: deactivated == true.
    assert(dataWrapper.getData().get(0).getGroups().get(1) instanceof ExpressionDTO);
    ExpressionDTO e2 = (ExpressionDTO) dataWrapper.getData().get(0).getGroups().get(1);
    assert(e2.getName().equals("deactivated"));
    assert(e2.getOperator().equals(BLCOperator.EQUALS.name()));
    assert(e2.getValue().equals("true"));
}
/**
 * An order rule mixing a top-level comparison with a parenthesized OR clause
 * should translate into an AND group containing one expression plus one nested
 * OR group of two expressions.
 */
public void testOrderQualificationDataWrapper() throws MVELTranslationException {
    MVELToDataWrapperTranslator translator = new MVELToDataWrapperTranslator();

    // Single entity carrying the raw MVEL match rule.
    Property matchRule = new Property();
    matchRule.setName("matchRule");
    matchRule.setValue("order.subTotal.getAmount()>=100&&(order.currency.defaultFlag==true||order.locale.localeCode==\"my\")");
    Entity entity = new Entity();
    entity.setProperties(new Property[] { matchRule });

    DataWrapper dataWrapper = translator.createRuleData(new Entity[] { entity }, "matchRule", null, null, orderFieldService);

    assert(dataWrapper.getData().size() == 1);
    DataDTO root = dataWrapper.getData().get(0);
    assert(root.getQuantity() == null);
    assert(root.getGroupOperator().equals(BLCOperator.AND.name()));
    assert(root.getGroups().size() == 2);

    // First child: subTotal >= 100 (expression name drops the getAmount() call).
    assert(root.getGroups().get(0) instanceof ExpressionDTO);
    ExpressionDTO subTotalExp = (ExpressionDTO) root.getGroups().get(0);
    assert(subTotalExp.getName().equals("subTotal"));
    assert(subTotalExp.getOperator().equals(BLCOperator.GREATER_OR_EQUAL.name()));
    assert(subTotalExp.getValue().equals("100"));

    // Second child: the parenthesized clause becomes a nested OR group.
    assert(root.getGroups().get(1) != null);
    DataDTO orGroup = root.getGroups().get(1);
    assert(orGroup.getGroupOperator().equals(BLCOperator.OR.name()));

    assert(orGroup.getGroups().get(0) instanceof ExpressionDTO);
    ExpressionDTO currencyExp = (ExpressionDTO) orGroup.getGroups().get(0);
    assert(currencyExp.getName().equals("currency.defaultFlag"));
    assert(currencyExp.getOperator().equals(BLCOperator.EQUALS.name()));
    assert(currencyExp.getValue().equals("true"));

    assert(orGroup.getGroups().get(1) instanceof ExpressionDTO);
    ExpressionDTO localeExp = (ExpressionDTO) orGroup.getGroups().get(1);
    assert(localeExp.getName().equals("locale.localeCode"));
    assert(localeExp.getOperator().equals(BLCOperator.EQUALS.name()));
    assert(localeExp.getValue().equals("my"));
}
/**
 * Two item rules (one plain, one negated with !(...)) should each translate
 * into a DataDTO carrying its own quantity, with the negated rule producing a
 * NOT group around its two expressions.
 */
public void testItemQualificationDataWrapper() throws MVELTranslationException {
    MVELToDataWrapperTranslator translator = new MVELToDataWrapperTranslator();

    // First entity: single category condition, quantity 1, id 100.
    Property rule1 = new Property();
    rule1.setName("orderItemMatchRule");
    rule1.setValue("discreteOrderItem.category.name==\"test category\"");
    Property quantity1 = new Property();
    quantity1.setName("quantity");
    quantity1.setValue("1");
    Property id1 = new Property();
    id1.setName("id");
    id1.setValue("100");
    Entity first = new Entity();
    first.setProperties(new Property[] { rule1, quantity1, id1 });

    // Second entity: negated conjunction, quantity 2, id 200.
    Property rule2 = new Property();
    rule2.setName("orderItemMatchRule");
    rule2.setValue("!(discreteOrderItem.product.manufacturer==\"test manufacturer\"&&discreteOrderItem.product.model==\"test model\")");
    Property quantity2 = new Property();
    quantity2.setName("quantity");
    quantity2.setValue("2");
    Property id2 = new Property();
    id2.setName("id");
    id2.setValue("200");
    Entity second = new Entity();
    second.setProperties(new Property[] { rule2, quantity2, id2 });

    DataWrapper dataWrapper = translator.createRuleData(new Entity[] { first, second },
            "orderItemMatchRule", "quantity", "id", orderItemFieldService);

    assert(dataWrapper.getData().size() == 2);

    // First rule: AND group, quantity 1, a single category expression.
    DataDTO firstGroup = dataWrapper.getData().get(0);
    assert(firstGroup.getQuantity() == 1);
    assert(firstGroup.getGroupOperator().equals(BLCOperator.AND.name()));
    assert(firstGroup.getGroups().size() == 1);
    assert(firstGroup.getGroups().get(0) instanceof ExpressionDTO);
    ExpressionDTO categoryExp = (ExpressionDTO) firstGroup.getGroups().get(0);
    assert(categoryExp.getName().equals("category.name"));
    assert(categoryExp.getOperator().equals(BLCOperator.EQUALS.name()));
    assert(categoryExp.getValue().equals("test category"));

    // Second rule: NOT group, quantity 2, two product expressions.
    DataDTO secondGroup = dataWrapper.getData().get(1);
    assert(secondGroup.getQuantity() == 2);
    assert(secondGroup.getGroupOperator().equals(BLCOperator.NOT.name()));
    assert(secondGroup.getGroups().size() == 2);
    assert(secondGroup.getGroups().get(0) instanceof ExpressionDTO);
    ExpressionDTO manufacturerExp = (ExpressionDTO) secondGroup.getGroups().get(0);
    assert(manufacturerExp.getName().equals("product.manufacturer"));
    assert(manufacturerExp.getOperator().equals(BLCOperator.EQUALS.name()));
    assert(manufacturerExp.getValue().equals("test manufacturer"));
    assert(secondGroup.getGroups().get(1) instanceof ExpressionDTO);
    ExpressionDTO modelExp = (ExpressionDTO) secondGroup.getGroups().get(1);
    assert(modelExp.getName().equals("product.model"));
    assert(modelExp.getOperator().equals(BLCOperator.EQUALS.name()));
    assert(modelExp.getValue().equals("test model"));
}
/**
 * A fulfillment-group rule whose parenthesized clause pairs >= and <= on the
 * same field should translate into an AND group with a state expression and a
 * single BETWEEN_INCLUSIVE range expression.
 */
public void testFulfillmentGroupQualificationDataWrapper() throws MVELTranslationException {
    MVELToDataWrapperTranslator translator = new MVELToDataWrapperTranslator();

    // Single entity carrying the raw MVEL match rule.
    Property matchRule = new Property();
    matchRule.setName("matchRule");
    matchRule.setValue("fulfillmentGroup.address.state.name==\"Texas\"&&(fulfillmentGroup.retailFulfillmentPrice.getAmount()>=99&&fulfillmentGroup.retailFulfillmentPrice.getAmount()<=199)");
    Entity entity = new Entity();
    entity.setProperties(new Property[] { matchRule });

    DataWrapper dataWrapper = translator.createRuleData(new Entity[] { entity }, "matchRule", null, null, fulfillmentGroupFieldService);

    assert(dataWrapper.getData().size() == 1);
    DataDTO root = dataWrapper.getData().get(0);
    assert(root.getQuantity() == null);
    assert(root.getGroupOperator().equals(BLCOperator.AND.name()));
    assert(root.getGroups().size() == 2);

    // State-name equality.
    assert(root.getGroups().get(0) instanceof ExpressionDTO);
    ExpressionDTO stateExp = (ExpressionDTO) root.getGroups().get(0);
    assert(stateExp.getName().equals("address.state.name"));
    assert(stateExp.getOperator().equals(BLCOperator.EQUALS.name()));
    assert(stateExp.getValue().equals("Texas"));

    // The >= / <= pair collapses into one inclusive-range expression.
    assert(root.getGroups().get(1) instanceof ExpressionDTO);
    ExpressionDTO rangeExp = (ExpressionDTO) root.getGroups().get(1);
    assert(rangeExp.getName().equals("retailFulfillmentPrice"));
    assert(rangeExp.getOperator().equals(BLCOperator.BETWEEN_INCLUSIVE.name()));
    assert(rangeExp.getStart().equals("99"));
    assert(rangeExp.getEnd().equals("199"));
}
} | 0true
| admin_broadleaf-admin-module_src_test_java_org_broadleafcommerce_admin_web_rulebuilder_MVELToDataWrapperTranslatorTest.java |
1,848 | return new LockStoreInfo() {
public int getBackupCount() {
return mapContainer.getBackupCount();
}
public int getAsyncBackupCount() {
return mapContainer.getAsyncBackupCount();
}
}; | 0true
| hazelcast_src_main_java_com_hazelcast_map_MapService.java |
422 | return new HashMap<String, Object>() {{
put(TEXT, txt);
put(NAME, txt);
put(TIME, time);
put(WEIGHT, weight);
put(LOCATION, geo);
}}; | 0true
| titan-test_src_main_java_com_thinkaurelius_titan_diskstorage_indexing_IndexProviderTest.java |
152 | {
@Override
public XaCommand readCommand( ReadableByteChannel byteChannel, ByteBuffer buffer ) throws IOException
{
return Command.readCommand( null, null, byteChannel, buffer );
}
}; | 0true
| community_kernel_src_main_java_org_neo4j_kernel_impl_transaction_xaframework_TransactionReader.java |
232 | assertTrueEventually(new AssertTask() {
public void run() throws Exception {
assertTrue(map.containsKey(member1.getUuid()));
assertTrue(map.containsKey(member3.getUuid()));
}
}); | 0true
| hazelcast-client_src_test_java_com_hazelcast_client_executor_ClientExecutorServiceExecuteTest.java |
835 | EMBEDDEDLIST("EmbeddedList", 10, new Class<?>[] { List.class }, new Class<?>[] { List.class }) {
}, | 0true
| core_src_main_java_com_orientechnologies_orient_core_metadata_schema_OType.java |
666 | sbTree.loadEntriesMajor(fromKey, isInclusive, new OSBTree.RangeResultListener<Object, V>() {
@Override
public boolean addResult(Map.Entry<Object, V> entry) {
return addToResult(transformer, valuesResultListener, entry.getValue());
}
}); | 0true
| core_src_main_java_com_orientechnologies_orient_core_index_engine_OSBTreeIndexEngine.java |
/**
 * Map operation that runs a user-supplied {@link EntryProcessor} against a
 * single key on the owning partition, then publishes the matching entry event
 * (ADDED / UPDATED / REMOVED), updates local map statistics, invalidates near
 * caches and, when a WAN publisher and merge policy are configured, forwards
 * the change. Implements {@link BackupAwareOperation} so the processor's
 * backup variant can run on replica partitions.
 */
public class EntryOperation extends LockAwareOperation implements BackupAwareOperation {
    // Sentinel: while eventType holds this (null) value, the processor made no
    // observable change and afterRun() skips event publishing entirely.
    private static final EntryEventType __NO_NEED_TO_FIRE_EVENT = null;
    // The user-supplied processor, deserialized on the target member.
    private EntryProcessor entryProcessor;
    // Event to publish after run(), or __NO_NEED_TO_FIRE_EVENT to skip.
    private EntryEventType eventType;
    // Serialized result of entryProcessor.process(...), returned to the caller.
    private Object response;
    // Value stored under the key before the processor ran (null for a new key).
    protected Object oldValue;

    public EntryOperation(String name, Data dataKey, EntryProcessor entryProcessor) {
        super(name, dataKey);
        this.entryProcessor = entryProcessor;
    }

    // No-arg constructor required for deserialization.
    public EntryOperation() {
    }

    public void innerBeforeRun() {
        // Let the managed context initialize/inject dependencies into the processor
        // before it executes.
        final ManagedContext managedContext = getNodeEngine().getSerializationService().getManagedContext();
        managedContext.initialize(entryProcessor);
    }

    public void run() {
        final long start = System.currentTimeMillis();
        // NOTE(review): getMapEntry(dataKey) is assumed to return non-null even for
        // absent keys (getValue() would NPE otherwise) — confirm against RecordStore.
        oldValue = recordStore.getMapEntry(dataKey).getValue();
        final LocalMapStatsImpl mapStats = mapService.getLocalMapStatsImpl(name);
        final Object valueBeforeProcess = mapService.toObject(oldValue);
        // Hand the processor a detached Map.Entry view; mutations are captured via
        // entry.getValue()/isModified() below rather than applied directly.
        final MapEntrySimple entry = new MapEntrySimple(mapService.toObject(dataKey), valueBeforeProcess);
        response = mapService.toData(entryProcessor.process(entry));
        final Object valueAfterProcess = entry.getValue();
        // no matching data by key.
        if (oldValue == null && valueAfterProcess == null) {
            eventType = __NO_NEED_TO_FIRE_EVENT;
        } else if (valueAfterProcess == null) {
            // Processor cleared the value: treat as a remove.
            recordStore.remove(dataKey);
            mapStats.incrementRemoves(getLatencyFrom(start));
            eventType = EntryEventType.REMOVED;
        } else {
            if (oldValue == null) {
                mapStats.incrementPuts(getLatencyFrom(start));
                eventType = EntryEventType.ADDED;
            }
            // take this case as a read so no need to fire an event.
            else if (!entry.isModified()) {
                mapStats.incrementGets(getLatencyFrom(start));
                eventType = __NO_NEED_TO_FIRE_EVENT;
            } else {
                mapStats.incrementPuts(getLatencyFrom(start));
                eventType = EntryEventType.UPDATED;
            }
            if (eventType != __NO_NEED_TO_FIRE_EVENT) {
                // Persist the processor's result and capture its serialized form for
                // event publishing in afterRun().
                // NOTE(review): dataValue is not declared in this class; it appears to
                // be inherited from a superclass — confirm.
                recordStore.put(new AbstractMap.SimpleImmutableEntry<Data, Object>(dataKey, entry.getValue()));
                dataValue = mapService.toData(entry.getValue());
            }
        }
    }

    public void afterRun() throws Exception {
        super.afterRun();
        if (eventType == __NO_NEED_TO_FIRE_EVENT) {
            return;
        }
        // Fire the entry listener event and drop stale near-cache entries.
        mapService.publishEvent(getCallerAddress(), name, eventType, dataKey, mapService.toData(oldValue), dataValue);
        invalidateNearCaches();
        // Forward over WAN only when both a publisher and a merge policy exist.
        if (mapContainer.getWanReplicationPublisher() != null && mapContainer.getWanMergePolicy() != null) {
            if (EntryEventType.REMOVED.equals(eventType)) {
                mapService.publishWanReplicationRemove(name, dataKey, Clock.currentTimeMillis());
            } else {
                // ADDED/UPDATED: ship an entry view of the freshly stored record.
                Record record = recordStore.getRecord(dataKey);
                final SimpleEntryView entryView = mapService.createSimpleEntryView(dataKey,mapService.toData(dataValue),record);
                mapService.publishWanReplicationUpdate(name, entryView);
            }
        }
    }

    @Override
    public void onWaitExpire() {
        // Lock wait timed out: answer the caller with null instead of running the processor.
        getResponseHandler().sendResponse(null);
    }

    @Override
    protected void readInternal(ObjectDataInput in) throws IOException {
        super.readInternal(in);
        entryProcessor = in.readObject();
    }

    @Override
    protected void writeInternal(ObjectDataOutput out) throws IOException {
        super.writeInternal(out);
        out.writeObject(entryProcessor);
    }

    @Override
    public Object getResponse() {
        return response;
    }

    @Override
    public String toString() {
        return "EntryOperation{}";
    }

    /** Backup runs the processor's backup variant; no backup operation when it is null. */
    public Operation getBackupOperation() {
        EntryBackupProcessor backupProcessor = entryProcessor.getBackupProcessor();
        return backupProcessor != null ? new EntryBackupOperation(name, dataKey, backupProcessor) : null;
    }

    /** Only back up when the processor actually provides a backup variant. */
    public boolean shouldBackup() {
        return entryProcessor.getBackupProcessor() != null;
    }

    public int getAsyncBackupCount() {
        return mapContainer.getAsyncBackupCount();
    }

    public int getSyncBackupCount() {
        return mapContainer.getBackupCount();
    }

    // Elapsed milliseconds since begin, used for per-operation latency stats.
    private long getLatencyFrom(long begin) {
        return Clock.currentTimeMillis() - begin;
    }
}
| hazelcast_src_main_java_com_hazelcast_map_operation_EntryOperation.java |
/**
 * JPA entity mapping a cross-sale relationship to a related product, stored in
 * BLC_PRODUCT_CROSS_SALE. The relationship hangs off either a product or a
 * category and carries an optional promotion message plus a sequence value.
 */
@Entity
@Inheritance(strategy = InheritanceType.JOINED)
@Table(name="BLC_PRODUCT_CROSS_SALE")
@Cache(usage = CacheConcurrencyStrategy.READ_WRITE, region="blStandardElements")
public class CrossSaleProductImpl implements RelatedProduct {

    // NOTE(review): serialVersionUID is conventionally private static final; left as-is.
    protected static final long serialVersionUID = 1L;

    // Primary key, generated through Broadleaf's table-based id generator.
    @Id
    @GeneratedValue(generator= "CrossSaleProductId")
    @GenericGenerator(
        name="CrossSaleProductId",
        strategy="org.broadleafcommerce.common.persistence.IdOverrideTableGenerator",
        parameters = {
            @Parameter(name="segment_value", value="CrossSaleProductImpl"),
            @Parameter(name="entity_name", value="org.broadleafcommerce.core.catalog.domain.CrossSaleProductImpl")
        }
    )
    @Column(name = "CROSS_SALE_PRODUCT_ID")
    protected Long id;

    // Optional marketing message shown with the cross-sale.
    @Column(name = "PROMOTION_MESSAGE")
    @AdminPresentation(friendlyName = "CrossSaleProductImpl_Cross_Sale_Promotion_Message", largeEntry=true)
    protected String promotionMessage;

    // Ordering value; hidden from the admin UI.
    @Column(name = "SEQUENCE")
    @AdminPresentation(visibility = VisibilityEnum.HIDDEN_ALL)
    protected Long sequence;

    // Owning product side of the relationship (nullable — the owner may be a category instead).
    @ManyToOne(targetEntity = ProductImpl.class)
    @JoinColumn(name = "PRODUCT_ID")
    @Index(name="CROSSSALE_INDEX", columnNames={"PRODUCT_ID"})
    protected Product product;

    // Owning category side of the relationship (nullable — see product above).
    @ManyToOne(targetEntity = CategoryImpl.class)
    @JoinColumn(name = "CATEGORY_ID")
    @Index(name="CROSSSALE_CATEGORY_INDEX", columnNames={"CATEGORY_ID"})
    protected Category category;

    // The product being cross-sold; mandatory (optional=false).
    @ManyToOne(targetEntity = ProductImpl.class, optional=false)
    @JoinColumn(name = "RELATED_SALE_PRODUCT_ID", referencedColumnName = "PRODUCT_ID")
    @Index(name="CROSSSALE_RELATED_INDEX", columnNames={"RELATED_SALE_PRODUCT_ID"})
    protected Product relatedSaleProduct = new ProductImpl();

    @Override
    public Long getId() {
        return id;
    }

    @Override
    public void setId(Long id) {
        this.id = id;
    }

    @Override
    public String getPromotionMessage() {
        return promotionMessage;
    }

    @Override
    public void setPromotionMessage(String promotionMessage) {
        this.promotionMessage = promotionMessage;
    }

    @Override
    public Long getSequence() {
        return sequence;
    }

    @Override
    public void setSequence(Long sequence) {
        this.sequence = sequence;
    }

    @Override
    public Product getProduct() {
        return product;
    }

    @Override
    public void setProduct(Product product) {
        this.product = product;
    }

    @Override
    public Category getCategory() {
        return category;
    }

    @Override
    public void setCategory(Category category) {
        this.category = category;
    }

    @Override
    public Product getRelatedProduct() {
        return relatedSaleProduct;
    }

    @Override
    public void setRelatedProduct(Product relatedSaleProduct) {
        this.relatedSaleProduct = relatedSaleProduct;
    }
}
| core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_catalog_domain_CrossSaleProductImpl.java |
600 | public class OIndexException extends OException {
private static final long serialVersionUID = -2655748565531836968L;
public OIndexException(final String string) {
super(string);
}
public OIndexException(final String message, final Throwable cause) {
super(message, cause);
}
} | 0true
| core_src_main_java_com_orientechnologies_orient_core_index_OIndexException.java |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.