Unnamed: 0 (int64, 0-6.45k) | func (string, lengths 37-143k) | target (class label, 2 classes) | project (string, lengths 33-157)
---|---|---|---|
6,269 |
public class GreaterThanAssertion extends Assertion {
private static final ESLogger logger = Loggers.getLogger(GreaterThanAssertion.class);
public GreaterThanAssertion(String field, Object expectedValue) {
super(field, expectedValue);
}
@Override
@SuppressWarnings("unchecked")
protected void doAssert(Object actualValue, Object expectedValue) {
logger.trace("assert that [{}] is greater than [{}]", actualValue, expectedValue);
assertThat(actualValue, instanceOf(Comparable.class));
assertThat(expectedValue, instanceOf(Comparable.class));
assertThat(errorMessage(), (Comparable)actualValue, greaterThan((Comparable) expectedValue));
}
private String errorMessage() {
return "field [" + getField() + "] is not greater than [" + getExpectedValue() + "]";
}
}
| 1 (no label)
|
src_test_java_org_elasticsearch_test_rest_section_GreaterThanAssertion.java
|
4,518 |
public class IndicesTTLService extends AbstractLifecycleComponent<IndicesTTLService> {
public static final String INDICES_TTL_INTERVAL = "indices.ttl.interval";
public static final String INDEX_TTL_DISABLE_PURGE = "index.ttl.disable_purge";
private final ClusterService clusterService;
private final IndicesService indicesService;
private final Client client;
private volatile TimeValue interval;
private final int bulkSize;
private PurgerThread purgerThread;
@Inject
public IndicesTTLService(Settings settings, ClusterService clusterService, IndicesService indicesService, NodeSettingsService nodeSettingsService, Client client) {
super(settings);
this.clusterService = clusterService;
this.indicesService = indicesService;
this.client = client;
this.interval = componentSettings.getAsTime("interval", TimeValue.timeValueSeconds(60));
this.bulkSize = componentSettings.getAsInt("bulk_size", 10000);
nodeSettingsService.addListener(new ApplySettings());
}
@Override
protected void doStart() throws ElasticsearchException {
this.purgerThread = new PurgerThread(EsExecutors.threadName(settings, "[ttl_expire]"));
this.purgerThread.start();
}
@Override
protected void doStop() throws ElasticsearchException {
this.purgerThread.doStop();
this.purgerThread.interrupt();
}
@Override
protected void doClose() throws ElasticsearchException {
}
private class PurgerThread extends Thread {
volatile boolean running = true;
public PurgerThread(String name) {
super(name);
setDaemon(true);
}
public void doStop() {
running = false;
}
public void run() {
while (running) {
try {
List<IndexShard> shardsToPurge = getShardsToPurge();
purgeShards(shardsToPurge);
} catch (Throwable e) {
if (running) {
logger.warn("failed to execute ttl purge", e);
}
}
try {
Thread.sleep(interval.millis());
} catch (InterruptedException e) {
// ignore, if we are interrupted because we are shutting down, running will be false
}
}
}
/**
* Returns the shards to purge, i.e. the locally started primary shards that have ttl enabled and disable_purge set to false
*/
private List<IndexShard> getShardsToPurge() {
List<IndexShard> shardsToPurge = new ArrayList<IndexShard>();
MetaData metaData = clusterService.state().metaData();
for (IndexService indexService : indicesService) {
// check the value of disable_purge for this index
IndexMetaData indexMetaData = metaData.index(indexService.index().name());
if (indexMetaData == null) {
continue;
}
boolean disablePurge = indexMetaData.settings().getAsBoolean(INDEX_TTL_DISABLE_PURGE, false);
if (disablePurge) {
continue;
}
// should be optimized with the hasTTL flag
FieldMappers ttlFieldMappers = indexService.mapperService().name(TTLFieldMapper.NAME);
if (ttlFieldMappers == null) {
continue;
}
// check if ttl is enabled for at least one type of this index
boolean hasTTLEnabled = false;
for (FieldMapper ttlFieldMapper : ttlFieldMappers) {
if (((TTLFieldMapper) ttlFieldMapper).enabled()) {
hasTTLEnabled = true;
break;
}
}
if (hasTTLEnabled) {
for (IndexShard indexShard : indexService) {
if (indexShard.state() == IndexShardState.STARTED && indexShard.routingEntry().primary() && indexShard.routingEntry().started()) {
shardsToPurge.add(indexShard);
}
}
}
}
return shardsToPurge;
}
}
private void purgeShards(List<IndexShard> shardsToPurge) {
for (IndexShard shardToPurge : shardsToPurge) {
Query query = NumericRangeQuery.newLongRange(TTLFieldMapper.NAME, null, System.currentTimeMillis(), false, true);
Engine.Searcher searcher = shardToPurge.acquireSearcher("indices_ttl");
try {
logger.debug("[{}][{}] purging shard", shardToPurge.routingEntry().index(), shardToPurge.routingEntry().id());
ExpiredDocsCollector expiredDocsCollector = new ExpiredDocsCollector(shardToPurge.routingEntry().index());
searcher.searcher().search(query, expiredDocsCollector);
List<DocToPurge> docsToPurge = expiredDocsCollector.getDocsToPurge();
BulkRequestBuilder bulkRequest = client.prepareBulk();
for (DocToPurge docToPurge : docsToPurge) {
bulkRequest.add(new DeleteRequest().index(shardToPurge.routingEntry().index()).type(docToPurge.type).id(docToPurge.id).version(docToPurge.version).routing(docToPurge.routing));
bulkRequest = processBulkIfNeeded(bulkRequest, false);
}
processBulkIfNeeded(bulkRequest, true);
} catch (Exception e) {
logger.warn("failed to purge", e);
} finally {
searcher.release();
}
}
}
private static class DocToPurge {
public final String type;
public final String id;
public final long version;
public final String routing;
public DocToPurge(String type, String id, long version, String routing) {
this.type = type;
this.id = id;
this.version = version;
this.routing = routing;
}
}
private class ExpiredDocsCollector extends Collector {
private final MapperService mapperService;
private AtomicReaderContext context;
private List<DocToPurge> docsToPurge = new ArrayList<DocToPurge>();
public ExpiredDocsCollector(String index) {
mapperService = indicesService.indexService(index).mapperService();
}
public void setScorer(Scorer scorer) {
}
public boolean acceptsDocsOutOfOrder() {
return true;
}
public void collect(int doc) {
try {
UidAndRoutingFieldsVisitor fieldsVisitor = new UidAndRoutingFieldsVisitor();
context.reader().document(doc, fieldsVisitor);
Uid uid = fieldsVisitor.uid();
final long version = Versions.loadVersion(context.reader(), new Term(UidFieldMapper.NAME, uid.toBytesRef()));
docsToPurge.add(new DocToPurge(uid.type(), uid.id(), version, fieldsVisitor.routing()));
} catch (Exception e) {
logger.trace("failed to collect doc", e);
}
}
public void setNextReader(AtomicReaderContext context) throws IOException {
this.context = context;
}
public List<DocToPurge> getDocsToPurge() {
return this.docsToPurge;
}
}
private BulkRequestBuilder processBulkIfNeeded(BulkRequestBuilder bulkRequest, boolean force) {
if ((force && bulkRequest.numberOfActions() > 0) || bulkRequest.numberOfActions() >= bulkSize) {
try {
bulkRequest.execute(new ActionListener<BulkResponse>() {
@Override
public void onResponse(BulkResponse bulkResponse) {
logger.trace("bulk took " + bulkResponse.getTookInMillis() + "ms");
}
@Override
public void onFailure(Throwable e) {
logger.warn("failed to execute bulk");
}
});
} catch (Exception e) {
logger.warn("failed to process bulk", e);
}
bulkRequest = client.prepareBulk();
}
return bulkRequest;
}
class ApplySettings implements NodeSettingsService.Listener {
@Override
public void onRefreshSettings(Settings settings) {
TimeValue interval = settings.getAsTime(INDICES_TTL_INTERVAL, IndicesTTLService.this.interval);
if (!interval.equals(IndicesTTLService.this.interval)) {
logger.info("updating indices.ttl.interval from [{}] to [{}]", IndicesTTLService.this.interval, interval);
IndicesTTLService.this.interval = interval;
}
}
}
}
| 1 (no label)
|
src_main_java_org_elasticsearch_indices_ttl_IndicesTTLService.java
|
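A note on the purge query in purgeShards above: NumericRangeQuery.newLongRange(TTLFieldMapper.NAME, null, System.currentTimeMillis(), false, true) matches every document whose stored _ttl expiry timestamp is at or before the current time (lower bound unbounded, upper bound inclusive). A minimal standalone sketch of the same range semantics against the older Lucene NumericRangeQuery API used above; the literal "_ttl" field name and the demo class are illustrative assumptions:

import org.apache.lucene.search.NumericRangeQuery;
import org.apache.lucene.search.Query;

public final class ExpiredDocsQueryDemo {
    // Range (null, now]: lower bound unbounded/exclusive, upper bound inclusive,
    // i.e. every document whose expiry timestamp has already passed.
    public static Query expiredQuery(long nowMillis) {
        return NumericRangeQuery.newLongRange("_ttl", null, nowMillis, false, true);
    }

    public static void main(String[] args) {
        System.out.println(expiredQuery(System.currentTimeMillis()));
        // prints the range, e.g. _ttl:{* TO 1700000000000]
    }
}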
31 |
static final class ThenCombine<T,U,V> extends Completion {
final CompletableFuture<? extends T> src;
final CompletableFuture<? extends U> snd;
final BiFun<? super T,? super U,? extends V> fn;
final CompletableFuture<V> dst;
final Executor executor;
ThenCombine(CompletableFuture<? extends T> src,
CompletableFuture<? extends U> snd,
BiFun<? super T,? super U,? extends V> fn,
CompletableFuture<V> dst,
Executor executor) {
this.src = src; this.snd = snd;
this.fn = fn; this.dst = dst;
this.executor = executor;
}
public final void run() {
final CompletableFuture<? extends T> a;
final CompletableFuture<? extends U> b;
final BiFun<? super T,? super U,? extends V> fn;
final CompletableFuture<V> dst;
Object r, s; T t; U u; Throwable ex;
if ((dst = this.dst) != null &&
(fn = this.fn) != null &&
(a = this.src) != null &&
(r = a.result) != null &&
(b = this.snd) != null &&
(s = b.result) != null &&
compareAndSet(0, 1)) {
if (r instanceof AltResult) {
ex = ((AltResult)r).ex;
t = null;
}
else {
ex = null;
@SuppressWarnings("unchecked") T tr = (T) r;
t = tr;
}
if (ex != null)
u = null;
else if (s instanceof AltResult) {
ex = ((AltResult)s).ex;
u = null;
}
else {
@SuppressWarnings("unchecked") U us = (U) s;
u = us;
}
Executor e = executor;
V v = null;
if (ex == null) {
try {
if (e != null)
e.execute(new AsyncCombine<T,U,V>(t, u, fn, dst));
else
v = fn.apply(t, u);
} catch (Throwable rex) {
ex = rex;
}
}
if (e == null || ex != null)
dst.internalComplete(v, ex);
}
}
private static final long serialVersionUID = 5232453952276885070L;
}
| 0 (true)
|
src_main_java_jsr166e_CompletableFuture.java
|
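The ThenCombine completion above is the jsr166e backport machinery behind thenCombine: the chained null checks in run() ensure the user function fires only once both source futures hold a result (or an exception). A minimal sketch of the equivalent behavior using the JDK 8 java.util.concurrent.CompletableFuture that jsr166e prefigured; the demo values are illustrative:

import java.util.concurrent.CompletableFuture;

public final class ThenCombineDemo {
    public static void main(String[] args) {
        CompletableFuture<Integer> a = CompletableFuture.completedFuture(2);
        CompletableFuture<Integer> b = CompletableFuture.completedFuture(3);
        // The function runs only once BOTH sources hold a result, which is what
        // the chained null checks on a.result and b.result in run() guard for.
        CompletableFuture<Integer> dst = a.thenCombine(b, (x, y) -> x * y);
        System.out.println(dst.join()); // 6
    }
}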
499 |
public final class ClientPartitionServiceImpl implements ClientPartitionService {
private static final ILogger LOGGER = Logger.getLogger(ClientPartitionService.class);
private static final long PERIOD = 10;
private static final long INITIAL_DELAY = 10;
private final HazelcastClient client;
private final ConcurrentHashMap<Integer, Address> partitions = new ConcurrentHashMap<Integer, Address>(271, 0.75f, 1);
private final AtomicBoolean updating = new AtomicBoolean(false);
private volatile int partitionCount;
public ClientPartitionServiceImpl(HazelcastClient client) {
this.client = client;
}
public void start() {
getInitialPartitions();
client.getClientExecutionService().scheduleWithFixedDelay(new RefreshTask(), INITIAL_DELAY, PERIOD, TimeUnit.SECONDS);
}
public void refreshPartitions() {
try {
client.getClientExecutionService().executeInternal(new RefreshTask());
} catch (RejectedExecutionException ignored) {
}
}
private class RefreshTask implements Runnable {
public void run() {
if (updating.compareAndSet(false, true)) {
try {
final ClientClusterService clusterService = client.getClientClusterService();
final Address master = clusterService.getMasterAddress();
final PartitionsResponse response = getPartitionsFrom(master);
if (response != null) {
processPartitionResponse(response);
}
} catch (HazelcastInstanceNotActiveException ignored) {
} finally {
updating.set(false);
}
}
}
}
private void getInitialPartitions() {
final ClientClusterService clusterService = client.getClientClusterService();
final Collection<MemberImpl> memberList = clusterService.getMemberList();
for (MemberImpl member : memberList) {
final Address target = member.getAddress();
PartitionsResponse response = getPartitionsFrom(target);
if (response != null) {
processPartitionResponse(response);
return;
}
}
throw new IllegalStateException("Cannot get initial partitions!");
}
private PartitionsResponse getPartitionsFrom(Address address) {
try {
final Future<PartitionsResponse> future =
client.getInvocationService().invokeOnTarget(new GetPartitionsRequest(), address);
return client.getSerializationService().toObject(future.get());
} catch (Exception e) {
LOGGER.severe("Error while fetching cluster partition table!", e);
}
return null;
}
private void processPartitionResponse(PartitionsResponse response) {
final Address[] members = response.getMembers();
final int[] ownerIndexes = response.getOwnerIndexes();
if (partitionCount == 0) {
partitionCount = ownerIndexes.length;
}
for (int partitionId = 0; partitionId < partitionCount; partitionId++) {
final int ownerIndex = ownerIndexes[partitionId];
if (ownerIndex > -1) {
partitions.put(partitionId, members[ownerIndex]);
}
}
}
public void stop() {
partitions.clear();
}
@Override
public Address getPartitionOwner(int partitionId) {
return partitions.get(partitionId);
}
@Override
public int getPartitionId(Data key) {
final int pc = partitionCount;
if (pc <= 0) {
return 0;
}
int hash = key.getPartitionHash();
return (hash == Integer.MIN_VALUE) ? 0 : Math.abs(hash) % pc;
}
@Override
public int getPartitionId(Object key) {
final Data data = client.getSerializationService().toData(key);
return getPartitionId(data);
}
@Override
public int getPartitionCount() {
return partitionCount;
}
@Override
public Partition getPartition(int partitionId) {
return new PartitionImpl(partitionId);
}
private final class PartitionImpl implements Partition {
private final int partitionId;
private PartitionImpl(int partitionId) {
this.partitionId = partitionId;
}
public int getPartitionId() {
return partitionId;
}
public Member getOwner() {
final Address owner = getPartitionOwner(partitionId);
if (owner != null) {
return client.getClientClusterService().getMember(owner);
}
return null;
}
@Override
public String toString() {
final StringBuilder sb = new StringBuilder("PartitionImpl{");
sb.append("partitionId=").append(partitionId);
sb.append('}');
return sb.toString();
}
}
}
| 1 (no label)
|
hazelcast-client_src_main_java_com_hazelcast_client_spi_impl_ClientPartitionServiceImpl.java
|
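One subtlety in getPartitionId(Data) above: Math.abs(Integer.MIN_VALUE) overflows and stays negative, so that hash must be special-cased before the modulo. A minimal standalone sketch of the same arithmetic; the 271-partition count echoes the map's initial capacity above, everything else is illustrative:

public final class PartitionIdDemo {
    // Same guard as getPartitionId: Math.abs(Integer.MIN_VALUE) overflows
    // back to Integer.MIN_VALUE, so that hash is pinned to partition 0.
    static int partitionId(int partitionHash, int partitionCount) {
        if (partitionCount <= 0) {
            return 0;
        }
        return (partitionHash == Integer.MIN_VALUE) ? 0 : Math.abs(partitionHash) % partitionCount;
    }

    public static void main(String[] args) {
        System.out.println(partitionId(42, 271));                // 42
        System.out.println(partitionId(-42, 271));               // 42
        System.out.println(partitionId(Integer.MIN_VALUE, 271)); // 0 instead of a negative index
    }
}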
67 |
public class FieldType implements Serializable {
private static final long serialVersionUID = 1L;
private static final Map<String, FieldType> TYPES = new HashMap<String, FieldType>();
public static final FieldType BOOLEAN = new FieldType("BOOLEAN", "Boolean");
public static final FieldType DATE = new FieldType("DATE", "Date");
public static final FieldType TIME = new FieldType("TIME", "Time");
public static final FieldType INTEGER = new FieldType("INTEGER", "Integer");
public static final FieldType DECIMAL = new FieldType("DECIMAL", "Decimal");
public static final FieldType STRING = new FieldType("STRING", "String");
public static final FieldType RICH_TEXT = new FieldType("RICH_TEXT", "Rich Text");
public static final FieldType HTML = new FieldType("HTML", "HTML");
public static final FieldType ENUMERATION = new FieldType("ENUMERATION", "Enumeration");
public static FieldType getInstance(final String type) {
return TYPES.get(type);
}
private String type;
private String friendlyType;
public FieldType() {
//do nothing
}
public FieldType(final String type, final String friendlyType) {
this.friendlyType = friendlyType;
setType(type);
}
public String getType() {
return type;
}
public String getFriendlyType() {
return friendlyType;
}
private void setType(final String type) {
this.type = type;
if (!TYPES.containsKey(type)) {
TYPES.put(type, this);
}
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((type == null) ? 0 : type.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
FieldType other = (FieldType) obj;
if (type == null) {
if (other.type != null)
return false;
} else if (!type.equals(other.type))
return false;
return true;
}
}
| 1 (no label)
|
admin_broadleaf-contentmanagement-module_src_main_java_org_broadleafcommerce_cms_field_type_FieldType.java
|
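FieldType is the registry-backed type-safe-enum pattern: each constructor self-registers in the static TYPES map, so getInstance can resolve a stored string back to its constant. A minimal sketch of the same pattern under assumed, illustrative names:

import java.util.HashMap;
import java.util.Map;

public final class RegistryEnumDemo {
    private static final Map<String, RegistryEnumDemo> TYPES = new HashMap<>();
    public static final RegistryEnumDemo HTML = new RegistryEnumDemo("HTML");

    private final String type;

    private RegistryEnumDemo(String type) {
        this.type = type;
        // Self-registration on construction, as in FieldType.setType:
        // the first instance registered for a type string wins.
        TYPES.putIfAbsent(type, this);
    }

    public String getType() {
        return type;
    }

    public static RegistryEnumDemo getInstance(String type) {
        return TYPES.get(type);
    }

    public static void main(String[] args) {
        System.out.println(getInstance("HTML") == HTML); // true
    }
}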
1,267 |
public class InternalTransportClient extends AbstractClient implements InternalClient {
private final Settings settings;
private final ThreadPool threadPool;
private final TransportClientNodesService nodesService;
private final InternalTransportAdminClient adminClient;
private final ImmutableMap<Action, TransportActionNodeProxy> actions;
@Inject
public InternalTransportClient(Settings settings, ThreadPool threadPool, TransportService transportService,
TransportClientNodesService nodesService, InternalTransportAdminClient adminClient,
Map<String, GenericAction> actions) {
this.settings = settings;
this.threadPool = threadPool;
this.nodesService = nodesService;
this.adminClient = adminClient;
MapBuilder<Action, TransportActionNodeProxy> actionsBuilder = new MapBuilder<Action, TransportActionNodeProxy>();
for (GenericAction action : actions.values()) {
if (action instanceof Action) {
actionsBuilder.put((Action) action, new TransportActionNodeProxy(settings, action, transportService));
}
}
this.actions = actionsBuilder.immutableMap();
}
@Override
public void close() {
// nothing to do here
}
@Override
public Settings settings() {
return this.settings;
}
@Override
public ThreadPool threadPool() {
return this.threadPool;
}
@Override
public AdminClient admin() {
return adminClient;
}
@SuppressWarnings("unchecked")
@Override
public <Request extends ActionRequest, Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder>> ActionFuture<Response> execute(final Action<Request, Response, RequestBuilder> action, final Request request) {
final TransportActionNodeProxy<Request, Response> proxy = actions.get(action);
return nodesService.execute(new TransportClientNodesService.NodeCallback<ActionFuture<Response>>() {
@Override
public ActionFuture<Response> doWithNode(DiscoveryNode node) throws ElasticsearchException {
return proxy.execute(node, request);
}
});
}
@SuppressWarnings("unchecked")
@Override
public <Request extends ActionRequest, Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder>> void execute(final Action<Request, Response, RequestBuilder> action, final Request request, ActionListener<Response> listener) {
final TransportActionNodeProxy<Request, Response> proxy = actions.get(action);
nodesService.execute(new TransportClientNodesService.NodeListenerCallback<Response>() {
@Override
public void doWithNode(DiscoveryNode node, ActionListener<Response> listener) throws ElasticsearchException {
proxy.execute(node, request, listener);
}
}, listener);
}
}
| 1 (no label)
|
src_main_java_org_elasticsearch_client_transport_support_InternalTransportClient.java
|
0 |
{
@Override
public void enteredCluster( ClusterConfiguration clusterConfiguration )
{
System.out.println( "Entered cluster:" + clusterConfiguration );
}
@Override
public void joinedCluster( InstanceId instanceId, URI member )
{
System.out.println( "Joined cluster:" + instanceId + " (at URI " + member +")" );
}
@Override
public void leftCluster( InstanceId instanceId )
{
System.out.println( "Left cluster:" + instanceId );
}
@Override
public void leftCluster()
{
System.out.println( "Left cluster" );
}
@Override
public void elected( String role, InstanceId instanceId, URI electedMember )
{
System.out.println( instanceId + " at URI " + electedMember + " was elected as " + role );
}
@Override
public void unelected( String role, InstanceId instanceId, URI electedMember )
{
System.out.println( instanceId + " at URI " + electedMember + " was removed from " + role );
}
} );
| 1 (no label)
|
enterprise_cluster_src_test_java_org_neo4j_cluster_protocol_atomicbroadcast_multipaxos_MultiPaxosServer.java
|
36 |
private static final class NullProposal
implements ICompletionProposal, ICompletionProposalExtension2 {
private List<ICompletionProposal> proposals;
private NullProposal(List<ICompletionProposal> proposals) {
this.proposals = proposals;
}
@Override
public void apply(IDocument document) {}
@Override
public Point getSelection(IDocument document) {
return null;
}
@Override
public String getAdditionalProposalInfo() {
return null;
}
@Override
public String getDisplayString() {
return "";
}
@Override
public Image getImage() {
return null;
}
@Override
public IContextInformation getContextInformation() {
return null;
}
@Override
public void apply(ITextViewer viewer, char trigger, int stateMask,
int offset) {}
@Override
public void selected(ITextViewer viewer, boolean smartToggle) {}
@Override
public void unselected(ITextViewer viewer) {}
@Override
public boolean validate(IDocument document, int offset,
DocumentEvent event) {
for (ICompletionProposal p: proposals) {
if (p instanceof ICompletionProposalExtension2) {
ICompletionProposalExtension2 ext =
(ICompletionProposalExtension2) p;
if (ext.validate(document, offset, event)) {
return true;
}
}
else {
return true;
}
}
return false;
}
}
| 0 (true)
|
plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_complete_LinkedModeCompletionProposal.java
|
65 |
{
@Override
@SuppressWarnings("deprecation")
public TxIdGenerator getTxIdGenerator()
{
return TxIdGenerator.DEFAULT;
}
};
| 0 (true)
|
community_kernel_src_test_java_org_neo4j_kernel_impl_transaction_TestXaFramework.java
|
615 |
new OIndexEngine.EntriesResultListener() {
@Override
public boolean addResult(ODocument entry) {
return entriesResultListener.addResult(entry);
}
});
| 1 (no label)
|
core_src_main_java_com_orientechnologies_orient_core_index_OIndexMultiValues.java
|
1 |
@Service("blAdminCatalogService")
public class AdminCatalogServiceImpl implements AdminCatalogService {
private static final Log LOG = LogFactory.getLog(AdminCatalogServiceImpl.class);
@Resource(name = "blCatalogService")
protected CatalogService catalogService;
@Resource(name = "blSkuDao")
protected SkuDao skuDao;
@PersistenceContext(unitName="blPU")
protected EntityManager em;
@Override
public Integer generateSkusFromProduct(Long productId) {
Product product = catalogService.findProductById(productId);
if (CollectionUtils.isEmpty(product.getProductOptions())) {
return -1;
}
List<List<ProductOptionValue>> allPermutations = generatePermutations(0, new ArrayList<ProductOptionValue>(), product.getProductOptions());
LOG.info("Total number of permutations: " + allPermutations.size());
LOG.info(allPermutations);
//determine the permutations that I already have Skus for
List<List<ProductOptionValue>> previouslyGeneratedPermutations = new ArrayList<List<ProductOptionValue>>();
if (CollectionUtils.isNotEmpty(product.getAdditionalSkus())) {
for (Sku additionalSku : product.getAdditionalSkus()) {
if (CollectionUtils.isNotEmpty(additionalSku.getProductOptionValues())) {
previouslyGeneratedPermutations.add(additionalSku.getProductOptionValues());
}
}
}
List<List<ProductOptionValue>> permutationsToGenerate = new ArrayList<List<ProductOptionValue>>();
for (List<ProductOptionValue> permutation : allPermutations) {
boolean previouslyGenerated = false;
for (List<ProductOptionValue> generatedPermutation : previouslyGeneratedPermutations) {
if (isSamePermutation(permutation, generatedPermutation)) {
previouslyGenerated = true;
break;
}
}
if (!previouslyGenerated) {
permutationsToGenerate.add(permutation);
}
}
int numPermutationsCreated = 0;
//For each permutation, I need them to map to a specific Sku
for (List<ProductOptionValue> permutation : permutationsToGenerate) {
if (permutation.isEmpty()) continue;
Sku permutatedSku = catalogService.createSku();
permutatedSku.setProduct(product);
permutatedSku.setProductOptionValues(permutation);
permutatedSku = catalogService.saveSku(permutatedSku);
product.getAdditionalSkus().add(permutatedSku);
numPermutationsCreated++;
}
if (numPermutationsCreated != 0) {
catalogService.saveProduct(product);
}
return numPermutationsCreated;
}
protected boolean isSamePermutation(List<ProductOptionValue> perm1, List<ProductOptionValue> perm2) {
if (perm1.size() == perm2.size()) {
Collection<Long> perm1Ids = BLCCollectionUtils.collect(perm1, new TypedTransformer<Long>() {
@Override
public Long transform(Object input) {
return ((ProductOptionValue) input).getId();
}
});
Collection<Long> perm2Ids = BLCCollectionUtils.collect(perm2, new TypedTransformer<Long>() {
@Override
public Long transform(Object input) {
return ((ProductOptionValue) input).getId();
}
});
return perm1Ids.containsAll(perm2Ids);
}
return false;
}
/**
* Generates all the possible permutations for the combinations of given ProductOptions
* @param currentTypeIndex
* @param currentPermutation
* @param options
* @return a list containing all of the possible combinations of ProductOptionValues based on grouping by the ProductOptionValue
*/
public List<List<ProductOptionValue>> generatePermutations(int currentTypeIndex, List<ProductOptionValue> currentPermutation, List<ProductOption> options) {
List<List<ProductOptionValue>> result = new ArrayList<List<ProductOptionValue>>();
if (currentTypeIndex == options.size()) {
result.add(currentPermutation);
return result;
}
ProductOption currentOption = options.get(currentTypeIndex);
if (!currentOption.getUseInSkuGeneration()) {
//This flag means do not generate Skus for this ProductOption, so do not create permutations for it;
//skip it and continue generating permutations with the remaining options.
result.addAll(generatePermutations(currentTypeIndex + 1, currentPermutation, options));
return result;
}
for (ProductOptionValue option : currentOption.getAllowedValues()) {
List<ProductOptionValue> permutation = new ArrayList<ProductOptionValue>();
permutation.addAll(currentPermutation);
permutation.add(option);
result.addAll(generatePermutations(currentTypeIndex + 1, permutation, options));
}
if (currentOption.getAllowedValues().size() == 0) {
//There are still product options left in our array to compute permutations, even though this productOption does not have any values associated.
result.addAll(generatePermutations(currentTypeIndex + 1, currentPermutation, options));
}
return result;
}
@Override
public Boolean cloneProduct(Long productId) {
Product cloneProduct = catalogService.findProductById(productId);
//initialize the many-to-many to save off
cloneProduct.getProductOptions().size();
cloneProduct.getAllParentCategories().size();
//Detach and save a cloned Sku
Sku cloneSku = cloneProduct.getDefaultSku();
cloneSku.getSkuMedia().size();
em.detach(cloneSku);
cloneSku.setId(null);
cloneProduct.setDefaultSku(cloneSku);
em.detach(cloneProduct);
cloneProduct.setId(null);
Product derivedProduct = catalogService.saveProduct(cloneProduct);
cloneProduct = catalogService.findProductById(productId);
//Re-associate the new Skus to the new Product
for (Sku additionalSku : cloneProduct.getAdditionalSkus()) {
additionalSku.getProductOptionValues().size();
em.detach(additionalSku);
additionalSku.setId(null);
additionalSku.setProduct(derivedProduct);
catalogService.saveSku(additionalSku);
}
return true;
}
}
| 0 (true)
|
admin_broadleaf-admin-module_src_main_java_org_broadleafcommerce_admin_server_service_AdminCatalogServiceImpl.java
|
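generatePermutations above is a recursive cartesian product over each option's allowed values, with two escape hatches: options flagged out of sku generation and options with no values both pass the current permutation through unchanged. A minimal sketch of the same recursion with plain strings standing in for ProductOptionValue (names and data are illustrative):

import java.util.ArrayList;
import java.util.List;

public final class PermutationDemo {
    // Same recursion as generatePermutations, minus the Broadleaf types.
    static List<List<String>> permute(int index, List<String> current, List<List<String>> options) {
        List<List<String>> result = new ArrayList<>();
        if (index == options.size()) {
            result.add(current);
            return result;
        }
        for (String value : options.get(index)) {
            List<String> next = new ArrayList<>(current);
            next.add(value);
            result.addAll(permute(index + 1, next, options));
        }
        if (options.get(index).isEmpty()) {
            // An option with no values must not swallow the remaining options,
            // mirroring the getAllowedValues().size() == 0 branch above.
            result.addAll(permute(index + 1, current, options));
        }
        return result;
    }

    public static void main(String[] args) {
        List<List<String>> options = List.of(List.of("Red", "Blue"), List.of("S", "M"));
        System.out.println(permute(0, new ArrayList<>(), options));
        // [[Red, S], [Red, M], [Blue, S], [Blue, M]]
    }
}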
23 |
static class EdgeVertex extends Vertex {
private SortedSet<Edge> outEdges = new ConcurrentSkipListSet<Edge>(new Comparator<Edge>() {
@Override
public int compare(Edge e1, Edge e2) {
return e1.getEnd().compareTo(e2.getEnd());
}
});
EdgeVertex(long id) {
super(id);
}
@Override
public Iterable<Vertex> getNeighbors(final int value) {
return Iterables.transform(Iterables.filter(outEdges, new Predicate<Edge>() {
@Override
public boolean apply(@Nullable Edge edge) {
return !CHECK_VALUE || ((Integer) edge.getProperty("number")).intValue() == value;
}
}), new Function<Edge, Vertex>() {
@Override
public Vertex apply(@Nullable Edge edge) {
return edge.getEnd();
}
});
}
void addOutEdge(Edge e) {
outEdges.add(e);
}
}
| 0 (true)
|
titan-test_src_main_java_com_thinkaurelius_titan_TestByteBuffer.java
|
1,442 |
public class GoogleAnalyticsTag extends SimpleTagSupport {
private static final Log LOG = LogFactory.getLog(GoogleAnalyticsTag.class);
@Value("${googleAnalytics.webPropertyId}")
private String webPropertyId;
private Order order;
public void setOrder(Order order) {
this.order = order;
}
public void setWebPropertyId(String webPropertyId) {
this.webPropertyId = webPropertyId;
}
@Override
public void doTag() throws JspException, IOException {
JspWriter out = getJspContext().getOut();
if (webPropertyId == null) {
ServletContext sc = ((PageContext) getJspContext()).getServletContext();
ApplicationContext context = WebApplicationContextUtils.getWebApplicationContext(sc);
context.getAutowireCapableBeanFactory().autowireBeanProperties(this, AutowireCapableBeanFactory.AUTOWIRE_BY_NAME, false);
}
if (webPropertyId.equals("UA-XXXXXXX-X")) {
LOG.warn("googleAnalytics.webPropertyId has not been overridden in a custom property file. Please set this in order to properly use the Google Analytics tag");
}
out.println(analytics(webPropertyId, order));
super.doTag();
}
/**
* Documentation for the recommended asynchronous GA tag is at:
* http://code.google.com/apis/analytics/docs/tracking/gaTrackingEcommerce.html
*
* @param webPropertyId - Google Analytics ID
* @param order - optionally track the order submission. This should be included on the
* page after the order has been successfully submitted. If null, this will just track the current page
* @return the relevant Javascript to render on the page
*/
protected String analytics(String webPropertyId, Order order) {
StringBuffer sb = new StringBuffer();
sb.append("<script type=\"text/javascript\">");
sb.append("var _gaq = _gaq || [];");
sb.append("_gaq.push(['_setAccount', '" + webPropertyId + "']);");
sb.append("_gaq.push(['_trackPageview']);");
if (order != null) {
Address paymentAddress = order.getPaymentInfos().get(0).getAddress();
sb.append("_gaq.push(['_addTrans','" + order.getId() + "'");
sb.append(",'" + order.getName() + "'");
sb.append(",'" + order.getTotal() + "'");
sb.append(",'" + order.getTotalTax() + "'");
sb.append(",'" + order.getTotalShipping() + "'");
sb.append(",'" + paymentAddress.getCity() + "'");
sb.append(",'" + paymentAddress.getState().getName() + "'");
sb.append(",'" + paymentAddress.getCountry().getName() + "'");
sb.append("]);");
for (FulfillmentGroup fulfillmentGroup : order.getFulfillmentGroups()) {
for (FulfillmentGroupItem fulfillmentGroupItem : fulfillmentGroup.getFulfillmentGroupItems()) {
DiscreteOrderItem orderItem = (DiscreteOrderItem) fulfillmentGroupItem.getOrderItem();
sb.append("_gaq.push(['_addItem','" + order.getId() + "'");
sb.append(",'" + orderItem.getSku().getId() + "'");
sb.append(",'" + orderItem.getSku().getName() + "'");
sb.append(",' " + orderItem.getProduct().getDefaultCategory() + "'");
sb.append(",'" + orderItem.getPrice() + "'");
sb.append(",'" + orderItem.getQuantity() + "'");
sb.append("]);");
}
}
sb.append("_gaq.push(['_trackTrans']);");
}
sb.append(" (function() {"
+ "var ga = document.createElement('script'); ga.type = 'text/javascript'; ga.async = true;"
+ "ga.src = ('https:' == document.location.protocol ? 'https://ssl' : 'http://www') + '.google-analytics.com/ga.js';"
+ "var s = document.getElementsByTagName('script')[0]; s.parentNode.insertBefore(ga, s);"
+ "})();");
sb.append("</script>");
return sb.toString();
}
}
| 1 (no label)
|
core_broadleaf-framework-web_src_main_java_org_broadleafcommerce_core_web_catalog_taglib_GoogleAnalyticsTag.java
|
33 |
{
@Override
public void run()
{
Channel channel = getChannel( to );
try
{
if ( channel == null )
{
channel = openChannel( to );
openedChannel( to, channel );
// Instance could be connected to, remove any marker of it being failed
failedInstances.remove( to );
}
}
catch ( Exception e )
{
// Only print out failure message on first fail
if ( !failedInstances.contains( to ) )
{
msgLog.warn( e.getMessage() );
failedInstances.add( to );
}
return;
}
try
{
// Set FROM header
message.setHeader( Message.FROM, me.toASCIIString() );
msgLog.debug( "Sending to " + to + ": " + message );
ChannelFuture future = channel.write( message );
future.addListener( new ChannelFutureListener()
{
@Override
public void operationComplete( ChannelFuture future ) throws Exception
{
if ( !future.isSuccess() )
{
msgLog.debug( "Unable to write " + message + " to " + future.getChannel(),
future.getCause() );
}
}
} );
}
catch ( Exception e )
{
msgLog.warn( "Could not send message", e );
channel.close();
}
}
} );
| 1 (no label)
|
enterprise_cluster_src_main_java_org_neo4j_cluster_com_NetworkSender.java
|
36 |
public class SetCommandParser extends TypeAwareCommandParser {
public SetCommandParser(TextCommandConstants.TextCommandType type) {
super(type);
}
public TextCommand parser(SocketTextReader socketTextReader, String cmd, int space) {
StringTokenizer st = new StringTokenizer(cmd);
st.nextToken();
String key = null;
int valueLen = 0;
int flag = 0;
int expiration = 0;
boolean noReply = false;
if (st.hasMoreTokens()) {
key = st.nextToken();
} else {
return new ErrorCommand(ERROR_CLIENT);
}
if (st.hasMoreTokens()) {
flag = Integer.parseInt(st.nextToken());
} else {
return new ErrorCommand(ERROR_CLIENT);
}
if (st.hasMoreTokens()) {
expiration = Integer.parseInt(st.nextToken());
} else {
return new ErrorCommand(ERROR_CLIENT);
}
if (st.hasMoreTokens()) {
valueLen = Integer.parseInt(st.nextToken());
} else {
return new ErrorCommand(ERROR_CLIENT);
}
if (st.hasMoreTokens()) {
noReply = "noreply".equals(st.nextToken());
}
return new SetCommand(type, key, flag, expiration, valueLen, noReply);
}
}
| 0 (true)
|
hazelcast_src_main_java_com_hazelcast_ascii_memcache_SetCommandParser.java
|
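SetCommandParser.parser above tokenizes the memcached text protocol line "set <key> <flags> <exptime> <bytes> [noreply]", returning ERROR_CLIENT when a mandatory token is missing. A minimal sketch of that tokenization on a sample command line; the sample values are illustrative:

import java.util.StringTokenizer;

public final class SetCommandDemo {
    public static void main(String[] args) {
        // Memcached text protocol: set <key> <flags> <exptime> <bytes> [noreply]
        String cmd = "set mykey 8 3600 5 noreply";
        StringTokenizer st = new StringTokenizer(cmd);
        st.nextToken(); // skip the "set" verb, as the parser above does
        String key = st.nextToken();
        int flags = Integer.parseInt(st.nextToken());
        int exptime = Integer.parseInt(st.nextToken());
        int bytes = Integer.parseInt(st.nextToken());
        boolean noReply = st.hasMoreTokens() && "noreply".equals(st.nextToken());
        System.out.printf("key=%s flags=%d exptime=%d bytes=%d noreply=%b%n",
                key, flags, exptime, bytes, noReply);
        // key=mykey flags=8 exptime=3600 bytes=5 noreply=true
    }
}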
283 |
public abstract class ActionRequest<T extends ActionRequest> extends TransportRequest {
private boolean listenerThreaded = false;
protected ActionRequest() {
super();
}
protected ActionRequest(ActionRequest request) {
super(request);
// this does not set the listenerThreaded API; if needed, it's up to the caller to set it
// since most times, we actually want it to not be threaded...
//this.listenerThreaded = request.listenerThreaded();
}
/**
* Should the response listener be executed on a thread or not.
* <p/>
* <p>When not executing on a thread, it will either be executed on the calling thread, or
* on an expensive, IO based, thread.
*/
public final boolean listenerThreaded() {
return this.listenerThreaded;
}
/**
* Sets if the response listener be executed on a thread or not.
*/
@SuppressWarnings("unchecked")
public final T listenerThreaded(boolean listenerThreaded) {
this.listenerThreaded = listenerThreaded;
return (T) this;
}
public abstract ActionRequestValidationException validate();
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
}
}
| 0 (true)
|
src_main_java_org_elasticsearch_action_ActionRequest.java
|
1,262 |
public class FulfillmentItemPricingActivity extends BaseActivity<PricingContext> {
private static final Log LOG = LogFactory.getLog(FulfillmentItemPricingActivity.class);
protected BroadleafCurrency getCurrency(FulfillmentGroup fg) {
return fg.getOrder().getCurrency();
}
/**
* Returns the order adjustment value or zero if none exists
* @param order
* @return
*/
protected Money getOrderSavingsToDistribute(Order order) {
if (order.getOrderAdjustmentsValue() == null) {
return new Money(order.getCurrency());
} else {
Money adjustmentValue = order.getOrderAdjustmentsValue();
Money orderSubTotal = order.getSubTotal();
if (orderSubTotal == null || orderSubTotal.lessThan(adjustmentValue)) {
if (LOG.isWarnEnabled()) {
LOG.warn("Subtotal is null or less than orderSavings in DistributeOrderSavingsActivity.java. " +
"No distribution is taking place.");
}
return new Money(order.getCurrency());
}
return adjustmentValue;
}
}
@Override
public PricingContext execute(PricingContext context) throws Exception {
Order order = context.getSeedData();
Map<OrderItem,List<FulfillmentGroupItem>> partialOrderItemMap = new HashMap<OrderItem,List<FulfillmentGroupItem>>();
// Calculate the fulfillmentGroupItem total
populateItemTotalAmount(order, partialOrderItemMap);
fixItemTotalRoundingIssues(order, partialOrderItemMap);
// Calculate the fulfillmentGroupItem prorated orderSavings
Money totalAllItemsAmount = calculateTotalPriceForAllFulfillmentItems(order);
Money totalOrderAdjustmentDistributed = distributeOrderSavingsToItems(order, totalAllItemsAmount.getAmount());
fixOrderSavingsRoundingIssues(order, totalOrderAdjustmentDistributed);
// Step 3: Finalize the taxable amounts
updateTaxableAmountsOnItems(order);
context.setSeedData(order);
return context;
}
/**
* Sets the fulfillment amount which includes the relative portion of the total price for
* the corresponding order item.
*
* @param order
* @param partialOrderItemMap
*/
protected void populateItemTotalAmount(Order order, Map<OrderItem, List<FulfillmentGroupItem>> partialOrderItemMap) {
for (FulfillmentGroup fulfillmentGroup : order.getFulfillmentGroups()) {
for (FulfillmentGroupItem fgItem : fulfillmentGroup.getFulfillmentGroupItems()) {
OrderItem orderItem = fgItem.getOrderItem();
int fgItemQty = fgItem.getQuantity();
int orderItemQty = orderItem.getQuantity();
Money totalItemAmount = orderItem.getTotalPrice();
if (fgItemQty != orderItemQty) {
// We need to keep track of all of these items in case we need to distribute a remainder
// to one or more of the items.
List<FulfillmentGroupItem> fgItemList = partialOrderItemMap.get(orderItem);
if (fgItemList == null) {
fgItemList = new ArrayList<FulfillmentGroupItem>();
partialOrderItemMap.put(orderItem, fgItemList);
}
fgItemList.add(fgItem);
fgItem.setTotalItemAmount(totalItemAmount.multiply(fgItemQty).divide(orderItemQty));
} else {
fgItem.setTotalItemAmount(totalItemAmount);
}
}
}
}
/**
* Because an item may have multiple price details that don't round cleanly, we may have pennies
* left over that need to be distributed.
*
* @param order
* @param partialOrderItemMap
*/
protected void fixItemTotalRoundingIssues(Order order, Map<OrderItem, List<FulfillmentGroupItem>> partialOrderItemMap) {
for (OrderItem orderItem : partialOrderItemMap.keySet()) {
Money totalItemAmount = orderItem.getTotalPrice();
Money totalFGItemAmount = sumItemAmount(partialOrderItemMap.get(orderItem), order);
Money amountDiff = totalItemAmount.subtract(totalFGItemAmount);
if (!(amountDiff.getAmount().compareTo(BigDecimal.ZERO) == 0)) {
long numApplicationsNeeded = countNumberOfUnits(amountDiff);
Money unitAmount = getUnitAmount(amountDiff);
for (FulfillmentGroupItem fgItem : partialOrderItemMap.get(orderItem)) {
numApplicationsNeeded = numApplicationsNeeded -
applyDifferenceToAmount(fgItem, numApplicationsNeeded, unitAmount);
if (numApplicationsNeeded == 0) {
break;
}
}
}
}
}
/**
* Returns the total price for all fulfillment items.
* @param order
* @return
*/
protected Money calculateTotalPriceForAllFulfillmentItems(Order order) {
Money totalAllItemsAmount = new Money(order.getCurrency());
for (FulfillmentGroup fulfillmentGroup : order.getFulfillmentGroups()) {
for (FulfillmentGroupItem fgItem : fulfillmentGroup.getFulfillmentGroupItems()) {
totalAllItemsAmount = totalAllItemsAmount.add(fgItem.getTotalItemAmount());
}
}
return totalAllItemsAmount;
}
/**
* Distributes the order adjustments (if any) to the individual fulfillment group items.
* @param order
* @param totalAllItems
* @return
*/
protected Money distributeOrderSavingsToItems(Order order, BigDecimal totalAllItems) {
Money returnAmount = new Money(order.getCurrency());
BigDecimal orderAdjAmt = order.getOrderAdjustmentsValue().getAmount();
for (FulfillmentGroup fulfillmentGroup : order.getFulfillmentGroups()) {
for (FulfillmentGroupItem fgItem : fulfillmentGroup.getFulfillmentGroupItems()) {
BigDecimal fgItemAmount = fgItem.getTotalItemAmount().getAmount();
BigDecimal proratedAdjAmt = totalAllItems.compareTo(BigDecimal.ZERO) == 0 ? totalAllItems : orderAdjAmt.multiply(fgItemAmount).divide(totalAllItems, RoundingMode.FLOOR);
fgItem.setProratedOrderAdjustmentAmount(new Money(proratedAdjAmt, order.getCurrency()));
returnAmount = returnAmount.add(fgItem.getProratedOrderAdjustmentAmount());
}
}
return returnAmount;
}
/**
* It is possible due to rounding that the distributed order adjustments do not match the
* order adjustment total. This method fixes the discrepancy by adding or removing pennies.
* @param order
* @param totalOrderAdjustmentDistributed
*/
protected void fixOrderSavingsRoundingIssues(Order order, Money totalOrderAdjustmentDistributed) {
if (!order.getHasOrderAdjustments()) {
return;
}
Money orderAdjustmentTotal = order.getOrderAdjustmentsValue();
Money amountDiff = totalOrderAdjustmentDistributed.subtract(orderAdjustmentTotal);
if (!(amountDiff.getAmount().compareTo(BigDecimal.ZERO) == 0)) {
long numApplicationsNeeded = countNumberOfUnits(amountDiff);
Money unitAmount = getUnitAmount(amountDiff);
for (FulfillmentGroup fulfillmentGroup : order.getFulfillmentGroups()) {
for (FulfillmentGroupItem fgItem : fulfillmentGroup.getFulfillmentGroupItems()) {
numApplicationsNeeded = numApplicationsNeeded -
applyDifferenceToProratedAdj(fgItem, numApplicationsNeeded, unitAmount);
if (numApplicationsNeeded == 0) {
break;
}
}
}
}
}
/**
* Sets the taxable amount on each fulfillment group item: the total item amount
* less any prorated order adjustment, or zero when the underlying order item is not taxable.
* @param order
*/
protected void updateTaxableAmountsOnItems(Order order) {
Money zero = new Money(order.getCurrency());
for (FulfillmentGroup fulfillmentGroup : order.getFulfillmentGroups()) {
for (FulfillmentGroupItem fgItem : fulfillmentGroup.getFulfillmentGroupItems()) {
if (fgItem.getOrderItem().isTaxable()) {
Money proratedOrderAdjAmt = fgItem.getProratedOrderAdjustmentAmount();
if (proratedOrderAdjAmt != null) {
fgItem.setTotalItemTaxableAmount(fgItem.getTotalItemAmount().subtract(proratedOrderAdjAmt));
} else {
fgItem.setTotalItemTaxableAmount(fgItem.getTotalItemAmount());
}
} else {
fgItem.setTotalItemTaxableAmount(zero);
}
}
}
}
protected Money sumItemAmount(List<FulfillmentGroupItem> items, Order order) {
Money totalAmount = new Money(order.getCurrency());
for (FulfillmentGroupItem fgItem : items) {
totalAmount = totalAmount.add(fgItem.getTotalItemAmount());
}
return totalAmount;
}
protected Money sumTaxAmount(List<FulfillmentGroupItem> items, Order order) {
Money taxAmount = new Money(order.getCurrency());
for (FulfillmentGroupItem fgItem : items) {
taxAmount = taxAmount.add(fgItem.getTotalItemTaxableAmount());
}
return taxAmount;
}
public long countNumberOfUnits(Money difference) {
double numUnits = difference.multiply(Math.pow(10, difference.getCurrency().getDefaultFractionDigits())).doubleValue();
return Math.round(numUnits);
}
/**
* Returns the unit amount (e.g. .01 for US)
* @param difference
* @return
*/
public Money getUnitAmount(Money difference) {
Currency currency = difference.getCurrency();
BigDecimal divisor = new BigDecimal(Math.pow(10, currency.getDefaultFractionDigits()));
BigDecimal unitAmount = new BigDecimal("1").divide(divisor);
if (difference.lessThan(BigDecimal.ZERO)) {
unitAmount = unitAmount.negate();
}
return new Money(unitAmount, currency);
}
public long applyDifferenceToAmount(FulfillmentGroupItem fgItem, long numApplicationsNeeded, Money unitAmount) {
BigDecimal numTimesToApply = new BigDecimal(Math.min(numApplicationsNeeded, fgItem.getQuantity()));
Money oldAmount = fgItem.getTotalItemAmount();
Money changeToAmount = unitAmount.multiply(numTimesToApply);
fgItem.setTotalItemAmount(oldAmount.add(changeToAmount));
return numTimesToApply.longValue();
}
public long applyDifferenceToProratedAdj(FulfillmentGroupItem fgItem, long numApplicationsNeeded, Money unitAmount) {
BigDecimal numTimesToApply = new BigDecimal(Math.min(numApplicationsNeeded, fgItem.getQuantity()));
Money oldAmount = fgItem.getProratedOrderAdjustmentAmount();
Money changeToAmount = unitAmount.multiply(numTimesToApply);
fgItem.setProratedOrderAdjustmentAmount(oldAmount.add(changeToAmount));
return numTimesToApply.longValue();
}
public long applyTaxDifference(FulfillmentGroupItem fgItem, long numApplicationsNeeded, Money unitAmount) {
BigDecimal numTimesToApply = new BigDecimal(Math.min(numApplicationsNeeded, fgItem.getQuantity()));
Money oldAmount = fgItem.getTotalItemTaxableAmount();
Money changeToAmount = unitAmount.multiply(numTimesToApply);
fgItem.setTotalItemTaxableAmount(oldAmount.add(changeToAmount));
return numTimesToApply.longValue();
}
}
| 1 (no label)
|
core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_pricing_service_workflow_FulfillmentItemPricingActivity.java
|
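The penny-distribution helpers above (countNumberOfUnits, getUnitAmount) convert a rounding difference into a whole number of smallest currency units and apply them item by item. A minimal sketch of that conversion with plain BigDecimal in place of Broadleaf's Money, assuming a two-fraction-digit currency; the demo numbers are illustrative:

import java.math.BigDecimal;

public final class PennyDistributionDemo {
    public static void main(String[] args) {
        // Mirrors countNumberOfUnits/getUnitAmount for a currency with
        // 2 fraction digits, without the Broadleaf Money wrapper.
        BigDecimal difference = new BigDecimal("0.03");
        int fractionDigits = 2;
        long units = difference.movePointRight(fractionDigits).longValueExact(); // 3
        BigDecimal unitAmount = BigDecimal.ONE.movePointLeft(fractionDigits);    // 0.01
        if (difference.signum() < 0) {
            unitAmount = unitAmount.negate();
        }
        System.out.println(units + " application(s) of " + unitAmount);
        // 3 application(s) of 0.01
    }
}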
2,698 |
final class PortableSerializer implements StreamSerializer<Portable> {
private final SerializationContext context;
private final Map<Integer, PortableFactory> factories = new HashMap<Integer, PortableFactory>();
PortableSerializer(SerializationContext context, Map<Integer, ? extends PortableFactory> portableFactories) {
this.context = context;
factories.putAll(portableFactories);
}
public int getTypeId() {
return SerializationConstants.CONSTANT_TYPE_PORTABLE;
}
public void write(ObjectDataOutput out, Portable p) throws IOException {
if (p.getClassId() == 0) {
throw new IllegalArgumentException("Portable class id cannot be zero!");
}
if (!(out instanceof BufferObjectDataOutput)) {
throw new IllegalArgumentException("ObjectDataOutput must be instance of BufferObjectDataOutput!");
}
ClassDefinition cd = context.lookupOrRegisterClassDefinition(p);
BufferObjectDataOutput bufferedOut = (BufferObjectDataOutput) out;
DefaultPortableWriter writer = new DefaultPortableWriter(this, bufferedOut, cd);
p.writePortable(writer);
writer.end();
}
public Portable read(ObjectDataInput in) throws IOException {
if (!(in instanceof BufferObjectDataInput)) {
throw new IllegalArgumentException("ObjectDataInput must be instance of BufferObjectDataInput!");
}
if (!(in instanceof PortableContextAwareInputStream)) {
throw new IllegalArgumentException("ObjectDataInput must be instance of PortableContextAwareInputStream!");
}
final PortableContextAwareInputStream ctxIn = (PortableContextAwareInputStream) in;
final int factoryId = ctxIn.getFactoryId();
final int dataClassId = ctxIn.getClassId();
final int dataVersion = ctxIn.getVersion();
final PortableFactory portableFactory = factories.get(factoryId);
if (portableFactory == null) {
throw new HazelcastSerializationException("Could not find PortableFactory for factory-id: " + factoryId);
}
final Portable portable = portableFactory.create(dataClassId);
if (portable == null) {
throw new HazelcastSerializationException("Could not create Portable for class-id: " + dataClassId);
}
final DefaultPortableReader reader;
final ClassDefinition cd;
final BufferObjectDataInput bufferedIn = (BufferObjectDataInput) in;
if (context.getVersion() == dataVersion) {
cd = context.lookup(factoryId, dataClassId);
// using context.version
if (cd == null) {
throw new HazelcastSerializationException("Could not find class-definition for "
+ "factory-id: " + factoryId + ", class-id: " + dataClassId + ", version: " + dataVersion);
}
reader = new DefaultPortableReader(this, bufferedIn, cd);
} else {
cd = context.lookup(factoryId, dataClassId, dataVersion);
// registered during read
if (cd == null) {
throw new HazelcastSerializationException("Could not find class-definition for "
+ "factory-id: " + factoryId + ", class-id: " + dataClassId + ", version: " + dataVersion);
}
reader = new MorphingPortableReader(this, bufferedIn, cd);
}
portable.readPortable(reader);
reader.end();
return portable;
}
Portable readAndInitialize(BufferObjectDataInput in) throws IOException {
Portable p = read(in);
final ManagedContext managedContext = context.getManagedContext();
return managedContext != null ? (Portable) managedContext.initialize(p) : p;
}
public void destroy() {
factories.clear();
}
}
| 1 (no label)
|
hazelcast_src_main_java_com_hazelcast_nio_serialization_PortableSerializer.java
|
228 |
XPostingsHighlighter highlighter = new XPostingsHighlighter() {
@Override
public Passage[] getEmptyHighlight(String fieldName, BreakIterator bi, int maxPassages) {
return new Passage[0];
}
};
| 0 (true)
|
src_test_java_org_apache_lucene_search_postingshighlight_XPostingsHighlighterTests.java
|
54 |
@RequestMapping("/" + AdminStructuredContentController.SECTION_KEY)
public class AdminStructuredContentController extends AdminBasicEntityController {
protected static final String SECTION_KEY = "structured-content";
@Override
protected String getSectionKey(Map<String, String> pathVars) {
//allow external links to work for ToOne items
if (super.getSectionKey(pathVars) != null) {
return super.getSectionKey(pathVars);
}
return SECTION_KEY;
}
@Override
@RequestMapping(value = "/{id}", method = RequestMethod.GET)
public String viewEntityForm(HttpServletRequest request, HttpServletResponse response, Model model,
@PathVariable Map<String, String> pathVars,
@PathVariable(value="id") String id) throws Exception {
// Get the normal entity form for this item
String returnPath = super.viewEntityForm(request, response, model, pathVars, id);
EntityForm ef = (EntityForm) model.asMap().get("entityForm");
// Attach the dynamic fields to the form
DynamicEntityFormInfo info = new DynamicEntityFormInfo()
.withCeilingClassName(StructuredContentType.class.getName())
.withCriteriaName("constructForm")
.withPropertyName("structuredContentType")
.withPropertyValue(ef.findField("structuredContentType").getValue());
EntityForm dynamicForm = getDynamicFieldTemplateForm(info, id, null);
ef.putDynamicFormInfo("structuredContentType", info);
ef.putDynamicForm("structuredContentType", dynamicForm);
// We don't want to allow changing types once a structured content item exists
ef.findField("structuredContentType").setReadOnly(true);
return returnPath;
}
@Override
@RequestMapping(value = "/{id}", method = RequestMethod.POST)
public String saveEntity(HttpServletRequest request, HttpServletResponse response, Model model,
@PathVariable Map<String, String> pathVars,
@PathVariable(value="id") String id,
@ModelAttribute(value="entityForm") EntityForm entityForm, BindingResult result,
RedirectAttributes ra) throws Exception {
// Attach the dynamic form info so that the update service will know how to split up the fields
DynamicEntityFormInfo info = new DynamicEntityFormInfo()
.withCeilingClassName(StructuredContentType.class.getName())
.withCriteriaName("constructForm")
.withPropertyName("structuredContentType");
entityForm.putDynamicFormInfo("structuredContentType", info);
String returnPath = super.saveEntity(request, response, model, pathVars, id, entityForm, result, ra);
if (result.hasErrors()) {
info = entityForm.getDynamicFormInfo("structuredContentType");
info.setPropertyValue(entityForm.findField("structuredContentType").getValue());
//grab back the dynamic form that was actually put in
EntityForm inputDynamicForm = entityForm.getDynamicForm("structuredContentType");
EntityForm dynamicForm = getDynamicFieldTemplateForm(info, id, inputDynamicForm);
entityForm.putDynamicForm("structuredContentType", dynamicForm);
}
return returnPath;
}
@RequestMapping(value = "/{propertyName}/dynamicForm", method = RequestMethod.GET)
public String getDynamicForm(HttpServletRequest request, HttpServletResponse response, Model model,
@PathVariable Map<String, String> pathVars,
@PathVariable("propertyName") String propertyName,
@RequestParam("propertyTypeId") String propertyTypeId) throws Exception {
DynamicEntityFormInfo info = new DynamicEntityFormInfo()
.withCeilingClassName(StructuredContentType.class.getName())
.withCriteriaName("constructForm")
.withPropertyName(propertyName)
.withPropertyValue(propertyTypeId);
return super.getDynamicForm(request, response, model, pathVars, info);
}
}
| 1 (no label)
|
admin_broadleaf-contentmanagement-module_src_main_java_org_broadleafcommerce_cms_admin_web_controller_AdminStructuredContentController.java
|
335 |
new Thread() {
public void run() {
if (!map.tryLock("key1")) {
latch.countDown();
}
try {
if (map.tryLock("key1", 5, TimeUnit.SECONDS)) {
latch.countDown();
}
} catch (InterruptedException e) {
e.printStackTrace();
}
}
}.start();
| 0 (true)
|
hazelcast-client_src_test_java_com_hazelcast_client_map_ClientMapTest.java
|
343 |
public class NodesShutdownRequest extends MasterNodeOperationRequest<NodesShutdownRequest> {
String[] nodesIds = Strings.EMPTY_ARRAY;
TimeValue delay = TimeValue.timeValueSeconds(1);
boolean exit = true;
NodesShutdownRequest() {
}
public NodesShutdownRequest(String... nodesIds) {
this.nodesIds = nodesIds;
}
public NodesShutdownRequest nodesIds(String... nodesIds) {
this.nodesIds = nodesIds;
return this;
}
/**
* The delay for the shutdown to occur. Defaults to <tt>1s</tt>.
*/
public NodesShutdownRequest delay(TimeValue delay) {
this.delay = delay;
return this;
}
public TimeValue delay() {
return this.delay;
}
/**
* The delay for the shutdown to occur. Defaults to <tt>1s</tt>.
*/
public NodesShutdownRequest delay(String delay) {
return delay(TimeValue.parseTimeValue(delay, null));
}
/**
* Should the JVM be exited as well or not. Defaults to <tt>true</tt>.
*/
public NodesShutdownRequest exit(boolean exit) {
this.exit = exit;
return this;
}
/**
* Should the JVM be exited as well or not. Defaults to <tt>true</tt>.
*/
public boolean exit() {
return exit;
}
@Override
public ActionRequestValidationException validate() {
return null;
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
delay = readTimeValue(in);
nodesIds = in.readStringArray();
exit = in.readBoolean();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
delay.writeTo(out);
out.writeStringArrayNullable(nodesIds);
out.writeBoolean(exit);
}
}
| 0 (true)
|
src_main_java_org_elasticsearch_action_admin_cluster_node_shutdown_NodesShutdownRequest.java
|
184 |
public class OMultiKey {
private final Collection<?> keys;
private final int hash;
public OMultiKey(final Collection<?> keys) {
this.keys = new ArrayList<Object>(keys);
hash = generateHashCode(keys);
}
private int generateHashCode(final Collection<?> objects) {
int total = 0;
for (final Object object : objects) {
total ^= object.hashCode();
}
return total;
}
/**
* {@inheritDoc}
*/
@Override
public int hashCode() {
return hash;
}
/**
* Objects are equal if they contain the same number of keys and those keys are equal.
* Order of keys does not matter.
*
* @param o the reference object with which to compare.
* @return <code>true</code> if this object is the same as the obj
* argument; <code>false</code> otherwise.
*/
@Override
public boolean equals(final Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
final OMultiKey oMultiKey = (OMultiKey) o;
if(keys.size() != oMultiKey.keys.size())
return false;
for (final Object inKey : keys) {
if (!oMultiKey.keys.contains(inKey))
return false;
}
return true;
}
/**
* {@inheritDoc}
*/
@Override
public String toString() {
return "OMultiKey " + keys + "";
}
}
| 0 (true)
|
commons_src_main_java_com_orientechnologies_common_util_OMultiKey.java
|
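OMultiKey hashes by XOR-ing the member keys' hash codes, and XOR is commutative and associative, so the hash (like equals) is insensitive to key order. A minimal sketch of that property; the demo class and values are illustrative:

import java.util.List;

public final class MultiKeyHashDemo {
    // XOR-combined hash as in OMultiKey.generateHashCode: key order
    // cannot change the result.
    static int hash(Iterable<?> keys) {
        int total = 0;
        for (Object key : keys) {
            total ^= key.hashCode();
        }
        return total;
    }

    public static void main(String[] args) {
        System.out.println(hash(List.of("a", "b")) == hash(List.of("b", "a"))); // true
    }
}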
1,052 |
Collections.sort(indexSearchResults, new Comparator<OIndexSearchResult>() {
public int compare(final OIndexSearchResult searchResultOne, final OIndexSearchResult searchResultTwo) {
return searchResultTwo.getFieldCount() - searchResultOne.getFieldCount();
}
});
| 1 (no label)
|
core_src_main_java_com_orientechnologies_orient_core_sql_OCommandExecutorSQLSelect.java
|
231 |
@Entity
@Table(name = "BLC_MODULE_CONFIGURATION")
@EntityListeners(value = { AuditableListener.class })
@Inheritance(strategy = InheritanceType.JOINED)
@Cache(usage = CacheConcurrencyStrategy.NONSTRICT_READ_WRITE, region = "blStandardElements")
@AdminPresentationClass(excludeFromPolymorphism = true, friendlyName = "AbstractModuleConfiguration")
public abstract class AbstractModuleConfiguration implements ModuleConfiguration, Status {
private static final long serialVersionUID = 1L;
@Id
@GeneratedValue(generator = "ModuleConfigurationId")
@GenericGenerator(
name = "ModuleConfigurationId",
strategy = "org.broadleafcommerce.common.persistence.IdOverrideTableGenerator",
parameters = {
@Parameter(name = "segment_value", value = "ModuleConfigurationImpl"),
@Parameter(name = "entity_name", value = "org.broadleafcommerce.common.config.domain.AbstractModuleConfiguration")
}
)
@Column(name = "MODULE_CONFIG_ID")
protected Long id;
@Column(name = "MODULE_NAME", nullable = false)
@AdminPresentation(friendlyName = "AbstractModuleConfiguration_Module_Name", order = 2000, prominent = true, requiredOverride = RequiredOverride.REQUIRED)
protected String moduleName;
@Column(name = "ACTIVE_START_DATE", nullable = true)
@AdminPresentation(friendlyName = "AbstractModuleConfiguration_Active_Start_Date", order = 3000, prominent = true, fieldType = SupportedFieldType.DATE)
protected Date activeStartDate;
@Column(name = "ACTIVE_END_DATE", nullable = true)
@AdminPresentation(friendlyName = "AbstractModuleConfiguration_Active_End_Date", order = 4000, prominent = true, fieldType = SupportedFieldType.DATE)
protected Date activeEndDate;
@Column(name = "IS_DEFAULT", nullable = false)
@AdminPresentation(friendlyName = "AbstractModuleConfiguration_Is_Default", order = 5000, prominent = true, requiredOverride = RequiredOverride.REQUIRED)
protected Boolean isDefault = false;
@Column(name = "CONFIG_TYPE", nullable = false)
@AdminPresentation(friendlyName = "AbstractModuleConfiguration_Config_Type", order = 1000, prominent = true, fieldType = SupportedFieldType.BROADLEAF_ENUMERATION,
broadleafEnumeration = "org.broadleafcommerce.common.config.service.type.ModuleConfigurationType",
requiredOverride = RequiredOverride.REQUIRED, readOnly = true)
protected String configType;
@Column(name = "MODULE_PRIORITY", nullable = false)
@AdminPresentation(friendlyName = "AbstractModuleConfiguration_Priority",
order = 6000, prominent = true, requiredOverride = RequiredOverride.REQUIRED, tooltip = "AbstractModuleConfiguration_Priority_Tooltip")
protected Integer priority = 100;
@Embedded
protected Auditable auditable = new Auditable();
@Embedded
protected ArchiveStatus archiveStatus = new ArchiveStatus();
@Override
public Long getId() {
return this.id;
}
@Override
public void setId(Long id) {
this.id = id;
}
@Override
public String getModuleName() {
return moduleName;
}
@Override
public void setModuleName(String name) {
this.moduleName = name;
}
@Override
public Boolean getIsDefault() {
if (this.isDefault == null) {
this.isDefault = Boolean.FALSE;
}
return this.isDefault;
}
@Override
public void setIsDefault(Boolean isDefault) {
this.isDefault = isDefault;
}
/**
 * Subclasses of this must set the ModuleConfigurationType in their constructor.
*/
protected void setModuleConfigurationType(ModuleConfigurationType moduleConfigurationType) {
this.configType = moduleConfigurationType.getType();
}
@Override
public ModuleConfigurationType getModuleConfigurationType() {
return ModuleConfigurationType.getInstance(this.configType);
}
@Override
public void setAuditable(Auditable auditable) {
this.auditable = auditable;
}
@Override
public Auditable getAuditable() {
return this.auditable;
}
@Override
public void setArchived(Character archived) {
archiveStatus.setArchived(archived);
}
@Override
public Character getArchived() {
return archiveStatus.getArchived();
}
@Override
public boolean isActive() {
return DateUtil.isActive(activeStartDate, activeEndDate, true) && 'Y' != getArchived();
}
@Override
public void setActiveStartDate(Date startDate) {
this.activeStartDate = startDate;
}
@Override
public Date getActiveStartDate() {
return this.activeStartDate;
}
@Override
public void setActiveEndDate(Date endDate) {
this.activeEndDate = endDate;
}
@Override
public Date getActiveEndDate() {
return this.activeEndDate;
}
@Override
public Integer getPriority() {
return priority;
}
@Override
public void setPriority(Integer priority) {
this.priority = priority;
}
}
| 0true
|
common_src_main_java_org_broadleafcommerce_common_config_domain_AbstractModuleConfiguration.java
|
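Per the constructor contract noted in the entity above, concrete subclasses pin their configuration type at construction time. A hypothetical subclass sketch (the table name and the SITE_MAP enumeration constant are illustrative assumptions, not taken from this excerpt):

@Entity
@Table(name = "BLC_SITE_MAP_CFG")
public class SiteMapConfigurationImpl extends AbstractModuleConfiguration {

    private static final long serialVersionUID = 1L;

    public SiteMapConfigurationImpl() {
        super();
        // Required by the base class contract: fix the config type in the constructor.
        // SITE_MAP is an illustrative ModuleConfigurationType constant.
        setModuleConfigurationType(ModuleConfigurationType.SITE_MAP);
    }
}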
706 |
execute(request, new ActionListener<BulkResponse>() {
@Override
public void onResponse(BulkResponse result) {
try {
channel.sendResponse(result);
} catch (Throwable e) {
onFailure(e);
}
}
@Override
public void onFailure(Throwable e) {
try {
channel.sendResponse(e);
} catch (Exception e1) {
logger.warn("Failed to send error response for action [" + BulkAction.NAME + "] and request [" + request + "]", e1);
}
}
});
| 0true
|
src_main_java_org_elasticsearch_action_bulk_TransportBulkAction.java
|
2,604 |
public class NodesFaultDetection extends AbstractComponent {
public static interface Listener {
void onNodeFailure(DiscoveryNode node, String reason);
}
private final ThreadPool threadPool;
private final TransportService transportService;
private final boolean connectOnNetworkDisconnect;
private final TimeValue pingInterval;
private final TimeValue pingRetryTimeout;
private final int pingRetryCount;
// used mainly for testing, should always be true
private final boolean registerConnectionListener;
private final CopyOnWriteArrayList<Listener> listeners = new CopyOnWriteArrayList<Listener>();
private final ConcurrentMap<DiscoveryNode, NodeFD> nodesFD = newConcurrentMap();
private final FDConnectionListener connectionListener;
private volatile DiscoveryNodes latestNodes = EMPTY_NODES;
private volatile boolean running = false;
public NodesFaultDetection(Settings settings, ThreadPool threadPool, TransportService transportService) {
super(settings);
this.threadPool = threadPool;
this.transportService = transportService;
this.connectOnNetworkDisconnect = componentSettings.getAsBoolean("connect_on_network_disconnect", true);
this.pingInterval = componentSettings.getAsTime("ping_interval", timeValueSeconds(1));
this.pingRetryTimeout = componentSettings.getAsTime("ping_timeout", timeValueSeconds(30));
this.pingRetryCount = componentSettings.getAsInt("ping_retries", 3);
this.registerConnectionListener = componentSettings.getAsBoolean("register_connection_listener", true);
logger.debug("[node ] uses ping_interval [{}], ping_timeout [{}], ping_retries [{}]", pingInterval, pingRetryTimeout, pingRetryCount);
transportService.registerHandler(PingRequestHandler.ACTION, new PingRequestHandler());
this.connectionListener = new FDConnectionListener();
if (registerConnectionListener) {
transportService.addConnectionListener(connectionListener);
}
}
public void addListener(Listener listener) {
listeners.add(listener);
}
public void removeListener(Listener listener) {
listeners.remove(listener);
}
public void updateNodes(DiscoveryNodes nodes) {
DiscoveryNodes prevNodes = latestNodes;
this.latestNodes = nodes;
if (!running) {
return;
}
DiscoveryNodes.Delta delta = nodes.delta(prevNodes);
for (DiscoveryNode newNode : delta.addedNodes()) {
if (newNode.id().equals(nodes.localNodeId())) {
// no need to monitor the local node
continue;
}
if (!nodesFD.containsKey(newNode)) {
nodesFD.put(newNode, new NodeFD());
threadPool.schedule(pingInterval, ThreadPool.Names.SAME, new SendPingRequest(newNode));
}
}
for (DiscoveryNode removedNode : delta.removedNodes()) {
nodesFD.remove(removedNode);
}
}
public NodesFaultDetection start() {
if (running) {
return this;
}
running = true;
return this;
}
public NodesFaultDetection stop() {
if (!running) {
return this;
}
running = false;
return this;
}
public void close() {
stop();
transportService.removeHandler(PingRequestHandler.ACTION);
transportService.removeConnectionListener(connectionListener);
}
private void handleTransportDisconnect(DiscoveryNode node) {
if (!latestNodes.nodeExists(node.id())) {
return;
}
NodeFD nodeFD = nodesFD.remove(node);
if (nodeFD == null) {
return;
}
if (!running) {
return;
}
nodeFD.running = false;
if (connectOnNetworkDisconnect) {
try {
transportService.connectToNode(node);
nodesFD.put(node, new NodeFD());
threadPool.schedule(pingInterval, ThreadPool.Names.SAME, new SendPingRequest(node));
} catch (Exception e) {
logger.trace("[node ] [{}] transport disconnected (with verified connect)", node);
notifyNodeFailure(node, "transport disconnected (with verified connect)");
}
} else {
logger.trace("[node ] [{}] transport disconnected", node);
notifyNodeFailure(node, "transport disconnected");
}
}
private void notifyNodeFailure(final DiscoveryNode node, final String reason) {
threadPool.generic().execute(new Runnable() {
@Override
public void run() {
for (Listener listener : listeners) {
listener.onNodeFailure(node, reason);
}
}
});
}
private class SendPingRequest implements Runnable {
private final DiscoveryNode node;
private SendPingRequest(DiscoveryNode node) {
this.node = node;
}
@Override
public void run() {
if (!running) {
return;
}
transportService.sendRequest(node, PingRequestHandler.ACTION, new PingRequest(node.id()), options().withType(TransportRequestOptions.Type.PING).withTimeout(pingRetryTimeout),
new BaseTransportResponseHandler<PingResponse>() {
@Override
public PingResponse newInstance() {
return new PingResponse();
}
@Override
public void handleResponse(PingResponse response) {
if (!running) {
return;
}
NodeFD nodeFD = nodesFD.get(node);
if (nodeFD != null) {
if (!nodeFD.running) {
return;
}
nodeFD.retryCount = 0;
threadPool.schedule(pingInterval, ThreadPool.Names.SAME, SendPingRequest.this);
}
}
@Override
public void handleException(TransportException exp) {
                                // make sure fault detection is still running before handling the ping failure
if (!running) {
return;
}
if (exp instanceof ConnectTransportException) {
// ignore this one, we already handle it by registering a connection listener
return;
}
NodeFD nodeFD = nodesFD.get(node);
if (nodeFD != null) {
if (!nodeFD.running) {
return;
}
int retryCount = ++nodeFD.retryCount;
logger.trace("[node ] failed to ping [{}], retry [{}] out of [{}]", exp, node, retryCount, pingRetryCount);
if (retryCount >= pingRetryCount) {
logger.debug("[node ] failed to ping [{}], tried [{}] times, each with maximum [{}] timeout", node, pingRetryCount, pingRetryTimeout);
// not good, failure
if (nodesFD.remove(node) != null) {
notifyNodeFailure(node, "failed to ping, tried [" + pingRetryCount + "] times, each with maximum [" + pingRetryTimeout + "] timeout");
}
} else {
// resend the request, not reschedule, rely on send timeout
transportService.sendRequest(node, PingRequestHandler.ACTION, new PingRequest(node.id()),
options().withType(TransportRequestOptions.Type.PING).withTimeout(pingRetryTimeout), this);
}
}
}
@Override
public String executor() {
return ThreadPool.Names.SAME;
}
});
}
}
static class NodeFD {
volatile int retryCount;
volatile boolean running = true;
}
private class FDConnectionListener implements TransportConnectionListener {
@Override
public void onNodeConnected(DiscoveryNode node) {
}
@Override
public void onNodeDisconnected(DiscoveryNode node) {
handleTransportDisconnect(node);
}
}
class PingRequestHandler extends BaseTransportRequestHandler<PingRequest> {
public static final String ACTION = "discovery/zen/fd/ping";
@Override
public PingRequest newInstance() {
return new PingRequest();
}
@Override
public void messageReceived(PingRequest request, TransportChannel channel) throws Exception {
// if we are not the node we are supposed to be pinged, send an exception
// this can happen when a kill -9 is sent, and another node is started using the same port
if (!latestNodes.localNodeId().equals(request.nodeId)) {
throw new ElasticsearchIllegalStateException("Got pinged as node [" + request.nodeId + "], but I am node [" + latestNodes.localNodeId() + "]");
}
channel.sendResponse(new PingResponse());
}
@Override
public String executor() {
return ThreadPool.Names.SAME;
}
}
static class PingRequest extends TransportRequest {
// the (assumed) node id we are pinging
private String nodeId;
PingRequest() {
}
PingRequest(String nodeId) {
this.nodeId = nodeId;
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
nodeId = in.readString();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeString(nodeId);
}
}
private static class PingResponse extends TransportResponse {
private PingResponse() {
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
}
}
}
| 1no label
|
src_main_java_org_elasticsearch_discovery_zen_fd_NodesFaultDetection.java
|
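Because NodesFaultDetection exposes the Listener interface defined at the top of the class, consuming failure events is a small amount of wiring. A sketch, assuming a nodesFaultDetection instance and an ESLogger named logger are in scope:

nodesFaultDetection.addListener(new NodesFaultDetection.Listener() {
    @Override
    public void onNodeFailure(DiscoveryNode node, String reason) {
        // Runs on the generic thread pool, per notifyNodeFailure above.
        logger.warn("node [{}] marked as failed: [{}]", node, reason);
    }
});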
231 |
PostingsHighlighter highlighter = new PostingsHighlighter() {
@Override
protected char getMultiValuedSeparator(String field) {
assert field.equals("body");
return '\u2029';
}
};
| 0true
|
src_test_java_org_apache_lucene_search_postingshighlight_XPostingsHighlighterTests.java
|
58 |
@SuppressWarnings("serial")
static final class ForEachTransformedMappingTask<K,V,U>
extends BulkTask<K,V,Void> {
final BiFun<? super K, ? super V, ? extends U> transformer;
final Action<? super U> action;
ForEachTransformedMappingTask
(BulkTask<K,V,?> p, int b, int i, int f, Node<K,V>[] t,
BiFun<? super K, ? super V, ? extends U> transformer,
Action<? super U> action) {
super(p, b, i, f, t);
this.transformer = transformer; this.action = action;
}
public final void compute() {
final BiFun<? super K, ? super V, ? extends U> transformer;
final Action<? super U> action;
if ((transformer = this.transformer) != null &&
(action = this.action) != null) {
for (int i = baseIndex, f, h; batch > 0 &&
(h = ((f = baseLimit) + i) >>> 1) > i;) {
addToPendingCount(1);
new ForEachTransformedMappingTask<K,V,U>
(this, batch >>>= 1, baseLimit = h, f, tab,
transformer, action).fork();
}
for (Node<K,V> p; (p = advance()) != null; ) {
U u;
if ((u = transformer.apply(p.key, p.val)) != null)
action.apply(u);
}
propagateCompletion();
}
}
}
| 0true
|
src_main_java_jsr166e_ConcurrentHashMapV8.java
|
900 |
threadPool.executor(ThreadPool.Names.SEARCH).execute(new Runnable() {
@Override
public void run() {
performFirstPhase(fShardIndex, shardIt);
}
});
| 1no label
|
src_main_java_org_elasticsearch_action_search_type_TransportSearchTypeAction.java
|
3,841 |
public class GeoDistanceFilterParser implements FilterParser {
public static final String NAME = "geo_distance";
@Inject
public GeoDistanceFilterParser() {
}
@Override
public String[] names() {
return new String[]{NAME, "geoDistance"};
}
@Override
public Filter parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
XContentParser parser = parseContext.parser();
XContentParser.Token token;
boolean cache = false;
CacheKeyFilter.Key cacheKey = null;
String filterName = null;
String currentFieldName = null;
GeoPoint point = new GeoPoint();
String fieldName = null;
double distance = 0;
Object vDistance = null;
DistanceUnit unit = DistanceUnit.DEFAULT;
GeoDistance geoDistance = GeoDistance.DEFAULT;
String optimizeBbox = "memory";
boolean normalizeLon = true;
boolean normalizeLat = true;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.START_ARRAY) {
fieldName = currentFieldName;
GeoPoint.parse(parser, point);
} else if (token == XContentParser.Token.START_OBJECT) {
// the json in the format of -> field : { lat : 30, lon : 12 }
String currentName = parser.currentName();
fieldName = currentFieldName;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentName = parser.currentName();
} else if (token.isValue()) {
if (currentName.equals(GeoPointFieldMapper.Names.LAT)) {
point.resetLat(parser.doubleValue());
} else if (currentName.equals(GeoPointFieldMapper.Names.LON)) {
point.resetLon(parser.doubleValue());
} else if (currentName.equals(GeoPointFieldMapper.Names.GEOHASH)) {
GeoHashUtils.decode(parser.text(), point);
} else {
throw new QueryParsingException(parseContext.index(), "[geo_distance] filter does not support [" + currentFieldName + "]");
}
}
}
} else if (token.isValue()) {
if (currentFieldName.equals("distance")) {
if (token == XContentParser.Token.VALUE_STRING) {
vDistance = parser.text(); // a String
} else {
vDistance = parser.numberValue(); // a Number
}
} else if (currentFieldName.equals("unit")) {
unit = DistanceUnit.fromString(parser.text());
} else if (currentFieldName.equals("distance_type") || currentFieldName.equals("distanceType")) {
geoDistance = GeoDistance.fromString(parser.text());
} else if (currentFieldName.endsWith(GeoPointFieldMapper.Names.LAT_SUFFIX)) {
point.resetLat(parser.doubleValue());
fieldName = currentFieldName.substring(0, currentFieldName.length() - GeoPointFieldMapper.Names.LAT_SUFFIX.length());
} else if (currentFieldName.endsWith(GeoPointFieldMapper.Names.LON_SUFFIX)) {
point.resetLon(parser.doubleValue());
fieldName = currentFieldName.substring(0, currentFieldName.length() - GeoPointFieldMapper.Names.LON_SUFFIX.length());
} else if (currentFieldName.endsWith(GeoPointFieldMapper.Names.GEOHASH_SUFFIX)) {
GeoHashUtils.decode(parser.text(), point);
fieldName = currentFieldName.substring(0, currentFieldName.length() - GeoPointFieldMapper.Names.GEOHASH_SUFFIX.length());
} else if ("_name".equals(currentFieldName)) {
filterName = parser.text();
} else if ("_cache".equals(currentFieldName)) {
cache = parser.booleanValue();
} else if ("_cache_key".equals(currentFieldName) || "_cacheKey".equals(currentFieldName)) {
cacheKey = new CacheKeyFilter.Key(parser.text());
} else if ("optimize_bbox".equals(currentFieldName) || "optimizeBbox".equals(currentFieldName)) {
optimizeBbox = parser.textOrNull();
} else if ("normalize".equals(currentFieldName)) {
normalizeLat = parser.booleanValue();
normalizeLon = parser.booleanValue();
} else {
point.resetFromString(parser.text());
fieldName = currentFieldName;
}
}
}
if (vDistance instanceof Number) {
distance = DistanceUnit.DEFAULT.convert(((Number) vDistance).doubleValue(), unit);
} else {
distance = DistanceUnit.parse((String) vDistance, unit, DistanceUnit.DEFAULT);
}
distance = geoDistance.normalize(distance, DistanceUnit.DEFAULT);
if (normalizeLat || normalizeLon) {
GeoUtils.normalizePoint(point, normalizeLat, normalizeLon);
}
MapperService.SmartNameFieldMappers smartMappers = parseContext.smartFieldMappers(fieldName);
if (smartMappers == null || !smartMappers.hasMapper()) {
throw new QueryParsingException(parseContext.index(), "failed to find geo_point field [" + fieldName + "]");
}
FieldMapper<?> mapper = smartMappers.mapper();
if (!(mapper instanceof GeoPointFieldMapper)) {
throw new QueryParsingException(parseContext.index(), "field [" + fieldName + "] is not a geo_point field");
}
GeoPointFieldMapper geoMapper = ((GeoPointFieldMapper) mapper);
IndexGeoPointFieldData<?> indexFieldData = parseContext.fieldData().getForField(mapper);
Filter filter = new GeoDistanceFilter(point.lat(), point.lon(), distance, geoDistance, indexFieldData, geoMapper, optimizeBbox);
if (cache) {
filter = parseContext.cacheFilter(filter, cacheKey);
}
filter = wrapSmartNameFilter(filter, smartMappers, parseContext);
if (filterName != null) {
parseContext.addNamedFilter(filterName, filter);
}
return filter;
}
}
| 1no label
|
src_main_java_org_elasticsearch_index_query_GeoDistanceFilterParser.java
|
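The parser above accepts the distance either as a bare number, interpreted in the unit field, or as a string such as "12km" that carries its own unit. A sketch isolating that resolution step, mirroring the vDistance handling shown (DistanceUnit is Elasticsearch's, as used by the parser):

// Numbers are converted from 'unit'; strings parse their embedded unit,
// falling back to 'unit' when none is present.
static double resolveDistance(Object vDistance, DistanceUnit unit) {
    if (vDistance instanceof Number) {
        return DistanceUnit.DEFAULT.convert(((Number) vDistance).doubleValue(), unit);
    }
    return DistanceUnit.parse((String) vDistance, unit, DistanceUnit.DEFAULT);
}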
14 |
final class DescendingKeyIterator extends AbstractEntryIterator<K, V, K> {
DescendingKeyIterator(final OMVRBTreeEntry<K, V> first) {
super(first);
}
public K next() {
return prevEntry().getKey();
}
}
| 0true
|
commons_src_main_java_com_orientechnologies_common_collection_OMVRBTree.java
|
83 |
final Map<Method, Object> consoleMethods = new TreeMap<Method, Object>(new Comparator<Method>() {
public int compare(Method o1, Method o2) {
int res = o1.getName().compareTo(o2.getName());
if (res == 0)
res = o1.toString().compareTo(o2.toString());
return res;
}
});
| 0true
|
commons_src_main_java_com_orientechnologies_common_console_OConsoleApplication.java
|
6,228 |
protected static class MavenMessageBuilder extends ReproduceErrorMessageBuilder {
public MavenMessageBuilder(StringBuilder b) {
super(b);
}
@Override
public ReproduceErrorMessageBuilder appendAllOpts(Description description) {
super.appendAllOpts(description);
return appendESProperties();
}
/**
* Append a single VM option.
*/
@Override
public ReproduceErrorMessageBuilder appendOpt(String sysPropName, String value) {
if (sysPropName.equals(SYSPROP_ITERATIONS())) { // we don't want the iters to be in there!
return this;
}
if (Strings.hasLength(value)) {
return super.appendOpt(sysPropName, value);
}
return this;
}
public ReproduceErrorMessageBuilder appendESProperties() {
appendProperties("es.logger.level", "es.node.mode", "es.node.local", TestCluster.TESTS_ENABLE_MOCK_MODULES,
"tests.assertion.disabled", "tests.security.manager");
if (System.getProperty("tests.jvm.argline") != null && !System.getProperty("tests.jvm.argline").isEmpty()) {
appendOpt("tests.jvm.argline", "\"" + System.getProperty("tests.jvm.argline") + "\"");
}
return this;
}
protected ReproduceErrorMessageBuilder appendProperties(String... properties) {
for (String sysPropName : properties) {
if (Strings.hasLength(System.getProperty(sysPropName))) {
appendOpt(sysPropName, System.getProperty(sysPropName));
}
}
return this;
}
}
| 1no label
|
src_test_java_org_elasticsearch_test_junit_listeners_ReproduceInfoPrinter.java
|
299 |
public class ServiceException extends Exception {
private static final long serialVersionUID = -7084792578727995587L;
// for serialization purposes
protected ServiceException() {
super();
}
public ServiceException(String message, Throwable cause) {
super(message, cause);
}
public ServiceException(String message) {
super(message);
}
public ServiceException(Throwable cause) {
super(cause);
}
/**
 * Checks whether any cause in the chain of exceptions that led to this ServiceException is an instance
 * of the given class.
 *
 * @param clazz the cause type to look for
 * @return whether or not this exception's causes include the given class.
*/
public boolean containsCause(Class<? extends Throwable> clazz) {
        Throwable current = this;
        do {
            if (clazz.isAssignableFrom(current.getClass())) {
                return true;
            }
            current = current.getCause();
        } while (current != null); // walk the entire chain, including the root cause, without risking an NPE
return false;
}
}
| 1no label
|
common_src_main_java_org_broadleafcommerce_common_exception_ServiceException.java
|
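A usage sketch for containsCause above; the wrapped exception types are illustrative. It walks a three-deep chain and matches the root cause:

try {
    throw new ServiceException("checkout failed",
            new RuntimeException(new java.sql.SQLException("connection refused")));
} catch (ServiceException e) {
    // Matches the SQLException at the bottom of the cause chain.
    if (e.containsCause(java.sql.SQLException.class)) {
        System.out.println("database problem detected");
    }
}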
1,473 |
public class OSQLFunctionInV extends OSQLFunctionMove {
public static final String NAME = "inV";
public OSQLFunctionInV() {
super(NAME, 0, 1);
}
@Override
protected Object move(final OrientBaseGraph graph, final OIdentifiable iRecord, final String[] iLabels) {
return e2v(graph, iRecord, Direction.IN, iLabels);
}
}
| 1no label
|
graphdb_src_main_java_com_orientechnologies_orient_graph_sql_functions_OSQLFunctionInV.java
|
106 |
static final class ValueSpliterator<K,V> extends Traverser<K,V>
implements ConcurrentHashMapSpliterator<V> {
long est; // size estimate
ValueSpliterator(Node<K,V>[] tab, int size, int index, int limit,
long est) {
super(tab, size, index, limit);
this.est = est;
}
public ConcurrentHashMapSpliterator<V> trySplit() {
int i, f, h;
return (h = ((i = baseIndex) + (f = baseLimit)) >>> 1) <= i ? null :
new ValueSpliterator<K,V>(tab, baseSize, baseLimit = h,
f, est >>>= 1);
}
public void forEachRemaining(Action<? super V> action) {
if (action == null) throw new NullPointerException();
for (Node<K,V> p; (p = advance()) != null;)
action.apply(p.val);
}
public boolean tryAdvance(Action<? super V> action) {
if (action == null) throw new NullPointerException();
Node<K,V> p;
if ((p = advance()) == null)
return false;
action.apply(p.val);
return true;
}
public long estimateSize() { return est; }
}
| 0true
|
src_main_java_jsr166e_ConcurrentHashMapV8.java
|
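The trySplit above computes its midpoint as ((i + f) >>> 1). That unsigned shift is the standard overflow-safe midpoint idiom for non-negative ints; a self-contained illustration, with values chosen to force the overflow:

public class MidpointDemo {
    public static void main(String[] args) {
        int lo = 2_000_000_000, hi = 2_100_000_000;
        System.out.println((lo + hi) / 2);   // -97483648: the signed sum wrapped around
        System.out.println((lo + hi) >>> 1); // 2050000000: unsigned shift recovers the midpoint
    }
}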
1,763 |
public class GeoHashUtils {
private static final char[] BASE_32 = {'0', '1', '2', '3', '4', '5', '6',
'7', '8', '9', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'j', 'k', 'm', 'n',
'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z'};
public static final int PRECISION = 12;
private static final int[] BITS = {16, 8, 4, 2, 1};
private GeoHashUtils() {
}
public static String encode(double latitude, double longitude) {
return encode(latitude, longitude, PRECISION);
}
/**
* Encodes the given latitude and longitude into a geohash
*
* @param latitude Latitude to encode
     * @param longitude Longitude to encode
     * @param precision Number of characters in the resulting geohash
     * @return Geohash encoding of the longitude and latitude
*/
public static String encode(double latitude, double longitude, int precision) {
// double[] latInterval = {-90.0, 90.0};
// double[] lngInterval = {-180.0, 180.0};
double latInterval0 = -90.0;
double latInterval1 = 90.0;
double lngInterval0 = -180.0;
double lngInterval1 = 180.0;
final StringBuilder geohash = new StringBuilder();
boolean isEven = true;
int bit = 0;
int ch = 0;
while (geohash.length() < precision) {
double mid = 0.0;
if (isEven) {
// mid = (lngInterval[0] + lngInterval[1]) / 2D;
mid = (lngInterval0 + lngInterval1) / 2D;
if (longitude > mid) {
ch |= BITS[bit];
// lngInterval[0] = mid;
lngInterval0 = mid;
} else {
// lngInterval[1] = mid;
lngInterval1 = mid;
}
} else {
// mid = (latInterval[0] + latInterval[1]) / 2D;
mid = (latInterval0 + latInterval1) / 2D;
if (latitude > mid) {
ch |= BITS[bit];
// latInterval[0] = mid;
latInterval0 = mid;
} else {
// latInterval[1] = mid;
latInterval1 = mid;
}
}
isEven = !isEven;
if (bit < 4) {
bit++;
} else {
geohash.append(BASE_32[ch]);
bit = 0;
ch = 0;
}
}
return geohash.toString();
}
private static final char encode(int x, int y) {
return BASE_32[((x & 1) + ((y & 1) * 2) + ((x & 2) * 2) + ((y & 2) * 4) + ((x & 4) * 4)) % 32];
}
/**
* Calculate all neighbors of a given geohash cell.
*
     * @param geohash Geohash of the defined cell
* @return geohashes of all neighbor cells
*/
public static List<String> neighbors(String geohash) {
return addNeighbors(geohash, geohash.length(), new ArrayList<String>(8));
}
/**
* Calculate the geohash of a neighbor of a geohash
*
* @param geohash the geohash of a cell
* @param level level of the geohash
* @param dx delta of the first grid coordinate (must be -1, 0 or +1)
* @param dy delta of the second grid coordinate (must be -1, 0 or +1)
* @return geohash of the defined cell
*/
private final static String neighbor(String geohash, int level, int dx, int dy) {
int cell = decode(geohash.charAt(level - 1));
// Decoding the Geohash bit pattern to determine grid coordinates
int x0 = cell & 1; // first bit of x
int y0 = cell & 2; // first bit of y
int x1 = cell & 4; // second bit of x
int y1 = cell & 8; // second bit of y
int x2 = cell & 16; // third bit of x
        // combine the bit pattern to grid coordinates.
        // note that the roles of x and y swap
        // at each level
int x = x0 + (x1 / 2) + (x2 / 4);
int y = (y0 / 2) + (y1 / 4);
if (level == 1) {
// Root cells at north (namely "bcfguvyz") or at
// south (namely "0145hjnp") do not have neighbors
// in north/south direction
if ((dy < 0 && y == 0) || (dy > 0 && y == 3)) {
return null;
} else {
return Character.toString(encode(x + dx, y + dy));
}
} else {
// define grid coordinates for next level
final int nx = ((level % 2) == 1) ? (x + dx) : (x + dy);
final int ny = ((level % 2) == 1) ? (y + dy) : (y + dx);
// define grid limits for current level
final int xLimit = ((level % 2) == 0) ? 7 : 3;
final int yLimit = ((level % 2) == 0) ? 3 : 7;
            // if the defined neighbor has the same parent as the current cell
            // encode the cell directly. Otherwise find the cell next to this
            // cell recursively. Since encoding wraps around within a cell
            // it can be encoded here.
            if (nx >= 0 && nx <= xLimit && ny >= 0 && ny <= yLimit) {
return geohash.substring(0, level - 1) + encode(nx, ny);
} else {
String neighbor = neighbor(geohash, level - 1, dx, dy);
if(neighbor != null) {
return neighbor + encode(nx, ny);
} else {
return null;
}
}
}
}
/**
* Add all geohashes of the cells next to a given geohash to a list.
*
* @param geohash Geohash of a specified cell
* @param length level of the given geohash
* @param neighbors list to add the neighbors to
* @return the given list
*/
private static final List<String> addNeighbors(String geohash, int length, List<String> neighbors) {
String south = neighbor(geohash, length, 0, -1);
String north = neighbor(geohash, length, 0, +1);
if (north != null) {
neighbors.add(neighbor(north, length, -1, 0));
neighbors.add(north);
neighbors.add(neighbor(north, length, +1, 0));
}
neighbors.add(neighbor(geohash, length, -1, 0));
neighbors.add(neighbor(geohash, length, +1, 0));
if (south != null) {
neighbors.add(neighbor(south, length, -1, 0));
neighbors.add(south);
neighbors.add(neighbor(south, length, +1, 0));
}
return neighbors;
}
private static final int decode(char geo) {
switch (geo) {
case '0':
return 0;
case '1':
return 1;
case '2':
return 2;
case '3':
return 3;
case '4':
return 4;
case '5':
return 5;
case '6':
return 6;
case '7':
return 7;
case '8':
return 8;
case '9':
return 9;
case 'b':
return 10;
case 'c':
return 11;
case 'd':
return 12;
case 'e':
return 13;
case 'f':
return 14;
case 'g':
return 15;
case 'h':
return 16;
case 'j':
return 17;
case 'k':
return 18;
case 'm':
return 19;
case 'n':
return 20;
case 'p':
return 21;
case 'q':
return 22;
case 'r':
return 23;
case 's':
return 24;
case 't':
return 25;
case 'u':
return 26;
case 'v':
return 27;
case 'w':
return 28;
case 'x':
return 29;
case 'y':
return 30;
case 'z':
return 31;
default:
throw new ElasticsearchIllegalArgumentException("the character '" + geo + "' is not a valid geohash character");
}
}
/**
* Decodes the given geohash
*
     * @param geohash Geohash to decode
     * @return {@link GeoPoint} at the center of the cell defined by the geohash
*/
public static GeoPoint decode(String geohash) {
return decode(geohash, new GeoPoint());
}
/**
     * Decodes the given geohash into a latitude and longitude
     *
     * @param geohash Geohash to decode
     * @param ret     {@link GeoPoint} to receive the decoded coordinates
     * @return the given {@link GeoPoint} reset to the center of the
     *         cell defined by the geohash
*/
public static GeoPoint decode(String geohash, GeoPoint ret) {
double[] interval = decodeCell(geohash);
return ret.reset((interval[0] + interval[1]) / 2D, (interval[2] + interval[3]) / 2D);
}
/**
     * Decodes the given geohash into a geohash cell defined by the points northWest and southEast
     *
     * @param geohash   Geohash to decode
* @param northWest the point north/west of the cell
* @param southEast the point south/east of the cell
*/
public static void decodeCell(String geohash, GeoPoint northWest, GeoPoint southEast) {
double[] interval = decodeCell(geohash);
northWest.reset(interval[1], interval[2]);
southEast.reset(interval[0], interval[3]);
}
private static double[] decodeCell(String geohash) {
double[] interval = {-90.0, 90.0, -180.0, 180.0};
boolean isEven = true;
for (int i = 0; i < geohash.length(); i++) {
final int cd = decode(geohash.charAt(i));
for (int mask : BITS) {
if (isEven) {
if ((cd & mask) != 0) {
interval[2] = (interval[2] + interval[3]) / 2D;
} else {
interval[3] = (interval[2] + interval[3]) / 2D;
}
} else {
if ((cd & mask) != 0) {
interval[0] = (interval[0] + interval[1]) / 2D;
} else {
interval[1] = (interval[0] + interval[1]) / 2D;
}
}
isEven = !isEven;
}
}
return interval;
}
//========== long-based encodings for geohashes ========================================
/**
* Encodes latitude and longitude information into a single long with variable precision.
* Up to 12 levels of precision are supported which should offer sub-metre resolution.
*
     * @param latitude  Latitude to encode
     * @param longitude Longitude to encode
* @param precision The required precision between 1 and 12
* @return A single long where 4 bits are used for holding the precision and the remaining
* 60 bits are reserved for 5 bit cell identifiers giving up to 12 layers.
*/
public static long encodeAsLong(double latitude, double longitude, int precision) {
        if (precision > 12 || precision < 1) {
            throw new ElasticsearchIllegalArgumentException("Illegal precision length of " + precision +
                    ". Long-based geohashes only support precisions between 1 and 12");
        }
double latInterval0 = -90.0;
double latInterval1 = 90.0;
double lngInterval0 = -180.0;
double lngInterval1 = 180.0;
        long geohash = 0L;
boolean isEven = true;
int bit = 0;
int ch = 0;
        int geohashLength = 0;
while (geohashLength < precision) {
double mid = 0.0;
if (isEven) {
mid = (lngInterval0 + lngInterval1) / 2D;
if (longitude > mid) {
ch |= BITS[bit];
lngInterval0 = mid;
} else {
lngInterval1 = mid;
}
} else {
mid = (latInterval0 + latInterval1) / 2D;
if (latitude > mid) {
ch |= BITS[bit];
latInterval0 = mid;
} else {
latInterval1 = mid;
}
}
isEven = !isEven;
if (bit < 4) {
bit++;
            } else {
                geohashLength++;
                geohash |= ch;
                if (geohashLength < precision) {
                    geohash <<= 5;
                }
                bit = 0;
                ch = 0;
            }
        }
        geohash <<= 4;
        geohash |= precision;
        return geohash;
}
/**
* Formats a geohash held as a long as a more conventional
* String-based geohash
* @param geohashAsLong a geohash encoded as a long
* @return A traditional base32-based String representation of a geohash
*/
    public static String toString(long geohashAsLong) {
        int precision = (int) (geohashAsLong & 15);
        char[] chars = new char[precision];
        geohashAsLong >>= 4;
        for (int i = precision - 1; i >= 0; i--) {
            chars[i] = BASE_32[(int) (geohashAsLong & 31)];
            geohashAsLong >>= 5;
        }
        return new String(chars);
    }
public static GeoPoint decode(long geohash) {
GeoPoint point = new GeoPoint();
decode(geohash, point);
return point;
}
/**
* Decodes the given long-format geohash into a latitude and longitude
*
* @param geohash long format Geohash to decode
* @param ret The Geopoint into which the latitude and longitude will be stored
*/
public static void decode(long geohash, GeoPoint ret) {
double[] interval = decodeCell(geohash);
ret.reset((interval[0] + interval[1]) / 2D, (interval[2] + interval[3]) / 2D);
}
    private static double[] decodeCell(long geohash) {
        double[] interval = {-90.0, 90.0, -180.0, 180.0};
        boolean isEven = true;
        int precision = (int) (geohash & 15);
        geohash >>= 4;
        int[] cds = new int[precision];
        for (int i = precision - 1; i >= 0; i--) {
            cds[i] = (int) (geohash & 31);
            geohash >>= 5;
        }
        for (int i = 0; i < cds.length; i++) {
final int cd = cds[i];
for (int mask : BITS) {
if (isEven) {
if ((cd & mask) != 0) {
interval[2] = (interval[2] + interval[3]) / 2D;
} else {
interval[3] = (interval[2] + interval[3]) / 2D;
}
} else {
if ((cd & mask) != 0) {
interval[0] = (interval[0] + interval[1]) / 2D;
} else {
interval[1] = (interval[0] + interval[1]) / 2D;
}
}
isEven = !isEven;
}
}
return interval;
}
}
| 1no label
|
src_main_java_org_elasticsearch_common_geo_GeoHashUtils.java
|
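A round-trip sketch for the string and long encodings defined above; the coordinates are illustrative. The long form packs the same 5-bit cells plus 4 precision bits, so converting it back to a string is expected to reproduce the plain encoding:

String hash = GeoHashUtils.encode(52.3738, 4.8910, 8);       // 8-character geohash
GeoPoint center = GeoHashUtils.decode(hash);                 // center of that cell

long packed = GeoHashUtils.encodeAsLong(52.3738, 4.8910, 8); // 4 precision bits + 5-bit cells
System.out.println(GeoHashUtils.toString(packed).equals(hash)); // expected: true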
165 |
public class RemoveDistributedObjectListenerRequest extends BaseClientRemoveListenerRequest {
public static final String CLEAR_LISTENERS_COMMAND = "clear-all-listeners";
public RemoveDistributedObjectListenerRequest() {
}
public RemoveDistributedObjectListenerRequest(String registrationId) {
super(null, registrationId);
}
@Override
public Object call() throws Exception {
        // CLEAR_LISTENERS_COMMAND is a special registration name that removes every listener registered by this endpoint
if (CLEAR_LISTENERS_COMMAND.equals(name)) {
endpoint.clearAllListeners();
return true;
}
return clientEngine.getProxyService().removeProxyListener(registrationId);
}
@Override
public String getServiceName() {
return null;
}
@Override
public int getFactoryId() {
return ClientPortableHook.ID;
}
@Override
public int getClassId() {
return ClientPortableHook.REMOVE_LISTENER;
}
@Override
public Permission getRequiredPermission() {
return null;
}
}
| 0true
|
hazelcast_src_main_java_com_hazelcast_client_RemoveDistributedObjectListenerRequest.java
|
564 |
public abstract class AbstractJoiner implements Joiner {
private final AtomicLong joinStartTime = new AtomicLong(Clock.currentTimeMillis());
private final AtomicInteger tryCount = new AtomicInteger(0);
protected final Config config;
protected final Node node;
protected final ILogger logger;
protected final SystemLogService systemLogService;
private volatile Address targetAddress;
public AbstractJoiner(Node node) {
this.node = node;
this.systemLogService = node.getSystemLogService();
this.logger = node.loggingService.getLogger(this.getClass().getName());
this.config = node.config;
}
public abstract void doJoin(AtomicBoolean joined);
@Override
public void join(AtomicBoolean joined) {
doJoin(joined);
postJoin();
}
private void postJoin() {
systemLogService.logJoin("PostJoin master: " + node.getMasterAddress() + ", isMaster: " + node.isMaster());
if (!node.isActive()) {
return;
}
if (tryCount.incrementAndGet() == 5) {
logger.warning("Join try count exceed limit, setting this node as master!");
node.setAsMaster();
}
if (!node.isMaster()) {
boolean allConnected = false;
int checkCount = 0;
final long maxJoinMillis = node.getGroupProperties().MAX_JOIN_SECONDS.getInteger() * 1000;
if (node.joined()) {
systemLogService.logJoin("Waiting for all connections");
while (checkCount++ < node.groupProperties.CONNECT_ALL_WAIT_SECONDS.getInteger() && !allConnected) {
try {
//noinspection BusyWait
Thread.sleep(1000);
} catch (InterruptedException ignored) {
}
Set<Member> members = node.getClusterService().getMembers();
allConnected = true;
for (Member member : members) {
MemberImpl memberImpl = (MemberImpl) member;
if (!memberImpl.localMember() && node.connectionManager.getOrConnect(memberImpl.getAddress()) == null) {
allConnected = false;
systemLogService.logJoin("Not-connected to " + memberImpl.getAddress());
}
}
}
}
if (!node.joined() || !allConnected) {
if (Clock.currentTimeMillis() - getStartTime() < maxJoinMillis) {
logger.warning("Failed to connect, node joined= " + node.joined() + ", allConnected= " +
allConnected + " to all other members after " + checkCount + " seconds.");
logger.warning("Rebooting after 10 seconds.");
try {
Thread.sleep(10000);
node.rejoin();
} catch (InterruptedException e) {
logger.warning(e);
node.shutdown(false);
}
} else {
throw new HazelcastException("Failed to join in " + (maxJoinMillis / 1000) + " seconds!");
}
return;
}
}
if (node.getClusterService().getSize() == 1) {
final StringBuilder sb = new StringBuilder("\n");
sb.append(node.clusterService.membersString());
logger.info(sb.toString());
}
}
protected void failedJoiningToMaster(boolean multicast, int tryCount) {
StringBuilder sb = new StringBuilder();
sb.append("\n");
sb.append("======================================================");
sb.append("\n");
sb.append("Couldn't connect to discovered master! tryCount: ").append(tryCount);
sb.append("\n");
sb.append("address: ").append(node.getThisAddress());
sb.append("\n");
sb.append("masterAddress: ").append(node.getMasterAddress());
sb.append("\n");
sb.append("multicast: ").append(multicast);
sb.append("\n");
sb.append("connection: ").append(node.connectionManager.getConnection(node.getMasterAddress()));
sb.append("\n");
sb.append("======================================================");
sb.append("\n");
throw new IllegalStateException(sb.toString());
}
boolean shouldMerge(JoinMessage joinRequest) {
boolean shouldMerge = false;
if (joinRequest != null) {
boolean validJoinRequest;
try {
try {
validJoinRequest = node.getClusterService().validateJoinMessage(joinRequest);
} catch (Exception e) {
logger.finest(e.getMessage());
validJoinRequest = false;
}
if (validJoinRequest) {
for (Member member : node.getClusterService().getMembers()) {
MemberImpl memberImpl = (MemberImpl) member;
if (memberImpl.getAddress().equals(joinRequest.getAddress())) {
if (logger.isFinestEnabled()) {
logger.finest("Should not merge to " + joinRequest.getAddress()
+ ", because it is already member of this cluster.");
}
return false;
}
}
int currentMemberCount = node.getClusterService().getMembers().size();
if (joinRequest.getMemberCount() > currentMemberCount) {
// I should join the other cluster
logger.info(node.getThisAddress() + " is merging to " + joinRequest.getAddress()
+ ", because : joinRequest.getMemberCount() > currentMemberCount ["
+ (joinRequest.getMemberCount() + " > " + currentMemberCount) + "]");
if (logger.isFinestEnabled()) {
logger.finest(joinRequest.toString());
}
shouldMerge = true;
} else if (joinRequest.getMemberCount() == currentMemberCount) {
// compare the hashes
if (node.getThisAddress().hashCode() > joinRequest.getAddress().hashCode()) {
logger.info(node.getThisAddress() + " is merging to " + joinRequest.getAddress()
+ ", because : node.getThisAddress().hashCode() > joinRequest.address.hashCode() "
+ ", this node member count: " + currentMemberCount);
if (logger.isFinestEnabled()) {
logger.finest(joinRequest.toString());
}
shouldMerge = true;
} else {
if (logger.isFinestEnabled()) {
logger.finest(joinRequest.getAddress() + " should merge to this node "
+ ", because : node.getThisAddress().hashCode() < joinRequest.address.hashCode() "
+ ", this node member count: " + currentMemberCount);
}
}
}
}
} catch (Throwable e) {
logger.severe(e);
return false;
}
}
return shouldMerge;
}
protected void connectAndSendJoinRequest(Collection<Address> colPossibleAddresses) {
for (Address possibleAddress : colPossibleAddresses) {
final Connection conn = node.connectionManager.getOrConnect(possibleAddress);
if (conn != null) {
if (logger.isFinestEnabled()) {
logger.finest("sending join request for " + possibleAddress);
}
node.clusterService.sendJoinRequest(possibleAddress, true);
}
}
}
@Override
public void reset() {
joinStartTime.set(Clock.currentTimeMillis());
tryCount.set(0);
}
protected void startClusterMerge(final Address targetAddress) {
final OperationService operationService = node.nodeEngine.getOperationService();
final Collection<MemberImpl> memberList = node.getClusterService().getMemberList();
final Collection<Future> calls = new ArrayList<Future>();
for (MemberImpl member : memberList) {
if (!member.localMember()) {
Future f = operationService.createInvocationBuilder(ClusterServiceImpl.SERVICE_NAME,
new PrepareMergeOperation(targetAddress), member.getAddress())
.setTryCount(3).invoke();
calls.add(f);
}
}
for (Future f : calls) {
try {
f.get(1, TimeUnit.SECONDS);
} catch (Exception e) {
logger.finest("While waiting merge response...", e);
}
}
final PrepareMergeOperation prepareMergeOperation = new PrepareMergeOperation(targetAddress);
prepareMergeOperation.setNodeEngine(node.nodeEngine).setService(node.getClusterService())
.setResponseHandler(ResponseHandlerFactory.createEmptyResponseHandler());
operationService.runOperationOnCallingThread(prepareMergeOperation);
for (MemberImpl member : memberList) {
if (!member.localMember()) {
operationService.createInvocationBuilder(ClusterServiceImpl.SERVICE_NAME,
new MergeClustersOperation(targetAddress), member.getAddress())
.setTryCount(1).invoke();
}
}
final MergeClustersOperation mergeClustersOperation = new MergeClustersOperation(targetAddress);
mergeClustersOperation.setNodeEngine(node.nodeEngine).setService(node.getClusterService())
.setResponseHandler(ResponseHandlerFactory.createEmptyResponseHandler());
operationService.runOperationOnCallingThread(mergeClustersOperation);
}
@Override
public final long getStartTime() {
return joinStartTime.get();
}
@Override
public void setTargetAddress(Address targetAddress) {
this.targetAddress = targetAddress;
}
public Address getTargetAddress() {
final Address target = targetAddress;
targetAddress = null;
return target;
}
}
| 1no label
|
hazelcast_src_main_java_com_hazelcast_cluster_AbstractJoiner.java
|
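The merge decision in shouldMerge above prefers the larger cluster and, on equal sizes, lets the deterministic Address.hashCode() comparison pick exactly one side so the two clusters cannot both yield. A distilled restatement (method and parameter names are illustrative):

// Merge toward the bigger cluster; break size ties with the address hash so
// exactly one of the two clusters decides to merge.
static boolean shouldMergeInto(int localSize, int remoteSize,
                               int localAddressHash, int remoteAddressHash) {
    if (remoteSize > localSize) {
        return true;
    }
    if (remoteSize == localSize) {
        return localAddressHash > remoteAddressHash;
    }
    return false;
}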
1,399 |
public class RDFInputFormat extends FileInputFormat<NullWritable, FaunusElement> implements MapReduceFormat {
@Override
public RecordReader<NullWritable, FaunusElement> createRecordReader(final InputSplit split, final TaskAttemptContext context) throws IOException {
return new RDFRecordReader(ModifiableHadoopConfiguration.of(DEFAULT_COMPAT.getContextConfiguration(context)));
}
@Override
protected boolean isSplitable(final JobContext context, final Path file) {
return null == new CompressionCodecFactory(context.getConfiguration()).getCodec(file);
}
@Override
public void addMapReduceJobs(final HadoopCompiler compiler) {
compiler.addMapReduce(EdgeListInputMapReduce.Map.class,
EdgeListInputMapReduce.Combiner.class,
EdgeListInputMapReduce.Reduce.class,
LongWritable.class,
FaunusVertex.class,
NullWritable.class,
FaunusVertex.class,
new EmptyConfiguration());
}
}
| 1no label
|
titan-hadoop-parent_titan-hadoop-core_src_main_java_com_thinkaurelius_titan_hadoop_formats_edgelist_rdf_RDFInputFormat.java
|
980 |
@Entity
@Inheritance(strategy = InheritanceType.JOINED)
@Table(name = "BLC_DISCRETE_ORDER_ITEM")
@Cache(usage=CacheConcurrencyStrategy.NONSTRICT_READ_WRITE, region="blOrderElements")
@AdminPresentationClass(friendlyName = "DiscreteOrderItemImpl_discreteOrderItem")
public class DiscreteOrderItemImpl extends OrderItemImpl implements DiscreteOrderItem {
private static final long serialVersionUID = 1L;
@Column(name="BASE_RETAIL_PRICE", precision=19, scale=5)
@AdminPresentation(excluded = true, friendlyName = "DiscreteOrderItemImpl_Base_Retail_Price", order=2,
group = "DiscreteOrderItemImpl_Pricing", fieldType=SupportedFieldType.MONEY)
protected BigDecimal baseRetailPrice;
@Column(name="BASE_SALE_PRICE", precision=19, scale=5)
@AdminPresentation(excluded = true, friendlyName = "DiscreteOrderItemImpl_Base_Sale_Price", order=2,
group = "DiscreteOrderItemImpl_Pricing", fieldType= SupportedFieldType.MONEY)
protected BigDecimal baseSalePrice;
@ManyToOne(targetEntity = SkuImpl.class, optional=false)
@JoinColumn(name = "SKU_ID", nullable = false)
@Index(name="DISCRETE_SKU_INDEX", columnNames={"SKU_ID"})
@AdminPresentation(friendlyName = "DiscreteOrderItemImpl_Sku", order=Presentation.FieldOrder.SKU,
group = OrderItemImpl.Presentation.Group.Name.Catalog, groupOrder = OrderItemImpl.Presentation.Group.Order.Catalog)
@AdminPresentationToOneLookup()
protected Sku sku;
@ManyToOne(targetEntity = ProductImpl.class)
@JoinColumn(name = "PRODUCT_ID")
@Index(name="DISCRETE_PRODUCT_INDEX", columnNames={"PRODUCT_ID"})
@NotFound(action = NotFoundAction.IGNORE)
@AdminPresentation(friendlyName = "DiscreteOrderItemImpl_Product", order=Presentation.FieldOrder.PRODUCT,
group = OrderItemImpl.Presentation.Group.Name.Catalog, groupOrder = OrderItemImpl.Presentation.Group.Order.Catalog)
@AdminPresentationToOneLookup()
protected Product product;
@ManyToOne(targetEntity = BundleOrderItemImpl.class)
@JoinColumn(name = "BUNDLE_ORDER_ITEM_ID")
@AdminPresentation(excluded = true)
protected BundleOrderItem bundleOrderItem;
@ManyToOne(targetEntity = SkuBundleItemImpl.class)
@JoinColumn(name = "SKU_BUNDLE_ITEM_ID")
@AdminPresentation(excluded = true)
protected SkuBundleItem skuBundleItem;
@ElementCollection
@MapKeyColumn(name="NAME")
@Column(name="VALUE")
@CollectionTable(name="BLC_ORDER_ITEM_ADD_ATTR", joinColumns=@JoinColumn(name="ORDER_ITEM_ID"))
@BatchSize(size = 50)
@Deprecated
protected Map<String, String> additionalAttributes = new HashMap<String, String>();
@OneToMany(mappedBy = "discreteOrderItem", targetEntity = DiscreteOrderItemFeePriceImpl.class, cascade = { CascadeType.ALL }, orphanRemoval = true)
@Cache(usage = CacheConcurrencyStrategy.NONSTRICT_READ_WRITE, region = "blOrderElements")
protected List<DiscreteOrderItemFeePrice> discreteOrderItemFeePrices = new ArrayList<DiscreteOrderItemFeePrice>();
@Override
public Sku getSku() {
return sku;
}
@Override
public void setSku(Sku sku) {
this.sku = sku;
if (sku.getRetailPrice() != null) {
this.baseRetailPrice = sku.getRetailPrice().getAmount();
}
if (sku.getSalePrice() != null) {
this.baseSalePrice = sku.getSalePrice().getAmount();
}
this.itemTaxable = sku.isTaxable();
setName(sku.getName());
}
@Override
public Boolean isTaxable() {
return (sku == null || sku.isTaxable() == null || sku.isTaxable());
}
@Override
public Product getProduct() {
return product;
}
@Override
public void setProduct(Product product) {
this.product = product;
}
@Override
public BundleOrderItem getBundleOrderItem() {
return bundleOrderItem;
}
@Override
public void setBundleOrderItem(BundleOrderItem bundleOrderItem) {
if (this.order != null && bundleOrderItem != null) {
throw new IllegalStateException("Cannot set a BundleOrderItem on a DiscreteOrderItem that is already associated with an Order");
}
this.bundleOrderItem = bundleOrderItem;
}
@Override
public void setOrder(Order order) {
if (order != null && bundleOrderItem != null) {
throw new IllegalStateException("Cannot set an Order on a DiscreteOrderItem that is already associated with a BundleOrderItem");
}
this.order = order;
}
    /**
     * If this item is part of a bundle that was created via a ProductBundle, then this
     * method returns a reference to the corresponding SkuBundleItem.
     * <p/>
     * For manually created bundles, this method will return null.
     * <p/>
     * For all others, this method returns null.
     *
     * @return the corresponding SkuBundleItem, or null
     */
@Override
public SkuBundleItem getSkuBundleItem() {
return skuBundleItem;
}
    /**
     * Sets the associated SkuBundleItem.
     *
     * @param skuBundleItem the SkuBundleItem to associate
     */
    @Override
    public void setSkuBundleItem(SkuBundleItem skuBundleItem) {
        this.skuBundleItem = skuBundleItem;
    }
@Override
public String getName() {
String name = super.getName();
if (name == null) {
return sku.getName();
}
return name;
}
@Override
public Order getOrder() {
if (order == null) {
if (getBundleOrderItem() != null) {
return getBundleOrderItem().getOrder();
}
}
return order;
}
private boolean updateSalePrice() {
if (isSalePriceOverride()) {
return false;
}
        Money skuSalePrice = getSku().getSalePrice();
// Override retail/sale prices from skuBundle.
if (skuBundleItem != null) {
if (skuBundleItem.getSalePrice() != null) {
skuSalePrice = skuBundleItem.getSalePrice();
}
}
boolean updated = false;
//use the sku prices - the retail and sale prices could be null
if (skuSalePrice != null && !skuSalePrice.getAmount().equals(salePrice)) {
baseSalePrice = skuSalePrice.getAmount();
salePrice = skuSalePrice.getAmount();
updated = true;
}
// Adjust prices by adding in fees if they are attached.
if (getDiscreteOrderItemFeePrices() != null) {
for (DiscreteOrderItemFeePrice fee : getDiscreteOrderItemFeePrices()) {
Money returnPrice = convertToMoney(salePrice);
salePrice = returnPrice.add(fee.getAmount()).getAmount();
}
}
return updated;
}
private boolean updateRetailPrice() {
if (isRetailPriceOverride()) {
return false;
}
Money skuRetailPrice = getSku().getRetailPrice();
// Override retail/sale prices from skuBundle.
if (skuBundleItem != null) {
if (skuBundleItem.getRetailPrice() != null) {
skuRetailPrice = skuBundleItem.getRetailPrice();
}
}
boolean updated = false;
//use the sku prices - the retail and sale prices could be null
if (!skuRetailPrice.getAmount().equals(retailPrice)) {
baseRetailPrice = skuRetailPrice.getAmount();
retailPrice = skuRetailPrice.getAmount();
updated = true;
}
// Adjust prices by adding in fees if they are attached.
if (getDiscreteOrderItemFeePrices() != null) {
for (DiscreteOrderItemFeePrice fee : getDiscreteOrderItemFeePrices()) {
Money returnPrice = convertToMoney(retailPrice);
retailPrice = returnPrice.add(fee.getAmount()).getAmount();
}
}
return updated;
}
@Override
public boolean updateSaleAndRetailPrices() {
boolean salePriceUpdated = updateSalePrice();
boolean retailPriceUpdated = updateRetailPrice();
if (!isRetailPriceOverride() && !isSalePriceOverride()) {
if (salePrice != null && salePrice.compareTo(retailPrice) <= 0) {
price = salePrice;
} else {
price = retailPrice;
}
}
return salePriceUpdated || retailPriceUpdated;
}
@Override
public Map<String, String> getAdditionalAttributes() {
return additionalAttributes;
}
@Override
public void setAdditionalAttributes(Map<String, String> additionalAttributes) {
this.additionalAttributes = additionalAttributes;
}
@Override
public Money getBaseRetailPrice() {
return convertToMoney(baseRetailPrice);
}
@Override
public void setBaseRetailPrice(Money baseRetailPrice) {
this.baseRetailPrice = baseRetailPrice.getAmount();
}
@Override
public Money getBaseSalePrice() {
return convertToMoney(baseSalePrice);
}
@Override
public void setBaseSalePrice(Money baseSalePrice) {
        this.baseSalePrice = baseSalePrice == null ? null : baseSalePrice.getAmount();
}
@Override
public List<DiscreteOrderItemFeePrice> getDiscreteOrderItemFeePrices() {
return discreteOrderItemFeePrices;
}
@Override
public void setDiscreteOrderItemFeePrices(List<DiscreteOrderItemFeePrice> discreteOrderItemFeePrices) {
this.discreteOrderItemFeePrices = discreteOrderItemFeePrices;
}
protected Money convertToMoney(BigDecimal amount) {
return amount == null ? null : BroadleafCurrencyUtils.getMoney(amount, getOrder().getCurrency());
}
@Override
public OrderItem clone() {
DiscreteOrderItem orderItem = (DiscreteOrderItem) super.clone();
if (discreteOrderItemFeePrices != null) {
for (DiscreteOrderItemFeePrice feePrice : discreteOrderItemFeePrices) {
DiscreteOrderItemFeePrice cloneFeePrice = feePrice.clone();
cloneFeePrice.setDiscreteOrderItem(orderItem);
orderItem.getDiscreteOrderItemFeePrices().add(cloneFeePrice);
}
}
if (additionalAttributes != null) {
orderItem.getAdditionalAttributes().putAll(additionalAttributes);
}
orderItem.setBaseRetailPrice(convertToMoney(baseRetailPrice));
orderItem.setBaseSalePrice(convertToMoney(baseSalePrice));
orderItem.setBundleOrderItem(bundleOrderItem);
orderItem.setProduct(product);
orderItem.setSku(sku);
if (orderItem.getOrder() == null) {
throw new IllegalStateException("Either an Order or a BundleOrderItem must be set on the DiscreteOrderItem");
}
return orderItem;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
DiscreteOrderItemImpl other = (DiscreteOrderItemImpl) obj;
if (!super.equals(obj)) {
return false;
}
if (id != null && other.id != null) {
return id.equals(other.id);
}
if (bundleOrderItem == null) {
if (other.bundleOrderItem != null) {
return false;
}
} else if (!bundleOrderItem.equals(other.bundleOrderItem)) {
return false;
}
if (sku == null) {
if (other.sku != null) {
return false;
}
} else if (!sku.equals(other.sku)) {
return false;
}
return true;
}
@Override
public int hashCode() {
final int prime = super.hashCode();
int result = 1;
result = prime * result + ((bundleOrderItem == null) ? 0 : bundleOrderItem.hashCode());
result = prime * result + ((sku == null) ? 0 : sku.hashCode());
return result;
}
@Override
public boolean isDiscountingAllowed() {
if (discountsAllowed == null) {
return sku.isDiscountable();
} else {
return discountsAllowed.booleanValue();
}
}
@Override
public BundleOrderItem findParentItem() {
for (OrderItem orderItem : getOrder().getOrderItems()) {
if (orderItem instanceof BundleOrderItem) {
BundleOrderItem bundleItem = (BundleOrderItem) orderItem;
for (OrderItem containedItem : bundleItem.getOrderItems()) {
if (containedItem.equals(this)) {
return bundleItem;
}
}
}
}
return null;
}
public static class Presentation {
public static class Tab {
public static class Name {
public static final String OrderItems = "OrderImpl_Order_Items_Tab";
}
public static class Order {
public static final int OrderItems = 2000;
}
}
public static class Group {
public static class Name {
}
public static class Order {
}
}
public static class FieldOrder {
public static final int PRODUCT = 2000;
public static final int SKU = 3000;
}
}
@Override
public boolean isSkuActive() {
return sku.isActive();
}
}
| 1no label
|
core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_order_domain_DiscreteOrderItemImpl.java
|
693 |
public static class Builder {
private final Client client;
private final Listener listener;
private String name;
private int concurrentRequests = 1;
private int bulkActions = 1000;
private ByteSizeValue bulkSize = new ByteSizeValue(5, ByteSizeUnit.MB);
private TimeValue flushInterval = null;
/**
* Creates a builder of bulk processor with the client to use and the listener that will be used
* to be notified on the completion of bulk requests.
*/
public Builder(Client client, Listener listener) {
this.client = client;
this.listener = listener;
}
/**
* Sets an optional name to identify this bulk processor.
*/
public Builder setName(String name) {
this.name = name;
return this;
}
/**
* Sets the number of concurrent requests allowed to be executed. A value of 0 means that only a single
* request will be allowed to be executed. A value of 1 means 1 concurrent request is allowed to be executed
* while accumulating new bulk requests. Defaults to <tt>1</tt>.
*/
public Builder setConcurrentRequests(int concurrentRequests) {
this.concurrentRequests = concurrentRequests;
return this;
}
/**
* Sets when to flush a new bulk request based on the number of actions currently added. Defaults to
* <tt>1000</tt>. Can be set to <tt>-1</tt> to disable it.
*/
public Builder setBulkActions(int bulkActions) {
this.bulkActions = bulkActions;
return this;
}
/**
* Sets when to flush a new bulk request based on the size of actions currently added. Defaults to
* <tt>5mb</tt>. Can be set to <tt>-1</tt> to disable it.
*/
public Builder setBulkSize(ByteSizeValue bulkSize) {
this.bulkSize = bulkSize;
return this;
}
/**
* Sets a flush interval flushing *any* bulk actions pending if the interval passes. Defaults to not set.
* <p/>
* Note, both {@link #setBulkActions(int)} and {@link #setBulkSize(org.elasticsearch.common.unit.ByteSizeValue)}
* can be set to <tt>-1</tt> with the flush interval set allowing for complete async processing of bulk actions.
*/
public Builder setFlushInterval(TimeValue flushInterval) {
this.flushInterval = flushInterval;
return this;
}
/**
* Builds a new bulk processor.
*/
public BulkProcessor build() {
return new BulkProcessor(client, listener, name, concurrentRequests, bulkActions, bulkSize, flushInterval);
}
}
| 0true
|
src_main_java_org_elasticsearch_action_bulk_BulkProcessor.java
|
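A usage sketch for the Builder above, assuming a Client named client and a BulkProcessor.Listener named listener already exist (the Listener interface itself is outside this excerpt):

BulkProcessor processor = new BulkProcessor.Builder(client, listener)
        .setName("indexing-pipeline")                       // illustrative name
        .setConcurrentRequests(1)                           // one in-flight bulk while accumulating
        .setBulkActions(1000)                               // flush after 1000 actions...
        .setBulkSize(new ByteSizeValue(5, ByteSizeUnit.MB)) // ...or 5mb of payload...
        .setFlushInterval(TimeValue.timeValueSeconds(5))    // ...or 5 seconds, whichever comes first
        .build();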
217 |
public class XPostingsHighlighter {
//BEGIN EDIT added method to override offset for current value (default 0)
//we need this to perform discrete highlighting per field
protected int getOffsetForCurrentValue(String field, int docId) {
return 0;
}
//END EDIT
//BEGIN EDIT
//we need this to fix scoring when highlighting every single value separately, since the score depends on the total length of the field (all values rather than only the current one)
protected int getContentLength(String field, int docId) {
return -1;
}
//END EDIT
// TODO: maybe allow re-analysis for tiny fields? currently we require offsets,
// but if the analyzer is really fast and the field is tiny, this might really be
// unnecessary.
/** for rewriting: we don't want slow processing from MTQs */
private static final IndexReader EMPTY_INDEXREADER = new MultiReader();
/** Default maximum content size to process. Typically snippets
* closer to the beginning of the document better summarize its content */
public static final int DEFAULT_MAX_LENGTH = 10000;
private final int maxLength;
/** Set the first time {@link #getFormatter} is called,
* and then reused. */
private PassageFormatter defaultFormatter;
/** Set the first time {@link #getScorer} is called,
* and then reused. */
private PassageScorer defaultScorer;
/**
* Creates a new highlighter with default parameters.
*/
public XPostingsHighlighter() {
this(DEFAULT_MAX_LENGTH);
}
/**
* Creates a new highlighter, specifying maximum content length.
* @param maxLength maximum content size to process.
* @throws IllegalArgumentException if <code>maxLength</code> is negative or <code>Integer.MAX_VALUE</code>
*/
public XPostingsHighlighter(int maxLength) {
if (maxLength < 0 || maxLength == Integer.MAX_VALUE) {
// two reasons: no overflow problems in BreakIterator.preceding(offset+1),
// our sentinel in the offsets queue uses this value to terminate.
throw new IllegalArgumentException("maxLength must be < Integer.MAX_VALUE");
}
this.maxLength = maxLength;
}
/** Returns the {@link java.text.BreakIterator} to use for
* dividing text into passages. This returns
* {@link java.text.BreakIterator#getSentenceInstance(java.util.Locale)} by default;
* subclasses can override to customize. */
protected BreakIterator getBreakIterator(String field) {
return BreakIterator.getSentenceInstance(Locale.ROOT);
}
/** Returns the {@link PassageFormatter} to use for
* formatting passages into highlighted snippets. This
* returns a new {@code PassageFormatter} by default;
* subclasses can override to customize. */
protected PassageFormatter getFormatter(String field) {
if (defaultFormatter == null) {
defaultFormatter = new DefaultPassageFormatter();
}
return defaultFormatter;
}
/** Returns the {@link PassageScorer} to use for
* ranking passages. This
* returns a new {@code PassageScorer} by default;
* subclasses can override to customize. */
protected PassageScorer getScorer(String field) {
if (defaultScorer == null) {
defaultScorer = new PassageScorer();
}
return defaultScorer;
}
/**
* Highlights the top passages from a single field.
*
* @param field field name to highlight.
* Must have a stored string value and also be indexed with offsets.
* @param query query to highlight.
* @param searcher searcher that was previously used to execute the query.
* @param topDocs TopDocs containing the summary result documents to highlight.
* @return Array of formatted snippets corresponding to the documents in <code>topDocs</code>.
* If no highlights were found for a document, the
* first sentence for the field will be returned.
* @throws java.io.IOException if an I/O error occurred during processing
* @throws IllegalArgumentException if <code>field</code> was indexed without
* {@link org.apache.lucene.index.FieldInfo.IndexOptions#DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS}
*/
public String[] highlight(String field, Query query, IndexSearcher searcher, TopDocs topDocs) throws IOException {
return highlight(field, query, searcher, topDocs, 1);
}
/**
* Highlights the top-N passages from a single field.
*
* @param field field name to highlight.
* Must have a stored string value and also be indexed with offsets.
* @param query query to highlight.
* @param searcher searcher that was previously used to execute the query.
* @param topDocs TopDocs containing the summary result documents to highlight.
* @param maxPassages The maximum number of top-N ranked passages used to
* form the highlighted snippets.
* @return Array of formatted snippets corresponding to the documents in <code>topDocs</code>.
* If no highlights were found for a document, the
* first {@code maxPassages} sentences from the
* field will be returned.
* @throws IOException if an I/O error occurred during processing
* @throws IllegalArgumentException if <code>field</code> was indexed without
* {@link org.apache.lucene.index.FieldInfo.IndexOptions#DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS}
*/
public String[] highlight(String field, Query query, IndexSearcher searcher, TopDocs topDocs, int maxPassages) throws IOException {
Map<String,String[]> res = highlightFields(new String[] { field }, query, searcher, topDocs, new int[] { maxPassages });
return res.get(field);
}
/**
* Highlights the top passages from multiple fields.
* <p>
* Conceptually, this behaves as a more efficient form of:
* <pre class="prettyprint">
* Map m = new HashMap();
* for (String field : fields) {
* m.put(field, highlight(field, query, searcher, topDocs));
* }
* return m;
* </pre>
*
* @param fields field names to highlight.
* Must have a stored string value and also be indexed with offsets.
* @param query query to highlight.
* @param searcher searcher that was previously used to execute the query.
* @param topDocs TopDocs containing the summary result documents to highlight.
* @return Map keyed on field name, containing the array of formatted snippets
* corresponding to the documents in <code>topDocs</code>.
* If no highlights were found for a document, the
* first sentence from the field will be returned.
* @throws IOException if an I/O error occurred during processing
* @throws IllegalArgumentException if <code>field</code> was indexed without
* {@link org.apache.lucene.index.FieldInfo.IndexOptions#DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS}
*/
public Map<String,String[]> highlightFields(String fields[], Query query, IndexSearcher searcher, TopDocs topDocs) throws IOException {
int maxPassages[] = new int[fields.length];
Arrays.fill(maxPassages, 1);
return highlightFields(fields, query, searcher, topDocs, maxPassages);
}
/**
* Highlights the top-N passages from multiple fields.
* <p>
* Conceptually, this behaves as a more efficient form of:
* <pre class="prettyprint">
* Map m = new HashMap();
* for (String field : fields) {
* m.put(field, highlight(field, query, searcher, topDocs, maxPassages));
* }
* return m;
* </pre>
*
* @param fields field names to highlight.
* Must have a stored string value and also be indexed with offsets.
* @param query query to highlight.
* @param searcher searcher that was previously used to execute the query.
* @param topDocs TopDocs containing the summary result documents to highlight.
* @param maxPassages The maximum number of top-N ranked passages per-field used to
* form the highlighted snippets.
* @return Map keyed on field name, containing the array of formatted snippets
* corresponding to the documents in <code>topDocs</code>.
* If no highlights were found for a document, the
* first {@code maxPassages} sentences from the
* field will be returned.
* @throws IOException if an I/O error occurred during processing
* @throws IllegalArgumentException if <code>field</code> was indexed without
* {@link org.apache.lucene.index.FieldInfo.IndexOptions#DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS}
*/
public Map<String,String[]> highlightFields(String fields[], Query query, IndexSearcher searcher, TopDocs topDocs, int maxPassages[]) throws IOException {
final ScoreDoc scoreDocs[] = topDocs.scoreDocs;
int docids[] = new int[scoreDocs.length];
for (int i = 0; i < docids.length; i++) {
docids[i] = scoreDocs[i].doc;
}
return highlightFields(fields, query, searcher, docids, maxPassages);
}
/**
* Highlights the top-N passages from multiple fields,
* for the provided int[] docids.
*
* @param fieldsIn field names to highlight.
* Must have a stored string value and also be indexed with offsets.
* @param query query to highlight.
* @param searcher searcher that was previously used to execute the query.
* @param docidsIn containing the document IDs to highlight.
* @param maxPassagesIn The maximum number of top-N ranked passages per-field used to
* form the highlighted snippets.
* @return Map keyed on field name, containing the array of formatted snippets
* corresponding to the documents in <code>topDocs</code>.
* If no highlights were found for a document, the
     *         first {@code maxPassages} sentences from the field will
* be returned.
* @throws IOException if an I/O error occurred during processing
* @throws IllegalArgumentException if <code>field</code> was indexed without
* {@link org.apache.lucene.index.FieldInfo.IndexOptions#DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS}
*/
public Map<String,String[]> highlightFields(String fieldsIn[], Query query, IndexSearcher searcher, int[] docidsIn, int maxPassagesIn[]) throws IOException {
Map<String,String[]> snippets = new HashMap<String,String[]>();
for(Map.Entry<String,Object[]> ent : highlightFieldsAsObjects(fieldsIn, query, searcher, docidsIn, maxPassagesIn).entrySet()) {
Object[] snippetObjects = ent.getValue();
String[] snippetStrings = new String[snippetObjects.length];
snippets.put(ent.getKey(), snippetStrings);
for(int i=0;i<snippetObjects.length;i++) {
Object snippet = snippetObjects[i];
if (snippet != null) {
snippetStrings[i] = snippet.toString();
}
}
}
return snippets;
}
public Map<String,Object[]> highlightFieldsAsObjects(String fieldsIn[], Query query, IndexSearcher searcher, int[] docidsIn, int maxPassagesIn[]) throws IOException {
if (fieldsIn.length < 1) {
throw new IllegalArgumentException("fieldsIn must not be empty");
}
if (fieldsIn.length != maxPassagesIn.length) {
throw new IllegalArgumentException("invalid number of maxPassagesIn");
}
final IndexReader reader = searcher.getIndexReader();
query = rewrite(query);
SortedSet<Term> queryTerms = new TreeSet<Term>();
query.extractTerms(queryTerms);
IndexReaderContext readerContext = reader.getContext();
List<AtomicReaderContext> leaves = readerContext.leaves();
// Make our own copies because we sort in-place:
int[] docids = new int[docidsIn.length];
System.arraycopy(docidsIn, 0, docids, 0, docidsIn.length);
final String fields[] = new String[fieldsIn.length];
System.arraycopy(fieldsIn, 0, fields, 0, fieldsIn.length);
final int maxPassages[] = new int[maxPassagesIn.length];
System.arraycopy(maxPassagesIn, 0, maxPassages, 0, maxPassagesIn.length);
// sort for sequential io
Arrays.sort(docids);
new InPlaceMergeSorter() {
@Override
protected void swap(int i, int j) {
String tmp = fields[i];
fields[i] = fields[j];
fields[j] = tmp;
int tmp2 = maxPassages[i];
maxPassages[i] = maxPassages[j];
maxPassages[j] = tmp2;
}
@Override
protected int compare(int i, int j) {
return fields[i].compareTo(fields[j]);
}
}.sort(0, fields.length);
// pull stored data:
String[][] contents = loadFieldValues(searcher, fields, docids, maxLength);
Map<String,Object[]> highlights = new HashMap<String,Object[]>();
for (int i = 0; i < fields.length; i++) {
String field = fields[i];
int numPassages = maxPassages[i];
Term floor = new Term(field, "");
Term ceiling = new Term(field, UnicodeUtil.BIG_TERM);
SortedSet<Term> fieldTerms = queryTerms.subSet(floor, ceiling);
// TODO: should we have some reasonable defaults for term pruning? (e.g. stopwords)
// Strip off the redundant field:
BytesRef terms[] = new BytesRef[fieldTerms.size()];
int termUpto = 0;
for(Term term : fieldTerms) {
terms[termUpto++] = term.bytes();
}
Map<Integer,Object> fieldHighlights = highlightField(field, contents[i], getBreakIterator(field), terms, docids, leaves, numPassages);
Object[] result = new Object[docids.length];
for (int j = 0; j < docidsIn.length; j++) {
result[j] = fieldHighlights.get(docidsIn[j]);
}
highlights.put(field, result);
}
return highlights;
}
/** Loads the String values for each field X docID to be
* highlighted. By default this loads from stored
* fields, but a subclass can change the source. This
* method should allocate the String[fields.length][docids.length]
* and fill all values. The returned Strings must be
* identical to what was indexed. */
protected String[][] loadFieldValues(IndexSearcher searcher, String[] fields, int[] docids, int maxLength) throws IOException {
String contents[][] = new String[fields.length][docids.length];
char valueSeparators[] = new char[fields.length];
for (int i = 0; i < fields.length; i++) {
valueSeparators[i] = getMultiValuedSeparator(fields[i]);
}
LimitedStoredFieldVisitor visitor = new LimitedStoredFieldVisitor(fields, valueSeparators, maxLength);
for (int i = 0; i < docids.length; i++) {
searcher.doc(docids[i], visitor);
for (int j = 0; j < fields.length; j++) {
contents[j][i] = visitor.getValue(j);
}
visitor.reset();
}
return contents;
}
/**
* Returns the logical separator between values for multi-valued fields.
* The default value is a space character, which means passages can span across values,
* but a subclass can override, for example with {@code U+2029 PARAGRAPH SEPARATOR (PS)}
* if each value holds a discrete passage for highlighting.
*/
protected char getMultiValuedSeparator(String field) {
return ' ';
}
//BEGIN EDIT: made protected so that we can call from our subclass and pass in the terms by ourselves
protected Map<Integer,Object> highlightField(String field, String contents[], BreakIterator bi, BytesRef terms[], int[] docids, List<AtomicReaderContext> leaves, int maxPassages) throws IOException {
//private Map<Integer,Object> highlightField(String field, String contents[], BreakIterator bi, BytesRef terms[], int[] docids, List<AtomicReaderContext > leaves, int maxPassages) throws IOException {
//END EDIT
Map<Integer,Object> highlights = new HashMap<Integer,Object>();
// reuse in the real sense... for docs in same segment we just advance our old enum
DocsAndPositionsEnum postings[] = null;
TermsEnum termsEnum = null;
int lastLeaf = -1;
PassageFormatter fieldFormatter = getFormatter(field);
if (fieldFormatter == null) {
throw new NullPointerException("PassageFormatter cannot be null");
}
for (int i = 0; i < docids.length; i++) {
String content = contents[i];
if (content.length() == 0) {
continue; // nothing to do
}
bi.setText(content);
int doc = docids[i];
int leaf = ReaderUtil.subIndex(doc, leaves);
AtomicReaderContext subContext = leaves.get(leaf);
AtomicReader r = subContext.reader();
Terms t = r.terms(field);
if (t == null) {
continue; // nothing to do
}
if (leaf != lastLeaf) {
termsEnum = t.iterator(null);
postings = new DocsAndPositionsEnum[terms.length];
}
Passage passages[] = highlightDoc(field, terms, content.length(), bi, doc - subContext.docBase, termsEnum, postings, maxPassages);
if (passages.length == 0) {
passages = getEmptyHighlight(field, bi, maxPassages);
}
if (passages.length > 0) {
// otherwise a null snippet (eg if field is missing
// entirely from the doc)
highlights.put(doc, fieldFormatter.format(passages, content));
}
lastLeaf = leaf;
}
return highlights;
}
// algorithm: treat sentence snippets as miniature documents
    // we can intersect these with the postings lists via BreakIterator.preceding(offset)
// score each sentence as norm(sentenceStartOffset) * sum(weight * tf(freq))
private Passage[] highlightDoc(String field, BytesRef terms[], int contentLength, BreakIterator bi, int doc,
TermsEnum termsEnum, DocsAndPositionsEnum[] postings, int n) throws IOException {
//BEGIN EDIT added call to method that returns the offset for the current value (discrete highlighting)
int valueOffset = getOffsetForCurrentValue(field, doc);
//END EDIT
PassageScorer scorer = getScorer(field);
if (scorer == null) {
throw new NullPointerException("PassageScorer cannot be null");
}
//BEGIN EDIT discrete highlighting
// the scoring needs to be based on the length of the whole field (all values rather than only the current one)
int totalContentLength = getContentLength(field, doc);
if (totalContentLength == -1) {
totalContentLength = contentLength;
}
//END EDIT
PriorityQueue<OffsetsEnum> pq = new PriorityQueue<OffsetsEnum>();
float weights[] = new float[terms.length];
// initialize postings
for (int i = 0; i < terms.length; i++) {
DocsAndPositionsEnum de = postings[i];
int pDoc;
if (de == EMPTY) {
continue;
} else if (de == null) {
postings[i] = EMPTY; // initially
if (!termsEnum.seekExact(terms[i])) {
continue; // term not found
}
de = postings[i] = termsEnum.docsAndPositions(null, null, DocsAndPositionsEnum.FLAG_OFFSETS);
if (de == null) {
// no positions available
throw new IllegalArgumentException("field '" + field + "' was indexed without offsets, cannot highlight");
}
pDoc = de.advance(doc);
} else {
pDoc = de.docID();
if (pDoc < doc) {
pDoc = de.advance(doc);
}
}
if (doc == pDoc) {
//BEGIN EDIT we take into account the length of the whole field (all values) to properly score the snippets
weights[i] = scorer.weight(totalContentLength, de.freq());
//weights[i] = scorer.weight(contentLength, de.freq());
//END EDIT
de.nextPosition();
pq.add(new OffsetsEnum(de, i));
}
}
pq.add(new OffsetsEnum(EMPTY, Integer.MAX_VALUE)); // a sentinel for termination
PriorityQueue<Passage> passageQueue = new PriorityQueue<Passage>(n, new Comparator<Passage>() {
@Override
public int compare(Passage left, Passage right) {
if (left.score < right.score) {
return -1;
} else if (left.score > right.score) {
return 1;
} else {
return left.startOffset - right.startOffset;
}
}
});
Passage current = new Passage();
OffsetsEnum off;
while ((off = pq.poll()) != null) {
final DocsAndPositionsEnum dp = off.dp;
int start = dp.startOffset();
if (start == -1) {
throw new IllegalArgumentException("field '" + field + "' was indexed without offsets, cannot highlight");
}
int end = dp.endOffset();
// LUCENE-5166: this hit would span the content limit... however more valid
// hits may exist (they are sorted by start). so we pretend like we never
// saw this term, it won't cause a passage to be added to passageQueue or anything.
assert EMPTY.startOffset() == Integer.MAX_VALUE;
if (start < contentLength && end > contentLength) {
continue;
}
//BEGIN EDIT support for discrete highlighting (added block code)
//switch to the first match in the current value if there is one
boolean seenEnough = false;
while (start < valueOffset) {
if (off.pos == dp.freq()) {
seenEnough = true;
break;
} else {
off.pos++;
dp.nextPosition();
start = dp.startOffset();
end = dp.endOffset();
}
}
//continue with next term if we've already seen the current one all the times it appears
//that means that the current value doesn't hold matches for the current term
if (seenEnough) {
continue;
}
//we now subtract the offset of the current value to both start and end
start -= valueOffset;
end -= valueOffset;
//END EDIT
if (start >= current.endOffset) {
if (current.startOffset >= 0) {
// finalize current
//BEGIN EDIT we take into account the value offset when scoring the snippet based on its position
current.score *= scorer.norm(current.startOffset + valueOffset);
//current.score *= scorer.norm(current.startOffset);
//END EDIT
// new sentence: first add 'current' to queue
if (passageQueue.size() == n && current.score < passageQueue.peek().score) {
current.reset(); // can't compete, just reset it
} else {
passageQueue.offer(current);
if (passageQueue.size() > n) {
current = passageQueue.poll();
current.reset();
} else {
current = new Passage();
}
}
}
// if we exceed limit, we are done
if (start >= contentLength) {
Passage passages[] = new Passage[passageQueue.size()];
passageQueue.toArray(passages);
for (Passage p : passages) {
p.sort();
}
// sort in ascending order
Arrays.sort(passages, new Comparator<Passage>() {
@Override
public int compare(Passage left, Passage right) {
return left.startOffset - right.startOffset;
}
});
return passages;
}
// advance breakiterator
assert BreakIterator.DONE < 0;
current.startOffset = Math.max(bi.preceding(start+1), 0);
current.endOffset = Math.min(bi.next(), contentLength);
}
int tf = 0;
while (true) {
tf++;
current.addMatch(start, end, terms[off.id]);
if (off.pos == dp.freq()) {
break; // removed from pq
} else {
off.pos++;
dp.nextPosition();
//BEGIN EDIT support for discrete highlighting
start = dp.startOffset() - valueOffset;
end = dp.endOffset() - valueOffset;
//start = dp.startOffset();
//end = dp.endOffset();
//END EDIT
}
if (start >= current.endOffset || end > contentLength) {
pq.offer(off);
break;
}
}
current.score += weights[off.id] * scorer.tf(tf, current.endOffset - current.startOffset);
}
// Dead code but compiler disagrees:
assert false;
return null;
}
/** Called to summarize a document when no hits were
* found. By default this just returns the first
* {@code maxPassages} sentences; subclasses can override
* to customize. */
protected Passage[] getEmptyHighlight(String fieldName, BreakIterator bi, int maxPassages) {
// BreakIterator should be un-next'd:
List<Passage> passages = new ArrayList<Passage>();
int pos = bi.current();
assert pos == 0;
while (passages.size() < maxPassages) {
int next = bi.next();
if (next == BreakIterator.DONE) {
break;
}
Passage passage = new Passage();
passage.score = Float.NaN;
passage.startOffset = pos;
passage.endOffset = next;
passages.add(passage);
pos = next;
}
return passages.toArray(new Passage[passages.size()]);
}
private static class OffsetsEnum implements Comparable<OffsetsEnum> {
DocsAndPositionsEnum dp;
int pos;
int id;
OffsetsEnum(DocsAndPositionsEnum dp, int id) throws IOException {
this.dp = dp;
this.id = id;
this.pos = 1;
}
@Override
public int compareTo(OffsetsEnum other) {
try {
int off = dp.startOffset();
int otherOff = other.dp.startOffset();
if (off == otherOff) {
return id - other.id;
} else {
return Long.signum(((long)off) - otherOff);
}
} catch (IOException e) {
throw new RuntimeException(e);
}
}
}
private static final DocsAndPositionsEnum EMPTY = new DocsAndPositionsEnum() {
@Override
public int nextPosition() throws IOException { return 0; }
@Override
public int startOffset() throws IOException { return Integer.MAX_VALUE; }
@Override
public int endOffset() throws IOException { return Integer.MAX_VALUE; }
@Override
public BytesRef getPayload() throws IOException { return null; }
@Override
public int freq() throws IOException { return 0; }
@Override
public int docID() { return NO_MORE_DOCS; }
@Override
public int nextDoc() throws IOException { return NO_MORE_DOCS; }
@Override
public int advance(int target) throws IOException { return NO_MORE_DOCS; }
@Override
public long cost() { return 0; }
};
/**
     * we rewrite against an empty IndexReader because we don't want things like
     * range queries that don't summarize the document
*/
private static Query rewrite(Query original) throws IOException {
Query query = original;
for (Query rewrittenQuery = query.rewrite(EMPTY_INDEXREADER); rewrittenQuery != query;
rewrittenQuery = query.rewrite(EMPTY_INDEXREADER)) {
query = rewrittenQuery;
}
return query;
}
private static class LimitedStoredFieldVisitor extends StoredFieldVisitor {
private final String fields[];
private final char valueSeparators[];
private final int maxLength;
private final StringBuilder builders[];
private int currentField = -1;
public LimitedStoredFieldVisitor(String fields[], char valueSeparators[], int maxLength) {
assert fields.length == valueSeparators.length;
this.fields = fields;
this.valueSeparators = valueSeparators;
this.maxLength = maxLength;
builders = new StringBuilder[fields.length];
for (int i = 0; i < builders.length; i++) {
builders[i] = new StringBuilder();
}
}
@Override
public void stringField(FieldInfo fieldInfo, String value) throws IOException {
assert currentField >= 0;
StringBuilder builder = builders[currentField];
if (builder.length() > 0 && builder.length() < maxLength) {
builder.append(valueSeparators[currentField]);
}
if (builder.length() + value.length() > maxLength) {
builder.append(value, 0, maxLength - builder.length());
} else {
builder.append(value);
}
}
@Override
public Status needsField(FieldInfo fieldInfo) throws IOException {
currentField = Arrays.binarySearch(fields, fieldInfo.name);
if (currentField < 0) {
return Status.NO;
} else if (builders[currentField].length() > maxLength) {
return fields.length == 1 ? Status.STOP : Status.NO;
}
return Status.YES;
}
String getValue(int i) {
return builders[i].toString();
}
void reset() {
currentField = -1;
for (int i = 0; i < fields.length; i++) {
builders[i].setLength(0);
}
}
}
}
| 0true
|
src_main_java_org_apache_lucene_search_postingshighlight_XPostingsHighlighter.java
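A hedged usage sketch of the highlighter above; per its javadoc the field must have a stored string value and be indexed with offsets, and `searcher` and `query` are assumed to exist:

// Sketch: highlight the top 2 passages of the "body" field for the top 10 hits.
// Assumes "body" was stored and indexed with DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS.
XPostingsHighlighter highlighter = new XPostingsHighlighter(); // DEFAULT_MAX_LENGTH = 10000
TopDocs topDocs = searcher.search(query, 10);
String[] snippets = highlighter.highlight("body", query, searcher, topDocs, 2);
for (String snippet : snippets) {
    System.out.println(snippet); // null only if the field is missing entirely from that doc
}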
|
1,379 |
public abstract class OTransactionRealAbstract extends OTransactionAbstract {
protected Map<ORID, ORecord<?>> temp2persistent = new HashMap<ORID, ORecord<?>>();
protected Map<ORID, ORecordOperation> allEntries = new HashMap<ORID, ORecordOperation>();
protected Map<ORID, ORecordOperation> recordEntries = new LinkedHashMap<ORID, ORecordOperation>();
protected Map<String, OTransactionIndexChanges> indexEntries = new LinkedHashMap<String, OTransactionIndexChanges>();
protected Map<ORID, List<OTransactionRecordIndexOperation>> recordIndexOperations = new HashMap<ORID, List<OTransactionRecordIndexOperation>>();
protected int id;
private final OOperationUnitId operationUnitId;
protected int newObjectCounter = -2;
/**
* USE THIS AS RESPONSE TO REPORT A DELETED RECORD IN TX
*/
public static final ORecordFlat DELETED_RECORD = new ORecordFlat();
/**
* Represents information for each index operation for each record in DB.
*/
public static final class OTransactionRecordIndexOperation {
public OTransactionRecordIndexOperation(String index, Object key, OPERATION operation) {
this.index = index;
this.key = key;
this.operation = operation;
}
public String index;
public Object key;
public OPERATION operation;
}
protected OTransactionRealAbstract(ODatabaseRecordTx database, int id) {
super(database);
this.id = id;
this.operationUnitId = OOperationUnitId.generateId();
}
public void close() {
temp2persistent.clear();
allEntries.clear();
recordEntries.clear();
indexEntries.clear();
recordIndexOperations.clear();
newObjectCounter = -2;
status = TXSTATUS.INVALID;
database.setDefaultTransactionMode();
}
public int getId() {
return id;
}
public void clearRecordEntries() {
for (Entry<ORID, ORecordOperation> entry : recordEntries.entrySet()) {
final ORID key = entry.getKey();
      // IF NEW, CREATE A COPY OF THE RID TO AVOID THAT IT CHANGES IDENTITY+HASHCODE AND BECOMES UNREACHABLE THEREAFTER
allEntries.put(key.isNew() ? key.copy() : key, entry.getValue());
}
recordEntries.clear();
}
public Collection<ORecordOperation> getCurrentRecordEntries() {
return recordEntries.values();
}
public Collection<ORecordOperation> getAllRecordEntries() {
return allEntries.values();
}
public ORecordOperation getRecordEntry(ORID rid) {
ORecordOperation e = allEntries.get(rid);
if (e != null)
return e;
if (rid.isTemporary()) {
final ORecord<?> record = temp2persistent.get(rid);
if (record != null && !record.getIdentity().equals(rid))
rid = record.getIdentity();
}
e = recordEntries.get(rid);
if (e != null)
return e;
e = allEntries.get(rid);
if (e != null)
return e;
return null;
}
public ORecordInternal<?> getRecord(final ORID rid) {
final ORecordOperation e = getRecordEntry(rid);
if (e != null)
if (e.type == ORecordOperation.DELETED)
return DELETED_RECORD;
else
return e.getRecord();
return null;
}
/**
* Called by class iterator.
*/
public List<ORecordOperation> getRecordEntriesByClass(final String iClassName) {
final List<ORecordOperation> result = new ArrayList<ORecordOperation>();
if (iClassName == null || iClassName.length() == 0)
// RETURN ALL THE RECORDS
for (ORecordOperation entry : recordEntries.values()) {
result.add(entry);
}
else
// FILTER RECORDS BY CLASSNAME
for (ORecordOperation entry : recordEntries.values()) {
if (entry.getRecord() != null && entry.getRecord() instanceof ODocument
&& iClassName.equals(((ODocument) entry.getRecord()).getClassName()))
result.add(entry);
}
return result;
}
/**
* Called by cluster iterator.
*/
public List<ORecordOperation> getNewRecordEntriesByClusterIds(final int[] iIds) {
final List<ORecordOperation> result = new ArrayList<ORecordOperation>();
if (iIds == null)
// RETURN ALL THE RECORDS
for (ORecordOperation entry : recordEntries.values()) {
if (entry.type == ORecordOperation.CREATED)
result.add(entry);
}
else
// FILTER RECORDS BY ID
for (ORecordOperation entry : recordEntries.values()) {
for (int id : iIds) {
if (entry.getRecord() != null && entry.getRecord().getIdentity().getClusterId() == id
&& entry.type == ORecordOperation.CREATED) {
result.add(entry);
break;
}
}
}
return result;
}
public void clearIndexEntries() {
indexEntries.clear();
recordIndexOperations.clear();
}
public List<String> getInvolvedIndexes() {
List<String> list = null;
for (String indexName : indexEntries.keySet()) {
if (list == null)
list = new ArrayList<String>();
list.add(indexName);
}
return list;
}
public ODocument getIndexChanges() {
final ODocument result = new ODocument().setAllowChainedAccess(false);
for (Entry<String, OTransactionIndexChanges> indexEntry : indexEntries.entrySet()) {
final ODocument indexDoc = new ODocument().addOwner(result);
result.field(indexEntry.getKey(), indexDoc, OType.EMBEDDED);
if (indexEntry.getValue().cleared)
indexDoc.field("clear", Boolean.TRUE);
final List<ODocument> entries = new ArrayList<ODocument>();
indexDoc.field("entries", entries, OType.EMBEDDEDLIST);
// STORE INDEX ENTRIES
for (OTransactionIndexChangesPerKey entry : indexEntry.getValue().changesPerKey.values())
serializeIndexChangeEntry(entry, indexDoc, entries);
}
indexEntries.clear();
return result;
}
/**
   * Returns the index changes buffered for the given index name, to be flushed at commit time.
   *
   * @return the buffered changes, or null if none are registered for the given index name
*/
public OTransactionIndexChanges getIndexChanges(final String iIndexName) {
return indexEntries.get(iIndexName);
}
/**
   * Buffers index changes to be flushed at commit time.
*/
public void addIndexEntry(final OIndex<?> delegate, final String iIndexName, final OTransactionIndexChanges.OPERATION iOperation,
final Object key, final OIdentifiable iValue) {
OTransactionIndexChanges indexEntry = indexEntries.get(iIndexName);
if (indexEntry == null) {
indexEntry = new OTransactionIndexChanges();
indexEntries.put(iIndexName, indexEntry);
}
if (iOperation == OPERATION.CLEAR)
indexEntry.setCleared();
else {
if (iOperation == OPERATION.REMOVE && iValue != null && iValue.getIdentity().isTemporary()) {
// TEMPORARY RECORD: JUST REMOVE IT
for (OTransactionIndexChangesPerKey changes : indexEntry.changesPerKey.values())
for (int i = 0; i < changes.entries.size(); ++i)
if (changes.entries.get(i).value.equals(iValue)) {
changes.entries.remove(i);
break;
}
}
OTransactionIndexChangesPerKey changes = indexEntry.getChangesPerKey(key);
changes.add(iValue, iOperation);
if (iValue == null)
return;
List<OTransactionRecordIndexOperation> transactionIndexOperations = recordIndexOperations.get(iValue.getIdentity());
if (transactionIndexOperations == null) {
transactionIndexOperations = new ArrayList<OTransactionRecordIndexOperation>();
recordIndexOperations.put(iValue.getIdentity().copy(), transactionIndexOperations);
}
transactionIndexOperations.add(new OTransactionRecordIndexOperation(iIndexName, key, iOperation));
}
}
public void updateIdentityAfterCommit(final ORID oldRid, final ORID newRid) {
if (oldRid.equals(newRid))
// NO CHANGE, IGNORE IT
return;
final ORecordOperation rec = getRecordEntry(oldRid);
if (rec != null) {
if (allEntries.remove(oldRid) != null)
allEntries.put(newRid, rec);
if (recordEntries.remove(oldRid) != null)
recordEntries.put(newRid, rec);
if (!rec.getRecord().getIdentity().equals(newRid)) {
rec.getRecord().onBeforeIdentityChanged(oldRid);
rec.getRecord().setIdentity(new ORecordId(newRid));
rec.getRecord().onAfterIdentityChanged(rec.getRecord());
}
}
// UPDATE INDEXES
final List<OTransactionRecordIndexOperation> transactionIndexOperations = recordIndexOperations.get(oldRid);
if (transactionIndexOperations != null) {
for (final OTransactionRecordIndexOperation indexOperation : transactionIndexOperations) {
OTransactionIndexChanges indexEntryChanges = indexEntries.get(indexOperation.index);
if (indexEntryChanges == null)
continue;
final OTransactionIndexChangesPerKey changesPerKey = indexEntryChanges.getChangesPerKey(indexOperation.key);
updateChangesIdentity(oldRid, newRid, changesPerKey);
}
}
}
private void updateChangesIdentity(ORID oldRid, ORID newRid, OTransactionIndexChangesPerKey changesPerKey) {
if (changesPerKey == null)
return;
for (final OTransactionIndexEntry indexEntry : changesPerKey.entries)
if (indexEntry.value.getIdentity().equals(oldRid))
indexEntry.value = newRid;
}
protected void checkTransaction() {
if (status == TXSTATUS.INVALID)
throw new OTransactionException("Invalid state of the transaction. The transaction must be begun.");
}
protected void serializeIndexChangeEntry(OTransactionIndexChangesPerKey entry, final ODocument indexDoc,
final List<ODocument> entries) {
// SERIALIZE KEY
final String key;
final ODocument keyContainer = new ODocument();
try {
if (entry.key != null) {
if (entry.key instanceof OCompositeKey) {
final List<Object> keys = ((OCompositeKey) entry.key).getKeys();
keyContainer.field("key", keys, OType.EMBEDDEDLIST);
keyContainer.field("binary", false);
} else if (!(entry.key instanceof ORecordElement) && (entry.key instanceof OSerializableStream)) {
keyContainer.field("key", OStreamSerializerAnyStreamable.INSTANCE.toStream(entry.key), OType.BINARY);
keyContainer.field("binary", true);
} else {
keyContainer.field("key", entry.key);
keyContainer.field("binary", false);
}
key = ORecordSerializerSchemaAware2CSV.INSTANCE.toString(keyContainer, null, false).toString();
} else
key = "*";
} catch (IOException ioe) {
throw new OTransactionException("Error during index changes serialization. ", ioe);
}
final List<ODocument> operations = new ArrayList<ODocument>();
// SERIALIZE VALUES
if (entry.entries != null && !entry.entries.isEmpty()) {
for (OTransactionIndexEntry e : entry.entries) {
final ODocument changeDoc = new ODocument().addOwner(indexDoc).setAllowChainedAccess(false);
// SERIALIZE OPERATION
changeDoc.field("o", e.operation.ordinal());
if (e.value instanceof ORecord<?> && e.value.getIdentity().isNew()) {
final ORecord<?> saved = temp2persistent.get(e.value.getIdentity());
if (saved != null)
e.value = saved;
else
((ORecord<?>) e.value).save();
}
changeDoc.field("v", e.value != null ? e.value.getIdentity() : null);
operations.add(changeDoc);
}
}
entries.add(new ODocument().addOwner(indexDoc).setAllowChainedAccess(false).field("k", OStringSerializerHelper.encode(key))
.field("ops", operations, OType.EMBEDDEDLIST));
}
}
| 1no label
|
core_src_main_java_com_orientechnologies_orient_core_tx_OTransactionRealAbstract.java
|
64 |
public class OSharedLock extends OAbstractLock {
private final ReadWriteLock lock;
public OSharedLock(final ReadWriteLock iLock) {
lock = iLock;
}
public void lock() {
lock.readLock().lock();
}
public void unlock() {
lock.readLock().unlock();
}
}
| 0true
|
commons_src_main_java_com_orientechnologies_common_concur_lock_OSharedLock.java
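A small sketch of how this read-side adapter might be used; the try/finally idiom is conventional, not something the class requires:

// Sketch: multiple threads may hold the shared (read) lock concurrently.
ReadWriteLock rwLock = new ReentrantReadWriteLock();
OSharedLock sharedLock = new OSharedLock(rwLock);
sharedLock.lock();
try {
    // read-only critical section
} finally {
    sharedLock.unlock();
}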
|
57 |
class GetInitializedVisitor extends Visitor {
Value dec;
@Override
public void visit(Tree.AttributeDeclaration that) {
super.visit(that);
if (that.getSpecifierOrInitializerExpression()==sie) {
dec = that.getDeclarationModel();
}
}
}
| 0true
|
plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_correct_AddAnnotionProposal.java
|
346 |
static abstract class TestHelper extends Thread {
protected static final int ITERATIONS = 1000*10;
protected final Random random = new Random();
protected final IMap<String, Integer> map;
protected final String upKey;
protected final String downKey;
        public TestHelper(IMap<String, Integer> map, String upKey, String downKey) {
            this.map = map;
            this.upKey = upKey;
            this.downKey = downKey;
        }
        public void run() {
            try {
                for (int i = 0; i < ITERATIONS; i++) {
                    doRun();
                }
            } catch (Exception e) {
                throw new RuntimeException("Test thread crashed with ", e);
            }
        }
        abstract void doRun() throws Exception;
public void work(){
int upTotal = map.get(upKey);
int downTotal = map.get(downKey);
int dif = random.nextInt(1000);
upTotal += dif;
downTotal -= dif;
map.put(upKey, upTotal);
map.put(downKey, downTotal);
}
}
| 0true
|
hazelcast-client_src_test_java_com_hazelcast_client_map_ClientMapTryLockConcurrentTests.java
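A hypothetical concrete subclass sketching how doRun() might guard the non-atomic work() with IMap.tryLock; the class name and the 100ms timeout are illustrative, and java.util.concurrent.TimeUnit is assumed to be imported:

// Sketch: acquire both key locks (with timeouts, to avoid deadlocking when two
// helpers lock in opposite order) before mutating the two counters via work().
class TryLockTestHelper extends TestHelper {
    public TryLockTestHelper(IMap<String, Integer> map, String upKey, String downKey) {
        super(map, upKey, downKey);
    }
    @Override
    void doRun() throws Exception {
        if (map.tryLock(upKey, 100, TimeUnit.MILLISECONDS)) {
            try {
                if (map.tryLock(downKey, 100, TimeUnit.MILLISECONDS)) {
                    try {
                        work();
                    } finally {
                        map.unlock(downKey);
                    }
                }
            } finally {
                map.unlock(upKey);
            }
        }
    }
}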
|
631 |
public class IndexShardStatus implements Iterable<ShardStatus> {
private final ShardId shardId;
private final ShardStatus[] shards;
IndexShardStatus(ShardId shardId, ShardStatus[] shards) {
this.shardId = shardId;
this.shards = shards;
}
public ShardId getShardId() {
return this.shardId;
}
public ShardStatus[] getShards() {
return this.shards;
}
public ShardStatus getAt(int position) {
return shards[position];
}
/**
* Returns only the primary shards store size in bytes.
*/
public ByteSizeValue getPrimaryStoreSize() {
long bytes = -1;
for (ShardStatus shard : getShards()) {
if (!shard.getShardRouting().primary()) {
                // only sum the store size for the primaries
continue;
}
if (shard.getStoreSize() != null) {
if (bytes == -1) {
bytes = 0;
}
bytes += shard.getStoreSize().bytes();
}
}
if (bytes == -1) {
return null;
}
return new ByteSizeValue(bytes);
}
/**
* Returns the full store size in bytes, of both primaries and replicas.
*/
public ByteSizeValue getStoreSize() {
long bytes = -1;
for (ShardStatus shard : getShards()) {
if (shard.getStoreSize() != null) {
if (bytes == -1) {
bytes = 0;
}
bytes += shard.getStoreSize().bytes();
}
}
if (bytes == -1) {
return null;
}
return new ByteSizeValue(bytes);
}
public long getTranslogOperations() {
long translogOperations = -1;
for (ShardStatus shard : getShards()) {
if (shard.getTranslogOperations() != -1) {
if (translogOperations == -1) {
translogOperations = 0;
}
translogOperations += shard.getTranslogOperations();
}
}
return translogOperations;
}
private transient DocsStatus docs;
public DocsStatus getDocs() {
if (docs != null) {
return docs;
}
DocsStatus docs = null;
for (ShardStatus shard : getShards()) {
if (!shard.getShardRouting().primary()) {
// only sum docs for the primaries
continue;
}
if (shard.getDocs() == null) {
continue;
}
if (docs == null) {
docs = new DocsStatus();
}
docs.numDocs += shard.getDocs().getNumDocs();
docs.maxDoc += shard.getDocs().getMaxDoc();
docs.deletedDocs += shard.getDocs().getDeletedDocs();
}
this.docs = docs;
return this.docs;
}
/**
* Total merges of this shard replication group.
*/
public MergeStats getMergeStats() {
MergeStats mergeStats = new MergeStats();
for (ShardStatus shard : shards) {
mergeStats.add(shard.getMergeStats());
}
return mergeStats;
}
public RefreshStats getRefreshStats() {
RefreshStats refreshStats = new RefreshStats();
for (ShardStatus shard : shards) {
refreshStats.add(shard.getRefreshStats());
}
return refreshStats;
}
public FlushStats getFlushStats() {
FlushStats flushStats = new FlushStats();
for (ShardStatus shard : shards) {
            flushStats.add(shard.getFlushStats());
}
return flushStats;
}
@Override
public Iterator<ShardStatus> iterator() {
return Iterators.forArray(shards);
}
}
| 0true
|
src_main_java_org_elasticsearch_action_admin_indices_status_IndexShardStatus.java
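The aggregation methods above use -1 as a "nothing reported" sentinel and surface it to callers as null; a defensive-caller sketch (the `status` variable is assumed to exist):

// Sketch: the store-size getters return null until at least one shard reports a size.
ByteSizeValue primarySize = status.getPrimaryStoreSize();
ByteSizeValue totalSize = status.getStoreSize();
if (primarySize != null && totalSize != null) {
    System.out.println("primaries: " + primarySize + ", primaries+replicas: " + totalSize);
} else {
    System.out.println("store sizes not reported yet");
}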
|
1,377 |
public class IndexMetaData {
public interface Custom {
String type();
interface Factory<T extends Custom> {
String type();
T readFrom(StreamInput in) throws IOException;
void writeTo(T customIndexMetaData, StreamOutput out) throws IOException;
T fromMap(Map<String, Object> map) throws IOException;
T fromXContent(XContentParser parser) throws IOException;
void toXContent(T customIndexMetaData, XContentBuilder builder, ToXContent.Params params) throws IOException;
/**
* Merges from first to second, with first being more important, i.e., if something exists in first and second,
* first will prevail.
*/
T merge(T first, T second);
}
}
public static Map<String, Custom.Factory> customFactories = new HashMap<String, Custom.Factory>();
static {
// register non plugin custom metadata
registerFactory(IndexWarmersMetaData.TYPE, IndexWarmersMetaData.FACTORY);
}
/**
* Register a custom index meta data factory. Make sure to call it from a static block.
*/
public static void registerFactory(String type, Custom.Factory factory) {
customFactories.put(type, factory);
}
@Nullable
public static <T extends Custom> Custom.Factory<T> lookupFactory(String type) {
return customFactories.get(type);
}
public static <T extends Custom> Custom.Factory<T> lookupFactorySafe(String type) throws ElasticsearchIllegalArgumentException {
Custom.Factory<T> factory = customFactories.get(type);
if (factory == null) {
throw new ElasticsearchIllegalArgumentException("No custom index metadata factoy registered for type [" + type + "]");
}
return factory;
}
public static final ClusterBlock INDEX_READ_ONLY_BLOCK = new ClusterBlock(5, "index read-only (api)", false, false, RestStatus.FORBIDDEN, ClusterBlockLevel.WRITE, ClusterBlockLevel.METADATA);
public static final ClusterBlock INDEX_READ_BLOCK = new ClusterBlock(7, "index read (api)", false, false, RestStatus.FORBIDDEN, ClusterBlockLevel.READ);
public static final ClusterBlock INDEX_WRITE_BLOCK = new ClusterBlock(8, "index write (api)", false, false, RestStatus.FORBIDDEN, ClusterBlockLevel.WRITE);
public static final ClusterBlock INDEX_METADATA_BLOCK = new ClusterBlock(9, "index metadata (api)", false, false, RestStatus.FORBIDDEN, ClusterBlockLevel.METADATA);
public static enum State {
OPEN((byte) 0),
CLOSE((byte) 1);
private final byte id;
State(byte id) {
this.id = id;
}
public byte id() {
return this.id;
}
public static State fromId(byte id) {
if (id == 0) {
return OPEN;
} else if (id == 1) {
return CLOSE;
}
throw new ElasticsearchIllegalStateException("No state match for id [" + id + "]");
}
public static State fromString(String state) {
if ("open".equals(state)) {
return OPEN;
} else if ("close".equals(state)) {
return CLOSE;
}
throw new ElasticsearchIllegalStateException("No state match for [" + state + "]");
}
}
public static final String SETTING_NUMBER_OF_SHARDS = "index.number_of_shards";
public static final String SETTING_NUMBER_OF_REPLICAS = "index.number_of_replicas";
public static final String SETTING_AUTO_EXPAND_REPLICAS = "index.auto_expand_replicas";
public static final String SETTING_READ_ONLY = "index.blocks.read_only";
public static final String SETTING_BLOCKS_READ = "index.blocks.read";
public static final String SETTING_BLOCKS_WRITE = "index.blocks.write";
public static final String SETTING_BLOCKS_METADATA = "index.blocks.metadata";
public static final String SETTING_VERSION_CREATED = "index.version.created";
public static final String SETTING_UUID = "index.uuid";
public static final String INDEX_UUID_NA_VALUE = "_na_";
private final String index;
private final long version;
private final State state;
private final ImmutableOpenMap<String, AliasMetaData> aliases;
private final Settings settings;
private final ImmutableOpenMap<String, MappingMetaData> mappings;
private final ImmutableOpenMap<String, Custom> customs;
private transient final int totalNumberOfShards;
private final DiscoveryNodeFilters requireFilters;
private final DiscoveryNodeFilters includeFilters;
private final DiscoveryNodeFilters excludeFilters;
private IndexMetaData(String index, long version, State state, Settings settings, ImmutableOpenMap<String, MappingMetaData> mappings, ImmutableOpenMap<String, AliasMetaData> aliases, ImmutableOpenMap<String, Custom> customs) {
Preconditions.checkArgument(settings.getAsInt(SETTING_NUMBER_OF_SHARDS, -1) != -1, "must specify numberOfShards for index [" + index + "]");
Preconditions.checkArgument(settings.getAsInt(SETTING_NUMBER_OF_REPLICAS, -1) != -1, "must specify numberOfReplicas for index [" + index + "]");
this.index = index;
this.version = version;
this.state = state;
this.settings = settings;
this.mappings = mappings;
this.customs = customs;
this.totalNumberOfShards = numberOfShards() * (numberOfReplicas() + 1);
this.aliases = aliases;
ImmutableMap<String, String> requireMap = settings.getByPrefix("index.routing.allocation.require.").getAsMap();
if (requireMap.isEmpty()) {
requireFilters = null;
} else {
requireFilters = DiscoveryNodeFilters.buildFromKeyValue(AND, requireMap);
}
ImmutableMap<String, String> includeMap = settings.getByPrefix("index.routing.allocation.include.").getAsMap();
if (includeMap.isEmpty()) {
includeFilters = null;
} else {
includeFilters = DiscoveryNodeFilters.buildFromKeyValue(OR, includeMap);
}
ImmutableMap<String, String> excludeMap = settings.getByPrefix("index.routing.allocation.exclude.").getAsMap();
if (excludeMap.isEmpty()) {
excludeFilters = null;
} else {
excludeFilters = DiscoveryNodeFilters.buildFromKeyValue(OR, excludeMap);
}
}
public String index() {
return index;
}
public String getIndex() {
return index();
}
public String uuid() {
return settings.get(SETTING_UUID, INDEX_UUID_NA_VALUE);
}
public String getUUID() {
return uuid();
}
/**
     * Test whether the current index UUID is the same as the given one. Returns true if either is _na_.
*/
public boolean isSameUUID(String otherUUID) {
assert otherUUID != null;
assert uuid() != null;
if (INDEX_UUID_NA_VALUE.equals(otherUUID) || INDEX_UUID_NA_VALUE.equals(uuid())) {
return true;
}
return otherUUID.equals(getUUID());
}
public long version() {
return this.version;
}
public long getVersion() {
return this.version;
}
public State state() {
return this.state;
}
public State getState() {
return state();
}
public int numberOfShards() {
return settings.getAsInt(SETTING_NUMBER_OF_SHARDS, -1);
}
public int getNumberOfShards() {
return numberOfShards();
}
public int numberOfReplicas() {
return settings.getAsInt(SETTING_NUMBER_OF_REPLICAS, -1);
}
public int getNumberOfReplicas() {
return numberOfReplicas();
}
public int totalNumberOfShards() {
return totalNumberOfShards;
}
public int getTotalNumberOfShards() {
return totalNumberOfShards();
}
public Settings settings() {
return settings;
}
public Settings getSettings() {
return settings();
}
public ImmutableOpenMap<String, AliasMetaData> aliases() {
return this.aliases;
}
public ImmutableOpenMap<String, AliasMetaData> getAliases() {
return aliases();
}
public ImmutableOpenMap<String, MappingMetaData> mappings() {
return mappings;
}
public ImmutableOpenMap<String, MappingMetaData> getMappings() {
return mappings();
}
@Nullable
public MappingMetaData mapping(String mappingType) {
return mappings.get(mappingType);
}
/**
     * Sometimes the default mapping exists while an actual typed mapping has not been introduced yet;
     * in this case, we want to return the default mapping in case it has some default mapping definitions.
* <p/>
* Note, once the mapping type is introduced, the default mapping is applied on the actual typed MappingMetaData,
* setting its routing, timestamp, and so on if needed.
*/
@Nullable
public MappingMetaData mappingOrDefault(String mappingType) {
MappingMetaData mapping = mappings.get(mappingType);
if (mapping != null) {
return mapping;
}
return mappings.get(MapperService.DEFAULT_MAPPING);
}
public ImmutableOpenMap<String, Custom> customs() {
return this.customs;
}
public ImmutableOpenMap<String, Custom> getCustoms() {
return this.customs;
}
public <T extends Custom> T custom(String type) {
return (T) customs.get(type);
}
@Nullable
public DiscoveryNodeFilters requireFilters() {
return requireFilters;
}
@Nullable
public DiscoveryNodeFilters includeFilters() {
return includeFilters;
}
@Nullable
public DiscoveryNodeFilters excludeFilters() {
return excludeFilters;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
IndexMetaData that = (IndexMetaData) o;
if (!aliases.equals(that.aliases)) {
return false;
}
if (!index.equals(that.index)) {
return false;
}
if (!mappings.equals(that.mappings)) {
return false;
}
if (!settings.equals(that.settings)) {
return false;
}
if (state != that.state) {
return false;
}
return true;
}
@Override
public int hashCode() {
int result = index.hashCode();
result = 31 * result + state.hashCode();
result = 31 * result + aliases.hashCode();
result = 31 * result + settings.hashCode();
result = 31 * result + mappings.hashCode();
return result;
}
public static Builder builder(String index) {
return new Builder(index);
}
public static Builder builder(IndexMetaData indexMetaData) {
return new Builder(indexMetaData);
}
public static class Builder {
private String index;
private State state = State.OPEN;
private long version = 1;
private Settings settings = ImmutableSettings.Builder.EMPTY_SETTINGS;
private final ImmutableOpenMap.Builder<String, MappingMetaData> mappings;
private final ImmutableOpenMap.Builder<String, AliasMetaData> aliases;
private final ImmutableOpenMap.Builder<String, Custom> customs;
public Builder(String index) {
this.index = index;
this.mappings = ImmutableOpenMap.builder();
this.aliases = ImmutableOpenMap.builder();
this.customs = ImmutableOpenMap.builder();
}
public Builder(IndexMetaData indexMetaData) {
this.index = indexMetaData.index();
this.state = indexMetaData.state;
this.version = indexMetaData.version;
this.settings = indexMetaData.settings();
this.mappings = ImmutableOpenMap.builder(indexMetaData.mappings);
this.aliases = ImmutableOpenMap.builder(indexMetaData.aliases);
this.customs = ImmutableOpenMap.builder(indexMetaData.customs);
}
public String index() {
return index;
}
public Builder index(String index) {
this.index = index;
return this;
}
public Builder numberOfShards(int numberOfShards) {
settings = settingsBuilder().put(settings).put(SETTING_NUMBER_OF_SHARDS, numberOfShards).build();
return this;
}
public int numberOfShards() {
return settings.getAsInt(SETTING_NUMBER_OF_SHARDS, -1);
}
public Builder numberOfReplicas(int numberOfReplicas) {
settings = settingsBuilder().put(settings).put(SETTING_NUMBER_OF_REPLICAS, numberOfReplicas).build();
return this;
}
public int numberOfReplicas() {
return settings.getAsInt(SETTING_NUMBER_OF_REPLICAS, -1);
}
public Builder settings(Settings.Builder settings) {
this.settings = settings.build();
return this;
}
public Builder settings(Settings settings) {
this.settings = settings;
return this;
}
public MappingMetaData mapping(String type) {
return mappings.get(type);
}
public Builder removeMapping(String mappingType) {
mappings.remove(mappingType);
return this;
}
public Builder putMapping(String type, String source) throws IOException {
XContentParser parser = XContentFactory.xContent(source).createParser(source);
try {
putMapping(new MappingMetaData(type, parser.mapOrdered()));
} finally {
parser.close();
}
return this;
}
public Builder putMapping(MappingMetaData mappingMd) {
mappings.put(mappingMd.type(), mappingMd);
return this;
}
public Builder state(State state) {
this.state = state;
return this;
}
public Builder putAlias(AliasMetaData aliasMetaData) {
aliases.put(aliasMetaData.alias(), aliasMetaData);
return this;
}
public Builder putAlias(AliasMetaData.Builder aliasMetaData) {
aliases.put(aliasMetaData.alias(), aliasMetaData.build());
return this;
}
        public Builder removeAlias(String alias) {
aliases.remove(alias);
return this;
}
public Builder putCustom(String type, Custom customIndexMetaData) {
this.customs.put(type, customIndexMetaData);
return this;
}
public Builder removeCustom(String type) {
this.customs.remove(type);
return this;
}
public Custom getCustom(String type) {
return this.customs.get(type);
}
public long version() {
return this.version;
}
public Builder version(long version) {
this.version = version;
return this;
}
public IndexMetaData build() {
ImmutableOpenMap.Builder<String, AliasMetaData> tmpAliases = aliases;
Settings tmpSettings = settings;
// For backward compatibility
String[] legacyAliases = settings.getAsArray("index.aliases");
if (legacyAliases.length > 0) {
tmpAliases = ImmutableOpenMap.builder();
for (String alias : legacyAliases) {
AliasMetaData aliasMd = AliasMetaData.newAliasMetaDataBuilder(alias).build();
tmpAliases.put(alias, aliasMd);
}
tmpAliases.putAll(aliases);
// Remove index.aliases from settings once they are migrated to the new data structure
tmpSettings = ImmutableSettings.settingsBuilder().put(settings).putArray("index.aliases").build();
}
// update default mapping on the MappingMetaData
if (mappings.containsKey(MapperService.DEFAULT_MAPPING)) {
MappingMetaData defaultMapping = mappings.get(MapperService.DEFAULT_MAPPING);
for (ObjectCursor<MappingMetaData> cursor : mappings.values()) {
cursor.value.updateDefaultMapping(defaultMapping);
}
}
return new IndexMetaData(index, version, state, tmpSettings, mappings.build(), tmpAliases.build(), customs.build());
}
public static void toXContent(IndexMetaData indexMetaData, XContentBuilder builder, ToXContent.Params params) throws IOException {
builder.startObject(indexMetaData.index(), XContentBuilder.FieldCaseConversion.NONE);
builder.field("version", indexMetaData.version());
builder.field("state", indexMetaData.state().toString().toLowerCase(Locale.ENGLISH));
boolean binary = params.paramAsBoolean("binary", false);
builder.startObject("settings");
for (Map.Entry<String, String> entry : indexMetaData.settings().getAsMap().entrySet()) {
builder.field(entry.getKey(), entry.getValue());
}
builder.endObject();
builder.startArray("mappings");
for (ObjectObjectCursor<String, MappingMetaData> cursor : indexMetaData.mappings()) {
if (binary) {
builder.value(cursor.value.source().compressed());
} else {
byte[] data = cursor.value.source().uncompressed();
XContentParser parser = XContentFactory.xContent(data).createParser(data);
Map<String, Object> mapping = parser.mapOrdered();
parser.close();
builder.map(mapping);
}
}
builder.endArray();
for (ObjectObjectCursor<String, Custom> cursor : indexMetaData.customs()) {
builder.startObject(cursor.key, XContentBuilder.FieldCaseConversion.NONE);
lookupFactorySafe(cursor.key).toXContent(cursor.value, builder, params);
builder.endObject();
}
builder.startObject("aliases");
for (ObjectCursor<AliasMetaData> cursor : indexMetaData.aliases().values()) {
AliasMetaData.Builder.toXContent(cursor.value, builder, params);
}
builder.endObject();
builder.endObject();
}
public static IndexMetaData fromXContent(XContentParser parser) throws IOException {
if (parser.currentToken() == XContentParser.Token.START_OBJECT) {
parser.nextToken();
}
Builder builder = new Builder(parser.currentName());
String currentFieldName = null;
XContentParser.Token token = parser.nextToken();
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.START_OBJECT) {
if ("settings".equals(currentFieldName)) {
builder.settings(ImmutableSettings.settingsBuilder().put(SettingsLoader.Helper.loadNestedFromMap(parser.mapOrdered())));
} else if ("mappings".equals(currentFieldName)) {
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.START_OBJECT) {
String mappingType = currentFieldName;
Map<String, Object> mappingSource = MapBuilder.<String, Object>newMapBuilder().put(mappingType, parser.mapOrdered()).map();
builder.putMapping(new MappingMetaData(mappingType, mappingSource));
}
}
} else if ("aliases".equals(currentFieldName)) {
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
builder.putAlias(AliasMetaData.Builder.fromXContent(parser));
}
} else {
// check if its a custom index metadata
Custom.Factory<Custom> factory = lookupFactory(currentFieldName);
if (factory == null) {
//TODO warn
parser.skipChildren();
} else {
builder.putCustom(factory.type(), factory.fromXContent(parser));
}
}
} else if (token == XContentParser.Token.START_ARRAY) {
if ("mappings".equals(currentFieldName)) {
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
if (token == XContentParser.Token.VALUE_EMBEDDED_OBJECT) {
builder.putMapping(new MappingMetaData(new CompressedString(parser.binaryValue())));
} else {
Map<String, Object> mapping = parser.mapOrdered();
if (mapping.size() == 1) {
String mappingType = mapping.keySet().iterator().next();
builder.putMapping(new MappingMetaData(mappingType, mapping));
}
}
}
}
} else if (token.isValue()) {
if ("state".equals(currentFieldName)) {
builder.state(State.fromString(parser.text()));
} else if ("version".equals(currentFieldName)) {
builder.version(parser.longValue());
}
}
}
return builder.build();
}
public static IndexMetaData readFrom(StreamInput in) throws IOException {
Builder builder = new Builder(in.readString());
builder.version(in.readLong());
builder.state(State.fromId(in.readByte()));
builder.settings(readSettingsFromStream(in));
int mappingsSize = in.readVInt();
for (int i = 0; i < mappingsSize; i++) {
MappingMetaData mappingMd = MappingMetaData.readFrom(in);
builder.putMapping(mappingMd);
}
int aliasesSize = in.readVInt();
for (int i = 0; i < aliasesSize; i++) {
AliasMetaData aliasMd = AliasMetaData.Builder.readFrom(in);
builder.putAlias(aliasMd);
}
int customSize = in.readVInt();
for (int i = 0; i < customSize; i++) {
String type = in.readString();
Custom customIndexMetaData = lookupFactorySafe(type).readFrom(in);
builder.putCustom(type, customIndexMetaData);
}
return builder.build();
}
public static void writeTo(IndexMetaData indexMetaData, StreamOutput out) throws IOException {
out.writeString(indexMetaData.index());
out.writeLong(indexMetaData.version());
out.writeByte(indexMetaData.state().id());
writeSettingsToStream(indexMetaData.settings(), out);
out.writeVInt(indexMetaData.mappings().size());
for (ObjectCursor<MappingMetaData> cursor : indexMetaData.mappings().values()) {
MappingMetaData.writeTo(cursor.value, out);
}
out.writeVInt(indexMetaData.aliases().size());
for (ObjectCursor<AliasMetaData> cursor : indexMetaData.aliases().values()) {
AliasMetaData.Builder.writeTo(cursor.value, out);
}
out.writeVInt(indexMetaData.customs().size());
for (ObjectObjectCursor<String, Custom> cursor : indexMetaData.customs()) {
out.writeString(cursor.key);
lookupFactorySafe(cursor.key).writeTo(cursor.value, out);
}
}
}
}
| 1no label
|
src_main_java_org_elasticsearch_cluster_metadata_IndexMetaData.java
|
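The fromXContent method above is a hand-rolled pull parser: it remembers the last FIELD_NAME token and dispatches on whether the following token opens an object, opens an array, or carries a scalar value. A minimal stand-alone sketch of the same loop shape, written against Jackson's streaming JsonParser since the original's XContentParser is Elasticsearch-internal:
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonToken;
public final class PullParseDemo {
    public static void main(String[] args) throws Exception {
        String json = "{\"state\":\"open\",\"version\":3,\"settings\":{\"number_of_shards\":\"5\"}}";
        JsonParser parser = new JsonFactory().createParser(json);
        parser.nextToken(); // advance onto START_OBJECT, mirroring the guard at the top of fromXContent
        String currentFieldName = null;
        JsonToken token;
        while ((token = parser.nextToken()) != JsonToken.END_OBJECT) {
            if (token == JsonToken.FIELD_NAME) {
                currentFieldName = parser.getCurrentName();   // remember the key...
            } else if (token == JsonToken.START_OBJECT) {
                System.out.println("nested object under: " + currentFieldName);
                parser.skipChildren();                        // ...a real parser would recurse here
            } else if (token.isScalarValue()) {
                System.out.println(currentFieldName + " = " + parser.getText());
            }
        }
    }
}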
1,100 |
public class OSQLFunctionFirst extends OSQLFunctionConfigurableAbstract {
public static final String NAME = "first";
private Object first = this;
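// "this" doubles as the not-yet-assigned sentinel; null cannot serve that role,
// since null is a legitimate first value of the evaluated multi-value.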
public OSQLFunctionFirst() {
super(NAME, 1, 1);
}
public Object execute(final OIdentifiable iCurrentRecord, Object iCurrentResult, final Object[] iParameters,
final OCommandContext iContext) {
Object value = iParameters[0];
if (value instanceof OSQLFilterItem)
value = ((OSQLFilterItem) value).getValue(iCurrentRecord, iContext);
if (OMultiValue.isMultiValue(value))
value = OMultiValue.getFirstValue(value);
if (first == this)
// ONLY THE FIRST TIME
first = value;
return value;
}
public boolean aggregateResults() {
return configuredParameters.length == 1;
}
@Override
public Object getResult() {
return first;
}
@Override
public boolean filterResult() {
return true;
}
public String getSyntax() {
return "Syntax error: first(<field>)";
}
}
| 1no label
|
core_src_main_java_com_orientechnologies_orient_core_sql_functions_coll_OSQLFunctionFirst.java
|
754 |
phasedUnit.getCompilationUnit().visit(new Visitor() {
@Override
public void visit(ImportMemberOrType that) {
super.visit(that);
visitIt(that.getIdentifier(), that.getDeclarationModel());
}
@Override
public void visit(BaseMemberOrTypeExpression that) {
super.visit(that);
visitIt(that.getIdentifier(), that.getDeclaration());
}
@Override
public void visit(BaseType that) {
super.visit(that);
visitIt(that.getIdentifier(), that.getDeclarationModel());
}
@Override
public void visit(ModuleDescriptor that) {
super.visit(that);
visitIt(that.getImportPath());
}
@Override
public void visit(PackageDescriptor that) {
super.visit(that);
visitIt(that.getImportPath());
}
private void visitIt(Tree.ImportPath importPath) {
if (formatPath(importPath.getIdentifiers()).equals(oldName)) {
edits.add(new ReplaceEdit(importPath.getStartIndex(),
oldName.length(), newName));
}
}
private void visitIt(Tree.Identifier id, Declaration dec) {
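// Collect identifiers whose declaration comes from the package being renamed,
// ignoring declarations that are themselves being moved, the default package,
// and the implicit language module.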
if (dec!=null && !declarations.contains(dec)) {
String pn = dec.getUnit().getPackage().getNameAsString();
if (pn.equals(oldName) && !pn.isEmpty() &&
!pn.equals(Module.LANGUAGE_MODULE_NAME)) {
imports.put(dec, id.getText());
}
}
}
});
| 1no label
|
plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_refactor_CopyFileRefactoringParticipant.java
|
1,323 |
public class ExecutorServiceProxy extends AbstractDistributedObject<DistributedExecutorService>
implements IExecutorService {
public static final int SYNC_FREQUENCY = 100;
private final String name;
private final Random random = new Random(-System.currentTimeMillis());
private final int partitionCount;
private final AtomicInteger consecutiveSubmits = new AtomicInteger();
private final ILogger logger;
private volatile long lastSubmitTime;
public ExecutorServiceProxy(String name, NodeEngine nodeEngine, DistributedExecutorService service) {
super(nodeEngine, service);
this.name = name;
this.partitionCount = nodeEngine.getPartitionService().getPartitionCount();
this.logger = nodeEngine.getLogger(ExecutorServiceProxy.class);
}
@Override
public void execute(Runnable command, MemberSelector memberSelector) {
List<Member> members = selectMembers(memberSelector);
int selectedMember = random.nextInt(members.size());
executeOnMember(command, members.get(selectedMember));
}
@Override
public void executeOnMembers(Runnable command, MemberSelector memberSelector) {
List<Member> members = selectMembers(memberSelector);
executeOnMembers(command, members);
}
@Override
public <T> Future<T> submit(Callable<T> task, MemberSelector memberSelector) {
List<Member> members = selectMembers(memberSelector);
int selectedMember = random.nextInt(members.size());
return submitToMember(task, members.get(selectedMember));
}
@Override
public <T> Map<Member, Future<T>> submitToMembers(Callable<T> task, MemberSelector memberSelector) {
List<Member> members = selectMembers(memberSelector);
return submitToMembers(task, members);
}
@Override
public void submit(Runnable task, MemberSelector memberSelector, ExecutionCallback callback) {
List<Member> members = selectMembers(memberSelector);
int selectedMember = random.nextInt(members.size());
submitToMember(task, members.get(selectedMember), callback);
}
@Override
public void submitToMembers(Runnable task, MemberSelector memberSelector, MultiExecutionCallback callback) {
List<Member> members = selectMembers(memberSelector);
submitToMembers(task, members, callback);
}
@Override
public <T> void submit(Callable<T> task, MemberSelector memberSelector, ExecutionCallback<T> callback) {
List<Member> members = selectMembers(memberSelector);
int selectedMember = random.nextInt(members.size());
submitToMember(task, members.get(selectedMember), callback);
}
@Override
public <T> void submitToMembers(Callable<T> task, MemberSelector memberSelector, MultiExecutionCallback callback) {
List<Member> members = selectMembers(memberSelector);
submitToMembers(task, members, callback);
}
@Override
public void execute(Runnable command) {
Callable<?> callable = createRunnableAdapter(command);
submit(callable);
}
private <T> RunnableAdapter<T> createRunnableAdapter(Runnable command) {
if (command == null) {
throw new NullPointerException();
}
return new RunnableAdapter<T>(command);
}
@Override
public void executeOnKeyOwner(Runnable command, Object key) {
Callable<?> callable = createRunnableAdapter(command);
submitToKeyOwner(callable, key);
}
@Override
public void executeOnMember(Runnable command, Member member) {
Callable<?> callable = createRunnableAdapter(command);
submitToMember(callable, member);
}
@Override
public void executeOnMembers(Runnable command, Collection<Member> members) {
Callable<?> callable = createRunnableAdapter(command);
submitToMembers(callable, members);
}
@Override
public void executeOnAllMembers(Runnable command) {
Callable<?> callable = createRunnableAdapter(command);
submitToAllMembers(callable);
}
@Override
public Future<?> submit(Runnable task) {
Callable<?> callable = createRunnableAdapter(task);
return submit(callable);
}
@Override
public <T> Future<T> submit(Runnable task, T result) {
if (task == null) {
throw new NullPointerException();
}
if (isShutdown()) {
throw new RejectedExecutionException(getRejectionMessage());
}
Callable<T> callable = createRunnableAdapter(task);
NodeEngine nodeEngine = getNodeEngine();
String uuid = buildRandomUuidString();
int partitionId = getTaskPartitionId(callable);
CallableTaskOperation op = new CallableTaskOperation(name, uuid, callable);
ICompletableFuture future = invoke(partitionId, op);
boolean sync = checkSync();
if (sync) {
try {
future.get();
} catch (Exception exception) {
logger.warning(exception);
}
return new CompletedFuture<T>(nodeEngine.getSerializationService(), result, getAsyncExecutor());
}
return new CancellableDelegatingFuture<T>(future, result, nodeEngine, uuid, partitionId);
}
private InternalCompletableFuture invoke(int partitionId, CallableTaskOperation op) {
NodeEngine nodeEngine = getNodeEngine();
OperationService operationService = nodeEngine.getOperationService();
return operationService.invokeOnPartition(DistributedExecutorService.SERVICE_NAME, op, partitionId);
}
@Override
public <T> Future<T> submit(Callable<T> task) {
final int partitionId = getTaskPartitionId(task);
return submitToPartitionOwner(task, partitionId, false);
}
private <T> Future<T> submitToPartitionOwner(Callable<T> task, int partitionId, boolean preventSync) {
if (task == null) {
throw new NullPointerException();
}
if (isShutdown()) {
throw new RejectedExecutionException(getRejectionMessage());
}
NodeEngine nodeEngine = getNodeEngine();
String uuid = buildRandomUuidString();
boolean sync = !preventSync && checkSync();
CallableTaskOperation op = new CallableTaskOperation(name, uuid, task);
ICompletableFuture future = invoke(partitionId, op);
if (sync) {
Object response;
try {
response = future.get();
} catch (Exception e) {
response = e;
}
return new CompletedFuture<T>(nodeEngine.getSerializationService(), response, getAsyncExecutor());
}
return new CancellableDelegatingFuture<T>(future, nodeEngine, uuid, partitionId);
}
/**
* This is a hack to prevent overloading the system with unprocessed tasks. Once backpressure is added, this can
* be removed.
*/
private boolean checkSync() {
boolean sync = false;
long last = lastSubmitTime;
long now = Clock.currentTimeMillis();
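// Bursts are delimited by inter-submit gaps over 10ms: a longer gap resets the
// counter; within a burst, every SYNC_FREQUENCY-th (100th) submit is forced to
// wait for its result so callers periodically absorb the cost of their own load.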
if (last + 10 < now) {
consecutiveSubmits.set(0);
} else if (consecutiveSubmits.incrementAndGet() % SYNC_FREQUENCY == 0) {
sync = true;
}
lastSubmitTime = now;
return sync;
}
private <T> int getTaskPartitionId(Callable<T> task) {
int partitionId;
if (task instanceof PartitionAware) {
final Object partitionKey = ((PartitionAware) task).getPartitionKey();
partitionId = getNodeEngine().getPartitionService().getPartitionId(partitionKey);
} else {
partitionId = random.nextInt(partitionCount);
}
return partitionId;
}
@Override
public <T> Future<T> submitToKeyOwner(Callable<T> task, Object key) {
NodeEngine nodeEngine = getNodeEngine();
return submitToPartitionOwner(task, nodeEngine.getPartitionService().getPartitionId(key), false);
}
@Override
public <T> Future<T> submitToMember(Callable<T> task, Member member) {
if (task == null) {
throw new NullPointerException();
}
if (isShutdown()) {
throw new RejectedExecutionException(getRejectionMessage());
}
NodeEngine nodeEngine = getNodeEngine();
String uuid = buildRandomUuidString();
Address target = ((MemberImpl) member).getAddress();
boolean sync = checkSync();
MemberCallableTaskOperation op = new MemberCallableTaskOperation(name, uuid, task);
InternalCompletableFuture future = nodeEngine.getOperationService().invokeOnTarget(
DistributedExecutorService.SERVICE_NAME, op, target);
if (sync) {
Object response;
try {
response = future.get();
} catch (Exception e) {
response = e;
}
return new CompletedFuture<T>(nodeEngine.getSerializationService(), response, getAsyncExecutor());
}
return new CancellableDelegatingFuture<T>(future, nodeEngine, uuid, target);
}
@Override
public <T> Map<Member, Future<T>> submitToMembers(Callable<T> task, Collection<Member> members) {
Map<Member, Future<T>> futures = new HashMap<Member, Future<T>>(members.size());
for (Member member : members) {
futures.put(member, submitToMember(task, member));
}
return futures;
}
@Override
public <T> Map<Member, Future<T>> submitToAllMembers(Callable<T> task) {
NodeEngine nodeEngine = getNodeEngine();
return submitToMembers(task, nodeEngine.getClusterService().getMembers());
}
@Override
public void submit(Runnable task, ExecutionCallback callback) {
Callable<?> callable = createRunnableAdapter(task);
submit(callable, callback);
}
@Override
public void submitToKeyOwner(Runnable task, Object key, ExecutionCallback callback) {
Callable<?> callable = createRunnableAdapter(task);
submitToKeyOwner(callable, key, callback);
}
@Override
public void submitToMember(Runnable task, Member member, ExecutionCallback callback) {
Callable<?> callable = createRunnableAdapter(task);
submitToMember(callable, member, callback);
}
@Override
public void submitToMembers(Runnable task, Collection<Member> members, MultiExecutionCallback callback) {
Callable<?> callable = createRunnableAdapter(task);
submitToMembers(callable, members, callback);
}
@Override
public void submitToAllMembers(Runnable task, MultiExecutionCallback callback) {
Callable<?> callable = createRunnableAdapter(task);
submitToAllMembers(callable, callback);
}
private <T> void submitToPartitionOwner(Callable<T> task, ExecutionCallback<T> callback, int partitionId) {
if (isShutdown()) {
throw new RejectedExecutionException(getRejectionMessage());
}
NodeEngine nodeEngine = getNodeEngine();
CallableTaskOperation op = new CallableTaskOperation(name, null, task);
OperationService operationService = nodeEngine.getOperationService();
operationService.createInvocationBuilder(DistributedExecutorService.SERVICE_NAME, op, partitionId)
.setCallback(new ExecutionCallbackAdapter(callback))
.invoke();
}
@Override
public <T> void submit(Callable<T> task, ExecutionCallback<T> callback) {
int partitionId = getTaskPartitionId(task);
submitToPartitionOwner(task, callback, partitionId);
}
@Override
public <T> void submitToKeyOwner(Callable<T> task, Object key, ExecutionCallback<T> callback) {
NodeEngine nodeEngine = getNodeEngine();
submitToPartitionOwner(task, callback, nodeEngine.getPartitionService().getPartitionId(key));
}
public <T> void submitToMember(Callable<T> task, Member member, ExecutionCallback<T> callback) {
if (isShutdown()) {
throw new RejectedExecutionException(getRejectionMessage());
}
NodeEngine nodeEngine = getNodeEngine();
MemberCallableTaskOperation op = new MemberCallableTaskOperation(name, null, task);
OperationService operationService = nodeEngine.getOperationService();
Address address = ((MemberImpl) member).getAddress();
operationService.createInvocationBuilder(DistributedExecutorService.SERVICE_NAME, op, address)
.setCallback(new ExecutionCallbackAdapter(callback))
.invoke();
}
private String getRejectionMessage() {
return "ExecutorService[" + name + "] is shutdown! In order to create a new ExecutorService with name '"
+ name + "', you need to destroy current ExecutorService first!";
}
@Override
public <T> void submitToMembers(Callable<T> task, Collection<Member> members, MultiExecutionCallback callback) {
NodeEngine nodeEngine = getNodeEngine();
ExecutionCallbackAdapterFactory executionCallbackFactory = new ExecutionCallbackAdapterFactory(nodeEngine,
members, callback);
for (Member member : members) {
submitToMember(task, member, executionCallbackFactory.<T>callbackFor(member));
}
}
@Override
public <T> void submitToAllMembers(Callable<T> task, MultiExecutionCallback callback) {
NodeEngine nodeEngine = getNodeEngine();
submitToMembers(task, nodeEngine.getClusterService().getMembers(), callback);
}
@Override
public <T> List<Future<T>> invokeAll(Collection<? extends Callable<T>> tasks) throws InterruptedException {
List<Future<T>> futures = new ArrayList<Future<T>>(tasks.size());
List<Future<T>> result = new ArrayList<Future<T>>(tasks.size());
for (Callable<T> task : tasks) {
futures.add(submit(task));
}
for (Future<T> future : futures) {
Object value;
try {
value = future.get();
} catch (ExecutionException e) {
value = e;
}
result.add(new CompletedFuture<T>(getNodeEngine().getSerializationService(), value, getAsyncExecutor()));
}
return result;
}
@Override
public <T> List<Future<T>> invokeAll(Collection<? extends Callable<T>> tasks,
long timeout, TimeUnit unit) throws InterruptedException {
if (unit == null) {
throw new NullPointerException("unit must not be null");
}
if (tasks == null) {
throw new NullPointerException("tasks must not be null");
}
long timeoutNanos = unit.toNanos(timeout);
List<Future<T>> futures = new ArrayList<Future<T>>(tasks.size());
List<Future<T>> result = new ArrayList<Future<T>>(tasks.size());
boolean done = true;
try {
for (Callable<T> task : tasks) {
long start = System.nanoTime();
int partitionId = getTaskPartitionId(task);
futures.add(submitToPartitionOwner(task, partitionId, true));
timeoutNanos -= System.nanoTime() - start;
if (timeoutNanos <= 0L) {
for (Future<T> future : futures) {
result.add(future);
}
return result;
}
}
for (int i = 0, size = futures.size(); i < size; i++) {
long start = System.nanoTime();
Object value;
try {
Future<T> future = futures.get(i);
value = future.get(timeoutNanos, TimeUnit.NANOSECONDS);
} catch (ExecutionException e) {
value = e;
} catch (TimeoutException e) {
done = false;
for (int l = i; l < size; l++) {
Future<T> f = futures.get(l);
if (!f.isDone()) {
result.add(f);
} else {
Object v;
try {
v = f.get();
} catch (ExecutionException ex) {
v = ex;
}
result.add(new CompletedFuture<T>(getNodeEngine().getSerializationService(), v, getAsyncExecutor()));
}
}
break;
}
result.add(new CompletedFuture<T>(getNodeEngine().getSerializationService(), value, getAsyncExecutor()));
timeoutNanos -= System.nanoTime() - start;
}
} catch (Throwable t) {
logger.severe(t);
} finally {
if (!done) {
cancelAll(result);
}
}
return result;
}
private static <T> void cancelAll(List<Future<T>> result) {
for (Future<T> aResult : result) {
aResult.cancel(true);
}
}
@Override
public <T> T invokeAny(Collection<? extends Callable<T>> tasks) throws InterruptedException, ExecutionException {
throw new UnsupportedOperationException();
}
@Override
public <T> T invokeAny(Collection<? extends Callable<T>> tasks, long timeout, TimeUnit unit)
throws InterruptedException, ExecutionException, TimeoutException {
throw new UnsupportedOperationException();
}
@Override
protected RuntimeException throwNotActiveException() {
throw new RejectedExecutionException();
}
@Override
public boolean isShutdown() {
try {
return getService().isShutdown(name);
} catch (HazelcastInstanceNotActiveException e) {
return true;
}
}
@Override
public boolean isTerminated() {
return isShutdown();
}
@Override
public boolean awaitTermination(long timeout, TimeUnit unit) throws InterruptedException {
return false;
}
@Override
public void shutdown() {
NodeEngine nodeEngine = getNodeEngine();
Collection<MemberImpl> members = nodeEngine.getClusterService().getMemberList();
OperationService operationService = nodeEngine.getOperationService();
Collection<Future> calls = new LinkedList<Future>();
for (MemberImpl member : members) {
if (member.localMember()) {
getService().shutdownExecutor(name);
} else {
Future f = submitShutdownOperation(operationService, member);
calls.add(f);
}
}
for (Future f : calls) {
try {
f.get(1, TimeUnit.SECONDS);
} catch (Exception exception) {
if (logger.isFinestEnabled()) {
logger.finest(exception);
}
}
}
}
private Future submitShutdownOperation(OperationService operationService, MemberImpl member) {
ShutdownOperation op = new ShutdownOperation(name);
return operationService.invokeOnTarget(getServiceName(), op, member.getAddress());
}
@Override
public List<Runnable> shutdownNow() {
shutdown();
return Collections.emptyList();
}
@Override
public LocalExecutorStats getLocalExecutorStats() {
return getService().getLocalExecutorStats(name);
}
@Override
public String getServiceName() {
return DistributedExecutorService.SERVICE_NAME;
}
@Override
public String getName() {
return name;
}
private ExecutorService getAsyncExecutor() {
return getNodeEngine().getExecutionService().getExecutor(ExecutionService.ASYNC_EXECUTOR);
}
private List<Member> selectMembers(MemberSelector memberSelector) {
if (memberSelector == null) {
throw new IllegalArgumentException("memberSelector must not be null");
}
List<Member> selected = new ArrayList<Member>();
Collection<MemberImpl> members = getNodeEngine().getClusterService().getMemberList();
for (MemberImpl member : members) {
if (memberSelector.select(member)) {
selected.add(member);
}
}
return selected;
}
@Override
public String toString() {
return "IExecutorService{" + "name='" + name + '\'' + '}';
}
}
| 1no label
|
hazelcast_src_main_java_com_hazelcast_executor_ExecutorServiceProxy.java
|
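The timed invokeAll above charges every blocking wait against a single shared nanosecond budget, so the whole batch observes one deadline. A minimal sketch of that pattern; the TimeoutBudget class and awaitAll method are illustrative names, not Hazelcast API:
import java.util.List;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
final class TimeoutBudget {
    /** Waits for each future in turn, deducting elapsed time from one shared budget. */
    static <T> void awaitAll(List<Future<T>> futures, long timeout, TimeUnit unit) throws Exception {
        long remainingNanos = unit.toNanos(timeout);
        for (Future<T> future : futures) {
            if (remainingNanos <= 0L) {
                throw new TimeoutException("deadline exhausted before all results arrived");
            }
            long start = System.nanoTime();
            future.get(remainingNanos, TimeUnit.NANOSECONDS); // may itself throw TimeoutException
            remainingNanos -= System.nanoTime() - start;      // charge this wait to the budget
        }
    }
}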
1,218 |
public class PaymentActionType implements Serializable, BroadleafEnumerationType {
private static final long serialVersionUID = 1L;
private static final Map<String, PaymentActionType> TYPES = new LinkedHashMap<String, PaymentActionType>();
public static final PaymentActionType AUTHORIZE = new PaymentActionType("AUTHORIZE", "Authorize");
public static final PaymentActionType DEBIT = new PaymentActionType("DEBIT", "Debit");
public static final PaymentActionType AUTHORIZEANDDEBIT = new PaymentActionType("AUTHORIZEANDDEBIT", "Authorize and Debit");
public static final PaymentActionType CREDIT = new PaymentActionType("CREDIT", "Credit");
public static final PaymentActionType VOID = new PaymentActionType("VOID", "Void");
public static final PaymentActionType BALANCE = new PaymentActionType("BALANCE", "Check Balance");
public static final PaymentActionType REVERSEAUTHORIZE = new PaymentActionType("REVERSEAUTHORIZE", "Reverse Authorize");
public static final PaymentActionType PARTIALPAYMENT = new PaymentActionType("PARTIALPAYMENT", "Partial Payment");
public static PaymentActionType getInstance(final String type) {
return TYPES.get(type);
}
private String type;
private String friendlyType;
public PaymentActionType() {
//do nothing
}
public PaymentActionType(final String type, final String friendlyType) {
this.friendlyType = friendlyType;
setType(type);
}
public String getType() {
return type;
}
public String getFriendlyType() {
return friendlyType;
}
private void setType(final String type) {
this.type = type;
if (!TYPES.containsKey(type)) {
TYPES.put(type, this);
}
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((type == null) ? 0 : type.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
PaymentActionType other = (PaymentActionType) obj;
if (type == null) {
if (other.type != null)
return false;
} else if (!type.equals(other.type))
return false;
return true;
}
}
| 1no label
|
core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_payment_service_workflow_PaymentActionType.java
|
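PaymentActionType is the registry-backed "extensible enum" idiom: every instance registers its code in the static TYPES map from the constructor, so getInstance resolves persisted string codes and extension modules can add constants without touching the base class. A brief usage sketch; PaymentActionTypeDemo and the PARTIAL_REFUND constant are hypothetical:
import org.broadleafcommerce.core.payment.service.workflow.PaymentActionType; // package inferred from the path above
public final class PaymentActionTypeDemo {
    // A hypothetical extension constant; the superclass constructor registers it in TYPES.
    public static final PaymentActionType PARTIAL_REFUND =
            new PaymentActionType("PARTIAL_REFUND", "Partial Refund");
    public static void main(String[] args) {
        // Resolve a constant by the string code stored in the database:
        System.out.println(PaymentActionType.getInstance("AUTHORIZE").getFriendlyType()); // "Authorize"
        System.out.println(PaymentActionType.getInstance("PARTIAL_REFUND") == PARTIAL_REFUND); // true
    }
}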
188 |
public interface OService {
public String getName();
public void startup();
public void shutdown();
}
| 0true
|
commons_src_main_java_com_orientechnologies_common_util_OService.java
|
424 |
private class ClientJob<KeyIn, ValueIn> extends AbstractJob<KeyIn, ValueIn> {
public ClientJob(String name, KeyValueSource<KeyIn, ValueIn> keyValueSource) {
super(name, ClientMapReduceProxy.this, keyValueSource);
}
@Override
protected <T> JobCompletableFuture<T> invoke(final Collator collator) {
try {
final String jobId = UuidUtil.buildRandomUuidString();
ClientContext context = getContext();
ClientInvocationService cis = context.getInvocationService();
ClientMapReduceRequest request = new ClientMapReduceRequest(name, jobId, keys,
predicate, mapper, combinerFactory, reducerFactory, keyValueSource,
chunkSize, topologyChangedStrategy);
final ClientCompletableFuture completableFuture = new ClientCompletableFuture(jobId);
ClientCallFuture future = (ClientCallFuture) cis.invokeOnRandomTarget(request, null);
future.andThen(new ExecutionCallback() {
@Override
public void onResponse(Object response) {
try {
if (collator != null) {
response = collator.collate(((Map) response).entrySet());
}
} finally {
completableFuture.setResult(response);
trackableJobs.remove(jobId);
}
}
@Override
public void onFailure(Throwable t) {
try {
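// A cancelled remote job arrives as an ExecutionException wrapping a
// CancellationException; unwrap it so the job's future reports cancellation directly.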
if (t instanceof ExecutionException
&& t.getCause() instanceof CancellationException) {
t = t.getCause();
}
completableFuture.setResult(t);
} finally {
trackableJobs.remove(jobId);
}
}
});
Address runningMember = future.getConnection().getRemoteEndpoint();
trackableJobs.putIfAbsent(jobId, new ClientTrackableJob<T>(jobId, runningMember, completableFuture));
return completableFuture;
} catch (Exception e) {
throw new RuntimeException(e);
}
}
}
| 0true
|
hazelcast-client_src_main_java_com_hazelcast_client_proxy_ClientMapReduceProxy.java
|
1,936 |
@Service("blFormBuilderService")
public class FormBuilderServiceImpl implements FormBuilderService {
private static final Log LOG = LogFactory.getLog(FormBuilderServiceImpl.class);
@Resource(name = "blAdminEntityService")
protected AdminEntityService adminEntityService;
@Resource (name = "blAdminNavigationService")
protected AdminNavigationService navigationService;
@Resource(name = "blFormBuilderExtensionManagers")
protected List<FormBuilderExtensionManager> extensionManagers;
@Resource(name="blEntityConfiguration")
protected EntityConfiguration entityConfiguration;
protected static final VisibilityEnum[] FORM_HIDDEN_VISIBILITIES = new VisibilityEnum[] {
VisibilityEnum.HIDDEN_ALL, VisibilityEnum.FORM_HIDDEN
};
protected static final VisibilityEnum[] GRID_HIDDEN_VISIBILITIES = new VisibilityEnum[] {
VisibilityEnum.HIDDEN_ALL, VisibilityEnum.GRID_HIDDEN
};
@Override
public ListGrid buildMainListGrid(DynamicResultSet drs, ClassMetadata cmd, String sectionKey)
throws ServiceException {
List<Field> headerFields = new ArrayList<Field>();
ListGrid.Type type = ListGrid.Type.MAIN;
String idProperty = "id";
for (Property p : cmd.getProperties()) {
if (p.getMetadata() instanceof BasicFieldMetadata) {
BasicFieldMetadata fmd = (BasicFieldMetadata) p.getMetadata();
if (SupportedFieldType.ID.equals(fmd.getFieldType())) {
idProperty = fmd.getName();
}
if (fmd.isProminent() != null && fmd.isProminent()
&& !ArrayUtils.contains(getGridHiddenVisibilities(), fmd.getVisibility())) {
Field hf = createHeaderField(p, fmd);
headerFields.add(hf);
}
}
}
ListGrid listGrid = createListGrid(cmd.getCeilingType(), headerFields, type, drs, sectionKey, 0, idProperty);
if (CollectionUtils.isNotEmpty(listGrid.getHeaderFields())) {
// Set the first column to be able to link to the main entity
listGrid.getHeaderFields().iterator().next().setMainEntityLink(true);
} else {
String message = "There are no listgrid header fields configured for the class " + cmd.getCeilingType();
message += "Please mark some @AdminPresentation fields with 'prominent = true'";
LOG.error(message);
}
return listGrid;
}
protected Field createHeaderField(Property p, BasicFieldMetadata fmd) {
Field hf;
if (fmd.getFieldType().equals(SupportedFieldType.EXPLICIT_ENUMERATION) ||
fmd.getFieldType().equals(SupportedFieldType.BROADLEAF_ENUMERATION) ||
fmd.getFieldType().equals(SupportedFieldType.DATA_DRIVEN_ENUMERATION) ||
fmd.getFieldType().equals(SupportedFieldType.EMPTY_ENUMERATION)) {
hf = new ComboField();
((ComboField) hf).setOptions(fmd.getEnumerationValues());
} else {
hf = new Field();
}
hf.withName(p.getName())
.withFriendlyName(fmd.getFriendlyName())
.withOrder(fmd.getGridOrder())
.withColumnWidth(fmd.getColumnWidth())
.withForeignKeyDisplayValueProperty(fmd.getForeignKeyDisplayValueProperty())
.withForeignKeyClass(fmd.getForeignKeyClass())
.withOwningEntityClass(fmd.getOwningClass() != null ? fmd.getOwningClass() : fmd.getTargetClass());
String fieldType = fmd.getFieldType() == null ? null : fmd.getFieldType().toString();
hf.setFieldType(fieldType);
return hf;
}
@Override
public ListGrid buildCollectionListGrid(String containingEntityId, DynamicResultSet drs, Property field,
String sectionKey)
throws ServiceException {
FieldMetadata fmd = field.getMetadata();
// Get the class metadata for this particular field
PersistencePackageRequest ppr = PersistencePackageRequest.fromMetadata(fmd);
ClassMetadata cmd = adminEntityService.getClassMetadata(ppr);
List<Field> headerFields = new ArrayList<Field>();
ListGrid.Type type = null;
boolean editable = false;
boolean sortable = false;
boolean readOnly = false;
boolean hideIdColumn = false;
boolean canFilterAndSort = true;
String idProperty = "id";
for (Property property : cmd.getProperties()) {
if (property.getMetadata() instanceof BasicFieldMetadata &&
SupportedFieldType.ID==((BasicFieldMetadata) property.getMetadata()).getFieldType() &&
//make sure it's a property for this entity - not an association
!property.getName().contains(".")) {
idProperty = property.getName();
break;
}
}
// Get the header fields for this list grid. Note that the header fields are different depending on the
// kind of field this is.
if (fmd instanceof BasicFieldMetadata) {
readOnly = ((BasicFieldMetadata) fmd).getReadOnly();
for (Property p : cmd.getProperties()) {
if (p.getMetadata() instanceof BasicFieldMetadata) {
BasicFieldMetadata md = (BasicFieldMetadata) p.getMetadata();
if (SupportedFieldType.ID.equals(md.getFieldType())) {
idProperty = md.getName();
}
if (md.isProminent() != null && md.isProminent()
&& !ArrayUtils.contains(getGridHiddenVisibilities(), md.getVisibility())) {
Field hf = createHeaderField(p, md);
headerFields.add(hf);
}
}
}
type = ListGrid.Type.TO_ONE;
} else if (fmd instanceof BasicCollectionMetadata) {
readOnly = !((BasicCollectionMetadata) fmd).isMutable();
for (Property p : cmd.getProperties()) {
if (p.getMetadata() instanceof BasicFieldMetadata) {
BasicFieldMetadata md = (BasicFieldMetadata) p.getMetadata();
if (md.isProminent() != null && md.isProminent()
&& !ArrayUtils.contains(getGridHiddenVisibilities(), md.getVisibility())) {
Field hf = createHeaderField(p, md);
headerFields.add(hf);
}
}
}
type = ListGrid.Type.BASIC;
if (((BasicCollectionMetadata) fmd).getAddMethodType().equals(AddMethodType.PERSIST)) {
editable = true;
}
} else if (fmd instanceof AdornedTargetCollectionMetadata) {
readOnly = !((AdornedTargetCollectionMetadata) fmd).isMutable();
AdornedTargetCollectionMetadata atcmd = (AdornedTargetCollectionMetadata) fmd;
for (String fieldName : atcmd.getGridVisibleFields()) {
Property p = cmd.getPMap().get(fieldName);
BasicFieldMetadata md = (BasicFieldMetadata) p.getMetadata();
Field hf = createHeaderField(p, md);
headerFields.add(hf);
}
type = ListGrid.Type.ADORNED;
if (atcmd.getMaintainedAdornedTargetFields().length > 0) {
editable = true;
}
AdornedTargetList adornedList = (AdornedTargetList) atcmd.getPersistencePerspective()
.getPersistencePerspectiveItems().get(PersistencePerspectiveItemType.ADORNEDTARGETLIST);
sortable = StringUtils.isNotBlank(adornedList.getSortField());
} else if (fmd instanceof MapMetadata) {
readOnly = !((MapMetadata) fmd).isMutable();
MapMetadata mmd = (MapMetadata) fmd;
Property p2 = cmd.getPMap().get("key");
BasicFieldMetadata keyMd = (BasicFieldMetadata) p2.getMetadata();
keyMd.setFriendlyName("Key");
Field hf = createHeaderField(p2, keyMd);
headerFields.add(hf);
if (mmd.isSimpleValue()) {
Property valueProperty = cmd.getPMap().get("value");
BasicFieldMetadata valueMd = (BasicFieldMetadata) valueProperty.getMetadata();
valueMd.setFriendlyName("Value");
hf = createHeaderField(valueProperty, valueMd);
headerFields.add(hf);
idProperty = "key";
hideIdColumn = true;
} else {
for (Property p : cmd.getProperties()) {
if (p.getMetadata() instanceof BasicFieldMetadata) {
BasicFieldMetadata md = (BasicFieldMetadata) p.getMetadata();
if (md.getTargetClass().equals(mmd.getValueClassName())) {
if (md.isProminent() != null && md.isProminent()
&& !ArrayUtils.contains(getGridHiddenVisibilities(), md.getVisibility())) {
hf = createHeaderField(p, md);
headerFields.add(hf);
}
}
}
}
}
type = ListGrid.Type.MAP;
editable = true;
canFilterAndSort = false;
}
String ceilingType = "";
if (fmd instanceof BasicFieldMetadata) {
ceilingType = cmd.getCeilingType();
} else if (fmd instanceof CollectionMetadata) {
ceilingType = ((CollectionMetadata) fmd).getCollectionCeilingEntity();
}
if (CollectionUtils.isEmpty(headerFields)) {
String message = "There are no listgrid header fields configured for the class " + ceilingType + " and property '" +
field.getName() + "'.";
if (type == ListGrid.Type.ADORNED || type == ListGrid.Type.ADORNED_WITH_FORM) {
message += " Please configure 'gridVisibleFields' in your @AdminPresentationAdornedTargetCollection configuration";
} else {
message += " Please mark some @AdminPresentation fields with 'prominent = true'";
}
LOG.error(message);
}
ListGrid listGrid = createListGrid(ceilingType, headerFields, type, drs, sectionKey, fmd.getOrder(), idProperty);
listGrid.setSubCollectionFieldName(field.getName());
listGrid.setFriendlyName(field.getMetadata().getFriendlyName());
if (StringUtils.isEmpty(listGrid.getFriendlyName())) {
listGrid.setFriendlyName(field.getName());
}
listGrid.setContainingEntityId(containingEntityId);
listGrid.setReadOnly(readOnly);
listGrid.setHideIdColumn(hideIdColumn);
listGrid.setCanFilterAndSort(canFilterAndSort);
if (editable) {
listGrid.getRowActions().add(DefaultListGridActions.UPDATE);
}
if (readOnly) {
listGrid.getRowActions().add(DefaultListGridActions.VIEW);
}
if (sortable) {
listGrid.setCanFilterAndSort(false);
listGrid.getToolbarActions().add(DefaultListGridActions.REORDER);
}
listGrid.getRowActions().add(DefaultListGridActions.REMOVE);
return listGrid;
}
protected ListGrid createListGrid(String className, List<Field> headerFields, ListGrid.Type type, DynamicResultSet drs,
String sectionKey, int order, String idProperty) {
// Create the list grid and set some basic attributes
ListGrid listGrid = new ListGrid();
listGrid.setClassName(className);
listGrid.getHeaderFields().addAll(headerFields);
listGrid.setListGridType(type);
listGrid.setSectionKey(sectionKey);
listGrid.setOrder(order);
listGrid.setIdProperty(idProperty);
listGrid.setStartIndex(drs.getStartIndex());
listGrid.setTotalRecords(drs.getTotalRecords());
listGrid.setPageSize(drs.getPageSize());
AdminSection section = navigationService.findAdminSectionByClass(className);
if (section != null) {
listGrid.setExternalEntitySectionKey(section.getUrl());
}
// For each of the entities (rows) in the list grid, we need to build the associated
// ListGridRecord and set the required fields on the record. These fields are the same ones
// that are used for the header fields.
for (Entity e : drs.getRecords()) {
ListGridRecord record = new ListGridRecord();
record.setListGrid(listGrid);
if (e.findProperty(idProperty) != null) {
record.setId(e.findProperty(idProperty).getValue());
}
for (Field headerField : headerFields) {
Property p = e.findProperty(headerField.getName());
if (p != null) {
Field recordField = new Field().withName(headerField.getName())
.withFriendlyName(headerField.getFriendlyName())
.withOrder(p.getMetadata().getOrder());
if (headerField instanceof ComboField) {
recordField.setValue(((ComboField) headerField).getOption(p.getValue()));
recordField.setDisplayValue(p.getDisplayValue());
} else {
recordField.setValue(p.getValue());
recordField.setDisplayValue(p.getDisplayValue());
}
recordField.setDerived(isDerivedField(headerField, recordField, p));
record.getFields().add(recordField);
}
}
if (e.findProperty(AdminMainEntity.MAIN_ENTITY_NAME_PROPERTY) != null) {
Field hiddenField = new Field().withName(AdminMainEntity.MAIN_ENTITY_NAME_PROPERTY);
hiddenField.setValue(e.findProperty(AdminMainEntity.MAIN_ENTITY_NAME_PROPERTY).getValue());
record.getHiddenFields().add(hiddenField);
}
listGrid.getRecords().add(record);
}
return listGrid;
}
/**
* Determines whether or not a particular field in a record is derived. By default this checks the {@link BasicFieldMetadata}
* for the given Property to see if something on the backend has marked it as derived
*
* @param headerField the header for this recordField
* @param recordField the recordField being populated
* @param p the property that relates to this recordField
* @return whether or not this field is derived
* @see {@link #createListGrid(String, List, org.broadleafcommerce.openadmin.web.form.component.ListGrid.Type, DynamicResultSet, String, int, String)}
*/
protected Boolean isDerivedField(Field headerField, Field recordField, Property p) {
return BooleanUtils.isTrue(((BasicFieldMetadata) p.getMetadata()).getIsDerived());
}
protected void setEntityFormFields(EntityForm ef, List<Property> properties) {
for (Property property : properties) {
if (property.getMetadata() instanceof BasicFieldMetadata) {
BasicFieldMetadata fmd = (BasicFieldMetadata) property.getMetadata();
if (!ArrayUtils.contains(getFormHiddenVisibilities(), fmd.getVisibility())) {
// Depending on its visibility, the field for a particular property may not be created on the form
String fieldType = fmd.getFieldType() == null ? null : fmd.getFieldType().toString();
// Create the field and set some basic attributes
Field f;
if (fieldType.equals(SupportedFieldType.BROADLEAF_ENUMERATION.toString())
|| fieldType.equals(SupportedFieldType.EXPLICIT_ENUMERATION.toString())
|| fieldType.equals(SupportedFieldType.DATA_DRIVEN_ENUMERATION.toString())
|| fieldType.equals(SupportedFieldType.EMPTY_ENUMERATION.toString())) {
// We're dealing with fields that should render as drop-downs, so set their possible values
f = new ComboField();
((ComboField) f).setOptions(fmd.getEnumerationValues());
} else if (fieldType.equals(SupportedFieldType.RULE_SIMPLE.toString())
|| fieldType.equals(SupportedFieldType.RULE_WITH_QUANTITY.toString())) {
// We're dealing with rule builders, so we'll create those specialized fields
f = new RuleBuilderField();
((RuleBuilderField) f).setJsonFieldName(property.getName() + "Json");
((RuleBuilderField) f).setDataWrapper(new DataWrapper());
((RuleBuilderField) f).setFieldBuilder(fmd.getRuleIdentifier());
String blankJsonString = "{\"data\":[]}";
((RuleBuilderField) f).setJson(blankJsonString);
DataWrapper dw = convertJsonToDataWrapper(blankJsonString);
if (dw != null) {
((RuleBuilderField) f).setDataWrapper(dw);
}
if (fieldType.equals(SupportedFieldType.RULE_SIMPLE.toString())) {
((RuleBuilderField) f).setStyleClass("rule-builder-simple");
} else if (fieldType.equals(SupportedFieldType.RULE_WITH_QUANTITY.toString())) {
((RuleBuilderField) f).setStyleClass("rule-builder-complex");
}
} else if (LookupType.DROPDOWN.equals(fmd.getLookupType())) {
// We're dealing with a to-one field that wants to be rendered as a dropdown instead of in a
// modal, so we'll provision the combo field here. Available options will be set as part of a
// subsequent operation
f = new ComboField();
} else if (fieldType.equals(SupportedFieldType.MEDIA.toString())) {
f = new MediaField();
} else {
// Create a default field since there was no specialized handler
f = new Field();
}
Boolean required = fmd.getRequiredOverride();
if (required == null) {
required = fmd.getRequired();
}
f.withName(property.getName())
.withFieldType(fieldType)
.withOrder(fmd.getOrder())
.withFriendlyName(fmd.getFriendlyName())
.withForeignKeyDisplayValueProperty(fmd.getForeignKeyDisplayValueProperty())
.withForeignKeyClass(fmd.getForeignKeyClass())
.withOwningEntityClass(fmd.getOwningClass()!=null?fmd.getOwningClass():fmd.getInheritedFromType())
.withRequired(required)
.withReadOnly(fmd.getReadOnly())
.withTranslatable(fmd.getTranslatable())
.withAlternateOrdering((Boolean) fmd.getAdditionalMetadata().get(Field.ALTERNATE_ORDERING))
.withLargeEntry(fmd.isLargeEntry())
.withHint(fmd.getHint())
.withTooltip(fmd.getTooltip())
.withHelp(fmd.getHelpText());
if (StringUtils.isBlank(f.getFriendlyName())) {
f.setFriendlyName(f.getName());
}
// Add the field to the appropriate FieldGroup
ef.addField(f, fmd.getGroup(), fmd.getGroupOrder(), fmd.getTab(), fmd.getTabOrder());
}
}
}
}
@Override
public void removeNonApplicableFields(ClassMetadata cmd, EntityForm entityForm, String entityType) {
for (Property p : cmd.getProperties()) {
if (!ArrayUtils.contains(p.getMetadata().getAvailableToTypes(), entityType)) {
entityForm.removeField(p.getName());
}
}
}
@Override
public EntityForm createEntityForm(ClassMetadata cmd)
throws ServiceException {
EntityForm ef = createStandardEntityForm();
populateEntityForm(cmd, ef);
return ef;
}
@Override
public void populateEntityForm(ClassMetadata cmd, EntityForm ef)
throws ServiceException {
ef.setCeilingEntityClassname(cmd.getCeilingType());
AdminSection section = navigationService.findAdminSectionByClass(cmd.getCeilingType());
if (section != null) {
ef.setSectionKey(section.getUrl());
} else {
ef.setSectionKey(cmd.getCeilingType());
}
setEntityFormFields(ef, Arrays.asList(cmd.getProperties()));
populateDropdownToOneFields(ef, cmd);
}
@Override
public EntityForm createEntityForm(ClassMetadata cmd, Entity entity)
throws ServiceException {
EntityForm ef = createStandardEntityForm();
populateEntityForm(cmd, entity, ef);
return ef;
}
@Override
public void populateEntityForm(ClassMetadata cmd, Entity entity, EntityForm ef)
throws ServiceException {
// Get the empty form with appropriate fields
populateEntityForm(cmd, ef);
String idProperty = adminEntityService.getIdProperty(cmd);
ef.setId(entity.findProperty(idProperty).getValue());
ef.setEntityType(entity.getType()[0]);
populateEntityFormFieldValues(cmd, entity, ef);
Property p = entity.findProperty(BasicPersistenceModule.MAIN_ENTITY_NAME_PROPERTY);
if (p != null) {
ef.setMainEntityName(p.getValue());
}
}
@Override
public void populateEntityFormFieldValues(ClassMetadata cmd, Entity entity, EntityForm ef) {
// Set the appropriate property values
for (Property p : cmd.getProperties()) {
if (p.getMetadata() instanceof BasicFieldMetadata) {
BasicFieldMetadata basicFM = (BasicFieldMetadata) p.getMetadata();
Property entityProp = entity.findProperty(p.getName());
boolean explicitlyShown = VisibilityEnum.FORM_EXPLICITLY_SHOWN.equals(basicFM.getVisibility());
//always show special map fields
if (p.getName().equals("key") || p.getName().equals("priorKey")) {
explicitlyShown = true;
}
if (entityProp == null && explicitlyShown) {
Field field = ef.findField(p.getName());
if (field != null) {
field.setValue(null);
}
} else if (entityProp == null && !SupportedFieldType.PASSWORD_CONFIRM.equals(basicFM.getExplicitFieldType())) {
ef.removeField(p.getName());
} else {
Field field = ef.findField(p.getName());
if (field != null) {
if (basicFM.getFieldType()==SupportedFieldType.RULE_SIMPLE
|| basicFM.getFieldType()==SupportedFieldType.RULE_WITH_QUANTITY) {
RuleBuilderField rbf = (RuleBuilderField) field;
if (entity.getPMap().containsKey(rbf.getJsonFieldName())) {
String json = entity.getPMap().get(rbf.getJsonFieldName()).getValue();
rbf.setJson(json);
DataWrapper dw = convertJsonToDataWrapper(json);
if (dw != null) {
rbf.setDataWrapper(dw);
}
}
}
if (basicFM.getFieldType() == SupportedFieldType.MEDIA) {
field.setValue(entityProp.getValue());
field.setDisplayValue(entityProp.getDisplayValue());
MediaField mf = (MediaField) field;
mf.setMedia(convertJsonToMedia(entityProp.getUnHtmlEncodedValue()));
} else if (!SupportedFieldType.PASSWORD_CONFIRM.equals(basicFM.getExplicitFieldType())){
field.setValue(entityProp.getValue());
field.setDisplayValue(entityProp.getDisplayValue());
}
}
}
}
}
}
protected Media convertJsonToMedia(String json) {
if (json != null && !"".equals(json)) {
try {
ObjectMapper om = new ObjectMapper();
om.configure(DeserializationConfig.Feature.FAIL_ON_UNKNOWN_PROPERTIES, false);
return om.readValue(json, entityConfiguration.lookupEntityClass(MediaDto.class.getName(), MediaDto.class));
} catch (Exception e) {
LOG.warn("Error parsing json to media " + json, e);
}
}
return entityConfiguration.createEntityInstance(MediaDto.class.getName(), MediaDto.class);
}
/**
* When using Thymeleaf, we need to convert the JSON string back to
* a DataWrapper object because Thymeleaf escapes JSON strings.
* Thymeleaf uses its own object de-serializer
* see: https://github.com/thymeleaf/thymeleaf/issues/84
* see: http://forum.thymeleaf.org/Spring-Javascript-and-escaped-JSON-td4024739.html
* @param json
* @return DataWrapper
* @throws IOException
*/
protected DataWrapper convertJsonToDataWrapper(String json) {
ObjectMapper mapper = new ObjectMapper();
DataDTODeserializer dtoDeserializer = new DataDTODeserializer();
SimpleModule module = new SimpleModule("DataDTODeserializerModule", new Version(1, 0, 0, null));
module.addDeserializer(DataDTO.class, dtoDeserializer);
mapper.registerModule(module);
try {
return mapper.readValue(json, DataWrapper.class);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
protected void populateDropdownToOneFields(EntityForm ef, ClassMetadata cmd)
throws ServiceException {
for (Property p : cmd.getProperties()) {
if (p.getMetadata() instanceof BasicFieldMetadata) {
BasicFieldMetadata fmd = (BasicFieldMetadata) p.getMetadata();
if (LookupType.DROPDOWN.equals(fmd.getLookupType())
&& !ArrayUtils.contains(getFormHiddenVisibilities(), fmd.getVisibility())) {
// Get the records
PersistencePackageRequest toOnePpr = PersistencePackageRequest.standard()
.withCeilingEntityClassname(fmd.getForeignKeyClass());
Entity[] rows = adminEntityService.getRecords(toOnePpr).getRecords();
// Determine the id field
String idProp = null;
ClassMetadata foreignClassMd = adminEntityService.getClassMetadata(toOnePpr);
for (Property foreignP : foreignClassMd.getProperties()) {
if (foreignP.getMetadata() instanceof BasicFieldMetadata) {
BasicFieldMetadata foreignFmd = (BasicFieldMetadata) foreignP.getMetadata();
if (SupportedFieldType.ID.equals(foreignFmd.getFieldType())) {
idProp = foreignP.getName();
}
}
}
if (idProp == null) {
throw new RuntimeException("Could not determine ID property for " + fmd.getForeignKeyClass());
}
// Determine the display field
String displayProp = fmd.getLookupDisplayProperty();
// Build the options map
Map<String, String> options = new HashMap<String, String>();
for (Entity row : rows) {
String displayValue = row.findProperty(displayProp).getDisplayValue();
if (StringUtils.isBlank(displayValue)) {
displayValue = row.findProperty(displayProp).getValue();
}
options.put(row.findProperty(idProp).getValue(), displayValue);
}
// Set the options on the entity field
ComboField cf = (ComboField) ef.findField(p.getName());
cf.setOptions(options);
}
}
}
}
@Override
public EntityForm createEntityForm(ClassMetadata cmd, Entity entity, Map<String, DynamicResultSet> collectionRecords)
throws ServiceException {
EntityForm ef = createStandardEntityForm();
populateEntityForm(cmd, entity, collectionRecords, ef);
return ef;
}
@Override
public void populateEntityForm(ClassMetadata cmd, Entity entity, Map<String, DynamicResultSet> collectionRecords, EntityForm ef)
throws ServiceException {
// Get the form with values for this entity
populateEntityForm(cmd, entity, ef);
// Attach the sub-collection list grids and specialty UI support
for (Property p : cmd.getProperties()) {
if (p.getMetadata() instanceof BasicFieldMetadata) {
continue;
}
if (!ArrayUtils.contains(p.getMetadata().getAvailableToTypes(), entity.getType()[0])) {
continue;
}
DynamicResultSet subCollectionEntities = collectionRecords.get(p.getName());
String containingEntityId = entity.getPMap().get(ef.getIdProperty()).getValue();
ListGrid listGrid = buildCollectionListGrid(containingEntityId, subCollectionEntities, p, ef.getSectionKey());
listGrid.setListGridType(ListGrid.Type.INLINE);
CollectionMetadata md = ((CollectionMetadata) p.getMetadata());
ef.addListGrid(listGrid, md.getTab(), md.getTabOrder());
}
for (ListGrid lg : ef.getAllListGrids()) {
// We always want the add option to be the first toolbar action for consistency
if (lg.getToolbarActions().isEmpty()) {
lg.addToolbarAction(DefaultListGridActions.ADD);
} else {
lg.getToolbarActions().add(0, DefaultListGridActions.ADD);
}
}
if (CollectionUtils.isEmpty(ef.getActions())) {
ef.addAction(DefaultEntityFormActions.SAVE);
}
ef.addAction(DefaultEntityFormActions.DELETE);
addAdditionalEntityFormActions(ef);
}
protected void addAdditionalEntityFormActions(EntityForm ef) {
if (extensionManagers != null && !extensionManagers.isEmpty()) {
for (FormBuilderExtensionManager mgr : extensionManagers) {
if (mgr.canHandle(ef)) {
mgr.addFormExtensions(ef);
}
}
}
}
@Override
public void populateEntityFormFields(EntityForm ef, Entity entity) {
populateEntityFormFields(ef, entity, true, true);
}
@Override
public void populateEntityFormFields(EntityForm ef, Entity entity, boolean populateType, boolean populateId) {
if (populateId) {
ef.setId(entity.findProperty(ef.getIdProperty()).getValue());
}
if (populateType) {
ef.setEntityType(entity.getType()[0]);
}
for (Entry<String, Field> entry : ef.getFields().entrySet()) {
Property entityProp = entity.findProperty(entry.getKey());
if (entityProp != null) {
entry.getValue().setValue(entityProp.getValue());
entry.getValue().setDisplayValue(entityProp.getDisplayValue());
}
}
}
@Override
public void populateAdornedEntityFormFields(EntityForm ef, Entity entity, AdornedTargetList adornedList) {
Field field = ef.getFields().get(adornedList.getTargetObjectPath() + "." + adornedList.getTargetIdProperty());
Property entityProp = entity.findProperty(ef.getIdProperty());
field.setValue(entityProp.getValue());
if (StringUtils.isNotBlank(adornedList.getSortField())) {
field = ef.getFields().get(adornedList.getSortField());
entityProp = entity.findProperty(adornedList.getSortField());
if (field != null && entityProp != null) {
field.setValue(entityProp.getValue());
}
}
}
@Override
public void populateMapEntityFormFields(EntityForm ef, Entity entity) {
Field field = ef.getFields().get("priorKey");
Property entityProp = entity.findProperty("key");
if (field != null && entityProp != null) {
field.setValue(entityProp.getValue());
}
}
@Override
public EntityForm buildAdornedListForm(AdornedTargetCollectionMetadata adornedMd, AdornedTargetList adornedList,
String parentId)
throws ServiceException {
EntityForm ef = createStandardEntityForm();
return buildAdornedListForm(adornedMd, adornedList, parentId, ef);
}
@Override
public EntityForm buildAdornedListForm(AdornedTargetCollectionMetadata adornedMd, AdornedTargetList adornedList,
String parentId, EntityForm ef)
throws ServiceException {
ef.setEntityType(adornedList.getAdornedTargetEntityClassname());
// Get the metadata for this adorned field
PersistencePackageRequest request = PersistencePackageRequest.adorned()
.withCeilingEntityClassname(adornedMd.getCollectionCeilingEntity())
.withAdornedList(adornedList);
ClassMetadata collectionMetadata = adminEntityService.getClassMetadata(request);
// We want our entity form to only render the maintained adorned target fields
List<Property> entityFormProperties = new ArrayList<Property>();
for (String targetFieldName : adornedMd.getMaintainedAdornedTargetFields()) {
Property p = collectionMetadata.getPMap().get(targetFieldName);
if (p.getMetadata() instanceof BasicFieldMetadata) {
((BasicFieldMetadata) p.getMetadata()).setVisibility(VisibilityEnum.VISIBLE_ALL);
entityFormProperties.add(p);
}
}
// Set the maintained fields on the form
setEntityFormFields(ef, entityFormProperties);
// Add these two additional hidden fields that are required for persistence
Field f = new Field()
.withName(adornedList.getLinkedObjectPath() + "." + adornedList.getLinkedIdProperty())
.withFieldType(SupportedFieldType.HIDDEN.toString())
.withValue(parentId);
ef.addHiddenField(f);
f = new Field()
.withName(adornedList.getTargetObjectPath() + "." + adornedList.getTargetIdProperty())
.withFieldType(SupportedFieldType.HIDDEN.toString())
.withIdOverride("adornedTargetIdProperty");
ef.addHiddenField(f);
if (StringUtils.isNotBlank(adornedList.getSortField())) {
f = new Field()
.withName(adornedList.getSortField())
.withFieldType(SupportedFieldType.HIDDEN.toString());
ef.addHiddenField(f);
}
return ef;
}
@Override
public EntityForm buildMapForm(MapMetadata mapMd, final MapStructure mapStructure, ClassMetadata cmd, String parentId)
throws ServiceException {
EntityForm ef = createStandardEntityForm();
return buildMapForm(mapMd, mapStructure, cmd, parentId, ef);
}
@Override
public EntityForm buildMapForm(MapMetadata mapMd, final MapStructure mapStructure, ClassMetadata cmd, String parentId, EntityForm ef)
throws ServiceException {
ForeignKey foreignKey = (ForeignKey) mapMd.getPersistencePerspective()
.getPersistencePerspectiveItems().get(PersistencePerspectiveItemType.FOREIGNKEY);
ef.setEntityType(foreignKey.getForeignKeyClass());
Field keyField;
if (!mapMd.getForceFreeFormKeys()) {
// We will use a combo field to render the key choices
ComboField temp = new ComboField();
temp.withName("key")
.withFieldType("combo_field")
.withFriendlyName("Key");
if (mapMd.getKeys() != null) {
// The keys can be explicitly set in the annotation...
temp.setOptions(mapMd.getKeys());
} else {
// Or they could be based on a different entity
PersistencePackageRequest ppr = PersistencePackageRequest.standard()
.withCeilingEntityClassname(mapMd.getMapKeyOptionEntityClass());
DynamicResultSet drs = adminEntityService.getRecords(ppr);
for (Entity entity : drs.getRecords()) {
String keyValue = entity.getPMap().get(mapMd.getMapKeyOptionEntityValueField()).getValue();
String keyDisplayValue = entity.getPMap().get(mapMd.getMapKeyOptionEntityDisplayField()).getValue();
temp.putOption(keyValue, keyDisplayValue);
}
}
keyField = temp;
} else {
keyField = new Field().withName("key")
.withFieldType(SupportedFieldType.STRING.toString())
.withFriendlyName("Key");
}
keyField.setRequired(true);
ef.addMapKeyField(keyField);
// Set the fields for this form
List<Property> mapFormProperties;
if (mapMd.isSimpleValue()) {
ef.setIdProperty("key");
mapFormProperties = new ArrayList<Property>();
Property valueProp = cmd.getPMap().get("value");
mapFormProperties.add(valueProp);
} else {
mapFormProperties = new ArrayList<Property>(Arrays.asList(cmd.getProperties()));
CollectionUtils.filter(mapFormProperties, new Predicate() {
@Override
public boolean evaluate(Object object) {
Property p = (Property) object;
return ArrayUtils.contains(p.getMetadata().getAvailableToTypes(), mapStructure.getValueClassName());
}
});
}
setEntityFormFields(ef, mapFormProperties);
Field f = new Field()
.withName("priorKey")
.withFieldType(SupportedFieldType.HIDDEN.toString());
ef.addHiddenField(f);
return ef;
}
protected EntityForm createStandardEntityForm() {
EntityForm ef = new EntityForm();
ef.addAction(DefaultEntityFormActions.SAVE);
return ef;
}
protected VisibilityEnum[] getGridHiddenVisibilities() {
return FormBuilderServiceImpl.GRID_HIDDEN_VISIBILITIES;
}
protected VisibilityEnum[] getFormHiddenVisibilities() {
return FormBuilderServiceImpl.FORM_HIDDEN_VISIBILITIES;
}
}
| 1no label
|
admin_broadleaf-open-admin-platform_src_main_java_org_broadleafcommerce_openadmin_web_service_FormBuilderServiceImpl.java
|
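convertJsonToDataWrapper above wires a custom deserializer into an ObjectMapper through a SimpleModule (Jackson 1.x API in the original). A self-contained Jackson 2.x analogue of the same registration; Point and PointDeserializer are placeholder types, not Broadleaf classes:
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.databind.DeserializationContext;
import com.fasterxml.jackson.databind.JsonDeserializer;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.module.SimpleModule;
import java.io.IOException;
public final class CustomDeserializerDemo {
    static final class Point {
        final int x, y;
        Point(int x, int y) { this.x = x; this.y = y; }
        @Override public String toString() { return "Point(" + x + "," + y + ")"; }
    }
    static final class PointDeserializer extends JsonDeserializer<Point> {
        @Override
        public Point deserialize(JsonParser p, DeserializationContext ctxt) throws IOException {
            JsonNode node = p.getCodec().readTree(p);   // e.g. {"coords":"3,4"}
            String[] parts = node.get("coords").asText().split(",");
            return new Point(Integer.parseInt(parts[0]), Integer.parseInt(parts[1]));
        }
    }
    public static void main(String[] args) throws IOException {
        SimpleModule module = new SimpleModule();
        module.addDeserializer(Point.class, new PointDeserializer());
        ObjectMapper mapper = new ObjectMapper().registerModule(module);
        System.out.println(mapper.readValue("{\"coords\":\"3,4\"}", Point.class)); // Point(3,4)
    }
}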
121 |
(new java.security.PrivilegedExceptionAction<sun.misc.Unsafe>() {
public sun.misc.Unsafe run() throws Exception {
Class<sun.misc.Unsafe> k = sun.misc.Unsafe.class;
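// Reflectively scan Unsafe's own static fields (normally the "theUnsafe" singleton)
// because Unsafe.getUnsafe() throws SecurityException for callers not loaded by the
// bootstrap class loader.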
for (java.lang.reflect.Field f : k.getDeclaredFields()) {
f.setAccessible(true);
Object x = f.get(null);
if (k.isInstance(x))
return k.cast(x);
}
throw new NoSuchFieldError("the Unsafe");
}});
| 0true
|
src_main_java_jsr166e_ForkJoinTask.java
|
188 |
new Thread(){
public void run() {
for (int i=0; i<5; i++){
tempSet.add("item" + i);
}
tempSet.add("done");
}
}.start();
| 0true
|
hazelcast-client_src_test_java_com_hazelcast_client_collections_ClientSetTest.java
|
63 |
public interface FieldEnumerationItem extends Serializable {
FieldEnumeration getFieldEnumeration();
void setFieldEnumeration(FieldEnumeration fieldEnumeration);
int getFieldOrder();
void setFieldOrder(int fieldOrder);
String getFriendlyName();
void setFriendlyName(String friendlyName);
Long getId();
void setId(Long id);
String getName();
void setName(String name);
}
| 0true
|
admin_broadleaf-contentmanagement-module_src_main_java_org_broadleafcommerce_cms_field_domain_FieldEnumerationItem.java
|
35 |
@SuppressWarnings("unchecked")
public class OMVRBTreeEntryMemory<K, V> extends OMVRBTreeEntry<K, V> {
protected int size = 1;
protected int pageSize;
protected K[] keys;
protected V[] values;
protected OMVRBTreeEntryMemory<K, V> left = null;
protected OMVRBTreeEntryMemory<K, V> right = null;
protected OMVRBTreeEntryMemory<K, V> parent;
protected boolean color = OMVRBTree.RED;
/**
* Constructor called on unmarshalling.
*
*/
protected OMVRBTreeEntryMemory(final OMVRBTree<K, V> iTree) {
super(iTree);
}
/**
* Make a new cell with given key, value, and parent, and with <tt>null</tt> child links, and BLACK color.
*/
protected OMVRBTreeEntryMemory(final OMVRBTree<K, V> iTree, final K iKey, final V iValue, final OMVRBTreeEntryMemory<K, V> iParent) {
super(iTree);
setParent(iParent);
pageSize = tree.getDefaultPageSize();
keys = (K[]) new Object[pageSize];
keys[0] = iKey;
values = (V[]) new Object[pageSize];
values[0] = iValue;
init();
}
/**
* Copy values from the parent node.
*
* @param iParent
* @param iPosition
*/
protected OMVRBTreeEntryMemory(final OMVRBTreeEntryMemory<K, V> iParent, final int iPosition) {
super(iParent.getTree());
pageSize = tree.getDefaultPageSize();
keys = (K[]) new Object[pageSize];
values = (V[]) new Object[pageSize];
size = iParent.size - iPosition;
System.arraycopy(iParent.keys, iPosition, keys, 0, size);
System.arraycopy(iParent.values, iPosition, values, 0, size);
Arrays.fill(iParent.keys, iPosition, iParent.size, null);
Arrays.fill(iParent.values, iPosition, iParent.size, null);
iParent.size = iPosition;
setParent(iParent);
init();
}
@Override
protected void setColor(final boolean iColor) {
this.color = iColor;
}
@Override
public boolean getColor() {
return color;
}
@Override
public void setLeft(final OMVRBTreeEntry<K, V> iLeft) {
left = (OMVRBTreeEntryMemory<K, V>) iLeft;
if (iLeft != null && iLeft.getParent() != this)
iLeft.setParent(this);
}
@Override
public OMVRBTreeEntry<K, V> getLeft() {
return left;
}
@Override
public void setRight(final OMVRBTreeEntry<K, V> iRight) {
right = (OMVRBTreeEntryMemory<K, V>) iRight;
if (iRight != null && iRight.getParent() != this)
iRight.setParent(this);
}
@Override
public OMVRBTreeEntry<K, V> getRight() {
return right;
}
@Override
public OMVRBTreeEntry<K, V> setParent(final OMVRBTreeEntry<K, V> iParent) {
parent = (OMVRBTreeEntryMemory<K, V>) iParent;
return iParent;
}
@Override
public OMVRBTreeEntry<K, V> getParent() {
return parent;
}
/**
* Returns the successor of the current Entry only by traversing the memory, or null if no such.
*/
@Override
public OMVRBTreeEntryMemory<K, V> getNextInMemory() {
OMVRBTreeEntryMemory<K, V> t = this;
OMVRBTreeEntryMemory<K, V> p = null;
if (t.right != null) {
p = t.right;
while (p.left != null)
p = p.left;
} else {
p = t.parent;
while (p != null && t == p.right) {
t = p;
p = p.parent;
}
}
return p;
}
public int getSize() {
return size;
}
public int getPageSize() {
return pageSize;
}
@Override
protected OMVRBTreeEntry<K, V> getLeftInMemory() {
return left;
}
@Override
protected OMVRBTreeEntry<K, V> getParentInMemory() {
return parent;
}
@Override
protected OMVRBTreeEntry<K, V> getRightInMemory() {
return right;
}
protected K getKeyAt(final int iIndex) {
return keys[iIndex];
}
protected V getValueAt(int iIndex) {
return values[iIndex];
}
/**
* Replaces the value currently associated with the key with the given value.
*
* @return the value associated with the key before this method was called
*/
public V setValue(final V value) {
V oldValue = this.getValue();
this.values[tree.pageIndex] = value;
return oldValue;
}
protected void insert(final int iPosition, final K key, final V value) {
if (iPosition < size) {
// MOVE RIGHT TO MAKE ROOM FOR THE ITEM
System.arraycopy(keys, iPosition, keys, iPosition + 1, size - iPosition);
System.arraycopy(values, iPosition, values, iPosition + 1, size - iPosition);
}
keys[iPosition] = key;
values[iPosition] = value;
size++;
}
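// Illustrative sketch (not part of the original source): with keys [a, b, c] and size 3,
// insert(1, x, v) shifts b and c one slot to the right via arraycopy, giving [a, x, b, c] and size 4.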
protected void remove() {
if (tree.pageIndex == size - 1) {
// LAST ONE: JUST REMOVE IT
} else if (tree.pageIndex > -1) {
// SHIFT LEFT THE VALUES
System.arraycopy(keys, tree.pageIndex + 1, keys, tree.pageIndex, size - tree.pageIndex - 1);
System.arraycopy(values, tree.pageIndex + 1, values, tree.pageIndex, size - tree.pageIndex - 1);
}
// FREE RESOURCES
keys[size - 1] = null;
values[size - 1] = null;
size--;
tree.pageIndex = 0;
}
protected void copyFrom(final OMVRBTreeEntry<K, V> iSource) {
OMVRBTreeEntryMemory<K, V> source = (OMVRBTreeEntryMemory<K, V>) iSource;
keys = (K[]) new Object[source.keys.length];
for (int i = 0; i < source.keys.length; ++i)
keys[i] = source.keys[i];
values = (V[]) new Object[source.values.length];
for (int i = 0; i < source.values.length; ++i)
values[i] = source.values[i];
size = source.size;
}
@Override
public String toString() {
if (keys == null)
return "?";
final StringBuilder buffer = new StringBuilder();
final Object k = tree.pageIndex >= size ? '?' : getKey();
buffer.append(k);
buffer.append(" (size=");
buffer.append(size);
if (size > 0) {
buffer.append(" [");
buffer.append(keys[0] != null ? keys[0] : "{lazy}");
buffer.append('-');
buffer.append(keys[size - 1] != null ? keys[size - 1] : "{lazy}");
buffer.append(']');
}
buffer.append(')');
return buffer.toString();
}
}
| 0true
|
commons_src_main_java_com_orientechnologies_common_collection_OMVRBTreeEntryMemory.java
|
83 |
GREATER_THAN {
@Override
public boolean isValidValueType(Class<?> clazz) {
Preconditions.checkNotNull(clazz);
return Comparable.class.isAssignableFrom(clazz);
}
@Override
public boolean isValidCondition(Object condition) {
return condition instanceof Comparable;
}
@Override
public boolean evaluate(Object value, Object condition) {
Integer cmp = AttributeUtil.compare(value,condition);
return cmp != null && cmp > 0;
}
@Override
public String toString() {
return ">";
}
@Override
public TitanPredicate negate() {
return LESS_THAN_EQUAL;
}
},
| 0true
|
titan-core_src_main_java_com_thinkaurelius_titan_core_attribute_Cmp.java
|
447 |
static final class Fields {
static final XContentBuilderString CPU = new XContentBuilderString("cpu");
static final XContentBuilderString PERCENT = new XContentBuilderString("percent");
static final XContentBuilderString OPEN_FILE_DESCRIPTORS = new XContentBuilderString("open_file_descriptors");
static final XContentBuilderString MIN = new XContentBuilderString("min");
static final XContentBuilderString MAX = new XContentBuilderString("max");
static final XContentBuilderString AVG = new XContentBuilderString("avg");
}
| 0true
|
src_main_java_org_elasticsearch_action_admin_cluster_stats_ClusterStatsNodes.java
|
201 |
public final class ExtendedMemoryIndex extends MemoryIndex {
public ExtendedMemoryIndex(boolean storeOffsets, long maxReusedBytes) {
super(storeOffsets, maxReusedBytes);
}
}
| 0true
|
src_main_java_org_apache_lucene_index_memory_ExtendedMemoryIndex.java
|
538 |
public class ORemoteFetchListener implements OFetchListener {
final Set<ODocument> recordsToSend;
public ORemoteFetchListener(final Set<ODocument> iRecordsToSend) {
recordsToSend = iRecordsToSend;
}
public void processStandardField(ORecordSchemaAware<?> iRecord, Object iFieldValue, String iFieldName, OFetchContext iContext,
final Object iusObject, final String iFormat) throws OFetchException {
}
public void parseLinked(ORecordSchemaAware<?> iRootRecord, OIdentifiable iLinked, Object iUserObject, String iFieldName,
OFetchContext iContext) throws OFetchException {
}
public void parseLinkedCollectionValue(ORecordSchemaAware<?> iRootRecord, OIdentifiable iLinked, Object iUserObject,
String iFieldName, OFetchContext iContext) throws OFetchException {
}
public Object fetchLinkedMapEntry(ORecordSchemaAware<?> iRoot, Object iUserObject, String iFieldName, String iKey,
ORecordSchemaAware<?> iLinked, OFetchContext iContext) throws OFetchException {
if (iLinked.getIdentity().isValid())
return recordsToSend.add((ODocument) iLinked) ? iLinked : null;
return null;
}
public Object fetchLinkedCollectionValue(ORecordSchemaAware<?> iRoot, Object iUserObject, String iFieldName,
ORecordSchemaAware<?> iLinked, OFetchContext iContext) throws OFetchException {
if (iLinked.getIdentity().isValid())
return recordsToSend.add((ODocument) iLinked) ? iLinked : null;
return null;
}
@SuppressWarnings("unchecked")
public Object fetchLinked(ORecordSchemaAware<?> iRoot, Object iUserObject, String iFieldName, ORecordSchemaAware<?> iLinked,
OFetchContext iContext) throws OFetchException {
if (iLinked instanceof ODocument)
return recordsToSend.add((ODocument) iLinked) ? iLinked : null;
// HOW CAN THIS CODE PATH MAKE SENSE?
else if (iLinked instanceof Collection<?>)
return recordsToSend.addAll((Collection<? extends ODocument>) iLinked) ? iLinked : null;
// HOW CAN THIS CODE PATH MAKE SENSE?
else if (iLinked instanceof Map<?, ?>)
return recordsToSend.addAll(((Map<String, ? extends ODocument>) iLinked).values()) ? iLinked : null;
else
throw new OFetchException("Unrecognized type while fetching records: " + iLinked);
}
}
| 1no label
|
core_src_main_java_com_orientechnologies_orient_core_fetch_remote_ORemoteFetchListener.java
|
75 |
@SuppressWarnings("serial")
static final class MapReduceKeysToIntTask<K,V>
extends BulkTask<K,V,Integer> {
final ObjectToInt<? super K> transformer;
final IntByIntToInt reducer;
final int basis;
int result;
MapReduceKeysToIntTask<K,V> rights, nextRight;
MapReduceKeysToIntTask
(BulkTask<K,V,?> p, int b, int i, int f, Node<K,V>[] t,
MapReduceKeysToIntTask<K,V> nextRight,
ObjectToInt<? super K> transformer,
int basis,
IntByIntToInt reducer) {
super(p, b, i, f, t); this.nextRight = nextRight;
this.transformer = transformer;
this.basis = basis; this.reducer = reducer;
}
public final Integer getRawResult() { return result; }
public final void compute() {
final ObjectToInt<? super K> transformer;
final IntByIntToInt reducer;
if ((transformer = this.transformer) != null &&
(reducer = this.reducer) != null) {
int r = this.basis;
for (int i = baseIndex, f, h; batch > 0 &&
(h = ((f = baseLimit) + i) >>> 1) > i;) {
addToPendingCount(1);
(rights = new MapReduceKeysToIntTask<K,V>
(this, batch >>>= 1, baseLimit = h, f, tab,
rights, transformer, r, reducer)).fork();
}
for (Node<K,V> p; (p = advance()) != null; )
r = reducer.apply(r, transformer.apply(p.key));
result = r;
CountedCompleter<?> c;
for (c = firstComplete(); c != null; c = c.nextComplete()) {
@SuppressWarnings("unchecked") MapReduceKeysToIntTask<K,V>
t = (MapReduceKeysToIntTask<K,V>)c,
s = t.rights;
while (s != null) {
t.result = reducer.apply(t.result, s.result);
s = t.rights = s.nextRight;
}
}
}
}
}
| 0true
|
src_main_java_jsr166e_ConcurrentHashMapV8.java
|
1,410 |
Job registerCeylonModules = new Job("Load the Ceylon Metamodel for plugin dependencies") {
protected IStatus run(IProgressMonitor monitor) {
Activator.loadBundleAsModule(bundleContext.getBundle());
return Status.OK_STATUS;
}
};
| 1no label
|
plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_ui_CeylonPlugin.java
|
433 |
public class ClientSemaphoreProxy extends ClientProxy implements ISemaphore {
private final String name;
private volatile Data key;
public ClientSemaphoreProxy(String instanceName, String serviceName, String objectId) {
super(instanceName, serviceName, objectId);
this.name = objectId;
}
public boolean init(int permits) {
checkNegative(permits);
InitRequest request = new InitRequest(name, permits);
Boolean result = invoke(request);
return result;
}
public void acquire() throws InterruptedException {
acquire(1);
}
public void acquire(int permits) throws InterruptedException {
checkNegative(permits);
AcquireRequest request = new AcquireRequest(name, permits, -1);
invoke(request);
}
public int availablePermits() {
AvailableRequest request = new AvailableRequest(name);
Integer result = invoke(request);
return result;
}
public int drainPermits() {
DrainRequest request = new DrainRequest(name);
Integer result = invoke(request);
return result;
}
public void reducePermits(int reduction) {
checkNegative(reduction);
ReduceRequest request = new ReduceRequest(name, reduction);
invoke(request);
}
public void release() {
release(1);
}
public void release(int permits) {
checkNegative(permits);
ReleaseRequest request = new ReleaseRequest(name, permits);
invoke(request);
}
public boolean tryAcquire() {
return tryAcquire(1);
}
public boolean tryAcquire(int permits) {
checkNegative(permits);
try {
return tryAcquire(permits, 0, TimeUnit.SECONDS);
} catch (InterruptedException e) {
return false;
}
}
public boolean tryAcquire(long timeout, TimeUnit unit) throws InterruptedException {
return tryAcquire(1, timeout, unit);
}
public boolean tryAcquire(int permits, long timeout, TimeUnit unit) throws InterruptedException {
checkNegative(permits);
AcquireRequest request = new AcquireRequest(name, permits, unit.toMillis(timeout));
Boolean result = invoke(request);
return result;
}
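// Usage sketch (illustrative; obtaining the client instance and the semaphore name are assumptions):
// ISemaphore semaphore = client.getSemaphore("orders");
// if (semaphore.tryAcquire(2, 500, TimeUnit.MILLISECONDS)) {
// try { /* guarded work */ } finally { semaphore.release(2); }
// }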
protected void onDestroy() {
}
protected <T> T invoke(ClientRequest req) {
return super.invoke(req, getKey());
}
public Data getKey() {
if (key == null) {
key = getContext().getSerializationService().toData(name);
}
return key;
}
private void checkNegative(int permits) {
if (permits < 0) {
throw new IllegalArgumentException("Permits cannot be negative!");
}
}
@Override
public String toString() {
return "ISemaphore{" + "name='" + getName() + '\'' + '}';
}
}
| 1no label
|
hazelcast-client_src_main_java_com_hazelcast_client_proxy_ClientSemaphoreProxy.java
|
970 |
public abstract class OStringSerializerHelper {
public static final char RECORD_SEPARATOR = ',';
public static final String CLASS_SEPARATOR = "@";
public static final char LINK = ORID.PREFIX;
public static final char EMBEDDED_BEGIN = '(';
public static final char EMBEDDED_END = ')';
public static final char LIST_BEGIN = '[';
public static final char LIST_END = ']';
public static final char SET_BEGIN = '<';
public static final char SET_END = '>';
public static final char MAP_BEGIN = '{';
public static final char MAP_END = '}';
public static final char BINARY_BEGINEND = '_';
public static final char CUSTOM_TYPE = '^';
public static final char ENTRY_SEPARATOR = ':';
public static final char PARAMETER_NAMED = ':';
public static final char PARAMETER_POSITIONAL = '?';
public static final char[] PARAMETER_SEPARATOR = new char[] { ',' };
public static final char[] PARAMETER_EXT_SEPARATOR = new char[] { ' ', '.' };
public static final char[] DEFAULT_IGNORE_CHARS = new char[] { '\n', '\r', ' ' };
public static final char[] DEFAULT_FIELD_SEPARATOR = new char[] { ',', ' ' };
public static final char COLLECTION_SEPARATOR = ',';
public static final String LINKSET_PREFIX = "" + SET_BEGIN + LINK + CLASS_SEPARATOR;
public static Object fieldTypeFromStream(final ODocument iDocument, OType iType, final Object iValue) {
if (iValue == null)
return null;
if (iType == null)
iType = OType.EMBEDDED;
switch (iType) {
case STRING:
if (iValue instanceof String) {
final String s = (String) iValue;
return decode(s.substring(1, s.length() - 1));
}
return iValue.toString();
case INTEGER:
if (iValue instanceof Integer)
return iValue;
return new Integer(getStringContent(iValue));
case BOOLEAN:
if (iValue instanceof Boolean)
return iValue;
return new Boolean(getStringContent(iValue));
case DECIMAL:
if (iValue instanceof BigDecimal)
return iValue;
return new BigDecimal(getStringContent(iValue));
case FLOAT:
if (iValue instanceof Float)
return iValue;
return new Float(getStringContent(iValue));
case LONG:
if (iValue instanceof Long)
return iValue;
return new Long(getStringContent(iValue));
case DOUBLE:
if (iValue instanceof Double)
return iValue;
return new Double(getStringContent(iValue));
case SHORT:
if (iValue instanceof Short)
return iValue;
return new Short(getStringContent(iValue));
case BYTE:
if (iValue instanceof Byte)
return iValue;
return new Byte(getStringContent(iValue));
case BINARY:
return getBinaryContent(iValue);
case DATE:
case DATETIME:
if (iValue instanceof Date)
return iValue;
return new Date(Long.parseLong(getStringContent(iValue)));
case LINK:
if (iValue instanceof ORID)
return iValue.toString();
else if (iValue instanceof String)
return new ORecordId((String) iValue);
else
return ((ORecord<?>) iValue).getIdentity().toString();
case EMBEDDED:
// EMBEDDED
return OStringSerializerAnyStreamable.INSTANCE.fromStream((String) iValue);
case EMBEDDEDMAP:
// RECORD
final String value = (String) iValue;
return ORecordSerializerSchemaAware2CSV.INSTANCE.embeddedMapFromStream(iDocument, null, value, null);
}
throw new IllegalArgumentException("Type " + iType + " does not support converting value: " + iValue);
}
public static List<String> smartSplit(final String iSource, final char iRecordSeparator, final char... iJumpChars) {
return smartSplit(iSource, new char[] { iRecordSeparator }, 0, -1, false, true, false, iJumpChars);
}
public static List<String> smartSplit(final String iSource, final char iRecordSeparator, final boolean iConsiderSets,
final char... iJumpChars) {
return smartSplit(iSource, new char[] { iRecordSeparator }, 0, -1, false, true, iConsiderSets, iJumpChars);
}
public static List<String> smartSplit(final String iSource, final char[] iRecordSeparator, int beginIndex, final int endIndex,
final boolean iStringSeparatorExtended, boolean iConsiderBraces, boolean iConsiderSets, final char... iJumpChars) {
final StringBuilder buffer = new StringBuilder();
final ArrayList<String> parts = new ArrayList<String>();
if (iSource != null && !iSource.isEmpty()) {
while ((beginIndex = parse(iSource, buffer, beginIndex, endIndex, iRecordSeparator, iStringSeparatorExtended,
iConsiderBraces, iConsiderSets, iJumpChars)) > -1) {
parts.add(buffer.toString());
buffer.setLength(0);
}
if (buffer.length() > 0 || isCharPresent(iSource.charAt(iSource.length() - 1), iRecordSeparator))
parts.add(buffer.toString());
}
return parts;
}
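// Illustrative example (an assumption, not part of the original source): with braces considered,
// smartSplit("a,(b,c),d", ',') keeps the embedded block intact and yields ["a", "(b,c)", "d"],
// whereas a plain split on ',' would break "(b,c)" apart.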
public static int parse(final String iSource, final StringBuilder iBuffer, final int beginIndex, final int endIndex,
final char[] iSeparator, final boolean iStringSeparatorExtended, final boolean iConsiderBraces, final boolean iConsiderSets,
final char... iJumpChars) {
char stringBeginChar = ' ';
boolean encodeMode = false;
int insideParenthesis = 0;
int insideList = 0;
int insideSet = 0;
int insideMap = 0;
int insideLinkPart = 0;
final int max = endIndex > -1 ? endIndex + 1 : iSource.length();
final char[] buffer = new char[max - beginIndex];
iSource.getChars(beginIndex, max, buffer, 0);
iBuffer.ensureCapacity(max);
// JUMP FIRST CHARS
int i = 0;
for (; i < buffer.length; ++i) {
final char c = buffer[i];
if (!isCharPresent(c, iJumpChars))
break;
}
for (; i < buffer.length; ++i) {
final char c = buffer[i];
if (stringBeginChar == ' ') {
// OUTSIDE A STRING
if (iConsiderBraces)
if (c == LIST_BEGIN)
insideList++;
else if (c == LIST_END) {
if (!isCharPresent(c, iSeparator)) {
if (insideList == 0)
throw new OSerializationException("Found invalid " + LIST_END
+ " character. Ensure it is opened and closed correctly.");
insideList--;
}
} else if (c == EMBEDDED_BEGIN) {
insideParenthesis++;
} else if (c == EMBEDDED_END) {
// if (!isCharPresent(c, iRecordSeparator)) {
if (insideParenthesis == 0)
throw new OSerializationException("Found invalid " + EMBEDDED_END
+ " character. Ensure it is opened and closed correctly.");
// }
insideParenthesis--;
} else if (c == MAP_BEGIN) {
insideMap++;
} else if (c == MAP_END) {
if (!isCharPresent(c, iSeparator)) {
if (insideMap == 0)
throw new OSerializationException("Found invalid " + MAP_END
+ " character. Ensure it is opened and closed correctly.");
insideMap--;
}
} else if (c == LINK)
// FIRST PART OF LINK
insideLinkPart = 1;
else if (insideLinkPart == 1 && c == ORID.SEPARATOR)
// SECOND PART OF LINK
insideLinkPart = 2;
else if (iConsiderSets)
if (c == SET_BEGIN)
insideSet++;
else if (c == SET_END) {
if (!isCharPresent(c, iSeparator)) {
if (insideSet == 0)
throw new OSerializationException("Found invalid " + SET_END
+ " character. Ensure it is opened and closed correctly.");
insideSet--;
}
}
if (insideLinkPart > 0 && c != '-' && !Character.isDigit(c) && c != ORID.SEPARATOR && c != LINK)
insideLinkPart = 0;
if ((c == '"' || iStringSeparatorExtended && c == '\'') && !encodeMode) {
// START STRING
stringBeginChar = c;
}
if (insideParenthesis == 0 && insideList == 0 && insideSet == 0 && insideMap == 0 && insideLinkPart == 0) {
// OUTSIDE A PARAMS/COLLECTION/MAP
if (isCharPresent(c, iSeparator)) {
// SEPARATOR (OUTSIDE A STRING): PUSH
return beginIndex + i + 1;
}
}
if (iJumpChars.length > 0) {
if (isCharPresent(c, iJumpChars))
continue;
}
} else {
// INSIDE A STRING
if ((c == '"' || iStringSeparatorExtended && c == '\'') && !encodeMode) {
// CLOSE THE STRING ?
if (stringBeginChar == c) {
// SAME CHAR AS THE BEGIN OF THE STRING: CLOSE IT AND PUSH
stringBeginChar = ' ';
}
}
}
if (c == '\\' && !encodeMode && i + 1 < buffer.length) {
// ESCAPE CHARS
final char nextChar = buffer[i + 1];
if (nextChar == 'u') {
i = OStringParser.readUnicode(buffer, i + 2, iBuffer);
continue;
} else if (nextChar == 'n') {
iBuffer.append("\n");
i++;
continue;
} else if (nextChar == 'r') {
iBuffer.append("\r");
i++;
continue;
} else if (nextChar == 't') {
iBuffer.append("\t");
i++;
continue;
} else if (nextChar == 'f') {
iBuffer.append("\f");
i++;
continue;
} else
encodeMode = true;
} else
encodeMode = false;
if (c != '\\' && encodeMode) {
encodeMode = false;
}
iBuffer.append(c);
}
return -1;
}
public static boolean isCharPresent(final char iCharacter, final char[] iCharacters) {
final int len = iCharacters.length;
for (int i = 0; i < len; ++i) {
if (iCharacter == iCharacters[i]) {
return true;
}
}
return false;
}
public static List<String> split(final String iSource, final char iRecordSeparator, final char... iJumpCharacters) {
return split(iSource, 0, iSource.length(), iRecordSeparator, iJumpCharacters);
}
public static Collection<String> split(final Collection<String> iParts, final String iSource, final char iRecordSeparator,
final char... iJumpCharacters) {
return split(iParts, iSource, 0, iSource.length(), iRecordSeparator, iJumpCharacters);
}
public static List<String> split(final String iSource, final int iStartPosition, final int iEndPosition,
final char iRecordSeparator, final char... iJumpCharacters) {
return (List<String>) split(new ArrayList<String>(), iSource, iStartPosition, iEndPosition, iRecordSeparator,
iJumpCharacters);
}
public static Collection<String> split(final Collection<String> iParts, final String iSource, final int iStartPosition,
final int iEndPosition, final char iRecordSeparator, final char... iJumpCharacters) {
return split(iParts, iSource, iStartPosition, iEndPosition, String.valueOf(iRecordSeparator), iJumpCharacters);
}
public static Collection<String> split(final Collection<String> iParts, final String iSource, final int iStartPosition,
int iEndPosition, final String iRecordSeparators, final char... iJumpCharacters) {
if (iEndPosition == -1)
iEndPosition = iSource.length();
final StringBuilder buffer = new StringBuilder();
for (int i = iStartPosition; i < iEndPosition; ++i) {
char c = iSource.charAt(i);
if (iRecordSeparators.indexOf(c) > -1) {
iParts.add(buffer.toString());
buffer.setLength(0);
} else {
if (iJumpCharacters.length > 0 && buffer.length() == 0) {
// CHECK IF IT'S A CHAR TO JUMP
if (!isCharPresent(c, iJumpCharacters)) {
buffer.append(c);
}
} else
buffer.append(c);
}
}
if (iJumpCharacters.length > 0 && buffer.length() > 0) {
// CHECK THE END OF LAST ITEM IF NEED TO CUT THE CHARS TO JUMP
char b;
int newSize = 0;
boolean found;
for (int i = buffer.length() - 1; i >= 0; --i) {
b = buffer.charAt(i);
found = false;
for (char j : iJumpCharacters) {
if (j == b) {
found = true;
++newSize;
break;
}
}
if (!found)
break;
}
if (newSize > 0)
buffer.setLength(buffer.length() - newSize);
}
iParts.add(buffer.toString());
return iParts;
}
public static String joinIntArray(int[] iArray) {
final StringBuilder ids = new StringBuilder();
for (int id : iArray) {
if (ids.length() > 0)
ids.append(RECORD_SEPARATOR);
ids.append(id);
}
return ids.toString();
}
public static int[] splitIntArray(final String iInput) {
final List<String> items = split(iInput, RECORD_SEPARATOR);
final int[] values = new int[items.size()];
for (int i = 0; i < items.size(); ++i) {
values[i] = Integer.parseInt(items.get(i).trim());
}
return values;
}
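// Round-trip sketch (illustrative): joinIntArray(new int[] { 1, 2, 3 }) produces "1,2,3",
// and splitIntArray("1,2,3") parses it back into { 1, 2, 3 }.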
public static boolean contains(final String iText, final char iSeparator) {
if (iText == null)
return false;
final int max = iText.length();
for (int i = 0; i < max; ++i) {
if (iText.charAt(i) == iSeparator)
return true;
}
return false;
}
public static int getCollection(final String iText, final int iStartPosition, final Collection<String> iCollection) {
return getCollection(iText, iStartPosition, iCollection, LIST_BEGIN, LIST_END, COLLECTION_SEPARATOR);
}
public static int getCollection(final String iText, final int iStartPosition, final Collection<String> iCollection,
final char iCollectionBegin, final char iCollectionEnd, final char iCollectionSeparator) {
final StringBuilder buffer = new StringBuilder();
int openPos = iText.indexOf(iCollectionBegin, iStartPosition);
if (openPos == -1)
return -1;
boolean escape = false;
int currentPos, deep;
int maxPos = iText.length() - 1;
for (currentPos = openPos + 1, deep = 1; deep > 0; currentPos++) {
if (currentPos > maxPos)
return -1;
char c = iText.charAt(currentPos);
if (buffer.length() == 0 && c == ' ')
continue;
if (c == iCollectionBegin) {
// BEGIN
buffer.append(c);
deep++;
} else if (c == iCollectionEnd) {
// END
if (deep > 1)
buffer.append(c);
deep--;
} else if (c == iCollectionSeparator) {
// SEPARATOR
if (deep > 1) {
buffer.append(c);
} else {
iCollection.add(buffer.toString().trim());
buffer.setLength(0);
}
} else {
// COLLECT
if (!escape && c == '\\' && (currentPos + 1 <= maxPos)) {
// ESCAPE CHARS
final char nextChar = iText.charAt(currentPos + 1);
if (nextChar == 'u') {
currentPos = OStringParser.readUnicode(iText, currentPos + 2, buffer);
} else if (nextChar == 'n') {
buffer.append("\n");
currentPos++;
} else if (nextChar == 'r') {
buffer.append("\r");
currentPos++;
} else if (nextChar == 't') {
buffer.append("\t");
currentPos++;
} else if (nextChar == 'f') {
buffer.append("\f");
currentPos++;
} else
escape = true;
continue;
}
buffer.append(c);
}
}
if (buffer.length() > 0)
iCollection.add(buffer.toString().trim());
return --currentPos;
}
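// Illustrative example (an assumption, not part of the original source): with the default
// delimiters, parsing "[a, b, [c, d]]" fills iCollection with "a", "b" and the nested block
// "[c, d]", and returns the index of the outermost closing ']'.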
public static int getParameters(final String iText, final int iBeginPosition, int iEndPosition, final List<String> iParameters) {
iParameters.clear();
final int openPos = iText.indexOf(EMBEDDED_BEGIN, iBeginPosition);
if (openPos == -1 || (iEndPosition > -1 && openPos > iEndPosition))
return iBeginPosition;
final StringBuilder buffer = new StringBuilder();
parse(iText, buffer, openPos, iEndPosition, PARAMETER_EXT_SEPARATOR, true, true, false);
if (buffer.length() == 0)
return iBeginPosition;
final String t = buffer.substring(1, buffer.length() - 1).trim();
final List<String> pars = smartSplit(t, PARAMETER_SEPARATOR, 0, -1, true, true, false);
for (int i = 0; i < pars.size(); ++i)
iParameters.add(pars.get(i).trim());
return iBeginPosition + buffer.length();
}
public static int getEmbedded(final String iText, final int iBeginPosition, int iEndPosition, final StringBuilder iEmbedded) {
final int openPos = iText.indexOf(EMBEDDED_BEGIN, iBeginPosition);
if (openPos == -1 || (iEndPosition > -1 && openPos > iEndPosition))
return iBeginPosition;
final StringBuilder buffer = new StringBuilder();
parse(iText, buffer, openPos, iEndPosition, PARAMETER_EXT_SEPARATOR, true, true, false);
if (buffer.length() == 0)
return iBeginPosition;
final String t = buffer.substring(1, buffer.length() - 1).trim();
iEmbedded.append(t);
return iBeginPosition + buffer.length();
}
public static List<String> getParameters(final String iText) {
final List<String> params = new ArrayList<String>();
try {
getParameters(iText, 0, -1, params);
} catch (Exception e) {
throw new OCommandSQLParsingException("Error on reading parameters in: " + iText);
}
return params;
}
public static Map<String, String> getMap(final String iText) {
int openPos = iText.indexOf(MAP_BEGIN);
if (openPos == -1)
return Collections.emptyMap();
int closePos = iText.indexOf(MAP_END, openPos + 1);
if (closePos == -1)
return Collections.emptyMap();
final List<String> entries = smartSplit(iText.substring(openPos + 1, closePos), COLLECTION_SEPARATOR);
if (entries.size() == 0)
return Collections.emptyMap();
Map<String, String> map = new HashMap<String, String>();
List<String> entry;
for (String item : entries) {
if (item != null && !item.isEmpty()) {
entry = OStringSerializerHelper.split(item, OStringSerializerHelper.ENTRY_SEPARATOR);
map.put((String) fieldTypeFromStream(null, OType.STRING, entry.get(0)), entry.get(1));
}
}
return map;
}
/**
* Transforms, only if needed, the source string escaping the characters \ and ".
*
* @param iText
* Input String
* @return Modified string if needed, otherwise the same input object
* @see OStringSerializerHelper#decode(String)
*/
public static String encode(final String iText) {
int pos = -1;
final int newSize = iText.length();
for (int i = 0; i < newSize; ++i) {
final char c = iText.charAt(i);
if (c == '"' || c == '\\') {
pos = i;
break;
}
}
if (pos > -1) {
// CHANGE THE INPUT STRING
final StringBuilder iOutput = new StringBuilder();
char c;
for (int i = 0; i < iText.length(); ++i) {
c = iText.charAt(i);
if (c == '"' || c == '\\')
iOutput.append('\\');
iOutput.append(c);
}
return iOutput.toString();
}
return iText;
}
/**
* Transforms, only if needed, the source string un-escaping the characters \ and ".
*
* @param iText
* Input String
* @return Modified string if needed, otherwise the same input object
* @see OStringSerializerHelper#encode(String)
*/
public static String decode(final String iText) {
int pos = -1;
final int textSize = iText.length();
for (int i = 0; i < textSize; ++i)
if (iText.charAt(i) == '"' || iText.charAt(i) == '\\') {
pos = i;
break;
}
if (pos == -1)
// NOT FOUND, RETURN THE SAME STRING (AVOID COPIES)
return iText;
// CHANGE THE INPUT STRING
final StringBuilder buffer = new StringBuilder(textSize);
buffer.append(iText.substring(0, pos));
boolean escaped = false;
for (int i = pos; i < textSize; ++i) {
final char c = iText.charAt(i);
if (escaped)
escaped = false;
else if (c == '\\') {
escaped = true;
continue;
}
buffer.append(c);
}
return buffer.toString();
}
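// Round-trip sketch (illustrative): encode prefixes every '"' and '\' with a backslash and
// decode strips one level of that escaping, so decode(encode(s)).equals(s) holds for any s.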
public static OClass getRecordClassName(final String iValue, OClass iLinkedClass) {
// EXTRACT THE CLASS NAME
final int classSeparatorPos = OStringParser.indexOfOutsideStrings(iValue, OStringSerializerHelper.CLASS_SEPARATOR.charAt(0), 0,
-1);
if (classSeparatorPos > -1) {
final String className = iValue.substring(0, classSeparatorPos);
final ODatabaseRecord database = ODatabaseRecordThreadLocal.INSTANCE.get();
if (className != null && database != null)
iLinkedClass = database.getMetadata().getSchema().getClass(className);
}
return iLinkedClass;
}
public static String getStringContent(final Object iValue) {
// MOVED
return OIOUtils.getStringContent(iValue);
}
/**
* Returns the binary representation of a content. If it's a String a Base64 decoding is applied.
*/
public static byte[] getBinaryContent(final Object iValue) {
if (iValue == null)
return null;
else if (iValue instanceof OBinary)
return ((OBinary) iValue).toByteArray();
else if (iValue instanceof byte[])
return (byte[]) iValue;
else if (iValue instanceof String) {
String s = (String) iValue;
if (s.length() > 1 && (s.charAt(0) == BINARY_BEGINEND && s.charAt(s.length() - 1) == BINARY_BEGINEND)
|| (s.charAt(0) == '\'' && s.charAt(s.length() - 1) == '\''))
// @COMPATIBILITY 1.0rc7-SNAPSHOT ' TO SUPPORT OLD DATABASES
s = s.substring(1, s.length() - 1);
// IN CASE OF JSON BINARY IMPORT THIS EXCEPTION IS WRONG
// else
// throw new IllegalArgumentException("Not binary type: " + iValue);
return OBase64Utils.decode(s);
} else
throw new IllegalArgumentException("Cannot parse binary as the same type as the value (class=" + iValue.getClass().getName()
+ "): " + iValue);
}
/**
* Checks if a string contains alphanumeric only characters.
*
* @param iContent
* String to check
* @return true is all the content is alphanumeric, otherwise false
*/
public static boolean isAlphanumeric(final String iContent) {
final int tot = iContent.length();
for (int i = 0; i < tot; ++i) {
if (!Character.isLetterOrDigit(iContent.charAt(i)))
return false;
}
return true;
}
public static String removeQuotationMarks(final String iValue) {
if (iValue != null
&& iValue.length() > 1
&& (iValue.charAt(0) == '\'' && iValue.charAt(iValue.length() - 1) == '\'' || iValue.charAt(0) == '"'
&& iValue.charAt(iValue.length() - 1) == '"'))
return iValue.substring(1, iValue.length() - 1);
return iValue;
}
public static boolean startsWithIgnoreCase(final String iFirst, final String iSecond) {
if (iFirst == null)
throw new IllegalArgumentException("Origin string to compare is null");
if (iSecond == null)
throw new IllegalArgumentException("String to match is null");
final int iSecondLength = iSecond.length();
if (iSecondLength > iFirst.length())
return false;
for (int i = 0; i < iSecondLength; ++i) {
if (Character.toUpperCase(iFirst.charAt(i)) != Character.toUpperCase(iSecond.charAt(i)))
return false;
}
return true;
}
public static int indexOf(final String iSource, final int iBegin, char... iChars) {
if (iChars.length == 1)
// ONE CHAR: USE JAVA INDEXOF
return iSource.indexOf(iChars[0], iBegin);
final int len = iSource.length();
for (int i = iBegin; i < len; ++i) {
for (int k = 0; k < iChars.length; ++k) {
final char c = iSource.charAt(i);
if (c == iChars[k])
return i;
}
}
return -1;
}
/**
* Finds the end of a block delimited by 2 chars.
*/
public static final int findEndBlock(final String iOrigin, final char iBeginChar, final char iEndChar, final int iBeginOffset) {
int inc = 0;
for (int i = iBeginOffset; i < iOrigin.length(); i++) {
char c = iOrigin.charAt(i);
if (c == '\'') {
// skip to text end
int tend = i;
while (true) {
tend = iOrigin.indexOf('\'', tend + 1);
if (tend < 0) {
throw new OCommandSQLParsingException("Could not find end of text area.", iOrigin, i);
}
if (iOrigin.charAt(tend - 1) == '\\') {
// inner quote, skip it
continue;
} else {
break;
}
}
i = tend;
continue;
}
if (c != iBeginChar && c != iEndChar)
continue;
if (c == iBeginChar) {
inc++;
} else if (c == iEndChar) {
inc--;
if (inc == 0) {
return i;
}
}
}
return -1;
}
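// Illustrative example (an assumption, not part of the original source):
// findEndBlock("(a(b)c)", '(', ')', 0) returns 6, the index of the outermost closing
// parenthesis; quoted text such as 'x)y' is skipped while scanning.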
public static int getLowerIndexOf(final String iText, final int iBeginOffset, final String... iToSearch) {
int lowest = -1;
for (String toSearch : iToSearch) {
int index = iText.indexOf(toSearch, iBeginOffset);
if (index > -1 && (lowest == -1 || index < lowest))
lowest = index;
}
return lowest;
}
public static int getHigherIndexOf(final String iText, final int iBeginOffset, final String... iToSearch) {
int highest = -1;
for (String toSearch : iToSearch) {
int index = iText.indexOf(toSearch, iBeginOffset);
if (index > -1 && (highest == -1 || index > highest))
highest = index;
}
return highest;
}
}
| 1no label
|
core_src_main_java_com_orientechnologies_orient_core_serialization_serializer_OStringSerializerHelper.java
|
1,471 |
public class PlainShardIterator extends PlainShardsIterator implements ShardIterator {
private final ShardId shardId;
/**
* Creates a {@link PlainShardIterator} instance that iterates over a subset of the given shards
* that belong to the given <code>shardId</code>.
*
* @param shardId shard id of the group
* @param shards shards to iterate
*/
public PlainShardIterator(ShardId shardId, List<ShardRouting> shards) {
super(shards);
this.shardId = shardId;
}
/**
* Creates a {@link PlainShardIterator} instance that iterates over a subset of the given shards
* that belong to the given <code>shardId</code>.
*
* @param shardId shard id of the group
* @param shards shards to iterate
* @param index the offset in the shards list to start the iteration from
*/
public PlainShardIterator(ShardId shardId, List<ShardRouting> shards, int index) {
super(shards, index);
this.shardId = shardId;
}
@Override
public ShardId shardId() {
return this.shardId;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (!(o instanceof ShardIterator)) return false;
ShardIterator that = (ShardIterator) o;
return shardId.equals(that.shardId());
}
@Override
public int hashCode() {
return shardId.hashCode();
}
}
| 1no label
|
src_main_java_org_elasticsearch_cluster_routing_PlainShardIterator.java
|
3,521 |
public class ParseContext {
/** Fork of {@link org.apache.lucene.document.Document} with additional functionality. */
public static class Document implements Iterable<IndexableField> {
private final List<IndexableField> fields;
private ObjectObjectMap<Object, IndexableField> keyedFields;
public Document() {
fields = Lists.newArrayList();
}
@Override
public Iterator<IndexableField> iterator() {
return fields.iterator();
}
public List<IndexableField> getFields() {
return fields;
}
public void add(IndexableField field) {
fields.add(field);
}
/** Add fields so that they can later be fetched using {@link #getByKey(Object)}. */
public void addWithKey(Object key, IndexableField field) {
if (keyedFields == null) {
keyedFields = new ObjectObjectOpenHashMap<Object, IndexableField>();
} else if (keyedFields.containsKey(key)) {
throw new ElasticsearchIllegalStateException("Only one field can be stored per key");
}
keyedFields.put(key, field);
add(field);
}
/** Get back fields that have been previously added with {@link #addWithKey(Object, IndexableField)}. */
public IndexableField getByKey(Object key) {
return keyedFields == null ? null : keyedFields.get(key);
}
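// Usage sketch (illustrative; the field name and key are assumptions):
// doc.addWithKey("my_key", new StringField("my_field", "value", Field.Store.NO));
// IndexableField f = doc.getByKey("my_key"); // returns the same instance added above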
public IndexableField[] getFields(String name) {
List<IndexableField> f = new ArrayList<IndexableField>();
for (IndexableField field : fields) {
if (field.name().equals(name)) {
f.add(field);
}
}
return f.toArray(new IndexableField[f.size()]);
}
public IndexableField getField(String name) {
for (IndexableField field : fields) {
if (field.name().equals(name)) {
return field;
}
}
return null;
}
public String get(String name) {
for (IndexableField f : fields) {
if (f.name().equals(name) && f.stringValue() != null) {
return f.stringValue();
}
}
return null;
}
public BytesRef getBinaryValue(String name) {
for (IndexableField f : fields) {
if (f.name().equals(name) && f.binaryValue() != null) {
return f.binaryValue();
}
}
return null;
}
}
private final DocumentMapper docMapper;
private final DocumentMapperParser docMapperParser;
private final ContentPath path;
private XContentParser parser;
private Document document;
private List<Document> documents = Lists.newArrayList();
private Analyzer analyzer;
private final String index;
@Nullable
private final Settings indexSettings;
private SourceToParse sourceToParse;
private BytesReference source;
private String id;
private DocumentMapper.ParseListener listener;
private Field uid, version;
private StringBuilder stringBuilder = new StringBuilder();
private Map<String, String> ignoredValues = new HashMap<String, String>();
private boolean mappingsModified = false;
private boolean withinNewMapper = false;
private boolean withinCopyTo = false;
private boolean externalValueSet;
private Object externalValue;
private AllEntries allEntries = new AllEntries();
private float docBoost = 1.0f;
public ParseContext(String index, @Nullable Settings indexSettings, DocumentMapperParser docMapperParser, DocumentMapper docMapper, ContentPath path) {
this.index = index;
this.indexSettings = indexSettings;
this.docMapper = docMapper;
this.docMapperParser = docMapperParser;
this.path = path;
}
public void reset(XContentParser parser, Document document, SourceToParse source, DocumentMapper.ParseListener listener) {
this.parser = parser;
this.document = document;
if (document != null) {
this.documents = Lists.newArrayList();
this.documents.add(document);
} else {
this.documents = null;
}
this.analyzer = null;
this.uid = null;
this.version = null;
this.id = null;
this.sourceToParse = source;
this.source = source == null ? null : sourceToParse.source();
this.path.reset();
this.mappingsModified = false;
this.withinNewMapper = false;
this.listener = listener == null ? DocumentMapper.ParseListener.EMPTY : listener;
this.allEntries = new AllEntries();
this.ignoredValues.clear();
this.docBoost = 1.0f;
}
public boolean flyweight() {
return sourceToParse.flyweight();
}
public DocumentMapperParser docMapperParser() {
return this.docMapperParser;
}
public boolean mappingsModified() {
return this.mappingsModified;
}
public void setMappingsModified() {
this.mappingsModified = true;
}
public void setWithinNewMapper() {
this.withinNewMapper = true;
}
public void clearWithinNewMapper() {
this.withinNewMapper = false;
}
public boolean isWithinNewMapper() {
return withinNewMapper;
}
public void setWithinCopyTo() {
this.withinCopyTo = true;
}
public void clearWithinCopyTo() {
this.withinCopyTo = false;
}
public boolean isWithinCopyTo() {
return withinCopyTo;
}
public String index() {
return this.index;
}
@Nullable
public Settings indexSettings() {
return this.indexSettings;
}
public String type() {
return sourceToParse.type();
}
public SourceToParse sourceToParse() {
return this.sourceToParse;
}
public BytesReference source() {
return source;
}
// only should be used by SourceFieldMapper to update with a compressed source
public void source(BytesReference source) {
this.source = source;
}
public ContentPath path() {
return this.path;
}
public XContentParser parser() {
return this.parser;
}
public DocumentMapper.ParseListener listener() {
return this.listener;
}
public Document rootDoc() {
return documents.get(0);
}
public List<Document> docs() {
return this.documents;
}
public Document doc() {
return this.document;
}
public void addDoc(Document doc) {
this.documents.add(doc);
}
public Document switchDoc(Document doc) {
Document prev = this.document;
this.document = doc;
return prev;
}
public RootObjectMapper root() {
return docMapper.root();
}
public DocumentMapper docMapper() {
return this.docMapper;
}
public AnalysisService analysisService() {
return docMapperParser.analysisService;
}
public String id() {
return id;
}
public void ignoredValue(String indexName, String value) {
ignoredValues.put(indexName, value);
}
public String ignoredValue(String indexName) {
return ignoredValues.get(indexName);
}
/**
* Really, just the id mapper should set this.
*/
public void id(String id) {
this.id = id;
}
public Field uid() {
return this.uid;
}
/**
* Really, just the uid mapper should set this.
*/
public void uid(Field uid) {
this.uid = uid;
}
public Field version() {
return this.version;
}
public void version(Field version) {
this.version = version;
}
public boolean includeInAll(Boolean includeInAll, FieldMapper mapper) {
return includeInAll(includeInAll, mapper.fieldType().indexed());
}
/**
* Is all included or not. Will always disable it if {@link org.elasticsearch.index.mapper.internal.AllFieldMapper#enabled()}
* is <tt>false</tt>. If it is enabled, then it will return <tt>true</tt> only if the specific flag is <tt>null</tt> or
* its actual value (so, if not set, defaults to "true") and the field is indexed.
*/
private boolean includeInAll(Boolean specificIncludeInAll, boolean indexed) {
if (withinCopyTo) {
return false;
}
if (!docMapper.allFieldMapper().enabled()) {
return false;
}
// not explicitly set
if (specificIncludeInAll == null) {
return indexed;
}
return specificIncludeInAll;
}
public AllEntries allEntries() {
return this.allEntries;
}
public Analyzer analyzer() {
return this.analyzer;
}
public void analyzer(Analyzer analyzer) {
this.analyzer = analyzer;
}
public void externalValue(Object externalValue) {
this.externalValueSet = true;
this.externalValue = externalValue;
}
public boolean externalValueSet() {
return this.externalValueSet;
}
public Object externalValue() {
externalValueSet = false;
return externalValue;
}
public float docBoost() {
return this.docBoost;
}
public void docBoost(float docBoost) {
this.docBoost = docBoost;
}
/**
* A string builder that can be used to construct complex names, for example.
* It is better to reuse the same instance than to allocate a new one on every call.
*/
public StringBuilder stringBuilder() {
stringBuilder.setLength(0);
return this.stringBuilder;
}
}
| 1no label
|
src_main_java_org_elasticsearch_index_mapper_ParseContext.java
|
1,298 |
public class HadoopPipeline {
private static final Logger log =
LoggerFactory.getLogger(HadoopPipeline.class);
// used to validate closure parse tree
protected static final ScriptEngine engine = new GroovyScriptEngineImpl();
public static final String PIPELINE_IS_LOCKED = "No more steps are possible as pipeline is locked";
protected final HadoopCompiler compiler;
protected final HadoopGraph graph;
protected final State state;
protected final List<String> stringRepresentation = new ArrayList<String>();
private Compare convert(final com.tinkerpop.gremlin.Tokens.T compare) {
if (compare.equals(com.tinkerpop.gremlin.Tokens.T.eq))
return Compare.EQUAL;
else if (compare.equals(com.tinkerpop.gremlin.Tokens.T.neq))
return Compare.NOT_EQUAL;
else if (compare.equals(com.tinkerpop.gremlin.Tokens.T.gt))
return Compare.GREATER_THAN;
else if (compare.equals(com.tinkerpop.gremlin.Tokens.T.gte))
return Compare.GREATER_THAN_EQUAL;
else if (compare.equals(com.tinkerpop.gremlin.Tokens.T.lt))
return Compare.LESS_THAN;
else
return Compare.LESS_THAN_EQUAL;
}
protected class State {
private Class<? extends Element> elementType;
private String propertyKey;
private Class<? extends WritableComparable> propertyType;
private int step = -1;
private boolean locked = false;
private Map<String, Integer> namedSteps = new HashMap<String, Integer>();
public State set(Class<? extends Element> elementType) {
if (!elementType.equals(Vertex.class) && !elementType.equals(Edge.class))
throw new IllegalArgumentException("The element class type must be either Vertex or Edge");
this.elementType = elementType;
return this;
}
public Class<? extends Element> getElementType() {
return this.elementType;
}
public boolean atVertex() {
if (null == this.elementType)
throw new IllegalStateException("No element type can be inferred: start vertices (or edges) set must be defined");
return this.elementType.equals(Vertex.class);
}
public State setProperty(final String key, final Class type) {
this.propertyKey = key;
this.propertyType = convertJavaToHadoop(type);
return this;
}
public Pair<String, Class<? extends WritableComparable>> popProperty() {
if (null == this.propertyKey)
return null;
Pair<String, Class<? extends WritableComparable>> pair = new Pair<String, Class<? extends WritableComparable>>(this.propertyKey, this.propertyType);
this.propertyKey = null;
this.propertyType = null;
return pair;
}
public int incrStep() {
return ++this.step;
}
public int getStep() {
return this.step;
}
public void assertNotLocked() {
if (this.locked) throw new IllegalStateException(PIPELINE_IS_LOCKED);
}
public void assertNoProperty() {
if (this.propertyKey != null)
throw new IllegalStateException("This step can not follow a property reference");
}
public void assertAtVertex() {
if (!this.atVertex())
throw new IllegalStateException("This step can not follow an edge-based step");
}
public void assertAtEdge() {
if (this.atVertex())
throw new IllegalStateException("This step can not follow a vertex-based step");
}
public boolean isLocked() {
return this.locked;
}
public void lock() {
this.locked = true;
}
public void addStep(final String name) {
if (this.step == -1)
throw new IllegalArgumentException("There is no previous step to name");
this.namedSteps.put(name, this.step);
}
public int getStep(final String name) {
final Integer i = this.namedSteps.get(name);
if (null == i)
throw new IllegalArgumentException("There is no step identified by: " + name);
else
return i;
}
}
////////////////////////////////
////////////////////////////////
////////////////////////////////
/**
* Construct a HadoopPipeline
*
* @param graph the HadoopGraph that is the source of the traversal
*/
public HadoopPipeline(final HadoopGraph graph) {
this.graph = graph;
this.compiler = HadoopCompatLoader.getCompat().newCompiler(graph);
this.state = new State();
if (MapReduceFormat.class.isAssignableFrom(this.graph.getGraphInputFormat())) {
try {
((Class<? extends MapReduceFormat>) this.graph.getGraphInputFormat()).getConstructor().newInstance().addMapReduceJobs(this.compiler);
} catch (Exception e) {
throw new RuntimeException(e.getMessage(), e);
}
}
if (graph.hasEdgeCopyDirection()) {
Direction ecDir = graph.getEdgeCopyDirection();
this.compiler.addMapReduce(EdgeCopyMapReduce.Map.class,
null,
EdgeCopyMapReduce.Reduce.class,
null,
LongWritable.class,
Holder.class,
NullWritable.class,
FaunusVertex.class,
EdgeCopyMapReduce.createConfiguration(ecDir));
}
}
//////// TRANSFORMS
/**
* The identity step does not alter the graph in anyway.
* It has the benefit of emitting various useful graph statistic counters.
*
* @return the extended HadoopPipeline
*/
public HadoopPipeline _() {
this.state.assertNotLocked();
this.compiler.addMap(IdentityMap.Map.class,
NullWritable.class,
FaunusVertex.class,
IdentityMap.createConfiguration());
makeMapReduceString(IdentityMap.class);
return this;
}
/**
* Apply the provided closure to the current element and emit the result.
*
* @param closure the closure to apply to the element
* @return the extended HadoopPipeline
*/
public HadoopPipeline transform(final String closure) {
this.state.assertNotLocked();
this.state.assertNoProperty();
this.compiler.addMap(TransformMap.Map.class,
NullWritable.class,
FaunusVertex.class,
TransformMap.createConfiguration(this.state.getElementType(), this.validateClosure(closure)));
this.state.lock();
makeMapReduceString(TransformMap.class);
return this;
}
/**
* Start a traversal at all vertices in the graph.
*
* @return the extended HadoopPipeline
*/
public HadoopPipeline V() {
this.state.assertNotLocked();
this.state.assertNoProperty();
this.state.set(Vertex.class);
this.compiler.addMap(VerticesMap.Map.class,
NullWritable.class,
FaunusVertex.class,
VerticesMap.createConfiguration(this.state.incrStep() != 0));
makeMapReduceString(VerticesMap.class);
return this;
}
/**
* Start a traversal at all edges in the graph.
*
* @return the extended HadoopPipeline
*/
public HadoopPipeline E() {
this.state.assertNotLocked();
this.state.assertNoProperty();
this.state.set(Edge.class);
this.compiler.addMap(EdgesMap.Map.class,
NullWritable.class,
FaunusVertex.class,
EdgesMap.createConfiguration(this.state.incrStep() != 0));
makeMapReduceString(EdgesMap.class);
return this;
}
/**
* Start a traversal at the vertices identified by the provided ids.
*
* @param ids the long ids of the vertices to start the traversal from
* @return the extended HadoopPipeline
*/
public HadoopPipeline v(final long... ids) {
this.state.assertNotLocked();
this.state.assertNoProperty();
this.state.set(Vertex.class);
this.state.incrStep();
this.compiler.addMap(VertexMap.Map.class,
NullWritable.class,
FaunusVertex.class,
VertexMap.createConfiguration(ids));
makeMapReduceString(VertexMap.class);
return this;
}
/**
* Take outgoing labeled edges to adjacent vertices.
*
* @param labels the labels of the edges to traverse over
* @return the extended HadoopPipeline
*/
public HadoopPipeline out(final String... labels) {
return this.inOutBoth(OUT, labels);
}
/**
* Take incoming labeled edges to adjacent vertices.
*
* @param labels the labels of the edges to traverse over
* @return the extended HadoopPipeline
*/
public HadoopPipeline in(final String... labels) {
return this.inOutBoth(IN, labels);
}
/**
* Take both incoming and outgoing labeled edges to adjacent vertices.
*
* @param labels the labels of the edges to traverse over
* @return the extended HadoopPipeline
*/
public HadoopPipeline both(final String... labels) {
return this.inOutBoth(BOTH, labels);
}
private HadoopPipeline inOutBoth(final Direction direction, final String... labels) {
this.state.assertNotLocked();
this.state.assertNoProperty();
this.state.assertAtVertex();
this.state.incrStep();
this.compiler.addMapReduce(VerticesVerticesMapReduce.Map.class,
null,
VerticesVerticesMapReduce.Reduce.class,
null,
LongWritable.class,
Holder.class,
NullWritable.class,
FaunusVertex.class,
VerticesVerticesMapReduce.createConfiguration(direction, labels));
this.state.set(Vertex.class);
makeMapReduceString(VerticesVerticesMapReduce.class, direction.name(), Arrays.asList(labels));
return this;
}
/**
* Take outgoing labeled edges to incident edges.
*
* @param labels the labels of the edges to traverse over
* @return the extended HadoopPipeline
*/
public HadoopPipeline outE(final String... labels) {
return this.inOutBothE(OUT, labels);
}
/**
* Take incoming labeled edges to incident edges.
*
* @param labels the labels of the edges to traverse over
* @return the extended HadoopPipeline
*/
public HadoopPipeline inE(final String... labels) {
return this.inOutBothE(IN, labels);
}
/**
* Take both incoming and outgoing labeled edges to incident edges.
*
* @param labels the labels of the edges to traverse over
* @return the extended HadoopPipeline
*/
public HadoopPipeline bothE(final String... labels) {
return this.inOutBothE(BOTH, labels);
}
private HadoopPipeline inOutBothE(final Direction direction, final String... labels) {
this.state.assertNotLocked();
this.state.assertNoProperty();
this.state.assertAtVertex();
this.state.incrStep();
this.compiler.addMapReduce(VerticesEdgesMapReduce.Map.class,
null,
VerticesEdgesMapReduce.Reduce.class,
null,
LongWritable.class,
Holder.class,
NullWritable.class,
FaunusVertex.class,
VerticesEdgesMapReduce.createConfiguration(direction, labels));
this.state.set(Edge.class);
makeMapReduceString(VerticesEdgesMapReduce.class, direction.name(), Arrays.asList(labels));
return this;
}
/**
* Go to the outgoing/tail vertex of the edge.
*
* @return the extended HadoopPipeline
*/
public HadoopPipeline outV() {
return this.inOutBothV(OUT);
}
/**
* Go to the incoming/head vertex of the edge.
*
* @return the extended HadoopPipeline
*/
public HadoopPipeline inV() {
return this.inOutBothV(IN);
}
/**
* Go to both the incoming/head and outgoing/tail vertices of the edge.
*
* @return the extended HadoopPipeline
*/
public HadoopPipeline bothV() {
return this.inOutBothV(BOTH);
}
private HadoopPipeline inOutBothV(final Direction direction) {
this.state.assertNotLocked();
this.state.assertNoProperty();
this.state.assertAtEdge();
this.state.incrStep();
this.compiler.addMap(EdgesVerticesMap.Map.class,
NullWritable.class,
FaunusVertex.class,
EdgesVerticesMap.createConfiguration(direction));
this.state.set(Vertex.class);
makeMapReduceString(EdgesVerticesMap.class, direction.name());
return this;
}
/**
* Emit the property value of an element.
*
* @param key the key identifying the property
* @param type the class of the property value (so Hadoop can intelligently handle the result)
* @return the extended HadoopPipeline
*/
public HadoopPipeline property(final String key, final Class type) {
this.state.assertNotLocked();
this.state.assertNoProperty();
this.state.setProperty(key, type);
return this;
}
/**
* Emit the property value of an element.
*
* @param key the key identifying the property
* @return the extended HadoopPipeline
*/
public HadoopPipeline property(final String key) {
return this.property(key, String.class);
}
/**
* Emit a string representation of the property map.
*
* @return the extended HadoopPipeline
*/
public HadoopPipeline map() {
this.state.assertNotLocked();
this.state.assertNoProperty();
this.compiler.addMap(PropertyMapMap.Map.class,
LongWritable.class,
Text.class,
PropertyMapMap.createConfiguration(this.state.getElementType()));
makeMapReduceString(PropertyMap.class);
this.state.lock();
return this;
}
/**
* Emit the label of the current edge.
*
* @return the extended HadoopPipeline
*/
public HadoopPipeline label() {
this.state.assertNotLocked();
this.state.assertNoProperty();
this.state.assertAtEdge();
this.property(Tokens.LABEL, String.class);
return this;
}
/**
* Emit the path taken from start to current element.
*
* @return the extended HadoopPipeline
*/
public HadoopPipeline path() {
this.state.assertNotLocked();
this.state.assertNoProperty();
this.compiler.addMap(PathMap.Map.class,
NullWritable.class,
Text.class,
PathMap.createConfiguration(this.state.getElementType()));
this.state.lock();
makeMapReduceString(PathMap.class);
return this;
}
/**
* Order the previous property value results and emit them with another element property value.
* It is important to emit the previous property with a provided type, else it is ordered lexicographically.
*
* @param order increasing or decreasing order
* @param elementKey the key of the element to associate it with
* @return the extended HadoopPipeline
*/
public HadoopPipeline order(final TransformPipe.Order order, final String elementKey) {
this.state.assertNotLocked();
final Pair<String, Class<? extends WritableComparable>> pair = this.state.popProperty();
if (null != pair) {
this.compiler.addMapReduce(OrderMapReduce.Map.class,
null,
OrderMapReduce.Reduce.class,
OrderMapReduce.createComparator(order, pair.getB()),
pair.getB(),
Text.class,
Text.class,
pair.getB(),
OrderMapReduce.createConfiguration(this.state.getElementType(), pair.getA(), pair.getB(), elementKey));
makeMapReduceString(OrderMapReduce.class, order.name(), elementKey);
} else {
throw new IllegalArgumentException("There is no specified property to order on");
}
this.state.lock();
return this;
}
/**
* Order the previous property value results.
*
* @param order increasing or decreasing order
* @return the extended HadoopPipeline
*/
public HadoopPipeline order(final TransformPipe.Order order) {
return this.order(order, Tokens.ID);
}
/**
* Order the previous property value results and emit them with another element property value.
* It is important to emit the previous property with a provided type, else it is ordered lexicographically.
*
* @param order increasing or decreasing order
* @param elementKey the key of the element to associate it with
* @return the extended HadoopPipeline
*/
public HadoopPipeline order(final com.tinkerpop.gremlin.Tokens.T order, final String elementKey) {
return this.order(com.tinkerpop.gremlin.Tokens.mapOrder(order), elementKey);
}
/**
* Order the previous property value results.
*
* @param order increasing or decreasing order
* @return the extended HadoopPipeline
*/
public HadoopPipeline order(final com.tinkerpop.gremlin.Tokens.T order) {
return this.order(com.tinkerpop.gremlin.Tokens.mapOrder(order));
}
//////// FILTERS
/**
* Emit or deny the current element based upon the provided boolean-based closure.
*
* @param closure return true to emit and false to remove.
* @return the extended HadoopPipeline
*/
public HadoopPipeline filter(final String closure) {
this.state.assertNotLocked();
this.state.assertNoProperty();
this.compiler.addMap(FilterMap.Map.class,
NullWritable.class,
FaunusVertex.class,
FilterMap.createConfiguration(this.state.getElementType(), this.validateClosure(closure)));
makeMapReduceString(FilterMap.class);
return this;
}
/**
* Emit the current element if it has a property value comparable to the provided values.
*
* @param key the property key of the element
* @param compare the comparator
* @param values the values to compare against where only one needs to succeed (or'd)
* @return the extended HadoopPipeline
*/
public HadoopPipeline has(final String key, final com.tinkerpop.gremlin.Tokens.T compare, final Object... values) {
return this.has(key, convert(compare), values);
}
/**
* Emit the current element if it does not have a property value comparable to the provided values.
*
* @param key the property key of the element
* @param compare the comparator (will be not'd)
* @param values the values to compare against where only one needs to succeed (or'd)
* @return the extended HadoopPipeline
*/
public HadoopPipeline hasNot(final String key, final com.tinkerpop.gremlin.Tokens.T compare, final Object... values) {
return this.hasNot(key, convert(compare), values);
}
/**
* Emit the current element if it has a property value comparable to the provided values.
*
* @param key the property key of the element
* @param compare the comparator
* @param values the values to compare against where only one needs to succeed (or'd)
* @return the extended HadoopPipeline
*/
public HadoopPipeline has(final String key, final Compare compare, final Object... values) {
this.state.assertNotLocked();
this.state.assertNoProperty();
this.compiler.addMap(PropertyFilterMap.Map.class,
NullWritable.class,
FaunusVertex.class,
PropertyFilterMap.createConfiguration(this.state.getElementType(), key, compare, values));
makeMapReduceString(PropertyFilterMap.class, compare.name(), Arrays.asList(values));
return this;
}
/**
* Emit the current element if it does not have a property value comparable to the provided values.
*
* @param key the property key of the element
* @param compare the comparator (will be not'd)
* @param values the values to compare against where only one needs to succeed (or'd)
* @return the extended HadoopPipeline
*/
public HadoopPipeline hasNot(final String key, final Compare compare, final Object... values) {
return this.has(key, compare.opposite(), values);
}
/**
* Emit the current element if it has a property value equal to one of the provided values.
*
* @param key the property key of the element
* @param values the values to compare against where only one needs to succeed (or'd)
* @return the extended HadoopPipeline
*/
public HadoopPipeline has(final String key, final Object... values) {
return (values.length == 0) ? this.has(key, Compare.NOT_EQUAL, new Object[]{null}) : this.has(key, Compare.EQUAL, values);
}
/**
* Emit the current element if it does not have a property value equal to any of the provided values.
*
* @param key the property key of the element
* @param values the values to compare against where only one needs to succeed (or'd)
* @return the extended HadoopPipeline
*/
public HadoopPipeline hasNot(final String key, final Object... values) {
return (values.length == 0) ? this.has(key, Compare.EQUAL, new Object[]{null}) : this.has(key, Compare.NOT_EQUAL, values);
}
/**
* Emit the current element if it has a property value within the provided range.
*
* @param key the property key of the element
* @param startValue the start of the range (inclusive)
* @param endValue the end of the range (exclusive)
* @return the extended HadoopPipeline
*/
public HadoopPipeline interval(final String key, final Object startValue, final Object endValue) {
this.state.assertNotLocked();
this.state.assertNoProperty();
this.compiler.addMap(IntervalFilterMap.Map.class,
NullWritable.class,
FaunusVertex.class,
IntervalFilterMap.createConfiguration(this.state.getElementType(), key, startValue, endValue));
makeMapReduceString(IntervalFilterMap.class, key, startValue, endValue);
return this;
}
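/* Usage sketch (hedged): the filter steps compose as ordinary pipeline calls.
 * V() is assumed from this class's Gremlin-style API; has(), hasNot() and
 * interval() are the methods defined above, and the property names are
 * illustrative.
 *
 *   pipeline.V().has("age", Compare.GREATER_THAN, 30)   // age > 30
 *           .hasNot("name", "hercules")                 // name != "hercules"
 *           .interval("weight", 0.5, 1.0);              // 0.5 <= weight < 1.0
 */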
/**
* Remove any duplicate traversers at a single element.
*
* @return the extended HadoopPipeline
*/
public HadoopPipeline dedup() {
this.state.assertNotLocked();
this.state.assertNoProperty();
this.compiler.addMap(DuplicateFilterMap.Map.class,
NullWritable.class,
FaunusVertex.class,
DuplicateFilterMap.createConfiguration(this.state.getElementType()));
makeMapReduceString(DuplicateFilterMap.class);
return this;
}
/**
* Go back to an element a named step ago.
* Currently only backing up to vertices is supported.
*
* @param step the name of the step to back up to
* @return the extended HadoopPipeline
*/
public HadoopPipeline back(final String step) {
this.state.assertNotLocked();
this.state.assertNoProperty();
this.compiler.addMapReduce(BackFilterMapReduce.Map.class,
BackFilterMapReduce.Combiner.class,
BackFilterMapReduce.Reduce.class,
LongWritable.class,
Holder.class,
NullWritable.class,
FaunusVertex.class,
BackFilterMapReduce.createConfiguration(this.state.getElementType(), this.state.getStep(step)));
makeMapReduceString(BackFilterMapReduce.class, step);
return this;
}
/*public HadoopPipeline back(final int numberOfSteps) {
this.state.assertNotLocked();
this.compiler.backFilterMapReduce(this.state.getElementType(), this.state.getStep() - numberOfSteps);
this.compiler.setPathEnabled(true);
makeMapReduceString(BackFilterMapReduce.class, numberOfSteps);
return this;
}*/
/**
* Emit the element only if it was arrived at via a path that does not have cycles in it.
*
* @return the extended HadoopPipeline
*/
public HadoopPipeline simplePath() {
this.state.assertNotLocked();
this.state.assertNoProperty();
this.compiler.addMap(CyclicPathFilterMap.Map.class,
NullWritable.class,
FaunusVertex.class,
CyclicPathFilterMap.createConfiguration(this.state.getElementType()));
makeMapReduceString(CyclicPathFilterMap.class);
return this;
}
//////// SIDEEFFECTS
/**
* Emit the element, but compute some sideeffect in the process.
* For example, mutate the properties of the element.
*
* @param closure the sideeffect closure whose results are ignored.
* @return the extended HadoopPipeline
*/
public HadoopPipeline sideEffect(final String closure) {
this.state.assertNotLocked();
this.state.assertNoProperty();
this.compiler.addMap(SideEffectMap.Map.class,
NullWritable.class,
FaunusVertex.class,
SideEffectMap.createConfiguration(this.state.getElementType(), this.validateClosure(closure)));
makeMapReduceString(SideEffectMap.class);
return this;
}
/**
* Name a step in order to reference it later in the expression.
*
* @param name the string representation of the name
* @return the extended HadoopPipeline
*/
public HadoopPipeline as(final String name) {
this.state.assertNotLocked();
this.state.assertNoProperty();
this.state.addStep(name);
final String string = "As(" + name + "," + this.stringRepresentation.get(this.state.getStep(name)) + ")";
this.stringRepresentation.set(this.state.getStep(name), string);
return this;
}
/**
* Have the elements from the named previous step project an edge to the current vertex with the provided label.
* If a merge weight key is provided, duplicate edges between the same two vertices are counted and that count is stored as a weight.
* A merge weight key of "_" merges all duplicates without adding a weight to the resulting edge.
*
* @param step the name of the step where the source vertices were
* @param label the label of the edge to project
* @param mergeWeightKey the property key to use for weight
* @return the extended HadoopPipeline
*/
public HadoopPipeline linkIn(final String label, final String step, final String mergeWeightKey) {
return this.link(IN, label, step, mergeWeightKey);
}
/**
* Have the elements from the named previous step project an edge to the current vertex with the provided label.
*
* @param step the name of the step where the source vertices were
* @param label the label of the edge to project
* @return the extended HadoopPipeline
*/
public HadoopPipeline linkIn(final String label, final String step) {
return this.link(IN, label, step, null);
}
/**
* Have the elements from the named previous step project an edge from the current vertex with the provided label.
* If a merge weight key is provided, duplicate edges between the same two vertices are counted and that count is stored as a weight.
* A merge weight key of "_" merges all duplicates without adding a weight to the resulting edge.
*
* @param step the name of the step where the source vertices were
* @param label the label of the edge to project
* @param mergeWeightKey the property key to use for weight
* @return the extended HadoopPipeline
*/
public HadoopPipeline linkOut(final String label, final String step, final String mergeWeightKey) {
return link(OUT, label, step, mergeWeightKey);
}
/**
* Have the elements from the named previous step project an edge from the current vertex with the provided label.
*
* @param step the name of the step where the source vertices were
* @param label the label of the edge to project
* @return the extended HadoopPipeline
*/
public HadoopPipeline linkOut(final String label, final String step) {
return this.link(OUT, label, step, null);
}
private HadoopPipeline link(final Direction direction, final String label, final String step, final String mergeWeightKey) {
this.state.assertNotLocked();
this.state.assertNoProperty();
Preconditions.checkNotNull(direction);
this.compiler.addMapReduce(LinkMapReduce.Map.class,
LinkMapReduce.Combiner.class,
LinkMapReduce.Reduce.class,
null,
LongWritable.class,
Holder.class,
NullWritable.class,
FaunusVertex.class,
LinkMapReduce.createConfiguration(direction, label, this.state.getStep(step), mergeWeightKey));
log.debug("Added {} job with direction {}, label {}, step {}, merge weight key {}", LinkMapReduce.class.getSimpleName(), direction, label, step, mergeWeightKey);
if (null != mergeWeightKey)
makeMapReduceString(LinkMapReduce.class, direction.name(), label, step, mergeWeightKey);
else
makeMapReduceString(LinkMapReduce.class, direction.name(), label, step);
return this;
}
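/* Usage sketch (hedged): linkIn/linkOut project edges back to the vertices
 * recorded at a named step. out("father") is an assumption drawn from the
 * wider traversal API and the label/step names are illustrative; as() and
 * linkIn() are defined above. The "_" merge weight key merges duplicate edges
 * without recording a weight.
 *
 *   pipeline.V().as("x").out("father").linkIn("fatherOf", "x", "_");
 */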
/**
* Count the number of times the previous element (or property) has been traversed to.
* The results are stored in the job's sideeffect file in HDFS.
*
* @return the extended HadoopPipeline.
*/
public HadoopPipeline groupCount() {
this.state.assertNotLocked();
final Pair<String, Class<? extends WritableComparable>> pair = this.state.popProperty();
if (null == pair) {
return this.groupCount(null, null);
} else {
this.compiler.addMapReduce(ValueGroupCountMapReduce.Map.class,
ValueGroupCountMapReduce.Combiner.class,
ValueGroupCountMapReduce.Reduce.class,
pair.getB(),
LongWritable.class,
pair.getB(),
LongWritable.class,
ValueGroupCountMapReduce.createConfiguration(this.state.getElementType(), pair.getA(), pair.getB()));
makeMapReduceString(ValueGroupCountMapReduce.class, pair.getA());
}
return this;
}
/**
* Apply the provided closure to the incoming element to determine the grouping key.
* The value of the count is incremented by 1.
* The results are stored in the job's sideeffect file in HDFS.
*
* @return the extended HadoopPipeline.
*/
public HadoopPipeline groupCount(final String keyClosure) {
return this.groupCount(keyClosure, null);
}
/**
* Apply the provided closure to the incoming element to determine the grouping key.
* Then apply the value closure to the current element to determine the count increment.
* The results are stored in the job's sideeffect file in HDFS.
*
* @return the extended HadoopPipeline.
*/
public HadoopPipeline groupCount(final String keyClosure, final String valueClosure) {
this.state.assertNotLocked();
this.compiler.addMapReduce(GroupCountMapReduce.Map.class,
GroupCountMapReduce.Combiner.class,
GroupCountMapReduce.Reduce.class,
Text.class,
LongWritable.class,
Text.class,
LongWritable.class,
GroupCountMapReduce.createConfiguration(this.state.getElementType(),
this.validateClosure(keyClosure), this.validateClosure(valueClosure)));
makeMapReduceString(GroupCountMapReduce.class);
return this;
}
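/* Usage sketch (hedged): groupCount with a key closure buckets elements by the
 * closure result, and the optional value closure controls the increment. The
 * closures are Groovy strings validated by validateClosure() below; the
 * property name and increment used here are illustrative.
 *
 *   pipeline.V().groupCount("{it -> it.getProperty('type')}",
 *                           "{it -> 1}");
 */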
private HadoopPipeline commit(final Tokens.Action action) {
this.state.assertNotLocked();
this.state.assertNoProperty();
if (this.state.atVertex()) {
this.compiler.addMapReduce(CommitVerticesMapReduce.Map.class,
CommitVerticesMapReduce.Combiner.class,
CommitVerticesMapReduce.Reduce.class,
null,
LongWritable.class,
Holder.class,
NullWritable.class,
FaunusVertex.class,
CommitVerticesMapReduce.createConfiguration(action));
makeMapReduceString(CommitVerticesMapReduce.class, action.name());
} else {
this.compiler.addMap(CommitEdgesMap.Map.class,
NullWritable.class,
FaunusVertex.class,
CommitEdgesMap.createConfiguration(action));
makeMapReduceString(CommitEdgesMap.class, action.name());
}
return this;
}
/**
* Drop all the elements of the respective type at the current step. Keep all others.
*
* @return the extended HadoopPipeline
*/
public HadoopPipeline drop() {
return this.commit(Tokens.Action.DROP);
}
/**
* Keep all the elements of the respective type at the current step. Drop all others.
*
* @return the extended HadoopPipeline
*/
public HadoopPipeline keep() {
return this.commit(Tokens.Action.KEEP);
}
public HadoopPipeline script(final String scriptUri, final String... args) {
this.state.assertNotLocked();
this.state.assertNoProperty();
this.state.assertAtVertex();
this.compiler.addMap(ScriptMap.Map.class,
NullWritable.class,
FaunusVertex.class,
ScriptMap.createConfiguration(scriptUri, args));
makeMapReduceString(ScriptMap.class, scriptUri);
// this.state.lock();
return this;
}
/////////////// UTILITIES
/**
* Count the number of traversers currently in the graph.
*
* @return the extended HadoopPipeline
*/
public HadoopPipeline count() {
this.state.assertNotLocked();
this.compiler.addMapReduce(CountMapReduce.Map.class,
CountMapReduce.Combiner.class,
CountMapReduce.Reduce.class,
NullWritable.class,
LongWritable.class,
NullWritable.class,
LongWritable.class,
CountMapReduce.createConfiguration(this.state.getElementType()));
makeMapReduceString(CountMapReduce.class);
this.state.lock();
return this;
}
public String toString() {
return this.stringRepresentation.toString();
}
private HadoopPipeline done() {
if (!this.state.isLocked()) {
final Pair<String, Class<? extends WritableComparable>> pair = this.state.popProperty();
if (null != pair) {
this.compiler.addMap(PropertyMap.Map.class,
LongWritable.class,
pair.getB(),
PropertyMap.createConfiguration(this.state.getElementType(), pair.getA(), pair.getB()));
makeMapReduceString(PropertyMap.class, pair.getA());
this.state.lock();
}
}
return this;
}
/**
* Submit the HadoopPipeline to the Hadoop cluster.
*
* @throws Exception
*/
public void submit() throws Exception {
submit(Tokens.EMPTY_STRING, false);
}
/**
* Submit the HadoopPipeline to the Hadoop cluster and ensure that a header is emitted in the logs.
*
* @param script the Gremlin script
* @param showHeader whether to print the Titan/Hadoop header
* @throws Exception
*/
public void submit(final String script, final Boolean showHeader) throws Exception {
this.done();
if (MapReduceFormat.class.isAssignableFrom(this.graph.getGraphOutputFormat())) {
this.state.assertNotLocked();
((Class<? extends MapReduceFormat>) this.graph.getGraphOutputFormat()).getConstructor().newInstance().addMapReduceJobs(this.compiler);
}
this.compiler.completeSequence();
ToolRunner.run(this.compiler, new String[]{script, showHeader.toString()});
}
/**
* Get a reference to the graph currently being used in this HadoopPipeline.
*
* @return the HadoopGraph
*/
public HadoopGraph getGraph() {
return this.graph;
}
public HadoopCompiler getCompiler() {
return this.compiler;
}
private String validateClosure(String closure) {
if (closure == null)
return null;
try {
engine.eval(closure);
return closure;
} catch (ScriptException e) {
closure = closure.trim();
closure = closure.replaceFirst("\\{", "{it->");
try {
engine.eval(closure);
return closure;
} catch (ScriptException e1) {
}
throw new IllegalArgumentException("The provided closure does not compile: " + e.getMessage(), e);
}
}
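/* Note on validateClosure (hedged illustration): a closure whose first compile
 * fails is retried with an explicit parameter inserted, i.e. "{ ... }" is
 * rewritten to "{it-> ... }" before the second eval, so both spellings are
 * accepted; only closures that fail both compiles are rejected.
 */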
private void makeMapReduceString(final Class klass, final Object... arguments) {
String result = klass.getSimpleName();
if (arguments.length > 0) {
result = result + "(";
for (final Object arg : arguments) {
result = result + arg + ",";
}
result = result.substring(0, result.length() - 1) + ")";
}
this.stringRepresentation.add(result);
}
private Class<? extends WritableComparable> convertJavaToHadoop(final Class klass) {
if (klass.equals(String.class)) {
return Text.class;
} else if (klass.equals(Integer.class)) {
return IntWritable.class;
} else if (klass.equals(Double.class)) {
return DoubleWritable.class;
} else if (klass.equals(Long.class)) {
return LongWritable.class;
} else if (klass.equals(Float.class)) {
return FloatWritable.class;
} else if (klass.equals(Boolean.class)) {
return BooleanWritable.class;
} else {
throw new IllegalArgumentException("The provided class is not supported: " + klass.getSimpleName());
}
}
}
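/* End-to-end sketch (hedged): a typical pipeline run. HadoopFactory.open(),
 * V(), and the graph-accepting constructor (suggested by getGraph() above) are
 * assumptions drawn from the Titan/Hadoop ecosystem rather than this file;
 * has(), groupCount() and submit() are defined in this class.
 *
 *   HadoopGraph graph = HadoopFactory.open("titan-hadoop.properties");
 *   new HadoopPipeline(graph)
 *       .V()
 *       .has("lang", "java")
 *       .groupCount("{it -> it.getProperty('name')}")
 *       .submit();
 */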
| 1no label
|
titan-hadoop-parent_titan-hadoop-core_src_main_java_com_thinkaurelius_titan_hadoop_HadoopPipeline.java
|
393 |
@RunWith(HazelcastParallelClassRunner.class)
@Category(QuickTest.class)
public class ClientMultiMapTest {
static HazelcastInstance server;
static HazelcastInstance client;
@BeforeClass
public static void init() {
server = Hazelcast.newHazelcastInstance();
client = HazelcastClient.newHazelcastClient();
}
@AfterClass
public static void destroy() {
HazelcastClient.shutdownAll();
Hazelcast.shutdownAll();
}
@Test
public void testPut() {
final Object key = "key1";
final MultiMap mm = client.getMultiMap(randomString());
assertTrue(mm.put(key, 1));
}
@Test(expected = HazelcastSerializationException.class)
public void testPut_withNullValue() {
Object key ="key";
final MultiMap mm = client.getMultiMap(randomString());
assertFalse(mm.put(key, null));
}
@Test(expected = NullPointerException.class)
public void testPut_withNullKey() {
Object value ="value";
final MultiMap mm = client.getMultiMap(randomString());
assertFalse(mm.put(null, value));
}
@Test
public void testPutMultiValuesToKey() {
final Object key = "key1";
final MultiMap mm = client.getMultiMap(randomString());
mm.put(key, 1);
assertTrue(mm.put(key, 2));
}
@Test
public void testPut_WithExistingKeyValue() {
final Object key = "key1";
final MultiMap mm = client.getMultiMap(randomString());
assertTrue(mm.put(key, 1));
assertFalse(mm.put(key, 1));
}
@Test
public void testValueCount() {
final Object key = "key1";
final MultiMap mm = client.getMultiMap(randomString());
mm.put(key, 1);
mm.put(key, 2);
assertEquals(2, mm.valueCount(key));
}
@Test
public void testValueCount_whenKeyNotThere() {
final MultiMap mm = client.getMultiMap(randomString());
assertEquals(0, mm.valueCount("NOT_THERE"));
}
@Test
public void testSizeCount() {
final Object key1 = "key1";
final Object key2 = "key2";
final MultiMap mm = client.getMultiMap(randomString());
mm.put(key1, 1);
mm.put(key1, 2);
mm.put(key2, 1);
mm.put(key2, 2);
mm.put(key2, 2);
assertEquals(4, mm.size());
}
@Test
public void testEmptySizeCount() {
final MultiMap mm = client.getMultiMap(randomString());
assertEquals(0, mm.size());
}
@Test
public void testGet_whenNotExist() {
final MultiMap mm = client.getMultiMap(randomString());
Collection coll = mm.get("NOT_THERE");
assertEquals(Collections.EMPTY_LIST, coll);
}
@Test
public void testGet() {
final Object key = "key";
final int maxItemsPerKey = 33;
final MultiMap mm = client.getMultiMap(randomString());
Set expected = new TreeSet();
for ( int i=0; i< maxItemsPerKey; i++ ){
mm.put(key, i);
expected.add(i);
}
Collection resultSet = new TreeSet( mm.get(key) );
assertEquals(expected, resultSet);
}
@Test
public void testRemove_whenKeyNotExist() {
final MultiMap mm = client.getMultiMap(randomString());
Collection coll = mm.remove("NOT_THERE");
assertEquals(Collections.EMPTY_LIST, coll);
}
@Test
public void testRemoveKey() {
final Object key = "key";
final int maxItemsPerKey = 44;
final MultiMap mm = client.getMultiMap(randomString());
Set expected = new TreeSet();
for ( int i=0; i< maxItemsPerKey; i++ ){
mm.put(key, i);
expected.add(i);
}
Set resultSet = new TreeSet( mm.remove(key) );
assertEquals(expected, resultSet);
assertEquals(0, mm.size());
}
@Test
public void testRemoveValue_whenValueNotExists() {
final Object key = "key";
final int maxItemsPerKey = 4;
final MultiMap mm = client.getMultiMap(randomString());
for ( int i=0; i< maxItemsPerKey; i++ ){
mm.put(key, i);
}
boolean result = mm.remove(key, "NOT_THERE");
assertFalse(result);
}
@Test
public void testRemoveKeyValue() {
final Object key = "key";
final int maxItemsPerKey = 4;
final MultiMap mm = client.getMultiMap(randomString());
for ( int i=0; i< maxItemsPerKey; i++ ){
mm.put(key, i);
}
for ( int i=0; i< maxItemsPerKey; i++ ){
boolean result = mm.remove(key, i);
assertTrue(result);
}
}
@Test(expected = UnsupportedOperationException.class)
public void testLocalKeySet() {
final MultiMap mm = client.getMultiMap(randomString());
mm.localKeySet();
}
@Test
public void testEmptyKeySet() {
final MultiMap mm = client.getMultiMap(randomString());
assertEquals(Collections.EMPTY_SET, mm.keySet());
}
@Test
public void testKeySet() {
final int maxKeys = 23;
final MultiMap mm = client.getMultiMap(randomString());
Set expected = new TreeSet();
for ( int key=0; key< maxKeys; key++ ){
mm.put(key, 1);
expected.add(key);
}
assertEquals(expected, mm.keySet());
}
@Test
public void testValues_whenEmptyCollection() {
final MultiMap mm = client.getMultiMap(randomString());
assertEquals(Collections.EMPTY_LIST, mm.values());
}
@Test
public void testKeyValues() {
final int maxKeys = 31;
final int maxValues = 3;
final MultiMap mm = client.getMultiMap(randomString());
Set expected = new TreeSet();
for ( int key=0; key< maxKeys; key++ ){
for ( int val=0; val< maxValues; val++ ){
mm.put(key, val);
expected.add(val);
}
}
Set resultSet = new TreeSet( mm.values() );
assertEquals(expected, resultSet);
}
@Test
public void testEntrySet_whenEmpty() {
final MultiMap mm = client.getMultiMap(randomString());
assertEquals(Collections.EMPTY_SET, mm.entrySet());
}
@Test
public void testEntrySet() {
final int maxKeys = 14;
final int maxValues = 3;
final MultiMap mm = client.getMultiMap(randomString());
for ( int key=0; key< maxKeys; key++ ){
for ( int val=0; val< maxValues; val++ ){
mm.put(key, val);
}
}
assertEquals(maxKeys * maxValues, mm.entrySet().size());
}
@Test
public void testContainsKey_whenKeyExists() {
final MultiMap mm = client.getMultiMap(randomString());
mm.put("key1", "value1");
assertTrue(mm.containsKey("key1"));
}
@Test
public void testContainsKey_whenKeyNotExists() {
final MultiMap mm = client.getMultiMap(randomString());
assertFalse(mm.containsKey("NOT_THERE"));
}
@Test(expected = NullPointerException.class)
public void testContainsKey_whenKeyNull() {
final MultiMap mm = client.getMultiMap(randomString());
assertFalse(mm.containsKey(null));
}
@Test
public void testContainsValue_whenExists() {
final MultiMap mm = client.getMultiMap(randomString());
mm.put("key1", "value1");
assertTrue(mm.containsValue("value1"));
assertFalse(mm.containsValue("NOT_THERE"));
}
@Test
public void testContainsValue_whenNotExists() {
final MultiMap mm = client.getMultiMap(randomString());
assertFalse(mm.containsValue("NOT_THERE"));
}
@Test
public void testContainsValue_whenSearchValueNull() {
final MultiMap mm = client.getMultiMap(randomString());
assertFalse(mm.containsValue(null));
}
@Test
public void testContainsEntry() {
final MultiMap mm = client.getMultiMap(randomString());
mm.put("key1", "value1");
assertTrue(mm.containsEntry("key1", "value1"));
assertFalse(mm.containsEntry("key1", "NOT_THERE"));
assertFalse(mm.containsEntry("NOT_THERE", "NOT_THERE"));
assertFalse(mm.containsEntry("NOT_THERE", "value1"));
}
@Test(expected = UnsupportedOperationException.class)
public void testGetLocalMultiMapStats() {
final MultiMap mm = client.getMultiMap(randomString());
mm.getLocalMultiMapStats();
}
@Test
public void testClear() {
final MultiMap mm = client.getMultiMap(randomString());
final int maxKeys = 9;
final int maxValues = 3;
for ( int key=0; key< maxKeys; key++ ){
for ( int val=0; val< maxValues; val++ ){
mm.put(key, val);
}
}
mm.clear();
assertEquals(0, mm.size());
}
}
| 0true
|
hazelcast-client_src_test_java_com_hazelcast_client_multimap_ClientMultiMapTest.java
|
1,696 |
public class BytesArray implements BytesReference {
public static final BytesArray EMPTY = new BytesArray(BytesRef.EMPTY_BYTES, 0, 0);
private byte[] bytes;
private int offset;
private int length;
public BytesArray(String bytes) {
BytesRef bytesRef = new BytesRef();
UnicodeUtil.UTF16toUTF8(bytes, 0, bytes.length(), bytesRef);
this.bytes = bytesRef.bytes;
this.offset = bytesRef.offset;
this.length = bytesRef.length;
}
public BytesArray(BytesRef bytesRef) {
this(bytesRef, false);
}
public BytesArray(BytesRef bytesRef, boolean deepCopy) {
if (deepCopy) {
BytesRef copy = BytesRef.deepCopyOf(bytesRef);
bytes = copy.bytes;
offset = copy.offset;
length = copy.length;
} else {
bytes = bytesRef.bytes;
offset = bytesRef.offset;
length = bytesRef.length;
}
}
public BytesArray(byte[] bytes) {
this.bytes = bytes;
this.offset = 0;
this.length = bytes.length;
}
public BytesArray(byte[] bytes, int offset, int length) {
this.bytes = bytes;
this.offset = offset;
this.length = length;
}
@Override
public byte get(int index) {
return bytes[offset + index];
}
@Override
public int length() {
return length;
}
@Override
public BytesReference slice(int from, int length) {
if (from < 0 || (from + length) > this.length) {
throw new ElasticsearchIllegalArgumentException("can't slice a buffer with length [" + this.length + "], with slice parameters from [" + from + "], length [" + length + "]");
}
return new BytesArray(bytes, offset + from, length);
}
@Override
public StreamInput streamInput() {
return new BytesStreamInput(bytes, offset, length, false);
}
@Override
public void writeTo(OutputStream os) throws IOException {
os.write(bytes, offset, length);
}
@Override
public byte[] toBytes() {
if (offset == 0 && bytes.length == length) {
return bytes;
}
return Arrays.copyOfRange(bytes, offset, offset + length);
}
@Override
public BytesArray toBytesArray() {
return this;
}
@Override
public BytesArray copyBytesArray() {
return new BytesArray(Arrays.copyOfRange(bytes, offset, offset + length));
}
@Override
public ChannelBuffer toChannelBuffer() {
return ChannelBuffers.wrappedBuffer(bytes, offset, length);
}
@Override
public boolean hasArray() {
return true;
}
@Override
public byte[] array() {
return bytes;
}
@Override
public int arrayOffset() {
return offset;
}
@Override
public String toUtf8() {
if (length == 0) {
return "";
}
return new String(bytes, offset, length, Charsets.UTF_8);
}
@Override
public BytesRef toBytesRef() {
return new BytesRef(bytes, offset, length);
}
@Override
public BytesRef copyBytesRef() {
return new BytesRef(Arrays.copyOfRange(bytes, offset, offset + length));
}
@Override
public int hashCode() {
return Helper.bytesHashCode(this);
}
@Override
public boolean equals(Object obj) {
return Helper.bytesEqual(this, (BytesReference) obj);
}
}
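// Usage sketch (hedged): slice() returns an offset view over the same backing
// array rather than a copy, which is why copyBytesArray() exists separately.
//
//   BytesArray whole = new BytesArray("hello world");
//   BytesReference tail = whole.slice(6, 5);              // view over "world"
//   assert tail.toBytesArray().toUtf8().equals("world");
//   assert tail.toBytesArray().array() == whole.array();  // shared storage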
| 1no label
|
src_main_java_org_elasticsearch_common_bytes_BytesArray.java
|
1,139 |
public class OSQLMethodAsLong extends OAbstractSQLMethod {
public static final String NAME = "aslong";
public OSQLMethodAsLong() {
super(NAME);
}
@Override
public Object execute(OIdentifiable iCurrentRecord, OCommandContext iContext, Object ioResult, Object[] iMethodParams) {
if (ioResult instanceof Number) {
ioResult = ((Number) ioResult).longValue();
} else if (ioResult instanceof Date) {
ioResult = ((Date) ioResult).getTime();
} else {
ioResult = ioResult != null ? Long.valueOf(ioResult.toString().trim()) : null;
}
return ioResult;
}
}
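// Usage sketch (hedged): in OrientDB SQL this method is applied as a field
// method; the class and field names below are illustrative.
//
//   SELECT createdOn.asLong() FROM Event
//
// Numbers are narrowed via longValue(), Dates via getTime(), and anything
// else via Long parsing of its trimmed string form, as implemented above.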
| 1no label
|
core_src_main_java_com_orientechnologies_orient_core_sql_method_misc_OSQLMethodAsLong.java
|
1,482 |
public class FilterMap {
public static final String CLASS = Tokens.makeNamespace(FilterMap.class) + ".class";
public static final String CLOSURE = Tokens.makeNamespace(FilterMap.class) + ".closure";
private static final ScriptEngine engine = new GremlinGroovyScriptEngine();
public enum Counters {
VERTICES_FILTERED,
EDGES_FILTERED
}
public static Configuration createConfiguration(final Class<? extends Element> klass, final String closure) {
final Configuration configuration = new EmptyConfiguration();
configuration.setClass(CLASS, klass, Element.class);
configuration.set(CLOSURE, closure);
return configuration;
}
public static class Map extends Mapper<NullWritable, FaunusVertex, NullWritable, FaunusVertex> {
private boolean isVertex;
private Closure<Boolean> closure;
@Override
public void setup(final Mapper.Context context) throws IOException, InterruptedException {
this.isVertex = context.getConfiguration().getClass(CLASS, Element.class, Element.class).equals(Vertex.class);
try {
this.closure = (Closure<Boolean>) engine.eval(context.getConfiguration().get(CLOSURE));
} catch (final ScriptException e) {
throw new IOException(e.getMessage(), e);
}
}
@Override
public void map(final NullWritable key, final FaunusVertex value, final Mapper<NullWritable, FaunusVertex, NullWritable, FaunusVertex>.Context context) throws IOException, InterruptedException {
if (this.isVertex) {
if (value.hasPaths() && !this.closure.call(value)) {
value.clearPaths();
DEFAULT_COMPAT.incrementContextCounter(context, Counters.VERTICES_FILTERED, 1L);
}
} else {
long counter = 0;
for (final Edge e : value.getEdges(Direction.BOTH)) {
final StandardFaunusEdge edge = (StandardFaunusEdge) e;
if (edge.hasPaths() && !this.closure.call(edge)) {
edge.clearPaths();
counter++;
}
}
DEFAULT_COMPAT.incrementContextCounter(context, Counters.EDGES_FILTERED, counter);
}
context.write(NullWritable.get(), value);
}
}
}
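/* Usage sketch (hedged): FilterMap is wired into a job through its static
 * configuration factory; the closure receives each vertex (or edge) and must
 * return a boolean. The property name is illustrative.
 *
 *   Configuration conf = FilterMap.createConfiguration(
 *           Vertex.class, "{it -> it.getProperty('age') != null}");
 *
 * HadoopPipeline.filter(...) performs exactly this wiring via
 * compiler.addMap(FilterMap.Map.class, ...).
 */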
| 1no label
|
titan-hadoop-parent_titan-hadoop-core_src_main_java_com_thinkaurelius_titan_hadoop_mapreduce_filter_FilterMap.java
|
3,880 |
public class IndicesQueryParser implements QueryParser {
public static final String NAME = "indices";
@Nullable
private final ClusterService clusterService;
@Inject
public IndicesQueryParser(@Nullable ClusterService clusterService) {
this.clusterService = clusterService;
}
@Override
public String[] names() {
return new String[]{NAME};
}
@Override
public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
XContentParser parser = parseContext.parser();
Query query = null;
Query noMatchQuery = Queries.newMatchAllQuery();
boolean queryFound = false;
boolean indicesFound = false;
boolean currentIndexMatchesIndices = false;
String queryName = null;
String currentFieldName = null;
XContentParser.Token token;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.START_OBJECT) {
if ("query".equals(currentFieldName)) {
//TODO We are able to decide whether to parse the query or not only if indices in the query appears first
queryFound = true;
if (indicesFound && !currentIndexMatchesIndices) {
parseContext.parser().skipChildren(); // skip the query object without parsing it
} else {
query = parseContext.parseInnerQuery();
}
} else if ("no_match_query".equals(currentFieldName)) {
if (indicesFound && currentIndexMatchesIndices) {
parseContext.parser().skipChildren(); // skip the query object without parsing it
} else {
noMatchQuery = parseContext.parseInnerQuery();
}
} else {
throw new QueryParsingException(parseContext.index(), "[indices] query does not support [" + currentFieldName + "]");
}
} else if (token == XContentParser.Token.START_ARRAY) {
if ("indices".equals(currentFieldName)) {
if (indicesFound) {
throw new QueryParsingException(parseContext.index(), "[indices] indices or index already specified");
}
indicesFound = true;
Collection<String> indices = new ArrayList<String>();
while (parser.nextToken() != XContentParser.Token.END_ARRAY) {
String value = parser.textOrNull();
if (value == null) {
throw new QueryParsingException(parseContext.index(), "[indices] no value specified for 'indices' entry");
}
indices.add(value);
}
currentIndexMatchesIndices = matchesIndices(parseContext.index().name(), indices.toArray(new String[indices.size()]));
} else {
throw new QueryParsingException(parseContext.index(), "[indices] query does not support [" + currentFieldName + "]");
}
} else if (token.isValue()) {
if ("index".equals(currentFieldName)) {
if (indicesFound) {
throw new QueryParsingException(parseContext.index(), "[indices] indices or index already specified");
}
indicesFound = true;
currentIndexMatchesIndices = matchesIndices(parseContext.index().name(), parser.text());
} else if ("no_match_query".equals(currentFieldName)) {
String type = parser.text();
if ("all".equals(type)) {
noMatchQuery = Queries.newMatchAllQuery();
} else if ("none".equals(type)) {
noMatchQuery = Queries.newMatchNoDocsQuery();
}
} else if ("_name".equals(currentFieldName)) {
queryName = parser.text();
} else {
throw new QueryParsingException(parseContext.index(), "[indices] query does not support [" + currentFieldName + "]");
}
}
}
if (!queryFound) {
throw new QueryParsingException(parseContext.index(), "[indices] requires 'query' element");
}
if (!indicesFound) {
throw new QueryParsingException(parseContext.index(), "[indices] requires 'indices' or 'index' element");
}
Query chosenQuery;
if (currentIndexMatchesIndices) {
chosenQuery = query;
} else {
chosenQuery = noMatchQuery;
}
if (queryName != null) {
parseContext.addNamedQuery(queryName, chosenQuery);
}
return chosenQuery;
}
protected boolean matchesIndices(String currentIndex, String... indices) {
final String[] concreteIndices = clusterService.state().metaData().concreteIndicesIgnoreMissing(indices);
for (String index : concreteIndices) {
if (Regex.simpleMatch(index, currentIndex)) {
return true;
}
}
return false;
}
}
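// Example of the query DSL this parser accepts (the field names are the ones
// the parser checks for above; index names and the term value are
// illustrative):
//
//   { "indices" : {
//       "indices" : ["index-a", "index-*"],
//       "query" : { "term" : { "tag" : "wow" } },
//       "no_match_query" : "none",
//       "_name" : "my_indices_query"
//   } }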
| 1no label
|
src_main_java_org_elasticsearch_index_query_IndicesQueryParser.java
|
2,157 |
static class IteratorBasedIterator extends DocIdSetIterator {
int lastReturn = -1;
private DocIdSetIterator[] iterators = null;
private final long cost;
IteratorBasedIterator(DocIdSet[] sets) throws IOException {
iterators = new DocIdSetIterator[sets.length];
int j = 0;
long cost = Integer.MAX_VALUE;
for (DocIdSet set : sets) {
if (set == null) {
lastReturn = DocIdSetIterator.NO_MORE_DOCS; // non matching
break;
} else {
DocIdSetIterator dcit = set.iterator();
if (dcit == null) {
lastReturn = DocIdSetIterator.NO_MORE_DOCS; // non matching
break;
}
iterators[j++] = dcit;
cost = Math.min(cost, dcit.cost());
}
}
this.cost = cost;
if (lastReturn != DocIdSetIterator.NO_MORE_DOCS) {
lastReturn = (iterators.length > 0 ? -1 : DocIdSetIterator.NO_MORE_DOCS);
}
}
@Override
public final int docID() {
return lastReturn;
}
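// Conjunction via leapfrogging: advance every other iterator to the current
// candidate doc id; whenever one overshoots, its doc id becomes the new
// candidate and the scan restarts from iterator 0 (skipping the iterator that
// produced the candidate). All iterators agree exactly when a full pass
// completes without any overshoot.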
@Override
public final int nextDoc() throws IOException {
if (lastReturn == DocIdSetIterator.NO_MORE_DOCS) return DocIdSetIterator.NO_MORE_DOCS;
DocIdSetIterator dcit = iterators[0];
int target = dcit.nextDoc();
int size = iterators.length;
int skip = 0;
int i = 1;
while (i < size) {
if (i != skip) {
dcit = iterators[i];
int docid = dcit.advance(target);
if (docid > target) {
target = docid;
if (i != 0) {
skip = i;
i = 0;
continue;
} else
skip = 0;
}
}
i++;
}
return (lastReturn = target);
}
@Override
public final int advance(int target) throws IOException {
if (lastReturn == DocIdSetIterator.NO_MORE_DOCS) return DocIdSetIterator.NO_MORE_DOCS;
DocIdSetIterator dcit = iterators[0];
target = dcit.advance(target);
int size = iterators.length;
int skip = 0;
int i = 1;
while (i < size) {
if (i != skip) {
dcit = iterators[i];
int docid = dcit.advance(target);
if (docid > target) {
target = docid;
if (i != 0) {
skip = i;
i = 0;
continue;
} else {
skip = 0;
}
}
}
i++;
}
return (lastReturn = target);
}
@Override
public long cost() {
return cost;
}
}
| 1no label
|
src_main_java_org_elasticsearch_common_lucene_docset_AndDocIdSet.java
|
148 |
public class Backend implements LockerProvider {
private static final Logger log = LoggerFactory.getLogger(Backend.class);
/**
* These are the names for the edge store and property index databases, respectively.
* The edge store contains all edges and properties. The property index contains an
* inverted index from attribute value to vertex.
* <p/>
* These names are fixed and should NEVER be changed. Changing these strings can
* disrupt storage adapters that rely on these names for specific configurations.
*/
public static final String EDGESTORE_NAME = "edgestore";
public static final String INDEXSTORE_NAME = "graphindex";
public static final String ID_STORE_NAME = "titan_ids";
public static final String METRICS_MERGED_STORE = "stores";
public static final String METRICS_MERGED_CACHE = "caches";
public static final String METRICS_CACHE_SUFFIX = ".cache";
public static final String LOCK_STORE_SUFFIX = "_lock_";
public static final String SYSTEM_TX_LOG_NAME = "txlog";
public static final String SYSTEM_MGMT_LOG_NAME = "systemlog";
public static final double EDGESTORE_CACHE_PERCENT = 0.8;
public static final double INDEXSTORE_CACHE_PERCENT = 0.2;
private static final long ETERNAL_CACHE_EXPIRATION = 1000L*3600*24*365*200; //200 years
public static final int THREAD_POOL_SIZE_SCALE_FACTOR = 2;
public static final Map<String, Integer> STATIC_KEY_LENGTHS = new HashMap<String, Integer>() {{
put(EDGESTORE_NAME, 8);
put(EDGESTORE_NAME + LOCK_STORE_SUFFIX, 8);
put(ID_STORE_NAME, 8);
}};
private final KeyColumnValueStoreManager storeManager;
private final KeyColumnValueStoreManager storeManagerLocking;
private final StoreFeatures storeFeatures;
private KCVSCache edgeStore;
private KCVSCache indexStore;
private KCVSCache txLogStore;
private IDAuthority idAuthority;
private KCVSConfiguration systemConfig;
private boolean hasAttemptedClose;
private final KCVSLogManager mgmtLogManager;
private final KCVSLogManager txLogManager;
private final LogManager userLogManager;
private final Map<String, IndexProvider> indexes;
private final int bufferSize;
private final Duration maxWriteTime;
private final Duration maxReadTime;
private final boolean cacheEnabled;
private final ExecutorService threadPool;
private final Function<String, Locker> lockerCreator;
private final ConcurrentHashMap<String, Locker> lockers =
new ConcurrentHashMap<String, Locker>();
private final Configuration configuration;
public Backend(Configuration configuration) {
this.configuration = configuration;
storeManager = getStorageManager(configuration);
indexes = getIndexes(configuration);
storeFeatures = storeManager.getFeatures();
mgmtLogManager = getKCVSLogManager(MANAGEMENT_LOG);
txLogManager = getKCVSLogManager(TRANSACTION_LOG);
userLogManager = getLogManager(USER_LOG);
cacheEnabled = !configuration.get(STORAGE_BATCH) && configuration.get(DB_CACHE);
int bufferSizeTmp = configuration.get(BUFFER_SIZE);
Preconditions.checkArgument(bufferSizeTmp > 0, "Buffer size must be positive");
if (!storeFeatures.hasBatchMutation()) {
bufferSize = Integer.MAX_VALUE;
} else bufferSize = bufferSizeTmp;
maxWriteTime = configuration.get(STORAGE_WRITE_WAITTIME);
maxReadTime = configuration.get(STORAGE_READ_WAITTIME);
if (!storeFeatures.hasLocking()) {
Preconditions.checkArgument(storeFeatures.isKeyConsistent(),"Store needs to support some form of locking");
storeManagerLocking = new ExpectedValueCheckingStoreManager(storeManager,LOCK_STORE_SUFFIX,this,maxReadTime);
} else {
storeManagerLocking = storeManager;
}
if (configuration.get(PARALLEL_BACKEND_OPS)) {
int poolsize = Runtime.getRuntime().availableProcessors() * THREAD_POOL_SIZE_SCALE_FACTOR;
threadPool = Executors.newFixedThreadPool(poolsize);
log.info("Initiated backend operations thread pool of size {}", poolsize);
} else {
threadPool = null;
}
final String lockBackendName = configuration.get(LOCK_BACKEND);
if (REGISTERED_LOCKERS.containsKey(lockBackendName)) {
lockerCreator = REGISTERED_LOCKERS.get(lockBackendName);
} else {
throw new TitanConfigurationException("Unknown lock backend \"" +
lockBackendName + "\". Known lock backends: " +
Joiner.on(", ").join(REGISTERED_LOCKERS.keySet()) + ".");
}
// Never used for backends that have innate transaction support, but we
// want to maintain the non-null invariant regardless; it will default
// to consistentkey impl if none is specified
Preconditions.checkNotNull(lockerCreator);
}
@Override
public Locker getLocker(String lockerName) {
Preconditions.checkNotNull(lockerName);
Locker l = lockers.get(lockerName);
if (null == l) {
l = lockerCreator.apply(lockerName);
final Locker x = lockers.putIfAbsent(lockerName, l);
if (null != x) {
l = x;
}
}
return l;
}
/**
* Initializes this backend with the given configuration. Must be called before this Backend can be used.
*
* @param config the backend configuration
*/
public void initialize(Configuration config) {
try {
boolean reportMetrics = configuration.get(BASIC_METRICS);
//EdgeStore & VertexIndexStore
KeyColumnValueStore idStore = storeManager.openDatabase(ID_STORE_NAME);
if (reportMetrics) {
idStore = new MetricInstrumentedStore(idStore, getMetricsStoreName(ID_STORE_NAME));
}
idAuthority = null;
if (storeFeatures.isKeyConsistent()) {
idAuthority = new ConsistentKeyIDAuthority(idStore, storeManager, config);
} else {
throw new IllegalStateException("Store needs to support consistent key or transactional operations for ID manager to guarantee proper id allocations");
}
KeyColumnValueStore edgeStoreRaw = storeManagerLocking.openDatabase(EDGESTORE_NAME);
KeyColumnValueStore indexStoreRaw = storeManagerLocking.openDatabase(INDEXSTORE_NAME);
if (reportMetrics) {
edgeStoreRaw = new MetricInstrumentedStore(edgeStoreRaw, getMetricsStoreName(EDGESTORE_NAME));
indexStoreRaw = new MetricInstrumentedStore(indexStoreRaw, getMetricsStoreName(INDEXSTORE_NAME));
}
//Configure caches
if (cacheEnabled) {
long expirationTime = configuration.get(DB_CACHE_TIME);
Preconditions.checkArgument(expirationTime>=0,"Invalid cache expiration time: %s",expirationTime);
if (expirationTime==0) expirationTime=ETERNAL_CACHE_EXPIRATION;
long cacheSizeBytes;
double cachesize = configuration.get(DB_CACHE_SIZE);
Preconditions.checkArgument(cachesize>0.0,"Invalid cache size specified: %s",cachesize);
if (cachesize<1.0) {
//Its a percentage
Runtime runtime = Runtime.getRuntime();
cacheSizeBytes = (long)((runtime.maxMemory()-(runtime.totalMemory()-runtime.freeMemory())) * cachesize);
} else {
Preconditions.checkArgument(cachesize>1000,"Cache size is too small: %s",cachesize);
cacheSizeBytes = (long)cachesize;
}
log.info("Configuring total store cache size: {}",cacheSizeBytes);
long cleanWaitTime = configuration.get(DB_CACHE_CLEAN_WAIT);
Preconditions.checkArgument(EDGESTORE_CACHE_PERCENT + INDEXSTORE_CACHE_PERCENT == 1.0,"Cache percentages don't add up!");
long edgeStoreCacheSize = Math.round(cacheSizeBytes * EDGESTORE_CACHE_PERCENT);
long indexStoreCacheSize = Math.round(cacheSizeBytes * INDEXSTORE_CACHE_PERCENT);
edgeStore = new ExpirationKCVSCache(edgeStoreRaw,getMetricsCacheName("edgeStore",reportMetrics),expirationTime,cleanWaitTime,edgeStoreCacheSize);
indexStore = new ExpirationKCVSCache(indexStoreRaw,getMetricsCacheName("indexStore",reportMetrics),expirationTime,cleanWaitTime,indexStoreCacheSize);
} else {
edgeStore = new NoKCVSCache(edgeStoreRaw);
indexStore = new NoKCVSCache(indexStoreRaw);
}
//Just open them so that they are cached
txLogManager.openLog(SYSTEM_TX_LOG_NAME);
mgmtLogManager.openLog(SYSTEM_MGMT_LOG_NAME);
txLogStore = new NoKCVSCache(storeManager.openDatabase(SYSTEM_TX_LOG_NAME));
//Open global configuration
KeyColumnValueStore systemConfigStore = storeManagerLocking.openDatabase(SYSTEM_PROPERTIES_STORE_NAME);
systemConfig = getGlobalConfiguration(new BackendOperation.TransactionalProvider() {
@Override
public StoreTransaction openTx() throws BackendException {
return storeManagerLocking.beginTransaction(StandardBaseTransactionConfig.of(
configuration.get(TIMESTAMP_PROVIDER),
storeFeatures.getKeyConsistentTxConfig()));
}
@Override
public void close() throws BackendException {
//Do nothing, storeManager is closed explicitly by Backend
}
},systemConfigStore,configuration);
} catch (BackendException e) {
throw new TitanException("Could not initialize backend", e);
}
}
/**
* Get information about all registered {@link IndexProvider}s.
*
* @return
*/
public Map<String, IndexInformation> getIndexInformation() {
ImmutableMap.Builder<String, IndexInformation> copy = ImmutableMap.builder();
copy.putAll(indexes);
return copy.build();
}
//
// public IndexProvider getIndexProvider(String name) {
// return indexes.get(name);
// }
public KCVSLog getSystemTxLog() {
try {
return txLogManager.openLog(SYSTEM_TX_LOG_NAME);
} catch (BackendException e) {
throw new TitanException("Could not re-open transaction log", e);
}
}
public Log getSystemMgmtLog() {
try {
return mgmtLogManager.openLog(SYSTEM_MGMT_LOG_NAME);
} catch (BackendException e) {
throw new TitanException("Could not re-open management log", e);
}
}
public Log getUserLog(String identifier) throws BackendException {
return userLogManager.openLog(getUserLogName(identifier));
}
public static final String getUserLogName(String identifier) {
Preconditions.checkArgument(StringUtils.isNotBlank(identifier));
return USER_LOG_PREFIX +identifier;
}
public KCVSConfiguration getGlobalSystemConfig() {
return systemConfig;
}
private String getMetricsStoreName(String storeName) {
return configuration.get(METRICS_MERGE_STORES) ? METRICS_MERGED_STORE : storeName;
}
private String getMetricsCacheName(String storeName, boolean reportMetrics) {
if (!reportMetrics) return null;
return configuration.get(METRICS_MERGE_STORES) ? METRICS_MERGED_CACHE : storeName + METRICS_CACHE_SUFFIX;
}
public KCVSLogManager getKCVSLogManager(String logName) {
Preconditions.checkArgument(configuration.restrictTo(logName).get(LOG_BACKEND).equalsIgnoreCase(LOG_BACKEND.getDefaultValue()));
return (KCVSLogManager)getLogManager(logName);
}
public LogManager getLogManager(String logName) {
return getLogManager(configuration, logName, storeManager);
}
private static LogManager getLogManager(Configuration config, String logName, KeyColumnValueStoreManager sm) {
Configuration logConfig = config.restrictTo(logName);
String backend = logConfig.get(LOG_BACKEND);
if (backend.equalsIgnoreCase(LOG_BACKEND.getDefaultValue())) {
return new KCVSLogManager(sm,logConfig);
} else {
Preconditions.checkArgument(config!=null);
LogManager lm = getImplementationClass(logConfig,logConfig.get(LOG_BACKEND),REGISTERED_LOG_MANAGERS);
Preconditions.checkNotNull(lm);
return lm;
}
}
public static KeyColumnValueStoreManager getStorageManager(Configuration storageConfig) {
StoreManager manager = getImplementationClass(storageConfig, storageConfig.get(STORAGE_BACKEND),
REGISTERED_STORAGE_MANAGERS);
if (manager instanceof OrderedKeyValueStoreManager) {
manager = new OrderedKeyValueStoreManagerAdapter((OrderedKeyValueStoreManager) manager, STATIC_KEY_LENGTHS);
}
Preconditions.checkArgument(manager instanceof KeyColumnValueStoreManager,"Invalid storage manager: %s",manager.getClass());
return (KeyColumnValueStoreManager) manager;
}
private static KCVSConfiguration getGlobalConfiguration(final BackendOperation.TransactionalProvider txProvider,
final KeyColumnValueStore store,
final Configuration config) {
try {
KCVSConfiguration kcvsConfig = new KCVSConfiguration(txProvider,config.get(TIMESTAMP_PROVIDER),store,SYSTEM_CONFIGURATION_IDENTIFIER);
kcvsConfig.setMaxOperationWaitTime(config.get(SETUP_WAITTIME));
return kcvsConfig;
} catch (BackendException e) {
throw new TitanException("Could not open global configuration",e);
}
}
public static KCVSConfiguration getStandaloneGlobalConfiguration(final KeyColumnValueStoreManager manager,
final Configuration config) {
try {
final StoreFeatures features = manager.getFeatures();
return getGlobalConfiguration(new BackendOperation.TransactionalProvider() {
@Override
public StoreTransaction openTx() throws BackendException {
return manager.beginTransaction(StandardBaseTransactionConfig.of(config.get(TIMESTAMP_PROVIDER),features.getKeyConsistentTxConfig()));
}
@Override
public void close() throws BackendException {
manager.close();
}
},manager.openDatabase(SYSTEM_PROPERTIES_STORE_NAME),config);
} catch (BackendException e) {
throw new TitanException("Could not open global configuration",e);
}
}
private final static Map<String, IndexProvider> getIndexes(Configuration config) {
ImmutableMap.Builder<String, IndexProvider> builder = ImmutableMap.builder();
for (String index : config.getContainedNamespaces(INDEX_NS)) {
Preconditions.checkArgument(StringUtils.isNotBlank(index), "Invalid index name [%s]", index);
log.info("Configuring index [{}]", index);
IndexProvider provider = getImplementationClass(config.restrictTo(index), config.get(INDEX_BACKEND,index),
REGISTERED_INDEX_PROVIDERS);
Preconditions.checkNotNull(provider);
builder.put(index, provider);
}
return builder.build();
}
public final static <T> T getImplementationClass(Configuration config, String clazzname, Map<String, String> registeredImpls) {
if (registeredImpls.containsKey(clazzname.toLowerCase())) {
clazzname = registeredImpls.get(clazzname.toLowerCase());
}
return ConfigurationUtil.instantiate(clazzname, new Object[]{config}, new Class[]{Configuration.class});
}
/**
* Returns the configured {@link IDAuthority}.
*
* @return
*/
public IDAuthority getIDAuthority() {
Preconditions.checkNotNull(idAuthority, "Backend has not yet been initialized");
return idAuthority;
}
/**
* Returns the {@link StoreFeatures} of the configured backend storage engine.
*
* @return
*/
public StoreFeatures getStoreFeatures() {
return storeFeatures;
}
/**
* Returns the {@link IndexFeatures} of all configured index backends
*/
public Map<String,IndexFeatures> getIndexFeatures() {
return Maps.transformValues(indexes,new Function<IndexProvider, IndexFeatures>() {
@Nullable
@Override
public IndexFeatures apply(@Nullable IndexProvider indexProvider) {
return indexProvider.getFeatures();
}
});
}
/**
* Opens a new transaction against all registered backend system wrapped in one {@link BackendTransaction}.
*
* @return
* @throws BackendException
*/
public BackendTransaction beginTransaction(TransactionConfiguration configuration, KeyInformation.Retriever indexKeyRetriever) throws BackendException {
StoreTransaction tx = storeManagerLocking.beginTransaction(configuration);
// Cache
CacheTransaction cacheTx = new CacheTransaction(tx, storeManagerLocking, bufferSize, maxWriteTime, configuration.hasEnabledBatchLoading());
// Index transactions
Map<String, IndexTransaction> indexTx = new HashMap<String, IndexTransaction>(indexes.size());
for (Map.Entry<String, IndexProvider> entry : indexes.entrySet()) {
indexTx.put(entry.getKey(), new IndexTransaction(entry.getValue(), indexKeyRetriever.get(entry.getKey()), configuration, maxWriteTime));
}
return new BackendTransaction(cacheTx, configuration, storeFeatures,
edgeStore, indexStore, txLogStore,
maxReadTime, indexTx, threadPool);
}
public synchronized void close() throws BackendException {
if (!hasAttemptedClose) {
hasAttemptedClose = true;
mgmtLogManager.close();
txLogManager.close();
userLogManager.close();
edgeStore.close();
indexStore.close();
idAuthority.close();
systemConfig.close();
storeManager.close();
if(threadPool != null) {
threadPool.shutdown();
}
//Indexes
for (IndexProvider index : indexes.values()) index.close();
} else {
log.debug("Backend {} has already been closed or cleared", this);
}
}
/**
* Clears the storage of all registered backend data providers. This includes backend storage engines and index providers.
* <p/>
* IMPORTANT: Clearing storage means that ALL data will be lost and cannot be recovered.
*
* @throws BackendException
*/
public synchronized void clearStorage() throws BackendException {
if (!hasAttemptedClose) {
hasAttemptedClose = true;
mgmtLogManager.close();
txLogManager.close();
userLogManager.close();
edgeStore.close();
indexStore.close();
idAuthority.close();
systemConfig.close();
storeManager.clearStorage();
storeManager.close();
//Indexes
for (IndexProvider index : indexes.values()) {
index.clearStorage();
index.close();
}
} else {
log.debug("Backend {} has already been closed or cleared", this);
}
}
//############ Registered Storage Managers ##############
private static final ImmutableMap<String, String> REGISTERED_STORAGE_MANAGERS;
static {
ImmutableMap.Builder<String, String> b = ImmutableMap.builder();
b.put("berkeleyje", "com.thinkaurelius.titan.diskstorage.berkeleyje.BerkeleyJEStoreManager");
b.put("infinispan", "com.thinkaurelius.titan.diskstorage.infinispan.InfinispanCacheStoreManager");
b.put("cassandrathrift", "com.thinkaurelius.titan.diskstorage.cassandra.thrift.CassandraThriftStoreManager");
b.put("cassandra", "com.thinkaurelius.titan.diskstorage.cassandra.astyanax.AstyanaxStoreManager");
b.put("astyanax", "com.thinkaurelius.titan.diskstorage.cassandra.astyanax.AstyanaxStoreManager");
b.put("hbase", "com.thinkaurelius.titan.diskstorage.hbase.HBaseStoreManager");
b.put("embeddedcassandra", "com.thinkaurelius.titan.diskstorage.cassandra.embedded.CassandraEmbeddedStoreManager");
b.put("inmemory", "com.thinkaurelius.titan.diskstorage.keycolumnvalue.inmemory.InMemoryStoreManager");
REGISTERED_STORAGE_MANAGERS = b.build();
}
public static final Map<String, String> getRegisteredStoreManagers() {
return REGISTERED_STORAGE_MANAGERS;
}
public static final Map<String, ConfigOption> REGISTERED_STORAGE_MANAGERS_SHORTHAND = new HashMap<String, ConfigOption>() {{
put("berkeleyje", STORAGE_DIRECTORY);
put("hazelcast", STORAGE_DIRECTORY);
put("hazelcastcache", STORAGE_DIRECTORY);
put("infinispan", STORAGE_DIRECTORY);
put("cassandra", STORAGE_HOSTS);
put("cassandrathrift", STORAGE_HOSTS);
put("astyanax", STORAGE_HOSTS);
put("hbase", STORAGE_HOSTS);
put("embeddedcassandra", STORAGE_CONF_FILE);
put("inmemory", null);
}};
public static final Map<String, String> REGISTERED_INDEX_PROVIDERS = new HashMap<String, String>() {{
put("lucene", "com.thinkaurelius.titan.diskstorage.lucene.LuceneIndex");
put("elasticsearch", "com.thinkaurelius.titan.diskstorage.es.ElasticSearchIndex");
put("es", "com.thinkaurelius.titan.diskstorage.es.ElasticSearchIndex");
put("solr", "com.thinkaurelius.titan.diskstorage.solr.SolrIndex");
}};
public static final Map<String,String> REGISTERED_LOG_MANAGERS = new HashMap<String, String>() {{
put("default","com.thinkaurelius.titan.diskstorage.log.kcvs.KCVSLogManager");
}};
private final Function<String, Locker> CONSISTENT_KEY_LOCKER_CREATOR = new Function<String, Locker>() {
@Override
public Locker apply(String lockerName) {
KeyColumnValueStore lockerStore;
try {
lockerStore = storeManager.openDatabase(lockerName);
} catch (BackendException e) {
throw new TitanConfigurationException("Could not retrieve store named " + lockerName + " for locker configuration", e);
}
return new ConsistentKeyLocker.Builder(lockerStore, storeManager).fromConfig(configuration).build();
}
};
private final Function<String, Locker> ASTYANAX_RECIPE_LOCKER_CREATOR = new Function<String, Locker>() {
@Override
public Locker apply(String lockerName) {
String expectedManagerName = "com.thinkaurelius.titan.diskstorage.cassandra.astyanax.AstyanaxStoreManager";
String actualManagerName = storeManager.getClass().getCanonicalName();
// Require AstyanaxStoreManager
Preconditions.checkArgument(expectedManagerName.equals(actualManagerName),
"Astyanax Recipe locker is only supported with the Astyanax storage backend (configured:"
+ actualManagerName + " != required:" + expectedManagerName + ")");
try {
Class<?> c = storeManager.getClass();
Method method = c.getMethod("openLocker", String.class);
Object o = method.invoke(storeManager, lockerName);
return (Locker) o;
} catch (NoSuchMethodException e) {
throw new IllegalArgumentException("Could not find method when configuring locking with Astyanax Recipes");
} catch (IllegalAccessException e) {
throw new IllegalArgumentException("Could not access method when configuring locking with Astyanax Recipes", e);
} catch (InvocationTargetException e) {
throw new IllegalArgumentException("Could not invoke method when configuring locking with Astyanax Recipes", e);
}
}
};
private final Function<String, Locker> TEST_LOCKER_CREATOR = new Function<String, Locker>() {
@Override
public Locker apply(String lockerName) {
return openManagedLocker("com.thinkaurelius.titan.diskstorage.util.TestLockerManager",lockerName);
}
};
private final Map<String, Function<String, Locker>> REGISTERED_LOCKERS = ImmutableMap.of(
"consistentkey", CONSISTENT_KEY_LOCKER_CREATOR,
"astyanaxrecipe", ASTYANAX_RECIPE_LOCKER_CREATOR,
"test", TEST_LOCKER_CREATOR
);
private static Locker openManagedLocker(String classname, String lockerName) {
try {
Class c = Class.forName(classname);
Constructor constructor = c.getConstructor();
Object instance = constructor.newInstance();
Method method = c.getMethod("openLocker", String.class);
Object o = method.invoke(instance, lockerName);
return (Locker) o;
} catch (ClassNotFoundException e) {
throw new IllegalArgumentException("Could not find implementation class: " + classname);
} catch (InstantiationException e) {
throw new IllegalArgumentException("Could not instantiate implementation: " + classname, e);
} catch (NoSuchMethodException e) {
throw new IllegalArgumentException("Could not find method when configuring locking for: " + classname,e);
} catch (IllegalAccessException e) {
throw new IllegalArgumentException("Could not access method when configuring locking for: " + classname,e);
} catch (InvocationTargetException e) {
throw new IllegalArgumentException("Could not invoke method when configuring locking for: " + classname,e);
} catch (ClassCastException e) {
throw new IllegalArgumentException("Could not instantiate implementation: " + classname, e);
}
}
static {
Properties props;
try {
props = new Properties();
InputStream in = TitanFactory.class.getClassLoader().getResourceAsStream(TitanConstants.TITAN_PROPERTIES_FILE);
if (in != null && in.available() > 0) {
props.load(in);
}
} catch (IOException e) {
throw new AssertionError(e);
}
registerShorthands(props, "storage.", REGISTERED_STORAGE_MANAGERS);
registerShorthands(props, "index.", REGISTERED_INDEX_PROVIDERS);
}
public static final void registerShorthands(Properties props, String prefix, Map<String, String> shorthands) {
for (String key : props.stringPropertyNames()) {
if (key.toLowerCase().startsWith(prefix)) {
String shorthand = key.substring(prefix.length()).toLowerCase();
String clazz = props.getProperty(key);
shorthands.put(shorthand, clazz);
log.debug("Registering shorthand [{}] for [{}]", shorthand, clazz);
}
}
}
//
// public synchronized static final void registerStorageManager(String name, Class<? extends StoreManager> clazz) {
// Preconditions.checkNotNull(name);
// Preconditions.checkNotNull(clazz);
// Preconditions.checkArgument(!StringUtils.isEmpty(name));
    // Preconditions.checkArgument(!REGISTERED_STORAGE_MANAGERS.containsKey(name),"A storage manager has already been registered for name: " + name);
// REGISTERED_STORAGE_MANAGERS.put(name,clazz);
// }
//
// public synchronized static final void removeStorageManager(String name) {
// Preconditions.checkNotNull(name);
// REGISTERED_STORAGE_MANAGERS.remove(name);
// }
}
| 1no label
|
titan-core_src_main_java_com_thinkaurelius_titan_diskstorage_Backend.java
|
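The openManagedLocker helper in the record above resolves a locker manager purely by class name and duck-types its openLocker method through reflection. Below is a minimal, self-contained sketch of that same pattern; Locker and DemoLockerManager are hypothetical stand-ins, not Titan types, and only the public-no-arg-constructor plus openLocker(String) contract matters.

import java.lang.reflect.Method;

interface Locker {
}

class DemoLockerManager {
    public Locker openLocker(final String name) {
        return new Locker() {
            @Override
            public String toString() {
                return "locker:" + name;
            }
        };
    }
}

public class ManagedLockerSketch {
    static Locker openManagedLocker(String classname, String lockerName) throws Exception {
        Class<?> c = Class.forName(classname);                // resolve the implementation by name
        Object instance = c.getConstructor().newInstance();   // requires a public no-arg constructor
        Method method = c.getMethod("openLocker", String.class);
        return (Locker) method.invoke(instance, lockerName);  // duck-typed call, cast-checked at runtime
    }

    public static void main(String[] args) throws Exception {
        System.out.println(openManagedLocker("DemoLockerManager", "expiry")); // prints locker:expiry
    }
}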
5,766 |
public static class HitContext {
private InternalSearchHit hit;
private IndexReader topLevelReader;
private int topLevelDocId;
private AtomicReaderContext readerContext;
private int docId;
private FieldsVisitor fieldVisitor;
private Map<String, Object> cache;
private IndexSearcher atomicIndexSearcher;
public void reset(InternalSearchHit hit, AtomicReaderContext context, int docId, IndexReader topLevelReader, int topLevelDocId, FieldsVisitor fieldVisitor) {
this.hit = hit;
this.readerContext = context;
this.docId = docId;
this.topLevelReader = topLevelReader;
this.topLevelDocId = topLevelDocId;
this.fieldVisitor = fieldVisitor;
this.atomicIndexSearcher = null;
}
public InternalSearchHit hit() {
return hit;
}
public AtomicReader reader() {
return readerContext.reader();
}
public AtomicReaderContext readerContext() {
return readerContext;
}
public IndexSearcher searcher() {
if (atomicIndexSearcher == null) {
// Use the reader directly otherwise the IndexSearcher assertion will trip because it expects a top level
// reader context.
atomicIndexSearcher = new IndexSearcher(readerContext.reader());
}
return atomicIndexSearcher;
}
public int docId() {
return docId;
}
public IndexReader topLevelReader() {
return topLevelReader;
}
public int topLevelDocId() {
return topLevelDocId;
}
public FieldsVisitor fieldVisitor() {
return fieldVisitor;
}
public Map<String, Object> cache() {
if (cache == null) {
cache = Maps.newHashMap();
}
return cache;
}
}
| 1no label
|
src_main_java_org_elasticsearch_search_fetch_FetchSubPhase.java
|
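HitContext above caches a per-segment IndexSearcher and clears it on every reset(). A minimal sketch of that reset-then-lazy-init pattern, with a hypothetical Searcher stand-in instead of Lucene's IndexSearcher:

public class LazySearcherSketch {
    static class Searcher {                 // stand-in for org.apache.lucene.search.IndexSearcher
        final String segment;
        Searcher(String segment) { this.segment = segment; }
    }

    private String segment;
    private Searcher cached;

    void reset(String newSegment) {
        this.segment = newSegment;
        this.cached = null;                 // invalidate: rebuilt lazily on next access
    }

    Searcher searcher() {
        if (cached == null) {
            cached = new Searcher(segment); // built at most once per reset()
        }
        return cached;
    }

    public static void main(String[] args) {
        LazySearcherSketch ctx = new LazySearcherSketch();
        ctx.reset("_0");
        Searcher first = ctx.searcher();
        ctx.reset("_1");
        System.out.println(first == ctx.searcher()); // false: a new searcher after reset
    }
}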
16 |
class CommandExecutor implements Runnable {
final TextCommand command;
CommandExecutor(TextCommand command) {
this.command = command;
}
@Override
public void run() {
try {
TextCommandType type = command.getType();
TextCommandProcessor textCommandProcessor = textCommandProcessors[type.getValue()];
textCommandProcessor.handle(command);
} catch (Throwable e) {
logger.warning(e);
}
}
}
| 0true
|
hazelcast_src_main_java_com_hazelcast_ascii_TextCommandServiceImpl.java
|
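CommandExecutor above dispatches by indexing a processor array with the command type's numeric value. A hedged sketch of that type-indexed dispatch, with hypothetical stand-ins for TextCommand and its processors:

public class TypeIndexedDispatchSketch {
    enum CommandType {
        GET(0), SET(1);
        private final int value;
        CommandType(int value) { this.value = value; }
        int getValue() { return value; }
    }

    interface Processor { void handle(String payload); }

    public static void main(String[] args) {
        // One processor per type value: array index == CommandType.getValue()
        Processor[] processors = new Processor[2];
        processors[CommandType.GET.getValue()] = payload -> System.out.println("get " + payload);
        processors[CommandType.SET.getValue()] = payload -> System.out.println("set " + payload);

        CommandType type = CommandType.SET;
        processors[type.getValue()].handle("key=value"); // O(1) dispatch, no switch statement
    }
}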
2,574 |
clusterService.submitStateUpdateTask("zen-disco-minimum_master_nodes_changed", Priority.URGENT, new ProcessedClusterStateUpdateTask() {
@Override
public ClusterState execute(ClusterState currentState) {
final int prevMinimumMasterNode = ZenDiscovery.this.electMaster.minimumMasterNodes();
ZenDiscovery.this.electMaster.minimumMasterNodes(minimumMasterNodes);
// check if we have enough master nodes, if not, we need to move into joining the cluster again
if (!electMaster.hasEnoughMasterNodes(currentState.nodes())) {
return rejoin(currentState, "not enough master nodes on change of minimum_master_nodes from [" + prevMinimumMasterNode + "] to [" + minimumMasterNodes + "]");
}
return currentState;
}
@Override
public void onFailure(String source, Throwable t) {
logger.error("unexpected failure during [{}]", t, source);
}
@Override
public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) {
sendInitialStateEventIfNeeded();
}
});
| 1no label
|
src_main_java_org_elasticsearch_discovery_zen_ZenDiscovery.java
|
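The submitStateUpdateTask call above funnels every state change through one updater: each task computes a new state from the current one, or returns the current state unchanged. A loose, minimal sketch of that pattern using a single-threaded executor and an immutable state object (all names hypothetical, not the Elasticsearch API):

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.function.UnaryOperator;

public class StateUpdaterSketch {
    static final class State {
        final int minimumMasterNodes;
        State(int minimumMasterNodes) { this.minimumMasterNodes = minimumMasterNodes; }
    }

    // All mutations run on one thread, so tasks never race on currentState.
    private final ExecutorService updater = Executors.newSingleThreadExecutor();
    private volatile State currentState = new State(1);

    void submitStateUpdateTask(String source, UnaryOperator<State> task) {
        updater.execute(() -> {
            State newState = task.apply(currentState);
            if (newState != currentState) {
                System.out.println(source + ": state changed");
                currentState = newState;
            }
        });
    }

    public static void main(String[] args) throws InterruptedException {
        StateUpdaterSketch cluster = new StateUpdaterSketch();
        cluster.submitStateUpdateTask("minimum_master_nodes_changed",
                state -> state.minimumMasterNodes == 2 ? state : new State(2));
        cluster.updater.shutdown();
        cluster.updater.awaitTermination(1, TimeUnit.SECONDS);
    }
}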
1,500 |
public class OObjectEntitySerializer {
private static final Set<Class<?>> classes = new HashSet<Class<?>>();
private static final HashMap<Class<?>, List<String>> allFields = new HashMap<Class<?>, List<String>>();
private static final HashMap<Class<?>, List<String>> embeddedFields = new HashMap<Class<?>, List<String>>();
private static final HashMap<Class<?>, List<String>> directAccessFields = new HashMap<Class<?>, List<String>>();
private static final HashMap<Class<?>, Field> boundDocumentFields = new HashMap<Class<?>, Field>();
private static final HashMap<Class<?>, List<String>> transientFields = new HashMap<Class<?>, List<String>>();
private static final HashMap<Class<?>, List<String>> cascadeDeleteFields = new HashMap<Class<?>, List<String>>();
private static final HashMap<Class<?>, Map<Field, Class<?>>> serializedFields = new HashMap<Class<?>, Map<Field, Class<?>>>();
private static final HashMap<Class<?>, Field> fieldIds = new HashMap<Class<?>, Field>();
private static final HashMap<Class<?>, Field> fieldVersions = new HashMap<Class<?>, Field>();
private static final HashMap<String, List<Method>> callbacks = new HashMap<String, List<Method>>();
/**
* Method that, given an object, serializes it and creates a proxy entity, in case the object wasn't generated using
* ODatabaseObject.newInstance()
*
* @param o
* - the object to serialize
* @return the proxied object
*/
public static <T> T serializeObject(T o, ODatabaseObject db) {
if (o instanceof Proxy) {
final ODocument iRecord = getDocument((Proxy) o);
Class<?> pojoClass = o.getClass().getSuperclass();
invokeCallback(pojoClass, o, iRecord, OBeforeSerialization.class);
invokeCallback(pojoClass, o, iRecord, OAfterSerialization.class);
return o;
}
Proxy proxiedObject = (Proxy) db.newInstance(o.getClass());
try {
return toStream(o, proxiedObject, db);
} catch (IllegalArgumentException e) {
throw new OSerializationException("Error serializing object of class " + o.getClass(), e);
} catch (IllegalAccessException e) {
throw new OSerializationException("Error serializing object of class " + o.getClass(), e);
}
}
/**
* Method that attaches all data contained in the object to the associated document
*
* @param <T>
* @param o
* :- the object to attach
* @param db
* :- the database instance
* @return the object serialized or with attached data
*/
public static <T> T attach(T o, ODatabaseObject db) {
if (o instanceof Proxy) {
OObjectProxyMethodHandler handler = (OObjectProxyMethodHandler) ((ProxyObject) o).getHandler();
try {
handler.attach(o);
} catch (IllegalArgumentException e) {
throw new OSerializationException("Error detaching object of class " + o.getClass(), e);
} catch (IllegalAccessException e) {
throw new OSerializationException("Error detaching object of class " + o.getClass(), e);
} catch (NoSuchMethodException e) {
throw new OSerializationException("Error detaching object of class " + o.getClass(), e);
} catch (InvocationTargetException e) {
throw new OSerializationException("Error detaching object of class " + o.getClass(), e);
}
return o;
} else
return serializeObject(o, db);
}
/**
* Method that detaches all fields contained in the document, copying them into the given object. By default it returns a
* proxied instance. To get a detached non-proxied instance @see
* {@link OObjectEntitySerializer#detach(T o, ODatabaseObject db, boolean returnNonProxiedInstance)}
*
* @param <T>
* @param o
* :- the object to detach
* @param db
* :- the database instance
* @return proxied instance: the object serialized or with detached data
*/
public static <T> T detach(T o, ODatabaseObject db) {
return detach(o, db, false);
}
/**
* Method that detaches all fields contained in the document, copying them into the given object.
*
* @param <T>
* @param o
* :- the object to detach
* @param db
* :- the database instance
* @param returnNonProxiedInstance
* :- defines if the return object will be a proxied instance or not. If set to TRUE and the object does not contain @Id
* and @Version fields it could produce data replication
* @return the object serialized or with detached data
*/
public static <T> T detach(T o, ODatabaseObject db, boolean returnNonProxiedInstance) {
if (o instanceof Proxy) {
OObjectProxyMethodHandler handler = (OObjectProxyMethodHandler) ((ProxyObject) o).getHandler();
try {
if (returnNonProxiedInstance) {
o = getNonProxiedInstance(o);
}
handler.detach(o, returnNonProxiedInstance);
} catch (IllegalArgumentException e) {
throw new OSerializationException("Error detaching object of class " + o.getClass(), e);
} catch (IllegalAccessException e) {
throw new OSerializationException("Error detaching object of class " + o.getClass(), e);
} catch (NoSuchMethodException e) {
throw new OSerializationException("Error detaching object of class " + o.getClass(), e);
} catch (InvocationTargetException e) {
throw new OSerializationException("Error detaching object of class " + o.getClass(), e);
}
return o;
} else if (!returnNonProxiedInstance)
return serializeObject(o, db);
return o;
}
/**
* Method that detaches all fields contained in the document, copying them into the given object, recursively over the whole
* object tree. This may throw a {@link StackOverflowError} with big object trees. To avoid it, increase the stack size with the -Xss java option
*
* @param <T>
* @param o
* :- the object to detach
* @param db
* :- the database instance
* @param returnNonProxiedInstance
* :- defines if the return object will be a proxied instance or not. If set to TRUE and the object does not contain @Id
* and @Version fields it could produce data replication
* @return the object serialized or with detached data
*/
public static <T> T detachAll(T o, ODatabaseObject db, boolean returnNonProxiedInstance) {
if (o instanceof Proxy) {
OObjectProxyMethodHandler handler = (OObjectProxyMethodHandler) ((ProxyObject) o).getHandler();
try {
if (returnNonProxiedInstance) {
o = getNonProxiedInstance(o);
}
handler.detachAll(o, returnNonProxiedInstance);
} catch (IllegalArgumentException e) {
throw new OSerializationException("Error detaching object of class " + o.getClass(), e);
} catch (IllegalAccessException e) {
throw new OSerializationException("Error detaching object of class " + o.getClass(), e);
} catch (NoSuchMethodException e) {
throw new OSerializationException("Error detaching object of class " + o.getClass(), e);
} catch (InvocationTargetException e) {
throw new OSerializationException("Error detaching object of class " + o.getClass(), e);
}
return o;
} else if (!returnNonProxiedInstance)
return serializeObject(o, db);
return o;
}
/**
* Method that given a proxied entity returns the associated ODocument
*
* @param proxiedObject
* - the proxied entity object
* @return The ODocument associated with the object
*/
public static ODocument getDocument(Proxy proxiedObject) {
return ((OObjectProxyMethodHandler) ((ProxyObject) proxiedObject).getHandler()).getDoc();
}
/**
* Method that given a proxied entity returns the associated ODocument RID
*
* @param proxiedObject
* - the proxied entity object
* @return The ORID of the associated ODocument
*/
public static ORID getRid(Proxy proxiedObject) {
return getDocument(proxiedObject).getIdentity();
}
/**
* Method that given a proxied entity returns the associated ODocument version
*
* @param proxiedObject
* - the proxied entity object
* @return The version of the associated ODocument
*/
public static ORecordVersion getVersion(Proxy proxiedObject) {
return getDocument(proxiedObject).getRecordVersion();
}
public static boolean isClassField(Class<?> iClass, String iField) {
checkClassRegistration(iClass);
boolean isClassField = false;
for (Class<?> currentClass = iClass; currentClass != null && currentClass != Object.class
&& !currentClass.equals(ODocument.class) && !isClassField;) {
List<String> allClassFields = allFields.get(currentClass);
isClassField = allClassFields != null && allClassFields.contains(iField);
currentClass = currentClass.getSuperclass();
}
return isClassField;
}
public static boolean isTransientField(Class<?> iClass, String iField) {
checkClassRegistration(iClass);
boolean isTransientField = false;
for (Class<?> currentClass = iClass; currentClass != null && currentClass != Object.class
&& !currentClass.equals(ODocument.class) && !isTransientField;) {
List<String> classCascadeDeleteFields = transientFields.get(currentClass);
isTransientField = classCascadeDeleteFields != null && classCascadeDeleteFields.contains(iField);
currentClass = currentClass.getSuperclass();
}
return isTransientField;
}
public static List<String> getCascadeDeleteFields(Class<?> iClass) {
checkClassRegistration(iClass);
List<String> classCascadeDeleteFields = new ArrayList<String>();
for (Class<?> currentClass = iClass; currentClass != null && currentClass != Object.class
&& !currentClass.equals(ODocument.class);) {
List<String> classDeleteFields = cascadeDeleteFields.get(currentClass);
if (classDeleteFields != null)
classCascadeDeleteFields.addAll(classDeleteFields);
currentClass = currentClass.getSuperclass();
}
return classCascadeDeleteFields;
}
public static List<String> getCascadeDeleteFields(String iClassName) {
if (iClassName == null || iClassName.isEmpty())
return null;
for (Class<?> iClass : cascadeDeleteFields.keySet()) {
if (iClass.getSimpleName().equals(iClassName))
return getCascadeDeleteFields(iClass);
}
return null;
}
public static boolean isCascadeDeleteField(Class<?> iClass, String iField) {
checkClassRegistration(iClass);
    boolean isCascadeDeleteField = false;
    for (Class<?> currentClass = iClass; currentClass != null && currentClass != Object.class
        && !currentClass.equals(ODocument.class) && !isCascadeDeleteField;) {
      List<String> classCascadeDeleteFields = cascadeDeleteFields.get(currentClass);
      isCascadeDeleteField = classCascadeDeleteFields != null && classCascadeDeleteFields.contains(iField);
      currentClass = currentClass.getSuperclass();
    }
    return isCascadeDeleteField;
}
public static boolean isEmbeddedField(Class<?> iClass, String iField) {
checkClassRegistration(iClass);
boolean isEmbeddedField = false;
for (Class<?> currentClass = iClass; currentClass != null && currentClass != Object.class
&& !currentClass.equals(ODocument.class) && !isEmbeddedField;) {
List<String> classEmbeddedFields = embeddedFields.get(currentClass);
isEmbeddedField = classEmbeddedFields != null && classEmbeddedFields.contains(iField);
currentClass = currentClass.getSuperclass();
}
return isEmbeddedField;
}
protected static void checkClassRegistration(Class<?> iClass) {
if (!classes.contains(iClass) && !(Proxy.class.isAssignableFrom(iClass)))
registerClass(iClass);
}
/**
* Registers the class information that will be used in serialization, deserialization and lazy loading of it. If already
* registered, does nothing.
*
* @param iClass
* :- the Class<?> to register
*/
@SuppressWarnings("unchecked")
public static synchronized void registerClass(final Class<?> iClass) {
if (Proxy.class.isAssignableFrom(iClass) || iClass.isEnum() || OReflectionHelper.isJavaType(iClass)
|| iClass.isAnonymousClass() || classes.contains(iClass))
return;
boolean reloadSchema = false;
boolean automaticSchemaGeneration = false;
if (ODatabaseRecordThreadLocal.INSTANCE.isDefined() && !ODatabaseRecordThreadLocal.INSTANCE.get().isClosed()
&& !ODatabaseRecordThreadLocal.INSTANCE.get().getMetadata().getSchema().existsClass(iClass.getSimpleName())) {
ODatabaseRecordThreadLocal.INSTANCE.get().getMetadata().getSchema().createClass(iClass.getSimpleName());
reloadSchema = true;
if (ODatabaseRecordThreadLocal.INSTANCE.get().getDatabaseOwner() instanceof OObjectDatabaseTx)
automaticSchemaGeneration = ((OObjectDatabaseTx) ODatabaseRecordThreadLocal.INSTANCE.get().getDatabaseOwner())
.isAutomaticSchemaGeneration();
}
for (Class<?> currentClass = iClass; currentClass != Object.class;) {
if (!classes.contains(currentClass)) {
classes.add(currentClass);
Class<?> fieldType;
for (Field f : currentClass.getDeclaredFields()) {
final String fieldName = f.getName();
final int fieldModifier = f.getModifiers();
List<String> allClassFields = allFields.get(currentClass);
if (allClassFields == null)
allClassFields = new ArrayList<String>();
allClassFields.add(fieldName);
allFields.put(currentClass, allClassFields);
if (Modifier.isStatic(fieldModifier) || Modifier.isFinal(fieldModifier) || Modifier.isNative(fieldModifier)
|| Modifier.isTransient(fieldModifier)) {
List<String> classTransientFields = transientFields.get(currentClass);
if (classTransientFields == null)
classTransientFields = new ArrayList<String>();
classTransientFields.add(fieldName);
transientFields.put(currentClass, classTransientFields);
}
if (fieldName.equals("this$0")) {
List<String> classTransientFields = transientFields.get(currentClass);
if (classTransientFields == null)
classTransientFields = new ArrayList<String>();
classTransientFields.add(fieldName);
transientFields.put(currentClass, classTransientFields);
}
if (OObjectSerializerHelper.jpaTransientClass != null) {
Annotation ann = f.getAnnotation(OObjectSerializerHelper.jpaTransientClass);
if (ann != null) {
// @Transient DEFINED
List<String> classTransientFields = transientFields.get(currentClass);
if (classTransientFields == null)
classTransientFields = new ArrayList<String>();
classTransientFields.add(fieldName);
transientFields.put(currentClass, classTransientFields);
}
}
if (OObjectSerializerHelper.jpaOneToOneClass != null) {
Annotation ann = f.getAnnotation(OObjectSerializerHelper.jpaOneToOneClass);
if (ann != null) {
// @OneToOne DEFINED
OneToOne oneToOne = ((OneToOne) ann);
if (checkCascadeDelete(oneToOne)) {
addCascadeDeleteField(currentClass, fieldName);
}
}
}
if (OObjectSerializerHelper.jpaOneToManyClass != null) {
Annotation ann = f.getAnnotation(OObjectSerializerHelper.jpaOneToManyClass);
if (ann != null) {
// @OneToMany DEFINED
OneToMany oneToMany = ((OneToMany) ann);
if (checkCascadeDelete(oneToMany)) {
addCascadeDeleteField(currentClass, fieldName);
}
}
}
if (OObjectSerializerHelper.jpaManyToManyClass != null) {
Annotation ann = f.getAnnotation(OObjectSerializerHelper.jpaManyToManyClass);
if (ann != null) {
            // @ManyToMany DEFINED
ManyToMany manyToMany = ((ManyToMany) ann);
if (checkCascadeDelete(manyToMany)) {
addCascadeDeleteField(currentClass, fieldName);
}
}
}
fieldType = f.getType();
if (Collection.class.isAssignableFrom(fieldType) || fieldType.isArray() || Map.class.isAssignableFrom(fieldType)) {
fieldType = OReflectionHelper.getGenericMultivalueType(f);
}
if (isToSerialize(fieldType)) {
Map<Field, Class<?>> serializeClass = serializedFields.get(currentClass);
if (serializeClass == null)
serializeClass = new HashMap<Field, Class<?>>();
serializeClass.put(f, fieldType);
serializedFields.put(currentClass, serializeClass);
}
// CHECK FOR DIRECT-BINDING
boolean directBinding = true;
if (f.getAnnotation(OAccess.class) == null || f.getAnnotation(OAccess.class).value() == OAccess.OAccessType.PROPERTY)
directBinding = true;
// JPA 2+ AVAILABLE?
else if (OObjectSerializerHelper.jpaAccessClass != null) {
Annotation ann = f.getAnnotation(OObjectSerializerHelper.jpaAccessClass);
if (ann != null) {
directBinding = true;
}
}
if (directBinding) {
List<String> classDirectAccessFields = directAccessFields.get(currentClass);
if (classDirectAccessFields == null)
classDirectAccessFields = new ArrayList<String>();
classDirectAccessFields.add(fieldName);
directAccessFields.put(currentClass, classDirectAccessFields);
}
if (f.getAnnotation(ODocumentInstance.class) != null)
// BOUND DOCUMENT ON IT
boundDocumentFields.put(currentClass, f);
boolean idFound = false;
if (f.getAnnotation(OId.class) != null) {
// RECORD ID
fieldIds.put(currentClass, f);
idFound = true;
}
// JPA 1+ AVAILABLE?
else if (OObjectSerializerHelper.jpaIdClass != null && f.getAnnotation(OObjectSerializerHelper.jpaIdClass) != null) {
// RECORD ID
fieldIds.put(currentClass, f);
idFound = true;
}
if (idFound) {
// CHECK FOR TYPE
if (fieldType.isPrimitive())
OLogManager.instance().warn(OObjectSerializerHelper.class, "Field '%s' cannot be a literal to manage the Record Id",
f.toString());
else if (!ORID.class.isAssignableFrom(fieldType) && fieldType != String.class && fieldType != Object.class
&& !Number.class.isAssignableFrom(fieldType))
OLogManager.instance().warn(OObjectSerializerHelper.class, "Field '%s' cannot be managed as type: %s", f.toString(),
fieldType);
}
boolean vFound = false;
if (f.getAnnotation(OVersion.class) != null) {
          // RECORD VERSION
fieldVersions.put(currentClass, f);
vFound = true;
}
// JPA 1+ AVAILABLE?
else if (OObjectSerializerHelper.jpaVersionClass != null
&& f.getAnnotation(OObjectSerializerHelper.jpaVersionClass) != null) {
          // RECORD VERSION
fieldVersions.put(currentClass, f);
vFound = true;
}
if (vFound) {
// CHECK FOR TYPE
if (fieldType.isPrimitive())
OLogManager.instance().warn(OObjectSerializerHelper.class, "Field '%s' cannot be a literal to manage the Version",
f.toString());
else if (fieldType != String.class && fieldType != Object.class && !Number.class.isAssignableFrom(fieldType))
OLogManager.instance().warn(OObjectSerializerHelper.class, "Field '%s' cannot be managed as type: %s", f.toString(),
fieldType);
}
// JPA 1+ AVAILABLE?
if (OObjectSerializerHelper.jpaEmbeddedClass != null && f.getAnnotation(OObjectSerializerHelper.jpaEmbeddedClass) != null) {
List<String> classEmbeddedFields = embeddedFields.get(currentClass);
if (classEmbeddedFields == null)
classEmbeddedFields = new ArrayList<String>();
classEmbeddedFields.add(fieldName);
embeddedFields.put(currentClass, classEmbeddedFields);
}
}
registerCallbacks(currentClass);
}
if (automaticSchemaGeneration && !currentClass.equals(Object.class) && !currentClass.equals(ODocument.class)) {
((OSchemaProxyObject) ODatabaseRecordThreadLocal.INSTANCE.get().getDatabaseOwner().getMetadata().getSchema())
.generateSchema(currentClass, ODatabaseRecordThreadLocal.INSTANCE.get());
}
String iClassName = currentClass.getSimpleName();
currentClass = currentClass.getSuperclass();
if (currentClass == null || currentClass.equals(ODocument.class))
// POJO EXTENDS ODOCUMENT: SPECIAL CASE: AVOID TO CONSIDER
// ODOCUMENT FIELDS
currentClass = Object.class;
if (ODatabaseRecordThreadLocal.INSTANCE.get() != null && !ODatabaseRecordThreadLocal.INSTANCE.get().isClosed()
&& !currentClass.equals(Object.class)) {
OClass oSuperClass;
OClass currentOClass = ODatabaseRecordThreadLocal.INSTANCE.get().getMetadata().getSchema().getClass(iClassName);
if (!ODatabaseRecordThreadLocal.INSTANCE.get().getMetadata().getSchema().existsClass(currentClass.getSimpleName())) {
oSuperClass = ODatabaseRecordThreadLocal.INSTANCE.get().getMetadata().getSchema()
.createClass(currentClass.getSimpleName());
reloadSchema = true;
} else {
oSuperClass = ODatabaseRecordThreadLocal.INSTANCE.get().getMetadata().getSchema().getClass(currentClass.getSimpleName());
reloadSchema = true;
}
if (currentOClass.getSuperClass() == null || !currentOClass.getSuperClass().equals(oSuperClass)) {
currentOClass.setSuperClass(oSuperClass);
reloadSchema = true;
}
}
}
if (ODatabaseRecordThreadLocal.INSTANCE.get() != null && !ODatabaseRecordThreadLocal.INSTANCE.get().isClosed() && reloadSchema) {
ODatabaseRecordThreadLocal.INSTANCE.get().getMetadata().getSchema().save();
ODatabaseRecordThreadLocal.INSTANCE.get().getMetadata().getSchema().reload();
}
}
public static void deregisterClass(Class<?> iClass) {
classes.remove(iClass);
}
protected static boolean checkCascadeDelete(final OneToOne oneToOne) {
return oneToOne.orphanRemoval() || checkCascadeAnnotationAttribute(oneToOne.cascade());
}
protected static boolean checkCascadeDelete(OneToMany oneToMany) {
return oneToMany.orphanRemoval() || checkCascadeAnnotationAttribute(oneToMany.cascade());
}
protected static boolean checkCascadeDelete(ManyToMany manyToMany) {
return checkCascadeAnnotationAttribute(manyToMany.cascade());
}
protected static boolean checkCascadeAnnotationAttribute(CascadeType[] cascadeList) {
if (cascadeList == null || cascadeList.length <= 0)
return false;
for (CascadeType type : cascadeList) {
if (type.equals(CascadeType.ALL) || type.equals(CascadeType.REMOVE))
return true;
}
return false;
}
protected static void addCascadeDeleteField(Class<?> currentClass, final String fieldName) {
List<String> classCascadeDeleteFields = cascadeDeleteFields.get(currentClass);
if (classCascadeDeleteFields == null)
classCascadeDeleteFields = new ArrayList<String>();
classCascadeDeleteFields.add(fieldName);
cascadeDeleteFields.put(currentClass, classCascadeDeleteFields);
}
public static boolean isSerializedType(final Field iField) {
if (!classes.contains(iField.getDeclaringClass()))
      registerClass(iField.getDeclaringClass());
Map<Field, Class<?>> serializerFields = serializedFields.get(iField.getDeclaringClass());
return serializerFields != null && serializerFields.get(iField) != null;
}
public static Class<?> getSerializedType(final Field iField) {
if (!classes.contains(iField.getDeclaringClass()))
      registerClass(iField.getDeclaringClass());
return serializedFields.get(iField.getDeclaringClass()) != null ? serializedFields.get(iField.getDeclaringClass()).get(iField)
: null;
}
public static boolean isToSerialize(final Class<?> type) {
for (Class<?> classContext : OObjectSerializerHelper.serializerContexts.keySet()) {
if (classContext != null && classContext.isAssignableFrom(type)) {
return true;
}
}
return OObjectSerializerHelper.serializerContexts.get(null) != null
&& OObjectSerializerHelper.serializerContexts.get(null).isClassBinded(type);
}
public static Object serializeFieldValue(final Class<?> type, final Object iFieldValue) {
for (Class<?> classContext : OObjectSerializerHelper.serializerContexts.keySet()) {
if (classContext != null && classContext.isAssignableFrom(type)) {
return OObjectSerializerHelper.serializerContexts.get(classContext).serializeFieldValue(type, iFieldValue);
}
}
if (OObjectSerializerHelper.serializerContexts.get(null) != null)
return OObjectSerializerHelper.serializerContexts.get(null).serializeFieldValue(type, iFieldValue);
return iFieldValue;
}
public static Object deserializeFieldValue(final Class<?> type, final Object iFieldValue) {
for (Class<?> classContext : OObjectSerializerHelper.serializerContexts.keySet()) {
if (classContext != null && classContext.isAssignableFrom(type)) {
return OObjectSerializerHelper.serializerContexts.get(classContext).unserializeFieldValue(type, iFieldValue);
}
}
if (OObjectSerializerHelper.serializerContexts.get(null) != null)
return OObjectSerializerHelper.serializerContexts.get(null).unserializeFieldValue(type, iFieldValue);
return iFieldValue;
}
public static Object typeToStream(Object iFieldValue, OType iType, final ODatabaseObject db, final ODocument iRecord) {
if (iFieldValue == null)
return null;
if (iFieldValue instanceof Proxy)
return getDocument((Proxy) iFieldValue);
if (!OType.isSimpleType(iFieldValue) || iFieldValue.getClass().isArray()) {
Class<?> fieldClass = iFieldValue.getClass();
if (fieldClass.isArray()) {
if (iType != null && iType.equals(OType.BINARY))
return iFieldValue;
// ARRAY
final int arrayLength = Array.getLength(iFieldValue);
final List<Object> arrayList = new ArrayList<Object>();
for (int i = 0; i < arrayLength; i++)
arrayList.add(Array.get(iFieldValue, i));
iFieldValue = multiValueToStream(arrayList, iType, db, iRecord);
} else if (Collection.class.isAssignableFrom(fieldClass)) {
// COLLECTION (LIST OR SET)
iFieldValue = multiValueToStream(iFieldValue, iType, db, iRecord);
} else if (Map.class.isAssignableFrom(fieldClass)) {
// MAP
iFieldValue = multiValueToStream(iFieldValue, iType, db, iRecord);
} else if (fieldClass.isEnum()) {
// ENUM
iFieldValue = ((Enum<?>) iFieldValue).name();
} else {
// LINK OR EMBEDDED
fieldClass = db.getEntityManager().getEntityClass(fieldClass.getSimpleName());
if (fieldClass != null) {
// RECOGNIZED TYPE, SERIALIZE IT
iFieldValue = getDocument((Proxy) serializeObject(iFieldValue, db));
} else {
final Object result = serializeFieldValue(null, iFieldValue);
if (iFieldValue == result && !ORecordAbstract.class.isAssignableFrom(result.getClass()))
throw new OSerializationException("Linked type [" + iFieldValue.getClass() + ":" + iFieldValue
+ "] cannot be serialized because is not part of registered entities. To fix this error register this class");
iFieldValue = result;
}
}
}
return iFieldValue;
}
public static List<String> getClassFields(final Class<?> iClass) {
return allFields.get(iClass);
}
public static boolean hasBoundedDocumentField(final Class<?> iClass) {
if (!classes.contains(iClass)) {
registerClass(iClass);
}
boolean hasBoundedField = false;
for (Class<?> currentClass = iClass; currentClass != null && currentClass != Object.class
&& !currentClass.equals(ODocument.class) && !hasBoundedField;) {
hasBoundedField = boundDocumentFields.get(currentClass) != null;
currentClass = currentClass.getSuperclass();
}
return hasBoundedField;
}
public static Field getBoundedDocumentField(final Class<?> iClass) {
if (!classes.contains(iClass)) {
registerClass(iClass);
}
for (Class<?> currentClass = iClass; currentClass != null && currentClass != Object.class
&& !currentClass.equals(ODocument.class);) {
Field f = boundDocumentFields.get(currentClass);
if (f != null)
return f;
currentClass = currentClass.getSuperclass();
}
return null;
}
public static boolean isIdField(final Class<?> iClass, String iFieldName) {
if (!classes.contains(iClass)) {
registerClass(iClass);
}
boolean isIdField = false;
for (Class<?> currentClass = iClass; currentClass != null && currentClass != Object.class
&& !currentClass.equals(ODocument.class) && !isIdField;) {
Field f = fieldIds.get(currentClass);
isIdField = f != null && f.getName().equals(iFieldName);
currentClass = currentClass.getSuperclass();
}
return isIdField;
}
public static boolean isIdField(Field iField) {
if (!classes.contains(iField.getDeclaringClass())) {
registerClass(iField.getDeclaringClass());
}
return fieldIds.containsValue(iField);
}
public static Field getIdField(final Class<?> iClass) {
if (!classes.contains(iClass)) {
registerClass(iClass);
}
Field idField = null;
for (Class<?> currentClass = iClass; currentClass != null && currentClass != Object.class
&& !currentClass.equals(ODocument.class) && idField == null;) {
idField = fieldIds.get(currentClass);
currentClass = currentClass.getSuperclass();
}
return idField;
}
public static void setIdField(final Class<?> iClass, Object iObject, ORID iValue) throws IllegalArgumentException,
IllegalAccessException {
if (!classes.contains(iClass)) {
registerClass(iClass);
}
Field f = null;
for (Class<?> currentClass = iClass; currentClass != null && currentClass != Object.class
&& !currentClass.equals(ODocument.class);) {
f = fieldIds.get(currentClass);
if (f != null)
break;
currentClass = currentClass.getSuperclass();
}
if (f != null) {
if (f.getType().equals(String.class))
setFieldValue(f, iObject, iValue.toString());
else if (f.getType().equals(Long.class))
setFieldValue(f, iObject, iValue.getClusterPosition().longValue());
else if (f.getType().isAssignableFrom(ORID.class))
setFieldValue(f, iObject, iValue);
}
}
public static boolean isVersionField(final Class<?> iClass, String iFieldName) {
if (!classes.contains(iClass)) {
registerClass(iClass);
}
boolean isVersionField = false;
for (Class<?> currentClass = iClass; currentClass != null && currentClass != Object.class
&& !currentClass.equals(ODocument.class) && !isVersionField;) {
Field f = fieldVersions.get(currentClass);
isVersionField = f != null && f.getName().equals(iFieldName);
currentClass = currentClass.getSuperclass();
}
return isVersionField;
}
public static Field getVersionField(final Class<?> iClass) {
if (!classes.contains(iClass)) {
registerClass(iClass);
}
Field versionField = null;
for (Class<?> currentClass = iClass; currentClass != null && currentClass != Object.class
&& !currentClass.equals(ODocument.class) && versionField == null;) {
versionField = fieldVersions.get(currentClass);
currentClass = currentClass.getSuperclass();
}
return versionField;
}
public static void setVersionField(final Class<?> iClass, Object iObject, ORecordVersion iValue) throws IllegalArgumentException,
IllegalAccessException {
Field f = getVersionField(iClass);
if (f != null) {
if (f.getType().equals(String.class))
setFieldValue(f, iObject, String.valueOf(iValue));
else if (f.getType().equals(Long.class)) {
if (iValue instanceof OSimpleVersion)
setFieldValue(f, iObject, (long) iValue.getCounter());
else
OLogManager
.instance()
.warn(OObjectEntitySerializer.class,
"@Version field can't be declared as Long in distributed mode. Should be one of following: String, Object, ORecordVersion");
} else if (f.getType().equals(Object.class) || ORecordVersion.class.isAssignableFrom(f.getType()))
setFieldValue(f, iObject, iValue);
}
}
public static Object getFieldValue(Field iField, Object iInstance) throws IllegalArgumentException, IllegalAccessException {
if (!iField.isAccessible()) {
iField.setAccessible(true);
}
return iField.get(iInstance);
}
public static void setFieldValue(Field iField, Object iInstance, Object iValue) throws IllegalArgumentException,
IllegalAccessException {
if (!iField.isAccessible()) {
iField.setAccessible(true);
}
iField.set(iInstance, iValue);
}
public static void invokeBeforeSerializationCallbacks(Class<?> iClass, Object iInstance, ODocument iDocument) {
invokeCallback(iClass, iInstance, iDocument, OBeforeSerialization.class);
}
public static void invokeAfterSerializationCallbacks(Class<?> iClass, Object iInstance, ODocument iDocument) {
invokeCallback(iClass, iInstance, iDocument, OAfterSerialization.class);
}
public static void invokeAfterDeserializationCallbacks(Class<?> iClass, Object iInstance, ODocument iDocument) {
invokeCallback(iClass, iInstance, iDocument, OAfterDeserialization.class);
}
public static void invokeBeforeDeserializationCallbacks(Class<?> iClass, Object iInstance, ODocument iDocument) {
invokeCallback(iClass, iInstance, iDocument, OBeforeDeserialization.class);
}
public static OType getTypeByClass(final Class<?> iClass, final String fieldName) {
Field f = getField(fieldName, iClass);
return getTypeByClass(iClass, fieldName, f);
}
public static OType getTypeByClass(final Class<?> iClass, final String fieldName, Field f) {
if (f == null)
return null;
if (f.getType().isArray() || Collection.class.isAssignableFrom(f.getType()) || Map.class.isAssignableFrom(f.getType())) {
Class<?> genericMultiValueType = OReflectionHelper.getGenericMultivalueType(f);
if (f.getType().isArray()) {
      if (genericMultiValueType == byte.class) { // compare the primitive type directly: Byte.class.isAssignableFrom(byte.class) is false
return OType.BINARY;
} else {
if (isSerializedType(f)
|| OObjectEntitySerializer.isEmbeddedField(iClass, fieldName)
|| (genericMultiValueType != null && (genericMultiValueType.isEnum() || OReflectionHelper
.isJavaType(genericMultiValueType)))) {
return OType.EMBEDDEDLIST;
} else {
return OType.LINKLIST;
}
}
} else if (Collection.class.isAssignableFrom(f.getType())) {
if (isSerializedType(f)
|| OObjectEntitySerializer.isEmbeddedField(iClass, fieldName)
|| (genericMultiValueType != null && (genericMultiValueType.isEnum() || OReflectionHelper
.isJavaType(genericMultiValueType))))
return Set.class.isAssignableFrom(f.getType()) ? OType.EMBEDDEDSET : OType.EMBEDDEDLIST;
else
return Set.class.isAssignableFrom(f.getType()) ? OType.LINKSET : OType.LINKLIST;
} else {
if (isSerializedType(f)
|| OObjectEntitySerializer.isEmbeddedField(iClass, fieldName)
|| (genericMultiValueType != null && (genericMultiValueType.isEnum() || OReflectionHelper
.isJavaType(genericMultiValueType))))
return OType.EMBEDDEDMAP;
else
return OType.LINKMAP;
}
} else if (OObjectEntitySerializer.isEmbeddedField(iClass, fieldName)) {
return OType.EMBEDDED;
} else if (Date.class.isAssignableFrom(f.getType())) {
return OType.DATETIME;
} else {
return OType.getTypeByClass(f.getType());
}
}
public static Field getField(String fieldName, Class<?> iClass) {
for (Field f : iClass.getDeclaredFields()) {
if (f.getName().equals(fieldName))
return f;
}
    if (iClass.getSuperclass() == null || iClass.getSuperclass().equals(Object.class))
return null;
return getField(fieldName, iClass.getSuperclass());
}
@SuppressWarnings("unchecked")
public static <T> T getNonProxiedInstance(T iObject) {
try {
return (T) iObject.getClass().getSuperclass().newInstance();
} catch (InstantiationException ie) {
OLogManager.instance().error(iObject, "Error creating instance for class " + iObject.getClass().getSuperclass(), ie);
} catch (IllegalAccessException ie) {
OLogManager.instance().error(iObject, "Error creating instance for class " + iObject.getClass().getSuperclass(), ie);
}
return null;
}
public static void synchronizeSchema() {
for (Class<?> clazz : classes) {
registerClass(clazz);
}
}
/**
* Serializes the user POJO into its associated ODocument instance.
*
* @param iPojo
* User pojo to serialize
* @throws IllegalAccessException
* @throws IllegalArgumentException
*/
@SuppressWarnings("unchecked")
protected static <T> T toStream(final T iPojo, final Proxy iProxiedPojo, ODatabaseObject db) throws IllegalArgumentException,
IllegalAccessException {
final ODocument iRecord = getDocument(iProxiedPojo);
final long timer = Orient.instance().getProfiler().startChrono();
final Integer identityRecord = System.identityHashCode(iPojo);
if (OObjectSerializationThreadLocal.INSTANCE.get().containsKey(identityRecord))
return (T) OObjectSerializationThreadLocal.INSTANCE.get().get(identityRecord);
OObjectSerializationThreadLocal.INSTANCE.get().put(identityRecord, iProxiedPojo);
OProperty schemaProperty;
final Class<?> pojoClass = iPojo.getClass();
final OClass schemaClass = iRecord.getSchemaClass();
// CHECK FOR ID BINDING
final Field idField = getIdField(pojoClass);
if (idField != null) {
Object id = getFieldValue(idField, iPojo);
if (id != null) {
// FOUND
if (id instanceof ORecordId) {
iRecord.setIdentity((ORecordId) id);
} else if (id instanceof Number) {
// TREATS AS CLUSTER POSITION
((ORecordId) iRecord.getIdentity()).clusterId = schemaClass.getDefaultClusterId();
((ORecordId) iRecord.getIdentity()).clusterPosition = OClusterPositionFactory.INSTANCE.valueOf(((Number) id).longValue());
} else if (id instanceof String)
((ORecordId) iRecord.getIdentity()).fromString((String) id);
else if (id.getClass().equals(Object.class))
iRecord.setIdentity((ORecordId) id);
else
OLogManager.instance().warn(OObjectSerializerHelper.class,
"@Id field has been declared as %s while the supported are: ORID, Number, String, Object", id.getClass());
}
if (iRecord.getIdentity().isValid() && iRecord.getIdentity().isPersistent())
iRecord.reload();
}
// CHECK FOR VERSION BINDING
final Field vField = getVersionField(pojoClass);
boolean versionConfigured = false;
if (vField != null) {
versionConfigured = true;
Object ver = getFieldValue(vField, iPojo);
if (ver != null) {
// FOUND
final ORecordVersion version = iRecord.getRecordVersion();
if (ver instanceof ORecordVersion) {
version.copyFrom((ORecordVersion) ver);
} else if (ver instanceof Number) {
if (version instanceof OSimpleVersion)
          // TREATS AS VERSION COUNTER
version.setCounter(((Number) ver).intValue());
else
OLogManager
.instance()
.warn(OObjectEntitySerializer.class,
"@Version field can't be declared as Number in distributed mode. Should be one of following: String, Object, ORecordVersion");
} else if (ver instanceof String) {
version.getSerializer().fromString((String) ver, version);
} else if (ver.getClass().equals(Object.class))
version.copyFrom((ORecordVersion) ver);
else
OLogManager.instance().warn(OObjectSerializerHelper.class,
"@Version field has been declared as %s while the supported are: Number, String, Object", ver.getClass());
}
}
if (db.isMVCC() && !versionConfigured && db.getTransaction() instanceof OTransactionOptimistic)
throw new OTransactionException(
"Cannot involve an object of class '"
+ pojoClass
+ "' in an Optimistic Transaction commit because it does not define @Version or @OVersion and therefore cannot handle MVCC");
String fieldName;
Object fieldValue;
// CALL BEFORE MARSHALLING
invokeCallback(pojoClass, iPojo, iRecord, OBeforeSerialization.class);
Class<?> currentClass = pojoClass;
while (!currentClass.equals(Object.class) && classes.contains(pojoClass)) {
for (Field p : currentClass.getDeclaredFields()) {
if (Modifier.isStatic(p.getModifiers()) || Modifier.isNative(p.getModifiers()) || Modifier.isTransient(p.getModifiers())
|| p.getType().isAnonymousClass())
continue;
fieldName = p.getName();
List<String> classTransientFields = transientFields.get(pojoClass);
if ((idField != null && fieldName.equals(idField.getName()) || (vField != null && fieldName.equals(vField.getName())) || (classTransientFields != null && classTransientFields
.contains(fieldName))))
continue;
fieldValue = getFieldValue(p, iPojo);
if (fieldValue != null && fieldValue.getClass().isAnonymousClass())
continue;
if (isSerializedType(p))
fieldValue = serializeFieldValue(p.getType(), fieldValue);
schemaProperty = schemaClass != null ? schemaClass.getProperty(fieldName) : null;
OType fieldType = schemaProperty != null ? schemaProperty.getType() : getTypeByClass(currentClass, fieldName);
if (fieldValue != null) {
if (isEmbeddedObject(p)) {
// AUTO CREATE SCHEMA CLASS
if (iRecord.getSchemaClass() == null) {
db.getMetadata().getSchema().createClass(iPojo.getClass());
iRecord.setClassNameIfExists(iPojo.getClass().getSimpleName());
}
}
}
fieldValue = typeToStream(fieldValue, fieldType, db, iRecord);
iRecord.field(fieldName, fieldValue, fieldType);
}
currentClass = currentClass.getSuperclass();
if (currentClass == null || currentClass.equals(ODocument.class))
// POJO EXTENDS ODOCUMENT: SPECIAL CASE: AVOID TO CONSIDER
// ODOCUMENT FIELDS
currentClass = Object.class;
}
// CALL AFTER MARSHALLING
invokeCallback(pojoClass, iPojo, iRecord, OAfterSerialization.class);
OObjectSerializationThreadLocal.INSTANCE.get().remove(identityRecord);
Orient.instance().getProfiler().stopChrono("Object.toStream", "Serialize a POJO", timer);
return (T) iProxiedPojo;
}
protected static void invokeCallback(final Object iPojo, final ODocument iDocument, final Class<?> iAnnotation) {
invokeCallback(iPojo.getClass(), iPojo, iDocument, iAnnotation);
}
protected static void invokeCallback(final Class<?> iClass, final Object iPojo, final ODocument iDocument,
final Class<?> iAnnotation) {
final List<Method> methods = getCallbackMethods(iAnnotation, iClass);
if (methods != null && !methods.isEmpty())
for (Method m : methods) {
try {
if (m.getParameterTypes().length > 0)
m.invoke(iPojo, iDocument);
else
m.invoke(iPojo);
} catch (Exception e) {
throw new OConfigurationException("Error on executing user callback '" + m.getName() + "' annotated with '"
+ iAnnotation.getSimpleName() + "'", e);
}
}
}
protected static List<Method> getCallbackMethods(final Class<?> iAnnotation, final Class<?> iClass) {
if (!classes.contains(iClass)) {
registerClass(iClass);
}
List<Method> result = new ArrayList<Method>();
Class<?> currentClass = iClass;
while (classes.contains(currentClass)) {
List<Method> callbackMethods = callbacks.get(currentClass.getSimpleName() + "." + iAnnotation.getSimpleName());
if (callbackMethods != null && !callbackMethods.isEmpty())
result.addAll(callbackMethods);
if (currentClass != Object.class)
currentClass = currentClass.getSuperclass();
}
return result;
}
@SuppressWarnings({ "unchecked", "rawtypes" })
private static void registerCallbacks(final Class<?> iRootClass) {
// FIND KEY METHODS
for (Method m : iRootClass.getDeclaredMethods()) {
// SEARCH FOR CALLBACK ANNOTATIONS
for (Class annotationClass : OObjectSerializerHelper.callbackAnnotationClasses) {
final String key = iRootClass.getSimpleName() + "." + annotationClass.getSimpleName();
if (m.getAnnotation(annotationClass) != null) {
if (!callbacks.containsKey(key)) {
callbacks.put(key, new ArrayList<Method>(Arrays.asList(m)));
} else {
callbacks.get(key).add(m);
}
}
}
}
}
@SuppressWarnings("unchecked")
private static Object multiValueToStream(final Object iMultiValue, OType iType, final ODatabaseObject db, final ODocument iRecord) {
if (iMultiValue == null)
return null;
final Collection<Object> sourceValues;
if (iMultiValue instanceof Collection<?>) {
sourceValues = (Collection<Object>) iMultiValue;
} else {
sourceValues = (Collection<Object>) ((Map<?, ?>) iMultiValue).values();
}
if (sourceValues.size() == 0)
return iMultiValue;
// TRY TO UNDERSTAND THE COLLECTION TYPE BY ITS CONTENT
final Object firstValue = sourceValues.iterator().next();
if (firstValue == null)
return iMultiValue;
if (iType == null) {
// DETERMINE THE RIGHT TYPE BASED ON SOURCE MULTI VALUE OBJECT
if (OType.isSimpleType(firstValue)) {
if (iMultiValue instanceof List)
iType = OType.EMBEDDEDLIST;
else if (iMultiValue instanceof Set)
iType = OType.EMBEDDEDSET;
else
iType = OType.EMBEDDEDMAP;
} else {
if (iMultiValue instanceof List)
iType = OType.LINKLIST;
else if (iMultiValue instanceof Set)
iType = OType.LINKSET;
else
iType = OType.LINKMAP;
}
}
Object result = iMultiValue;
final OType linkedType;
// CREATE THE RETURN MULTI VALUE OBJECT BASED ON DISCOVERED TYPE
if (iType.equals(OType.EMBEDDEDSET) || iType.equals(OType.LINKSET)) {
if (isToSerialize(firstValue.getClass()))
result = new HashSet<Object>();
else if ((iRecord != null && iType.equals(OType.EMBEDDEDSET)) || OType.isSimpleType(firstValue))
result = new OTrackedSet<Object>(iRecord);
else
result = new ORecordLazySet(iRecord);
} else if (iType.equals(OType.EMBEDDEDLIST) || iType.equals(OType.LINKLIST)) {
if (isToSerialize(firstValue.getClass()))
result = new ArrayList<Object>();
else if ((iRecord != null && iType.equals(OType.EMBEDDEDLIST)) || OType.isSimpleType(firstValue))
result = new OTrackedList<Object>(iRecord);
else
result = new ORecordLazyList(iRecord);
}
if (iType.equals(OType.LINKLIST) || iType.equals(OType.LINKSET) || iType.equals(OType.LINKMAP))
linkedType = OType.LINK;
else if (iType.equals(OType.EMBEDDEDLIST) || iType.equals(OType.EMBEDDEDSET) || iType.equals(OType.EMBEDDEDMAP))
if (firstValue instanceof List)
linkedType = OType.EMBEDDEDLIST;
else if (firstValue instanceof Set)
linkedType = OType.EMBEDDEDSET;
else if (firstValue instanceof Map)
linkedType = OType.EMBEDDEDMAP;
else
linkedType = OType.EMBEDDED;
else
throw new IllegalArgumentException("Type " + iType + " must be a multi value type (collection or map)");
if (iMultiValue instanceof Set<?>) {
for (Object o : sourceValues) {
((Set<Object>) result).add(typeToStream(o, linkedType, db, null));
}
} else if (iMultiValue instanceof List<?>) {
for (int i = 0; i < sourceValues.size(); i++) {
((List<Object>) result).add(typeToStream(((List<?>) sourceValues).get(i), linkedType, db, null));
}
} else {
if (iMultiValue instanceof OObjectLazyMap<?>) {
result = ((OObjectLazyMap<?>) iMultiValue).getUnderlying();
} else {
if (isToSerialize(firstValue.getClass()))
result = new HashMap<Object, Object>();
else if (iRecord != null && iType.equals(OType.EMBEDDEDMAP))
result = new OTrackedMap<Object>(iRecord);
else
result = new ORecordLazyMap(iRecord);
for (Entry<Object, Object> entry : ((Map<Object, Object>) iMultiValue).entrySet()) {
((Map<Object, Object>) result).put(entry.getKey(), typeToStream(entry.getValue(), linkedType, db, null));
}
}
}
return result;
}
private static boolean isEmbeddedObject(Field f) {
if (!classes.contains(f.getDeclaringClass()))
registerClass(f.getDeclaringClass());
return isEmbeddedField(f.getDeclaringClass(), f.getName());
}
}
| 1no label
|
object_src_main_java_com_orientechnologies_orient_object_enhancement_OObjectEntitySerializer.java
|
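The registerClass method in the record above walks the class hierarchy once, recording every declared field and flagging static/final/native/transient ones as non-persistent. A standalone sketch of that scan, using only plain reflection and no OrientDB types:

import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class FieldScanSketch {
    static class Base { static int instances; transient String cache; String name; }
    static class Child extends Base { long id; List<String> tags; }

    public static void main(String[] args) {
        Map<Class<?>, List<String>> allFields = new HashMap<>();
        Map<Class<?>, List<String>> transientFields = new HashMap<>();
        for (Class<?> c = Child.class; c != null && c != Object.class; c = c.getSuperclass()) {
            for (Field f : c.getDeclaredFields()) {
                allFields.computeIfAbsent(c, k -> new ArrayList<>()).add(f.getName());
                int m = f.getModifiers();
                // Same exclusion rule as registerClass: these never reach the document.
                if (Modifier.isStatic(m) || Modifier.isFinal(m) || Modifier.isNative(m) || Modifier.isTransient(m)) {
                    transientFields.computeIfAbsent(c, k -> new ArrayList<>()).add(f.getName());
                }
            }
        }
        System.out.println("all:       " + allFields);
        System.out.println("transient: " + transientFields);
    }
}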
241 |
public class BroadleafCurrencyProvider {
@DataProvider(name = "USCurrency")
public static Object[][] provideUSCurrency() {
BroadleafCurrency currency=new BroadleafCurrencyImpl();
currency.setCurrencyCode("USD");
currency.setDefaultFlag(true);
currency.setFriendlyName("US Dollar");
return new Object[][] { { currency } };
}
@DataProvider(name = "FRCurrency")
public static Object[][] provideFRCurrency() {
BroadleafCurrency currency=new BroadleafCurrencyImpl();
currency.setCurrencyCode("EUR");
currency.setDefaultFlag(true);
currency.setFriendlyName("EURO Dollar");
return new Object[][] { { currency } };
}
}
| 0true
|
integration_src_test_java_org_broadleafcommerce_common_currency_BroadleafCurrencyProvider.java
|
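A hedged sketch of how a TestNG test might consume the providers above. The dataProvider/dataProviderClass attributes are standard TestNG; the test class itself is hypothetical, it assumes it sits in the same package as BroadleafCurrencyProvider, and it assumes BroadleafCurrency exposes getCurrencyCode()/getDefaultFlag() getters matching the setters used in the record.

import org.testng.Assert;
import org.testng.annotations.Test;

public class BroadleafCurrencyProviderTest {
    @Test(dataProvider = "USCurrency", dataProviderClass = BroadleafCurrencyProvider.class)
    public void usProviderYieldsDefaultUsd(BroadleafCurrency currency) {
        // Each Object[] row from the provider is unpacked into the test parameters.
        Assert.assertEquals(currency.getCurrencyCode(), "USD");
        Assert.assertTrue(currency.getDefaultFlag());
    }
}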
10 |
private class MessageReceiver
extends SimpleChannelHandler
{
@Override
public void channelOpen( ChannelHandlerContext ctx, ChannelStateEvent e ) throws Exception
{
Channel ctxChannel = ctx.getChannel();
openedChannel( getURI( (InetSocketAddress) ctxChannel.getRemoteAddress() ), ctxChannel );
channels.add( ctxChannel );
}
@Override
public void messageReceived( ChannelHandlerContext ctx, MessageEvent event ) throws Exception
{
if (!bindingDetected)
{
InetSocketAddress local = ((InetSocketAddress)event.getChannel().getLocalAddress());
bindingDetected = true;
listeningAt( getURI( local ) );
}
final Message message = (Message) event.getMessage();
        // Fix FROM header since sender cannot know its correct IP/hostname
InetSocketAddress remote = (InetSocketAddress) ctx.getChannel().getRemoteAddress();
String remoteAddress = remote.getAddress().getHostAddress();
URI fromHeader = URI.create( message.getHeader( Message.FROM ) );
fromHeader = URI.create(fromHeader.getScheme()+"://"+remoteAddress + ":" + fromHeader.getPort());
message.setHeader( Message.FROM, fromHeader.toASCIIString() );
msgLog.debug( "Received:" + message );
receive( message );
}
@Override
public void channelDisconnected( ChannelHandlerContext ctx, ChannelStateEvent e ) throws Exception
{
closedChannel( getURI( (InetSocketAddress) ctx.getChannel().getRemoteAddress() ) );
}
@Override
public void channelClosed( ChannelHandlerContext ctx, ChannelStateEvent e ) throws Exception
{
closedChannel( getURI( (InetSocketAddress) ctx.getChannel().getRemoteAddress() ) );
channels.remove( ctx.getChannel() );
}
@Override
public void exceptionCaught( ChannelHandlerContext ctx, ExceptionEvent e ) throws Exception
{
if ( !(e.getCause() instanceof ConnectException) )
{
msgLog.error( "Receive exception:", e.getCause() );
}
}
}
| 1no label
|
enterprise_cluster_src_main_java_org_neo4j_cluster_com_NetworkReceiver.java
|
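messageReceived above rewrites the FROM header: the scheme and port the sender claimed are kept, but the host is replaced with the address actually observed on the channel. A minimal sketch of just that URI rewrite:

import java.net.URI;

public class FromHeaderRewriteSketch {
    static URI fixFrom(URI claimed, String observedHost) {
        // Keep the sender's scheme and port; trust only the observed address.
        return URI.create(claimed.getScheme() + "://" + observedHost + ":" + claimed.getPort());
    }

    public static void main(String[] args) {
        URI claimed = URI.create("cluster://127.0.0.1:5001"); // what the sender thinks it is
        System.out.println(fixFrom(claimed, "10.0.0.7"));     // cluster://10.0.0.7:5001
    }
}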
302 |
@Deprecated
public class InstrumentationRuntimeFactory {
private static final Log LOG = LogFactory.getLog(InstrumentationRuntimeFactory.class);
private static final String IBM_VM_CLASS = "com.ibm.tools.attach.VirtualMachine";
private static final String SUN_VM_CLASS = "com.sun.tools.attach.VirtualMachine";
private static boolean isIBM = false;
private static Instrumentation inst;
/**
* This method is called by the JVM to set the instrumentation. We can't synchronize this because it will cause
* a deadlock with the thread calling the getInstrumentation() method when the instrumentation is installed.
*
* @param agentArgs
* @param instrumentation
*/
public static void agentmain(String agentArgs, Instrumentation instrumentation) {
inst = instrumentation;
}
/**
* This method returns the Instrumentation object provided by the JVM. If the Instrumentation object is null,
* it does its best to add an instrumentation agent to the JVM and then obtain the instrumentation object from it.
* @return Instrumentation
*/
public static synchronized Instrumentation getInstrumentation() {
if (inst != null) {
return inst;
}
if (System.getProperty("java.vendor").toUpperCase().contains("IBM")) {
isIBM = true;
}
AccessController.doPrivileged(new PrivilegedAction<Object>() {
public Object run() {
try {
if (!InstrumentationRuntimeFactory.class.getClassLoader().equals(
ClassLoader.getSystemClassLoader())) {
return null;
}
} catch (Throwable t) {
return null;
}
File toolsJar = null;
// When running on IBM, the attach api classes are packaged in vm.jar which is a part
// of the default vm classpath.
if (! isIBM) {
// If we can't find the tools.jar and we're not on IBM we can't load the agent.
toolsJar = findToolsJar();
if (toolsJar == null) {
return null;
}
}
Class<?> vmClass = loadVMClass(toolsJar);
if (vmClass == null) {
return null;
}
String agentPath = getAgentJar();
if (agentPath == null) {
return null;
}
loadAgent(agentPath, vmClass);
return null;
}
});
return inst;
}
private static File findToolsJar() {
String javaHome = System.getProperty("java.home");
File javaHomeFile = new File(javaHome);
File toolsJarFile = new File(javaHomeFile, "lib" + File.separator + "tools.jar");
if (!toolsJarFile.exists()) {
// If we're on an IBM SDK, then remove /jre off of java.home and try again.
if (javaHomeFile.getAbsolutePath().endsWith(File.separator + "jre")) {
javaHomeFile = javaHomeFile.getParentFile();
toolsJarFile = new File(javaHomeFile, "lib" + File.separator + "tools.jar");
} else if (System.getProperty("os.name").toLowerCase().contains("mac")) {
// If we're on a Mac, then change the search path to use ../Classes/classes.jar.
if (javaHomeFile.getAbsolutePath().endsWith(File.separator + "Home")) {
javaHomeFile = javaHomeFile.getParentFile();
toolsJarFile = new File(javaHomeFile, "Classes" + File.separator + "classes.jar");
}
}
}
if (! toolsJarFile.exists()) {
return null;
} else {
return toolsJarFile;
}
}
private static String createAgentJar() throws IOException {
File file =
File.createTempFile(InstrumentationRuntimeFactory.class.getName(), ".jar");
file.deleteOnExit();
ZipOutputStream zout = new ZipOutputStream(new FileOutputStream(file));
zout.putNextEntry(new ZipEntry("META-INF/MANIFEST.MF"));
PrintWriter writer = new PrintWriter(new OutputStreamWriter(zout));
writer.println("Agent-Class: " + InstrumentationRuntimeFactory.class.getName());
writer.println("Can-Redefine-Classes: true");
// IBM doesn't support retransform
writer.println("Can-Retransform-Classes: " + Boolean.toString(!isIBM));
writer.close();
return file.getAbsolutePath();
}
private static String getAgentJar() {
File agentJarFile = null;
// Find the name of the File that this class was loaded from. That
// jar *should* be the same location as our agent.
CodeSource cs =
InstrumentationRuntimeFactory.class.getProtectionDomain().getCodeSource();
if (cs != null) {
URL loc = cs.getLocation();
if (loc != null) {
agentJarFile = new File(loc.getFile());
}
}
// Determine whether the File that this class was loaded from has this
// class defined as the Agent-Class.
boolean createJar = false;
if (cs == null || agentJarFile == null
|| agentJarFile.isDirectory()) {
createJar = true;
} else if (!validateAgentJarManifest(agentJarFile, InstrumentationRuntimeFactory.class.getName())) {
// We have an agentJarFile, but this class isn't the Agent-Class.
createJar = true;
}
String agentJar;
if (createJar) {
try {
agentJar = createAgentJar();
} catch (IOException ioe) {
agentJar = null;
}
} else {
agentJar = agentJarFile.getAbsolutePath();
}
return agentJar;
}
private static void loadAgent(String agentJar, Class<?> vmClass) {
try {
// first obtain the PID of the currently-running process
// ### this relies on the undocumented convention of the
// RuntimeMXBean's
// ### name starting with the PID, but there appears to be no other
// ### way to obtain the current process' id, which we need for
// ### the attach process
RuntimeMXBean runtime = ManagementFactory.getRuntimeMXBean();
String pid = runtime.getName();
if (pid.contains("@"))
pid = pid.substring(0, pid.indexOf("@"));
// JDK1.6: now attach to the current VM so we can deploy a new agent
// ### this is a Sun JVM specific feature; other JVMs may offer
// ### this feature, but in an implementation-dependent way
Object vm = vmClass.getMethod("attach", new Class<?>[]{String.class}).invoke(null, pid);
vmClass.getMethod("loadAgent", new Class[]{String.class}).invoke(vm, agentJar);
vmClass.getMethod("detach", new Class[]{}).invoke(vm);
} catch (Throwable t) {
if (LOG.isTraceEnabled()) {
LOG.trace("Problem loading the agent", t);
}
}
}
private static Class<?> loadVMClass(File toolsJar) {
try {
ClassLoader loader = Thread.currentThread().getContextClassLoader();
String cls = SUN_VM_CLASS;
if (isIBM) {
cls = IBM_VM_CLASS;
} else {
loader = new URLClassLoader(new URL[]{toolsJar.toURI().toURL()}, loader);
}
return loader.loadClass(cls);
} catch (Exception e) {
if (LOG.isTraceEnabled()) {
LOG.trace("Failed to load the virtual machine class", e);
}
}
return null;
}
private static boolean validateAgentJarManifest(File agentJarFile,
        String agentClassName) {
    JarFile jar = null;
    try {
        jar = new JarFile(agentJarFile);
        Manifest manifest = jar.getManifest();
        if (manifest == null) {
            return false;
        }
        Attributes attributes = manifest.getMainAttributes();
        String ac = attributes.getValue("Agent-Class");
        if (ac != null && ac.equals(agentClassName)) {
            return true;
        }
    } catch (Exception e) {
        if (LOG.isTraceEnabled()) {
            LOG.trace("Unexpected exception occurred.", e);
        }
    } finally {
        if (jar != null) {
            try {
                jar.close(); // release the handle; the jar was only opened to read its manifest
            } catch (IOException ioe) {
                // ignore close failures; the manifest has already been checked
            }
        }
    }
    return false;
}
}
| 0true
|
common_src_main_java_org_broadleafcommerce_common_extensibility_InstrumentationRuntimeFactory.java
|
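The reflection gymnastics above exist only to avoid compiling against tools.jar. A minimal sketch of the same self-attach sequence written directly against the attach API, assuming com.sun.tools.attach is resolvable at compile time (on JDK 9+ it lives in the jdk.attach module); the class name and agent jar path are illustrative:

import com.sun.tools.attach.VirtualMachine;
import java.lang.management.ManagementFactory;

public final class SelfAttachSketch {
    public static void main(String[] args) throws Exception {
        // Same undocumented convention relied on above: the RuntimeMXBean
        // name is "<pid>@<hostname>" on HotSpot-style JVMs.
        String name = ManagementFactory.getRuntimeMXBean().getName();
        String pid = name.substring(0, name.indexOf('@'));

        // Attach to the current VM and deploy an agent jar whose manifest
        // carries an Agent-Class entry, exactly as createAgentJar() produces.
        VirtualMachine vm = VirtualMachine.attach(pid);
        try {
            vm.loadAgent("/tmp/instrumentation-agent.jar"); // illustrative path
        } finally {
            vm.detach();
        }
    }
}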
503 |
public class OpenDeclarationAction extends Action {
private static final ImageDescriptor ICON = CeylonPlugin.getInstance().getImageRegistry()
.getDescriptor(CeylonResources.CEYLON_OPEN_DECLARATION);
private final IEditorPart editor;
public OpenDeclarationAction(IEditorPart editor) {
this("Open Ceylon Declaration...", editor);
}
public OpenDeclarationAction(String text, IEditorPart editor) {
super(text);
this.editor = editor;
setActionDefinitionId(PLUGIN_ID + ".action.openDeclaration");
setImageDescriptor(ICON);
}
@Override
public void run() {
Shell shell = getWorkbench().getActiveWorkbenchWindow().getShell();
OpenCeylonDeclarationDialog dialog =
new OpenCeylonDeclarationDialog(shell, editor);
dialog.setTitle("Open Ceylon Declaration");
dialog.setMessage("Select a Ceylon declaration to open:");
if (editor instanceof ITextEditor) {
dialog.setInitialPattern(getSelectionText((ITextEditor) editor));
}
dialog.open();
Object[] types = dialog.getResult();
if (types != null && types.length > 0) {
DeclarationWithProject dwp = (DeclarationWithProject) types[0];
gotoDeclaration(dwp.getDeclaration(), dwp.getProject(), editor);
}
}
}
| 1no label
|
plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_open_OpenDeclarationAction.java
|
94 |
final class ClientExceptionConverters {
private static final JavaClientExceptionConverter JAVA = new JavaClientExceptionConverter();
private static final GenericClientExceptionConverter GENERIC = new GenericClientExceptionConverter();
private ClientExceptionConverters() {
}
static ClientExceptionConverter get(ClientType type) {
if (type == ClientType.JAVA) {
return JAVA;
}
return GENERIC;
}
}
| 0true
|
hazelcast_src_main_java_com_hazelcast_client_ClientExceptionConverters.java
|
569 |
public class OpenIndexClusterStateUpdateRequest extends IndicesClusterStateUpdateRequest<OpenIndexClusterStateUpdateRequest> {
OpenIndexClusterStateUpdateRequest() {
}
}
| 0true
|
src_main_java_org_elasticsearch_action_admin_indices_open_OpenIndexClusterStateUpdateRequest.java
|
542 |
public class DeleteMappingResponse extends AcknowledgedResponse {
DeleteMappingResponse() {
}
DeleteMappingResponse(boolean acknowledged) {
super(acknowledged);
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
readAcknowledged(in);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
writeAcknowledged(out);
}
}
| 0true
|
src_main_java_org_elasticsearch_action_admin_indices_mapping_delete_DeleteMappingResponse.java
|
524 |
public class FlushAction extends IndicesAction<FlushRequest, FlushResponse, FlushRequestBuilder> {
public static final FlushAction INSTANCE = new FlushAction();
public static final String NAME = "indices/flush";
private FlushAction() {
super(NAME);
}
@Override
public FlushResponse newResponse() {
return new FlushResponse();
}
@Override
public FlushRequestBuilder newRequestBuilder(IndicesAdminClient client) {
return new FlushRequestBuilder(client);
}
}
| 0true
|
src_main_java_org_elasticsearch_action_admin_indices_flush_FlushAction.java
|
3,717 |
public final class EntryTaskSchedulerFactory {
private EntryTaskSchedulerFactory() {
}
/**
 * Creates a new EntryTaskScheduler that will run all operations due in the same second in one bulk.
 * Imagine a write-behind map where dirty entries will be stored in bulk.
 * Note that each key can be scheduled only once; you cannot delay (postpone) a pending execution.
 * Once an entry is marked as dirty, for example, it will run in write-delay-seconds,
 * even if the entry is updated again within write-delay-seconds.
 * So two things to remember:
 * 1. a key cannot be re-scheduled (postponing its execution).
 * 2. all entries scheduled for a given second will be executed at once by your
 * SecondBulkExecutor implementation.
 * Once a key is executed, it can be re-scheduled for another execution.
 * <p/>
 * EntryTaskScheduler implementation is thread-safe.
 *
 * @param scheduledExecutorService ScheduledExecutorService instance that fires the per-second bulk executions
 * @param entryProcessor processor that handles each second's due entries in bulk
 * @param scheduleType controls how repeated schedule requests for the same key are treated
 * @return a new EntryTaskScheduler
 */
public static <K, V> EntryTaskScheduler<K, V> newScheduler(ScheduledExecutorService scheduledExecutorService,
                                                           ScheduledEntryProcessor<K, V> entryProcessor,
                                                           ScheduleType scheduleType) {
return new SecondsBasedEntryTaskScheduler<K, V>(scheduledExecutorService, entryProcessor, scheduleType);
}
}
| 1no label
|
hazelcast_src_main_java_com_hazelcast_util_scheduler_EntryTaskSchedulerFactory.java
|
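A hedged usage sketch for the factory above. The callback shape of ScheduledEntryProcessor (a single bulk process(scheduler, entries) method) and the ScheduleType constant are assumptions inferred from the factory's javadoc, not verified API:

import com.hazelcast.util.scheduler.*;
import java.util.Collection;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;

public final class WriteBehindSketch {
    public static void main(String[] args) {
        ScheduledExecutorService executor = Executors.newSingleThreadScheduledExecutor();

        EntryTaskScheduler<String, String> scheduler = EntryTaskSchedulerFactory.newScheduler(
                executor,
                new ScheduledEntryProcessor<String, String>() {
                    @Override
                    public void process(EntryTaskScheduler<String, String> scheduler,
                                        Collection<ScheduledEntry<String, String>> entries) {
                        // every entry that became due in the same second arrives as one bulk
                        for (ScheduledEntry<String, String> entry : entries) {
                            System.out.println("flushing " + entry.getKey());
                        }
                    }
                },
                ScheduleType.POSTPONE); // assumed constant; pick per the re-scheduling rules above

        // schedule a write-behind flush roughly 5 seconds out (assumed schedule() signature)
        scheduler.schedule(5000, "dirty-key", "dirty-value");
    }
}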
1,522 |
public class RoutingAllocation {
/**
* this class is used to describe results of a {@link RoutingAllocation}
*/
public static class Result {
private final boolean changed;
private final RoutingTable routingTable;
private final AllocationExplanation explanation;
/**
* Creates a new {@link RoutingAllocation.Result}
*
* @param changed a flag to determine whether the actual {@link RoutingTable} has been changed
* @param routingTable the {@link RoutingTable} this Result references
* @param explanation Explanation of the Result
*/
public Result(boolean changed, RoutingTable routingTable, AllocationExplanation explanation) {
this.changed = changed;
this.routingTable = routingTable;
this.explanation = explanation;
}
/**
 * Determine whether the actual {@link RoutingTable} has been changed
 * @return <code>true</code> if the {@link RoutingTable} has been changed by allocation. Otherwise <code>false</code>
 */
public boolean changed() {
return this.changed;
}
/**
* Get the {@link RoutingTable} referenced by this result
* @return referenced {@link RoutingTable}
*/
public RoutingTable routingTable() {
return routingTable;
}
/**
* Get the explanation of this result
* @return explanation
*/
public AllocationExplanation explanation() {
return explanation;
}
}
private final AllocationDeciders deciders;
private final RoutingNodes routingNodes;
private final DiscoveryNodes nodes;
private final AllocationExplanation explanation = new AllocationExplanation();
private final ClusterInfo clusterInfo;
private Map<ShardId, String> ignoredShardToNodes = null;
private boolean ignoreDisable = false;
private boolean debugDecision = false;
/**
 * Creates a new {@link RoutingAllocation}
 *
 * @param deciders {@link AllocationDeciders} used to make decisions for routing allocations
 * @param routingNodes Routing nodes in the current cluster
 * @param nodes The discovery nodes in the current cluster
 * @param clusterInfo Cluster-wide shard size and disk usage information consulted by some deciders
 */
public RoutingAllocation(AllocationDeciders deciders, RoutingNodes routingNodes, DiscoveryNodes nodes, ClusterInfo clusterInfo) {
this.deciders = deciders;
this.routingNodes = routingNodes;
this.nodes = nodes;
this.clusterInfo = clusterInfo;
}
/**
* Get {@link AllocationDeciders} used for allocation
* @return {@link AllocationDeciders} used for allocation
*/
public AllocationDeciders deciders() {
return this.deciders;
}
/**
* Get routing table of current nodes
* @return current routing table
*/
public RoutingTable routingTable() {
return routingNodes.routingTable();
}
/**
* Get current routing nodes
* @return routing nodes
*/
public RoutingNodes routingNodes() {
return routingNodes;
}
/**
* Get metadata of routing nodes
* @return Metadata of routing nodes
*/
public MetaData metaData() {
return routingNodes.metaData();
}
/**
* Get discovery nodes in current routing
* @return discovery nodes
*/
public DiscoveryNodes nodes() {
return nodes;
}
public ClusterInfo clusterInfo() {
return clusterInfo;
}
/**
* Get explanations of current routing
* @return explanation of routing
*/
public AllocationExplanation explanation() {
return explanation;
}
public void ignoreDisable(boolean ignoreDisable) {
this.ignoreDisable = ignoreDisable;
}
public boolean ignoreDisable() {
return this.ignoreDisable;
}
public void debugDecision(boolean debug) {
this.debugDecision = debug;
}
public boolean debugDecision() {
return this.debugDecision;
}
public void addIgnoreShardForNode(ShardId shardId, String nodeId) {
if (ignoredShardToNodes == null) {
ignoredShardToNodes = new HashMap<ShardId, String>();
}
ignoredShardToNodes.put(shardId, nodeId);
}
public boolean shouldIgnoreShardForNode(ShardId shardId, String nodeId) {
return ignoredShardToNodes != null && nodeId.equals(ignoredShardToNodes.get(shardId));
}
/**
* Create a routing decision, including the reason if the debug flag is
* turned on
*/
public Decision decision(Decision decision, String reason, Object... params) {
if (debugDecision()) {
return Decision.single(decision.type(), reason, params);
} else {
return decision;
}
}
}
| 1no label
|
src_main_java_org_elasticsearch_cluster_routing_allocation_RoutingAllocation.java
|
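A sketch of how an allocation decider might lean on the debug-aware decision(...) helper above; the decider fragment and the shard-count threshold are hypothetical, and the enclosing decider class is omitted:

// Hypothetical decider method: when debugDecision is enabled the reason
// string survives into the explanation; otherwise the bare decision is
// returned and the formatting cost is skipped entirely.
public Decision canAllocate(ShardRouting shard, RoutingNode node, RoutingAllocation allocation) {
    int started = node.numberOfShardsWithState(ShardRoutingState.STARTED);
    if (started >= 100) { // illustrative threshold
        return allocation.decision(Decision.NO,
                "too many started shards [%d] on node [%s]", started, node.nodeId());
    }
    return allocation.decision(Decision.YES, "node [%s] has capacity", node.nodeId());
}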
46 |
public static interface ConcurrentHashMapSpliterator<T> {
/**
 * If possible, returns a new spliterator covering
 * approximately one half of the elements, which will not be
 * covered by this spliterator. Returns null if this
 * spliterator cannot be split.
 */
ConcurrentHashMapSpliterator<T> trySplit();
/**
* Returns an estimate of the number of elements covered by
* this Spliterator.
*/
long estimateSize();
/** Applies the action to each untraversed element */
void forEachRemaining(Action<? super T> action);
/** If an element remains, applies the action and returns true. */
boolean tryAdvance(Action<? super T> action);
}
| 0true
|
src_main_java_jsr166e_ConcurrentHashMapV8.java
|
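A sketch of the split-then-traverse pattern the interface above supports. It assumes the key-set view exposes the same spliteratorJSR166() accessor that the entry-set view defines later in this file:

import jsr166e.ConcurrentHashMapV8;
import jsr166e.ConcurrentHashMapV8.ConcurrentHashMapSpliterator;

public final class SplitTraverseSketch {
    public static void main(String[] args) {
        ConcurrentHashMapV8<String, Integer> map = new ConcurrentHashMapV8<String, Integer>();
        map.put("a", 1);
        map.put("b", 2);

        // assumed accessor: the key-set view mirrors EntrySetView's spliteratorJSR166()
        ConcurrentHashMapSpliterator<String> left = map.keySet().spliteratorJSR166();
        ConcurrentHashMapSpliterator<String> right = left.trySplit(); // null when too small to split

        ConcurrentHashMapV8.Action<String> print = new ConcurrentHashMapV8.Action<String>() {
            public void apply(String key) { System.out.println(key); }
        };

        // each half could be handed to its own worker thread; here they run inline
        left.forEachRemaining(print);
        if (right != null) {
            right.forEachRemaining(print);
        }
    }
}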
508 |
public class DeleteIndexAction extends IndicesAction<DeleteIndexRequest, DeleteIndexResponse, DeleteIndexRequestBuilder> {
public static final DeleteIndexAction INSTANCE = new DeleteIndexAction();
public static final String NAME = "indices/delete";
private DeleteIndexAction() {
super(NAME);
}
@Override
public DeleteIndexResponse newResponse() {
return new DeleteIndexResponse();
}
@Override
public DeleteIndexRequestBuilder newRequestBuilder(IndicesAdminClient client) {
return new DeleteIndexRequestBuilder(client);
}
}
| 0true
|
src_main_java_org_elasticsearch_action_admin_indices_delete_DeleteIndexAction.java
|
126 |
static final class RunnableExecuteAction extends ForkJoinTask<Void> {
final Runnable runnable;
RunnableExecuteAction(Runnable runnable) {
if (runnable == null) throw new NullPointerException();
this.runnable = runnable;
}
public final Void getRawResult() { return null; }
public final void setRawResult(Void v) { }
public final boolean exec() { runnable.run(); return true; }
void internalPropagateException(Throwable ex) {
rethrow(ex); // rethrow outside exec() catches.
}
private static final long serialVersionUID = 5232453952276885070L;
}
| 0true
|
src_main_java_jsr166e_ForkJoinTask.java
|
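RunnableExecuteAction is the pool-internal adapter behind execute(Runnable); internalPropagateException rethrows so a failure is not silently recorded as a normally completed task. A small sketch of the equivalent route through the public ForkJoinTask.adapt API in java.util.concurrent:

import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.ForkJoinTask;

public final class RunnableAdaptSketch {
    public static void main(String[] args) {
        ForkJoinPool pool = new ForkJoinPool();

        // ForkJoinTask.adapt wraps a Runnable much like RunnableExecuteAction:
        // getRawResult() yields null and exec() simply runs the body.
        ForkJoinTask<?> task = ForkJoinTask.adapt(new Runnable() {
            public void run() {
                System.out.println("running inside " + Thread.currentThread().getName());
            }
        });

        pool.execute(task);
        task.join(); // propagates any exception thrown by run()
        pool.shutdown();
    }
}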
163 |
public interface URLHandlerDao {
public URLHandler findURLHandlerByURI(String uri);
/**
 * Gets all the URL handlers configured in the system
 * @return the list of all URL handlers configured in the system
 */
public List<URLHandler> findAllURLHandlers();
public URLHandler saveURLHandler(URLHandler handler);
}
| 0true
|
admin_broadleaf-contentmanagement-module_src_main_java_org_broadleafcommerce_cms_url_dao_URLHandlerDao.java
|
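A minimal JPA-flavoured sketch of how the DAO above could be implemented; the URLHandlerImpl entity name, its incomingURL field, and the persistence-unit name are assumptions for illustration, not the module's actual implementation:

import java.util.List;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;

public class URLHandlerDaoSketch implements URLHandlerDao {

    @PersistenceContext(unitName = "blPU") // assumed unit name
    protected EntityManager em;

    @Override
    public URLHandler findURLHandlerByURI(String uri) {
        // assumes a hypothetical URLHandlerImpl entity with an incomingURL field
        List<?> results = em.createQuery(
                "SELECT h FROM URLHandlerImpl h WHERE h.incomingURL = :uri")
                .setParameter("uri", uri)
                .getResultList();
        return results.isEmpty() ? null : (URLHandler) results.get(0);
    }

    @Override
    @SuppressWarnings("unchecked")
    public List<URLHandler> findAllURLHandlers() {
        return em.createQuery("SELECT h FROM URLHandlerImpl h").getResultList();
    }

    @Override
    public URLHandler saveURLHandler(URLHandler handler) {
        return em.merge(handler);
    }
}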
1,524 |
private class ProductOptionDTO {
private Long id;
private String type;
private Map<Long, String> values;
private String selectedValue;
@SuppressWarnings("unused")
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
@SuppressWarnings("unused")
public String getType() {
return type;
}
public void setType(String type) {
this.type = type;
}
@SuppressWarnings("unused")
public Map<Long, String> getValues() {
return values;
}
public void setValues(Map<Long, String> values) {
this.values = values;
}
@SuppressWarnings("unused")
public String getSelectedValue() {
return selectedValue;
}
@SuppressWarnings("unused")
public void setSelectedValue(String selectedValue) {
this.selectedValue = selectedValue;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (!(o instanceof ProductOptionDTO)) return false;
ProductOptionDTO that = (ProductOptionDTO) o;
if (id != null ? !id.equals(that.id) : that.id != null) return false;
if (selectedValue != null ? !selectedValue.equals(that.selectedValue) : that.selectedValue != null)
return false;
if (type != null ? !type.equals(that.type) : that.type != null) return false;
if (values != null ? !values.equals(that.values) : that.values != null) return false;
return true;
}
@Override
public int hashCode() {
int result = id != null ? id.hashCode() : 0;
result = 31 * result + (type != null ? type.hashCode() : 0);
result = 31 * result + (values != null ? values.hashCode() : 0);
result = 31 * result + (selectedValue != null ? selectedValue.hashCode() : 0);
return result;
}
}
| 1no label
|
core_broadleaf-framework-web_src_main_java_org_broadleafcommerce_core_web_processor_ProductOptionsProcessor.java
|
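The equals/hashCode pair above hand-rolls the pre-Java-7 null-guard pattern. A sketch of the same contract expressed with java.util.Objects (Java 7+), which collapses each guard into one call:

import java.util.Objects;

// drop-in replacements for the pair above (fields unchanged)
@Override
public boolean equals(Object o) {
    if (this == o) return true;
    if (!(o instanceof ProductOptionDTO)) return false;
    ProductOptionDTO that = (ProductOptionDTO) o;
    return Objects.equals(id, that.id)
            && Objects.equals(type, that.type)
            && Objects.equals(values, that.values)
            && Objects.equals(selectedValue, that.selectedValue);
}

@Override
public int hashCode() {
    return Objects.hash(id, type, values, selectedValue); // same 31-based combination
}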
440 |
static final class Fields {
static final XContentBuilderString COUNT = new XContentBuilderString("count");
static final XContentBuilderString VERSIONS = new XContentBuilderString("versions");
static final XContentBuilderString OS = new XContentBuilderString("os");
static final XContentBuilderString PROCESS = new XContentBuilderString("process");
static final XContentBuilderString JVM = new XContentBuilderString("jvm");
static final XContentBuilderString FS = new XContentBuilderString("fs");
static final XContentBuilderString PLUGINS = new XContentBuilderString("plugins");
}
| 0true
|
src_main_java_org_elasticsearch_action_admin_cluster_stats_ClusterStatsNodes.java
|
51 |
static final class EntrySetView<K,V> extends CollectionView<K,V,Map.Entry<K,V>>
implements Set<Map.Entry<K,V>>, java.io.Serializable {
private static final long serialVersionUID = 2249069246763182397L;
EntrySetView(ConcurrentHashMapV8<K,V> map) { super(map); }
public boolean contains(Object o) {
Object k, v, r; Map.Entry<?,?> e;
return ((o instanceof Map.Entry) &&
(k = (e = (Map.Entry<?,?>)o).getKey()) != null &&
(r = map.get(k)) != null &&
(v = e.getValue()) != null &&
(v == r || v.equals(r)));
}
public boolean remove(Object o) {
Object k, v; Map.Entry<?,?> e;
return ((o instanceof Map.Entry) &&
(k = (e = (Map.Entry<?,?>)o).getKey()) != null &&
(v = e.getValue()) != null &&
map.remove(k, v));
}
/**
* @return an iterator over the entries of the backing map
*/
public Iterator<Map.Entry<K,V>> iterator() {
ConcurrentHashMapV8<K,V> m = map;
Node<K,V>[] t;
int f = (t = m.table) == null ? 0 : t.length;
return new EntryIterator<K,V>(t, f, 0, f, m);
}
public boolean add(Entry<K,V> e) {
return map.putVal(e.getKey(), e.getValue(), false) == null;
}
public boolean addAll(Collection<? extends Entry<K,V>> c) {
boolean added = false;
for (Entry<K,V> e : c) {
if (add(e))
added = true;
}
return added;
}
public final int hashCode() {
int h = 0;
Node<K,V>[] t;
if ((t = map.table) != null) {
Traverser<K,V> it = new Traverser<K,V>(t, t.length, 0, t.length);
for (Node<K,V> p; (p = it.advance()) != null; ) {
h += p.hashCode();
}
}
return h;
}
public final boolean equals(Object o) {
Set<?> c;
return ((o instanceof Set) &&
((c = (Set<?>)o) == this ||
(containsAll(c) && c.containsAll(this))));
}
public ConcurrentHashMapSpliterator<Map.Entry<K,V>> spliteratorJSR166() {
Node<K,V>[] t;
ConcurrentHashMapV8<K,V> m = map;
long n = m.sumCount();
int f = (t = m.table) == null ? 0 : t.length;
return new EntrySpliterator<K,V>(t, f, 0, f, n < 0L ? 0L : n, m);
}
public void forEach(Action<? super Map.Entry<K,V>> action) {
if (action == null) throw new NullPointerException();
Node<K,V>[] t;
if ((t = map.table) != null) {
Traverser<K,V> it = new Traverser<K,V>(t, t.length, 0, t.length);
for (Node<K,V> p; (p = it.advance()) != null; )
action.apply(new MapEntry<K,V>(p.key, p.val, map));
}
}
}
| 0true
|
src_main_java_jsr166e_ConcurrentHashMapV8.java
|
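A sketch of the weakly consistent traversal contract the view above provides: iteration walks the table without locking, so concurrent updates may or may not be observed, and no ConcurrentModificationException is thrown (map contents are illustrative):

import java.util.Map;
import jsr166e.ConcurrentHashMapV8;

public final class WeaklyConsistentIterationSketch {
    public static void main(String[] args) {
        ConcurrentHashMapV8<String, Integer> map = new ConcurrentHashMapV8<String, Integer>();
        map.put("a", 1);
        map.put("b", 2);

        for (Map.Entry<String, Integer> e : map.entrySet()) {
            map.put("c", 3); // legal mid-iteration; the new entry may or may not be seen
            System.out.println(e.getKey() + "=" + e.getValue());
            e.setValue(e.getValue() + 10); // MapEntry writes through to the backing map
        }
    }
}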