Columns:
Unnamed: 0 — int64, values 0 to 6.45k
func — string, lengths 29 to 253k
target — class label, 2 classes
project — string, lengths 36 to 167

Each record below lists its row index (Unnamed: 0), the func source text, the target class, and the project file path.

Record (Unnamed: 0 = 1,066)
public interface FulfillmentPriceBand extends FulfillmentBand { /** * Gets the minimum amount that this band is valid for. If the addition * of all of the retail prices on all the {@link OrderItem}s in a {@link FulfillmentGroup} * comes to at least this amount, this band result amount will be applied to the * fulfillment cost. * * @return the minimum retail price amount of the sum of the {@link OrderItem}s in a * {@link FulfillmentGroup} that this band qualifies for */ public BigDecimal getRetailPriceMinimumAmount(); /** * Set the minimum amount that this band is valid for. If the addition * of all of the retail prices on all the {@link OrderItem}s in a {@link FulfillmentGroup} * comes to at least this amount, this band result amount will be applied to the * fulfillment cost. * * @param minimumRetailPriceAmount - the minimum retail price amount from adding up * the {@link OrderItem}s in a {@link FulfillmentGroup} */ public void setRetailPriceMinimumAmount(BigDecimal retailPriceMinimumAmount); /** * Gets the {@link BandedPriceFulfillmentOption} that this band is associated to * * @return the associated {@link BandedPriceFulfillmentOption} */ public BandedPriceFulfillmentOption getOption(); /** * Sets the {@link BandedPriceFulfillmentOption} to associate with this band * * @param option */ public void setOption(BandedPriceFulfillmentOption option); }
target: 0 (true)
project: core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_order_fulfillment_domain_FulfillmentPriceBand.java

Record (Unnamed: 0 = 1,525)
private class ProductOptionPricingDTO { private Long[] skuOptions; private String price; @SuppressWarnings("unused") public Long[] getSelectedOptions() { return skuOptions; } public void setSelectedOptions(Long[] skuOptions) { this.skuOptions = skuOptions; } @SuppressWarnings("unused") public String getPrice() { return price; } public void setPrice(String price) { this.price = price; } @Override public boolean equals(Object o) { if (this == o) return true; if (!(o instanceof ProductOptionPricingDTO)) return false; ProductOptionPricingDTO that = (ProductOptionPricingDTO) o; if (price != null ? !price.equals(that.price) : that.price != null) return false; if (!Arrays.equals(skuOptions, that.skuOptions)) return false; return true; } @Override public int hashCode() { int result = skuOptions != null ? Arrays.hashCode(skuOptions) : 0; result = 31 * result + (price != null ? price.hashCode() : 0); return result; } }
target: 1 (no label)
project: core_broadleaf-framework-web_src_main_java_org_broadleafcommerce_core_web_processor_ProductOptionsProcessor.java

Record (Unnamed: 0 = 912)
public class FulfillmentGroupOfferProcessorTest extends TestCase { protected OfferDao offerDaoMock; protected OrderItemDao orderItemDaoMock; protected OfferServiceImpl offerService; protected final OfferDataItemProvider dataProvider = new OfferDataItemProvider(); protected OrderService orderServiceMock; protected OrderItemService orderItemServiceMock; protected FulfillmentGroupItemDao fgItemDaoMock; protected FulfillmentGroupService fgServiceMock; protected OrderMultishipOptionService multishipOptionServiceMock; protected OfferTimeZoneProcessor offerTimeZoneProcessorMock; protected FulfillmentGroupOfferProcessorImpl fgProcessor; @Override protected void setUp() throws Exception { offerService = new OfferServiceImpl(); CustomerOfferDao customerOfferDaoMock = EasyMock.createMock(CustomerOfferDao.class); OfferCodeDao offerCodeDaoMock = EasyMock.createMock(OfferCodeDao.class); orderServiceMock = EasyMock.createMock(OrderService.class); orderItemDaoMock = EasyMock.createMock(OrderItemDao.class); orderItemServiceMock = EasyMock.createMock(OrderItemService.class); fgItemDaoMock = EasyMock.createMock(FulfillmentGroupItemDao.class); offerDaoMock = EasyMock.createMock(OfferDao.class); fgServiceMock = EasyMock.createMock(FulfillmentGroupService.class); multishipOptionServiceMock = EasyMock.createMock(OrderMultishipOptionService.class); fgProcessor = new FulfillmentGroupOfferProcessorImpl(); fgProcessor.setOfferDao(offerDaoMock); fgProcessor.setOrderItemDao(orderItemDaoMock); fgProcessor.setPromotableItemFactory(new PromotableItemFactoryImpl()); OrderOfferProcessorImpl orderProcessor = new OrderOfferProcessorImpl(); orderProcessor.setOfferDao(offerDaoMock); orderProcessor.setPromotableItemFactory(new PromotableItemFactoryImpl()); offerTimeZoneProcessorMock = EasyMock.createMock(OfferTimeZoneProcessor.class); orderProcessor.setOfferTimeZoneProcessor(offerTimeZoneProcessorMock); orderProcessor.setOrderItemDao(orderItemDaoMock); ItemOfferProcessor itemProcessor = new ItemOfferProcessorImpl(); itemProcessor.setOfferDao(offerDaoMock); itemProcessor.setPromotableItemFactory(new PromotableItemFactoryImpl()); itemProcessor.setOrderItemDao(orderItemDaoMock); offerService.setCustomerOfferDao(customerOfferDaoMock); offerService.setOfferCodeDao(offerCodeDaoMock); offerService.setOfferDao(offerDaoMock); offerService.setOrderOfferProcessor(orderProcessor); offerService.setItemOfferProcessor(itemProcessor); offerService.setFulfillmentGroupOfferProcessor(fgProcessor); offerService.setPromotableItemFactory(new PromotableItemFactoryImpl()); offerService.setOrderService(orderServiceMock); } public void replay() { EasyMock.replay(offerDaoMock); EasyMock.replay(orderItemDaoMock); EasyMock.replay(orderServiceMock); EasyMock.replay(orderItemServiceMock); EasyMock.replay(fgItemDaoMock); EasyMock.replay(fgServiceMock); EasyMock.replay(multishipOptionServiceMock); EasyMock.replay(offerTimeZoneProcessorMock); } public void verify() { EasyMock.verify(offerDaoMock); EasyMock.verify(orderItemDaoMock); EasyMock.verify(orderServiceMock); EasyMock.verify(orderItemServiceMock); EasyMock.verify(fgItemDaoMock); EasyMock.verify(fgServiceMock); EasyMock.verify(multishipOptionServiceMock); EasyMock.verify(offerTimeZoneProcessorMock); } public void testApplyAllFulfillmentGroupOffersWithOrderItemOffers() throws Exception { final ThreadLocal<Order> myOrder = new ThreadLocal<Order>(); EasyMock.expect(orderItemDaoMock.createOrderItemPriceDetail()).andAnswer(OfferDataItemProvider.getCreateOrderItemPriceDetailAnswer()).anyTimes(); 
EasyMock.expect(orderItemDaoMock.createOrderItemQualifier()).andAnswer(OfferDataItemProvider.getCreateOrderItemQualifierAnswer()).atLeastOnce(); EasyMock.expect(fgServiceMock.addItemToFulfillmentGroup(EasyMock.isA(FulfillmentGroupItemRequest.class), EasyMock.eq(false))).andAnswer(OfferDataItemProvider.getAddItemToFulfillmentGroupAnswer()).anyTimes(); EasyMock.expect(orderServiceMock.removeItem(EasyMock.isA(Long.class), EasyMock.isA(Long.class), EasyMock.eq(false))).andAnswer(OfferDataItemProvider.getRemoveItemFromOrderAnswer()).anyTimes(); EasyMock.expect(orderServiceMock.save(EasyMock.isA(Order.class),EasyMock.isA(Boolean.class))).andAnswer(OfferDataItemProvider.getSaveOrderAnswer()).anyTimes(); EasyMock.expect(orderServiceMock.getAutomaticallyMergeLikeItems()).andReturn(true).anyTimes(); EasyMock.expect(orderItemServiceMock.saveOrderItem(EasyMock.isA(OrderItem.class))).andAnswer(OfferDataItemProvider.getSaveOrderItemAnswer()).anyTimes(); EasyMock.expect(fgItemDaoMock.save(EasyMock.isA(FulfillmentGroupItem.class))).andAnswer(OfferDataItemProvider.getSaveFulfillmentGroupItemAnswer()).anyTimes(); EasyMock.expect(offerDaoMock.createOrderItemPriceDetailAdjustment()).andAnswer(OfferDataItemProvider.getCreateOrderItemPriceDetailAdjustmentAnswer()).anyTimes(); EasyMock.expect(offerDaoMock.createFulfillmentGroupAdjustment()).andAnswer(OfferDataItemProvider.getCreateFulfillmentGroupAdjustmentAnswer()).anyTimes(); EasyMock.expect(orderServiceMock.findOrderById(EasyMock.isA(Long.class))).andAnswer(new IAnswer<Order>() { @Override public Order answer() throws Throwable { return myOrder.get(); } }).anyTimes(); EasyMock.expect(multishipOptionServiceMock.findOrderMultishipOptions(EasyMock.isA(Long.class))).andAnswer(new IAnswer<List<OrderMultishipOption>>() { @Override public List<OrderMultishipOption> answer() throws Throwable { List<OrderMultishipOption> options = new ArrayList<OrderMultishipOption>(); PromotableOrder order = dataProvider.createBasicPromotableOrder(); for (FulfillmentGroup fg : order.getOrder().getFulfillmentGroups()) { Address address = fg.getAddress(); for (FulfillmentGroupItem fgItem : fg.getFulfillmentGroupItems()) { for (int j=0;j<fgItem.getQuantity();j++) { OrderMultishipOption option = new OrderMultishipOptionImpl(); option.setOrder(order.getOrder()); option.setAddress(address); option.setOrderItem(fgItem.getOrderItem()); options.add(option); } } } return options; } }).anyTimes(); multishipOptionServiceMock.deleteAllOrderMultishipOptions(EasyMock.isA(Order.class)); EasyMock.expectLastCall().anyTimes(); EasyMock.expect(fgServiceMock.collapseToOneFulfillmentGroup(EasyMock.isA(Order.class), EasyMock.eq(false))).andAnswer(new IAnswer<Order>() { @Override public Order answer() throws Throwable { Order order = (Order) EasyMock.getCurrentArguments()[0]; order.getFulfillmentGroups().get(0).getFulfillmentGroupItems().addAll(order.getFulfillmentGroups().get(1).getFulfillmentGroupItems()); order.getFulfillmentGroups().remove(order.getFulfillmentGroups().get(1)); return order; } }).anyTimes(); EasyMock.expect(fgItemDaoMock.create()).andAnswer(OfferDataItemProvider.getCreateFulfillmentGroupItemAnswer()).anyTimes(); fgItemDaoMock.delete(EasyMock.isA(FulfillmentGroupItem.class)); EasyMock.expectLastCall().anyTimes(); EasyMock.expect(offerTimeZoneProcessorMock.getTimeZone(EasyMock.isA(OfferImpl.class))).andReturn(TimeZone.getTimeZone("CST")).anyTimes(); replay(); PromotableOrder promotableOrder = dataProvider.createBasicPromotableOrder(); Order order = promotableOrder.getOrder(); 
myOrder.set(promotableOrder.getOrder()); List<PromotableCandidateFulfillmentGroupOffer> qualifiedOffers = new ArrayList<PromotableCandidateFulfillmentGroupOffer>(); List<Offer> offers = dataProvider.createFGBasedOffer("order.subTotal.getAmount()>20", "fulfillmentGroup.address.postalCode==75244", OfferDiscountType.PERCENT_OFF); offers.addAll(dataProvider.createFGBasedOfferWithItemCriteria("order.subTotal.getAmount()>20", "fulfillmentGroup.address.postalCode==75244", OfferDiscountType.PERCENT_OFF, "([MVEL.eval(\"toUpperCase()\",\"test1\")] contains MVEL.eval(\"toUpperCase()\", discreteOrderItem.category.name))")); offers.get(1).setName("secondOffer"); offers.addAll(dataProvider.createItemBasedOfferWithItemCriteria( "order.subTotal.getAmount()>20", OfferDiscountType.PERCENT_OFF, "([MVEL.eval(\"toUpperCase()\",\"test1\"), MVEL.eval(\"toUpperCase()\",\"test2\")] contains MVEL.eval(\"toUpperCase()\", discreteOrderItem.category.name))", "([MVEL.eval(\"toUpperCase()\",\"test1\"), MVEL.eval(\"toUpperCase()\",\"test2\")] contains MVEL.eval(\"toUpperCase()\", discreteOrderItem.category.name))" )); offerService.applyOffersToOrder(offers, promotableOrder.getOrder()); offers.get(0).setTotalitarianOffer(true); offerService.applyFulfillmentGroupOffersToOrder(offers, promotableOrder.getOrder()); int fgAdjustmentCount = 0; for (FulfillmentGroup fg : order.getFulfillmentGroups()) { fgAdjustmentCount += fg.getFulfillmentGroupAdjustments().size(); } //The totalitarian offer that applies to both fg's is not combinable and is a worse offer than the order item offers // - it is therefore ignored //However, the second combinable fg offer is allowed to be applied. assertTrue(fgAdjustmentCount == 1); promotableOrder = dataProvider.createBasicPromotableOrder(); myOrder.set(promotableOrder.getOrder()); offers.get(2).setValue(new BigDecimal("1")); offerService.applyOffersToOrder(offers, promotableOrder.getOrder()); offerService.applyFulfillmentGroupOffersToOrder(offers, promotableOrder.getOrder()); fgAdjustmentCount = 0; order = promotableOrder.getOrder(); for (FulfillmentGroup fg : order.getFulfillmentGroups()) { fgAdjustmentCount += fg.getFulfillmentGroupAdjustments().size(); } //The totalitarian fg offer is now a better deal than the order item offers, therefore the totalitarian fg offer is applied //and the order item offers are removed assertTrue(fgAdjustmentCount == 2); int itemAdjustmentCount = 0; for (OrderItem item : order.getOrderItems()) { for (OrderItemPriceDetail detail : item.getOrderItemPriceDetails()) { itemAdjustmentCount += detail.getOrderItemPriceDetailAdjustments().size(); } } //Confirm that the order item offers are removed assertTrue(itemAdjustmentCount == 0); verify(); } public void testApplyAllFulfillmentGroupOffers() { replay(); PromotableOrder order = dataProvider.createBasicPromotableOrder(); List<PromotableCandidateFulfillmentGroupOffer> qualifiedOffers = new ArrayList<PromotableCandidateFulfillmentGroupOffer>(); List<Offer> offers = dataProvider.createFGBasedOffer("order.subTotal.getAmount()>20", "fulfillmentGroup.address.postalCode==75244", OfferDiscountType.PERCENT_OFF); fgProcessor.filterFulfillmentGroupLevelOffer(order, qualifiedOffers, offers.get(0)); boolean offerApplied = fgProcessor.applyAllFulfillmentGroupOffers(qualifiedOffers, order); assertTrue(offerApplied); order = dataProvider.createBasicPromotableOrder(); qualifiedOffers = new ArrayList<PromotableCandidateFulfillmentGroupOffer>(); offers = dataProvider.createFGBasedOffer("order.subTotal.getAmount()>20", 
"fulfillmentGroup.address.postalCode==75244", OfferDiscountType.PERCENT_OFF); offers.addAll(dataProvider.createFGBasedOfferWithItemCriteria("order.subTotal.getAmount()>20", "fulfillmentGroup.address.postalCode==75244", OfferDiscountType.PERCENT_OFF, "([MVEL.eval(\"toUpperCase()\",\"test1\")] contains MVEL.eval(\"toUpperCase()\", discreteOrderItem.category.name))")); offers.get(1).setName("secondOffer"); fgProcessor.filterFulfillmentGroupLevelOffer(order, qualifiedOffers, offers.get(0)); fgProcessor.filterFulfillmentGroupLevelOffer(order, qualifiedOffers, offers.get(1)); offerApplied = fgProcessor.applyAllFulfillmentGroupOffers(qualifiedOffers, order); //the first offer applies to both fulfillment groups, but the second offer only applies to one of the fulfillment groups assertTrue(offerApplied); int fgAdjustmentCount = 0; for (PromotableFulfillmentGroup fg : order.getFulfillmentGroups()) { fgAdjustmentCount += fg.getCandidateFulfillmentGroupAdjustments().size(); } assertTrue(fgAdjustmentCount == 3); verify(); } public void testFilterFulfillmentGroupLevelOffer() { replay(); PromotableOrder order = dataProvider.createBasicPromotableOrder(); List<PromotableCandidateFulfillmentGroupOffer> qualifiedOffers = new ArrayList<PromotableCandidateFulfillmentGroupOffer>(); List<Offer> offers = dataProvider.createFGBasedOffer("order.subTotal.getAmount()>20", "fulfillmentGroup.address.postalCode==75244", OfferDiscountType.PERCENT_OFF); fgProcessor.filterFulfillmentGroupLevelOffer(order, qualifiedOffers, offers.get(0)); //test that the valid fg offer is included //No item criteria, so each fulfillment group applies assertTrue(qualifiedOffers.size() == 2 && qualifiedOffers.get(0).getOffer().equals(offers.get(0))); qualifiedOffers = new ArrayList<PromotableCandidateFulfillmentGroupOffer>(); offers = dataProvider.createFGBasedOfferWithItemCriteria("order.subTotal.getAmount()>20", "fulfillmentGroup.address.postalCode==75244", OfferDiscountType.PERCENT_OFF, "([MVEL.eval(\"toUpperCase()\",\"test1\")] contains MVEL.eval(\"toUpperCase()\", discreteOrderItem.category.name))"); fgProcessor.filterFulfillmentGroupLevelOffer(order, qualifiedOffers, offers.get(0)); //test that the valid fg offer is included //only 1 fulfillment group has qualifying items assertTrue(qualifiedOffers.size() == 1 && qualifiedOffers.get(0).getOffer().equals(offers.get(0))) ; qualifiedOffers = new ArrayList<PromotableCandidateFulfillmentGroupOffer>(); offers = dataProvider.createFGBasedOfferWithItemCriteria("order.subTotal.getAmount()>20", "fulfillmentGroup.address.postalCode==75240", OfferDiscountType.PERCENT_OFF, "([MVEL.eval(\"toUpperCase()\",\"test1\"),MVEL.eval(\"toUpperCase()\",\"test2\")] contains MVEL.eval(\"toUpperCase()\", discreteOrderItem.category.name))"); fgProcessor.filterFulfillmentGroupLevelOffer(order, qualifiedOffers, offers.get(0)); //test that the invalid fg offer is excluded - zipcode is wrong assertTrue(qualifiedOffers.size() == 0) ; qualifiedOffers = new ArrayList<PromotableCandidateFulfillmentGroupOffer>(); offers = dataProvider.createFGBasedOfferWithItemCriteria("order.subTotal.getAmount()>20", "fulfillmentGroup.address.postalCode==75244", OfferDiscountType.PERCENT_OFF, "([MVEL.eval(\"toUpperCase()\",\"test5\"),MVEL.eval(\"toUpperCase()\",\"test6\")] contains MVEL.eval(\"toUpperCase()\", discreteOrderItem.category.name))"); fgProcessor.filterFulfillmentGroupLevelOffer(order, qualifiedOffers, offers.get(0)); //test that the invalid fg offer is excluded - no qualifying items assertTrue(qualifiedOffers.size() == 0) ; 
verify(); } public void testCouldOfferApplyToFulfillmentGroup() { replay(); PromotableOrder order = dataProvider.createBasicPromotableOrder(); List<Offer> offers = dataProvider.createFGBasedOffer("order.subTotal.getAmount()>20", "fulfillmentGroup.address.postalCode==75244", OfferDiscountType.PERCENT_OFF); boolean couldApply = fgProcessor.couldOfferApplyToFulfillmentGroup(offers.get(0), order.getFulfillmentGroups().get(0)); //test that the valid fg offer is included assertTrue(couldApply); offers = dataProvider.createFGBasedOffer("order.subTotal.getAmount()>20", "fulfillmentGroup.address.postalCode==75240", OfferDiscountType.PERCENT_OFF); couldApply = fgProcessor.couldOfferApplyToFulfillmentGroup(offers.get(0), order.getFulfillmentGroups().get(0)); //test that the invalid fg offer is excluded assertFalse(couldApply); verify(); } public void testCouldOrderItemMeetOfferRequirement() { replay(); PromotableOrder order = dataProvider.createBasicPromotableOrder(); List<Offer> offers = dataProvider.createFGBasedOfferWithItemCriteria("order.subTotal.getAmount()>20", "fulfillmentGroup.address.postalCode==75244", OfferDiscountType.PERCENT_OFF, "([MVEL.eval(\"toUpperCase()\",\"test1\"), MVEL.eval(\"toUpperCase()\",\"test2\")] contains MVEL.eval(\"toUpperCase()\", discreteOrderItem.category.name))"); boolean couldApply = fgProcessor.couldOrderItemMeetOfferRequirement(offers.get(0).getQualifyingItemCriteria().iterator().next(), order.getDiscountableOrderItems().get(0)); //test that the valid fg offer is included assertTrue(couldApply); offers = dataProvider.createFGBasedOfferWithItemCriteria("order.subTotal.getAmount()>20", "fulfillmentGroup.address.postalCode==75244", OfferDiscountType.PERCENT_OFF, "([MVEL.eval(\"toUpperCase()\",\"test5\"), MVEL.eval(\"toUpperCase()\",\"test6\")] contains MVEL.eval(\"toUpperCase()\", discreteOrderItem.category.name))"); couldApply = fgProcessor.couldOrderItemMeetOfferRequirement(offers.get(0).getQualifyingItemCriteria().iterator().next(), order.getDiscountableOrderItems().get(0)); //test that the invalid fg offer is excluded assertFalse(couldApply); verify(); } public void testCouldOfferApplyToOrderItems() { replay(); PromotableOrder order = dataProvider.createBasicPromotableOrder(); List<PromotableOrderItem> orderItems = new ArrayList<PromotableOrderItem>(); for (PromotableOrderItem orderItem : order.getDiscountableOrderItems()) { orderItems.add(orderItem); } List<Offer> offers = dataProvider.createFGBasedOfferWithItemCriteria("order.subTotal.getAmount()>20", "fulfillmentGroup.address.postalCode==75244", OfferDiscountType.PERCENT_OFF, "([MVEL.eval(\"toUpperCase()\",\"test1\"), MVEL.eval(\"toUpperCase()\",\"test2\")] contains MVEL.eval(\"toUpperCase()\", discreteOrderItem.category.name))"); CandidatePromotionItems candidates = fgProcessor.couldOfferApplyToOrderItems(offers.get(0), orderItems); //test that the valid fg offer is included assertTrue(candidates.isMatchedQualifier() && candidates.getCandidateQualifiersMap().size() == 1); offers = dataProvider.createFGBasedOfferWithItemCriteria("order.subTotal.getAmount()>20", "fulfillmentGroup.address.postalCode==75244", OfferDiscountType.PERCENT_OFF, "([MVEL.eval(\"toUpperCase()\",\"test5\"), MVEL.eval(\"toUpperCase()\",\"test6\")] contains MVEL.eval(\"toUpperCase()\", discreteOrderItem.category.name))"); candidates = fgProcessor.couldOfferApplyToOrderItems(offers.get(0), orderItems); //test that the invalid fg offer is excluded because there are no qualifying items assertFalse(candidates.isMatchedQualifier() && 
candidates.getCandidateQualifiersMap().size() == 1); verify(); } public class CandidateFulfillmentGroupOfferAnswer implements IAnswer<CandidateFulfillmentGroupOffer> { @Override public CandidateFulfillmentGroupOffer answer() throws Throwable { return new CandidateFulfillmentGroupOfferImpl(); } } public class FulfillmentGroupAdjustmentAnswer implements IAnswer<FulfillmentGroupAdjustment> { @Override public FulfillmentGroupAdjustment answer() throws Throwable { return new FulfillmentGroupAdjustmentImpl(); } } public class CandidateItemOfferAnswer implements IAnswer<CandidateItemOffer> { @Override public CandidateItemOffer answer() throws Throwable { return new CandidateItemOfferImpl(); } } public class OrderItemAdjustmentAnswer implements IAnswer<OrderItemAdjustment> { @Override public OrderItemAdjustment answer() throws Throwable { return new OrderItemAdjustmentImpl(); } } }
target: 0 (true)
project: core_broadleaf-framework_src_test_java_org_broadleafcommerce_core_offer_service_processor_FulfillmentGroupOfferProcessorTest.java

Record (Unnamed: 0 = 2,814)
public interface AnalyzerProviderFactory {

    AnalyzerProvider create(String name, Settings settings);
}
target: 0 (true)
project: src_main_java_org_elasticsearch_index_analysis_AnalyzerProviderFactory.java

Record (Unnamed: 0 = 68)
class AssignToForProposal extends LocalProposal { protected DocumentChange createChange(IDocument document, Node expanse, Integer stopIndex) { DocumentChange change = new DocumentChange("Assign to For", document); change.setEdit(new MultiTextEdit()); change.addEdit(new InsertEdit(offset, "for (" + initialName + " in ")); String terminal = expanse.getEndToken().getText(); if (!terminal.equals(";")) { change.addEdit(new InsertEdit(stopIndex+1, ") {}")); exitPos = stopIndex+4; } else { change.addEdit(new ReplaceEdit(stopIndex, 1, ") {}")); exitPos = stopIndex+3; } return change; } public AssignToForProposal(Tree.CompilationUnit cu, Node node, int currentOffset) { super(cu, node, currentOffset); } protected void addLinkedPositions(IDocument document, Unit unit) throws BadLocationException { // ProposalPosition typePosition = // new ProposalPosition(document, offset, 5, 1, // getSupertypeProposals(offset, unit, // type, true, "value")); ProposalPosition namePosition = new ProposalPosition(document, offset+5, initialName.length(), 0, getNameProposals(offset+5, 0, nameProposals)); // LinkedMode.addLinkedPosition(linkedModeModel, typePosition); LinkedMode.addLinkedPosition(linkedModeModel, namePosition); } @Override String[] computeNameProposals(Node expression) { return Nodes.nameProposals(expression, true); } @Override public String getDisplayString() { return "Assign expression to 'for' loop"; } @Override boolean isEnabled(ProducedType resultType) { return resultType!=null && rootNode.getUnit().isIterableType(resultType); } static void addAssignToForProposal(Tree.CompilationUnit cu, Collection<ICompletionProposal> proposals, Node node, int currentOffset) { AssignToForProposal prop = new AssignToForProposal(cu, node, currentOffset); if (prop.isEnabled()) { proposals.add(prop); } } }
target: 0 (true)
project: plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_correct_AssignToForProposal.java

Record (Unnamed: 0 = 402)
public class OomeOnClientAuthenticationMain {

    private OomeOnClientAuthenticationMain() {
    }

    public static void main(String[] args) {
        HazelcastInstance hz = Hazelcast.newHazelcastInstance();

        ClientConfig clientConfig = new ClientConfig();
        clientConfig.getGroupConfig().setPassword("foo");
        clientConfig.getNetworkConfig().setConnectionAttemptLimit(0);

        for (int k = 0; k < 1000000; k++) {
            System.out.println("At:" + k);
            try {
                HazelcastClient.newHazelcastClient(clientConfig);
            } catch (IllegalStateException e) {
            }
        }
    }
}
target: 0 (true)
project: hazelcast-client_src_test_java_com_hazelcast_client_oome_OomeOnClientAuthenticationMain.java

Record (Unnamed: 0 = 2,290)
public static interface C<T> {

    /** Create a new empty instance of the given size. */
    T newInstance(int sizing);

    /** Clear the data. This operation is called when the data-structure is released. */
    void clear(T value);
}
target: 0 (true)
project: src_main_java_org_elasticsearch_common_recycler_Recycler.java

Record (Unnamed: 0 = 2,091)
public class AdapterStreamOutput extends StreamOutput { protected StreamOutput out; public AdapterStreamOutput(StreamOutput out) { this.out = out; super.setVersion(out.getVersion()); } @Override public StreamOutput setVersion(Version version) { out.setVersion(version); return super.setVersion(version); } public void setOut(StreamOutput out) { this.out = out; } public StreamOutput wrappedOut() { return this.out; } @Override public boolean seekPositionSupported() { return out.seekPositionSupported(); } @Override public long position() throws IOException { return out.position(); } @Override public void seek(long position) throws IOException { out.seek(position); } @Override public void writeByte(byte b) throws IOException { out.writeByte(b); } @Override public void writeBytes(byte[] b, int offset, int length) throws IOException { out.writeBytes(b, offset, length); } @Override public void flush() throws IOException { out.flush(); } @Override public void close() throws IOException { out.close(); } @Override public void reset() throws IOException { out.reset(); } @Override public void writeBytes(byte[] b) throws IOException { out.writeBytes(b); } @Override public void writeBytes(byte[] b, int length) throws IOException { out.writeBytes(b, length); } @Override public void writeBytesReference(@Nullable BytesReference bytes) throws IOException { out.writeBytesReference(bytes); } @Override public void writeInt(int i) throws IOException { out.writeInt(i); } @Override public void writeVInt(int i) throws IOException { out.writeVInt(i); } @Override public void writeLong(long i) throws IOException { out.writeLong(i); } @Override public void writeVLong(long i) throws IOException { out.writeVLong(i); } @Override public void writeString(String str) throws IOException { out.writeString(str); } @Override public void writeSharedString(String str) throws IOException { out.writeSharedString(str); } @Override public void writeText(Text text) throws IOException { out.writeText(text); } @Override public void writeSharedText(Text text) throws IOException { out.writeSharedText(text); } @Override public void writeFloat(float v) throws IOException { out.writeFloat(v); } @Override public void writeDouble(double v) throws IOException { out.writeDouble(v); } @Override public void writeBoolean(boolean b) throws IOException { out.writeBoolean(b); } @Override public void write(int b) throws IOException { out.write(b); } @Override public void write(byte[] b, int off, int len) throws IOException { out.write(b, off, len); } @Override public void write(byte[] b) throws IOException { out.write(b); } @Override public String toString() { return out.toString(); } }
target: 0 (true)
project: src_main_java_org_elasticsearch_common_io_stream_AdapterStreamOutput.java

Record (Unnamed: 0 = 2,044)
public class ContainsKeyOperation extends KeyBasedMapOperation {

    private boolean containsKey;

    public ContainsKeyOperation() {
    }

    public ContainsKeyOperation(String name, Data dataKey) {
        super(name, dataKey);
    }

    public void run() {
        containsKey = recordStore.containsKey(dataKey);
    }

    @Override
    public Object getResponse() {
        return containsKey;
    }

    @Override
    public String toString() {
        return "ContainsKeyOperation{" + '}';
    }
}
target: 0 (true)
project: hazelcast_src_main_java_com_hazelcast_map_operation_ContainsKeyOperation.java

Record (Unnamed: 0 = 176)
class InitializerVisitor extends Visitor { @Override public void visit(Tree.ClassDefinition that) { if (that.getClassBody()==null||that.getIdentifier()==null) return; createAnnotation(that, that.getClassBody(), that.getIdentifier().getText()); } @Override public void visit(Tree.ObjectDefinition that) { if (that.getClassBody()==null||that.getIdentifier()==null) return; createAnnotation(that, that.getClassBody(), that.getIdentifier().getText()); } private void createAnnotation(Node that, Tree.ClassBody body, String name) { // int offset = editor.getSelection().getOffset(); // if (offset>that.getStartIndex()&&offset<that.getStopIndex()) { Tree.Statement les = getLastExecutableStatement(body); if (les != null) { int startIndex = body.getStartIndex() + 2; int stopIndex = les.getStopIndex(); Position initializerPosition = new Position(startIndex, stopIndex - startIndex + 1); initializerAnnotation = new CeylonInitializerAnnotation(name, initializerPosition, 1); } // } } }
target: 0 (true)
project: plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_editor_AdditionalAnnotationCreator.java

Record (Unnamed: 0 = 2,097)
public class DataOutputStreamOutput extends StreamOutput { private final DataOutput out; public DataOutputStreamOutput(DataOutput out) { this.out = out; } @Override public void writeByte(byte b) throws IOException { out.writeByte(b); } @Override public void writeBytes(byte[] b, int offset, int length) throws IOException { out.write(b, offset, length); } @Override public void flush() throws IOException { // nothing to do there... } @Override public void reset() throws IOException { // nothing to do there... } @Override public void close() throws IOException { if (out instanceof Closeable) { ((Closeable) out).close(); } } }
target: 0 (true)
project: src_main_java_org_elasticsearch_common_io_stream_DataOutputStreamOutput.java

Record (Unnamed: 0 = 187)
public class XaDataSourceManager implements Lifecycle { // key = data source name, value = data source private final Map<String, XaDataSource> dataSources = new HashMap<String, XaDataSource>(); // key = branchId, value = data source private final Map<String, XaDataSource> branchIdMapping = new HashMap<String, XaDataSource>(); // key = data source name, value = branchId private final Map<String, byte[]> sourceIdMapping = new HashMap<String, byte[]>(); private Iterable<DataSourceRegistrationListener> dsRegistrationListeners = Listeners.newListeners(); private LifeSupport life = new LifeSupport(); private final StringLogger msgLog; private boolean isShutdown = false; public XaDataSourceManager( StringLogger msgLog ) { this.msgLog = msgLog; } public static DataSourceRegistrationListener filterListener( final DataSourceRegistrationListener listener, final Predicate<XaDataSource> filter ) { return new DataSourceRegistrationListener() { @Override public void registeredDataSource( XaDataSource ds ) { if ( filter.accept( ds ) ) { listener.registeredDataSource( ds ); } } @Override public void unregisteredDataSource( XaDataSource ds ) { if ( filter.accept( ds ) ) { listener.unregisteredDataSource( ds ); } } }; } public static DataSourceRegistrationListener neoStoreListener( DataSourceRegistrationListener listener ) { return filterListener( listener, new Predicate<XaDataSource>() { @Override public boolean accept( XaDataSource item ) { return item.getName().equals( NeoStoreXaDataSource.DEFAULT_DATA_SOURCE_NAME ); } } ); } public void addDataSourceRegistrationListener( DataSourceRegistrationListener listener ) { if ( life.getStatus().equals( LifecycleStatus.STARTED ) ) { try { for ( XaDataSource ds : dataSources.values() ) { listener.registeredDataSource( ds ); } } catch ( Throwable t ) { msgLog.logMessage( "Failed when notifying registering listener", t ); } } dsRegistrationListeners = Listeners.addListener( listener, dsRegistrationListeners ); } public void removeDataSourceRegistrationListener( DataSourceRegistrationListener dataSourceRegistrationListener ) { dsRegistrationListeners = Listeners.removeListener( dataSourceRegistrationListener, dsRegistrationListeners ); } @Override public void init() throws Throwable { if (dsRegistrationListeners == null) { dsRegistrationListeners = Listeners.newListeners(); } } @Override public void start() throws Throwable { life = new LifeSupport(); for ( XaDataSource ds : dataSources.values() ) { life.add( ds ); } life.start(); for ( DataSourceRegistrationListener listener : dsRegistrationListeners ) { try { for ( XaDataSource ds : dataSources.values() ) { listener.registeredDataSource( ds ); } } catch ( Throwable t ) { msgLog.logMessage( "Failed when notifying registering listener", t ); } } } @Override public void stop() throws Throwable { life.stop(); } @Override public void shutdown() throws Throwable { dsRegistrationListeners = null; life.shutdown(); dataSources.clear(); branchIdMapping.clear(); sourceIdMapping.clear(); isShutdown = true; } /** * Returns the {@link org.neo4j.kernel.impl.transaction.xaframework.XaDataSource} * registered as <CODE>name</CODE>. If no data source is registered with * that name <CODE>null</CODE> is returned. * * @param name the name of the data source */ public XaDataSource getXaDataSource( String name ) { if ( isShutdown ) { throw new IllegalStateException( "XaDataSourceManager has been shut down." ); } return dataSources.get( name ); } /** * Used to access the Neo DataSource. 
This should be replaced with * DataSource registration listeners instead, since this DataSource is not * always guaranteed to return anything (in HA case). */ @Deprecated public NeoStoreXaDataSource getNeoStoreDataSource() { return (NeoStoreXaDataSource) getXaDataSource( NeoStoreXaDataSource.DEFAULT_DATA_SOURCE_NAME ); } /** * Public for testing purpose. Do not use. */ public synchronized void registerDataSource( final XaDataSource dataSource ) { dataSources.put( dataSource.getName(), dataSource ); branchIdMapping.put( UTF8.decode( dataSource.getBranchId() ), dataSource ); sourceIdMapping.put( dataSource.getName(), dataSource.getBranchId() ); life.add( dataSource ); if ( life.getStatus().equals( LifecycleStatus.STARTED ) ) { Listeners.notifyListeners( dsRegistrationListeners, new Listeners.Notification<DataSourceRegistrationListener>() { @Override public void notify( DataSourceRegistrationListener listener ) { listener.registeredDataSource( dataSource ); } } ); } } /** * Public for testing purpose. Do not use. */ public synchronized void unregisterDataSource( String name ) { final XaDataSource dataSource = dataSources.get( name ); if ( dataSource == null ) { return; } dataSources.remove( name ); branchIdMapping.remove( UTF8.decode( dataSource.getBranchId() ) ); sourceIdMapping.remove( name ); Listeners.notifyListeners( dsRegistrationListeners, new Listeners.Notification<DataSourceRegistrationListener>() { @Override public void notify( DataSourceRegistrationListener listener ) { listener.unregisteredDataSource( dataSource ); } } ); life.remove( dataSource ); // No need for shutdown, removing does that } synchronized byte[] getBranchId( XAResource xaResource ) { if ( xaResource instanceof XaResource ) { byte branchId[] = ((XaResource) xaResource).getBranchId(); if ( branchId != null ) { return branchId; } } for ( Map.Entry<String, XaDataSource> entry : dataSources.entrySet() ) { XaDataSource dataSource = entry.getValue(); XAResource resource = dataSource.getXaConnection().getXaResource(); try { if ( resource.isSameRM( xaResource ) ) { String name = entry.getKey(); return sourceIdMapping.get( name ); } } catch ( XAException e ) { throw new TransactionFailureException( "Unable to check is same resource", e ); } } throw new TransactionFailureException( "Unable to find mapping for XAResource[" + xaResource + "]" ); } private XaDataSource getDataSource( byte branchId[] ) { XaDataSource dataSource = branchIdMapping.get( UTF8.decode( branchId ) ); if ( dataSource == null ) { throw new TransactionFailureException( "No mapping found for branchId[0x" + UTF8.decode( branchId ) + "]" ); } return dataSource; } // not thread safe public Collection<XaDataSource> getAllRegisteredDataSources() { return dataSources.values(); } /** * Recover all datasources */ public void recover( Iterator<List<TxLog.Record>> knownDanglingRecordList ) { // contains NonCompletedTransaction that needs to be committed List<NonCompletedTransaction> commitList = new ArrayList<NonCompletedTransaction>(); // contains Xids that should be rolledback final List<Xid> rollbackList = new LinkedList<Xid>(); // key = Resource(branchId) value = XAResource final Map<Resource, XaDataSource> resourceMap = new HashMap<Resource, XaDataSource>(); buildRecoveryInfo( commitList, rollbackList, resourceMap, knownDanglingRecordList ); // invoke recover on all xa resources found final List<Xid> recoveredXidsList = new LinkedList<Xid>(); try { for ( XaDataSource xaDataSource : dataSources.values() ) { XAResource xaRes = 
xaDataSource.getXaConnection().getXaResource(); Xid xids[] = xaRes.recover( XAResource.TMNOFLAGS ); for ( Xid xid : xids ) { if ( XidImpl.isThisTm( xid.getGlobalTransactionId() ) ) { // linear search if ( rollbackList.contains( xid ) ) { msgLog.logMessage( "TM: Found pre commit " + xid + " rolling back ... ", true ); rollbackList.remove( xid ); xaRes.rollback( xid ); } else { Resource resource = new Resource( xid.getBranchQualifier() ); if ( !resourceMap.containsKey( resource ) ) { resourceMap.put( resource, xaDataSource ); } recoveredXidsList.add( xid ); } } else { msgLog.warn( "Unknown xid: " + xid ); } } } // sort the commit list after sequence number Collections.sort( commitList ); // go through and commit for ( NonCompletedTransaction nct : commitList ) { int seq = nct.getSequenceNumber(); Xid xids[] = nct.getXids(); msgLog.debug( "Marked as commit tx-seq[" + seq + "] branch length: " + xids.length ); for ( Xid xid : xids ) { if ( !recoveredXidsList.contains( xid ) ) { msgLog.debug( "Tx-seq[" + seq + "][" + xid + "] not found in recovered xid list, " + "assuming already committed" ); continue; } recoveredXidsList.remove( xid ); Resource resource = new Resource( xid.getBranchQualifier() ); if ( !resourceMap.containsKey( resource ) ) { final TransactionFailureException ex = new TransactionFailureException( "Couldn't find XAResource for " + xid ); throw logAndReturn( "TM: recovery error", ex ); } msgLog.debug( "TM: Committing tx " + xid ); resourceMap.get( resource ).getXaConnection().getXaResource().commit( xid, false ); } } // rollback the rest for ( Xid xid : recoveredXidsList ) { Resource resource = new Resource( xid.getBranchQualifier() ); if ( !resourceMap.containsKey( resource ) ) { final TransactionFailureException ex = new TransactionFailureException( "Couldn't find XAResource for " + xid ); throw logAndReturn( "TM: recovery error", ex ); } msgLog.debug( "TM: no match found for " + xid + " removing" ); resourceMap.get( resource ).getXaConnection().getXaResource().rollback( xid ); } if ( rollbackList.size() > 0 ) { msgLog.debug( "TxLog contained unresolved " + "xids that needed rollback. They couldn't be matched to " + "any of the XAResources recover list. " + "Assuming " + rollbackList.size() + " transactions already rolled back." ); } // Rotate the logs of the participated data sources, making sure that // done-records are written so that even if the tm log gets truncated, // which it will be after this recovery, that transaction information // doesn't get lost. for ( XaDataSource participant : MapUtil.reverse( resourceMap ).keySet() ) { participant.recoveryCompleted(); participant.rotateLogicalLog(); } // For all data source that didn't actively participate in recovery // notify them that recovery process has completed. 
for ( XaDataSource ds : allOtherDataSources( resourceMap.values() ) ) { ds.recoveryCompleted(); } } catch ( IOException | XAException e ) { throw logAndReturn( "TM: recovery failed", new TransactionFailureException( "Recovery failed.", e ) ); } } private Collection<XaDataSource> allOtherDataSources( Collection<XaDataSource> recoveredDataSources ) { Collection<XaDataSource> dataSources = new HashSet<>( this.dataSources.values() ); dataSources.removeAll( recoveredDataSources ); return dataSources; } private void buildRecoveryInfo( List<NonCompletedTransaction> commitList, List<Xid> rollbackList, Map<Resource, XaDataSource> resourceMap, Iterator<List<TxLog.Record>> danglingRecordList ) { while ( danglingRecordList.hasNext() ) { Iterator<TxLog.Record> dListItr = danglingRecordList.next().iterator(); TxLog.Record startRecord = dListItr.next(); if ( startRecord.getType() != TxLog.TX_START ) { throw logAndReturn( "TM error building recovery info", new TransactionFailureException( "First record not a start record, type=" + startRecord.getType() ) ); } // get branches & commit status HashSet<Resource> branchSet = new HashSet<Resource>(); int markedCommit = -1; while ( dListItr.hasNext() ) { TxLog.Record record = dListItr.next(); if ( record.getType() == TxLog.BRANCH_ADD ) { if ( markedCommit != -1 ) { throw logAndReturn( "TM error building recovery info", new TransactionFailureException( "Already marked commit " + startRecord ) ); } branchSet.add( new Resource( record.getBranchId() ) ); } else if ( record.getType() == TxLog.MARK_COMMIT ) { if ( markedCommit != -1 ) { throw logAndReturn( "TM error building recovery info", new TransactionFailureException( "Already marked commit " + startRecord ) ); } markedCommit = record.getSequenceNumber(); } else { throw logAndReturn( "TM error building recovery info", new TransactionFailureException( "Illegal record type[" + record.getType() + "]" ) ); } } Iterator<Resource> resourceItr = branchSet.iterator(); List<Xid> xids = new LinkedList<Xid>(); while ( resourceItr.hasNext() ) { Resource resource = resourceItr.next(); if ( !resourceMap.containsKey( resource ) ) { resourceMap.put( resource, getDataSource( resource.getResourceId() ) ); } xids.add( new XidImpl( startRecord.getGlobalId(), resource.getResourceId() ) ); } if ( markedCommit != -1 ) // this xid needs to be committed { commitList.add( new NonCompletedTransaction( markedCommit, xids ) ); } else { rollbackList.addAll( xids ); } } } private <E extends Exception> E logAndReturn( String msg, E exception ) { try { msgLog.logMessage( msg, exception, true ); return exception; } catch ( Throwable t ) { return exception; } } public void rotateLogicalLogs() { for ( XaDataSource dataSource : dataSources.values() ) { try { dataSource.rotateLogicalLog(); } catch ( IOException e ) { msgLog.logMessage( "Couldn't rotate logical log for " + dataSource.getName(), e ); } } } private static class NonCompletedTransaction implements Comparable<NonCompletedTransaction> { private int seqNr = -1; private List<Xid> xidList = null; NonCompletedTransaction( int seqNr, List<Xid> xidList ) { this.seqNr = seqNr; this.xidList = xidList; } int getSequenceNumber() { return seqNr; } Xid[] getXids() { return xidList.toArray( new Xid[xidList.size()] ); } @Override public String toString() { return "NonCompletedTx[" + seqNr + "," + xidList + "]"; } @Override public int compareTo( NonCompletedTransaction nct ) { return getSequenceNumber() - nct.getSequenceNumber(); } } private static class Resource { private byte resourceId[] = null; 
Resource( byte resourceId[] ) { if ( resourceId == null || resourceId.length == 0 ) { throw new IllegalArgumentException( "Illegal resourceId" ); } this.resourceId = resourceId; } byte[] getResourceId() { return resourceId; } @Override public boolean equals( Object o ) { if ( !(o instanceof Resource) ) { return false; } byte otherResourceId[] = ((Resource) o).getResourceId(); if ( resourceId.length != otherResourceId.length ) { return false; } for ( int i = 0; i < resourceId.length; i++ ) { if ( resourceId[i] != otherResourceId[i] ) { return false; } } return true; } private volatile int hashCode = 0; @Override public int hashCode() { if ( hashCode == 0 ) { int calcHash = 0; for ( int i = 0; i < resourceId.length; i++ ) { calcHash += resourceId[i] << i * 8; } hashCode = 3217 * calcHash; } return hashCode; } } }
target: 0 (true)
project: community_kernel_src_main_java_org_neo4j_kernel_impl_transaction_XaDataSourceManager.java

Record (Unnamed: 0 = 149)
public interface StructuredContentType extends Serializable { /** * Gets the primary key. * * @return the primary key */ @Nullable public Long getId(); /** * Sets the primary key. * * @param id the new primary key */ public void setId(@Nullable Long id); /** * Gets the name. * * @return the name */ @Nonnull String getName(); /** * Sets the name. */ void setName(@Nonnull String name); /** * Gets the description. * @return */ @Nullable String getDescription(); /** * Sets the description. */ void setDescription(@Nullable String description); /** * Returns the template associated with this content type. * @return */ @Nonnull StructuredContentFieldTemplate getStructuredContentFieldTemplate(); /** * Sets the template associated with this content type. * @param scft */ void setStructuredContentFieldTemplate(@Nonnull StructuredContentFieldTemplate scft); }
target: 0 (true)
project: admin_broadleaf-contentmanagement-module_src_main_java_org_broadleafcommerce_cms_structure_domain_StructuredContentType.java

Record (Unnamed: 0 = 1,926)
public interface AnnotatedConstantBindingBuilder {

    /**
     * See the EDSL examples at {@link org.elasticsearch.common.inject.Binder}.
     */
    ConstantBindingBuilder annotatedWith(Class<? extends Annotation> annotationType);

    /**
     * See the EDSL examples at {@link org.elasticsearch.common.inject.Binder}.
     */
    ConstantBindingBuilder annotatedWith(Annotation annotation);
}
target: 0 (true)
project: src_main_java_org_elasticsearch_common_inject_binder_AnnotatedConstantBindingBuilder.java

Record (Unnamed: 0 = 1,768)
public enum ShapeRelation {

    INTERSECTS("intersects"),
    DISJOINT("disjoint"),
    WITHIN("within");

    private final String relationName;

    ShapeRelation(String relationName) {
        this.relationName = relationName;
    }

    public static ShapeRelation getRelationByName(String name) {
        name = name.toLowerCase(Locale.ENGLISH);
        for (ShapeRelation relation : ShapeRelation.values()) {
            if (relation.relationName.equals(name)) {
                return relation;
            }
        }
        return null;
    }

    public String getRelationName() {
        return relationName;
    }
}
target: 0 (true)
project: src_main_java_org_elasticsearch_common_geo_ShapeRelation.java

Record (Unnamed: 0 = 3,070)
public class SnapshotIndexCommitExistsMatcher extends TypeSafeMatcher<SnapshotIndexCommit> {

    @Override
    public boolean matchesSafely(SnapshotIndexCommit snapshotIndexCommit) {
        for (String fileName : snapshotIndexCommit.getFiles()) {
            try {
                if (!snapshotIndexCommit.getDirectory().fileExists(fileName)) {
                    return false;
                }
            } catch (IOException e) {
                return false;
            }
        }
        return true;
    }

    @Override
    public void describeTo(Description description) {
        description.appendText("an index commit existence");
    }

    public static Matcher<SnapshotIndexCommit> snapshotIndexCommitExists() {
        return new SnapshotIndexCommitExistsMatcher();
    }
}
target: 0 (true)
project: src_test_java_org_elasticsearch_index_deletionpolicy_SnapshotIndexCommitExistsMatcher.java

Record (Unnamed: 0 = 2,262)
public class NetworkService extends AbstractComponent { public static final String LOCAL = "#local#"; private static final String GLOBAL_NETWORK_HOST_SETTING = "network.host"; private static final String GLOBAL_NETWORK_BINDHOST_SETTING = "network.bind_host"; private static final String GLOBAL_NETWORK_PUBLISHHOST_SETTING = "network.publish_host"; public static final class TcpSettings { public static final String TCP_NO_DELAY = "network.tcp.no_delay"; public static final String TCP_KEEP_ALIVE = "network.tcp.keep_alive"; public static final String TCP_REUSE_ADDRESS = "network.tcp.reuse_address"; public static final String TCP_SEND_BUFFER_SIZE = "network.tcp.send_buffer_size"; public static final String TCP_RECEIVE_BUFFER_SIZE = "network.tcp.receive_buffer_size"; public static final String TCP_BLOCKING = "network.tcp.blocking"; public static final String TCP_BLOCKING_SERVER = "network.tcp.blocking_server"; public static final String TCP_BLOCKING_CLIENT = "network.tcp.blocking_client"; public static final String TCP_CONNECT_TIMEOUT = "network.tcp.connect_timeout"; public static final ByteSizeValue TCP_DEFAULT_SEND_BUFFER_SIZE = null; public static final ByteSizeValue TCP_DEFAULT_RECEIVE_BUFFER_SIZE = null; public static final TimeValue TCP_DEFAULT_CONNECT_TIMEOUT = new TimeValue(30, TimeUnit.SECONDS); } /** * A custom name resolver can support custom lookup keys (my_net_key:ipv4) and also change * the default inet address used in case no settings is provided. */ public static interface CustomNameResolver { /** * Resolves the default value if possible. If not, return <tt>null</tt>. */ InetAddress resolveDefault(); /** * Resolves a custom value handling, return <tt>null</tt> if can't handle it. */ InetAddress resolveIfPossible(String value); } private final List<CustomNameResolver> customNameResolvers = new CopyOnWriteArrayList<CustomNameResolver>(); @Inject public NetworkService(Settings settings) { super(settings); InetSocketTransportAddress.setResolveAddress(settings.getAsBoolean("network.address.serialization.resolve", false)); } /** * Add a custom name resolver. 
*/ public void addCustomNameResolver(CustomNameResolver customNameResolver) { customNameResolvers.add(customNameResolver); } public InetAddress resolveBindHostAddress(String bindHost) throws IOException { return resolveBindHostAddress(bindHost, null); } public InetAddress resolveBindHostAddress(String bindHost, String defaultValue2) throws IOException { return resolveInetAddress(bindHost, settings.get(GLOBAL_NETWORK_BINDHOST_SETTING, settings.get(GLOBAL_NETWORK_HOST_SETTING)), defaultValue2); } public InetAddress resolvePublishHostAddress(String publishHost) throws IOException { InetAddress address = resolvePublishHostAddress(publishHost, null); // verify that its not a local address if (address == null || address.isAnyLocalAddress()) { address = NetworkUtils.getFirstNonLoopbackAddress(NetworkUtils.StackType.IPv4); if (address == null) { address = NetworkUtils.getFirstNonLoopbackAddress(NetworkUtils.getIpStackType()); if (address == null) { address = NetworkUtils.getLocalAddress(); if (address == null) { return NetworkUtils.getLocalhost(NetworkUtils.StackType.IPv4); } } } } return address; } public InetAddress resolvePublishHostAddress(String publishHost, String defaultValue2) throws IOException { return resolveInetAddress(publishHost, settings.get(GLOBAL_NETWORK_PUBLISHHOST_SETTING, settings.get(GLOBAL_NETWORK_HOST_SETTING)), defaultValue2); } public InetAddress resolveInetAddress(String host, String defaultValue1, String defaultValue2) throws UnknownHostException, IOException { if (host == null) { host = defaultValue1; } if (host == null) { host = defaultValue2; } if (host == null) { for (CustomNameResolver customNameResolver : customNameResolvers) { InetAddress inetAddress = customNameResolver.resolveDefault(); if (inetAddress != null) { return inetAddress; } } return null; } String origHost = host; if ((host.startsWith("#") && host.endsWith("#")) || (host.startsWith("_") && host.endsWith("_"))) { host = host.substring(1, host.length() - 1); for (CustomNameResolver customNameResolver : customNameResolvers) { InetAddress inetAddress = customNameResolver.resolveIfPossible(host); if (inetAddress != null) { return inetAddress; } } if (host.equals("local")) { return NetworkUtils.getLocalAddress(); } else if (host.startsWith("non_loopback")) { if (host.toLowerCase(Locale.ROOT).endsWith(":ipv4")) { return NetworkUtils.getFirstNonLoopbackAddress(NetworkUtils.StackType.IPv4); } else if (host.toLowerCase(Locale.ROOT).endsWith(":ipv6")) { return NetworkUtils.getFirstNonLoopbackAddress(NetworkUtils.StackType.IPv6); } else { return NetworkUtils.getFirstNonLoopbackAddress(NetworkUtils.getIpStackType()); } } else { NetworkUtils.StackType stackType = NetworkUtils.getIpStackType(); if (host.toLowerCase(Locale.ROOT).endsWith(":ipv4")) { stackType = NetworkUtils.StackType.IPv4; host = host.substring(0, host.length() - 5); } else if (host.toLowerCase(Locale.ROOT).endsWith(":ipv6")) { stackType = NetworkUtils.StackType.IPv6; host = host.substring(0, host.length() - 5); } Collection<NetworkInterface> allInterfs = NetworkUtils.getAllAvailableInterfaces(); for (NetworkInterface ni : allInterfs) { if (!ni.isUp()) { continue; } if (host.equals(ni.getName()) || host.equals(ni.getDisplayName())) { if (ni.isLoopback()) { return NetworkUtils.getFirstAddress(ni, stackType); } else { return NetworkUtils.getFirstNonLoopbackAddress(ni, stackType); } } } } throw new IOException("Failed to find network interface for [" + origHost + "]"); } return InetAddress.getByName(host); } }
target: 0 (true)
project: src_main_java_org_elasticsearch_common_network_NetworkService.java

Record (Unnamed: 0 = 687)
public class CollectionEvent implements IdentifiedDataSerializable { String name; Data data; ItemEventType eventType; Address caller; public CollectionEvent() { } public CollectionEvent(String name, Data data, ItemEventType eventType, Address caller) { this.name = name; this.data = data; this.eventType = eventType; this.caller = caller; } @Override public void writeData(ObjectDataOutput out) throws IOException { out.writeUTF(name); out.writeInt(eventType.getType()); caller.writeData(out); IOUtil.writeNullableData(out, data); } @Override public void readData(ObjectDataInput in) throws IOException { name = in.readUTF(); eventType = ItemEventType.getByType(in.readInt()); caller = new Address(); caller.readData(in); data = IOUtil.readNullableData(in); } @Override public int getFactoryId() { return CollectionDataSerializerHook.F_ID; } @Override public int getId() { return CollectionDataSerializerHook.COLLECTION_EVENT; } }
target: 0 (true)
project: hazelcast_src_main_java_com_hazelcast_collection_CollectionEvent.java

Record (Unnamed: 0 = 1,097)
enum Comparator {
    SAFE {
        boolean compare(BytesRef b1, BytesRef b2) {
            return b1.bytesEquals(b2);
        }
    },
    UNSAFE {
        @Override
        boolean compare(BytesRef b1, BytesRef b2) {
            return UnsafeUtils.equals(b1, b2);
        }
    };

    abstract boolean compare(BytesRef b1, BytesRef b2);
}
target: 0 (true)
project: src_test_java_org_elasticsearch_benchmark_common_util_BytesRefComparisonsBenchmark.java

Record (Unnamed: 0 = 760)
public class MultiGetShardResponse extends ActionResponse { IntArrayList locations; List<GetResponse> responses; List<MultiGetResponse.Failure> failures; MultiGetShardResponse() { locations = new IntArrayList(); responses = new ArrayList<GetResponse>(); failures = new ArrayList<MultiGetResponse.Failure>(); } public void add(int location, GetResponse response) { locations.add(location); responses.add(response); failures.add(null); } public void add(int location, MultiGetResponse.Failure failure) { locations.add(location); responses.add(null); failures.add(failure); } @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); int size = in.readVInt(); locations = new IntArrayList(size); responses = new ArrayList<GetResponse>(size); failures = new ArrayList<MultiGetResponse.Failure>(size); for (int i = 0; i < size; i++) { locations.add(in.readVInt()); if (in.readBoolean()) { GetResponse response = new GetResponse(); response.readFrom(in); responses.add(response); } else { responses.add(null); } if (in.readBoolean()) { failures.add(MultiGetResponse.Failure.readFailure(in)); } else { failures.add(null); } } } @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeVInt(locations.size()); for (int i = 0; i < locations.size(); i++) { out.writeVInt(locations.get(i)); if (responses.get(i) == null) { out.writeBoolean(false); } else { out.writeBoolean(true); responses.get(i).writeTo(out); } if (failures.get(i) == null) { out.writeBoolean(false); } else { out.writeBoolean(true); failures.get(i).writeTo(out); } } } }
target: 0 (true)
project: src_main_java_org_elasticsearch_action_get_MultiGetShardResponse.java

Record (Unnamed: 0 = 462)
public class IndicesAliasesAction extends IndicesAction<IndicesAliasesRequest, IndicesAliasesResponse, IndicesAliasesRequestBuilder> {

    public static final IndicesAliasesAction INSTANCE = new IndicesAliasesAction();
    public static final String NAME = "indices/aliases";

    private IndicesAliasesAction() {
        super(NAME);
    }

    @Override
    public IndicesAliasesResponse newResponse() {
        return new IndicesAliasesResponse();
    }

    @Override
    public IndicesAliasesRequestBuilder newRequestBuilder(IndicesAdminClient client) {
        return new IndicesAliasesRequestBuilder(client);
    }
}
target: 0 (true)
project: src_main_java_org_elasticsearch_action_admin_indices_alias_IndicesAliasesAction.java

Record (Unnamed: 0 = 6,228)
protected static class MavenMessageBuilder extends ReproduceErrorMessageBuilder { public MavenMessageBuilder(StringBuilder b) { super(b); } @Override public ReproduceErrorMessageBuilder appendAllOpts(Description description) { super.appendAllOpts(description); return appendESProperties(); } /** * Append a single VM option. */ @Override public ReproduceErrorMessageBuilder appendOpt(String sysPropName, String value) { if (sysPropName.equals(SYSPROP_ITERATIONS())) { // we don't want the iters to be in there! return this; } if (Strings.hasLength(value)) { return super.appendOpt(sysPropName, value); } return this; } public ReproduceErrorMessageBuilder appendESProperties() { appendProperties("es.logger.level", "es.node.mode", "es.node.local", TestCluster.TESTS_ENABLE_MOCK_MODULES, "tests.assertion.disabled", "tests.security.manager"); if (System.getProperty("tests.jvm.argline") != null && !System.getProperty("tests.jvm.argline").isEmpty()) { appendOpt("tests.jvm.argline", "\"" + System.getProperty("tests.jvm.argline") + "\""); } return this; } protected ReproduceErrorMessageBuilder appendProperties(String... properties) { for (String sysPropName : properties) { if (Strings.hasLength(System.getProperty(sysPropName))) { appendOpt(sysPropName, System.getProperty(sysPropName)); } } return this; } }
1no label
src_test_java_org_elasticsearch_test_junit_listeners_ReproduceInfoPrinter.java
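The appendProperties helper above copies a system property into the reproduce line only when it is actually set and non-empty. A standalone sketch of that filtering; the "mvn test" prefix and the property values are illustrative assumptions, not taken from the original class:

import java.util.*;

public class ReproduceLineSketch {
    // Mirror appendProperties(...): append a -D option only for properties
    // that are present and non-empty.
    static String buildReproduceLine(String... propertyNames) {
        StringBuilder line = new StringBuilder("mvn test");
        for (String name : propertyNames) {
            String value = System.getProperty(name);
            if (value != null && !value.isEmpty()) {
                line.append(" -D").append(name).append('=').append(value);
            }
        }
        return line.toString();
    }

    public static void main(String[] args) {
        System.setProperty("es.logger.level", "DEBUG");   // example value
        System.out.println(buildReproduceLine("es.logger.level", "es.node.mode"));
    }
}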
3,265
public class StringScriptDataComparator extends FieldComparator<BytesRef> { public static IndexFieldData.XFieldComparatorSource comparatorSource(SearchScript script) { return new InnerSource(script); } private static class InnerSource extends IndexFieldData.XFieldComparatorSource { private final SearchScript script; private InnerSource(SearchScript script) { this.script = script; } @Override public FieldComparator<?> newComparator(String fieldname, int numHits, int sortPos, boolean reversed) throws IOException { return new StringScriptDataComparator(numHits, script); } @Override public SortField.Type reducedType() { return SortField.Type.STRING; } } private final SearchScript script; private BytesRef[] values; // TODO maybe we can preallocate or use a sentinel to prevent the conditionals in compare private BytesRef bottom; private final BytesRef spare = new BytesRef(); private int spareDoc = -1; public StringScriptDataComparator(int numHits, SearchScript script) { this.script = script; values = new BytesRef[numHits]; } @Override public FieldComparator<BytesRef> setNextReader(AtomicReaderContext context) throws IOException { script.setNextReader(context); spareDoc = -1; return this; } @Override public void setScorer(Scorer scorer) { script.setScorer(scorer); } @Override public int compare(int slot1, int slot2) { final BytesRef val1 = values[slot1]; final BytesRef val2 = values[slot2]; if (val1 == null) { if (val2 == null) { return 0; } return -1; } else if (val2 == null) { return 1; } return val1.compareTo(val2); } @Override public int compareBottom(int doc) { if (bottom == null) { return -1; } setSpare(doc); return bottom.compareTo(spare); } @Override public int compareDocToValue(int doc, BytesRef val2) throws IOException { script.setNextDocId(doc); setSpare(doc); return spare.compareTo(val2); } private void setSpare(int doc) { if (spareDoc == doc) { return; } script.setNextDocId(doc); spare.copyChars(script.run().toString()); spareDoc = doc; } @Override public void copy(int slot, int doc) { setSpare(doc); if (values[slot] == null) { values[slot] = BytesRef.deepCopyOf(spare); } else { values[slot].copyBytes(spare); } } @Override public void setBottom(final int bottom) { this.bottom = values[bottom]; } @Override public BytesRef value(int slot) { return values[slot]; } }
1no label
src_main_java_org_elasticsearch_index_fielddata_fieldcomparator_StringScriptDataComparator.java
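The compare(slot1, slot2) method in the comparator above orders null values before any non-null value, treats two nulls as equal, and otherwise falls back to ordinary comparison. A standalone sketch of that ordering, using String in place of BytesRef (an assumption made purely to keep the example self-contained):

import java.util.*;

public class NullsFirstCompareSketch {
    // Same ordering as StringScriptDataComparator.compare: null sorts first,
    // two nulls are equal, otherwise natural order.
    static int compareNullsFirst(String a, String b) {
        if (a == null) {
            return b == null ? 0 : -1;
        } else if (b == null) {
            return 1;
        }
        return a.compareTo(b);
    }

    public static void main(String[] args) {
        List<String> values = new ArrayList<>(Arrays.asList("b", null, "a"));
        values.sort(NullsFirstCompareSketch::compareNullsFirst);
        System.out.println(values);   // [null, a, b]
    }
}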
6,115
clusterService.submitStateUpdateTask("update snapshot state after node removal", new ClusterStateUpdateTask() { @Override public ClusterState execute(ClusterState currentState) throws Exception { DiscoveryNodes nodes = currentState.nodes(); MetaData metaData = currentState.metaData(); MetaData.Builder mdBuilder = MetaData.builder(currentState.metaData()); SnapshotMetaData snapshots = metaData.custom(SnapshotMetaData.TYPE); if (snapshots == null) { return currentState; } boolean changed = false; ArrayList<SnapshotMetaData.Entry> entries = newArrayList(); for (final SnapshotMetaData.Entry snapshot : snapshots.entries()) { SnapshotMetaData.Entry updatedSnapshot = snapshot; boolean snapshotChanged = false; if (snapshot.state() == State.STARTED) { ImmutableMap.Builder<ShardId, ShardSnapshotStatus> shards = ImmutableMap.builder(); for (ImmutableMap.Entry<ShardId, ShardSnapshotStatus> shardEntry : snapshot.shards().entrySet()) { ShardSnapshotStatus shardStatus = shardEntry.getValue(); if (!shardStatus.state().completed() && shardStatus.nodeId() != null) { if (nodes.nodeExists(shardStatus.nodeId())) { shards.put(shardEntry); } else { // TODO: Restart snapshot on another node? snapshotChanged = true; logger.warn("failing snapshot of shard [{}] on closed node [{}]", shardEntry.getKey(), shardStatus.nodeId()); shards.put(shardEntry.getKey(), new ShardSnapshotStatus(shardStatus.nodeId(), State.FAILED, "node shutdown")); } } } if (snapshotChanged) { changed = true; ImmutableMap<ShardId, ShardSnapshotStatus> shardsMap = shards.build(); if (!snapshot.state().completed() && completed(shardsMap.values())) { updatedSnapshot = new SnapshotMetaData.Entry(snapshot.snapshotId(), snapshot.includeGlobalState(), State.SUCCESS, snapshot.indices(), shardsMap); endSnapshot(updatedSnapshot); } else { updatedSnapshot = new SnapshotMetaData.Entry(snapshot.snapshotId(), snapshot.includeGlobalState(), snapshot.state(), snapshot.indices(), shardsMap); } } entries.add(updatedSnapshot); } else if (snapshot.state() == State.INIT && newMaster) { // Clean up the snapshot that failed to start from the old master deleteSnapshot(snapshot.snapshotId(), new DeleteSnapshotListener() { @Override public void onResponse() { logger.debug("cleaned up abandoned snapshot {} in INIT state", snapshot.snapshotId()); } @Override public void onFailure(Throwable t) { logger.warn("failed to clean up abandoned snapshot {} in INIT state", snapshot.snapshotId()); } }); } else if (snapshot.state() == State.SUCCESS && newMaster) { // Finalize the snapshot endSnapshot(snapshot); } } if (changed) { snapshots = new SnapshotMetaData(entries.toArray(new SnapshotMetaData.Entry[entries.size()])); mdBuilder.putCustom(SnapshotMetaData.TYPE, snapshots); return ClusterState.builder(currentState).metaData(mdBuilder).build(); } return currentState; } @Override public void onFailure(String source, Throwable t) { logger.warn("failed to update snapshot state after node removal"); } });
1no label
src_main_java_org_elasticsearch_snapshots_SnapshotsService.java
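The cluster-state update task above walks every shard of a running snapshot and marks as FAILED any shard whose assigned node has left the cluster, leaving the other assignments untouched. A minimal standalone sketch of that bookkeeping with plain collections; shard ids and node ids are reduced to Strings here, which is an illustrative simplification rather than the Elasticsearch types:

import java.util.*;

public class FailShardsOnMissingNodesSketch {
    // For each shard -> nodeId assignment, keep it if the node is still alive,
    // otherwise replace it with a FAILED marker, mirroring the loop over
    // snapshot.shards() in the update task above.
    static Map<String, String> reassign(Map<String, String> shardToNode, Set<String> liveNodes) {
        Map<String, String> updated = new LinkedHashMap<>();
        for (Map.Entry<String, String> e : shardToNode.entrySet()) {
            if (liveNodes.contains(e.getValue())) {
                updated.put(e.getKey(), e.getValue());
            } else {
                updated.put(e.getKey(), "FAILED: node shutdown (" + e.getValue() + ")");
            }
        }
        return updated;
    }

    public static void main(String[] args) {
        Map<String, String> shards = new LinkedHashMap<>();
        shards.put("index[0]", "node1");
        shards.put("index[1]", "node2");
        System.out.println(reassign(shards, new HashSet<>(Collections.singleton("node1"))));
    }
}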
526
public class OSerializationException extends OException { private static final long serialVersionUID = -3003977236233691448L; public OSerializationException(String string) { super(string); } public OSerializationException(String message, Throwable cause) { super(message, cause); } }
0true
core_src_main_java_com_orientechnologies_orient_core_exception_OSerializationException.java
2,457
public static interface ShutdownListener { public void onTerminated(); }
0true
src_main_java_org_elasticsearch_common_util_concurrent_EsThreadPoolExecutor.java
3,637
public static final class Encoding { // With 14 bytes we already have better precision than a double since a double has 11 bits of exponent private static final int MAX_NUM_BYTES = 14; private static final Encoding[] INSTANCES; static { INSTANCES = new Encoding[MAX_NUM_BYTES + 1]; for (int numBytes = 2; numBytes <= MAX_NUM_BYTES; numBytes += 2) { INSTANCES[numBytes] = new Encoding(numBytes); } } /** Get an instance based on the number of bytes that has been used to encode values. */ public static final Encoding of(int numBytesPerValue) { final Encoding instance = INSTANCES[numBytesPerValue]; if (instance == null) { throw new ElasticsearchIllegalStateException("No encoding for " + numBytesPerValue + " bytes per value"); } return instance; } /** Get an instance based on the expected precision. Here are examples of the number of required bytes per value depending on the * expected precision:<ul> * <li>1km: 4 bytes</li> * <li>3m: 6 bytes</li> * <li>1m: 8 bytes</li> * <li>1cm: 8 bytes</li> * <li>1mm: 10 bytes</li></ul> */ public static final Encoding of(DistanceUnit.Distance precision) { for (Encoding encoding : INSTANCES) { if (encoding != null && encoding.precision().compareTo(precision) <= 0) { return encoding; } } return INSTANCES[MAX_NUM_BYTES]; } private final DistanceUnit.Distance precision; private final int numBytes; private final int numBytesPerCoordinate; private final double factor; private Encoding(int numBytes) { assert numBytes >= 1 && numBytes <= MAX_NUM_BYTES; assert (numBytes & 1) == 0; // we don't support odd numBytes for the moment this.numBytes = numBytes; this.numBytesPerCoordinate = numBytes / 2; this.factor = Math.pow(2, - numBytesPerCoordinate * 8 + 9); assert (1L << (numBytesPerCoordinate * 8 - 1)) * factor > 180 && (1L << (numBytesPerCoordinate * 8 - 2)) * factor < 180 : numBytesPerCoordinate + " " + factor; if (numBytes == MAX_NUM_BYTES) { // no precision loss compared to a double precision = new DistanceUnit.Distance(0, DistanceUnit.DEFAULT); } else { precision = new DistanceUnit.Distance( GeoDistance.PLANE.calculate(0, 0, factor / 2, factor / 2, DistanceUnit.DEFAULT), // factor/2 because we use Math.round instead of a cast to convert the double to a long DistanceUnit.DEFAULT); } } public DistanceUnit.Distance precision() { return precision; } /** The number of bytes required to encode a single geo point. */ public final int numBytes() { return numBytes; } /** The number of bits required to encode a single coordinate of a geo point. */ public int numBitsPerCoordinate() { return numBytesPerCoordinate << 3; } /** Return the bits that encode a latitude/longitude. */ public long encodeCoordinate(double lat) { return Math.round((lat + 180) / factor); } /** Decode a sequence of bits into the original coordinate. */ public double decodeCoordinate(long bits) { return bits * factor - 180; } private void encodeBits(long bits, byte[] out, int offset) { for (int i = 0; i < numBytesPerCoordinate; ++i) { out[offset++] = (byte) bits; bits >>>= 8; } assert bits == 0; } private long decodeBits(byte [] in, int offset) { long r = in[offset++] & 0xFFL; for (int i = 1; i < numBytesPerCoordinate; ++i) { r = (in[offset++] & 0xFFL) << (i * 8); } return r; } /** Encode a geo point into a byte-array, over {@link #numBytes()} bytes. 
*/ public void encode(double lat, double lon, byte[] out, int offset) { encodeBits(encodeCoordinate(lat), out, offset); encodeBits(encodeCoordinate(lon), out, offset + numBytesPerCoordinate); } /** Decode a geo point from a byte-array, reading {@link #numBytes()} bytes. */ public GeoPoint decode(byte[] in, int offset, GeoPoint out) { final long latBits = decodeBits(in, offset); final long lonBits = decodeBits(in, offset + numBytesPerCoordinate); return decode(latBits, lonBits, out); } /** Decode a geo point from the bits of the encoded latitude and longitudes. */ public GeoPoint decode(long latBits, long lonBits, GeoPoint out) { final double lat = decodeCoordinate(latBits); final double lon = decodeCoordinate(lonBits); return out.reset(lat, lon); } }
0true
src_main_java_org_elasticsearch_index_mapper_geo_GeoPointFieldMapper.java
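The Encoding class above trades bytes for precision: each coordinate is shifted by 180 degrees, divided by a power-of-two factor, and rounded. For the smallest supported encoding (2 bytes per coordinate, 4 bytes per point) the factor works out to 2^(-2*8 + 9) = 2^-7 = 0.0078125 degrees. The sketch below re-derives that round-trip arithmetic in isolation rather than calling the mapper class itself, so the fixed constant and the example coordinate are assumptions tied to that one configuration:

public class GeoQuantizationSketch {
    // Re-derives encodeCoordinate/decodeCoordinate from GeoPointFieldMapper.Encoding
    // for 2 bytes per coordinate: factor = 2^(-2*8 + 9) = 2^-7 degrees.
    static final double FACTOR = Math.pow(2, -2 * 8 + 9);

    static long encodeCoordinate(double degrees) {
        return Math.round((degrees + 180) / FACTOR);   // shift into [0, 360], then quantize
    }

    static double decodeCoordinate(long bits) {
        return bits * FACTOR - 180;                    // undo the quantization and the shift
    }

    public static void main(String[] args) {
        double lat = 48.858844;                        // example latitude
        double roundTripped = decodeCoordinate(encodeCoordinate(lat));
        System.out.printf("original=%f decoded=%f error=%f degrees%n",
                lat, roundTripped, Math.abs(lat - roundTripped));
    }
}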
1,659
new Thread() { public void run() { while (hz.getMap(name).size() > size / 2) { try { sleep(5); } catch (InterruptedException ignored) { return; } } TestUtil.terminateInstance(hz); } }.start();
0true
hazelcast_src_test_java_com_hazelcast_map_BackupTest.java
16
public class CodeCompletions { private static boolean forceExplicitTypeArgs(Declaration d, OccurrenceLocation ol) { if (ol==EXTENDS) { return true; } else { //TODO: this is a pretty limited implementation // for now, but eventually we could do // something much more sophisticated to // guess if explicit type args will be // necessary (variance, etc) if (d instanceof Functional) { List<ParameterList> pls = ((Functional) d).getParameterLists(); return pls.isEmpty() || pls.get(0).getParameters().isEmpty(); } else { return false; } } } static String getTextForDocLink(CeylonParseController cpc, Declaration decl) { Package pkg = decl.getUnit().getPackage(); String qname = decl.getQualifiedNameString(); // handle language package or same module and package Unit unit = cpc.getRootNode().getUnit(); if (pkg!=null && (Module.LANGUAGE_MODULE_NAME.equals(pkg.getNameAsString()) || (unit!=null && pkg.equals(unit.getPackage())))) { if (decl.isToplevel()) { return decl.getNameAsString(); } else { // not top level in language module int loc = qname.indexOf("::"); if (loc>=0) { return qname.substring(loc + 2); } else { return qname; } } } else { return qname; } } public static String getTextFor(Declaration dec, Unit unit) { StringBuilder result = new StringBuilder(); result.append(escapeName(dec, unit)); appendTypeParameters(dec, result); return result.toString(); } public static String getPositionalInvocationTextFor( Declaration dec, OccurrenceLocation ol, ProducedReference pr, Unit unit, boolean includeDefaulted, String typeArgs) { StringBuilder result = new StringBuilder(escapeName(dec, unit)); if (typeArgs!=null) { result.append(typeArgs); } else if (forceExplicitTypeArgs(dec, ol)) { appendTypeParameters(dec, result); } appendPositionalArgs(dec, pr, unit, result, includeDefaulted, false); appendSemiToVoidInvocation(result, dec); return result.toString(); } public static String getNamedInvocationTextFor(Declaration dec, ProducedReference pr, Unit unit, boolean includeDefaulted, String typeArgs) { StringBuilder result = new StringBuilder(escapeName(dec, unit)); if (typeArgs!=null) { result.append(typeArgs); } else if (forceExplicitTypeArgs(dec, null)) { appendTypeParameters(dec, result); } appendNamedArgs(dec, pr, unit, result, includeDefaulted, false); appendSemiToVoidInvocation(result, dec); return result.toString(); } private static void appendSemiToVoidInvocation(StringBuilder result, Declaration dd) { if ((dd instanceof Method) && ((Method) dd).isDeclaredVoid() && ((Method) dd).getParameterLists().size()==1) { result.append(';'); } } public static String getDescriptionFor(Declaration dec, Unit unit) { StringBuilder result = new StringBuilder(dec.getName(unit)); appendTypeParameters(dec, result); return result.toString(); } public static String getPositionalInvocationDescriptionFor( Declaration dec, OccurrenceLocation ol, ProducedReference pr, Unit unit, boolean includeDefaulted, String typeArgs) { StringBuilder result = new StringBuilder(dec.getName(unit)); if (typeArgs!=null) { result.append(typeArgs); } else if (forceExplicitTypeArgs(dec, ol)) { appendTypeParameters(dec, result); } appendPositionalArgs(dec, pr, unit, result, includeDefaulted, true); return result.toString(); } public static String getNamedInvocationDescriptionFor( Declaration dec, ProducedReference pr, Unit unit, boolean includeDefaulted, String typeArgs) { StringBuilder result = new StringBuilder(dec.getName(unit)); if (typeArgs!=null) { result.append(typeArgs); } else if (forceExplicitTypeArgs(dec, null)) { 
appendTypeParameters(dec, result); } appendNamedArgs(dec, pr, unit, result, includeDefaulted, true); return result.toString(); } public static String getRefinementTextFor(Declaration d, ProducedReference pr, Unit unit, boolean isInterface, ClassOrInterface ci, String indent, boolean containsNewline) { return getRefinementTextFor(d, pr, unit, isInterface, ci, indent, containsNewline, true); } public static String getRefinementTextFor(Declaration d, ProducedReference pr, Unit unit, boolean isInterface, ClassOrInterface ci, String indent, boolean containsNewline, boolean preamble) { StringBuilder result = new StringBuilder(); if (preamble) { result.append("shared actual "); if (isVariable(d) && !isInterface) { result.append("variable "); } } appendDeclarationHeaderText(d, pr, unit, result); appendTypeParameters(d, result); appendParametersText(d, pr, unit, result); if (d instanceof Class) { result.append(extraIndent(extraIndent(indent, containsNewline), containsNewline)) .append(" extends super.").append(escapeName(d)); appendPositionalArgs(d, pr, unit, result, true, false); } appendConstraints(d, pr, unit, indent, containsNewline, result); appendImplText(d, pr, isInterface, unit, indent, result, ci); return result.toString(); } private static void appendConstraints(Declaration d, ProducedReference pr, Unit unit, String indent, boolean containsNewline, StringBuilder result) { if (d instanceof Functional) { for (TypeParameter tp: ((Functional) d).getTypeParameters()) { List<ProducedType> sts = tp.getSatisfiedTypes(); if (!sts.isEmpty()) { result.append(extraIndent(extraIndent(indent, containsNewline), containsNewline)) .append("given ").append(tp.getName()) .append(" satisfies "); boolean first = true; for (ProducedType st: sts) { if (first) { first = false; } else { result.append("&"); } result.append(st.substitute(pr.getTypeArguments()) .getProducedTypeName(unit)); } } } } } static String getInlineFunctionTextFor(Parameter p, ProducedReference pr, Unit unit, String indent) { StringBuilder result = new StringBuilder(); appendNamedArgumentHeader(p, pr, result, false); appendTypeParameters(p.getModel(), result); appendParametersText(p.getModel(), pr, unit, result); if (p.isDeclaredVoid()) { result.append(" {}"); } else { result.append(" => nothing;"); } return result.toString(); } public static boolean isVariable(Declaration d) { return d instanceof TypedDeclaration && ((TypedDeclaration) d).isVariable(); } static String getRefinementDescriptionFor(Declaration d, ProducedReference pr, Unit unit) { StringBuilder result = new StringBuilder("shared actual "); if (isVariable(d)) { result.append("variable "); } appendDeclarationHeaderDescription(d, pr, unit, result); appendTypeParameters(d, result); appendParametersDescription(d, pr, unit, result); /*result.append(" - refine declaration in ") .append(((Declaration) d.getContainer()).getName());*/ return result.toString(); } static String getInlineFunctionDescriptionFor(Parameter p, ProducedReference pr, Unit unit) { StringBuilder result = new StringBuilder(); appendNamedArgumentHeader(p, pr, result, true); appendTypeParameters(p.getModel(), result); appendParametersDescription(p.getModel(), pr, unit, result); return result.toString(); } public static String getLabelDescriptionFor(Declaration d) { StringBuilder result = new StringBuilder(); if (d!=null) { appendDeclarationAnnotations(d, result); appendDeclarationHeaderDescription(d, d.getUnit(), result); appendTypeParameters(d, result, true); appendParametersDescription(d, result, null); } return 
result.toString(); } private static void appendDeclarationAnnotations(Declaration d, StringBuilder result) { if (d.isActual()) result.append("actual "); if (d.isFormal()) result.append("formal "); if (d.isDefault()) result.append("default "); if (isVariable(d)) result.append("variable "); } public static String getDocDescriptionFor(Declaration d, ProducedReference pr, Unit unit) { StringBuilder result = new StringBuilder(); appendDeclarationHeaderDescription(d, pr, unit, result); appendTypeParameters(d, pr, result, true, unit); appendParametersDescription(d, pr, unit, result); return result.toString(); } public static StyledString getQualifiedDescriptionFor(Declaration d) { StyledString result = new StyledString(); if (d!=null) { appendDeclarationDescription(d, result); if (d.isClassOrInterfaceMember()) { Declaration ci = (Declaration) d.getContainer(); result.append(ci.getName(), Highlights.TYPE_ID_STYLER).append('.'); appendMemberName(d, result); } else { appendDeclarationName(d, result); } appendTypeParameters(d, result, true); appendParametersDescription(d, result); if (d instanceof TypedDeclaration) { if (EditorsUI.getPreferenceStore().getBoolean(DISPLAY_RETURN_TYPES)) { TypedDeclaration td = (TypedDeclaration) d; if (!td.isParameter() && !td.isDynamicallyTyped() && !(td instanceof Method && ((Method) td).isDeclaredVoid())) { ProducedType t = td.getType(); if (t!=null) { result.append(" ∊ "); appendTypeName(result, t, Highlights.ARROW_STYLER); } } } } /*result.append(" - refines declaration in ") .append(((Declaration) d.getContainer()).getName());*/ } return result; } public static StyledString getStyledDescriptionFor(Declaration d) { StyledString result = new StyledString(); if (d!=null) { appendDeclarationAnnotations(d, result); appendDeclarationDescription(d, result); appendDeclarationName(d, result); appendTypeParameters(d, result, true); appendParametersDescription(d, result); if (d instanceof TypedDeclaration) { if (EditorsUI.getPreferenceStore().getBoolean(DISPLAY_RETURN_TYPES)) { TypedDeclaration td = (TypedDeclaration) d; if (!td.isParameter() && !td.isDynamicallyTyped() && !(td instanceof Method && ((Method) td).isDeclaredVoid())) { ProducedType t = td.getType(); if (t!=null) { result.append(" ∊ "); appendTypeName(result, t, Highlights.ARROW_STYLER); } } } } /*result.append(" - refines declaration in ") .append(((Declaration) d.getContainer()).getName());*/ } return result; } private static void appendDeclarationAnnotations(Declaration d, StyledString result) { if (d.isActual()) result.append("actual ", Highlights.ANN_STYLER); if (d.isFormal()) result.append("formal ", Highlights.ANN_STYLER); if (d.isDefault()) result.append("default ", Highlights.ANN_STYLER); if (isVariable(d)) result.append("variable ", Highlights.ANN_STYLER); } public static void appendPositionalArgs(Declaration dec, Unit unit, StringBuilder result, boolean includeDefaulted, boolean descriptionOnly) { appendPositionalArgs(dec, dec.getReference(), unit, result, includeDefaulted, descriptionOnly); } private static void appendPositionalArgs(Declaration d, ProducedReference pr, Unit unit, StringBuilder result, boolean includeDefaulted, boolean descriptionOnly) { if (d instanceof Functional) { List<Parameter> params = getParameters((Functional) d, includeDefaulted, false); if (params.isEmpty()) { result.append("()"); } else { boolean paramTypes = descriptionOnly && EditorsUI.getPreferenceStore().getBoolean(DISPLAY_PARAMETER_TYPES); result.append("("); for (Parameter p: params) { ProducedTypedReference 
typedParameter = pr.getTypedParameter(p); if (p.getModel() instanceof Functional) { if (p.isDeclaredVoid()) { result.append("void "); } appendParameters(p.getModel(), typedParameter, unit, result, descriptionOnly); if (p.isDeclaredVoid()) { result.append(" {}"); } else { result.append(" => ") .append("nothing"); } } else { ProducedType pt = typedParameter.getType(); if (descriptionOnly && paramTypes && !isTypeUnknown(pt)) { if (p.isSequenced()) { pt = unit.getSequentialElementType(pt); } result.append(pt.getProducedTypeName(unit)); if (p.isSequenced()) { result.append(p.isAtLeastOne()?'+':'*'); } result.append(" "); } else if (p.isSequenced()) { result.append("*"); } result.append(descriptionOnly || p.getModel()==null ? p.getName() : escapeName(p.getModel())); } result.append(", "); } result.setLength(result.length()-2); result.append(")"); } } } static void appendSuperArgsText(Declaration d, ProducedReference pr, Unit unit, StringBuilder result, boolean includeDefaulted) { if (d instanceof Functional) { List<Parameter> params = getParameters((Functional) d, includeDefaulted, false); if (params.isEmpty()) { result.append("()"); } else { result.append("("); for (Parameter p: params) { if (p.isSequenced()) { result.append("*"); } result.append(escapeName(p.getModel())) .append(", "); } result.setLength(result.length()-2); result.append(")"); } } } private static List<Parameter> getParameters(Functional fd, boolean includeDefaults, boolean namedInvocation) { List<ParameterList> plists = fd.getParameterLists(); if (plists==null || plists.isEmpty()) { return Collections.<Parameter>emptyList(); } else { return CompletionUtil.getParameters(plists.get(0), includeDefaults, namedInvocation); } } private static void appendNamedArgs(Declaration d, ProducedReference pr, Unit unit, StringBuilder result, boolean includeDefaulted, boolean descriptionOnly) { if (d instanceof Functional) { List<Parameter> params = getParameters((Functional) d, includeDefaulted, true); if (params.isEmpty()) { result.append(" {}"); } else { boolean paramTypes = descriptionOnly && EditorsUI.getPreferenceStore().getBoolean(DISPLAY_PARAMETER_TYPES); result.append(" { "); for (Parameter p: params) { String name = descriptionOnly ? 
p.getName() : escapeName(p.getModel()); if (p.getModel() instanceof Functional) { if (p.isDeclaredVoid()) { result.append("void "); } else { if (paramTypes && !isTypeUnknown(p.getType())) { result.append(p.getType().getProducedTypeName(unit)).append(" "); } else { result.append("function "); } } result.append(name); appendParameters(p.getModel(), pr.getTypedParameter(p), unit, result, descriptionOnly); if (descriptionOnly) { result.append("; "); } else if (p.isDeclaredVoid()) { result.append(" {} "); } else { result.append(" => ") //.append(CeylonQuickFixAssistant.defaultValue(p.getUnit(), p.getType())) .append("nothing; "); } } else { if (p==params.get(params.size()-1) && !isTypeUnknown(p.getType()) && unit.isIterableParameterType(p.getType())) { // result.append(" "); } else { if (descriptionOnly && paramTypes && !isTypeUnknown(p.getType())) { result.append(p.getType().getProducedTypeName(unit)).append(" "); } result.append(name) .append(" = ") //.append(CeylonQuickFixAssistant.defaultValue(p.getUnit(), p.getType())) .append("nothing") .append("; "); } } } result.append("}"); } } } private static void appendTypeParameters(Declaration d, StringBuilder result) { appendTypeParameters(d, result, false); } private static void appendTypeParameters(Declaration d, StringBuilder result, boolean variances) { if (d instanceof Generic) { List<TypeParameter> types = ((Generic) d).getTypeParameters(); if (!types.isEmpty()) { result.append("<"); for (TypeParameter tp: types) { if (variances) { if (tp.isCovariant()) { result.append("out "); } if (tp.isContravariant()) { result.append("in "); } } result.append(tp.getName()).append(", "); } result.setLength(result.length()-2); result.append(">"); } } } private static void appendTypeParameters(Declaration d, ProducedReference pr, StringBuilder result, boolean variances, Unit unit) { if (d instanceof Generic) { List<TypeParameter> types = ((Generic) d).getTypeParameters(); if (!types.isEmpty()) { result.append("<"); boolean first = true; for (TypeParameter tp: types) { if (first) { first = false; } else { result.append(", "); } ProducedType arg = pr==null ? 
null : pr.getTypeArguments().get(tp); if (arg == null) { if (variances) { if (tp.isCovariant()) { result.append("out "); } if (tp.isContravariant()) { result.append("in "); } } result.append(tp.getName()); } else { if (pr instanceof ProducedType) { if (variances) { SiteVariance variance = ((ProducedType) pr).getVarianceOverrides().get(tp); if (variance==SiteVariance.IN) { result.append("in "); } if (variance==SiteVariance.OUT) { result.append("out "); } } } result.append(arg.getProducedTypeName(unit)); } } result.append(">"); } } } private static void appendTypeParameters(Declaration d, StyledString result, boolean variances) { if (d instanceof Generic) { List<TypeParameter> types = ((Generic) d).getTypeParameters(); if (!types.isEmpty()) { result.append("<"); int len = types.size(), i = 0; for (TypeParameter tp: types) { if (variances) { if (tp.isCovariant()) { result.append("out ", Highlights.KW_STYLER); } if (tp.isContravariant()) { result.append("in ", Highlights.KW_STYLER); } } result.append(tp.getName(), Highlights.TYPE_STYLER); if (++i<len) result.append(", "); } result.append(">"); } } } private static void appendDeclarationHeaderDescription(Declaration d, Unit unit, StringBuilder result) { appendDeclarationHeader(d, null, unit, result, true); } private static void appendDeclarationHeaderDescription(Declaration d, ProducedReference pr, Unit unit, StringBuilder result) { appendDeclarationHeader(d, pr, unit, result, true); } private static void appendDeclarationHeaderText(Declaration d, ProducedReference pr, Unit unit, StringBuilder result) { appendDeclarationHeader(d, pr, unit, result, false); } private static void appendDeclarationHeader(Declaration d, ProducedReference pr, Unit unit, StringBuilder result, boolean descriptionOnly) { if (d instanceof Class) { if (d.isAnonymous()) { result.append("object"); } else { result.append("class"); } } else if (d instanceof Interface) { result.append("interface"); } else if (d instanceof TypeAlias) { result.append("alias"); } else if (d instanceof TypedDeclaration) { TypedDeclaration td = (TypedDeclaration) d; boolean isSequenced = d.isParameter() && ((MethodOrValue) d).getInitializerParameter() .isSequenced(); ProducedType type; if (pr == null) { type = td.getType(); } else { type = pr.getType(); } if (isSequenced && type!=null) { type = unit.getIteratedType(type); } if (type==null) { type = new UnknownType(unit).getType(); } String typeName = type.getProducedTypeName(unit); if (td.isDynamicallyTyped()) { result.append("dynamic"); } else if (td instanceof Value && type.getDeclaration().isAnonymous()) { result.append("object"); } else if (d instanceof Method) { if (((Functional) d).isDeclaredVoid()) { result.append("void"); } else { result.append(typeName); } } else { result.append(typeName); } if (isSequenced) { if (((MethodOrValue) d).getInitializerParameter() .isAtLeastOne()) { result.append("+"); } else { result.append("*"); } } } result.append(" ") .append(descriptionOnly ? d.getName() : escapeName(d)); } private static void appendNamedArgumentHeader(Parameter p, ProducedReference pr, StringBuilder result, boolean descriptionOnly) { if (p.getModel() instanceof Functional) { Functional fp = (Functional) p.getModel(); result.append(fp.isDeclaredVoid() ? "void" : "function"); } else { result.append("value"); } result.append(" ") .append(descriptionOnly ? 
p.getName() : escapeName(p.getModel())); } private static void appendDeclarationDescription(Declaration d, StyledString result) { if (d instanceof Class) { if (d.isAnonymous()) { result.append("object", Highlights.KW_STYLER); } else { result.append("class", Highlights.KW_STYLER); } } else if (d instanceof Interface) { result.append("interface", Highlights.KW_STYLER); } else if (d instanceof TypeAlias) { result.append("alias", Highlights.KW_STYLER); } else if (d.isParameter()) { TypedDeclaration td = (TypedDeclaration) d; ProducedType type = td.getType(); if (td.isDynamicallyTyped()) { result.append("dynamic", Highlights.KW_STYLER); } else if (type!=null) { boolean isSequenced = //d.isParameter() && ((MethodOrValue) d).getInitializerParameter() .isSequenced(); if (isSequenced) { type = d.getUnit().getIteratedType(type); } /*if (td instanceof Value && td.getTypeDeclaration().isAnonymous()) { result.append("object", KW_STYLER); } else*/ if (d instanceof Method) { if (((Functional)d).isDeclaredVoid()) { result.append("void", Highlights.KW_STYLER); } else { appendTypeName(result, type); } } else { appendTypeName(result, type); } if (isSequenced) { result.append("*"); } } } else if (d instanceof Value) { Value v = (Value) d; if (v.isDynamicallyTyped()) { result.append("dynamic", Highlights.KW_STYLER); } else if (v.getTypeDeclaration()!=null && v.getTypeDeclaration().isAnonymous()) { result.append("object", Highlights.KW_STYLER); } else { result.append("value", Highlights.KW_STYLER); } } else if (d instanceof Method) { Method m = (Method) d; if (m.isDynamicallyTyped()) { result.append("dynamic", Highlights.KW_STYLER); } else if (m.isDeclaredVoid()) { result.append("void", Highlights.KW_STYLER); } else { result.append("function", Highlights.KW_STYLER); } } else if (d instanceof Setter) { result.append("assign", Highlights.KW_STYLER); } result.append(" "); } private static void appendMemberName(Declaration d, StyledString result) { String name = d.getName(); if (name != null) { if (d instanceof TypeDeclaration) { result.append(name, Highlights.TYPE_STYLER); } else { result.append(name, Highlights.MEMBER_STYLER); } } } private static void appendDeclarationName(Declaration d, StyledString result) { String name = d.getName(); if (name != null) { if (d instanceof TypeDeclaration) { result.append(name, Highlights.TYPE_STYLER); } else { result.append(name, Highlights.ID_STYLER); } } } /*private static void appendPackage(Declaration d, StringBuilder result) { if (d.isToplevel()) { result.append(" - ").append(getPackageLabel(d)); } if (d.isClassOrInterfaceMember()) { result.append(" - "); ClassOrInterface td = (ClassOrInterface) d.getContainer(); result.append( td.getName() ); appendPackage(td, result); } }*/ private static void appendImplText(Declaration d, ProducedReference pr, boolean isInterface, Unit unit, String indent, StringBuilder result, ClassOrInterface ci) { if (d instanceof Method) { if (ci!=null && !ci.isAnonymous()) { if (d.getName().equals("equals")) { List<ParameterList> pl = ((Method) d).getParameterLists(); if (!pl.isEmpty()) { List<Parameter> ps = pl.get(0).getParameters(); if (!ps.isEmpty()) { appendEqualsImpl(unit, indent, result, ci, ps); return; } } } } if (!d.isFormal()) { result.append(" => super.").append(d.getName()); appendSuperArgsText(d, pr, unit, result, true); result.append(";"); } else { if (((Functional) d).isDeclaredVoid()) { result.append(" {}"); } else { result.append(" => nothing;"); } } } else if (d instanceof Value) { if (ci!=null && !ci.isAnonymous()) { if 
(d.getName().equals("hash")) { appendHashImpl(unit, indent, result, ci); return; } } if (isInterface/*||d.isParameter()*/) { //interfaces can't have references, //so generate a setter for variables if (d.isFormal()) { result.append(" => nothing;"); } else { result.append(" => super.") .append(d.getName()).append(";"); } if (isVariable(d)) { result.append(indent) .append("assign ").append(d.getName()) .append(" {}"); } } else { //we can have a references, so use = instead //of => for variables String arrow = isVariable(d) ? " = " : " => "; if (d.isFormal()) { result.append(arrow).append("nothing;"); } else { result.append(arrow) .append("super.").append(d.getName()) .append(";"); } } } else { //TODO: in the case of a class, formal member refinements! result.append(" {}"); } } private static void appendHashImpl(Unit unit, String indent, StringBuilder result, ClassOrInterface ci) { result.append(" {") .append(indent).append(getDefaultIndent()) .append("variable value hash = 1;") .append(indent).append(getDefaultIndent()); String ind = indent+getDefaultIndent(); appendMembersToHash(unit, ind, result, ci); result.append("return hash;") .append(indent) .append("}"); } private static void appendEqualsImpl(Unit unit, String indent, StringBuilder result, ClassOrInterface ci, List<Parameter> ps) { Parameter p = ps.get(0); result.append(" {") .append(indent).append(getDefaultIndent()) .append("if (is ").append(ci.getName()).append(" ").append(p.getName()).append(") {") .append(indent).append(getDefaultIndent()).append(getDefaultIndent()) .append("return "); String ind = indent+getDefaultIndent()+getDefaultIndent()+getDefaultIndent(); appendMembersToEquals(unit, ind, result, ci, p); result.append(indent).append(getDefaultIndent()) .append("}") .append(indent).append(getDefaultIndent()) .append("else {") .append(indent).append(getDefaultIndent()).append(getDefaultIndent()) .append("return false;") .append(indent).append(getDefaultIndent()) .append("}") .append(indent) .append("}"); } private static boolean isObjectField(Declaration m) { return m.getName()!=null && m.getName().equals("hash") || m.getName().equals("string"); } private static void appendMembersToEquals(Unit unit, String indent, StringBuilder result, ClassOrInterface ci, Parameter p) { boolean found = false; for (Declaration m: ci.getMembers()) { if (m instanceof Value && !isObjectField(m)) { Value value = (Value) m; if (!value.isTransient()) { if (!unit.getNullValueDeclaration().getType() .isSubtypeOf(value.getType())) { result.append(value.getName()) .append("==") .append(p.getName()) .append(".") .append(value.getName()) .append(" && ") .append(indent); found = true; } } } } if (found) { result.setLength(result.length()-4-indent.length()); result.append(";"); } else { result.append("true;"); } } private static void appendMembersToHash(Unit unit, String indent, StringBuilder result, ClassOrInterface ci) { for (Declaration m: ci.getMembers()) { if (m instanceof Value && !isObjectField(m)) { Value value = (Value) m; if (!value.isTransient()) { if (!unit.getNullValueDeclaration().getType() .isSubtypeOf(value.getType())) { result.append("hash = 31*hash + ") .append(value.getName()) .append(".hash;") .append(indent); } } } } } private static String extraIndent(String indent, boolean containsNewline) { return containsNewline ? 
indent + getDefaultIndent() : indent; } public static void appendParametersDescription(Declaration d, StringBuilder result, CeylonParseController cpc) { appendParameters(d, null, d.getUnit(), result, cpc, true); } public static void appendParametersText(Declaration d, ProducedReference pr, Unit unit, StringBuilder result) { appendParameters(d, pr, unit, result, null, false); } private static void appendParametersDescription(Declaration d, ProducedReference pr, Unit unit, StringBuilder result) { appendParameters(d, pr, unit, result, null, true); } private static void appendParameters(Declaration d, ProducedReference pr, Unit unit, StringBuilder result, boolean descriptionOnly) { appendParameters(d, pr, unit, result, null, descriptionOnly); } private static void appendParameters(Declaration d, ProducedReference pr, Unit unit, StringBuilder result, CeylonParseController cpc, boolean descriptionOnly) { if (d instanceof Functional) { List<ParameterList> plists = ((Functional) d).getParameterLists(); if (plists!=null) { for (ParameterList params: plists) { if (params.getParameters().isEmpty()) { result.append("()"); } else { result.append("("); for (Parameter p: params.getParameters()) { appendParameter(result, pr, p, unit, descriptionOnly); if (cpc!=null) { result.append(getDefaultValueDescription(p, cpc)); } result.append(", "); } result.setLength(result.length()-2); result.append(")"); } } } } } public static void appendParameterText(StringBuilder result, ProducedReference pr, Parameter p, Unit unit) { appendParameter(result, pr, p, unit, false); } private static void appendParameter(StringBuilder result, ProducedReference pr, Parameter p, Unit unit, boolean descriptionOnly) { if (p.getModel() == null) { result.append(p.getName()); } else { ProducedTypedReference ppr = pr==null ? null : pr.getTypedParameter(p); appendDeclarationHeader(p.getModel(), ppr, unit, result, descriptionOnly); appendParameters(p.getModel(), ppr, unit, result, descriptionOnly); } } public static void appendParameterContextInfo(StringBuilder result, ProducedReference pr, Parameter p, Unit unit, boolean namedInvocation, boolean isListedValues) { if (p.getModel() == null) { result.append(p.getName()); } else { ProducedTypedReference ppr = pr==null ? null : pr.getTypedParameter(p); String typeName; ProducedType type = ppr.getType(); if (isListedValues && namedInvocation) { ProducedType et = unit.getIteratedType(type); typeName = et.getProducedTypeName(unit); if (unit.isEntryType(et)) { typeName = '<' + typeName + '>'; } typeName += unit.isNonemptyIterableType(type) ? '+' : '*'; } else if (p.isSequenced() && !namedInvocation) { ProducedType et = unit.getSequentialElementType(type); typeName = et.getProducedTypeName(unit); if (unit.isEntryType(et)) { typeName = '<' + typeName + '>'; } typeName += p.isAtLeastOne() ? '+' : '*'; } else { typeName = type.getProducedTypeName(unit); } result.append(typeName).append(" ").append(p.getName()); appendParametersDescription(p.getModel(), ppr, unit, result); } if (namedInvocation && !isListedValues) { result.append(p.getModel() instanceof Method ? " => ... " : " = ... 
" ); } } private static void appendParametersDescription(Declaration d, StyledString result) { if (d instanceof Functional) { List<ParameterList> plists = ((Functional) d).getParameterLists(); if (plists!=null) { for (ParameterList params: plists) { if (params.getParameters().isEmpty()) { result.append("()"); } else { result.append("("); int len = params.getParameters().size(), i=0; for (Parameter p: params.getParameters()) { if (p.getModel()==null) { result.append(p.getName()); } else { appendDeclarationDescription(p.getModel(), result); appendDeclarationName(p.getModel(), result); appendParametersDescription(p.getModel(), result); /*result.append(p.getType().getProducedTypeName(), TYPE_STYLER) .append(" ").append(p.getName(), ID_STYLER); if (p instanceof FunctionalParameter) { result.append("("); FunctionalParameter fp = (FunctionalParameter) p; List<Parameter> fpl = fp.getParameterLists().get(0).getParameters(); int len2 = fpl.size(), j=0; for (Parameter pp: fpl) { result.append(pp.getType().getProducedTypeName(), TYPE_STYLER) .append(" ").append(pp.getName(), ID_STYLER); if (++j<len2) result.append(", "); } result.append(")"); }*/ } if (++i<len) result.append(", "); } result.append(")"); } } } } } }
1no label
plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_complete_CodeCompletions.java
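Several of the appendPositionalArgs/appendNamedArgs/appendParameters methods above build comma-separated lists by appending ", " after every item and then trimming the trailing two characters with setLength. A standalone sketch of that join-and-trim idiom; the method and class names here are invented for illustration:

import java.util.*;

public class JoinAndTrimSketch {
    // The append-then-setLength idiom used throughout CodeCompletions to build
    // "(a, b)" style argument lists.
    static String parenthesizedList(List<String> names) {
        if (names.isEmpty()) {
            return "()";
        }
        StringBuilder result = new StringBuilder("(");
        for (String name : names) {
            result.append(name).append(", ");
        }
        result.setLength(result.length() - 2);   // drop the trailing ", "
        return result.append(")").toString();
    }

    public static void main(String[] args) {
        System.out.println(parenthesizedList(Arrays.asList("name", "age")));   // (name, age)
        System.out.println(parenthesizedList(Collections.<String>emptyList())); // ()
    }
}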
3,634
public static class Builder extends AbstractFieldMapper.Builder<Builder, GeoPointFieldMapper> { private ContentPath.Type pathType = Defaults.PATH_TYPE; private boolean enableGeoHash = Defaults.ENABLE_GEOHASH; private boolean enableGeohashPrefix = Defaults.ENABLE_GEOHASH_PREFIX; private boolean enableLatLon = Defaults.ENABLE_LATLON; private Integer precisionStep; private int geoHashPrecision = Defaults.GEO_HASH_PRECISION; boolean validateLat = Defaults.VALIDATE_LAT; boolean validateLon = Defaults.VALIDATE_LON; boolean normalizeLat = Defaults.NORMALIZE_LAT; boolean normalizeLon = Defaults.NORMALIZE_LON; public Builder(String name) { super(name, new FieldType(Defaults.FIELD_TYPE)); this.builder = this; } public Builder multiFieldPathType(ContentPath.Type pathType) { this.pathType = pathType; return this; } public Builder enableGeoHash(boolean enableGeoHash) { this.enableGeoHash = enableGeoHash; return this; } public Builder geohashPrefix(boolean enableGeohashPrefix) { this.enableGeohashPrefix = enableGeohashPrefix; return this; } public Builder enableLatLon(boolean enableLatLon) { this.enableLatLon = enableLatLon; return this; } public Builder precisionStep(int precisionStep) { this.precisionStep = precisionStep; return this; } public Builder geoHashPrecision(int precision) { this.geoHashPrecision = precision; return this; } public Builder fieldDataSettings(Settings settings) { this.fieldDataSettings = settings; return builder; } @Override public GeoPointFieldMapper build(BuilderContext context) { ContentPath.Type origPathType = context.path().pathType(); context.path().pathType(pathType); DoubleFieldMapper latMapper = null; DoubleFieldMapper lonMapper = null; context.path().add(name); if (enableLatLon) { NumberFieldMapper.Builder<?, ?> latMapperBuilder = doubleField(Names.LAT).includeInAll(false); NumberFieldMapper.Builder<?, ?> lonMapperBuilder = doubleField(Names.LON).includeInAll(false); if (precisionStep != null) { latMapperBuilder.precisionStep(precisionStep); lonMapperBuilder.precisionStep(precisionStep); } latMapper = (DoubleFieldMapper) latMapperBuilder.includeInAll(false).store(fieldType.stored()).build(context); lonMapper = (DoubleFieldMapper) lonMapperBuilder.includeInAll(false).store(fieldType.stored()).build(context); } StringFieldMapper geohashMapper = null; if (enableGeoHash) { geohashMapper = stringField(Names.GEOHASH).index(true).tokenized(false).includeInAll(false).omitNorms(true).indexOptions(IndexOptions.DOCS_ONLY).build(context); } context.path().remove(); context.path().pathType(origPathType); // this is important: even if geo points feel like they need to be tokenized to distinguish lat from lon, we actually want to // store them as a single token. fieldType.setTokenized(false); return new GeoPointFieldMapper(buildNames(context), fieldType, docValues, indexAnalyzer, searchAnalyzer, postingsProvider, docValuesProvider, similarity, fieldDataSettings, context.indexSettings(), origPathType, enableLatLon, enableGeoHash, enableGeohashPrefix, precisionStep, geoHashPrecision, latMapper, lonMapper, geohashMapper, validateLon, validateLat, normalizeLon, normalizeLat , multiFieldsBuilder.build(this, context)); } }
0true
src_main_java_org_elasticsearch_index_mapper_geo_GeoPointFieldMapper.java
300
public class ValidateActions { public static ActionRequestValidationException addValidationError(String error, ActionRequestValidationException validationException) { if (validationException == null) { validationException = new ActionRequestValidationException(); } validationException.addValidationError(error); return validationException; } }
0true
src_main_java_org_elasticsearch_action_ValidateActions.java
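addValidationError above lazily creates the exception on the first error and then keeps accumulating, so a caller can chain several checks and return null when everything is valid. A usage sketch of that pattern; it assumes the Elasticsearch classes shown in the record are on the classpath, and the fields being checked are made up for illustration:

import org.elasticsearch.action.ActionRequestValidationException;
import static org.elasticsearch.action.ValidateActions.addValidationError;

public class ValidateSketch {
    static ActionRequestValidationException validate(String index, String id) {
        ActionRequestValidationException validationException = null;   // stays null if all checks pass
        if (index == null) {
            validationException = addValidationError("index is missing", validationException);
        }
        if (id == null) {
            validationException = addValidationError("id is missing", validationException);
        }
        return validationException;
    }

    public static void main(String[] args) {
        System.out.println(validate(null, null));   // prints both accumulated validation errors
    }
}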
1,391
@SuppressWarnings("serial") public abstract class OMVRBTreePersistent<K, V> extends OMVRBTree<K, V> { protected OMVRBTreeProvider<K, V> dataProvider; protected ORecord<?> owner; protected final Set<OMVRBTreeEntryPersistent<K, V>> recordsToCommit = new HashSet<OMVRBTreeEntryPersistent<K, V>>(); // STORES IN MEMORY DIRECT REFERENCES TO PORTION OF THE TREE protected volatile int optimization = 0; protected int entryPointsSize; protected float optimizeEntryPointsFactor; private final TreeMap<K, OMVRBTreeEntryPersistent<K, V>> entryPoints; private final Map<ORID, OMVRBTreeEntryPersistent<K, V>> cache; protected static final OProfilerMBean PROFILER = Orient.instance().getProfiler(); private static final int OPTIMIZE_MAX_RETRY = 10; public OMVRBTreePersistent(OMVRBTreeProvider<K, V> iProvider) { super(); cache = new OLimitedMap<ORID, OMVRBTreeEntryPersistent<K, V>>(256, 0.90f, OGlobalConfiguration.MVRBTREE_OPTIMIZE_THRESHOLD.getValueAsInteger()) { /** * Set the optimization rather than remove eldest element. */ @Override protected boolean removeEldestEntry(final Map.Entry<ORID, OMVRBTreeEntryPersistent<K, V>> eldest) { if (super.removeEldestEntry(eldest)) // TOO MANY ITEMS: SET THE OPTIMIZATION setOptimization(2); return false; } }; if (comparator != null) entryPoints = new TreeMap<K, OMVRBTreeEntryPersistent<K, V>>(comparator); else entryPoints = new TreeMap<K, OMVRBTreeEntryPersistent<K, V>>(); pageLoadFactor = (Float) OGlobalConfiguration.MVRBTREE_LOAD_FACTOR.getValue(); dataProvider = iProvider; config(); } public OMVRBTreePersistent(OMVRBTreeProvider<K, V> iProvider, int keySize) { this(iProvider); this.keySize = keySize; dataProvider.setKeySize(keySize); } @Override protected OMVRBTreeEntryPersistent<K, V> createEntry(OMVRBTreeEntry<K, V> iParent) { adjustPageSize(); return new OMVRBTreeEntryPersistent<K, V>(iParent, iParent.getPageSplitItems()); } @Override protected OMVRBTreeEntryPersistent<K, V> createEntry(final K key, final V value) { adjustPageSize(); return new OMVRBTreeEntryPersistent<K, V>(this, key, value, null); } /** * Create a new entry for {@link #loadEntry(OMVRBTreeEntryPersistent, ORID)}. */ protected OMVRBTreeEntryPersistent<K, V> createEntry(OMVRBTreeEntryPersistent<K, V> iParent, ORID iRecordId) { return new OMVRBTreeEntryPersistent<K, V>(this, iParent, iRecordId); } public OMVRBTreePersistent<K, V> load() { dataProvider.load(); // RESET LAST SEARCH STATE setLastSearchNode(null, null); keySize = dataProvider.getKeySize(); // LOAD THE ROOT OBJECT AFTER ALL final ORID rootRid = dataProvider.getRoot(); if (rootRid != null && rootRid.isValid()) root = loadEntry(null, rootRid); return this; } protected void initAfterLoad() throws IOException { } public OMVRBTreePersistent<K, V> save() { commitChanges(); return this; } protected void saveTreeNode() throws IOException { if (root != null) { OMVRBTreeEntryPersistent<K, V> pRoot = (OMVRBTreeEntryPersistent<K, V>) root; if (pRoot.getProvider().getIdentity().isNew()) { // FIRST TIME: SAVE IT pRoot.save(); } } dataProvider.save(); } /** * Lazy loads a node. 
*/ protected OMVRBTreeEntryPersistent<K, V> loadEntry(final OMVRBTreeEntryPersistent<K, V> iParent, final ORID iRecordId) { // SEARCH INTO THE CACHE OMVRBTreeEntryPersistent<K, V> entry = searchNodeInCache(iRecordId); if (entry == null) { // NOT FOUND: CREATE IT AND PUT IT INTO THE CACHE entry = createEntry(iParent, iRecordId); addNodeInMemory(entry); // RECONNECT THE LOADED NODE WITH IN-MEMORY PARENT, LEFT AND RIGHT if (entry.parent == null && entry.dataProvider.getParent().isValid()) { // TRY TO ASSIGN THE PARENT IN CACHE IF ANY final OMVRBTreeEntryPersistent<K, V> parentNode = searchNodeInCache(entry.dataProvider.getParent()); if (parentNode != null) entry.setParent(parentNode); } if (entry.left == null && entry.dataProvider.getLeft().isValid()) { // TRY TO ASSIGN THE PARENT IN CACHE IF ANY final OMVRBTreeEntryPersistent<K, V> leftNode = searchNodeInCache(entry.dataProvider.getLeft()); if (leftNode != null) entry.setLeft(leftNode); } if (entry.right == null && entry.dataProvider.getRight().isValid()) { // TRY TO ASSIGN THE PARENT IN CACHE IF ANY final OMVRBTreeEntryPersistent<K, V> rightNode = searchNodeInCache(entry.dataProvider.getRight()); if (rightNode != null) entry.setRight(rightNode); } } else { // COULD BE A PROBLEM BECAUSE IF A NODE IS DISCONNECTED CAN IT STAY IN CACHE? // entry.load(); if (iParent != null) // FOUND: ASSIGN IT ONLY IF NOT NULL entry.setParent(iParent); } entry.checkEntryStructure(); return entry; } @Override protected int getTreeSize() { return dataProvider.getSize(); } protected void setSize(final int iSize) { if (dataProvider.setSize(iSize)) markDirty(); } public int getDefaultPageSize() { return dataProvider.getDefaultPageSize(); } @Override public void clear() { final long timer = PROFILER.startChrono(); try { recordsToCommit.clear(); entryPoints.clear(); cache.clear(); if (root != null) try { ((OMVRBTreeEntryPersistent<K, V>) root).delete(); } catch (Exception e) { // IGNORE ANY EXCEPTION dataProvider = dataProvider.copy(); } super.clear(); markDirty(); } finally { PROFILER.stopChrono(PROFILER.getProcessMetric("mvrbtree.clear"), "Clear a MVRBTree", timer); } } public void delete() { clear(); dataProvider.delete(); } /** * Unload all the in-memory nodes. This is called on transaction rollback. */ public void unload() { final long timer = PROFILER.startChrono(); try { // DISCONNECT ALL THE NODES for (OMVRBTreeEntryPersistent<K, V> entryPoint : entryPoints.values()) entryPoint.disconnectLinked(true); entryPoints.clear(); cache.clear(); recordsToCommit.clear(); root = null; final ODatabaseRecord db = ODatabaseRecordThreadLocal.INSTANCE.getIfDefined(); if (db != null && !db.isClosed() && db.getStorage() instanceof OStorageEmbedded) { // RELOAD IT try { load(); } catch (Exception e) { // IGNORE IT } } } catch (Exception e) { OLogManager.instance().error(this, "Error on unload the tree: " + dataProvider, e, OStorageException.class); } finally { PROFILER.stopChrono(PROFILER.getProcessMetric("mvrbtree.unload"), "Unload a MVRBTree", timer); } } /** * Calls the optimization in soft mode: free resources only if needed. */ protected void optimize() { optimize(false); } /** * Optimizes the memory needed by the tree in memory by reducing the number of entries to the configured size. 
* * @return The total freed nodes */ public int optimize(final boolean iForce) { if (optimization == -1) // IS ALREADY RUNNING return 0; if (!iForce && optimization == 0) // NO OPTIMIZATION IS NEEDED return 0; // SET OPTIMIZATION STATUS AS RUNNING optimization = -1; final long timer = PROFILER.startChrono(); try { if (root == null) return 0; if (OLogManager.instance().isDebugEnabled()) OLogManager.instance().debug(this, "Starting optimization of MVRB+Tree with %d items in memory...", cache.size()); // printInMemoryStructure(); if (entryPoints.size() == 0) // FIRST TIME THE LIST IS NULL: START FROM ROOT addNodeAsEntrypoint((OMVRBTreeEntryPersistent<K, V>) root); // RECONFIG IT TO CATCH CHANGED VALUES config(); if (OLogManager.instance().isDebugEnabled()) OLogManager.instance().debug(this, "Found %d items on disk, threshold=%f, entryPoints=%d, nodesInCache=%d", size(), (entryPointsSize * optimizeEntryPointsFactor), entryPoints.size(), cache.size()); final int nodesInMemory = cache.size(); if (!iForce && nodesInMemory < entryPointsSize * optimizeEntryPointsFactor) // UNDER THRESHOLD AVOID TO OPTIMIZE return 0; lastSearchFound = false; lastSearchKey = null; lastSearchNode = null; int totalDisconnected = 0; if (nodesInMemory > entryPointsSize) { // REDUCE THE ENTRYPOINTS final int distance = nodesInMemory / entryPointsSize + 1; final Set<OMVRBTreeEntryPersistent<K, V>> entryPointsToRemove = new HashSet<OMVRBTreeEntryPersistent<K, V>>(nodesInMemory - entryPointsSize + 2); // REMOVE ENTRYPOINTS AT THE SAME DISTANCE int currNode = 0; for (final Iterator<OMVRBTreeEntryPersistent<K, V>> it = entryPoints.values().iterator(); it.hasNext();) { final OMVRBTreeEntryPersistent<K, V> currentNode = it.next(); // JUMP THE FIRST (1 cannot never be the % of distance) THE LAST, ROOT AND LAST USED // RECORDS THAT WERE CREATED INSIDE OF TRANSACTION CAN'T BE REMOVED TILL COMMIT if (currentNode != root && currentNode != lastSearchNode && !currentNode.dataProvider.getIdentity().isTemporary() && it.hasNext()) if (++currNode % distance != 0) { // REMOVE THE NODE entryPointsToRemove.add(currentNode); it.remove(); } } addNodeAsEntrypoint((OMVRBTreeEntryPersistent<K, V>) lastSearchNode); addNodeAsEntrypoint((OMVRBTreeEntryPersistent<K, V>) root); // DISCONNECT THE REMOVED NODES for (OMVRBTreeEntryPersistent<K, V> currentNode : entryPointsToRemove) totalDisconnected += currentNode.disconnectLinked(false); cache.clear(); for (OMVRBTreeEntryPersistent<K, V> entry : entryPoints.values()) addNodeInCache(entry); } if (isRuntimeCheckEnabled()) { for (OMVRBTreeEntryPersistent<K, V> entryPoint : entryPoints.values()) for (OMVRBTreeEntryPersistent<K, V> e = (OMVRBTreeEntryPersistent<K, V>) entryPoint.getFirstInMemory(); e != null; e = e .getNextInMemory()) e.checkEntryStructure(); } // COUNT ALL IN-MEMORY NODES BY BROWSING ALL THE ENTRYPOINT NODES if (OLogManager.instance().isDebugEnabled()) OLogManager.instance().debug(this, "After optimization: %d items on disk, threshold=%f, entryPoints=%d, nodesInCache=%d", size(), (entryPointsSize * optimizeEntryPointsFactor), entryPoints.size(), cache.size()); if (debug) { int i = 0; System.out.println(); for (OMVRBTreeEntryPersistent<K, V> entryPoint : entryPoints.values()) System.out.println("- Entrypoint " + ++i + "/" + entryPoints.size() + ": " + entryPoint); } return totalDisconnected; } finally { optimization = 0; if (isRuntimeCheckEnabled()) { if (!entryPoints.isEmpty()) for (OMVRBTreeEntryPersistent<K, V> entryPoint : entryPoints.values()) 
checkTreeStructure(entryPoint.getFirstInMemory()); else checkTreeStructure(root); } PROFILER.stopChrono(PROFILER.getProcessMetric("mvrbtree.optimize"), "Optimize a MVRBTree", timer); if (OLogManager.instance().isDebugEnabled()) OLogManager.instance().debug(this, "Optimization completed in %d ms\n", System.currentTimeMillis() - timer); } } @Override public OMVRBTreeEntry<K, V> getCeilingEntry(K key, PartialSearchMode partialSearchMode) { for (int i = 0; i < OPTIMIZE_MAX_RETRY; ++i) { try { return super.getCeilingEntry(key, partialSearchMode); } catch (OLowMemoryException e) { OLogManager.instance().debug(this, "Optimization required during node search %d/%d", i, OPTIMIZE_MAX_RETRY); freeMemory(i); } } throw new OLowMemoryException("OMVRBTreePersistent.getCeilingEntry()"); } @Override public OMVRBTreeEntry<K, V> getFloorEntry(K key, PartialSearchMode partialSearchMode) { for (int i = 0; i < OPTIMIZE_MAX_RETRY; ++i) { try { return super.getFloorEntry(key, partialSearchMode); } catch (OLowMemoryException e) { OLogManager.instance().debug(this, "Optimization required during node search %d/%d", i, OPTIMIZE_MAX_RETRY); freeMemory(i); } } throw new OLowMemoryException("OMVRBTreePersistent.getFloorEntry()"); } @Override public OMVRBTreeEntry<K, V> getHigherEntry(K key) { for (int i = 0; i < OPTIMIZE_MAX_RETRY; ++i) { try { return super.getHigherEntry(key); } catch (OLowMemoryException e) { OLogManager.instance().debug(this, "Optimization required during node search %d/%d", i, OPTIMIZE_MAX_RETRY); freeMemory(i); } } throw new OLowMemoryException("OMVRBTreePersistent.getHigherEntry)"); } @Override public OMVRBTreeEntry<K, V> getLowerEntry(K key) { for (int i = 0; i < OPTIMIZE_MAX_RETRY; ++i) { try { return super.getLowerEntry(key); } catch (OLowMemoryException e) { OLogManager.instance().debug(this, "Optimization required during node search %d/%d", i, OPTIMIZE_MAX_RETRY); freeMemory(i); } } throw new OLowMemoryException("OMVRBTreePersistent.getLowerEntry()"); } @Override public V put(final K key, final V value) { optimize(); final long timer = PROFILER.startChrono(); try { final V v = internalPut(key, value); commitChanges(); return v; } finally { PROFILER.stopChrono(PROFILER.getProcessMetric("mvrbtree.put"), "Put a value into a MVRBTree", timer); } } @Override public void putAll(final Map<? extends K, ? extends V> map) { final long timer = PROFILER.startChrono(); try { for (Entry<? extends K, ? 
extends V> entry : map.entrySet()) internalPut(entry.getKey(), entry.getValue()); commitChanges(); } finally { PROFILER.stopChrono(PROFILER.getProcessMetric("mvrbtree.putAll"), "Put multiple values into a MVRBTree", timer); } } @Override public V remove(final Object key) { optimize(); final long timer = PROFILER.startChrono(); try { for (int i = 0; i < OPTIMIZE_MAX_RETRY; ++i) { try { V v = super.remove(key); commitChanges(); return v; } catch (OLowMemoryException e) { OLogManager.instance().debug(this, "Optimization required during remove %d/%d", i, OPTIMIZE_MAX_RETRY); freeMemory(i); // AVOID CONTINUE EXCEPTIONS optimization = -1; } } } finally { PROFILER.stopChrono(PROFILER.getProcessMetric("mvrbtree.remove"), "Remove a value from a MVRBTree", timer); } throw new OLowMemoryException("OMVRBTreePersistent.remove()"); } public int commitChanges() { final long timer = PROFILER.startChrono(); int totalCommitted = 0; try { if (!recordsToCommit.isEmpty()) { final List<OMVRBTreeEntryPersistent<K, V>> tmp = new ArrayList<OMVRBTreeEntryPersistent<K, V>>(); while (recordsToCommit.iterator().hasNext()) { // COMMIT BEFORE THE NEW RECORDS (TO ASSURE RID IN RELATIONSHIPS) tmp.addAll(recordsToCommit); recordsToCommit.clear(); for (OMVRBTreeEntryPersistent<K, V> node : tmp) if (node.dataProvider.isEntryDirty()) { boolean wasNew = node.dataProvider.getIdentity().isNew(); // CREATE THE RECORD node.save(); if (debug) System.out.printf("\nSaved %s tree node %s: parent %s, left %s, right %s", wasNew ? "new" : "", node.dataProvider.getIdentity(), node.dataProvider.getParent(), node.dataProvider.getLeft(), node.dataProvider.getRight()); } totalCommitted += tmp.size(); tmp.clear(); } } if (dataProvider.isDirty()) // TREE IS CHANGED AS WELL saveTreeNode(); } catch (IOException e) { OLogManager.instance().exception("Error on saving the tree", e, OStorageException.class); } finally { PROFILER.stopChrono(PROFILER.getProcessMetric("mvrbtree.commitChanges"), "Commit pending changes to a MVRBTree", timer); } return totalCommitted; } public void signalNodeChanged(final OMVRBTreeEntry<K, V> iNode) { recordsToCommit.add((OMVRBTreeEntryPersistent<K, V>) iNode); } @Override public int hashCode() { return dataProvider.hashCode(); } protected void adjustPageSize() { } @Override public V get(final Object iKey) { final long timer = PROFILER.startChrono(); try { for (int i = 0; i < OPTIMIZE_MAX_RETRY; ++i) { try { return super.get(iKey); } catch (OLowMemoryException e) { OLogManager.instance().debug(this, "Optimization required during node search %d/%d", i, OPTIMIZE_MAX_RETRY); freeMemory(i); } } throw new OLowMemoryException("OMVRBTreePersistent.get()"); } finally { PROFILER.stopChrono(PROFILER.getProcessMetric("mvrbtree.get"), "Get a value from a MVRBTree", timer); } } @Override public boolean containsKey(final Object iKey) { for (int i = 0; i < OPTIMIZE_MAX_RETRY; ++i) { try { return super.containsKey(iKey); } catch (OLowMemoryException e) { OLogManager.instance().debug(this, "Optimization required during node search %d/%d", i, OPTIMIZE_MAX_RETRY); freeMemory(i); } } throw new OLowMemoryException("OMVRBTreePersistent.containsKey()"); } @Override public boolean containsValue(final Object iValue) { for (int i = 0; i < OPTIMIZE_MAX_RETRY; ++i) { try { return super.containsValue(iValue); } catch (OLowMemoryException e) { OLogManager.instance().debug(this, "Optimization required during node search %d/%d", i, OPTIMIZE_MAX_RETRY); freeMemory(i); } } throw new OLowMemoryException("OMVRBTreePersistent.containsValue()"); } public 
OMVRBTreeProvider<K, V> getProvider() { return dataProvider; } public int getOptimization() { return optimization; } /** * Set the optimization to be executed at the next call. * * @param iMode * <ul> * <li>-1 = ALREADY RUNNING</li> * <li>0 = NO OPTIMIZATION (DEFAULT)</li> * <li>1 = SOFT MODE</li> * <li>2 = HARD MODE</li> * </ul> */ public void setOptimization(final int iMode) { if (iMode > 0 && optimization == -1) // IGNORE IT, ALREADY RUNNING return; optimization = iMode; } /** * Checks if optimization is needed by raising a {@link OLowMemoryException}. */ @Override protected void searchNodeCallback() { if (optimization > 0) throw new OLowMemoryException("Optimization level: " + optimization); } public int getEntryPointSize() { return entryPointsSize; } public void setEntryPointSize(final int entryPointSize) { this.entryPointsSize = entryPointSize; } @Override public String toString() { final StringBuilder buffer = new StringBuilder().append('['); if (size() < 10) { OMVRBTreeEntry<K, V> current = getFirstEntry(); for (int i = 0; i < 10 && current != null; ++i) { if (i > 0) buffer.append(','); buffer.append(current); current = next(current); } } else { buffer.append("size="); final int size = size(); buffer.append(size); final OMVRBTreeEntry<K, V> firstEntry = getFirstEntry(); if (firstEntry != null) { final int currPageIndex = pageIndex; buffer.append(" "); buffer.append(firstEntry.getFirstKey()); if (size > 1) { buffer.append("-"); buffer.append(getLastEntry().getLastKey()); } pageIndex = currPageIndex; } } return buffer.append(']').toString(); } protected V internalPut(final K key, final V value) throws OLowMemoryException { ORecordInternal<?> rec; if (key instanceof ORecordInternal<?>) { // RECORD KEY: ASSURE IT'S PERSISTENT TO AVOID STORING INVALID RIDs rec = (ORecordInternal<?>) key; if (!rec.getIdentity().isValid()) rec.save(); } if (value instanceof ORecordInternal<?>) { // RECORD VALUE: ASSURE IT'S PERSISTENT TO AVOID STORING INVALID RIDs rec = (ORecordInternal<?>) value; if (!rec.getIdentity().isValid()) rec.save(); } for (int i = 0; i < OPTIMIZE_MAX_RETRY; ++i) { try { return super.put(key, value); } catch (OLowMemoryException e) { OLogManager.instance().debug(this, "Optimization required during put %d/%d", i, OPTIMIZE_MAX_RETRY); freeMemory(i); } } throw new OLowMemoryException("OMVRBTreePersistent.put()"); } /** * Returns the best entry point to start the search. Searches first between entrypoints. If nothing is found "root" is always * returned. */ @Override protected OMVRBTreeEntry<K, V> getBestEntryPoint(final K iKey) { if (!entryPoints.isEmpty()) { // SEARCHES EXACT OR BIGGER ENTRY Entry<K, OMVRBTreeEntryPersistent<K, V>> closerNode = entryPoints.floorEntry(iKey); if (closerNode != null) return closerNode.getValue(); // NO WAY: TRY WITH ANY NODE BEFORE THE KEY closerNode = entryPoints.ceilingEntry(iKey); if (closerNode != null) return closerNode.getValue(); } // USE ROOT return super.getBestEntryPoint(iKey); } /** * Remove an entry point from the list */ void removeEntryPoint(final OMVRBTreeEntryPersistent<K, V> iEntry) { entryPoints.remove(iEntry); } synchronized void removeEntry(final ORID iEntryId) { // DELETE THE NODE FROM THE PENDING RECORDS TO COMMIT for (OMVRBTreeEntryPersistent<K, V> node : recordsToCommit) { if (node.dataProvider.getIdentity().equals(iEntryId)) { recordsToCommit.remove(node); break; } } } /** * Returns the first Entry in the OMVRBTree (according to the OMVRBTree's key-sort function). Returns null if the OMVRBTree is * empty. 
*/ @Override public OMVRBTreeEntry<K, V> getFirstEntry() { if (!entryPoints.isEmpty()) { // FIND THE FIRST ELEMENT STARTING FROM THE FIRST ENTRY-POINT IN MEMORY final Map.Entry<K, OMVRBTreeEntryPersistent<K, V>> entry = entryPoints.firstEntry(); if (entry != null) { OMVRBTreeEntryPersistent<K, V> e = entry.getValue(); OMVRBTreeEntryPersistent<K, V> prev; do { prev = (OMVRBTreeEntryPersistent<K, V>) predecessor(e); if (prev != null) e = prev; } while (prev != null); if (e != null && e.getSize() > 0) pageIndex = 0; return e; } } // SEARCH FROM ROOT return super.getFirstEntry(); } /** * Returns the last Entry in the OMVRBTree (according to the OMVRBTree's key-sort function). Returns null if the OMVRBTree is * empty. */ @Override protected OMVRBTreeEntry<K, V> getLastEntry() { if (!entryPoints.isEmpty()) { // FIND THE LAST ELEMENT STARTING FROM THE FIRST ENTRY-POINT IN MEMORY final Map.Entry<K, OMVRBTreeEntryPersistent<K, V>> entry = entryPoints.lastEntry(); if (entry != null) { OMVRBTreeEntryPersistent<K, V> e = entry.getValue(); OMVRBTreeEntryPersistent<K, V> next; do { next = (OMVRBTreeEntryPersistent<K, V>) successor(e); if (next != null) e = next; } while (next != null); if (e != null && e.getSize() > 0) pageIndex = e.getSize() - 1; return e; } } // SEARCH FROM ROOT return super.getLastEntry(); } @Override protected void setRoot(final OMVRBTreeEntry<K, V> iRoot) { if (iRoot == root) return; super.setRoot(iRoot); if (iRoot == null) dataProvider.setRoot(null); else dataProvider.setRoot(((OMVRBTreeEntryPersistent<K, V>) iRoot).getProvider().getIdentity()); } protected void config() { if (dataProvider.updateConfig()) markDirty(); pageLoadFactor = OGlobalConfiguration.MVRBTREE_LOAD_FACTOR.getValueAsFloat(); optimizeEntryPointsFactor = OGlobalConfiguration.MVRBTREE_OPTIMIZE_ENTRYPOINTS_FACTOR.getValueAsFloat(); entryPointsSize = OGlobalConfiguration.MVRBTREE_ENTRYPOINTS.getValueAsInteger(); } @Override protected void rotateLeft(final OMVRBTreeEntry<K, V> p) { if (debug && p != null) System.out.printf("\nRotating to the left the node %s", ((OMVRBTreeEntryPersistent<K, V>) p).dataProvider.getIdentity()); super.rotateLeft(p); } @Override protected void rotateRight(final OMVRBTreeEntry<K, V> p) { if (debug && p != null) System.out.printf("\nRotating to the right the node %s", ((OMVRBTreeEntryPersistent<K, V>) p).dataProvider.getIdentity()); super.rotateRight(p); } /** * Removes the node also from the memory. */ @Override protected OMVRBTreeEntry<K, V> removeNode(final OMVRBTreeEntry<K, V> p) { final OMVRBTreeEntryPersistent<K, V> removed = (OMVRBTreeEntryPersistent<K, V>) super.removeNode(p); removeNodeFromMemory(removed); // this prevents NPE in case if tree contains single node and it was deleted inside of super.removeNode method. if (removed.getProvider() != null) removed.getProvider().delete(); // prevent record saving if it has been deleted. recordsToCommit.remove(removed); return removed; } /** * Removes the node from the memory. 
* * @param iNode * Node to remove */ protected void removeNodeFromMemory(final OMVRBTreeEntryPersistent<K, V> iNode) { if (iNode.dataProvider != null && iNode.dataProvider.getIdentity().isValid()) cache.remove(iNode.dataProvider.getIdentity()); if (iNode.getSize() > 0) entryPoints.remove(iNode.getKeyAt(0)); } protected void addNodeInMemory(final OMVRBTreeEntryPersistent<K, V> iNode) { addNodeAsEntrypoint(iNode); addNodeInCache(iNode); } protected boolean isNodeEntryPoint(final OMVRBTreeEntryPersistent<K, V> iNode) { if (iNode != null && iNode.getSize() > 0) return entryPoints.containsKey(iNode.getKeyAt(0)); return false; } protected void addNodeAsEntrypoint(final OMVRBTreeEntryPersistent<K, V> iNode) { if (iNode != null && iNode.getSize() > 0) entryPoints.put(iNode.getKeyAt(0), iNode); } /** * Updates the position of the node between the entry-points. If the node has 0 items, it's simply removed. * * @param iOldKey * Old key to remove * @param iNode * Node to update */ protected void updateEntryPoint(final K iOldKey, final OMVRBTreeEntryPersistent<K, V> iNode) { final OMVRBTreeEntryPersistent<K, V> node = entryPoints.remove(iOldKey); if (node != null) { if (node != iNode) OLogManager.instance().warn(this, "Entrypoints nodes are different during update: old %s <-> new %s", node, iNode); addNodeAsEntrypoint(iNode); } } /** * Keeps the node in memory. * * @param iNode * Node to store */ protected void addNodeInCache(final OMVRBTreeEntryPersistent<K, V> iNode) { if (iNode.dataProvider != null && iNode.dataProvider.getIdentity().isValid()) cache.put(iNode.dataProvider.getIdentity(), iNode); } /** * Searches the node in local cache by RID. * * @param iRid * RID to search * @return Node is found, otherwise NULL */ protected OMVRBTreeEntryPersistent<K, V> searchNodeInCache(final ORID iRid) { return cache.get(iRid); } public int getNumberOfNodesInCache() { return cache.size(); } /** * Returns all the RID of the nodes in memory. */ protected Set<ORID> getAllNodesInCache() { return cache.keySet(); } /** * Removes the node from the local cache. * * @param iRid * RID of node to remove */ protected void removeNodeFromCache(final ORID iRid) { cache.remove(iRid); } protected void markDirty() { } public ORecord<?> getOwner() { return owner; } public OMVRBTreePersistent<K, V> setOwner(ORecord<?> owner) { this.owner = owner; return this; } protected void freeMemory(final int i) { // LOW MEMORY DURING LOAD: THIS MEANS DEEP LOADING OF NODES. EXECUTE THE OPTIMIZATION AND RETRY IT optimize(true); OMemoryWatchDog.freeMemoryForOptimization(300 * i); } }
0true
core_src_main_java_com_orientechnologies_orient_core_type_tree_OMVRBTreePersistent.java
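A note on the pattern visible throughout the tree class in the row above: get(), put(), remove(), containsKey() and the entry-lookup methods all wrap the superclass call in the same loop, retrying up to OPTIMIZE_MAX_RETRY times and freeing memory whenever an OLowMemoryException is raised by searchNodeCallback(). Below is a minimal, self-contained sketch of that retry idea; every name in it (LowMemoryRetrySketch, TreeOperation, and the local LowMemoryException stand-in) is illustrative and not part of the OrientDB class.

final class LowMemoryRetrySketch {

    // Stand-in for OrientDB's OLowMemoryException, declared here only to keep the sketch self-contained.
    static final class LowMemoryException extends RuntimeException {
        LowMemoryException(String message) { super(message); }
    }

    // The guarded call, e.g. a tree traversal that may signal low memory mid-walk.
    interface TreeOperation<T> {
        T run();
    }

    static <T> T retryOnLowMemory(TreeOperation<T> operation, Runnable freeMemory, int maxRetry) {
        for (int attempt = 0; attempt < maxRetry; ++attempt) {
            try {
                return operation.run();
            } catch (LowMemoryException e) {
                // Optimize / evict cached nodes, then restart the traversal from scratch.
                freeMemory.run();
            }
        }
        throw new LowMemoryException("gave up after " + maxRetry + " attempts");
    }
}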
1,354
public interface StoreService { public Store readStoreByStoreCode(String storeCode); public Map<Store,Double> findStoresByAddress(Address searchAddress, double distance); public List<Store> readAllStores(); }
0true
core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_store_service_StoreService.java
429
public enum AddMethodType { PERSIST, LOOKUP }
0true
common_src_main_java_org_broadleafcommerce_common_presentation_client_AddMethodType.java
1,484
public static class Map extends Mapper<NullWritable, FaunusVertex, NullWritable, FaunusVertex> { private boolean isVertex; private Closure<Boolean> closure; @Override public void setup(final Mapper.Context context) throws IOException, InterruptedException { this.isVertex = context.getConfiguration().getClass(CLASS, Element.class, Element.class).equals(Vertex.class); try { this.closure = (Closure<Boolean>) engine.eval(context.getConfiguration().get(CLOSURE)); } catch (final ScriptException e) { throw new IOException(e.getMessage(), e); } } @Override public void map(final NullWritable key, final FaunusVertex value, final Mapper<NullWritable, FaunusVertex, NullWritable, FaunusVertex>.Context context) throws IOException, InterruptedException { if (this.isVertex) { if (value.hasPaths() && !this.closure.call(value)) { value.clearPaths(); DEFAULT_COMPAT.incrementContextCounter(context, Counters.VERTICES_FILTERED, 1L); } } else { long counter = 0; for (final Edge e : value.getEdges(Direction.BOTH)) { final StandardFaunusEdge edge = (StandardFaunusEdge) e; if (edge.hasPaths() && !this.closure.call(edge)) { edge.clearPaths(); counter++; } } DEFAULT_COMPAT.incrementContextCounter(context, Counters.EDGES_FILTERED, counter); } context.write(NullWritable.get(), value); } }
1no label
titan-hadoop-parent_titan-hadoop-core_src_main_java_com_thinkaurelius_titan_hadoop_mapreduce_filter_FilterMap.java
3
Collection<Long> perm2Ids = BLCCollectionUtils.collect(perm2, new TypedTransformer<Long>() { @Override public Long transform(Object input) { return ((ProductOptionValue) input).getId(); } });
0true
admin_broadleaf-admin-module_src_main_java_org_broadleafcommerce_admin_server_service_AdminCatalogServiceImpl.java
1,074
indexAction.execute(indexRequest, new ActionListener<IndexResponse>() { @Override public void onResponse(IndexResponse response) { UpdateResponse update = new UpdateResponse(response.getIndex(), response.getType(), response.getId(), response.getVersion(), response.isCreated()); update.setGetResult(updateHelper.extractGetResult(request, response.getVersion(), result.updatedSourceAsMap(), result.updateSourceContentType(), indexSourceBytes)); listener.onResponse(update); } @Override public void onFailure(Throwable e) { e = ExceptionsHelper.unwrapCause(e); if (e instanceof VersionConflictEngineException) { if (retryCount < request.retryOnConflict()) { threadPool.executor(executor()).execute(new Runnable() { @Override public void run() { shardOperation(request, listener, retryCount + 1); } }); return; } } listener.onFailure(e); } });
1no label
src_main_java_org_elasticsearch_action_update_TransportUpdateAction.java
791
new Callable<OIndexManager>() { public OIndexManager call() { OIndexManager instance; if (database.getStorage() instanceof OStorageProxy) instance = new OIndexManagerRemote(database); else instance = new OIndexManagerShared(database); if (iLoad) try { instance.load(); } catch (Exception e) { OLogManager.instance().error(this, "[OMetadata] Error on loading index manager, reset index configuration", e); instance.create(); } return instance; } }), database);
0true
core_src_main_java_com_orientechnologies_orient_core_metadata_OMetadataDefault.java
1,193
boolean success = portsRange.iterate(new PortsRange.PortCallback() { @Override public boolean onPortNumber(int portNumber) { try { channel = bootstrap.bind(new InetSocketAddress(hostAddress, portNumber)); } catch (Exception e) { lastException.set(e); return false; } return true; } });
0true
src_main_java_org_elasticsearch_bulk_udp_BulkUdpService.java
1,567
@XmlRootElement(name = "storage") @XmlType(propOrder = { "loadOnStartup", "userPassword", "userName", "path", "name" }) public class OServerStorageConfiguration { @XmlAttribute(required = true) public String name; @XmlAttribute public String path; @XmlAttribute public String userName; @XmlAttribute public String userPassword; @XmlAttribute(name = "loaded-at-startup") public boolean loadOnStartup; public OServerStorageConfiguration() { } }
0true
server_src_main_java_com_orientechnologies_orient_server_config_OServerStorageConfiguration.java
1,286
public class ClusterNameModule extends AbstractModule { private final Settings settings; public ClusterNameModule(Settings settings) { this.settings = settings; } @Override protected void configure() { bind(ClusterName.class).toInstance(ClusterName.clusterNameFromSettings(settings)); } }
0true
src_main_java_org_elasticsearch_cluster_ClusterNameModule.java
2,580
class ApplySettings implements NodeSettingsService.Listener { @Override public void onRefreshSettings(Settings settings) { int minimumMasterNodes = settings.getAsInt("discovery.zen.minimum_master_nodes", ZenDiscovery.this.electMaster.minimumMasterNodes()); if (minimumMasterNodes != ZenDiscovery.this.electMaster.minimumMasterNodes()) { logger.info("updating discovery.zen.minimum_master_nodes from [{}] to [{}]", ZenDiscovery.this.electMaster.minimumMasterNodes(), minimumMasterNodes); handleMinimumMasterNodesChanged(minimumMasterNodes); } } }
1no label
src_main_java_org_elasticsearch_discovery_zen_ZenDiscovery.java
1,687
runnable = new Runnable() { public void run() { map.putTransient(null, "value", 1, TimeUnit.SECONDS); } };
0true
hazelcast_src_test_java_com_hazelcast_map_BasicMapTest.java
2,242
public class OutputStreamIndexOutput extends OutputStream { private final IndexOutput out; public OutputStreamIndexOutput(IndexOutput out) { this.out = out; } @Override public void write(int b) throws IOException { out.writeByte((byte) b); } @Override public void write(byte[] b) throws IOException { out.writeBytes(b, b.length); } @Override public void write(byte[] b, int off, int len) throws IOException { out.writeBytes(b, off, len); } @Override public void flush() throws IOException { out.flush(); } @Override public void close() throws IOException { out.close(); } }
0true
src_main_java_org_elasticsearch_common_lucene_store_OutputStreamIndexOutput.java
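The row above is a thin adapter from Lucene's IndexOutput to java.io.OutputStream. A hedged usage sketch follows; the directory handling, the "example.bin" file name and the payload are assumptions, and the adapter class is assumed importable from the package shown in the row.

import java.io.IOException;
import java.io.OutputStream;
import java.nio.charset.Charset;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexOutput;

final class OutputStreamIndexOutputUsage {
    // Writes a small payload through the adapter so standard java.io writers can target an IndexOutput.
    static void writeThroughAdapter(Directory directory) throws IOException {
        IndexOutput indexOutput = directory.createOutput("example.bin", IOContext.DEFAULT);
        OutputStream out = new OutputStreamIndexOutput(indexOutput);
        try {
            out.write("hello".getBytes(Charset.forName("UTF-8")));
            out.flush();
        } finally {
            out.close(); // also closes the wrapped IndexOutput
        }
    }
}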
1,363
public class ODataSegmentMemory extends OSharedResourceAdaptive implements ODataSegment { private final String name; private final int id; private final List<byte[]> entries = new ArrayList<byte[]>(); public ODataSegmentMemory(final String iDataSegmentName, int iId) { super(OGlobalConfiguration.ENVIRONMENT_CONCURRENT.getValueAsBoolean()); name = iDataSegmentName; id = iId; } public void close() { acquireExclusiveLock(); try { entries.clear(); } finally { releaseExclusiveLock(); } } public void drop() throws IOException { close(); } public int count() { acquireSharedLock(); try { return entries.size(); } finally { releaseSharedLock(); } } public long getSize() { acquireSharedLock(); try { long size = 0; for (byte[] e : entries) if (e != null) size += e.length; return size; } finally { releaseSharedLock(); } } public long createRecord(byte[] iContent) { acquireExclusiveLock(); try { entries.add(iContent); return entries.size() - 1; } finally { releaseExclusiveLock(); } } public void deleteRecord(final long iRecordPosition) { acquireExclusiveLock(); try { entries.set((int) iRecordPosition, null); } finally { releaseExclusiveLock(); } } public byte[] readRecord(final long iRecordPosition) { acquireSharedLock(); try { return entries.get((int) iRecordPosition); } finally { releaseSharedLock(); } } public void updateRecord(final long iRecordPosition, final byte[] iContent) { acquireExclusiveLock(); try { entries.set((int) iRecordPosition, iContent); } finally { releaseExclusiveLock(); } } public String getName() { return name; } public int getId() { return id; } }
0true
core_src_main_java_com_orientechnologies_orient_core_storage_impl_memory_ODataSegmentMemory.java
56
@Service("blAssetFormBuilderService") public class AssetFormBuilderServiceImpl implements AssetFormBuilderService { @Resource(name = "blFormBuilderService") protected FormBuilderService formBuilderService; @Resource(name = "blStaticAssetService") protected StaticAssetService staticAssetService; @Resource(name = "blStaticMapNamedOperationComponent") protected StaticMapNamedOperationComponent operationMap; @Override public void addImageThumbnailField(ListGrid listGrid, String urlField) { listGrid.getHeaderFields().add(new Field() .withName("thumbnail") .withFriendlyName("Asset_thumbnail") .withFieldType(SupportedFieldType.STRING.toString()) .withOrder(Integer.MIN_VALUE) .withColumnWidth("50px") .withFilterSortDisabled(true)); for (ListGridRecord record : listGrid.getRecords()) { // Get the value of the URL String imageUrl = record.getField(urlField).getValue(); // Prepend the static asset url prefix if necessary String staticAssetUrlPrefix = staticAssetService.getStaticAssetUrlPrefix(); if (staticAssetUrlPrefix != null && !staticAssetUrlPrefix.startsWith("/")) { staticAssetUrlPrefix = "/" + staticAssetUrlPrefix; } if (staticAssetUrlPrefix == null) { staticAssetUrlPrefix = ""; } else { imageUrl = staticAssetUrlPrefix + imageUrl; } MediaField mf = (MediaField) new MediaField() .withName("thumbnail") .withFriendlyName("Asset_thumbnail") .withFieldType(SupportedFieldType.IMAGE.toString()) .withOrder(Integer.MIN_VALUE) .withValue(imageUrl); // Add a hidden field for the large thumbnail path record.getHiddenFields().add(new Field() .withName("cmsUrlPrefix") .withValue(staticAssetUrlPrefix)); record.getHiddenFields().add(new Field() .withName("thumbnailKey") .withValue("?smallAdminThumbnail")); record.getHiddenFields().add(new Field() .withName("servletContext") .withValue(BroadleafRequestContext.getBroadleafRequestContext().getRequest().getContextPath())); // Set the height value on this field mf.setHeight(operationMap.getNamedOperations().get("smallAdminThumbnail").get("resize-height-amount")); record.getFields().add(mf); // Since we've added a new field, we need to clear the cached map to ensure it will display record.clearFieldMap(); } } }
0true
admin_broadleaf-contentmanagement-module_src_main_java_org_broadleafcommerce_cms_admin_web_service_AssetFormBuilderServiceImpl.java
425
trackedList.addChangeListener(new OMultiValueChangeListener<Integer, String>() { public void onAfterRecordChanged(final OMultiValueChangeEvent<Integer, String> event) { Assert.assertEquals(event.getChangeType(), OMultiValueChangeEvent.OChangeType.UPDATE); Assert.assertEquals(event.getOldValue(), "value2"); Assert.assertEquals(event.getKey().intValue(), 1); Assert.assertEquals(event.getValue(), "value4"); changed.value = true; } });
0true
core_src_test_java_com_orientechnologies_orient_core_db_record_TrackedListTest.java
324
SafeRunner.run(new SafeRunnable() { public void run() { l.labelProviderChanged(event); } });
0true
plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_explorer_JavaUILabelProvider.java
519
public class TransportTypesExistsAction extends TransportMasterNodeReadOperationAction<TypesExistsRequest, TypesExistsResponse> { @Inject public TransportTypesExistsAction(Settings settings, TransportService transportService, ClusterService clusterService, ThreadPool threadPool) { super(settings, transportService, clusterService, threadPool); } @Override protected String executor() { // lightweight check return ThreadPool.Names.SAME; } @Override protected String transportAction() { return TypesExistsAction.NAME; } @Override protected TypesExistsRequest newRequest() { return new TypesExistsRequest(); } @Override protected TypesExistsResponse newResponse() { return new TypesExistsResponse(); } @Override protected ClusterBlockException checkBlock(TypesExistsRequest request, ClusterState state) { return state.blocks().indicesBlockedException(ClusterBlockLevel.METADATA, request.indices()); } @Override protected void masterOperation(final TypesExistsRequest request, final ClusterState state, final ActionListener<TypesExistsResponse> listener) throws ElasticsearchException { String[] concreteIndices = state.metaData().concreteIndices(request.indices(), request.indicesOptions()); if (concreteIndices.length == 0) { listener.onResponse(new TypesExistsResponse(false)); return; } for (String concreteIndex : concreteIndices) { if (!state.metaData().hasConcreteIndex(concreteIndex)) { listener.onResponse(new TypesExistsResponse(false)); return; } ImmutableOpenMap<String, MappingMetaData> mappings = state.metaData().getIndices().get(concreteIndex).mappings(); if (mappings.isEmpty()) { listener.onResponse(new TypesExistsResponse(false)); return; } for (String type : request.types()) { if (!mappings.containsKey(type)) { listener.onResponse(new TypesExistsResponse(false)); return; } } } listener.onResponse(new TypesExistsResponse(true)); } }
1no label
src_main_java_org_elasticsearch_action_admin_indices_exists_types_TransportTypesExistsAction.java
1,962
public class MoreTypes { public static final Type[] EMPTY_TYPE_ARRAY = new Type[]{}; private MoreTypes() { } private static final Map<TypeLiteral<?>, TypeLiteral<?>> PRIMITIVE_TO_WRAPPER = new ImmutableMap.Builder<TypeLiteral<?>, TypeLiteral<?>>() .put(TypeLiteral.get(boolean.class), TypeLiteral.get(Boolean.class)) .put(TypeLiteral.get(byte.class), TypeLiteral.get(Byte.class)) .put(TypeLiteral.get(short.class), TypeLiteral.get(Short.class)) .put(TypeLiteral.get(int.class), TypeLiteral.get(Integer.class)) .put(TypeLiteral.get(long.class), TypeLiteral.get(Long.class)) .put(TypeLiteral.get(float.class), TypeLiteral.get(Float.class)) .put(TypeLiteral.get(double.class), TypeLiteral.get(Double.class)) .put(TypeLiteral.get(char.class), TypeLiteral.get(Character.class)) .put(TypeLiteral.get(void.class), TypeLiteral.get(Void.class)) .build(); /** * Returns an equivalent type that's safe for use in a key. The returned type will be free of * primitive types. Type literals of primitives will return the corresponding wrapper types. * * @throws ConfigurationException if {@code type} contains a type variable */ public static <T> TypeLiteral<T> makeKeySafe(TypeLiteral<T> type) { if (!isFullySpecified(type.getType())) { String message = type + " cannot be used as a key; It is not fully specified."; throw new ConfigurationException(ImmutableSet.of(new Message(message))); } @SuppressWarnings("unchecked") TypeLiteral<T> wrappedPrimitives = (TypeLiteral<T>) PRIMITIVE_TO_WRAPPER.get(type); return wrappedPrimitives != null ? wrappedPrimitives : type; } /** * Returns true if {@code type} is free from type variables. */ private static boolean isFullySpecified(Type type) { if (type instanceof Class) { return true; } else if (type instanceof CompositeType) { return ((CompositeType) type).isFullySpecified(); } else if (type instanceof TypeVariable) { return false; } else { return ((CompositeType) canonicalize(type)).isFullySpecified(); } } /** * Returns a type that is functionally equal but not necessarily equal * according to {@link Object#equals(Object) Object.equals()}. The returned * type is {@link Serializable}. */ public static Type canonicalize(Type type) { if (type instanceof ParameterizedTypeImpl || type instanceof GenericArrayTypeImpl || type instanceof WildcardTypeImpl) { return type; } else if (type instanceof ParameterizedType) { ParameterizedType p = (ParameterizedType) type; return new ParameterizedTypeImpl(p.getOwnerType(), p.getRawType(), p.getActualTypeArguments()); } else if (type instanceof GenericArrayType) { GenericArrayType g = (GenericArrayType) type; return new GenericArrayTypeImpl(g.getGenericComponentType()); } else if (type instanceof Class && ((Class<?>) type).isArray()) { Class<?> c = (Class<?>) type; return new GenericArrayTypeImpl(c.getComponentType()); } else if (type instanceof WildcardType) { WildcardType w = (WildcardType) type; return new WildcardTypeImpl(w.getUpperBounds(), w.getLowerBounds()); } else { // type is either serializable as-is or unsupported return type; } } /** * Returns a type that's functionally equal but not necessarily equal * according to {@link Object#equals(Object) Object.equals}. The returned * member is {@link Serializable}. */ public static Member serializableCopy(Member member) { return member instanceof MemberImpl ? member : new MemberImpl(member); } public static Class<?> getRawType(Type type) { if (type instanceof Class<?>) { // type is a normal class. 
return (Class<?>) type; } else if (type instanceof ParameterizedType) { ParameterizedType parameterizedType = (ParameterizedType) type; // I'm not exactly sure why getRawType() returns Type instead of Class. // Neal isn't either but suspects some pathological case related // to nested classes exists. Type rawType = parameterizedType.getRawType(); checkArgument(rawType instanceof Class, "Expected a Class, but <%s> is of type %s", type, type.getClass().getName()); return (Class<?>) rawType; } else if (type instanceof GenericArrayType) { // TODO: Is this sufficient? return Object[].class; } else if (type instanceof TypeVariable) { // we could use the variable's bounds, but that'll won't work if there are multiple. // having a raw type that's more general than necessary is okay return Object.class; } else { throw new IllegalArgumentException("Expected a Class, ParameterizedType, or " + "GenericArrayType, but <" + type + "> is of type " + type.getClass().getName()); } } /** * Returns true if {@code a} and {@code b} are equal. */ public static boolean equals(Type a, Type b) { if (a == b) { // also handles (a == null && b == null) return true; } else if (a instanceof Class) { // Class already specifies equals(). return a.equals(b); } else if (a instanceof ParameterizedType) { if (!(b instanceof ParameterizedType)) { return false; } // TODO: save a .clone() call ParameterizedType pa = (ParameterizedType) a; ParameterizedType pb = (ParameterizedType) b; return Objects.equal(pa.getOwnerType(), pb.getOwnerType()) && pa.getRawType().equals(pb.getRawType()) && Arrays.equals(pa.getActualTypeArguments(), pb.getActualTypeArguments()); } else if (a instanceof GenericArrayType) { if (!(b instanceof GenericArrayType)) { return false; } GenericArrayType ga = (GenericArrayType) a; GenericArrayType gb = (GenericArrayType) b; return equals(ga.getGenericComponentType(), gb.getGenericComponentType()); } else if (a instanceof WildcardType) { if (!(b instanceof WildcardType)) { return false; } WildcardType wa = (WildcardType) a; WildcardType wb = (WildcardType) b; return Arrays.equals(wa.getUpperBounds(), wb.getUpperBounds()) && Arrays.equals(wa.getLowerBounds(), wb.getLowerBounds()); } else if (a instanceof TypeVariable) { if (!(b instanceof TypeVariable)) { return false; } TypeVariable<?> va = (TypeVariable) a; TypeVariable<?> vb = (TypeVariable) b; return va.getGenericDeclaration() == vb.getGenericDeclaration() && va.getName().equals(vb.getName()); } else { // This isn't a type we support. Could be a generic array type, wildcard type, etc. return false; } } /** * Returns the hashCode of {@code type}. */ public static int hashCode(Type type) { if (type instanceof Class) { // Class specifies hashCode(). return type.hashCode(); } else if (type instanceof ParameterizedType) { ParameterizedType p = (ParameterizedType) type; return Arrays.hashCode(p.getActualTypeArguments()) ^ p.getRawType().hashCode() ^ hashCodeOrZero(p.getOwnerType()); } else if (type instanceof GenericArrayType) { return hashCode(((GenericArrayType) type).getGenericComponentType()); } else if (type instanceof WildcardType) { WildcardType w = (WildcardType) type; return Arrays.hashCode(w.getLowerBounds()) ^ Arrays.hashCode(w.getUpperBounds()); } else { // This isn't a type we support. Probably a type variable return hashCodeOrZero(type); } } private static int hashCodeOrZero(Object o) { return o != null ? 
o.hashCode() : 0; } public static String toString(Type type) { if (type instanceof Class<?>) { return ((Class) type).getName(); } else if (type instanceof ParameterizedType) { ParameterizedType parameterizedType = (ParameterizedType) type; Type[] arguments = parameterizedType.getActualTypeArguments(); Type ownerType = parameterizedType.getOwnerType(); StringBuilder stringBuilder = new StringBuilder(); if (ownerType != null) { stringBuilder.append(toString(ownerType)).append("."); } stringBuilder.append(toString(parameterizedType.getRawType())); if (arguments.length > 0) { stringBuilder .append("<") .append(toString(arguments[0])); for (int i = 1; i < arguments.length; i++) { stringBuilder.append(", ").append(toString(arguments[i])); } } return stringBuilder.append(">").toString(); } else if (type instanceof GenericArrayType) { return toString(((GenericArrayType) type).getGenericComponentType()) + "[]"; } else if (type instanceof WildcardType) { WildcardType wildcardType = (WildcardType) type; Type[] lowerBounds = wildcardType.getLowerBounds(); Type[] upperBounds = wildcardType.getUpperBounds(); if (upperBounds.length != 1 || lowerBounds.length > 1) { throw new UnsupportedOperationException("Unsupported wildcard type " + type); } if (lowerBounds.length == 1) { if (upperBounds[0] != Object.class) { throw new UnsupportedOperationException("Unsupported wildcard type " + type); } return "? super " + toString(lowerBounds[0]); } else if (upperBounds[0] == Object.class) { return "?"; } else { return "? extends " + toString(upperBounds[0]); } } else { return type.toString(); } } /** * Returns {@code Field.class}, {@code Method.class} or {@code Constructor.class}. */ public static Class<? extends Member> memberType(Member member) { checkNotNull(member, "member"); if (member instanceof MemberImpl) { return ((MemberImpl) member).memberType; } else if (member instanceof Field) { return Field.class; } else if (member instanceof Method) { return Method.class; } else if (member instanceof Constructor) { return Constructor.class; } else { throw new IllegalArgumentException( "Unsupported implementation class for Member, " + member.getClass()); } } /** * Formats a member as concise string, such as {@code java.util.ArrayList.size}, * {@code java.util.ArrayList<init>()} or {@code java.util.List.remove()}. */ public static String toString(Member member) { Class<? extends Member> memberType = memberType(member); if (memberType == Method.class) { return member.getDeclaringClass().getName() + "." + member.getName() + "()"; } else if (memberType == Field.class) { return member.getDeclaringClass().getName() + "." + member.getName(); } else if (memberType == Constructor.class) { return member.getDeclaringClass().getName() + ".<init>()"; } else { throw new AssertionError(); } } public static String memberKey(Member member) { checkNotNull(member, "member"); return "<NO_MEMBER_KEY>"; } /** * Returns the generic supertype for {@code supertype}. For example, given a class {@code * IntegerSet}, the result for when supertype is {@code Set.class} is {@code Set<Integer>} and the * result when the supertype is {@code Collection.class} is {@code Collection<Integer>}. 
*/ public static Type getGenericSupertype(Type type, Class<?> rawType, Class<?> toResolve) { if (toResolve == rawType) { return type; } // we skip searching through interfaces if unknown is an interface if (toResolve.isInterface()) { Class[] interfaces = rawType.getInterfaces(); for (int i = 0, length = interfaces.length; i < length; i++) { if (interfaces[i] == toResolve) { return rawType.getGenericInterfaces()[i]; } else if (toResolve.isAssignableFrom(interfaces[i])) { return getGenericSupertype(rawType.getGenericInterfaces()[i], interfaces[i], toResolve); } } } // check our supertypes if (!rawType.isInterface()) { while (rawType != Object.class) { Class<?> rawSupertype = rawType.getSuperclass(); if (rawSupertype == toResolve) { return rawType.getGenericSuperclass(); } else if (toResolve.isAssignableFrom(rawSupertype)) { return getGenericSupertype(rawType.getGenericSuperclass(), rawSupertype, toResolve); } rawType = rawSupertype; } } // we can't resolve this further return toResolve; } public static Type resolveTypeVariable(Type type, Class<?> rawType, TypeVariable unknown) { Class<?> declaredByRaw = declaringClassOf(unknown); // we can't reduce this further if (declaredByRaw == null) { return unknown; } Type declaredBy = getGenericSupertype(type, rawType, declaredByRaw); if (declaredBy instanceof ParameterizedType) { int index = indexOf(declaredByRaw.getTypeParameters(), unknown); return ((ParameterizedType) declaredBy).getActualTypeArguments()[index]; } return unknown; } private static int indexOf(Object[] array, Object toFind) { for (int i = 0; i < array.length; i++) { if (toFind.equals(array[i])) { return i; } } throw new NoSuchElementException(); } /** * Returns the declaring class of {@code typeVariable}, or {@code null} if it was not declared by * a class. */ private static Class<?> declaringClassOf(TypeVariable typeVariable) { GenericDeclaration genericDeclaration = typeVariable.getGenericDeclaration(); return genericDeclaration instanceof Class ? (Class<?>) genericDeclaration : null; } public static class ParameterizedTypeImpl implements ParameterizedType, Serializable, CompositeType { private final Type ownerType; private final Type rawType; private final Type[] typeArguments; public ParameterizedTypeImpl(Type ownerType, Type rawType, Type... typeArguments) { // require an owner type if the raw type needs it if (rawType instanceof Class<?>) { Class rawTypeAsClass = (Class) rawType; checkArgument(ownerType != null || rawTypeAsClass.getEnclosingClass() == null, "No owner type for enclosed %s", rawType); checkArgument(ownerType == null || rawTypeAsClass.getEnclosingClass() != null, "Owner type for unenclosed %s", rawType); } this.ownerType = ownerType == null ? 
null : canonicalize(ownerType); this.rawType = canonicalize(rawType); this.typeArguments = typeArguments.clone(); for (int t = 0; t < this.typeArguments.length; t++) { checkNotNull(this.typeArguments[t], "type parameter"); checkNotPrimitive(this.typeArguments[t], "type parameters"); this.typeArguments[t] = canonicalize(this.typeArguments[t]); } } public Type[] getActualTypeArguments() { return typeArguments.clone(); } public Type getRawType() { return rawType; } public Type getOwnerType() { return ownerType; } public boolean isFullySpecified() { if (ownerType != null && !MoreTypes.isFullySpecified(ownerType)) { return false; } if (!MoreTypes.isFullySpecified(rawType)) { return false; } for (Type type : typeArguments) { if (!MoreTypes.isFullySpecified(type)) { return false; } } return true; } @Override public boolean equals(Object other) { return other instanceof ParameterizedType && MoreTypes.equals(this, (ParameterizedType) other); } @Override public int hashCode() { return MoreTypes.hashCode(this); } @Override public String toString() { return MoreTypes.toString(this); } private static final long serialVersionUID = 0; } public static class GenericArrayTypeImpl implements GenericArrayType, Serializable, CompositeType { private final Type componentType; public GenericArrayTypeImpl(Type componentType) { this.componentType = canonicalize(componentType); } public Type getGenericComponentType() { return componentType; } public boolean isFullySpecified() { return MoreTypes.isFullySpecified(componentType); } @Override public boolean equals(Object o) { return o instanceof GenericArrayType && MoreTypes.equals(this, (GenericArrayType) o); } @Override public int hashCode() { return MoreTypes.hashCode(this); } @Override public String toString() { return MoreTypes.toString(this); } private static final long serialVersionUID = 0; } /** * The WildcardType interface supports multiple upper bounds and multiple * lower bounds. We only support what the Java 6 language needs - at most one * bound. If a lower bound is set, the upper bound must be Object.class. */ public static class WildcardTypeImpl implements WildcardType, Serializable, CompositeType { private final Type upperBound; private final Type lowerBound; public WildcardTypeImpl(Type[] upperBounds, Type[] lowerBounds) { checkArgument(lowerBounds.length <= 1, "Must have at most one lower bound."); checkArgument(upperBounds.length == 1, "Must have exactly one upper bound."); if (lowerBounds.length == 1) { checkNotNull(lowerBounds[0], "lowerBound"); checkNotPrimitive(lowerBounds[0], "wildcard bounds"); checkArgument(upperBounds[0] == Object.class, "bounded both ways"); this.lowerBound = canonicalize(lowerBounds[0]); this.upperBound = Object.class; } else { checkNotNull(upperBounds[0], "upperBound"); checkNotPrimitive(upperBounds[0], "wildcard bounds"); this.lowerBound = null; this.upperBound = canonicalize(upperBounds[0]); } } public Type[] getUpperBounds() { return new Type[]{upperBound}; } public Type[] getLowerBounds() { return lowerBound != null ? 
new Type[]{lowerBound} : EMPTY_TYPE_ARRAY; } public boolean isFullySpecified() { return MoreTypes.isFullySpecified(upperBound) && (lowerBound == null || MoreTypes.isFullySpecified(lowerBound)); } @Override public boolean equals(Object other) { return other instanceof WildcardType && MoreTypes.equals(this, (WildcardType) other); } @Override public int hashCode() { return MoreTypes.hashCode(this); } @Override public String toString() { return MoreTypes.toString(this); } private static final long serialVersionUID = 0; } private static void checkNotPrimitive(Type type, String use) { checkArgument(!(type instanceof Class<?>) || !((Class) type).isPrimitive(), "Primitive types are not allowed in %s: %s", use, type); } /** * We cannot serialize the built-in Java member classes, which prevents us from using Members in * our exception types. We workaround this with this serializable implementation. It includes all * of the API methods, plus everything we use for line numbers and messaging. */ public static class MemberImpl implements Member, Serializable { private final Class<?> declaringClass; private final String name; private final int modifiers; private final boolean synthetic; private final Class<? extends Member> memberType; private final String memberKey; private MemberImpl(Member member) { this.declaringClass = member.getDeclaringClass(); this.name = member.getName(); this.modifiers = member.getModifiers(); this.synthetic = member.isSynthetic(); this.memberType = memberType(member); this.memberKey = memberKey(member); } public Class getDeclaringClass() { return declaringClass; } public String getName() { return name; } public int getModifiers() { return modifiers; } public boolean isSynthetic() { return synthetic; } @Override public String toString() { return MoreTypes.toString(this); } } /** * A type formed from other types, such as arrays, parameterized types or wildcard types */ private interface CompositeType { /** * Returns true if there are no type variables in this type. */ boolean isFullySpecified(); } }
0true
src_main_java_org_elasticsearch_common_inject_internal_MoreTypes.java
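A quick, hypothetical demonstration of two of the helpers defined in the row above, getRawType and toString(Type), assuming the MoreTypes class is importable; the Holder class exists only so a ParameterizedType can be obtained reflectively.

import java.lang.reflect.Type;
import java.util.List;

final class MoreTypesDemo {
    static class Holder {
        List<String> names; // only used to obtain a ParameterizedType via reflection
    }

    public static void main(String[] args) throws NoSuchFieldException {
        Type listOfString = Holder.class.getDeclaredField("names").getGenericType();
        Class<?> raw = MoreTypes.getRawType(listOfString);   // the erased class: java.util.List
        String rendered = MoreTypes.toString(listOfString);  // "java.util.List<java.lang.String>"
        System.out.println(raw.getName() + " / " + rendered);
    }
}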
3,732
public static enum Dynamic { TRUE, FALSE, STRICT }
0true
src_main_java_org_elasticsearch_index_mapper_object_ObjectMapper.java
1,517
@SuppressWarnings("unchecked") public class OObjectIteratorClass<T> implements OObjectIteratorClassInterface<T> { private ODatabaseObject database; private ORecordIteratorClass<ODocument> underlying; private String fetchPlan; public OObjectIteratorClass(final ODatabaseObject iDatabase, final ODatabaseRecordAbstract iUnderlyingDatabase, final String iClusterName, final boolean iPolymorphic) { database = iDatabase; underlying = new ORecordIteratorClass<ODocument>((ODatabaseRecord) iDatabase.getUnderlying(), iUnderlyingDatabase, iClusterName, iPolymorphic, true, false); } public boolean hasNext() { return underlying.hasNext(); } public T next() { return next(fetchPlan); } public T next(final String iFetchPlan) { return (T) database.getUserObjectByRecord(underlying.next(), iFetchPlan); } public void remove() { underlying.remove(); } public Iterator<T> iterator() { return this; } public String getFetchPlan() { return fetchPlan; } public OObjectIteratorClass<T> setFetchPlan(String fetchPlan) { this.fetchPlan = fetchPlan; return this; } }
0true
object_src_main_java_com_orientechnologies_orient_object_iterator_OObjectIteratorClass.java
559
public class TypedQueryBuilder<T> { protected Class<T> rootClass; protected String rootAlias; protected List<TQRestriction> restrictions = new ArrayList<TQRestriction>(); protected Map<String, Object> paramMap = new HashMap<String, Object>(); /** * Creates a new TypedQueryBuilder that will utilize the rootAlias as the named object of the class * * @param rootClass * @param rootAlias */ public TypedQueryBuilder(Class<T> rootClass, String rootAlias) { this.rootClass = rootClass; this.rootAlias = rootAlias; } /** * Adds a simple restriction to the query. Note that all restrictions present on the TypedQueryBuilder will be joined * with an AND clause. * * @param expression * @param operation * @param parameter */ public TypedQueryBuilder<T> addRestriction(String expression, String operation, Object parameter) { restrictions.add(new TQRestriction(expression, operation, parameter)); return this; } /** * Adds an explicit TQRestriction object. Note that all restrictions present on the TypedQueryBuilder will be joined * with an AND clause. * * @param restriction * @return */ public TypedQueryBuilder<T> addRestriction(TQRestriction restriction) { restrictions.add(restriction); return this; } /** * Generates the query string based on the current contents of this builder. As the string is generated, this method * will also populate the paramMap, which binds actual restriction values. * * Note that this method should typically not be invoked through DAOs. Instead, utilize {@link #toQuery(EntityManager)}, * which will automatically generate the TypedQuery and populate the required parameters. * * @return the QL string */ public String toQueryString() { return toQueryString(false); } /** * Generates the query string based on the current contents of this builder. As the string is generated, this method * will also populate the paramMap, which binds actual restriction values. * * Note that this method should typically not be invoked through DAOs. Instead, utilize {@link #toQuery(EntityManager)}, * which will automatically generate the TypedQuery and populate the required parameters. * * If you are using this as a COUNT query, you should look at the corresponding {@link #toCountQuery(EntityManager)} * * @param whether or not the resulting query string should be used as a count query or not * @return the QL string */ public String toQueryString(boolean count) { StringBuilder sb = getSelectClause(new StringBuilder(), count) .append(" FROM ").append(rootClass.getName()).append(" ").append(rootAlias); if (CollectionUtils.isNotEmpty(restrictions)) { sb.append(" WHERE "); for (int i = 0; i < restrictions.size(); i++) { TQRestriction r = restrictions.get(i); sb.append(r.toQl("p" + i, paramMap)); if (i != restrictions.size() - 1) { sb.append(" AND "); } } } return sb.toString(); } /** * Adds the select query from {@link #toQueryString()} * * @return <b>sb</b> with the select query appended to it */ protected StringBuilder getSelectClause(StringBuilder sb, boolean count) { sb.append("SELECT "); if (count) { return sb.append("COUNT(*)"); } else { return sb.append(rootAlias); } } /** * Returns a TypedQuery that represents this builder object. It will already have all of the appropriate parameter * values set and is able to be immediately queried against. 
* * @param em * @return the TypedQuery */ public TypedQuery<T> toQuery(EntityManager em) { TypedQuery<T> q = em.createQuery(toQueryString(), rootClass); fillParameterMap(q); return q; } public TypedQuery<Long> toCountQuery(EntityManager em) { TypedQuery<Long> q = em.createQuery(toQueryString(true), Long.class); fillParameterMap(q); return q; } protected void fillParameterMap(TypedQuery<?> q) { for (Entry<String, Object> entry : paramMap.entrySet()) { if (entry.getValue() != null) { q.setParameter(entry.getKey(), entry.getValue()); } } } /** * @return the paramMap */ public Map<String, Object> getParamMap() { return paramMap; } }
1no label
common_src_main_java_org_broadleafcommerce_common_util_dao_TypedQueryBuilder.java
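A brief usage sketch for the builder above. Only the constructor, addRestriction, toQuery and toCountQuery calls come from the code in this row; the entity class ProductImpl, the alias, the field path and the 'N' flag value are assumptions for illustration.

import java.util.List;
import javax.persistence.EntityManager;
import javax.persistence.TypedQuery;

final class TypedQueryBuilderUsage {
    // Hypothetical lookup of non-archived products, built with the fluent restriction API above.
    List<ProductImpl> findActiveProducts(EntityManager em) {
        TypedQueryBuilder<ProductImpl> builder =
                new TypedQueryBuilder<ProductImpl>(ProductImpl.class, "product")
                        .addRestriction("product.archiveStatus.archived", "=", 'N');

        Long total = builder.toCountQuery(em).getSingleResult(); // COUNT(*) form of the same criteria
        TypedQuery<ProductImpl> query = builder.toQuery(em);     // parameters already bound
        return query.getResultList();
    }
}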
2,434
public abstract class ConcurrentCollections { private final static boolean useConcurrentHashMapV8 = Boolean.parseBoolean(System.getProperty("es.useConcurrentHashMapV8", "false")); private final static boolean useLinkedTransferQueue = Boolean.parseBoolean(System.getProperty("es.useLinkedTransferQueue", "false")); static final int aggressiveConcurrencyLevel; static { aggressiveConcurrencyLevel = Math.max(Runtime.getRuntime().availableProcessors() * 2, 16); } /** * Creates a new CHM with an aggressive concurrency level, aimed at high concurrent update rate long living maps. */ public static <K, V> ConcurrentMap<K, V> newConcurrentMapWithAggressiveConcurrency() { if (useConcurrentHashMapV8) { return new ConcurrentHashMapV8<K, V>(16, 0.75f, aggressiveConcurrencyLevel); } return new ConcurrentHashMap<K, V>(16, 0.75f, aggressiveConcurrencyLevel); } public static <K, V> ConcurrentMap<K, V> newConcurrentMap() { if (useConcurrentHashMapV8) { return new ConcurrentHashMapV8<K, V>(); } return new ConcurrentHashMap<K, V>(); } /** * Creates a new CHM with an aggressive concurrency level, aimed at highly updateable long living maps. */ public static <V> ConcurrentMapLong<V> newConcurrentMapLongWithAggressiveConcurrency() { return new ConcurrentHashMapLong<V>(ConcurrentCollections.<Long, V>newConcurrentMapWithAggressiveConcurrency()); } public static <V> ConcurrentMapLong<V> newConcurrentMapLong() { return new ConcurrentHashMapLong<V>(ConcurrentCollections.<Long, V>newConcurrentMap()); } public static <V> Set<V> newConcurrentSet() { return Sets.newSetFromMap(ConcurrentCollections.<V, Boolean>newConcurrentMap()); } public static <T> Queue<T> newQueue() { if (useLinkedTransferQueue) { return new LinkedTransferQueue<T>(); } return new ConcurrentLinkedQueue<T>(); } public static <T> Deque<T> newDeque() { return new ConcurrentLinkedDeque<T>(); } public static <T> BlockingQueue<T> newBlockingQueue() { return new LinkedTransferQueue<T>(); } private ConcurrentCollections() { } }
0true
src_main_java_org_elasticsearch_common_util_concurrent_ConcurrentCollections.java
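For illustration, the factories above are normally called instead of the JDK constructors so the system-property switches (ConcurrentHashMapV8, LinkedTransferQueue) stay centralized. A small hedged example follows; the field names and the onRequest logic are assumptions.

import java.util.Queue;
import java.util.Set;
import java.util.concurrent.ConcurrentMap;

final class ConcurrentCollectionsUsage {
    private final ConcurrentMap<String, Long> requestCounts =
            ConcurrentCollections.newConcurrentMapWithAggressiveConcurrency();
    private final Queue<Runnable> pendingTasks = ConcurrentCollections.newQueue();
    private final Set<String> seenEndpoints = ConcurrentCollections.newConcurrentSet();

    void onRequest(String endpoint) {
        requestCounts.putIfAbsent(endpoint, 0L); // initialize once, safely, under concurrent access
        seenEndpoints.add(endpoint);
    }
}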
972
public static class Name { }
0true
core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_order_domain_BundleOrderItemImpl.java
3,706
private final ThreadLocal<Field> fieldCache = new ThreadLocal<Field>() { @Override protected Field initialValue() { return new NumericDocValuesField(NAME, -1L); } };
0true
src_main_java_org_elasticsearch_index_mapper_internal_VersionFieldMapper.java
323
public class MergePoint { private static final Log LOG = LogFactory.getLog(MergePoint.class); private MergeHandler handler; private Document doc1; private Document doc2; private XPath xPath; public MergePoint(MergeHandler handler, Document doc1, Document doc2) { this.handler = handler; this.doc1 = doc1; this.doc2 = doc2; XPathFactory factory=XPathFactory.newInstance(); xPath=factory.newXPath(); } /** * Execute the merge operation and also provide a list of nodes that have already been * merged. It is up to the handler implementation to respect or ignore this list. * * @param exhaustedNodes * @return list of merged nodes * @throws XPathExpressionException */ public Node[] merge(List<Node> exhaustedNodes) throws XPathExpressionException, TransformerException { return merge(handler, exhaustedNodes); } private Node[] merge(MergeHandler handler, List<Node> exhaustedNodes) throws XPathExpressionException, TransformerException { if (LOG.isDebugEnabled()) { LOG.debug("Processing handler: " + handler.getXPath()); } if (handler.getChildren() != null) { MergeHandler[] children = handler.getChildren(); for (MergeHandler aChildren : children) { Node[] temp = merge(aChildren, exhaustedNodes); if (temp != null) { Collections.addAll(exhaustedNodes, temp); } } } String[] xPaths = handler.getXPath().split(" "); List<Node> nodeList1 = new ArrayList<Node>(); List<Node> nodeList2 = new ArrayList<Node>(); for (String xPathVal : xPaths) { NodeList temp1 = (NodeList) xPath.evaluate(xPathVal, doc1, XPathConstants.NODESET); if (temp1 != null) { int length = temp1.getLength(); for (int j=0;j<length;j++) { nodeList1.add(temp1.item(j)); } } NodeList temp2 = (NodeList) xPath.evaluate(xPathVal, doc2, XPathConstants.NODESET); if (temp2 != null) { int length = temp2.getLength(); for (int j=0;j<length;j++) { nodeList2.add(temp2.item(j)); } } } if (nodeList1 != null && nodeList2 != null) { return handler.merge(nodeList1, nodeList2, exhaustedNodes); } return null; } }
0true
common_src_main_java_org_broadleafcommerce_common_extensibility_context_merge_MergePoint.java
813
@SuppressWarnings("unchecked") public class OSchemaShared extends ODocumentWrapperNoClass implements OSchema, OCloseable { private static final long serialVersionUID = 1L; public static final int CURRENT_VERSION_NUMBER = 4; private static final String DROP_INDEX_QUERY = "drop index "; protected Map<String, OClass> classes = new HashMap<String, OClass>(); public OSchemaShared(final int schemaClusterId) { super(new ODocument()); } public int countClasses() { getDatabase().checkSecurity(ODatabaseSecurityResources.SCHEMA, ORole.PERMISSION_READ); return getDatabase().getStorage().callInLock(new Callable<Integer>() { @Override public Integer call() throws Exception { return classes.size(); } }, false); } public OClass createClass(final Class<?> iClass) { final Class<?> superClass = iClass.getSuperclass(); final OClass cls; if (superClass != null && superClass != Object.class && existsClass(superClass.getSimpleName())) cls = getClass(superClass.getSimpleName()); else cls = null; return createClass(iClass.getSimpleName(), cls, OStorage.CLUSTER_TYPE.PHYSICAL); } public OClass createClass(final Class<?> iClass, final int iDefaultClusterId) { final Class<?> superClass = iClass.getSuperclass(); final OClass cls; if (superClass != null && superClass != Object.class && existsClass(superClass.getSimpleName())) cls = getClass(superClass.getSimpleName()); else cls = null; return createClass(iClass.getSimpleName(), cls, iDefaultClusterId); } public OClass createClass(final String iClassName) { return createClass(iClassName, null, OStorage.CLUSTER_TYPE.PHYSICAL); } public OClass createClass(final String iClassName, final OClass iSuperClass) { return createClass(iClassName, iSuperClass, OStorage.CLUSTER_TYPE.PHYSICAL); } public OClass createClass(final String iClassName, final OClass iSuperClass, final OStorage.CLUSTER_TYPE iType) { if (getDatabase().getTransaction().isActive()) throw new IllegalStateException("Cannot create class " + iClassName + " inside a transaction"); int clusterId = getDatabase().getClusterIdByName(iClassName); if (clusterId == -1) // CREATE A NEW CLUSTER clusterId = createCluster(iType.toString(), iClassName); return createClass(iClassName, iSuperClass, clusterId); } public OClass createClass(final String iClassName, final int iDefaultClusterId) { return createClass(iClassName, null, new int[] { iDefaultClusterId }); } public OClass createClass(final String iClassName, final OClass iSuperClass, final int iDefaultClusterId) { return createClass(iClassName, iSuperClass, new int[] { iDefaultClusterId }); } public OClass getOrCreateClass(final String iClassName) { return getOrCreateClass(iClassName, null); } public OClass getOrCreateClass(final String iClassName, final OClass iSuperClass) { return getDatabase().getStorage().callInLock(new Callable<OClass>() { @Override public OClass call() throws Exception { OClass cls = classes.get(iClassName.toLowerCase()); if (cls == null) cls = createClass(iClassName, iSuperClass); else if (iSuperClass != null && !cls.isSubClassOf(iSuperClass)) throw new IllegalArgumentException("Class '" + iClassName + "' is not an instance of " + iSuperClass.getShortName()); return cls; } }, true); } @Override public OClass createAbstractClass(final Class<?> iClass) { final Class<?> superClass = iClass.getSuperclass(); final OClass cls; if (superClass != null && superClass != Object.class && existsClass(superClass.getSimpleName())) cls = getClass(superClass.getSimpleName()); else cls = null; return createClass(iClass.getSimpleName(), cls, -1); } @Override public 
OClass createAbstractClass(final String iClassName) { return createClass(iClassName, null, -1); } @Override public OClass createAbstractClass(final String iClassName, final OClass iSuperClass) { return createClass(iClassName, iSuperClass, -1); } private int createCluster(String iType, String iClassName) { return getDatabase().command(new OCommandSQL("create cluster " + iClassName + " " + iType)).<Integer> execute(); } public OClass createClass(final String iClassName, final OClass iSuperClass, final int[] iClusterIds) { getDatabase().checkSecurity(ODatabaseSecurityResources.SCHEMA, ORole.PERMISSION_CREATE); final String key = iClassName.toLowerCase(); return getDatabase().getStorage().callInLock(new Callable<OClass>() { @Override public OClass call() throws Exception { if (classes.containsKey(key)) throw new OSchemaException("Class " + iClassName + " already exists in current database"); final StringBuilder cmd = new StringBuilder("create class "); cmd.append(iClassName); if (iSuperClass != null) { cmd.append(" extends "); cmd.append(iSuperClass.getName()); } if (iClusterIds != null) { if (iClusterIds.length == 1 && iClusterIds[0] == -1) cmd.append(" abstract"); else { cmd.append(" cluster "); for (int i = 0; i < iClusterIds.length; ++i) { if (i > 0) cmd.append(','); else cmd.append(' '); cmd.append(iClusterIds[i]); } } } getDatabase().command(new OCommandSQL(cmd.toString())).execute(); if (!(getDatabase().getStorage() instanceof OStorageEmbedded)) getDatabase().reload(); if (classes.containsKey(key)) return classes.get(key); else // ADD IT LOCALLY AVOIDING TO RELOAD THE ENTIRE SCHEMA createClassInternal(iClassName, iSuperClass, iClusterIds); return classes.get(key); } }, true); } public OClass createClassInternal(final String iClassName, final OClass superClass, final int[] iClusterIds) { if (iClassName == null || iClassName.length() == 0) throw new OSchemaException("Found class name null"); final Character wrongCharacter = checkNameIfValid(iClassName); if (wrongCharacter != null) throw new OSchemaException("Found invalid class name. 
Character '" + wrongCharacter + "' cannot be used in class name."); final ODatabaseRecord database = getDatabase(); final int[] clusterIds; if (iClusterIds == null || iClusterIds.length == 0) // CREATE A NEW CLUSTER clusterIds = new int[] { database.addCluster(CLUSTER_TYPE.PHYSICAL.toString(), iClassName, null, null) }; else clusterIds = iClusterIds; database.checkSecurity(ODatabaseSecurityResources.SCHEMA, ORole.PERMISSION_CREATE); final String key = iClassName.toLowerCase(); final OSchemaShared me = this; return getDatabase().getStorage().callInLock(new Callable<OClass>() { @Override public OClass call() throws Exception { if (classes.containsKey(key)) throw new OSchemaException("Class " + iClassName + " already exists in current database"); final OClassImpl cls = new OClassImpl(me, iClassName, clusterIds); classes.put(key, cls); if (cls.getShortName() != null) // BIND SHORT NAME TOO classes.put(cls.getShortName().toLowerCase(), cls); if (superClass != null) { cls.setSuperClassInternal(superClass); // UPDATE INDEXES final int[] clustersToIndex = superClass.getPolymorphicClusterIds(); final String[] clusterNames = new String[clustersToIndex.length]; for (int i = 0; i < clustersToIndex.length; i++) clusterNames[i] = database.getClusterNameById(clustersToIndex[i]); for (OIndex<?> index : superClass.getIndexes()) for (String clusterName : clusterNames) if (clusterName != null) database.getMetadata().getIndexManager().addClusterToIndex(clusterName, index.getName()); } return cls; } }, true); } public static Character checkNameIfValid(String iName) { if (iName == null) throw new IllegalArgumentException("Name is null"); iName = iName.trim(); final int nameSize = iName.length(); if (nameSize == 0) throw new IllegalArgumentException("Name is empty"); for (int i = 0; i < nameSize; ++i) { final char c = iName.charAt(i); if (c == ':' || c == ',' || c == ' ') // INVALID CHARACTER return c; } // for (char c : iName.toCharArray()) // if (!Character.isJavaIdentifierPart(c)) // return c; return null; } /* * (non-Javadoc) * * @see com.orientechnologies.orient.core.metadata.schema.OSchema#dropClass(java.lang.String) */ public void dropClass(final String iClassName) { if (getDatabase().getTransaction().isActive()) throw new IllegalStateException("Cannot drop a class inside a transaction"); if (iClassName == null) throw new IllegalArgumentException("Class name is null"); getDatabase().checkSecurity(ODatabaseSecurityResources.SCHEMA, ORole.PERMISSION_DELETE); final String key = iClassName.toLowerCase(); getDatabase().getStorage().callInLock(new Callable<Object>() { @Override public Object call() throws Exception { final OClass cls = classes.get(key); if (cls == null) throw new OSchemaException("Class " + iClassName + " was not found in current database"); if (cls.getBaseClasses().hasNext()) throw new OSchemaException("Class " + iClassName + " cannot be dropped because it has sub classes. 
Remove the dependencies before trying to drop it again"); final StringBuilder cmd = new StringBuilder("drop class "); cmd.append(iClassName); Object result = getDatabase().command(new OCommandSQL(cmd.toString())).execute(); if (result instanceof Boolean && (Boolean) result) { classes.remove(key); } getDatabase().reload(); reload(); return null; } }, true); } public void dropClassInternal(final String iClassName) { if (getDatabase().getTransaction().isActive()) throw new IllegalStateException("Cannot drop a class inside a transaction"); if (iClassName == null) throw new IllegalArgumentException("Class name is null"); getDatabase().checkSecurity(ODatabaseSecurityResources.SCHEMA, ORole.PERMISSION_DELETE); final String key = iClassName.toLowerCase(); getDatabase().getStorage().callInLock(new Callable<Object>() { @Override public Object call() throws Exception { final OClass cls = classes.get(key); if (cls == null) throw new OSchemaException("Class " + iClassName + " was not found in current database"); if (cls.getBaseClasses().hasNext()) throw new OSchemaException("Class " + iClassName + " cannot be dropped because it has sub classes. Remove the dependencies before trying to drop it again"); if (cls.getSuperClass() != null) { // REMOVE DEPENDENCY FROM SUPERCLASS ((OClassImpl) cls.getSuperClass()).removeBaseClassInternal(cls); } dropClassIndexes(cls); classes.remove(key); if (cls.getShortName() != null) // REMOVE THE ALIAS TOO classes.remove(cls.getShortName().toLowerCase()); return null; } }, true); } private void dropClassIndexes(final OClass cls) { for (final OIndex<?> index : getDatabase().getMetadata().getIndexManager().getClassIndexes(cls.getName())) { getDatabase().command(new OCommandSQL(DROP_INDEX_QUERY + index.getName())); } } /** * Reloads the schema inside a storage's shared lock. 
*/ @Override public <RET extends ODocumentWrapper> RET reload() { getDatabase().getStorage().callInLock(new Callable<Object>() { @Override public Object call() throws Exception { reload(null); return null; } }, true); return (RET) this; } public boolean existsClass(final String iClassName) { return getDatabase().getStorage().callInLock(new Callable<Boolean>() { @Override public Boolean call() throws Exception { return classes.containsKey(iClassName.toLowerCase()); } }, false); } /* * (non-Javadoc) * * @see com.orientechnologies.orient.core.metadata.schema.OSchema#getClass(java.lang.Class) */ public OClass getClass(final Class<?> iClass) { return getClass(iClass.getSimpleName()); } /* * (non-Javadoc) * * @see com.orientechnologies.orient.core.metadata.schema.OSchema#getClass(java.lang.String) */ public OClass getClass(final String iClassName) { if (iClassName == null) return null; OClass cls; cls = getDatabase().getStorage().callInLock(new Callable<OClass>() { @Override public OClass call() throws Exception { return classes.get(iClassName.toLowerCase()); } }, false); if (cls == null && getDatabase().getDatabaseOwner() instanceof ODatabaseObject) { cls = getDatabase().getStorage().callInLock(new Callable<OClass>() { @Override public OClass call() throws Exception { OClass cls = classes.get(iClassName.toLowerCase()); if (cls == null) { // CHECK IF CAN AUTO-CREATE IT final ODatabase ownerDb = getDatabase().getDatabaseOwner(); if (ownerDb instanceof ODatabaseObject) { final Class<?> javaClass = ((ODatabaseObject) ownerDb).getEntityManager().getEntityClass(iClassName); if (javaClass != null) { // AUTO REGISTER THE CLASS AT FIRST USE cls = cascadeCreate(javaClass); } } } return cls; } }, true); } return cls; } public void changeClassName(final String iOldName, final String iNewName) { getDatabase().getStorage().callInLock(new Callable<Object>() { @Override public Object call() throws Exception { final OClass clazz = classes.remove(iOldName.toLowerCase()); classes.put(iNewName.toLowerCase(), clazz); return null; } }, true); } /** * Binds ODocument to POJO. */ @Override public void fromStream() { final OSchemaShared me = this; getDatabase().getStorage().callInLock(new Callable<Object>() { @Override public Object call() throws Exception { // READ CURRENT SCHEMA VERSION final Integer schemaVersion = (Integer) document.field("schemaVersion"); if (schemaVersion == null) { OLogManager .instance() .error( this, "Database's schema is empty! Recreating the system classes and allow the opening of the database but double check the integrity of the database"); return null; } else if (schemaVersion.intValue() != CURRENT_VERSION_NUMBER) { // HANDLE SCHEMA UPGRADE throw new OConfigurationException( "Database schema is different. 
Please export your old database with the previous version of OrientDB and reimport it using the current one."); } // REGISTER ALL THE CLASSES classes.clear(); OClassImpl cls; Collection<ODocument> storedClasses = document.field("classes"); for (ODocument c : storedClasses) { cls = new OClassImpl(me, c); cls.fromStream(); classes.put(cls.getName().toLowerCase(), cls); if (cls.getShortName() != null) classes.put(cls.getShortName().toLowerCase(), cls); } // REBUILD THE INHERITANCE TREE String superClassName; OClass superClass; for (ODocument c : storedClasses) { superClassName = c.field("superClass"); if (superClassName != null) { // HAS A SUPER CLASS cls = (OClassImpl) classes.get(((String) c.field("name")).toLowerCase()); superClass = classes.get(superClassName.toLowerCase()); if (superClass == null) throw new OConfigurationException("Super class '" + superClassName + "' was declared in class '" + cls.getName() + "' but was not found in schema. Remove the dependency or create the class to continue."); cls.setSuperClassInternal(superClass); } } return null; } }, true); } /** * Binds POJO to ODocument. */ @Override @OBeforeSerialization public ODocument toStream() { return getDatabase().getStorage().callInLock(new Callable<ODocument>() { @Override public ODocument call() throws Exception { document.setInternalStatus(ORecordElement.STATUS.UNMARSHALLING); try { document.field("schemaVersion", CURRENT_VERSION_NUMBER); Set<ODocument> cc = new HashSet<ODocument>(); for (OClass c : classes.values()) cc.add(((OClassImpl) c).toStream()); document.field("classes", cc, OType.EMBEDDEDSET); } finally { document.setInternalStatus(ORecordElement.STATUS.LOADED); } return document; } }, false); } public Collection<OClass> getClasses() { getDatabase().checkSecurity(ODatabaseSecurityResources.SCHEMA, ORole.PERMISSION_READ); return getDatabase().getStorage().callInLock(new Callable<Collection<OClass>>() { @Override public HashSet<OClass> call() throws Exception { return new HashSet<OClass>(classes.values()); } }, false); } @Override public Set<OClass> getClassesRelyOnCluster(final String iClusterName) { getDatabase().checkSecurity(ODatabaseSecurityResources.SCHEMA, ORole.PERMISSION_READ); return getDatabase().getStorage().callInLock(new Callable<Set<OClass>>() { @Override public Set<OClass> call() throws Exception { final int clusterId = getDatabase().getClusterIdByName(iClusterName); final Set<OClass> result = new HashSet<OClass>(); for (OClass c : classes.values()) { if (OArrays.contains(c.getPolymorphicClusterIds(), clusterId)) result.add(c); } return result; } }, false); } @Override public OSchemaShared load() { getDatabase().getStorage().callInLock(new Callable<Object>() { @Override public Object call() throws Exception { getDatabase(); ((ORecordId) document.getIdentity()).fromString(getDatabase().getStorage().getConfiguration().schemaRecordId); reload("*:-1 index:0"); return null; } }, true); return this; } public void create() { final ODatabaseRecord db = getDatabase(); super.save(OMetadataDefault.CLUSTER_INTERNAL_NAME); db.getStorage().getConfiguration().schemaRecordId = document.getIdentity().toString(); db.getStorage().getConfiguration().update(); } public void close() { classes.clear(); document.clear(); } public void saveInternal() { final ODatabaseRecord db = getDatabase(); if (db.getTransaction().isActive()) throw new OSchemaException("Cannot change the schema while a transaction is active. 
Schema changes are not transactional"); db.getStorage().callInLock(new Callable<Object>() { @Override public Object call() throws Exception { saveInternal(OMetadataDefault.CLUSTER_INTERNAL_NAME); return null; } }, true); } @Deprecated public int getVersion() { return getDatabase().getStorage().callInLock(new Callable<Integer>() { @Override public Integer call() throws Exception { return document.getRecordVersion().getCounter(); } }, false); } public ORID getIdentity() { return document.getIdentity(); } /** * Avoid to handle this by user API. */ @Override public <RET extends ODocumentWrapper> RET save() { return (RET) this; } /** * Avoid to handle this by user API. */ @Override public <RET extends ODocumentWrapper> RET save(final String iClusterName) { return (RET) this; } public OSchemaShared setDirty() { document.setDirty(); return this; } private OClass cascadeCreate(final Class<?> javaClass) { final OClassImpl cls = (OClassImpl) createClass(javaClass.getSimpleName()); final Class<?> javaSuperClass = javaClass.getSuperclass(); if (javaSuperClass != null && !javaSuperClass.getName().equals("java.lang.Object") && !javaSuperClass.getName().startsWith("com.orientechnologies")) { OClass superClass = classes.get(javaSuperClass.getSimpleName().toLowerCase()); if (superClass == null) superClass = cascadeCreate(javaSuperClass); cls.setSuperClass(superClass); } return cls; } private ODatabaseRecord getDatabase() { return ODatabaseRecordThreadLocal.INSTANCE.get(); } private void saveInternal(final String iClusterName) { document.setDirty(); for (int retry = 0; retry < 10; retry++) try { super.save(OMetadataDefault.CLUSTER_INTERNAL_NAME); break; } catch (OConcurrentModificationException e) { reload(null, true); } super.save(OMetadataDefault.CLUSTER_INTERNAL_NAME); } }
1no label
core_src_main_java_com_orientechnologies_orient_core_metadata_schema_OSchemaShared.java
2,867
public class HindiAnalyzerProvider extends AbstractIndexAnalyzerProvider<HindiAnalyzer> { private final HindiAnalyzer analyzer; @Inject public HindiAnalyzerProvider(Index index, @IndexSettings Settings indexSettings, Environment env, @Assisted String name, @Assisted Settings settings) { super(index, indexSettings, name, settings); analyzer = new HindiAnalyzer(version, Analysis.parseStopWords(env, settings, HindiAnalyzer.getDefaultStopSet(), version), Analysis.parseStemExclusion(settings, CharArraySet.EMPTY_SET, version)); } @Override public HindiAnalyzer get() { return this.analyzer; } }
0true
src_main_java_org_elasticsearch_index_analysis_HindiAnalyzerProvider.java
350
public class MergeJPAPersistenceResource extends MergeXmlConfigResource { private static final Log LOG = LogFactory.getLog(MergeJPAPersistenceResource.class); private ErrorHandler handler = new SimpleSaxErrorHandler(LOG); public Resource getMergedConfigResource(ResourceInputStream[] sources) throws BeansException { Resource configResource = null; ResourceInputStream merged = null; try { List<String> mappingFiles = new ArrayList<String>(20); ResourceInputStream[] inMemoryStreams = new ResourceInputStream[sources.length]; for (int j=0;j<sources.length;j++){ byte[] sourceArray = buildArrayFromStream(sources[j]); compileMappingFiles(mappingFiles, sourceArray); inMemoryStreams[j] = new ResourceInputStream(new ByteArrayInputStream(sourceArray), sources[j].getName()); } merged = merge(inMemoryStreams); //read the final stream into a byte array ByteArrayOutputStream baos = new ByteArrayOutputStream(); boolean eof = false; while (!eof) { int temp = merged.read(); if (temp == -1) { eof = true; } else { baos.write(temp); } } configResource = new ByteArrayResource(baos.toByteArray()); if (LOG.isDebugEnabled()) { LOG.debug("Merged config: \n" + serialize(configResource)); } } catch (MergeException e) { throw new FatalBeanException("Unable to merge source and patch locations", e); } catch (MergeManagerSetupException e) { throw new FatalBeanException("Unable to merge source and patch locations", e); } catch (IOException e) { throw new FatalBeanException("Unable to merge source and patch locations", e); } catch (SAXException e) { throw new FatalBeanException("Unable to merge source and patch locations", e); } catch (ParserConfigurationException e) { throw new FatalBeanException("Unable to merge source and patch locations", e); } finally { if (merged != null) { try{ merged.close(); } catch (Throwable e) { LOG.error("Unable to merge source and patch locations", e); } } } return configResource; } private void compileMappingFiles(List<String> mappingFiles, byte[] sourceArray) throws IOException, ParserConfigurationException, SAXException { DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance(); dbf.setNamespaceAware(true); DocumentBuilder parser = dbf.newDocumentBuilder(); parser.setErrorHandler(handler); Document dom = parser.parse(new ByteArrayInputStream(sourceArray)); NodeList nodes = dom.getElementsByTagName("/persistence/persistence-unit/mapping-file"); if (nodes != null && nodes.getLength() > 0) { int length = nodes.getLength(); for (int j=0;j<length;j++){ Node node = nodes.item(j); mappingFiles.add(node.getNodeValue()); } } } }
0true
common_src_main_java_org_broadleafcommerce_common_extensibility_jpa_MergeJPAPersistenceResource.java
263
{ @Override public boolean visit( XaCommand element ) throws RuntimeException { if( element instanceof Command.NodeCommand ) { Command.NodeCommand cmd = (Command.NodeCommand)element; Collection<DynamicRecord> beforeDynLabels = cmd.getAfter().getDynamicLabelRecords(); assertThat( beforeDynLabels.size(), equalTo(1) ); assertThat( beforeDynLabels.iterator().next().inUse(), equalTo(false) ); } return true; } });
0true
community_kernel_src_test_java_org_neo4j_kernel_impl_nioneo_xa_WriteTransactionTest.java
1,118
public class OSQLFunctionCoalesce extends OSQLFunctionAbstract { public static final String NAME = "coalesce"; public OSQLFunctionCoalesce() { super(NAME, 1, 1000); } @Override public Object execute(OIdentifiable iCurrentRecord, Object iCurrentResult, final Object[] iParameters, OCommandContext iContext) { int length = iParameters.length; for (int i = 0; i < length; i++) { if (iParameters[i] != null) return iParameters[i]; } return null; } @Override public String getSyntax() { return "Returns the first not-null parameter or null if all parameters are null. Syntax: coalesce(<field|value> [,<field|value>]*)"; } }
1no label
core_src_main_java_com_orientechnologies_orient_core_sql_functions_misc_OSQLFunctionCoalesce.java
276
emailServiceTemplate.send(emailServiceDestination, new MessageCreator() { public Message createMessage(Session session) throws JMSException { ObjectMessage message = session.createObjectMessage(props); EmailInfo info = (EmailInfo) props.get(EmailPropertyType.INFO.getType()); message.setJMSPriority(Integer.parseInt(info.getSendAsyncPriority())); return message; } });
0true
common_src_main_java_org_broadleafcommerce_common_email_service_jms_JMSEmailServiceProducerImpl.java
84
protected enum RESULT { OK, ERROR, EXIT };
0true
commons_src_main_java_com_orientechnologies_common_console_OConsoleApplication.java
2,020
public interface BindingTargetVisitor<T, V> { /** * Visit an instance binding. The same instance is returned for every injection. This target is * found in both module and injector bindings. */ V visit(InstanceBinding<? extends T> binding); /** * Visit a provider instance binding. The provider's {@code get} method is invoked to resolve * injections. This target is found in both module and injector bindings. */ V visit(ProviderInstanceBinding<? extends T> binding); /** * Visit a provider key binding. To resolve injections, the provider key is first resolved, then * that provider's {@code get} method is invoked. This target is found in both module and injector * bindings. */ V visit(ProviderKeyBinding<? extends T> binding); /** * Visit a linked key binding. The other key's binding is used to resolve injections. This * target is found in both module and injector bindings. */ V visit(LinkedKeyBinding<? extends T> binding); /** * Visit a binding to a key exposed from an enclosed private environment. This target is only * found in injector bindings. */ V visit(ExposedBinding<? extends T> binding); /** * Visit an untargetted binding. This target is found only on module bindings. It indicates * that the injector should use its implicit binding strategies to resolve injections. */ V visit(UntargettedBinding<? extends T> binding); /** * Visit a constructor binding. To resolve injections, an instance is instantiated by invoking * {@code constructor}. This target is found only on injector bindings. */ V visit(ConstructorBinding<? extends T> binding); /** * Visit a binding created from converting a bound instance to a new type. The source binding * has the same binding annotation but a different type. This target is found only on injector * bindings. */ V visit(ConvertedConstantBinding<? extends T> binding); /** * Visit a binding to a {@link org.elasticsearch.common.inject.Provider} that delegates to the binding for the * provided type. This target is found only on injector bindings. */ V visit(ProviderBinding<? extends T> binding); }
0true
src_main_java_org_elasticsearch_common_inject_spi_BindingTargetVisitor.java
1,002
public static class Order { public static final int Items = 2000; public static final int Pricing = 3000; public static final int Address = 4000; public static final int Advanced = 5000; }
0true
core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_order_domain_FulfillmentGroupImpl.java
1,189
@Service("blCompositePaymentService") public class CompositePaymentServiceImpl implements CompositePaymentService { @Resource(name = "blPaymentWorkflow") protected SequenceProcessor paymentWorkflow; public CompositePaymentResponse executePayment(Order order, Map<PaymentInfo, Referenced> payments, PaymentResponse response) throws PaymentException { /* * TODO add validation that checks the order and payment information for * validity. */ try { PaymentSeed seed = new PaymentSeed(order, payments, response); paymentWorkflow.doActivities(seed); return seed; } catch (WorkflowException e) { Throwable cause = null; while (e.getCause() != null) { if (cause != null && cause.equals(e.getCause())) { break; } cause = e.getCause(); } if (cause != null && PaymentException.class.isAssignableFrom(cause.getClass())) { throw (PaymentException) cause; } throw new PaymentException("Unable to execute payment for order -- id: " + order.getId(), e); } } public CompositePaymentResponse executePayment(Order order, Map<PaymentInfo, Referenced> payments) throws PaymentException { return executePayment(order, payments, new PaymentResponseImpl()); } public CompositePaymentResponse executePayment(Order order) throws PaymentException { return executePayment(order, null); } //This convenience method is utilized for those implementations that are not storing secure information (credit card information), such as PayPal and Braintree //It will construct a PaymentInfo based on the implementation of PaymentInfoFactory with an empty Referenced and pass it to the workflow. public CompositePaymentResponse executePaymentForGateway(Order order, PaymentInfoFactory paymentInfoFactory) throws PaymentException { Map<PaymentInfo, Referenced> payments = new HashMap<PaymentInfo, Referenced>(); PaymentInfo paymentInfo = paymentInfoFactory.constructPaymentInfo(order); payments.put(paymentInfo, paymentInfo.createEmptyReferenced()); order.getPaymentInfos().add(paymentInfo); return executePayment(order, payments); } public SequenceProcessor getPaymentWorkflow() { return paymentWorkflow; } public void setPaymentWorkflow(SequenceProcessor paymentWorkflow) { this.paymentWorkflow = paymentWorkflow; } }
0true
core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_payment_service_CompositePaymentServiceImpl.java
1,979
public static final Scoping EAGER_SINGLETON = new Scoping() { public <V> V acceptVisitor(BindingScopingVisitor<V> visitor) { return visitor.visitEagerSingleton(); } @Override public Scope getScopeInstance() { return Scopes.SINGLETON; } @Override public String toString() { return "eager singleton"; } public void applyTo(ScopedBindingBuilder scopedBindingBuilder) { scopedBindingBuilder.asEagerSingleton(); } };
0true
src_main_java_org_elasticsearch_common_inject_internal_Scoping.java
558
typeIntersection = Collections2.filter(indexService.mapperService().types(), new Predicate<String>() { @Override public boolean apply(String type) { return Regex.simpleMatch(types, type); } });
0true
src_main_java_org_elasticsearch_action_admin_indices_mapping_get_TransportGetFieldMappingsAction.java
418
@Retention(RetentionPolicy.RUNTIME) @Target({ElementType.FIELD}) public @interface AdminPresentationMapFields { /** * Members of this map can be displayed as form fields, rather than in a standard grid. When populated, * mapDisplayFields informs the form building process to create the fields described here and persist those fields * in this map structure. * * @return the fields to display that represent the members of this map */ AdminPresentationMapField[] mapDisplayFields(); }
0true
common_src_main_java_org_broadleafcommerce_common_presentation_AdminPresentationMapFields.java
1,971
public class ReachabilityHandlerChain { /** * Compares priority of handlers. */ private static final Comparator<ReachabilityHandler> NICE_VALUE_COMPARATOR = new Comparator<ReachabilityHandler>() { @Override public int compare(ReachabilityHandler o1, ReachabilityHandler o2) { final short i1 = o1.niceNumber(); final short i2 = o2.niceNumber(); // we want small nice numbers first. return (i1 < i2) ? -1 : ((i1 == i2) ? 0 : 1); } }; private ReachabilityHandler<Record> firstHandler; private ReachabilityHandler<Record> successorHandler; public ReachabilityHandlerChain(ReachabilityHandler<Record>... handlers) { // TODO eviction handler. for (ReachabilityHandler<Record> evictionHandler : handlers) { addHandler(evictionHandler); } } public Record isReachable(Record record, long criteria, long nowInNanos) { return firstHandler.process(record, criteria, nowInNanos); } public void addHandler(ReachabilityHandler<Record> handler) { addHandlerInternal(handler); final List<ReachabilityHandler> sortedHandlers = sortHandlers(); resetHandlerChain(sortedHandlers); for (ReachabilityHandler sortedHandler : sortedHandlers) { addHandlerInternal(sortedHandler); } } private void addHandlerInternal(ReachabilityHandler<Record> handler) { if (firstHandler == null) { firstHandler = handler; } else { successorHandler.setSuccessorHandler(handler); } successorHandler = handler; } private List<ReachabilityHandler> sortHandlers() { final List<ReachabilityHandler> sortedList = new ArrayList<ReachabilityHandler>(); ReachabilityHandler tempHandler = firstHandler; sortedList.add(tempHandler); while (tempHandler.getSuccessorHandler() != null) { tempHandler = tempHandler.getSuccessorHandler(); sortedList.add(tempHandler); } Collections.sort(sortedList, NICE_VALUE_COMPARATOR); return sortedList; } private void resetHandlerChain(List<ReachabilityHandler> sortedHandlers) { firstHandler = null; successorHandler = null; for (ReachabilityHandler sortedHandler : sortedHandlers) { sortedHandler.resetHandler(); } } }
0true
hazelcast_src_main_java_com_hazelcast_map_eviction_ReachabilityHandlerChain.java
127
public class ImportProposals { static void addImportProposals(Tree.CompilationUnit cu, Node node, Collection<ICompletionProposal> proposals, IFile file) { if (node instanceof Tree.BaseMemberOrTypeExpression || node instanceof Tree.SimpleType) { Node id = getIdentifyingNode(node); String brokenName = id.getText(); Module module = cu.getUnit().getPackage().getModule(); for (Declaration decl: findImportCandidates(module, brokenName, cu)) { ICompletionProposal ip = createImportProposal(cu, file, decl); if (ip!=null) { proposals.add(ip); } } } } private static Set<Declaration> findImportCandidates(Module module, String name, Tree.CompilationUnit cu) { Set<Declaration> result = new HashSet<Declaration>(); for (Package pkg: module.getAllPackages()) { if (!pkg.getName().isEmpty()) { Declaration member = pkg.getMember(name, null, false); if (member!=null) { result.add(member); } } } /*if (result.isEmpty()) { for (Package pkg: module.getAllPackages()) { for (Declaration member: pkg.getMembers()) { if (!isImported(member, cu)) { int dist = getLevenshteinDistance(name, member.getName()); //TODO: would it be better to just sort by dist, and // then select the 3 closest possibilities? if (dist<=name.length()/3+1) { result.add(member); } } } } }*/ return result; } private static ICompletionProposal createImportProposal(Tree.CompilationUnit cu, IFile file, Declaration declaration) { TextFileChange change = new TextFileChange("Add Import", file); IDocument doc = EditorUtil.getDocument(change); List<InsertEdit> ies = importEdits(cu, singleton(declaration), null, null, doc); if (ies.isEmpty()) return null; change.setEdit(new MultiTextEdit()); for (InsertEdit ie: ies) { change.addEdit(ie); } String proposedName = declaration.getName(); /*String brokenName = id.getText(); if (!brokenName.equals(proposedName)) { change.addEdit(new ReplaceEdit(id.getStartIndex(), brokenName.length(), proposedName)); }*/ String description = "Add import of '" + proposedName + "'" + " in package '" + declaration.getUnit().getPackage().getNameAsString() + "'"; return new CorrectionProposal(description, change, null, IMPORT) { @Override public StyledString getStyledDisplayString() { return Highlights.styleProposal(getDisplayString(), true); } }; } public static List<InsertEdit> importEdits(Tree.CompilationUnit cu, Iterable<Declaration> declarations, Iterable<String> aliases, Declaration declarationBeingDeleted, IDocument doc) { String delim = getDefaultLineDelimiter(doc); List<InsertEdit> result = new ArrayList<InsertEdit>(); Set<Package> packages = new HashSet<Package>(); for (Declaration declaration: declarations) { packages.add(declaration.getUnit().getPackage()); } for (Package p: packages) { StringBuilder text = new StringBuilder(); if (aliases==null) { for (Declaration d: declarations) { if (d.getUnit().getPackage().equals(p)) { text.append(",") .append(delim).append(getDefaultIndent()) .append(escapeName(d)); } } } else { Iterator<String> aliasIter = aliases.iterator(); for (Declaration d: declarations) { String alias = aliasIter.next(); if (d.getUnit().getPackage().equals(p)) { text.append(",") .append(delim).append(getDefaultIndent()); if (alias!=null && !alias.equals(d.getName())) { text.append(alias).append('='); } text.append(escapeName(d)); } } } Tree.Import importNode = findImportNode(cu, p.getNameAsString()); if (importNode!=null) { Tree.ImportMemberOrTypeList imtl = importNode.getImportMemberOrTypeList(); if (imtl.getImportWildcard()!=null) { //Do nothing } else { int insertPosition = 
getBestImportMemberInsertPosition(importNode); if (declarationBeingDeleted!=null && imtl.getImportMemberOrTypes().size()==1 && imtl.getImportMemberOrTypes().get(0).getDeclarationModel() .equals(declarationBeingDeleted)) { text.delete(0, 2); } result.add(new InsertEdit(insertPosition, text.toString())); } } else { int insertPosition = getBestImportInsertPosition(cu); text.delete(0, 2); text.insert(0, "import " + escapePackageName(p) + " {" + delim) .append(delim + "}"); if (insertPosition==0) { text.append(delim); } else { text.insert(0, delim); } result.add(new InsertEdit(insertPosition, text.toString())); } } return result; } public static List<TextEdit> importEditForMove(Tree.CompilationUnit cu, Iterable<Declaration> declarations, Iterable<String> aliases, String newPackageName, String oldPackageName, IDocument doc) { String delim = getDefaultLineDelimiter(doc); List<TextEdit> result = new ArrayList<TextEdit>(); Set<Declaration> set = new HashSet<Declaration>(); for (Declaration d: declarations) { set.add(d); } StringBuilder text = new StringBuilder(); if (aliases==null) { for (Declaration d: declarations) { text.append(",") .append(delim).append(getDefaultIndent()) .append(d.getName()); } } else { Iterator<String> aliasIter = aliases.iterator(); for (Declaration d: declarations) { String alias = aliasIter.next(); text.append(",") .append(delim).append(getDefaultIndent()); if (alias!=null && !alias.equals(d.getName())) { text.append(alias).append('='); } text.append(d.getName()); } } Tree.Import oldImportNode = findImportNode(cu, oldPackageName); if (oldImportNode!=null) { Tree.ImportMemberOrTypeList imtl = oldImportNode.getImportMemberOrTypeList(); if (imtl!=null) { int remaining = 0; for (Tree.ImportMemberOrType imt: imtl.getImportMemberOrTypes()) { if (!set.contains(imt.getDeclarationModel())) { remaining++; } } if (remaining==0) { result.add(new DeleteEdit(oldImportNode.getStartIndex(), oldImportNode.getStopIndex()-oldImportNode.getStartIndex()+1)); } else { //TODO: format it better!!!! 
StringBuilder sb = new StringBuilder("{").append(delim); for (Tree.ImportMemberOrType imt: imtl.getImportMemberOrTypes()) { if (!set.contains(imt.getDeclarationModel())) { sb.append(getDefaultIndent()); if (imt.getAlias()!=null) { sb.append(imt.getAlias().getIdentifier().getText()) .append('='); } sb.append(imt.getIdentifier().getText()) .append(",") .append(delim); } } sb.setLength(sb.length()-2); sb.append(delim).append("}"); result.add(new ReplaceEdit(imtl.getStartIndex(), imtl.getStopIndex()-imtl.getStartIndex()+1, sb.toString())); } } } if (!cu.getUnit().getPackage().getQualifiedNameString() .equals(newPackageName)) { Tree.Import importNode = findImportNode(cu, newPackageName); if (importNode!=null) { Tree.ImportMemberOrTypeList imtl = importNode.getImportMemberOrTypeList(); if (imtl.getImportWildcard()!=null) { //Do nothing } else { int insertPosition = getBestImportMemberInsertPosition(importNode); result.add(new InsertEdit(insertPosition, text.toString())); } } else { int insertPosition = getBestImportInsertPosition(cu); text.delete(0, 2); text.insert(0, "import " + newPackageName + " {" + delim) .append(delim + "}"); if (insertPosition==0) { text.append(delim); } else { text.insert(0, delim); } result.add(new InsertEdit(insertPosition, text.toString())); } } return result; } private static int getBestImportInsertPosition(Tree.CompilationUnit cu) { Integer stopIndex = cu.getImportList().getStopIndex(); if (stopIndex == null) return 0; return stopIndex+1; } public static Tree.Import findImportNode(Tree.CompilationUnit cu, String packageName) { FindImportNodeVisitor visitor = new FindImportNodeVisitor(packageName); cu.visit(visitor); return visitor.getResult(); } private static int getBestImportMemberInsertPosition(Tree.Import importNode) { Tree.ImportMemberOrTypeList imtl = importNode.getImportMemberOrTypeList(); if (imtl.getImportWildcard()!=null) { return imtl.getImportWildcard().getStartIndex(); } else { List<Tree.ImportMemberOrType> imts = imtl.getImportMemberOrTypes(); if (imts.isEmpty()) { return imtl.getStartIndex()+1; } else { return imts.get(imts.size()-1).getStopIndex()+1; } } } public static int applyImports(TextChange change, Set<Declaration> declarations, Tree.CompilationUnit cu, IDocument doc) { return applyImports(change, declarations, null, cu, doc); } public static int applyImports(TextChange change, Set<Declaration> declarations, Declaration declarationBeingDeleted, Tree.CompilationUnit cu, IDocument doc) { int il=0; for (InsertEdit ie: importEdits(cu, declarations, null, declarationBeingDeleted, doc)) { il+=ie.getText().length(); change.addEdit(ie); } return il; } public static int applyImports(TextChange change, Map<Declaration,String> declarations, Tree.CompilationUnit cu, IDocument doc, Declaration declarationBeingDeleted) { int il=0; for (InsertEdit ie: importEdits(cu, declarations.keySet(), declarations.values(), declarationBeingDeleted, doc)) { il+=ie.getText().length(); change.addEdit(ie); } return il; } public static void importSignatureTypes(Declaration declaration, Tree.CompilationUnit rootNode, Set<Declaration> declarations) { if (declaration instanceof TypedDeclaration) { importType(declarations, ((TypedDeclaration) declaration).getType(), rootNode); } if (declaration instanceof Functional) { for (ParameterList pl: ((Functional) declaration).getParameterLists()) { for (Parameter p: pl.getParameters()) { importSignatureTypes(p.getModel(), rootNode, declarations); } } } } public static void importTypes(Set<Declaration> declarations, 
Collection<ProducedType> types, Tree.CompilationUnit rootNode) { if (types==null) return; for (ProducedType type: types) { importType(declarations, type, rootNode); } } public static void importType(Set<Declaration> declarations, ProducedType type, Tree.CompilationUnit rootNode) { if (type==null) return; if (type.getDeclaration() instanceof UnionType) { for (ProducedType t: type.getDeclaration().getCaseTypes()) { importType(declarations, t, rootNode); } } else if (type.getDeclaration() instanceof IntersectionType) { for (ProducedType t: type.getDeclaration().getSatisfiedTypes()) { importType(declarations, t, rootNode); } } else { importType(declarations, type.getQualifyingType(), rootNode); TypeDeclaration td = type.getDeclaration(); if (td instanceof ClassOrInterface && td.isToplevel()) { importDeclaration(declarations, td, rootNode); for (ProducedType arg: type.getTypeArgumentList()) { importType(declarations, arg, rootNode); } } } } public static void importDeclaration(Set<Declaration> declarations, Declaration declaration, Tree.CompilationUnit rootNode) { if (!declaration.isParameter()) { Package p = declaration.getUnit().getPackage(); if (!p.getNameAsString().isEmpty() && !p.equals(rootNode.getUnit().getPackage()) && !p.getNameAsString().equals(Module.LANGUAGE_MODULE_NAME) && (!declaration.isClassOrInterfaceMember() || declaration.isStaticallyImportable())) { if (!isImported(declaration, rootNode)) { declarations.add(declaration); } } } } public static boolean isImported(Declaration declaration, Tree.CompilationUnit rootNode) { for (Import i: rootNode.getUnit().getImports()) { if (i.getDeclaration().equals(getAbstraction(declaration))) { return true; } } return false; } public static void importCallableParameterParamTypes(Declaration declaration, HashSet<Declaration> decs, Tree.CompilationUnit cu) { if (declaration instanceof Functional) { List<ParameterList> pls = ((Functional) declaration).getParameterLists(); if (!pls.isEmpty()) { for (Parameter p: pls.get(0).getParameters()) { MethodOrValue pm = p.getModel(); importParameterTypes(pm, cu, decs); } } } } public static void importParameterTypes(Declaration pm, Tree.CompilationUnit cu, HashSet<Declaration> decs) { if (pm instanceof Method) { for (ParameterList ppl: ((Method) pm).getParameterLists()) { for (Parameter pp: ppl.getParameters()) { importSignatureTypes(pp.getModel(), cu, decs); } } } } }
1no label
plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_correct_ImportProposals.java
1,065
shardAction.execute(shardRequest, new ActionListener<MultiTermVectorsShardResponse>() { @Override public void onResponse(MultiTermVectorsShardResponse response) { for (int i = 0; i < response.locations.size(); i++) { responses.set(response.locations.get(i), new MultiTermVectorsItemResponse(response.responses.get(i), response.failures.get(i))); } if (counter.decrementAndGet() == 0) { finishHim(); } } @Override public void onFailure(Throwable e) { // create failures for all relevant requests String message = ExceptionsHelper.detailedMessage(e); for (int i = 0; i < shardRequest.locations.size(); i++) { TermVectorRequest termVectorRequest = shardRequest.requests.get(i); responses.set(shardRequest.locations.get(i), new MultiTermVectorsItemResponse(null, new MultiTermVectorsResponse.Failure(shardRequest.index(), termVectorRequest.type(), termVectorRequest.id(), message))); } if (counter.decrementAndGet() == 0) { finishHim(); } } private void finishHim() { listener.onResponse(new MultiTermVectorsResponse( responses.toArray(new MultiTermVectorsItemResponse[responses.length()]))); } });
0true
src_main_java_org_elasticsearch_action_termvector_TransportMultiTermVectorsAction.java
1,661
public class StopWatch { /** * Identifier of this stop watch. * Handy when we have output from multiple stop watches * and need to distinguish between them in log or console output. */ private final String id; private boolean keepTaskList = true; private final List<TaskInfo> taskList = new LinkedList<TaskInfo>(); /** * Start time of the current task */ private long startTimeMillis; /** * Is the stop watch currently running? */ private boolean running; /** * Name of the current task */ private String currentTaskName; private TaskInfo lastTaskInfo; private int taskCount; /** * Total running time */ private long totalTimeMillis; /** * Construct a new stop watch. Does not start any task. */ public StopWatch() { this.id = ""; } /** * Construct a new stop watch with the given id. * Does not start any task. * * @param id identifier for this stop watch. * Handy when we have output from multiple stop watches * and need to distinguish between them. */ public StopWatch(String id) { this.id = id; } /** * Determine whether the TaskInfo array is built over time. Set this to * "false" when using a StopWatch for millions of intervals, or the task * info structure will consume excessive memory. Default is "true". */ public StopWatch keepTaskList(boolean keepTaskList) { this.keepTaskList = keepTaskList; return this; } /** * Start an unnamed task. The results are undefined if {@link #stop()} * or timing methods are called without invoking this method. * * @see #stop() */ public StopWatch start() throws IllegalStateException { return start(""); } /** * Start a named task. The results are undefined if {@link #stop()} * or timing methods are called without invoking this method. * * @param taskName the name of the task to start * @see #stop() */ public StopWatch start(String taskName) throws IllegalStateException { if (this.running) { throw new IllegalStateException("Can't start StopWatch: it's already running"); } this.startTimeMillis = System.currentTimeMillis(); this.running = true; this.currentTaskName = taskName; return this; } /** * Stop the current task. The results are undefined if timing * methods are called without invoking at least one pair * {@link #start()} / {@link #stop()} methods. * * @see #start() */ public StopWatch stop() throws IllegalStateException { if (!this.running) { throw new IllegalStateException("Can't stop StopWatch: it's not running"); } long lastTime = System.currentTimeMillis() - this.startTimeMillis; this.totalTimeMillis += lastTime; this.lastTaskInfo = new TaskInfo(this.currentTaskName, lastTime); if (this.keepTaskList) { this.taskList.add(lastTaskInfo); } ++this.taskCount; this.running = false; this.currentTaskName = null; return this; } /** * Return whether the stop watch is currently running. */ public boolean isRunning() { return this.running; } /** * Return the time taken by the last task. */ public TimeValue lastTaskTime() throws IllegalStateException { if (this.lastTaskInfo == null) { throw new IllegalStateException("No tests run: can't get last interval"); } return this.lastTaskInfo.getTime(); } /** * Return the name of the last task. */ public String lastTaskName() throws IllegalStateException { if (this.lastTaskInfo == null) { throw new IllegalStateException("No tests run: can't get last interval"); } return this.lastTaskInfo.getTaskName(); } /** * Return the total time for all tasks. */ public TimeValue totalTime() { return new TimeValue(totalTimeMillis, TimeUnit.MILLISECONDS); } /** * Return the number of tasks timed. 
*/ public int taskCount() { return taskCount; } /** * Return an array of the data for tasks performed. */ public TaskInfo[] taskInfo() { if (!this.keepTaskList) { throw new UnsupportedOperationException("Task info is not being kept!"); } return this.taskList.toArray(new TaskInfo[this.taskList.size()]); } /** * Return a short description of the total running time. */ public String shortSummary() { return "StopWatch '" + this.id + "': running time = " + totalTime(); } /** * Return a string with a table describing all tasks performed. * For custom reporting, call getTaskInfo() and use the task info directly. */ public String prettyPrint() { StringBuilder sb = new StringBuilder(shortSummary()); sb.append('\n'); if (!this.keepTaskList) { sb.append("No task info kept"); } else { sb.append("-----------------------------------------\n"); sb.append("ms % Task name\n"); sb.append("-----------------------------------------\n"); NumberFormat nf = NumberFormat.getNumberInstance(Locale.ROOT); nf.setMinimumIntegerDigits(5); nf.setGroupingUsed(false); NumberFormat pf = NumberFormat.getPercentInstance(Locale.ROOT); pf.setMinimumIntegerDigits(3); pf.setGroupingUsed(false); for (TaskInfo task : taskInfo()) { sb.append(nf.format(task.getTime().millis())).append(" "); sb.append(pf.format(task.getTime().secondsFrac() / totalTime().secondsFrac())).append(" "); sb.append(task.getTaskName()).append("\n"); } } return sb.toString(); } /** * Return an informative string describing all tasks performed * For custom reporting, call <code>getTaskInfo()</code> and use the task info directly. */ @Override public String toString() { StringBuilder sb = new StringBuilder(shortSummary()); if (this.keepTaskList) { for (TaskInfo task : taskInfo()) { sb.append("; [").append(task.getTaskName()).append("] took ").append(task.getTime()); long percent = Math.round((100.0f * task.getTime().millis()) / totalTime().millis()); sb.append(" = ").append(percent).append("%"); } } else { sb.append("; no task info kept"); } return sb.toString(); } /** * Inner class to hold data about one task executed within the stop watch. */ public static class TaskInfo { private final String taskName; private final TimeValue timeValue; private TaskInfo(String taskName, long timeMillis) { this.taskName = taskName; this.timeValue = new TimeValue(timeMillis, TimeUnit.MILLISECONDS); } /** * Return the name of this task. */ public String getTaskName() { return taskName; } /** * Return the time this task took. */ public TimeValue getTime() { return timeValue; } } }
0true
src_main_java_org_elasticsearch_common_StopWatch.java
294
public enum STRATEGY { DEPTH_FIRST, BREADTH_FIRST };
0true
core_src_main_java_com_orientechnologies_orient_core_command_traverse_OTraverse.java
57
public class AbstractContentService { private static final Log LOG = LogFactory.getLog(AbstractContentService.class); public <T, U> List<T> findItems(SandBox sandbox, Criteria c, Class<T> baseClass, Class<U> concreteClass, String originalIdProperty) { c.add(Restrictions.eq("archivedFlag", false)); if (sandbox == null) { // Query is hitting the production sandbox for a single site c.add(Restrictions.isNull("sandbox")); return (List<T>) c.list(); } if (SandBoxType.PRODUCTION.equals(sandbox.getSandBoxType())) { // Query is hitting the production sandbox for a multi-site c.add(Restrictions.eq("sandbox", sandbox)); return (List<T>) c.list(); } else { addSandboxCriteria(sandbox, c, concreteClass, originalIdProperty); return (List<T>) c.list(); } } public <T> Long countItems(SandBox sandbox, Criteria c, Class<T> concreteClass, String originalIdProperty) { c.add(Restrictions.eq("archivedFlag", false)); c.setProjection(Projections.rowCount()); if (sandbox == null) { // Query is hitting the production sandbox for a single site c.add(Restrictions.isNull("sandbox")); return (Long) c.uniqueResult(); } if (SandBoxType.PRODUCTION.equals(sandbox.getSandBoxType())) { // Query is hitting the production sandbox for a multi-site c.add(Restrictions.eq("sandbox", sandbox)); return (Long) c.uniqueResult(); } else { addSandboxCriteria(sandbox, c, concreteClass, originalIdProperty); return (Long) c.uniqueResult(); } } private <T> void addSandboxCriteria(SandBox sandbox, Criteria c, Class<T> type, String originalIdProperty) { Criterion originalSandboxExpression = Restrictions.eq("originalSandBox", sandbox); Criterion currentSandboxExpression = Restrictions.eq("sandbox", sandbox); Criterion userSandboxExpression = Restrictions.or(currentSandboxExpression, originalSandboxExpression); Criterion productionSandboxExpression = null; if (sandbox.getSite() == null || sandbox.getSite().getProductionSandbox() == null) { productionSandboxExpression = Restrictions.isNull("sandbox"); } else { productionSandboxExpression = Restrictions.eq("sandbox", sandbox.getSite().getProductionSandbox()); } if (productionSandboxExpression != null) { c.add(Restrictions.or(userSandboxExpression, productionSandboxExpression)); } else { c.add(userSandboxExpression); } // Build a sub-query to exclude items from production that are also in my sandbox. // (e.g. my sandbox always wins even if the items in my sandbox don't match the // current criteria.) // // This subquery prevents the following: // 1. Duplicate items (one for sbox, one for prod) // 2. Filter issues where the production item qualifies for the passed in criteria // but has been modified so that the item in the sandbox no longer does. // 3. Inverse of #2. DetachedCriteria existsInSboxCriteria = DetachedCriteria.forClass(type, "sboxItem"); existsInSboxCriteria.add(userSandboxExpression); existsInSboxCriteria.add(Restrictions.eq("archivedFlag", false)); String outerAlias = c.getAlias(); existsInSboxCriteria.add(Property.forName(outerAlias + ".id").eqProperty("sboxItem."+originalIdProperty)); existsInSboxCriteria.setProjection(Projections.id()); c.add(Subqueries.notExists(existsInSboxCriteria)); } }
0true
admin_broadleaf-contentmanagement-module_src_main_java_org_broadleafcommerce_cms_common_AbstractContentService.java
1,338
Future future = executorService.submit(new Callable<String>() { @Override public String call() { try { return "success"; } finally { latch1.countDown(); } } });
0true
hazelcast_src_test_java_com_hazelcast_executor_ExecutorServiceTest.java
15
public class TextCommandServiceImpl implements TextCommandService, TextCommandConstants { private final Node node; private final TextCommandProcessor[] textCommandProcessors = new TextCommandProcessor[100]; private final HazelcastInstance hazelcast; private final AtomicLong sets = new AtomicLong(); private final AtomicLong touches = new AtomicLong(); private final AtomicLong getHits = new AtomicLong(); private final AtomicLong getMisses = new AtomicLong(); private final AtomicLong deleteMisses = new AtomicLong(); private final AtomicLong deleteHits = new AtomicLong(); private final AtomicLong incrementHits = new AtomicLong(); private final AtomicLong incrementMisses = new AtomicLong(); private final AtomicLong decrementHits = new AtomicLong(); private final AtomicLong decrementMisses = new AtomicLong(); private final long startTime = Clock.currentTimeMillis(); private final ILogger logger; private volatile ResponseThreadRunnable responseThreadRunnable; private volatile boolean running = true; public TextCommandServiceImpl(Node node) { this.node = node; this.hazelcast = node.hazelcastInstance; this.logger = node.getLogger(this.getClass().getName()); textCommandProcessors[GET.getValue()] = new GetCommandProcessor(this, true); textCommandProcessors[PARTIAL_GET.getValue()] = new GetCommandProcessor(this, false); textCommandProcessors[SET.getValue()] = new SetCommandProcessor(this); textCommandProcessors[APPEND.getValue()] = new SetCommandProcessor(this); textCommandProcessors[PREPEND.getValue()] = new SetCommandProcessor(this); textCommandProcessors[ADD.getValue()] = new SetCommandProcessor(this); textCommandProcessors[REPLACE.getValue()] = new SetCommandProcessor(this); textCommandProcessors[GET_END.getValue()] = new NoOpCommandProcessor(this); textCommandProcessors[DELETE.getValue()] = new DeleteCommandProcessor(this); textCommandProcessors[QUIT.getValue()] = new SimpleCommandProcessor(this); textCommandProcessors[STATS.getValue()] = new StatsCommandProcessor(this); textCommandProcessors[UNKNOWN.getValue()] = new ErrorCommandProcessor(this); textCommandProcessors[VERSION.getValue()] = new VersionCommandProcessor(this); textCommandProcessors[TOUCH.getValue()] = new TouchCommandProcessor(this); textCommandProcessors[INCREMENT.getValue()] = new IncrementCommandProcessor(this); textCommandProcessors[DECREMENT.getValue()] = new IncrementCommandProcessor(this); textCommandProcessors[ERROR_CLIENT.getValue()] = new ErrorCommandProcessor(this); textCommandProcessors[ERROR_SERVER.getValue()] = new ErrorCommandProcessor(this); textCommandProcessors[HTTP_GET.getValue()] = new HttpGetCommandProcessor(this); textCommandProcessors[HTTP_POST.getValue()] = new HttpPostCommandProcessor(this); textCommandProcessors[HTTP_PUT.getValue()] = new HttpPostCommandProcessor(this); textCommandProcessors[HTTP_DELETE.getValue()] = new HttpDeleteCommandProcessor(this); textCommandProcessors[NO_OP.getValue()] = new NoOpCommandProcessor(this); } @Override public Node getNode() { return node; } @Override public byte[] toByteArray(Object value) { Data data = node.getSerializationService().toData(value); return data.getBuffer(); } @Override public Stats getStats() { Stats stats = new Stats(); stats.uptime = (int) ((Clock.currentTimeMillis() - startTime) / 1000); stats.cmd_get = getMisses.get() + getHits.get(); stats.cmd_set = sets.get(); stats.cmd_touch = touches.get(); stats.get_hits = getHits.get(); stats.get_misses = getMisses.get(); stats.delete_hits = deleteHits.get(); stats.delete_misses = deleteMisses.get(); 
stats.incr_hits = incrementHits.get(); stats.incr_misses = incrementMisses.get(); stats.decr_hits = decrementHits.get(); stats.decr_misses = decrementMisses.get(); stats.curr_connections = node.connectionManager.getCurrentClientConnections(); stats.total_connections = node.connectionManager.getAllTextConnections(); return stats; } @Override public long incrementDeleteHitCount(int inc) { return deleteHits.addAndGet(inc); } @Override public long incrementDeleteMissCount() { return deleteMisses.incrementAndGet(); } @Override public long incrementGetHitCount() { return getHits.incrementAndGet(); } @Override public long incrementGetMissCount() { return getMisses.incrementAndGet(); } @Override public long incrementSetCount() { return sets.incrementAndGet(); } @Override public long incrementIncHitCount() { return incrementHits.incrementAndGet(); } @Override public long incrementIncMissCount() { return incrementMisses.incrementAndGet(); } @Override public long incrementDecrHitCount() { return decrementHits.incrementAndGet(); } @Override public long incrementDecrMissCount() { return decrementMisses.incrementAndGet(); } @Override public long incrementTouchCount() { return touches.incrementAndGet(); } @Override public void processRequest(TextCommand command) { if (responseThreadRunnable == null) { synchronized (this) { if (responseThreadRunnable == null) { responseThreadRunnable = new ResponseThreadRunnable(); String threadNamePrefix = node.getThreadNamePrefix("ascii.service.response"); Thread thread = new Thread(node.threadGroup, responseThreadRunnable, threadNamePrefix); thread.start(); } } } node.nodeEngine.getExecutionService().execute("hz:text", new CommandExecutor(command)); } @Override public Object get(String mapName, String key) { return hazelcast.getMap(mapName).get(key); } @Override public int getAdjustedTTLSeconds(int ttl) { if (ttl <= MONTH_SECONDS) { return ttl; } else { return ttl - (int) (Clock.currentTimeMillis() / 1000); } } @Override public byte[] getByteArray(String mapName, String key) { Object value = hazelcast.getMap(mapName).get(key); byte[] result = null; if (value != null) { if (value instanceof RestValue) { RestValue restValue = (RestValue) value; result = restValue.getValue(); } else if (value instanceof byte[]) { result = (byte[]) value; } else { result = toByteArray(value); } } return result; } @Override public Object put(String mapName, String key, Object value) { return hazelcast.getMap(mapName).put(key, value); } @Override public Object put(String mapName, String key, Object value, int ttlSeconds) { return hazelcast.getMap(mapName).put(key, value, ttlSeconds, TimeUnit.SECONDS); } @Override public Object putIfAbsent(String mapName, String key, Object value, int ttlSeconds) { return hazelcast.getMap(mapName).putIfAbsent(key, value, ttlSeconds, TimeUnit.SECONDS); } @Override public Object replace(String mapName, String key, Object value) { return hazelcast.getMap(mapName).replace(key, value); } @Override public void lock(String mapName, String key) throws InterruptedException { if (!hazelcast.getMap(mapName).tryLock(key, 1, TimeUnit.MINUTES)) { throw new RuntimeException("Memcache client could not get the lock for map:" + mapName + " key:" + key + " in 1 minute"); } } @Override public void unlock(String mapName, String key) { hazelcast.getMap(mapName).unlock(key); } @Override public void deleteAll(String mapName) { final IMap<Object, Object> map = hazelcast.getMap(mapName); map.clear(); } @Override public Object delete(String mapName, String key) { return 
hazelcast.getMap(mapName).remove(key); } @Override public boolean offer(String queueName, Object value) { return hazelcast.getQueue(queueName).offer(value); } @Override public Object poll(String queueName, int seconds) { try { return hazelcast.getQueue(queueName).poll(seconds, TimeUnit.SECONDS); } catch (InterruptedException e) { return null; } } @Override public Object poll(String queueName) { return hazelcast.getQueue(queueName).poll(); } @Override public int size(String queueName) { return hazelcast.getQueue(queueName).size(); } @Override public void sendResponse(TextCommand textCommand) { if (!textCommand.shouldReply() || textCommand.getRequestId() == -1) { throw new RuntimeException("Shouldn't reply " + textCommand); } responseThreadRunnable.sendResponse(textCommand); } public void stop() { final ResponseThreadRunnable rtr = responseThreadRunnable; if (rtr != null) { rtr.stop(); } } class CommandExecutor implements Runnable { final TextCommand command; CommandExecutor(TextCommand command) { this.command = command; } @Override public void run() { try { TextCommandType type = command.getType(); TextCommandProcessor textCommandProcessor = textCommandProcessors[type.getValue()]; textCommandProcessor.handle(command); } catch (Throwable e) { logger.warning(e); } } } private class ResponseThreadRunnable implements Runnable { private final BlockingQueue<TextCommand> blockingQueue = new ArrayBlockingQueue<TextCommand>(200); private final Object stopObject = new Object(); @edu.umd.cs.findbugs.annotations.SuppressWarnings("RV_RETURN_VALUE_IGNORED_BAD_PRACTICE") public void sendResponse(TextCommand textCommand) { blockingQueue.offer(textCommand); } @Override public void run() { while (running) { try { TextCommand textCommand = blockingQueue.take(); if (TextCommandConstants.TextCommandType.STOP == textCommand.getType()) { synchronized (stopObject) { stopObject.notify(); } } else { SocketTextWriter socketTextWriter = textCommand.getSocketTextWriter(); socketTextWriter.enqueue(textCommand); } } catch (InterruptedException e) { return; } catch (OutOfMemoryError e) { OutOfMemoryErrorDispatcher.onOutOfMemory(e); throw e; } } } @edu.umd.cs.findbugs.annotations.SuppressWarnings("RV_RETURN_VALUE_IGNORED_BAD_PRACTICE") void stop() { running = false; synchronized (stopObject) { try { blockingQueue.offer(new AbstractTextCommand(TextCommandConstants.TextCommandType.STOP) { @Override public boolean readFrom(ByteBuffer cb) { return true; } @Override public boolean writeTo(ByteBuffer bb) { return true; } }); //noinspection WaitNotInLoop stopObject.wait(1000); } catch (Exception ignored) { } } } } }
0true
hazelcast_src_main_java_com_hazelcast_ascii_TextCommandServiceImpl.java
1,081
public abstract class OSQLFilterItemAbstract implements OSQLFilterItem { protected List<OPair<OSQLMethod, Object[]>> operationsChain = null; public OSQLFilterItemAbstract(final OBaseParser iQueryToParse, final String iText) { final List<String> parts = OStringSerializerHelper.smartSplit(iText, '.'); setRoot(iQueryToParse, parts.get(0)); if (parts.size() > 1) { operationsChain = new ArrayList<OPair<OSQLMethod, Object[]>>(); // GET ALL SPECIAL OPERATIONS for (int i = 1; i < parts.size(); ++i) { final String part = parts.get(i); final int pindex = part.indexOf('('); if (pindex > -1) { final String methodName = part.substring(0, pindex).trim().toLowerCase(Locale.ENGLISH); OSQLMethod method = OSQLHelper.getMethodByName(methodName); final Object[] arguments; if (method != null) { if (method.getMaxParams() == -1 || method.getMaxParams() > 0) { arguments = OStringSerializerHelper.getParameters(part).toArray(); if (arguments.length < method.getMinParams() || (method.getMaxParams() > -1 && arguments.length > method.getMaxParams())) throw new OQueryParsingException(iQueryToParse.parserText, "Syntax error: field operator '" + method.getName() + "' needs " + (method.getMinParams() == method.getMaxParams() ? method.getMinParams() : method.getMinParams() + "-" + method.getMaxParams()) + " argument(s) while has been received " + arguments.length, 0); } else arguments = null; } else { // LOOK FOR FUNCTION final OSQLFunction f = OSQLEngine.getInstance().getFunction(methodName); if (f == null) // ERROR: METHOD/FUNCTION NOT FOUND OR MISPELLED throw new OQueryParsingException(iQueryToParse.parserText, "Syntax error: function or field operator not recognized between the supported ones: " + Arrays.toString(OSQLHelper.getAllMethodNames()), 0); if (f.getMaxParams() == -1 || f.getMaxParams() > 0) { arguments = OStringSerializerHelper.getParameters(part).toArray(); if (arguments.length < f.getMinParams() || (f.getMaxParams() > -1 && arguments.length > f.getMaxParams())) throw new OQueryParsingException(iQueryToParse.parserText, "Syntax error: function '" + f.getName() + "' needs " + (f.getMinParams() == f.getMaxParams() ? 
f.getMinParams() : f.getMinParams() + "-" + f.getMaxParams()) + " argument(s) while has been received " + arguments.length, 0); } else arguments = null; method = new OSQLMethodFunctionDelegate(f); } // SPECIAL OPERATION FOUND: ADD IT IN TO THE CHAIN operationsChain.add(new OPair<OSQLMethod, Object[]>(method, arguments)); } else { operationsChain.add(new OPair<OSQLMethod, Object[]>(OSQLHelper.getMethodByName(OSQLMethodField.NAME), new Object[] { part })); } } } } public abstract String getRoot(); protected abstract void setRoot(OBaseParser iQueryToParse, final String iRoot); public Object transformValue(final OIdentifiable iRecord, final OCommandContext iContext, Object ioResult) { if (ioResult != null && operationsChain != null) { // APPLY OPERATIONS FOLLOWING THE STACK ORDER OSQLMethod operator = null; try { for (OPair<OSQLMethod, Object[]> op : operationsChain) { operator = op.getKey(); // DON'T PASS THE CURRENT RECORD TO FORCE EVALUATING TEMPORARY RESULT ioResult = operator.execute(iRecord, iContext, ioResult, op.getValue()); } } catch (ParseException e) { OLogManager.instance().exception("Error on conversion of value '%s' using field operator %s", e, OCommandExecutionException.class, ioResult, operator.getName()); } } return ioResult; } public boolean hasChainOperators() { return operationsChain != null; } @Override public String toString() { final StringBuilder buffer = new StringBuilder(); final String root = getRoot(); if (root != null) buffer.append(root); if (operationsChain != null) { for (OPair<OSQLMethod, Object[]> op : operationsChain) { buffer.append('.'); buffer.append(op.getKey()); if (op.getValue() != null) { final Object[] values = op.getValue(); buffer.append('('); int i = 0; for (Object v : values) { if (i++ > 0) buffer.append(','); buffer.append(v); } buffer.append(')'); } } } return buffer.toString(); } protected OCollate getCollateForField(final ODocument doc, final String iFieldName) { if (doc.getSchemaClass() != null) { final OProperty p = doc.getSchemaClass().getProperty(iFieldName); if (p != null) return p.getCollate(); } return null; } }
1no label
core_src_main_java_com_orientechnologies_orient_core_sql_filter_OSQLFilterItemAbstract.java
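Editor's note: OSQLFilterItemAbstract is what splits a projection such as "name.toUpperCase()" into an operations chain. A hedged sketch of a query that exercises that parsing; class names follow the OrientDB 1.x/2.x document API and the "Person" class is made up for illustration.
// Hedged sketch: issuing a query whose projection uses a chained field method.
import com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx;
import com.orientechnologies.orient.core.record.impl.ODocument;
import com.orientechnologies.orient.core.sql.query.OSQLSynchQuery;
import java.util.List;

public class ChainedMethodQuery {
    public static void main(String[] args) {
        ODatabaseDocumentTx db = new ODatabaseDocumentTx("memory:demo").create();
        try {
            db.getMetadata().getSchema().createClass("Person");   // hypothetical class for the example
            ODocument doc = new ODocument("Person");
            doc.field("name", "alice");
            doc.save();
            List<ODocument> result = db.query(
                new OSQLSynchQuery<ODocument>("select name.toUpperCase() as upperName from Person"));
            for (ODocument row : result) {
                String upper = row.field("upperName");             // "ALICE"
                System.out.println(upper);
            }
        } finally {
            db.drop();
        }
    }
}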
790
schema = new OSchemaProxy(database.getStorage().getResource(OSchema.class.getSimpleName(), new Callable<OSchemaShared>() { public OSchemaShared call() { final OSchemaShared instance = new OSchemaShared(schemaClusterId); if (iLoad) instance.load(); return instance; } }), database);
0true
core_src_main_java_com_orientechnologies_orient_core_metadata_OMetadataDefault.java
1,756
public class EventData implements DataSerializable { protected String source; protected String mapName; protected Address caller; protected Data dataKey; protected Data dataNewValue; protected Data dataOldValue; protected int eventType; public EventData() { } public EventData(String source, String mapName, Address caller, Data dataKey, Data dataNewValue, Data dataOldValue, int eventType) { this.source = source; this.mapName = mapName; this.caller = caller; this.dataKey = dataKey; this.dataNewValue = dataNewValue; this.dataOldValue = dataOldValue; this.eventType = eventType; } public String getSource() { return source; } public String getMapName() { return mapName; } public Address getCaller() { return caller; } public Data getDataKey() { return dataKey; } public Data getDataNewValue() { return dataNewValue; } public Data getDataOldValue() { return dataOldValue; } public int getEventType() { return eventType; } public void writeData(ObjectDataOutput out) throws IOException { out.writeUTF(source); out.writeUTF(mapName); out.writeObject(caller); dataKey.writeData(out); IOUtil.writeNullableData(out, dataNewValue); IOUtil.writeNullableData(out, dataOldValue); out.writeInt(eventType); } public void readData(ObjectDataInput in) throws IOException { source = in.readUTF(); mapName = in.readUTF(); caller = in.readObject(); dataKey = IOUtil.readData(in); dataNewValue = IOUtil.readNullableData(in); dataOldValue = IOUtil.readNullableData(in); eventType = in.readInt(); } public Object cloneWithoutValues() { return new EventData(source, mapName, caller, dataKey, null, null, eventType); } }
0true
hazelcast_src_main_java_com_hazelcast_map_EventData.java
55
public class HttpGetCommandParser implements CommandParser { public TextCommand parser(SocketTextReader socketTextReader, String cmd, int space) { StringTokenizer st = new StringTokenizer(cmd); st.nextToken(); String uri = null; if (st.hasMoreTokens()) { uri = st.nextToken(); } else { return new ErrorCommand(ERROR_CLIENT); } return new HttpGetCommand(uri); } }
0true
hazelcast_src_main_java_com_hazelcast_ascii_rest_HttpGetCommandParser.java
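Editor's note: a plain-JDK sketch of the tokenization HttpGetCommandParser relies on, showing that the first token is the verb, the second is the URI, and a missing URI is the error case. The request line is a made-up example.
// Standalone illustration of the StringTokenizer split used by the parser above.
import java.util.StringTokenizer;

public class GetLineTokenizerDemo {
    public static void main(String[] args) {
        String requestLine = "GET /hazelcast/rest/maps/myMap/someKey HTTP/1.1";
        StringTokenizer st = new StringTokenizer(requestLine);
        st.nextToken();                                    // skip the verb, as the parser does
        String uri = st.hasMoreTokens() ? st.nextToken() : null;
        System.out.println(uri == null ? "error: no URI" : "uri = " + uri);
    }
}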
6
public class NoOpCommandProcessor extends AbstractTextCommandProcessor<NoOpCommand> { public NoOpCommandProcessor(TextCommandService textCommandService) { super(textCommandService); } public void handle(NoOpCommand command) { textCommandService.sendResponse(command); } public void handleRejection(NoOpCommand command) { handle(command); } }
0true
hazelcast_src_main_java_com_hazelcast_ascii_NoOpCommandProcessor.java
823
public class GetAndAlterOperation extends AbstractAlterOperation { public GetAndAlterOperation() { } public GetAndAlterOperation(String name, IFunction<Long, Long> function) { super(name, function); } @Override public int getId() { return AtomicLongDataSerializerHook.GET_AND_ALTER; } @Override public void run() throws Exception { LongWrapper number = getNumber(); long input = number.get(); response = input; long output = function.apply(input); shouldBackup = input != output; if (shouldBackup) { backup = output; number.set(output); } } }
0true
hazelcast_src_main_java_com_hazelcast_concurrent_atomiclong_operations_GetAndAlterOperation.java
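Editor's note: GetAndAlterOperation is run on the partition owner when IAtomicLong.getAndAlter(...) is called. A hedged usage sketch, assuming a Hazelcast 3.x API where IAtomicLong and IFunction live in com.hazelcast.core.
// Hedged sketch: client-side call that ends up executing the operation above.
import com.hazelcast.core.Hazelcast;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.core.IAtomicLong;
import com.hazelcast.core.IFunction;

public class GetAndAlterDemo {
    public static void main(String[] args) {
        HazelcastInstance hz = Hazelcast.newHazelcastInstance();
        IAtomicLong counter = hz.getAtomicLong("counter");
        counter.set(21);
        long previous = counter.getAndAlter(new IFunction<Long, Long>() {
            @Override
            public Long apply(Long input) {
                return input * 2; // stored only if it differs from the input, per the operation above
            }
        });
        System.out.println("previous=" + previous + " current=" + counter.get()); // previous=21 current=42
        hz.shutdown();
    }
}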
1,413
public class MetaDataIndexTemplateService extends AbstractComponent { private final ClusterService clusterService; @Inject public MetaDataIndexTemplateService(Settings settings, ClusterService clusterService) { super(settings); this.clusterService = clusterService; } public void removeTemplates(final RemoveRequest request, final RemoveListener listener) { clusterService.submitStateUpdateTask("remove-index-template [" + request.name + "]", Priority.URGENT, new TimeoutClusterStateUpdateTask() { @Override public TimeValue timeout() { return request.masterTimeout; } @Override public void onFailure(String source, Throwable t) { listener.onFailure(t); } @Override public ClusterState execute(ClusterState currentState) { Set<String> templateNames = Sets.newHashSet(); for (ObjectCursor<String> cursor : currentState.metaData().templates().keys()) { String templateName = cursor.value; if (Regex.simpleMatch(request.name, templateName)) { templateNames.add(templateName); } } if (templateNames.isEmpty()) { // if its a match all pattern, and no templates are found (we have none), don't // fail with index missing... if (Regex.isMatchAllPattern(request.name)) { return currentState; } throw new IndexTemplateMissingException(request.name); } MetaData.Builder metaData = MetaData.builder(currentState.metaData()); for (String templateName : templateNames) { metaData.removeTemplate(templateName); } return ClusterState.builder(currentState).metaData(metaData).build(); } @Override public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) { listener.onResponse(new RemoveResponse(true)); } }); } public void putTemplate(final PutRequest request, final PutListener listener) { ImmutableSettings.Builder updatedSettingsBuilder = ImmutableSettings.settingsBuilder(); for (Map.Entry<String, String> entry : request.settings.getAsMap().entrySet()) { if (!entry.getKey().startsWith("index.")) { updatedSettingsBuilder.put("index." 
+ entry.getKey(), entry.getValue()); } else { updatedSettingsBuilder.put(entry.getKey(), entry.getValue()); } } request.settings(updatedSettingsBuilder.build()); if (request.name == null) { listener.onFailure(new ElasticsearchIllegalArgumentException("index_template must provide a name")); return; } if (request.template == null) { listener.onFailure(new ElasticsearchIllegalArgumentException("index_template must provide a template")); return; } try { validate(request); } catch (Throwable e) { listener.onFailure(e); return; } IndexTemplateMetaData.Builder templateBuilder; try { templateBuilder = IndexTemplateMetaData.builder(request.name); templateBuilder.order(request.order); templateBuilder.template(request.template); templateBuilder.settings(request.settings); for (Map.Entry<String, String> entry : request.mappings.entrySet()) { templateBuilder.putMapping(entry.getKey(), entry.getValue()); } for (Map.Entry<String, IndexMetaData.Custom> entry : request.customs.entrySet()) { templateBuilder.putCustom(entry.getKey(), entry.getValue()); } } catch (Throwable e) { listener.onFailure(e); return; } final IndexTemplateMetaData template = templateBuilder.build(); clusterService.submitStateUpdateTask("create-index-template [" + request.name + "], cause [" + request.cause + "]", Priority.URGENT, new TimeoutClusterStateUpdateTask() { @Override public TimeValue timeout() { return request.masterTimeout; } @Override public void onFailure(String source, Throwable t) { listener.onFailure(t); } @Override public ClusterState execute(ClusterState currentState) { if (request.create && currentState.metaData().templates().containsKey(request.name)) { throw new IndexTemplateAlreadyExistsException(request.name); } MetaData.Builder builder = MetaData.builder(currentState.metaData()).put(template); return ClusterState.builder(currentState).metaData(builder).build(); } @Override public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) { listener.onResponse(new PutResponse(true, template)); } }); } private void validate(PutRequest request) throws ElasticsearchException { if (request.name.contains(" ")) { throw new InvalidIndexTemplateException(request.name, "name must not contain a space"); } if (request.name.contains(",")) { throw new InvalidIndexTemplateException(request.name, "name must not contain a ','"); } if (request.name.contains("#")) { throw new InvalidIndexTemplateException(request.name, "name must not contain a '#'"); } if (request.name.startsWith("_")) { throw new InvalidIndexTemplateException(request.name, "name must not start with '_'"); } if (!request.name.toLowerCase(Locale.ROOT).equals(request.name)) { throw new InvalidIndexTemplateException(request.name, "name must be lower cased"); } if (request.template.contains(" ")) { throw new InvalidIndexTemplateException(request.name, "template must not contain a space"); } if (request.template.contains(",")) { throw new InvalidIndexTemplateException(request.name, "template must not contain a ','"); } if (request.template.contains("#")) { throw new InvalidIndexTemplateException(request.name, "template must not contain a '#'"); } if (request.template.startsWith("_")) { throw new InvalidIndexTemplateException(request.name, "template must not start with '_'"); } if (!Strings.validFileNameExcludingAstrix(request.template)) { throw new InvalidIndexTemplateException(request.name, "template must not container the following characters " + Strings.INVALID_FILENAME_CHARS); } } public static interface PutListener { void 
onResponse(PutResponse response); void onFailure(Throwable t); } public static class PutRequest { final String name; final String cause; boolean create; int order; String template; Settings settings = ImmutableSettings.Builder.EMPTY_SETTINGS; Map<String, String> mappings = Maps.newHashMap(); Map<String, IndexMetaData.Custom> customs = Maps.newHashMap(); TimeValue masterTimeout = MasterNodeOperationRequest.DEFAULT_MASTER_NODE_TIMEOUT; public PutRequest(String cause, String name) { this.cause = cause; this.name = name; } public PutRequest order(int order) { this.order = order; return this; } public PutRequest template(String template) { this.template = template; return this; } public PutRequest create(boolean create) { this.create = create; return this; } public PutRequest settings(Settings settings) { this.settings = settings; return this; } public PutRequest mappings(Map<String, String> mappings) { this.mappings.putAll(mappings); return this; } public PutRequest customs(Map<String, IndexMetaData.Custom> customs) { this.customs.putAll(customs); return this; } public PutRequest putMapping(String mappingType, String mappingSource) { mappings.put(mappingType, mappingSource); return this; } public PutRequest masterTimeout(TimeValue masterTimeout) { this.masterTimeout = masterTimeout; return this; } } public static class PutResponse { private final boolean acknowledged; private final IndexTemplateMetaData template; public PutResponse(boolean acknowledged, IndexTemplateMetaData template) { this.acknowledged = acknowledged; this.template = template; } public boolean acknowledged() { return acknowledged; } public IndexTemplateMetaData template() { return template; } } public static class RemoveRequest { final String name; TimeValue masterTimeout = MasterNodeOperationRequest.DEFAULT_MASTER_NODE_TIMEOUT; public RemoveRequest(String name) { this.name = name; } public RemoveRequest masterTimeout(TimeValue masterTimeout) { this.masterTimeout = masterTimeout; return this; } } public static class RemoveResponse { private final boolean acknowledged; public RemoveResponse(boolean acknowledged) { this.acknowledged = acknowledged; } public boolean acknowledged() { return acknowledged; } } public static interface RemoveListener { void onResponse(RemoveResponse response); void onFailure(Throwable t); } }
0true
src_main_java_org_elasticsearch_cluster_metadata_MetaDataIndexTemplateService.java
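Editor's note: a sketch built directly from the PutRequest/PutListener inner classes shown above, assembling a request and handing it to putTemplate(...). The template pattern, settings keys, and mapping JSON are made up; the builder methods and validation rules come from the source above.
// Sketch: constructing a PutRequest that passes the validate(...) rules shown above.
import org.elasticsearch.cluster.metadata.MetaDataIndexTemplateService;
import org.elasticsearch.cluster.metadata.MetaDataIndexTemplateService.PutRequest;
import org.elasticsearch.cluster.metadata.MetaDataIndexTemplateService.PutResponse;
import org.elasticsearch.common.settings.ImmutableSettings;

public class PutTemplateSketch {
    static void putLogsTemplate(MetaDataIndexTemplateService templateService) {
        PutRequest request = new PutRequest("api", "logs_template")
                .template("logs-*")                        // no spaces, ',', '#', or leading '_'
                .order(1)
                .settings(ImmutableSettings.settingsBuilder()
                        .put("number_of_shards", 1)        // putTemplate() prefixes this with "index."
                        .build())
                .putMapping("event", "{\"event\":{\"properties\":{}}}");
        templateService.putTemplate(request, new MetaDataIndexTemplateService.PutListener() {
            @Override
            public void onResponse(PutResponse response) {
                System.out.println("acknowledged=" + response.acknowledged());
            }
            @Override
            public void onFailure(Throwable t) {
                t.printStackTrace();
            }
        });
    }
}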
1,622
public class OUpdateRecordTask extends OAbstractRecordReplicatedTask { private static final long serialVersionUID = 1L; protected byte[] content; protected byte recordType; public OUpdateRecordTask() { } public OUpdateRecordTask(final ORecordId iRid, final byte[] iContent, final ORecordVersion iVersion, final byte iRecordType) { super(iRid, iVersion); content = iContent; recordType = iRecordType; } @Override public Object execute(final OServer iServer, ODistributedServerManager iManager, final ODatabaseDocumentTx database) throws Exception { ODistributedServerLog.debug(this, iManager.getLocalNodeName(), getNodeSource(), DIRECTION.IN, "updating record %s/%s v.%s", database.getName(), rid.toString(), version.toString()); final ORecordInternal<?> loadedRecord = rid.getRecord(); if (loadedRecord == null) throw new ORecordNotFoundException("Record " + rid + " was not found on update"); final ORecordInternal<?> record = Orient.instance().getRecordFactoryManager().newInstance(recordType); record.fill(rid, version, content, true); if (loadedRecord instanceof ODocument) { ((ODocument) loadedRecord).merge((ODocument) record, false, false); database.save(loadedRecord); } else database.save(record); ODistributedServerLog.debug(this, iManager.getLocalNodeName(), getNodeSource(), DIRECTION.IN, "+-> updated record %s/%s v.%s", database.getName(), rid.toString(), record.getRecordVersion().toString()); return record.getRecordVersion(); } @Override public QUORUM_TYPE getQuorumType() { return QUORUM_TYPE.WRITE; } @Override public OFixUpdateRecordTask getFixTask(ODistributedRequest iRequest, ODistributedResponse iBadResponse, final ODistributedResponse iGoodResponse) { return new OFixUpdateRecordTask(rid, ((OUpdateRecordTask) iRequest.getTask()).content, version); } @Override public void writeExternal(final ObjectOutput out) throws IOException { out.writeUTF(rid.toString()); out.writeInt(content.length); out.write(content); if (version == null) version = OVersionFactory.instance().createUntrackedVersion(); version.getSerializer().writeTo(out, version); out.write(recordType); } @Override public void readExternal(final ObjectInput in) throws IOException, ClassNotFoundException { rid = new ORecordId(in.readUTF()); final int contentSize = in.readInt(); content = new byte[contentSize]; in.readFully(content); if (version == null) version = OVersionFactory.instance().createUntrackedVersion(); version.getSerializer().readFrom(in, version); recordType = in.readByte(); } @Override public String getName() { return "record_update"; } @Override public String toString() { if (version.isTemporary()) return getName() + "(" + rid + " v." + (version.getCounter() - Integer.MIN_VALUE) + " realV." + version + ")"; else return super.toString(); } }
0true
server_src_main_java_com_orientechnologies_orient_server_distributed_task_OUpdateRecordTask.java
820
public interface OfferInfo extends Serializable { public Long getId(); public void setId(Long id); public Map<String, String> getFieldValues(); public void setFieldValues(Map<String, String> fieldValues); }
0true
core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_offer_domain_OfferInfo.java
1,747
public enum GeoDistance { /** * Calculates distance as points on a plane. Faster, but less accurate than {@link #ARC}. */ PLANE() { @Override public double calculate(double sourceLatitude, double sourceLongitude, double targetLatitude, double targetLongitude, DistanceUnit unit) { double px = targetLongitude - sourceLongitude; double py = targetLatitude - sourceLatitude; return Math.sqrt(px * px + py * py) * unit.getDistancePerDegree(); } @Override public double normalize(double distance, DistanceUnit unit) { return distance; } @Override public FixedSourceDistance fixedSourceDistance(double sourceLatitude, double sourceLongitude, DistanceUnit unit) { return new PlaneFixedSourceDistance(sourceLatitude, sourceLongitude, unit); } }, /** * Calculates distance factor. */ FACTOR() { @Override public double calculate(double sourceLatitude, double sourceLongitude, double targetLatitude, double targetLongitude, DistanceUnit unit) { double longitudeDifference = targetLongitude - sourceLongitude; double a = Math.toRadians(90D - sourceLatitude); double c = Math.toRadians(90D - targetLatitude); return (Math.cos(a) * Math.cos(c)) + (Math.sin(a) * Math.sin(c) * Math.cos(Math.toRadians(longitudeDifference))); } @Override public double normalize(double distance, DistanceUnit unit) { return Math.cos(distance / unit.getEarthRadius()); } @Override public FixedSourceDistance fixedSourceDistance(double sourceLatitude, double sourceLongitude, DistanceUnit unit) { return new FactorFixedSourceDistance(sourceLatitude, sourceLongitude, unit); } }, /** * Calculates distance as points on a globe. */ ARC() { @Override public double calculate(double sourceLatitude, double sourceLongitude, double targetLatitude, double targetLongitude, DistanceUnit unit) { double x1 = sourceLatitude * Math.PI / 180D; double x2 = targetLatitude * Math.PI / 180D; double h1 = (1D - Math.cos(x1 - x2)) / 2D; double h2 = (1D - Math.cos((sourceLongitude - targetLongitude) * Math.PI / 180D)) / 2D; double h = h1 + Math.cos(x1) * Math.cos(x2) * h2; return unit.fromMeters(GeoUtils.EARTH_MEAN_RADIUS * 2D * Math.asin(Math.min(1, Math.sqrt(h)))); } @Override public double normalize(double distance, DistanceUnit unit) { return distance; } @Override public FixedSourceDistance fixedSourceDistance(double sourceLatitude, double sourceLongitude, DistanceUnit unit) { return new ArcFixedSourceDistance(sourceLatitude, sourceLongitude, unit); } }, /** * Calculates distance as points on a globe in a sloppy way. Close to the pole areas the accuracy * of this function decreases. */ SLOPPY_ARC() { @Override public double normalize(double distance, DistanceUnit unit) { return distance; } @Override public double calculate(double sourceLatitude, double sourceLongitude, double targetLatitude, double targetLongitude, DistanceUnit unit) { return unit.fromMeters(SloppyMath.haversin(sourceLatitude, sourceLongitude, targetLatitude, targetLongitude) * 1000.0); } @Override public FixedSourceDistance fixedSourceDistance(double sourceLatitude, double sourceLongitude, DistanceUnit unit) { return new SloppyArcFixedSourceDistance(sourceLatitude, sourceLongitude, unit); } }; /** * Default {@link GeoDistance} function. This method should be used, If no specific function has been selected. 
* This is an alias for <code>SLOPPY_ARC</code> */ public static final GeoDistance DEFAULT = SLOPPY_ARC; public abstract double normalize(double distance, DistanceUnit unit); public abstract double calculate(double sourceLatitude, double sourceLongitude, double targetLatitude, double targetLongitude, DistanceUnit unit); public abstract FixedSourceDistance fixedSourceDistance(double sourceLatitude, double sourceLongitude, DistanceUnit unit); private static final double MIN_LAT = Math.toRadians(-90d); // -PI/2 private static final double MAX_LAT = Math.toRadians(90d); // PI/2 private static final double MIN_LON = Math.toRadians(-180d); // -PI private static final double MAX_LON = Math.toRadians(180d); // PI public static DistanceBoundingCheck distanceBoundingCheck(double sourceLatitude, double sourceLongitude, double distance, DistanceUnit unit) { // angular distance in radians on a great circle double radDist = distance / unit.getEarthRadius(); double radLat = Math.toRadians(sourceLatitude); double radLon = Math.toRadians(sourceLongitude); double minLat = radLat - radDist; double maxLat = radLat + radDist; double minLon, maxLon; if (minLat > MIN_LAT && maxLat < MAX_LAT) { double deltaLon = Math.asin(Math.sin(radDist) / Math.cos(radLat)); minLon = radLon - deltaLon; if (minLon < MIN_LON) minLon += 2d * Math.PI; maxLon = radLon + deltaLon; if (maxLon > MAX_LON) maxLon -= 2d * Math.PI; } else { // a pole is within the distance minLat = Math.max(minLat, MIN_LAT); maxLat = Math.min(maxLat, MAX_LAT); minLon = MIN_LON; maxLon = MAX_LON; } GeoPoint topLeft = new GeoPoint(Math.toDegrees(maxLat), Math.toDegrees(minLon)); GeoPoint bottomRight = new GeoPoint(Math.toDegrees(minLat), Math.toDegrees(maxLon)); if (minLon > maxLon) { return new Meridian180DistanceBoundingCheck(topLeft, bottomRight); } return new SimpleDistanceBoundingCheck(topLeft, bottomRight); } /** * Get a {@link GeoDistance} according to a given name. 
Valid values are * * <ul> * <li><b>plane</b> for <code>GeoDistance.PLANE</code></li> * <li><b>sloppy_arc</b> for <code>GeoDistance.SLOPPY_ARC</code></li> * <li><b>factor</b> for <code>GeoDistance.FACTOR</code></li> * <li><b>arc</b> for <code>GeoDistance.ARC</code></li> * </ul> * * @param name name of the {@link GeoDistance} * @return a {@link GeoDistance} */ public static GeoDistance fromString(String name) { name = name.toLowerCase(Locale.ROOT); if ("plane".equals(name)) { return PLANE; } else if ("arc".equals(name)) { return ARC; } else if ("sloppy_arc".equals(name)) { return SLOPPY_ARC; } else if ("factor".equals(name)) { return FACTOR; } throw new ElasticsearchIllegalArgumentException("No geo distance for [" + name + "]"); } public static interface FixedSourceDistance { double calculate(double targetLatitude, double targetLongitude); } public static interface DistanceBoundingCheck { boolean isWithin(double targetLatitude, double targetLongitude); GeoPoint topLeft(); GeoPoint bottomRight(); } public static AlwaysDistanceBoundingCheck ALWAYS_INSTANCE = new AlwaysDistanceBoundingCheck(); private static class AlwaysDistanceBoundingCheck implements DistanceBoundingCheck { @Override public boolean isWithin(double targetLatitude, double targetLongitude) { return true; } @Override public GeoPoint topLeft() { return null; } @Override public GeoPoint bottomRight() { return null; } } public static class Meridian180DistanceBoundingCheck implements DistanceBoundingCheck { private final GeoPoint topLeft; private final GeoPoint bottomRight; public Meridian180DistanceBoundingCheck(GeoPoint topLeft, GeoPoint bottomRight) { this.topLeft = topLeft; this.bottomRight = bottomRight; } @Override public boolean isWithin(double targetLatitude, double targetLongitude) { return (targetLatitude >= bottomRight.lat() && targetLatitude <= topLeft.lat()) && (targetLongitude >= topLeft.lon() || targetLongitude <= bottomRight.lon()); } @Override public GeoPoint topLeft() { return topLeft; } @Override public GeoPoint bottomRight() { return bottomRight; } } public static class SimpleDistanceBoundingCheck implements DistanceBoundingCheck { private final GeoPoint topLeft; private final GeoPoint bottomRight; public SimpleDistanceBoundingCheck(GeoPoint topLeft, GeoPoint bottomRight) { this.topLeft = topLeft; this.bottomRight = bottomRight; } @Override public boolean isWithin(double targetLatitude, double targetLongitude) { return (targetLatitude >= bottomRight.lat() && targetLatitude <= topLeft.lat()) && (targetLongitude >= topLeft.lon() && targetLongitude <= bottomRight.lon()); } @Override public GeoPoint topLeft() { return topLeft; } @Override public GeoPoint bottomRight() { return bottomRight; } } public static class PlaneFixedSourceDistance implements FixedSourceDistance { private final double sourceLatitude; private final double sourceLongitude; private final double distancePerDegree; public PlaneFixedSourceDistance(double sourceLatitude, double sourceLongitude, DistanceUnit unit) { this.sourceLatitude = sourceLatitude; this.sourceLongitude = sourceLongitude; this.distancePerDegree = unit.getDistancePerDegree(); } @Override public double calculate(double targetLatitude, double targetLongitude) { double px = targetLongitude - sourceLongitude; double py = targetLatitude - sourceLatitude; return Math.sqrt(px * px + py * py) * distancePerDegree; } } public static class FactorFixedSourceDistance implements FixedSourceDistance { private final double sourceLongitude; private final double a; private final double sinA; private 
final double cosA; public FactorFixedSourceDistance(double sourceLatitude, double sourceLongitude, DistanceUnit unit) { this.sourceLongitude = sourceLongitude; this.a = Math.toRadians(90D - sourceLatitude); this.sinA = Math.sin(a); this.cosA = Math.cos(a); } @Override public double calculate(double targetLatitude, double targetLongitude) { double longitudeDifference = targetLongitude - sourceLongitude; double c = Math.toRadians(90D - targetLatitude); return (cosA * Math.cos(c)) + (sinA * Math.sin(c) * Math.cos(Math.toRadians(longitudeDifference))); } } /** * Basic implementation of {@link FixedSourceDistance}. This class keeps the basic parameters for a distance * functions based on a fixed source. Namely latitude, longitude and unit. */ public static abstract class FixedSourceDistanceBase implements FixedSourceDistance { protected final double sourceLatitude; protected final double sourceLongitude; protected final DistanceUnit unit; public FixedSourceDistanceBase(double sourceLatitude, double sourceLongitude, DistanceUnit unit) { this.sourceLatitude = sourceLatitude; this.sourceLongitude = sourceLongitude; this.unit = unit; } } public static class ArcFixedSourceDistance extends FixedSourceDistanceBase { public ArcFixedSourceDistance(double sourceLatitude, double sourceLongitude, DistanceUnit unit) { super(sourceLatitude, sourceLongitude, unit); } @Override public double calculate(double targetLatitude, double targetLongitude) { return ARC.calculate(sourceLatitude, sourceLongitude, targetLatitude, targetLongitude, unit); } } public static class SloppyArcFixedSourceDistance extends FixedSourceDistanceBase { public SloppyArcFixedSourceDistance(double sourceLatitude, double sourceLongitude, DistanceUnit unit) { super(sourceLatitude, sourceLongitude, unit); } @Override public double calculate(double targetLatitude, double targetLongitude) { return SLOPPY_ARC.calculate(sourceLatitude, sourceLongitude, targetLatitude, targetLongitude, unit); } } }
0true
src_main_java_org_elasticsearch_common_geo_GeoDistance.java
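Editor's note: a hedged usage sketch of the enum above, comparing the distance implementations for one pair of coordinates. DistanceUnit is assumed to be org.elasticsearch.common.unit.DistanceUnit; the coordinates are arbitrary.
// Sketch: calling calculate(...) on the different GeoDistance implementations.
import org.elasticsearch.common.geo.GeoDistance;
import org.elasticsearch.common.unit.DistanceUnit;

public class GeoDistanceDemo {
    public static void main(String[] args) {
        double berlinLat = 52.52, berlinLon = 13.405;
        double parisLat = 48.8566, parisLon = 2.3522;
        for (GeoDistance gd : new GeoDistance[]{GeoDistance.PLANE, GeoDistance.ARC, GeoDistance.SLOPPY_ARC}) {
            double km = gd.calculate(berlinLat, berlinLon, parisLat, parisLon, DistanceUnit.KILOMETERS);
            System.out.println(gd + ": " + km + " km");
        }
        // fromString(...) resolves the names used in the query DSL ("arc", "plane", "sloppy_arc", "factor").
        System.out.println(GeoDistance.fromString("sloppy_arc") == GeoDistance.DEFAULT); // true
    }
}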
1,704
runnable = new Runnable() { public void run() { map.lock(null, 1, TimeUnit.SECONDS); } };
0true
hazelcast_src_test_java_com_hazelcast_map_BasicMapTest.java
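Editor's note: the test fragment above locks with a null key to probe the error path. A hedged sketch of the ordinary IMap lock pattern it is exercising: lock a real key with a lease, mutate under the lock, and always unlock in a finally block. Map and key names are made up.
// Hedged sketch: normal IMap.lock(key, leaseTime, unit) / unlock(key) usage.
import com.hazelcast.core.Hazelcast;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.core.IMap;
import java.util.concurrent.TimeUnit;

public class MapLockDemo {
    public static void main(String[] args) {
        HazelcastInstance hz = Hazelcast.newHazelcastInstance();
        IMap<String, Integer> map = hz.getMap("counters");
        map.lock("hits", 10, TimeUnit.SECONDS); // lease expires even if unlock is never reached
        try {
            Integer current = map.get("hits");
            map.put("hits", current == null ? 1 : current + 1);
        } finally {
            map.unlock("hits");
        }
        hz.shutdown();
    }
}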
270
private final class KeyRangeIterator<T extends Token<?>> extends AbstractBufferedRowIter<T> { public KeyRangeIterator(IPartitioner<? extends T> partitioner, SliceQuery columnSlice, int pageSize, ByteBuffer startKey, ByteBuffer endKey) throws BackendException { super(partitioner, columnSlice, pageSize, partitioner.getToken(startKey), partitioner.getToken(endKey), true); Preconditions.checkArgument(partitioner instanceof AbstractByteOrderedPartitioner); // Get first slice with key range instead of token range. Token // ranges are start-exclusive, key ranges are start-inclusive. Both // are end-inclusive. If we don't make the call below, then we will // erroneously miss startKey. List<KeySlice> ks = getKeySlice(startKey, endKey, columnSlice, pageSize); this.ksIter = checkFreshSlices(ks).iterator(); } }
0true
titan-cassandra_src_main_java_com_thinkaurelius_titan_diskstorage_cassandra_thrift_CassandraThriftKeyColumnValueStore.java
4
killerHook = new Thread() { public void run() { killAndUnregisterHook(stat); } };
0true
titan-test_src_main_java_com_thinkaurelius_titan_DaemonRunner.java
3,316
static class DoubleValues extends org.elasticsearch.index.fielddata.DoubleValues.WithOrdinals { private final BigDoubleArrayList values; DoubleValues(BigDoubleArrayList values, Ordinals.Docs ordinals) { super(ordinals); this.values = values; } @Override public double getValueByOrd(long ord) { assert ord != Ordinals.MISSING_ORDINAL; return values.get(ord); } }
0true
src_main_java_org_elasticsearch_index_fielddata_plain_DoubleArrayAtomicFieldData.java
148
final class JavaClientExceptionConverter implements ClientExceptionConverter { @Override public Object convert(Throwable t) { return t; } }
0true
hazelcast_src_main_java_com_hazelcast_client_JavaClientExceptionConverter.java
2,397
final class BigByteArray extends AbstractBigArray implements ByteArray { private byte[][] pages; /** Constructor. */ public BigByteArray(long size, PageCacheRecycler recycler, boolean clearOnResize) { super(BYTE_PAGE_SIZE, recycler, clearOnResize); this.size = size; pages = new byte[numPages(size)][]; for (int i = 0; i < pages.length; ++i) { pages[i] = newBytePage(i); } } @Override public byte get(long index) { final int pageIndex = pageIndex(index); final int indexInPage = indexInPage(index); return pages[pageIndex][indexInPage]; } @Override public byte set(long index, byte value) { final int pageIndex = pageIndex(index); final int indexInPage = indexInPage(index); final byte[] page = pages[pageIndex]; final byte ret = page[indexInPage]; page[indexInPage] = value; return ret; } @Override public void get(long index, int len, BytesRef ref) { assert index + len <= size(); int pageIndex = pageIndex(index); final int indexInPage = indexInPage(index); if (indexInPage + len <= pageSize()) { ref.bytes = pages[pageIndex]; ref.offset = indexInPage; ref.length = len; } else { ref.bytes = new byte[len]; ref.offset = 0; ref.length = pageSize() - indexInPage; System.arraycopy(pages[pageIndex], indexInPage, ref.bytes, 0, ref.length); do { ++pageIndex; final int copyLength = Math.min(pageSize(), len - ref.length); System.arraycopy(pages[pageIndex], 0, ref.bytes, ref.length, copyLength); ref.length += copyLength; } while (ref.length < len); } } @Override public void set(long index, byte[] buf, int offset, int len) { assert index + len <= size(); int pageIndex = pageIndex(index); final int indexInPage = indexInPage(index); if (indexInPage + len <= pageSize()) { System.arraycopy(buf, offset, pages[pageIndex], indexInPage, len); } else { int copyLen = pageSize() - indexInPage; System.arraycopy(buf, offset, pages[pageIndex], indexInPage, copyLen); do { ++pageIndex; offset += copyLen; len -= copyLen; copyLen = Math.min(len, pageSize()); System.arraycopy(buf, offset, pages[pageIndex], 0, copyLen); } while (len > copyLen); } } @Override protected int numBytesPerElement() { return RamUsageEstimator.NUM_BYTES_BYTE; } /** Change the size of this array. Content between indexes <code>0</code> and <code>min(size(), newSize)</code> will be preserved. */ public void resize(long newSize) { final int numPages = numPages(newSize); if (numPages > pages.length) { pages = Arrays.copyOf(pages, ArrayUtil.oversize(numPages, RamUsageEstimator.NUM_BYTES_OBJECT_REF)); } for (int i = numPages - 1; i >= 0 && pages[i] == null; --i) { pages[i] = newBytePage(i); } for (int i = numPages; i < pages.length && pages[i] != null; ++i) { pages[i] = null; releasePage(i); } this.size = newSize; } }
0true
src_main_java_org_elasticsearch_common_util_BigByteArray.java
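Editor's note: an illustrative, plain-JDK sketch of the paging scheme BigByteArray uses, not the Elasticsearch API: a long index is split into a page number and an offset within the page. Page size and class name are made up; the real class additionally recycles pages and supports resize.
// Standalone illustration of page-index / in-page-offset addressing with a power-of-two page size.
public class PagedByteArraySketch {
    private static final int PAGE_SHIFT = 14;                  // 16 KB pages, analogous to BYTE_PAGE_SIZE
    private static final int PAGE_SIZE = 1 << PAGE_SHIFT;
    private final byte[][] pages;

    PagedByteArraySketch(long size) {
        int numPages = (int) ((size + PAGE_SIZE - 1) >>> PAGE_SHIFT);
        pages = new byte[numPages][PAGE_SIZE];
    }

    byte get(long index) {
        return pages[(int) (index >>> PAGE_SHIFT)][(int) (index & (PAGE_SIZE - 1))];
    }

    void set(long index, byte value) {
        pages[(int) (index >>> PAGE_SHIFT)][(int) (index & (PAGE_SIZE - 1))] = value;
    }

    public static void main(String[] args) {
        PagedByteArraySketch array = new PagedByteArraySketch(100_000);
        array.set(70_000, (byte) 42);                           // lands in the fifth page
        System.out.println(array.get(70_000));                  // 42
    }
}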
1,193
public class OQueryOperatorOr extends OQueryOperator { public OQueryOperatorOr() { super("OR", 3, false); } @Override public Object evaluateRecord(final OIdentifiable iRecord, ODocument iCurrentResult, final OSQLFilterCondition iCondition, final Object iLeft, final Object iRight, OCommandContext iContext) { if (iLeft == null) return false; return (Boolean) iLeft || (Boolean) iRight; } @Override public OIndexReuseType getIndexReuseType(final Object iLeft, final Object iRight) { if (iLeft == null || iRight == null) return OIndexReuseType.NO_INDEX; return OIndexReuseType.INDEX_UNION; } @Override public ORID getBeginRidRange(final Object iLeft,final Object iRight) { final ORID leftRange; final ORID rightRange; if(iLeft instanceof OSQLFilterCondition) leftRange = ((OSQLFilterCondition) iLeft).getBeginRidRange(); else leftRange = null; if(iRight instanceof OSQLFilterCondition) rightRange = ((OSQLFilterCondition) iRight).getBeginRidRange(); else rightRange = null; if(leftRange == null || rightRange == null) return null; else return leftRange.compareTo(rightRange) <= 0 ? leftRange : rightRange; } @Override public ORID getEndRidRange(final Object iLeft,final Object iRight) { final ORID leftRange; final ORID rightRange; if(iLeft instanceof OSQLFilterCondition) leftRange = ((OSQLFilterCondition) iLeft).getEndRidRange(); else leftRange = null; if(iRight instanceof OSQLFilterCondition) rightRange = ((OSQLFilterCondition) iRight).getEndRidRange(); else rightRange = null; if(leftRange == null || rightRange == null) return null; else return leftRange.compareTo(rightRange) >= 0 ? leftRange : rightRange; } }
0true
core_src_main_java_com_orientechnologies_orient_core_sql_operator_OQueryOperatorOr.java
1,308
public interface SearchConfig { }
0true
core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_search_domain_SearchConfig.java
1,404
class DeleteIndexListener implements Listener { private final AtomicBoolean notified = new AtomicBoolean(); private final Semaphore mdLock; private final Listener listener; volatile ScheduledFuture<?> future; private DeleteIndexListener(Semaphore mdLock, Listener listener) { this.mdLock = mdLock; this.listener = listener; } @Override public void onResponse(final Response response) { if (notified.compareAndSet(false, true)) { mdLock.release(); if (future != null) { future.cancel(false); } listener.onResponse(response); } } @Override public void onFailure(Throwable t) { if (notified.compareAndSet(false, true)) { mdLock.release(); if (future != null) { future.cancel(false); } listener.onFailure(t); } } }
0true
src_main_java_org_elasticsearch_cluster_metadata_MetaDataDeleteIndexService.java
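Editor's note: a plain-JDK sketch of the "notify exactly once" idiom DeleteIndexListener uses, with compareAndSet guarding the callback so racing success and failure paths cannot both release the semaphore or fire the listener. The Callback type and names are made up for the illustration.
// Standalone illustration of the AtomicBoolean notify-once pattern.
import java.util.concurrent.Semaphore;
import java.util.concurrent.atomic.AtomicBoolean;

public class NotifyOnceDemo {
    interface Callback { void done(String result); }

    static class OnceListener {
        private final AtomicBoolean notified = new AtomicBoolean();
        private final Semaphore lock;
        private final Callback callback;

        OnceListener(Semaphore lock, Callback callback) {
            this.lock = lock;
            this.callback = callback;
        }

        void complete(String result) {
            if (notified.compareAndSet(false, true)) { // only the first caller wins
                lock.release();
                callback.done(result);
            }
        }
    }

    public static void main(String[] args) throws InterruptedException {
        Semaphore lock = new Semaphore(1);
        lock.acquire();
        OnceListener listener = new OnceListener(lock, new Callback() {
            @Override
            public void done(String result) {
                System.out.println("notified: " + result);
            }
        });
        listener.complete("ok");
        listener.complete("timeout"); // ignored: already notified
    }
}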
1,888
public interface Scope { /** * Scopes a provider. The returned provider returns objects from this scope. * If an object does not exist in this scope, the provider can use the given * unscoped provider to retrieve one. * <p/> * <p>Scope implementations are strongly encouraged to override * {@link Object#toString} in the returned provider and include the backing * provider's {@code toString()} output. * * @param key binding key * @param unscoped locates an instance when one doesn't already exist in this * scope. * @return a new provider which only delegates to the given unscoped provider * when an instance of the requested object doesn't already exist in this * scope */ public <T> Provider<T> scope(Key<T> key, Provider<T> unscoped); /** * A short but useful description of this scope. For comparison, the standard * scopes that ship with guice use the descriptions * {@code "Scopes.SINGLETON"}, {@code "ServletScopes.SESSION"} and * {@code "ServletScopes.REQUEST"}. */ String toString(); }
0true
src_main_java_org_elasticsearch_common_inject_Scope.java
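Editor's note: a hedged sketch of implementing the Scope contract above with a scope that caches the first instance per Key. It is written only against the interface shown; Key and Provider are assumed to live in the same org.elasticsearch.common.inject package, and the class name is made up.
// Hedged sketch: a memoizing Scope that falls back to the unscoped provider on a miss.
import org.elasticsearch.common.inject.Key;
import org.elasticsearch.common.inject.Provider;
import org.elasticsearch.common.inject.Scope;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

public class MemoizingScope implements Scope {
    private final Map<Key<?>, Object> instances = new ConcurrentHashMap<Key<?>, Object>();

    @Override
    public <T> Provider<T> scope(final Key<T> key, final Provider<T> unscoped) {
        return new Provider<T>() {
            @Override
            @SuppressWarnings("unchecked")
            public T get() {
                Object instance = instances.get(key);
                if (instance == null) {
                    instance = unscoped.get();        // locate an instance when none exists in this scope
                    instances.put(key, instance);     // last writer wins; acceptable for a sketch
                }
                return (T) instance;
            }
            @Override
            public String toString() {
                return unscoped + " (memoized)";      // include the backing provider, as the javadoc recommends
            }
        };
    }

    @Override
    public String toString() {
        return "MemoizingScope";
    }
}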
1,175
public static class Order { public static final int Items = 1000; }
0true
core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_payment_domain_PaymentInfoImpl.java
1,383
public class TitanCassandraRecordReader extends RecordReader<NullWritable, FaunusVertex> { private static final Logger log = LoggerFactory.getLogger(TitanCassandraRecordReader.class); private ColumnFamilyRecordReader reader; private TitanCassandraHadoopGraph graph; private FaunusVertexQueryFilter vertexQuery; private Configuration configuration; private FaunusVertex vertex; public TitanCassandraRecordReader(final TitanCassandraHadoopGraph graph, final FaunusVertexQueryFilter vertexQuery, final ColumnFamilyRecordReader reader) { this.graph = graph; this.vertexQuery = vertexQuery; this.reader = reader; } @Override public void initialize(final InputSplit inputSplit, final TaskAttemptContext taskAttemptContext) throws IOException, InterruptedException { reader.initialize(inputSplit, taskAttemptContext); configuration = ModifiableHadoopConfiguration.of(DEFAULT_COMPAT.getContextConfiguration(taskAttemptContext)); } @Override public boolean nextKeyValue() throws IOException, InterruptedException { while (reader.nextKeyValue()) { // TODO titan05 integration -- the duplicate() call may be unnecessary final FaunusVertex temp = graph.readHadoopVertex(configuration, reader.getCurrentKey().duplicate(), reader.getCurrentValue()); if (null != temp) { vertex = temp; vertexQuery.filterRelationsOf(vertex); return true; } } return false; } @Override public NullWritable getCurrentKey() throws IOException, InterruptedException { return NullWritable.get(); } @Override public FaunusVertex getCurrentValue() throws IOException, InterruptedException { return vertex; } @Override public void close() throws IOException { graph.close(); reader.close(); } @Override public float getProgress() { return reader.getProgress(); } }
1no label
titan-hadoop-parent_titan-hadoop-core_src_main_java_com_thinkaurelius_titan_hadoop_formats_cassandra_TitanCassandraRecordReader.java