Unnamed: 0
int64 0
6.45k
| func
stringlengths 29
253k
| target
class label 2
classes | project
stringlengths 36
167
|
---|---|---|---|
327 | static class DummyXaConnection extends XaConnectionHelpImpl
{
private XAResource xaResource = null;
public DummyXaConnection( XaResourceManager xaRm )
{
super( xaRm );
xaResource = new DummyXaResource( xaRm );
}
@Override
public XAResource getXaResource()
{
return xaResource;
}
public void doStuff1() throws XAException
{
validate();
getTransaction().addCommand( new DummyCommand( 1 ) );
}
public void doStuff2() throws XAException
{
validate();
getTransaction().addCommand( new DummyCommand( 2 ) );
}
public void enlistWithTx( TransactionManager tm ) throws Exception
{
tm.getTransaction().enlistResource( xaResource );
}
public void delistFromTx( TransactionManager tm ) throws Exception
{
tm.getTransaction().delistResource( xaResource,
XAResource.TMSUCCESS );
}
public int getTransactionId() throws Exception
{
return getTransaction().getIdentifier();
}
} | 0true
| community_kernel_src_test_java_org_neo4j_kernel_impl_transaction_DummyXaDataSource.java |
1,932 | public class Annotations {
/**
* Returns true if the given annotation is retained at runtime.
*/
public static boolean isRetainedAtRuntime(Class<? extends Annotation> annotationType) {
Retention retention = annotationType.getAnnotation(Retention.class);
return retention != null && retention.value() == RetentionPolicy.RUNTIME;
}
/**
* Returns the scope annotation on {@code type}, or null if none is specified.
*/
public static Class<? extends Annotation> findScopeAnnotation(
Errors errors, Class<?> implementation) {
return findScopeAnnotation(errors, implementation.getAnnotations());
}
/**
* Returns the scoping annotation, or null if there isn't one.
*/
public static Class<? extends Annotation> findScopeAnnotation(Errors errors, Annotation[] annotations) {
Class<? extends Annotation> found = null;
for (Annotation annotation : annotations) {
if (annotation.annotationType().getAnnotation(ScopeAnnotation.class) != null) {
if (found != null) {
errors.duplicateScopeAnnotations(found, annotation.annotationType());
} else {
found = annotation.annotationType();
}
}
}
return found;
}
public static boolean isScopeAnnotation(Class<? extends Annotation> annotationType) {
return annotationType.getAnnotation(ScopeAnnotation.class) != null;
}
/**
* Adds an error if there is a misplaced annotations on {@code type}. Scoping
* annotations are not allowed on abstract classes or interfaces.
*/
public static void checkForMisplacedScopeAnnotations(
Class<?> type, Object source, Errors errors) {
if (Classes.isConcrete(type)) {
return;
}
Class<? extends Annotation> scopeAnnotation = findScopeAnnotation(errors, type);
if (scopeAnnotation != null) {
errors.withSource(type).scopeAnnotationOnAbstractType(scopeAnnotation, type, source);
}
}
/**
* Gets a key for the given type, member and annotations.
*/
public static Key<?> getKey(TypeLiteral<?> type, Member member, Annotation[] annotations,
Errors errors) throws ErrorsException {
int numErrorsBefore = errors.size();
Annotation found = findBindingAnnotation(errors, member, annotations);
errors.throwIfNewErrors(numErrorsBefore);
return found == null ? Key.get(type) : Key.get(type, found);
}
/**
* Returns the binding annotation on {@code member}, or null if there isn't one.
*/
public static Annotation findBindingAnnotation(
Errors errors, Member member, Annotation[] annotations) {
Annotation found = null;
for (Annotation annotation : annotations) {
if (annotation.annotationType().getAnnotation(BindingAnnotation.class) != null) {
if (found != null) {
errors.duplicateBindingAnnotations(member,
found.annotationType(), annotation.annotationType());
} else {
found = annotation;
}
}
}
return found;
}
} | 0true
| src_main_java_org_elasticsearch_common_inject_internal_Annotations.java |
340 | public class CurrentDatabase
{
private final StoreVersionCheck storeVersionCheck;
private static final Map<String, String> fileNamesToTypeDescriptors = new HashMap<String, String>();
static
{
fileNamesToTypeDescriptors.put( NeoStore.DEFAULT_NAME, NeoStore.TYPE_DESCRIPTOR );
fileNamesToTypeDescriptors.put( "neostore.nodestore.db", NodeStore.TYPE_DESCRIPTOR );
fileNamesToTypeDescriptors.put( "neostore.propertystore.db", PropertyStore.TYPE_DESCRIPTOR );
fileNamesToTypeDescriptors.put( "neostore.propertystore.db.arrays", DynamicArrayStore.TYPE_DESCRIPTOR );
fileNamesToTypeDescriptors.put( "neostore.propertystore.db.index", PropertyKeyTokenStore.TYPE_DESCRIPTOR );
fileNamesToTypeDescriptors.put( "neostore.propertystore.db.index.keys", DynamicStringStore.TYPE_DESCRIPTOR );
fileNamesToTypeDescriptors.put( "neostore.propertystore.db.strings", DynamicStringStore.TYPE_DESCRIPTOR );
fileNamesToTypeDescriptors.put( "neostore.relationshipstore.db", RelationshipStore.TYPE_DESCRIPTOR );
fileNamesToTypeDescriptors.put( "neostore.relationshiptypestore.db", RelationshipTypeTokenStore.TYPE_DESCRIPTOR );
fileNamesToTypeDescriptors.put( "neostore.relationshiptypestore.db.names", DynamicStringStore.TYPE_DESCRIPTOR );
}
public CurrentDatabase(StoreVersionCheck storeVersionCheck)
{
this.storeVersionCheck = storeVersionCheck;
}
public boolean storeFilesAtCurrentVersion( File storeDirectory )
{
for ( String fileName : fileNamesToTypeDescriptors.keySet() )
{
String expectedVersion = buildTypeDescriptorAndVersion( fileNamesToTypeDescriptors.get( fileName ) );
if ( !storeVersionCheck.hasVersion(
new File( storeDirectory, fileName ), expectedVersion ).first().isSuccessful() )
{
return false;
}
}
return true;
}
public static Collection<String> fileNames()
{
return fileNamesToTypeDescriptors.keySet();
}
} | 0true
| community_kernel_src_main_java_org_neo4j_kernel_impl_storemigration_CurrentDatabase.java |
699 | @Test
public class LRUListTest {
public void testSingleAdd() {
LRUList lruList = new LRUList();
ODirectMemoryPointer directMemoryPointer = new ODirectMemoryPointer(1);
OCachePointer cachePointer = new OCachePointer(directMemoryPointer, new OLogSequenceNumber(0, 0));
lruList.putToMRU(new OCacheEntry(1, 10, cachePointer, false));
Iterator<OCacheEntry> entryIterator = lruList.iterator();
Assert.assertTrue(entryIterator.hasNext());
Assert.assertEquals(entryIterator.next(), new OCacheEntry(1, 10, cachePointer, false));
directMemoryPointer.free();
}
public void testAddTwo() {
LRUList lruList = new LRUList();
ODirectMemoryPointer directMemoryPointerOne = new ODirectMemoryPointer(1);
ODirectMemoryPointer directMemoryPointerTwo = new ODirectMemoryPointer(1);
OCachePointer cachePointerOne = new OCachePointer(directMemoryPointerOne, new OLogSequenceNumber(0, 0));
OCachePointer cachePointerTwo = new OCachePointer(directMemoryPointerTwo, new OLogSequenceNumber(0, 0));
lruList.putToMRU(new OCacheEntry(1, 10, cachePointerOne, false));
lruList.putToMRU(new OCacheEntry(1, 20, cachePointerTwo, false));
Assert.assertEquals(lruList.size(), 2);
Iterator<OCacheEntry> entryIterator = lruList.iterator();
Assert.assertTrue(entryIterator.hasNext());
Assert.assertEquals(entryIterator.next(), new OCacheEntry(1, 20, cachePointerTwo, false));
Assert.assertEquals(entryIterator.next(), new OCacheEntry(1, 10, cachePointerOne, false));
directMemoryPointerOne.free();
directMemoryPointerTwo.free();
}
public void testAddThree() {
ODirectMemoryPointer directMemoryPointerOne = new ODirectMemoryPointer(1);
ODirectMemoryPointer directMemoryPointerTwo = new ODirectMemoryPointer(1);
ODirectMemoryPointer directMemoryPointerThree = new ODirectMemoryPointer(1);
OCachePointer cachePointerOne = new OCachePointer(directMemoryPointerOne, new OLogSequenceNumber(0, 0));
OCachePointer cachePointerTwo = new OCachePointer(directMemoryPointerTwo, new OLogSequenceNumber(0, 0));
OCachePointer cachePointerThree = new OCachePointer(directMemoryPointerThree, new OLogSequenceNumber(0, 0));
LRUList lruList = new LRUList();
lruList.putToMRU(new OCacheEntry(1, 10, cachePointerOne, false));
lruList.putToMRU(new OCacheEntry(1, 20, cachePointerTwo, false));
lruList.putToMRU(new OCacheEntry(3, 30, cachePointerThree, false));
Assert.assertEquals(lruList.size(), 3);
Iterator<OCacheEntry> entryIterator = lruList.iterator();
Assert.assertTrue(entryIterator.hasNext());
Assert.assertEquals(entryIterator.next(), new OCacheEntry(3, 30, cachePointerThree, false));
Assert.assertEquals(entryIterator.next(), new OCacheEntry(1, 20, cachePointerTwo, false));
Assert.assertEquals(entryIterator.next(), new OCacheEntry(1, 10, cachePointerOne, false));
directMemoryPointerOne.free();
directMemoryPointerTwo.free();
directMemoryPointerThree.free();
}
public void testAddThreePutMiddleToTop() {
ODirectMemoryPointer directMemoryPointerOne = new ODirectMemoryPointer(1);
ODirectMemoryPointer directMemoryPointerTwo = new ODirectMemoryPointer(1);
ODirectMemoryPointer directMemoryPointerThree = new ODirectMemoryPointer(1);
OCachePointer cachePointerOne = new OCachePointer(directMemoryPointerOne, new OLogSequenceNumber(0, 0));
OCachePointer cachePointerTwo = new OCachePointer(directMemoryPointerTwo, new OLogSequenceNumber(0, 0));
OCachePointer cachePointerThree = new OCachePointer(directMemoryPointerThree, new OLogSequenceNumber(0, 0));
LRUList lruList = new LRUList();
lruList.putToMRU(new OCacheEntry(1, 10, cachePointerOne, false));
lruList.putToMRU(new OCacheEntry(1, 20, cachePointerTwo, false));
lruList.putToMRU(new OCacheEntry(3, 30, cachePointerThree, false));
lruList.putToMRU(new OCacheEntry(1, 20, cachePointerTwo, false));
Assert.assertEquals(lruList.size(), 3);
Iterator<OCacheEntry> entryIterator = lruList.iterator();
Assert.assertTrue(entryIterator.hasNext());
Assert.assertEquals(entryIterator.next(), new OCacheEntry(1, 20, cachePointerTwo, false));
Assert.assertEquals(entryIterator.next(), new OCacheEntry(3, 30, cachePointerThree, false));
Assert.assertEquals(entryIterator.next(), new OCacheEntry(1, 10, cachePointerOne, false));
directMemoryPointerOne.free();
directMemoryPointerTwo.free();
directMemoryPointerThree.free();
}
public void testAddThreePutMiddleToTopChangePointer() {
ODirectMemoryPointer directMemoryPointerOne = new ODirectMemoryPointer(1);
ODirectMemoryPointer directMemoryPointerTwo = new ODirectMemoryPointer(1);
ODirectMemoryPointer directMemoryPointerThree = new ODirectMemoryPointer(1);
ODirectMemoryPointer directMemoryPointerFour = new ODirectMemoryPointer(1);
OCachePointer cachePointerOne = new OCachePointer(directMemoryPointerOne, new OLogSequenceNumber(0, 0));
OCachePointer cachePointerTwo = new OCachePointer(directMemoryPointerTwo, new OLogSequenceNumber(0, 0));
OCachePointer cachePointerThree = new OCachePointer(directMemoryPointerThree, new OLogSequenceNumber(0, 0));
OCachePointer cachePointerFour = new OCachePointer(directMemoryPointerFour, new OLogSequenceNumber(0, 0));
LRUList lruList = new LRUList();
lruList.putToMRU(new OCacheEntry(1, 10, cachePointerOne, false));
lruList.putToMRU(new OCacheEntry(1, 20, cachePointerTwo, false));
lruList.putToMRU(new OCacheEntry(3, 30, cachePointerThree, false));
lruList.putToMRU(new OCacheEntry(1, 20, cachePointerFour, false));
Assert.assertEquals(lruList.size(), 3);
Iterator<OCacheEntry> entryIterator = lruList.iterator();
Assert.assertTrue(entryIterator.hasNext());
Assert.assertEquals(entryIterator.next(), new OCacheEntry(1, 20, cachePointerFour, false));
Assert.assertEquals(entryIterator.next(), new OCacheEntry(3, 30, cachePointerThree, false));
Assert.assertEquals(entryIterator.next(), new OCacheEntry(1, 10, cachePointerOne, false));
directMemoryPointerOne.free();
directMemoryPointerTwo.free();
directMemoryPointerThree.free();
directMemoryPointerFour.free();
}
public void testAddElevenPutMiddleToTopChangePointer() {
LRUList lruList = new LRUList();
OCachePointer[] cachePointers = new OCachePointer[11];
for (int i = 0; i < 11; i++) {
ODirectMemoryPointer directMemoryPointer = new ODirectMemoryPointer(1);
cachePointers[i] = new OCachePointer(directMemoryPointer, new OLogSequenceNumber(0, 0));
lruList.putToMRU(new OCacheEntry(1, i * 10, cachePointers[i], false));
}
lruList.putToMRU(new OCacheEntry(1, 50, cachePointers[5], false));
Assert.assertEquals(lruList.size(), 11);
Iterator<OCacheEntry> entryIterator = lruList.iterator();
Assert.assertTrue(entryIterator.hasNext());
Assert.assertEquals(entryIterator.next(), new OCacheEntry(1, 50, cachePointers[5], false));
for (int i = 10; i >= 0; i--) {
if (i == 5)
continue;
Assert.assertTrue(entryIterator.hasNext());
Assert.assertEquals(entryIterator.next(), new OCacheEntry(1, i * 10, cachePointers[i], false));
}
for (int i = 0; i < 11; i++) {
cachePointers[i].getDataPointer().free();
}
}
public void testAddOneRemoveLRU() {
ODirectMemoryPointer directMemoryPointer = new ODirectMemoryPointer(1);
LRUList lruList = new LRUList();
OCachePointer cachePointerOne = new OCachePointer(directMemoryPointer, new OLogSequenceNumber(0, 0));
lruList.putToMRU(new OCacheEntry(1, 10, cachePointerOne, false));
lruList.removeLRU();
Assert.assertEquals(lruList.size(), 0);
Iterator<OCacheEntry> entryIterator = lruList.iterator();
Assert.assertFalse(entryIterator.hasNext());
directMemoryPointer.free();
}
public void testRemoveLRUShouldReturnNullIfAllRecordsAreUsed() {
ODirectMemoryPointer directMemoryPointer = new ODirectMemoryPointer(1);
LRUList lruList = new LRUList();
OCachePointer cachePointerOne = new OCachePointer(directMemoryPointer, new OLogSequenceNumber(0, 0));
OCacheEntry cacheEntry = new OCacheEntry(1, 10, cachePointerOne, false);
lruList.putToMRU(cacheEntry);
cacheEntry.usagesCount++;
OCacheEntry removedLRU = lruList.removeLRU();
Assert.assertNull(removedLRU);
directMemoryPointer.free();
}
public void testAddElevenRemoveLRU() {
LRUList lruList = new LRUList();
OCachePointer[] cachePointers = new OCachePointer[11];
for (int i = 0; i < 11; i++) {
ODirectMemoryPointer directMemoryPointer = new ODirectMemoryPointer(1);
cachePointers[i] = new OCachePointer(directMemoryPointer, new OLogSequenceNumber(0, 0));
lruList.putToMRU(new OCacheEntry(1, i * 10, cachePointers[i], false));
}
lruList.removeLRU();
Assert.assertEquals(lruList.size(), 10);
Iterator<OCacheEntry> entryIterator = lruList.iterator();
for (int i = 10; i > 0; i--) {
Assert.assertTrue(entryIterator.hasNext());
Assert.assertEquals(entryIterator.next(), new OCacheEntry(1, i * 10, cachePointers[i], false));
}
for (int i = 0; i < 11; i++) {
cachePointers[i].getDataPointer().free();
}
}
public void testAddElevenRemoveMiddle() {
LRUList lruList = new LRUList();
OCachePointer[] cachePointers = new OCachePointer[11];
for (int i = 0; i < 11; i++) {
ODirectMemoryPointer directMemoryPointer = new ODirectMemoryPointer(1);
cachePointers[i] = new OCachePointer(directMemoryPointer, new OLogSequenceNumber(0, 0));
lruList.putToMRU(new OCacheEntry(1, i * 10, cachePointers[i], false));
}
Assert.assertEquals(lruList.remove(1, 50), new OCacheEntry(1, 50, cachePointers[5], false));
Assert.assertNull(lruList.remove(1, 500));
Assert.assertEquals(lruList.size(), 10);
Iterator<OCacheEntry> entryIterator = lruList.iterator();
for (int i = 10; i >= 0; i--) {
if (i == 5)
continue;
Assert.assertTrue(entryIterator.hasNext());
Assert.assertEquals(entryIterator.next(), new OCacheEntry(1, i * 10, cachePointers[i], false));
}
for (int i = 0; i < 11; i++) {
cachePointers[i].getDataPointer().free();
}
}
public void testAddElevenGetMiddle() {
LRUList lruList = new LRUList();
OCachePointer[] cachePointers = new OCachePointer[11];
for (int i = 0; i < 11; i++) {
ODirectMemoryPointer directMemoryPointer = new ODirectMemoryPointer(1);
cachePointers[i] = new OCachePointer(directMemoryPointer, new OLogSequenceNumber(0, 0));
lruList.putToMRU(new OCacheEntry(1, i * 10, cachePointers[i], false));
}
Assert.assertTrue(lruList.contains(1, 50));
Assert.assertEquals(lruList.get(1, 50), new OCacheEntry(1, 50, cachePointers[5], false));
Assert.assertFalse(lruList.contains(2, 50));
Assert.assertEquals(lruList.size(), 11);
Iterator<OCacheEntry> entryIterator = lruList.iterator();
for (int i = 10; i >= 0; i--) {
Assert.assertTrue(entryIterator.hasNext());
Assert.assertEquals(entryIterator.next(), new OCacheEntry(1, i * 10, cachePointers[i], false));
}
for (int i = 0; i < 11; i++) {
cachePointers[i].getDataPointer().free();
}
}
public void testAdd9128() {
LRUList lruList = new LRUList();
OCachePointer[] cachePointers = new OCachePointer[9128];
for (int i = 0; i < 9128; i++) {
ODirectMemoryPointer directMemoryPointer = new ODirectMemoryPointer(1);
cachePointers[i] = new OCachePointer(directMemoryPointer, new OLogSequenceNumber(0, 0));
lruList.putToMRU(new OCacheEntry(1, i * 10, cachePointers[i], false));
}
Assert.assertEquals(lruList.size(), 9128);
Iterator<OCacheEntry> entryIterator = lruList.iterator();
for (int i = 9127; i >= 0; i--) {
Assert.assertTrue(entryIterator.hasNext());
Assert.assertEquals(entryIterator.next(), new OCacheEntry(1, i * 10, cachePointers[i], false));
}
for (OCachePointer cachePointer : cachePointers)
cachePointer.getDataPointer().free();
}
public void testAdd9128Get() {
LRUList lruList = new LRUList();
OCachePointer[] cachePointers = new OCachePointer[9128];
for (int i = 0; i < 9128; i++) {
ODirectMemoryPointer directMemoryPointer = new ODirectMemoryPointer(1);
cachePointers[i] = new OCachePointer(directMemoryPointer, new OLogSequenceNumber(0, 0));
lruList.putToMRU(new OCacheEntry(1, i * 10, cachePointers[i], false));
}
Assert.assertEquals(lruList.size(), 9128);
for (int i = 0; i < 9128; i++)
Assert.assertEquals(lruList.get(1, i * 10), new OCacheEntry(1, i * 10, cachePointers[i], false));
Iterator<OCacheEntry> entryIterator = lruList.iterator();
for (int i = 9127; i >= 0; i--) {
Assert.assertTrue(entryIterator.hasNext());
Assert.assertEquals(entryIterator.next(), new OCacheEntry(1, i * 10, cachePointers[i], false));
}
for (OCachePointer cachePointer : cachePointers)
cachePointer.getDataPointer().free();
}
public void testAdd9128Remove4564() {
LRUList lruList = new LRUList();
OCachePointer[] cachePointers = new OCachePointer[9128];
for (int i = 0; i < 9128; i++) {
ODirectMemoryPointer directMemoryPointer = new ODirectMemoryPointer(1);
cachePointers[i] = new OCachePointer(directMemoryPointer, new OLogSequenceNumber(0, 0));
lruList.putToMRU(new OCacheEntry(1, i * 10, cachePointers[i], false));
}
for (int i = 4564; i < 9128; i++)
Assert.assertEquals(lruList.remove(1, i * 10), new OCacheEntry(1, i * 10, cachePointers[i], false));
Assert.assertEquals(lruList.size(), 4564);
Iterator<OCacheEntry> entryIterator = lruList.iterator();
for (int i = 4563; i >= 0; i--) {
Assert.assertTrue(entryIterator.hasNext());
Assert.assertEquals(entryIterator.next(), new OCacheEntry(1, i * 10, cachePointers[i], false));
}
for (OCachePointer cachePointer : cachePointers)
cachePointer.getDataPointer().free();
}
public void testAdd9128PutLastAndMiddleToTop() {
LRUList lruList = new LRUList();
OCachePointer[] cachePointers = new OCachePointer[9128];
for (int i = 0; i < 9128; i++) {
ODirectMemoryPointer directMemoryPointer = new ODirectMemoryPointer(1);
cachePointers[i] = new OCachePointer(directMemoryPointer, new OLogSequenceNumber(0, 0));
lruList.putToMRU(new OCacheEntry(1, i * 10, cachePointers[i], false));
}
lruList.putToMRU(new OCacheEntry(1, 0, cachePointers[0], false));
lruList.putToMRU(new OCacheEntry(1, 4500 * 10, cachePointers[4500], false));
Assert.assertEquals(lruList.size(), 9128);
Iterator<OCacheEntry> entryIterator = lruList.iterator();
Assert.assertTrue(entryIterator.hasNext());
Assert.assertEquals(entryIterator.next(), new OCacheEntry(1, 4500 * 10, cachePointers[4500], false));
Assert.assertEquals(entryIterator.next(), new OCacheEntry(1, 0, cachePointers[0], false));
for (int i = 9127; i >= 1; i--) {
if (i == 4500)
continue;
Assert.assertTrue(entryIterator.hasNext());
Assert.assertEquals(entryIterator.next(), new OCacheEntry(1, i * 10, cachePointers[i], false));
}
for (OCachePointer cachePointer : cachePointers)
cachePointer.getDataPointer().free();
}
} | 0true
| core_src_test_java_com_orientechnologies_orient_core_index_hashindex_local_cache_LRUListTest.java |
958 | public interface OCompression {
byte[] compress(byte[] content);
byte[] uncompress(byte[] content);
String name();
} | 0true
| core_src_main_java_com_orientechnologies_orient_core_serialization_compression_OCompression.java |
932 | public class OrderOfferProcessorTest extends TestCase {
protected OfferDao offerDaoMock;
protected OrderOfferProcessorImpl orderProcessor;
protected OfferDataItemProvider dataProvider = new OfferDataItemProvider();
protected OfferTimeZoneProcessor offerTimeZoneProcessorMock;
@Override
protected void setUp() throws Exception {
offerDaoMock = EasyMock.createMock(OfferDao.class);
offerTimeZoneProcessorMock = EasyMock.createMock(OfferTimeZoneProcessor.class);
orderProcessor = new OrderOfferProcessorImpl();
orderProcessor.setOfferDao(offerDaoMock);
orderProcessor.setOfferTimeZoneProcessor(offerTimeZoneProcessorMock);
orderProcessor.setPromotableItemFactory(new PromotableItemFactoryImpl());
}
public void replay() {
EasyMock.expect(offerTimeZoneProcessorMock.getTimeZone(EasyMock.isA(OfferImpl.class))).andReturn(TimeZone.getTimeZone("CST")).anyTimes();
EasyMock.replay(offerDaoMock);
EasyMock.replay(offerTimeZoneProcessorMock);
}
public void verify() {
EasyMock.verify(offerDaoMock);
EasyMock.verify(offerTimeZoneProcessorMock);
}
public void testFilterOffers() throws Exception {
replay();
PromotableOrder order = dataProvider.createBasicPromotableOrder();
List<Offer> offers = dataProvider.createCustomerBasedOffer("customer.registered==true", dataProvider.yesterday(), dataProvider.yesterday(), OfferDiscountType.PERCENT_OFF);
orderProcessor.filterOffers(offers, order.getOrder().getCustomer());
//confirm out-of-date orders are filtered out
assertTrue(offers.size() == 0);
offers = dataProvider.createCustomerBasedOffer("customer.registered==true", dataProvider.yesterday(), dataProvider.tomorrow(), OfferDiscountType.PERCENT_OFF);
orderProcessor.filterOffers(offers, order.getOrder().getCustomer());
//confirm valid customer offer is retained
assertTrue(offers.size() == 1);
offers = dataProvider.createCustomerBasedOffer("customer.registered==false", dataProvider.yesterday(), dataProvider.tomorrow(), OfferDiscountType.PERCENT_OFF);
orderProcessor.filterOffers(offers, order.getOrder().getCustomer());
//confirm invalid customer offer is culled
assertTrue(offers.size() == 0);
verify();
}
public void testFilterOrderLevelOffer() throws Exception {
replay();
PromotableOrder order = dataProvider.createBasicPromotableOrder();
List<PromotableCandidateOrderOffer> qualifiedOffers = new ArrayList<PromotableCandidateOrderOffer>();
List<Offer> offers = dataProvider.createOrderBasedOffer("order.subTotal.getAmount()>20", OfferDiscountType.PERCENT_OFF);
orderProcessor.filterOrderLevelOffer(order, qualifiedOffers, offers.get(0));
//test that the valid order offer is included
assertTrue(qualifiedOffers.size() == 1 && qualifiedOffers.get(0).getOffer().equals(offers.get(0)));
qualifiedOffers = new ArrayList<PromotableCandidateOrderOffer>();
offers = dataProvider.createOrderBasedOfferWithItemCriteria("order.subTotal.getAmount()>20", OfferDiscountType.PERCENT_OFF, "([MVEL.eval(\"toUpperCase()\",\"test1\"), MVEL.eval(\"toUpperCase()\",\"test2\")] contains MVEL.eval(\"toUpperCase()\", discreteOrderItem.category.name))");
orderProcessor.filterOrderLevelOffer(order, qualifiedOffers, offers.get(0));
//test that the valid order offer is included
assertTrue(qualifiedOffers.size() == 1 && qualifiedOffers.get(0).getOffer().equals(offers.get(0))) ;
qualifiedOffers = new ArrayList<PromotableCandidateOrderOffer>();
offers = dataProvider.createOrderBasedOfferWithItemCriteria("order.subTotal.getAmount()>20", OfferDiscountType.PERCENT_OFF, "([5,6] contains discreteOrderItem.category.id.intValue())");
orderProcessor.filterOrderLevelOffer(order, qualifiedOffers, offers.get(0));
//test that the invalid order offer is excluded
assertTrue(qualifiedOffers.size() == 0) ;
verify();
}
public void testCouldOfferApplyToOrder() throws Exception {
replay();
PromotableOrder order = dataProvider.createBasicPromotableOrder();
List<Offer> offers = dataProvider.createOrderBasedOffer("order.subTotal.getAmount()>20", OfferDiscountType.PERCENT_OFF);
boolean couldApply = orderProcessor.couldOfferApplyToOrder(offers.get(0), order, order.getDiscountableOrderItems().get(0), order.getFulfillmentGroups().get(0));
//test that the valid order offer is included
assertTrue(couldApply);
offers = dataProvider.createOrderBasedOffer("order.subTotal.getAmount()==0", OfferDiscountType.PERCENT_OFF);
couldApply = orderProcessor.couldOfferApplyToOrder(offers.get(0), order, order.getDiscountableOrderItems().get(0), order.getFulfillmentGroups().get(0));
//test that the invalid order offer is excluded
assertFalse(couldApply);
verify();
}
public void testCouldOrderItemMeetOfferRequirement() throws Exception {
replay();
PromotableOrder order = dataProvider.createBasicPromotableOrder();
List<Offer> offers = dataProvider.createOrderBasedOfferWithItemCriteria("order.subTotal.getAmount()>20", OfferDiscountType.PERCENT_OFF, "([MVEL.eval(\"toUpperCase()\",\"test1\"), MVEL.eval(\"toUpperCase()\",\"test2\")] contains MVEL.eval(\"toUpperCase()\", discreteOrderItem.category.name))");
boolean couldApply = orderProcessor.couldOrderItemMeetOfferRequirement(offers.get(0).getQualifyingItemCriteria().iterator().next(), order.getDiscountableOrderItems().get(0));
//test that the valid order offer is included
assertTrue(couldApply);
offers = dataProvider.createOrderBasedOfferWithItemCriteria("order.subTotal.getAmount()>20", OfferDiscountType.PERCENT_OFF, "([MVEL.eval(\"toUpperCase()\",\"test5\"), MVEL.eval(\"toUpperCase()\",\"test6\")] contains MVEL.eval(\"toUpperCase()\", discreteOrderItem.category.name))");
couldApply = orderProcessor.couldOrderItemMeetOfferRequirement(offers.get(0).getQualifyingItemCriteria().iterator().next(), order.getDiscountableOrderItems().get(0));
//test that the invalid order offer is excluded
assertFalse(couldApply);
verify();
}
public void testCouldOfferApplyToOrderItems() throws Exception {
replay();
PromotableOrder order = dataProvider.createBasicPromotableOrder();
List<Offer> offers = dataProvider.createOrderBasedOfferWithItemCriteria("order.subTotal.getAmount()>20", OfferDiscountType.PERCENT_OFF, "([MVEL.eval(\"toUpperCase()\",\"test1\"), MVEL.eval(\"toUpperCase()\",\"test2\")] contains MVEL.eval(\"toUpperCase()\", discreteOrderItem.category.name))");
List<PromotableOrderItem> orderItems = new ArrayList<PromotableOrderItem>();
for (PromotableOrderItem orderItem : order.getDiscountableOrderItems()) {
orderItems.add(orderItem);
}
CandidatePromotionItems candidates = orderProcessor.couldOfferApplyToOrderItems(offers.get(0), orderItems);
//test that the valid order offer is included
assertTrue(candidates.isMatchedQualifier() && candidates.getCandidateQualifiersMap().size() == 1);
offers = dataProvider.createOrderBasedOfferWithItemCriteria("order.subTotal.getAmount()>20", OfferDiscountType.PERCENT_OFF, "([MVEL.eval(\"toUpperCase()\",\"test5\"), MVEL.eval(\"toUpperCase()\",\"test6\")] contains MVEL.eval(\"toUpperCase()\", discreteOrderItem.category.name))");
candidates = orderProcessor.couldOfferApplyToOrderItems(offers.get(0), orderItems);
//test that the invalid order offer is excluded because there are no qualifying items
assertFalse(candidates.isMatchedQualifier() && candidates.getCandidateQualifiersMap().size() == 1);
verify();
}
} | 0true
| core_broadleaf-framework_src_test_java_org_broadleafcommerce_core_offer_service_processor_OrderOfferProcessorTest.java |
762 | public class ListService extends CollectionService {
public static final String SERVICE_NAME = "hz:impl:listService";
private final ConcurrentMap<String, ListContainer> containerMap = new ConcurrentHashMap<String, ListContainer>();
public ListService(NodeEngine nodeEngine) {
super(nodeEngine);
}
@Override
public ListContainer getOrCreateContainer(String name, boolean backup) {
ListContainer container = containerMap.get(name);
if (container == null) {
container = new ListContainer(name, nodeEngine);
final ListContainer current = containerMap.putIfAbsent(name, container);
if (current != null) {
container = current;
}
}
return container;
}
@Override
public Map<String, ? extends CollectionContainer> getContainerMap() {
return containerMap;
}
@Override
public String getServiceName() {
return SERVICE_NAME;
}
@Override
public DistributedObject createDistributedObject(String objectId) {
return new ListProxyImpl(objectId, nodeEngine, this);
}
@Override
public TransactionalListProxy createTransactionalObject(String name, TransactionSupport transaction) {
return new TransactionalListProxy(name, transaction, nodeEngine, this);
}
@Override
public Operation prepareReplicationOperation(PartitionReplicationEvent event) {
final Map<String, CollectionContainer> migrationData = getMigrationData(event);
return migrationData.isEmpty()
? null
: new ListReplicationOperation(migrationData, event.getPartitionId(), event.getReplicaIndex());
}
} | 0true
| hazelcast_src_main_java_com_hazelcast_collection_list_ListService.java |
320 | public class KCVSConfigTest extends WritableConfigurationTest {
@Override
public WriteConfiguration getConfig() {
final KeyColumnValueStoreManager manager = new InMemoryStoreManager(Configuration.EMPTY);
try {
return new KCVSConfiguration(new BackendOperation.TransactionalProvider() {
@Override
public StoreTransaction openTx() throws BackendException {
return manager.beginTransaction(StandardBaseTransactionConfig.of(Timestamps.MICRO, manager.getFeatures().getKeyConsistentTxConfig()));
}
@Override
public void close() throws BackendException {
manager.close();
}
}, Timestamps.MICRO,manager.openDatabase("titan"),"general");
} catch (BackendException e) {
throw new RuntimeException(e);
}
}
} | 0true
| titan-test_src_test_java_com_thinkaurelius_titan_diskstorage_configuration_KCVSConfigTest.java |
1,269 | addOperation(operations, new Runnable() {
public void run() {
IQueue q = hazelcast.getQueue("myQ");
q.isEmpty();
}
}, 1); | 0true
| hazelcast_src_main_java_com_hazelcast_examples_AllTest.java |
164 | @Repository("blURLHandlerDao")
public class URlHandlerDaoImpl implements URLHandlerDao {
@PersistenceContext(unitName = "blPU")
protected EntityManager em;
@Resource(name = "blEntityConfiguration")
protected EntityConfiguration entityConfiguration;
@Override
public URLHandler findURLHandlerByURI(String uri) {
Query query;
query = em.createNamedQuery("BC_READ_OUTGOING_URL");
query.setParameter("incomingURL", uri);
@SuppressWarnings("unchecked")
List<URLHandler> results = query.getResultList();
if (results != null && !results.isEmpty()) {
return results.get(0);
} else {
return null;
}
}
@Override
public List<URLHandler> findAllURLHandlers() {
CriteriaBuilder builder = em.getCriteriaBuilder();
CriteriaQuery<URLHandler> criteria = builder.createQuery(URLHandler.class);
Root<URLHandlerImpl> handler = criteria.from(URLHandlerImpl.class);
criteria.select(handler);
try {
return em.createQuery(criteria).getResultList();
} catch (NoResultException e) {
return new ArrayList<URLHandler>();
}
}
public URLHandler saveURLHandler(URLHandler handler) {
return em.merge(handler);
}
} | 0true
| admin_broadleaf-contentmanagement-module_src_main_java_org_broadleafcommerce_cms_url_dao_URlHandlerDaoImpl.java |
588 | public interface TaxResponse extends Serializable {
public boolean isErrorDetected();
public void setErrorDetected(boolean isErrorDetected);
public String getErrorCode();
public void setErrorCode(String errorCode);
public String getErrorText();
public void setErrorText(String errorText);
} | 0true
| common_src_main_java_org_broadleafcommerce_common_vendor_service_message_TaxResponse.java |
281 | public class CTConnection implements Closeable {
private final TTransport transport;
private final Cassandra.Client client;
private final CTConnectionFactory.Config cfg;
public CTConnection(TTransport transport, Client client, CTConnectionFactory.Config cfg) {
this.transport = transport;
this.client = client;
this.cfg = cfg;
}
public TTransport getTransport() {
return transport;
}
public Cassandra.Client getClient() {
return client;
}
public CTConnectionFactory.Config getConfig() {
return cfg;
}
public boolean isOpen() {
return transport.isOpen();
}
@Override
public void close() {
if (transport != null && transport.isOpen())
transport.close();
}
@Override
public String toString() {
return "CTConnection [transport=" + transport + ", client=" + client + ", cfg=" + cfg + "]";
}
} | 0true
| titan-cassandra_src_main_java_com_thinkaurelius_titan_diskstorage_cassandra_thrift_thriftpool_CTConnection.java |
3,228 | public class ReplicatedRecord<K, V>
implements IdentifiedDataSerializable {
private final AtomicLong hits = new AtomicLong();
private final AtomicLong lastAccessTime = new AtomicLong();
private K key;
private V value;
private VectorClock vectorClock;
private int latestUpdateHash;
private long ttlMillis;
private volatile long updateTime = System.currentTimeMillis();
public ReplicatedRecord() {
}
public ReplicatedRecord(K key, V value, VectorClock vectorClock, int hash, long ttlMillis) {
this.key = key;
this.value = value;
this.vectorClock = vectorClock;
this.latestUpdateHash = hash;
this.ttlMillis = ttlMillis;
}
public K getKey() {
access();
return key;
}
public V getValue() {
access();
return value;
}
public VectorClock getVectorClock() {
return vectorClock;
}
public long getTtlMillis() {
return ttlMillis;
}
public V setValue(V value, int hash, long ttlMillis) {
access();
V oldValue = this.value;
this.value = value;
this.latestUpdateHash = hash;
this.updateTime = System.currentTimeMillis();
this.ttlMillis = ttlMillis;
return oldValue;
}
public long getUpdateTime() {
return updateTime;
}
public int getLatestUpdateHash() {
return latestUpdateHash;
}
public long getHits() {
return hits.get();
}
public long getLastAccessTime() {
return lastAccessTime.get();
}
public void access() {
hits.incrementAndGet();
lastAccessTime.set(System.currentTimeMillis());
}
@Override
public int getFactoryId() {
return ReplicatedMapDataSerializerHook.F_ID;
}
@Override
public int getId() {
return ReplicatedMapDataSerializerHook.RECORD;
}
@Override
public void writeData(ObjectDataOutput out)
throws IOException {
out.writeObject(key);
out.writeObject(value);
vectorClock.writeData(out);
out.writeInt(latestUpdateHash);
out.writeLong(ttlMillis);
}
@Override
public void readData(ObjectDataInput in)
throws IOException {
key = in.readObject();
value = in.readObject();
vectorClock = new VectorClock();
vectorClock.readData(in);
latestUpdateHash = in.readInt();
ttlMillis = in.readLong();
}
//CHECKSTYLE:OFF
// Deactivated due to complexity of the equals method
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
ReplicatedRecord that = (ReplicatedRecord) o;
if (latestUpdateHash != that.latestUpdateHash) {
return false;
}
if (ttlMillis != that.ttlMillis) {
return false;
}
if (key != null ? !key.equals(that.key) : that.key != null) {
return false;
}
if (value != null ? !value.equals(that.value) : that.value != null) {
return false;
}
return true;
}
//CHECKSTYLE:ON
@Override
public int hashCode() {
int result = key != null ? key.hashCode() : 0;
result = 31 * result + (value != null ? value.hashCode() : 0);
result = 31 * result + latestUpdateHash;
result = 31 * result + (int) (ttlMillis ^ (ttlMillis >>> 32));
return result;
}
@Override
public String toString() {
final StringBuilder sb = new StringBuilder("ReplicatedRecord{");
sb.append("key=").append(key);
sb.append(", value=").append(value);
sb.append(", vector=").append(vectorClock);
sb.append(", latestUpdateHash=").append(latestUpdateHash);
sb.append(", ttlMillis=").append(ttlMillis);
sb.append('}');
return sb.toString();
}
} | 1no label
| hazelcast_src_main_java_com_hazelcast_replicatedmap_record_ReplicatedRecord.java |
23 | {
@Override
public boolean matchesSafely( LogEntry.Done done )
{
return done != null && done.getIdentifier() == identifier;
}
@Override
public void describeTo( Description description )
{
description.appendText( String.format( "Done[%d]", identifier ) );
}
}; | 1no label
| community_kernel_src_test_java_org_neo4j_kernel_impl_transaction_xaframework_LogMatchers.java |
71 | @SuppressWarnings("unchecked")
public class OSharedContainerImpl implements OSharedContainer {
protected Map<String, Object> sharedResources = new HashMap<String, Object>();
public synchronized boolean existsResource(final String iName) {
return sharedResources.containsKey(iName);
}
public synchronized <T> T removeResource(final String iName) {
T resource = (T) sharedResources.remove(iName);
if (resource instanceof OSharedResource)
((OSharedResource) resource).releaseExclusiveLock();
return resource;
}
public synchronized <T> T getResource(final String iName, final Callable<T> iCallback) {
T value = (T) sharedResources.get(iName);
if (value == null) {
// CREATE IT
try {
value = iCallback.call();
} catch (Exception e) {
throw new OException("Error on creation of shared resource", e);
}
if (value instanceof OSharedResource)
((OSharedResource) value).acquireExclusiveLock();
sharedResources.put(iName, value);
}
return value;
}
} | 0true
| commons_src_main_java_com_orientechnologies_common_concur_resource_OSharedContainerImpl.java |
3,336 | static class DoubleValues extends org.elasticsearch.index.fielddata.DoubleValues.WithOrdinals {
private final BigFloatArrayList values;
DoubleValues(BigFloatArrayList values, Ordinals.Docs ordinals) {
super(ordinals);
this.values = values;
}
@Override
public double getValueByOrd(long ord) {
return values.get(ord);
}
} | 0true
| src_main_java_org_elasticsearch_index_fielddata_plain_FloatArrayAtomicFieldData.java |
3,353 | public abstract class GeoPointDoubleArrayAtomicFieldData extends AtomicGeoPointFieldData<ScriptDocValues> {
private final int numDocs;
protected long size = -1;
public GeoPointDoubleArrayAtomicFieldData(int numDocs) {
this.numDocs = numDocs;
}
@Override
public void close() {
}
@Override
public int getNumDocs() {
return numDocs;
}
@Override
public ScriptDocValues getScriptValues() {
return new ScriptDocValues.GeoPoints(getGeoPointValues());
}
static class WithOrdinals extends GeoPointDoubleArrayAtomicFieldData {
private final BigDoubleArrayList lon, lat;
private final Ordinals ordinals;
public WithOrdinals(BigDoubleArrayList lon, BigDoubleArrayList lat, int numDocs, Ordinals ordinals) {
super(numDocs);
this.lon = lon;
this.lat = lat;
this.ordinals = ordinals;
}
@Override
public boolean isMultiValued() {
return ordinals.isMultiValued();
}
@Override
public boolean isValuesOrdered() {
return true;
}
@Override
public long getNumberUniqueValues() {
return ordinals.getNumOrds();
}
@Override
public long getMemorySizeInBytes() {
if (size == -1) {
size = RamUsageEstimator.NUM_BYTES_INT/*size*/ + RamUsageEstimator.NUM_BYTES_INT/*numDocs*/ + lon.sizeInBytes() + lat.sizeInBytes();
}
return size;
}
@Override
public GeoPointValues getGeoPointValues() {
return new GeoPointValuesWithOrdinals(lon, lat, ordinals.ordinals());
}
public static class GeoPointValuesWithOrdinals extends GeoPointValues {
private final BigDoubleArrayList lon, lat;
private final Ordinals.Docs ordinals;
private final GeoPoint scratch = new GeoPoint();
GeoPointValuesWithOrdinals(BigDoubleArrayList lon, BigDoubleArrayList lat, Ordinals.Docs ordinals) {
super(ordinals.isMultiValued());
this.lon = lon;
this.lat = lat;
this.ordinals = ordinals;
}
@Override
public GeoPoint nextValue() {
final long ord = ordinals.nextOrd();
assert ord > 0;
return scratch.reset(lat.get(ord), lon.get(ord));
}
@Override
public int setDocument(int docId) {
this.docId = docId;
return ordinals.setDocument(docId);
}
}
}
/**
* Assumes unset values are marked in bitset, and docId is used as the index to the value array.
*/
public static class SingleFixedSet extends GeoPointDoubleArrayAtomicFieldData {
private final BigDoubleArrayList lon, lat;
private final FixedBitSet set;
private final long numOrds;
public SingleFixedSet(BigDoubleArrayList lon, BigDoubleArrayList lat, int numDocs, FixedBitSet set, long numOrds) {
super(numDocs);
this.lon = lon;
this.lat = lat;
this.set = set;
this.numOrds = numOrds;
}
@Override
public boolean isMultiValued() {
return false;
}
@Override
public boolean isValuesOrdered() {
return false;
}
@Override
public long getNumberUniqueValues() {
return numOrds;
}
@Override
public long getMemorySizeInBytes() {
if (size == -1) {
size = RamUsageEstimator.NUM_BYTES_INT/*size*/ + RamUsageEstimator.NUM_BYTES_INT/*numDocs*/ + lon.sizeInBytes() + lat.sizeInBytes() + RamUsageEstimator.sizeOf(set.getBits());
}
return size;
}
@Override
public GeoPointValues getGeoPointValues() {
return new GeoPointValuesSingleFixedSet(lon, lat, set);
}
static class GeoPointValuesSingleFixedSet extends GeoPointValues {
private final BigDoubleArrayList lon;
private final BigDoubleArrayList lat;
private final FixedBitSet set;
private final GeoPoint scratch = new GeoPoint();
GeoPointValuesSingleFixedSet(BigDoubleArrayList lon, BigDoubleArrayList lat, FixedBitSet set) {
super(false);
this.lon = lon;
this.lat = lat;
this.set = set;
}
@Override
public int setDocument(int docId) {
this.docId = docId;
return set.get(docId) ? 1 : 0;
}
@Override
public GeoPoint nextValue() {
return scratch.reset(lat.get(docId), lon.get(docId));
}
}
}
/**
* Assumes all the values are "set", and docId is used as the index to the value array.
*/
public static class Single extends GeoPointDoubleArrayAtomicFieldData {
private final BigDoubleArrayList lon, lat;
private final long numOrds;
public Single(BigDoubleArrayList lon, BigDoubleArrayList lat, int numDocs, long numOrds) {
super(numDocs);
this.lon = lon;
this.lat = lat;
this.numOrds = numOrds;
}
@Override
public boolean isMultiValued() {
return false;
}
@Override
public boolean isValuesOrdered() {
return false;
}
@Override
public long getNumberUniqueValues() {
return numOrds;
}
@Override
public long getMemorySizeInBytes() {
if (size == -1) {
size = RamUsageEstimator.NUM_BYTES_INT/*size*/ + RamUsageEstimator.NUM_BYTES_INT/*numDocs*/ + (lon.sizeInBytes() + lat.sizeInBytes());
}
return size;
}
@Override
public GeoPointValues getGeoPointValues() {
return new GeoPointValuesSingle(lon, lat);
}
static class GeoPointValuesSingle extends GeoPointValues {
private final BigDoubleArrayList lon;
private final BigDoubleArrayList lat;
private final GeoPoint scratch = new GeoPoint();
GeoPointValuesSingle(BigDoubleArrayList lon, BigDoubleArrayList lat) {
super(false);
this.lon = lon;
this.lat = lat;
}
@Override
public int setDocument(int docId) {
this.docId = docId;
return 1;
}
@Override
public GeoPoint nextValue() {
return scratch.reset(lat.get(docId), lon.get(docId));
}
}
}
} | 0true
| src_main_java_org_elasticsearch_index_fielddata_plain_GeoPointDoubleArrayAtomicFieldData.java |
1,093 | public class PermissionPolicyConfig {
private String className = null;
private IPermissionPolicy implementation = null;
private Properties properties = new Properties();
public PermissionPolicyConfig() {
super();
}
public PermissionPolicyConfig(String className) {
super();
this.className = className;
}
public String getClassName() {
return className;
}
public PermissionPolicyConfig setClassName(String className) {
this.className = className;
return this;
}
public IPermissionPolicy getImplementation() {
return implementation;
}
public PermissionPolicyConfig setImplementation(IPermissionPolicy policyImpl) {
this.implementation = policyImpl;
return this;
}
public Properties getProperties() {
return properties;
}
public PermissionPolicyConfig setProperties(Properties properties) {
this.properties = properties;
return this;
}
@Override
public String toString() {
final StringBuilder sb = new StringBuilder();
sb.append("PermissionPolicyConfig");
sb.append("{className='").append(className).append('\'');
sb.append(", implementation=").append(implementation);
sb.append(", properties=").append(properties);
sb.append('}');
return sb.toString();
}
} | 0true
| hazelcast_src_main_java_com_hazelcast_config_PermissionPolicyConfig.java |
1,072 | indexAction.execute(upsertRequest, new ActionListener<IndexResponse>() {
@Override
public void onResponse(IndexResponse response) {
UpdateResponse update = new UpdateResponse(response.getIndex(), response.getType(), response.getId(), response.getVersion(), response.isCreated());
if (request.fields() != null && request.fields().length > 0) {
Tuple<XContentType, Map<String, Object>> sourceAndContent = XContentHelper.convertToMap(upsertSourceBytes, true);
update.setGetResult(updateHelper.extractGetResult(request, response.getVersion(), sourceAndContent.v2(), sourceAndContent.v1(), upsertSourceBytes));
} else {
update.setGetResult(null);
}
listener.onResponse(update);
}
@Override
public void onFailure(Throwable e) {
e = ExceptionsHelper.unwrapCause(e);
if (e instanceof VersionConflictEngineException || e instanceof DocumentAlreadyExistsException) {
if (retryCount < request.retryOnConflict()) {
threadPool.executor(executor()).execute(new Runnable() {
@Override
public void run() {
shardOperation(request, listener, retryCount + 1);
}
});
return;
}
}
listener.onFailure(e);
}
}); | 1no label
| src_main_java_org_elasticsearch_action_update_TransportUpdateAction.java |
299 | {
@Override
public NeoStoreRecord newUnused( Long key, Void additionalData )
{
throw new UnsupportedOperationException();
}
@Override
public NeoStoreRecord load( Long key, Void additionalData )
{
return neoStore.asRecord();
}
@Override
public void ensureHeavy( NeoStoreRecord record )
{
}
@Override
public NeoStoreRecord clone(NeoStoreRecord neoStoreRecord) {
// We do not expect to manage the before state, so this operation will not be called.
throw new UnsupportedOperationException("Clone on NeoStoreRecord");
}
}, false ); | 0true
| community_kernel_src_main_java_org_neo4j_kernel_impl_nioneo_xa_NeoStoreTransaction.java |
2,849 | public class CzechStemTokenFilterFactory extends AbstractTokenFilterFactory {
@Inject
public CzechStemTokenFilterFactory(Index index, @IndexSettings Settings indexSettings, @Assisted String name, @Assisted Settings settings) {
super(index, indexSettings, name, settings);
}
@Override
public TokenStream create(TokenStream tokenStream) {
return new CzechStemFilter(tokenStream);
}
} | 0true
| src_main_java_org_elasticsearch_index_analysis_CzechStemTokenFilterFactory.java |
1,233 | @Service("blPricingService")
public class PricingServiceImpl implements PricingService {
@Resource(name="blPricingWorkflow")
protected SequenceProcessor pricingWorkflow;
public Order executePricing(Order order) throws PricingException {
try {
PricingContext context = (PricingContext) pricingWorkflow.doActivities(order);
Order response = context.getSeedData();
return response;
} catch (WorkflowException e) {
throw new PricingException("Unable to execute pricing for order -- id: " + order.getId(), e);
}
}
} | 0true
| core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_pricing_service_PricingServiceImpl.java |
227 | public interface ModuleConfigurationDao {
public ModuleConfiguration readById(Long id);
public ModuleConfiguration save(ModuleConfiguration config);
public void delete(ModuleConfiguration config);
public List<ModuleConfiguration> readAllByType(ModuleConfigurationType type);
public List<ModuleConfiguration> readActiveByType(ModuleConfigurationType type);
public List<ModuleConfiguration> readByType(Class<? extends ModuleConfiguration> type);
/**
* Returns the number of milliseconds that the current date/time will be cached for queries before refreshing.
* This aids in query caching, otherwise every query that utilized current date would be different and caching
* would be ineffective.
*
* @return the milliseconds to cache the current date/time
*/
public Long getCurrentDateResolution();
/**
* Sets the number of milliseconds that the current date/time will be cached for queries before refreshing.
* This aids in query caching, otherwise every query that utilized current date would be different and caching
* would be ineffective.
*
* @param currentDateResolution the milliseconds to cache the current date/time
*/
public void setCurrentDateResolution(Long currentDateResolution);
} | 0true
| common_src_main_java_org_broadleafcommerce_common_config_dao_ModuleConfigurationDao.java |
593 | public class OIndexDefinitionFactory {
private static final Pattern FILED_NAME_PATTERN = Pattern.compile("\\s+");
/**
* Creates an instance of {@link OIndexDefinition} for automatic index.
*
* @param oClass
* class which will be indexed
* @param fieldNames
* list of properties which will be indexed. Format should be '<property> [by key|value]', use 'by key' or 'by value' to
* describe how to index maps. By default maps indexed by key
* @param types
* types of indexed properties
* @return index definition instance
*/
public static OIndexDefinition createIndexDefinition(final OClass oClass, final List<String> fieldNames, final List<OType> types) {
checkTypes(oClass, fieldNames, types);
if (fieldNames.size() == 1)
return createSingleFieldIndexDefinition(oClass, fieldNames.get(0), types.get(0));
else
return createMultipleFieldIndexDefinition(oClass, fieldNames, types);
}
/**
* Extract field name from '<property> [by key|value]' field format.
*
* @param fieldDefinition
* definition of field
* @return extracted property name
*/
public static String extractFieldName(final String fieldDefinition) {
String[] fieldNameParts = FILED_NAME_PATTERN.split(fieldDefinition);
if (fieldNameParts.length == 1)
return fieldDefinition;
if (fieldNameParts.length == 3 && "by".equalsIgnoreCase(fieldNameParts[1]))
return fieldNameParts[0];
throw new IllegalArgumentException("Illegal field name format, should be '<property> [by key|value]' but was '"
+ fieldDefinition + '\'');
}
private static OIndexDefinition createMultipleFieldIndexDefinition(final OClass oClass, final List<String> fieldsToIndex,
final List<OType> types) {
final String className = oClass.getName();
final OCompositeIndexDefinition compositeIndex = new OCompositeIndexDefinition(className);
for (int i = 0, fieldsToIndexSize = fieldsToIndex.size(); i < fieldsToIndexSize; i++) {
compositeIndex.addIndex(createSingleFieldIndexDefinition(oClass, fieldsToIndex.get(i), types.get(i)));
}
return compositeIndex;
}
private static void checkTypes(OClass oClass, List<String> fieldNames, List<OType> types) {
if (fieldNames.size() != types.size())
throw new IllegalArgumentException("Count of field names doesn't match count of field types. It was " + fieldNames.size()
+ " fields, but " + types.size() + " types.");
for (int i = 0, fieldNamesSize = fieldNames.size(); i < fieldNamesSize; i++) {
String fieldName = fieldNames.get(i);
OType type = types.get(i);
final OProperty property = oClass.getProperty(fieldName);
if (property != null && !type.equals(property.getType())) {
throw new IllegalArgumentException("Property type list not match with real property types");
}
}
}
private static OIndexDefinition createSingleFieldIndexDefinition(OClass oClass, final String field, final OType type) {
final String fieldName = adjustFieldName(oClass, extractFieldName(field));
final OIndexDefinition indexDefinition;
final OType indexType;
if (type == OType.EMBEDDEDMAP || type == OType.LINKMAP) {
final OPropertyMapIndexDefinition.INDEX_BY indexBy = extractMapIndexSpecifier(field);
if (indexBy.equals(OPropertyMapIndexDefinition.INDEX_BY.KEY))
indexType = OType.STRING;
else {
if (type == OType.LINKMAP)
indexType = OType.LINK;
else {
final OProperty propertyToIndex = oClass.getProperty(fieldName);
indexType = propertyToIndex.getLinkedType();
if (indexType == null)
throw new OIndexException("Linked type was not provided."
+ " You should provide linked type for embedded collections that are going to be indexed.");
}
}
indexDefinition = new OPropertyMapIndexDefinition(oClass.getName(), fieldName, indexType, indexBy);
} else if (type.equals(OType.EMBEDDEDLIST) || type.equals(OType.EMBEDDEDSET) || type.equals(OType.LINKLIST)
|| type.equals(OType.LINKSET)) {
if (type.equals(OType.LINKSET))
throw new OIndexException("LINKSET indexing is not supported.");
else if (type.equals(OType.LINKLIST)) {
indexType = OType.LINK;
} else {
final OProperty propertyToIndex = oClass.getProperty(fieldName);
indexType = propertyToIndex.getLinkedType();
if (indexType == null)
throw new OIndexException("Linked type was not provided."
+ " You should provide linked type for embedded collections that are going to be indexed.");
}
indexDefinition = new OPropertyListIndexDefinition(oClass.getName(), fieldName, indexType);
} else
indexDefinition = new OPropertyIndexDefinition(oClass.getName(), fieldName, type);
return indexDefinition;
}
private static OPropertyMapIndexDefinition.INDEX_BY extractMapIndexSpecifier(final String fieldName) {
String[] fieldNameParts = FILED_NAME_PATTERN.split(fieldName);
if (fieldNameParts.length == 1)
return OPropertyMapIndexDefinition.INDEX_BY.KEY;
if (fieldNameParts.length == 3) {
if ("by".equals(fieldNameParts[1].toLowerCase()))
try {
return OPropertyMapIndexDefinition.INDEX_BY.valueOf(fieldNameParts[2].toUpperCase());
} catch (IllegalArgumentException iae) {
throw new IllegalArgumentException("Illegal field name format, should be '<property> [by key|value]' but was '"
+ fieldName + '\'');
}
}
throw new IllegalArgumentException("Illegal field name format, should be '<property> [by key|value]' but was '" + fieldName
+ '\'');
}
private static String adjustFieldName(final OClass clazz, final String fieldName) {
final OProperty property = clazz.getProperty(fieldName);
if (property != null)
return property.getName();
else
return fieldName;
}
} | 0true
| core_src_main_java_com_orientechnologies_orient_core_index_OIndexDefinitionFactory.java |
227 | public class ExpirationCacheTest extends KCVSCacheTest {
public static final String METRICS_STRING = "metrics";
public static final long CACHE_SIZE = 1024*1024*48; //48 MB
@Override
public KCVSCache getCache(KeyColumnValueStore store) {
return getCache(store,new StandardDuration(1,TimeUnit.DAYS),ZeroDuration.INSTANCE);
}
private static KCVSCache getCache(KeyColumnValueStore store, Duration expirationTime, Duration graceWait) {
return new ExpirationKCVSCache(store,METRICS_STRING,expirationTime.getLength(TimeUnit.MILLISECONDS),graceWait.getLength(TimeUnit.MILLISECONDS),CACHE_SIZE);
}
@Test
public void testExpiration() throws Exception {
testExpiration(new StandardDuration(200,TimeUnit.MILLISECONDS));
testExpiration(new StandardDuration(4,TimeUnit.SECONDS));
testExpiration(new StandardDuration(1,TimeUnit.SECONDS));
}
private void testExpiration(Duration expirationTime) throws Exception {
final int numKeys = 100, numCols = 10;
loadStore(numKeys,numCols);
//Replace cache with proper times
cache = getCache(store,expirationTime,ZeroDuration.INSTANCE);
StaticBuffer key = BufferUtil.getIntBuffer(81);
List<StaticBuffer> keys = new ArrayList<StaticBuffer>();
keys.add(key);
keys.add(BufferUtil.getIntBuffer(37));
keys.add(BufferUtil.getIntBuffer(2));
SliceQuery query = getQuery(2,8);
verifyResults(key,keys,query,6);
//Modify store directly
StoreTransaction txs = getStoreTx();
store.mutate(key,KeyColumnValueStore.NO_ADDITIONS, Lists.newArrayList(BufferUtil.getIntBuffer(5)),txs);
txs.commit();
Timepoint utime = times.getTime();
//Should still see cached results
verifyResults(key,keys,query,6);
times.sleepPast(utime.add(expirationTime.multiply(0.5))); //Sleep half way through expiration time
verifyResults(key, keys, query, 6);
times.sleepPast(utime.add(expirationTime)); //Sleep past expiration time...
times.sleepFor(new StandardDuration(5,TimeUnit.MILLISECONDS)); //...and just a little bit longer
//Now the results should be different
verifyResults(key, keys, query, 5);
//If we modify through cache store...
CacheTransaction tx = getCacheTx();
cache.mutateEntries(key, KeyColumnValueStore.NO_ADDITIONS, Lists.newArrayList(getEntry(4, 4)), tx);
tx.commit();
store.resetCounter();
//...invalidation should happen and the result set is updated immediately
verifyResults(key, keys, query, 4);
}
@Test
public void testGracePeriod() throws Exception {
testGracePeriod(new StandardDuration(200,TimeUnit.MILLISECONDS));
testGracePeriod(ZeroDuration.INSTANCE);
testGracePeriod(new StandardDuration(1,TimeUnit.SECONDS));
}
private void testGracePeriod(Duration graceWait) throws Exception {
final int minCleanupTriggerCalls = 5;
final int numKeys = 100, numCols = 10;
loadStore(numKeys,numCols);
//Replace cache with proper times
cache = getCache(store,new StandardDuration(200,TimeUnit.DAYS),graceWait);
StaticBuffer key = BufferUtil.getIntBuffer(81);
List<StaticBuffer> keys = new ArrayList<StaticBuffer>();
keys.add(key);
keys.add(BufferUtil.getIntBuffer(37));
keys.add(BufferUtil.getIntBuffer(2));
SliceQuery query = getQuery(2,8);
verifyResults(key,keys,query,6);
//If we modify through cache store...
CacheTransaction tx = getCacheTx();
cache.mutateEntries(key,KeyColumnValueStore.NO_ADDITIONS, Lists.newArrayList(getEntry(4,4)),tx);
tx.commit();
Timepoint utime = times.getTime();
store.resetCounter();
//...invalidation should happen and the result set is updated immediately
verifyResults(key, keys, query, 5);
assertEquals(2,store.getSliceCalls());
//however, the key is expired and hence repeated calls need to go through to the store
verifyResults(key, keys, query, 5);
assertEquals(4,store.getSliceCalls());
//however, when we sleep past the grace wait time and trigger a cleanup...
times.sleepPast(utime.add(graceWait));
for (int t=0; t<minCleanupTriggerCalls;t++) {
assertEquals(5,cache.getSlice(new KeySliceQuery(key,query),tx).size());
times.sleepFor(new StandardDuration(5,TimeUnit.MILLISECONDS));
}
//...the cache should cache results again
store.resetCounter();
verifyResults(key, keys, query, 5);
assertEquals(0,store.getSliceCalls());
verifyResults(key, keys, query, 5);
assertEquals(0,store.getSliceCalls());
}
private void verifyResults(StaticBuffer key, List<StaticBuffer> keys, SliceQuery query, int expectedResults) throws Exception {
CacheTransaction tx = getCacheTx();
assertEquals(expectedResults,cache.getSlice(new KeySliceQuery(key,query),tx).size());
Map<StaticBuffer,EntryList> results = cache.getSlice(keys,query,tx);
assertEquals(keys.size(),results.size());
assertEquals(expectedResults, results.get(key).size());
tx.commit();
}
} | 0true
| titan-test_src_test_java_com_thinkaurelius_titan_diskstorage_cache_ExpirationCacheTest.java |
5,133 | public abstract class InternalAggregation implements Aggregation, ToXContent, Streamable {
/**
* The aggregation type that holds all the string types that are associated with an aggregation:
* <ul>
* <li>name - used as the parser type</li>
* <li>stream - used as the stream type</li>
* </ul>
*/
public static class Type {
private String name;
private BytesReference stream;
public Type(String name) {
this(name, new BytesArray(name));
}
public Type(String name, String stream) {
this(name, new BytesArray(stream));
}
public Type(String name, BytesReference stream) {
this.name = name;
this.stream = stream;
}
/**
* @return The name of the type (mainly used for registering the parser for the aggregator (see {@link org.elasticsearch.search.aggregations.Aggregator.Parser#type()}).
*/
public String name() {
return name;
}
/**
* @return The name of the stream type (used for registering the aggregation stream
* (see {@link AggregationStreams#registerStream(AggregationStreams.Stream, org.elasticsearch.common.bytes.BytesReference...)}).
*/
public BytesReference stream() {
return stream;
}
}
protected static class ReduceContext {
private final List<InternalAggregation> aggregations;
private final CacheRecycler cacheRecycler;
public ReduceContext(List<InternalAggregation> aggregations, CacheRecycler cacheRecycler) {
this.aggregations = aggregations;
this.cacheRecycler = cacheRecycler;
}
public List<InternalAggregation> aggregations() {
return aggregations;
}
public CacheRecycler cacheRecycler() {
return cacheRecycler;
}
}
protected String name;
/** Constructs an un initialized addAggregation (used for serialization) **/
protected InternalAggregation() {}
/**
* Constructs an get with a given name.
*
* @param name The name of the get.
*/
protected InternalAggregation(String name) {
this.name = name;
}
@Override
public String getName() {
return name;
}
/**
* @return The {@link Type} of this aggregation
*/
public abstract Type type();
/**
* Reduces the given addAggregation to a single one and returns it. In <b>most</b> cases, the assumption will be the all given
* addAggregation are of the same type (the same type as this aggregation). For best efficiency, when implementing,
* try reusing an existing get instance (typically the first in the given list) to save on redundant object
* construction.
*/
public abstract InternalAggregation reduce(ReduceContext reduceContext);
/**
* Common xcontent fields that are shared among addAggregation
*/
public static final class CommonFields {
public static final XContentBuilderString BUCKETS = new XContentBuilderString("buckets");
public static final XContentBuilderString VALUE = new XContentBuilderString("value");
public static final XContentBuilderString VALUE_AS_STRING = new XContentBuilderString("value_as_string");
public static final XContentBuilderString DOC_COUNT = new XContentBuilderString("doc_count");
public static final XContentBuilderString KEY = new XContentBuilderString("key");
public static final XContentBuilderString KEY_AS_STRING = new XContentBuilderString("key_as_string");
public static final XContentBuilderString FROM = new XContentBuilderString("from");
public static final XContentBuilderString FROM_AS_STRING = new XContentBuilderString("from_as_string");
public static final XContentBuilderString TO = new XContentBuilderString("to");
public static final XContentBuilderString TO_AS_STRING = new XContentBuilderString("to_as_string");
}
} | 1no label
| src_main_java_org_elasticsearch_search_aggregations_InternalAggregation.java |
2,378 | SINGLE {
@Override
public long toSingles(long size) {
return size;
}
@Override
public long toKilo(long size) {
return size / (C1 / C0);
}
@Override
public long toMega(long size) {
return size / (C2 / C0);
}
@Override
public long toGiga(long size) {
return size / (C3 / C0);
}
@Override
public long toTera(long size) {
return size / (C4 / C0);
}
@Override
public long toPeta(long size) {
return size / (C5 / C0);
}
}, | 0true
| src_main_java_org_elasticsearch_common_unit_SizeUnit.java |
1,924 | public abstract class AbstractTxnMapRequest extends BaseTransactionRequest {
String name;
TxnMapRequestType requestType;
Data key;
Data value;
Data newValue;
long ttl = -1;
public AbstractTxnMapRequest() {
}
public AbstractTxnMapRequest(String name, TxnMapRequestType requestType) {
this.name = name;
this.requestType = requestType;
}
public AbstractTxnMapRequest(String name, TxnMapRequestType requestType, Data key) {
this(name, requestType);
this.key = key;
}
public AbstractTxnMapRequest(String name, TxnMapRequestType requestType, Data key, Data value) {
this(name, requestType, key);
this.value = value;
}
public AbstractTxnMapRequest(String name, TxnMapRequestType requestType, Data key, Data value, Data newValue) {
this(name, requestType, key, value);
this.newValue = newValue;
}
public AbstractTxnMapRequest(String name, TxnMapRequestType requestType, Data key, Data value, long ttl, TimeUnit timeUnit) {
this(name, requestType, key, value);
this.ttl = timeUnit == null ? ttl : timeUnit.toMillis(ttl);
}
public Object innerCall() throws Exception {
final TransactionContext context = getEndpoint().getTransactionContext(txnId);
final TransactionalMap map = context.getMap(name);
switch (requestType) {
case CONTAINS_KEY:
return map.containsKey(key);
case GET:
return map.get(key);
case GET_FOR_UPDATE:
return map.getForUpdate(key);
case SIZE:
return map.size();
case PUT:
return map.put(key, value);
case PUT_WITH_TTL:
return map.put(key, value, ttl, TimeUnit.MILLISECONDS);
case PUT_IF_ABSENT:
return map.putIfAbsent(key, value);
case REPLACE:
return map.replace(key, value);
case REPLACE_IF_SAME:
return map.replace(key, value, newValue);
case SET:
map.set(key, value);
break;
case REMOVE:
return map.remove(key);
case DELETE:
map.delete(key);
break;
case REMOVE_IF_SAME:
return map.remove(key, value);
case KEYSET:
return getMapKeySet(map.keySet());
case KEYSET_BY_PREDICATE:
return getMapKeySet(map.keySet(getPredicate()));
case VALUES:
return getMapValueCollection(map.values());
case VALUES_BY_PREDICATE:
return getMapValueCollection(map.values(getPredicate()));
}
return null;
}
private MapKeySet getMapKeySet(Set keySet) {
final HashSet<Data> dataKeySet = new HashSet<Data>();
for (Object key : keySet) {
final Data dataKey = getClientEngine().toData(key);
dataKeySet.add(dataKey);
}
return new MapKeySet(dataKeySet);
}
private MapValueCollection getMapValueCollection(Collection coll) {
final HashSet<Data> valuesCollection = new HashSet<Data>(coll.size());
for (Object value : coll) {
final Data dataValue = getClientEngine().toData(value);
valuesCollection.add(dataValue);
}
return new MapValueCollection(valuesCollection);
}
public String getServiceName() {
return MapService.SERVICE_NAME;
}
public int getFactoryId() {
return MapPortableHook.F_ID;
}
public void write(PortableWriter writer) throws IOException {
super.write(writer);
writer.writeUTF("n", name);
writer.writeInt("t", requestType.type);
final ObjectDataOutput out = writer.getRawDataOutput();
IOUtil.writeNullableData(out, key);
IOUtil.writeNullableData(out, value);
IOUtil.writeNullableData(out, newValue);
writeDataInner(out);
out.writeLong(ttl);
}
public void read(PortableReader reader) throws IOException {
super.read(reader);
name = reader.readUTF("n");
requestType = TxnMapRequestType.getByType(reader.readInt("t"));
final ObjectDataInput in = reader.getRawDataInput();
key = IOUtil.readNullableData(in);
value = IOUtil.readNullableData(in);
newValue = IOUtil.readNullableData(in);
readDataInner(in);
ttl = in.readLong();
}
protected abstract Predicate getPredicate();
protected abstract void writeDataInner(ObjectDataOutput writer) throws IOException;
protected abstract void readDataInner(ObjectDataInput reader) throws IOException;
public enum TxnMapRequestType {
CONTAINS_KEY(1),
GET(2),
SIZE(3),
PUT(4),
PUT_IF_ABSENT(5),
REPLACE(6),
REPLACE_IF_SAME(7),
SET(8),
REMOVE(9),
DELETE(10),
REMOVE_IF_SAME(11),
KEYSET(12),
KEYSET_BY_PREDICATE(13),
VALUES(14),
VALUES_BY_PREDICATE(15),
GET_FOR_UPDATE(16),
PUT_WITH_TTL(17);
int type;
TxnMapRequestType(int i) {
type = i;
}
public static TxnMapRequestType getByType(int type) {
for (TxnMapRequestType requestType : values()) {
if (requestType.type == type) {
return requestType;
}
}
return null;
}
}
public Permission getRequiredPermission() {
String action;
boolean isLock = true;
switch (requestType) {
case CONTAINS_KEY:
case GET:
case SIZE:
case KEYSET:
case KEYSET_BY_PREDICATE:
case VALUES:
case VALUES_BY_PREDICATE:
action = ActionConstants.ACTION_READ;
isLock = false;
break;
case GET_FOR_UPDATE:
action = ActionConstants.ACTION_READ;
break;
case PUT:
case PUT_IF_ABSENT:
case REPLACE:
case REPLACE_IF_SAME:
case SET:
case PUT_WITH_TTL:
action = ActionConstants.ACTION_PUT;
break;
case REMOVE:
case DELETE:
case REMOVE_IF_SAME:
action = ActionConstants.ACTION_REMOVE;
break;
default:
throw new IllegalArgumentException("Invalid request type: " + requestType);
}
if (isLock) {
return new MapPermission(name, action, ActionConstants.ACTION_LOCK);
}
return new MapPermission(name, action);
}
} | 0true
| hazelcast_src_main_java_com_hazelcast_map_client_AbstractTxnMapRequest.java |
1,453 | return new MessageListener<Object>() {
public void onMessage(final Message<Object> message) {
final Timestamp ts = (Timestamp) message.getMessageObject();
final Object key = ts.getKey();
for (;;) {
final Value value = cache.get(key);
final Long current = value != null ? (Long) value.getValue() : null;
if (current != null) {
if (ts.getTimestamp() > current) {
if (cache.replace(key, value, new Value(value.getVersion(),
ts.getTimestamp(), Clock.currentTimeMillis()))) {
return;
}
} else {
return;
}
} else {
if (cache.putIfAbsent(key, new Value(null, ts.getTimestamp(),
Clock.currentTimeMillis())) == null) {
return;
}
}
}
}
}; | 1no label
| hazelcast-hibernate_hazelcast-hibernate3_src_main_java_com_hazelcast_hibernate_local_TimestampsRegionCache.java |
232 | XPostingsHighlighter highlighter = new XPostingsHighlighter() {
@Override
protected PassageFormatter getFormatter(String field) {
return new PassageFormatter() {
PassageFormatter defaultFormatter = new DefaultPassageFormatter();
@Override
public String[] format(Passage passages[], String content) {
// Just turns the String snippet into a length 2
// array of String
return new String[] {"blah blah", defaultFormatter.format(passages, content).toString()};
}
};
}
}; | 0true
| src_test_java_org_apache_lucene_search_postingshighlight_XPostingsHighlighterTests.java |
279 | public interface EmailServiceProducer {
public void send(@SuppressWarnings("rawtypes") final HashMap props);
} | 0true
| common_src_main_java_org_broadleafcommerce_common_email_service_message_EmailServiceProducer.java |
565 | public class PutMappingResponse extends AcknowledgedResponse {
PutMappingResponse() {
}
PutMappingResponse(boolean acknowledged) {
super(acknowledged);
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
readAcknowledged(in);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
writeAcknowledged(out);
}
} | 0true
| src_main_java_org_elasticsearch_action_admin_indices_mapping_put_PutMappingResponse.java |
2,612 | private class SendPingRequest implements Runnable {
private final DiscoveryNode node;
private SendPingRequest(DiscoveryNode node) {
this.node = node;
}
@Override
public void run() {
if (!running) {
return;
}
transportService.sendRequest(node, PingRequestHandler.ACTION, new PingRequest(node.id()), options().withType(TransportRequestOptions.Type.PING).withTimeout(pingRetryTimeout),
new BaseTransportResponseHandler<PingResponse>() {
@Override
public PingResponse newInstance() {
return new PingResponse();
}
@Override
public void handleResponse(PingResponse response) {
if (!running) {
return;
}
NodeFD nodeFD = nodesFD.get(node);
if (nodeFD != null) {
if (!nodeFD.running) {
return;
}
nodeFD.retryCount = 0;
threadPool.schedule(pingInterval, ThreadPool.Names.SAME, SendPingRequest.this);
}
}
@Override
public void handleException(TransportException exp) {
// check if the master node did not get switched on us...
if (!running) {
return;
}
if (exp instanceof ConnectTransportException) {
// ignore this one, we already handle it by registering a connection listener
return;
}
NodeFD nodeFD = nodesFD.get(node);
if (nodeFD != null) {
if (!nodeFD.running) {
return;
}
int retryCount = ++nodeFD.retryCount;
logger.trace("[node ] failed to ping [{}], retry [{}] out of [{}]", exp, node, retryCount, pingRetryCount);
if (retryCount >= pingRetryCount) {
logger.debug("[node ] failed to ping [{}], tried [{}] times, each with maximum [{}] timeout", node, pingRetryCount, pingRetryTimeout);
// not good, failure
if (nodesFD.remove(node) != null) {
notifyNodeFailure(node, "failed to ping, tried [" + pingRetryCount + "] times, each with maximum [" + pingRetryTimeout + "] timeout");
}
} else {
// resend the request, not reschedule, rely on send timeout
transportService.sendRequest(node, PingRequestHandler.ACTION, new PingRequest(node.id()),
options().withType(TransportRequestOptions.Type.PING).withTimeout(pingRetryTimeout), this);
}
}
}
@Override
public String executor() {
return ThreadPool.Names.SAME;
}
});
}
} | 1no label
| src_main_java_org_elasticsearch_discovery_zen_fd_NodesFaultDetection.java |
2,735 | public class LocalGatewayShardsState extends AbstractComponent implements ClusterStateListener {
private final NodeEnvironment nodeEnv;
private final LocalGatewayMetaState metaState;
private volatile Map<ShardId, ShardStateInfo> currentState = Maps.newHashMap();
@Inject
public LocalGatewayShardsState(Settings settings, NodeEnvironment nodeEnv, TransportNodesListGatewayStartedShards listGatewayStartedShards, LocalGatewayMetaState metaState) throws Exception {
super(settings);
this.nodeEnv = nodeEnv;
this.metaState = metaState;
listGatewayStartedShards.initGateway(this);
if (DiscoveryNode.dataNode(settings)) {
try {
pre019Upgrade();
long start = System.currentTimeMillis();
currentState = loadShardsStateInfo();
logger.debug("took {} to load started shards state", TimeValue.timeValueMillis(System.currentTimeMillis() - start));
} catch (Exception e) {
logger.error("failed to read local state (started shards), exiting...", e);
throw e;
}
}
}
public Map<ShardId, ShardStateInfo> currentStartedShards() {
return this.currentState;
}
public ShardStateInfo loadShardInfo(ShardId shardId) throws Exception {
return loadShardStateInfo(shardId);
}
@Override
public void clusterChanged(ClusterChangedEvent event) {
if (event.state().blocks().disableStatePersistence()) {
return;
}
if (!event.state().nodes().localNode().dataNode()) {
return;
}
if (!event.routingTableChanged()) {
return;
}
Map<ShardId, ShardStateInfo> newState = Maps.newHashMap();
newState.putAll(this.currentState);
// remove from the current state all the shards that are completely started somewhere, we won't need them anymore
// and if they are still here, we will add them in the next phase
// Also note, this works well when closing an index, since a closed index will have no routing shards entries
// so they won't get removed (we want to keep the fact that those shards are allocated on this node if needed)
for (IndexRoutingTable indexRoutingTable : event.state().routingTable()) {
for (IndexShardRoutingTable indexShardRoutingTable : indexRoutingTable) {
if (indexShardRoutingTable.countWithState(ShardRoutingState.STARTED) == indexShardRoutingTable.size()) {
newState.remove(indexShardRoutingTable.shardId());
}
}
}
// remove deleted indices from the started shards
for (ShardId shardId : currentState.keySet()) {
if (!event.state().metaData().hasIndex(shardId.index().name())) {
newState.remove(shardId);
}
}
// now, add all the ones that are active and on this node
RoutingNode routingNode = event.state().readOnlyRoutingNodes().node(event.state().nodes().localNodeId());
if (routingNode != null) {
// our node is not in play yet...
for (MutableShardRouting shardRouting : routingNode) {
if (shardRouting.active()) {
newState.put(shardRouting.shardId(), new ShardStateInfo(shardRouting.version(), shardRouting.primary()));
}
}
}
// go over the write started shards if needed
for (Iterator<Map.Entry<ShardId, ShardStateInfo>> it = newState.entrySet().iterator(); it.hasNext(); ) {
Map.Entry<ShardId, ShardStateInfo> entry = it.next();
ShardId shardId = entry.getKey();
ShardStateInfo shardStateInfo = entry.getValue();
String writeReason = null;
ShardStateInfo currentShardStateInfo = currentState.get(shardId);
if (currentShardStateInfo == null) {
writeReason = "freshly started, version [" + shardStateInfo.version + "]";
} else if (currentShardStateInfo.version != shardStateInfo.version) {
writeReason = "version changed from [" + currentShardStateInfo.version + "] to [" + shardStateInfo.version + "]";
}
// we update the write reason if we really need to write a new one...
if (writeReason == null) {
continue;
}
try {
writeShardState(writeReason, shardId, shardStateInfo, currentShardStateInfo);
} catch (Exception e) {
// we failed to write the shard state, remove it from our builder, we will try and write
// it next time...
it.remove();
}
}
// REMOVED: don't delete shard state, rely on IndicesStore to delete the shard location
// only once all shards are allocated on another node
// now, go over the current ones and delete ones that are not in the new one
// for (Map.Entry<ShardId, ShardStateInfo> entry : currentState.entrySet()) {
// ShardId shardId = entry.getKey();
// if (!newState.containsKey(shardId)) {
// if (!metaState.isDangling(shardId.index().name())) {
// deleteShardState(shardId);
// }
// }
// }
this.currentState = newState;
}
private Map<ShardId, ShardStateInfo> loadShardsStateInfo() throws Exception {
Set<ShardId> shardIds = nodeEnv.findAllShardIds();
long highestVersion = -1;
Map<ShardId, ShardStateInfo> shardsState = Maps.newHashMap();
for (ShardId shardId : shardIds) {
ShardStateInfo shardStateInfo = loadShardStateInfo(shardId);
if (shardStateInfo == null) {
continue;
}
shardsState.put(shardId, shardStateInfo);
// update the global version
if (shardStateInfo.version > highestVersion) {
highestVersion = shardStateInfo.version;
}
}
return shardsState;
}
private ShardStateInfo loadShardStateInfo(ShardId shardId) {
long highestShardVersion = -1;
ShardStateInfo highestShardState = null;
for (File shardLocation : nodeEnv.shardLocations(shardId)) {
File shardStateDir = new File(shardLocation, "_state");
if (!shardStateDir.exists() || !shardStateDir.isDirectory()) {
continue;
}
// now, iterate over the current versions, and find latest one
File[] stateFiles = shardStateDir.listFiles();
if (stateFiles == null) {
continue;
}
for (File stateFile : stateFiles) {
if (!stateFile.getName().startsWith("state-")) {
continue;
}
try {
long version = Long.parseLong(stateFile.getName().substring("state-".length()));
if (version > highestShardVersion) {
byte[] data = Streams.copyToByteArray(new FileInputStream(stateFile));
if (data.length == 0) {
logger.debug("[{}][{}]: not data for [" + stateFile.getAbsolutePath() + "], ignoring...", shardId.index().name(), shardId.id());
continue;
}
ShardStateInfo readState = readShardState(data);
if (readState == null) {
logger.debug("[{}][{}]: not data for [" + stateFile.getAbsolutePath() + "], ignoring...", shardId.index().name(), shardId.id());
continue;
}
assert readState.version == version;
highestShardState = readState;
highestShardVersion = version;
}
} catch (Exception e) {
logger.debug("[{}][{}]: failed to read [" + stateFile.getAbsolutePath() + "], ignoring...", e, shardId.index().name(), shardId.id());
}
}
}
return highestShardState;
}
@Nullable
private ShardStateInfo readShardState(byte[] data) throws Exception {
XContentParser parser = null;
try {
parser = XContentHelper.createParser(data, 0, data.length);
XContentParser.Token token = parser.nextToken();
if (token == null) {
return null;
}
long version = -1;
Boolean primary = null;
String currentFieldName = null;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token.isValue()) {
if ("version".equals(currentFieldName)) {
version = parser.longValue();
} else if ("primary".equals(currentFieldName)) {
primary = parser.booleanValue();
}
}
}
return new ShardStateInfo(version, primary);
} finally {
if (parser != null) {
parser.close();
}
}
}
private void writeShardState(String reason, ShardId shardId, ShardStateInfo shardStateInfo, @Nullable ShardStateInfo previousStateInfo) throws Exception {
logger.trace("[{}][{}] writing shard state, reason [{}]", shardId.index().name(), shardId.id(), reason);
XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON, new BytesStreamOutput());
builder.prettyPrint();
builder.startObject();
builder.field("version", shardStateInfo.version);
if (shardStateInfo.primary != null) {
builder.field("primary", shardStateInfo.primary);
}
builder.endObject();
builder.flush();
Exception lastFailure = null;
boolean wroteAtLeastOnce = false;
for (File shardLocation : nodeEnv.shardLocations(shardId)) {
File shardStateDir = new File(shardLocation, "_state");
FileSystemUtils.mkdirs(shardStateDir);
File stateFile = new File(shardStateDir, "state-" + shardStateInfo.version);
FileOutputStream fos = null;
try {
fos = new FileOutputStream(stateFile);
BytesReference bytes = builder.bytes();
fos.write(bytes.array(), bytes.arrayOffset(), bytes.length());
fos.getChannel().force(true);
fos.close();
wroteAtLeastOnce = true;
} catch (Exception e) {
lastFailure = e;
} finally {
IOUtils.closeWhileHandlingException(fos);
}
}
if (!wroteAtLeastOnce) {
logger.warn("[{}][{}]: failed to write shard state", shardId.index().name(), shardId.id(), lastFailure);
throw new IOException("failed to write shard state for " + shardId, lastFailure);
}
// delete the old files
if (previousStateInfo != null && previousStateInfo.version != shardStateInfo.version) {
for (File shardLocation : nodeEnv.shardLocations(shardId)) {
File stateFile = new File(new File(shardLocation, "_state"), "state-" + previousStateInfo.version);
stateFile.delete();
}
}
}
private void deleteShardState(ShardId shardId) {
logger.trace("[{}][{}] delete shard state", shardId.index().name(), shardId.id());
File[] shardLocations = nodeEnv.shardLocations(shardId);
for (File shardLocation : shardLocations) {
if (!shardLocation.exists()) {
continue;
}
FileSystemUtils.deleteRecursively(new File(shardLocation, "_state"));
}
}
private void pre019Upgrade() throws Exception {
long index = -1;
File latest = null;
for (File dataLocation : nodeEnv.nodeDataLocations()) {
File stateLocation = new File(dataLocation, "_state");
if (!stateLocation.exists()) {
continue;
}
File[] stateFiles = stateLocation.listFiles();
if (stateFiles == null) {
continue;
}
for (File stateFile : stateFiles) {
if (logger.isTraceEnabled()) {
logger.trace("[find_latest_state]: processing [" + stateFile.getName() + "]");
}
String name = stateFile.getName();
if (!name.startsWith("shards-")) {
continue;
}
long fileIndex = Long.parseLong(name.substring(name.indexOf('-') + 1));
if (fileIndex >= index) {
// try and read the meta data
try {
byte[] data = Streams.copyToByteArray(new FileInputStream(stateFile));
if (data.length == 0) {
logger.debug("[upgrade]: not data for [" + name + "], ignoring...");
}
pre09ReadState(data);
index = fileIndex;
latest = stateFile;
} catch (IOException e) {
logger.warn("[upgrade]: failed to read state from [" + name + "], ignoring...", e);
}
}
}
}
if (latest == null) {
return;
}
logger.info("found old shards state, loading started shards from [{}] and converting to new shards state locations...", latest.getAbsolutePath());
Map<ShardId, ShardStateInfo> shardsState = pre09ReadState(Streams.copyToByteArray(new FileInputStream(latest)));
for (Map.Entry<ShardId, ShardStateInfo> entry : shardsState.entrySet()) {
writeShardState("upgrade", entry.getKey(), entry.getValue(), null);
}
// rename shards state to backup state
File backupFile = new File(latest.getParentFile(), "backup-" + latest.getName());
if (!latest.renameTo(backupFile)) {
throw new IOException("failed to rename old state to backup state [" + latest.getAbsolutePath() + "]");
}
// delete all other shards state files
for (File dataLocation : nodeEnv.nodeDataLocations()) {
File stateLocation = new File(dataLocation, "_state");
if (!stateLocation.exists()) {
continue;
}
File[] stateFiles = stateLocation.listFiles();
if (stateFiles == null) {
continue;
}
for (File stateFile : stateFiles) {
String name = stateFile.getName();
if (!name.startsWith("shards-")) {
continue;
}
stateFile.delete();
}
}
logger.info("conversion to new shards state location and format done, backup create at [{}]", backupFile.getAbsolutePath());
}
private Map<ShardId, ShardStateInfo> pre09ReadState(byte[] data) throws IOException {
XContentParser parser = null;
try {
Map<ShardId, ShardStateInfo> shardsState = Maps.newHashMap();
parser = XContentHelper.createParser(data, 0, data.length);
String currentFieldName = null;
XContentParser.Token token = parser.nextToken();
if (token == null) {
// no data...
return shardsState;
}
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.START_ARRAY) {
if ("shards".equals(currentFieldName)) {
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
if (token == XContentParser.Token.START_OBJECT) {
String shardIndex = null;
int shardId = -1;
long version = -1;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token.isValue()) {
if ("index".equals(currentFieldName)) {
shardIndex = parser.text();
} else if ("id".equals(currentFieldName)) {
shardId = parser.intValue();
} else if ("version".equals(currentFieldName)) {
version = parser.longValue();
}
}
}
shardsState.put(new ShardId(shardIndex, shardId), new ShardStateInfo(version, null));
}
}
}
}
}
return shardsState;
} finally {
if (parser != null) {
parser.close();
}
}
}
} | 0true
| src_main_java_org_elasticsearch_gateway_local_state_shards_LocalGatewayShardsState.java |
201 | public class OEngineRemote extends OEngineAbstract {
public static final String NAME = "remote";
private static final Map<String, OStorageRemote> sharedStorages = new ConcurrentHashMap<String, OStorageRemote>();
public OEngineRemote() {
}
public OStorage createStorage(final String iURL, final Map<String, String> iConfiguration) {
try {
synchronized (sharedStorages) {
OStorageRemote sharedStorage = sharedStorages.get(iURL);
if (sharedStorage == null) {
sharedStorage = new OStorageRemote(null, iURL, "rw");
sharedStorages.put(iURL, sharedStorage);
}
return new OStorageRemoteThread(sharedStorage);
}
} catch (Throwable t) {
OLogManager.instance().error(this, "Error on opening database: " + iURL, t, ODatabaseException.class);
}
return null;
}
public void removeStorage(final String iURL) {
synchronized (sharedStorages) {
sharedStorages.remove(iURL);
}
}
@Override
public void removeStorage(final OStorage iStorage) {
synchronized (sharedStorages) {
for (Entry<String, OStorageRemote> entry : sharedStorages.entrySet()) {
if (entry.getValue() == iStorage) {
sharedStorages.remove(entry.getKey());
break;
}
}
}
}
@Override
public void shutdown() {
super.shutdown();
sharedStorages.clear();
}
public String getName() {
return NAME;
}
public boolean isShared() {
return false;
}
} | 0true
| client_src_main_java_com_orientechnologies_orient_client_remote_OEngineRemote.java |
1,952 | public class MapRemoveEntryListenerRequest extends BaseClientRemoveListenerRequest {
public MapRemoveEntryListenerRequest() {
}
public MapRemoveEntryListenerRequest(String name, String registrationId) {
super(name, registrationId);
}
public Object call() throws Exception {
final MapService service = getService();
return service.removeEventListener(name, registrationId);
}
public String getServiceName() {
return MapService.SERVICE_NAME;
}
public int getFactoryId() {
return MapPortableHook.F_ID;
}
public int getClassId() {
return MapPortableHook.REMOVE_ENTRY_LISTENER;
}
@Override
public Permission getRequiredPermission() {
return new MapPermission(name, ActionConstants.ACTION_LISTEN);
}
} | 0true
| hazelcast_src_main_java_com_hazelcast_map_client_MapRemoveEntryListenerRequest.java |
361 | public class FilterParameter {
protected String name;
protected String type;
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getType() {
return type;
}
public void setType(String type) {
this.type = type;
}
} | 0true
| common_src_main_java_org_broadleafcommerce_common_filter_FilterParameter.java |
881 | public class TransportSearchScrollQueryAndFetchAction extends AbstractComponent {
private final ThreadPool threadPool;
private final ClusterService clusterService;
private final SearchServiceTransportAction searchService;
private final SearchPhaseController searchPhaseController;
@Inject
public TransportSearchScrollQueryAndFetchAction(Settings settings, ThreadPool threadPool, ClusterService clusterService,
SearchServiceTransportAction searchService, SearchPhaseController searchPhaseController) {
super(settings);
this.threadPool = threadPool;
this.clusterService = clusterService;
this.searchService = searchService;
this.searchPhaseController = searchPhaseController;
}
public void execute(SearchScrollRequest request, ParsedScrollId scrollId, ActionListener<SearchResponse> listener) {
new AsyncAction(request, scrollId, listener).start();
}
private class AsyncAction {
private final SearchScrollRequest request;
private final ActionListener<SearchResponse> listener;
private final ParsedScrollId scrollId;
private final DiscoveryNodes nodes;
private volatile AtomicArray<ShardSearchFailure> shardFailures;
private final AtomicArray<QueryFetchSearchResult> queryFetchResults;
private final AtomicInteger successfulOps;
private final AtomicInteger counter;
private final long startTime = System.currentTimeMillis();
private AsyncAction(SearchScrollRequest request, ParsedScrollId scrollId, ActionListener<SearchResponse> listener) {
this.request = request;
this.listener = listener;
this.scrollId = scrollId;
this.nodes = clusterService.state().nodes();
this.successfulOps = new AtomicInteger(scrollId.getContext().length);
this.counter = new AtomicInteger(scrollId.getContext().length);
this.queryFetchResults = new AtomicArray<QueryFetchSearchResult>(scrollId.getContext().length);
}
protected final ShardSearchFailure[] buildShardFailures() {
if (shardFailures == null) {
return ShardSearchFailure.EMPTY_ARRAY;
}
List<AtomicArray.Entry<ShardSearchFailure>> entries = shardFailures.asList();
ShardSearchFailure[] failures = new ShardSearchFailure[entries.size()];
for (int i = 0; i < failures.length; i++) {
failures[i] = entries.get(i).value;
}
return failures;
}
// we do our best to return the shard failures, but its ok if its not fully concurrently safe
// we simply try and return as much as possible
protected final void addShardFailure(final int shardIndex, ShardSearchFailure failure) {
if (shardFailures == null) {
shardFailures = new AtomicArray<ShardSearchFailure>(scrollId.getContext().length);
}
shardFailures.set(shardIndex, failure);
}
public void start() {
if (scrollId.getContext().length == 0) {
listener.onFailure(new SearchPhaseExecutionException("query", "no nodes to search on", null));
return;
}
int localOperations = 0;
Tuple<String, Long>[] context = scrollId.getContext();
for (int i = 0; i < context.length; i++) {
Tuple<String, Long> target = context[i];
DiscoveryNode node = nodes.get(target.v1());
if (node != null) {
if (nodes.localNodeId().equals(node.id())) {
localOperations++;
} else {
executePhase(i, node, target.v2());
}
} else {
if (logger.isDebugEnabled()) {
logger.debug("Node [" + target.v1() + "] not available for scroll request [" + scrollId.getSource() + "]");
}
successfulOps.decrementAndGet();
if (counter.decrementAndGet() == 0) {
finishHim();
}
}
}
if (localOperations > 0) {
if (request.operationThreading() == SearchOperationThreading.SINGLE_THREAD) {
threadPool.executor(ThreadPool.Names.SEARCH).execute(new Runnable() {
@Override
public void run() {
Tuple<String, Long>[] context1 = scrollId.getContext();
for (int i = 0; i < context1.length; i++) {
Tuple<String, Long> target = context1[i];
DiscoveryNode node = nodes.get(target.v1());
if (node != null && nodes.localNodeId().equals(node.id())) {
executePhase(i, node, target.v2());
}
}
}
});
} else {
boolean localAsync = request.operationThreading() == SearchOperationThreading.THREAD_PER_SHARD;
Tuple<String, Long>[] context1 = scrollId.getContext();
for (int i = 0; i < context1.length; i++) {
final Tuple<String, Long> target = context1[i];
final int shardIndex = i;
final DiscoveryNode node = nodes.get(target.v1());
if (node != null && nodes.localNodeId().equals(node.id())) {
try {
if (localAsync) {
threadPool.executor(ThreadPool.Names.SEARCH).execute(new Runnable() {
@Override
public void run() {
executePhase(shardIndex, node, target.v2());
}
});
} else {
executePhase(shardIndex, node, target.v2());
}
} catch (Throwable t) {
onPhaseFailure(t, target.v2(), shardIndex);
}
}
}
}
}
for (Tuple<String, Long> target : scrollId.getContext()) {
DiscoveryNode node = nodes.get(target.v1());
if (node == null) {
if (logger.isDebugEnabled()) {
logger.debug("Node [" + target.v1() + "] not available for scroll request [" + scrollId.getSource() + "]");
}
successfulOps.decrementAndGet();
if (counter.decrementAndGet() == 0) {
finishHim();
}
} else {
}
}
}
void executePhase(final int shardIndex, DiscoveryNode node, final long searchId) {
searchService.sendExecuteFetch(node, internalScrollSearchRequest(searchId, request), new SearchServiceListener<QueryFetchSearchResult>() {
@Override
public void onResult(QueryFetchSearchResult result) {
queryFetchResults.set(shardIndex, result);
if (counter.decrementAndGet() == 0) {
finishHim();
}
}
@Override
public void onFailure(Throwable t) {
onPhaseFailure(t, searchId, shardIndex);
}
});
}
private void onPhaseFailure(Throwable t, long searchId, int shardIndex) {
if (logger.isDebugEnabled()) {
logger.debug("[{}] Failed to execute query phase", t, searchId);
}
addShardFailure(shardIndex, new ShardSearchFailure(t));
successfulOps.decrementAndGet();
if (counter.decrementAndGet() == 0) {
finishHim();
}
}
private void finishHim() {
try {
innerFinishHim();
} catch (Throwable e) {
listener.onFailure(new ReduceSearchPhaseException("fetch", "", e, buildShardFailures()));
}
}
private void innerFinishHim() {
ScoreDoc[] sortedShardList = searchPhaseController.sortDocs(queryFetchResults);
final InternalSearchResponse internalResponse = searchPhaseController.merge(sortedShardList, queryFetchResults, queryFetchResults);
String scrollId = null;
if (request.scroll() != null) {
scrollId = request.scrollId();
}
listener.onResponse(new SearchResponse(internalResponse, scrollId, this.scrollId.getContext().length, successfulOps.get(),
System.currentTimeMillis() - startTime, buildShardFailures()));
}
}
} | 1no label
| src_main_java_org_elasticsearch_action_search_type_TransportSearchScrollQueryAndFetchAction.java |
2,575 | public class NodeIOService implements IOService {
private final Node node;
private final NodeEngineImpl nodeEngine;
public NodeIOService(Node node) {
this.node = node;
this.nodeEngine = node.nodeEngine;
}
public boolean isActive() {
return node.isActive();
}
public ILogger getLogger(String name) {
return node.getLogger(name);
}
public SystemLogService getSystemLogService() {
return node.getSystemLogService();
}
public void onOutOfMemory(OutOfMemoryError oom) {
OutOfMemoryErrorDispatcher.onOutOfMemory(oom);
}
public Address getThisAddress() {
return node.getThisAddress();
}
public void onFatalError(Exception e) {
getSystemLogService().logConnection(e.getClass().getName() + ": " + e.getMessage());
new Thread(node.threadGroup, node.getThreadNamePrefix("io.error.shutdown")) {
public void run() {
node.shutdown(false);
}
} .start();
}
public SocketInterceptorConfig getSocketInterceptorConfig() {
return node.getConfig().getNetworkConfig().getSocketInterceptorConfig();
}
public SymmetricEncryptionConfig getSymmetricEncryptionConfig() {
return node.getConfig().getNetworkConfig().getSymmetricEncryptionConfig();
}
public SSLConfig getSSLConfig() {
return node.getConfig().getNetworkConfig().getSSLConfig();
}
public void handleMemberPacket(final Packet packet) {
final Address endPoint = packet.getConn().getEndPoint();
if (endPoint != null) {
final MemberImpl member = node.clusterService.getMember(endPoint);
if (member != null) {
member.didRead();
}
}
nodeEngine.handlePacket(packet);
}
public void handleClientPacket(ClientPacket p) {
node.clientEngine.handlePacket(p);
}
public TextCommandService getTextCommandService() {
return node.getTextCommandService();
}
public boolean isMemcacheEnabled() {
return node.groupProperties.MEMCACHE_ENABLED.getBoolean();
}
public boolean isRestEnabled() {
return node.groupProperties.REST_ENABLED.getBoolean();
}
public void removeEndpoint(final Address endPoint) {
nodeEngine.getExecutionService().execute(ExecutionService.IO_EXECUTOR, new Runnable() {
public void run() {
node.clusterService.removeAddress(endPoint);
}
});
}
public String getThreadPrefix() {
return node.getThreadPoolNamePrefix("IO");
}
public ThreadGroup getThreadGroup() {
return node.threadGroup;
}
public void onFailedConnection(Address address) {
if (!node.joined()) {
node.failedConnection(address);
}
}
public void shouldConnectTo(Address address) {
if (node.getThisAddress().equals(address)) {
throw new RuntimeException("Connecting to self! " + address);
}
}
public boolean isReuseSocketAddress() {
return node.getConfig().getNetworkConfig().isReuseAddress();
}
public int getSocketPort() {
return node.getConfig().getNetworkConfig().getPort();
}
public boolean isSocketBind() {
return node.groupProperties.SOCKET_CLIENT_BIND.getBoolean();
}
public boolean isSocketBindAny() {
return node.groupProperties.SOCKET_CLIENT_BIND_ANY.getBoolean();
}
public boolean isSocketPortAutoIncrement() {
return node.getConfig().getNetworkConfig().isPortAutoIncrement();
}
public int getSocketReceiveBufferSize() {
return this.node.getGroupProperties().SOCKET_RECEIVE_BUFFER_SIZE.getInteger();
}
public int getSocketSendBufferSize() {
return this.node.getGroupProperties().SOCKET_SEND_BUFFER_SIZE.getInteger();
}
public int getSocketLingerSeconds() {
return this.node.getGroupProperties().SOCKET_LINGER_SECONDS.getInteger();
}
public boolean getSocketKeepAlive() {
return this.node.getGroupProperties().SOCKET_KEEP_ALIVE.getBoolean();
}
public boolean getSocketNoDelay() {
return this.node.getGroupProperties().SOCKET_NO_DELAY.getBoolean();
}
public int getSelectorThreadCount() {
return node.groupProperties.IO_THREAD_COUNT.getInteger();
}
public void onDisconnect(final Address endpoint) {
}
public boolean isClient() {
return false;
}
public long getConnectionMonitorInterval() {
return node.groupProperties.CONNECTION_MONITOR_INTERVAL.getLong();
}
public int getConnectionMonitorMaxFaults() {
return node.groupProperties.CONNECTION_MONITOR_MAX_FAULTS.getInteger();
}
public void executeAsync(final Runnable runnable) {
nodeEngine.getExecutionService().execute(ExecutionService.IO_EXECUTOR, runnable);
}
public Data toData(Object obj) {
return nodeEngine.toData(obj);
}
public Object toObject(Data data) {
return nodeEngine.toObject(data);
}
public SerializationService getSerializationService() {
return node.getSerializationService();
}
public SerializationContext getSerializationContext() {
return node.getSerializationService().getSerializationContext();
}
public Collection<Integer> getOutboundPorts() {
final NetworkConfig networkConfig = node.getConfig().getNetworkConfig();
final Collection<String> portDefinitions = networkConfig.getOutboundPortDefinitions() == null
? Collections.<String>emptySet() : networkConfig.getOutboundPortDefinitions();
final Set<Integer> ports = networkConfig.getOutboundPorts() == null
? new HashSet<Integer>() : new HashSet<Integer>(networkConfig.getOutboundPorts());
if (portDefinitions.isEmpty() && ports.isEmpty()) {
// means any port
return Collections.emptySet();
}
if (portDefinitions.contains("*") || portDefinitions.contains("0")) {
// means any port
return Collections.emptySet();
}
// not checking port ranges...
for (String portDef : portDefinitions) {
String[] portDefs = portDef.split("[,; ]");
for (String def : portDefs) {
def = def.trim();
final int dashPos = def.indexOf('-');
if (dashPos > 0) {
final int start = Integer.parseInt(def.substring(0, dashPos));
final int end = Integer.parseInt(def.substring(dashPos + 1));
for (int port = start; port <= end; port++) {
ports.add(port);
}
} else {
ports.add(Integer.parseInt(def));
}
}
}
if (ports.contains(0)) {
// means any port
return Collections.emptySet();
}
return ports;
}
} | 1no label
| hazelcast_src_main_java_com_hazelcast_nio_NodeIOService.java |
95 | class TemplateVisitor extends Visitor {
Tree.StringTemplate result;
@Override
public void visit(Tree.StringTemplate that) {
if (that.getStartIndex()<=node.getStartIndex() &&
that.getStopIndex()>=node.getStopIndex()) {
result = that;
}
super.visit(that);
}
} | 0true
| plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_correct_ConvertToConcatenationProposal.java |
342 | static class TestMapStore extends MapStoreAdapter<Long, String> {
public volatile CountDownLatch latch;
@Override
public void store(Long key, String value) {
if (latch != null) {
latch.countDown();
}
}
@Override
public void storeAll(Map<Long, String> map) {
if (latch != null) {
latch.countDown();
}
}
@Override
public void deleteAll(Collection<Long> keys) {
if (latch != null) {
latch.countDown();
}
}
@Override
public void delete(Long key) {
if (latch != null) {
latch.countDown();
}
}
} | 0true
| hazelcast-client_src_test_java_com_hazelcast_client_map_ClientMapTest.java |
311 | return new Predicate<Integer>() {
@Override
public boolean apply(@Nullable Integer num) {
return num!=null && num>0;
}
}; | 0true
| titan-core_src_main_java_com_thinkaurelius_titan_diskstorage_configuration_ConfigOption.java |
161 | class RenameProposal implements ICompletionProposal,
ICompletionProposalExtension6 {
private final Declaration dec;
private final CeylonEditor editor;
RenameProposal(Declaration dec,
CeylonEditor editor) {
this.dec = dec;
this.editor = editor;
}
@Override
public Point getSelection(IDocument doc) {
return null;
}
@Override
public Image getImage() {
return RENAME;
}
@Override
public String getDisplayString() {
return "Rename '" + dec.getName() + "'";
}
@Override
public IContextInformation getContextInformation() {
return null;
}
@Override
public String getAdditionalProposalInfo() {
return null;
}
@Override
public void apply(IDocument doc) {
if (useLinkedMode()) {
new RenameLinkedMode(editor).start();
}
else {
new RenameRefactoringAction(editor).run();
}
}
@Override
public StyledString getStyledDisplayString() {
return Highlights.styleProposal(getDisplayString(), false);
}
public static void add(Collection<ICompletionProposal> proposals,
CeylonEditor editor) {
RenameRefactoring rr = new RenameRefactoring(editor);
if (rr.isEnabled()) {
proposals.add(new RenameProposal(rr.getDeclaration(), editor));
}
}
} | 0true
| plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_correct_RenameProposal.java |
2,613 | private static class DefaultStringCreator implements UTFEncoderDecoder.StringCreator {
@Override
public String buildString(char[] chars) {
return new String(chars);
}
} | 1no label
| hazelcast_src_main_java_com_hazelcast_nio_UTFEncoderDecoder.java |
143 | public class GetDistributedObjectsRequest extends ClientRequest {
@Override
void process() throws Exception {
ClientEndpoint endpoint = getEndpoint();
Collection<DistributedObject> distributedObjects = clientEngine.getProxyService().getAllDistributedObjects();
SerializationService serializationService = clientEngine.getSerializationService();
List<Data> dataArrayList = new ArrayList<Data>(distributedObjects.size());
for (DistributedObject distributedObject : distributedObjects) {
DistributedObjectInfo distributedObjectInfo = new DistributedObjectInfo(
distributedObject.getServiceName(), distributedObject.getName());
Data data = serializationService.toData(distributedObjectInfo);
dataArrayList.add(data);
}
SerializableCollection collection = new SerializableCollection(dataArrayList);
endpoint.sendResponse(collection, getCallId());
}
@Override
public String getServiceName() {
return ClientEngineImpl.SERVICE_NAME;
}
public int getFactoryId() {
return ClientPortableHook.ID;
}
public int getClassId() {
return ClientPortableHook.GET_DISTRIBUTED_OBJECT_INFO;
}
@Override
public Permission getRequiredPermission() {
return null;
}
} | 0true
| hazelcast_src_main_java_com_hazelcast_client_GetDistributedObjectsRequest.java |
91 | public enum Geo implements TitanPredicate {
/**
* Whether the intersection between two geographic regions is non-empty
*/
INTERSECT {
@Override
public boolean evaluate(Object value, Object condition) {
Preconditions.checkArgument(condition instanceof Geoshape);
if (value == null) return false;
Preconditions.checkArgument(value instanceof Geoshape);
return ((Geoshape) value).intersect((Geoshape) condition);
}
@Override
public String toString() {
return "intersect";
}
@Override
public boolean hasNegation() {
return true;
}
@Override
public TitanPredicate negate() {
return DISJOINT;
}
},
/**
* Whether the intersection between two geographic regions is empty
*/
DISJOINT {
@Override
public boolean evaluate(Object value, Object condition) {
Preconditions.checkArgument(condition instanceof Geoshape);
if (value == null) return false;
Preconditions.checkArgument(value instanceof Geoshape);
return ((Geoshape) value).disjoint((Geoshape) condition);
}
@Override
public String toString() {
return "disjoint";
}
@Override
public boolean hasNegation() {
return true;
}
@Override
public TitanPredicate negate() {
return INTERSECT;
}
},
/**
* Whether one geographic region is completely contains within another
*/
WITHIN {
@Override
public boolean evaluate(Object value, Object condition) {
Preconditions.checkArgument(condition instanceof Geoshape);
if (value == null) return false;
Preconditions.checkArgument(value instanceof Geoshape);
return ((Geoshape) value).within((Geoshape) condition);
}
@Override
public String toString() {
return "within";
}
@Override
public boolean hasNegation() {
return false;
}
@Override
public TitanPredicate negate() {
throw new UnsupportedOperationException();
}
};
@Override
public boolean isValidCondition(Object condition) {
return condition != null && condition instanceof Geoshape;
}
@Override
public boolean isValidValueType(Class<?> clazz) {
Preconditions.checkNotNull(clazz);
return clazz.equals(Geoshape.class);
}
@Override
public boolean isQNF() {
return true;
}
} | 0true
| titan-core_src_main_java_com_thinkaurelius_titan_core_attribute_Geo.java |
187 | public class KeyColumn {
public final int key;
public final int column;
public KeyColumn(int key, int column) {
this.key = key;
this.column = column;
}
@Override
public int hashCode() {
return new HashCodeBuilder().append(key).append(column).toHashCode();
}
public boolean equals(Object other) {
if (this == other) return true;
else if (!getClass().isInstance(other)) return false;
KeyColumn oth = (KeyColumn) other;
return key == oth.key && column == oth.column;
}
} | 0true
| titan-test_src_main_java_com_thinkaurelius_titan_diskstorage_KeyColumn.java |
211 | public class OStorageRemoteSession {
public boolean commandExecuting = false;
public Integer sessionId = -1;
public String serverURL = null;
} | 0true
| client_src_main_java_com_orientechnologies_orient_client_remote_OStorageRemoteThreadLocal.java |
4,655 | public class PercolateContext extends SearchContext {
public boolean limit;
public int size;
public boolean doSort;
public byte percolatorTypeId;
private boolean trackScores;
private final PercolateShardRequest request;
private final SearchShardTarget searchShardTarget;
private final IndexService indexService;
private final IndexFieldDataService fieldDataService;
private final IndexShard indexShard;
private final CacheRecycler cacheRecycler;
private final PageCacheRecycler pageCacheRecycler;
private final ConcurrentMap<HashedBytesRef, Query> percolateQueries;
private String[] types;
private Engine.Searcher docEngineSearcher;
private Engine.Searcher engineSearcher;
private ContextIndexSearcher searcher;
private SearchContextHighlight highlight;
private SearchLookup searchLookup;
private ParsedQuery parsedQuery;
private Query query;
private boolean queryRewritten;
private Query percolateQuery;
private FetchSubPhase.HitContext hitContext;
private SearchContextFacets facets;
private SearchContextAggregations aggregations;
private QuerySearchResult querySearchResult;
private Sort sort;
public PercolateContext(PercolateShardRequest request, SearchShardTarget searchShardTarget, IndexShard indexShard, IndexService indexService, CacheRecycler cacheRecycler, PageCacheRecycler pageCacheRecycler) {
this.request = request;
this.indexShard = indexShard;
this.indexService = indexService;
this.fieldDataService = indexService.fieldData();
this.searchShardTarget = searchShardTarget;
this.percolateQueries = indexShard.percolateRegistry().percolateQueries();
this.types = new String[]{request.documentType()};
this.cacheRecycler = cacheRecycler;
this.pageCacheRecycler = pageCacheRecycler;
this.querySearchResult = new QuerySearchResult(0, searchShardTarget);
this.engineSearcher = indexShard.acquireSearcher("percolate");
this.searcher = new ContextIndexSearcher(this, engineSearcher);
}
public void initialize(final MemoryIndex memoryIndex, ParsedDocument parsedDocument) {
final IndexSearcher docSearcher = memoryIndex.createSearcher();
final IndexReader topLevelReader = docSearcher.getIndexReader();
AtomicReaderContext readerContext = topLevelReader.leaves().get(0);
docEngineSearcher = new Engine.Searcher() {
@Override
public String source() {
return "percolate";
}
@Override
public IndexReader reader() {
return topLevelReader;
}
@Override
public IndexSearcher searcher() {
return docSearcher;
}
@Override
public boolean release() throws ElasticsearchException {
try {
docSearcher.getIndexReader().close();
memoryIndex.reset();
} catch (IOException e) {
throw new ElasticsearchException("failed to close percolator in-memory index", e);
}
return true;
}
};
lookup().setNextReader(readerContext);
lookup().setNextDocId(0);
lookup().source().setNextSource(parsedDocument.source());
Map<String, SearchHitField> fields = new HashMap<String, SearchHitField>();
for (IndexableField field : parsedDocument.rootDoc().getFields()) {
fields.put(field.name(), new InternalSearchHitField(field.name(), ImmutableList.of()));
}
hitContext = new FetchSubPhase.HitContext();
hitContext.reset(new InternalSearchHit(0, "unknown", new StringText(request.documentType()), fields), readerContext, 0, topLevelReader, 0, new JustSourceFieldsVisitor());
}
public IndexSearcher docSearcher() {
return docEngineSearcher.searcher();
}
public IndexShard indexShard() {
return indexShard;
}
public IndexService indexService() {
return indexService;
}
public ConcurrentMap<HashedBytesRef, Query> percolateQueries() {
return percolateQueries;
}
public Query percolateQuery() {
return percolateQuery;
}
public void percolateQuery(Query percolateQuery) {
this.percolateQuery = percolateQuery;
}
public FetchSubPhase.HitContext hitContext() {
return hitContext;
}
@Override
public SearchContextHighlight highlight() {
return highlight;
}
@Override
public void highlight(SearchContextHighlight highlight) {
this.highlight = highlight;
}
@Override
public SearchShardTarget shardTarget() {
return searchShardTarget;
}
@Override
public SearchLookup lookup() {
if (searchLookup == null) {
searchLookup = new SearchLookup(mapperService(), fieldData(), types);
}
return searchLookup;
}
@Override
public boolean release() throws ElasticsearchException {
try {
if (docEngineSearcher != null) {
IndexReader indexReader = docEngineSearcher.reader();
fieldDataService.clear(indexReader);
indexService.cache().clear(indexReader);
return docEngineSearcher.release();
} else {
return false;
}
} finally {
engineSearcher.release();
}
}
@Override
public MapperService mapperService() {
return indexService.mapperService();
}
@Override
public SearchContext parsedQuery(ParsedQuery query) {
this.parsedQuery = query;
this.query = query.query();
this.queryRewritten = false;
return this;
}
@Override
public ParsedQuery parsedQuery() {
return parsedQuery;
}
@Override
public Query query() {
return query;
}
@Override
public boolean queryRewritten() {
return queryRewritten;
}
@Override
public SearchContext updateRewriteQuery(Query rewriteQuery) {
queryRewritten = true;
query = rewriteQuery;
return this;
}
@Override
public String[] types() {
return types;
}
public void types(String[] types) {
this.types = types;
searchLookup = new SearchLookup(mapperService(), fieldData(), types);
}
@Override
public IndexFieldDataService fieldData() {
return fieldDataService;
}
@Override
public SearchContextAggregations aggregations() {
return aggregations;
}
@Override
public SearchContext aggregations(SearchContextAggregations aggregations) {
this.aggregations = aggregations;
return this;
}
@Override
public SearchContextFacets facets() {
return facets;
}
@Override
public SearchContext facets(SearchContextFacets facets) {
this.facets = facets;
return this;
}
// Unused:
@Override
public boolean clearAndRelease() {
throw new UnsupportedOperationException();
}
@Override
public void preProcess() {
throw new UnsupportedOperationException();
}
@Override
public Filter searchFilter(String[] types) {
throw new UnsupportedOperationException();
}
@Override
public long id() {
throw new UnsupportedOperationException();
}
@Override
public String source() {
throw new UnsupportedOperationException();
}
@Override
public ShardSearchRequest request() {
throw new UnsupportedOperationException();
}
@Override
public SearchType searchType() {
throw new UnsupportedOperationException();
}
@Override
public SearchContext searchType(SearchType searchType) {
throw new UnsupportedOperationException();
}
@Override
public int numberOfShards() {
throw new UnsupportedOperationException();
}
@Override
public boolean hasTypes() {
throw new UnsupportedOperationException();
}
@Override
public float queryBoost() {
throw new UnsupportedOperationException();
}
@Override
public SearchContext queryBoost(float queryBoost) {
throw new UnsupportedOperationException();
}
@Override
public long nowInMillis() {
throw new UnsupportedOperationException();
}
@Override
public Scroll scroll() {
throw new UnsupportedOperationException();
}
@Override
public SearchContext scroll(Scroll scroll) {
throw new UnsupportedOperationException();
}
@Override
public SuggestionSearchContext suggest() {
throw new UnsupportedOperationException();
}
@Override
public void suggest(SuggestionSearchContext suggest) {
throw new UnsupportedOperationException();
}
@Override
public List<RescoreSearchContext> rescore() {
throw new UnsupportedOperationException();
}
@Override
public void addRescore(RescoreSearchContext rescore) {
throw new UnsupportedOperationException();
}
@Override
public boolean hasFieldDataFields() {
throw new UnsupportedOperationException();
}
@Override
public FieldDataFieldsContext fieldDataFields() {
throw new UnsupportedOperationException();
}
@Override
public boolean hasScriptFields() {
throw new UnsupportedOperationException();
}
@Override
public ScriptFieldsContext scriptFields() {
throw new UnsupportedOperationException();
}
@Override
public boolean hasPartialFields() {
throw new UnsupportedOperationException();
}
@Override
public PartialFieldsContext partialFields() {
throw new UnsupportedOperationException();
}
@Override
public boolean sourceRequested() {
throw new UnsupportedOperationException();
}
@Override
public boolean hasFetchSourceContext() {
throw new UnsupportedOperationException();
}
@Override
public FetchSourceContext fetchSourceContext() {
throw new UnsupportedOperationException();
}
@Override
public SearchContext fetchSourceContext(FetchSourceContext fetchSourceContext) {
throw new UnsupportedOperationException();
}
@Override
public ContextIndexSearcher searcher() {
return searcher;
}
@Override
public AnalysisService analysisService() {
throw new UnsupportedOperationException();
}
@Override
public IndexQueryParserService queryParserService() {
throw new UnsupportedOperationException();
}
@Override
public SimilarityService similarityService() {
throw new UnsupportedOperationException();
}
@Override
public ScriptService scriptService() {
throw new UnsupportedOperationException();
}
@Override
public CacheRecycler cacheRecycler() {
return cacheRecycler;
}
@Override
public PageCacheRecycler pageCacheRecycler() {
return pageCacheRecycler;
}
@Override
public FilterCache filterCache() {
throw new UnsupportedOperationException();
}
@Override
public DocSetCache docSetCache() {
throw new UnsupportedOperationException();
}
@Override
public IdCache idCache() {
throw new UnsupportedOperationException();
}
@Override
public long timeoutInMillis() {
return -1;
}
@Override
public void timeoutInMillis(long timeoutInMillis) {
throw new UnsupportedOperationException();
}
@Override
public SearchContext minimumScore(float minimumScore) {
throw new UnsupportedOperationException();
}
@Override
public Float minimumScore() {
return null;
}
@Override
public SearchContext sort(Sort sort) {
this.sort = sort;
return this;
}
@Override
public Sort sort() {
return sort;
}
@Override
public SearchContext trackScores(boolean trackScores) {
this.trackScores = trackScores;
return this;
}
@Override
public boolean trackScores() {
return trackScores;
}
@Override
public SearchContext parsedPostFilter(ParsedFilter postFilter) {
throw new UnsupportedOperationException();
}
@Override
public ParsedFilter parsedPostFilter() {
return null;
}
@Override
public Filter aliasFilter() {
throw new UnsupportedOperationException();
}
@Override
public int from() {
throw new UnsupportedOperationException();
}
@Override
public SearchContext from(int from) {
throw new UnsupportedOperationException();
}
@Override
public int size() {
throw new UnsupportedOperationException();
}
@Override
public SearchContext size(int size) {
throw new UnsupportedOperationException();
}
@Override
public boolean hasFieldNames() {
throw new UnsupportedOperationException();
}
@Override
public List<String> fieldNames() {
throw new UnsupportedOperationException();
}
@Override
public void emptyFieldNames() {
throw new UnsupportedOperationException();
}
@Override
public boolean explain() {
throw new UnsupportedOperationException();
}
@Override
public void explain(boolean explain) {
throw new UnsupportedOperationException();
}
@Override
public List<String> groupStats() {
throw new UnsupportedOperationException();
}
@Override
public void groupStats(List<String> groupStats) {
throw new UnsupportedOperationException();
}
@Override
public boolean version() {
throw new UnsupportedOperationException();
}
@Override
public void version(boolean version) {
throw new UnsupportedOperationException();
}
@Override
public int[] docIdsToLoad() {
throw new UnsupportedOperationException();
}
@Override
public int docIdsToLoadFrom() {
throw new UnsupportedOperationException();
}
@Override
public int docIdsToLoadSize() {
throw new UnsupportedOperationException();
}
@Override
public SearchContext docIdsToLoad(int[] docIdsToLoad, int docsIdsToLoadFrom, int docsIdsToLoadSize) {
throw new UnsupportedOperationException();
}
@Override
public void accessed(long accessTime) {
throw new UnsupportedOperationException();
}
@Override
public long lastAccessTime() {
throw new UnsupportedOperationException();
}
@Override
public long keepAlive() {
throw new UnsupportedOperationException();
}
@Override
public void keepAlive(long keepAlive) {
throw new UnsupportedOperationException();
}
@Override
public DfsSearchResult dfsResult() {
throw new UnsupportedOperationException();
}
@Override
public QuerySearchResult queryResult() {
return querySearchResult;
}
@Override
public FetchSearchResult fetchResult() {
throw new UnsupportedOperationException();
}
@Override
public void addReleasable(Releasable releasable) {
throw new UnsupportedOperationException();
}
@Override
public void clearReleasables() {
throw new UnsupportedOperationException();
}
@Override
public ScanContext scanContext() {
throw new UnsupportedOperationException();
}
@Override
public MapperService.SmartNameFieldMappers smartFieldMappers(String name) {
throw new UnsupportedOperationException();
}
@Override
public FieldMappers smartNameFieldMappers(String name) {
throw new UnsupportedOperationException();
}
@Override
public FieldMapper smartNameFieldMapper(String name) {
return mapperService().smartNameFieldMapper(name, types);
}
@Override
public MapperService.SmartNameObjectMapper smartNameObjectMapper(String name) {
throw new UnsupportedOperationException();
}
} | 1no label
| src_main_java_org_elasticsearch_percolator_PercolateContext.java |
679 | public static class Order {
public static final int General = 1000;
public static final int ActiveDateRange = 2000;
public static final int Advanced = 1000;
} | 0true
| core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_catalog_domain_CategoryImpl.java |
2,515 | public class JsonXContent implements XContent {
public static XContentBuilder contentBuilder() throws IOException {
return XContentBuilder.builder(jsonXContent);
}
private final static JsonFactory jsonFactory;
public final static JsonXContent jsonXContent;
static {
jsonFactory = new JsonFactory();
jsonFactory.configure(JsonParser.Feature.ALLOW_UNQUOTED_FIELD_NAMES, true);
jsonFactory.configure(JsonGenerator.Feature.QUOTE_FIELD_NAMES, true);
jsonFactory.configure(JsonParser.Feature.ALLOW_COMMENTS, true);
jsonXContent = new JsonXContent();
}
private JsonXContent() {
}
@Override
public XContentType type() {
return XContentType.JSON;
}
@Override
public byte streamSeparator() {
return '\n';
}
@Override
public XContentGenerator createGenerator(OutputStream os) throws IOException {
return new JsonXContentGenerator(jsonFactory.createGenerator(os, JsonEncoding.UTF8));
}
@Override
public XContentGenerator createGenerator(Writer writer) throws IOException {
return new JsonXContentGenerator(jsonFactory.createGenerator(writer));
}
@Override
public XContentParser createParser(String content) throws IOException {
return new JsonXContentParser(jsonFactory.createParser(new FastStringReader(content)));
}
@Override
public XContentParser createParser(InputStream is) throws IOException {
return new JsonXContentParser(jsonFactory.createParser(is));
}
@Override
public XContentParser createParser(byte[] data) throws IOException {
return new JsonXContentParser(jsonFactory.createParser(data));
}
@Override
public XContentParser createParser(byte[] data, int offset, int length) throws IOException {
return new JsonXContentParser(jsonFactory.createParser(data, offset, length));
}
@Override
public XContentParser createParser(BytesReference bytes) throws IOException {
if (bytes.hasArray()) {
return createParser(bytes.array(), bytes.arrayOffset(), bytes.length());
}
return createParser(bytes.streamInput());
}
@Override
public XContentParser createParser(Reader reader) throws IOException {
return new JsonXContentParser(jsonFactory.createParser(reader));
}
} | 0true
| src_main_java_org_elasticsearch_common_xcontent_json_JsonXContent.java |
5,100 | class SearchFreeContextRequest extends TransportRequest {
private long id;
SearchFreeContextRequest() {
}
SearchFreeContextRequest(TransportRequest request, long id) {
super(request);
this.id = id;
}
public long id() {
return this.id;
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
id = in.readLong();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeLong(id);
}
} | 1no label
| src_main_java_org_elasticsearch_search_action_SearchServiceTransportAction.java |
113 | (new java.security.PrivilegedExceptionAction<sun.misc.Unsafe>() {
public sun.misc.Unsafe run() throws Exception {
Class<sun.misc.Unsafe> k = sun.misc.Unsafe.class;
for (java.lang.reflect.Field f : k.getDeclaredFields()) {
f.setAccessible(true);
Object x = f.get(null);
if (k.isInstance(x))
return k.cast(x);
}
throw new NoSuchFieldError("the Unsafe");
}}); | 0true
| src_main_java_jsr166e_ForkJoinPool.java |
127 | {
@Override
public boolean accept( LogEntry item )
{
return !(item instanceof LogEntry.Done);
}
}; | 0true
| community_kernel_src_test_java_org_neo4j_kernel_impl_transaction_xaframework_TestApplyTransactions.java |
302 | {
@Override
public PropertyRecord newUnused( Long key, PrimitiveRecord additionalData )
{
PropertyRecord record = new PropertyRecord( key );
setOwner( record, additionalData );
return record;
}
private void setOwner( PropertyRecord record, PrimitiveRecord owner )
{
if ( owner != null )
{
owner.setIdTo( record );
}
}
@Override
public PropertyRecord load( Long key, PrimitiveRecord additionalData )
{
PropertyRecord record = getPropertyStore().getRecord( key.longValue() );
setOwner( record, additionalData );
return record;
}
@Override
public void ensureHeavy( PropertyRecord record )
{
for ( PropertyBlock block : record.getPropertyBlocks() )
{
getPropertyStore().ensureHeavy( block );
}
}
@Override
public PropertyRecord clone(PropertyRecord propertyRecord)
{
return propertyRecord.clone();
}
}, true ); | 0true
| community_kernel_src_main_java_org_neo4j_kernel_impl_nioneo_xa_NeoStoreTransaction.java |
948 | public class OSchedulerTrigger extends ODocumentHookAbstract {
public OSchedulerTrigger() {
setIncludeClasses(OScheduler.CLASSNAME);
}
public DISTRIBUTED_EXECUTION_MODE getDistributedExecutionMode() {
return DISTRIBUTED_EXECUTION_MODE.TARGET_NODE;
}
@Override
public RESULT onRecordBeforeCreate(final ODocument iDocument) {
String name = iDocument.field(OScheduler.PROP_NAME);
OScheduler scheduler = ODatabaseRecordThreadLocal.INSTANCE.get().getMetadata().getSchedulerListener().getScheduler(name);
if (scheduler != null) {
throw new OException("Duplicate Scheduler");
}
boolean start = iDocument.field(OScheduler.PROP_STARTED) == null ? false : ((Boolean) iDocument.field(OScheduler.PROP_STARTED));
if (start)
iDocument.field(OScheduler.PROP_STATUS, SCHEDULER_STATUS.WAITING.name());
else
iDocument.field(OScheduler.PROP_STATUS, SCHEDULER_STATUS.STOPPED.name());
iDocument.field(OScheduler.PROP_STARTED, start);
return RESULT.RECORD_CHANGED;
}
@Override
public void onRecordAfterCreate(final ODocument iDocument) {
OScheduler scheduler = new OScheduler(iDocument);
ODatabaseRecordThreadLocal.INSTANCE.get().getMetadata().getSchedulerListener().addScheduler(scheduler);
}
@Override
public RESULT onRecordBeforeUpdate(final ODocument iDocument) {
try {
boolean isStart = iDocument.field(OScheduler.PROP_STARTED) == null ? false : ((Boolean) iDocument
.field(OScheduler.PROP_STARTED));
String schedulerName = iDocument.field(OScheduler.PROP_NAME);
OScheduler scheduler = ODatabaseRecordThreadLocal.INSTANCE.get().getMetadata().getSchedulerListener()
.getScheduler(schedulerName);
if (isStart) {
if (scheduler == null) {
scheduler = new OScheduler(iDocument);
ODatabaseRecordThreadLocal.INSTANCE.get().getMetadata().getSchedulerListener().addScheduler(scheduler);
}
String currentStatus = iDocument.field(OScheduler.PROP_STATUS);
if (currentStatus.equals(SCHEDULER_STATUS.STOPPED.name())) {
iDocument.field(OScheduler.PROP_STATUS, SCHEDULER_STATUS.WAITING.name());
}
} else {
if (scheduler != null) {
iDocument.field(OScheduler.PROP_STATUS, SCHEDULER_STATUS.STOPPED.name());
}
}
scheduler.resetDocument(iDocument);
} catch (Exception ex) {
OLogManager.instance().error(this, "Error when updating scheduler - " + ex.getMessage());
return RESULT.RECORD_NOT_CHANGED;
}
return RESULT.RECORD_CHANGED;
}
@Override
public RESULT onRecordBeforeDelete(final ODocument iDocument) {
String schedulerName = iDocument.field(OScheduler.PROP_NAME);
OScheduler scheduler = null;
scheduler = ODatabaseRecordThreadLocal.INSTANCE.get().getMetadata().getSchedulerListener().getScheduler(schedulerName);
if (scheduler != null) {
ODatabaseRecordThreadLocal.INSTANCE.get().getMetadata().getSchedulerListener().removeScheduler(scheduler);
}
return RESULT.RECORD_CHANGED;
}
} | 0true
| core_src_main_java_com_orientechnologies_orient_core_schedule_OSchedulerTrigger.java |
573 | return new Iterator<ODocument>() {
public boolean hasNext() {
return iterator.hasNext();
}
public ODocument next() {
return iterator.next().document;
}
public void remove() {
iterator.remove();
}
}; | 0true
| core_src_main_java_com_orientechnologies_orient_core_index_ODocumentFieldsHashSet.java |
227 | @Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.METHOD)
/**
* Tells to OrientDB to call the method AFTER the record is marshalled and written to the database.
* Applies only to the entity Objects reachable by the OrientDB engine after have registered them.
*/
public @interface OAfterSerialization {
} | 0true
| core_src_main_java_com_orientechnologies_orient_core_annotation_OAfterSerialization.java |
1,584 | public final class Logger {
private static volatile LoggerFactory loggerFactory;
private static final Object FACTORY_LOCK = new Object();
private Logger() {
}
public static ILogger getLogger(Class clazz) {
return getLogger(clazz.getName());
}
public static ILogger getLogger(String name) {
//noinspection DoubleCheckedLocking
if (loggerFactory == null) {
//noinspection SynchronizationOnStaticField
synchronized (FACTORY_LOCK) {
if (loggerFactory == null) {
String loggerType = System.getProperty("hazelcast.logging.type");
loggerFactory = newLoggerFactory(loggerType);
}
}
}
return loggerFactory.getLogger(name);
}
public static LoggerFactory newLoggerFactory(String loggerType) {
LoggerFactory loggerFactory = null;
String loggerClass = System.getProperty("hazelcast.logging.class");
if (loggerClass != null) {
loggerFactory = loadLoggerFactory(loggerClass);
}
if (loggerFactory == null) {
if (loggerType != null) {
if ("log4j".equals(loggerType)) {
loggerFactory = loadLoggerFactory("com.hazelcast.logging.Log4jFactory");
} else if ("slf4j".equals(loggerType)) {
loggerFactory = loadLoggerFactory("com.hazelcast.logging.Slf4jFactory");
} else if ("jdk".equals(loggerType)) {
loggerFactory = new StandardLoggerFactory();
} else if ("none".equals(loggerType)) {
loggerFactory = new NoLogFactory();
}
}
}
if (loggerFactory == null) {
loggerFactory = new StandardLoggerFactory();
}
return loggerFactory;
}
private static LoggerFactory loadLoggerFactory(String className) {
try {
return ClassLoaderUtil.newInstance(null, className);
} catch (Exception e) {
//since we don't have a logger available, lets log it to the System.err
e.printStackTrace();
return null;
}
}
} | 0true
| hazelcast_src_main_java_com_hazelcast_logging_Logger.java |
3,360 | public class GeoPointDoubleArrayIndexFieldData extends AbstractGeoPointIndexFieldData {
private final CircuitBreakerService breakerService;
public static class Builder implements IndexFieldData.Builder {
@Override
public IndexFieldData<?> build(Index index, @IndexSettings Settings indexSettings, FieldMapper<?> mapper, IndexFieldDataCache cache,
CircuitBreakerService breakerService) {
return new GeoPointDoubleArrayIndexFieldData(index, indexSettings, mapper.names(), mapper.fieldDataType(), cache, breakerService);
}
}
public GeoPointDoubleArrayIndexFieldData(Index index, @IndexSettings Settings indexSettings, FieldMapper.Names fieldNames,
FieldDataType fieldDataType, IndexFieldDataCache cache, CircuitBreakerService breakerService) {
super(index, indexSettings, fieldNames, fieldDataType, cache);
this.breakerService = breakerService;
}
@Override
public AtomicGeoPointFieldData<ScriptDocValues> loadDirect(AtomicReaderContext context) throws Exception {
AtomicReader reader = context.reader();
Terms terms = reader.terms(getFieldNames().indexName());
AtomicGeoPointFieldData data = null;
// TODO: Use an actual estimator to estimate before loading.
NonEstimatingEstimator estimator = new NonEstimatingEstimator(breakerService.getBreaker());
if (terms == null) {
data = new Empty(reader.maxDoc());
estimator.afterLoad(null, data.getMemorySizeInBytes());
return data;
}
final BigDoubleArrayList lat = new BigDoubleArrayList();
final BigDoubleArrayList lon = new BigDoubleArrayList();
lat.add(0); // first "t" indicates null value
lon.add(0); // first "t" indicates null value
final float acceptableTransientOverheadRatio = fieldDataType.getSettings().getAsFloat("acceptable_transient_overhead_ratio", OrdinalsBuilder.DEFAULT_ACCEPTABLE_OVERHEAD_RATIO);
OrdinalsBuilder builder = new OrdinalsBuilder(terms.size(), reader.maxDoc(), acceptableTransientOverheadRatio);
boolean success = false;
try {
final GeoPointEnum iter = new GeoPointEnum(builder.buildFromTerms(terms.iterator(null)));
GeoPoint point;
while ((point = iter.next()) != null) {
lat.add(point.getLat());
lon.add(point.getLon());
}
Ordinals build = builder.build(fieldDataType.getSettings());
if (!build.isMultiValued() && CommonSettings.removeOrdsOnSingleValue(fieldDataType)) {
Docs ordinals = build.ordinals();
int maxDoc = reader.maxDoc();
BigDoubleArrayList sLat = new BigDoubleArrayList(reader.maxDoc());
BigDoubleArrayList sLon = new BigDoubleArrayList(reader.maxDoc());
for (int i = 0; i < maxDoc; i++) {
long nativeOrdinal = ordinals.getOrd(i);
sLat.add(lat.get(nativeOrdinal));
sLon.add(lon.get(nativeOrdinal));
}
FixedBitSet set = builder.buildDocsWithValuesSet();
if (set == null) {
data = new GeoPointDoubleArrayAtomicFieldData.Single(sLon, sLat, reader.maxDoc(), ordinals.getNumOrds());
} else {
data = new GeoPointDoubleArrayAtomicFieldData.SingleFixedSet(sLon, sLat, reader.maxDoc(), set, ordinals.getNumOrds());
}
} else {
data = new GeoPointDoubleArrayAtomicFieldData.WithOrdinals(lon, lat, reader.maxDoc(), build);
}
success = true;
return data;
} finally {
if (success) {
estimator.afterLoad(null, data.getMemorySizeInBytes());
}
builder.close();
}
}
} | 0true
| src_main_java_org_elasticsearch_index_fielddata_plain_GeoPointDoubleArrayIndexFieldData.java |
907 | public class LegacyOfferTest extends LegacyCommonSetupBaseTest {
@Resource
private OfferService offerService;
@Resource
private CustomerService customerService;
@Resource(name = "blOrderService")
private OrderService orderService;
@Resource
private OfferDao offerDao;
@Resource
private CustomerOfferDao customerOfferDao;
@Resource
private CatalogService catalogService;
@Resource
private OfferCodeDao offerCodeDao;
@Resource(name = "blOrderItemService")
private OrderItemService orderItemService;
@Resource
private CountryService countryService;
@Resource
private StateService stateService;
private long sku1;
private long sku2;
private CreateOfferUtility createOfferUtility;
@Test(groups = { "offerCreateSku1Legacy" }, dataProvider = "basicSku", dataProviderClass = SkuDaoDataProvider.class)
@Rollback(false)
public void createSku1(Sku sku) {
createOfferUtility = new CreateOfferUtility(offerDao, offerCodeDao, offerService);
sku.setSalePrice(new Money(BigDecimal.valueOf(10.0)));
sku.setRetailPrice(new Money(BigDecimal.valueOf(15.0)));
sku.setName("test1");
assert sku.getId() == null;
sku = catalogService.saveSku(sku);
assert sku.getId() != null;
sku1 = sku.getId();
}
@Test(groups = { "offerCreateSku2Legacy" }, dataProvider = "basicSku", dataProviderClass = SkuDaoDataProvider.class)
@Rollback(false)
public void createSku2(Sku sku) {
sku.setSalePrice(new Money(BigDecimal.valueOf(10.0)));
sku.setRetailPrice(new Money(BigDecimal.valueOf(15.0)));
sku.setName("test2");
assert sku.getId() == null;
sku = catalogService.saveSku(sku);
assert sku.getId() != null;
sku2 = sku.getId();
}
@Test(groups = {"testPercentageOffOfferLegacy"}, dependsOnGroups = { "offerCreateSku1Legacy", "offerCreateSku2Legacy" })
@Transactional
public void testPercentOffOfferWithScaleGreaterThanTwo() throws Exception {
Order order = orderService.createNewCartForCustomer(createCustomer());
order.setFulfillmentGroups(createFulfillmentGroups("standard", ShippingServiceType.BANDED_SHIPPING.getType(), 5D, order));
orderService.save(order, false);
order.addOrderItem(createDiscreteOrderItem(sku1, 100D, null, true, 2, order));
order.addOrderItem(createDiscreteOrderItem(sku2, 100D, null, true, 1, order));
order.addOfferCode(createOfferUtility.createOfferCode("20.5 Percent Off Item Offer", OfferType.ORDER_ITEM, OfferDiscountType.PERCENT_OFF, 20.5, null, null, true, true, 10));
List<Offer> offers = offerService.buildOfferListForOrder(order);
offerService.applyOffersToOrder(offers, order);
// 20% results in $240. 20.5% off results in $238.50
assert ( order.getSubTotal().equals(new Money(238.50D) ));
}
@Test(groups = {"offerUsedForPricingLegacy"}, dependsOnGroups = { "offerCreateSku1Legacy", "offerCreateSku2Legacy" })
@Transactional
public void testOfferUsedForPricing() throws Exception {
Order order = orderService.createNewCartForCustomer(createCustomer());
order.addOrderItem(createDiscreteOrderItem(sku1, 10D, null, true, 2, order));
order.addOrderItem(createDiscreteOrderItem(sku2, 20D, null, true, 1, order));
order.addOfferCode(createOfferUtility.createOfferCode("20 Percent Off Item Offer", OfferType.ORDER_ITEM, OfferDiscountType.PERCENT_OFF, 20, null, "discreteOrderItem.sku.id == " + sku1, true, true, 10));
order.addOfferCode(createOfferUtility.createOfferCode("3 Dollars Off Item Offer", OfferType.ORDER_ITEM, OfferDiscountType.AMOUNT_OFF, 3, null, "discreteOrderItem.sku.id != " + sku1, true, true, 10));
order.addOfferCode(createOfferUtility.createOfferCode("1.20 Dollars Off Order Offer", OfferType.ORDER, OfferDiscountType.AMOUNT_OFF, 1.20, null, null, true, true, 10));
List<Offer> offers = offerService.buildOfferListForOrder(order);
offerService.applyOffersToOrder(offers, order);
assert order.getSubTotal().subtract(order.getOrderAdjustmentsValue()).equals(new Money(31.80D));
}
@Test(groups = {"testOfferNotStackableItemOffersLegacy"}, dependsOnGroups = { "offerUsedForPricingLegacy"})
@Transactional
public void testOfferNotStackableItemOffers() throws Exception {
Order order = orderService.createNewCartForCustomer(createCustomer());
order.addOrderItem(createDiscreteOrderItem(sku1, 100D, null, true, 2, order));
order.addOrderItem(createDiscreteOrderItem(sku2, 100D, null, true, 2, order));
order.addOfferCode(createOfferUtility.createOfferCode("20 Percent Off Item Offer", OfferType.ORDER_ITEM, OfferDiscountType.PERCENT_OFF, 20, null, "discreteOrderItem.sku.id == " + sku1, false, true, 1));
order.addOfferCode(createOfferUtility.createOfferCode("30 Dollars Off Item Offer", OfferType.ORDER_ITEM, OfferDiscountType.AMOUNT_OFF, 30, null, "discreteOrderItem.sku.id == " + sku1, true, true, 1));
order.addOfferCode(createOfferUtility.createOfferCode("20 Percent Off Item Offer", OfferType.ORDER_ITEM, OfferDiscountType.PERCENT_OFF, 20, null, "discreteOrderItem.sku.id != " + sku1, true, true, 1));
order.addOfferCode(createOfferUtility.createOfferCode("30 Dollars Off Item Offer", OfferType.ORDER_ITEM, OfferDiscountType.AMOUNT_OFF, 30, null, "discreteOrderItem.sku.id != " + sku1, false, true, 1));
List<Offer> offers = offerService.buildOfferListForOrder(order);
offerService.applyOffersToOrder(offers, order);
assert (order.getSubTotal().equals(new Money(252D)));
}
@Test(groups = {"testOfferNotCombinableItemOffersLegacy"}, dependsOnGroups = { "testOfferNotStackableItemOffersLegacy"})
@Transactional
public void testOfferNotCombinableItemOffers() throws Exception {
Order order = orderService.createNewCartForCustomer(createCustomer());
order.addOrderItem(createDiscreteOrderItem(sku1, 100D, null, true, 2, order));
order.addOrderItem(createDiscreteOrderItem(sku2, 100D, null, true, 2, order));
// Only one of the "not-combinable" orders can apply.
order.addOfferCode(createOfferUtility.createOfferCode("20 Percent Off Item Offer", OfferType.ORDER_ITEM, OfferDiscountType.PERCENT_OFF, 20, null, "discreteOrderItem.sku.id == " + sku1, true, true, 1));
order.addOfferCode(createOfferUtility.createOfferCode("30 Dollars Off Item Offer", OfferType.ORDER_ITEM, OfferDiscountType.AMOUNT_OFF, 30, null, "discreteOrderItem.sku.id == " + sku1, true, false, 1));
order.addOfferCode(createOfferUtility.createOfferCode("20 Percent Off Item Offer", OfferType.ORDER_ITEM, OfferDiscountType.PERCENT_OFF, 20, null, "discreteOrderItem.sku.id != " + sku1, true, false, 1));
order.addOfferCode(createOfferUtility.createOfferCode("30 Dollars Off Item Offer", OfferType.ORDER_ITEM, OfferDiscountType.AMOUNT_OFF, 30, null, "discreteOrderItem.sku.id != " + sku1, true, true, 1));
List<Offer> offers = offerService.buildOfferListForOrder(order);
offerService.applyOffersToOrder(offers, order);
assert (order.getSubTotal().equals(new Money(280D)));
}
@Test(groups = { "testOfferLowerSalePriceLegacy" })
@Transactional
public void testOfferLowerSalePrice() throws Exception {
Order order = orderService.createNewCartForCustomer(createCustomer());
order.addOrderItem(createDiscreteOrderItem(sku1, 100D, 50D, true, 2, order));
order.addOrderItem(createDiscreteOrderItem(sku2, 100D, null, true, 2, order));
order.addOfferCode(createOfferUtility.createOfferCode("20 Percent Off Item Offer", OfferType.ORDER_ITEM, OfferDiscountType.PERCENT_OFF, 20, null, "discreteOrderItem.sku.id == " + sku1, true, true, 1));
order.addOfferCode(createOfferUtility.createOfferCode("30 Dollars Off Item Offer", OfferType.ORDER_ITEM, OfferDiscountType.AMOUNT_OFF, 30, null, "discreteOrderItem.sku.id == " + sku1, true, true, 1));
order.addOfferCode(createOfferUtility.createOfferCode("20 Percent Off Item Offer", OfferType.ORDER_ITEM, OfferDiscountType.PERCENT_OFF, 20, null, "discreteOrderItem.sku.id != " + sku1, true, true, 1));
order.addOfferCode(createOfferUtility.createOfferCode("30 Dollars Off Item Offer", OfferType.ORDER_ITEM, OfferDiscountType.AMOUNT_OFF, 30, null, "discreteOrderItem.sku.id != " + sku1, true, true, 1));
List<Offer> offers = offerService.buildOfferListForOrder(order);
offerService.applyOffersToOrder(offers, order);
assert (order.getSubTotal().equals(new Money(212D)));
}
@Test(groups = { "testOfferLowerSalePriceWithNotCombinableOfferLegacy" })
@Transactional
public void testOfferLowerSalePriceWithNotCombinableOffer() throws Exception {
Order order = orderService.createNewCartForCustomer(createCustomer());
order.addOrderItem(createDiscreteOrderItem(sku1, 100D, 50D, true, 2, order));
order.addOrderItem(createDiscreteOrderItem(sku2, 100D, null, true, 2, order));
order.addOfferCode(createOfferUtility.createOfferCode("20 Percent Off Item Offer", OfferType.ORDER_ITEM, OfferDiscountType.PERCENT_OFF, 20, null, null, true, true, 1));
order.addOfferCode(createOfferUtility.createOfferCode("30 Dollars Off Item Offer", OfferType.ORDER_ITEM, OfferDiscountType.AMOUNT_OFF, 30, null, null, true, false, 1));
List<Offer> offers = offerService.buildOfferListForOrder(order);
offerService.applyOffersToOrder(offers, order);
assert (order.getSubTotal().equals(new Money(240D)));
}
@Test(groups = { "testOfferLowerSalePriceWithNotCombinableOfferAndInformationLegacy" })
@Transactional
public void testOfferLowerSalePriceWithNotCombinableOfferAndInformation() throws Exception {
Order order = orderService.createNewCartForCustomer(createCustomer());
order.addOrderItem(createDiscreteOrderItem(sku1, 100D, 50D, true, 2, order));
order.addOrderItem(createDiscreteOrderItem(sku2, 100D, null, true, 2, order));
OfferCode offerCode1 = createOfferUtility.createOfferCode("20 Percent Off Item Offer", OfferType.ORDER_ITEM, OfferDiscountType.PERCENT_OFF, 20, null, null, true, true, 1);
OfferCode offerCode2 = createOfferUtility.createOfferCode("30 Dollars Off Item Offer", OfferType.ORDER_ITEM, OfferDiscountType.AMOUNT_OFF, 30, null, null, true, false, 1);
order.addOfferCode(offerCode1);
order.addOfferCode(offerCode2);
order = orderService.save(order, false);
OfferInfo info1 = offerDao.createOfferInfo();
info1.getFieldValues().put("key1", "value1");
order.getAdditionalOfferInformation().put(offerCode1.getOffer(), info1);
OfferInfo info2 = offerDao.createOfferInfo();
info2.getFieldValues().put("key2", "value2");
order.getAdditionalOfferInformation().put(offerCode2.getOffer(), info2);
order = orderService.save(order, false);
order = orderService.save(order, true);
assert (order.getSubTotal().equals(new Money(240D)));
order = orderService.findOrderById(order.getId());
assert(order.getAdditionalOfferInformation().get(offerCode1.getOffer()).equals(info1));
}
@Test(groups = { "testOfferLowerSalePriceWithNotCombinableOffer2Legacy" })
@Transactional
public void testOfferLowerSalePriceWithNotCombinableOffer2() throws Exception {
Order order = orderService.createNewCartForCustomer(createCustomer());
order.addOrderItem(createDiscreteOrderItem(sku1, 100D, 50D, true, 2, order));
order.addOrderItem(createDiscreteOrderItem(sku2, 100D, 50D, true, 2, order));
order.addOfferCode(createOfferUtility.createOfferCode("25 Dollars Off Item Offer", OfferType.ORDER_ITEM, OfferDiscountType.AMOUNT_OFF, 25, null, null, true, true, 1));
order.addOfferCode(createOfferUtility.createOfferCode("35 Dollars Off Item Offer", OfferType.ORDER_ITEM, OfferDiscountType.AMOUNT_OFF, 35, null, null, true, true, 1));
order.addOfferCode(createOfferUtility.createOfferCode("45 Dollars Off Item Offer", OfferType.ORDER_ITEM, OfferDiscountType.AMOUNT_OFF, 45, null, null, true, false, 1));
order.addOfferCode(createOfferUtility.createOfferCode("30 Dollars Off Order Offer", OfferType.ORDER, OfferDiscountType.AMOUNT_OFF, 30, null, null, true, true, 1));
List<Offer> offers = offerService.buildOfferListForOrder(order);
offerService.applyOffersToOrder(offers, order);
assert order.getSubTotal().subtract(order.getOrderAdjustmentsValue()).equals(new Money(130D));
}
@Test(groups = { "testOfferNotCombinableOrderOffersLegacy" })
@Transactional
public void testOfferNotCombinableOrderOffers() throws Exception {
Order order = orderService.createNewCartForCustomer(createCustomer());
order.addOrderItem(createDiscreteOrderItem(sku1, 100D, null, true, 2, order));
order.addOrderItem(createDiscreteOrderItem(sku2, 100D, null, true, 2, order));
order.addOfferCode(createOfferUtility.createOfferCode("20 Percent Off Order Offer", OfferType.ORDER, OfferDiscountType.PERCENT_OFF, 20, null, null, true, true, 1));
order.addOfferCode(createOfferUtility.createOfferCode("30 Dollars Off Order Offer", OfferType.ORDER, OfferDiscountType.AMOUNT_OFF, 30, null, null, true, true, 1));
order.addOfferCode(createOfferUtility.createOfferCode("50 Dollars Off Order Offer", OfferType.ORDER, OfferDiscountType.AMOUNT_OFF, 50, null, null, true, false, 1));
List<Offer> offers = offerService.buildOfferListForOrder(order);
offerService.applyOffersToOrder(offers, order);
assert order.getSubTotal().subtract(order.getOrderAdjustmentsValue()).equals(new Money(290D));
}
@Test(groups = { "testOfferNotCombinableOrderOffersWithItemOfferLegacy" })
@Transactional
public void testOfferNotCombinableOrderOffersWithItemOffer() throws Exception {
Order order = orderService.createNewCartForCustomer(createCustomer());
order.addOrderItem(createDiscreteOrderItem(sku1, 100D, null, true, 2, order));
order.addOrderItem(createDiscreteOrderItem(sku2, 100D, null, true, 2, order));
order.addOfferCode(createOfferUtility.createOfferCode("20 Percent Off Item Offer", OfferType.ORDER_ITEM, OfferDiscountType.PERCENT_OFF, 20, null, null, true, false, 1));
order.addOfferCode(createOfferUtility.createOfferCode("10 Dollars Off Item Offer", OfferType.ORDER_ITEM, OfferDiscountType.AMOUNT_OFF, 10, null, null, true, true, 1));
order.addOfferCode(createOfferUtility.createOfferCode("15 Dollars Off Item Offer", OfferType.ORDER_ITEM, OfferDiscountType.AMOUNT_OFF, 15, null, null, true, true, 1));
order.addOfferCode(createOfferUtility.createOfferCode("90 Dollars Off Order Offer", OfferType.ORDER, OfferDiscountType.AMOUNT_OFF, 90, null, null, true, false, 1));
order.addOfferCode(createOfferUtility.createOfferCode("50 Dollars Off Order Offer", OfferType.ORDER, OfferDiscountType.AMOUNT_OFF, 50, null, null, true, true, 1));
List<Offer> offers = offerService.buildOfferListForOrder(order);
offerService.applyOffersToOrder(offers, order);
assert order.getSubTotal().subtract(order.getOrderAdjustmentsValue()).equals(new Money(210D));
}
@Test(groups = { "testGlobalOffersLegacy" })
@Transactional
public void testGlobalOffers() throws Exception {
Order order = orderService.createNewCartForCustomer(createCustomer());
order.addOrderItem(createDiscreteOrderItem(sku1, 10D, null, true, 2, order));
order.addOrderItem(createDiscreteOrderItem(sku2, 20D, null, true, 1, order));
order.addOfferCode(createOfferUtility.createOfferCode("20 Percent Off Item Offer", OfferType.ORDER_ITEM, OfferDiscountType.PERCENT_OFF, 20, null, "discreteOrderItem.sku.id == " + sku1, true, true, 10));
order.addOfferCode(createOfferUtility.createOfferCode("3 Dollars Off Item Offer", OfferType.ORDER_ITEM, OfferDiscountType.AMOUNT_OFF, 3, null, "discreteOrderItem.sku.id != " + sku1, true, true, 10));
Offer offer = createOfferUtility.createOffer("1.20 Dollars Off Order Offer", OfferType.ORDER, OfferDiscountType.AMOUNT_OFF, 1.20, null, null, true, true, 10);
offer.setDeliveryType(OfferDeliveryType.AUTOMATIC);
offer.setAutomaticallyAdded(true);
offer = offerService.save(offer);
List<Offer> offers = offerService.buildOfferListForOrder(order);
offerService.applyOffersToOrder(offers, order);
assert order.getSubTotal().subtract(order.getOrderAdjustmentsValue()).equals(new Money(31.80D));
}
@Test(groups = { "testCustomerAssociatedOffersLegacy" })
@Transactional
public void testCustomerAssociatedOffers() throws Exception {
Order order = orderService.createNewCartForCustomer(createCustomer());
order.addOrderItem(createDiscreteOrderItem(sku1, 10D, null, true, 2, order));
order.addOrderItem(createDiscreteOrderItem(sku2, 20D, null, true, 1, order));
order.addOfferCode(createOfferUtility.createOfferCode("20 Percent Off Item Offer", OfferType.ORDER_ITEM, OfferDiscountType.PERCENT_OFF, 20, null, "discreteOrderItem.sku.id == " + sku1, true, true, 10));
order.addOfferCode(createOfferUtility.createOfferCode("3 Dollars Off Item Offer", OfferType.ORDER_ITEM, OfferDiscountType.AMOUNT_OFF, 3, null, "discreteOrderItem.sku.id != " + sku1, true, true, 10));
Offer offer = createOfferUtility.createOffer("1.20 Dollars Off Order Offer", OfferType.ORDER, OfferDiscountType.AMOUNT_OFF, 1.20, null, null, true, true, 10);
offer.setDeliveryType(OfferDeliveryType.MANUAL);
offer = offerService.save(offer);
CustomerOffer customerOffer = new CustomerOfferImpl();
customerOffer.setCustomer(order.getCustomer());
customerOffer.setOffer(offer);
customerOffer = customerOfferDao.save(customerOffer);
List<Offer> offers = offerService.buildOfferListForOrder(order);
offerService.applyOffersToOrder(offers, order);
assert order.getSubTotal().subtract(order.getOrderAdjustmentsValue()).equals(new Money(31.80D));
}
@Test(groups = { "testCustomerAssociatedOffers2Legacy" })
@Transactional
public void testCustomerAssociatedOffers2() throws Exception {
Order order = orderService.createNewCartForCustomer(createCustomer());
order.addOrderItem(createDiscreteOrderItem(sku1, 20D, null, true, 1, order));
order.addOrderItem(createDiscreteOrderItem(sku2, 20D, null, true, 1, order));
order.addOfferCode(createOfferUtility.createOfferCode("15%OFF", "15 Percent Off Item Offer", OfferType.ORDER_ITEM, OfferDiscountType.PERCENT_OFF, 15, null, null, false, true, 0));
Offer offer1 = createOfferUtility.createOffer("20 Percent Off Item Offer", OfferType.ORDER_ITEM, OfferDiscountType.PERCENT_OFF, 20, null, "discreteOrderItem.sku.id == " + sku1, false, true, 0);
offer1.setDeliveryType(OfferDeliveryType.MANUAL);
offerDao.save(offer1);
CustomerOffer customerOffer1 = new CustomerOfferImpl();
customerOffer1.setCustomer(order.getCustomer());
customerOffer1.setOffer(offer1);
customerOfferDao.save(customerOffer1);
Offer offer2 = createOfferUtility.createOffer("10 Percent Off Item Offer", OfferType.ORDER_ITEM, OfferDiscountType.PERCENT_OFF, 10, null, "discreteOrderItem.sku.id == " + sku2, false, true, 0);
offer2.setDeliveryType(OfferDeliveryType.MANUAL);
offerDao.save(offer2);
CustomerOffer customerOffer2 = new CustomerOfferImpl();
customerOffer2.setCustomer(order.getCustomer());
customerOffer2.setOffer(offer2);
customerOfferDao.save(customerOffer2);
List<Offer> offers = offerService.buildOfferListForOrder(order);
offerService.applyOffersToOrder(offers, order);
assert (order.getSubTotal().equals(new Money(33D)));
}
@Test(groups = { "testFulfillmentGroupOffersLegacy" })
@Transactional
public void testFulfillmentGroupOffers() throws Exception {
Order order = orderService.createNewCartForCustomer(createCustomer());
order.setFulfillmentGroups(createFulfillmentGroups("standard", ShippingServiceType.BANDED_SHIPPING.getType(), 5D, order));
orderService.save(order, false);
order.addOrderItem(createDiscreteOrderItem(sku1, 10D, null, true, 2, order));
order.addOrderItem(createDiscreteOrderItem(sku2, 20D, null, true, 1, order));
order.addOfferCode(createOfferUtility.createOfferCode("20 Percent Off Item Offer", OfferType.FULFILLMENT_GROUP, OfferDiscountType.PERCENT_OFF, 20, null, null, true, true, 10));
order.addOfferCode(createOfferUtility.createOfferCode("3 Dollars Off Item Offer", OfferType.FULFILLMENT_GROUP, OfferDiscountType.AMOUNT_OFF, 3, null, null, true, true, 10));
List<Offer> offers = offerService.buildOfferListForOrder(order);
offerService.applyOffersToOrder(offers, order);
offerService.applyFulfillmentGroupOffersToOrder(offers, order);
assert (order.getFulfillmentGroups().get(0).getShippingPrice().equals(new Money(1.6D)));
}
@Test(groups = { "testOfferDeleteLegacy" })
@Transactional
public void testOfferDelete() throws Exception {
CustomerOffer customerOffer = customerOfferDao.create();
Customer customer = createCustomer();
Long customerId = customer.getId();
customerOffer.setCustomer(customerService.saveCustomer(customer));
Offer offer = createOfferUtility.createOffer("1.20 Dollars Off Order Offer", OfferType.ORDER, OfferDiscountType.AMOUNT_OFF, 1.20, null, null, true, true, 10);
offer = offerService.save(offer);
Long offerId = offer.getId();
offerDao.delete(offer);
Offer deletedOffer = offerDao.readOfferById(offerId);
assert ((OfferImpl) deletedOffer).getArchived() == 'Y';
offer = createOfferUtility.createOffer("1.20 Dollars Off Order Offer", OfferType.ORDER, OfferDiscountType.AMOUNT_OFF, 1.20, null, null, true, true, 10);
offer = offerService.save(offer);
customerOffer.setOffer(offer);
customerOffer = customerOfferDao.save(customerOffer);
Long customerOfferId = customerOffer.getId();
customerOffer = customerOfferDao.readCustomerOfferById(customerOfferId);
assert(customerOffer != null);
Customer customer2 = createCustomer();
customerOffer.setCustomer(customerService.saveCustomer(customer2));
customerOffer = customerOfferDao.save(customerOffer);
assert !customerOffer.getCustomer().getId().equals(customerId);
customerOfferDao.delete(customerOffer);
customerOffer = customerOfferDao.readCustomerOfferById(customerOfferId);
assert customerOffer == null || ((OfferImpl) customerOffer).getArchived() == 'Y';
}
@Test(groups = { "testReadAllOffersLegacy" })
@Transactional
public void testReadAllOffers() throws Exception {
Offer offer = createOfferUtility.createOffer("1.20 Dollars Off Order Offer", OfferType.ORDER, OfferDiscountType.AMOUNT_OFF, 1.20, null, null, true, true, 10);
offer.setDeliveryType(OfferDeliveryType.MANUAL);
offer = offerService.save(offer);
List<Offer> allOffers = offerService.findAllOffers();
assert allOffers != null && allOffers.isEmpty() == false;
}
@Test(groups = { "testOfferCodeDaoLegacy" })
@Transactional
public void testOfferCodeDao() throws Exception {
String offerCodeString = "AJ's Code";
OfferCode offerCode = createOfferUtility.createOfferCode("1.20 Dollars Off Order Offer", OfferType.ORDER, OfferDiscountType.AMOUNT_OFF, 1.20, null, null, true, true, 10);
offerCode.setOfferCode(offerCodeString);
offerCode = offerService.saveOfferCode(offerCode);
Long offerCodeId = offerCode.getId();
assert offerCode.getOfferCode().equals(offerCodeString);
Offer offer = offerCode.getOffer();
Offer storedOffer = offerService.lookupOfferByCode(offerCodeString);
assert offer.getId().equals(storedOffer.getId());
OfferCode newOfferCode = offerCodeDao.readOfferCodeById(offerCodeId);
assert newOfferCode.getOfferCode().equals(offerCode.getOfferCode());
newOfferCode = offerCodeDao.readOfferCodeByCode(offerCodeString);
assert newOfferCode.getOfferCode().equals(offerCode.getOfferCode());
offerCodeId = newOfferCode.getId();
offerCodeDao.delete(newOfferCode);
OfferCode deletedOfferCode = offerCodeDao.readOfferCodeById(offerCodeId);
assert deletedOfferCode == null;
}
@Test(groups = { "testCustomerOffersLegacy" })
@Transactional
public void testCustomerOffers() throws Exception {
Order order = orderService.createNewCartForCustomer(createCustomer());
Offer offer = createOfferUtility.createOffer("1.20 Dollars Off Order Offer", OfferType.ORDER, OfferDiscountType.AMOUNT_OFF, 1.20, null, null, true, true, 10);
CustomerOffer customerOffer = new CustomerOfferImpl();
customerOffer.setCustomer(order.getCustomer());
customerOffer.setOffer(offer);
customerOffer = customerOfferDao.save(customerOffer);
CustomerOffer customerOfferTest = customerOfferDao.readCustomerOfferById(customerOffer.getId());
assert (customerOffer.getId().equals(customerOfferTest.getId()));
}
private List<FulfillmentGroup> createFulfillmentGroups(String method, String service, Double shippingPrice, Order order) {
List<FulfillmentGroup> groups = new ArrayList<FulfillmentGroup>();
FulfillmentGroup group = new FulfillmentGroupImpl();
group.setMethod(method);
group.setService(service);
groups.add(group);
group.setRetailShippingPrice(new Money(shippingPrice));
group.setOrder(order);
Address address = new AddressImpl();
address.setAddressLine1("123 Test Rd");
address.setCity("Dallas");
address.setFirstName("Jeff");
address.setLastName("Fischer");
address.setPostalCode("75240");
address.setPrimaryPhone("972-978-9067");
Country country = new CountryImpl();
country.setAbbreviation("US");
country.setName("United States");
countryService.save(country);
State state = new StateImpl();
state.setAbbreviation("TX");
state.setName("Texas");
state.setCountry(country);
stateService.save(state);
address.setState(state);
address.setCountry(country);
for (OrderItem orderItem : order.getOrderItems()) {
FulfillmentGroupItem fgItem = new FulfillmentGroupItemImpl();
fgItem.setFulfillmentGroup(group);
fgItem.setOrderItem(orderItem);
fgItem.setQuantity(orderItem.getQuantity());
group.addFulfillmentGroupItem(fgItem);
}
group.setAddress(address);
return groups;
}
private DiscreteOrderItem createDiscreteOrderItem(Long skuId, Double retailPrice, Double salePrice, boolean isDiscountable, int quantity, Order order) {
DiscreteOrderItem item = new DiscreteOrderItemImpl();
Sku sku = catalogService.findSkuById(skuId);
sku.setRetailPrice(new Money(retailPrice));
if (salePrice != null) {
sku.setSalePrice(new Money(salePrice));
} else {
sku.setSalePrice(null);
}
sku.setDiscountable(isDiscountable);
sku.setName("test");
sku = catalogService.saveSku(sku);
item.setSku(sku);
item.setQuantity(quantity);
Product product = new ProductImpl();
product.setDefaultSku(sku);
product = catalogService.saveProduct(product);
item.setProduct(product);
item.setOrder(order);
item = (DiscreteOrderItem) orderItemService.saveOrderItem(item);
return item;
}
} | 0true
| integration_src_test_java_org_broadleafcommerce_core_offer_service_legacy_LegacyOfferTest.java |
/**
 * Serialization adapter that carries a subset of {@link MapConfig} over the
 * wire as a {@link DataSerializable} (e.g. for management center calls).
 *
 * NOTE(review): {@link #writeData} and {@link #readData} must keep their field
 * order in lock-step; reordering either method silently corrupts the stream.
 */
public class MapConfigAdapter implements DataSerializable {

    // Wrapped config; set by the constructor on the sending side and rebuilt
    // from the stream in readData() on the receiving side.
    private MapConfig config;

    /** Required no-arg constructor for the deserialization path. */
    public MapConfigAdapter() {
    }

    public MapConfigAdapter(MapConfig mapConfig) {
        this.config = mapConfig;
    }

    @Override
    public void readData(ObjectDataInput in) throws IOException {
        config = new MapConfig();
        // Field order mirrors writeData() exactly.
        config.setName(in.readUTF());
        config.setInMemoryFormat(InMemoryFormat.valueOf(in.readUTF()));
        config.setBackupCount(in.readInt());
        config.setAsyncBackupCount(in.readInt());
        config.setEvictionPercentage(in.readInt());
        config.setTimeToLiveSeconds(in.readInt());
        config.setMaxIdleSeconds(in.readInt());
        // Max-size config is flattened into (size, policy) on the wire.
        config.setMaxSizeConfig(new MaxSizeConfig().setSize(in.readInt())
                .setMaxSizePolicy(MaxSizeConfig.MaxSizePolicy.valueOf(in.readUTF())));
        config.setReadBackupData(in.readBoolean());
        config.setEvictionPolicy(MapConfig.EvictionPolicy.valueOf(in.readUTF()));
        config.setMergePolicy(in.readUTF());
    }

    @Override
    public void writeData(ObjectDataOutput out) throws IOException {
        // Field order mirrors readData() exactly.
        out.writeUTF(config.getName());
        out.writeUTF(config.getInMemoryFormat().toString());
        out.writeInt(config.getBackupCount());
        out.writeInt(config.getAsyncBackupCount());
        out.writeInt(config.getEvictionPercentage());
        out.writeInt(config.getTimeToLiveSeconds());
        out.writeInt(config.getMaxIdleSeconds());
        out.writeInt(config.getMaxSizeConfig().getSize());
        out.writeUTF(config.getMaxSizeConfig().getMaxSizePolicy().toString());
        out.writeBoolean(config.isReadBackupData());
        out.writeUTF(config.getEvictionPolicy().name());
        out.writeUTF(config.getMergePolicy());
    }

    /** @return the wrapped (possibly freshly deserialized) map config */
    public MapConfig getMapConfig() {
        return config;
    }
}
| hazelcast_src_main_java_com_hazelcast_management_MapConfigAdapter.java |
33 | @Service("blProductFieldService")
public class ProductFieldServiceImpl extends AbstractRuleBuilderFieldService {
@Override
public void init() {
fields.add(new FieldData.Builder()
.label("rule_productUrl")
.name("url")
.operators("blcOperators_Text")
.options("[]")
.type(SupportedFieldType.STRING)
.build());
fields.add(new FieldData.Builder()
.label("rule_productUrlKey")
.name("urlKey")
.operators("blcOperators_Text")
.options("[]")
.type(SupportedFieldType.STRING)
.build());
fields.add(new FieldData.Builder()
.label("rule_productIsFeatured")
.name("isFeaturedProduct")
.operators("blcOperators_Boolean")
.options("[]")
.type(SupportedFieldType.BOOLEAN)
.build());
fields.add(new FieldData.Builder()
.label("rule_productManufacturer")
.name("manufacturer")
.operators("blcOperators_Text")
.options("[]")
.type(SupportedFieldType.STRING)
.build());
fields.add(new FieldData.Builder()
.label("rule_productModel")
.name("model")
.operators("blcOperators_Text")
.options("[]")
.type(SupportedFieldType.STRING)
.build());
fields.add(new FieldData.Builder()
.label("rule_productSkuName")
.name("defaultSku.name")
.operators("blcOperators_Text")
.options("[]")
.type(SupportedFieldType.STRING)
.build());
fields.add(new FieldData.Builder()
.label("rule_productSkuFulfillmentType")
.name("defaultSku.fulfillmentType")
.operators("blcOperators_Enumeration")
.options("blcOptions_FulfillmentType")
.type(SupportedFieldType.BROADLEAF_ENUMERATION)
.build());
fields.add(new FieldData.Builder()
.label("rule_productSkuInventoryType")
.name("defaultSku.inventoryType")
.operators("blcOperators_Enumeration")
.options("blcOptions_InventoryType")
.type(SupportedFieldType.BROADLEAF_ENUMERATION)
.build());
fields.add(new FieldData.Builder()
.label("rule_productSkuDescription")
.name("defaultSku.description")
.operators("blcOperators_Text")
.options("[]")
.type(SupportedFieldType.STRING)
.build());
fields.add(new FieldData.Builder()
.label("rule_productSkuLongDescription")
.name("defaultSku.longDescription")
.operators("blcOperators_Text")
.options("[]")
.type(SupportedFieldType.STRING)
.build());
fields.add(new FieldData.Builder()
.label("rule_productSkuTaxable")
.name("defaultSku.taxable")
.operators("blcOperators_Boolean")
.options("[]")
.type(SupportedFieldType.BOOLEAN)
.build());
fields.add(new FieldData.Builder()
.label("rule_productSkuAvailable")
.name("defaultSku.available")
.operators("blcOperators_Boolean")
.options("[]")
.type(SupportedFieldType.BOOLEAN)
.build());
fields.add(new FieldData.Builder()
.label("rule_productSkuStartDate")
.name("defaultSku.activeStartDate")
.operators("blcOperators_Date")
.options("[]")
.type(SupportedFieldType.DATE)
.build());
fields.add(new FieldData.Builder()
.label("rule_productSkuEndDate")
.name("defaultSku.activeEndDate")
.operators("blcOperators_Date")
.options("[]")
.type(SupportedFieldType.DATE)
.build());
}
@Override
public String getName() {
return RuleIdentifier.PRODUCT;
}
@Override
public String getDtoClassName() {
return "org.broadleafcommerce.core.catalog.domain.ProductImpl";
}
} | 0true
| admin_broadleaf-admin-module_src_main_java_org_broadleafcommerce_admin_web_rulebuilder_service_ProductFieldServiceImpl.java |
// Factory registration: maps the SET_REPLICATION type id to a fresh
// SetReplicationOperation. The Integer argument (the type id) is unused
// because the operation needs no constructor input.
constructors[SET_REPLICATION] = new ConstructorFunction<Integer, IdentifiedDataSerializable>() {
    public IdentifiedDataSerializable createNew(Integer arg) {
        return new SetReplicationOperation();
    }
};
| hazelcast_src_main_java_com_hazelcast_collection_CollectionDataSerializerHook.java |
28 | public abstract class AbstractCassandraBlueprintsTest extends TitanBlueprintsTest {
@Override
public void beforeSuite() {
//Do nothing
}
@Override
public TitanGraph openGraph(String uid) {
return TitanFactory.open(getGraphConfig());
}
@Override
public boolean supportsMultipleGraphs() {
return false;
}
protected abstract WriteConfiguration getGraphConfig();
} | 0true
| titan-cassandra_src_test_java_com_thinkaurelius_titan_blueprints_AbstractCassandraBlueprintsTest.java |
4,443 | public class IndicesFieldDataCache extends AbstractComponent implements RemovalListener<IndicesFieldDataCache.Key, AtomicFieldData> {
Cache<Key, AtomicFieldData> cache;
private volatile String size;
private volatile long sizeInBytes;
private volatile TimeValue expire;
@Inject
public IndicesFieldDataCache(Settings settings) {
super(settings);
this.size = componentSettings.get("size", "-1");
this.sizeInBytes = componentSettings.getAsMemory("size", "-1").bytes();
this.expire = componentSettings.getAsTime("expire", null);
buildCache();
}
private void buildCache() {
CacheBuilder<Key, AtomicFieldData> cacheBuilder = CacheBuilder.newBuilder()
.removalListener(this);
if (sizeInBytes > 0) {
cacheBuilder.maximumWeight(sizeInBytes).weigher(new FieldDataWeigher());
}
// defaults to 4, but this is a busy map for all indices, increase it a bit
cacheBuilder.concurrencyLevel(16);
if (expire != null && expire.millis() > 0) {
cacheBuilder.expireAfterAccess(expire.millis(), TimeUnit.MILLISECONDS);
}
logger.debug("using size [{}] [{}], expire [{}]", size, new ByteSizeValue(sizeInBytes), expire);
cache = cacheBuilder.build();
}
public void close() {
cache.invalidateAll();
}
public IndexFieldDataCache buildIndexFieldDataCache(@Nullable IndexService indexService, Index index, FieldMapper.Names fieldNames, FieldDataType fieldDataType) {
return new IndexFieldCache(indexService, index, fieldNames, fieldDataType);
}
@Override
public void onRemoval(RemovalNotification<Key, AtomicFieldData> notification) {
Key key = notification.getKey();
if (key == null || key.listener == null) {
return; // nothing to do here really...
}
IndexFieldCache indexCache = key.indexCache;
long sizeInBytes = key.sizeInBytes;
AtomicFieldData value = notification.getValue();
if (sizeInBytes == -1 && value != null) {
sizeInBytes = value.getMemorySizeInBytes();
}
key.listener.onUnload(indexCache.fieldNames, indexCache.fieldDataType, notification.wasEvicted(), sizeInBytes, value);
}
public static class FieldDataWeigher implements Weigher<Key, AtomicFieldData> {
@Override
public int weigh(Key key, AtomicFieldData fieldData) {
int weight = (int) Math.min(fieldData.getMemorySizeInBytes(), Integer.MAX_VALUE);
return weight == 0 ? 1 : weight;
}
}
/**
* A specific cache instance for the relevant parameters of it (index, fieldNames, fieldType).
*/
class IndexFieldCache implements IndexFieldDataCache, SegmentReader.CoreClosedListener {
@Nullable
private final IndexService indexService;
final Index index;
final FieldMapper.Names fieldNames;
final FieldDataType fieldDataType;
IndexFieldCache(@Nullable IndexService indexService, Index index, FieldMapper.Names fieldNames, FieldDataType fieldDataType) {
this.indexService = indexService;
this.index = index;
this.fieldNames = fieldNames;
this.fieldDataType = fieldDataType;
}
@Override
public <FD extends AtomicFieldData, IFD extends IndexFieldData<FD>> FD load(final AtomicReaderContext context, final IFD indexFieldData) throws Exception {
final Key key = new Key(this, context.reader().getCoreCacheKey());
//noinspection unchecked
return (FD) cache.get(key, new Callable<AtomicFieldData>() {
@Override
public AtomicFieldData call() throws Exception {
SegmentReaderUtils.registerCoreListener(context.reader(), IndexFieldCache.this);
AtomicFieldData fieldData = indexFieldData.loadDirect(context);
if (indexService != null) {
ShardId shardId = ShardUtils.extractShardId(context.reader());
if (shardId != null) {
IndexShard shard = indexService.shard(shardId.id());
if (shard != null) {
key.listener = shard.fieldData();
}
}
}
if (key.listener != null) {
key.listener.onLoad(fieldNames, fieldDataType, fieldData);
}
return fieldData;
}
});
}
@Override
public void onClose(Object coreKey) {
cache.invalidate(new Key(this, coreKey));
}
@Override
public void clear() {
for (Key key : cache.asMap().keySet()) {
if (key.indexCache.index.equals(index)) {
cache.invalidate(key);
}
}
}
@Override
public void clear(String fieldName) {
for (Key key : cache.asMap().keySet()) {
if (key.indexCache.index.equals(index)) {
if (key.indexCache.fieldNames.fullName().equals(fieldName)) {
cache.invalidate(key);
}
}
}
}
@Override
public void clear(Object coreCacheKey) {
cache.invalidate(new Key(this, coreCacheKey));
}
}
public static class Key {
public final IndexFieldCache indexCache;
public final Object readerKey;
@Nullable
public IndexFieldDataCache.Listener listener; // optional stats listener
long sizeInBytes = -1; // optional size in bytes (we keep it here in case the values are soft references)
Key(IndexFieldCache indexCache, Object readerKey) {
this.indexCache = indexCache;
this.readerKey = readerKey;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
Key key = (Key) o;
if (!indexCache.equals(key.indexCache)) return false;
if (!readerKey.equals(key.readerKey)) return false;
return true;
}
@Override
public int hashCode() {
int result = indexCache.hashCode();
result = 31 * result + readerKey.hashCode();
return result;
}
}
} | 1no label
| src_main_java_org_elasticsearch_indices_fielddata_cache_IndicesFieldDataCache.java |
1,651 | public class Classes {
/**
* The package separator character '.'
*/
private static final char PACKAGE_SEPARATOR = '.';
/**
* Return the default ClassLoader to use: typically the thread context
* ClassLoader, if available; the ClassLoader that loaded the ClassUtils
* class will be used as fallback.
* <p/>
* <p>Call this method if you intend to use the thread context ClassLoader
* in a scenario where you absolutely need a non-null ClassLoader reference:
* for example, for class path resource loading (but not necessarily for
* <code>Class.forName</code>, which accepts a <code>null</code> ClassLoader
* reference as well).
*
* @return the default ClassLoader (never <code>null</code>)
* @see java.lang.Thread#getContextClassLoader()
*/
public static ClassLoader getDefaultClassLoader() {
ClassLoader cl = null;
try {
cl = Thread.currentThread().getContextClassLoader();
} catch (Throwable ex) {
// Cannot access thread context ClassLoader - falling back to system class loader...
}
if (cl == null) {
// No thread context class loader -> use class loader of this class.
cl = Classes.class.getClassLoader();
}
return cl;
}
/**
* Determine the name of the package of the given class:
* e.g. "java.lang" for the <code>java.lang.String</code> class.
*
* @param clazz the class
* @return the package name, or the empty String if the class
* is defined in the default package
*/
public static String getPackageName(Class<?> clazz) {
String className = clazz.getName();
int lastDotIndex = className.lastIndexOf(PACKAGE_SEPARATOR);
return (lastDotIndex != -1 ? className.substring(0, lastDotIndex) : "");
}
public static String getPackageNameNoDomain(Class<?> clazz) {
String fullPackage = getPackageName(clazz);
if (fullPackage.startsWith("org.") || fullPackage.startsWith("com.") || fullPackage.startsWith("net.")) {
return fullPackage.substring(4);
}
return fullPackage;
}
public static boolean isInnerClass(Class<?> clazz) {
return !Modifier.isStatic(clazz.getModifiers())
&& clazz.getEnclosingClass() != null;
}
public static boolean isConcrete(Class<?> clazz) {
int modifiers = clazz.getModifiers();
return !clazz.isInterface() && !Modifier.isAbstract(modifiers);
}
public static <T> Class<? extends T> loadClass(ClassLoader classLoader, String className, String prefixPackage, String suffixClassName) {
return loadClass(classLoader, className, prefixPackage, suffixClassName, null);
}
@SuppressWarnings({"unchecked"})
public static <T> Class<? extends T> loadClass(ClassLoader classLoader, String className, String prefixPackage, String suffixClassName, String errorPrefix) {
Throwable t = null;
String[] classNames = classNames(className, prefixPackage, suffixClassName);
for (String fullClassName : classNames) {
try {
return (Class<? extends T>) classLoader.loadClass(fullClassName);
} catch (ClassNotFoundException ex) {
t = ex;
} catch (NoClassDefFoundError er) {
t = er;
}
}
if (errorPrefix == null) {
errorPrefix = "failed to load class";
}
throw new NoClassSettingsException(errorPrefix + " with value [" + className + "]; tried " + Arrays.toString(classNames), t);
}
private static String[] classNames(String className, String prefixPackage, String suffixClassName) {
String prefixValue = prefixPackage;
int packageSeparator = className.lastIndexOf('.');
String classNameValue = className;
// If class name contains package use it as package prefix instead of specified default one
if (packageSeparator > 0) {
prefixValue = className.substring(0, packageSeparator + 1);
classNameValue = className.substring(packageSeparator + 1);
}
return new String[]{
className,
prefixValue + Strings.capitalize(toCamelCase(classNameValue)) + suffixClassName,
prefixValue + toCamelCase(classNameValue) + "." + Strings.capitalize(toCamelCase(classNameValue)) + suffixClassName,
prefixValue + toCamelCase(classNameValue).toLowerCase(Locale.ROOT) + "." + Strings.capitalize(toCamelCase(classNameValue)) + suffixClassName,
};
}
private Classes() {
}
} | 0true
| src_main_java_org_elasticsearch_common_Classes.java |
/**
 * Fork/join task backing the bulk "search" operations: applies
 * {@code searchFunction} to (key, value) pairs and completes as soon as any
 * invocation returns a non-null result, which is published through the shared
 * {@code result} reference.
 */
@SuppressWarnings("serial")
static final class SearchMappingsTask<K,V,U>
    extends BulkTask<K,V,U> {
    // Function probed against each mapping; a non-null return ends the search.
    final BiFun<? super K, ? super V, ? extends U> searchFunction;
    // Shared across all sibling subtasks; the first non-null answer wins via CAS.
    final AtomicReference<U> result;
    SearchMappingsTask
        (BulkTask<K,V,?> p, int b, int i, int f, Node<K,V>[] t,
         BiFun<? super K, ? super V, ? extends U> searchFunction,
         AtomicReference<U> result) {
        super(p, b, i, f, t);
        this.searchFunction = searchFunction; this.result = result;
    }
    public final U getRawResult() { return result.get(); }
    public final void compute() {
        final BiFun<? super K, ? super V, ? extends U> searchFunction;
        final AtomicReference<U> result;
        if ((searchFunction = this.searchFunction) != null &&
            (result = this.result) != null) {
            // Repeatedly split the remaining range in half, forking the upper
            // half, until the batch budget is exhausted or an answer exists.
            for (int i = baseIndex, f, h; batch > 0 &&
                     (h = ((f = baseLimit) + i) >>> 1) > i;) {
                if (result.get() != null)
                    return;
                addToPendingCount(1);
                new SearchMappingsTask<K,V,U>
                    (this, batch >>>= 1, baseLimit = h, f, tab,
                     searchFunction, result).fork();
            }
            // Scan this task's own share until someone (possibly us) finds a match.
            while (result.get() == null) {
                U u;
                Node<K,V> p;
                if ((p = advance()) == null) {
                    // Local range exhausted without a match.
                    propagateCompletion();
                    break;
                }
                if ((u = searchFunction.apply(p.key, p.val)) != null) {
                    // The first successful CAS publishes the answer and
                    // completes the whole task tree early.
                    if (result.compareAndSet(null, u))
                        quietlyCompleteRoot();
                    break;
                }
            }
        }
    }
}
| src_main_java_jsr166e_ConcurrentHashMapV8.java |
/**
 * JPA entity linking a product (or category) to an up-sale product
 * recommendation, with an optional promotion message and a display sequence.
 */
@Entity
@Inheritance(strategy = InheritanceType.JOINED)
@Table(name="BLC_PRODUCT_UP_SALE")
@Cache(usage = CacheConcurrencyStrategy.READ_WRITE, region="blStandardElements")
public class UpSaleProductImpl implements RelatedProduct {

    private static final long serialVersionUID = 1L;

    // Table-generated primary key.
    @Id
    @GeneratedValue(generator= "UpSaleProductId")
    @GenericGenerator(
        name="UpSaleProductId",
        strategy="org.broadleafcommerce.common.persistence.IdOverrideTableGenerator",
        parameters = {
            @Parameter(name="segment_value", value="UpSaleProductImpl"),
            @Parameter(name="entity_name", value="org.broadleafcommerce.core.catalog.domain.UpSaleProductImpl")
        }
    )
    @Column(name = "UP_SALE_PRODUCT_ID")
    private Long id;

    // Marketing copy shown alongside the up-sale suggestion.
    @Column(name = "PROMOTION_MESSAGE")
    @AdminPresentation(friendlyName = "UpSaleProductImpl_Upsale_Promotion_Message", largeEntry=true)
    private String promotionMessage;

    // Ordering hint among sibling up-sale entries; hidden from the admin UI.
    @Column(name = "SEQUENCE")
    @AdminPresentation(visibility = VisibilityEnum.HIDDEN_ALL)
    private Long sequence;

    // Owning product for which the up-sale is suggested.
    @ManyToOne(targetEntity = ProductImpl.class)
    @JoinColumn(name = "PRODUCT_ID")
    @Index(name="UPSALE_PRODUCT_INDEX", columnNames={"PRODUCT_ID"})
    private Product product;

    // Owning category, for category-scoped up-sales.
    // NOTE(review): this field is protected while its siblings are private - confirm intent.
    @ManyToOne(targetEntity = CategoryImpl.class)
    @JoinColumn(name = "CATEGORY_ID")
    @Index(name="UPSALE_CATEGORY_INDEX", columnNames={"CATEGORY_ID"})
    protected Category category;

    // The recommended (up-sale) product.
    // NOTE(review): eagerly initialized to a fresh ProductImpl, so
    // getRelatedProduct() is non-null even before any row is joined - confirm
    // this is intended rather than leaving the field null.
    @ManyToOne(targetEntity = ProductImpl.class)
    @JoinColumn(name = "RELATED_SALE_PRODUCT_ID", referencedColumnName = "PRODUCT_ID")
    @Index(name="UPSALE_RELATED_INDEX", columnNames={"RELATED_SALE_PRODUCT_ID"})
    private Product relatedSaleProduct = new ProductImpl();

    @Override
    public Long getId() {
        return id;
    }

    @Override
    public void setId(Long id) {
        this.id = id;
    }

    @Override
    public String getPromotionMessage() {
        return promotionMessage;
    }

    @Override
    public void setPromotionMessage(String promotionMessage) {
        this.promotionMessage = promotionMessage;
    }

    @Override
    public Long getSequence() {
        return sequence;
    }

    @Override
    public void setSequence(Long sequence) {
        this.sequence = sequence;
    }

    @Override
    public Product getProduct() {
        return product;
    }

    @Override
    public void setProduct(Product product) {
        this.product = product;
    }

    @Override
    public Category getCategory() {
        return category;
    }

    @Override
    public void setCategory(Category category) {
        this.category = category;
    }

    @Override
    public Product getRelatedProduct() {
        return relatedSaleProduct;
    }

    @Override
    public void setRelatedProduct(Product relatedSaleProduct) {
        this.relatedSaleProduct = relatedSaleProduct;
    }
}
| core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_catalog_domain_UpSaleProductImpl.java |
/**
 * Index definition for manual (non-class-bound) indexes whose keys are given
 * directly as typed values rather than derived from document fields. Single
 * key types index a plain value; multiple key types index an
 * {@link OCompositeKey}.
 */
public class OSimpleKeyIndexDefinition extends OAbstractIndexDefinition {

    // Declared type of each key component, in positional order.
    private OType[] keyTypes;

    public OSimpleKeyIndexDefinition(final OType... keyTypes) {
        this.keyTypes = keyTypes;
    }

    /** No-arg constructor used by the fromStream() deserialization path. */
    public OSimpleKeyIndexDefinition() {
    }

    /** No backing document fields: keys are supplied explicitly. */
    public List<String> getFields() {
        return Collections.emptyList();
    }

    public List<String> getFieldsToIndex() {
        return Collections.emptyList();
    }

    /** Not bound to a class. */
    public String getClassName() {
        return null;
    }

    public Comparable<?> createValue(final List<?> params) {
        return createValue(params != null ? params.toArray() : null);
    }

    /**
     * Converts the supplied values to the declared key types and wraps them in
     * a composite key when more than one type is declared. Returns null for
     * empty input or when any component converts to null.
     *
     * NOTE(review): passing more params than declared keyTypes raises an
     * ArrayIndexOutOfBoundsException - confirm callers guard against this.
     */
    public Comparable<?> createValue(final Object... params) {
        if (params == null || params.length == 0)
            return null;

        if (keyTypes.length == 1)
            return (Comparable<?>) OType.convert(params[0], keyTypes[0].getDefaultJavaType());

        final OCompositeKey compositeKey = new OCompositeKey();
        for (int i = 0; i < params.length; ++i) {
            final Comparable<?> paramValue = (Comparable<?>) OType.convert(params[i], keyTypes[i].getDefaultJavaType());
            // A single unconvertible component invalidates the whole key.
            if (paramValue == null)
                return null;
            compositeKey.addKey(paramValue);
        }
        return compositeKey;
    }

    public int getParamCount() {
        return keyTypes.length;
    }

    public OType[] getTypes() {
        return keyTypes;
    }

    @Override
    public ODocument toStream() {
        // UNMARSHALLING status suppresses dirty tracking while populating fields.
        document.setInternalStatus(ORecordElement.STATUS.UNMARSHALLING);
        try {
            final List<String> keyTypeNames = new ArrayList<String>(keyTypes.length);
            for (final OType keyType : keyTypes)
                keyTypeNames.add(keyType.toString());
            document.field("keyTypes", keyTypeNames, OType.EMBEDDEDLIST);
            document.field("collate", collate.getName());
            return document;
        } finally {
            document.setInternalStatus(ORecordElement.STATUS.LOADED);
        }
    }

    @Override
    protected void fromStream() {
        // Mirrors toStream(): rebuild keyTypes and collate from the document.
        final List<String> keyTypeNames = document.field("keyTypes");
        keyTypes = new OType[keyTypeNames.size()];

        int i = 0;
        for (final String keyTypeName : keyTypeNames) {
            keyTypes[i] = OType.valueOf(keyTypeName);
            i++;
        }

        setCollate((String) document.field("collate"));
    }

    /** Not supported: keys are never derived from a document. */
    public Object getDocumentValueToIndex(final ODocument iDocument) {
        throw new OIndexException("This method is not supported in given index definition.");
    }

    // NOTE(review): equals() compares only keyTypes, while hashCode() folds in
    // super.hashCode(); if the superclass state (e.g. collate) contributes to
    // the hash, equal objects may have different hashes - confirm.
    @Override
    public boolean equals(final Object o) {
        if (this == o)
            return true;
        if (o == null || getClass() != o.getClass())
            return false;

        final OSimpleKeyIndexDefinition that = (OSimpleKeyIndexDefinition) o;
        if (!Arrays.equals(keyTypes, that.keyTypes))
            return false;

        return true;
    }

    @Override
    public int hashCode() {
        int result = super.hashCode();
        result = 31 * result + (keyTypes != null ? Arrays.hashCode(keyTypes) : 0);
        return result;
    }

    @Override
    public String toString() {
        return "OSimpleKeyIndexDefinition{" + "keyTypes=" + (keyTypes == null ? null : Arrays.asList(keyTypes)) + '}';
    }

    /**
     * {@inheritDoc}
     *
     * @param indexName
     * @param indexType
     */
    public String toCreateIndexDDL(final String indexName, final String indexType) {
        final StringBuilder ddl = new StringBuilder("create index ");
        ddl.append(indexName).append(' ').append(indexType).append(' ');

        if (keyTypes != null && keyTypes.length > 0) {
            ddl.append(keyTypes[0].toString());
            for (int i = 1; i < keyTypes.length; i++) {
                ddl.append(", ").append(keyTypes[i].toString());
            }
        }

        return ddl.toString();
    }

    /** Manual index: never maintained automatically from class fields. */
    @Override
    public boolean isAutomatic() {
        return false;
    }
}
| core_src_main_java_com_orientechnologies_orient_core_index_OSimpleKeyIndexDefinition.java |
1,785 | public abstract class ShapeBuilder implements ToXContent {
protected static final ESLogger LOGGER = ESLoggerFactory.getLogger(ShapeBuilder.class.getName());
private static final boolean DEBUG;
static {
// if asserts are enabled we run the debug statements even if they are not logged
// to prevent exceptions only present if debug enabled
boolean debug = false;
assert debug = true;
DEBUG = debug;
}
public static final double DATELINE = 180;
public static final GeometryFactory FACTORY = new GeometryFactory();
public static final JtsSpatialContext SPATIAL_CONTEXT = new JtsSpatialContext(true);
protected final boolean wrapdateline = true;
protected ShapeBuilder() {
}
protected static Coordinate coordinate(double longitude, double latitude) {
return new Coordinate(longitude, latitude);
}
/**
* Create a new point
*
* @param longitude longitude of the point
* @param latitude latitude of the point
* @return a new {@link PointBuilder}
*/
public static PointBuilder newPoint(double longitude, double latitude) {
return newPoint(new Coordinate(longitude, latitude));
}
/**
* Create a new {@link PointBuilder} from a {@link Coordinate}
* @param coordinate coordinate defining the position of the point
* @return a new {@link PointBuilder}
*/
public static PointBuilder newPoint(Coordinate coordinate) {
return new PointBuilder().coordinate(coordinate);
}
/**
* Create a new set of points
* @return new {@link MultiPointBuilder}
*/
public static MultiPointBuilder newMultiPoint() {
return new MultiPointBuilder();
}
/**
* Create a new lineString
* @return a new {@link LineStringBuilder}
*/
public static LineStringBuilder newLineString() {
return new LineStringBuilder();
}
/**
* Create a new Collection of lineStrings
* @return a new {@link MultiLineStringBuilder}
*/
public static MultiLineStringBuilder newMultiLinestring() {
return new MultiLineStringBuilder();
}
/**
* Create a new Polygon
* @return a new {@link PointBuilder}
*/
public static PolygonBuilder newPolygon() {
return new PolygonBuilder();
}
/**
* Create a new Collection of polygons
* @return a new {@link MultiPolygonBuilder}
*/
public static MultiPolygonBuilder newMultiPolygon() {
return new MultiPolygonBuilder();
}
/**
* create a new Circle
* @return a new {@link CircleBuilder}
*/
public static CircleBuilder newCircleBuilder() {
return new CircleBuilder();
}
/**
* create a new rectangle
* @return a new {@link EnvelopeBuilder}
*/
public static EnvelopeBuilder newEnvelope() {
return new EnvelopeBuilder();
}
@Override
public String toString() {
try {
XContentBuilder xcontent = JsonXContent.contentBuilder();
return toXContent(xcontent, EMPTY_PARAMS).prettyPrint().string();
} catch (IOException e) {
return super.toString();
}
}
/**
* Create a new Shape from this builder. Since calling this method could change the
* defined shape. (by inserting new coordinates or change the position of points)
* the builder looses its validity. So this method should only be called once on a builder
* @return new {@link Shape} defined by the builder
*/
public abstract Shape build();
/**
* Recursive method which parses the arrays of coordinates used to define
* Shapes
*
* @param parser
* Parser that will be read from
* @return CoordinateNode representing the start of the coordinate tree
* @throws IOException
* Thrown if an error occurs while reading from the
* XContentParser
*/
private static CoordinateNode parseCoordinates(XContentParser parser) throws IOException {
    XContentParser.Token token = parser.nextToken();

    // Base case: a bare [lon, lat] pair becomes a leaf node. Longitude is
    // read first, matching the GeoJSON coordinate order.
    if (token != XContentParser.Token.START_ARRAY) {
        double lon = parser.doubleValue();
        token = parser.nextToken();
        double lat = parser.doubleValue();
        token = parser.nextToken();
        return new CoordinateNode(new Coordinate(lon, lat));
    }

    // Recursive case: an array of arrays becomes an interior node whose
    // children are parsed one sub-array at a time.
    List<CoordinateNode> nodes = new ArrayList<CoordinateNode>();
    while (token != XContentParser.Token.END_ARRAY) {
        nodes.add(parseCoordinates(parser));
        token = parser.nextToken();
    }

    return new CoordinateNode(nodes);
}
/**
* Create a new {@link ShapeBuilder} from {@link XContent}
* @param parser parser to read the GeoShape from
* @return {@link ShapeBuilder} read from the parser or null
* if the parsers current token has been <code><null</code>
* @throws IOException if the input could not be read
*/
public static ShapeBuilder parse(XContentParser parser) throws IOException {
return GeoShapeType.parse(parser);
}
protected static XContentBuilder toXContent(XContentBuilder builder, Coordinate coordinate) throws IOException {
return builder.startArray().value(coordinate.x).value(coordinate.y).endArray();
}
protected static Coordinate shift(Coordinate coordinate, double dateline) {
if (dateline == 0) {
return coordinate;
} else {
return new Coordinate(-2 * dateline + coordinate.x, coordinate.y);
}
}
/**
* get the shapes type
* @return type of the shape
*/
public abstract GeoShapeType type();
/**
* Calculate the intersection of a line segment and a vertical dateline.
*
* @param p1
* start-point of the line segment
* @param p2
* end-point of the line segment
* @param dateline
* x-coordinate of the vertical dateline
* @return position of the intersection in the open range (0..1] if the line
* segment intersects with the line segment. Otherwise this method
* returns {@link Double#NaN}
*/
protected static final double intersection(Coordinate p1, Coordinate p2, double dateline) {
    if (p1.x == p2.x) {
        // Segment is parallel to the (vertical) dateline: no single intersection.
        return Double.NaN;
    } else {
        // Interpolation parameter t in the segment p1->p2 where x == dateline.
        final double t = (dateline - p1.x) / (p2.x - p1.x);
        if (t > 1 || t <= 0) {
            // Intersection lies outside the half-open range (0..1] covered
            // by the segment (see method contract above).
            return Double.NaN;
        } else {
            return t;
        }
    }
}
/**
* Calculate all intersections of line segments and a vertical line. The
* Array of edges will be ordered asc by the y-coordinate of the
* intersections of edges.
*
* @param dateline
* x-coordinate of the dateline
* @param edges
* set of edges that may intersect with the dateline
* @return number of intersecting edges
*/
protected static int intersections(double dateline, Edge[] edges) {
    int numIntersections = 0;
    assert !Double.isNaN(dateline);
    for (int i = 0; i < edges.length; i++) {
        Coordinate p1 = edges[i].coordinate;
        Coordinate p2 = edges[i].next.coordinate;
        assert !Double.isNaN(p2.x) && !Double.isNaN(p1.x);
        // Reset any intersection left over from a previous pass.
        edges[i].intersect = IntersectionOrder.SENTINEL;

        double position = intersection(p1, p2, dateline);
        if (!Double.isNaN(position)) {
            if (position == 1) {
                // Intersection exactly at the segment end point: skip the two
                // degenerate cases below so the crossing is not double-counted.
                if (Double.compare(p1.x, dateline) == Double.compare(edges[i].next.next.coordinate.x, dateline)) {
                    // Ignore the ear
                    continue;
                } else if (p2.x == dateline) {
                    // Ignore Linesegment on dateline
                    continue;
                }
            }
            edges[i].intersection(position);
            numIntersections++;
        }
    }
    // Sort by the y-coordinate of each edge's intersection; edges without an
    // intersection carry the +infinity SENTINEL and therefore sort last.
    Arrays.sort(edges, INTERSECTION_ORDER);
    return numIntersections;
}
/**
* Node used to represent a tree of coordinates.
* <p/>
* Can either be a leaf node consisting of a Coordinate, or a parent with
* children
*/
protected static class CoordinateNode implements ToXContent {
protected final Coordinate coordinate;
protected final List<CoordinateNode> children;
/**
* Creates a new leaf CoordinateNode
*
* @param coordinate
* Coordinate for the Node
*/
protected CoordinateNode(Coordinate coordinate) {
this.coordinate = coordinate;
this.children = null;
}
/**
* Creates a new parent CoordinateNode
*
* @param children
* Children of the Node
*/
protected CoordinateNode(List<CoordinateNode> children) {
this.children = children;
this.coordinate = null;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
if (children == null) {
builder.startArray().value(coordinate.x).value(coordinate.y).endArray();
} else {
builder.startArray();
for (CoordinateNode child : children) {
child.toXContent(builder, params);
}
builder.endArray();
}
return builder;
}
}
/**
* This helper class implements a linked list for {@link Coordinate}. It contains
* fields for a dateline intersection and component id
*/
protected static final class Edge {
Coordinate coordinate; // coordinate of the start point
Edge next; // next segment
Coordinate intersect; // potential intersection with dateline
int component = -1; // id of the component this edge belongs to
protected Edge(Coordinate coordinate, Edge next, Coordinate intersection) {
this.coordinate = coordinate;
this.next = next;
this.intersect = intersection;
if (next != null) {
this.component = next.component;
}
}
protected Edge(Coordinate coordinate, Edge next) {
this(coordinate, next, IntersectionOrder.SENTINEL);
}
private static final int top(Coordinate[] points, int offset, int length) {
int top = 0; // we start at 1 here since top points to 0
for (int i = 1; i < length; i++) {
if (points[offset + i].y < points[offset + top].y) {
top = i;
} else if (points[offset + i].y == points[offset + top].y) {
if (points[offset + i].x < points[offset + top].x) {
top = i;
}
}
}
return top;
}
/**
* Concatenate a set of points to a polygon
*
* @param component
* component id of the polygon
* @param direction
* direction of the ring
* @param points
* list of points to concatenate
* @param pointOffset
* index of the first point
* @param edges
* Array of edges to write the result to
* @param edgeOffset
* index of the first edge in the result
* @param length
* number of points to use
* @return the edges creates
*/
private static Edge[] concat(int component, boolean direction, Coordinate[] points, final int pointOffset, Edge[] edges, final int edgeOffset,
int length) {
assert edges.length >= length+edgeOffset;
assert points.length >= length+pointOffset;
edges[edgeOffset] = new Edge(points[pointOffset], null);
for (int i = 1; i < length; i++) {
if (direction) {
edges[edgeOffset + i] = new Edge(points[pointOffset + i], edges[edgeOffset + i - 1]);
edges[edgeOffset + i].component = component;
} else {
edges[edgeOffset + i - 1].next = edges[edgeOffset + i] = new Edge(points[pointOffset + i], null);
edges[edgeOffset + i - 1].component = component;
}
}
if (direction) {
edges[edgeOffset].next = edges[edgeOffset + length - 1];
edges[edgeOffset].component = component;
} else {
edges[edgeOffset + length - 1].next = edges[edgeOffset];
edges[edgeOffset + length - 1].component = component;
}
return edges;
}
/**
* Create a connected list of a list of coordinates
*
* @param points
* array of point
* @param offset
* index of the first point
* @param length
* number of points
* @return Array of edges
*/
protected static Edge[] ring(int component, boolean direction, Coordinate[] points, int offset, Edge[] edges, int toffset,
int length) {
// calculate the direction of the points:
// find the point a the top of the set and check its
// neighbors orientation. So direction is equivalent
// to clockwise/counterclockwise
final int top = top(points, offset, length);
final int prev = (offset + ((top + length - 1) % length));
final int next = (offset + ((top + 1) % length));
final boolean orientation = points[offset + prev].x > points[offset + next].x;
return concat(component, direction ^ orientation, points, offset, edges, toffset, length);
}
/**
 * Set the intersection of this line segment to the given position
 *
 * @param position
 *            position of the intersection [0..1]
 * @return the {@link Coordinate} of the intersection
 */
protected Coordinate intersection(double position) {
    // interpolate between this edge's start point and the next edge's start
    // point, caching the result in the intersect field before returning it
    return intersect = position(coordinate, next.coordinate, position);
}
/**
 * Linearly interpolate between two coordinates.
 *
 * @param p1 start coordinate, returned as-is when {@code position == 0}
 * @param p2 end coordinate, returned as-is when {@code position == 1}
 * @param position interpolation parameter in [0..1]
 * @return the interpolated {@link Coordinate}
 */
public static Coordinate position(Coordinate p1, Coordinate p2, double position) {
    if (position == 0) {
        return p1;
    }
    if (position == 1) {
        return p2;
    }
    // general case: p1 + position * (p2 - p1), component-wise
    final double dx = p2.x - p1.x;
    final double dy = p2.y - p1.y;
    return new Coordinate(p1.x + position * dx, p1.y + position * dy);
}
/** Human-readable representation of this edge, for debugging. */
@Override
public String toString() {
    StringBuilder buffer = new StringBuilder("Edge[Component=");
    buffer.append(component);
    buffer.append("; start=").append(coordinate);
    buffer.append(" ").append("; intersection=").append(intersect);
    buffer.append("]");
    return buffer.toString();
}
}
// shared comparator instance used to order edges by their current intersection
protected static final IntersectionOrder INTERSECTION_ORDER = new IntersectionOrder();

/**
 * Orders edges by the y coordinate of their current {@code intersect} point.
 * Callers are expected to have set {@code intersect} on every compared edge
 * first; a null intersect would cause an NPE here -- TODO confirm with callers.
 */
private static final class IntersectionOrder implements Comparator<Edge> {
    // NOTE(review): SENTINEL is not referenced anywhere in this view; it may be
    // used from the enclosing class elsewhere in the file -- verify before removing.
    private static final Coordinate SENTINEL = new Coordinate(Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY);
    @Override
    public int compare(Edge o1, Edge o2) {
        return Double.compare(o1.intersect.y, o2.intersect.y);
    }
}
// JSON field names of the GeoJSON-style shape representation parsed below
public static final String FIELD_TYPE = "type";
public static final String FIELD_COORDINATES = "coordinates";

// true when debug output should be produced: either the logger is at debug
// level or the static DEBUG flag (defined elsewhere in this file) is set
protected static final boolean debugEnabled() {
    return LOGGER.isDebugEnabled() || DEBUG;
}
/**
* Enumeration that lists all {@link GeoShapeType}s that can be handled
*/
public static enum GeoShapeType {
    POINT("point"),
    MULTIPOINT("multipoint"),
    LINESTRING("linestring"),
    MULTILINESTRING("multilinestring"),
    POLYGON("polygon"),
    MULTIPOLYGON("multipolygon"),
    ENVELOPE("envelope"),
    CIRCLE("circle");

    // lowercase type name used for case-insensitive lookup in forName()
    protected final String shapename;

    private GeoShapeType(String shapename) {
        this.shapename = shapename;
    }

    /**
     * Resolve a {@link GeoShapeType} from its (case-insensitive) name.
     *
     * @param geoshapename shape type name, e.g. "polygon"
     * @return the matching type
     * @throws ElasticsearchIllegalArgumentException if the name is unknown
     */
    public static GeoShapeType forName(String geoshapename) {
        String typename = geoshapename.toLowerCase(Locale.ROOT);
        for (GeoShapeType type : values()) {
            if(type.shapename.equals(typename)) {
                return type;
            }
        }
        throw new ElasticsearchIllegalArgumentException("unknown geo_shape ["+geoshapename+"]");
    }

    /**
     * Parse a shape object of the form {"type": ..., "coordinates": ...}
     * (plus an optional "radius" for circles) into a {@link ShapeBuilder}.
     * The parser must be positioned on the START_OBJECT token.
     *
     * @param parser the content parser to read from
     * @return the parsed builder, or null if the current token is VALUE_NULL
     * @throws ElasticsearchParseException if type/coordinates are missing or
     *         a radius is given for a non-circle shape
     */
    public static ShapeBuilder parse(XContentParser parser) throws IOException {
        if (parser.currentToken() == XContentParser.Token.VALUE_NULL) {
            return null;
        } else if (parser.currentToken() != XContentParser.Token.START_OBJECT) {
            throw new ElasticsearchParseException("Shape must be an object consisting of type and coordinates");
        }
        GeoShapeType shapeType = null;
        Distance radius = null;
        CoordinateNode node = null;
        XContentParser.Token token;
        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
            if (token == XContentParser.Token.FIELD_NAME) {
                String fieldName = parser.currentName();
                if (FIELD_TYPE.equals(fieldName)) {
                    parser.nextToken();
                    shapeType = GeoShapeType.forName(parser.text());
                } else if (FIELD_COORDINATES.equals(fieldName)) {
                    parser.nextToken();
                    node = parseCoordinates(parser);
                } else if (CircleBuilder.FIELD_RADIUS.equals(fieldName)) {
                    parser.nextToken();
                    radius = Distance.parseDistance(parser.text());
                } else {
                    // unknown field: skip its value (including nested objects)
                    parser.nextToken();
                    parser.skipChildren();
                }
            }
        }
        if (shapeType == null) {
            throw new ElasticsearchParseException("Shape type not included");
        } else if (node == null) {
            throw new ElasticsearchParseException("Coordinates not included");
        } else if (radius != null && GeoShapeType.CIRCLE != shapeType) {
            // a radius only makes sense for circles
            throw new ElasticsearchParseException("Field [" + CircleBuilder.FIELD_RADIUS + "] is supported for [" + CircleBuilder.TYPE
                    + "] only");
        }
        switch (shapeType) {
            case POINT: return parsePoint(node);
            case MULTIPOINT: return parseMultiPoint(node);
            case LINESTRING: return parseLineString(node);
            case MULTILINESTRING: return parseMultiLine(node);
            case POLYGON: return parsePolygon(node);
            case MULTIPOLYGON: return parseMultiPolygon(node);
            case CIRCLE: return parseCircle(node, radius);
            case ENVELOPE: return parseEnvelope(node);
            default:
                // defensive: unreachable unless a new enum constant is added
                // without a corresponding case above
                throw new ElasticsearchParseException("Shape type [" + shapeType + "] not included");
        }
    }

    /** Builds a point from a single coordinate. */
    protected static PointBuilder parsePoint(CoordinateNode node) {
        return newPoint(node.coordinate);
    }

    /** Builds a circle from a center coordinate and a radius. */
    protected static CircleBuilder parseCircle(CoordinateNode coordinates, Distance radius) {
        return newCircleBuilder().center(coordinates.coordinate).radius(radius);
    }

    /**
     * Builds an envelope from exactly two coordinates: top-left then
     * bottom-right. NOTE(review): fewer than two children will raise an
     * IndexOutOfBoundsException rather than a parse exception.
     */
    protected static EnvelopeBuilder parseEnvelope(CoordinateNode coordinates) {
        return newEnvelope().topLeft(coordinates.children.get(0).coordinate).bottomRight(coordinates.children.get(1).coordinate);
    }

    /** Builds a multi-point from each child coordinate. */
    protected static MultiPointBuilder parseMultiPoint(CoordinateNode coordinates) {
        MultiPointBuilder points = new MultiPointBuilder();
        for (CoordinateNode node : coordinates.children) {
            points.point(node.coordinate);
        }
        return points;
    }

    /** Builds a line string from each child coordinate, in order. */
    protected static LineStringBuilder parseLineString(CoordinateNode coordinates) {
        LineStringBuilder line = newLineString();
        for (CoordinateNode node : coordinates.children) {
            line.point(node.coordinate);
        }
        return line;
    }

    /** Builds a multi-line from one line string per child node. */
    protected static MultiLineStringBuilder parseMultiLine(CoordinateNode coordinates) {
        MultiLineStringBuilder multiline = newMultiLinestring();
        for (CoordinateNode node : coordinates.children) {
            multiline.linestring(parseLineString(node));
        }
        return multiline;
    }

    /**
     * Builds a polygon: the first child ring is the shell, every further
     * child ring is a hole.
     */
    protected static PolygonBuilder parsePolygon(CoordinateNode coordinates) {
        LineStringBuilder shell = parseLineString(coordinates.children.get(0));
        PolygonBuilder polygon = new PolygonBuilder(shell.points);
        for (int i = 1; i < coordinates.children.size(); i++) {
            polygon.hole(parseLineString(coordinates.children.get(i)));
        }
        return polygon;
    }

    /** Builds a multi-polygon from one polygon per child node. */
    protected static MultiPolygonBuilder parseMultiPolygon(CoordinateNode coordinates) {
        MultiPolygonBuilder polygons = newMultiPolygon();
        for (CoordinateNode node : coordinates.children) {
            polygons.polygon(parsePolygon(node));
        }
        return polygons;
    }
}
} | 1no label
| src_main_java_org_elasticsearch_common_geo_builders_ShapeBuilder.java |
3,257 | public class ListPermission extends InstancePermission {
private static final int ADD = 0x4;
private static final int READ = 0x8;
private static final int REMOVE = 0x16;
private static final int LISTEN = 0x32;
private static final int ALL = ADD | REMOVE | READ | CREATE | DESTROY | LISTEN;
public ListPermission(String name, String... actions) {
super(name, actions);
}
@Override
protected int initMask(String[] actions) {
int mask = NONE;
for (String action : actions) {
if (ActionConstants.ACTION_ALL.equals(action)) {
return ALL;
}
if (ActionConstants.ACTION_CREATE.equals(action)) {
mask |= CREATE;
} else if (ActionConstants.ACTION_ADD.equals(action)) {
mask |= ADD;
} else if (ActionConstants.ACTION_REMOVE.equals(action)) {
mask |= REMOVE;
} else if (ActionConstants.ACTION_READ.equals(action)) {
mask |= READ;
} else if (ActionConstants.ACTION_DESTROY.equals(action)) {
mask |= DESTROY;
} else if (ActionConstants.ACTION_LISTEN.equals(action)) {
mask |= LISTEN;
}
}
return mask;
}
} | 1no label
| hazelcast_src_main_java_com_hazelcast_security_permission_ListPermission.java |
478 | public class ProtoChannelBeanPostProcessor implements BeanPostProcessor, Ordered {
Log LOG = LogFactory.getLog(ProtoChannelBeanPostProcessor.class);
protected List<ChannelProcessor> channelProcessorOverrides;
@Override
public Object postProcessAfterInitialization(Object bean, String beanName) throws BeansException {
return bean;
}
@Override
public Object postProcessBeforeInitialization(Object bean, String beanName) throws BeansException {
if (bean instanceof ChannelDecisionManagerImpl) {
try {
ChannelDecisionManagerImpl manager = (ChannelDecisionManagerImpl) bean;
Field channelProcessors = manager.getClass().getDeclaredField("channelProcessors");
channelProcessors.setAccessible(true);
List<ChannelProcessor> list = (List<ChannelProcessor>) channelProcessors.get(manager);
list.clear();
manager.setChannelProcessors(channelProcessorOverrides);
LOG.info("Replacing the standard Spring Security channel processors with custom processors that look for a " +
"'X-Forwarded-Proto' request header. This allows Spring Security to sit behind a load balancer with SSL termination.");
} catch (Exception e) {
throw new RuntimeException(e);
}
}
return bean;
}
@Override
public int getOrder() {
return 9999;
}
/**
* @return the channelProcessors
*/
public List<ChannelProcessor> getChannelProcessorOverrides() {
return channelProcessorOverrides;
}
/**
* @param channelProcessors the channelProcessors to set
*/
public void setChannelProcessorOverrides(List<ChannelProcessor> channelProcessorOverrides) {
this.channelProcessorOverrides = channelProcessorOverrides;
}
} | 0true
| common_src_main_java_org_broadleafcommerce_common_security_channel_ProtoChannelBeanPostProcessor.java |
/**
 * DTO describing one result row of a {@link SearchFacet}: either a discrete
 * value or a [min:max] range, plus a quantity and an active flag.
 */
public class SearchFacetResultDTO {

    // the facet this result row belongs to
    protected SearchFacet facet;
    // discrete facet value; null when this row represents a range
    protected String value;
    // lower bound of the range (used when value is null)
    protected BigDecimal minValue;
    // upper bound of the range (used when value is null)
    protected BigDecimal maxValue;
    // presumably the number of results matching this facet value -- TODO confirm with callers
    protected Integer quantity;
    // whether this facet value is currently selected/applied -- TODO confirm with callers
    protected boolean active;

    public SearchFacet getFacet() {
        return facet;
    }
    public void setFacet(SearchFacet facet) {
        this.facet = facet;
    }
    public String getValue() {
        return value;
    }
    public void setValue(String value) {
        this.value = value;
    }
    public BigDecimal getMinValue() {
        return minValue;
    }
    public void setMinValue(BigDecimal minValue) {
        this.minValue = minValue;
    }
    public BigDecimal getMaxValue() {
        return maxValue;
    }
    public void setMaxValue(BigDecimal maxValue) {
        this.maxValue = maxValue;
    }
    public Integer getQuantity() {
        return quantity;
    }
    public void setQuantity(Integer quantity) {
        this.quantity = quantity;
    }
    public boolean isActive() {
        return active;
    }
    public void setActive(boolean active) {
        this.active = active;
    }

    /**
     * Returns the discrete value if present, otherwise a synthetic
     * {@code "range[min:max]"} key built from the bounds.
     */
    public String getValueKey() {
        String value = getValue();
        if (value == null) {
            value = "range[" + getMinValue() + ":" + getMaxValue() + "]";
        }
        return value;
    }
}
| core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_search_domain_SearchFacetResultDTO.java |
/**
 * {@link TitanException} signaling an error in the graph database
 * configuration.
 */
public class TitanConfigurationException extends TitanException {

    private static final long serialVersionUID = 4056436257763972423L;

    /**
     * @param msg Exception message
     */
    public TitanConfigurationException(String msg) {
        super(msg);
    }

    /**
     * @param msg Exception message
     * @param cause Cause of the exception
     */
    public TitanConfigurationException(String msg, Throwable cause) {
        super(msg, cause);
    }

    /**
     * Constructs an exception with a generic message
     *
     * @param cause Cause of the exception
     */
    public TitanConfigurationException(Throwable cause) {
        this("Exception in graph database configuration", cause);
    }
}
| titan-core_src_main_java_com_thinkaurelius_titan_core_TitanConfigurationException.java |
// Wrap the consumer so only the UID field is routed through the wrapping
// (bloom-filter-backed) consumer; every other field goes straight to the
// delegate. Closing closes the wrapping consumer (which presumably closes
// its delegate -- confirm against the consumer implementation).
return new FieldsConsumer() {
    @Override
    public void close() throws IOException {
        fieldsConsumer.close();
    }
    @Override
    public TermsConsumer addField(FieldInfo field) throws IOException {
        if (UidFieldMapper.NAME.equals(field.name)) {
            // only go through bloom for the UID field
            return fieldsConsumer.addField(field);
        }
        return fieldsConsumer.getDelegate().addField(field);
    }
};
| src_main_java_org_elasticsearch_index_codec_postingsformat_Elasticsearch090PostingsFormat.java |
/**
 * Service facade for catalog entities: products, categories, SKUs, product
 * options and bundles. Provides CRUD plus a variety of finder methods, many
 * with paginated (limit/offset) variants.
 */
public interface CatalogService {

    public Product saveProduct(Product product);

    public Product findProductById(Long productId);

    public List<Product> findProductsByName(String searchName);

    /**
     * Find a subset of {@code Product} instances whose name starts with
     * or is equal to the passed in search parameter.
     *
     * @param searchName
     * @param limit the maximum number of results
     * @param offset the starting point in the record set
     * @return the list of product instances that fit the search criteria
     */
    public List<Product> findProductsByName(String searchName, int limit, int offset);

    public List<Product> findActiveProductsByCategory(Category category);

    /**
     * @deprecated Use findActiveProductsByCategory
     *
     * @param category
     * @param currentDate
     * @return
     */
    public List<Product> findActiveProductsByCategory(Category category, Date currentDate);

    /**
     * Given a category and a ProductSearchCriteria, returns the appropriate matching products
     *
     * @param category
     * @param searchCriteria
     * @return the matching products
     */
    public List<Product> findFilteredActiveProductsByCategory(Category category, ProductSearchCriteria searchCriteria);

    /**
     * @deprecated Use {@link #findFilteredActiveProductsByCategory(Category, ProductSearchCriteria)}
     *
     * @param category
     * @param currentDate
     * @param searchCriteria
     * @return
     */
    public List<Product> findFilteredActiveProductsByCategory(Category category, Date currentDate, ProductSearchCriteria searchCriteria);

    /**
     * Given a search query and a ProductSearchCriteria, returns the appropriate matching products
     *
     * @param query
     * @param searchCriteria
     * @return the matching products
     */
    public List<Product> findFilteredActiveProductsByQuery(String query, ProductSearchCriteria searchCriteria);

    /**
     * @deprecated Use {@link #findFilteredActiveProductsByQuery(String, ProductSearchCriteria)}
     */
    public List<Product> findFilteredActiveProductsByQuery(String query, Date currentDate, ProductSearchCriteria searchCriteria);

    /**
     * Same as {@link #findActiveProductsByCategory(Category)} but allowing for pagination.
     *
     * @param category
     * @param limit
     * @param offset
     * @return
     */
    public List<Product> findActiveProductsByCategory(Category category, int limit, int offset);

    /**
     * @deprecated Use {@link #findActiveProductsByCategory(Category, int, int)}
     */
    public List<Product> findActiveProductsByCategory(Category category, Date currentDate, int limit, int offset);

    /**
     * Find all ProductBundles whose automatic attribute is set to true.
     *
     * Automatic product bundles are collections of products that can receive special
     * pricing. With automatic product bundles, if a customer adds all of the
     * components of the bundle individually to the cart, they will automatically get
     * assembled into a bundle.
     *
     * @return
     */
    public List<ProductBundle> findAutomaticProductBundles();

    public Category saveCategory(Category category);

    public void removeCategory(Category category);

    public Category findCategoryById(Long categoryId);

    /**
     * Retrieve a {@code Category} instance based on its name property.
     *
     * Broadleaf allows more than one category to have the same name. Calling
     * this method could produce an exception in such situations. Use
     * {@link #findCategoriesByName(String)} instead.
     *
     * @param categoryName the category name to search by
     * @return the Category instance matching the categoryName
     */
    @Deprecated
    public Category findCategoryByName(String categoryName);

    /**
     * Retrieve a list of {@code Category} instance based on the name
     * property.
     *
     * @param categoryName the category name to search by
     * @return the list of matching Category instances
     */
    public List<Category> findCategoriesByName(String categoryName);

    /**
     * Retrieve a list of {@code Category} instances based on the search criteria
     *
     * @param categoryName the name of the category to search by
     * @param limit the maximum number of results to return
     * @param offset the starting point of the records to return
     * @return a list of category instances that match the search criteria
     */
    public List<Category> findCategoriesByName(String categoryName, int limit, int offset);

    public List<Category> findAllCategories();

    public List<Category> findAllCategories(int limit, int offset);

    public List<Product> findAllProducts();

    public List<Product> findAllProducts(int limit, int offset);

    public List<Product> findProductsForCategory(Category category);

    public List<Product> findProductsForCategory(Category category, int limit, int offset);

    public Sku saveSku(Sku sku);

    public SkuFee saveSkuFee(SkuFee fee);

    public List<Sku> findAllSkus();

    public List<Sku> findSkusByIds(List<Long> ids);

    public Sku findSkuById(Long skuId);

    /**
     * Get a hierarchical map of all child categories keyed on the url
     *
     * @param categoryId the parent category to which the children belong
     * @return hierarchical map of all child categories
     * @deprecated this approach is inherently inefficient - don't use.
     */
    @Deprecated
    public Map<String, List<Long>> getChildCategoryURLMapByCategoryId(Long categoryId);

    public Category createCategory();

    public Sku createSku();

    public Product createProduct(ProductType productType);

    public List<Category> findAllParentCategories();

    public List<Category> findAllSubCategories(Category category);

    public List<Category> findAllSubCategories(Category category, int limit, int offset);

    public List<Category> findActiveSubCategoriesByCategory(Category category);

    public List<Category> findActiveSubCategoriesByCategory(Category category, int limit, int offset);

    public List<ProductOption> readAllProductOptions();

    public ProductOption saveProductOption(ProductOption option);

    public ProductOption findProductOptionById(Long productOptionId);

    public ProductOptionValue findProductOptionValueById(Long productOptionValueId);

    /**
     * Returns a category associated with the passed in URI or null if no Category is
     * mapped to this URI.
     *
     * @param uri
     * @return
     */
    public Category findCategoryByURI(String uri);

    /**
     * Returns a product associated with the passed in URI or null if no Product is
     * mapped to this URI.
     *
     * @param uri
     * @return
     */
    public Product findProductByURI(String uri);
}
| core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_catalog_service_CatalogService.java |
/**
 * Map-only job step that writes each Faunus vertex's incoming edges into a
 * live Titan graph. Each mapper opens its own {@link TitanGraph}; the
 * transaction is committed (or rolled back on failure) in {@link #cleanup}.
 * If a loader script is configured, edge creation is delegated to it.
 */
public static class EdgeMap extends Mapper<NullWritable, FaunusVertex, NullWritable, FaunusVertex> {

    private TitanGraph graph;
    private boolean trackState;
    private ModifiableHadoopConfiguration faunusConf;
    // non-null only when OUTPUT_LOADER_SCRIPT_FILE is configured
    private LoaderScriptWrapper loaderScript;

    @Override
    public void setup(final Mapper.Context context) throws IOException, InterruptedException {
        faunusConf = ModifiableHadoopConfiguration.of(DEFAULT_COMPAT.getContextConfiguration(context));
        graph = TitanGraphOutputMapReduce.generateGraph(faunusConf);
        trackState = context.getConfiguration().getBoolean(Tokens.TITAN_HADOOP_PIPELINE_TRACK_STATE, false);
        // Check whether a script is defined in the config
        if (faunusConf.has(OUTPUT_LOADER_SCRIPT_FILE)) {
            Path scriptPath = new Path(faunusConf.get(OUTPUT_LOADER_SCRIPT_FILE));
            FileSystem scriptFS = FileSystem.get(DEFAULT_COMPAT.getJobContextConfiguration(context));
            loaderScript = new LoaderScriptWrapper(scriptFS, scriptPath);
        }
    }

    @Override
    public void map(final NullWritable key, final FaunusVertex value, final Mapper<NullWritable, FaunusVertex, NullWritable, FaunusVertex>.Context context) throws IOException, InterruptedException {
        try {
            // only IN edges are processed here; OUT edges are presumably handled
            // by another step of this job -- confirm against the enclosing class
            for (final TitanEdge edge : value.query().queryAll().direction(IN).titanEdges()) {
                this.getCreateOrDeleteEdge(value, (StandardFaunusEdge)edge, context);
            }
        } catch (final Exception e) {
            // abandon the whole transaction on any failure and surface the error
            graph.rollback();
            DEFAULT_COMPAT.incrementContextCounter(context, Counters.FAILED_TRANSACTIONS, 1L);
            throw new IOException(e.getMessage(), e);
        }
    }

    @Override
    public void cleanup(final Mapper<NullWritable, FaunusVertex, NullWritable, FaunusVertex>.Context context) throws IOException, InterruptedException {
        // commit everything written by this mapper; roll back and rethrow on failure
        try {
            graph.commit();
            DEFAULT_COMPAT.incrementContextCounter(context, Counters.SUCCESSFUL_TRANSACTIONS, 1L);
        } catch (Exception e) {
            LOGGER.error("Could not commit transaction during Reduce.cleanup(): ", e);
            graph.rollback();
            DEFAULT_COMPAT.incrementContextCounter(context, Counters.FAILED_TRANSACTIONS, 1L);
            throw new IOException(e.getMessage(), e);
        }
        graph.shutdown();
    }

    /**
     * Creates (or deletes) the Titan edge corresponding to the given Faunus
     * edge, either via the configured loader script or via the generic
     * relation handling.
     */
    public TitanEdge getCreateOrDeleteEdge(final FaunusVertex faunusVertex, final StandardFaunusEdge faunusEdge, final Mapper<NullWritable, FaunusVertex, NullWritable, FaunusVertex>.Context context) throws InterruptedException {
        final Direction dir = IN;
        // resolve the Titan vertex previously created for this Faunus vertex
        final TitanVertex titanVertex = (TitanVertex) this.graph.getVertex(faunusVertex.getProperty(TITAN_ID));
        if (null != loaderScript && loaderScript.hasEdgeMethod()) {
            TitanEdge te = loaderScript.getEdge(faunusEdge, titanVertex, getOtherTitanVertex(faunusVertex, faunusEdge, dir.opposite(), graph), graph, context);
            synchronizeRelationProperties(graph, faunusEdge, te, context);
            return te;
        } else {
            return (TitanEdge) getCreateOrDeleteRelation(graph, trackState, dir, faunusVertex, titanVertex, faunusEdge, context);
        }
    }
}
| titan-hadoop-parent_titan-hadoop-core_src_main_java_com_thinkaurelius_titan_hadoop_formats_util_TitanGraphOutputMapReduce.java |
/**
 * Ant {@link Task} carrying a single file-system path property.
 * NOTE(review): judging by the class name, the path presumably locates a
 * Spring application context file on the file system -- confirm with the
 * build scripts that consume this task.
 */
public class FileSystemApplicationContextTask extends Task {

    private String path;

    /** @return the configured file-system path */
    public String getPath() {
        return path;
    }

    /** @param path the file-system path to use */
    public void setPath(String path) {
        this.path = path;
    }
}
| common_src_main_java_org_broadleafcommerce_common_util_sql_FileSystemApplicationContextTask.java |
/**
 * A {@link DefaultPortableReader} used when the stored {@link ClassDefinition}
 * of a Portable may differ from the one the caller expects ("morphing"):
 * <ul>
 *   <li>a field missing from the stored definition yields a default value
 *       (0 / false / null / empty array);</li>
 *   <li>numeric getters accept any narrower numeric field type actually
 *       written and widen it (e.g. readLong accepts INT, SHORT, CHAR, BYTE);</li>
 *   <li>an incompatible stored field type raises
 *       {@link IncompatibleClassChangeError}.</li>
 * </ul>
 */
public class MorphingPortableReader extends DefaultPortableReader {

    public MorphingPortableReader(PortableSerializer serializer, BufferObjectDataInput in, ClassDefinition cd) {
        super(serializer, in, cd);
    }

    /** Reads an int; widens BYTE, CHAR and SHORT; 0 when the field is absent. */
    @Override
    public int readInt(String fieldName) throws IOException {
        FieldDefinition fd = cd.get(fieldName);
        if (fd == null) {
            return 0;
        }
        switch (fd.getType()) {
            case INT:
                return super.readInt(fieldName);
            case BYTE:
                return super.readByte(fieldName);
            case CHAR:
                return super.readChar(fieldName);
            case SHORT:
                return super.readShort(fieldName);
            default:
                throw new IncompatibleClassChangeError();
        }
    }

    /** Reads a long; widens INT, BYTE, CHAR and SHORT; 0 when absent. */
    @Override
    public long readLong(String fieldName) throws IOException {
        FieldDefinition fd = cd.get(fieldName);
        if (fd == null) {
            return 0L;
        }
        switch (fd.getType()) {
            case LONG:
                return super.readLong(fieldName);
            case INT:
                return super.readInt(fieldName);
            case BYTE:
                return super.readByte(fieldName);
            case CHAR:
                return super.readChar(fieldName);
            case SHORT:
                return super.readShort(fieldName);
            default:
                throw new IncompatibleClassChangeError();
        }
    }

    /** Reads a UTF string; null when absent; no type morphing for strings. */
    @Override
    public String readUTF(String fieldName) throws IOException {
        FieldDefinition fd = cd.get(fieldName);
        if (fd == null) {
            return null;
        }
        if (fd.getType() != FieldType.UTF) {
            throw new IncompatibleClassChangeError();
        }
        return super.readUTF(fieldName);
    }

    /** Reads a boolean; false when absent; no morphing. */
    @Override
    public boolean readBoolean(String fieldName) throws IOException {
        FieldDefinition fd = cd.get(fieldName);
        if (fd == null) {
            return false;
        }
        if (fd.getType() != FieldType.BOOLEAN) {
            throw new IncompatibleClassChangeError();
        }
        return super.readBoolean(fieldName);
    }

    /** Reads a byte; 0 when absent; no morphing (byte is the narrowest type). */
    @Override
    public byte readByte(String fieldName) throws IOException {
        FieldDefinition fd = cd.get(fieldName);
        if (fd == null) {
            return 0;
        }
        if (fd.getType() != FieldType.BYTE) {
            throw new IncompatibleClassChangeError();
        }
        return super.readByte(fieldName);
    }

    /** Reads a char; 0 when absent; no morphing. */
    @Override
    public char readChar(String fieldName) throws IOException {
        FieldDefinition fd = cd.get(fieldName);
        if (fd == null) {
            return 0;
        }
        if (fd.getType() != FieldType.CHAR) {
            throw new IncompatibleClassChangeError();
        }
        return super.readChar(fieldName);
    }

    /** Reads a double; widens every narrower numeric type; 0d when absent. */
    @Override
    public double readDouble(String fieldName) throws IOException {
        FieldDefinition fd = cd.get(fieldName);
        if (fd == null) {
            return 0d;
        }
        switch (fd.getType()) {
            case DOUBLE:
                return super.readDouble(fieldName);
            case LONG:
                return super.readLong(fieldName);
            case FLOAT:
                return super.readFloat(fieldName);
            case INT:
                return super.readInt(fieldName);
            case BYTE:
                return super.readByte(fieldName);
            case CHAR:
                return super.readChar(fieldName);
            case SHORT:
                return super.readShort(fieldName);
            default:
                throw new IncompatibleClassChangeError();
        }
    }

    /** Reads a float; widens INT, BYTE, CHAR and SHORT; 0f when absent. */
    @Override
    public float readFloat(String fieldName) throws IOException {
        FieldDefinition fd = cd.get(fieldName);
        if (fd == null) {
            return 0f;
        }
        switch (fd.getType()) {
            case FLOAT:
                return super.readFloat(fieldName);
            case INT:
                return super.readInt(fieldName);
            case BYTE:
                return super.readByte(fieldName);
            case CHAR:
                return super.readChar(fieldName);
            case SHORT:
                return super.readShort(fieldName);
            default:
                throw new IncompatibleClassChangeError();
        }
    }

    /** Reads a short; widens BYTE only (CHAR does not fit in short); 0 when absent. */
    @Override
    public short readShort(String fieldName) throws IOException {
        FieldDefinition fd = cd.get(fieldName);
        if (fd == null) {
            return 0;
        }
        switch (fd.getType()) {
            case SHORT:
                return super.readShort(fieldName);
            case BYTE:
                return super.readByte(fieldName);
            default:
                throw new IncompatibleClassChangeError();
        }
    }

    /** Reads a byte[]; empty array when absent; exact type match required. */
    @Override
    public byte[] readByteArray(String fieldName) throws IOException {
        FieldDefinition fd = cd.get(fieldName);
        if (fd == null) {
            return new byte[0];
        }
        if (fd.getType() != FieldType.BYTE_ARRAY) {
            throw new IncompatibleClassChangeError();
        }
        return super.readByteArray(fieldName);
    }

    /** Reads a char[]; empty array when absent; exact type match required. */
    @Override
    public char[] readCharArray(String fieldName) throws IOException {
        FieldDefinition fd = cd.get(fieldName);
        if (fd == null) {
            return new char[0];
        }
        if (fd.getType() != FieldType.CHAR_ARRAY) {
            throw new IncompatibleClassChangeError();
        }
        return super.readCharArray(fieldName);
    }

    /** Reads an int[]; empty array when absent; exact type match required. */
    @Override
    public int[] readIntArray(String fieldName) throws IOException {
        FieldDefinition fd = cd.get(fieldName);
        if (fd == null) {
            return new int[0];
        }
        if (fd.getType() != FieldType.INT_ARRAY) {
            throw new IncompatibleClassChangeError();
        }
        return super.readIntArray(fieldName);
    }

    /** Reads a long[]; empty array when absent; exact type match required. */
    @Override
    public long[] readLongArray(String fieldName) throws IOException {
        FieldDefinition fd = cd.get(fieldName);
        if (fd == null) {
            return new long[0];
        }
        if (fd.getType() != FieldType.LONG_ARRAY) {
            throw new IncompatibleClassChangeError();
        }
        return super.readLongArray(fieldName);
    }

    /** Reads a double[]; empty array when absent; exact type match required. */
    @Override
    public double[] readDoubleArray(String fieldName) throws IOException {
        FieldDefinition fd = cd.get(fieldName);
        if (fd == null) {
            return new double[0];
        }
        if (fd.getType() != FieldType.DOUBLE_ARRAY) {
            throw new IncompatibleClassChangeError();
        }
        return super.readDoubleArray(fieldName);
    }

    /** Reads a float[]; empty array when absent; exact type match required. */
    @Override
    public float[] readFloatArray(String fieldName) throws IOException {
        FieldDefinition fd = cd.get(fieldName);
        if (fd == null) {
            return new float[0];
        }
        if (fd.getType() != FieldType.FLOAT_ARRAY) {
            throw new IncompatibleClassChangeError();
        }
        return super.readFloatArray(fieldName);
    }

    /** Reads a short[]; empty array when absent; exact type match required. */
    @Override
    public short[] readShortArray(String fieldName) throws IOException {
        FieldDefinition fd = cd.get(fieldName);
        if (fd == null) {
            return new short[0];
        }
        if (fd.getType() != FieldType.SHORT_ARRAY) {
            throw new IncompatibleClassChangeError();
        }
        return super.readShortArray(fieldName);
    }

    /** Reads a nested Portable; null when absent; exact type match required. */
    @Override
    public Portable readPortable(String fieldName) throws IOException {
        FieldDefinition fd = cd.get(fieldName);
        if (fd == null) {
            return null;
        }
        if (fd.getType() != FieldType.PORTABLE) {
            throw new IncompatibleClassChangeError();
        }
        return super.readPortable(fieldName);
    }

    /** Reads a Portable[]; empty array when absent; exact type match required. */
    @Override
    public Portable[] readPortableArray(String fieldName) throws IOException {
        FieldDefinition fd = cd.get(fieldName);
        if (fd == null) {
            return new Portable[0];
        }
        if (fd.getType() != FieldType.PORTABLE_ARRAY) {
            throw new IncompatibleClassChangeError();
        }
        return super.readPortableArray(fieldName);
    }
}
| hazelcast_src_main_java_com_hazelcast_nio_serialization_MorphingPortableReader.java |
/**
 * Client request that removes a map entry only if it is currently mapped to
 * the given value (it delegates to {@code RemoveIfSameOperation}; presumably
 * the client-side counterpart of {@code IMap.remove(key, value)} -- confirm
 * against the client proxy). Routed to the partition owning {@code key}.
 */
public class MapRemoveIfSameRequest extends KeyBasedClientRequest implements Portable, SecureRequest {

    protected String name;
    protected Data key;
    protected Data value;
    // id of the calling thread, propagated for lock-ownership checks
    protected long threadId;

    public MapRemoveIfSameRequest() {
    }

    public MapRemoveIfSameRequest(String name, Data key, Data value, long threadId) {
        this.name = name;
        this.key = key;
        this.value = value;
        this.threadId = threadId;
    }

    public int getFactoryId() {
        return MapPortableHook.F_ID;
    }

    public int getClassId() {
        return MapPortableHook.REMOVE_IF_SAME;
    }

    // the partition key used to route this request
    public Object getKey() {
        return key;
    }

    protected Operation prepareOperation() {
        RemoveIfSameOperation op = new RemoveIfSameOperation(name, key, value);
        op.setThreadId(threadId);
        return op;
    }

    public String getServiceName() {
        return MapService.SERVICE_NAME;
    }

    // portable serialization: name and threadId as named fields, key/value raw
    public void write(PortableWriter writer) throws IOException {
        writer.writeUTF("n", name);
        writer.writeLong("t", threadId);
        final ObjectDataOutput out = writer.getRawDataOutput();
        key.writeData(out);
        value.writeData(out);
    }

    public void read(PortableReader reader) throws IOException {
        name = reader.readUTF("n");
        threadId = reader.readLong("t");
        final ObjectDataInput in = reader.getRawDataInput();
        key = new Data();
        key.readData(in);
        value = new Data();
        value.readData(in);
    }

    // removing an entry requires the REMOVE permission on this map
    public Permission getRequiredPermission() {
        return new MapPermission(name, ActionConstants.ACTION_REMOVE);
    }
}
| hazelcast_src_main_java_com_hazelcast_map_client_MapRemoveIfSameRequest.java |
1,941 | public final class Errors implements Serializable {
/**
* The root errors object. Used to access the list of error messages.
*/
private final Errors root;
/**
* The parent errors object. Used to obtain the chain of source objects.
*/
private final Errors parent;
/**
* The leaf source for errors added here.
*/
private final Object source;
/**
* null unless (root == this) and error messages exist. Never an empty list.
*/
private List<Message> errors; // lazy, use getErrorsForAdd()
public Errors() {
this.root = this;
this.parent = null;
this.source = SourceProvider.UNKNOWN_SOURCE;
}
public Errors(Object source) {
this.root = this;
this.parent = null;
this.source = source;
}
private Errors(Errors parent, Object source) {
this.root = parent.root;
this.parent = parent;
this.source = source;
}
/**
* Returns an instance that uses {@code source} as a reference point for newly added errors.
*/
public Errors withSource(Object source) {
return source == SourceProvider.UNKNOWN_SOURCE
? this
: new Errors(this, source);
}
/**
* We use a fairly generic error message here. The motivation is to share the
* same message for both bind time errors:
* <pre><code>Guice.createInjector(new AbstractModule() {
* public void configure() {
* bind(Runnable.class);
* }
* }</code></pre>
* ...and at provide-time errors:
* <pre><code>Guice.createInjector().getInstance(Runnable.class);</code></pre>
* Otherwise we need to know who's calling when resolving a just-in-time
* binding, which makes things unnecessarily complex.
*/
public Errors missingImplementation(Key key) {
return addMessage("No implementation for %s was bound.", key);
}
public Errors converterReturnedNull(String stringValue, Object source,
TypeLiteral<?> type, MatcherAndConverter matchingConverter) {
return addMessage("Received null converting '%s' (bound at %s) to %s%n"
+ " using %s.",
stringValue, convert(source), type, matchingConverter);
}
public Errors conversionTypeError(String stringValue, Object source, TypeLiteral<?> type,
MatcherAndConverter matchingConverter, Object converted) {
return addMessage("Type mismatch converting '%s' (bound at %s) to %s%n"
+ " using %s.%n"
+ " Converter returned %s.",
stringValue, convert(source), type, matchingConverter, converted);
}
public Errors conversionError(String stringValue, Object source,
TypeLiteral<?> type, MatcherAndConverter matchingConverter, RuntimeException cause) {
return errorInUserCode(cause, "Error converting '%s' (bound at %s) to %s%n"
+ " using %s.%n"
+ " Reason: %s",
stringValue, convert(source), type, matchingConverter, cause);
}
public Errors ambiguousTypeConversion(String stringValue, Object source, TypeLiteral<?> type,
MatcherAndConverter a, MatcherAndConverter b) {
return addMessage("Multiple converters can convert '%s' (bound at %s) to %s:%n"
+ " %s and%n"
+ " %s.%n"
+ " Please adjust your type converter configuration to avoid overlapping matches.",
stringValue, convert(source), type, a, b);
}
public Errors bindingToProvider() {
return addMessage("Binding to Provider is not allowed.");
}
public Errors subtypeNotProvided(Class<? extends Provider<?>> providerType,
Class<?> type) {
return addMessage("%s doesn't provide instances of %s.", providerType, type);
}
public Errors notASubtype(Class<?> implementationType, Class<?> type) {
return addMessage("%s doesn't extend %s.", implementationType, type);
}
public Errors recursiveImplementationType() {
return addMessage("@ImplementedBy points to the same class it annotates.");
}
public Errors recursiveProviderType() {
return addMessage("@ProvidedBy points to the same class it annotates.");
}
public Errors missingRuntimeRetention(Object source) {
return addMessage("Please annotate with @Retention(RUNTIME).%n"
+ " Bound at %s.", convert(source));
}
public Errors missingScopeAnnotation() {
return addMessage("Please annotate with @ScopeAnnotation.");
}
public Errors optionalConstructor(Constructor constructor) {
return addMessage("%s is annotated @Inject(optional=true), "
+ "but constructors cannot be optional.", constructor);
}
public Errors cannotBindToGuiceType(String simpleName) {
return addMessage("Binding to core guice framework type is not allowed: %s.", simpleName);
}
public Errors scopeNotFound(Class<? extends Annotation> scopeAnnotation) {
return addMessage("No scope is bound to %s.", scopeAnnotation);
}
public Errors scopeAnnotationOnAbstractType(
Class<? extends Annotation> scopeAnnotation, Class<?> type, Object source) {
return addMessage("%s is annotated with %s, but scope annotations are not supported "
+ "for abstract types.%n Bound at %s.", type, scopeAnnotation, convert(source));
}
public Errors misplacedBindingAnnotation(Member member, Annotation bindingAnnotation) {
return addMessage("%s is annotated with %s, but binding annotations should be applied "
+ "to its parameters instead.", member, bindingAnnotation);
}
private static final String CONSTRUCTOR_RULES =
"Classes must have either one (and only one) constructor "
+ "annotated with @Inject or a zero-argument constructor that is not private.";
public Errors missingConstructor(Class<?> implementation) {
return addMessage("Could not find a suitable constructor in %s. " + CONSTRUCTOR_RULES,
implementation);
}
public Errors tooManyConstructors(Class<?> implementation) {
return addMessage("%s has more than one constructor annotated with @Inject. "
+ CONSTRUCTOR_RULES, implementation);
}
public Errors duplicateScopes(Scope existing,
Class<? extends Annotation> annotationType, Scope scope) {
return addMessage("Scope %s is already bound to %s. Cannot bind %s.", existing,
annotationType, scope);
}
public Errors voidProviderMethod() {
return addMessage("Provider methods must return a value. Do not return void.");
}
public Errors missingConstantValues() {
return addMessage("Missing constant value. Please call to(...).");
}
public Errors cannotInjectInnerClass(Class<?> type) {
return addMessage("Injecting into inner classes is not supported. "
+ "Please use a 'static' class (top-level or nested) instead of %s.", type);
}
public Errors duplicateBindingAnnotations(Member member,
Class<? extends Annotation> a, Class<? extends Annotation> b) {
return addMessage("%s has more than one annotation annotated with @BindingAnnotation: "
+ "%s and %s", member, a, b);
}
public Errors duplicateScopeAnnotations(
Class<? extends Annotation> a, Class<? extends Annotation> b) {
return addMessage("More than one scope annotation was found: %s and %s.", a, b);
}
public Errors recursiveBinding() {
return addMessage("Binding points to itself.");
}
public Errors bindingAlreadySet(Key<?> key, Object source) {
return addMessage("A binding to %s was already configured at %s.", key, convert(source));
}
public Errors childBindingAlreadySet(Key<?> key) {
return addMessage("A binding to %s already exists on a child injector.", key);
}
public Errors errorInjectingMethod(Throwable cause) {
return errorInUserCode(cause, "Error injecting method, %s", cause);
}
public Errors errorNotifyingTypeListener(TypeListenerBinding listener,
TypeLiteral<?> type, Throwable cause) {
return errorInUserCode(cause,
"Error notifying TypeListener %s (bound at %s) of %s.%n"
+ " Reason: %s",
listener.getListener(), convert(listener.getSource()), type, cause);
}
public Errors errorInjectingConstructor(Throwable cause) {
return errorInUserCode(cause, "Error injecting constructor, %s", cause);
}
public Errors errorInProvider(RuntimeException runtimeException) {
return errorInUserCode(runtimeException, "Error in custom provider, %s", runtimeException);
}
public Errors errorInUserInjector(
MembersInjector<?> listener, TypeLiteral<?> type, RuntimeException cause) {
return errorInUserCode(cause, "Error injecting %s using %s.%n"
+ " Reason: %s", type, listener, cause);
}
public Errors errorNotifyingInjectionListener(
InjectionListener<?> listener, TypeLiteral<?> type, RuntimeException cause) {
return errorInUserCode(cause, "Error notifying InjectionListener %s of %s.%n"
+ " Reason: %s", listener, type, cause);
}
public void exposedButNotBound(Key<?> key) {
addMessage("Could not expose() %s, it must be explicitly bound.", key);
}
/**
 * Extracts the error messages carried by a Guice exception type, or an empty
 * collection when {@code throwable} is not one of the known carrier exceptions.
 */
public static Collection<Message> getMessagesFromThrowable(Throwable throwable) {
    if (throwable instanceof ProvisionException) {
        return ((ProvisionException) throwable).getErrorMessages();
    }
    if (throwable instanceof ConfigurationException) {
        return ((ConfigurationException) throwable).getErrorMessages();
    }
    if (throwable instanceof CreationException) {
        return ((CreationException) throwable).getErrorMessages();
    }
    return ImmutableSet.of();
}
/**
 * Records an error originating in user code. If {@code cause} already carries Guice
 * messages, those are merged instead of recording a new wrapping message.
 */
public Errors errorInUserCode(Throwable cause, String messageFormat, Object... arguments) {
    Collection<Message> nested = getMessagesFromThrowable(cause);
    return nested.isEmpty()
        ? addMessage(cause, messageFormat, arguments)
        : merge(nested);
}
public Errors cannotInjectRawProvider() {
return addMessage("Cannot inject a Provider that has no type parameter");
}
public Errors cannotInjectRawMembersInjector() {
return addMessage("Cannot inject a MembersInjector that has no type parameter");
}
public Errors cannotInjectTypeLiteralOf(Type unsupportedType) {
return addMessage("Cannot inject a TypeLiteral of %s", unsupportedType);
}
public Errors cannotInjectRawTypeLiteral() {
return addMessage("Cannot inject a TypeLiteral that has no type parameter");
}
public Errors cannotSatisfyCircularDependency(Class<?> expectedType) {
return addMessage(
"Tried proxying %s to support a circular dependency, but it is not an interface.",
expectedType);
}
/** Throws a {@link CreationException} carrying all recorded messages, if any exist. */
public void throwCreationExceptionIfErrorsExist() {
    if (hasErrors()) {
        throw new CreationException(getMessages());
    }
}
/** Throws a {@link ConfigurationException} carrying all recorded messages, if any exist. */
public void throwConfigurationExceptionIfErrorsExist() {
    if (hasErrors()) {
        throw new ConfigurationException(getMessages());
    }
}
/** Throws a {@link ProvisionException} carrying all recorded messages, if any exist. */
public void throwProvisionExceptionIfErrorsExist() {
    if (hasErrors()) {
        throw new ProvisionException(getMessages());
    }
}
/**
 * Returns a copy of {@code message} whose source list is prefixed with this
 * Errors object's own source chain, preserving message text and cause.
 */
private Message merge(Message message) {
    List<Object> combinedSources = Lists.newArrayList();
    combinedSources.addAll(getSources());
    combinedSources.addAll(message.getSources());
    return new Message(combinedSources, message.getMessage(), message.getCause());
}
/** Records each of {@code messages}, re-rooting its sources under this context. */
public Errors merge(Collection<Message> messages) {
    for (Message each : messages) {
        addMessage(merge(each));
    }
    return this;
}
/** Absorbs the messages of another Errors object, unless it shares our root or is empty. */
public Errors merge(Errors moreErrors) {
    boolean sharesRoot = moreErrors.root == root;
    if (!sharesRoot && moreErrors.root.errors != null) {
        merge(moreErrors.root.errors);
    }
    return this;
}
/**
 * Returns the chain of known sources from the outermost context down to this one.
 * Contexts whose source is unknown are omitted.
 */
public List<Object> getSources() {
    List<Object> chain = Lists.newArrayList();
    for (Errors current = this; current != null; current = current.parent) {
        if (current.source != SourceProvider.UNKNOWN_SOURCE) {
            chain.add(0, current.source);  // prepend so ancestors come first
        }
    }
    return chain;
}
/** Throws if any message was recorded since the caller observed {@code expectedSize} messages. */
public void throwIfNewErrors(int expectedSize) throws ErrorsException {
    if (size() != expectedSize) {
        throw toException();
    }
}
/** Wraps this Errors object in an {@link ErrorsException} suitable for throwing. */
public ErrorsException toException() {
    return new ErrorsException(this);
}
/** Returns true if any message has been recorded; state lives on the root of the chain. */
public boolean hasErrors() {
    return root.errors != null;
}
/** Records a formatted message with no underlying cause. */
public Errors addMessage(String messageFormat, Object... arguments) {
    return addMessage(null, messageFormat, arguments);
}
/** Formats and records a message, attaching {@code cause} and the current source chain. */
private Errors addMessage(Throwable cause, String messageFormat, Object... arguments) {
    String formatted = format(messageFormat, arguments);
    return addMessage(new Message(getSources(), formatted, cause));
}
/** Records a fully-built message on the root, so all children in the chain share one list. */
public Errors addMessage(Message message) {
    if (root.errors == null) {
        // Lazily allocated: error-free runs never pay for the list.
        root.errors = Lists.newArrayList();
    }
    root.errors.add(message);
    return this;
}
/**
 * Applies {@link #convert(Object)} to each argument and formats the template with
 * {@link String#format} in the root locale, so diagnostics do not vary by platform locale.
 *
 * @param messageFormat a {@code String.format}-style template
 * @param arguments values to be converted and substituted into the template
 * @return the formatted message
 */
public static String format(String messageFormat, Object... arguments) {
    // Convert into a fresh array: the original code overwrote the caller's varargs
    // array in place, a surprising side effect when an explicit array is passed.
    Object[] converted = new Object[arguments.length];
    for (int i = 0; i < arguments.length; i++) {
        converted[i] = Errors.convert(arguments[i]);
    }
    return String.format(Locale.ROOT, messageFormat, converted);
}
/** Returns a copy of all recorded messages, ordered by the string form of their sources. */
public List<Message> getMessages() {
    if (root.errors == null) {
        return ImmutableList.of();
    }
    List<Message> sorted = Lists.newArrayList(root.errors);
    Comparator<Message> bySource = new Comparator<Message>() {
        public int compare(Message left, Message right) {
            return left.getSource().compareTo(right.getSource());
        }
    };
    CollectionUtil.timSort(sorted, bySource);
    return sorted;
}
/**
 * Returns the formatted message for an exception with the specified messages:
 * a heading, each numbered message with its source chain, and a trailing error count.
 * Stack traces of causes are printed only when there is more than one distinct cause
 * (a single cause is expected to be shown by the exception itself).
 */
public static String format(String heading, Collection<Message> errorMessages) {
    final Formatter fmt = new Formatter(Locale.ROOT);
    try {
        fmt.format(heading).format(":%n%n");
        int index = 1;
        // When exactly one cause exists, it is attached to the thrown exception instead.
        boolean displayCauses = getOnlyCause(errorMessages) == null;
        for (Message errorMessage : errorMessages) {
            fmt.format("%s) %s%n", index++, errorMessage.getMessage());
            List<Object> dependencies = errorMessage.getSources();
            // Sources are stored outermost-first; print innermost-first for readability.
            for (int i = dependencies.size() - 1; i >= 0; i--) {
                Object source = dependencies.get(i);
                formatSource(fmt, source);
            }
            Throwable cause = errorMessage.getCause();
            if (displayCauses && cause != null) {
                StringWriter writer = new StringWriter();
                cause.printStackTrace(new PrintWriter(writer));
                fmt.format("Caused by: %s", writer.getBuffer());
            }
            fmt.format("%n");
        }
        if (errorMessages.size() == 1) {
            fmt.format("1 error");
        } else {
            fmt.format("%s errors", errorMessages.size());
        }
        return fmt.toString();
    } finally {
        fmt.close();
    }
}
/**
 * Returns {@code value} if it is non-null or allowed to be null. Otherwise a message is added
 * and an {@code ErrorsException} is thrown.
 */
public <T> T checkForNull(T value, Object source, Dependency<?> dependency)
    throws ErrorsException {
    if (value != null || dependency.isNullable()) {
        return value;
    }
    // Identify which parameter (if any) the null flowed into, for the diagnostic.
    int parameterIndex = dependency.getParameterIndex();
    String parameterName = (parameterIndex != -1)
        ? "parameter " + parameterIndex + " of "
        : "";
    addMessage("null returned by binding at %s%n but %s%s is not @Nullable",
        source, parameterName, dependency.getInjectionPoint().getMember());
    throw toException();
}
/**
 * Returns the single cause among {@code messages}, or {@code null} when zero
 * or more than one of the messages carries a cause.
 */
public static Throwable getOnlyCause(Collection<Message> messages) {
    Throwable found = null;
    for (Message candidate : messages) {
        Throwable cause = candidate.getCause();
        if (cause != null) {
            if (found != null) {
                return null;  // second cause found: result is ambiguous
            }
            found = cause;
        }
    }
    return found;
}
/** Returns the number of recorded messages, 0 when none have been added yet. */
public int size() {
    return root.errors == null ? 0 : root.errors.size();
}
/**
 * A type-guarded converter that renders objects of a specific class to a readable string
 * for error messages. {@link #appliesTo} gates by runtime type before {@link #convert} casts.
 */
private static abstract class Converter<T> {
    // The runtime type this converter handles (and safely casts to).
    final Class<T> type;
    Converter(Class<T> type) {
        this.type = type;
    }
    // True when this converter can handle o's runtime type.
    boolean appliesTo(Object o) {
        return type.isAssignableFrom(o.getClass());
    }
    String convert(Object o) {
        return toString(type.cast(o));
    }
    // Subclasses render a value of the handled type to display text.
    abstract String toString(T t);
}
// Converters consulted in order by convert(Object): classes print their name,
// members use MoreTypes' rendering, and keys include their binding annotation if any.
private static final Collection<Converter<?>> converters = ImmutableList.of(
    new Converter<Class>(Class.class) {
        public String toString(Class c) {
            return c.getName();
        }
    },
    new Converter<Member>(Member.class) {
        public String toString(Member member) {
            return MoreTypes.toString(member);
        }
    },
    new Converter<Key>(Key.class) {
        public String toString(Key key) {
            if (key.getAnnotationType() != null) {
                return key.getTypeLiteral() + " annotated with "
                    + (key.getAnnotation() != null ? key.getAnnotation() : key.getAnnotationType());
            } else {
                return key.getTypeLiteral().toString();
            }
        }
    }
);
/** Renders {@code o} through the first applicable converter, or returns it unchanged. */
public static Object convert(Object o) {
    for (Converter<?> candidate : converters) {
        if (candidate.appliesTo(o)) {
            return candidate.convert(o);
        }
    }
    return o;  // no converter matches; fall back to the object itself
}
/**
 * Writes a one-line description of {@code source} to {@code formatter}, dispatching
 * on its runtime type. Dependencies recurse into their injection point or key;
 * unrecognized sources fall back to their toString.
 */
public static void formatSource(Formatter formatter, Object source) {
    if (source instanceof Dependency) {
        Dependency<?> dependency = (Dependency<?>) source;
        InjectionPoint injectionPoint = dependency.getInjectionPoint();
        if (injectionPoint != null) {
            formatInjectionPoint(formatter, dependency, injectionPoint);
        } else {
            // No injection point available; describe the dependency by its key instead.
            formatSource(formatter, dependency.getKey());
        }
    } else if (source instanceof InjectionPoint) {
        formatInjectionPoint(formatter, null, (InjectionPoint) source);
    } else if (source instanceof Class) {
        formatter.format("  at %s%n", StackTraceElements.forType((Class<?>) source));
    } else if (source instanceof Member) {
        formatter.format("  at %s%n", StackTraceElements.forMember((Member) source));
    } else if (source instanceof TypeLiteral) {
        formatter.format("  while locating %s%n", source);
    } else if (source instanceof Key) {
        Key<?> key = (Key<?>) source;
        formatter.format("  while locating %s%n", convert(key));
    } else {
        formatter.format("  at %s%n", source);
    }
}
/**
 * Writes a description of an injection point: the key being located plus the field
 * or parameter that receives it. {@code dependency} may be null when only the
 * injection point itself is known.
 */
public static void formatInjectionPoint(Formatter formatter, Dependency<?> dependency,
    InjectionPoint injectionPoint) {
    Member member = injectionPoint.getMember();
    Class<? extends Member> memberType = MoreTypes.memberType(member);
    if (memberType == Field.class) {
        // Fields have exactly one dependency; prefer it over the passed-in one.
        dependency = injectionPoint.getDependencies().get(0);
        formatter.format("  while locating %s%n", convert(dependency.getKey()));
        formatter.format("    for field at %s%n", StackTraceElements.forMember(member));
    } else if (dependency != null) {
        formatter.format("  while locating %s%n", convert(dependency.getKey()));
        formatter.format("    for parameter %s at %s%n",
            dependency.getParameterIndex(), StackTraceElements.forMember(member));
    } else {
        // Constructor/method with no specific dependency: describe the member itself.
        formatSource(formatter, injectionPoint.getMember());
    }
}
} | 0true
| src_main_java_org_elasticsearch_common_inject_internal_Errors.java |
1,943 | public class MapGetRequest extends KeyBasedClientRequest implements Portable, RetryableRequest, SecureRequest {
private String name;
private Data key;
private transient long startTime;
public MapGetRequest() {
}
public MapGetRequest(String name, Data key) {
this.name = name;
this.key = key;
}
protected Object getKey() {
return key;
}
@Override
protected Operation prepareOperation() {
return new GetOperation(name, key);
}
@Override
protected void beforeProcess() {
startTime = System.currentTimeMillis();
}
@Override
protected void afterResponse() {
final long latency = System.currentTimeMillis() - startTime;
final MapService mapService = getService();
MapContainer mapContainer = mapService.getMapContainer(name);
if (mapContainer.getMapConfig().isStatisticsEnabled()) {
mapService.getLocalMapStatsImpl(name).incrementGets(latency);
}
}
public String getServiceName() {
return MapService.SERVICE_NAME;
}
@Override
public int getFactoryId() {
return MapPortableHook.F_ID;
}
public int getClassId() {
return MapPortableHook.GET;
}
public void write(PortableWriter writer) throws IOException {
writer.writeUTF("n", name);
final ObjectDataOutput out = writer.getRawDataOutput();
key.writeData(out);
}
public void read(PortableReader reader) throws IOException {
name = reader.readUTF("n");
final ObjectDataInput in = reader.getRawDataInput();
key = new Data();
key.readData(in);
}
public MapPermission getRequiredPermission() {
return new MapPermission(name, ActionConstants.ACTION_READ);
}
} | 1no label
| hazelcast_src_main_java_com_hazelcast_map_client_MapGetRequest.java |
/**
 * No-op {@link OIndexEngine} implementation. Every operation does nothing or returns
 * a neutral value (false/null/0) — presumably because with a remote storage the index
 * is maintained on the server side and this local engine is only a placeholder;
 * TODO(review): confirm against the remote storage implementation.
 */
public class ORemoteIndexEngine implements OIndexEngine {
    @Override
    public void init() {
    }
    @Override
    public void flush() {
    }
    @Override
    public void create(String indexName, OIndexDefinition indexDefinition, String clusterIndexName,
        OStreamSerializer valueSerializer, boolean isAutomatic) {
    }
    @Override
    public void deleteWithoutLoad(String indexName) {
    }
    @Override
    public void delete() {
    }
    @Override
    public void load(ORID indexRid, String indexName, OIndexDefinition indexDefinition, boolean isAutomatic) {
    }
    // Queries: always report "not found" / empty locally.
    @Override
    public boolean contains(Object key) {
        return false;
    }
    @Override
    public boolean remove(Object key) {
        return false;
    }
    @Override
    public ORID getIdentity() {
        return null;
    }
    @Override
    public void clear() {
    }
    // Iteration: no local data, so all iterators are null.
    @Override
    public Iterator<Map.Entry> iterator() {
        return null;
    }
    @Override
    public Iterator<Map.Entry> inverseIterator() {
        return null;
    }
    @Override
    public Iterator valuesIterator() {
        return null;
    }
    @Override
    public Iterator inverseValuesIterator() {
        return null;
    }
    @Override
    public Iterable<Object> keys() {
        return null;
    }
    @Override
    public void unload() {
    }
    // Transaction lifecycle callbacks: nothing to do locally.
    @Override
    public void startTransaction() {
    }
    @Override
    public void stopTransaction() {
    }
    @Override
    public void afterTxRollback() {
    }
    @Override
    public void afterTxCommit() {
    }
    @Override
    public void closeDb() {
    }
    @Override
    public void close() {
    }
    @Override
    public void beforeTxBegin() {
    }
    @Override
    public Object get(Object key) {
        return null;
    }
    @Override
    public void put(Object key, Object value) {
    }
    // Range queries: no results are ever delivered to the listeners.
    @Override
    public void getValuesBetween(Object rangeFrom, boolean fromInclusive, Object rangeTo, boolean toInclusive,
        ValuesTransformer transformer, ValuesResultListener resultListener) {
    }
    @Override
    public void getValuesMajor(Object fromKey, boolean isInclusive, ValuesTransformer transformer,
        ValuesResultListener valuesResultListener) {
    }
    @Override
    public void getValuesMinor(Object toKey, boolean isInclusive, ValuesTransformer transformer,
        ValuesResultListener valuesResultListener) {
    }
    @Override
    public void getEntriesMajor(Object fromKey, boolean isInclusive, ValuesTransformer transformer,
        EntriesResultListener entriesResultListener) {
    }
    @Override
    public void getEntriesMinor(Object toKey, boolean isInclusive, ValuesTransformer transformer,
        EntriesResultListener entriesResultListener) {
    }
    @Override
    public void getEntriesBetween(Object iRangeFrom, Object iRangeTo, boolean iInclusive, ValuesTransformer transformer,
        EntriesResultListener entriesResultListener) {
    }
    @Override
    public long size(ValuesTransformer transformer) {
        return 0;
    }
    @Override
    public long count(Object rangeFrom, boolean fromInclusive, Object rangeTo, boolean toInclusive, int maxValuesToFetch,
        ValuesTransformer transformer) {
        return 0;
    }
    // No local data structures, so range queries are not supported here.
    @Override
    public boolean hasRangeQuerySupport() {
        return false;
    }
} | 1no label
| core_src_main_java_com_orientechnologies_orient_core_index_engine_ORemoteIndexEngine.java |
/**
 * Toggles the editor's projection (code folding) state, deferring the toggle until
 * the editor becomes visible if necessary. Registers itself as a part listener and
 * unregisters once the toggle has run or the editor closes.
 */
final class ToggleFoldingRunner implements IPartListener2 {
    /**
     * The editor whose folding state this runner manages.
     */
    private final CeylonEditor editor;
    /**
     * @param ceylonEditor the editor whose folding state should be toggled
     */
    ToggleFoldingRunner(CeylonEditor ceylonEditor) {
        editor = ceylonEditor;
    }
    /**
     * The workbench page we registered the part listener with, or
     * <code>null</code>.
     */
    private IWorkbenchPage fPage;
    /**
     * Does the actual toggling of projection.
     */
    private void toggleFolding() {
        ProjectionViewer pv= editor.getCeylonSourceViewer();
        // Only toggle when the viewer's current mode disagrees with the editor preference.
        if (pv.isProjectionMode() != editor.isFoldingEnabled()) {
            if (pv.canDoOperation(ProjectionViewer.TOGGLE)) {
                pv.doOperation(ProjectionViewer.TOGGLE);
            }
        }
    }
    /**
     * Makes sure that the editor's folding state is correct the next time
     * it becomes visible. If it already is visible, it toggles the folding
     * state. If not, it either registers a part listener to toggle folding
     * when the editor becomes visible, or cancels an already registered
     * runner.
     */
    public void runWhenNextVisible() {
        // if there is one already: toggling twice is the identity
        if (editor.fFoldingRunner != null) {
            editor.fFoldingRunner.cancel();
            return;
        }
        IWorkbenchPartSite site= editor.getSite();
        if (site != null) {
            IWorkbenchPage page= site.getPage();
            if (!page.isPartVisible(editor)) {
                // if we're not visible - defer until visible
                fPage= page;
                editor.fFoldingRunner= this;
                page.addPartListener(this);
                return;
            }
        }
        // we're visible - run now
        toggleFolding();
    }
    /**
     * Remove the listener and clear the field.
     */
    private void cancel() {
        if (fPage != null) {
            fPage.removePartListener(this);
            fPage= null;
        }
        // Only clear the editor's reference if it still points at this runner.
        if (editor.fFoldingRunner == this)
            editor.fFoldingRunner= null;
    }
    /*
     * @see org.eclipse.ui.IPartListener2#partVisible(org.eclipse.ui.IWorkbenchPartReference)
     */
    public void partVisible(IWorkbenchPartReference partRef) {
        if (editor.equals(partRef.getPart(false))) {
            // Our editor just became visible: perform the deferred toggle once.
            cancel();
            toggleFolding();
        }
    }
    /*
     * @see org.eclipse.ui.IPartListener2#partClosed(org.eclipse.ui.IWorkbenchPartReference)
     */
    public void partClosed(IWorkbenchPartReference partRef) {
        if (editor.equals(partRef.getPart(false))) {
            // Editor closed before becoming visible: just unregister.
            cancel();
        }
    }
    // Remaining IPartListener2 events are irrelevant to folding; intentionally empty.
    public void partActivated(IWorkbenchPartReference partRef) {}
    public void partBroughtToTop(IWorkbenchPartReference partRef) {}
    public void partDeactivated(IWorkbenchPartReference partRef) {}
    public void partOpened(IWorkbenchPartReference partRef) {}
    public void partHidden(IWorkbenchPartReference partRef) {}
    public void partInputChanged(IWorkbenchPartReference partRef) {}
} | 0true
| plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_editor_ToggleFoldingRunner.java |
/**
 * Validates transaction-level integrity constraints before commit, raising
 * {@link XAException} with an integrity-rollback code when a rule is violated.
 */
public class IntegrityValidator
{
    private final NeoStore neoStore;
    private final IndexingService indexes;
    public IntegrityValidator( NeoStore neoStore, IndexingService indexes )
    {
        this.neoStore = neoStore;
        this.indexes = indexes;
    }
    /**
     * Rejects deletion of a node record that still has relationships attached.
     *
     * @throws XAException with {@code XA_RBINTEGRITY} when the record is not in use
     *         but still points at a relationship chain
     */
    public void validateNodeRecord( NodeRecord record ) throws XAException
    {
        if ( !record.inUse() && record.getNextRel() != Record.NO_NEXT_RELATIONSHIP.intValue() )
        {
            throw Exceptions.withCause( new XAException( XAException.XA_RBINTEGRITY ),
                new ConstraintViolationException(
                    "Node record " + record + " still has relationships" ) );
        }
    }
    /**
     * Fails the transaction if constraints were introduced after it started.
     */
    public void validateTransactionStartKnowledge( long lastCommittedTxWhenTransactionStarted )
        throws XAException
    {
        if( lastCommittedTxWhenTransactionStarted < neoStore.getLatestConstraintIntroducingTx() )
        {
            // Constraints have changed since the transaction began
            // This should be a relatively uncommon case, window for this happening is a few milliseconds when an admin
            // explicitly creates a constraint, after the index has been populated. We can improve this later on by
            // replicating the constraint validation logic down here, or rethinking where we validate constraints.
            // For now, we just kill these transactions.
            throw Exceptions.withCause( new XAException( XAException.XA_RBINTEGRITY ),
                new ConstraintViolationException(
                    "Database constraints have changed after this transaction started, which is not yet " +
                        "supported. Please retry your transaction to ensure all constraints are executed." ) );
        }
    }
    /**
     * For uniqueness-constraint rules, verifies the backing index before the rule is applied.
     */
    public void validateSchemaRule( SchemaRule schemaRule ) throws XAException
    {
        if(schemaRule instanceof UniquenessConstraintRule )
        {
            try
            {
                indexes.validateIndex( ((UniquenessConstraintRule)schemaRule).getOwnedIndex() );
            }
            catch ( ConstraintVerificationFailedKernelException e )
            {
                throw Exceptions.withCause( new XAException( XAException.XA_RBINTEGRITY ), e);
            }
            catch ( IndexNotFoundKernelException | IndexPopulationFailedKernelException e )
            {
                // We don't expect this to occur, and if they do, it is because we are in a very bad state - out of
                // disk or index corruption, or similar. This will kill the database such that it can be shut down
                // and have recovery performed. It's the safest bet to avoid losing data.
                throw Exceptions.withCause( new XAException( XAException.XAER_RMERR ), e);
            }
        }
    }
} | 0true
| community_kernel_src_main_java_org_neo4j_kernel_impl_nioneo_xa_IntegrityValidator.java |
/**
 * Rounding decorator that shifts input values by {@code preOffset} before delegating
 * to the wrapped rounding, and shifts produced values by {@code postOffset} afterwards.
 * NOTE(review): roundKey applies only preOffset and valueForKey only postOffset — this
 * asymmetry appears intentional (offsets act on opposite sides of the key space), but
 * confirm against the factory that constructs these instances.
 */
static class PrePostTimeZoneRounding extends TimeZoneRounding {
    // Stream id used by Rounding.Streams to identify this implementation on the wire.
    final static byte ID = 8;
    private TimeZoneRounding timeZoneRounding;
    private long preOffset;
    private long postOffset;
    PrePostTimeZoneRounding() { // for serialization
    }
    PrePostTimeZoneRounding(TimeZoneRounding timeZoneRounding, long preOffset, long postOffset) {
        this.timeZoneRounding = timeZoneRounding;
        this.preOffset = preOffset;
        this.postOffset = postOffset;
    }
    @Override
    public byte id() {
        return ID;
    }
    @Override
    public long roundKey(long utcMillis) {
        // Shift the input before rounding so the bucket boundary moves by preOffset.
        return timeZoneRounding.roundKey(utcMillis + preOffset);
    }
    @Override
    public long valueForKey(long key) {
        // Shift the rounded value after delegation.
        return postOffset + timeZoneRounding.valueForKey(key);
    }
    @Override
    public long nextRoundingValue(long value) {
        // Undo the post shift, advance in the delegate's space, then re-apply it.
        return postOffset + timeZoneRounding.nextRoundingValue(value - postOffset);
    }
    @Override
    public void readFrom(StreamInput in) throws IOException {
        // Must mirror writeTo: nested rounding first, then the two offsets.
        timeZoneRounding = (TimeZoneRounding) Rounding.Streams.read(in);
        preOffset = in.readVLong();
        postOffset = in.readVLong();
    }
    @Override
    public void writeTo(StreamOutput out) throws IOException {
        Rounding.Streams.write(timeZoneRounding, out);
        out.writeVLong(preOffset);
        out.writeVLong(postOffset);
    }
} | 0true
| src_main_java_org_elasticsearch_common_rounding_TimeZoneRounding.java |
@Entity
@Inheritance(strategy = InheritanceType.JOINED)
@Table(name = "BLC_SITE")
@Cache(usage= CacheConcurrencyStrategy.NONSTRICT_READ_WRITE, region="blCMSElements")
@AdminPresentationClass(friendlyName = "baseSite")
@SQLDelete(sql="UPDATE BLC_SITE SET ARCHIVED = 'Y' WHERE SITE_ID = ?")
// JPA entity for a site; deletes are soft (SQLDelete flips the ARCHIVED flag instead
// of removing the row), so "active" combines the deactivated flag and archive status.
public class SiteImpl implements Site, Status {
    private static final long serialVersionUID = 1L;
    private static final Log LOG = LogFactory.getLog(SiteImpl.class);
    @Id
    @GeneratedValue(generator = "SiteId")
    @GenericGenerator(
        name="SiteId",
        strategy="org.broadleafcommerce.common.persistence.IdOverrideTableGenerator",
        parameters = {
            @Parameter(name="segment_value", value="SiteImpl"),
            @Parameter(name="entity_name", value="org.broadleafcommerce.common.site.domain.SiteImpl")
        }
    )
    @Column(name = "SITE_ID")
    protected Long id;
    @Column (name = "NAME")
    @AdminPresentation(friendlyName = "SiteImpl_Site_Name", order=1, gridOrder = 1, group = "SiteImpl_Site", prominent = true, requiredOverride = RequiredOverride.REQUIRED)
    protected String name;
    @Column (name = "SITE_IDENTIFIER_TYPE")
    @AdminPresentation(friendlyName = "SiteImpl_Site_Identifier_Type", order=2, gridOrder = 2, group = "SiteImpl_Site", prominent = true, broadleafEnumeration = "org.broadleafcommerce.common.site.service.type.SiteResolutionType", fieldType = SupportedFieldType.BROADLEAF_ENUMERATION, requiredOverride = RequiredOverride.REQUIRED)
    protected String siteIdentifierType;
    @Column (name = "SITE_IDENTIFIER_VALUE")
    @AdminPresentation(friendlyName = "SiteImpl_Site_Identifier_Value", order=3, gridOrder = 3, group = "SiteImpl_Site", prominent = true, requiredOverride = RequiredOverride.REQUIRED)
    @Index(name = "BLC_SITE_ID_VAL_INDEX", columnNames = { "SITE_IDENTIFIER_VALUE" })
    protected String siteIdentifierValue;
    @ManyToOne(targetEntity = SandBoxImpl.class)
    @JoinColumn(name = "PRODUCTION_SANDBOX_ID")
    @AdminPresentation(friendlyName = "SiteImpl_Production_SandBox", visibility = VisibilityEnum.HIDDEN_ALL)
    protected SandBox productionSandbox;
    @ManyToMany(targetEntity = CatalogImpl.class, cascade = {CascadeType.PERSIST, CascadeType.DETACH, CascadeType.MERGE, CascadeType.REFRESH})
    @JoinTable(name = "BLC_SITE_CATALOG", joinColumns = @JoinColumn(name = "SITE_ID"), inverseJoinColumns = @JoinColumn(name = "CATALOG_ID"))
    @BatchSize(size = 50)
    @AdminPresentationCollection(addType = AddMethodType.LOOKUP, friendlyName = "siteCatalogTitle", manyToField = "sites")
    protected List<Catalog> catalogs = new ArrayList<Catalog>();
    @Column(name = "DEACTIVATED")
    @AdminPresentation(friendlyName = "SiteImpl_Deactivated", order=4, gridOrder = 4, group = "SiteImpl_Site")
    protected Boolean deactivated = false;
    @Embedded
    protected ArchiveStatus archiveStatus = new ArchiveStatus();
    @Override
    public Long getId() {
        return id;
    }
    @Override
    public void setId(Long id) {
        this.id = id;
    }
    @Override
    public String getName() {
        return name;
    }
    @Override
    public void setName(String name) {
        this.name = name;
    }
    @Override
    public String getSiteIdentifierType() {
        return siteIdentifierType;
    }
    @Override
    public void setSiteIdentifierType(String siteIdentifierType) {
        this.siteIdentifierType = siteIdentifierType;
    }
    @Override
    public String getSiteIdentifierValue() {
        return siteIdentifierValue;
    }
    @Override
    public void setSiteIdentifierValue(String siteIdentifierValue) {
        this.siteIdentifierValue = siteIdentifierValue;
    }
    @Override
    public SandBox getProductionSandbox() {
        return productionSandbox;
    }
    @Override
    public void setProductionSandbox(SandBox productionSandbox) {
        this.productionSandbox = productionSandbox;
    }
    @Override
    public SiteResolutionType getSiteResolutionType() {
        // The resolution type is persisted as its string key; convert on read.
        return SiteResolutionType.getInstance(siteIdentifierType);
    }
    @Override
    public void setSiteResolutionType(SiteResolutionType siteResolutionType) {
        this.siteIdentifierType = siteResolutionType.getType();
    }
    @Override
    public List<Catalog> getCatalogs() {
        return catalogs;
    }
    @Override
    public void setCatalogs(List<Catalog> catalogs) {
        this.catalogs = catalogs;
    }
    @Override
    public Character getArchived() {
        // Guard against a null embeddable (possible on legacy rows).
        if (archiveStatus == null) {
            archiveStatus = new ArchiveStatus();
        }
        return archiveStatus.getArchived();
    }
    @Override
    public void setArchived(Character archived) {
        if (archiveStatus == null) {
            archiveStatus = new ArchiveStatus();
        }
        archiveStatus.setArchived(archived);
    }
    @Override
    public boolean isActive() {
        // Active means neither explicitly deactivated nor soft-deleted (archived = 'Y').
        if (LOG.isDebugEnabled()) {
            if (isDeactivated()) {
                LOG.debug("site, " + id + ", inactive due to deactivated property");
            }
            if ('Y'==getArchived()) {
                LOG.debug("site, " + id + ", inactive due to archived status");
            }
        }
        return !isDeactivated() && 'Y'!=getArchived();
    }
    @Override
    public boolean isDeactivated() {
        // Treat a null column as "not deactivated".
        if (deactivated == null) {
            return false;
        } else {
            return deactivated;
        }
    }
    @Override
    public void setDeactivated(boolean deactivated) {
        this.deactivated = deactivated;
    }
    // Guards against Broadleaf-extending subclasses that forget to override clone():
    // if the nearest clone() declaration is a Broadleaf one while the instance class
    // is not, the subclass would be silently sliced during cloning.
    public void checkCloneable(Site site) throws CloneNotSupportedException, SecurityException, NoSuchMethodException {
        Method cloneMethod = site.getClass().getMethod("clone", new Class[]{});
        if (cloneMethod.getDeclaringClass().getName().startsWith("org.broadleafcommerce") && !site.getClass().getName().startsWith("org.broadleafcommerce")) {
            //subclass is not implementing the clone method
            throw new CloneNotSupportedException("Custom extensions and implementations should implement clone.");
        }
    }
    @Override
    public Site clone() {
        Site clone;
        try {
            // NOTE(review): Class.forName(getClass().getName()).newInstance() could likely be
            // simplified to getClass().newInstance() — confirm classloader expectations first.
            clone = (Site) Class.forName(this.getClass().getName()).newInstance();
            try {
                checkCloneable(clone);
            } catch (CloneNotSupportedException e) {
                LOG.warn("Clone implementation missing in inheritance hierarchy outside of Broadleaf: " + clone.getClass().getName(), e);
            }
            clone.setId(id);
            clone.setName(name);
            clone.setDeactivated(isDeactivated());
            ((Status) clone).setArchived(getArchived());
            // Catalogs are shallow-copied: only id and name are carried over.
            for (Catalog catalog : getCatalogs()) {
                Catalog cloneCatalog = new CatalogImpl();
                cloneCatalog.setId(catalog.getId());
                cloneCatalog.setName(catalog.getName());
                clone.getCatalogs().add(cloneCatalog);
            }
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
        return clone;
    }
} | 0true
| common_src_main_java_org_broadleafcommerce_common_site_domain_SiteImpl.java |
667 | public interface Category extends Serializable {
/**
* Gets the primary key.
*
* @return the primary key
*/
@Nullable
public Long getId();
/**
* Sets the primary key.
*
* @param id the new primary key
*/
public void setId(@Nullable Long id);
/**
* Gets the name.
*
* @return the name
*/
@Nonnull
public String getName();
/**
* Sets the name.
*
* @param name the new name
*/
public void setName(@Nonnull String name);
/**
* Gets the default parent category.
*
* @return the default parent category
*/
@Nullable
public Category getDefaultParentCategory();
/**
* Sets the default parent category.
*
* @param defaultParentCategory the new default parent category
*/
public void setDefaultParentCategory(@Nullable Category defaultParentCategory);
/**
* Gets the url. The url represents the presentation layer destination for
* this category. For example, if using Spring MVC, you could send the user
* to this destination by returning {@code "redirect:"+currentCategory.getUrl();}
* from a controller.
*
* @return the url for the presentation layer component for this category
*/
@Nullable
public String getUrl();
/**
* Sets the url. The url represents the presentation layer destination for
* this category. For example, if using Spring MVC, you could send the user
* to this destination by returning {@code "redirect:"+currentCategory.getUrl();}
* from a controller.
*
* @param url the new url for the presentation layer component for this category
*/
public void setUrl(@Nullable String url);
/**
* Gets the url key. The url key is used as part of SEO url generation for this
* category. Each segment of the url leading to a category is comprised of the url
* keys of the various associated categories in a hierarchy leading to this one. If
* the url key is null, the the name for the category formatted with dashes for spaces.
*
* @return the url key for this category to appear in the SEO url
*/
@Nullable
public String getUrlKey();
/**
* Creates the SEO url starting from this category and recursing up the
* hierarchy of default parent categories until the topmost category is
* reached. The url key for each category is used for each segment
* of the SEO url.
*
* @return the generated SEO url for this category
*/
@Nullable
public String getGeneratedUrl();
/**
* Sets the url key. The url key is used as part of SEO url generation for this
* category. Each segment of the url leading to a category is comprised of the url
* keys of the various associated categories in a hierarchy leading to this one.
*
* @param urlKey the new url key for this category to appear in the SEO url
*/
public void setUrlKey(@Nullable String urlKey);
/**
* Gets the description.
*
* @return the description
*/
@Nullable
public String getDescription();
/**
* Sets the description.
*
* @param description the new description
*/
public void setDescription(@Nullable String description);
/**
* Gets the active start date. If the current date is before activeStartDate,
* then this category will not be visible on the site.
*
* @return the active start date
*/
@Nullable
public Date getActiveStartDate();
/**
* Sets the active start date. If the current date is before activeStartDate,
* then this category will not be visible on the site.
*
* @param activeStartDate the new active start date
*/
public void setActiveStartDate(@Nullable Date activeStartDate);
/**
 * Gets the active end date. If the current date is after activeEndDate,
 * then this category will not be visible on the site.
 *
 * @return the active end date
 */
@Nullable
public Date getActiveEndDate();
/**
 * Sets the active end date. If the current date is after activeEndDate,
 * then this category will not be visible on the site.
 *
 * @param activeEndDate the new active end date
 */
public void setActiveEndDate(@Nullable Date activeEndDate);
/**
* Checks if is active. Returns true if the startDate is null or if the current
* date is after the start date, or if the endDate is null or if the current date
* is before the endDate.
*
* @return true, if is active
*/
public boolean isActive();
/**
* Gets the display template. The display template can be used to help create a unique key
* that drives the presentation layer destination for this category. For example, if
* using Spring MVC, you might derive the view destination in this way:
*
* {@code view = categoryTemplatePrefix + currentCategory.getDisplayTemplate();}
*
* @return the display template
*/
@Nullable
public String getDisplayTemplate();
/**
* Sets the display template. The display template can be used to help create a unique key
* that drives the presentation layer destination for this category. For example, if
* using Spring MVC, you might derive the view destination in this way:
*
* {@code view = categoryTemplatePrefix + currentCategory.getDisplayTemplate();}
*
* @param displayTemplate the new display template
*/
public void setDisplayTemplate(@Nullable String displayTemplate);
/**
* Gets the child category url map. This map is keyed off of the {@link #getGeneratedUrl()} values
* for this category and all of its child categories. By calling get on this map using the
* generated url for a given category, you will receive the list of immediate child categories.
* This is inefficient, so its use is highly discouraged.
*
* @return the child category url map
* @deprecated This approach is inherently inefficient and should no longer be used
*/
@Deprecated
@Nonnull
public Map<String,List<Long>> getChildCategoryURLMap();
/**
* Set the child category url map. This approach is inefficient,
* so its use is highly discouraged.
*
* @param childCategoryURLMap
* @deprecated This approach is inherently inefficient and should no longer be used
*/
@Deprecated
public void setChildCategoryURLMap(@Nonnull Map<String, List<Long>> childCategoryURLMap);
/**
* Gets the category images.
* @deprecated replaced by {@link #getCategoryMedia()}
*
* @return the category images
*/
@Deprecated
@Nonnull
public Map<String, String> getCategoryImages();
/**
* Gets the category image.
* @deprecated replaced by {@link #getCategoryMedia()}
*
* @param imageKey the image key
* @return the category image
*/
@Deprecated
@Nullable
public String getCategoryImage(@Nonnull String imageKey);
/**
* Sets the category images.
* @deprecated replaced by {@link #setCategoryMedia(java.util.Map)}
*
* @param categoryImages the category images
*/
@Deprecated
public void setCategoryImages(@Nonnull Map<String, String> categoryImages);
/**
* Gets the category media map. The key is of arbitrary meaning
* and the {@code Media} instance stores information about the
* media itself (image url, etc...)
*
* @return the category Media
*/
@Nonnull
public Map<String, Media> getCategoryMedia() ;
/**
* Sets the category media. The key is of arbitrary meaning
* and the {@code Media} instance stores information about the
* media itself (image url, etc...)
*
* @param categoryMedia the category media
*/
public void setCategoryMedia(@Nonnull Map<String, Media> categoryMedia);
/**
* Gets the long description.
*
* @return the long description
*/
@Nullable
public String getLongDescription();
/**
* Sets the long description.
*
* @param longDescription the new long description
*/
public void setLongDescription(@Nullable String longDescription);
/**
* Gets the featured products. Featured products are a special list
* of products you would like to showcase for this category.
*
* @return the featured products
*/
@Nonnull
public List<FeaturedProduct> getFeaturedProducts();
/**
* Sets the featured products. Featured products are a special list
* of products you would like to showcase for this category.
*
* @param featuredProducts the featured products
*/
public void setFeaturedProducts(@Nonnull List<FeaturedProduct> featuredProducts);
/**
* Returns a list of cross sale products that are related to this category.
*
* @return a list of cross sale products
*/
public List<RelatedProduct> getCrossSaleProducts();
/**
* Sets the cross sale products that are related to this category.
*
* @see #getCrossSaleProducts()
* @param crossSaleProducts
*/
public void setCrossSaleProducts(List<RelatedProduct> crossSaleProducts);
/**
 * Returns a list of up sale products that are related to this category.
 *
 * @return a list of up sale products
 */
public List<RelatedProduct> getUpSaleProducts();
/**
* Sets the upsale products that are related to this category.
*
* @see #getUpSaleProducts()
* @param upSaleProducts
*/
public void setUpSaleProducts(List<RelatedProduct> upSaleProducts);
/**
* Returns a list of the cross sale products in this category as well as
* all cross sale products in all parent categories of this category.
*
* @return the cumulative cross sale products
*/
public List<RelatedProduct> getCumulativeCrossSaleProducts();
/**
* Returns a list of the upsale products in this category as well as
* all upsale products in all parent categories of this category.
*
* @return the cumulative upsale products
*/
public List<RelatedProduct> getCumulativeUpSaleProducts();
/**
* Returns a list of the featured products in this category as well as
* all featured products in all parent categories of this category.
*
* @return the cumulative featured products
*/
public List<FeaturedProduct> getCumulativeFeaturedProducts();
/**
* Returns all of the SearchFacets that are directly associated with this Category
*
* @return related SearchFacets
*/
public List<CategorySearchFacet> getSearchFacets();
/**
* Sets the SearchFacets that are directly associated with this Category
*
* @param searchFacets
*/
public void setSearchFacets(List<CategorySearchFacet> searchFacets);
/**
* Sets the SearchFacets that should not be rendered by this Category. Typically, this will include
* facets from parent categories that do not apply to this category.
*
* @param excludedSearchFacets
*/
public void setExcludedSearchFacets(List<SearchFacet> excludedSearchFacets);
/**
* Gets the excluded SearchFacets
* @return the excluded SearchFacets
*/
public List<SearchFacet> getExcludedSearchFacets();
/**
* Returns a list of CategorySearchFacets that takes into consideration the search facets for this Category,
* the search facets for all parent categories, and the search facets that should be excluded from this
* Category. This method will order the resulting list based on the {@link CategorySearchFacet#getPosition()}
* method for each category level. That is, the facets on this Category will be ordered by their position
* relative to each other with the ordered parent facets after that, etc.
*
* @return the current active search facets for this category and all parent categories
*/
public List<CategorySearchFacet> getCumulativeSearchFacets();
/**
* Build category hierarchy by walking the default category tree up to the root category.
* If the passed in tree is null then create the initial list.
*
* @param currentHierarchy
* @return
*/
public List<Category> buildCategoryHierarchy(List<Category> currentHierarchy);
/**
* Build the full category hierarchy by walking up the default category tree and the all parent
* category tree.
*
* @param currentHierarchy
* @return the full hierarchy
*/
public List<Category> buildFullCategoryHierarchy(List<Category> currentHierarchy);
/**
* Gets the attributes for this {@link Category}. In smaller sites, using these attributes might be preferred to
* extending the domain object itself.
*
* @return
* @see {@link #getMappedCategoryAttributes()}
*/
public Map<String, CategoryAttribute> getCategoryAttributesMap();
public void setCategoryAttributesMap(Map<String, CategoryAttribute> categoryAttributes);
/**
* Gets the attributes for this {@link Category}. In smaller sites, using these attributes might be preferred to
* extending the domain object itself.
*
* @return
* @see {@link #getMappedCategoryAttributes()}
* @deprecated This will be replaced with {@link #getCategoryAttributesMap()} in 3.1.0.
*/
public List<CategoryAttribute> getCategoryAttributes();
/**
* Sets the attributes for this {@link Category}. In smaller sites, using these attributes might be preferred to
* extending the domain object and creating a new table to store custom properties.
*
* @return
* @deprecated This will be replaced with {@link #setCategoryAttributesMap()} in 3.1.0.
*/
public void setCategoryAttributes(List<CategoryAttribute> categoryAttributes);
/**
* Convenience method to get a {@link CategoryAttribute} by name
*
* @param name
* @return
* @see {@link #getCategoryAttributes()}, {@link #getMappedCategoryAttributes()}
* @deprecated This will be removed in 3.1.0
*/
public CategoryAttribute getCategoryAttributeByName(String name);
/**
* Convenience method to return the {@link CategoryAttribute}s for the {@link Category} in an easily-consumable
* form
*
* @return
* @deprecated This will be removed in 3.1.0
*/
public Map<String, CategoryAttribute> getMappedCategoryAttributes();
/**
* Returns the type of inventory for this category
* @return the {@link InventoryType} for this category
*/
public InventoryType getInventoryType();
/**
* Sets the type of inventory for this category
* @param inventoryType the {@link InventoryType} for this category
*/
public void setInventoryType(InventoryType inventoryType);
/**
* Returns the default fulfillment type for skus in this category. May be null.
* @return
*/
public FulfillmentType getFulfillmentType();
/**
* Sets the default fulfillment type for skus in this category. May return null.
* @param fulfillmentType
*/
public void setFulfillmentType(FulfillmentType fulfillmentType);
/**
* Gets the child categories. This list includes all categories, regardless
* of whether or not they are active.
*
* @deprecated use getAllChildCategoryXrefs() instead.
* @return the list of active and inactive child categories.
*/
@Nonnull
@Deprecated
public List<Category> getAllChildCategories();
/**
* Checks for child categories.
*
* @return true, if this category has any children (active or not)
*/
public boolean hasAllChildCategories();
/**
* Sets the list of child categories (active and inactive)
*
* @deprecated Use setAllChildCategoryXrefs() instead.
* @param childCategories the list of child categories
*/
@Deprecated
public void setAllChildCategories(@Nonnull List<Category> childCategories);
/**
* Gets the child categories. If child categories has not been previously
* set, then the list of active only categories will be returned.
*
* @deprecated Use getChildCategoryXrefs() instead.
* @return the list of active child categories
*/
@Deprecated
@Nonnull
public List<Category> getChildCategories();
/**
* Gets the child category ids. If child categories has not been previously
* set, then the list of active only categories will be returned. This method
* is optimized with Hydrated cache, which means that the algorithm required
* to harvest active child categories will not need to be rebuilt as long
* as the parent category (this category) is not evicted from second level cache.
*
* @return the list of active child category ids
*/
@Nonnull
public List<Long> getChildCategoryIds();
/**
* Sets the all child category ids. This should be a list
* of active only child categories.
*
* @param childCategoryIds the list of active child category ids.
*/
public void setChildCategoryIds(@Nonnull List<Long> childCategoryIds);
/**
* Checks for child categories.
*
* @return true, if this category contains any active child categories.
*/
public boolean hasChildCategories();
/**
* Sets the all child categories. This should be a list
* of active only child categories.
*
* @deprecated Use setChildCategoryXrefs() instead.
* @param childCategories the list of active child categories.
*/
@Deprecated
public void setChildCategories(@Nonnull List<Category> childCategories);
public List<CategoryXref> getAllChildCategoryXrefs();
public List<CategoryXref> getChildCategoryXrefs();
public void setChildCategoryXrefs(List<CategoryXref> childCategories);
public void setAllChildCategoryXrefs(List<CategoryXref> childCategories);
public List<CategoryXref> getAllParentCategoryXrefs();
public void setAllParentCategoryXrefs(List<CategoryXref> allParentCategories);
/**
* Retrieve all parent categories
*
* @deprecated Use getAllParentCategoryXrefs() instead.
* @return the list of parent categories
*/
@Deprecated
@Nonnull
public List<Category> getAllParentCategories();
/**
* Sets the list of parent categories
*
* @deprecated Use setAllParentCategoryXrefs() instead.
* @param allParentCategories the list of parent categories
*/
@Deprecated
public void setAllParentCategories(@Nonnull List<Category> allParentCategories);
public List<CategoryProductXref> getActiveProductXrefs();
public List<CategoryProductXref> getAllProductXrefs();
public void setAllProductXrefs(List<CategoryProductXref> allProducts);
/**
* Convenience method to retrieve all of this {@link Category}'s {@link Product}s filtered by
* active. If you want all of the {@link Product}s (whether inactive or not) consider using
* {@link #getAllProducts()}.
*
* @deprecated Use getActiveProductXrefs() instead.
* @return the list of active {@link Product}s for this {@link Category}
* @see {@link Product#isActive()}
*/
@Deprecated
public List<Product> getActiveProducts();
/**
* Retrieve all the {@code Product} instances associated with this
* category.
* <br />
* <b>Note:</b> this method does not take into account whether or not the {@link Product}s are active or not. If
* you need this functionality, use {@link #getActiveProducts()}
* @deprecated Use getAllProductXrefs() instead.
* @return the list of products associated with this category.
*/
@Deprecated
@Nonnull
public List<Product> getAllProducts();
/**
* Set all the {@code Product} instances associated with this
* category.
*
* @deprecated Use setAllProductXrefs() instead.
* @param allProducts the list of products to associate with this category
*/
@Deprecated
public void setAllProducts(@Nonnull List<Product> allProducts);
/**
* Returns the tax code of this category.
* @return taxCode
*/
public String getTaxCode();
/**
* Sets the tax code of this category.
* @param taxCode
*/
public void setTaxCode(String taxCode);
} | 0true
| core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_catalog_domain_Category.java |
/**
 * Base class for named map operations. Resolves the {@code MapService} and the
 * per-map {@code MapContainer} in {@link #beforeRun()} so that subclasses can
 * rely on both fields being populated before their operation body executes.
 */
public abstract class AbstractMapOperation extends AbstractNamedOperation {
    // Resolved in beforeRun(); transient because they are node-local and must not be serialized.
    protected transient MapService mapService;
    protected transient MapContainer mapContainer;
    public AbstractMapOperation() {
    }
    public AbstractMapOperation(String name) {
        super();
        // NOTE(review): assigns the inherited 'name' field directly instead of delegating
        // to a super(name) constructor -- confirm AbstractNamedOperation offers no such ctor.
        this.name = name;
    }
    @Override
    public final void beforeRun() throws Exception {
        // Resolve the service and the container for this map, then invoke the subclass hook.
        mapService = getService();
        mapContainer = mapService.getMapContainer(name);
        innerBeforeRun();
    }
    // Subclass hook; runs after mapService/mapContainer are available. Default is a no-op.
    public void innerBeforeRun() {
    }
    @Override
    public void afterRun() throws Exception {
    }
    @Override
    public boolean returnsResponse() {
        // Map operations answer the caller by default; subclasses may override.
        return true;
    }
}
| hazelcast_src_main_java_com_hazelcast_map_operation_AbstractMapOperation.java |
/**
 * Guice module that wires the doc-set cache for an index. Binds {@code DocSetCache}
 * to the implementation class named by the "index.cache.docset.type" setting
 * (defaulting to {@code SimpleDocSetCache}) in singleton scope.
 */
public class DocSetCacheModule extends AbstractModule {
    private final Settings settings;
    public DocSetCacheModule(Settings settings) {
        this.settings = settings;
    }
    @Override
    protected void configure() {
        // getAsClass resolves the setting value to a class; the package prefix and
        // "DocSetCache" suffix arguments allow short names in the setting.
        bind(DocSetCache.class)
                .to(settings.getAsClass("index.cache.docset.type", SimpleDocSetCache.class, "org.elasticsearch.index.cache.docset.", "DocSetCache"))
                .in(Scopes.SINGLETON);
    }
}
| src_main_java_org_elasticsearch_index_cache_docset_DocSetCacheModule.java |
28 | public class GetCommandProcessor extends MemcacheCommandProcessor<GetCommand> {
final boolean single;
private final ILogger logger;
public GetCommandProcessor(TextCommandService textCommandService, boolean single) {
super(textCommandService);
this.single = single;
logger = textCommandService.getNode().getLogger(this.getClass().getName());
}
public void handle(GetCommand getCommand) {
String key = null;
try {
key = URLDecoder.decode(getCommand.getKey(), "UTF-8");
} catch (UnsupportedEncodingException e) {
throw new HazelcastException(e);
}
String mapName = DEFAULT_MAP_NAME;
int index = key.indexOf(':');
if (index != -1) {
mapName = MAP_NAME_PRECEDER + key.substring(0, index);
key = key.substring(index + 1);
}
Object value = textCommandService.get(mapName, key);
MemcacheEntry entry = null;
if (value != null) {
if (value instanceof MemcacheEntry) {
entry = (MemcacheEntry) value;
} else if (value instanceof byte[]) {
entry = new MemcacheEntry(getCommand.getKey(), ((byte[]) value), 0);
} else if (value instanceof String) {
entry = new MemcacheEntry(getCommand.getKey(), stringToBytes((String) value), 0);
} else {
try {
entry = new MemcacheEntry(getCommand.getKey(), textCommandService.toByteArray(value), 0);
} catch (Exception e) {
logger.warning(e);
}
}
}
if (entry != null) {
textCommandService.incrementGetHitCount();
} else {
textCommandService.incrementGetMissCount();
}
getCommand.setValue(entry, single);
textCommandService.sendResponse(getCommand);
}
public void handleRejection(GetCommand getCommand) {
getCommand.setValue(null, single);
textCommandService.sendResponse(getCommand);
}
} | 0true
| hazelcast_src_main_java_com_hazelcast_ascii_memcache_GetCommandProcessor.java |
/**
 * LongValues variant backed by an ordinals table: per-document ordinals are
 * resolved to long values through {@link #getValueByOrd(long)}.
 */
public static abstract class WithOrdinals extends LongValues {
    protected final Docs ordinals;
    protected WithOrdinals(Ordinals.Docs ordinals) {
        // Multi-valued-ness is dictated by the underlying ordinals.
        super(ordinals.isMultiValued());
        this.ordinals = ordinals;
    }
    /**
     * Returns the associated ordinals instance.
     * @return the associated ordinals instance.
     */
    public Docs ordinals() {
        return this.ordinals;
    }
    /**
     * Returns the value for the given ordinal.
     * @param ord the ordinal to lookup.
     * @return a long value associated with the given ordinal.
     */
    public abstract long getValueByOrd(long ord);
    @Override
    public int setDocument(int docId) {
        this.docId = docId;
        // Positions the ordinals iterator on the doc and returns its value count.
        return ordinals.setDocument(docId);
    }
    @Override
    public long nextValue() {
        // Next ordinal for the current document, mapped to its long value.
        return getValueByOrd(ordinals.nextOrd());
    }
}
| src_main_java_org_elasticsearch_index_fielddata_LongValues.java |
// Validity predicate: accepts only non-null, non-negative integers.
return new Predicate<Integer>() {
    @Override
    public boolean apply(@Nullable Integer num) {
        return num!=null && num>=0;
    }
}; | 0true
| titan-core_src_main_java_com_thinkaurelius_titan_diskstorage_configuration_ConfigOption.java |
1,637 | @Component("blAdornedTargetCollectionFieldMetadataProvider")
@Scope("prototype")
public class AdornedTargetCollectionFieldMetadataProvider extends AdvancedCollectionFieldMetadataProvider {
private static final Log LOG = LogFactory.getLog(AdornedTargetCollectionFieldMetadataProvider.class);
/**
 * This provider handles a field only when it is explicitly annotated with
 * {@code @AdminPresentationAdornedTargetCollection}.
 */
protected boolean canHandleFieldForConfiguredMetadata(AddMetadataRequest addMetadataRequest, Map<String, FieldMetadata> metadata) {
    return addMetadataRequest.getRequestedField()
            .getAnnotation(AdminPresentationAdornedTargetCollection.class) != null;
}
/**
 * Type-metadata requests are handled under the same condition as configured
 * metadata: the field must carry {@code @AdminPresentationAdornedTargetCollection}.
 */
protected boolean canHandleFieldForTypeMetadata(AddMetadataFromFieldTypeRequest addMetadataFromFieldTypeRequest, Map<String, FieldMetadata> metadata) {
    return addMetadataFromFieldTypeRequest.getRequestedField()
            .getAnnotation(AdminPresentationAdornedTargetCollection.class) != null;
}
/**
 * Annotation overrides apply when the entity declares adorned-target overrides,
 * either through the newer {@code @AdminPresentationMergeOverrides} annotation or
 * through a non-empty adornedTargetCollections list on the legacy
 * {@code @AdminPresentationOverrides} annotation.
 */
protected boolean canHandleAnnotationOverride(OverrideViaAnnotationRequest overrideViaAnnotationRequest, Map<String, FieldMetadata> metadata) {
    AdminPresentationMergeOverrides mergeOverrides =
            overrideViaAnnotationRequest.getRequestedEntity().getAnnotation(AdminPresentationMergeOverrides.class);
    if (mergeOverrides != null) {
        return true;
    }
    AdminPresentationOverrides legacyOverrides =
            overrideViaAnnotationRequest.getRequestedEntity().getAnnotation(AdminPresentationOverrides.class);
    return legacyOverrides != null && !ArrayUtils.isEmpty(legacyOverrides.adornedTargetCollections());
}
/**
 * Builds adorned-target collection metadata for an annotated field. Declines the
 * request when the field lacks the annotation; otherwise constructs the override
 * from the annotation, populates the metadata map, and records class ownership.
 */
@Override
public FieldProviderResponse addMetadata(AddMetadataRequest addMetadataRequest, Map<String, FieldMetadata> metadata) {
    if (!canHandleFieldForConfiguredMetadata(addMetadataRequest, metadata)) {
        return FieldProviderResponse.NOT_HANDLED;
    }
    Field requestedField = addMetadataRequest.getRequestedField();
    AdminPresentationAdornedTargetCollection annotation =
            requestedField.getAnnotation(AdminPresentationAdornedTargetCollection.class);
    FieldInfo fieldInfo = buildFieldInfo(requestedField);
    FieldMetadataOverride collectionOverride = constructAdornedTargetCollectionMetadataOverride(annotation);
    buildAdornedTargetCollectionMetadata(addMetadataRequest.getParentClass(), addMetadataRequest.getTargetClass(),
            metadata, fieldInfo, collectionOverride, addMetadataRequest.getDynamicEntityDao());
    setClassOwnership(addMetadataRequest.getParentClass(), addMetadataRequest.getTargetClass(), metadata, fieldInfo);
    return FieldProviderResponse.HANDLED;
}
/**
 * Applies annotation-driven overrides to previously built adorned-target collection
 * metadata. Two mechanisms are supported: the legacy {@code @AdminPresentationOverrides}
 * (name-keyed adorned target entries) and {@code @AdminPresentationMergeOverrides}
 * (merge entries re-applied through the full metadata build).
 */
@Override
public FieldProviderResponse overrideViaAnnotation(OverrideViaAnnotationRequest overrideViaAnnotationRequest, Map<String, FieldMetadata> metadata) {
    if (!canHandleAnnotationOverride(overrideViaAnnotationRequest, metadata)) {
        return FieldProviderResponse.NOT_HANDLED;
    }
    // Legacy path: collect the per-property adorned target overrides by name ...
    Map<String, AdminPresentationAdornedTargetCollectionOverride> presentationAdornedTargetCollectionOverrides = new HashMap<String, AdminPresentationAdornedTargetCollectionOverride>();
    AdminPresentationOverrides myOverrides = overrideViaAnnotationRequest.getRequestedEntity().getAnnotation(AdminPresentationOverrides.class);
    if (myOverrides != null) {
        for (AdminPresentationAdornedTargetCollectionOverride myOverride : myOverrides.adornedTargetCollections()) {
            presentationAdornedTargetCollectionOverrides.put(myOverride.name(), myOverride);
        }
    }
    // ... then apply each override to every metadata entry whose key starts with the property name.
    for (String propertyName : presentationAdornedTargetCollectionOverrides.keySet()) {
        for (String key : metadata.keySet()) {
            if (key.startsWith(propertyName)) {
                buildAdminPresentationAdornedTargetCollectionOverride(overrideViaAnnotationRequest.getPrefix(), overrideViaAnnotationRequest.getParentExcluded(), metadata, presentationAdornedTargetCollectionOverrides, propertyName, key, overrideViaAnnotationRequest.getDynamicEntityDao());
            }
        }
    }
    // Merge-override path: re-run the metadata build with the merged override values.
    AdminPresentationMergeOverrides myMergeOverrides = overrideViaAnnotationRequest.getRequestedEntity().getAnnotation(AdminPresentationMergeOverrides.class);
    if (myMergeOverrides != null) {
        for (AdminPresentationMergeOverride override : myMergeOverrides.value()) {
            String propertyName = override.name();
            // Iterate over a copy because matching entries are replaced in 'metadata' below.
            Map<String, FieldMetadata> loopMap = new HashMap<String, FieldMetadata>();
            loopMap.putAll(metadata);
            for (Map.Entry<String, FieldMetadata> entry : loopMap.entrySet()) {
                // An empty override name means the override applies to every property.
                if (entry.getKey().startsWith(propertyName) || StringUtils.isEmpty(propertyName)) {
                    FieldMetadata targetMetadata = entry.getValue();
                    if (targetMetadata instanceof AdornedTargetCollectionMetadata) {
                        AdornedTargetCollectionMetadata serverMetadata = (AdornedTargetCollectionMetadata) targetMetadata;
                        if (serverMetadata.getTargetClass() != null) {
                            try {
                                // Recover the declaring classes and field via reflection so the
                                // metadata can be rebuilt with the override applied.
                                Class<?> targetClass = Class.forName(serverMetadata.getTargetClass());
                                Class<?> parentClass = null;
                                if (serverMetadata.getOwningClass() != null) {
                                    parentClass = Class.forName(serverMetadata.getOwningClass());
                                }
                                String fieldName = serverMetadata.getFieldName();
                                Field field = overrideViaAnnotationRequest.getDynamicEntityDao().getFieldManager()
                                    .getField(targetClass, fieldName);
                                Map<String, FieldMetadata> temp = new HashMap<String, FieldMetadata>(1);
                                temp.put(field.getName(), serverMetadata);
                                FieldInfo info = buildFieldInfo(field);
                                FieldMetadataOverride fieldMetadataOverride = overrideAdornedTargetMergeMetadata(override);
                                // Skip entries that are excluded server-side unless the override explicitly un-excludes them.
                                if (serverMetadata.getExcluded() != null && serverMetadata.getExcluded() &&
                                        (fieldMetadataOverride.getExcluded() == null || fieldMetadataOverride.getExcluded())) {
                                    continue;
                                }
                                buildAdornedTargetCollectionMetadata(parentClass, targetClass, temp, info,
                                        fieldMetadataOverride,
                                        overrideViaAnnotationRequest.getDynamicEntityDao());
                                serverMetadata = (AdornedTargetCollectionMetadata) temp.get(field.getName());
                                metadata.put(entry.getKey(), serverMetadata);
                            } catch (Exception e) {
                                throw new RuntimeException(e);
                            }
                        }
                    }
                }
            }
        }
    }
    return FieldProviderResponse.HANDLED;
}
/**
 * Applies XML-configured overrides to adorned-target collection metadata. For each
 * override property that exactly matches a metadata key, the metadata is rebuilt
 * through {@code buildAdornedTargetCollectionMetadata} with the XML override values,
 * and the entry is marked excluded when the parent is excluded.
 *
 * Fix: the debug log message previously concatenated the key directly into the word
 * "because" ("...Excluding keybecause parent..."); a separating space is added.
 */
@Override
public FieldProviderResponse overrideViaXml(OverrideViaXmlRequest overrideViaXmlRequest, Map<String, FieldMetadata> metadata) {
    Map<String, FieldMetadataOverride> overrides = getTargetedOverride(overrideViaXmlRequest.getDynamicEntityDao(), overrideViaXmlRequest.getRequestedConfigKey(), overrideViaXmlRequest.getRequestedCeilingEntity());
    if (overrides != null) {
        for (String propertyName : overrides.keySet()) {
            final FieldMetadataOverride localMetadata = overrides.get(propertyName);
            for (String key : metadata.keySet()) {
                if (key.equals(propertyName)) {
                    try {
                        if (metadata.get(key) instanceof AdornedTargetCollectionMetadata) {
                            AdornedTargetCollectionMetadata serverMetadata = (AdornedTargetCollectionMetadata) metadata.get(key);
                            if (serverMetadata.getTargetClass() != null) {
                                // Recover the declaring classes and field via reflection so the
                                // metadata can be rebuilt with the XML override applied.
                                Class<?> targetClass = Class.forName(serverMetadata.getTargetClass());
                                Class<?> parentClass = null;
                                if (serverMetadata.getOwningClass() != null) {
                                    parentClass = Class.forName(serverMetadata.getOwningClass());
                                }
                                String fieldName = serverMetadata.getFieldName();
                                Field field = overrideViaXmlRequest.getDynamicEntityDao().getFieldManager().getField(targetClass, fieldName);
                                Map<String, FieldMetadata> temp = new HashMap<String, FieldMetadata>(1);
                                temp.put(field.getName(), serverMetadata);
                                FieldInfo info = buildFieldInfo(field);
                                buildAdornedTargetCollectionMetadata(parentClass, targetClass, temp, info, localMetadata, overrideViaXmlRequest.getDynamicEntityDao());
                                serverMetadata = (AdornedTargetCollectionMetadata) temp.get(field.getName());
                                metadata.put(key, serverMetadata);
                                if (overrideViaXmlRequest.getParentExcluded()) {
                                    if (LOG.isDebugEnabled()) {
                                        LOG.debug("applyAdornedTargetCollectionMetadataOverrides:Excluding " + key + " because parent is marked as excluded.");
                                    }
                                    serverMetadata.setExcluded(true);
                                }
                            }
                        }
                    } catch (Exception e) {
                        throw new RuntimeException(e);
                    }
                }
            }
        }
    }
    return FieldProviderResponse.HANDLED;
}
/**
 * Extends the base type-driven metadata build with adorned-target specifics:
 * when the collection ceiling entity was not explicitly configured, it is derived
 * from the field's returned class and propagated to the AdornedTargetList
 * persistence perspective item.
 */
@Override
public FieldProviderResponse addMetadataFromFieldType(AddMetadataFromFieldTypeRequest addMetadataFromFieldTypeRequest, Map<String, FieldMetadata> metadata) {
    if (!canHandleFieldForTypeMetadata(addMetadataFromFieldTypeRequest, metadata)) {
        return FieldProviderResponse.NOT_HANDLED;
    }
    super.addMetadataFromFieldType(addMetadataFromFieldTypeRequest, metadata);
    // Add additional adorned target support on top of the base handling.
    AdornedTargetCollectionMetadata fieldMetadata = (AdornedTargetCollectionMetadata) addMetadataFromFieldTypeRequest.getPresentationAttribute();
    if (StringUtils.isEmpty(fieldMetadata.getCollectionCeilingEntity())) {
        // Default the ceiling entity to the collection's element type and keep the
        // adorned target list's entity classname in sync with it.
        fieldMetadata.setCollectionCeilingEntity(addMetadataFromFieldTypeRequest.getType().getReturnedClass().getName());
        AdornedTargetList targetList = ((AdornedTargetList) fieldMetadata.getPersistencePerspective().
            getPersistencePerspectiveItems().get(PersistencePerspectiveItemType.ADORNEDTARGETLIST));
        targetList.setAdornedTargetEntityClassname(fieldMetadata.getCollectionCeilingEntity());
    }
    return FieldProviderResponse.HANDLED;
}
/**
 * Translates an {@code @AdminPresentationMergeOverride} into a FieldMetadataOverride
 * by dispatching on each merge entry's property-type key. String-typed entries use
 * overrideValue() directly; boolean/int entries fall back to the typed override
 * accessor when the string value is empty, otherwise parse the string.
 * Unrecognized keys are logged at debug level and ignored.
 */
protected FieldMetadataOverride overrideAdornedTargetMergeMetadata(AdminPresentationMergeOverride merge) {
    FieldMetadataOverride fieldMetadataOverride = new FieldMetadataOverride();
    Map<String, AdminPresentationMergeEntry> overrideValues = getAdminPresentationEntries(merge.mergeEntries());
    for (Map.Entry<String, AdminPresentationMergeEntry> entry : overrideValues.entrySet()) {
        String stringValue = entry.getValue().overrideValue();
        if (entry.getKey().equals(PropertyType.AdminPresentationAdornedTargetCollection.CURRENCYCODEFIELD)) {
            fieldMetadataOverride.setCurrencyCodeField(stringValue);
        } else if (entry.getKey().equals(PropertyType.AdminPresentationAdornedTargetCollection.CUSTOMCRITERIA)) {
            fieldMetadataOverride.setCustomCriteria(entry.getValue().stringArrayOverrideValue());
        } else if (entry.getKey().equals(PropertyType.AdminPresentationAdornedTargetCollection.EXCLUDED)) {
            fieldMetadataOverride.setExcluded(StringUtils.isEmpty(stringValue)?entry.getValue().booleanOverrideValue():
                    Boolean.parseBoolean(stringValue));
        } else if (entry.getKey().equals(PropertyType.AdminPresentationAdornedTargetCollection.FRIENDLYNAME)) {
            fieldMetadataOverride.setFriendlyName(stringValue);
        } else if (entry.getKey().equals(PropertyType.AdminPresentationAdornedTargetCollection.GRIDVISIBLEFIELDS)) {
            fieldMetadataOverride.setGridVisibleFields(entry.getValue().stringArrayOverrideValue());
        } else if (entry.getKey().equals(PropertyType.AdminPresentationAdornedTargetCollection.IGNOREADORNEDPROPERTIES)) {
            fieldMetadataOverride.setIgnoreAdornedProperties(StringUtils.isEmpty(stringValue)?entry.getValue().booleanOverrideValue():
                    Boolean.parseBoolean(stringValue));
        } else if (entry.getKey().equals(PropertyType.AdminPresentationAdornedTargetCollection.JOINENTITYCLASS)) {
            fieldMetadataOverride.setJoinEntityClass(stringValue);
        } else if (entry.getKey().equals(PropertyType.AdminPresentationAdornedTargetCollection.MAINTAINEDADORNEDTARGETFIELDS)) {
            fieldMetadataOverride.setMaintainedAdornedTargetFields(entry.getValue().stringArrayOverrideValue());
        } else if (entry.getKey().equals(PropertyType.AdminPresentationAdornedTargetCollection.OPERATIONTYPES)) {
            // Operation types fan out into five distinct override fields.
            AdminPresentationOperationTypes operationType = entry.getValue().operationTypes();
            fieldMetadataOverride.setAddType(operationType.addType());
            fieldMetadataOverride.setRemoveType(operationType.removeType());
            fieldMetadataOverride.setUpdateType(operationType.updateType());
            fieldMetadataOverride.setFetchType(operationType.fetchType());
            fieldMetadataOverride.setInspectType(operationType.inspectType());
        } else if (entry.getKey().equals(PropertyType.AdminPresentationAdornedTargetCollection.ORDER)) {
            fieldMetadataOverride.setOrder(StringUtils.isEmpty(stringValue) ? entry.getValue().intOverrideValue() :
                    Integer.parseInt(stringValue));
        } else if (entry.getKey().equals(PropertyType.AdminPresentationAdornedTargetCollection.PARENTOBJECTIDPROPERTY)) {
            fieldMetadataOverride.setParentObjectIdProperty(stringValue);
        } else if (entry.getKey().equals(PropertyType.AdminPresentationAdornedTargetCollection.PARENTOBJECTPROPERTY)) {
            fieldMetadataOverride.setParentObjectProperty(stringValue);
        } else if (entry.getKey().equals(PropertyType.AdminPresentationAdornedTargetCollection.READONLY)) {
            fieldMetadataOverride.setReadOnly(StringUtils.isEmpty(stringValue) ? entry.getValue()
                    .booleanOverrideValue() :
                    Boolean.parseBoolean(stringValue));
        } else if (entry.getKey().equals(PropertyType.AdminPresentationAdornedTargetCollection.SECURITYLEVEL)) {
            fieldMetadataOverride.setSecurityLevel(stringValue);
        } else if (entry.getKey().equals(PropertyType.AdminPresentationAdornedTargetCollection.SHOWIFPROPERTY)) {
            fieldMetadataOverride.setShowIfProperty(stringValue);
        } else if (entry.getKey().equals(PropertyType.AdminPresentationAdornedTargetCollection.SORTASCENDING)) {
            fieldMetadataOverride.setSortAscending(StringUtils.isEmpty(stringValue) ? entry.getValue()
                    .booleanOverrideValue() :
                    Boolean.parseBoolean(stringValue));
        } else if (entry.getKey().equals(PropertyType.AdminPresentationAdornedTargetCollection.SORTPROPERTY)) {
            fieldMetadataOverride.setSortProperty(stringValue);
        } else if (entry.getKey().equals(PropertyType.AdminPresentationAdornedTargetCollection.TAB)) {
            fieldMetadataOverride.setTab(stringValue);
        } else if (entry.getKey().equals(PropertyType.AdminPresentationAdornedTargetCollection.TABORDER)) {
            fieldMetadataOverride.setTabOrder(StringUtils.isEmpty(stringValue) ? entry.getValue()
                    .intOverrideValue() :
                    Integer.parseInt(stringValue));
        } else if (entry.getKey().equals(PropertyType.AdminPresentationAdornedTargetCollection.TARGETOBJECTIDPROPERTY)) {
            fieldMetadataOverride.setTargetObjectIdProperty(stringValue);
        } else if (entry.getKey().equals(PropertyType.AdminPresentationAdornedTargetCollection.TARGETOBJECTPROPERTY)) {
            fieldMetadataOverride.setTargetObjectProperty(stringValue);
        } else if (entry.getKey().equals(PropertyType.AdminPresentationAdornedTargetCollection.USESERVERSIDEINSPECTIONCACHE)) {
            fieldMetadataOverride.setUseServerSideInspectionCache(StringUtils.isEmpty(stringValue) ? entry
                    .getValue().booleanOverrideValue() :
                    Boolean.parseBoolean(stringValue));
        } else {
            if (LOG.isDebugEnabled()) {
                LOG.debug("Unrecognized type: " + entry.getKey() + ". Not setting on adorned target field.");
            }
        }
    }
    return fieldMetadataOverride;
}
/**
 * Applies an {@link AdminPresentationAdornedTargetCollectionOverride} configured for {@code propertyName}
 * to the already-merged property metadata. When the override declares the property excluded, the merged
 * metadata is flagged excluded. Otherwise, a fresh {@link AdornedTargetCollectionMetadata} is constructed
 * from the override annotation (discarding the previous metadata values) and replaces the existing entry
 * in {@code mergedProperties}.
 *
 * @param prefix prefix applied to {@code key} to form the fully qualified property path
 * @param isParentExcluded whether the enclosing property was itself excluded; propagated to the result
 * @param mergedProperties map of property key to metadata; updated in place
 * @param presentationAdornedTargetCollectionOverrides overrides harvested from configuration
 * @param propertyName the property the override targets
 * @param key the merged-properties key currently being examined
 * @param dynamicEntityDao used for reflective field lookups when rebuilding metadata
 */
protected void buildAdminPresentationAdornedTargetCollectionOverride(String prefix, Boolean isParentExcluded, Map<String, FieldMetadata> mergedProperties, Map<String, AdminPresentationAdornedTargetCollectionOverride> presentationAdornedTargetCollectionOverrides, String propertyName, String key, DynamicEntityDao dynamicEntityDao) {
    AdminPresentationAdornedTargetCollectionOverride override = presentationAdornedTargetCollectionOverrides.get(propertyName);
    if (override != null) {
        AdminPresentationAdornedTargetCollection annot = override.value();
        if (annot != null) {
            String testKey = prefix + key;
            if ((testKey.startsWith(propertyName + ".") || testKey.equals(propertyName)) && annot.excluded()) {
                FieldMetadata metadata = mergedProperties.get(key);
                if (LOG.isDebugEnabled()) {
                    // fixed: log message was missing spaces around the concatenated values
                    LOG.debug("buildAdminPresentationAdornedTargetCollectionOverride:Excluding " + key + " because an override annotation declared " + testKey + " to be excluded");
                }
                metadata.setExcluded(true);
                return;
            }
            if ((testKey.startsWith(propertyName + ".") || testKey.equals(propertyName)) && !annot.excluded()) {
                FieldMetadata metadata = mergedProperties.get(key);
                if (!isParentExcluded) {
                    if (LOG.isDebugEnabled()) {
                        // fixed: log message was missing a space before "because"
                        LOG.debug("buildAdminPresentationAdornedTargetCollectionOverride:Showing " + key + " because an override annotation declared " + testKey + " to not be excluded");
                    }
                    metadata.setExcluded(false);
                }
            }
            if (!(mergedProperties.get(key) instanceof AdornedTargetCollectionMetadata)) {
                return;
            }
            AdornedTargetCollectionMetadata serverMetadata = (AdornedTargetCollectionMetadata) mergedProperties.get(key);
            if (serverMetadata.getTargetClass() != null) {
                try {
                    Class<?> targetClass = Class.forName(serverMetadata.getTargetClass());
                    Class<?> parentClass = null;
                    if (serverMetadata.getOwningClass() != null) {
                        parentClass = Class.forName(serverMetadata.getOwningClass());
                    }
                    String fieldName = serverMetadata.getFieldName();
                    Field field = dynamicEntityDao.getFieldManager().getField(targetClass, fieldName);
                    FieldMetadataOverride localMetadata = constructAdornedTargetCollectionMetadataOverride(annot);
                    //do not include the previous metadata - we want to construct a fresh metadata from the override annotation
                    Map<String, FieldMetadata> temp = new HashMap<String, FieldMetadata>(1);
                    FieldInfo info = buildFieldInfo(field);
                    buildAdornedTargetCollectionMetadata(parentClass, targetClass, temp, info, localMetadata, dynamicEntityDao);
                    AdornedTargetCollectionMetadata result = (AdornedTargetCollectionMetadata) temp.get(field.getName());
                    result.setInheritedFromType(serverMetadata.getInheritedFromType());
                    result.setAvailableToTypes(serverMetadata.getAvailableToTypes());
                    mergedProperties.put(key, result);
                    if (isParentExcluded) {
                        if (LOG.isDebugEnabled()) {
                            LOG.debug("buildAdminPresentationAdornedTargetCollectionOverride:Excluding " + key + " because the parent was excluded");
                        }
                        // BUGFIX: exclude the freshly constructed metadata that was just placed in
                        // mergedProperties. Previously this flag was set on the stale serverMetadata
                        // instance that had already been replaced above, so the exclusion was lost.
                        result.setExcluded(true);
                    }
                } catch (Exception e) {
                    throw new RuntimeException(e);
                }
            }
        }
    }
}
/**
 * Translates every attribute of an {@link AdminPresentationAdornedTargetCollection} annotation into a
 * freshly constructed {@link FieldMetadataOverride}.
 *
 * @param adornedTargetCollection the annotation to translate; must not be null
 * @return an override populated from all annotation attributes
 * @throws IllegalArgumentException if the annotation is null
 */
protected FieldMetadataOverride constructAdornedTargetCollectionMetadataOverride(AdminPresentationAdornedTargetCollection adornedTargetCollection) {
    if (adornedTargetCollection == null) {
        throw new IllegalArgumentException("AdminPresentationAdornedTargetCollection annotation not found on field.");
    }
    FieldMetadataOverride fieldMetadataOverride = new FieldMetadataOverride();
    // collection structure and linkage
    fieldMetadataOverride.setGridVisibleFields(adornedTargetCollection.gridVisibleFields());
    fieldMetadataOverride.setIgnoreAdornedProperties(adornedTargetCollection.ignoreAdornedProperties());
    fieldMetadataOverride.setMaintainedAdornedTargetFields(adornedTargetCollection.maintainedAdornedTargetFields());
    fieldMetadataOverride.setParentObjectProperty(adornedTargetCollection.parentObjectProperty());
    fieldMetadataOverride.setParentObjectIdProperty(adornedTargetCollection.parentObjectIdProperty());
    fieldMetadataOverride.setTargetObjectProperty(adornedTargetCollection.targetObjectProperty());
    fieldMetadataOverride.setTargetObjectIdProperty(adornedTargetCollection.targetObjectIdProperty());
    fieldMetadataOverride.setJoinEntityClass(adornedTargetCollection.joinEntityClass());
    // sorting
    fieldMetadataOverride.setSortAscending(adornedTargetCollection.sortAscending());
    fieldMetadataOverride.setSortProperty(adornedTargetCollection.sortProperty());
    // presentation
    fieldMetadataOverride.setExcluded(adornedTargetCollection.excluded());
    fieldMetadataOverride.setFriendlyName(adornedTargetCollection.friendlyName());
    fieldMetadataOverride.setReadOnly(adornedTargetCollection.readOnly());
    fieldMetadataOverride.setOrder(adornedTargetCollection.order());
    fieldMetadataOverride.setTab(adornedTargetCollection.tab());
    fieldMetadataOverride.setTabOrder(adornedTargetCollection.tabOrder());
    fieldMetadataOverride.setSecurityLevel(adornedTargetCollection.securityLevel());
    fieldMetadataOverride.setShowIfProperty(adornedTargetCollection.showIfProperty());
    fieldMetadataOverride.setCurrencyCodeField(adornedTargetCollection.currencyCodeField());
    // persistence behavior
    fieldMetadataOverride.setCustomCriteria(adornedTargetCollection.customCriteria());
    fieldMetadataOverride.setUseServerSideInspectionCache(adornedTargetCollection.useServerSideInspectionCache());
    fieldMetadataOverride.setAddType(adornedTargetCollection.operationTypes().addType());
    fieldMetadataOverride.setFetchType(adornedTargetCollection.operationTypes().fetchType());
    fieldMetadataOverride.setRemoveType(adornedTargetCollection.operationTypes().removeType());
    fieldMetadataOverride.setUpdateType(adornedTargetCollection.operationTypes().updateType());
    fieldMetadataOverride.setInspectType(adornedTargetCollection.operationTypes().inspectType());
    return fieldMetadataOverride;
}
/**
 * Builds (or refreshes) the {@link AdornedTargetCollectionMetadata} for the given collection field and
 * stores it in {@code attributes} under the field name. If metadata for the field already exists in
 * {@code attributes} it is updated in place; otherwise a fresh instance is created. Override values from
 * {@code adornedTargetCollectionMetadata} are only applied when non-null, so unset override properties
 * leave previously computed values intact.
 *
 * @param parentClass owning class of the field; may be null, in which case {@code targetClass} is used
 * @param targetClass the class that declares the field
 * @param attributes map of field name to metadata being accumulated; this method writes its result here
 * @param field descriptor for the collection field being processed
 * @param adornedTargetCollectionMetadata override values harvested from annotation/xml configuration
 * @param dynamicEntityDao used for reflective field lookups on the join entity
 */
protected void buildAdornedTargetCollectionMetadata(Class<?> parentClass, Class<?> targetClass, Map<String, FieldMetadata> attributes, FieldInfo field, FieldMetadataOverride adornedTargetCollectionMetadata, DynamicEntityDao dynamicEntityDao) {
    AdornedTargetCollectionMetadata serverMetadata = (AdornedTargetCollectionMetadata) attributes.get(field.getName());
    Class<?> resolvedClass = parentClass==null?targetClass:parentClass;
    AdornedTargetCollectionMetadata metadata;
    if (serverMetadata != null) {
        // reuse and update the previously built metadata in place
        metadata = serverMetadata;
    } else {
        metadata = new AdornedTargetCollectionMetadata();
    }
    metadata.setTargetClass(targetClass.getName());
    metadata.setFieldName(field.getName());
    if (adornedTargetCollectionMetadata.getReadOnly() != null) {
        // readOnly is the inverse of mutable
        metadata.setMutable(!adornedTargetCollectionMetadata.getReadOnly());
    }
    if (adornedTargetCollectionMetadata.getShowIfProperty()!=null) {
        metadata.setShowIfProperty(adornedTargetCollectionMetadata.getShowIfProperty());
    }
    // default every CRUD operation to the adorned-target-list flavor (inspect stays BASIC),
    // then layer any explicit operation-type overrides on top
    org.broadleafcommerce.openadmin.dto.OperationTypes dtoOperationTypes = new org.broadleafcommerce.openadmin.dto.OperationTypes(OperationType.ADORNEDTARGETLIST, OperationType.ADORNEDTARGETLIST, OperationType.ADORNEDTARGETLIST, OperationType.ADORNEDTARGETLIST, OperationType.BASIC);
    if (adornedTargetCollectionMetadata.getAddType() != null) {
        dtoOperationTypes.setAddType(adornedTargetCollectionMetadata.getAddType());
    }
    if (adornedTargetCollectionMetadata.getRemoveType() != null) {
        dtoOperationTypes.setRemoveType(adornedTargetCollectionMetadata.getRemoveType());
    }
    if (adornedTargetCollectionMetadata.getFetchType() != null) {
        dtoOperationTypes.setFetchType(adornedTargetCollectionMetadata.getFetchType());
    }
    if (adornedTargetCollectionMetadata.getInspectType() != null) {
        dtoOperationTypes.setInspectType(adornedTargetCollectionMetadata.getInspectType());
    }
    if (adornedTargetCollectionMetadata.getUpdateType() != null) {
        dtoOperationTypes.setUpdateType(adornedTargetCollectionMetadata.getUpdateType());
    }
    //don't allow additional non-persistent properties or additional foreign keys for an advanced collection datasource - they don't make sense in this context
    PersistencePerspective persistencePerspective;
    if (serverMetadata != null) {
        persistencePerspective = metadata.getPersistencePerspective();
        persistencePerspective.setOperationTypes(dtoOperationTypes);
    } else {
        persistencePerspective = new PersistencePerspective(dtoOperationTypes, new String[]{}, new ForeignKey[]{});
        metadata.setPersistencePerspective(persistencePerspective);
    }
    // resolve parentObjectProperty: previous server metadata, then explicit override,
    // then the mappedBy of @OneToMany / @ManyToMany
    String parentObjectProperty = null;
    if (serverMetadata != null) {
        parentObjectProperty = ((AdornedTargetList) serverMetadata.getPersistencePerspective().getPersistencePerspectiveItems().get(PersistencePerspectiveItemType.ADORNEDTARGETLIST)).getLinkedObjectPath();
    }
    if (!StringUtils.isEmpty(adornedTargetCollectionMetadata.getParentObjectProperty())) {
        parentObjectProperty = adornedTargetCollectionMetadata.getParentObjectProperty();
    }
    if (parentObjectProperty == null && !StringUtils.isEmpty(field.getOneToManyMappedBy())) {
        parentObjectProperty = field.getOneToManyMappedBy();
    }
    if (parentObjectProperty == null && !StringUtils.isEmpty(field.getManyToManyMappedBy())) {
        parentObjectProperty = field.getManyToManyMappedBy();
    }
    if (StringUtils.isEmpty(parentObjectProperty)) {
        throw new IllegalArgumentException("Unable to infer a parentObjectProperty for the @AdminPresentationAdornedTargetCollection annotated field("+field.getName()+"). If not using the mappedBy property of @OneToMany or @ManyToMany, please make sure to explicitly define the parentObjectProperty property");
    }
    // sortProperty: previous server metadata overridden by any explicit value
    String sortProperty = null;
    if (serverMetadata != null) {
        sortProperty = ((AdornedTargetList) serverMetadata.getPersistencePerspective().getPersistencePerspectiveItems().get(PersistencePerspectiveItemType.ADORNEDTARGETLIST)).getSortField();
    }
    if (!StringUtils.isEmpty(adornedTargetCollectionMetadata.getSortProperty())) {
        sortProperty = adornedTargetCollectionMetadata.getSortProperty();
    }
    metadata.setParentObjectClass(resolvedClass.getName());
    if (adornedTargetCollectionMetadata.getMaintainedAdornedTargetFields() != null) {
        metadata.setMaintainedAdornedTargetFields(adornedTargetCollectionMetadata.getMaintainedAdornedTargetFields());
    }
    if (adornedTargetCollectionMetadata.getGridVisibleFields() != null) {
        metadata.setGridVisibleFields(adornedTargetCollectionMetadata.getGridVisibleFields());
    }
    // parentObjectIdProperty: previous server metadata overridden by any explicit value
    String parentObjectIdProperty = null;
    if (serverMetadata != null) {
        parentObjectIdProperty = ((AdornedTargetList) serverMetadata.getPersistencePerspective().getPersistencePerspectiveItems().get(PersistencePerspectiveItemType.ADORNEDTARGETLIST)).getLinkedIdProperty();
    }
    if (adornedTargetCollectionMetadata.getParentObjectIdProperty()!=null) {
        parentObjectIdProperty = adornedTargetCollectionMetadata.getParentObjectIdProperty();
    }
    // targetObjectProperty: previous server metadata overridden by any explicit value
    String targetObjectProperty = null;
    if (serverMetadata != null) {
        targetObjectProperty = ((AdornedTargetList) serverMetadata.getPersistencePerspective().getPersistencePerspectiveItems().get(PersistencePerspectiveItemType.ADORNEDTARGETLIST)).getTargetObjectPath();
    }
    if (adornedTargetCollectionMetadata.getTargetObjectProperty()!=null) {
        targetObjectProperty = adornedTargetCollectionMetadata.getTargetObjectProperty();
    }
    // joinEntityClass: previous server metadata overridden by any explicit value
    String joinEntityClass = null;
    if (serverMetadata != null) {
        joinEntityClass = ((AdornedTargetList) serverMetadata.getPersistencePerspective().getPersistencePerspectiveItems().get(PersistencePerspectiveItemType.ADORNEDTARGETLIST)).getJoinEntityClass();
    }
    if (adornedTargetCollectionMetadata.getJoinEntityClass() != null) {
        joinEntityClass = adornedTargetCollectionMetadata.getJoinEntityClass();
    }
    // determine the concrete join-entity (collection element) class: first via entity configuration
    // from the collection's generic type, then via the JPA targetEntity attributes, and finally an
    // explicit joinEntityClass takes precedence over all of the above
    Class<?> collectionTarget = null;
    try {
        checkCeiling: {
            try {
                ParameterizedType pt = (ParameterizedType) field.getGenericType();
                java.lang.reflect.Type collectionType = pt.getActualTypeArguments()[0];
                String ceilingEntityName = ((Class<?>) collectionType).getName();
                collectionTarget = entityConfiguration.lookupEntityClass(ceilingEntityName);
                break checkCeiling;
            } catch (NoSuchBeanDefinitionException e) {
                // We weren't successful at looking at entity configuration to find the type of this collection.
                // We will continue and attempt to find it via the Hibernate annotations
            }
            if (!StringUtils.isEmpty(field.getOneToManyTargetEntity()) && !void.class.getName().equals(field.getOneToManyTargetEntity())) {
                collectionTarget = Class.forName(field.getOneToManyTargetEntity());
                break checkCeiling;
            }
            if (!StringUtils.isEmpty(field.getManyToManyTargetEntity()) && !void.class.getName().equals(field.getManyToManyTargetEntity())) {
                collectionTarget = Class.forName(field.getManyToManyTargetEntity());
                break checkCeiling;
            }
        }
        if (StringUtils.isNotBlank(joinEntityClass)) {
            collectionTarget = Class.forName(joinEntityClass);
        }
    } catch (ClassNotFoundException e) {
        throw new RuntimeException(e);
    }
    if (collectionTarget == null) {
        throw new IllegalArgumentException("Unable to infer the type of the collection from the targetEntity property of a OneToMany or ManyToMany collection.");
    }
    // the ceiling entity is taken from the @ManyToOne targetEntity on the join entity's target field,
    // falling back to the declared field type
    Field collectionTargetField = dynamicEntityDao.getFieldManager().getField(collectionTarget, targetObjectProperty);
    ManyToOne manyToOne = collectionTargetField.getAnnotation(ManyToOne.class);
    String ceiling = null;
    checkCeiling: {
        if (manyToOne != null && manyToOne.targetEntity() != void.class) {
            ceiling = manyToOne.targetEntity().getName();
            break checkCeiling;
        }
        ceiling = collectionTargetField.getType().getName();
    }
    if (!StringUtils.isEmpty(ceiling)) {
        metadata.setCollectionCeilingEntity(ceiling);
    }
    // targetObjectIdProperty: previous server metadata overridden by any explicit value
    String targetObjectIdProperty = null;
    if (serverMetadata != null) {
        targetObjectIdProperty = ((AdornedTargetList) serverMetadata.getPersistencePerspective().getPersistencePerspectiveItems().get(PersistencePerspectiveItemType.ADORNEDTARGETLIST)).getTargetIdProperty();
    }
    if (adornedTargetCollectionMetadata.getTargetObjectIdProperty()!=null) {
        targetObjectIdProperty = adornedTargetCollectionMetadata.getTargetObjectIdProperty();
    }
    // sort direction defaults to ascending
    Boolean isAscending = true;
    if (serverMetadata != null) {
        isAscending = ((AdornedTargetList) serverMetadata.getPersistencePerspective().getPersistencePerspectiveItems().get(PersistencePerspectiveItemType.ADORNEDTARGETLIST)).getSortAscending();
    }
    if (adornedTargetCollectionMetadata.isSortAscending()!=null) {
        isAscending = adornedTargetCollectionMetadata.isSortAscending();
    }
    // either refresh the existing AdornedTargetList perspective item or register a new one
    if (serverMetadata != null) {
        AdornedTargetList adornedTargetList = (AdornedTargetList) serverMetadata.getPersistencePerspective().getPersistencePerspectiveItems().get(PersistencePerspectiveItemType.ADORNEDTARGETLIST);
        adornedTargetList.setCollectionFieldName(field.getName());
        adornedTargetList.setLinkedObjectPath(parentObjectProperty);
        adornedTargetList.setLinkedIdProperty(parentObjectIdProperty);
        adornedTargetList.setTargetObjectPath(targetObjectProperty);
        adornedTargetList.setTargetIdProperty(targetObjectIdProperty);
        adornedTargetList.setJoinEntityClass(joinEntityClass);
        adornedTargetList.setAdornedTargetEntityClassname(collectionTarget.getName());
        adornedTargetList.setSortField(sortProperty);
        adornedTargetList.setSortAscending(isAscending);
        adornedTargetList.setMutable(metadata.isMutable());
    } else {
        AdornedTargetList adornedTargetList = new AdornedTargetList(field.getName(), parentObjectProperty, parentObjectIdProperty, targetObjectProperty, targetObjectIdProperty, collectionTarget.getName(), sortProperty, isAscending);
        adornedTargetList.setJoinEntityClass(joinEntityClass);
        adornedTargetList.setMutable(metadata.isMutable());
        persistencePerspective.addPersistencePerspectiveItem(PersistencePerspectiveItemType.ADORNEDTARGETLIST, adornedTargetList);
    }
    // remaining simple presentation overrides, each applied only when explicitly configured
    if (adornedTargetCollectionMetadata.getExcluded() != null) {
        if (LOG.isDebugEnabled()) {
            if (adornedTargetCollectionMetadata.getExcluded()) {
                LOG.debug("buildAdornedTargetCollectionMetadata:Excluding " + field.getName() + " because it was explicitly declared in config");
            } else {
                LOG.debug("buildAdornedTargetCollectionMetadata:Showing " + field.getName() + " because it was explicitly declared in config");
            }
        }
        metadata.setExcluded(adornedTargetCollectionMetadata.getExcluded());
    }
    if (adornedTargetCollectionMetadata.getFriendlyName() != null) {
        metadata.setFriendlyName(adornedTargetCollectionMetadata.getFriendlyName());
    }
    if (adornedTargetCollectionMetadata.getSecurityLevel() != null) {
        metadata.setSecurityLevel(adornedTargetCollectionMetadata.getSecurityLevel());
    }
    if (adornedTargetCollectionMetadata.getOrder() != null) {
        metadata.setOrder(adornedTargetCollectionMetadata.getOrder());
    }
    if (adornedTargetCollectionMetadata.getTab() != null) {
        metadata.setTab(adornedTargetCollectionMetadata.getTab());
    }
    if (adornedTargetCollectionMetadata.getTabOrder() != null) {
        metadata.setTabOrder(adornedTargetCollectionMetadata.getTabOrder());
    }
    if (adornedTargetCollectionMetadata.getCustomCriteria() != null) {
        metadata.setCustomCriteria(adornedTargetCollectionMetadata.getCustomCriteria());
    }
    if (adornedTargetCollectionMetadata.getUseServerSideInspectionCache() != null) {
        persistencePerspective.setUseServerSideInspectionCache(adornedTargetCollectionMetadata.getUseServerSideInspectionCache());
    }
    if (adornedTargetCollectionMetadata.isIgnoreAdornedProperties() != null) {
        metadata.setIgnoreAdornedProperties(adornedTargetCollectionMetadata.isIgnoreAdornedProperties());
    }
    if (adornedTargetCollectionMetadata.getCurrencyCodeField()!=null) {
        metadata.setCurrencyCodeField(adornedTargetCollectionMetadata.getCurrencyCodeField());
    }
    attributes.put(field.getName(), metadata);
}
/**
 * Ordering value used when multiple metadata providers are consulted; this provider
 * runs at the {@code ADORNED_TARGET} priority defined on {@code FieldMetadataProvider}.
 */
@Override
public int getOrder() {
    return FieldMetadataProvider.ADORNED_TARGET;
}
} | 0true
| admin_broadleaf-open-admin-platform_src_main_java_org_broadleafcommerce_openadmin_server_dao_provider_metadata_AdornedTargetCollectionFieldMetadataProvider.java |
1,911 | public class SimpleEntryView<K,V> implements EntryView<K,V>, IdentifiedDataSerializable {
private K key;
private V value;
private long cost;
private long creationTime;
private long expirationTime;
private long hits;
private long lastAccessTime;
private long lastStoredTime;
private long lastUpdateTime;
private long version;
private long evictionCriteriaNumber;
private long ttl;
public SimpleEntryView(K key, V value) {
this.key = key;
this.value = value;
}
public SimpleEntryView() {
}
public K getKey() {
return key;
}
public void setKey(K key) {
this.key = key;
}
public V getValue() {
return value;
}
public void setValue(V value) {
this.value = value;
}
public long getCost() {
return cost;
}
public void setCost(long cost) {
this.cost = cost;
}
public long getCreationTime() {
return creationTime;
}
public void setCreationTime(long creationTime) {
this.creationTime = creationTime;
}
public long getExpirationTime() {
return expirationTime;
}
public void setExpirationTime(long expirationTime) {
this.expirationTime = expirationTime;
}
public long getHits() {
return hits;
}
public void setHits(long hits) {
this.hits = hits;
}
public long getLastAccessTime() {
return lastAccessTime;
}
public void setLastAccessTime(long lastAccessTime) {
this.lastAccessTime = lastAccessTime;
}
public long getLastStoredTime() {
return lastStoredTime;
}
public void setLastStoredTime(long lastStoredTime) {
this.lastStoredTime = lastStoredTime;
}
public long getLastUpdateTime() {
return lastUpdateTime;
}
public void setLastUpdateTime(long lastUpdateTime) {
this.lastUpdateTime = lastUpdateTime;
}
public long getVersion() {
return version;
}
public void setVersion(long version) {
this.version = version;
}
public long getEvictionCriteriaNumber() {
return evictionCriteriaNumber;
}
public void setEvictionCriteriaNumber(long evictionCriteriaNumber) {
this.evictionCriteriaNumber = evictionCriteriaNumber;
}
public long getTtl() {
return ttl;
}
public void setTtl(long ttl) {
this.ttl = ttl;
}
@Override
public void writeData(ObjectDataOutput out) throws IOException {
out.writeObject(key);
out.writeObject(value);
out.writeLong(cost);
out.writeLong(creationTime);
out.writeLong(expirationTime);
out.writeLong(hits);
out.writeLong(lastAccessTime);
out.writeLong(lastStoredTime);
out.writeLong(lastUpdateTime);
out.writeLong(version);
out.writeLong(evictionCriteriaNumber);
out.writeLong(ttl);
}
@Override
public void readData(ObjectDataInput in) throws IOException {
key = in.readObject();
value = in.readObject();
cost = in.readLong();
creationTime = in.readLong();
expirationTime = in.readLong();
hits = in.readLong();
lastAccessTime = in.readLong();
lastStoredTime = in.readLong();
lastUpdateTime = in.readLong();
version = in.readLong();
evictionCriteriaNumber = in.readLong();
ttl = in.readLong();
}
public int getFactoryId() {
return MapDataSerializerHook.F_ID;
}
public int getId() {
return MapDataSerializerHook.ENTRY_VIEW;
}
} | 0true
| hazelcast_src_main_java_com_hazelcast_map_SimpleEntryView.java |
3,746 | public class SessionListener implements HttpSessionListener {
public void sessionCreated(HttpSessionEvent httpSessionEvent) {
}
public void sessionDestroyed(HttpSessionEvent httpSessionEvent) {
WebFilter.destroyOriginalSession(httpSessionEvent.getSession());
}
} | 1no label
| hazelcast-wm_src_main_java_com_hazelcast_web_SessionListener.java |
// Periodic task that scans all databases hosted by this server and backs up each eligible one
// to a file whose name is derived from the configured target pattern.
final TimerTask timerTask = new TimerTask() {
    @Override
    public void run() {
        OLogManager.instance().info(this, "[OAutomaticBackup] Scanning databases to backup...");
        int ok = 0, errors = 0;
        final Map<String, String> databaseNames = serverInstance.getAvailableStorageNames();
        for (final Entry<String, String> dbName : databaseNames.entrySet()) {
            // a database is included when it matches the include list (an empty include list
            // means "all databases") and is not present in the exclude list
            boolean include;
            if (includeDatabases.size() > 0)
                include = includeDatabases.contains(dbName.getKey());
            else
                include = true;
            if (excludeDatabases.contains(dbName.getKey()))
                include = false;
            if (include) {
                // expand ${DBNAME} and ${DATE:<pattern>} variables in the configured file name
                final String fileName = (String) OVariableParser.resolveVariables(targetFileName, OSystemVariableResolver.VAR_BEGIN,
                    OSystemVariableResolver.VAR_END, new OVariableParserListener() {
                      @Override
                      public String resolve(final String iVariable) {
                        if (iVariable.equalsIgnoreCase(VARIABLES.DBNAME.toString()))
                          return dbName.getKey();
                        else if (iVariable.startsWith(VARIABLES.DATE.toString())) {
                          // the variable carries its own SimpleDateFormat pattern after "DATE."
                          return new SimpleDateFormat(iVariable.substring(VARIABLES.DATE.toString().length() + 1)).format(new Date());
                        }
                        // NOT FOUND
                        throw new IllegalArgumentException("Variable '" + iVariable + "' wasn't found");
                      }
                    });
                final String exportFilePath = targetDirectory + fileName;
                ODatabaseDocumentTx db = null;
                try {
                    db = new ODatabaseDocumentTx(dbName.getValue());
                    db.setProperty(ODatabase.OPTIONS.SECURITY.toString(), Boolean.FALSE);
                    // NOTE(review): hard-coded credentials ("admin"/"aaa") -- security is disabled on the
                    // line above, so the password may be unused, but confirm this is intentional
                    db.open("admin", "aaa");
                    final long begin = System.currentTimeMillis();
                    // NOTE(review): the FileOutputStream is never explicitly closed here; verify that
                    // db.backup() closes the supplied stream, otherwise this leaks a file handle
                    db.backup(new FileOutputStream(exportFilePath), null, null);
                    OLogManager.instance().info(
                        this,
                        "[OAutomaticBackup] - Backup of database '" + dbName.getValue() + "' completed in "
                            + (System.currentTimeMillis() - begin) + "ms");
                    ok++;
                } catch (Exception e) {
                    // log and count the failure, then continue with the remaining databases
                    OLogManager.instance().error(this,
                        "[OAutomaticBackup] - Error on exporting database '" + dbName.getValue() + "' to file: " + exportFilePath, e);
                    errors++;
                } finally {
                    if (db != null)
                        db.close();
                }
            }
        }
        OLogManager.instance().info(this, "[OAutomaticBackup] Backup finished: %d ok, %d errors", ok, errors);
    }
};
| server_src_main_java_com_orientechnologies_orient_server_handler_OAutomaticBackup.java |
/**
 * Extension of Lucene's {@link FieldQuery} that flattens additional query types
 * (constant-score, filtered, function-score, multi-phrase and multi-phrase-prefix queries,
 * plus selected filters) so the fast vector highlighter can extract terms from them.
 */
public class CustomFieldQuery extends FieldQuery {

    // reflective handle to MultiTermQueryWrapperFilter's private "query" field, so the
    // wrapped query can be flattened; null when the field is not accessible
    private static Field multiTermQueryWrapperFilterQueryField;

    static {
        try {
            multiTermQueryWrapperFilterQueryField = MultiTermQueryWrapperFilter.class.getDeclaredField("query");
            multiTermQueryWrapperFilterQueryField.setAccessible(true);
        } catch (NoSuchFieldException e) {
            // ignore
        }
    }

    // per-thread flag: when Boolean.TRUE, filters are also flattened for highlighting
    public static final ThreadLocal<Boolean> highlightFilters = new ThreadLocal<Boolean>();

    public CustomFieldQuery(Query query, IndexReader reader, FastVectorHighlighter highlighter) throws IOException {
        this(query, reader, highlighter.isPhraseHighlight(), highlighter.isFieldMatch());
    }

    public CustomFieldQuery(Query query, IndexReader reader, boolean phraseHighlight, boolean fieldMatch) throws IOException {
        super(query, reader, phraseHighlight, fieldMatch);
        // the super constructor triggers flattening; clear the thread-local afterwards
        highlightFilters.remove();
    }

    /**
     * Recursively unwraps composite query types down to the primitive queries FieldQuery understands,
     * delegating to the superclass for anything not handled here.
     */
    @Override
    void flatten(Query sourceQuery, IndexReader reader, Collection<Query> flatQueries) throws IOException {
        if (sourceQuery instanceof SpanTermQuery) {
            super.flatten(new TermQuery(((SpanTermQuery) sourceQuery).getTerm()), reader, flatQueries);
        } else if (sourceQuery instanceof ConstantScoreQuery) {
            ConstantScoreQuery constantScoreQuery = (ConstantScoreQuery) sourceQuery;
            if (constantScoreQuery.getFilter() != null) {
                flatten(constantScoreQuery.getFilter(), reader, flatQueries);
            } else {
                flatten(constantScoreQuery.getQuery(), reader, flatQueries);
            }
        } else if (sourceQuery instanceof FunctionScoreQuery) {
            flatten(((FunctionScoreQuery) sourceQuery).getSubQuery(), reader, flatQueries);
        } else if (sourceQuery instanceof FilteredQuery) {
            flatten(((FilteredQuery) sourceQuery).getQuery(), reader, flatQueries);
            flatten(((FilteredQuery) sourceQuery).getFilter(), reader, flatQueries);
        } else if (sourceQuery instanceof XFilteredQuery) {
            flatten(((XFilteredQuery) sourceQuery).getQuery(), reader, flatQueries);
            flatten(((XFilteredQuery) sourceQuery).getFilter(), reader, flatQueries);
        } else if (sourceQuery instanceof MultiPhrasePrefixQuery) {
            // rewrite expands the prefix into concrete terms before flattening
            flatten(sourceQuery.rewrite(reader), reader, flatQueries);
        } else if (sourceQuery instanceof FiltersFunctionScoreQuery) {
            flatten(((FiltersFunctionScoreQuery) sourceQuery).getSubQuery(), reader, flatQueries);
        } else if (sourceQuery instanceof MultiPhraseQuery) {
            MultiPhraseQuery q = ((MultiPhraseQuery) sourceQuery);
            convertMultiPhraseQuery(0, new int[q.getTermArrays().size()] , q, q.getTermArrays(), q.getPositions(), reader, flatQueries);
        } else {
            super.flatten(sourceQuery, reader, flatQueries);
        }
    }

    /**
     * Expands a MultiPhraseQuery into the PhraseQuery combinations FieldQuery supports by walking
     * every path through the per-position term arrays (depth-first on {@code currentPos}). When the
     * query holds more than 16 terms in total, it is flattened to plain TermQuerys instead, since
     * the number of path combinations would be too large.
     */
    private void convertMultiPhraseQuery(int currentPos, int[] termsIdx, MultiPhraseQuery orig, List<Term[]> terms, int[] pos, IndexReader reader, Collection<Query> flatQueries) throws IOException {
        if (currentPos == 0) {
            // if we have more than 16 terms
            int numTerms = 0;
            for (Term[] currentPosTerm : terms) {
                numTerms += currentPosTerm.length;
            }
            if (numTerms > 16) {
                for (Term[] currentPosTerm : terms) {
                    for (Term term : currentPosTerm) {
                        super.flatten(new TermQuery(term), reader, flatQueries);
                    }
                }
                return;
            }
        }
        /*
         * we walk all possible ways and for each path down the MPQ we create a PhraseQuery this is what FieldQuery supports.
         * It seems expensive but most queries will pretty small.
         */
        if (currentPos == terms.size()) {
            // complete path: build the PhraseQuery from the chosen term at each position
            PhraseQuery query = new PhraseQuery();
            query.setBoost(orig.getBoost());
            query.setSlop(orig.getSlop());
            for (int i = 0; i < termsIdx.length; i++) {
                query.add(terms.get(i)[termsIdx[i]], pos[i]);
            }
            this.flatten(query, reader, flatQueries);
        } else {
            // branch over every candidate term at the current position
            Term[] t = terms.get(currentPos);
            for (int i = 0; i < t.length; i++) {
                termsIdx[currentPos] = i;
                convertMultiPhraseQuery(currentPos+1, termsIdx, orig, terms, pos, reader, flatQueries);
            }
        }
    }

    /**
     * Flattens filters into highlightable queries, but only when the thread-local
     * {@link #highlightFilters} flag is set to TRUE.
     */
    void flatten(Filter sourceFilter, IndexReader reader, Collection<Query> flatQueries) throws IOException {
        Boolean highlight = highlightFilters.get();
        if (highlight == null || highlight.equals(Boolean.FALSE)) {
            return;
        }
        if (sourceFilter instanceof TermFilter) {
            flatten(new TermQuery(((TermFilter) sourceFilter).getTerm()), reader, flatQueries);
        } else if (sourceFilter instanceof MultiTermQueryWrapperFilter) {
            if (multiTermQueryWrapperFilterQueryField != null) {
                try {
                    // reach into the wrapper via reflection to flatten the wrapped query
                    flatten((Query) multiTermQueryWrapperFilterQueryField.get(sourceFilter), reader, flatQueries);
                } catch (IllegalAccessException e) {
                    // ignore
                }
            }
        } else if (sourceFilter instanceof XBooleanFilter) {
            // only MUST and SHOULD clauses contribute terms; MUST_NOT clauses are skipped
            XBooleanFilter booleanFilter = (XBooleanFilter) sourceFilter;
            for (FilterClause clause : booleanFilter.clauses()) {
                if (clause.getOccur() == BooleanClause.Occur.MUST || clause.getOccur() == BooleanClause.Occur.SHOULD) {
                    flatten(clause.getFilter(), reader, flatQueries);
                }
            }
        }
    }
}
| src_main_java_org_apache_lucene_search_vectorhighlight_CustomFieldQuery.java |
2,925 | public static final class NullObject implements Comparable {
@Override
public int compareTo(Object o) {
if (o == this || o instanceof NullObject) {
return 0;
}
return -1;
}
@Override
public int hashCode() {
return 0;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
return true;
}
} | 1no label
| hazelcast_src_main_java_com_hazelcast_query_impl_IndexImpl.java |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.