Columns:
  Unnamed: 0 : int64, values 0 to 6.45k
  func       : string, lengths 29 to 253k characters
  target     : class label, 2 classes
  project    : string, lengths 36 to 167 characters
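Each record below lists, in order, the Unnamed: 0 row id, the func source text, the target label, and the project path. The "Unnamed: 0" name is what pandas assigns to an unnamed index column when a CSV is re-read, so a minimal loading sketch in pandas is given here; the file name code_defects.csv and the CSV export itself are assumptions for illustration only, not part of the dump.

```python
import pandas as pd

# Assumed: the table above has been exported to CSV; "code_defects.csv" is a
# hypothetical file name used only for this sketch.
df = pd.read_csv("code_defects.csv")

# Columns as described above: row id, raw source text of the sample ("func"),
# a two-class label ("target"), and the originating file path ("project").
print(df.dtypes)
print(df["target"].value_counts())

# Inspect a single record.
row = df.iloc[0]
print(row["project"], row["target"])
print(row["func"][:200])  # first 200 characters of the code sample
```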
349
public class JPAPropertiesPersistenceUnitPostProcessor implements org.springframework.orm.jpa.persistenceunit.PersistenceUnitPostProcessor { protected Map<String, String> persistenceUnitProperties = new HashMap<String, String>(); protected Map<String, String> overrideProperties = new HashMap<String, String>(); @Value("${blPU.hibernate.hbm2ddl.auto}") protected String blPUHibernateHbm2ddlAuto; @Value("${blPU.hibernate.dialect}") protected String blPUHibernateDialect; @Value("${blPU.hibernate.show_sql}") protected String blPUHibernateShow_sql; @Value("${blPU.hibernate.cache.use_second_level_cache}") protected String blPUHibernateCacheUse_second_level_cache; @Value("${blPU.hibernate.cache.use_query_cache}") protected String blPUHibernateCacheUse_query_cache; @Value("${blPU.hibernate.hbm2ddl.import_files}") protected String blPUHibernateHbm2ddlImport_files; @Value("${blPU.hibernate.hbm2ddl.import_files_sql_extractor}") protected String blPUHibernateHbm2ddlImport_files_sql_extractor; @Value("${blCMSStorage.hibernate.hbm2ddl.auto}") protected String blCMSStorageHibernateHbm2ddlAuto; @Value("${blCMSStorage.hibernate.dialect}") protected String blCMSStorageHibernateDialect; @Value("${blCMSStorage.hibernate.show_sql}") protected String blCMSStorageHibernateShow_sql; @Value("${blCMSStorage.hibernate.cache.use_second_level_cache}") protected String blCMSStorageHibernateCacheUse_second_level_cache; @Value("${blCMSStorage.hibernate.cache.use_query_cache}") protected String blCMSStorageHibernateCacheUse_query_cache; @Value("${blCMSStorage.hibernate.hbm2ddl.import_files}") protected String blCMSStorageHibernateHbm2ddlImport_files; @Value("${blCMSStorage.hibernate.hbm2ddl.import_files_sql_extractor}") protected String blCMSStorageHibernateHbm2ddlImport_files_sql_extractor; @Value("${blSecurePU.hibernate.hbm2ddl.auto}") protected String blSecurePUHibernateHbm2ddlAuto; @Value("${blSecurePU.hibernate.dialect}") protected String blSecurePUHibernateDialect; @Value("${blSecurePU.hibernate.show_sql}") protected String blSecurePUHibernateShow_sql; @Value("${blSecurePU.hibernate.cache.use_second_level_cache}") protected String blSecurePUHibernateCacheUse_second_level_cache; @Value("${blSecurePU.hibernate.cache.use_query_cache}") protected String blSecurePUHibernateCacheUse_query_cache; @Value("${blSecurePU.hibernate.hbm2ddl.import_files}") protected String blSecurePUHibernateHbm2ddlImport_files; @Value("${blSecurePU.hibernate.hbm2ddl.import_files_sql_extractor}") protected String blSecurePUHibernateHbm2ddlImport_files_sql_extractor; @PostConstruct public void populatePresetProperties() { if (!blPUHibernateHbm2ddlAuto.startsWith("${")) persistenceUnitProperties.put("blPU.hibernate.hbm2ddl.auto", blPUHibernateHbm2ddlAuto); if (!blPUHibernateDialect.startsWith("${")) persistenceUnitProperties.put("blPU.hibernate.dialect", blPUHibernateDialect); if (!blPUHibernateShow_sql.startsWith("${")) persistenceUnitProperties.put("blPU.hibernate.show_sql", blPUHibernateShow_sql); if (!blPUHibernateCacheUse_second_level_cache.startsWith("${")) persistenceUnitProperties.put("blPU.hibernate.cache.use_second_level_cache", blPUHibernateCacheUse_second_level_cache); if (!blPUHibernateCacheUse_query_cache.startsWith("${")) persistenceUnitProperties.put("blPU.hibernate.cache.use_query_cache", blPUHibernateCacheUse_query_cache); if (!blPUHibernateHbm2ddlImport_files.startsWith("${")) persistenceUnitProperties.put("blPU.hibernate.hbm2ddl.import_files", blPUHibernateHbm2ddlImport_files); if 
(!blPUHibernateHbm2ddlImport_files_sql_extractor.startsWith("${")) persistenceUnitProperties.put("blPU.hibernate.hbm2ddl.import_files_sql_extractor", blPUHibernateHbm2ddlImport_files_sql_extractor); if (!blCMSStorageHibernateHbm2ddlAuto.startsWith("${")) persistenceUnitProperties.put("blCMSStorage.hibernate.hbm2ddl.auto", blCMSStorageHibernateHbm2ddlAuto); if (!blCMSStorageHibernateDialect.startsWith("${")) persistenceUnitProperties.put("blCMSStorage.hibernate.dialect", blCMSStorageHibernateDialect); if (!blCMSStorageHibernateShow_sql.startsWith("${")) persistenceUnitProperties.put("blCMSStorage.hibernate.show_sql", blCMSStorageHibernateShow_sql); if (!blCMSStorageHibernateCacheUse_second_level_cache.startsWith("${")) persistenceUnitProperties.put("blCMSStorage.hibernate.cache.use_second_level_cache", blCMSStorageHibernateCacheUse_second_level_cache); if (!blCMSStorageHibernateCacheUse_query_cache.startsWith("${")) persistenceUnitProperties.put("blCMSStorage.hibernate.cache.use_query_cache", blCMSStorageHibernateCacheUse_query_cache); if (!blCMSStorageHibernateHbm2ddlImport_files.startsWith("${")) persistenceUnitProperties.put("blCMSStorage.hibernate.hbm2ddl.import_files", blCMSStorageHibernateHbm2ddlImport_files); if (!blCMSStorageHibernateHbm2ddlImport_files_sql_extractor.startsWith("${")) persistenceUnitProperties.put("blCMSStorage.hibernate.hbm2ddl.import_files_sql_extractor", blCMSStorageHibernateHbm2ddlImport_files_sql_extractor); if (!blSecurePUHibernateHbm2ddlAuto.startsWith("${")) persistenceUnitProperties.put("blSecurePU.hibernate.hbm2ddl.auto", blSecurePUHibernateHbm2ddlAuto); if (!blSecurePUHibernateDialect.startsWith("${")) persistenceUnitProperties.put("blSecurePU.hibernate.dialect", blSecurePUHibernateDialect); if (!blSecurePUHibernateShow_sql.startsWith("${")) persistenceUnitProperties.put("blSecurePU.hibernate.show_sql", blSecurePUHibernateShow_sql); if (!blSecurePUHibernateCacheUse_second_level_cache.startsWith("${")) persistenceUnitProperties.put("blSecurePU.hibernate.cache.use_second_level_cache", blSecurePUHibernateCacheUse_second_level_cache); if (!blSecurePUHibernateCacheUse_query_cache.startsWith("${")) persistenceUnitProperties.put("blSecurePU.hibernate.cache.use_query_cache", blSecurePUHibernateCacheUse_query_cache); if (!blSecurePUHibernateHbm2ddlImport_files.startsWith("${")) persistenceUnitProperties.put("blSecurePU.hibernate.hbm2ddl.import_files", blSecurePUHibernateHbm2ddlImport_files); if (!blSecurePUHibernateHbm2ddlImport_files_sql_extractor.startsWith("${")) persistenceUnitProperties.put("blSecurePU.hibernate.hbm2ddl.import_files_sql_extractor", blSecurePUHibernateHbm2ddlImport_files_sql_extractor); persistenceUnitProperties.putAll(overrideProperties); } @Override public void postProcessPersistenceUnitInfo(MutablePersistenceUnitInfo pui) { if (persistenceUnitProperties != null) { String puName = pui.getPersistenceUnitName() + "."; Set<String> keys = persistenceUnitProperties.keySet(); Properties props = pui.getProperties(); for (String key : keys) { if (key.startsWith(puName)){ String value = persistenceUnitProperties.get(key); String newKey = key.substring(puName.length()); if ("null".equalsIgnoreCase(value)){ props.remove(newKey); } else if (value != null && ! "".equals(value)) { props.put(newKey, value); } } } pui.setProperties(props); } } public void setPersistenceUnitProperties(Map<String, String> properties) { this.overrideProperties = properties; } }
0 (true)
common_src_main_java_org_broadleafcommerce_common_extensibility_jpa_JPAPropertiesPersistenceUnitPostProcessor.java
84
GREATER_THAN_EQUAL {
    @Override
    public boolean isValidValueType(Class<?> clazz) {
        Preconditions.checkNotNull(clazz);
        return Comparable.class.isAssignableFrom(clazz);
    }

    @Override
    public boolean isValidCondition(Object condition) {
        return condition!=null && condition instanceof Comparable;
    }

    @Override
    public boolean evaluate(Object value, Object condition) {
        Integer cmp = AttributeUtil.compare(value,condition);
        return cmp!=null?cmp>=0:false;
    }

    @Override
    public String toString() {
        return ">=";
    }

    @Override
    public TitanPredicate negate() {
        return LESS_THAN;
    }
};
0 (true)
titan-core_src_main_java_com_thinkaurelius_titan_core_attribute_Cmp.java
547
metaDataMappingService.removeMapping(clusterStateUpdateRequest, new ClusterStateUpdateListener() {
    @Override
    public void onResponse(ClusterStateUpdateResponse response) {
        listener.onResponse(new DeleteMappingResponse(response.isAcknowledged()));
    }

    @Override
    public void onFailure(Throwable t) {
        listener.onFailure(t);
    }
});
0 (true)
src_main_java_org_elasticsearch_action_admin_indices_mapping_delete_TransportDeleteMappingAction.java
786
public final class AtomicLongDataSerializerHook implements DataSerializerHook { public static final int F_ID = FactoryIdHelper.getFactoryId(FactoryIdHelper.ATOMIC_LONG_DS_FACTORY, -17); public static final int ADD_BACKUP = 0; public static final int ADD_AND_GET = 1; public static final int ALTER = 2; public static final int ALTER_AND_GET = 3; public static final int APPLY = 4; public static final int COMPARE_AND_SET = 5; public static final int GET = 6; public static final int GET_AND_SET = 7; public static final int GET_AND_ALTER = 8; public static final int GET_AND_ADD = 9; public static final int SET_OPERATION = 10; public static final int SET_BACKUP = 11; public static final int REPLICATION = 12; @Override public int getFactoryId() { return F_ID; } @Override public DataSerializableFactory createFactory() { return new DataSerializableFactory() { @Override public IdentifiedDataSerializable create(int typeId) { switch (typeId) { case ADD_BACKUP: return new AddBackupOperation(); case ADD_AND_GET: return new AddAndGetOperation(); case ALTER: return new AlterOperation(); case ALTER_AND_GET: return new AlterAndGetOperation(); case APPLY: return new ApplyOperation(); case COMPARE_AND_SET: return new CompareAndSetOperation(); case GET: return new GetOperation(); case GET_AND_SET: return new GetAndSetOperation(); case GET_AND_ALTER: return new GetAndAlterOperation(); case GET_AND_ADD: return new GetAndAddOperation(); case SET_OPERATION: return new SetOperation(); case SET_BACKUP: return new SetBackupOperation(); case REPLICATION: return new AtomicLongReplicationOperation(); default: return null; } } }; } }
0 (true)
hazelcast_src_main_java_com_hazelcast_concurrent_atomiclong_AtomicLongDataSerializerHook.java
107
{ @Override public Void doWork( State state ) { state.tx = state.graphDb.beginTx(); return null; } } );
0 (true)
community_kernel_src_test_java_org_neo4j_kernel_impl_transaction_TestManualAcquireLock.java
1,248
public class FixedPriceFulfillmentPricingProvider implements FulfillmentPricingProvider { @Override public boolean canCalculateCostForFulfillmentGroup(FulfillmentGroup fulfillmentGroup, FulfillmentOption option) { return (option instanceof FixedPriceFulfillmentOption); } @Override public FulfillmentGroup calculateCostForFulfillmentGroup(FulfillmentGroup fulfillmentGroup) throws FulfillmentPriceException { if (canCalculateCostForFulfillmentGroup(fulfillmentGroup, fulfillmentGroup.getFulfillmentOption())) { Money price = ((FixedPriceFulfillmentOption)fulfillmentGroup.getFulfillmentOption()).getPrice(); fulfillmentGroup.setRetailShippingPrice(price); fulfillmentGroup.setSaleShippingPrice(price); fulfillmentGroup.setShippingPrice(price); return fulfillmentGroup; } throw new IllegalArgumentException("Cannot estimate shipping cost for the fulfillment option: " + fulfillmentGroup.getFulfillmentOption().getClass().getName()); } @Override public FulfillmentEstimationResponse estimateCostForFulfillmentGroup(FulfillmentGroup fulfillmentGroup, Set<FulfillmentOption> options) throws FulfillmentPriceException { FulfillmentEstimationResponse response = new FulfillmentEstimationResponse(); HashMap<FulfillmentOption, Money> shippingPrices = new HashMap<FulfillmentOption, Money>(); response.setFulfillmentOptionPrices(shippingPrices); for (FulfillmentOption option : options) { if (canCalculateCostForFulfillmentGroup(fulfillmentGroup, option)) { Money price = ((FixedPriceFulfillmentOption) option).getPrice(); shippingPrices.put(option, price); } } return response; } }
0 (true)
core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_pricing_service_fulfillment_provider_FixedPriceFulfillmentPricingProvider.java
660
public class QueryExplanation implements Streamable {

    private String index;
    private boolean valid;
    private String explanation;
    private String error;

    QueryExplanation() {
    }

    public QueryExplanation(String index, boolean valid, String explanation, String error) {
        this.index = index;
        this.valid = valid;
        this.explanation = explanation;
        this.error = error;
    }

    public String getIndex() {
        return this.index;
    }

    public boolean isValid() {
        return this.valid;
    }

    public String getError() {
        return this.error;
    }

    public String getExplanation() {
        return this.explanation;
    }

    @Override
    public void readFrom(StreamInput in) throws IOException {
        index = in.readString();
        valid = in.readBoolean();
        explanation = in.readOptionalString();
        error = in.readOptionalString();
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeString(index);
        out.writeBoolean(valid);
        out.writeOptionalString(explanation);
        out.writeOptionalString(error);
    }

    public static QueryExplanation readQueryExplanation(StreamInput in) throws IOException {
        QueryExplanation exp = new QueryExplanation();
        exp.readFrom(in);
        return exp;
    }
}
0 (true)
src_main_java_org_elasticsearch_action_admin_indices_validate_query_QueryExplanation.java
1,702
runnable = new Runnable() { public void run() { map.set("key", null, 1, TimeUnit.SECONDS); } };
0 (true)
hazelcast_src_test_java_com_hazelcast_map_BasicMapTest.java
169
@Ignore("not a JUnit test") public class SimpleMapTestFromClient { static { System.setProperty(GroupProperties.PROP_WAIT_SECONDS_BEFORE_JOIN, "0"); System.setProperty("java.net.preferIPv4Stack", "true"); System.setProperty("hazelcast.local.localAddress", "127.0.0.1"); System.setProperty("hazelcast.version.check.enabled", "false"); System.setProperty("hazelcast.socket.bind.any", "false"); Random rand = new Random(); int g1 = rand.nextInt(255); int g2 = rand.nextInt(255); int g3 = rand.nextInt(255); // System.setProperty("hazelcast.multicast.group", "224." + g1 + "." + g2 + "." + g3); } public static int THREAD_COUNT = 40; public static int ENTRY_COUNT = 10 * 1000; public static int VALUE_SIZE = 1000; public static final int STATS_SECONDS = 10; public static int GET_PERCENTAGE = 40; public static int PUT_PERCENTAGE = 40; public static void main(String[] args) { final ClientConfig clientConfig = new ClientConfig(); final HazelcastInstance instance1 = Hazelcast.newHazelcastInstance(); final HazelcastInstance instance2 = Hazelcast.newHazelcastInstance(); final HazelcastInstance client = HazelcastClient.newHazelcastClient(clientConfig); final Stats stats = new Stats(); if (args != null && args.length > 0) { for (String arg : args) { arg = arg.trim(); if (arg.startsWith("t")) { THREAD_COUNT = Integer.parseInt(arg.substring(1)); } else if (arg.startsWith("c")) { ENTRY_COUNT = Integer.parseInt(arg.substring(1)); } else if (arg.startsWith("v")) { VALUE_SIZE = Integer.parseInt(arg.substring(1)); } else if (arg.startsWith("g")) { GET_PERCENTAGE = Integer.parseInt(arg.substring(1)); } else if (arg.startsWith("p")) { PUT_PERCENTAGE = Integer.parseInt(arg.substring(1)); } } } else { System.out.println("Help: sh test.sh t200 v130 p10 g85 "); System.out.println(" // means 200 threads, value-size 130 bytes, 10% put, 85% get"); System.out.println(""); } System.out.println("Starting Test with "); System.out.println(" Thread Count: " + THREAD_COUNT); System.out.println(" Entry Count: " + ENTRY_COUNT); System.out.println(" Value Size: " + VALUE_SIZE); System.out.println(" Get Percentage: " + GET_PERCENTAGE); System.out.println(" Put Percentage: " + PUT_PERCENTAGE); System.out.println(" Remove Percentage: " + (100 - (PUT_PERCENTAGE + GET_PERCENTAGE))); ExecutorService es = Executors.newFixedThreadPool(THREAD_COUNT); for (int i = 0; i < THREAD_COUNT; i++) { es.submit(new Runnable() { public void run() { IMap<String, byte[]> map = client.getMap("default"); while (true) { int key = (int) (Math.random() * ENTRY_COUNT); int operation = ((int) (Math.random() * 100)); if (operation < GET_PERCENTAGE) { map.get(String.valueOf(key)); stats.gets.incrementAndGet(); } else if (operation < GET_PERCENTAGE + PUT_PERCENTAGE) { map.put(String.valueOf(key), new byte[VALUE_SIZE]); stats.puts.incrementAndGet(); } else { map.remove(String.valueOf(key)); stats.removes.incrementAndGet(); } } } }); } Executors.newSingleThreadExecutor().submit(new Runnable() { public void run() { while (true) { try { Thread.sleep(STATS_SECONDS * 1000); System.out.println("cluster size:" + client.getCluster().getMembers().size()); Stats currentStats = stats.getAndReset(); System.out.println(currentStats); System.out.println("Operations per Second : " + currentStats.total() / STATS_SECONDS); } catch (Exception e) { e.printStackTrace(); } } } }); } public static class Stats { public AtomicLong puts = new AtomicLong(); public AtomicLong gets = new AtomicLong(); public AtomicLong removes = new AtomicLong(); public Stats getAndReset() { long putsNow = 
puts.getAndSet(0); long getsNow = gets.getAndSet(0); long removesNow = removes.getAndSet(0); Stats newOne = new Stats(); newOne.puts.set(putsNow); newOne.gets.set(getsNow); newOne.removes.set(removesNow); return newOne; } public long total() { return puts.get() + gets.get() + removes.get(); } public String toString() { return "total= " + total() + ", gets:" + gets.get() + ", puts: " + puts.get() + ", removes:" + removes.get(); } } }
0 (true)
hazelcast-client_src_test_java_com_hazelcast_client_SimpleMapTestFromClient.java
1,368
public class ClusterBlockException extends ElasticsearchException { private final ImmutableSet<ClusterBlock> blocks; public ClusterBlockException(ImmutableSet<ClusterBlock> blocks) { super(buildMessage(blocks)); this.blocks = blocks; } public boolean retryable() { for (ClusterBlock block : blocks) { if (!block.retryable()) { return false; } } return true; } public ImmutableSet<ClusterBlock> blocks() { return blocks; } private static String buildMessage(ImmutableSet<ClusterBlock> blocks) { StringBuilder sb = new StringBuilder("blocked by: "); for (ClusterBlock block : blocks) { sb.append("[").append(block.status()).append("/").append(block.id()).append("/").append(block.description()).append("];"); } return sb.toString(); } @Override public RestStatus status() { RestStatus status = null; for (ClusterBlock block : blocks) { if (status == null) { status = block.status(); } else if (status.getStatus() < block.status().getStatus()) { status = block.status(); } } return status; } }
0 (true)
src_main_java_org_elasticsearch_cluster_block_ClusterBlockException.java
280
public class ThriftStoreTest extends AbstractCassandraStoreTest {

    @BeforeClass
    public static void startCassandra() {
        CassandraStorageSetup.startCleanEmbedded();
    }

    @Override
    public ModifiableConfiguration getBaseStorageConfiguration() {
        return CassandraStorageSetup.getCassandraThriftConfiguration(this.getClass().getSimpleName());
    }

    @Override
    public AbstractCassandraStoreManager openStorageManager(Configuration c) throws BackendException {
        return new CassandraThriftStoreManager(c);
    }
}
0 (true)
titan-cassandra_src_test_java_com_thinkaurelius_titan_diskstorage_cassandra_thrift_ThriftStoreTest.java
3,741
public class WanReplicationServiceImpl implements WanReplicationService { private final Node node; private final ILogger logger; private final Map<String, WanReplicationPublisherDelegate> wanReplications = initializeWanReplicationPublisherMapping(); public WanReplicationServiceImpl(Node node) { this.node = node; this.logger = node.getLogger(WanReplicationServiceImpl.class.getName()); } @Override @SuppressWarnings("SynchronizeOnThis") public WanReplicationPublisher getWanReplicationPublisher(String name) { WanReplicationPublisherDelegate wr = wanReplications.get(name); if (wr != null) { return wr; } synchronized (this) { wr = wanReplications.get(name); if (wr != null) { return wr; } WanReplicationConfig wanReplicationConfig = node.getConfig().getWanReplicationConfig(name); if (wanReplicationConfig == null) { return null; } List<WanTargetClusterConfig> targets = wanReplicationConfig.getTargetClusterConfigs(); WanReplicationEndpoint[] targetEndpoints = new WanReplicationEndpoint[targets.size()]; int count = 0; for (WanTargetClusterConfig targetClusterConfig : targets) { WanReplicationEndpoint target; if (targetClusterConfig.getReplicationImpl() != null) { try { target = ClassLoaderUtil .newInstance(node.getConfigClassLoader(), targetClusterConfig.getReplicationImpl()); } catch (Exception e) { throw ExceptionUtil.rethrow(e); } } else { target = new WanNoDelayReplication(); } String groupName = targetClusterConfig.getGroupName(); String password = targetClusterConfig.getGroupPassword(); String[] addresses = new String[targetClusterConfig.getEndpoints().size()]; targetClusterConfig.getEndpoints().toArray(addresses); target.init(node, groupName, password, addresses); targetEndpoints[count++] = target; } wr = new WanReplicationPublisherDelegate(name, targetEndpoints); wanReplications.put(name, wr); return wr; } } @Override public void handleEvent(final Packet packet) { // todo execute in which thread node.nodeEngine.getExecutionService().execute("hz:wan", new Runnable() { @Override public void run() { final Data data = packet.getData(); try { WanReplicationEvent replicationEvent = (WanReplicationEvent) node.nodeEngine.toObject(data); String serviceName = replicationEvent.getServiceName(); ReplicationSupportingService service = node.nodeEngine.getService(serviceName); service.onReplicationEvent(replicationEvent); } catch (Exception e) { logger.severe(e); } } }); } @Override public void shutdown() { synchronized (this) { for (WanReplicationPublisherDelegate wanReplication : wanReplications.values()) { WanReplicationEndpoint[] wanReplicationEndpoints = wanReplication.getEndpoints(); if (wanReplicationEndpoints != null) { for (WanReplicationEndpoint wanReplicationEndpoint : wanReplicationEndpoints) { if (wanReplicationEndpoint != null) { wanReplicationEndpoint.shutdown(); } } } } wanReplications.clear(); } } private ConcurrentHashMap<String, WanReplicationPublisherDelegate> initializeWanReplicationPublisherMapping() { return new ConcurrentHashMap<String, WanReplicationPublisherDelegate>(2); } }
1 (no label)
hazelcast_src_main_java_com_hazelcast_wan_impl_WanReplicationServiceImpl.java
780
public class OMemoryWatchDog extends Thread { private final Map<ListenerWrapper, Object> listeners = new WeakHashMap<ListenerWrapper, Object>(128); private static long lastGC = 0; private int alertTimes = 0; protected final ReferenceQueue<Object> monitorQueue = new ReferenceQueue<Object>(); protected SoftReference<Object> monitorRef = new SoftReference<Object>(new Object(), monitorQueue); /** * we want properties of both IdentityHashMap and WeakHashMap */ private static class ListenerWrapper { final Listener listener; private ListenerWrapper(Listener listener) { this.listener = listener; } @Override public boolean equals(final Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; final ListenerWrapper that = (ListenerWrapper) o; return listener == that.listener; } @Override public int hashCode() { return listener != null ? System.identityHashCode(listener) : 0; } } public static interface Listener { /** * Execute a soft free of memory resources. * * @param iType * OS or JVM * @param iFreeMemory * Current used memory * @param iFreeMemoryPercentage * Max memory */ public void memoryUsageLow(long iFreeMemory, long iFreeMemoryPercentage); } /** * Create the memory watch dog with the default memory threshold. * * @param iThreshold */ public OMemoryWatchDog() { super("OrientDB MemoryWatchDog"); setDaemon(true); start(); } public void run() { Orient .instance() .getProfiler() .registerHookValue("system.memory.alerts", "Number of alerts received by JVM to free memory resources", METRIC_TYPE.COUNTER, new OProfilerHookValue() { public Object getValue() { return alertTimes; } }); Orient .instance() .getProfiler() .registerHookValue("system.memory.lastGC", "Date of last System.gc() invocation", METRIC_TYPE.STAT, new OProfilerHookValue() { public Object getValue() { return lastGC; } }); while (true) { try { // WAITS FOR THE GC FREE monitorQueue.remove(); if (Thread.interrupted()) break; // GC is freeing memory! 
alertTimes++; long maxMemory = Runtime.getRuntime().maxMemory(); long freeMemory = Runtime.getRuntime().freeMemory(); int freeMemoryPer = (int) (freeMemory * 100 / maxMemory); if (OLogManager.instance().isDebugEnabled()) OLogManager.instance().debug(this, "Free memory is low %s of %s (%d%%), calling listeners to free memory...", OFileUtils.getSizeAsString(freeMemory), OFileUtils.getSizeAsString(maxMemory), freeMemoryPer); final long timer = Orient.instance().getProfiler().startChrono(); synchronized (listeners) { for (ListenerWrapper listener : listeners.keySet()) { try { listener.listener.memoryUsageLow(freeMemory, freeMemoryPer); } catch (Exception e) { e.printStackTrace(); } } } Orient.instance().getProfiler().stopChrono("OMemoryWatchDog.freeResources", "WatchDog free resources", timer); } catch (InterruptedException e) { break; } catch (Exception e) { } finally { // RE-INSTANTIATE THE MONITOR REF monitorRef = new SoftReference<Object>(new Object(), monitorQueue); } } OLogManager.instance().debug(this, "[OMemoryWatchDog] shutdowning..."); synchronized (listeners) { listeners.clear(); } monitorRef = null; } public Listener addListener(final Listener listener) { synchronized (listeners) { listeners.put(new ListenerWrapper(listener), listener); } return listener; } public boolean removeListener(final Listener listener) { synchronized (listeners) { return listeners.remove(new ListenerWrapper(listener)) != null; } } public List<Listener> getListeners() { synchronized (listeners) { List<Listener> listenerList = new ArrayList<Listener>(); for (ListenerWrapper wrapper : listeners.keySet()) { listenerList.add(wrapper.listener); } return listenerList; } } public static void freeMemoryForOptimization(final long iDelayTime) { freeMemory(iDelayTime, OGlobalConfiguration.JVM_GC_DELAY_FOR_OPTIMIZE.getValueAsLong()); } public static void freeMemoryForResourceCleanup(final long iDelayTime) { freeMemory(iDelayTime, 0); } private static void freeMemory(final long iDelayTime, final long minimalTimeAmount) { final long dateLastGC = System.currentTimeMillis(); if (dateLastGC - lastGC > minimalTimeAmount * 1000) { lastGC = dateLastGC; System.gc(); if (iDelayTime > 0) try { Thread.sleep(iDelayTime); } catch (InterruptedException e) { Thread.currentThread().interrupt(); } } } }
1 (no label)
core_src_main_java_com_orientechnologies_orient_core_memory_OMemoryWatchDog.java
501
public class CreateIndexAction extends IndicesAction<CreateIndexRequest, CreateIndexResponse, CreateIndexRequestBuilder> {

    public static final CreateIndexAction INSTANCE = new CreateIndexAction();
    public static final String NAME = "indices/create";

    private CreateIndexAction() {
        super(NAME);
    }

    @Override
    public CreateIndexResponse newResponse() {
        return new CreateIndexResponse();
    }

    @Override
    public CreateIndexRequestBuilder newRequestBuilder(IndicesAdminClient client) {
        return new CreateIndexRequestBuilder(client);
    }
}
0 (true)
src_main_java_org_elasticsearch_action_admin_indices_create_CreateIndexAction.java
1,038
public class GetTermVectorCheckDocFreqTests extends ElasticsearchIntegrationTest { @Test public void testSimpleTermVectors() throws ElasticsearchException, IOException { XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties") .startObject("field") .field("type", "string") .field("term_vector", "with_positions_offsets_payloads") .field("analyzer", "tv_test") .endObject() .endObject() .endObject().endObject(); ElasticsearchAssertions.assertAcked(prepareCreate("test").addMapping("type1", mapping).setSettings( ImmutableSettings.settingsBuilder() .put("index.number_of_shards", 1) .put("index.analysis.analyzer.tv_test.tokenizer", "whitespace") .put("index.number_of_replicas", 0) .putArray("index.analysis.analyzer.tv_test.filter", "type_as_payload", "lowercase"))); ensureGreen(); int numDocs = 15; for (int i = 0; i < numDocs; i++) { client().prepareIndex("test", "type1", Integer.toString(i)) .setSource(XContentFactory.jsonBuilder().startObject().field("field", "the quick brown fox jumps over the lazy dog") // 0the3 4quick9 10brown15 16fox19 20jumps25 26over30 // 31the34 35lazy39 40dog43 .endObject()).execute().actionGet(); refresh(); } String[] values = { "brown", "dog", "fox", "jumps", "lazy", "over", "quick", "the" }; int[] freq = { 1, 1, 1, 1, 1, 1, 1, 2 }; int[][] pos = { { 2 }, { 8 }, { 3 }, { 4 }, { 7 }, { 5 }, { 1 }, { 0, 6 } }; int[][] startOffset = { { 10 }, { 40 }, { 16 }, { 20 }, { 35 }, { 26 }, { 4 }, { 0, 31 } }; int[][] endOffset = { { 15 }, { 43 }, { 19 }, { 25 }, { 39 }, { 30 }, { 9 }, { 3, 34 } }; for (int i = 0; i < numDocs; i++) { checkAllInfo(numDocs, values, freq, pos, startOffset, endOffset, i); checkWithoutTermStatistics(numDocs, values, freq, pos, startOffset, endOffset, i); checkWithoutFieldStatistics(numDocs, values, freq, pos, startOffset, endOffset, i); } } private void checkWithoutFieldStatistics(int numDocs, String[] values, int[] freq, int[][] pos, int[][] startOffset, int[][] endOffset, int i) throws IOException { TermVectorRequestBuilder resp = client().prepareTermVector("test", "type1", Integer.toString(i)).setPayloads(true).setOffsets(true) .setPositions(true).setTermStatistics(true).setFieldStatistics(false).setSelectedFields(); TermVectorResponse response = resp.execute().actionGet(); assertThat("doc id: " + i + " doesn't exists but should", response.isExists(), equalTo(true)); Fields fields = response.getFields(); assertThat(fields.size(), equalTo(1)); Terms terms = fields.terms("field"); assertThat(terms.size(), equalTo(8l)); assertThat(terms.getSumTotalTermFreq(), Matchers.equalTo((long) -1)); assertThat(terms.getDocCount(), Matchers.equalTo(-1)); assertThat(terms.getSumDocFreq(), equalTo((long) -1)); TermsEnum iterator = terms.iterator(null); for (int j = 0; j < values.length; j++) { String string = values[j]; BytesRef next = iterator.next(); assertThat(next, Matchers.notNullValue()); assertThat("expected " + string, string, equalTo(next.utf8ToString())); assertThat(next, Matchers.notNullValue()); if (string.equals("the")) { assertThat("expected ttf of " + string, numDocs * 2, equalTo((int) iterator.totalTermFreq())); } else { assertThat("expected ttf of " + string, numDocs, equalTo((int) iterator.totalTermFreq())); } DocsAndPositionsEnum docsAndPositions = iterator.docsAndPositions(null, null); assertThat(docsAndPositions.nextDoc(), equalTo(0)); assertThat(freq[j], equalTo(docsAndPositions.freq())); assertThat(iterator.docFreq(), equalTo(numDocs)); int[] termPos = pos[j]; int[] termStartOffset 
= startOffset[j]; int[] termEndOffset = endOffset[j]; assertThat(termPos.length, equalTo(freq[j])); assertThat(termStartOffset.length, equalTo(freq[j])); assertThat(termEndOffset.length, equalTo(freq[j])); for (int k = 0; k < freq[j]; k++) { int nextPosition = docsAndPositions.nextPosition(); assertThat("term: " + string, nextPosition, equalTo(termPos[k])); assertThat("term: " + string, docsAndPositions.startOffset(), equalTo(termStartOffset[k])); assertThat("term: " + string, docsAndPositions.endOffset(), equalTo(termEndOffset[k])); assertThat("term: " + string, docsAndPositions.getPayload(), equalTo(new BytesRef("word"))); } } assertThat(iterator.next(), Matchers.nullValue()); XContentBuilder xBuilder = new XContentFactory().jsonBuilder(); response.toXContent(xBuilder, null); BytesStream bytesStream = xBuilder.bytesStream(); String utf8 = bytesStream.bytes().toUtf8(); String expectedString = "{\"_index\":\"test\",\"_type\":\"type1\",\"_id\":\"" + i + "\",\"_version\":1,\"found\":true,\"term_vectors\":{\"field\":{\"terms\":{\"brown\":{\"doc_freq\":15,\"ttf\":15,\"term_freq\":1,\"tokens\":[{\"position\":2,\"start_offset\":10,\"end_offset\":15,\"payload\":\"d29yZA==\"}]},\"dog\":{\"doc_freq\":15,\"ttf\":15,\"term_freq\":1,\"tokens\":[{\"position\":8,\"start_offset\":40,\"end_offset\":43,\"payload\":\"d29yZA==\"}]},\"fox\":{\"doc_freq\":15,\"ttf\":15,\"term_freq\":1,\"tokens\":[{\"position\":3,\"start_offset\":16,\"end_offset\":19,\"payload\":\"d29yZA==\"}]},\"jumps\":{\"doc_freq\":15,\"ttf\":15,\"term_freq\":1,\"tokens\":[{\"position\":4,\"start_offset\":20,\"end_offset\":25,\"payload\":\"d29yZA==\"}]},\"lazy\":{\"doc_freq\":15,\"ttf\":15,\"term_freq\":1,\"tokens\":[{\"position\":7,\"start_offset\":35,\"end_offset\":39,\"payload\":\"d29yZA==\"}]},\"over\":{\"doc_freq\":15,\"ttf\":15,\"term_freq\":1,\"tokens\":[{\"position\":5,\"start_offset\":26,\"end_offset\":30,\"payload\":\"d29yZA==\"}]},\"quick\":{\"doc_freq\":15,\"ttf\":15,\"term_freq\":1,\"tokens\":[{\"position\":1,\"start_offset\":4,\"end_offset\":9,\"payload\":\"d29yZA==\"}]},\"the\":{\"doc_freq\":15,\"ttf\":30,\"term_freq\":2,\"tokens\":[{\"position\":0,\"start_offset\":0,\"end_offset\":3,\"payload\":\"d29yZA==\"},{\"position\":6,\"start_offset\":31,\"end_offset\":34,\"payload\":\"d29yZA==\"}]}}}}}"; assertThat(utf8, equalTo(expectedString)); } private void checkWithoutTermStatistics(int numDocs, String[] values, int[] freq, int[][] pos, int[][] startOffset, int[][] endOffset, int i) throws IOException { TermVectorRequestBuilder resp = client().prepareTermVector("test", "type1", Integer.toString(i)).setPayloads(true).setOffsets(true) .setPositions(true).setTermStatistics(false).setFieldStatistics(true).setSelectedFields(); assertThat(resp.request().termStatistics(), equalTo(false)); TermVectorResponse response = resp.execute().actionGet(); assertThat("doc id: " + i + " doesn't exists but should", response.isExists(), equalTo(true)); Fields fields = response.getFields(); assertThat(fields.size(), equalTo(1)); Terms terms = fields.terms("field"); assertThat(terms.size(), equalTo(8l)); assertThat(terms.getSumTotalTermFreq(), Matchers.equalTo((long) (9 * numDocs))); assertThat(terms.getDocCount(), Matchers.equalTo(numDocs)); assertThat(terms.getSumDocFreq(), equalTo((long) numDocs * values.length)); TermsEnum iterator = terms.iterator(null); for (int j = 0; j < values.length; j++) { String string = values[j]; BytesRef next = iterator.next(); assertThat(next, Matchers.notNullValue()); assertThat("expected " + string, string, 
equalTo(next.utf8ToString())); assertThat(next, Matchers.notNullValue()); assertThat("expected ttf of " + string, -1, equalTo((int) iterator.totalTermFreq())); DocsAndPositionsEnum docsAndPositions = iterator.docsAndPositions(null, null); assertThat(docsAndPositions.nextDoc(), equalTo(0)); assertThat(freq[j], equalTo(docsAndPositions.freq())); assertThat(iterator.docFreq(), equalTo(-1)); int[] termPos = pos[j]; int[] termStartOffset = startOffset[j]; int[] termEndOffset = endOffset[j]; assertThat(termPos.length, equalTo(freq[j])); assertThat(termStartOffset.length, equalTo(freq[j])); assertThat(termEndOffset.length, equalTo(freq[j])); for (int k = 0; k < freq[j]; k++) { int nextPosition = docsAndPositions.nextPosition(); assertThat("term: " + string, nextPosition, equalTo(termPos[k])); assertThat("term: " + string, docsAndPositions.startOffset(), equalTo(termStartOffset[k])); assertThat("term: " + string, docsAndPositions.endOffset(), equalTo(termEndOffset[k])); assertThat("term: " + string, docsAndPositions.getPayload(), equalTo(new BytesRef("word"))); } } assertThat(iterator.next(), Matchers.nullValue()); XContentBuilder xBuilder = new XContentFactory().jsonBuilder(); response.toXContent(xBuilder, null); BytesStream bytesStream = xBuilder.bytesStream(); String utf8 = bytesStream.bytes().toUtf8(); String expectedString = "{\"_index\":\"test\",\"_type\":\"type1\",\"_id\":\"" + i + "\",\"_version\":1,\"found\":true,\"term_vectors\":{\"field\":{\"field_statistics\":{\"sum_doc_freq\":120,\"doc_count\":15,\"sum_ttf\":135},\"terms\":{\"brown\":{\"term_freq\":1,\"tokens\":[{\"position\":2,\"start_offset\":10,\"end_offset\":15,\"payload\":\"d29yZA==\"}]},\"dog\":{\"term_freq\":1,\"tokens\":[{\"position\":8,\"start_offset\":40,\"end_offset\":43,\"payload\":\"d29yZA==\"}]},\"fox\":{\"term_freq\":1,\"tokens\":[{\"position\":3,\"start_offset\":16,\"end_offset\":19,\"payload\":\"d29yZA==\"}]},\"jumps\":{\"term_freq\":1,\"tokens\":[{\"position\":4,\"start_offset\":20,\"end_offset\":25,\"payload\":\"d29yZA==\"}]},\"lazy\":{\"term_freq\":1,\"tokens\":[{\"position\":7,\"start_offset\":35,\"end_offset\":39,\"payload\":\"d29yZA==\"}]},\"over\":{\"term_freq\":1,\"tokens\":[{\"position\":5,\"start_offset\":26,\"end_offset\":30,\"payload\":\"d29yZA==\"}]},\"quick\":{\"term_freq\":1,\"tokens\":[{\"position\":1,\"start_offset\":4,\"end_offset\":9,\"payload\":\"d29yZA==\"}]},\"the\":{\"term_freq\":2,\"tokens\":[{\"position\":0,\"start_offset\":0,\"end_offset\":3,\"payload\":\"d29yZA==\"},{\"position\":6,\"start_offset\":31,\"end_offset\":34,\"payload\":\"d29yZA==\"}]}}}}}"; assertThat(utf8, equalTo(expectedString)); } private void checkAllInfo(int numDocs, String[] values, int[] freq, int[][] pos, int[][] startOffset, int[][] endOffset, int i) throws IOException { TermVectorRequestBuilder resp = client().prepareTermVector("test", "type1", Integer.toString(i)).setPayloads(true).setOffsets(true) .setPositions(true).setFieldStatistics(true).setTermStatistics(true).setSelectedFields(); assertThat(resp.request().fieldStatistics(), equalTo(true)); TermVectorResponse response = resp.execute().actionGet(); assertThat("doc id: " + i + " doesn't exists but should", response.isExists(), equalTo(true)); Fields fields = response.getFields(); assertThat(fields.size(), equalTo(1)); Terms terms = fields.terms("field"); assertThat(terms.size(), equalTo(8l)); assertThat(terms.getSumTotalTermFreq(), Matchers.equalTo((long) (9 * numDocs))); assertThat(terms.getDocCount(), Matchers.equalTo(numDocs)); assertThat(terms.getSumDocFreq(), 
equalTo((long) numDocs * values.length)); TermsEnum iterator = terms.iterator(null); for (int j = 0; j < values.length; j++) { String string = values[j]; BytesRef next = iterator.next(); assertThat(next, Matchers.notNullValue()); assertThat("expected " + string, string, equalTo(next.utf8ToString())); assertThat(next, Matchers.notNullValue()); if (string.equals("the")) { assertThat("expected ttf of " + string, numDocs * 2, equalTo((int) iterator.totalTermFreq())); } else { assertThat("expected ttf of " + string, numDocs, equalTo((int) iterator.totalTermFreq())); } DocsAndPositionsEnum docsAndPositions = iterator.docsAndPositions(null, null); assertThat(docsAndPositions.nextDoc(), equalTo(0)); assertThat(freq[j], equalTo(docsAndPositions.freq())); assertThat(iterator.docFreq(), equalTo(numDocs)); int[] termPos = pos[j]; int[] termStartOffset = startOffset[j]; int[] termEndOffset = endOffset[j]; assertThat(termPos.length, equalTo(freq[j])); assertThat(termStartOffset.length, equalTo(freq[j])); assertThat(termEndOffset.length, equalTo(freq[j])); for (int k = 0; k < freq[j]; k++) { int nextPosition = docsAndPositions.nextPosition(); assertThat("term: " + string, nextPosition, equalTo(termPos[k])); assertThat("term: " + string, docsAndPositions.startOffset(), equalTo(termStartOffset[k])); assertThat("term: " + string, docsAndPositions.endOffset(), equalTo(termEndOffset[k])); assertThat("term: " + string, docsAndPositions.getPayload(), equalTo(new BytesRef("word"))); } } assertThat(iterator.next(), Matchers.nullValue()); XContentBuilder xBuilder = new XContentFactory().jsonBuilder(); response.toXContent(xBuilder, null); BytesStream bytesStream = xBuilder.bytesStream(); String utf8 = bytesStream.bytes().toUtf8(); String expectedString = "{\"_index\":\"test\",\"_type\":\"type1\",\"_id\":\"" + i + "\",\"_version\":1,\"found\":true,\"term_vectors\":{\"field\":{\"field_statistics\":{\"sum_doc_freq\":120,\"doc_count\":15,\"sum_ttf\":135},\"terms\":{\"brown\":{\"doc_freq\":15,\"ttf\":15,\"term_freq\":1,\"tokens\":[{\"position\":2,\"start_offset\":10,\"end_offset\":15,\"payload\":\"d29yZA==\"}]},\"dog\":{\"doc_freq\":15,\"ttf\":15,\"term_freq\":1,\"tokens\":[{\"position\":8,\"start_offset\":40,\"end_offset\":43,\"payload\":\"d29yZA==\"}]},\"fox\":{\"doc_freq\":15,\"ttf\":15,\"term_freq\":1,\"tokens\":[{\"position\":3,\"start_offset\":16,\"end_offset\":19,\"payload\":\"d29yZA==\"}]},\"jumps\":{\"doc_freq\":15,\"ttf\":15,\"term_freq\":1,\"tokens\":[{\"position\":4,\"start_offset\":20,\"end_offset\":25,\"payload\":\"d29yZA==\"}]},\"lazy\":{\"doc_freq\":15,\"ttf\":15,\"term_freq\":1,\"tokens\":[{\"position\":7,\"start_offset\":35,\"end_offset\":39,\"payload\":\"d29yZA==\"}]},\"over\":{\"doc_freq\":15,\"ttf\":15,\"term_freq\":1,\"tokens\":[{\"position\":5,\"start_offset\":26,\"end_offset\":30,\"payload\":\"d29yZA==\"}]},\"quick\":{\"doc_freq\":15,\"ttf\":15,\"term_freq\":1,\"tokens\":[{\"position\":1,\"start_offset\":4,\"end_offset\":9,\"payload\":\"d29yZA==\"}]},\"the\":{\"doc_freq\":15,\"ttf\":30,\"term_freq\":2,\"tokens\":[{\"position\":0,\"start_offset\":0,\"end_offset\":3,\"payload\":\"d29yZA==\"},{\"position\":6,\"start_offset\":31,\"end_offset\":34,\"payload\":\"d29yZA==\"}]}}}}}"; assertThat(utf8, equalTo(expectedString)); } }
0 (true)
src_test_java_org_elasticsearch_action_termvector_GetTermVectorCheckDocFreqTests.java
1,371
protected class ParameterizedTypeImpl implements ParameterizedType {

    private final Type[] actualTypeArguments;
    private final Class<?> rawType;
    private Type ownerType;

    public ParameterizedTypeImpl(Type[] actualTypeArguments, Class<?> rawType, Type ownerType) {
        this.actualTypeArguments = actualTypeArguments;
        this.rawType = rawType;
        if (ownerType != null) {
            this.ownerType = ownerType;
        } else {
            this.ownerType = rawType.getDeclaringClass();
        }
    }

    @Override
    public Type getRawType() {
        return rawType;
    }

    @Override
    public Type getOwnerType() {
        return ownerType;
    }

    @Override
    public Type[] getActualTypeArguments() {
        return actualTypeArguments.clone();
    }
}
0 (true)
core_broadleaf-framework-web_src_main_java_org_broadleafcommerce_core_web_api_BroadleafMessageBodyReaderWriter.java
473
public interface ClientPartitionService {
    Address getPartitionOwner(int partitionId);
    int getPartitionId(Data key);
    int getPartitionId(Object key);
    int getPartitionCount();
    Partition getPartition(int partitionId);
}
0 (true)
hazelcast-client_src_main_java_com_hazelcast_client_spi_ClientPartitionService.java
1,802
command.acceptTargetVisitor(new BindingTargetVisitor<T, Void>() { public Void visit(InstanceBinding<? extends T> binding) { Set<InjectionPoint> injectionPoints = binding.getInjectionPoints(); T instance = binding.getInstance(); Initializable<T> ref = initializer.requestInjection( injector, instance, source, injectionPoints); ConstantFactory<? extends T> factory = new ConstantFactory<T>(ref); InternalFactory<? extends T> scopedFactory = Scopes.scope(key, injector, factory, scoping); putBinding(new InstanceBindingImpl<T>(injector, key, source, scopedFactory, injectionPoints, instance)); return null; } public Void visit(ProviderInstanceBinding<? extends T> binding) { Provider<? extends T> provider = binding.getProviderInstance(); Set<InjectionPoint> injectionPoints = binding.getInjectionPoints(); Initializable<Provider<? extends T>> initializable = initializer .<Provider<? extends T>>requestInjection(injector, provider, source, injectionPoints); InternalFactory<T> factory = new InternalFactoryToProviderAdapter<T>(initializable, source); InternalFactory<? extends T> scopedFactory = Scopes.scope(key, injector, factory, scoping); putBinding(new ProviderInstanceBindingImpl<T>(injector, key, source, scopedFactory, scoping, provider, injectionPoints)); return null; } public Void visit(ProviderKeyBinding<? extends T> binding) { Key<? extends Provider<? extends T>> providerKey = binding.getProviderKey(); BoundProviderFactory<T> boundProviderFactory = new BoundProviderFactory<T>(injector, providerKey, source); creationListeners.add(boundProviderFactory); InternalFactory<? extends T> scopedFactory = Scopes.scope( key, injector, (InternalFactory<? extends T>) boundProviderFactory, scoping); putBinding(new LinkedProviderBindingImpl<T>( injector, key, source, scopedFactory, scoping, providerKey)); return null; } public Void visit(LinkedKeyBinding<? extends T> binding) { Key<? extends T> linkedKey = binding.getLinkedKey(); if (key.equals(linkedKey)) { errors.recursiveBinding(); } FactoryProxy<T> factory = new FactoryProxy<T>(injector, key, linkedKey, source); creationListeners.add(factory); InternalFactory<? extends T> scopedFactory = Scopes.scope(key, injector, factory, scoping); putBinding( new LinkedBindingImpl<T>(injector, key, source, scopedFactory, scoping, linkedKey)); return null; } public Void visit(UntargettedBinding<? extends T> untargetted) { // Error: Missing implementation. // Example: bind(Date.class).annotatedWith(Red.class); // We can't assume abstract types aren't injectable. They may have an // @ImplementedBy annotation or something. if (key.hasAnnotationType()) { errors.missingImplementation(key); putBinding(invalidBinding(injector, key, source)); return null; } // This cast is safe after the preceeding check. final BindingImpl<T> binding; try { binding = injector.createUnitializedBinding(key, scoping, source, errors); putBinding(binding); } catch (ErrorsException e) { errors.merge(e.getErrors()); putBinding(invalidBinding(injector, key, source)); return null; } uninitializedBindings.add(new Runnable() { public void run() { try { ((InjectorImpl) binding.getInjector()).initializeBinding( binding, errors.withSource(source)); } catch (ErrorsException e) { errors.merge(e.getErrors()); } } }); return null; } public Void visit(ExposedBinding<? extends T> binding) { throw new IllegalArgumentException("Cannot apply a non-module element"); } public Void visit(ConvertedConstantBinding<? 
extends T> binding) { throw new IllegalArgumentException("Cannot apply a non-module element"); } public Void visit(ConstructorBinding<? extends T> binding) { throw new IllegalArgumentException("Cannot apply a non-module element"); } public Void visit(ProviderBinding<? extends T> binding) { throw new IllegalArgumentException("Cannot apply a non-module element"); } });
0 (true)
src_main_java_org_elasticsearch_common_inject_BindingProcessor.java
1,124
@RunWith(HazelcastParallelClassRunner.class) @Category(QuickTest.class) public class XMLConfigBuilderTest { @Test public void testCleanNodeName() { XmlConfigBuilder configBuilder = new XmlConfigBuilder(); assertEquals("nocolon", configBuilder.cleanNodeName("noColon")); assertEquals("after", configBuilder.cleanNodeName("Before:After")); assertNull(configBuilder.cleanNodeName((String) null)); } @Test public void readVariables() { String xml = "<hazelcast>\n" + " <semaphore name=\"${name}\">\n" + " <initial-permits>${initial.permits}</initial-permits>\n" + " <backup-count>${backupcount.part1}${backupcount.part2}</backup-count>\n" + " <async-backup-count>${notreplaced}</async-backup-count>\n" + " </semaphore>" + "</hazelcast>"; ByteArrayInputStream bis = new ByteArrayInputStream(xml.getBytes()); XmlConfigBuilder configBuilder = new XmlConfigBuilder(bis); Properties properties = new Properties(); properties.setProperty("name","s"); properties.setProperty("initial.permits","25"); properties.setProperty("backupcount.part1","1"); properties.setProperty("backupcount.part2","0"); configBuilder.setProperties(properties); Config config = configBuilder.build(); SemaphoreConfig semaphoreConfig = config.getSemaphoreConfig("s"); assertEquals(25, semaphoreConfig.getInitialPermits()); assertEquals(10, semaphoreConfig.getBackupCount()); assertEquals(0, semaphoreConfig.getAsyncBackupCount()); } @Test public void readAwsConfig() { String xml = "<hazelcast>\n" + " <group>\n" + " <name>dev</name>\n" + " <password>dev-pass</password>\n" + " </group>\n" + " <network>\n" + " <port auto-increment=\"true\">5701</port>\n" + " <join>\n" + " <multicast enabled=\"false\">\n" + " <multicast-group>224.2.2.3</multicast-group>\n" + " <multicast-port>54327</multicast-port>\n" + " </multicast>\n" + " <tcp-ip enabled=\"false\">\n" + " <interface>127.0.0.1</interface>\n" + " </tcp-ip>\n" + " <aws enabled=\"true\" connection-timeout-seconds=\"10\" >\n" + " <access-key>access</access-key>\n" + " <secret-key>secret</secret-key>\n" + " </aws>\n" + " </join>\n" + " <interfaces enabled=\"false\">\n" + " <interface>10.10.1.*</interface>\n" + " </interfaces>\n" + " </network>\n" + "</hazelcast>"; Config config = buildConfig(xml); AwsConfig awsConfig = config.getNetworkConfig().getJoin().getAwsConfig(); assertTrue(awsConfig.isEnabled()); assertEquals(10, config.getNetworkConfig().getJoin().getAwsConfig().getConnectionTimeoutSeconds()); assertEquals("access", awsConfig.getAccessKey()); assertEquals("secret", awsConfig.getSecretKey()); } @Test public void readPortCount() { //check when it is explicitly set. Config config = buildConfig("<hazelcast>\n" + " <network>\n" + " <port port-count=\"200\">5701</port>\n" + " </network>\n" + "</hazelcast>"); assertEquals(200, config.getNetworkConfig().getPortCount()); //check if the default is passed in correctly config = buildConfig( "<hazelcast>\n" + " <network>\n" + " <port>5701</port>\n" + " </network>\n" + "</hazelcast>"); assertEquals(100, config.getNetworkConfig().getPortCount()); } @Test public void readPortAutoIncrement() { //explicitly set. 
Config config = buildConfig("<hazelcast>\n" + " <network>\n" + " <port auto-increment=\"false\">5701</port>\n" + " </network>\n" + "</hazelcast>"); assertFalse(config.getNetworkConfig().isPortAutoIncrement()); //check if the default is picked up correctly config = buildConfig( "<hazelcast>\n" + " <network>\n" + " <port>5701</port>\n" + " </network>\n" + "</hazelcast>"); assertTrue(config.getNetworkConfig().isPortAutoIncrement()); } @Test public void readSemaphoreConfig() { String xml = "<hazelcast>\n" + " <semaphore name=\"default\">\n" + " <initial-permits>1</initial-permits>\n" + " </semaphore>" + " <semaphore name=\"custom\">\n" + " <initial-permits>10</initial-permits>\n" + " <semaphore-factory enabled=\"true\">" + " <class-name>com.acme.MySemaphore</class-name>\n" + " </semaphore-factory>" + " </semaphore>" + "</hazelcast>"; Config config = buildConfig(xml); SemaphoreConfig defaultConfig = config.getSemaphoreConfig("default"); SemaphoreConfig customConfig = config.getSemaphoreConfig("custom"); assertEquals(1, defaultConfig.getInitialPermits()); assertEquals(10, customConfig.getInitialPermits()); } @Test public void testConfig2Xml2DefaultConfig() { testConfig2Xml2Config("hazelcast-default.xml"); } @Test public void testConfig2Xml2FullConfig() { testConfig2Xml2Config("hazelcast-fullconfig.xml"); } private void testConfig2Xml2Config(String fileName) { final Config config = new ClasspathXmlConfig(fileName); final String xml = new ConfigXmlGenerator(true).generate(config); final Config config2 = new InMemoryXmlConfig(xml); assertTrue(config.isCompatible(config2)); assertTrue(config2.isCompatible(config)); } @Test public void testXSDDefaultXML() throws SAXException, IOException { testXSDConfigXML("hazelcast-default.xml"); } @Test public void testFullConfigXML() throws SAXException, IOException { testXSDConfigXML("hazelcast-fullconfig.xml"); } @Test public void testCaseInsensitivityOfSettings() { String xml = "<hazelcast>\n" + "<map name=\"testCaseInsensitivity\">"+ "<in-memory-format>binary</in-memory-format> "+ "<backup-count>1</backup-count> " + "<async-backup-count>0</async-backup-count> " + "<time-to-live-seconds>0</time-to-live-seconds>" + "<max-idle-seconds>0</max-idle-seconds> " + "<eviction-policy>none</eviction-policy> " + "<max-size policy=\"per_partition\">0</max-size>" + "<eviction-percentage>25</eviction-percentage>" + "<merge-policy>com.hazelcast.map.merge.PassThroughMergePolicy</merge-policy>"+ "</map>"+ "</hazelcast>"; final Config config = buildConfig(xml); final MapConfig mapConfig = config.getMapConfig("testCaseInsensitivity"); assertTrue(mapConfig.getInMemoryFormat().equals(InMemoryFormat.BINARY)); assertTrue(mapConfig.getEvictionPolicy().equals(MapConfig.EvictionPolicy.NONE)); assertTrue(mapConfig.getMaxSizeConfig().getMaxSizePolicy().equals(MaxSizeConfig.MaxSizePolicy.PER_PARTITION)); } @Test public void testManagementCenterConfig() { String xml = "<hazelcast>\n" + "<management-center enabled=\"true\" security-token=\"someToken\" cluster-id=\"someClusterId\">"+ "someUrl"+ "</management-center>"+ "</hazelcast>"; final Config config = buildConfig(xml); final ManagementCenterConfig manCenterCfg = config.getManagementCenterConfig(); assertTrue(manCenterCfg.isEnabled()); assertEquals("someClusterId",manCenterCfg.getClusterId()); assertEquals("someToken",manCenterCfg.getSecurityToken()); assertEquals("someUrl",manCenterCfg.getUrl()); } @Test public void testNullManagementCenterConfig() { String xml = "<hazelcast>\n" + "<management-center>"+ "</management-center>"+ 
"</hazelcast>"; final Config config = buildConfig(xml); final ManagementCenterConfig manCenterCfg = config.getManagementCenterConfig(); assertFalse(manCenterCfg.isEnabled()); assertNull(manCenterCfg.getClusterId()); assertNull(manCenterCfg.getSecurityToken()); assertNull(manCenterCfg.getUrl()); } @Test public void testEmptyManagementCenterConfig() { String xml = "<hazelcast>\n" + "</hazelcast>"; final Config config = buildConfig(xml); final ManagementCenterConfig manCenterCfg = config.getManagementCenterConfig(); assertFalse(manCenterCfg.isEnabled()); assertNull(manCenterCfg.getClusterId()); assertNull(manCenterCfg.getSecurityToken()); assertNull(manCenterCfg.getUrl()); } @Test public void testNotEnabledManagementCenterConfig() { String xml = "<hazelcast>\n" + "<management-center enabled=\"false\">"+ "</management-center>"+ "</hazelcast>"; final Config config = buildConfig(xml); final ManagementCenterConfig manCenterCfg = config.getManagementCenterConfig(); assertFalse(manCenterCfg.isEnabled()); assertNull(manCenterCfg.getClusterId()); assertNull(manCenterCfg.getSecurityToken()); assertNull(manCenterCfg.getUrl()); } @Test public void testNotEnabledWithURLManagementCenterConfig() { String xml = "<hazelcast>\n" + "<management-center enabled=\"false\">"+ "http://localhost:8080/mancenter"+ "</management-center>"+ "</hazelcast>"; final Config config = buildConfig(xml); final ManagementCenterConfig manCenterCfg = config.getManagementCenterConfig(); assertFalse(manCenterCfg.isEnabled()); assertNull(manCenterCfg.getClusterId()); assertNull(manCenterCfg.getSecurityToken()); assertEquals("http://localhost:8080/mancenter", manCenterCfg.getUrl()); } @Test public void testManagementCenterConfig_onlySecurityTokenSet() { String xml = "<hazelcast>\n" + "<management-center security-token=\"someToken\">"+ "</management-center>"+ "</hazelcast>"; final Config config = buildConfig(xml); final ManagementCenterConfig manCenterCfg = config.getManagementCenterConfig(); assertTrue(manCenterCfg.isEnabled()); assertEquals("someToken",manCenterCfg.getSecurityToken()); assertNull(manCenterCfg.getClusterId()); assertNull(manCenterCfg.getUrl()); } @Test public void testMapStoreInitialModeLazy() { String xml = "<hazelcast>\n" + "<map name=\"mymap\">"+ "<map-store enabled=\"true\" initial-mode=\"LAZY\"></map-store>"+ "</map>"+ "</hazelcast>"; final Config config = buildConfig(xml); System.out.println("config = " + config); final MapStoreConfig mapStoreConfig = config.getMapConfig("mymap").getMapStoreConfig(); assertTrue(mapStoreConfig.isEnabled()); assertEquals(MapStoreConfig.InitialLoadMode.LAZY, mapStoreConfig.getInitialLoadMode()); } @Test public void testMapStoreInitialModeEager() { String xml = "<hazelcast>\n" + "<map name=\"mymap\">"+ "<map-store enabled=\"true\" initial-mode=\"EAGER\"></map-store>"+ "</map>"+ "</hazelcast>"; final Config config = buildConfig(xml); System.out.println("config = " + config); final MapStoreConfig mapStoreConfig = config.getMapConfig("mymap").getMapStoreConfig(); assertTrue(mapStoreConfig.isEnabled()); assertEquals(MapStoreConfig.InitialLoadMode.EAGER, mapStoreConfig.getInitialLoadMode()); } @Test(expected = HazelcastException.class) public void testParseExceptionIsNotSwallowed() { String invalidXml = "<hazelcast>\n" + "</hazelcast"; buildConfig(invalidXml); fail(); //if we, for any reason, we get through the parsing, fail. 
} private void testXSDConfigXML(String xmlFileName) throws SAXException, IOException { SchemaFactory factory = SchemaFactory.newInstance(XMLConstants.W3C_XML_SCHEMA_NS_URI); URL schemaResource = XMLConfigBuilderTest.class.getClassLoader().getResource("hazelcast-config-3.3.xsd"); InputStream xmlResource = XMLConfigBuilderTest.class.getClassLoader().getResourceAsStream(xmlFileName); Schema schema = factory.newSchema(schemaResource); Source source = new StreamSource(xmlResource); Validator validator = schema.newValidator(); try { validator.validate(source); } catch (SAXException ex) { fail(xmlFileName + " is not valid because: " + ex.toString()); } } private Config buildConfig(String xml) { ByteArrayInputStream bis = new ByteArrayInputStream(xml.getBytes()); XmlConfigBuilder configBuilder = new XmlConfigBuilder(bis); return configBuilder.build(); } }
0true
hazelcast_src_test_java_com_hazelcast_config_XMLConfigBuilderTest.java
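The tests in the record above all funnel through the same buildConfig(String) helper: the XML is wrapped in a ByteArrayInputStream and handed to XmlConfigBuilder. Below is a minimal standalone sketch of that flow, using only calls that appear in the test itself; the XML content and URL are illustrative.

import java.io.ByteArrayInputStream;

import com.hazelcast.config.Config;
import com.hazelcast.config.ManagementCenterConfig;
import com.hazelcast.config.XmlConfigBuilder;

public class XmlConfigExample {
    public static void main(String[] args) {
        // Illustrative in-memory configuration, mirroring the inline strings used by the tests above.
        String xml = "<hazelcast>\n"
                + "  <management-center enabled=\"true\">http://localhost:8080/mancenter</management-center>\n"
                + "</hazelcast>";

        // Same parsing path as the tests' buildConfig(..) helper.
        Config config = new XmlConfigBuilder(new ByteArrayInputStream(xml.getBytes())).build();

        ManagementCenterConfig mc = config.getManagementCenterConfig();
        System.out.println("enabled=" + mc.isEnabled() + ", url=" + mc.getUrl());
    }
}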
1,466
public class FindReferencesVisitor extends Visitor implements NaturalVisitor { private Referenceable declaration; private final Set<Node> nodes = new HashSet<Node>(); public FindReferencesVisitor(Referenceable declaration) { if (declaration instanceof TypedDeclaration) { Referenceable od = declaration; while (od!=null && od!=declaration) { declaration = od; od = ((TypedDeclaration) od).getOriginalDeclaration(); } } if (declaration instanceof Declaration && ((Declaration)declaration).getContainer() instanceof Setter) { Setter setter = (Setter) ((Declaration)declaration).getContainer(); if (setter.getDirectMember(setter.getName(), null, false) .equals(declaration)) { declaration = setter; } } if (declaration instanceof Setter) { declaration = ((Setter) declaration).getGetter(); } this.declaration = declaration; } public Referenceable getDeclaration() { return declaration; } public Set<Node> getNodes() { return nodes; } protected boolean isReference(Parameter p) { return p!=null && isReference(p.getModel()); } protected boolean isReference(Declaration ref) { return ref!=null && declaration instanceof Declaration && (((Declaration)declaration).refines(ref) || isSetterParameterReference(ref)); } private boolean isSetterParameterReference(Declaration ref) { if (ref.getContainer() instanceof Setter) { Setter setter = (Setter) ref.getContainer(); return setter.getDirectMember(setter.getName(), null, false).equals(ref) && isReference(setter.getGetter()); } else { return false; } } protected boolean isReference(Declaration ref, String id) { return isReference(ref); } private Tree.Variable getConditionVariable(Condition c) { if (c instanceof Tree.ExistsOrNonemptyCondition) { return ((Tree.ExistsOrNonemptyCondition) c).getVariable(); } if (c instanceof Tree.IsCondition) { return ((Tree.IsCondition) c).getVariable(); } return null; } @Override public void visit(Tree.CaseClause that) { Tree.CaseItem ci = that.getCaseItem(); if (ci instanceof Tree.IsCase) { Tree.Variable var = ((Tree.IsCase) ci).getVariable(); if (var!=null) { TypedDeclaration od = var.getDeclarationModel().getOriginalDeclaration(); if (od!=null && od.equals(declaration)) { Referenceable d = declaration; declaration = var.getDeclarationModel(); that.getBlock().visit(this); declaration = d; return; } } } super.visit(that); } @Override public void visit(Tree.IfClause that) { for (Condition c: that.getConditionList().getConditions()) { Tree.Variable var = getConditionVariable(c); if (var!=null && var.getType() instanceof Tree.SyntheticVariable) { TypedDeclaration od = var.getDeclarationModel().getOriginalDeclaration(); if (od!=null && od.equals(declaration)) { c.visit(this); Referenceable d = declaration; declaration = var.getDeclarationModel(); if (that.getBlock()!=null) { that.getBlock().visit(this); } declaration = d; return; } } } super.visit(that); } @Override public void visit(Tree.WhileClause that) { for (Condition c: that.getConditionList().getConditions()) { Tree.Variable var = getConditionVariable(c); if (var!=null && var.getType() instanceof Tree.SyntheticVariable) { TypedDeclaration od = var.getDeclarationModel() .getOriginalDeclaration(); if (od!=null && od.equals(declaration)) { c.visit(this); Referenceable d = declaration; declaration = var.getDeclarationModel(); that.getBlock().visit(this); declaration = d; return; } } } super.visit(that); } @Override public void visit(Tree.Body body) { Referenceable d = declaration; for (Tree.Statement st: body.getStatements()) { if (st instanceof Tree.Assertion) { Tree.Assertion that = 
(Tree.Assertion) st; for (Condition c: that.getConditionList().getConditions()) { Tree.Variable var = getConditionVariable(c); if (var!=null && var.getType() instanceof Tree.SyntheticVariable) { TypedDeclaration od = var.getDeclarationModel() .getOriginalDeclaration(); if (od!=null && od.equals(declaration)) { c.visit(this); declaration = var.getDeclarationModel(); break; } } } } st.visit(this); } declaration = d; } @Override public void visit(Tree.ExtendedTypeExpression that) {} @Override public void visit(Tree.StaticMemberOrTypeExpression that) { if (isReference(that.getDeclaration(), id(that.getIdentifier()))) { nodes.add(that); } super.visit(that); } public void visit(Tree.MemberLiteral that) { if (isReference(that.getDeclaration(), id(that.getIdentifier()))) { nodes.add(that); } super.visit(that); } @Override public void visit(Tree.TypedArgument that) { if (isReference(that.getParameter())) { nodes.add(that); } super.visit(that); } @Override public void visit(Tree.SpecifiedArgument that) { if (that.getIdentifier()!=null && that.getIdentifier().getToken()!=null && isReference(that.getParameter())) { nodes.add(that); } super.visit(that); } @Override public void visit(Tree.SimpleType that) { ProducedType type = that.getTypeModel(); if (type!=null && isReference(type.getDeclaration(), id(that.getIdentifier()))) { nodes.add(that); } super.visit(that); } @Override public void visit(Tree.ImportMemberOrType that) { if (isReference(that.getDeclarationModel())) { nodes.add(that); } super.visit(that); } @Override public void visit(Import that) { super.visit(that); if (declaration instanceof Package) { if (formatPath(that.getImportPath().getIdentifiers()) .equals(declaration.getNameAsString())) { nodes.add(that); } } } @Override public void visit(ImportModule that) { super.visit(that); if (declaration instanceof Module) { if (formatPath(that.getImportPath().getIdentifiers()) .equals(declaration.getNameAsString())) { nodes.add(that); } } } @Override public void visit(Tree.InitializerParameter that) { if (isReference(that.getParameterModel())) { nodes.add(that); } else { super.visit(that); } } private String id(Tree.Identifier that) { return that==null ? null : that.getText(); } }
1no label
plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_util_FindReferencesVisitor.java
2,578
clusterService.submitStateUpdateTask("zen-disco-receive(from master [" + newState.nodes().masterNode() + "])", Priority.URGENT, new ProcessedClusterStateUpdateTask() { @Override public ClusterState execute(ClusterState currentState) { // we don't need to do this, since we ping the master, and get notified when it has moved from being a master // because it doesn't have enough master nodes... //if (!electMaster.hasEnoughMasterNodes(newState.nodes())) { // return disconnectFromCluster(newState, "not enough master nodes on new cluster state received from [" + newState.nodes().masterNode() + "]"); //} latestDiscoNodes = newState.nodes(); // check to see that we monitor the correct master of the cluster if (masterFD.masterNode() == null || !masterFD.masterNode().equals(latestDiscoNodes.masterNode())) { masterFD.restart(latestDiscoNodes.masterNode(), "new cluster state received and we are monitoring the wrong master [" + masterFD.masterNode() + "]"); } ClusterState.Builder builder = ClusterState.builder(newState); // if the routing table did not change, use the original one if (newState.routingTable().version() == currentState.routingTable().version()) { builder.routingTable(currentState.routingTable()); } // same for metadata if (newState.metaData().version() == currentState.metaData().version()) { builder.metaData(currentState.metaData()); } else { // if its not the same version, only copy over new indices or ones that changed the version MetaData.Builder metaDataBuilder = MetaData.builder(newState.metaData()).removeAllIndices(); for (IndexMetaData indexMetaData : newState.metaData()) { IndexMetaData currentIndexMetaData = currentState.metaData().index(indexMetaData.index()); if (currentIndexMetaData == null || currentIndexMetaData.version() != indexMetaData.version()) { metaDataBuilder.put(indexMetaData, false); } else { metaDataBuilder.put(currentIndexMetaData, false); } } builder.metaData(metaDataBuilder); } return builder.build(); } @Override public void onFailure(String source, Throwable t) { logger.error("unexpected failure during [{}]", t, source); newStateProcessed.onNewClusterStateFailed(t); } @Override public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) { sendInitialStateEventIfNeeded(); newStateProcessed.onNewClusterStateProcessed(); } });
1no label
src_main_java_org_elasticsearch_discovery_zen_ZenDiscovery.java
1,006
public abstract class SemaphoreRequest extends PartitionClientRequest implements Portable, SecureRequest { protected String name; protected int permitCount; protected SemaphoreRequest() { } protected SemaphoreRequest(String name, int permitCount) { this.name = name; this.permitCount = permitCount; } @Override protected int getPartition() { ClientEngine clientEngine = getClientEngine(); Data key = clientEngine.getSerializationService().toData(name); return clientEngine.getPartitionService().getPartitionId(key); } @Override public String getServiceName() { return SemaphoreService.SERVICE_NAME; } @Override public int getFactoryId() { return SemaphorePortableHook.F_ID; } @Override public void write(PortableWriter writer) throws IOException { writer.writeUTF("n", name); writer.writeInt("p", permitCount); } @Override public void read(PortableReader reader) throws IOException { name = reader.readUTF("n"); permitCount = reader.readInt("p"); } }
0true
hazelcast_src_main_java_com_hazelcast_concurrent_semaphore_client_SemaphoreRequest.java
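SemaphoreRequest above is client-to-member plumbing: the semaphore name is serialized to a Data key so the request can be routed to the partition that owns it. Application code would normally go through the public ISemaphore API instead; a hedged sketch follows, with the semaphore name and permit count chosen purely for illustration.

import com.hazelcast.client.HazelcastClient;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.core.ISemaphore;

public class SemaphoreClientExample {
    public static void main(String[] args) throws InterruptedException {
        // Each acquire/release below travels to the cluster as a partition-routed
        // client request, much like the SemaphoreRequest subclasses above.
        HazelcastInstance client = HazelcastClient.newHazelcastClient();
        ISemaphore semaphore = client.getSemaphore("orders"); // name is illustrative
        semaphore.init(3);       // make three permits available
        semaphore.acquire();     // take one permit
        try {
            // ... work that needs the permit ...
        } finally {
            semaphore.release(); // hand the permit back
        }
        HazelcastClient.shutdownAll();
    }
}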
897
public class IdGeneratorService implements ManagedService, RemoteService { public static final String SERVICE_NAME = "hz:impl:idGeneratorService"; public static final String ATOMIC_LONG_NAME = "hz:atomic:idGenerator:"; private NodeEngine nodeEngine; public IdGeneratorService(NodeEngine nodeEngine) { this.nodeEngine = nodeEngine; } @Override public void init(NodeEngine nodeEngine, Properties properties) { this.nodeEngine = nodeEngine; } @Override public void reset() { } @Override public void shutdown(boolean terminate) { } private IAtomicLong getBlockGenerator(String name) { HazelcastInstance hazelcastInstance = nodeEngine.getHazelcastInstance(); return hazelcastInstance.getAtomicLong(ATOMIC_LONG_NAME + name); } @Override public DistributedObject createDistributedObject(String name) { IAtomicLong blockGenerator = getBlockGenerator(name); return new IdGeneratorProxy(blockGenerator, name, nodeEngine, this); } @Override public void destroyDistributedObject(String name) { } }
0true
hazelcast_src_main_java_com_hazelcast_concurrent_idgen_IdGeneratorService.java
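IdGeneratorService above backs each generator with an IAtomicLong named "hz:atomic:idGenerator:<name>", from which the proxy reserves blocks of ids. From application code the feature is reached through the IdGenerator proxy; a small hedged sketch (the generator name is illustrative):

import com.hazelcast.core.Hazelcast;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.core.IdGenerator;

public class IdGeneratorExample {
    public static void main(String[] args) {
        HazelcastInstance hz = Hazelcast.newHazelcastInstance();
        // The proxy created by IdGeneratorService hands out ids from blocks that it
        // reserves through the backing IAtomicLong.
        IdGenerator ids = hz.getIdGenerator("order-ids"); // name is illustrative
        long first = ids.newId();
        long second = ids.newId();
        System.out.println(first + ", " + second); // cluster-wide unique ids
        Hazelcast.shutdownAll();
    }
}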
1,107
public class OSQLFunctionUnion extends OSQLFunctionMultiValueAbstract<Collection<Object>> { public static final String NAME = "union"; public OSQLFunctionUnion() { super(NAME, 1, -1); } public Object execute(final OIdentifiable iCurrentRecord, Object iCurrentResult, final Object[] iParameters, OCommandContext iContext) { if (iParameters.length == 1) { // AGGREGATION MODE (STATEFULL) Object value = iParameters[0]; if (value != null) { if (value instanceof OSQLFilterItemVariable) value = ((OSQLFilterItemVariable) value).getValue(iCurrentRecord, iContext); if (context == null) context = new ArrayList<Object>(); OMultiValue.add(context, value); } return context; } else { // IN-LINE MODE (STATELESS) final OMultiCollectionIterator<OIdentifiable> result = new OMultiCollectionIterator<OIdentifiable>(); for (Object value : iParameters) { if (value != null) { if (value instanceof OSQLFilterItemVariable) value = ((OSQLFilterItemVariable) value).getValue(iCurrentRecord, iContext); result.add(value); } } return result; } } public String getSyntax() { return "Syntax error: union(<field>*)"; } @Override public Object mergeDistributedResult(List<Object> resultsToMerge) { final Collection<Object> result = new HashSet<Object>(); for (Object iParameter : resultsToMerge) { @SuppressWarnings("unchecked") final Collection<Object> items = (Collection<Object>) iParameter; if (items != null) { result.addAll(items); } } return result; } }
0true
core_src_main_java_com_orientechnologies_orient_core_sql_functions_coll_OSQLFunctionUnion.java
1,404
public class NonStrictReadWriteAccessDelegate<T extends HazelcastRegion> extends AbstractAccessDelegate<T> { public NonStrictReadWriteAccessDelegate(T hazelcastRegion, final Properties props) { super(hazelcastRegion, props); } public boolean afterInsert(final Object key, final Object value, final Object version) throws CacheException { return put(key, value, version); } public boolean afterUpdate(final Object key, final Object value, final Object currentVersion, final Object previousVersion, final SoftLock lock) throws CacheException { return update(key, value, currentVersion, previousVersion, lock); } public boolean putFromLoad(final Object key, final Object value, final long txTimestamp, final Object version, final boolean minimalPutOverride) throws CacheException { return put(key, value, version); } public SoftLock lockItem(Object key, Object version) throws CacheException { return null; } public void unlockItem(final Object key, final SoftLock lock) throws CacheException { remove(key); } public void unlockRegion(final SoftLock lock) throws CacheException { removeAll(); } }
0true
hazelcast-hibernate_hazelcast-hibernate4_src_main_java_com_hazelcast_hibernate_access_NonStrictReadWriteAccessDelegate.java
313
LOG_CONSOLE_LEVEL("log.console.level", "Console logging level", String.class, "info", new OConfigurationChangeCallback() { public void change(final Object iCurrentValue, final Object iNewValue) { OLogManager.instance().setLevel((String) iNewValue, ConsoleHandler.class); } }),
0true
core_src_main_java_com_orientechnologies_orient_core_config_OGlobalConfiguration.java
73
@SuppressWarnings("serial") static final class MapReduceKeysTask<K,V,U> extends BulkTask<K,V,U> { final Fun<? super K, ? extends U> transformer; final BiFun<? super U, ? super U, ? extends U> reducer; U result; MapReduceKeysTask<K,V,U> rights, nextRight; MapReduceKeysTask (BulkTask<K,V,?> p, int b, int i, int f, Node<K,V>[] t, MapReduceKeysTask<K,V,U> nextRight, Fun<? super K, ? extends U> transformer, BiFun<? super U, ? super U, ? extends U> reducer) { super(p, b, i, f, t); this.nextRight = nextRight; this.transformer = transformer; this.reducer = reducer; } public final U getRawResult() { return result; } public final void compute() { final Fun<? super K, ? extends U> transformer; final BiFun<? super U, ? super U, ? extends U> reducer; if ((transformer = this.transformer) != null && (reducer = this.reducer) != null) { for (int i = baseIndex, f, h; batch > 0 && (h = ((f = baseLimit) + i) >>> 1) > i;) { addToPendingCount(1); (rights = new MapReduceKeysTask<K,V,U> (this, batch >>>= 1, baseLimit = h, f, tab, rights, transformer, reducer)).fork(); } U r = null; for (Node<K,V> p; (p = advance()) != null; ) { U u; if ((u = transformer.apply(p.key)) != null) r = (r == null) ? u : reducer.apply(r, u); } result = r; CountedCompleter<?> c; for (c = firstComplete(); c != null; c = c.nextComplete()) { @SuppressWarnings("unchecked") MapReduceKeysTask<K,V,U> t = (MapReduceKeysTask<K,V,U>)c, s = t.rights; while (s != null) { U tr, sr; if ((sr = s.result) != null) t.result = (((tr = t.result) == null) ? sr : reducer.apply(tr, sr)); s = t.rights = s.nextRight; } } } } }
0true
src_main_java_jsr166e_ConcurrentHashMapV8.java
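MapReduceKeysTask is an internal fork/join task: it recursively splits the hash table, applies the transformer to each key, folds the non-null results with the reducer, and merges sibling results as the completer tree unwinds. jsr166e's ConcurrentHashMapV8 was the backport of what became java.util.concurrent.ConcurrentHashMap in Java 8, where this machinery is reached through reduceKeys; a short sketch against the standard class (the data is illustrative):

import java.util.concurrent.ConcurrentHashMap;

public class ReduceKeysExample {
    public static void main(String[] args) {
        ConcurrentHashMap<String, Integer> map = new ConcurrentHashMap<>();
        map.put("alpha", 1);
        map.put("beta", 2);
        map.put("gamma", 3);

        // The transformer maps each key to its length, the reducer sums the results;
        // a parallelism threshold of 1 allows the common pool to split the scan,
        // which is the job the internal MapReduceKeysTask above performs.
        Integer totalKeyLength = map.reduceKeys(1L, String::length, Integer::sum);
        System.out.println(totalKeyLength); // 14
    }
}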
2,502
END_ARRAY { @Override public boolean isValue() { return false; } },
0true
src_main_java_org_elasticsearch_common_xcontent_XContentParser.java
2,442
static class EsThreadFactory implements ThreadFactory { final ThreadGroup group; final AtomicInteger threadNumber = new AtomicInteger(1); final String namePrefix; public EsThreadFactory(String namePrefix) { this.namePrefix = namePrefix; SecurityManager s = System.getSecurityManager(); group = (s != null) ? s.getThreadGroup() : Thread.currentThread().getThreadGroup(); } @Override public Thread newThread(Runnable r) { Thread t = new Thread(group, r, namePrefix + "[T#" + threadNumber.getAndIncrement() + "]", 0); t.setDaemon(true); return t; } }
0true
src_main_java_org_elasticsearch_common_util_concurrent_EsExecutors.java
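EsThreadFactory exists so every pool thread gets a recognizable name ("prefix[T#n]") and daemon status, which makes thread dumps attributable to a specific pool. The same idea with plain JDK types, as a hedged standalone sketch (the pool name and sizes are illustrative):

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;

public class NamedDaemonThreadsExample {
    public static void main(String[] args) throws InterruptedException {
        AtomicInteger threadNumber = new AtomicInteger(1);
        // Equivalent in spirit to EsThreadFactory above: daemon threads named "<prefix>[T#<n>]".
        ThreadFactory factory = r -> {
            Thread t = new Thread(r, "search[T#" + threadNumber.getAndIncrement() + "]");
            t.setDaemon(true);
            return t;
        };
        ExecutorService pool = Executors.newFixedThreadPool(2, factory);
        pool.submit(() -> System.out.println(Thread.currentThread().getName()));
        pool.shutdown();
        pool.awaitTermination(5, TimeUnit.SECONDS); // wait, since daemon threads die with the JVM
    }
}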
761
public class TransportGetAction extends TransportShardSingleOperationAction<GetRequest, GetResponse> { public static boolean REFRESH_FORCE = false; private final IndicesService indicesService; private final boolean realtime; @Inject public TransportGetAction(Settings settings, ClusterService clusterService, TransportService transportService, IndicesService indicesService, ThreadPool threadPool) { super(settings, threadPool, clusterService, transportService); this.indicesService = indicesService; this.realtime = settings.getAsBoolean("action.get.realtime", true); } @Override protected String executor() { return ThreadPool.Names.GET; } @Override protected String transportAction() { return GetAction.NAME; } @Override protected ClusterBlockException checkGlobalBlock(ClusterState state, GetRequest request) { return state.blocks().globalBlockedException(ClusterBlockLevel.READ); } @Override protected ClusterBlockException checkRequestBlock(ClusterState state, GetRequest request) { return state.blocks().indexBlockedException(ClusterBlockLevel.READ, request.index()); } @Override protected ShardIterator shards(ClusterState state, GetRequest request) { return clusterService.operationRouting() .getShards(clusterService.state(), request.index(), request.type(), request.id(), request.routing(), request.preference()); } @Override protected void resolveRequest(ClusterState state, GetRequest request) { if (request.realtime == null) { request.realtime = this.realtime; } // update the routing (request#index here is possibly an alias) request.routing(state.metaData().resolveIndexRouting(request.routing(), request.index())); request.index(state.metaData().concreteIndex(request.index())); // Fail fast on the node that received the request. if (request.routing() == null && state.getMetaData().routingRequired(request.index(), request.type())) { throw new RoutingMissingException(request.index(), request.type(), request.id()); } } @Override protected GetResponse shardOperation(GetRequest request, int shardId) throws ElasticsearchException { IndexService indexService = indicesService.indexServiceSafe(request.index()); IndexShard indexShard = indexService.shardSafe(shardId); if (request.refresh() && !request.realtime()) { indexShard.refresh(new Engine.Refresh("refresh_flag_get").force(REFRESH_FORCE)); } GetResult result = indexShard.getService().get(request.type(), request.id(), request.fields(), request.realtime(), request.version(), request.versionType(), request.fetchSourceContext()); return new GetResponse(result); } @Override protected GetRequest newRequest() { return new GetRequest(); } @Override protected GetResponse newResponse() { return new GetResponse(); } }
1no label
src_main_java_org_elasticsearch_action_get_TransportGetAction.java
761
public class ListReplicationOperation extends CollectionReplicationOperation { public ListReplicationOperation() { } public ListReplicationOperation(Map<String, CollectionContainer> migrationData, int partitionId, int replicaIndex) { super(migrationData, partitionId, replicaIndex); } @Override protected void readInternal(ObjectDataInput in) throws IOException { int mapSize = in.readInt(); migrationData = new HashMap<String, CollectionContainer>(mapSize); for (int i = 0; i < mapSize; i++) { String name = in.readUTF(); ListContainer container = new ListContainer(); container.readData(in); migrationData.put(name, container); } } @Override public int getId() { return CollectionDataSerializerHook.LIST_REPLICATION; } }
0true
hazelcast_src_main_java_com_hazelcast_collection_list_ListReplicationOperation.java
3,607
public static class CustomShortNumericField extends CustomNumericField { private final short number; private final NumberFieldMapper mapper; public CustomShortNumericField(NumberFieldMapper mapper, short number, FieldType fieldType) { super(mapper, number, fieldType); this.mapper = mapper; this.number = number; } @Override public TokenStream tokenStream(Analyzer analyzer) throws IOException { if (fieldType().indexed()) { return mapper.popCachedStream().setIntValue(number); } return null; } @Override public String numericAsString() { return Short.toString(number); } }
0true
src_main_java_org_elasticsearch_index_mapper_core_ShortFieldMapper.java
1,130
public class FulfillmentType implements Serializable, BroadleafEnumerationType { private static final long serialVersionUID = 1L; private static final Map<String, FulfillmentType> TYPES = new LinkedHashMap<String, FulfillmentType>(); public static final FulfillmentType DIGITAL = new FulfillmentType("DIGITAL", "Digital"); public static final FulfillmentType PHYSICAL_SHIP = new FulfillmentType("PHYSICAL_SHIP", "Physical Ship"); public static final FulfillmentType PHYSICAL_PICKUP = new FulfillmentType("PHYSICAL_PICKUP", "Physical Pickup"); public static final FulfillmentType PHYSICAL_PICKUP_OR_SHIP = new FulfillmentType("PHYSICAL_PICKUP_OR_SHIP", "Physical Pickup or Ship"); public static final FulfillmentType GIFT_CARD = new FulfillmentType("GIFT_CARD", "Gift Card"); @Deprecated public static final FulfillmentType SHIPPING = new FulfillmentType("SHIPPING", "Shipping"); public static FulfillmentType getInstance(final String type) { return TYPES.get(type); } private String type; private String friendlyType; public FulfillmentType() { //do nothing } public FulfillmentType(final String type, final String friendlyType) { this.friendlyType = friendlyType; setType(type); } @Override public String getType() { return type; } @Override public String getFriendlyType() { return friendlyType; } private void setType(final String type) { this.type = type; if (!TYPES.containsKey(type)) { TYPES.put(type, this); } } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + ((type == null) ? 0 : type.hashCode()); return result; } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (getClass() != obj.getClass()) return false; FulfillmentType other = (FulfillmentType) obj; if (type == null) { if (other.type != null) return false; } else if (!type.equals(other.type)) return false; return true; } }
1no label
core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_order_service_type_FulfillmentType.java
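FulfillmentType follows Broadleaf's extensible-enum pattern: each constructed instance registers itself in a static map keyed by its type string, so add-on modules can contribute new types and any code can resolve them by name. A hedged lookup sketch using only the methods defined above:

import org.broadleafcommerce.core.order.service.type.FulfillmentType;

public class FulfillmentTypeLookupExample {
    public static void main(String[] args) {
        // Resolves the constant registered under the given key, or null if no module registered it.
        FulfillmentType type = FulfillmentType.getInstance("PHYSICAL_SHIP");
        if (type != null) {
            System.out.println(type.getFriendlyType()); // "Physical Ship"
        }
    }
}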
6,022
public static class Candidate { public static final Candidate[] EMPTY = new Candidate[0]; public final BytesRef term; public final double stringDistance; public final long frequency; public final double score; public final boolean userInput; public Candidate(BytesRef term, long frequency, double stringDistance, double score, boolean userInput) { this.frequency = frequency; this.term = term; this.stringDistance = stringDistance; this.score = score; this.userInput = userInput; } @Override public String toString() { return "Candidate [term=" + term.utf8ToString() + ", stringDistance=" + stringDistance + ", frequency=" + frequency + (userInput ? ", userInput" : "" ) + "]"; } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + ((term == null) ? 0 : term.hashCode()); return result; } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (getClass() != obj.getClass()) return false; Candidate other = (Candidate) obj; if (term == null) { if (other.term != null) return false; } else if (!term.equals(other.term)) return false; return true; } }
1no label
src_main_java_org_elasticsearch_search_suggest_phrase_DirectCandidateGenerator.java
1,423
public final class HazelcastInstanceFactory { private final static String HZ_CLIENT_LOADER_CLASSNAME = "com.hazelcast.hibernate.instance.HazelcastClientLoader"; private final static String HZ_INSTANCE_LOADER_CLASSNAME = "com.hazelcast.hibernate.instance.HazelcastInstanceLoader"; private HazelcastInstanceFactory() { } public static HazelcastInstance createInstance(Properties props) throws CacheException { return createInstanceLoader(props).loadInstance(); } public static IHazelcastInstanceLoader createInstanceLoader(Properties props) throws CacheException { boolean useNativeClient = false; if (props != null) { useNativeClient = CacheEnvironment.isNativeClient(props); } IHazelcastInstanceLoader loader = null; Class loaderClass = null; ClassLoader cl = HazelcastInstanceFactory.class.getClassLoader(); try { if (useNativeClient) { loaderClass = cl.loadClass(HZ_CLIENT_LOADER_CLASSNAME); } else { loaderClass = cl.loadClass(HZ_INSTANCE_LOADER_CLASSNAME); } loader = (IHazelcastInstanceLoader) loaderClass.newInstance(); } catch (Exception e) { throw new CacheException(e); } loader.configure(props); return loader; } }
0true
hazelcast-hibernate_hazelcast-hibernate3_src_main_java_com_hazelcast_hibernate_instance_HazelcastInstanceFactory.java
1,453
public class OrderInfoForm implements Serializable { private static final long serialVersionUID = 62974989700147353L; protected String emailAddress; public String getEmailAddress() { return emailAddress; } public void setEmailAddress(String emailAddress) { this.emailAddress = emailAddress; } }
0true
core_broadleaf-framework-web_src_main_java_org_broadleafcommerce_core_web_checkout_model_OrderInfoForm.java
3,686
public static class Builder extends Mapper.Builder<Builder, SourceFieldMapper> { private boolean enabled = Defaults.ENABLED; private long compressThreshold = Defaults.COMPRESS_THRESHOLD; private Boolean compress = null; private String format = Defaults.FORMAT; private String[] includes = null; private String[] excludes = null; public Builder() { super(Defaults.NAME); } public Builder enabled(boolean enabled) { this.enabled = enabled; return this; } public Builder compress(boolean compress) { this.compress = compress; return this; } public Builder compressThreshold(long compressThreshold) { this.compressThreshold = compressThreshold; return this; } public Builder format(String format) { this.format = format; return this; } public Builder includes(String[] includes) { this.includes = includes; return this; } public Builder excludes(String[] excludes) { this.excludes = excludes; return this; } @Override public SourceFieldMapper build(BuilderContext context) { return new SourceFieldMapper(name, enabled, format, compress, compressThreshold, includes, excludes); } }
0true
src_main_java_org_elasticsearch_index_mapper_internal_SourceFieldMapper.java
1,627
public interface Validator { String validate(String setting, String value); public static final Validator EMPTY = new Validator() { @Override public String validate(String setting, String value) { return null; } }; public static final Validator TIME = new Validator() { @Override public String validate(String setting, String value) { try { if (TimeValue.parseTimeValue(value, null) == null) { return "cannot parse value [" + value + "] as time"; } } catch (ElasticsearchParseException ex) { return "cannot parse value [" + value + "] as time"; } return null; } }; public static final Validator FLOAT = new Validator() { @Override public String validate(String setting, String value) { try { Float.parseFloat(value); } catch (NumberFormatException ex) { return "cannot parse value [" + value + "] as a float"; } return null; } }; public static final Validator NON_NEGATIVE_FLOAT = new Validator() { @Override public String validate(String setting, String value) { try { if (Float.parseFloat(value) < 0.0) { return "the value of the setting " + setting + " must be a non negative float"; } } catch (NumberFormatException ex) { return "cannot parse value [" + value + "] as a double"; } return null; } }; public static final Validator DOUBLE = new Validator() { @Override public String validate(String setting, String value) { try { Double.parseDouble(value); } catch (NumberFormatException ex) { return "cannot parse value [" + value + "] as a double"; } return null; } }; public static final Validator NON_NEGATIVE_DOUBLE = new Validator() { @Override public String validate(String setting, String value) { try { if (Double.parseDouble(value) < 0.0) { return "the value of the setting " + setting + " must be a non negative double"; } } catch (NumberFormatException ex) { return "cannot parse value [" + value + "] as a double"; } return null; } }; public static final Validator DOUBLE_GTE_2 = new Validator() { @Override public String validate(String setting, String value) { try { if (Double.parseDouble(value) < 2.0) { return "the value of the setting " + setting + " must be >= 2.0"; } } catch (NumberFormatException ex) { return "cannot parse value [" + value + "] as a double"; } return null; } }; public static final Validator INTEGER = new Validator() { @Override public String validate(String setting, String value) { try { Integer.parseInt(value); } catch (NumberFormatException ex) { return "cannot parse value [" + value + "] as an integer"; } return null; } }; public static final Validator POSITIVE_INTEGER = new Validator() { @Override public String validate(String setting, String value) { try { if (Integer.parseInt(value) <= 0) { return "the value of the setting " + setting + " must be a positive integer"; } } catch (NumberFormatException ex) { return "cannot parse value [" + value + "] as an integer"; } return null; } }; public static final Validator NON_NEGATIVE_INTEGER = new Validator() { @Override public String validate(String setting, String value) { try { if (Integer.parseInt(value) < 0) { return "the value of the setting " + setting + " must be a non negative integer"; } } catch (NumberFormatException ex) { return "cannot parse value [" + value + "] as an integer"; } return null; } }; public static final Validator INTEGER_GTE_2 = new Validator() { @Override public String validate(String setting, String value) { try { if (Integer.parseInt(value) < 2) { return "the value of the setting " + setting + " must be >= 2"; } } catch (NumberFormatException ex) { return "cannot parse value [" + value + "] as an integer"; } 
return null; } }; public static final Validator BYTES_SIZE = new Validator() { @Override public String validate(String setting, String value) { try { parseBytesSizeValue(value); } catch (ElasticsearchParseException ex) { return ex.getMessage(); } return null; } }; public static final Validator MEMORY_SIZE = new Validator() { @Override public String validate(String setting, String value) { try { parseBytesSizeValueOrHeapRatio(value); } catch (ElasticsearchParseException ex) { return ex.getMessage(); } return null; } }; public static final Validator BOOLEAN = new Validator() { @Override public String validate(String setting, String value) { if (value != null && (Booleans.isExplicitFalse(value) || Booleans.isExplicitTrue(value))) { return null; } return "cannot parse value [" + value + "] as a boolean"; } }; }
0true
src_main_java_org_elasticsearch_cluster_settings_Validator.java
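Each Validator constant above returns null when a proposed setting value is acceptable and a human-readable message when it is not, so callers can surface the message directly. A hedged sketch of that contract (the setting name is illustrative):

import org.elasticsearch.cluster.settings.Validator;

public class SettingValidationExample {
    public static void main(String[] args) {
        String ok  = Validator.POSITIVE_INTEGER.validate("discovery.zen.minimum_master_nodes", "2");
        String bad = Validator.POSITIVE_INTEGER.validate("discovery.zen.minimum_master_nodes", "0");
        System.out.println(ok);  // null -> value accepted
        System.out.println(bad); // "the value of the setting ... must be a positive integer"
    }
}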
1,158
public interface IAtomicReference<E> extends DistributedObject { /** * Atomically sets the value to the given updated value * only if the current value {@code ==} the expected value. * * @param expect the expected value * @param update the new value * @return true if successful; or false if the actual value * was not equal to the expected value. */ boolean compareAndSet(E expect, E update); /** * Gets the current value. * * @return the current value */ E get(); /** * Atomically sets the given value. * * @param newValue the new value */ void set(E newValue); /** * Gets the value and sets the new value. * * @param newValue the new value. * @return the old value. */ E getAndSet(E newValue); /** * Sets and gets the value. * * @param update the new value * @return the new value */ E setAndGet(E update); /** * Checks if the stored reference is null. * * @return true if null, false otherwise. */ boolean isNull(); /** * Clears the current stored reference. */ void clear(); /** * Checks if the reference contains the value. * * @param value the value to check (is allowed to be null). * @return true if the value is found, false otherwise. */ boolean contains(E value); /** * Alters the currently stored reference by applying a function on it. * * @param function the function * @throws IllegalArgumentException if function is null. */ void alter(IFunction<E, E> function); /** * Alters the currently stored reference by applying a function on it and gets the result. * * @param function the function * @return the new value. * @throws IllegalArgumentException if function is null. */ E alterAndGet(IFunction<E, E> function); /** * Alters the currently stored reference by applying a function on it and gets the old value. * * @param function the function * @return the old value * @throws IllegalArgumentException if function is null. */ E getAndAlter(IFunction<E, E> function); /** * Applies a function on the value; the actual stored value will not change. * * @param function the function * @return the result of the function application * @throws IllegalArgumentException if function is null. */ <R> R apply(IFunction<E, R> function); }
0true
hazelcast_src_main_java_com_hazelcast_core_IAtomicReference.java
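IAtomicReference combines compare-and-swap style primitives with function shipping (alter/apply), where the function runs against the stored value without the caller round-tripping it. A hedged usage sketch on a single embedded member (the reference name and values are illustrative):

import com.hazelcast.core.Hazelcast;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.core.IAtomicReference;

public class AtomicReferenceExample {
    public static void main(String[] args) {
        HazelcastInstance hz = Hazelcast.newHazelcastInstance();
        IAtomicReference<String> ref = hz.getAtomicReference("greeting"); // name is illustrative
        ref.set("hello");
        boolean swapped = ref.compareAndSet("hello", "world");    // true: expected value matched
        String shouted = ref.apply(value -> value.toUpperCase()); // "WORLD"; stored value stays "world"
        System.out.println(swapped + " " + ref.get() + " " + shouted);
        Hazelcast.shutdownAll();
    }
}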
1,991
super(injector, key, source, new InternalFactory<T>() { public T get(Errors errors, InternalContext context, Dependency<?> dependency) { throw new AssertionError(); } }, Scoping.UNSCOPED);
0true
src_main_java_org_elasticsearch_common_inject_internal_UntargettedBindingImpl.java
149
class MoveToNewUnitProposal implements ICompletionProposal { private final CeylonEditor editor; private final String name; public MoveToNewUnitProposal(String name, CeylonEditor editor) { this.editor = editor; this.name = name; } @Override public Point getSelection(IDocument doc) { return null; } @Override public Image getImage() { return CeylonLabelProvider.MOVE; } @Override public String getDisplayString() { return "Move '" + name + "' to a new source file"; } @Override public IContextInformation getContextInformation() { return null; } @Override public String getAdditionalProposalInfo() { return null; } @Override public void apply(IDocument doc) { new MoveToNewUnitRefactoringAction(editor).run(); } static void add(Collection<ICompletionProposal> proposals, CeylonEditor editor) { if (canMoveDeclaration(editor)) { proposals.add(new MoveToNewUnitProposal(getDeclarationName(editor), editor)); } } }
0true
plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_correct_MoveToNewUnitProposal.java
605
public interface OIndexManager { public OIndexManager load(); public void create(); public void recreateIndexes(); public Collection<? extends OIndex<?>> getIndexes(); public OIndex<?> getIndex(final String iName); public boolean existsIndex(final String iName); public OIndex<?> getIndex(final ORID iRID); public OIndex<?> createIndex(final String iName, final String iType, OIndexDefinition iIndexDefinition, final int[] iClusterIdsToIndex, final OProgressListener iProgressListener); public OIndexManager dropIndex(final String iIndexName); public String getDefaultClusterName(); public void setDefaultClusterName(String defaultClusterName); public ODictionary<ORecordInternal<?>> getDictionary(); public void flush(); public ODocument getConfiguration(); /** * Returns list of indexes that contain passed in fields names as their first keys. Order of fields does not matter. * <p/> * All indexes sorted by their count of parameters in ascending order. If there are indexes for the given set of fields in super * class they will be taken into account. * * * * @param className * name of class which is indexed. * @param fields * Field names. * @return list of indexes that contain passed in fields names as their first keys. */ public Set<OIndex<?>> getClassInvolvedIndexes(String className, Collection<String> fields); /** * Returns list of indexes that contain passed in fields names as their first keys. Order of fields does not matter. * <p/> * All indexes sorted by their count of parameters in ascending order. If there are indexes for the given set of fields in super * class they will be taken into account. * * * * @param className * name of class which is indexed. * @param fields * Field names. * @return list of indexes that contain passed in fields names as their first keys. */ public Set<OIndex<?>> getClassInvolvedIndexes(String className, String... fields); /** * Indicates whether given fields are contained as first key fields in class indexes. Order of fields does not matter. If there * are indexes for the given set of fields in super class they will be taken into account. * * @param className * name of class which contain {@code fields}. * @param fields * Field names. * @return <code>true</code> if given fields are contained as first key fields in class indexes. */ public boolean areIndexed(String className, Collection<String> fields); /** * @param className * name of class which contain {@code fields}. * @param fields * Field names. * @return <code>true</code> if given fields are contained as first key fields in class indexes. * @see #areIndexed(String, java.util.Collection) */ public boolean areIndexed(String className, String... fields); public Set<OIndex<?>> getClassIndexes(String className); public OIndex<?> getClassIndex(String className, String indexName); public void waitTillIndexRestore(); public boolean autoRecreateIndexesAfterCrash(); public void addClusterToIndex(String clusterName, String indexName); }
0true
core_src_main_java_com_orientechnologies_orient_core_index_OIndexManager.java
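OIndexManager is reached through the database's metadata and answers questions such as whether a given index exists or which indexes cover a set of fields. A hedged sketch against an in-memory database follows; the URL, class, and index names are illustrative, and the bootstrap calls are assumed from the usual OrientDB document API rather than taken from this record.

import java.util.Collection;

import com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx;
import com.orientechnologies.orient.core.index.OIndex;
import com.orientechnologies.orient.core.index.OIndexManager;

public class IndexManagerExample {
    public static void main(String[] args) {
        // In-memory database; the URL is illustrative.
        ODatabaseDocumentTx db = new ODatabaseDocumentTx("memory:demo").create();
        try {
            OIndexManager indexManager = db.getMetadata().getIndexManager();
            Collection<? extends OIndex<?>> all = indexManager.getIndexes();
            System.out.println("indexes: " + all.size());
            System.out.println("Person.name exists: " + indexManager.existsIndex("Person.name"));
        } finally {
            db.drop();
        }
    }
}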
1,466
public class OSQLFunctionBothE extends OSQLFunctionMove { public static final String NAME = "bothE"; public OSQLFunctionBothE() { super(NAME, 0, 1); } @Override protected Object move(final OrientBaseGraph graph, final OIdentifiable iRecord, final String[] iLabels) { return v2e(graph, iRecord, Direction.BOTH, iLabels); } }
1no label
graphdb_src_main_java_com_orientechnologies_orient_graph_sql_functions_OSQLFunctionBothE.java
1,319
awaitBusy(new Predicate<Object>() { public boolean apply(Object obj) { ClusterState state = client().admin().cluster().prepareState().setLocal(true).execute().actionGet().getState(); return state.blocks().hasGlobalBlock(Discovery.NO_MASTER_BLOCK); } });
0true
src_test_java_org_elasticsearch_cluster_MinimumMasterNodesTests.java
1,441
public class GetProductsByCategoryIdTagTest extends BaseTagLibTest { private GetProductsByCategoryIdTag getProductsByCategoryIdTag; public void test_GetProductsByCategoryIdTag() throws JspException { //TODO Fix this test - there are expectation failures /*getProductsByCategoryIdTag.setJspContext(pageContext); getProductsByCategoryIdTag.setCatalogService(catalogService); List<Product> productList = new ArrayList<Product>(); Product p1 = EasyMock.createStrictMock(Product.class); Product p2 = EasyMock.createStrictMock(Product.class); productList.add(p1); productList.add(p2); pageContext.setAttribute("productListVar", productList); Category c = EasyMock.createStrictMock(Category.class); getProductsByCategoryIdTag.setCategoryId(0L); getProductsByCategoryIdTag.setVar("productListVar"); EasyMock.expect(pageContext.getAttribute("productListVar")).andReturn(productList); EasyMock.expect(catalogService.findCategoryById(0L)).andReturn(c); EasyMock.expect(catalogService.findActiveProductsByCategory(c, new Date())).andReturn(productList); EasyMock.replay(p1, p2, c); super.replayAdditionalMockObjects(); getProductsByCategoryIdTag.doTag(); List<Product> list = (List<Product>) pageContext.getAttribute("productListVar"); assert(list.get(0).equals(p1)); assert(list.get(1).equals(p2)); super.verifyBaseMockObjects();*/ } @Override public void setup() { getProductsByCategoryIdTag = new GetProductsByCategoryIdTag(); } }
0true
core_broadleaf-framework-web_src_test_java_org_broadleafcommerce_core_web_catalog_taglib_GetProductsByCategoryIdTagTest.java
489
public class PasswordChange extends PasswordReset { private static final long serialVersionUID = 1L; private String currentPassword; private String newPassword; private String newPasswordConfirm; private String challengeQuestion; private String challengeAnswer; public PasswordChange(String username) { super(username); } public String getChallengeQuestion() { return challengeQuestion; } public void setChallengeQuestion(String challengeQuestion) { this.challengeQuestion = challengeQuestion; } public String getChallengeAnswer() { return challengeAnswer; } public void setChallengeAnswer(String challengeAnswer) { this.challengeAnswer = challengeAnswer; } public String getCurrentPassword() { return currentPassword; } public void setCurrentPassword(String currentPassword) { this.currentPassword = currentPassword; } public String getNewPassword() { return newPassword; } public void setNewPassword(String newPassword) { this.newPassword = newPassword; } public String getNewPasswordConfirm() { return newPasswordConfirm; } public void setNewPasswordConfirm(String newPasswordConfirm) { this.newPasswordConfirm = newPasswordConfirm; } }
0true
common_src_main_java_org_broadleafcommerce_common_security_util_PasswordChange.java
1,555
@RunWith(HazelcastParallelClassRunner.class) @Category(QuickTest.class) public class MBeanTest extends HazelcastTestSupport { private static HazelcastInstance hz; private static MBeanServer mbs; @BeforeClass public static void setUp() throws Exception { Config config = new Config(); config.setProperty(GroupProperties.PROP_ENABLE_JMX, "true"); hz = new TestHazelcastInstanceFactory(1).newHazelcastInstance(config); mbs = ManagementFactory.getPlatformMBeanServer(); } public void assertMBeanExistEventually(final String type, final String name) { assertTrueEventually(new AssertTask() { @Override public void run() throws Exception { Hashtable table = new Hashtable(); table.put("type", type); table.put("name", name); table.put("instance", hz.getName()); ObjectName name = new ObjectName("com.hazelcast", table); try { ObjectInstance mbean = mbs.getObjectInstance(name); } catch (InstanceNotFoundException e) { fail(e.getMessage()); } } }); } @Test public void testAtomicLong() throws Exception { IAtomicLong atomicLong = hz.getAtomicLong("atomiclong"); atomicLong.incrementAndGet(); assertMBeanExistEventually("IAtomicLong", atomicLong.getName()); } @Test public void testAtomicReference() throws Exception { IAtomicReference atomicReference = hz.getAtomicReference("atomicreference"); atomicReference.set(null); assertMBeanExistEventually("IAtomicReference", atomicReference.getName()); } @Test public void testLock() throws Exception { ILock lock = hz.getLock("lock"); lock.tryLock(); assertMBeanExistEventually("ILock", lock.getName()); } @Test public void testSemaphore() throws Exception { ISemaphore semaphore = hz.getSemaphore("semaphore"); semaphore.availablePermits(); assertMBeanExistEventually("ISemaphore", semaphore.getName()); } @Test public void testCountDownLatch() throws Exception { ICountDownLatch countDownLatch = hz.getCountDownLatch("semaphore"); countDownLatch.getCount(); assertMBeanExistEventually("ICountDownLatch", countDownLatch.getName()); } @Test public void testMap() throws Exception { IMap map = hz.getMap("map"); map.size(); assertMBeanExistEventually("IMap", map.getName()); } @Test public void testMultiMap() throws Exception { MultiMap map = hz.getMultiMap("multimap"); map.size(); assertMBeanExistEventually("MultiMap", map.getName()); } @Test public void testTopic() throws Exception { ITopic topic = hz.getTopic("topic"); topic.publish("foo"); assertMBeanExistEventually("ITopic", topic.getName()); } @Test public void testList() throws Exception { IList list = hz.getList("list"); list.size(); assertMBeanExistEventually("IList", list.getName()); } @Test public void testSet() throws Exception { ISet set = hz.getSet("set"); set.size(); assertMBeanExistEventually("ISet", set.getName()); } @Test public void testQueue() throws Exception { IQueue queue = hz.getQueue("queue"); queue.size(); assertMBeanExistEventually("IQueue", queue.getName()); } @Test public void testExecutor() throws Exception { IExecutorService executor = hz.getExecutorService("executor"); executor.submit(new DummyRunnable()).get(); assertMBeanExistEventually("IExecutorService", executor.getName()); } private static class DummyRunnable implements Runnable, Serializable { @Override public void run() { } } }
0true
hazelcast_src_test_java_com_hazelcast_jmx_MBeanTest.java
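The test above checks that touching a distributed object eventually registers an MBean under the com.hazelcast domain with type/name/instance key properties. A hedged sketch of the same lookup with plain JMX calls; registration is asynchronous, so a real check would retry as the test's assertTrueEventually does, and the GroupProperties import location is assumed.

import java.lang.management.ManagementFactory;
import java.util.Hashtable;

import javax.management.MBeanServer;
import javax.management.ObjectName;

import com.hazelcast.config.Config;
import com.hazelcast.core.Hazelcast;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.instance.GroupProperties;

public class JmxLookupExample {
    public static void main(String[] args) throws Exception {
        Config config = new Config();
        config.setProperty(GroupProperties.PROP_ENABLE_JMX, "true"); // same switch the test flips
        HazelcastInstance hz = Hazelcast.newHazelcastInstance(config);
        hz.getMap("map").size(); // touching the proxy triggers MBean registration

        MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
        Hashtable<String, String> keys = new Hashtable<String, String>();
        keys.put("type", "IMap");
        keys.put("name", "map");
        keys.put("instance", hz.getName());
        ObjectName objectName = new ObjectName("com.hazelcast", keys);
        System.out.println("registered=" + mbs.isRegistered(objectName)); // may need a retry loop
        Hazelcast.shutdownAll();
    }
}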
273
public interface ElasticsearchWrapperException { Throwable getCause(); }
0true
src_main_java_org_elasticsearch_ElasticsearchWrapperException.java
291
public class NeoStoreIndexStoreView implements IndexStoreView { private final PropertyStore propertyStore; private final NodeStore nodeStore; private final LockService locks; public NeoStoreIndexStoreView( LockService locks, NeoStore neoStore ) { this.locks = locks; this.propertyStore = neoStore.getPropertyStore(); this.nodeStore = neoStore.getNodeStore(); } @Override public <FAILURE extends Exception> StoreScan<FAILURE> visitNodesWithPropertyAndLabel( IndexDescriptor descriptor, final Visitor<NodePropertyUpdate, FAILURE> visitor ) { final int soughtLabelId = descriptor.getLabelId(); final int soughtPropertyKeyId = descriptor.getPropertyKeyId(); return new NodeStoreScan<NodePropertyUpdate, FAILURE>() { @Override protected NodePropertyUpdate read( NodeRecord node ) { long[] labels = parseLabelsField( node ).get( nodeStore ); if ( !containsLabel( soughtLabelId, labels ) ) { return null; } for ( PropertyBlock property : properties( node ) ) { int propertyKeyId = property.getKeyIndexId(); if ( soughtPropertyKeyId == propertyKeyId ) { return NodePropertyUpdate.add( node.getId(), propertyKeyId, valueOf( property ), labels ); } } return null; } @Override protected void process( NodePropertyUpdate update ) throws FAILURE { visitor.visit( update ); } }; } @Override public <FAILURE extends Exception> StoreScan<FAILURE> visitNodes( final int[] labelIds, final int[] propertyKeyIds, final Visitor<NodePropertyUpdate, FAILURE> propertyUpdateVisitor, final Visitor<NodeLabelUpdate, FAILURE> labelUpdateVisitor ) { return new NodeStoreScan<Update, FAILURE>() { @Override protected Update read( NodeRecord node ) { long[] labels = parseLabelsField( node ).get( nodeStore ); Update update = new Update( node.getId(), labels ); if ( !containsAnyLabel( labelIds, labels ) ) { return update; } properties: for ( PropertyBlock property : properties( node ) ) { int propertyKeyId = property.getKeyIndexId(); for ( int sought : propertyKeyIds ) { if ( propertyKeyId == sought ) { update.add( NodePropertyUpdate .add( node.getId(), propertyKeyId, valueOf( property ), labels ) ); continue properties; } } } return update; } @Override protected void process( Update update ) throws FAILURE { labelUpdateVisitor.visit( update.labels ); for ( NodePropertyUpdate propertyUpdate : update ) { propertyUpdateVisitor.visit( propertyUpdate ); } } }; } @Override public Iterable<NodePropertyUpdate> nodeAsUpdates( long nodeId ) { NodeRecord node = nodeStore.forceGetRecord( nodeId ); if ( !node.inUse() ) { return Iterables.empty(); // node not in use => no updates } long firstPropertyId = node.getCommittedNextProp(); if ( firstPropertyId == Record.NO_NEXT_PROPERTY.intValue() ) { return Iterables.empty(); // no properties => no updates (it's not going to be in any index) } long[] labels = parseLabelsField( node ).get( nodeStore ); if ( labels.length == 0 ) { return Iterables.empty(); // no labels => no updates (it's not going to be in any index) } ArrayList<NodePropertyUpdate> updates = new ArrayList<>(); for ( PropertyRecord propertyRecord : propertyStore.getPropertyRecordChain( firstPropertyId ) ) { for ( PropertyBlock property : propertyRecord.getPropertyBlocks() ) { Object value = property.getType().getValue( property, propertyStore ); updates.add( NodePropertyUpdate.add( node.getId(), property.getKeyIndexId(), value, labels ) ); } } return updates; } @Override public Property getProperty( long nodeId, int propertyKeyId ) throws EntityNotFoundException, PropertyNotFoundException { NodeRecord node = nodeStore.forceGetRecord( nodeId ); if ( 
!node.inUse() ) { throw new EntityNotFoundException( EntityType.NODE, nodeId ); } long firstPropertyId = node.getCommittedNextProp(); if ( firstPropertyId == Record.NO_NEXT_PROPERTY.intValue() ) { throw new PropertyNotFoundException( propertyKeyId, EntityType.NODE, nodeId ); } for ( PropertyRecord propertyRecord : propertyStore.getPropertyRecordChain( firstPropertyId ) ) { PropertyBlock propertyBlock = propertyRecord.getPropertyBlock( propertyKeyId ); if ( propertyBlock != null ) { return propertyBlock.newPropertyData( propertyStore ); } } throw new PropertyNotFoundException( propertyKeyId, EntityType.NODE, nodeId ); } private Object valueOf( PropertyBlock property ) { // Make sure the value is loaded, even if it's of a "heavy" kind. propertyStore.ensureHeavy( property ); return property.getType().getValue( property, propertyStore ); } private Iterable<PropertyBlock> properties( final NodeRecord node ) { return new Iterable<PropertyBlock>() { @Override public Iterator<PropertyBlock> iterator() { return new PropertyBlockIterator( node ); } }; } private static boolean containsLabel( int sought, long[] labels ) { for ( long label : labels ) { if ( label == sought ) { return true; } } return false; } private static boolean containsAnyLabel( int[] soughtIds, long[] labels ) { for ( int soughtId : soughtIds ) { if ( containsLabel( soughtId, labels ) ) { return true; } } return false; } private static class Update implements Iterable<NodePropertyUpdate> { private final NodeLabelUpdate labels; private final List<NodePropertyUpdate> propertyUpdates = new ArrayList<>(); Update( long nodeId, long[] labels ) { this.labels = labelChanges( nodeId, EMPTY_LONG_ARRAY, labels ); } void add( NodePropertyUpdate update ) { propertyUpdates.add( update ); } @Override public Iterator<NodePropertyUpdate> iterator() { return propertyUpdates.iterator(); } } private class PropertyBlockIterator extends PrefetchingIterator<PropertyBlock> { private final Iterator<PropertyRecord> records; private Iterator<PropertyBlock> blocks = IteratorUtil.emptyIterator(); PropertyBlockIterator( NodeRecord node ) { long firstPropertyId = node.getCommittedNextProp(); if ( firstPropertyId == Record.NO_NEXT_PROPERTY.intValue() ) { records = IteratorUtil.emptyIterator(); } else { records = propertyStore.getPropertyRecordChain( firstPropertyId ).iterator(); } } @Override protected PropertyBlock fetchNextOrNull() { for (; ; ) { if ( blocks.hasNext() ) { return blocks.next(); } if ( !records.hasNext() ) { return null; } blocks = records.next().getPropertyBlocks().iterator(); } } } private abstract class NodeStoreScan<RESULT, FAILURE extends Exception> implements StoreScan<FAILURE> { private volatile boolean continueScanning; protected abstract RESULT read( NodeRecord node ); protected abstract void process( RESULT result ) throws FAILURE; @Override public void run() throws FAILURE { PrimitiveLongIterator nodeIds = new StoreIdIterator( nodeStore ); continueScanning = true; while ( continueScanning && nodeIds.hasNext() ) { long id = nodeIds.next(); RESULT result = null; try ( Lock ignored = locks.acquireNodeLock( id, LockService.LockType.READ_LOCK ) ) { NodeRecord record = nodeStore.forceGetRecord( id ); if ( record.inUse() ) { result = read( record ); } } if ( result != null ) { process( result ); } } } @Override public void stop() { continueScanning = false; } } }
0true
community_kernel_src_main_java_org_neo4j_kernel_impl_nioneo_xa_NeoStoreIndexStoreView.java
862
searchService.sendExecuteFetch(node, querySearchRequest, new SearchServiceListener<QueryFetchSearchResult>() { @Override public void onResult(QueryFetchSearchResult result) { result.shardTarget(dfsResult.shardTarget()); queryFetchResults.set(shardIndex, result); if (counter.decrementAndGet() == 0) { finishHim(); } } @Override public void onFailure(Throwable t) { onSecondPhaseFailure(t, querySearchRequest, shardIndex, dfsResult, counter); } });
0true
src_main_java_org_elasticsearch_action_search_type_TransportSearchDfsQueryAndFetchAction.java
910
public interface FulfillmentGroupOfferProcessor extends OrderOfferProcessor { public void filterFulfillmentGroupLevelOffer(PromotableOrder order, List<PromotableCandidateFulfillmentGroupOffer> qualifiedFGOffers, Offer offer); public void calculateFulfillmentGroupTotal(PromotableOrder order); /** * Takes a list of sorted candidate fulfillment group offers and determines if each offer can be * applied based on the restrictions (stackable and/or combinable) on that offer. Adjustments * are created on the Order for each applied offer. An offer with stackable equals false * cannot be applied to an Order that already contains an OrderAdjustment. An offer with combinable * equals false cannot be applied to the Order if the Order already contains an OrderAdjustment. * * @param qualifiedFGOffers a sorted list of PromotableCandidateFulfillmentGroupOffer * @param order the Order to apply the offers to * @return true if an offer was applied; otherwise false */ public boolean applyAllFulfillmentGroupOffers(List<PromotableCandidateFulfillmentGroupOffer> qualifiedFGOffers, PromotableOrder order); public List<FulfillmentGroupOfferPotential> removeTrailingNotCombinableFulfillmentGroupOffers(List<FulfillmentGroupOfferPotential> candidateOffers); }
0true
core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_offer_service_processor_FulfillmentGroupOfferProcessor.java
3,378
static class LongValues extends org.elasticsearch.index.fielddata.LongValues.WithOrdinals { private final MonotonicAppendingLongBuffer values; LongValues(MonotonicAppendingLongBuffer values, Ordinals.Docs ordinals) { super(ordinals); this.values = values; } @Override public long getValueByOrd(long ord) { assert ord != Ordinals.MISSING_ORDINAL; return values.get(ord - 1); } }
0true
src_main_java_org_elasticsearch_index_fielddata_plain_PackedArrayAtomicFieldData.java
301
@RunWith(HazelcastParallelClassRunner.class) @Category(QuickTest.class) public class ClientMapBasicTest { static HazelcastInstance client; static HazelcastInstance server; @BeforeClass public static void init() { server = Hazelcast.newHazelcastInstance(); client = HazelcastClient.newHazelcastClient(); } @AfterClass public static void destroy() { HazelcastClient.shutdownAll(); Hazelcast.shutdownAll(); } @Test public void testClientGetMap() { assertNotNull( client.getMap(randomString()) ); } @Test public void testGetName() { String mapName = randomString(); final IMap map = client.getMap(mapName); assertEquals(mapName, map.getName()); } @Test public void testSize_whenEmpty() { final IMap map = client.getMap(randomString()); assertEquals(0, map.size()); } @Test public void testSize() { final IMap map = client.getMap(randomString()); map.put("key", "val"); assertEquals(1, map.size()); } @Test public void testSize_withMultiKeyPuts() { final IMap map = client.getMap(randomString()); final Object key = "Key"; final Object oldValue = "oldValue"; final Object newValue = "Val"; map.put(key, oldValue); map.put(key, newValue); assertEquals(1, map.size()); } @Test public void testIsEmpty_whenEmpty() { final IMap map = client.getMap(randomString()); assertTrue(map.isEmpty()); } @Test public void testIsEmpty_whenNotEmpty() { final IMap map = client.getMap(randomString()); map.put("key", "val"); assertFalse(map.isEmpty()); } @Test public void testIsEmpty_afterPutRemove() { final IMap map = client.getMap(randomString()); final Object key = "key"; map.put(key, "val"); map.remove(key); assertTrue(map.isEmpty()); } @Test(expected = NullPointerException.class) public void testPut_whenKeyNull() { final IMap map = client.getMap(randomString()); final Object val = "Val"; map.put(null, val); } @Test(expected = HazelcastSerializationException.class) public void testPut_whenValueNull() { final IMap map = client.getMap(randomString()); final Object key = "Key"; map.put(key, null); } @Test public void testPut() { final IMap map = client.getMap(randomString()); final Object key = "Key"; final Object value = "Val"; final Object result = map.put(key, value); assertNull(result); assertEquals(value, map.get(key)); } @Test public void testPut_whenKeyExists() { final IMap map = client.getMap(randomString()); final Object key = "Key"; final Object oldValue = "oldValue"; final Object newValue = "Val"; map.put(key, oldValue); final Object result = map.put(key, newValue); assertEquals(oldValue, result); assertEquals(newValue, map.get(key)); } @Test public void testPutTTL() { final IMap map = client.getMap(randomString()); final Object key = "Key"; final Object value = "Value"; final Object result = map.put(key, value, 5, TimeUnit.MINUTES); assertNull(result); assertEquals(value, map.get(key)); } @Test public void testPutTTL_whenKeyExists() { final IMap map = client.getMap(randomString()); final Object key = "Key"; final Object oldValue = "oldValue"; final Object newValue = "Val"; map.put(key, oldValue); final Object result = map.put(key, newValue, 5, TimeUnit.MINUTES); assertEquals(oldValue, result); assertEquals(newValue, map.get(key)); } @Test public void testPutTTL_AfterExpire() { final IMap map = client.getMap(randomString()); final Object key = "Key"; final Object value = "Value"; final Object result = map.put(key, value, 1, TimeUnit.SECONDS); assertNull(result); sleepSeconds(2); assertEquals(null, map.get(key)); } @Test public void testPutTTL_AfterExpireWhenKeyExists() { final IMap map = client.getMap(randomString()); 
final Object key = "Key"; final Object oldValue = "oldValue"; final Object newValue = "Val"; map.put(key, oldValue); final Object result = map.put(key, newValue, 1, TimeUnit.SECONDS); assertEquals(oldValue, result); sleepSeconds(2); assertEquals(null, map.get(key)); } @Test public void testPutAsync() throws Exception { final IMap map = client.getMap(randomString()); final Object key = "Key"; final Object value = "Val"; Future result = map.putAsync(key, value); assertEquals(null, result.get()); assertEquals(value, map.get(key)); } @Test public void testPutAsync_whenKeyExists() throws Exception { final IMap map = client.getMap(randomString()); final Object key = "Key"; final Object oldValue = "oldValue"; final Object newValue = "Val"; map.put(key, oldValue); Future result = map.putAsync(key, newValue); assertEquals(oldValue, result.get()); assertEquals(newValue, map.get(key)); } @Test(expected = NullPointerException.class) public void testPutAsync_withKeyNull() throws Exception { final IMap map = client.getMap(randomString()); final Object val = "Val"; map.putAsync(null, val); } @Test(expected = HazelcastSerializationException.class) public void testPutAsync_withValueNull() throws Exception { final IMap map = client.getMap(randomString()); final Object key = "key"; map.putAsync(key, null); } @Test public void testPutAsyncTTL() throws Exception { final IMap map = client.getMap(randomString()); final Object key = "Key"; final Object value = "Val"; Future result = map.putAsync(key, value, 5, TimeUnit.MINUTES); assertEquals(null, result.get()); assertEquals(value, map.get(key)); } @Test public void testPutAsyncTTL_whenKeyExists() throws Exception { final IMap map = client.getMap(randomString()); final Object key = "Key"; final Object oldValue = "oldValue"; final Object newValue = "Val"; map.put(key, oldValue); Future result = map.putAsync(key, newValue, 5, TimeUnit.MINUTES); assertEquals(oldValue, result.get()); assertEquals(newValue, map.get(key)); } @Test public void testPutAsyncTTL_afterExpire() throws Exception { final IMap map = client.getMap(randomString()); final Object key = "Key"; final Object value = "Val"; Future result = map.putAsync(key, value, 1, TimeUnit.SECONDS); sleepSeconds(2); assertEquals(null, result.get()); assertEquals(null, map.get(key)); } @Test public void testPutAsyncTTL_afterExpireWhenKeyExists() throws Exception { final IMap map = client.getMap(randomString()); final Object key = "Key"; final Object oldValue = "oldValue"; final Object newValue = "Val"; map.put(key, oldValue); Future result = map.putAsync(key, newValue, 1, TimeUnit.SECONDS); sleepSeconds(2); assertEquals(oldValue, result.get()); assertEquals(null, map.get(key)); } @Test public void testTryPut_whenNotLocked() throws Exception { final IMap map = client.getMap(randomString()); final Object key = "Key"; final Object value = "value"; boolean result = map.tryPut(key, value, 1, TimeUnit.SECONDS); assertTrue(result); assertEquals(value, map.get(key)); } @Test public void testTryPut_whenKeyPresentAndNotLocked() throws Exception { final IMap map = client.getMap(randomString()); final Object key = "Key"; final Object oldValue = "oldValue"; final Object newValue = "Val"; map.put(key, oldValue); boolean result = map.tryPut(key, newValue, 1, TimeUnit.SECONDS); assertTrue(result); assertEquals(newValue, map.get(key)); } @Test(expected = NullPointerException.class) public void testPutIfAbsent_whenKeyNull() throws Exception { final IMap map = client.getMap(randomString()); final Object value = "Value"; 
map.putIfAbsent(null, value); } @Test(expected = HazelcastSerializationException.class) public void testPutIfAbsent_whenValueNull() throws Exception { final IMap map = client.getMap(randomString()); final Object key = "key"; map.putIfAbsent(key, null); } @Test public void testPutIfAbsent() throws Exception { final IMap map = client.getMap(randomString()); final Object key = "Key"; final Object value = "Value"; Object result = map.putIfAbsent(key, value); assertEquals(null, result); assertEquals(value, map.get(key)); } @Test public void testPutIfAbsent_whenKeyPresent() throws Exception { final IMap map = client.getMap(randomString()); final Object key = "Key"; final Object value = "Value"; map.put(key, value); Object result = map.putIfAbsent(key, value); assertEquals(value, result); assertEquals(value, map.get(key)); } @Test public void testPutIfAbsentNewValue_whenKeyPresent() throws Exception { final IMap map = client.getMap(randomString()); final Object key = "Key"; final Object value = "Value"; final Object newValue = "newValue"; map.put(key, value); Object result = map.putIfAbsent(key, newValue); assertEquals(value, result); assertEquals(value, map.get(key)); } @Test public void testPutIfAbsentTTL() throws Exception { final IMap map = client.getMap(randomString()); final Object key = "Key"; final Object value = "Value"; Object result = map.putIfAbsent(key, value, 5, TimeUnit.MINUTES); assertEquals(null, result); assertEquals(value, map.get(key)); } @Test public void testPutIfAbsentTTL_whenExpire() throws Exception { final IMap map = client.getMap(randomString()); final Object key = "Key"; final Object value = "Value"; final Object result = map.putIfAbsent(key, value, 1, TimeUnit.SECONDS); sleepSeconds(2); assertEquals(null, result); assertEquals(null, map.get(key)); } @Test public void testPutIfAbsentTTL_whenKeyPresentAfterExpire() throws Exception { final IMap map = client.getMap(randomString()); final Object key = "Key"; final Object value = "Value"; map.put(key, value); final Object result = map.putIfAbsent(key, value, 1, TimeUnit.SECONDS); assertEquals(value, result); assertEquals(value, map.get(key)); } @Test public void testPutIfAbsentTTL_whenKeyPresent() throws Exception { final IMap map = client.getMap(randomString()); final Object key = "Key"; final Object value = "Value"; map.put(key, value); final Object result = map.putIfAbsent(key, value, 5, TimeUnit.MINUTES); assertEquals(value, result); assertEquals(value, map.get(key)); } @Test public void testPutIfAbsentNewValueTTL_whenKeyPresent() throws Exception { final IMap map = client.getMap(randomString()); final Object key = "Key"; final Object value = "Value"; final Object newValue = "newValue"; map.put(key, value); final Object result = map.putIfAbsent(key, newValue, 5, TimeUnit.MINUTES); assertEquals(value, result); assertEquals(value, map.get(key)); } @Test public void testClear_whenEmpty() throws Exception { final IMap map = client.getMap(randomString()); map.clear(); assertTrue(map.isEmpty()); } @Test public void testClear() throws Exception { final IMap map = client.getMap(randomString()); final Object key = "Key"; final Object value = "Value"; map.put(key, value); map.clear(); assertTrue(map.isEmpty()); } @Test public void testContainsKey_whenKeyAbsent() { final IMap map = client.getMap(randomString()); assertFalse(map.containsKey("NOT_THERE")); } @Test(expected = NullPointerException.class) public void testContainsKey_whenKeyNull() { final IMap map = client.getMap(randomString()); map.containsKey(null); } @Test public 
void testContainsKey_whenKeyPresent() { final IMap map = client.getMap(randomString()); final Object key = "key"; map.put(key, "val"); assertTrue(map.containsKey(key)); } @Test public void testContainsValue_whenValueAbsent() { final IMap map = client.getMap(randomString()); assertFalse(map.containsValue("NOT_THERE")); } @Test(expected = HazelcastSerializationException.class) public void testContainsValue_whenValueNull() { final IMap map = client.getMap(randomString()); map.containsValue(null); } @Test public void testContainsValue_whenValuePresent() { final IMap map = client.getMap(randomString()); final Object key = "key"; final Object value = "value"; map.put(key, value); assertTrue(map.containsValue(value)); } @Test public void testContainsValue_whenMultiValuePresent() { final IMap map = client.getMap(randomString()); final Object value = "value"; map.put("key1", value); map.put("key2", value); assertTrue(map.containsValue(value)); } @Test public void testGet_whenKeyPresent() { final IMap map = client.getMap(randomString()); final Object key = "Key"; final Object val = "Val"; map.put(key, val); assertEquals(val, map.get(key)); } @Test public void testGet_whenKeyAbsent() { final IMap map = client.getMap(randomString()); assertEquals(null, map.get("NOT_THERE")); } @Test(expected = NullPointerException.class) public void testGet_whenKeyNull() { final IMap map = client.getMap(randomString()); map.get(null); } @Test public void testGetAsync_whenKeyPresent() throws Exception { final IMap map = client.getMap(randomString()); final Object key = "Key"; final Object val = "Val"; map.put(key, val); Future result = map.getAsync(key); assertEquals(val, result.get()); } @Test public void testGetAsync_whenKeyAbsent() throws Exception { final IMap map = client.getMap(randomString()); Future result = map.getAsync("NOT_THERE"); assertEquals(null, result.get()); } @Test(expected = NullPointerException.class) public void testGetAsync_whenKeyNull() throws Exception { final IMap map = client.getMap(randomString()); map.getAsync(null); } @Test public void testMapSet() { final IMap map = client.getMap(randomString()); final Object key = "Key"; final Object val = "Val"; map.set(key, val); assertEquals(val, map.get(key)); } @Test public void testMapSet_whenKeyPresent() { final IMap map = client.getMap(randomString()); final Object key = "Key"; final Object oldValue = "Val"; final Object newValue = "newValue"; map.set(key, oldValue); map.set(key, newValue); assertEquals(newValue, map.get(key)); } @Test public void testMapSetTTl() { final IMap map = client.getMap(randomString()); final Object key = "Key"; final Object val = "Val"; map.set(key, val, 5, TimeUnit.MINUTES); assertEquals(val, map.get(key)); } @Test public void testMapSetTTl_whenExpired() { final IMap map = client.getMap(randomString()); final Object key = "Key"; final Object val = "Val"; map.set(key, val, 1, TimeUnit.SECONDS); sleepSeconds(2); assertEquals(null, map.get(key)); } @Test public void testMapSetTTl_whenReplacingKeyAndExpired() { final IMap map = client.getMap(randomString()); final Object key = "Key"; final Object newValue = "newValue"; final Object oldValue = "oldvalue"; map.set(key, oldValue); map.set(key, newValue, 1, TimeUnit.SECONDS); sleepSeconds(2); assertEquals(null, map.get(key)); } @Test public void testRemove_WhenKeyAbsent() { final IMap map = client.getMap(randomString()); assertNull(map.remove("NOT_THERE")); } @Test(expected = NullPointerException.class) public void testRemove_WhenKeyNull() { final IMap map = 
client.getMap(randomString()); assertNull(map.remove(null)); } @Test public void testRemove_WhenKeyPresent() { final IMap map = client.getMap(randomString()); final Object key = "Key"; final Object value = "value"; map.put(key, value); assertEquals(value, map.remove(key)); assertNull(map.get(key)); } @Test public void testRemoveKeyValue_WhenPresent() { final IMap map = client.getMap(randomString()); final Object key = "Key"; final Object value = "value"; map.put(key, value); assertTrue(map.remove(key, value)); assertNull(map.get(key)); } @Test public void testRemoveKeyValue_WhenValueAbsent() { final IMap map = client.getMap(randomString()); final Object key = "Key"; final Object value = "value"; map.put(key, value); assertFalse(map.remove(key, "NOT_THERE")); assertEquals(value, map.get(key)); } @Test public void testRemoveKeyValue_WhenKeyAbsent() { final IMap map = client.getMap(randomString()); final Object key = "Key"; final Object value = "value"; map.put(key, value); assertFalse(map.remove("NOT_THERE", value)); } @Test public void testRemoveAsync() throws Exception { final IMap map = client.getMap(randomString()); final Object key = "Key"; final Object value = "value"; map.put(key, value); Future result = map.removeAsync(key); assertEquals(value, result.get()); assertEquals(null, map.get(key)); } @Test public void testRemoveAsync_whenKeyNotPresent() throws Exception { final IMap map = client.getMap(randomString()); Future result = map.removeAsync("NOT_THERE"); assertEquals(null, result.get()); } @Test(expected = NullPointerException.class) public void testRemoveAsync_whenKeyNull() throws Exception { final IMap map = client.getMap(randomString()); map.removeAsync(null); } @Test public void testTryRemove_WhenKeyPresentAndNotLocked() { final IMap map = client.getMap(randomString()); final Object key = "Key"; final Object value = "value"; map.put(key, value); boolean result = map.tryRemove(key, 1, TimeUnit.SECONDS); assertTrue(result); assertNull(map.get(key)); } @Test public void testTryRemove_WhenKeyAbsentAndNotLocked() { final IMap map = client.getMap(randomString()); final Object key = "Key"; final Object value = "value"; boolean result = map.tryRemove(key, 1, TimeUnit.SECONDS); assertFalse(result); } @Test(expected = NullPointerException.class) public void testDelete_whenKeyNull() { final IMap map = client.getMap(randomString()); map.delete(null); } @Test public void testDelete_whenKeyPresent() { final IMap map = client.getMap(randomString()); final Object key = "Key"; final Object value = "value"; map.put(key, value); map.delete(key); assertEquals(0, map.size()); } @Test public void testDelete_whenKeyAbsent() { final IMap map = client.getMap(randomString()); final Object key = "Key"; final Object value = "value"; map.put(key, value); map.delete("NOT_THERE"); assertEquals(1, map.size()); } @Test public void testEvict_whenKeyAbsent() throws InterruptedException { final IMap map = client.getMap(randomString()); boolean result = map.evict("NOT_THERE"); assertFalse( result ); } @Test(expected = HazelcastSerializationException.class) public void testEvict_whenKeyNull() throws InterruptedException { final IMap map = client.getMap(randomString()); map.evict(null); } @Test public void testEvict() throws InterruptedException { final IMap map = client.getMap(randomString()); final Object key = "Key"; final Object value = "value"; map.put(key, value); boolean result = map.evict(key); assertTrue(result); assertEquals(null, map.get(key)); } @Test public void testPutAll() { final int max = 100; 
final IMap map = client.getMap(randomString()); final Map expected = new HashMap(); for (int i = 0; i < max; i++) { expected.put(i, i); } map.putAll(expected); for(Object key : expected.keySet()){ Object value = map.get(key); Object expectedValue = expected.get(key); assertEquals(expectedValue, value); } } @Test public void testGetAll() { final int max = 100; final IMap map = client.getMap(randomString()); final Map expected = new HashMap(); for (int i = 0; i < max; i++) { map.put(i, i); expected.put(i, i); } Map result = map.getAll(expected.keySet()); for(Object key : expected.keySet()){ Object value = result.get(key); Object expectedValue = expected.get(key); assertEquals(expectedValue, value); } } public void testGetAll_whenMapEmpty() { final int max = 10; final IMap map = client.getMap(randomString()); final Map expected = new HashMap(); for (int i = 0; i < max; i++) { expected.put(i, i); } Map result = map.getAll(expected.keySet()); assertTrue(result.isEmpty()); } @Test public void testReplace_whenKeyValueAbsent() throws Exception { final IMap map = client.getMap(randomString()); final Object key = "Key"; final Object value = "value"; assertNull(map.replace(key, value)); assertNull(map.get(key)); } @Test public void testReplace() throws Exception { final IMap map = client.getMap(randomString()); final Object key = "Key"; final Object oldValue = "value"; final Object newValue = "NewValue"; map.put(key, oldValue); final Object result = map.replace(key, newValue); assertEquals(oldValue, result); assertEquals(newValue, map.get(key)); } @Test public void testReplaceKeyValue() throws Exception { final IMap map = client.getMap(randomString()); final Object key = "Key"; final Object value = "value"; final Object newValue = "NewValue"; map.put(key, value); final boolean result = map.replace(key, value, newValue); assertTrue(result); assertEquals(newValue, map.get(key)); } @Test public void testReplaceKeyValue_whenValueAbsent() throws Exception { final IMap map = client.getMap(randomString()); final Object key = "Key"; final Object value = "value"; final Object newValue = "NewValue"; map.put(key, value); final boolean result = map.replace(key, "NOT_THERE", newValue); assertFalse(result); assertEquals(value, map.get(key)); } @Test public void testPutTransient() throws InterruptedException { final IMap map = client.getMap(randomString()); final Object key = "Key"; final Object value = "value"; map.putTransient(key, value, 5, TimeUnit.MINUTES); assertEquals(value, map.get(key)); } @Test public void testPutTransient_whenExpire() throws InterruptedException { final IMap map = client.getMap(randomString()); final Object key = "Key"; final Object value = "value"; map.putTransient(key, value, 1, TimeUnit.SECONDS); sleepSeconds(2); assertEquals(null, map.get(key)); } @Test public void testPutTransient_whenKeyPresent() throws InterruptedException { final IMap map = client.getMap(randomString()); final Object key = "Key"; final Object oldValue = "oldValue"; final Object newValue = "newValue"; map.put(key, oldValue); map.putTransient(key, newValue, 5, TimeUnit.MINUTES); assertEquals(newValue, map.get(key)); } @Test public void testPutTransient_whenKeyPresentAfterExpire() throws InterruptedException { final IMap map = client.getMap(randomString()); final Object key = "Key"; final Object oldValue = "oldValue"; final Object newValue = "newValue"; map.put(key, oldValue); map.putTransient(key, newValue, 1, TimeUnit.SECONDS); sleepSeconds(2); assertEquals(null, map.get(key)); } @Test public void 
testGetEntryView_whenKeyAbsent(){ final IMap map = client.getMap(randomString()); final EntryView view = map.getEntryView("NOT_THERE"); assertEquals(null, view); } @Test public void testGetEntryView(){ final IMap map = client.getMap(randomString()); final Object key = "Key"; final Object value = "Value"; map.put(key, value); final EntryView view = map.getEntryView(key); assertEquals(key, view.getKey()); assertEquals(value, view.getValue()); } @Test public void testKeySet_whenEmpty() { final IMap map = client.getMap(randomString()); final Set keySet = map.keySet(); assertTrue(keySet.isEmpty()); } @Test public void testKeySet() { final int max = 81; final IMap map = client.getMap(randomString()); final Set expected = new TreeSet(); for (int key = 0; key < max; key++) { Object value = key+"value"; expected.add(key); map.put(key, value); } final Set keySet = map.keySet(); assertEquals(expected, keySet); } @Test public void testKeySet_withPredicate() { final int max = 44; final IMap map = client.getMap(randomString()); final Set expected = new TreeSet(); for (int key = 0; key < max; key++) { Object value = key+"value"; map.put(key, value); } expected.add(4); final Set keySet = map.keySet(new SqlPredicate("this == 4value")); assertEquals(expected, keySet); } @Test public void testValues_whenEmpty() { final IMap map = client.getMap(randomString()); final Collection values = map.values(); assertTrue(values.isEmpty()); } @Test public void testValues() { final int max = 23; final IMap map = client.getMap(randomString()); final Set expected = new TreeSet(); for (int key = 0; key < max; key++) { Object value = key+"value"; expected.add(value); map.put(key, value); } final Collection collection = map.values(); final Set resultSet = new TreeSet(collection); assertEquals(expected, resultSet); } @Test public void testValues_withPredicate() { final int max = 27; final IMap map = client.getMap(randomString()); final Set expected = new TreeSet(); for (int key = 0; key < max; key++) { Object value = key+"value"; map.put(key, value); } expected.add(4); final Set keySet = map.keySet(new SqlPredicate("this == 4value")); assertEquals(expected, keySet); } @Test public void testEntrySet_whenEmpty() { final IMap map = client.getMap(randomString()); Set<Map.Entry> entrySet = map.entrySet(); assertTrue(entrySet.isEmpty()); } @Test public void testEntrySet() { final int max = 34; final IMap map = client.getMap(randomString()); final Map expected = new HashMap(); for (int key = 0; key < max; key++) { Object value = key+"value"; expected.put(key, value); map.put(key, value); } Set<Map.Entry> entrySet = map.entrySet(); for(Map.Entry entry : entrySet){ Object value = entry.getValue(); Object key = entry.getKey(); Object expectedValue = expected.get(key); assertEquals(expectedValue, value); } } @Test public void testEntrySet_withPredicate() { final int max = 44; final IMap map = client.getMap(randomString()); final Map expected = new HashMap(); for (int key = 0; key < max; key++) { Object value = key+"value"; expected.put(key, value); map.put(key, value); } final Set<Map.Entry> entrySet = map.entrySet(new SqlPredicate("this == 1value")); Map.Entry entry = entrySet.iterator().next(); assertEquals(1, entry.getKey()); assertEquals("1value", entry.getValue()); assertEquals(1, entrySet.size()); } @Test public void testMapStatistics_withClientOperations() { final String mapName = randomString(); final LocalMapStats serverMapStats = server.getMap(mapName).getLocalMapStats(); final IMap map = client.getMap(mapName); final int 
operationCount = 1123; for (int i = 0; i < operationCount; i++) { map.put(i, i); map.get(i); map.remove(i); } assertEquals("put count", operationCount, serverMapStats.getPutOperationCount()); assertEquals("get count", operationCount, serverMapStats.getGetOperationCount()); assertEquals("remove count", operationCount, serverMapStats.getRemoveOperationCount()); assertTrue("put latency", 0 < serverMapStats.getTotalPutLatency()); assertTrue("get latency", 0 < serverMapStats.getTotalGetLatency()); assertTrue("remove latency", 0 < serverMapStats.getTotalRemoveLatency()); } @Test(expected = UnsupportedOperationException.class) public void testAddLocalEntryListener(){ final IMap map = client.getMap(randomString()); map.addLocalEntryListener(new DumEntryListener()); } @Test(expected = UnsupportedOperationException.class) public void testAddLocalEntryListener_WithPredicate(){ final IMap map = client.getMap(randomString()); map.addLocalEntryListener(new DumEntryListener(), new DumPredicate(), true); } @Test(expected = UnsupportedOperationException.class) public void testAddLocalEntryListener_WithPredicateAndKey(){ final IMap map = client.getMap(randomString()); map.addLocalEntryListener(new DumEntryListener(), new DumPredicate(), "Key", true); } @Test(expected = UnsupportedOperationException.class) public void testLocalKeySet(){ final IMap map = client.getMap(randomString()); map.localKeySet(); } @Test(expected = UnsupportedOperationException.class) public void testLocalKeySet_WithPredicate(){ final IMap map = client.getMap(randomString()); map.localKeySet(new DumPredicate()); } static class DumEntryListener implements EntryListener { public void entryAdded(EntryEvent event) { } public void entryRemoved(EntryEvent event) { } public void entryUpdated(EntryEvent event) { } public void entryEvicted(EntryEvent event) { } } static class DumPredicate implements Predicate { public boolean apply(Map.Entry mapEntry) { return false; } } }
0true
hazelcast-client_src_test_java_com_hazelcast_client_map_ClientMapBasicTest.java
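The ClientMapBasicTest record above exercises the Hazelcast client-side IMap API, including put with a time-to-live. A minimal sketch of that TTL behaviour, assuming an embedded member can be started locally; the map name, key, and value are illustrative:

import com.hazelcast.client.HazelcastClient;
import com.hazelcast.core.Hazelcast;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.core.IMap;

import java.util.concurrent.TimeUnit;

public class ClientMapTtlSketch {
    public static void main(String[] args) throws InterruptedException {
        HazelcastInstance server = Hazelcast.newHazelcastInstance();      // embedded member
        HazelcastInstance client = HazelcastClient.newHazelcastClient();  // client connecting to it
        try {
            IMap<String, String> map = client.getMap("ttl-demo");
            map.put("key", "value", 1, TimeUnit.SECONDS);   // entry expires after one second
            System.out.println(map.get("key"));             // "value" while the entry is alive
            Thread.sleep(2000);
            System.out.println(map.get("key"));             // null once the TTL has elapsed
        } finally {
            client.shutdown();
            server.shutdown();
        }
    }
}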
1,332
public class ODirtyPagesRecord implements OWALRecord { private OLogSequenceNumber lsn; private Set<ODirtyPage> dirtyPages; public ODirtyPagesRecord() { } public ODirtyPagesRecord(Set<ODirtyPage> dirtyPages) { this.dirtyPages = dirtyPages; } public Set<ODirtyPage> getDirtyPages() { return dirtyPages; } @Override public int toStream(byte[] content, int offset) { OIntegerSerializer.INSTANCE.serializeNative(dirtyPages.size(), content, offset); offset += OIntegerSerializer.INT_SIZE; for (ODirtyPage dirtyPage : dirtyPages) { OLongSerializer.INSTANCE.serializeNative(dirtyPage.getPageIndex(), content, offset); offset += OLongSerializer.LONG_SIZE; OStringSerializer.INSTANCE.serializeNative(dirtyPage.getFileName(), content, offset); offset += OStringSerializer.INSTANCE.getObjectSize(dirtyPage.getFileName()); OLongSerializer.INSTANCE.serializeNative(dirtyPage.getLsn().getSegment(), content, offset); offset += OLongSerializer.LONG_SIZE; OLongSerializer.INSTANCE.serializeNative(dirtyPage.getLsn().getPosition(), content, offset); offset += OLongSerializer.LONG_SIZE; } return offset; } @Override public int fromStream(byte[] content, int offset) { int size = OIntegerSerializer.INSTANCE.deserializeNative(content, offset); offset += OIntegerSerializer.INT_SIZE; dirtyPages = new HashSet<ODirtyPage>(); for (int i = 0; i < size; i++) { long pageIndex = OLongSerializer.INSTANCE.deserializeNative(content, offset); offset += OLongSerializer.LONG_SIZE; String fileName = OStringSerializer.INSTANCE.deserializeNative(content, offset); offset += OStringSerializer.INSTANCE.getObjectSize(fileName); long segment = OLongSerializer.INSTANCE.deserializeNative(content, offset); offset += OLongSerializer.LONG_SIZE; long position = OLongSerializer.INSTANCE.deserializeNative(content, offset); offset += OLongSerializer.LONG_SIZE; dirtyPages.add(new ODirtyPage(fileName, pageIndex, new OLogSequenceNumber(segment, position))); } return offset; } @Override public int serializedSize() { int size = OIntegerSerializer.INT_SIZE; for (ODirtyPage dirtyPage : dirtyPages) { size += 3 * OLongSerializer.LONG_SIZE; size += OStringSerializer.INSTANCE.getObjectSize(dirtyPage.getFileName()); } return size; } @Override public boolean isUpdateMasterRecord() { return false; } @Override public OLogSequenceNumber getLsn() { return lsn; } @Override public void setLsn(OLogSequenceNumber lsn) { this.lsn = lsn; } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; ODirtyPagesRecord that = (ODirtyPagesRecord) o; if (!dirtyPages.equals(that.dirtyPages)) return false; return true; } @Override public int hashCode() { return dirtyPages.hashCode(); } @Override public String toString() { return "ODirtyPagesRecord{" + "lsn=" + lsn + ", dirtyPages=" + dirtyPages + '}'; } }
0true
core_src_main_java_com_orientechnologies_orient_core_storage_impl_local_paginated_wal_ODirtyPagesRecord.java
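The ODirtyPagesRecord above defines its own binary layout through toStream, fromStream, and serializedSize. A round-trip sketch using only the members visible in the record, assuming it is compiled in the same package as the WAL classes; the file name, page index, and LSN values are made up:

import java.util.Collections;
import java.util.Set;

public class DirtyPagesRoundTripSketch {
    public static void main(String[] args) {
        Set<ODirtyPage> pages = Collections.singleton(
                new ODirtyPage("data.pcl", 42L, new OLogSequenceNumber(1L, 128L)));
        ODirtyPagesRecord original = new ODirtyPagesRecord(pages);

        byte[] buffer = new byte[original.serializedSize()];  // exactly the size the record reports
        original.toStream(buffer, 0);                         // serialize into the buffer

        ODirtyPagesRecord restored = new ODirtyPagesRecord();
        restored.fromStream(buffer, 0);                       // deserialize a fresh instance

        System.out.println(original.equals(restored));        // expected: true
    }
}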
610
public class UpdateSettingsRequestBuilder extends AcknowledgedRequestBuilder<UpdateSettingsRequest, UpdateSettingsResponse, UpdateSettingsRequestBuilder> { public UpdateSettingsRequestBuilder(IndicesAdminClient indicesClient, String... indices) { super((InternalIndicesAdminClient) indicesClient, new UpdateSettingsRequest(indices)); } /** * Sets the indices the update settings will execute on */ public UpdateSettingsRequestBuilder setIndices(String... indices) { request.indices(indices); return this; } /** * Specifies what type of requested indices to ignore and wildcard indices expressions. * * For example indices that don't exist. */ public UpdateSettingsRequestBuilder setIndicesOptions(IndicesOptions options) { request.indicesOptions(options); return this; } /** * Sets the settings to be updated */ public UpdateSettingsRequestBuilder setSettings(Settings settings) { request.settings(settings); return this; } /** * Sets the settings to be updated */ public UpdateSettingsRequestBuilder setSettings(Settings.Builder settings) { request.settings(settings); return this; } /** * Sets the settings to be updated (either json/yaml/properties format) */ public UpdateSettingsRequestBuilder setSettings(String source) { request.settings(source); return this; } /** * Sets the settings to be updated (either json/yaml/properties format) */ public UpdateSettingsRequestBuilder setSettings(Map<String, Object> source) { request.settings(source); return this; } @Override protected void doExecute(ActionListener<UpdateSettingsResponse> listener) { ((IndicesAdminClient) client).updateSettings(request, listener); } }
0true
src_main_java_org_elasticsearch_action_admin_indices_settings_put_UpdateSettingsRequestBuilder.java
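UpdateSettingsRequestBuilder above is the fluent entry point for an update-settings call. A hedged usage sketch against an IndicesAdminClient; the index name and setting value are illustrative, and ImmutableSettings is assumed to be the settings builder of that Elasticsearch generation:

import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequestBuilder;
import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsResponse;
import org.elasticsearch.client.IndicesAdminClient;
import org.elasticsearch.common.settings.ImmutableSettings;

public class UpdateSettingsSketch {
    // Raise the refresh interval of one index and block for the acknowledgement.
    static UpdateSettingsResponse relaxRefresh(IndicesAdminClient indices) {
        return new UpdateSettingsRequestBuilder(indices, "my_index")
                .setSettings(ImmutableSettings.settingsBuilder()
                        .put("index.refresh_interval", "30s"))
                .execute()
                .actionGet();
    }
}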
863
public class MoveFileRefactoringParticipant extends MoveParticipant { private IFile file; private static Map<String,TextFileChange> fileChanges = new HashMap<String,TextFileChange>(); private static List<IResource> movingFiles = new ArrayList<IResource>(); @Override protected boolean initialize(Object element) { file = (IFile) element; if (getProcessor() instanceof MoveProcessor) { MoveProcessor moveProcessor = (MoveProcessor) getProcessor(); movingFiles.addAll(Arrays.asList((IResource[]) moveProcessor.getElements())); return getProjectTypeChecker(file.getProject())!=null && file.getFileExtension()!=null && (file.getFileExtension().equals("ceylon") || file.getFileExtension().equals("java")); } else { return false; } } @Override public String getName() { return "Move file participant for Ceylon source"; } @Override public RefactoringStatus checkConditions(IProgressMonitor pm, CheckConditionsContext context) throws OperationCanceledException { return new RefactoringStatus(); } @Override public Change createChange(IProgressMonitor pm) throws CoreException, OperationCanceledException { return null; } @Override public Change createPreChange(IProgressMonitor pm) throws CoreException, OperationCanceledException { try { IProject project = file.getProject(); IFolder folder = (IFolder) getArguments().getDestination(); String newName = folder.getProjectRelativePath() .removeFirstSegments(1) .toPortableString() .replace('/', '.'); String movedRelFilePath = file.getProjectRelativePath() .removeFirstSegments(1) .toPortableString(); String movedRelPath = file.getParent() .getProjectRelativePath() .removeFirstSegments(1) .toPortableString(); String oldName = movedRelPath.replace('/', '.'); List<Change> changes = new ArrayList<Change>(); if (file.getFileExtension().equals("java")) { updateRefsToMovedJavaFile(project, newName, oldName, changes); } else { PhasedUnit movedPhasedUnit = getProjectTypeChecker(project) .getPhasedUnitFromRelativePath(movedRelFilePath); if (movedPhasedUnit==null) { return null; } List<Declaration> declarations = movedPhasedUnit.getDeclarations(); if (newName.equals(oldName)) return null; updateRefsFromMovedCeylonFile(project, newName, oldName, changes, movedPhasedUnit, declarations); updateRefsToMovedCeylonFile(project, newName, oldName, changes, movedPhasedUnit, declarations); } if (changes.isEmpty()) return null; CompositeChange result = new CompositeChange("Ceylon source changes") { @Override public Change perform(IProgressMonitor pm) throws CoreException { fileChanges.clear(); movingFiles.clear(); return super.perform(pm); } }; for (Change change: changes) { result.add(change); } return result; } catch (Exception e) { e.printStackTrace(); return null; } } protected void updateRefsFromMovedCeylonFile(final IProject project, final String newName, final String oldName, final List<Change> changes, final PhasedUnit movedPhasedUnit, final List<Declaration> declarations) { final Map<Declaration,String> imports = new HashMap<Declaration,String>(); movedPhasedUnit.getCompilationUnit().visit(new Visitor() { @Override public void visit(ImportMemberOrType that) { super.visit(that); visitIt(that.getIdentifier(), that.getDeclarationModel()); } // @Override // public void visit(QualifiedMemberOrTypeExpression that) { // super.visit(that); // visitIt(that.getIdentifier(), that.getDeclaration()); // } @Override public void visit(BaseMemberOrTypeExpression that) { super.visit(that); visitIt(that.getIdentifier(), that.getDeclaration()); } @Override public void visit(BaseType that) { 
super.visit(that); visitIt(that.getIdentifier(), that.getDeclarationModel()); } // @Override // public void visit(QualifiedType that) { // super.visit(that); // visitIt(that.getIdentifier(), that.getDeclarationModel()); // } protected void visitIt(Tree.Identifier id, Declaration dec) { if (dec!=null && !declarations.contains(dec)) { Unit unit = dec.getUnit(); if (unit instanceof ProjectSourceFile && movingFiles.contains(((ProjectSourceFile) unit).getFileResource())) { //also moving } else if (unit.getPackage().equals(movedPhasedUnit.getPackage())) { imports.put(dec, id.getText()); } } } //TODO: DocLinks!! }); collectEditsToMovedFile(newName, oldName, changes, movedPhasedUnit, imports); } protected void updateRefsToMovedCeylonFile(final IProject project, final String newName, final String oldName, final List<Change> changes, PhasedUnit movedPhasedUnit, final List<Declaration> declarations) { if (!getArguments().getUpdateReferences()) return; for (PhasedUnit phasedUnit: getProjectTypeChecker(project) .getPhasedUnits().getPhasedUnits()) { if (phasedUnit==movedPhasedUnit || phasedUnit.getUnit() instanceof ProjectSourceFile && movingFiles.contains(((ProjectSourceFile) phasedUnit.getUnit()).getFileResource())) { continue; } final Map<Declaration,String> imports = new HashMap<Declaration,String>(); phasedUnit.getCompilationUnit().visit(new Visitor() { @Override public void visit(ImportMemberOrType that) { super.visit(that); visitIt(that.getIdentifier(), that.getDeclarationModel()); } // @Override // public void visit(QualifiedMemberOrTypeExpression that) { // super.visit(that); // visitIt(that.getIdentifier(), that.getDeclaration()); // } @Override public void visit(BaseMemberOrTypeExpression that) { super.visit(that); visitIt(that.getIdentifier(), that.getDeclaration()); } @Override public void visit(BaseType that) { super.visit(that); visitIt(that.getIdentifier(), that.getDeclarationModel()); } // @Override // public void visit(QualifiedType that) { // super.visit(that); // visitIt(that.getIdentifier(), that.getDeclarationModel()); // } protected void visitIt(Tree.Identifier id, Declaration dec) { if (dec!=null && declarations.contains(dec)) { imports.put(dec, id.getText()); } } //TODO: DocLinks!! 
}); collectEdits(newName, oldName, changes, phasedUnit, imports); } } protected void updateRefsToMovedJavaFile(final IProject project, final String newName, final String oldName, final List<Change> changes) throws JavaModelException { if (!getArguments().getUpdateReferences()) return; ICompilationUnit jcu = (ICompilationUnit) JavaCore.create(file); final IType[] types = jcu.getTypes(); TypeChecker tc = getProjectTypeChecker(project); if (tc==null) return; for (PhasedUnit phasedUnit: tc.getPhasedUnits().getPhasedUnits()) { final Map<Declaration,String> imports = new HashMap<Declaration,String>(); phasedUnit.getCompilationUnit().visit(new Visitor() { @Override public void visit(ImportMemberOrType that) { super.visit(that); visitIt(that.getIdentifier(), that.getDeclarationModel()); } // @Override // public void visit(QualifiedMemberOrTypeExpression that) { // super.visit(that); // visitIt(that.getIdentifier(), that.getDeclaration()); // } @Override public void visit(BaseMemberOrTypeExpression that) { super.visit(that); visitIt(that.getIdentifier(), that.getDeclaration()); } @Override public void visit(BaseType that) { super.visit(that); visitIt(that.getIdentifier(), that.getDeclarationModel()); } // @Override // public void visit(QualifiedType that) { // super.visit(that); // visitIt(that.getIdentifier(), that.getDeclarationModel()); // } protected void visitIt(Tree.Identifier id, Declaration dec) { for (IType type: types) { if (dec!=null && dec.getQualifiedNameString() .equals(getQualifiedName(type))) { imports.put(dec, id.getText()); } } } protected String getQualifiedName(IMember dec) { IJavaElement parent = dec.getParent(); if (parent instanceof ICompilationUnit) { return parent.getParent().getElementName() + "::" + dec.getElementName(); } else if (dec.getDeclaringType()!=null) { return getQualifiedName(dec.getDeclaringType()) + "." 
+ dec.getElementName(); } else { return "@"; } } }); collectEdits(newName, oldName, changes, phasedUnit, imports); } } private void collectEditsToMovedFile(String newName, String oldName, List<Change> changes, PhasedUnit movedPhasedUnit, Map<Declaration, String> imports) { try { IFileVirtualFile virtualFile = (IFileVirtualFile) movedPhasedUnit.getUnitFile(); IFile file = virtualFile.getFile(); String path = file.getProjectRelativePath().toPortableString(); TextFileChange change = fileChanges.get(path); if (change==null) { change = new TextFileChange(file.getName(), file); change.setEdit(new MultiTextEdit()); changes.add(change); fileChanges.put(path, change); } Tree.CompilationUnit cu = movedPhasedUnit.getCompilationUnit(); if (!imports.isEmpty()) { List<InsertEdit> edits = importEdits(cu, imports.keySet(), imports.values(), null, EditorUtil.getDocument(change)); for (TextEdit edit: edits) { change.addEdit(edit); } } Tree.Import toDelete = findImportNode(cu, newName); if (toDelete!=null) { change.addEdit(new DeleteEdit(toDelete.getStartIndex(), toDelete.getStopIndex()-toDelete.getStartIndex()+1)); } } catch (Exception e) { e.printStackTrace(); } } private void collectEdits(String newName, String oldName, List<Change> changes, PhasedUnit phasedUnit, Map<Declaration, String> imports) { try { Tree.CompilationUnit cu = phasedUnit.getCompilationUnit(); if (!imports.isEmpty()) { IFileVirtualFile virtualFile = (IFileVirtualFile) phasedUnit.getUnitFile(); IFile file = virtualFile.getFile(); String path = file.getProjectRelativePath().toPortableString(); TextFileChange change = fileChanges.get(path); if (change==null) { change = new TextFileChange(file.getName(), file); change.setEdit(new MultiTextEdit()); changes.add(change); fileChanges.put(path, change); } List<TextEdit> edits = importEditForMove(cu, imports.keySet(), imports.values(), newName, oldName, EditorUtil.getDocument(change)); if (!edits.isEmpty()) { for (TextEdit edit: edits) { change.addEdit(edit); } } } } catch (Exception e) { e.printStackTrace(); } } }
1no label
plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_refactor_MoveFileRefactoringParticipant.java
1,381
public interface APIWrapper<T> { public void wrapDetails(T model, HttpServletRequest request); public void wrapSummary(T model, HttpServletRequest request); }
0true
core_broadleaf-framework-web_src_main_java_org_broadleafcommerce_core_web_api_wrapper_APIWrapper.java
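APIWrapper above is a two-method contract for turning a domain model into a REST representation at two levels of detail. A minimal hypothetical implementation, assuming it sits next to the interface; the String model and the stored fields are purely illustrative:

import javax.servlet.http.HttpServletRequest;

public class GreetingWrapper implements APIWrapper<String> {

    protected String summary;
    protected String details;

    @Override
    public void wrapSummary(String model, HttpServletRequest request) {
        summary = model;                                  // keep only the cheap fields
    }

    @Override
    public void wrapDetails(String model, HttpServletRequest request) {
        wrapSummary(model, request);
        details = model + " (via " + request.getRequestURI() + ")";  // add the request-dependent detail
    }
}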
3,384
static final class HashedBytesValues extends BytesValues { private final IntArray hashes; HashedBytesValues(IntArray hashes, Reader bytes, MonotonicAppendingLongBuffer termOrdToBytesOffset, Docs ordinals) { super(bytes, termOrdToBytesOffset, ordinals); this.hashes = hashes; } @Override public int currentValueHash() { assert ordinals.currentOrd() >= 0; return hashes.get(ordinals.currentOrd()); } }
0true
src_main_java_org_elasticsearch_index_fielddata_plain_PagedBytesAtomicFieldData.java
21
@Controller("blAdminCategoryController") @RequestMapping("/" + AdminCategoryController.SECTION_KEY) public class AdminCategoryController extends AdminBasicEntityController { protected static final String SECTION_KEY = "category"; @Resource(name = "blCatalogService") protected CatalogService catalogService; @Override protected String getSectionKey(Map<String, String> pathVars) { //allow external links to work for ToOne items if (super.getSectionKey(pathVars) != null) { return super.getSectionKey(pathVars); } return SECTION_KEY; } @SuppressWarnings("unchecked") @RequestMapping(value = "", method = RequestMethod.GET) public String viewEntityList(HttpServletRequest request, HttpServletResponse response, Model model, @PathVariable Map<String, String> pathVars, @RequestParam MultiValueMap<String, String> requestParams) throws Exception { super.viewEntityList(request, response, model, pathVars, requestParams); List<Category> parentCategories = catalogService.findAllParentCategories(); model.addAttribute("parentCategories", parentCategories); List<EntityFormAction> mainActions = (List<EntityFormAction>) model.asMap().get("mainActions"); mainActions.add(new EntityFormAction("CategoryTreeView") .withButtonClass("show-category-tree-view") .withDisplayText("Category_Tree_View")); mainActions.add(new EntityFormAction("CategoryListView") .withButtonClass("show-category-list-view active") .withDisplayText("Category_List_View")); model.addAttribute("viewType", "categoryTree"); return "modules/defaultContainer"; } }
0true
admin_broadleaf-admin-module_src_main_java_org_broadleafcommerce_admin_web_controller_entity_AdminCategoryController.java
392
public static final Function<IndexEntry,String> ENTRY2FIELD_FCT = new Function<IndexEntry, String>() { @Nullable @Override public String apply(@Nullable IndexEntry indexEntry) { return indexEntry.field; } };
0true
titan-core_src_main_java_com_thinkaurelius_titan_diskstorage_indexing_IndexMutation.java
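ENTRY2FIELD_FCT above is a Guava Function that projects an IndexEntry onto its field name. A sketch of the typical use, assuming it is referenced as a static member of IndexMutation (per the file it lives in) and compiled alongside it:

import com.google.common.collect.Iterables;

import java.util.List;

public class IndexMutationSketch {
    // Collect just the field names touched by a batch of index additions.
    static Iterable<String> touchedFields(List<IndexEntry> additions) {
        return Iterables.transform(additions, IndexMutation.ENTRY2FIELD_FCT);
    }
}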
674
public class GetWarmersAction extends IndicesAction<GetWarmersRequest, GetWarmersResponse, GetWarmersRequestBuilder> { public static final GetWarmersAction INSTANCE = new GetWarmersAction(); public static final String NAME = "warmers/get"; private GetWarmersAction() { super(NAME); } @Override public GetWarmersRequestBuilder newRequestBuilder(IndicesAdminClient client) { return new GetWarmersRequestBuilder((InternalGenericClient) client); } @Override public GetWarmersResponse newResponse() { return new GetWarmersResponse(); } }
0true
src_main_java_org_elasticsearch_action_admin_indices_warmer_get_GetWarmersAction.java
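GetWarmersAction above follows the singleton action pattern: a shared INSTANCE, a transport name, and factories for the request builder and response. A hedged sketch of driving it through its own builder; only the calls visible in the record plus the generic execute/actionGet chain of that client API are assumed:

import org.elasticsearch.client.IndicesAdminClient;

public class GetWarmersSketch {
    // Fetch the warmers of the requested indices and block for the response.
    static GetWarmersResponse fetchWarmers(IndicesAdminClient indices) {
        return GetWarmersAction.INSTANCE.newRequestBuilder(indices)
                .execute()
                .actionGet();
    }
}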
1,514
public class PreferLocalPrimariesToRelocatingPrimariesTests extends ElasticsearchAllocationTestCase { @Test public void testPreferLocalPrimaryAllocationOverFiltered() { int concurrentRecoveries = randomIntBetween(1, 10); int primaryRecoveries = randomIntBetween(1, 10); int numberOfShards = randomIntBetween(5, 20); int totalNumberOfShards = numberOfShards * 2; logger.info("create an allocation with [{}] initial primary recoveries and [{}] concurrent recoveries", primaryRecoveries, concurrentRecoveries); AllocationService strategy = createAllocationService(settingsBuilder() .put("cluster.routing.allocation.node_concurrent_recoveries", concurrentRecoveries) .put("cluster.routing.allocation.node_initial_primaries_recoveries", primaryRecoveries) .build()); logger.info("create 2 indices with [{}] no replicas, and wait till all are allocated", numberOfShards); MetaData metaData = MetaData.builder() .put(IndexMetaData.builder("test1").numberOfShards(numberOfShards).numberOfReplicas(0)) .put(IndexMetaData.builder("test2").numberOfShards(numberOfShards).numberOfReplicas(0)) .build(); RoutingTable routingTable = RoutingTable.builder() .addAsNew(metaData.index("test1")) .addAsNew(metaData.index("test2")) .build(); ClusterState clusterState = ClusterState.builder().metaData(metaData).routingTable(routingTable).build(); logger.info("adding two nodes and performing rerouting till all are allocated"); clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder() .put(newNode("node1", ImmutableMap.of("tag1", "value1"))) .put(newNode("node2", ImmutableMap.of("tag1", "value2")))).build(); routingTable = strategy.reroute(clusterState).routingTable(); clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build(); while (!clusterState.routingNodes().shardsWithState(INITIALIZING).isEmpty()) { routingTable = strategy.applyStartedShards(clusterState, clusterState.routingNodes().shardsWithState(INITIALIZING)).routingTable(); clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build(); } logger.info("remove one of the nodes and apply filter to move everything from another node"); metaData = MetaData.builder() .put(IndexMetaData.builder("test1").settings(settingsBuilder() .put("index.number_of_shards", numberOfShards) .put("index.number_of_replicas", 0) .put("index.routing.allocation.exclude.tag1", "value2") .build())) .put(IndexMetaData.builder("test2").settings(settingsBuilder() .put("index.number_of_shards", numberOfShards) .put("index.number_of_replicas", 0) .put("index.routing.allocation.exclude.tag1", "value2") .build())) .build(); clusterState = ClusterState.builder(clusterState).metaData(metaData).nodes(DiscoveryNodes.builder(clusterState.nodes()).remove("node1")).build(); routingTable = strategy.reroute(clusterState).routingTable(); clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build(); logger.info("[{}] primaries should be still started but [{}] other primaries should be unassigned", numberOfShards, numberOfShards); assertThat(clusterState.routingNodes().shardsWithState(STARTED).size(), equalTo(numberOfShards)); assertThat(clusterState.routingNodes().shardsWithState(INITIALIZING).size(), equalTo(0)); assertThat(clusterState.routingTable().shardsWithState(UNASSIGNED).size(), equalTo(numberOfShards)); logger.info("start node back up"); clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder(clusterState.nodes()) .put(newNode("node1", ImmutableMap.of("tag1", "value1")))).build(); 
routingTable = strategy.reroute(clusterState).routingTable(); clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build(); while (clusterState.routingNodes().shardsWithState(STARTED).size() < totalNumberOfShards) { int localInitializations = 0; int relocatingInitializations = 0; for (MutableShardRouting routing : clusterState.routingNodes().shardsWithState(INITIALIZING)) { if (routing.relocatingNodeId() == null) { localInitializations++; } else { relocatingInitializations++; } } int needToInitialize = totalNumberOfShards - clusterState.routingNodes().shardsWithState(STARTED).size() - clusterState.routingNodes().shardsWithState(RELOCATING).size(); logger.info("local initializations: [{}], relocating: [{}], need to initialize: {}", localInitializations, relocatingInitializations, needToInitialize); assertThat(localInitializations, equalTo(Math.min(primaryRecoveries, needToInitialize))); clusterState = startRandomInitializingShard(clusterState, strategy); } } }
0true
src_test_java_org_elasticsearch_cluster_routing_allocation_PreferLocalPrimariesToRelocatingPrimariesTests.java
2,260
final ChannelFutureListener remover = new ChannelFutureListener() { public void operationComplete(ChannelFuture future) throws Exception { boolean removed = openChannels.remove(future.getChannel()); if (removed) { openChannelsMetric.dec(); } if (logger.isTraceEnabled()) { logger.trace("channel closed: {}", future.getChannel()); } } };
0true
src_main_java_org_elasticsearch_common_netty_OpenChannelsHandler.java
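The anonymous ChannelFutureListener above removes a channel from the handler's open-channel set and decrements the metric once the channel closes. In the Netty 3 API used by that codebase it would be attached to a channel's close future, roughly as below; the channel variable is illustrative:

import org.jboss.netty.channel.Channel;
import org.jboss.netty.channel.ChannelFutureListener;

public class OpenChannelsSketch {
    // Register the remover so the channel is dropped from the bookkeeping set when it closes.
    static void track(Channel channel, ChannelFutureListener remover) {
        channel.getCloseFuture().addListener(remover);
    }
}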
5,082
public class SearchServiceTransportAction extends AbstractComponent { static final class FreeContextResponseHandler extends EmptyTransportResponseHandler { private final ESLogger logger; FreeContextResponseHandler(ESLogger logger) { super(ThreadPool.Names.SAME); this.logger = logger; } @Override public void handleException(TransportException exp) { logger.warn("Failed to send release search context", exp); } } private final TransportService transportService; private final ClusterService clusterService; private final SearchService searchService; private final FreeContextResponseHandler freeContextResponseHandler = new FreeContextResponseHandler(logger); @Inject public SearchServiceTransportAction(Settings settings, TransportService transportService, ClusterService clusterService, SearchService searchService) { super(settings); this.transportService = transportService; this.clusterService = clusterService; this.searchService = searchService; transportService.registerHandler(SearchFreeContextTransportHandler.ACTION, new SearchFreeContextTransportHandler()); transportService.registerHandler(ClearScrollContextsTransportHandler.ACTION, new ClearScrollContextsTransportHandler()); transportService.registerHandler(SearchDfsTransportHandler.ACTION, new SearchDfsTransportHandler()); transportService.registerHandler(SearchQueryTransportHandler.ACTION, new SearchQueryTransportHandler()); transportService.registerHandler(SearchQueryByIdTransportHandler.ACTION, new SearchQueryByIdTransportHandler()); transportService.registerHandler(SearchQueryScrollTransportHandler.ACTION, new SearchQueryScrollTransportHandler()); transportService.registerHandler(SearchQueryFetchTransportHandler.ACTION, new SearchQueryFetchTransportHandler()); transportService.registerHandler(SearchQueryQueryFetchTransportHandler.ACTION, new SearchQueryQueryFetchTransportHandler()); transportService.registerHandler(SearchQueryFetchScrollTransportHandler.ACTION, new SearchQueryFetchScrollTransportHandler()); transportService.registerHandler(SearchFetchByIdTransportHandler.ACTION, new SearchFetchByIdTransportHandler()); transportService.registerHandler(SearchScanTransportHandler.ACTION, new SearchScanTransportHandler()); transportService.registerHandler(SearchScanScrollTransportHandler.ACTION, new SearchScanScrollTransportHandler()); } public void sendFreeContext(DiscoveryNode node, final long contextId, SearchRequest request) { if (clusterService.state().nodes().localNodeId().equals(node.id())) { searchService.freeContext(contextId); } else { transportService.sendRequest(node, SearchFreeContextTransportHandler.ACTION, new SearchFreeContextRequest(request, contextId), freeContextResponseHandler); } } public void sendFreeContext(DiscoveryNode node, long contextId, ClearScrollRequest request, final ActionListener<Boolean> actionListener) { if (clusterService.state().nodes().localNodeId().equals(node.id())) { searchService.freeContext(contextId); actionListener.onResponse(true); } else { transportService.sendRequest(node, SearchFreeContextTransportHandler.ACTION, new SearchFreeContextRequest(request, contextId), new TransportResponseHandler<TransportResponse>() { @Override public TransportResponse newInstance() { return TransportResponse.Empty.INSTANCE; } @Override public void handleResponse(TransportResponse response) { actionListener.onResponse(true); } @Override public void handleException(TransportException exp) { actionListener.onFailure(exp); } @Override public String executor() { return ThreadPool.Names.SAME; } }); } } public void 
sendClearAllScrollContexts(DiscoveryNode node, ClearScrollRequest request, final ActionListener<Boolean> actionListener) { if (clusterService.state().nodes().localNodeId().equals(node.id())) { searchService.freeAllScrollContexts(); actionListener.onResponse(true); } else { transportService.sendRequest(node, ClearScrollContextsTransportHandler.ACTION, new ClearScrollContextsRequest(request), new TransportResponseHandler<TransportResponse>() { @Override public TransportResponse newInstance() { return TransportResponse.Empty.INSTANCE; } @Override public void handleResponse(TransportResponse response) { actionListener.onResponse(true); } @Override public void handleException(TransportException exp) { actionListener.onFailure(exp); } @Override public String executor() { return ThreadPool.Names.SAME; } }); } } public void sendExecuteDfs(DiscoveryNode node, final ShardSearchRequest request, final SearchServiceListener<DfsSearchResult> listener) { if (clusterService.state().nodes().localNodeId().equals(node.id())) { try { DfsSearchResult result = searchService.executeDfsPhase(request); listener.onResult(result); } catch (Throwable e) { listener.onFailure(e); } } else { transportService.sendRequest(node, SearchDfsTransportHandler.ACTION, request, new BaseTransportResponseHandler<DfsSearchResult>() { @Override public DfsSearchResult newInstance() { return new DfsSearchResult(); } @Override public void handleResponse(DfsSearchResult response) { listener.onResult(response); } @Override public void handleException(TransportException exp) { listener.onFailure(exp); } @Override public String executor() { return ThreadPool.Names.SAME; } }); } } public void sendExecuteQuery(DiscoveryNode node, final ShardSearchRequest request, final SearchServiceListener<QuerySearchResult> listener) { if (clusterService.state().nodes().localNodeId().equals(node.id())) { try { QuerySearchResult result = searchService.executeQueryPhase(request); listener.onResult(result); } catch (Throwable e) { listener.onFailure(e); } } else { transportService.sendRequest(node, SearchQueryTransportHandler.ACTION, request, new BaseTransportResponseHandler<QuerySearchResult>() { @Override public QuerySearchResult newInstance() { return new QuerySearchResult(); } @Override public void handleResponse(QuerySearchResult response) { listener.onResult(response); } @Override public void handleException(TransportException exp) { listener.onFailure(exp); } @Override public String executor() { return ThreadPool.Names.SAME; } }); } } public void sendExecuteQuery(DiscoveryNode node, final QuerySearchRequest request, final SearchServiceListener<QuerySearchResult> listener) { if (clusterService.state().nodes().localNodeId().equals(node.id())) { try { QuerySearchResult result = searchService.executeQueryPhase(request); listener.onResult(result); } catch (Throwable e) { listener.onFailure(e); } } else { transportService.sendRequest(node, SearchQueryByIdTransportHandler.ACTION, request, new BaseTransportResponseHandler<QuerySearchResult>() { @Override public QuerySearchResult newInstance() { return new QuerySearchResult(); } @Override public void handleResponse(QuerySearchResult response) { listener.onResult(response); } @Override public void handleException(TransportException exp) { listener.onFailure(exp); } @Override public String executor() { return ThreadPool.Names.SAME; } }); } } public void sendExecuteQuery(DiscoveryNode node, final InternalScrollSearchRequest request, final SearchServiceListener<QuerySearchResult> listener) { if 
(clusterService.state().nodes().localNodeId().equals(node.id())) { try { ScrollQuerySearchResult result = searchService.executeQueryPhase(request); listener.onResult(result.queryResult()); } catch (Throwable e) { listener.onFailure(e); } } else { transportService.sendRequest(node, SearchQueryScrollTransportHandler.ACTION, request, new BaseTransportResponseHandler<ScrollQuerySearchResult>() { @Override public ScrollQuerySearchResult newInstance() { return new ScrollQuerySearchResult(); } @Override public void handleResponse(ScrollQuerySearchResult response) { listener.onResult(response.queryResult()); } @Override public void handleException(TransportException exp) { listener.onFailure(exp); } @Override public String executor() { return ThreadPool.Names.SAME; } }); } } public void sendExecuteFetch(DiscoveryNode node, final ShardSearchRequest request, final SearchServiceListener<QueryFetchSearchResult> listener) { if (clusterService.state().nodes().localNodeId().equals(node.id())) { try { QueryFetchSearchResult result = searchService.executeFetchPhase(request); listener.onResult(result); } catch (Throwable e) { listener.onFailure(e); } } else { transportService.sendRequest(node, SearchQueryFetchTransportHandler.ACTION, request, new BaseTransportResponseHandler<QueryFetchSearchResult>() { @Override public QueryFetchSearchResult newInstance() { return new QueryFetchSearchResult(); } @Override public void handleResponse(QueryFetchSearchResult response) { listener.onResult(response); } @Override public void handleException(TransportException exp) { listener.onFailure(exp); } @Override public String executor() { return ThreadPool.Names.SAME; } }); } } public void sendExecuteFetch(DiscoveryNode node, final QuerySearchRequest request, final SearchServiceListener<QueryFetchSearchResult> listener) { if (clusterService.state().nodes().localNodeId().equals(node.id())) { try { QueryFetchSearchResult result = searchService.executeFetchPhase(request); listener.onResult(result); } catch (Throwable e) { listener.onFailure(e); } } else { transportService.sendRequest(node, SearchQueryQueryFetchTransportHandler.ACTION, request, new BaseTransportResponseHandler<QueryFetchSearchResult>() { @Override public QueryFetchSearchResult newInstance() { return new QueryFetchSearchResult(); } @Override public void handleResponse(QueryFetchSearchResult response) { listener.onResult(response); } @Override public void handleException(TransportException exp) { listener.onFailure(exp); } @Override public String executor() { return ThreadPool.Names.SAME; } }); } } public void sendExecuteFetch(DiscoveryNode node, final InternalScrollSearchRequest request, final SearchServiceListener<QueryFetchSearchResult> listener) { if (clusterService.state().nodes().localNodeId().equals(node.id())) { try { ScrollQueryFetchSearchResult result = searchService.executeFetchPhase(request); listener.onResult(result.result()); } catch (Throwable e) { listener.onFailure(e); } } else { transportService.sendRequest(node, SearchQueryFetchScrollTransportHandler.ACTION, request, new BaseTransportResponseHandler<ScrollQueryFetchSearchResult>() { @Override public ScrollQueryFetchSearchResult newInstance() { return new ScrollQueryFetchSearchResult(); } @Override public void handleResponse(ScrollQueryFetchSearchResult response) { listener.onResult(response.result()); } @Override public void handleException(TransportException exp) { listener.onFailure(exp); } @Override public String executor() { return ThreadPool.Names.SAME; } }); } } public void 
sendExecuteFetch(DiscoveryNode node, final FetchSearchRequest request, final SearchServiceListener<FetchSearchResult> listener) { if (clusterService.state().nodes().localNodeId().equals(node.id())) { try { FetchSearchResult result = searchService.executeFetchPhase(request); listener.onResult(result); } catch (Throwable e) { listener.onFailure(e); } } else { transportService.sendRequest(node, SearchFetchByIdTransportHandler.ACTION, request, new BaseTransportResponseHandler<FetchSearchResult>() { @Override public FetchSearchResult newInstance() { return new FetchSearchResult(); } @Override public void handleResponse(FetchSearchResult response) { listener.onResult(response); } @Override public void handleException(TransportException exp) { listener.onFailure(exp); } @Override public String executor() { return ThreadPool.Names.SAME; } }); } } public void sendExecuteScan(DiscoveryNode node, final ShardSearchRequest request, final SearchServiceListener<QuerySearchResult> listener) { if (clusterService.state().nodes().localNodeId().equals(node.id())) { try { QuerySearchResult result = searchService.executeScan(request); listener.onResult(result); } catch (Throwable e) { listener.onFailure(e); } } else { transportService.sendRequest(node, SearchScanTransportHandler.ACTION, request, new BaseTransportResponseHandler<QuerySearchResult>() { @Override public QuerySearchResult newInstance() { return new QuerySearchResult(); } @Override public void handleResponse(QuerySearchResult response) { listener.onResult(response); } @Override public void handleException(TransportException exp) { listener.onFailure(exp); } @Override public String executor() { return ThreadPool.Names.SAME; } }); } } public void sendExecuteScan(DiscoveryNode node, final InternalScrollSearchRequest request, final SearchServiceListener<QueryFetchSearchResult> listener) { if (clusterService.state().nodes().localNodeId().equals(node.id())) { try { ScrollQueryFetchSearchResult result = searchService.executeScan(request); listener.onResult(result.result()); } catch (Throwable e) { listener.onFailure(e); } } else { transportService.sendRequest(node, SearchScanScrollTransportHandler.ACTION, request, new BaseTransportResponseHandler<ScrollQueryFetchSearchResult>() { @Override public ScrollQueryFetchSearchResult newInstance() { return new ScrollQueryFetchSearchResult(); } @Override public void handleResponse(ScrollQueryFetchSearchResult response) { listener.onResult(response.result()); } @Override public void handleException(TransportException exp) { listener.onFailure(exp); } @Override public String executor() { return ThreadPool.Names.SAME; } }); } } class SearchFreeContextRequest extends TransportRequest { private long id; SearchFreeContextRequest() { } SearchFreeContextRequest(TransportRequest request, long id) { super(request); this.id = id; } public long id() { return this.id; } @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); id = in.readLong(); } @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeLong(id); } } class SearchFreeContextTransportHandler extends BaseTransportRequestHandler<SearchFreeContextRequest> { static final String ACTION = "search/freeContext"; @Override public SearchFreeContextRequest newInstance() { return new SearchFreeContextRequest(); } @Override public void messageReceived(SearchFreeContextRequest request, TransportChannel channel) throws Exception { searchService.freeContext(request.id()); 
channel.sendResponse(TransportResponse.Empty.INSTANCE); } @Override public String executor() { // freeing the context is cheap, // no need to fork it to another thread return ThreadPool.Names.SAME; } } class ClearScrollContextsRequest extends TransportRequest { ClearScrollContextsRequest() { } ClearScrollContextsRequest(TransportRequest request) { super(request); } } class ClearScrollContextsTransportHandler extends BaseTransportRequestHandler<ClearScrollContextsRequest> { static final String ACTION = "search/clearScrollContexts"; @Override public ClearScrollContextsRequest newInstance() { return new ClearScrollContextsRequest(); } @Override public void messageReceived(ClearScrollContextsRequest request, TransportChannel channel) throws Exception { searchService.freeAllScrollContexts(); channel.sendResponse(TransportResponse.Empty.INSTANCE); } @Override public String executor() { // freeing the context is cheap, // no need to fork it to another thread return ThreadPool.Names.SAME; } } private class SearchDfsTransportHandler extends BaseTransportRequestHandler<ShardSearchRequest> { static final String ACTION = "search/phase/dfs"; @Override public ShardSearchRequest newInstance() { return new ShardSearchRequest(); } @Override public void messageReceived(ShardSearchRequest request, TransportChannel channel) throws Exception { DfsSearchResult result = searchService.executeDfsPhase(request); channel.sendResponse(result); } @Override public String executor() { return ThreadPool.Names.SEARCH; } } private class SearchQueryTransportHandler extends BaseTransportRequestHandler<ShardSearchRequest> { static final String ACTION = "search/phase/query"; @Override public ShardSearchRequest newInstance() { return new ShardSearchRequest(); } @Override public void messageReceived(ShardSearchRequest request, TransportChannel channel) throws Exception { QuerySearchResult result = searchService.executeQueryPhase(request); channel.sendResponse(result); } @Override public String executor() { return ThreadPool.Names.SEARCH; } } private class SearchQueryByIdTransportHandler extends BaseTransportRequestHandler<QuerySearchRequest> { static final String ACTION = "search/phase/query/id"; @Override public QuerySearchRequest newInstance() { return new QuerySearchRequest(); } @Override public void messageReceived(QuerySearchRequest request, TransportChannel channel) throws Exception { QuerySearchResult result = searchService.executeQueryPhase(request); channel.sendResponse(result); } @Override public String executor() { return ThreadPool.Names.SEARCH; } } private class SearchQueryScrollTransportHandler extends BaseTransportRequestHandler<InternalScrollSearchRequest> { static final String ACTION = "search/phase/query/scroll"; @Override public InternalScrollSearchRequest newInstance() { return new InternalScrollSearchRequest(); } @Override public void messageReceived(InternalScrollSearchRequest request, TransportChannel channel) throws Exception { ScrollQuerySearchResult result = searchService.executeQueryPhase(request); channel.sendResponse(result); } @Override public String executor() { return ThreadPool.Names.SEARCH; } } private class SearchQueryFetchTransportHandler extends BaseTransportRequestHandler<ShardSearchRequest> { static final String ACTION = "search/phase/query+fetch"; @Override public ShardSearchRequest newInstance() { return new ShardSearchRequest(); } @Override public void messageReceived(ShardSearchRequest request, TransportChannel channel) throws Exception { QueryFetchSearchResult result = 
searchService.executeFetchPhase(request); channel.sendResponse(result); } @Override public String executor() { return ThreadPool.Names.SEARCH; } } private class SearchQueryQueryFetchTransportHandler extends BaseTransportRequestHandler<QuerySearchRequest> { static final String ACTION = "search/phase/query/query+fetch"; @Override public QuerySearchRequest newInstance() { return new QuerySearchRequest(); } @Override public void messageReceived(QuerySearchRequest request, TransportChannel channel) throws Exception { QueryFetchSearchResult result = searchService.executeFetchPhase(request); channel.sendResponse(result); } @Override public String executor() { return ThreadPool.Names.SEARCH; } } private class SearchFetchByIdTransportHandler extends BaseTransportRequestHandler<FetchSearchRequest> { static final String ACTION = "search/phase/fetch/id"; @Override public FetchSearchRequest newInstance() { return new FetchSearchRequest(); } @Override public void messageReceived(FetchSearchRequest request, TransportChannel channel) throws Exception { FetchSearchResult result = searchService.executeFetchPhase(request); channel.sendResponse(result); } @Override public String executor() { return ThreadPool.Names.SEARCH; } } private class SearchQueryFetchScrollTransportHandler extends BaseTransportRequestHandler<InternalScrollSearchRequest> { static final String ACTION = "search/phase/query+fetch/scroll"; @Override public InternalScrollSearchRequest newInstance() { return new InternalScrollSearchRequest(); } @Override public void messageReceived(InternalScrollSearchRequest request, TransportChannel channel) throws Exception { ScrollQueryFetchSearchResult result = searchService.executeFetchPhase(request); channel.sendResponse(result); } @Override public String executor() { return ThreadPool.Names.SEARCH; } } private class SearchScanTransportHandler extends BaseTransportRequestHandler<ShardSearchRequest> { static final String ACTION = "search/phase/scan"; @Override public ShardSearchRequest newInstance() { return new ShardSearchRequest(); } @Override public void messageReceived(ShardSearchRequest request, TransportChannel channel) throws Exception { QuerySearchResult result = searchService.executeScan(request); channel.sendResponse(result); } @Override public String executor() { return ThreadPool.Names.SEARCH; } } private class SearchScanScrollTransportHandler extends BaseTransportRequestHandler<InternalScrollSearchRequest> { static final String ACTION = "search/phase/scan/scroll"; @Override public InternalScrollSearchRequest newInstance() { return new InternalScrollSearchRequest(); } @Override public void messageReceived(InternalScrollSearchRequest request, TransportChannel channel) throws Exception { ScrollQueryFetchSearchResult result = searchService.executeScan(request); channel.sendResponse(result); } @Override public String executor() { return ThreadPool.Names.SEARCH; } } }
1no label
src_main_java_org_elasticsearch_search_action_SearchServiceTransportAction.java
788
public interface OfferDao { List<Offer> readAllOffers(); Offer readOfferById(Long offerId); List<Offer> readOffersByAutomaticDeliveryType(); Offer save(Offer offer); void delete(Offer offer); Offer create(); CandidateOrderOffer createCandidateOrderOffer(); CandidateItemOffer createCandidateItemOffer(); CandidateFulfillmentGroupOffer createCandidateFulfillmentGroupOffer(); OrderItemAdjustment createOrderItemAdjustment(); OrderItemPriceDetailAdjustment createOrderItemPriceDetailAdjustment(); OrderAdjustment createOrderAdjustment(); FulfillmentGroupAdjustment createFulfillmentGroupAdjustment(); OfferInfo createOfferInfo(); OfferInfo save(OfferInfo offerInfo); void delete(OfferInfo offerInfo); /** * Returns the number of milliseconds that the current date/time will be cached for queries before refreshing. * This aids in query caching; otherwise every query that utilizes the current date would be different and caching * would be ineffective. * * @return the milliseconds to cache the current date/time */ public Long getCurrentDateResolution(); /** * Sets the number of milliseconds that the current date/time will be cached for queries before refreshing. * This aids in query caching; otherwise every query that utilizes the current date would be different and caching * would be ineffective. * * @param currentDateResolution the milliseconds to cache the current date/time */ public void setCurrentDateResolution(Long currentDateResolution); }
0true
core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_offer_dao_OfferDao.java
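The getCurrentDateResolution/setCurrentDateResolution contract in the OfferDao row above carries the one piece of reasoning in this interface: by holding the "current" date steady for a configured number of milliseconds, every offer query issued inside that window binds the same date literal and can share a query-cache entry. A minimal sketch of a caller deriving such a cache-friendly date follows; the helper method and the injected offerDao reference are hypothetical, not part of the Broadleaf API.

// Hypothetical helper (not part of OfferDao): truncate the clock to the DAO's configured
// resolution so that every query issued inside the same window sees an identical date value.
protected Date getCacheableCurrentDate(OfferDao offerDao) {
    Long resolution = offerDao.getCurrentDateResolution(); // milliseconds, e.g. 10000L
    long now = System.currentTimeMillis();
    if (resolution == null || resolution <= 0L) {
        return new Date(now); // no bucketing configured; fall back to the exact time
    }
    return new Date(now - (now % resolution)); // same value for the whole window
}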
883
public class OScriptBlock extends OAbstractBlock { public static final String NAME = "script"; @Override public Object processBlock(OComposableProcessor iManager, final OCommandContext iContext, final ODocument iConfig, ODocument iOutput, final boolean iReadOnly) { final String language = getFieldOrDefault(iContext, iConfig, "language", "javascript"); Object code = getRequiredField(iContext, iConfig, "code"); if (OMultiValue.isMultiValue(code)) { // CONCATENATES THE SNIPPETS INTO ONE BIG SCRIPT final StringBuilder buffer = new StringBuilder(); for (Object o : OMultiValue.getMultiValueIterable(code)) { if (buffer.length() > 0) buffer.append(";"); buffer.append(o.toString()); } code = buffer.toString(); } final OCommandScript script = new OCommandScript(language, code.toString()); script.getContext().setParent(iContext); iContext.setVariable("block", this); return script.execute(); } @Override public String getName() { return NAME; } }
0true
core_src_main_java_com_orientechnologies_orient_core_processor_block_OScriptBlock.java
729
public class TransportIndexDeleteAction extends TransportIndexReplicationOperationAction<IndexDeleteRequest, IndexDeleteResponse, ShardDeleteRequest, ShardDeleteRequest, ShardDeleteResponse> { @Inject public TransportIndexDeleteAction(Settings settings, ClusterService clusterService, TransportService transportService, ThreadPool threadPool, TransportShardDeleteAction deleteAction) { super(settings, transportService, clusterService, threadPool, deleteAction); } @Override protected IndexDeleteRequest newRequestInstance() { return new IndexDeleteRequest(); } @Override protected IndexDeleteResponse newResponseInstance(IndexDeleteRequest request, AtomicReferenceArray shardsResponses) { int successfulShards = 0; int failedShards = 0; ArrayList<ShardDeleteResponse> responses = new ArrayList<ShardDeleteResponse>(); for (int i = 0; i < shardsResponses.length(); i++) { if (shardsResponses.get(i) == null) { failedShards++; } else { responses.add((ShardDeleteResponse) shardsResponses.get(i)); successfulShards++; } } return new IndexDeleteResponse(request.index(), successfulShards, failedShards, responses.toArray(new ShardDeleteResponse[responses.size()])); } @Override protected boolean accumulateExceptions() { return false; } @Override protected String transportAction() { return "indices/index/delete"; } @Override protected ClusterBlockException checkGlobalBlock(ClusterState state, IndexDeleteRequest request) { return state.blocks().globalBlockedException(ClusterBlockLevel.WRITE); } @Override protected ClusterBlockException checkRequestBlock(ClusterState state, IndexDeleteRequest request) { return state.blocks().indexBlockedException(ClusterBlockLevel.WRITE, request.index()); } @Override protected GroupShardsIterator shards(IndexDeleteRequest request) { return clusterService.operationRouting().broadcastDeleteShards(clusterService.state(), request.index()); } @Override protected ShardDeleteRequest newShardRequestInstance(IndexDeleteRequest request, int shardId) { return new ShardDeleteRequest(request, shardId); } }
0true
src_main_java_org_elasticsearch_action_delete_index_TransportIndexDeleteAction.java
910
if (!hasSameContentOf(makeDbCall(iMyDb, new ODbRelatedCall<ODocument>() { public ODocument call() { return (ODocument) myEntry.getValue(); } }), iMyDb, makeDbCall(iOtherDb, new ODbRelatedCall<ODocument>() {
0true
core_src_main_java_com_orientechnologies_orient_core_record_impl_ODocumentHelper.java
520
public class OMemoryLockException extends ODatabaseException { private static final long serialVersionUID = 1L; public OMemoryLockException(String message, Throwable cause) { super(message, cause); } }
0true
core_src_main_java_com_orientechnologies_orient_core_exception_OMemoryLockException.java
2,917
public class PorterStemTokenFilterFactory extends AbstractTokenFilterFactory { @Inject public PorterStemTokenFilterFactory(Index index, @IndexSettings Settings indexSettings, @Assisted String name, @Assisted Settings settings) { super(index, indexSettings, name, settings); } @Override public TokenStream create(TokenStream tokenStream) { return new PorterStemFilter(tokenStream); } }
0true
src_main_java_org_elasticsearch_index_analysis_PorterStemTokenFilterFactory.java
1,542
@ManagedDescription("IAtomicLong") public class AtomicLongMBean extends HazelcastMBean<IAtomicLong> { public AtomicLongMBean(IAtomicLong managedObject, ManagementService service) { super(managedObject, service); objectName = service.createObjectName("IAtomicLong",managedObject.getName()); } @ManagedAnnotation("name") @ManagedDescription("Name of the DistributedObject") public String getName() { return managedObject.getName(); } @ManagedAnnotation("currentValue") @ManagedDescription("Current Value") public long getCurrentValue() { return managedObject.get(); } @ManagedAnnotation(value = "set", operation = true) @ManagedDescription("set value") public void set(long value) { managedObject.set(value); } @ManagedAnnotation(value = "addAndGet", operation = true) @ManagedDescription("add value and get") public long addAndGet(long delta) { return managedObject.addAndGet(delta); } @ManagedAnnotation(value = "compareAndSet", operation = true) @ManagedDescription("compare expected value with current value if equals then set") public boolean compareAndSet(long expect, long value) { return managedObject.compareAndSet(expect, value); } @ManagedAnnotation(value = "decrementAndGet", operation = true) @ManagedDescription("decrement the current value and get") public long decrementAndGet() { return managedObject.decrementAndGet(); } @ManagedAnnotation(value = "getAndAdd", operation = true) @ManagedDescription("get the current value then add") public long getAndAdd(long delta) { return managedObject.getAndAdd(delta); } @ManagedAnnotation(value = "getAndIncrement", operation = true) @ManagedDescription("get the current value then increment") public long getAndIncrement() { return managedObject.getAndIncrement(); } @ManagedAnnotation(value = "getAndSet", operation = true) @ManagedDescription("get the current value then set") public long getAndSet(long value) { return managedObject.getAndSet(value); } @ManagedAnnotation(value = "incrementAndGet", operation = true) @ManagedDescription("increment the current value then get") public long incrementAndGet() { return managedObject.incrementAndGet(); } @ManagedAnnotation("partitionKey") @ManagedDescription("the partitionKey") public String getPartitionKey() { return managedObject.getPartitionKey(); } }
0true
hazelcast_src_main_java_com_hazelcast_jmx_AtomicLongMBean.java
1,409
@XmlRootElement(name = "phone") @XmlAccessorType(value = XmlAccessType.FIELD) public class PhoneWrapper extends BaseWrapper implements APIWrapper<Phone>, APIUnwrapper<Phone> { @XmlElement protected Long id; @XmlElement protected String phoneNumber; @XmlElement protected Boolean isActive; @XmlElement protected Boolean isDefault; @Override public void wrapDetails(Phone model, HttpServletRequest request) { this.id = model.getId(); this.phoneNumber = model.getPhoneNumber(); this.isActive = model.isActive(); this.isDefault = model.isDefault(); } @Override public void wrapSummary(Phone model, HttpServletRequest request) { wrapDetails(model, request); } @Override public Phone unwrap(HttpServletRequest request, ApplicationContext appContext) { PhoneService phoneService = (PhoneService) appContext.getBean("blPhoneService"); Phone phone = phoneService.create(); phone.setId(this.id); if (this.isActive != null) { phone.setActive(this.isActive); } if (this.isDefault != null) { phone.setDefault(this.isDefault); } phone.setPhoneNumber(this.phoneNumber); return phone; } }
0true
core_broadleaf-framework-web_src_main_java_org_broadleafcommerce_core_web_api_wrapper_PhoneWrapper.java
3,207
INT(32, false, SortField.Type.INT, Integer.MIN_VALUE, Integer.MAX_VALUE) { @Override public long toLong(BytesRef indexForm) { return NumericUtils.prefixCodedToInt(indexForm); } @Override public void toIndexForm(Number number, BytesRef bytes) { NumericUtils.intToPrefixCodedBytes(number.intValue(), 0, bytes); } @Override public Number toNumber(BytesRef indexForm) { return NumericUtils.prefixCodedToInt(indexForm); } },
0true
src_main_java_org_elasticsearch_index_fielddata_IndexNumericFieldData.java
2,456
public class EsThreadPoolExecutor extends ThreadPoolExecutor { private volatile ShutdownListener listener; private final Object monitor = new Object(); EsThreadPoolExecutor(int corePoolSize, int maximumPoolSize, long keepAliveTime, TimeUnit unit, BlockingQueue<Runnable> workQueue, ThreadFactory threadFactory) { this(corePoolSize, maximumPoolSize, keepAliveTime, unit, workQueue, threadFactory, new EsAbortPolicy()); } EsThreadPoolExecutor(int corePoolSize, int maximumPoolSize, long keepAliveTime, TimeUnit unit, BlockingQueue<Runnable> workQueue, ThreadFactory threadFactory, XRejectedExecutionHandler handler) { super(corePoolSize, maximumPoolSize, keepAliveTime, unit, workQueue, threadFactory, handler); } public void shutdown(ShutdownListener listener) { synchronized (monitor) { if (this.listener != null) { throw new ElasticsearchIllegalStateException("Shutdown was already called on this thread pool"); } if (isTerminated()) { listener.onTerminated(); } else { this.listener = listener; } } shutdown(); } @Override protected synchronized void terminated() { super.terminated(); synchronized (monitor) { if (listener != null) { try { listener.onTerminated(); } finally { listener = null; } } } } public static interface ShutdownListener { public void onTerminated(); } }
0true
src_main_java_org_elasticsearch_common_util_concurrent_EsThreadPoolExecutor.java
1,423
public class OChannelBinaryInputStream extends InputStream { private OChannelBinary channel; private final byte[] buffer; private int pos = 0; private int total = -1; private boolean again = true; public OChannelBinaryInputStream(final OChannelBinary channel) { this.channel = channel; buffer = channel.getBuffer(); } @Override public int read() throws IOException { if (pos >= total) if (again) fetch(); else return -1; return buffer[pos++]; } @Override public int available() throws IOException { if (total < 0) // ONLY THE FIRST TIME fetch(); final int remaining = total - pos; return remaining > 0 ? remaining : again ? 1 : 0; } private void fetch() throws IOException { // FETCH DATA pos = 0; total = channel.in.readInt(); if (total > buffer.length) throw new ONetworkProtocolException("Bad chunk size received: " + total + " when the maximum can be: " + buffer.length); if (total > 0) channel.in.readFully(buffer, 0, total); again = channel.in.readByte() == 1; } }
0true
enterprise_src_main_java_com_orientechnologies_orient_enterprise_channel_binary_OChannelBinaryInputStream.java
80
public class DefaultConsoleReader implements OConsoleReader { final BufferedReader reader = new BufferedReader(new InputStreamReader(System.in)); public String readLine() { try { return reader.readLine(); } catch (IOException e) { return null; } } public OConsoleApplication getConsole() { return null; } public void setConsole(OConsoleApplication console) { } }
0true
commons_src_main_java_com_orientechnologies_common_console_DefaultConsoleReader.java
1,999
private static class Any extends AbstractMatcher<Object> implements Serializable { public boolean matches(Object o) { return true; } @Override public String toString() { return "any()"; } public Object readResolve() { return any(); } private static final long serialVersionUID = 0; }
0true
src_main_java_org_elasticsearch_common_inject_matcher_Matchers.java
5,406
public static class SortedAndUnique extends Bytes implements ReaderContextAware { private final FieldDataSource delegate; private final MetaData metaData; private BytesValues bytesValues; public SortedAndUnique(FieldDataSource delegate) { this.delegate = delegate; this.metaData = MetaData.builder(delegate.metaData()).uniqueness(MetaData.Uniqueness.UNIQUE).build(); } @Override public MetaData metaData() { return metaData; } @Override public void setNextReader(AtomicReaderContext reader) { bytesValues = null; // order may change per-segment -> reset } @Override public org.elasticsearch.index.fielddata.BytesValues bytesValues() { if (bytesValues == null) { bytesValues = delegate.bytesValues(); if (bytesValues.isMultiValued() && (!delegate.metaData().uniqueness.unique() || bytesValues.getOrder() != Order.BYTES)) { bytesValues = new SortedUniqueBytesValues(bytesValues); } } return bytesValues; } static class SortedUniqueBytesValues extends FilterBytesValues { final BytesRef spare; int[] sortedIds; final BytesRefHash bytes; int numUniqueValues; int pos = Integer.MAX_VALUE; public SortedUniqueBytesValues(BytesValues delegate) { super(delegate); bytes = new BytesRefHash(); spare = new BytesRef(); } @Override public int setDocument(int docId) { final int numValues = super.setDocument(docId); if (numValues == 0) { sortedIds = null; return 0; } bytes.clear(); bytes.reinit(); for (int i = 0; i < numValues; ++i) { bytes.add(super.nextValue(), super.currentValueHash()); } numUniqueValues = bytes.size(); sortedIds = bytes.sort(BytesRef.getUTF8SortedAsUnicodeComparator()); pos = 0; return numUniqueValues; } @Override public BytesRef nextValue() { bytes.get(sortedIds[pos++], spare); return spare; } @Override public int currentValueHash() { return spare.hashCode(); } @Override public Order getOrder() { return Order.BYTES; } } }
1no label
src_main_java_org_elasticsearch_search_aggregations_support_FieldDataSource.java
805
shardMultiPercolateAction.execute(shardRequest, new ActionListener<TransportShardMultiPercolateAction.Response>() { @Override public void onResponse(TransportShardMultiPercolateAction.Response response) { onShardResponse(shardId, response); } @Override public void onFailure(Throwable e) { onShardFailure(shardId, e); } });
0true
src_main_java_org_elasticsearch_action_percolate_TransportMultiPercolateAction.java
715
public interface RelatedProduct extends PromotableProduct { public Long getId(); public Product getProduct(); public Category getCategory(); public Product getRelatedProduct(); public String getPromotionMessage(); public Long getSequence(); public void setId(Long id); public void setProduct(Product product); public void setCategory(Category category); public void setRelatedProduct(Product relatedProduct); public void setPromotionMessage(String promotionMessage); public void setSequence(Long sequence); }
0true
core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_catalog_domain_RelatedProduct.java
3,709
private class ManagedThread extends Thread { protected final int id; public ManagedThread(Runnable target, String name, int id) { super(threadGroup, target, name); this.id = id; } public void run() { try { super.run(); } catch (OutOfMemoryError e) { OutOfMemoryErrorDispatcher.onOutOfMemory(e); } finally { try { idQ.offer(id); } catch (Throwable ignored) { } } } }
1no label
hazelcast_src_main_java_com_hazelcast_util_executor_PoolExecutorThreadFactory.java
254
public class StoreRateLimiting { public static interface Provider { StoreRateLimiting rateLimiting(); } public interface Listener { void onPause(long nanos); } public static enum Type { NONE, MERGE, ALL; public static Type fromString(String type) throws ElasticsearchIllegalArgumentException { if ("none".equalsIgnoreCase(type)) { return NONE; } else if ("merge".equalsIgnoreCase(type)) { return MERGE; } else if ("all".equalsIgnoreCase(type)) { return ALL; } throw new ElasticsearchIllegalArgumentException("rate limiting type [" + type + "] not valid, can be one of [all|merge|none]"); } } private final SimpleRateLimiter rateLimiter = new SimpleRateLimiter(0); private volatile SimpleRateLimiter actualRateLimiter; private volatile Type type; public StoreRateLimiting() { } @Nullable public RateLimiter getRateLimiter() { return actualRateLimiter; } public void setMaxRate(ByteSizeValue rate) { if (rate.bytes() <= 0) { actualRateLimiter = null; } else if (actualRateLimiter == null) { actualRateLimiter = rateLimiter; actualRateLimiter.setMbPerSec(rate.mbFrac()); } else { assert rateLimiter == actualRateLimiter; rateLimiter.setMbPerSec(rate.mbFrac()); } } public Type getType() { return type; } public void setType(Type type) { this.type = type; } public void setType(String type) throws ElasticsearchIllegalArgumentException { this.type = Type.fromString(type); } }
0true
src_main_java_org_apache_lucene_store_StoreRateLimiting.java
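StoreRateLimiting in the row above is a small state machine: Type.fromString accepts only "all", "merge", or "none", and setMaxRate swaps the active limiter to null whenever the configured rate drops to zero or below. A short usage sketch follows, assuming the usual ByteSizeValue/ByteSizeUnit classes from org.elasticsearch.common.unit; it is illustrative only, not how the index store module actually wires the limiter.

// Illustrative only: throttle merges to 20mb/s, then read the effective limiter back.
StoreRateLimiting rateLimiting = new StoreRateLimiting();
rateLimiting.setType("merge"); // parsed via Type.fromString; any other string throws
rateLimiting.setMaxRate(new ByteSizeValue(20, ByteSizeUnit.MB)); // a rate <= 0 disables the limiter
RateLimiter limiter = rateLimiting.getRateLimiter(); // null when rate limiting is disabled
assert rateLimiting.getType() == StoreRateLimiting.Type.MERGE;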
1,089
@Service("blOrderService") @ManagedResource(objectName="org.broadleafcommerce:name=OrderService", description="Order Service", currencyTimeLimit=15) public class OrderServiceImpl implements OrderService { private static final Log LOG = LogFactory.getLog(OrderServiceImpl.class); /* DAOs */ @Resource(name = "blPaymentInfoDao") protected PaymentInfoDao paymentInfoDao; @Resource(name = "blOrderDao") protected OrderDao orderDao; @Resource(name = "blOfferDao") protected OfferDao offerDao; /* Factories */ @Resource(name = "blNullOrderFactory") protected NullOrderFactory nullOrderFactory; /* Services */ @Resource(name = "blPricingService") protected PricingService pricingService; @Resource(name = "blOrderItemService") protected OrderItemService orderItemService; @Resource(name = "blFulfillmentGroupService") protected FulfillmentGroupService fulfillmentGroupService; @Resource(name = "blOfferService") protected OfferService offerService; @Resource(name = "blSecurePaymentInfoService") protected SecurePaymentInfoService securePaymentInfoService; @Resource(name = "blMergeCartService") protected MergeCartService mergeCartService; @Resource(name = "blOrderServiceExtensionManager") protected OrderServiceExtensionManager extensionManager; /* Workflows */ @Resource(name = "blAddItemWorkflow") protected SequenceProcessor addItemWorkflow; @Resource(name = "blUpdateItemWorkflow") protected SequenceProcessor updateItemWorkflow; @Resource(name = "blRemoveItemWorkflow") protected SequenceProcessor removeItemWorkflow; @Resource(name = "blTransactionManager") protected PlatformTransactionManager transactionManager; @Value("${pricing.retry.count.for.lock.failure}") protected int pricingRetryCountForLockFailure = 3; @Value("${pricing.retry.wait.interval.for.lock.failure}") protected long pricingRetryWaitIntervalForLockFailure = 500L; /* Fields */ protected boolean moveNamedOrderItems = true; protected boolean deleteEmptyNamedOrders = true; @Value("${automatically.merge.like.items}") protected boolean automaticallyMergeLikeItems; @Override @Transactional("blTransactionManager") public Order createNewCartForCustomer(Customer customer) { return orderDao.createNewCartForCustomer(customer); } @Override @Transactional("blTransactionManager") public Order createNamedOrderForCustomer(String name, Customer customer) { Order namedOrder = orderDao.create(); namedOrder.setCustomer(customer); namedOrder.setName(name); namedOrder.setStatus(OrderStatus.NAMED); if (extensionManager != null) { extensionManager.getProxy().attachAdditionalDataToNewNamedCart(customer, namedOrder); } if (BroadleafRequestContext.getBroadleafRequestContext() != null) { namedOrder.setLocale(BroadleafRequestContext.getBroadleafRequestContext().getLocale()); } return orderDao.save(namedOrder); // No need to price here } @Override public Order findNamedOrderForCustomer(String name, Customer customer) { return orderDao.readNamedOrderForCustomer(customer, name); } @Override public Order findOrderById(Long orderId) { return orderDao.readOrderById(orderId); } @Override public Order getNullOrder() { return nullOrderFactory.getNullOrder(); } @Override public Order findCartForCustomer(Customer customer) { return orderDao.readCartForCustomer(customer); } @Override public List<Order> findOrdersForCustomer(Customer customer) { return orderDao.readOrdersForCustomer(customer.getId()); } @Override public List<Order> findOrdersForCustomer(Customer customer, OrderStatus status) { return orderDao.readOrdersForCustomer(customer, status); } @Override public Order 
findOrderByOrderNumber(String orderNumber) { return orderDao.readOrderByOrderNumber(orderNumber); } @Override public List<PaymentInfo> findPaymentInfosForOrder(Order order) { return paymentInfoDao.readPaymentInfosForOrder(order); } @Override @Transactional("blTransactionManager") public PaymentInfo addPaymentToOrder(Order order, PaymentInfo payment, Referenced securePaymentInfo) { payment.setOrder(order); order.getPaymentInfos().add(payment); order = persist(order); int paymentIndex = order.getPaymentInfos().size() - 1; if (securePaymentInfo != null) { securePaymentInfoService.save(securePaymentInfo); } return order.getPaymentInfos().get(paymentIndex); } @Override public Order save(Order order, Boolean priceOrder) throws PricingException { //persist the order first TransactionStatus status = TransactionUtils.createTransaction("saveOrder", TransactionDefinition.PROPAGATION_REQUIRED, transactionManager); try { order = persist(order); TransactionUtils.finalizeTransaction(status, transactionManager, false); } catch (RuntimeException ex) { TransactionUtils.finalizeTransaction(status, transactionManager, true); throw ex; } //make any pricing changes - possibly retrying with the persisted state if there's a lock failure if (priceOrder) { int retryCount = 0; boolean isValid = false; while (!isValid) { try { order = pricingService.executePricing(order); isValid = true; } catch (Exception ex) { boolean isValidCause = false; Throwable cause = ex; while (!isValidCause) { if (cause.getClass().equals(LockAcquisitionException.class)) { isValidCause = true; } cause = cause.getCause(); if (cause == null) { break; } } if (isValidCause) { if (LOG.isInfoEnabled()) { LOG.info("Problem acquiring lock during pricing call - attempting to price again."); } isValid = false; if (retryCount >= pricingRetryCountForLockFailure) { if (LOG.isInfoEnabled()) { LOG.info("Problem acquiring lock during pricing call. Retry limit exceeded at (" + retryCount + "). Throwing exception."); } if (ex instanceof PricingException) { throw (PricingException) ex; } else { throw new PricingException(ex); } } else { order = findOrderById(order.getId()); retryCount++; } try { Thread.sleep(pricingRetryWaitIntervalForLockFailure); } catch (Throwable e) { //do nothing } } else { if (ex instanceof PricingException) { throw (PricingException) ex; } else { throw new PricingException(ex); } } } } //make the final save of the priced order status = TransactionUtils.createTransaction("saveOrder", TransactionDefinition.PROPAGATION_REQUIRED, transactionManager); try { order = persist(order); TransactionUtils.finalizeTransaction(status, transactionManager, false); } catch (RuntimeException ex) { TransactionUtils.finalizeTransaction(status, transactionManager, true); throw ex; } } return order; } // This method exists to provide OrderService methods the ability to save an order // without having to worry about a PricingException being thrown. 
protected Order persist(Order order) { return orderDao.save(order); } @Override @Transactional("blTransactionManager") public void cancelOrder(Order order) { orderDao.delete(order); } @Override @Transactional("blTransactionManager") public void deleteOrder(Order order) { orderDao.delete(order); } @Override @Transactional("blTransactionManager") public Order addOfferCode(Order order, OfferCode offerCode, boolean priceOrder) throws PricingException, OfferMaxUseExceededException { Set<Offer> addedOffers = offerService.getUniqueOffersFromOrder(order); //TODO: give some sort of notification that adding the offer code to the order was unsuccessful if (!order.getAddedOfferCodes().contains(offerCode) && !addedOffers.contains(offerCode.getOffer())) { if (!offerService.verifyMaxCustomerUsageThreshold(order.getCustomer(), offerCode)) { throw new OfferMaxUseExceededException("The customer has used this offer code more than the maximum allowed number of times."); } order.getAddedOfferCodes().add(offerCode); order = save(order, priceOrder); } return order; } @Override @Transactional("blTransactionManager") public Order removeOfferCode(Order order, OfferCode offerCode, boolean priceOrder) throws PricingException { order.getAddedOfferCodes().remove(offerCode); order = save(order, priceOrder); return order; } @Override @Transactional("blTransactionManager") public Order removeAllOfferCodes(Order order, boolean priceOrder) throws PricingException { order.getAddedOfferCodes().clear(); order = save(order, priceOrder); return order; } @Override @ManagedAttribute(description="The delete empty named order after adding items to cart attribute", currencyTimeLimit=15) public void setDeleteEmptyNamedOrders(boolean deleteEmptyNamedOrders) { this.deleteEmptyNamedOrders = deleteEmptyNamedOrders; } @Override public OrderItem findLastMatchingItem(Order order, Long skuId, Long productId) { if (order.getOrderItems() != null) { for (int i=(order.getOrderItems().size()-1); i >= 0; i--) { OrderItem currentItem = (order.getOrderItems().get(i)); if (currentItem instanceof DiscreteOrderItem) { DiscreteOrderItem discreteItem = (DiscreteOrderItem) currentItem; if (skuId != null) { if (discreteItem.getSku() != null && skuId.equals(discreteItem.getSku().getId())) { return discreteItem; } } else if (productId != null && discreteItem.getProduct() != null && productId.equals(discreteItem.getProduct().getId())) { return discreteItem; } } else if (currentItem instanceof BundleOrderItem) { BundleOrderItem bundleItem = (BundleOrderItem) currentItem; if (skuId != null) { if (bundleItem.getSku() != null && skuId.equals(bundleItem.getSku().getId())) { return bundleItem; } } else if (productId != null && bundleItem.getProduct() != null && productId.equals(bundleItem.getProduct().getId())) { return bundleItem; } } } } return null; } @Override @Transactional("blTransactionManager") public Order confirmOrder(Order order) { return orderDao.submitOrder(order); } @Override @Transactional("blTransactionManager") public Order addAllItemsFromNamedOrder(Order namedOrder, boolean priceOrder) throws RemoveFromCartException, AddToCartException { Order cartOrder = orderDao.readCartForCustomer(namedOrder.getCustomer()); if (cartOrder == null) { cartOrder = createNewCartForCustomer(namedOrder.getCustomer()); } List<OrderItem> items = new ArrayList<OrderItem>(namedOrder.getOrderItems()); for (OrderItem item : items) { if (moveNamedOrderItems) { removeItem(namedOrder.getId(), item.getId(), false); } OrderItemRequestDTO orderItemRequest = 
orderItemService.buildOrderItemRequestDTOFromOrderItem(item); cartOrder = addItem(cartOrder.getId(), orderItemRequest, priceOrder); } if (deleteEmptyNamedOrders) { cancelOrder(namedOrder); } return cartOrder; } @Override @Transactional("blTransactionManager") public Order addItemFromNamedOrder(Order namedOrder, OrderItem item, boolean priceOrder) throws RemoveFromCartException, AddToCartException { Order cartOrder = orderDao.readCartForCustomer(namedOrder.getCustomer()); if (cartOrder == null) { cartOrder = createNewCartForCustomer(namedOrder.getCustomer()); } if (moveNamedOrderItems) { removeItem(namedOrder.getId(), item.getId(), false); } OrderItemRequestDTO orderItemRequest = orderItemService.buildOrderItemRequestDTOFromOrderItem(item); cartOrder = addItem(cartOrder.getId(), orderItemRequest, priceOrder); if (namedOrder.getOrderItems().size() == 0 && deleteEmptyNamedOrders) { cancelOrder(namedOrder); } return cartOrder; } @Override @Transactional("blTransactionManager") public Order addItemFromNamedOrder(Order namedOrder, OrderItem item, int quantity, boolean priceOrder) throws RemoveFromCartException, AddToCartException, UpdateCartException { // Validate that the quantity requested makes sense if (quantity < 1 || quantity > item.getQuantity()) { throw new IllegalArgumentException("Cannot move 0 or less quantity"); } else if (quantity == item.getQuantity()) { return addItemFromNamedOrder(namedOrder, item, priceOrder); } Order cartOrder = orderDao.readCartForCustomer(namedOrder.getCustomer()); if (cartOrder == null) { cartOrder = createNewCartForCustomer(namedOrder.getCustomer()); } if (moveNamedOrderItems) { // Update the old item to its new quantity only if we're moving items OrderItemRequestDTO orderItemRequestDTO = new OrderItemRequestDTO(); orderItemRequestDTO.setOrderItemId(item.getId()); orderItemRequestDTO.setQuantity(item.getQuantity() - quantity); updateItemQuantity(namedOrder.getId(), orderItemRequestDTO, false); } OrderItemRequestDTO orderItemRequest = orderItemService.buildOrderItemRequestDTOFromOrderItem(item); orderItemRequest.setQuantity(quantity); cartOrder = addItem(cartOrder.getId(), orderItemRequest, priceOrder); return cartOrder; } @Override @Transactional("blTransactionManager") public OrderItem addGiftWrapItemToOrder(Order order, GiftWrapOrderItemRequest itemRequest, boolean priceOrder) throws PricingException { GiftWrapOrderItem item = orderItemService.createGiftWrapOrderItem(itemRequest); item.setOrder(order); item = (GiftWrapOrderItem) orderItemService.saveOrderItem(item); order.getOrderItems().add(item); order = save(order, priceOrder); return item; } @Override @Transactional(value = "blTransactionManager", rollbackFor = {AddToCartException.class}) public Order addItem(Long orderId, OrderItemRequestDTO orderItemRequestDTO, boolean priceOrder) throws AddToCartException { // Don't allow overrides from this method. 
orderItemRequestDTO.setOverrideRetailPrice(null); orderItemRequestDTO.setOverrideSalePrice(null); return addItemWithPriceOverrides(orderId, orderItemRequestDTO, priceOrder); } @Override @Transactional(value = "blTransactionManager", rollbackFor = { AddToCartException.class }) public Order addItemWithPriceOverrides(Long orderId, OrderItemRequestDTO orderItemRequestDTO, boolean priceOrder) throws AddToCartException { Order order = findOrderById(orderId); if (automaticallyMergeLikeItems) { OrderItem item = findMatchingItem(order, orderItemRequestDTO); if (item != null) { orderItemRequestDTO.setQuantity(item.getQuantity() + orderItemRequestDTO.getQuantity()); orderItemRequestDTO.setOrderItemId(item.getId()); try { return updateItemQuantity(orderId, orderItemRequestDTO, priceOrder); } catch (RemoveFromCartException e) { throw new AddToCartException("Unexpected error - system tried to remove item while adding to cart", e); } catch (UpdateCartException e) { throw new AddToCartException("Could not update quantity for matched item", e); } } } try { CartOperationRequest cartOpRequest = new CartOperationRequest(findOrderById(orderId), orderItemRequestDTO, priceOrder); CartOperationContext context = (CartOperationContext) addItemWorkflow.doActivities(cartOpRequest); return context.getSeedData().getOrder(); } catch (WorkflowException e) { throw new AddToCartException("Could not add to cart", getCartOperationExceptionRootCause(e)); } } @Override @Transactional(value = "blTransactionManager", rollbackFor = {UpdateCartException.class, RemoveFromCartException.class}) public Order updateItemQuantity(Long orderId, OrderItemRequestDTO orderItemRequestDTO, boolean priceOrder) throws UpdateCartException, RemoveFromCartException { if (orderItemRequestDTO.getQuantity() == 0) { return removeItem(orderId, orderItemRequestDTO.getOrderItemId(), priceOrder); } try { CartOperationRequest cartOpRequest = new CartOperationRequest(findOrderById(orderId), orderItemRequestDTO, priceOrder); CartOperationContext context = (CartOperationContext) updateItemWorkflow.doActivities(cartOpRequest); return context.getSeedData().getOrder(); } catch (WorkflowException e) { throw new UpdateCartException("Could not update cart quantity", getCartOperationExceptionRootCause(e)); } } @Override @Transactional(value = "blTransactionManager", rollbackFor = {RemoveFromCartException.class}) public Order removeItem(Long orderId, Long orderItemId, boolean priceOrder) throws RemoveFromCartException { try { OrderItemRequestDTO orderItemRequestDTO = new OrderItemRequestDTO(); orderItemRequestDTO.setOrderItemId(orderItemId); CartOperationRequest cartOpRequest = new CartOperationRequest(findOrderById(orderId), orderItemRequestDTO, priceOrder); CartOperationContext context = (CartOperationContext) removeItemWorkflow.doActivities(cartOpRequest); return context.getSeedData().getOrder(); } catch (WorkflowException e) { throw new RemoveFromCartException("Could not remove from cart", getCartOperationExceptionRootCause(e)); } } @Override @Transactional(value = "blTransactionManager", rollbackFor = { RemoveFromCartException.class }) public Order removeInactiveItems(Long orderId, boolean priceOrder) throws RemoveFromCartException { Order order = findOrderById(orderId); try { for (OrderItem currentItem : new ArrayList<OrderItem>(order.getOrderItems())) { if (!currentItem.isSkuActive()) { removeItem(orderId, currentItem.getId(), priceOrder); } } } catch (Exception e) { throw new RemoveFromCartException("Could not remove from cart", e.getCause()); } return 
findOrderById(orderId); } @Override public boolean getAutomaticallyMergeLikeItems() { return automaticallyMergeLikeItems; } @Override public void setAutomaticallyMergeLikeItems(boolean automaticallyMergeLikeItems) { this.automaticallyMergeLikeItems = automaticallyMergeLikeItems; } @Override @ManagedAttribute(description="The move item from named order when adding to the cart attribute", currencyTimeLimit=15) public boolean isMoveNamedOrderItems() { return moveNamedOrderItems; } @Override @ManagedAttribute(description="The move item from named order when adding to the cart attribute", currencyTimeLimit=15) public void setMoveNamedOrderItems(boolean moveNamedOrderItems) { this.moveNamedOrderItems = moveNamedOrderItems; } @Override @ManagedAttribute(description="The delete empty named order after adding items to cart attribute", currencyTimeLimit=15) public boolean isDeleteEmptyNamedOrders() { return deleteEmptyNamedOrders; } @Override @Transactional("blTransactionManager") public void removeAllPaymentsFromOrder(Order order) { removePaymentsFromOrder(order, null); } @Override @Transactional("blTransactionManager") public void removePaymentsFromOrder(Order order, PaymentInfoType paymentInfoType) { List<PaymentInfo> infos = new ArrayList<PaymentInfo>(); for (PaymentInfo paymentInfo : order.getPaymentInfos()) { if (paymentInfoType == null || paymentInfoType.equals(paymentInfo.getType())) { infos.add(paymentInfo); } } order.getPaymentInfos().removeAll(infos); for (PaymentInfo paymentInfo : infos) { try { securePaymentInfoService.findAndRemoveSecurePaymentInfo(paymentInfo.getReferenceNumber(), paymentInfo.getType()); } catch (WorkflowException e) { // do nothing--this is an acceptable condition LOG.debug("No secure payment is associated with the PaymentInfo", e); } order.getPaymentInfos().remove(paymentInfo); paymentInfo = paymentInfoDao.readPaymentInfoById(paymentInfo.getId()); paymentInfoDao.delete(paymentInfo); } } @Override @Transactional("blTransactionManager") public void removePaymentFromOrder(Order order, PaymentInfo paymentInfo){ PaymentInfo paymentInfoToRemove = null; for(PaymentInfo info : order.getPaymentInfos()){ if(info.equals(paymentInfo)){ paymentInfoToRemove = info; } } if(paymentInfoToRemove != null){ try { securePaymentInfoService.findAndRemoveSecurePaymentInfo(paymentInfoToRemove.getReferenceNumber(), paymentInfo.getType()); } catch (WorkflowException e) { // do nothing--this is an acceptable condition LOG.debug("No secure payment is associated with the PaymentInfo", e); } order.getPaymentInfos().remove(paymentInfoToRemove); paymentInfo = paymentInfoDao.readPaymentInfoById(paymentInfoToRemove.getId()); paymentInfoDao.delete(paymentInfo); } } /** * This method will return the exception that is immediately below the deepest * WorkflowException in the current stack trace. * * @param e the workflow exception that contains the requested root cause * @return the root cause of the workflow exception */ protected Throwable getCartOperationExceptionRootCause(WorkflowException e) { Throwable cause = e.getCause(); if (cause == null) { return e; } Throwable currentCause = cause; while (currentCause.getCause() != null) { currentCause = currentCause.getCause(); if (currentCause instanceof WorkflowException) { cause = currentCause.getCause(); } } return cause; } /** * Returns true if the two items attributes exactly match. 
* @param item1 * @param item2 * @return */ protected boolean compareAttributes(Map<String, OrderItemAttribute> item1Attributes, OrderItemRequestDTO item2) { int item1AttributeSize = item1Attributes == null ? 0 : item1Attributes.size(); int item2AttributeSize = item2.getItemAttributes() == null ? 0 : item2.getItemAttributes().size(); if (item1AttributeSize != item2AttributeSize) { return false; } for (String key : item2.getItemAttributes().keySet()) { String itemOneValue = (item1Attributes.get(key) == null) ? null : item1Attributes.get(key).getValue(); String itemTwoValue = item2.getItemAttributes().get(key); if (!itemTwoValue.equals(itemOneValue)) { return false; } } return true; } protected boolean itemMatches(Sku item1Sku, Product item1Product, Map<String, OrderItemAttribute> item1Attributes, OrderItemRequestDTO item2) { // Must match on SKU and options if (item1Sku != null && item2.getSkuId() != null) { if (item1Sku.getId().equals(item2.getSkuId())) { return true; } } else { if (item1Product != null && item2.getProductId() != null) { if (item1Product.getId().equals(item2.getProductId())) { return compareAttributes(item1Attributes, item2); } } } return false; } protected OrderItem findMatchingItem(Order order, OrderItemRequestDTO itemToFind) { if (order == null) { return null; } for (OrderItem currentItem : order.getOrderItems()) { if (currentItem instanceof DiscreteOrderItem) { DiscreteOrderItem discreteItem = (DiscreteOrderItem) currentItem; if (itemMatches(discreteItem.getSku(), discreteItem.getProduct(), discreteItem.getOrderItemAttributes(), itemToFind)) { return discreteItem; } } else if (currentItem instanceof BundleOrderItem) { BundleOrderItem bundleItem = (BundleOrderItem) currentItem; if (itemMatches(bundleItem.getSku(), bundleItem.getProduct(), null, itemToFind)) { return bundleItem; } } } return null; } @Override public void printOrder(Order order, Log log) { if (!log.isDebugEnabled()) { return; } TableCreator tc = new TableCreator(new TableCreator.Col[] { new TableCreator.Col("Order Item", 30), new TableCreator.Col("Qty"), new TableCreator.Col("Unit Price"), new TableCreator.Col("Avg Adj"), new TableCreator.Col("Total Adj"), new TableCreator.Col("Total Price") }); for (OrderItem oi : order.getOrderItems()) { tc.addRow(new String[] { oi.getName(), String.valueOf(oi.getQuantity()), String.valueOf(oi.getPriceBeforeAdjustments(true)), String.valueOf(oi.getAverageAdjustmentValue()), String.valueOf(oi.getTotalAdjustmentValue()), String.valueOf(oi.getTotalPrice()) }); } tc.addSeparator() .withGlobalRowHeaderWidth(15) .addRow("Subtotal", order.getSubTotal()) .addRow("Order Adj.", order.getOrderAdjustmentsValue()) .addRow("Tax", order.getTotalTax()) .addRow("Shipping", order.getTotalShipping()) .addRow("Total", order.getTotal()) .addSeparator(); log.debug(tc.toString()); } }
1no label
core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_order_service_OrderServiceImpl.java
571
config1.addListenerConfig(new ListenerConfig(new LifecycleListener() { public void stateChanged(final LifecycleEvent event) { switch (event.getState()) { case MERGING: case MERGED: latch.countDown(); default: break; } } }));
0true
hazelcast_src_test_java_com_hazelcast_cluster_ClusterJoinTest.java
2,683
public class GatewayService extends AbstractLifecycleComponent<GatewayService> implements ClusterStateListener { public static final ClusterBlock STATE_NOT_RECOVERED_BLOCK = new ClusterBlock(1, "state not recovered / initialized", true, true, RestStatus.SERVICE_UNAVAILABLE, ClusterBlockLevel.ALL); private final Gateway gateway; private final ThreadPool threadPool; private final AllocationService allocationService; private final ClusterService clusterService; private final DiscoveryService discoveryService; private final TimeValue recoverAfterTime; private final int recoverAfterNodes; private final int expectedNodes; private final int recoverAfterDataNodes; private final int expectedDataNodes; private final int recoverAfterMasterNodes; private final int expectedMasterNodes; private final AtomicBoolean recovered = new AtomicBoolean(); private final AtomicBoolean scheduledRecovery = new AtomicBoolean(); @Inject public GatewayService(Settings settings, Gateway gateway, AllocationService allocationService, ClusterService clusterService, DiscoveryService discoveryService, ThreadPool threadPool) { super(settings); this.gateway = gateway; this.allocationService = allocationService; this.clusterService = clusterService; this.discoveryService = discoveryService; this.threadPool = threadPool; // allow to control a delay of when indices will get created this.recoverAfterTime = componentSettings.getAsTime("recover_after_time", null); this.recoverAfterNodes = componentSettings.getAsInt("recover_after_nodes", -1); this.expectedNodes = componentSettings.getAsInt("expected_nodes", -1); this.recoverAfterDataNodes = componentSettings.getAsInt("recover_after_data_nodes", -1); this.expectedDataNodes = componentSettings.getAsInt("expected_data_nodes", -1); // default the recover after master nodes to the minimum master nodes in the discovery this.recoverAfterMasterNodes = componentSettings.getAsInt("recover_after_master_nodes", settings.getAsInt("discovery.zen.minimum_master_nodes", -1)); this.expectedMasterNodes = componentSettings.getAsInt("expected_master_nodes", -1); // Add the not recovered as initial state block, we don't allow anything until this.clusterService.addInitialStateBlock(STATE_NOT_RECOVERED_BLOCK); } @Override protected void doStart() throws ElasticsearchException { gateway.start(); // if we received initial state, see if we can recover within the start phase, so we hold the // node from starting until we recovered properly if (discoveryService.initialStateReceived()) { ClusterState clusterState = clusterService.state(); DiscoveryNodes nodes = clusterState.nodes(); if (clusterState.nodes().localNodeMaster() && clusterState.blocks().hasGlobalBlock(STATE_NOT_RECOVERED_BLOCK)) { if (clusterState.blocks().hasGlobalBlock(Discovery.NO_MASTER_BLOCK)) { logger.debug("not recovering from gateway, no master elected yet"); } else if (recoverAfterNodes != -1 && (nodes.masterAndDataNodes().size()) < recoverAfterNodes) { logger.debug("not recovering from gateway, nodes_size (data+master) [" + nodes.masterAndDataNodes().size() + "] < recover_after_nodes [" + recoverAfterNodes + "]"); } else if (recoverAfterDataNodes != -1 && nodes.dataNodes().size() < recoverAfterDataNodes) { logger.debug("not recovering from gateway, nodes_size (data) [" + nodes.dataNodes().size() + "] < recover_after_data_nodes [" + recoverAfterDataNodes + "]"); } else if (recoverAfterMasterNodes != -1 && nodes.masterNodes().size() < recoverAfterMasterNodes) { logger.debug("not recovering from gateway, nodes_size (master) [" + 
nodes.masterNodes().size() + "] < recover_after_master_nodes [" + recoverAfterMasterNodes + "]"); } else { boolean ignoreRecoverAfterTime; if (expectedNodes == -1 && expectedMasterNodes == -1 && expectedDataNodes == -1) { // no expected is set, don't ignore the timeout ignoreRecoverAfterTime = false; } else { // one of the expected is set, see if all of them meet the need, and ignore the timeout in this case ignoreRecoverAfterTime = true; if (expectedNodes != -1 && (nodes.masterAndDataNodes().size() < expectedNodes)) { // does not meet the expected... ignoreRecoverAfterTime = false; } if (expectedMasterNodes != -1 && (nodes.masterNodes().size() < expectedMasterNodes)) { // does not meet the expected... ignoreRecoverAfterTime = false; } if (expectedDataNodes != -1 && (nodes.dataNodes().size() < expectedDataNodes)) { // does not meet the expected... ignoreRecoverAfterTime = false; } } performStateRecovery(ignoreRecoverAfterTime); } } } else { logger.debug("can't wait on start for (possibly) reading state from gateway, will do it asynchronously"); } clusterService.addLast(this); } @Override protected void doStop() throws ElasticsearchException { clusterService.remove(this); gateway.stop(); } @Override protected void doClose() throws ElasticsearchException { gateway.close(); } @Override public void clusterChanged(final ClusterChangedEvent event) { if (lifecycle.stoppedOrClosed()) { return; } if (event.state().blocks().hasGlobalBlock(Discovery.NO_MASTER_BLOCK)) { // we need to clear those flags, since we might need to recover again in case we disconnect // from the cluster and then reconnect recovered.set(false); scheduledRecovery.set(false); } if (event.localNodeMaster() && event.state().blocks().hasGlobalBlock(STATE_NOT_RECOVERED_BLOCK)) { ClusterState clusterState = event.state(); DiscoveryNodes nodes = clusterState.nodes(); if (event.state().blocks().hasGlobalBlock(Discovery.NO_MASTER_BLOCK)) { logger.debug("not recovering from gateway, no master elected yet"); } else if (recoverAfterNodes != -1 && (nodes.masterAndDataNodes().size()) < recoverAfterNodes) { logger.debug("not recovering from gateway, nodes_size (data+master) [" + nodes.masterAndDataNodes().size() + "] < recover_after_nodes [" + recoverAfterNodes + "]"); } else if (recoverAfterDataNodes != -1 && nodes.dataNodes().size() < recoverAfterDataNodes) { logger.debug("not recovering from gateway, nodes_size (data) [" + nodes.dataNodes().size() + "] < recover_after_data_nodes [" + recoverAfterDataNodes + "]"); } else if (recoverAfterMasterNodes != -1 && nodes.masterNodes().size() < recoverAfterMasterNodes) { logger.debug("not recovering from gateway, nodes_size (master) [" + nodes.masterNodes().size() + "] < recover_after_master_nodes [" + recoverAfterMasterNodes + "]"); } else { boolean ignoreRecoverAfterTime; if (expectedNodes == -1 && expectedMasterNodes == -1 && expectedDataNodes == -1) { // no expected is set, don't ignore the timeout ignoreRecoverAfterTime = false; } else { // one of the expected is set, see if all of them meet the need, and ignore the timeout in this case ignoreRecoverAfterTime = true; if (expectedNodes != -1 && (nodes.masterAndDataNodes().size() < expectedNodes)) { // does not meet the expected... ignoreRecoverAfterTime = false; } if (expectedMasterNodes != -1 && (nodes.masterNodes().size() < expectedMasterNodes)) { // does not meet the expected... ignoreRecoverAfterTime = false; } if (expectedDataNodes != -1 && (nodes.dataNodes().size() < expectedDataNodes)) { // does not meet the expected... 
ignoreRecoverAfterTime = false; } } final boolean fIgnoreRecoverAfterTime = ignoreRecoverAfterTime; threadPool.generic().execute(new Runnable() { @Override public void run() { performStateRecovery(fIgnoreRecoverAfterTime); } }); } } } private void performStateRecovery(boolean ignoreRecoverAfterTime) { final Gateway.GatewayStateRecoveredListener recoveryListener = new GatewayRecoveryListener(new CountDownLatch(1)); if (!ignoreRecoverAfterTime && recoverAfterTime != null) { if (scheduledRecovery.compareAndSet(false, true)) { logger.debug("delaying initial state recovery for [{}]", recoverAfterTime); threadPool.schedule(recoverAfterTime, ThreadPool.Names.GENERIC, new Runnable() { @Override public void run() { if (recovered.compareAndSet(false, true)) { logger.trace("performing state recovery..."); gateway.performStateRecovery(recoveryListener); } } }); } } else { if (recovered.compareAndSet(false, true)) { logger.trace("performing state recovery..."); gateway.performStateRecovery(recoveryListener); } } } class GatewayRecoveryListener implements Gateway.GatewayStateRecoveredListener { private final CountDownLatch latch; GatewayRecoveryListener(CountDownLatch latch) { this.latch = latch; } @Override public void onSuccess(final ClusterState recoveredState) { logger.trace("successful state recovery, importing cluster state..."); clusterService.submitStateUpdateTask("local-gateway-elected-state", new ProcessedClusterStateUpdateTask() { @Override public ClusterState execute(ClusterState currentState) { assert currentState.metaData().indices().isEmpty(); // remove the block, since we recovered from gateway ClusterBlocks.Builder blocks = ClusterBlocks.builder() .blocks(currentState.blocks()) .blocks(recoveredState.blocks()) .removeGlobalBlock(STATE_NOT_RECOVERED_BLOCK); MetaData.Builder metaDataBuilder = MetaData.builder(recoveredState.metaData()); // automatically generate a UID for the metadata if we need to metaDataBuilder.generateUuidIfNeeded(); if (recoveredState.metaData().settings().getAsBoolean(MetaData.SETTING_READ_ONLY, false) || currentState.metaData().settings().getAsBoolean(MetaData.SETTING_READ_ONLY, false)) { blocks.addGlobalBlock(MetaData.CLUSTER_READ_ONLY_BLOCK); } for (IndexMetaData indexMetaData : recoveredState.metaData()) { metaDataBuilder.put(indexMetaData, false); blocks.addBlocks(indexMetaData); } // update the state to reflect the new metadata and routing ClusterState updatedState = ClusterState.builder(currentState) .blocks(blocks) .metaData(metaDataBuilder) .build(); // initialize all index routing tables as empty RoutingTable.Builder routingTableBuilder = RoutingTable.builder(updatedState.routingTable()); for (ObjectCursor<IndexMetaData> cursor : updatedState.metaData().indices().values()) { routingTableBuilder.addAsRecovery(cursor.value); } // start with 0 based versions for routing table routingTableBuilder.version(0); // now, reroute RoutingAllocation.Result routingResult = allocationService.reroute(ClusterState.builder(updatedState).routingTable(routingTableBuilder).build()); return ClusterState.builder(updatedState).routingResult(routingResult).build(); } @Override public void onFailure(String source, Throwable t) { logger.error("unexpected failure during [{}]", t, source); } @Override public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) { logger.info("recovered [{}] indices into cluster_state", newState.metaData().indices().size()); latch.countDown(); } }); } @Override public void onFailure(String message) { 
recovered.set(false); scheduledRecovery.set(false); // don't remove the block here, we don't want to allow anything in such a case logger.info("metadata state not restored, reason: {}", message); } } }
1no label
src_main_java_org_elasticsearch_gateway_GatewayService.java
3,040
public final class BloomFilterPostingsFormat extends PostingsFormat { public static final String BLOOM_CODEC_NAME = "XBloomFilter"; // the Lucene one is named BloomFilter public static final int BLOOM_CODEC_VERSION = 1; /** * Extension of Bloom Filters file */ static final String BLOOM_EXTENSION = "blm"; private BloomFilter.Factory bloomFilterFactory = BloomFilter.Factory.DEFAULT; private PostingsFormat delegatePostingsFormat; /** * Creates Bloom filters for a selection of fields created in the index. This * is recorded as a set of Bitsets held as a segment summary in an additional * "blm" file. This PostingsFormat delegates to a choice of delegate * PostingsFormat for encoding all other postings data. * * @param delegatePostingsFormat The PostingsFormat that records all the non-bloom filter data i.e. * postings info. * @param bloomFilterFactory The {@link BloomFilter.Factory} responsible for sizing BloomFilters * appropriately */ public BloomFilterPostingsFormat(PostingsFormat delegatePostingsFormat, BloomFilter.Factory bloomFilterFactory) { super(BLOOM_CODEC_NAME); this.delegatePostingsFormat = delegatePostingsFormat; this.bloomFilterFactory = bloomFilterFactory; } // Used only by core Lucene at read-time via Service Provider instantiation - // do not use at Write-time in application code. public BloomFilterPostingsFormat() { super(BLOOM_CODEC_NAME); } @Override public BloomFilteredFieldsConsumer fieldsConsumer(SegmentWriteState state) throws IOException { if (delegatePostingsFormat == null) { throw new UnsupportedOperationException("Error - " + getClass().getName() + " has been constructed without a choice of PostingsFormat"); } return new BloomFilteredFieldsConsumer( delegatePostingsFormat.fieldsConsumer(state), state, delegatePostingsFormat); } @Override public BloomFilteredFieldsProducer fieldsProducer(SegmentReadState state) throws IOException { return new BloomFilteredFieldsProducer(state); } public final class BloomFilteredFieldsProducer extends FieldsProducer { private FieldsProducer delegateFieldsProducer; HashMap<String, BloomFilter> bloomsByFieldName = new HashMap<String, BloomFilter>(); // for internal use only FieldsProducer getDelegate() { return delegateFieldsProducer; } public BloomFilteredFieldsProducer(SegmentReadState state) throws IOException { String bloomFileName = IndexFileNames.segmentFileName( state.segmentInfo.name, state.segmentSuffix, BLOOM_EXTENSION); IndexInput bloomIn = null; boolean success = false; try { bloomIn = state.directory.openInput(bloomFileName, state.context); CodecUtil.checkHeader(bloomIn, BLOOM_CODEC_NAME, BLOOM_CODEC_VERSION, BLOOM_CODEC_VERSION); // // Load the hash function used in the BloomFilter // hashFunction = HashFunction.forName(bloomIn.readString()); // Load the delegate postings format PostingsFormat delegatePostingsFormat = PostingsFormat.forName(bloomIn .readString()); this.delegateFieldsProducer = delegatePostingsFormat .fieldsProducer(state); int numBlooms = bloomIn.readInt(); boolean load = true; Store.StoreDirectory storeDir = DirectoryUtils.getStoreDirectory(state.directory); if (storeDir != null && storeDir.codecService() != null) { load = storeDir.codecService().isLoadBloomFilter(); } if (load && state.context.context != IOContext.Context.MERGE) { // if we merge we don't need to load the bloom filters for (int i = 0; i < numBlooms; i++) { int fieldNum = bloomIn.readInt(); BloomFilter bloom = BloomFilter.deserialize(bloomIn); FieldInfo fieldInfo = state.fieldInfos.fieldInfo(fieldNum); bloomsByFieldName.put(fieldInfo.name, 
bloom); } } IOUtils.close(bloomIn); success = true; } finally { if (!success) { IOUtils.closeWhileHandlingException(bloomIn, delegateFieldsProducer); } } } @Override public Iterator<String> iterator() { return delegateFieldsProducer.iterator(); } @Override public void close() throws IOException { delegateFieldsProducer.close(); } @Override public Terms terms(String field) throws IOException { BloomFilter filter = bloomsByFieldName.get(field); if (filter == null) { return delegateFieldsProducer.terms(field); } else { Terms result = delegateFieldsProducer.terms(field); if (result == null) { return null; } return new BloomFilteredTerms(result, filter); } } @Override public int size() { return delegateFieldsProducer.size(); } public long getUniqueTermCount() throws IOException { return delegateFieldsProducer.getUniqueTermCount(); } @Override public long ramBytesUsed() { long size = delegateFieldsProducer.ramBytesUsed(); for (BloomFilter bloomFilter : bloomsByFieldName.values()) { size += bloomFilter.getSizeInBytes(); } return size; } } public static final class BloomFilteredTerms extends FilterAtomicReader.FilterTerms { private BloomFilter filter; public BloomFilteredTerms(Terms terms, BloomFilter filter) { super(terms); this.filter = filter; } public BloomFilter getFilter() { return filter; } @Override public TermsEnum iterator(TermsEnum reuse) throws IOException { TermsEnum result; if ((reuse != null) && (reuse instanceof BloomFilteredTermsEnum)) { // recycle the existing BloomFilteredTermsEnum by asking the delegate // to recycle its contained TermsEnum BloomFilteredTermsEnum bfte = (BloomFilteredTermsEnum) reuse; if (bfte.filter == filter) { bfte.reset(this.in); return bfte; } reuse = bfte.reuse; } // We have been handed something we cannot reuse (either null, wrong // class or wrong filter) so allocate a new object result = new BloomFilteredTermsEnum(this.in, reuse, filter); return result; } } static final class BloomFilteredTermsEnum extends TermsEnum { private Terms delegateTerms; private TermsEnum delegateTermsEnum; private TermsEnum reuse; private BloomFilter filter; public BloomFilteredTermsEnum(Terms other, TermsEnum reuse, BloomFilter filter) { this.delegateTerms = other; this.reuse = reuse; this.filter = filter; } void reset(Terms others) { reuse = this.delegateTermsEnum; this.delegateTermsEnum = null; this.delegateTerms = others; } private TermsEnum getDelegate() throws IOException { if (delegateTermsEnum == null) { /* pull the iterator only if we really need it - * this can be a relatively heavy operation depending on the * delegate postings format and they underlying directory * (clone IndexInput) */ delegateTermsEnum = delegateTerms.iterator(reuse); } return delegateTermsEnum; } @Override public final BytesRef next() throws IOException { return getDelegate().next(); } @Override public final Comparator<BytesRef> getComparator() { return delegateTerms.getComparator(); } @Override public final boolean seekExact(BytesRef text) throws IOException { // The magical fail-fast speed up that is the entire point of all of // this code - save a disk seek if there is a match on an in-memory // structure // that may occasionally give a false positive but guaranteed no false // negatives if (!filter.mightContain(text)) { return false; } return getDelegate().seekExact(text); } @Override public final SeekStatus seekCeil(BytesRef text) throws IOException { return getDelegate().seekCeil(text); } @Override public final void seekExact(long ord) throws IOException { getDelegate().seekExact(ord); } 
@Override public final BytesRef term() throws IOException { return getDelegate().term(); } @Override public final long ord() throws IOException { return getDelegate().ord(); } @Override public final int docFreq() throws IOException { return getDelegate().docFreq(); } @Override public final long totalTermFreq() throws IOException { return getDelegate().totalTermFreq(); } @Override public DocsAndPositionsEnum docsAndPositions(Bits liveDocs, DocsAndPositionsEnum reuse, int flags) throws IOException { return getDelegate().docsAndPositions(liveDocs, reuse, flags); } @Override public DocsEnum docs(Bits liveDocs, DocsEnum reuse, int flags) throws IOException { return getDelegate().docs(liveDocs, reuse, flags); } } final class BloomFilteredFieldsConsumer extends FieldsConsumer { private FieldsConsumer delegateFieldsConsumer; private Map<FieldInfo, BloomFilter> bloomFilters = new HashMap<FieldInfo, BloomFilter>(); private SegmentWriteState state; // private PostingsFormat delegatePostingsFormat; public BloomFilteredFieldsConsumer(FieldsConsumer fieldsConsumer, SegmentWriteState state, PostingsFormat delegatePostingsFormat) { this.delegateFieldsConsumer = fieldsConsumer; // this.delegatePostingsFormat=delegatePostingsFormat; this.state = state; } // for internal use only FieldsConsumer getDelegate() { return delegateFieldsConsumer; } @Override public TermsConsumer addField(FieldInfo field) throws IOException { BloomFilter bloomFilter = bloomFilterFactory.createFilter(state.segmentInfo.getDocCount()); if (bloomFilter != null) { assert bloomFilters.containsKey(field) == false; bloomFilters.put(field, bloomFilter); return new WrappedTermsConsumer(delegateFieldsConsumer.addField(field), bloomFilter); } else { // No, use the unfiltered fieldsConsumer - we are not interested in // recording any term Bitsets. 
return delegateFieldsConsumer.addField(field); } } @Override public void close() throws IOException { delegateFieldsConsumer.close(); // Now we are done accumulating values for these fields List<Entry<FieldInfo, BloomFilter>> nonSaturatedBlooms = new ArrayList<Map.Entry<FieldInfo, BloomFilter>>(); for (Entry<FieldInfo, BloomFilter> entry : bloomFilters.entrySet()) { nonSaturatedBlooms.add(entry); } String bloomFileName = IndexFileNames.segmentFileName( state.segmentInfo.name, state.segmentSuffix, BLOOM_EXTENSION); IndexOutput bloomOutput = null; try { bloomOutput = state.directory .createOutput(bloomFileName, state.context); CodecUtil.writeHeader(bloomOutput, BLOOM_CODEC_NAME, BLOOM_CODEC_VERSION); // remember the name of the postings format we will delegate to bloomOutput.writeString(delegatePostingsFormat.getName()); // First field in the output file is the number of fields+blooms saved bloomOutput.writeInt(nonSaturatedBlooms.size()); for (Entry<FieldInfo, BloomFilter> entry : nonSaturatedBlooms) { FieldInfo fieldInfo = entry.getKey(); BloomFilter bloomFilter = entry.getValue(); bloomOutput.writeInt(fieldInfo.number); saveAppropriatelySizedBloomFilter(bloomOutput, bloomFilter, fieldInfo); } } finally { IOUtils.close(bloomOutput); } //We are done with large bitsets so no need to keep them hanging around bloomFilters.clear(); } private void saveAppropriatelySizedBloomFilter(IndexOutput bloomOutput, BloomFilter bloomFilter, FieldInfo fieldInfo) throws IOException { // FuzzySet rightSizedSet = bloomFilterFactory.downsize(fieldInfo, // bloomFilter); // if (rightSizedSet == null) { // rightSizedSet = bloomFilter; // } // rightSizedSet.serialize(bloomOutput); BloomFilter.serilaize(bloomFilter, bloomOutput); } } class WrappedTermsConsumer extends TermsConsumer { private TermsConsumer delegateTermsConsumer; private BloomFilter bloomFilter; public WrappedTermsConsumer(TermsConsumer termsConsumer, BloomFilter bloomFilter) { this.delegateTermsConsumer = termsConsumer; this.bloomFilter = bloomFilter; } @Override public PostingsConsumer startTerm(BytesRef text) throws IOException { return delegateTermsConsumer.startTerm(text); } @Override public void finishTerm(BytesRef text, TermStats stats) throws IOException { // Record this term in our BloomFilter if (stats.docFreq > 0) { bloomFilter.put(text); } delegateTermsConsumer.finishTerm(text, stats); } @Override public void finish(long sumTotalTermFreq, long sumDocFreq, int docCount) throws IOException { delegateTermsConsumer.finish(sumTotalTermFreq, sumDocFreq, docCount); } @Override public Comparator<BytesRef> getComparator() throws IOException { return delegateTermsConsumer.getComparator(); } } public PostingsFormat getDelegate() { return this.delegatePostingsFormat; } }
0true
src_main_java_org_elasticsearch_index_codec_postingsformat_BloomFilterPostingsFormat.java
4,446
return (FD) cache.get(key, new Callable<AtomicFieldData>() { @Override public AtomicFieldData call() throws Exception { SegmentReaderUtils.registerCoreListener(context.reader(), IndexFieldCache.this); AtomicFieldData fieldData = indexFieldData.loadDirect(context); if (indexService != null) { ShardId shardId = ShardUtils.extractShardId(context.reader()); if (shardId != null) { IndexShard shard = indexService.shard(shardId.id()); if (shard != null) { key.listener = shard.fieldData(); } } } if (key.listener != null) { key.listener.onLoad(fieldNames, fieldDataType, fieldData); } return fieldData; } });
1no label
src_main_java_org_elasticsearch_indices_fielddata_cache_IndicesFieldDataCache.java
1,752
private static class AlwaysDistanceBoundingCheck implements DistanceBoundingCheck { @Override public boolean isWithin(double targetLatitude, double targetLongitude) { return true; } @Override public GeoPoint topLeft() { return null; } @Override public GeoPoint bottomRight() { return null; } }
0true
src_main_java_org_elasticsearch_common_geo_GeoDistance.java
286
public class PassthroughEncryptionModule implements EncryptionModule { protected static final Logger LOG = LogManager.getLogger(PassthroughEncryptionModule.class); protected RuntimeEnvironmentKeyResolver keyResolver = new SystemPropertyRuntimeEnvironmentKeyResolver(); public PassthroughEncryptionModule() { if ("production".equals(keyResolver.resolveRuntimeEnvironmentKey())) { LOG.warn("This passthrough encryption module provides NO ENCRYPTION and should NOT be used in production."); } } public String decrypt(String cipherText) { return cipherText; } public String encrypt(String plainText) { return plainText; } }
0true
common_src_main_java_org_broadleafcommerce_common_encryption_PassthroughEncryptionModule.java
3,032
public class PreBuiltDocValuesFormatProvider implements DocValuesFormatProvider { public static final class Factory implements DocValuesFormatProvider.Factory { private final PreBuiltDocValuesFormatProvider provider; public Factory(DocValuesFormat docValuesFormat) { this(docValuesFormat.getName(), docValuesFormat); } public Factory(String name, DocValuesFormat docValuesFormat) { this.provider = new PreBuiltDocValuesFormatProvider(name, docValuesFormat); } public DocValuesFormatProvider get() { return provider; } @Override public DocValuesFormatProvider create(String name, Settings settings) { return provider; } public String name() { return provider.name(); } } private final String name; private final DocValuesFormat docValuesFormat; public PreBuiltDocValuesFormatProvider(DocValuesFormat docValuesFormat) { this(docValuesFormat.getName(), docValuesFormat); } public PreBuiltDocValuesFormatProvider(String name, DocValuesFormat docValuesFormat) { Preconditions.checkNotNull(docValuesFormat, "DocValuesFormat must not be null"); this.name = name; this.docValuesFormat = docValuesFormat; } @Override public String name() { return name; } @Override public DocValuesFormat get() { return docValuesFormat; } }
0true
src_main_java_org_elasticsearch_index_codec_docvaluesformat_PreBuiltDocValuesFormatProvider.java
2,482
public interface XRejectedExecutionHandler extends RejectedExecutionHandler { /** * The number of rejected executions. */ long rejected(); }
0true
src_main_java_org_elasticsearch_common_util_concurrent_XRejectedExecutionHandler.java
445
static final class Fields { static final XContentBuilderString AVAILABLE_PROCESSORS = new XContentBuilderString("available_processors"); static final XContentBuilderString MEM = new XContentBuilderString("mem"); static final XContentBuilderString TOTAL = new XContentBuilderString("total"); static final XContentBuilderString TOTAL_IN_BYTES = new XContentBuilderString("total_in_bytes"); static final XContentBuilderString CPU = new XContentBuilderString("cpu"); static final XContentBuilderString COUNT = new XContentBuilderString("count"); }
0true
src_main_java_org_elasticsearch_action_admin_cluster_stats_ClusterStatsNodes.java
1,358
private static final class AddEntryResult { private final long pagePointer; private final ORecordVersion recordVersion; private final int recordsSizeDiff; public AddEntryResult(long pagePointer, ORecordVersion recordVersion, int recordsSizeDiff) { this.pagePointer = pagePointer; this.recordVersion = recordVersion; this.recordsSizeDiff = recordsSizeDiff; } }
0true
core_src_main_java_com_orientechnologies_orient_core_storage_impl_local_paginated_wal_depricated_OPaginatedWithoutRidReuseCluster.java
3,376
public static class WithOrdinals extends PackedArrayAtomicFieldData { private final MonotonicAppendingLongBuffer values; private final Ordinals ordinals; public WithOrdinals(MonotonicAppendingLongBuffer values, int numDocs, Ordinals ordinals) { super(numDocs); this.values = values; this.ordinals = ordinals; } @Override public boolean isMultiValued() { return ordinals.isMultiValued(); } @Override public boolean isValuesOrdered() { return true; } @Override public long getMemorySizeInBytes() { if (size == -1) { size = RamUsageEstimator.NUM_BYTES_INT/*size*/ + RamUsageEstimator.NUM_BYTES_INT/*numDocs*/ + values.ramBytesUsed() + ordinals.getMemorySizeInBytes(); } return size; } @Override public long getNumberUniqueValues() { return ordinals.getNumOrds(); } @Override public LongValues getLongValues() { return new LongValues(values, ordinals.ordinals()); } @Override public DoubleValues getDoubleValues() { return new DoubleValues(values, ordinals.ordinals()); } static class LongValues extends org.elasticsearch.index.fielddata.LongValues.WithOrdinals { private final MonotonicAppendingLongBuffer values; LongValues(MonotonicAppendingLongBuffer values, Ordinals.Docs ordinals) { super(ordinals); this.values = values; } @Override public long getValueByOrd(long ord) { assert ord != Ordinals.MISSING_ORDINAL; return values.get(ord - 1); } } static class DoubleValues extends org.elasticsearch.index.fielddata.DoubleValues.WithOrdinals { private final MonotonicAppendingLongBuffer values; DoubleValues(MonotonicAppendingLongBuffer values, Ordinals.Docs ordinals) { super(ordinals); this.values = values; } @Override public double getValueByOrd(long ord) { assert ord != Ordinals.MISSING_ORDINAL; return values.get(ord - 1); } } }
0true
src_main_java_org_elasticsearch_index_fielddata_plain_PackedArrayAtomicFieldData.java
119
public static class StressThread extends Thread { private static final Object READ = new Object(); private static final Object WRITE = new Object(); private static ResourceObject resources[] = new ResourceObject[10]; private final Random rand = new Random( currentTimeMillis() ); static { for ( int i = 0; i < resources.length; i++ ) resources[i] = new ResourceObject( "RX" + i ); } private final CountDownLatch startSignal; private final String name; private final int numberOfIterations; private final int depthCount; private final float readWriteRatio; private final LockManager lm; private volatile Exception error; private final Transaction tx = mock( Transaction.class ); public volatile Long startedWaiting = null; StressThread( String name, int numberOfIterations, int depthCount, float readWriteRatio, LockManager lm, CountDownLatch startSignal ) { super(); this.name = name; this.numberOfIterations = numberOfIterations; this.depthCount = depthCount; this.readWriteRatio = readWriteRatio; this.lm = lm; this.startSignal = startSignal; } @Override public void run() { try { startSignal.await(); java.util.Stack<Object> lockStack = new java.util.Stack<Object>(); java.util.Stack<ResourceObject> resourceStack = new java.util.Stack<ResourceObject>(); for ( int i = 0; i < numberOfIterations; i++ ) { try { int depth = depthCount; do { float f = rand.nextFloat(); int n = rand.nextInt( resources.length ); if ( f < readWriteRatio ) { startedWaiting = currentTimeMillis(); lm.getReadLock( resources[n], tx ); startedWaiting = null; lockStack.push( READ ); } else { startedWaiting = currentTimeMillis(); lm.getWriteLock( resources[n], tx ); startedWaiting = null; lockStack.push( WRITE ); } resourceStack.push( resources[n] ); } while ( --depth > 0 ); } catch ( DeadlockDetectedException e ) { // This is good } finally { releaseAllLocks( lockStack, resourceStack ); } } } catch ( Exception e ) { error = e; } } private void releaseAllLocks( Stack<Object> lockStack, Stack<ResourceObject> resourceStack ) { while ( !lockStack.isEmpty() ) { if ( lockStack.pop() == READ ) { lm.releaseReadLock( resourceStack.pop(), tx ); } else { lm.releaseWriteLock( resourceStack.pop(), tx ); } } } @Override public String toString() { return this.name; } }
0true
community_kernel_src_test_java_org_neo4j_kernel_impl_transaction_TestDeadlockDetection.java
4,197
public class BlobStoreIndexShardSnapshot { /** * Information about snapshotted file */ public static class FileInfo { private final String name; private final String physicalName; private final long length; private final String checksum; private final ByteSizeValue partSize; private final long partBytes; private final long numberOfParts; /** * Constructs a new instance of file info * * @param name file name as stored in the blob store * @param physicalName original file name * @param length total length of the file * @param partSize size of the single chunk * @param checksum checksum for the file */ public FileInfo(String name, String physicalName, long length, ByteSizeValue partSize, String checksum) { this.name = name; this.physicalName = physicalName; this.length = length; this.checksum = checksum; long partBytes = Long.MAX_VALUE; if (partSize != null) { partBytes = partSize.bytes(); } long totalLength = length; long numberOfParts = totalLength / partBytes; if (totalLength % partBytes > 0) { numberOfParts++; } if (numberOfParts == 0) { numberOfParts++; } this.numberOfParts = numberOfParts; this.partSize = partSize; this.partBytes = partBytes; } /** * Returns the base file name * * @return file name */ public String name() { return name; } /** * Returns part name if file is stored as multiple parts * * @param part part number * @return part name */ public String partName(long part) { if (numberOfParts > 1) { return name + ".part" + part; } else { return name; } } /** * Returns base file name from part name * * @param blobName part name * @return base file name */ public static String canonicalName(String blobName) { if (blobName.contains(".part")) { return blobName.substring(0, blobName.indexOf(".part")); } return blobName; } /** * Returns original file name * * @return original file name */ public String physicalName() { return this.physicalName; } /** * File length * * @return file length */ public long length() { return length; } /** * Returns part size * * @return part size */ public ByteSizeValue partSize() { return partSize; } /** * Return maximum number of bytes in a part * * @return maximum number of bytes in a part */ public long partBytes() { return partBytes; } /** * Returns number of parts * * @return number of parts */ public long numberOfParts() { return numberOfParts; } /** * Returns file md5 checksum provided by {@link org.elasticsearch.index.store.Store} * * @return file checksum */ @Nullable public String checksum() { return checksum; } /** * Checks if a file in a store is the same file * * @param md file in a store * @return true if file in a store this this file have the same checksum and length */ public boolean isSame(StoreFileMetaData md) { if (checksum == null || md.checksum() == null) { return false; } return length == md.length() && checksum.equals(md.checksum()); } static final class Fields { static final XContentBuilderString NAME = new XContentBuilderString("name"); static final XContentBuilderString PHYSICAL_NAME = new XContentBuilderString("physical_name"); static final XContentBuilderString LENGTH = new XContentBuilderString("length"); static final XContentBuilderString CHECKSUM = new XContentBuilderString("checksum"); static final XContentBuilderString PART_SIZE = new XContentBuilderString("part_size"); } /** * Serializes file info into JSON * * @param file file info * @param builder XContent builder * @param params parameters * @throws IOException */ public static void toXContent(FileInfo file, XContentBuilder builder, ToXContent.Params params) throws 
IOException { builder.startObject(); builder.field(Fields.NAME, file.name); builder.field(Fields.PHYSICAL_NAME, file.physicalName); builder.field(Fields.LENGTH, file.length); if (file.checksum != null) { builder.field(Fields.CHECKSUM, file.checksum); } if (file.partSize != null) { builder.field(Fields.PART_SIZE, file.partSize.bytes()); } builder.endObject(); } /** * Parses JSON that represents file info * * @param parser parser * @return file info * @throws IOException */ public static FileInfo fromXContent(XContentParser parser) throws IOException { XContentParser.Token token = parser.currentToken(); String name = null; String physicalName = null; long length = -1; String checksum = null; ByteSizeValue partSize = null; if (token == XContentParser.Token.START_OBJECT) { while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { String currentFieldName = parser.currentName(); token = parser.nextToken(); if (token.isValue()) { if ("name".equals(currentFieldName)) { name = parser.text(); } else if ("physical_name".equals(currentFieldName)) { physicalName = parser.text(); } else if ("length".equals(currentFieldName)) { length = parser.longValue(); } else if ("checksum".equals(currentFieldName)) { checksum = parser.text(); } else if ("part_size".equals(currentFieldName)) { partSize = new ByteSizeValue(parser.longValue()); } else { throw new ElasticsearchParseException("unknown parameter [" + currentFieldName + "]"); } } else { throw new ElasticsearchParseException("unexpected token [" + token + "]"); } } else { throw new ElasticsearchParseException("unexpected token [" + token + "]"); } } } // TODO: Verify??? return new FileInfo(name, physicalName, length, partSize, checksum); } } private final String snapshot; private final long indexVersion; private final ImmutableList<FileInfo> indexFiles; /** * Constructs new shard snapshot metadata from snapshot metadata * * @param snapshot snapshot id * @param indexVersion index version * @param indexFiles list of files in the shard */ public BlobStoreIndexShardSnapshot(String snapshot, long indexVersion, List<FileInfo> indexFiles) { assert snapshot != null; assert indexVersion >= 0; this.snapshot = snapshot; this.indexVersion = indexVersion; this.indexFiles = ImmutableList.copyOf(indexFiles); } /** * Returns index version * * @return index version */ public long indexVersion() { return indexVersion; } /** * Returns snapshot id * * @return snapshot id */ public String snapshot() { return snapshot; } /** * Returns list of files in the shard * * @return list of files */ public ImmutableList<FileInfo> indexFiles() { return indexFiles; } /** * Serializes shard snapshot metadata info into JSON * * @param snapshot shard snapshot metadata * @param builder XContent builder * @param params parameters * @throws IOException */ public static void toXContent(BlobStoreIndexShardSnapshot snapshot, XContentBuilder builder, ToXContent.Params params) throws IOException { builder.startObject(); builder.field("name", snapshot.snapshot); builder.field("index-version", snapshot.indexVersion); builder.startArray("files"); for (FileInfo fileInfo : snapshot.indexFiles) { FileInfo.toXContent(fileInfo, builder, params); } builder.endArray(); builder.endObject(); } /** * Parses shard snapshot metadata * * @param parser parser * @return shard snapshot metadata * @throws IOException */ public static BlobStoreIndexShardSnapshot fromXContent(XContentParser parser) throws IOException { String snapshot = null; long indexVersion 
= -1; List<FileInfo> indexFiles = newArrayList(); XContentParser.Token token = parser.currentToken(); if (token == XContentParser.Token.START_OBJECT) { while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { String currentFieldName = parser.currentName(); token = parser.nextToken(); if (token.isValue()) { if ("name".equals(currentFieldName)) { snapshot = parser.text(); } else if ("index-version".equals(currentFieldName)) { indexVersion = parser.longValue(); } else { throw new ElasticsearchParseException("unknown parameter [" + currentFieldName + "]"); } } else if (token == XContentParser.Token.START_ARRAY) { while ((parser.nextToken()) != XContentParser.Token.END_ARRAY) { indexFiles.add(FileInfo.fromXContent(parser)); } } else { throw new ElasticsearchParseException("unexpected token [" + token + "]"); } } else { throw new ElasticsearchParseException("unexpected token [" + token + "]"); } } } return new BlobStoreIndexShardSnapshot(snapshot, indexVersion, ImmutableList.<FileInfo>copyOf(indexFiles)); } /** * Returns true if this snapshot contains a file with a given original name * * @param physicalName original file name * @return true if the file was found, false otherwise */ public boolean containPhysicalIndexFile(String physicalName) { return findPhysicalIndexFile(physicalName) != null; } public FileInfo findPhysicalIndexFile(String physicalName) { for (FileInfo file : indexFiles) { if (file.physicalName().equals(physicalName)) { return file; } } return null; } /** * Returns true if this snapshot contains a file with a given name * * @param name file name * @return true if file was found, false otherwise */ public FileInfo findNameFile(String name) { for (FileInfo file : indexFiles) { if (file.name().equals(name)) { return file; } } return null; } }
1no label
src_main_java_org_elasticsearch_index_snapshots_blobstore_BlobStoreIndexShardSnapshot.java
1,584
public class CriteriaTransferObject { private Integer firstResult; private Integer maxResults; private Map<String, FilterAndSortCriteria> criteriaMap = new HashMap<String, FilterAndSortCriteria>(); private List<FilterMapping> additionalFilterMappings = new ArrayList<FilterMapping>(); /** * The index of records in the database for which a fetch will start. * * @return the index to start, or null */ public Integer getFirstResult() { return firstResult; } /** * The index of records in the datastore for which a fetch will start. * * @param firstResult the index to start, or null */ public void setFirstResult(Integer firstResult) { this.firstResult = firstResult; } /** * The max number of records from the datastore to return. * * @return the max records, or null */ public Integer getMaxResults() { return maxResults; } /** * The max number of records from the datastore to return. * * @param maxResults the max records, or null */ public void setMaxResults(Integer maxResults) { this.maxResults = maxResults; } /** * Add a {@link FilterAndSortCriteria} instance. Contains information about which records are retrieved * and in what direction they're sorted. * * @param criteria {@link FilterAndSortCriteria} */ public void add(FilterAndSortCriteria criteria) { criteriaMap.put(criteria.getPropertyId(), criteria); } /** * Add all {@link FilterAndSortCriteria} instances. Contains information about which records are retrieved * and in what direction they're sorted. * * @param criterias the list of {@link FilterAndSortCriteria} instances to add */ public void addAll(Collection<FilterAndSortCriteria> criterias) { for (FilterAndSortCriteria fasc : criterias) { add(fasc); } } /** * Retrieve the added {@link FilterAndSortCriteria} instances organized into a map * * @return the {@link FilterAndSortCriteria} instances as a map */ public Map<String, FilterAndSortCriteria> getCriteriaMap() { return criteriaMap; } public void setCriteriaMap(Map<String, FilterAndSortCriteria> criteriaMap) { this.criteriaMap = criteriaMap; } public FilterAndSortCriteria get(String name) { if (criteriaMap.containsKey(name)) { return criteriaMap.get(name); } FilterAndSortCriteria criteria = new FilterAndSortCriteria(name); criteriaMap.put(name, criteria); return criteriaMap.get(name); } /** * This list holds additional filter mappings that might have been constructed in a custom persistence * handler. This is only used when very custom filtering needs to occur. */ public List<FilterMapping> getAdditionalFilterMappings() { return additionalFilterMappings; } public void setAdditionalFilterMappings(List<FilterMapping> additionalFilterMappings) { this.additionalFilterMappings = additionalFilterMappings; } }
0true
admin_broadleaf-open-admin-platform_src_main_java_org_broadleafcommerce_openadmin_dto_CriteriaTransferObject.java
1,685
runnable = new Runnable() { public void run() { map.tryPut(null, "value", 1, TimeUnit.SECONDS); } };
0true
hazelcast_src_test_java_com_hazelcast_map_BasicMapTest.java
274
public class InterceptingWriteTransaction extends NeoStoreTransaction { private final TransactionInterceptor interceptor; InterceptingWriteTransaction( long lastCommittedTxWhenTransactionStarted, XaLogicalLog log, NeoStore neoStore, TransactionState state, CacheAccessBackDoor cacheAccess, IndexingService indexingService, LabelScanStore labelScanStore, TransactionInterceptor interceptor, IntegrityValidator validator, KernelTransactionImplementation kernelTransaction, LockService locks ) { super( lastCommittedTxWhenTransactionStarted, log, state, neoStore, cacheAccess, indexingService, labelScanStore, validator, kernelTransaction, locks ); this.interceptor = interceptor; } @Override protected void intercept( List<Command> commands ) { super.intercept( commands ); for ( Command command : commands ) { command.accept( interceptor ); } interceptor.complete(); } }
0true
community_kernel_src_main_java_org_neo4j_kernel_impl_nioneo_xa_InterceptingWriteTransaction.java
1,001
class ReplicaOperationTransportHandler extends BaseTransportRequestHandler<ReplicaOperationRequest> { @Override public ReplicaOperationRequest newInstance() { return new ReplicaOperationRequest(); } @Override public String executor() { return executor; } // we must never reject on replicas because of thread pool capacity @Override public boolean isForceExecution() { return true; } @Override public void messageReceived(final ReplicaOperationRequest request, final TransportChannel channel) throws Exception { shardOperationOnReplica(request); channel.sendResponse(TransportResponse.Empty.INSTANCE); } }
0true
src_main_java_org_elasticsearch_action_support_replication_TransportShardReplicationOperationAction.java