Dataset schema (column, type, value range):
- Unnamed: 0 — int64, 0 to 6.45k (row index)
- func — string, lengths 29 to 253k (function/class source code)
- target — class label, 2 classes (0 = true, 1 = no label)
- project — string, lengths 36 to 167 (source file path)

Row 1,456
func:
public class OCommandExecutorSQLCreateEdgeTest { private ODatabaseDocumentTx db; private ODocument owner1; private ODocument owner2; @BeforeClass public void setUp() throws Exception { db = Orient.instance().getDatabaseFactory().createDatabase("graph", "memory:target/testdb"); if (db.exists()) { db.open("admin", "admin"); db.drop(); } db.create(); final OSchema schema = db.getMetadata().getSchema(); schema.createClass("Owner", schema.getClass("V")); schema.createClass("link", schema.getClass("E")); owner1 = new ODocument("Owner"); owner1.save(); owner2 = new ODocument("Owner"); owner2.save(); } @AfterClass public void tearDown() throws Exception { db.drop(); } @Test public void testParametersBinding() throws Exception { db.command(new OCommandSQL("CREATE EDGE link from " + owner1.getIdentity() + " TO " + owner2.getIdentity() + " SET foo = ?")) .execute("123"); final List<ODocument> list = db.query(new OSQLSynchQuery<Object>("SELECT FROM link")); Assert.assertEquals(list.size(), 1); Assert.assertEquals(list.get(0).field("foo"), "123"); } }
target: 0 (true)
project: graphdb_src_test_java_com_orientechnologies_orient_graph_sql_OCommandExecutorSQLCreateEdgeTest.java

Row 2,675
func:
public class NodeEnvironment extends AbstractComponent { private final File[] nodeFiles; private final File[] nodeIndicesLocations; private final Lock[] locks; private final int localNodeId; @Inject public NodeEnvironment(Settings settings, Environment environment) { super(settings); if (!DiscoveryNode.nodeRequiresLocalStorage(settings)) { nodeFiles = null; nodeIndicesLocations = null; locks = null; localNodeId = -1; return; } File[] nodesFiles = new File[environment.dataWithClusterFiles().length]; Lock[] locks = new Lock[environment.dataWithClusterFiles().length]; int localNodeId = -1; IOException lastException = null; int maxLocalStorageNodes = settings.getAsInt("node.max_local_storage_nodes", 50); for (int possibleLockId = 0; possibleLockId < maxLocalStorageNodes; possibleLockId++) { for (int dirIndex = 0; dirIndex < environment.dataWithClusterFiles().length; dirIndex++) { File dir = new File(new File(environment.dataWithClusterFiles()[dirIndex], "nodes"), Integer.toString(possibleLockId)); if (!dir.exists()) { FileSystemUtils.mkdirs(dir); } logger.trace("obtaining node lock on {} ...", dir.getAbsolutePath()); try { NativeFSLockFactory lockFactory = new NativeFSLockFactory(dir); Lock tmpLock = lockFactory.makeLock("node.lock"); boolean obtained = tmpLock.obtain(); if (obtained) { locks[dirIndex] = tmpLock; nodesFiles[dirIndex] = dir; localNodeId = possibleLockId; } else { logger.trace("failed to obtain node lock on {}", dir.getAbsolutePath()); // release all the ones that were obtained up until now for (int i = 0; i < locks.length; i++) { if (locks[i] != null) { try { locks[i].release(); } catch (Exception e1) { // ignore } } locks[i] = null; } break; } } catch (IOException e) { logger.trace("failed to obtain node lock on {}", e, dir.getAbsolutePath()); lastException = new IOException("failed to obtain lock on " + dir.getAbsolutePath(), e); // release all the ones that were obtained up until now for (int i = 0; i < locks.length; i++) { if (locks[i] != null) { try { locks[i].release(); } catch (Exception e1) { // ignore } } locks[i] = null; } break; } } if (locks[0] != null) { // we found a lock, break break; } } if (locks[0] == null) { throw new ElasticsearchIllegalStateException("Failed to obtain node lock, is the following location writable?: " + Arrays.toString(environment.dataWithClusterFiles()), lastException); } this.localNodeId = localNodeId; this.locks = locks; this.nodeFiles = nodesFiles; if (logger.isDebugEnabled()) { logger.debug("using node location [{}], local_node_id [{}]", nodesFiles, localNodeId); } if (logger.isTraceEnabled()) { StringBuilder sb = new StringBuilder("node data locations details:\n"); for (File file : nodesFiles) { sb.append(" -> ").append(file.getAbsolutePath()).append(", free_space [").append(new ByteSizeValue(file.getFreeSpace())).append("], usable_space [").append(new ByteSizeValue(file.getUsableSpace())).append("]\n"); } logger.trace(sb.toString()); } this.nodeIndicesLocations = new File[nodeFiles.length]; for (int i = 0; i < nodeFiles.length; i++) { nodeIndicesLocations[i] = new File(nodeFiles[i], "indices"); } } public int localNodeId() { return this.localNodeId; } public boolean hasNodeFile() { return nodeFiles != null && locks != null; } public File[] nodeDataLocations() { if (nodeFiles == null || locks == null) { throw new ElasticsearchIllegalStateException("node is not configured to store local location"); } return nodeFiles; } public File[] indicesLocations() { return nodeIndicesLocations; } public File[] indexLocations(Index index) { File[] 
indexLocations = new File[nodeFiles.length]; for (int i = 0; i < nodeFiles.length; i++) { indexLocations[i] = new File(new File(nodeFiles[i], "indices"), index.name()); } return indexLocations; } public File[] shardLocations(ShardId shardId) { File[] shardLocations = new File[nodeFiles.length]; for (int i = 0; i < nodeFiles.length; i++) { shardLocations[i] = new File(new File(new File(nodeFiles[i], "indices"), shardId.index().name()), Integer.toString(shardId.id())); } return shardLocations; } public Set<String> findAllIndices() throws Exception { if (nodeFiles == null || locks == null) { throw new ElasticsearchIllegalStateException("node is not configured to store local location"); } Set<String> indices = Sets.newHashSet(); for (File indicesLocation : nodeIndicesLocations) { File[] indicesList = indicesLocation.listFiles(); if (indicesList == null) { continue; } for (File indexLocation : indicesList) { if (indexLocation.isDirectory()) { indices.add(indexLocation.getName()); } } } return indices; } public Set<ShardId> findAllShardIds() throws Exception { if (nodeFiles == null || locks == null) { throw new ElasticsearchIllegalStateException("node is not configured to store local location"); } Set<ShardId> shardIds = Sets.newHashSet(); for (File indicesLocation : nodeIndicesLocations) { File[] indicesList = indicesLocation.listFiles(); if (indicesList == null) { continue; } for (File indexLocation : indicesList) { if (!indexLocation.isDirectory()) { continue; } String indexName = indexLocation.getName(); File[] shardsList = indexLocation.listFiles(); if (shardsList == null) { continue; } for (File shardLocation : shardsList) { if (!shardLocation.isDirectory()) { continue; } Integer shardId = Ints.tryParse(shardLocation.getName()); if (shardId != null) { shardIds.add(new ShardId(indexName, shardId)); } } } } return shardIds; } public void close() { if (locks != null) { for (Lock lock : locks) { try { logger.trace("releasing lock [{}]", lock); lock.release(); } catch (IOException e) { logger.trace("failed to release lock [{}]", e, lock); } } } } }
target: 1 (no label)
project: src_main_java_org_elasticsearch_env_NodeEnvironment.java

Row 1,645
func:
@Component("blMapFieldsFieldMetadataProvider") @Scope("prototype") public class MapFieldsFieldMetadataProvider extends DefaultFieldMetadataProvider { private static final Log LOG = LogFactory.getLog(MapFieldsFieldMetadataProvider.class); protected boolean canHandleFieldForConfiguredMetadata(AddMetadataRequest addMetadataRequest, Map<String, FieldMetadata> metadata) { AdminPresentationMapFields annot = addMetadataRequest.getRequestedField().getAnnotation(AdminPresentationMapFields.class); return annot != null; } protected boolean canHandleFieldForTypeMetadata(AddMetadataFromFieldTypeRequest addMetadataFromFieldTypeRequest, Map<String, FieldMetadata> metadata) { AdminPresentationMapFields annot = addMetadataFromFieldTypeRequest.getRequestedField().getAnnotation(AdminPresentationMapFields.class); return annot != null; } @Override public FieldProviderResponse addMetadata(AddMetadataRequest addMetadataRequest, Map<String, FieldMetadata> metadata) { if (!canHandleFieldForConfiguredMetadata(addMetadataRequest, metadata)) { return FieldProviderResponse.NOT_HANDLED; } AdminPresentationMapFields annot = addMetadataRequest.getRequestedField().getAnnotation(AdminPresentationMapFields.class); for (AdminPresentationMapField mapField : annot.mapDisplayFields()) { if (mapField.fieldPresentation().fieldType() == SupportedFieldType.UNKNOWN) { throw new IllegalArgumentException("fieldType property on AdminPresentation must be set for AdminPresentationMapField"); } FieldMetadataOverride override = constructBasicMetadataOverride(mapField.fieldPresentation(), null, null); override.setFriendlyName(mapField.fieldPresentation().friendlyName()); FieldInfo myInfo = new FieldInfo(); myInfo.setName(addMetadataRequest.getRequestedField().getName() + FieldManager.MAPFIELDSEPARATOR + mapField.fieldName()); buildBasicMetadata(addMetadataRequest.getParentClass(), addMetadataRequest.getTargetClass(), metadata, myInfo, override, addMetadataRequest.getDynamicEntityDao()); setClassOwnership(addMetadataRequest.getParentClass(), addMetadataRequest.getTargetClass(), metadata, myInfo); BasicFieldMetadata basicFieldMetadata = (BasicFieldMetadata) metadata.get(myInfo.getName()); if (!mapField.targetClass().equals(Void.class)) { if (mapField.targetClass().isInterface()) { throw new IllegalArgumentException("targetClass on @AdminPresentationMapField must be a concrete class"); } basicFieldMetadata.setMapFieldValueClass(mapField.targetClass().getName()); } if (mapField.searchable() != CustomFieldSearchableTypes.NOT_SPECIFIED) { basicFieldMetadata.setSearchable(mapField.searchable() == CustomFieldSearchableTypes.YES); } if (!StringUtils.isEmpty(mapField.manyToField())) { basicFieldMetadata.setManyToField(mapField.manyToField()); } } return FieldProviderResponse.HANDLED; } @Override public FieldProviderResponse addMetadataFromFieldType(AddMetadataFromFieldTypeRequest addMetadataFromFieldTypeRequest, Map<String, FieldMetadata> metadata) { if (!canHandleFieldForTypeMetadata(addMetadataFromFieldTypeRequest, metadata)) { return FieldProviderResponse.NOT_HANDLED; } //look for any map field metadata that was previously added for the requested field for (Map.Entry<String, FieldMetadata> entry : addMetadataFromFieldTypeRequest.getPresentationAttributes().entrySet()) { if (entry.getKey().startsWith(addMetadataFromFieldTypeRequest.getRequestedPropertyName() + FieldManager.MAPFIELDSEPARATOR)) { TypeLocatorImpl typeLocator = new TypeLocatorImpl(new TypeResolver()); Type myType = null; //first, check if an explicit type was declared String valueClass 
= ((BasicFieldMetadata) entry.getValue()).getMapFieldValueClass(); if (valueClass != null) { myType = typeLocator.entity(valueClass); } if (myType == null) { SupportedFieldType fieldType = ((BasicFieldMetadata) entry.getValue()).getExplicitFieldType(); Class<?> basicJavaType = getBasicJavaType(fieldType); if (basicJavaType != null) { myType = typeLocator.basic(basicJavaType); } } if (myType == null) { java.lang.reflect.Type genericType = addMetadataFromFieldTypeRequest.getRequestedField().getGenericType(); if (genericType instanceof ParameterizedType) { ParameterizedType pType = (ParameterizedType) genericType; Class<?> clazz = (Class<?>) pType.getActualTypeArguments()[1]; Class<?>[] entities = addMetadataFromFieldTypeRequest.getDynamicEntityDao().getAllPolymorphicEntitiesFromCeiling(clazz); if (!ArrayUtils.isEmpty(entities)) { myType = typeLocator.entity(entities[entities.length-1]); } } } if (myType == null) { throw new IllegalArgumentException("Unable to establish the type for the property (" + entry .getKey() + ")"); } //add property for this map field as if it was a normal field super.addMetadataFromFieldType(new AddMetadataFromFieldTypeRequest(addMetadataFromFieldTypeRequest.getRequestedField(), addMetadataFromFieldTypeRequest.getTargetClass(), addMetadataFromFieldTypeRequest.getForeignField(), addMetadataFromFieldTypeRequest.getAdditionalForeignFields(), addMetadataFromFieldTypeRequest.getMergedPropertyType(), addMetadataFromFieldTypeRequest.getComponentProperties(), addMetadataFromFieldTypeRequest.getIdProperty(), addMetadataFromFieldTypeRequest.getPrefix(), entry.getKey(), myType, addMetadataFromFieldTypeRequest.isPropertyForeignKey(), addMetadataFromFieldTypeRequest.getAdditionalForeignKeyIndexPosition(), addMetadataFromFieldTypeRequest.getPresentationAttributes(), entry.getValue(), ((BasicFieldMetadata) entry.getValue()).getExplicitFieldType(), myType.getReturnedClass(), addMetadataFromFieldTypeRequest.getDynamicEntityDao()), metadata); } } return FieldProviderResponse.HANDLED; } @Override public FieldProviderResponse overrideViaAnnotation(OverrideViaAnnotationRequest overrideViaAnnotationRequest, Map<String, FieldMetadata> metadata) { //TODO support annotation override return FieldProviderResponse.NOT_HANDLED; } @Override public FieldProviderResponse overrideViaXml(OverrideViaXmlRequest overrideViaXmlRequest, Map<String, FieldMetadata> metadata) { //TODO support xml override return FieldProviderResponse.NOT_HANDLED; } @Override public int getOrder() { return FieldMetadataProvider.MAP_FIELD; } }
target: 0 (true)
project: admin_broadleaf-open-admin-platform_src_main_java_org_broadleafcommerce_openadmin_server_dao_provider_metadata_MapFieldsFieldMetadataProvider.java

Row 799
func:
public class PercolateShardResponse extends BroadcastShardOperationResponse { private static final BytesRef[] EMPTY_MATCHES = new BytesRef[0]; private static final float[] EMPTY_SCORES = new float[0]; private static final List<Map<String, HighlightField>> EMPTY_HL = ImmutableList.of(); private long count; private float[] scores; private BytesRef[] matches; private List<Map<String, HighlightField>> hls; private byte percolatorTypeId; private int requestedSize; private InternalFacets facets; private InternalAggregations aggregations; PercolateShardResponse() { hls = new ArrayList<Map<String, HighlightField>>(); } public PercolateShardResponse(BytesRef[] matches, List<Map<String, HighlightField>> hls, long count, float[] scores, PercolateContext context, String index, int shardId) { super(index, shardId); this.matches = matches; this.hls = hls; this.count = count; this.scores = scores; this.percolatorTypeId = context.percolatorTypeId; this.requestedSize = context.size; QuerySearchResult result = context.queryResult(); if (result != null) { if (result.facets() != null) { this.facets = new InternalFacets(result.facets().facets()); } if (result.aggregations() != null) { this.aggregations = (InternalAggregations) result.aggregations(); } } } public PercolateShardResponse(BytesRef[] matches, long count, float[] scores, PercolateContext context, String index, int shardId) { this(matches, EMPTY_HL, count, scores, context, index, shardId); } public PercolateShardResponse(BytesRef[] matches, List<Map<String, HighlightField>> hls, long count, PercolateContext context, String index, int shardId) { this(matches, hls, count, EMPTY_SCORES, context, index, shardId); } public PercolateShardResponse(long count, PercolateContext context, String index, int shardId) { this(EMPTY_MATCHES, EMPTY_HL, count, EMPTY_SCORES, context, index, shardId); } public PercolateShardResponse(PercolateContext context, String index, int shardId) { this(EMPTY_MATCHES, EMPTY_HL, 0, EMPTY_SCORES, context, index, shardId); } public BytesRef[] matches() { return matches; } public float[] scores() { return scores; } public long count() { return count; } public int requestedSize() { return requestedSize; } public List<Map<String, HighlightField>> hls() { return hls; } public InternalFacets facets() { return facets; } public InternalAggregations aggregations() { return aggregations; } public byte percolatorTypeId() { return percolatorTypeId; } public boolean isEmpty() { return percolatorTypeId == 0x00; } @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); percolatorTypeId = in.readByte(); requestedSize = in.readVInt(); count = in.readVLong(); matches = new BytesRef[in.readVInt()]; for (int i = 0; i < matches.length; i++) { matches[i] = in.readBytesRef(); } scores = new float[in.readVInt()]; for (int i = 0; i < scores.length; i++) { scores[i] = in.readFloat(); } int size = in.readVInt(); for (int i = 0; i < size; i++) { int mSize = in.readVInt(); Map<String, HighlightField> fields = new HashMap<String, HighlightField>(); for (int j = 0; j < mSize; j++) { fields.put(in.readString(), HighlightField.readHighlightField(in)); } hls.add(fields); } facets = InternalFacets.readOptionalFacets(in); aggregations = InternalAggregations.readOptionalAggregations(in); } @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeByte(percolatorTypeId); out.writeVLong(requestedSize); out.writeVLong(count); out.writeVInt(matches.length); for (BytesRef match : matches) { 
out.writeBytesRef(match); } out.writeVLong(scores.length); for (float score : scores) { out.writeFloat(score); } out.writeVInt(hls.size()); for (Map<String, HighlightField> hl : hls) { out.writeVInt(hl.size()); for (Map.Entry<String, HighlightField> entry : hl.entrySet()) { out.writeString(entry.getKey()); entry.getValue().writeTo(out); } } out.writeOptionalStreamable(facets); out.writeOptionalStreamable(aggregations); } }
target: 0 (true)
project: src_main_java_org_elasticsearch_action_percolate_PercolateShardResponse.java

Row 1,481
func:
public class RoutingService extends AbstractLifecycleComponent<RoutingService> implements ClusterStateListener { private static final String CLUSTER_UPDATE_TASK_SOURCE = "routing-table-updater"; private final ThreadPool threadPool; private final ClusterService clusterService; private final AllocationService allocationService; private final TimeValue schedule; private volatile boolean routingTableDirty = false; private volatile Future scheduledRoutingTableFuture; @Inject public RoutingService(Settings settings, ThreadPool threadPool, ClusterService clusterService, AllocationService allocationService) { super(settings); this.threadPool = threadPool; this.clusterService = clusterService; this.allocationService = allocationService; this.schedule = componentSettings.getAsTime("schedule", timeValueSeconds(10)); clusterService.addFirst(this); } @Override protected void doStart() throws ElasticsearchException { } @Override protected void doStop() throws ElasticsearchException { } @Override protected void doClose() throws ElasticsearchException { if (scheduledRoutingTableFuture != null) { scheduledRoutingTableFuture.cancel(true); scheduledRoutingTableFuture = null; } clusterService.remove(this); } @Override public void clusterChanged(ClusterChangedEvent event) { if (event.source().equals(CLUSTER_UPDATE_TASK_SOURCE)) { // that's us, ignore this event return; } if (event.state().nodes().localNodeMaster()) { // we are master, schedule the routing table updater if (scheduledRoutingTableFuture == null) { // a new master (us), make sure we reroute shards routingTableDirty = true; scheduledRoutingTableFuture = threadPool.scheduleWithFixedDelay(new RoutingTableUpdater(), schedule); } if (event.nodesRemoved()) { // if nodes were removed, we don't want to wait for the scheduled task // since we want to get primary election as fast as possible routingTableDirty = true; reroute(); // Commented out since we make sure to reroute whenever shards changes state or metadata changes state // } else if (event.routingTableChanged()) { // routingTableDirty = true; // reroute(); } else { if (event.nodesAdded()) { for (DiscoveryNode node : event.nodesDelta().addedNodes()) { if (node.dataNode()) { routingTableDirty = true; break; } } } } } else { if (scheduledRoutingTableFuture != null) { scheduledRoutingTableFuture.cancel(true); scheduledRoutingTableFuture = null; } } } private void reroute() { try { if (!routingTableDirty) { return; } if (lifecycle.stopped()) { return; } clusterService.submitStateUpdateTask(CLUSTER_UPDATE_TASK_SOURCE, Priority.HIGH, new ClusterStateUpdateTask() { @Override public ClusterState execute(ClusterState currentState) { RoutingAllocation.Result routingResult = allocationService.reroute(currentState); if (!routingResult.changed()) { // no state changed return currentState; } return ClusterState.builder(currentState).routingResult(routingResult).build(); } @Override public void onFailure(String source, Throwable t) { logger.error("unexpected failure during [{}]", t, source); } }); routingTableDirty = false; } catch (Exception e) { logger.warn("Failed to reroute routing table", e); } } private class RoutingTableUpdater implements Runnable { @Override public void run() { reroute(); } } }
target: 1 (no label)
project: src_main_java_org_elasticsearch_cluster_routing_RoutingService.java

Row 445
func:
String id = queue.addItemListener(new ItemListener() { public void itemAdded(ItemEvent itemEvent) { itemAddedLatch.countDown(); } public void itemRemoved(ItemEvent item) { itemRemovedLatch.countDown(); } }, true);
target: 0 (true)
project: hazelcast-client_src_test_java_com_hazelcast_client_queue_ClientQueueTest.java

Row 1,262
func:
public class FulfillmentItemPricingActivity extends BaseActivity<PricingContext> { private static final Log LOG = LogFactory.getLog(FulfillmentItemPricingActivity.class); protected BroadleafCurrency getCurrency(FulfillmentGroup fg) { return fg.getOrder().getCurrency(); } /** * Returns the order adjustment value or zero if none exists * @param order * @return */ protected Money getOrderSavingsToDistribute(Order order) { if (order.getOrderAdjustmentsValue() == null) { return new Money(order.getCurrency()); } else { Money adjustmentValue = order.getOrderAdjustmentsValue(); Money orderSubTotal = order.getSubTotal(); if (orderSubTotal == null || orderSubTotal.lessThan(adjustmentValue)) { if (LOG.isWarnEnabled()) { LOG.warn("Subtotal is null or less than orderSavings in DistributeOrderSavingsActivity.java. " + "No distribution is taking place."); } return new Money(order.getCurrency()); } return adjustmentValue; } } @Override public PricingContext execute(PricingContext context) throws Exception { Order order = context.getSeedData(); Map<OrderItem,List<FulfillmentGroupItem>> partialOrderItemMap = new HashMap<OrderItem,List<FulfillmentGroupItem>>(); // Calculate the fulfillmentGroupItem total populateItemTotalAmount(order, partialOrderItemMap); fixItemTotalRoundingIssues(order, partialOrderItemMap); // Calculate the fulfillmentGroupItem prorated orderSavings Money totalAllItemsAmount = calculateTotalPriceForAllFulfillmentItems(order); Money totalOrderAdjustmentDistributed = distributeOrderSavingsToItems(order, totalAllItemsAmount.getAmount()); fixOrderSavingsRoundingIssues(order, totalOrderAdjustmentDistributed); // Step 3: Finalize the taxable amounts updateTaxableAmountsOnItems(order); context.setSeedData(order); return context; } /** * Sets the fulfillment amount which includes the relative portion of the total price for * the corresponding order item. * * @param order * @param partialOrderItemMap */ protected void populateItemTotalAmount(Order order, Map<OrderItem, List<FulfillmentGroupItem>> partialOrderItemMap) { for (FulfillmentGroup fulfillmentGroup : order.getFulfillmentGroups()) { for (FulfillmentGroupItem fgItem : fulfillmentGroup.getFulfillmentGroupItems()) { OrderItem orderItem = fgItem.getOrderItem(); int fgItemQty = fgItem.getQuantity(); int orderItemQty = orderItem.getQuantity(); Money totalItemAmount = orderItem.getTotalPrice(); if (fgItemQty != orderItemQty) { // We need to keep track of all of these items in case we need to distribute a remainder // to one or more of the items. List<FulfillmentGroupItem> fgItemList = partialOrderItemMap.get(orderItem); if (fgItemList == null) { fgItemList = new ArrayList<FulfillmentGroupItem>(); partialOrderItemMap.put(orderItem, fgItemList); } fgItemList.add(fgItem); fgItem.setTotalItemAmount(totalItemAmount.multiply(fgItemQty).divide(orderItemQty)); } else { fgItem.setTotalItemAmount(totalItemAmount); } } } } /** * Because an item may have multiple price details that don't round cleanly, we may have pennies * left over that need to be distributed. 
* * @param order * @param partialOrderItemMap */ protected void fixItemTotalRoundingIssues(Order order, Map<OrderItem, List<FulfillmentGroupItem>> partialOrderItemMap) { for (OrderItem orderItem : partialOrderItemMap.keySet()) { Money totalItemAmount = orderItem.getTotalPrice(); Money totalFGItemAmount = sumItemAmount(partialOrderItemMap.get(orderItem), order); Money amountDiff = totalItemAmount.subtract(totalFGItemAmount); if (!(amountDiff.getAmount().compareTo(BigDecimal.ZERO) == 0)) { long numApplicationsNeeded = countNumberOfUnits(amountDiff); Money unitAmount = getUnitAmount(amountDiff); for (FulfillmentGroupItem fgItem : partialOrderItemMap.get(orderItem)) { numApplicationsNeeded = numApplicationsNeeded - applyDifferenceToAmount(fgItem, numApplicationsNeeded, unitAmount); if (numApplicationsNeeded == 0) { break; } } } } } /** * Returns the total price for all fulfillment items. * @param order * @return */ protected Money calculateTotalPriceForAllFulfillmentItems(Order order) { Money totalAllItemsAmount = new Money(order.getCurrency()); for (FulfillmentGroup fulfillmentGroup : order.getFulfillmentGroups()) { for (FulfillmentGroupItem fgItem : fulfillmentGroup.getFulfillmentGroupItems()) { totalAllItemsAmount = totalAllItemsAmount.add(fgItem.getTotalItemAmount()); } } return totalAllItemsAmount; } /** * Distributes the order adjustments (if any) to the individual fulfillment group items. * @param order * @param totalAllItems * @return */ protected Money distributeOrderSavingsToItems(Order order, BigDecimal totalAllItems) { Money returnAmount = new Money(order.getCurrency()); BigDecimal orderAdjAmt = order.getOrderAdjustmentsValue().getAmount(); for (FulfillmentGroup fulfillmentGroup : order.getFulfillmentGroups()) { for (FulfillmentGroupItem fgItem : fulfillmentGroup.getFulfillmentGroupItems()) { BigDecimal fgItemAmount = fgItem.getTotalItemAmount().getAmount(); BigDecimal proratedAdjAmt = totalAllItems.compareTo(BigDecimal.ZERO) == 0 ? totalAllItems : orderAdjAmt.multiply(fgItemAmount).divide(totalAllItems, RoundingMode.FLOOR); fgItem.setProratedOrderAdjustmentAmount(new Money(proratedAdjAmt, order.getCurrency())); returnAmount = returnAmount.add(fgItem.getProratedOrderAdjustmentAmount()); } } return returnAmount; } /** * It is possible due to rounding that the order adjustments do not match the * total. This method fixes by adding or removing the pennies. * @param order * @param partialOrderItemMap */ protected void fixOrderSavingsRoundingIssues(Order order, Money totalOrderAdjustmentDistributed) { if (!order.getHasOrderAdjustments()) { return; } Money orderAdjustmentTotal = order.getOrderAdjustmentsValue(); Money amountDiff = totalOrderAdjustmentDistributed.subtract(orderAdjustmentTotal); if (!(amountDiff.getAmount().compareTo(BigDecimal.ZERO) == 0)) { long numApplicationsNeeded = countNumberOfUnits(amountDiff); Money unitAmount = getUnitAmount(amountDiff); for (FulfillmentGroup fulfillmentGroup : order.getFulfillmentGroups()) { for (FulfillmentGroupItem fgItem : fulfillmentGroup.getFulfillmentGroupItems()) { numApplicationsNeeded = numApplicationsNeeded - applyDifferenceToProratedAdj(fgItem, numApplicationsNeeded, unitAmount); if (numApplicationsNeeded == 0) { break; } } } } } /** * Returns the total price for all fulfillment items. 
* @param order * @return */ protected void updateTaxableAmountsOnItems(Order order) { Money zero = new Money(order.getCurrency()); for (FulfillmentGroup fulfillmentGroup : order.getFulfillmentGroups()) { for (FulfillmentGroupItem fgItem : fulfillmentGroup.getFulfillmentGroupItems()) { if (fgItem.getOrderItem().isTaxable()) { Money proratedOrderAdjAmt = fgItem.getProratedOrderAdjustmentAmount(); if (proratedOrderAdjAmt != null) { fgItem.setTotalItemTaxableAmount(fgItem.getTotalItemAmount().subtract(proratedOrderAdjAmt)); } else { fgItem.setTotalItemTaxableAmount(fgItem.getTotalItemAmount()); } } else { fgItem.setTotalItemTaxableAmount(zero); } } } } protected Money sumItemAmount(List<FulfillmentGroupItem> items, Order order) { Money totalAmount = new Money(order.getCurrency()); for (FulfillmentGroupItem fgItem : items) { totalAmount = totalAmount.add(fgItem.getTotalItemAmount()); } return totalAmount; } protected Money sumTaxAmount(List<FulfillmentGroupItem> items, Order order) { Money taxAmount = new Money(order.getCurrency()); for (FulfillmentGroupItem fgItem : items) { taxAmount = taxAmount.add(fgItem.getTotalItemTaxableAmount()); } return taxAmount; } public long countNumberOfUnits(Money difference) { double numUnits = difference.multiply(Math.pow(10, difference.getCurrency().getDefaultFractionDigits())).doubleValue(); return Math.round(numUnits); } /** * Returns the unit amount (e.g. .01 for US) * @param currency * @return */ public Money getUnitAmount(Money difference) { Currency currency = difference.getCurrency(); BigDecimal divisor = new BigDecimal(Math.pow(10, currency.getDefaultFractionDigits())); BigDecimal unitAmount = new BigDecimal("1").divide(divisor); if (difference.lessThan(BigDecimal.ZERO)) { unitAmount = unitAmount.negate(); } return new Money(unitAmount, currency); } public long applyDifferenceToAmount(FulfillmentGroupItem fgItem, long numApplicationsNeeded, Money unitAmount) { BigDecimal numTimesToApply = new BigDecimal(Math.min(numApplicationsNeeded, fgItem.getQuantity())); Money oldAmount = fgItem.getTotalItemAmount(); Money changeToAmount = unitAmount.multiply(numTimesToApply); fgItem.setTotalItemAmount(oldAmount.add(changeToAmount)); return numTimesToApply.longValue(); } public long applyDifferenceToProratedAdj(FulfillmentGroupItem fgItem, long numApplicationsNeeded, Money unitAmount) { BigDecimal numTimesToApply = new BigDecimal(Math.min(numApplicationsNeeded, fgItem.getQuantity())); Money oldAmount = fgItem.getProratedOrderAdjustmentAmount(); Money changeToAmount = unitAmount.multiply(numTimesToApply); fgItem.setProratedOrderAdjustmentAmount(oldAmount.add(changeToAmount)); return numTimesToApply.longValue(); } public long applyTaxDifference(FulfillmentGroupItem fgItem, long numApplicationsNeeded, Money unitAmount) { BigDecimal numTimesToApply = new BigDecimal(Math.min(numApplicationsNeeded, fgItem.getQuantity())); Money oldAmount = fgItem.getTotalItemTaxableAmount(); Money changeToAmount = unitAmount.multiply(numTimesToApply); fgItem.setTotalItemTaxableAmount(oldAmount.add(changeToAmount)); return numTimesToApply.longValue(); } }
target: 1 (no label)
project: core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_pricing_service_workflow_FulfillmentItemPricingActivity.java
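
The two rounding-fix methods in this row's code (fixItemTotalRoundingIssues and fixOrderSavingsRoundingIssues) both compute the leftover difference, convert it to whole currency units, and apply those units item by item. A minimal standalone sketch of that remainder-spreading idea, using plain long cents rather than Broadleaf's Money type (the helper name and round-robin application are illustrative assumptions; a non-negative total and at least one item are assumed):

```java
// Distribute totalCents across items proportionally by quantity, flooring
// each share (like RoundingMode.FLOOR above), then hand out the leftover
// cents one at a time so the shares sum exactly to the total.
static long[] distributeCents(long totalCents, int[] quantities) {
    int totalQty = 0;
    for (int q : quantities) totalQty += q;
    long[] shares = new long[quantities.length];
    long assigned = 0;
    for (int i = 0; i < quantities.length; i++) {
        shares[i] = totalCents * quantities[i] / totalQty; // floor division
        assigned += shares[i];
    }
    long remainder = totalCents - assigned; // pennies lost to flooring
    for (int i = 0; remainder > 0; i = (i + 1) % shares.length) {
        shares[i]++; // spread one cent at a time, round-robin
        remainder--;
    }
    return shares;
}
```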

Row 1,442
func:
return new MessageListener<Object>() { public void onMessage(final Message<Object> message) { final Invalidation invalidation = (Invalidation) message.getMessageObject(); if (versionComparator != null) { final Value value = cache.get(invalidation.getKey()); if (value != null) { Object currentVersion = value.getVersion(); Object newVersion = invalidation.getVersion(); if (versionComparator.compare(newVersion, currentVersion) > 0) { cache.remove(invalidation.getKey(), value); } } } else { cache.remove(invalidation.getKey()); } } };
target: 0 (true)
project: hazelcast-hibernate_hazelcast-hibernate4_src_main_java_com_hazelcast_hibernate_local_LocalRegionCache.java
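
The anonymous MessageListener above reacts to invalidation messages published by other cluster members. A minimal sketch of wiring such a listener to a Hazelcast topic (the instance creation, topic name, and printout are illustrative assumptions; LocalRegionCache itself derives the topic from the cache region):

```java
import com.hazelcast.core.Hazelcast;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.core.ITopic;
import com.hazelcast.core.Message;
import com.hazelcast.core.MessageListener;

public class InvalidationListenerSketch {
    public static void main(String[] args) {
        HazelcastInstance hz = Hazelcast.newHazelcastInstance();
        ITopic<Object> topic = hz.getTopic("my-region-invalidations"); // hypothetical name
        topic.addMessageListener(new MessageListener<Object>() {
            public void onMessage(Message<Object> message) {
                // A real listener would evict the named key from the local cache here.
                System.out.println("invalidation received: " + message.getMessageObject());
            }
        });
    }
}
```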

Row 1,667
func:
public interface BlobContainer { interface BlobNameFilter { /** * Return <tt>false</tt> if the blob should be filtered. */ boolean accept(String blobName); } interface ReadBlobListener { void onPartial(byte[] data, int offset, int size) throws IOException; void onCompleted(); void onFailure(Throwable t); } BlobPath path(); boolean blobExists(String blobName); void readBlob(String blobName, ReadBlobListener listener); byte[] readBlobFully(String blobName) throws IOException; boolean deleteBlob(String blobName) throws IOException; void deleteBlobsByPrefix(String blobNamePrefix) throws IOException; void deleteBlobsByFilter(BlobNameFilter filter) throws IOException; ImmutableMap<String, BlobMetaData> listBlobs() throws IOException; ImmutableMap<String, BlobMetaData> listBlobsByPrefix(String blobNamePrefix) throws IOException; }
target: 0 (true)
project: src_main_java_org_elasticsearch_common_blobstore_BlobContainer.java
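
A short sketch of driving the BlobContainer API above, assuming deleteBlobsByFilter removes exactly the blobs the filter accepts (the ".tmp" suffix convention and the method name below are illustrative, not part of the interface):

```java
// Delete temporary blobs from a container; sketch only.
void deleteTemporaryBlobs(BlobContainer container) throws IOException {
    container.deleteBlobsByFilter(new BlobContainer.BlobNameFilter() {
        @Override
        public boolean accept(String blobName) {
            return blobName.endsWith(".tmp"); // accept = select for deletion (assumed)
        }
    });
}
```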

Row 4,755
func:
Arrays.sort(copy, new Comparator<RestFilter>() { @Override public int compare(RestFilter o1, RestFilter o2) { return o2.order() - o1.order(); } });
target: 1 (no label)
project: src_main_java_org_elasticsearch_rest_RestController.java
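
The subtraction idiom o2.order() - o1.order() above sorts filters in descending order but can overflow when the two values are far apart (for example near Integer.MIN_VALUE and Integer.MAX_VALUE). A sketch of the same descending sort written with the overflow-safe Integer.compare (Java 7+):

```java
// Same ordering, no overflow: larger order() values sort first.
Arrays.sort(copy, new Comparator<RestFilter>() {
    @Override
    public int compare(RestFilter o1, RestFilter o2) {
        return Integer.compare(o2.order(), o1.order());
    }
});
```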

Row 3,543
func:
public abstract static class Builder<T extends Builder, Y extends AbstractFieldMapper> extends Mapper.Builder<T, Y> { protected final FieldType fieldType; protected Boolean docValues; protected float boost = Defaults.BOOST; protected boolean omitNormsSet = false; protected String indexName; protected NamedAnalyzer indexAnalyzer; protected NamedAnalyzer searchAnalyzer; protected Boolean includeInAll; protected boolean indexOptionsSet = false; protected PostingsFormatProvider postingsProvider; protected DocValuesFormatProvider docValuesProvider; protected SimilarityProvider similarity; protected Loading normsLoading; @Nullable protected Settings fieldDataSettings; protected final MultiFields.Builder multiFieldsBuilder; protected CopyTo copyTo; protected Builder(String name, FieldType fieldType) { super(name); this.fieldType = fieldType; multiFieldsBuilder = new MultiFields.Builder(); } public T index(boolean index) { this.fieldType.setIndexed(index); return builder; } public T store(boolean store) { this.fieldType.setStored(store); return builder; } public T docValues(boolean docValues) { this.docValues = docValues; return builder; } public T storeTermVectors(boolean termVectors) { if (termVectors) { this.fieldType.setStoreTermVectors(termVectors); } // don't set it to false, it is default and might be flipped by a more specific option return builder; } public T storeTermVectorOffsets(boolean termVectorOffsets) { if (termVectorOffsets) { this.fieldType.setStoreTermVectors(termVectorOffsets); } this.fieldType.setStoreTermVectorOffsets(termVectorOffsets); return builder; } public T storeTermVectorPositions(boolean termVectorPositions) { if (termVectorPositions) { this.fieldType.setStoreTermVectors(termVectorPositions); } this.fieldType.setStoreTermVectorPositions(termVectorPositions); return builder; } public T storeTermVectorPayloads(boolean termVectorPayloads) { if (termVectorPayloads) { this.fieldType.setStoreTermVectors(termVectorPayloads); } this.fieldType.setStoreTermVectorPayloads(termVectorPayloads); return builder; } public T tokenized(boolean tokenized) { this.fieldType.setTokenized(tokenized); return builder; } public T boost(float boost) { this.boost = boost; return builder; } public T omitNorms(boolean omitNorms) { this.fieldType.setOmitNorms(omitNorms); this.omitNormsSet = true; return builder; } public T indexOptions(IndexOptions indexOptions) { this.fieldType.setIndexOptions(indexOptions); this.indexOptionsSet = true; return builder; } public T indexName(String indexName) { this.indexName = indexName; return builder; } public T indexAnalyzer(NamedAnalyzer indexAnalyzer) { this.indexAnalyzer = indexAnalyzer; return builder; } public T searchAnalyzer(NamedAnalyzer searchAnalyzer) { this.searchAnalyzer = searchAnalyzer; return builder; } public T includeInAll(Boolean includeInAll) { this.includeInAll = includeInAll; return builder; } public T postingsFormat(PostingsFormatProvider postingsFormat) { this.postingsProvider = postingsFormat; return builder; } public T docValuesFormat(DocValuesFormatProvider docValuesFormat) { this.docValuesProvider = docValuesFormat; return builder; } public T similarity(SimilarityProvider similarity) { this.similarity = similarity; return builder; } public T normsLoading(Loading normsLoading) { this.normsLoading = normsLoading; return builder; } public T fieldDataSettings(Settings settings) { this.fieldDataSettings = settings; return builder; } public T multiFieldPathType(ContentPath.Type pathType) { multiFieldsBuilder.pathType(pathType); return 
builder; } public T addMultiField(Mapper.Builder mapperBuilder) { multiFieldsBuilder.add(mapperBuilder); return builder; } public T copyTo(CopyTo copyTo) { this.copyTo = copyTo; return builder; } public Names buildNames(BuilderContext context) { return new Names(name, buildIndexName(context), indexName == null ? name : indexName, buildFullName(context), context.path().sourcePath()); } public String buildIndexName(BuilderContext context) { String actualIndexName = indexName == null ? name : indexName; return context.path().pathAsText(actualIndexName); } public String buildFullName(BuilderContext context) { return context.path().fullPathAsText(name); } }
target: 0 (true)
project: src_main_java_org_elasticsearch_index_mapper_core_AbstractFieldMapper.java

Row 2,421
func:
public final class SlicedDoubleList extends AbstractList<Double> implements RandomAccess { public static final SlicedDoubleList EMPTY = new SlicedDoubleList(0); public double[] values; public int offset; public int length; public SlicedDoubleList(int capacity) { this(new double[capacity], 0, capacity); } public SlicedDoubleList(double[] values, int offset, int length) { this.values = values; this.offset = offset; this.length = length; } @Override public int size() { return length; } @Override public boolean isEmpty() { return size() == 0; } @Override public Double get(int index) { assert index < size(); return values[offset + index]; } @Override public boolean contains(Object target) { // Overridden to prevent a ton of boxing return (target instanceof Double) && indexOf(values, (Double) target, offset, offset+length) != -1; } @Override public int indexOf(Object target) { // Overridden to prevent a ton of boxing if (target instanceof Double) { int i = indexOf(values, (Double) target, offset, offset+length); if (i >= 0) { return i - offset; } } return -1; } @Override public int lastIndexOf(Object target) { // Overridden to prevent a ton of boxing if (target instanceof Double) { int i = lastIndexOf(values, (Double) target, offset, offset+length); if (i >= 0) { return i - offset; } } return -1; } @Override public Double set(int index, Double element) { throw new UnsupportedOperationException("modifying list opertations are not implemented"); } @Override public boolean equals(Object object) { if (object == this) { return true; } if (object instanceof SlicedDoubleList) { SlicedDoubleList that = (SlicedDoubleList) object; int size = size(); if (that.size() != size) { return false; } for (int i = 0; i < size; i++) { if (values[offset + i] != that.values[that.offset + i]) { return false; } } return true; } return super.equals(object); } @Override public int hashCode() { int result = 1; for (int i = 0; i < length; i++) { result = 31 * result + Doubles.hashCode(values[offset+i]); } return result; } @Override public String toString() { StringBuilder builder = new StringBuilder(size() * 10); builder.append('['); if (length > 0) { builder.append(values[offset]); for (int i = 1; i < length; i++) { builder.append(", ").append(values[offset+i]); } } return builder.append(']').toString(); } private static int indexOf(double[] array, double target, int start, int end) { for (int i = start; i < end; i++) { if (array[i] == target) { return i; } } return -1; } private static int lastIndexOf(double[] array, double target, int start, int end) { for (int i = end - 1; i >= start; i--) { if (array[i] == target) { return i; } } return -1; } public void grow(int newLength) { assert offset == 0; values = ArrayUtil.grow(values, newLength); } }
target: 0 (true)
project: src_main_java_org_elasticsearch_common_util_SlicedDoubleList.java
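
A small usage sketch of the slicing semantics above: the list is a zero-copy view over a backing array, and indexOf answers relative to the slice, not the array (the sample values are illustrative):

```java
double[] data = {1.0, 2.0, 3.0, 4.0, 5.0};
// View the middle three elements without copying.
SlicedDoubleList slice = new SlicedDoubleList(data, 1, 3); // offset 1, length 3
assert slice.size() == 3;
assert slice.get(0) == 2.0;     // values[offset + 0]
assert slice.indexOf(4.0) == 2; // position within the slice, not index 3 in the array
```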

Row 1,339
func:
public class MappingUpdatedAction extends TransportMasterNodeOperationAction<MappingUpdatedAction.MappingUpdatedRequest, MappingUpdatedAction.MappingUpdatedResponse> { private final AtomicLong mappingUpdateOrderGen = new AtomicLong(); private final MetaDataMappingService metaDataMappingService; @Inject public MappingUpdatedAction(Settings settings, TransportService transportService, ClusterService clusterService, ThreadPool threadPool, MetaDataMappingService metaDataMappingService) { super(settings, transportService, clusterService, threadPool); this.metaDataMappingService = metaDataMappingService; } public long generateNextMappingUpdateOrder() { return mappingUpdateOrderGen.incrementAndGet(); } @Override protected String transportAction() { return "cluster/mappingUpdated"; } @Override protected String executor() { // we go async right away return ThreadPool.Names.SAME; } @Override protected MappingUpdatedRequest newRequest() { return new MappingUpdatedRequest(); } @Override protected MappingUpdatedResponse newResponse() { return new MappingUpdatedResponse(); } @Override protected void masterOperation(final MappingUpdatedRequest request, final ClusterState state, final ActionListener<MappingUpdatedResponse> listener) throws ElasticsearchException { metaDataMappingService.updateMapping(request.index(), request.indexUUID(), request.type(), request.mappingSource(), request.order, request.nodeId, new ClusterStateUpdateListener() { @Override public void onResponse(ClusterStateUpdateResponse response) { listener.onResponse(new MappingUpdatedResponse()); } @Override public void onFailure(Throwable t) { logger.warn("[{}] update-mapping [{}] failed to dynamically update the mapping in cluster_state from shard", t, request.index(), request.type()); listener.onFailure(t); } }); } public static class MappingUpdatedResponse extends ActionResponse { @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); } @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); } } public static class MappingUpdatedRequest extends MasterNodeOperationRequest<MappingUpdatedRequest> { private String index; private String indexUUID = IndexMetaData.INDEX_UUID_NA_VALUE; private String type; private CompressedString mappingSource; private long order = -1; // -1 means not set... private String nodeId = null; // null means not set MappingUpdatedRequest() { } public MappingUpdatedRequest(String index, String indexUUID, String type, CompressedString mappingSource, long order, String nodeId) { this.index = index; this.indexUUID = indexUUID; this.type = type; this.mappingSource = mappingSource; this.order = order; this.nodeId = nodeId; } public String index() { return index; } public String indexUUID() { return indexUUID; } public String type() { return type; } public CompressedString mappingSource() { return mappingSource; } /** * Returns -1 if not set... */ public long order() { return this.order; } /** * Returns null for not set. 
*/ public String nodeId() { return this.nodeId; } @Override public ActionRequestValidationException validate() { return null; } @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); index = in.readString(); type = in.readString(); mappingSource = CompressedString.readCompressedString(in); indexUUID = in.readString(); order = in.readLong(); nodeId = in.readOptionalString(); } @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeString(index); out.writeString(type); mappingSource.writeTo(out); out.writeString(indexUUID); out.writeLong(order); out.writeOptionalString(nodeId); } @Override public String toString() { return "index [" + index + "], indexUUID [" + indexUUID + "], type [" + type + "] and source [" + mappingSource + "]"; } } }
target: 0 (true)
project: src_main_java_org_elasticsearch_cluster_action_index_MappingUpdatedAction.java

Row 1,145
func:
public class EntryAdapter<K, V> implements EntryListener<K, V> { @Override public void entryAdded(EntryEvent<K, V> event) { onEntryEvent(event); } @Override public void entryRemoved(EntryEvent<K, V> event) { onEntryEvent(event); } @Override public void entryUpdated(EntryEvent<K, V> event) { onEntryEvent(event); } @Override public void entryEvicted(EntryEvent<K, V> event) { onEntryEvent(event); } /** * This method is called when an one of the methods of the {@link com.hazelcast.core.EntryListener} is not * overridden. It can be practical if you want to bundle some/all of the methods to a single method. * * @param event the EntryEvent. */ public void onEntryEvent(EntryEvent<K,V> event){ } }
target: 0 (true)
project: hazelcast_src_main_java_com_hazelcast_core_EntryAdapter.java
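
A sketch of the intended usage: subclass EntryAdapter and override only onEntryEvent to funnel every entry callback through one method (the HazelcastInstance and map name are assumptions for illustration):

```java
IMap<String, String> sessions = hazelcastInstance.getMap("sessions"); // hypothetical map
sessions.addEntryListener(new EntryAdapter<String, String>() {
    @Override
    public void onEntryEvent(EntryEvent<String, String> event) {
        // All of added/removed/updated/evicted land here.
        System.out.println(event.getEventType() + ": " + event.getKey());
    }
}, true); // include the value in delivered events
```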

Row 2,250
func:
public class CounterMetric implements Metric { private final LongAdder counter = new LongAdder(); public void inc() { counter.increment(); } public void inc(long n) { counter.add(n); } public void dec() { counter.decrement(); } public void dec(long n) { counter.add(-n); } public long count() { return counter.sum(); } }
target: 0 (true)
project: src_main_java_org_elasticsearch_common_metrics_CounterMetric.java
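
A brief usage sketch: because the class wraps LongAdder, inc() and dec() stay cheap under heavy contention, while count() returns a sum that may slightly lag concurrent updates (the variable names are illustrative):

```java
CounterMetric inFlight = new CounterMetric();
inFlight.inc();     // request arrives
inFlight.inc(4);    // four more arrive
inFlight.dec();     // one completes
long now = inFlight.count(); // 4
```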

Row 1,692
func:
public class OHttpRequest { public final OContextConfiguration configuration; public final InputStream in; public String authorization; public String sessionId; public String url; public Map<String, String> parameters; public String httpMethod; public String httpVersion; public String contentType; public String contentEncoding; public String content; public OHttpMultipartBaseInputStream multipartStream; public String boundary; public String databaseName; public boolean isMultipart; public String ifMatch; public String authentication; protected Map<String, String> headers; public final ONetworkProtocolData data; public final ONetworkProtocolHttpAbstract executor; public OHttpRequest(final ONetworkProtocolHttpAbstract iExecutor, final InputStream iInStream, final ONetworkProtocolData iData, final OContextConfiguration iConfiguration) { executor = iExecutor; in = iInStream; data = iData; configuration = iConfiguration; } public String getUser() { return authorization != null ? authorization.substring(0, authorization.indexOf(":")) : null; } public InputStream getInputStream() { return in; } public String getParameter(final String iName) { return parameters != null ? parameters.get(iName) : null; } public void addHeader(final String h) { if (headers == null) headers = new HashMap<String, String>(); final int pos = h.indexOf(':'); if (pos > -1) { headers.put(h.substring(0, pos).trim(), h.substring(pos + 1).trim()); } } public String getHeader(final String iName) { return headers.get(iName); } public Set<Entry<String, String>> getHeaders() { return headers.entrySet(); } public String getRemoteAddress() { if (data.caller != null) return data.caller; return ((InetSocketAddress) executor.channel.socket.getRemoteSocketAddress()).getAddress().getHostAddress(); } }
target: 1 (no label)
project: server_src_main_java_com_orientechnologies_orient_server_network_protocol_http_OHttpRequest.java

Row 3,368
func:
public abstract class PackedArrayAtomicFieldData extends AbstractAtomicNumericFieldData { public static PackedArrayAtomicFieldData empty(int numDocs) { return new Empty(numDocs); } private final int numDocs; protected long size = -1; public PackedArrayAtomicFieldData(int numDocs) { super(false); this.numDocs = numDocs; } @Override public void close() { } @Override public int getNumDocs() { return numDocs; } static class Empty extends PackedArrayAtomicFieldData { Empty(int numDocs) { super(numDocs); } @Override public LongValues getLongValues() { return LongValues.EMPTY; } @Override public DoubleValues getDoubleValues() { return DoubleValues.EMPTY; } @Override public boolean isMultiValued() { return false; } @Override public boolean isValuesOrdered() { return false; } @Override public long getMemorySizeInBytes() { return 0; } @Override public long getNumberUniqueValues() { return 0; } @Override public BytesValues getBytesValues(boolean needsHashes) { return BytesValues.EMPTY; } @Override public ScriptDocValues getScriptValues() { return ScriptDocValues.EMPTY; } } public static class WithOrdinals extends PackedArrayAtomicFieldData { private final MonotonicAppendingLongBuffer values; private final Ordinals ordinals; public WithOrdinals(MonotonicAppendingLongBuffer values, int numDocs, Ordinals ordinals) { super(numDocs); this.values = values; this.ordinals = ordinals; } @Override public boolean isMultiValued() { return ordinals.isMultiValued(); } @Override public boolean isValuesOrdered() { return true; } @Override public long getMemorySizeInBytes() { if (size == -1) { size = RamUsageEstimator.NUM_BYTES_INT/*size*/ + RamUsageEstimator.NUM_BYTES_INT/*numDocs*/ + values.ramBytesUsed() + ordinals.getMemorySizeInBytes(); } return size; } @Override public long getNumberUniqueValues() { return ordinals.getNumOrds(); } @Override public LongValues getLongValues() { return new LongValues(values, ordinals.ordinals()); } @Override public DoubleValues getDoubleValues() { return new DoubleValues(values, ordinals.ordinals()); } static class LongValues extends org.elasticsearch.index.fielddata.LongValues.WithOrdinals { private final MonotonicAppendingLongBuffer values; LongValues(MonotonicAppendingLongBuffer values, Ordinals.Docs ordinals) { super(ordinals); this.values = values; } @Override public long getValueByOrd(long ord) { assert ord != Ordinals.MISSING_ORDINAL; return values.get(ord - 1); } } static class DoubleValues extends org.elasticsearch.index.fielddata.DoubleValues.WithOrdinals { private final MonotonicAppendingLongBuffer values; DoubleValues(MonotonicAppendingLongBuffer values, Ordinals.Docs ordinals) { super(ordinals); this.values = values; } @Override public double getValueByOrd(long ord) { assert ord != Ordinals.MISSING_ORDINAL; return values.get(ord - 1); } } } /** * A single valued case, where not all values are "set", so we have a special * value which encodes the fact that the document has no value. 
*/ public static class SingleSparse extends PackedArrayAtomicFieldData { private final PackedInts.Mutable values; private final long minValue; private final long missingValue; private final long numOrds; public SingleSparse(PackedInts.Mutable values, long minValue, int numDocs, long missingValue, long numOrds) { super(numDocs); this.values = values; this.minValue = minValue; this.missingValue = missingValue; this.numOrds = numOrds; } @Override public boolean isMultiValued() { return false; } @Override public boolean isValuesOrdered() { return false; } @Override public long getNumberUniqueValues() { return numOrds; } @Override public long getMemorySizeInBytes() { if (size == -1) { size = values.ramBytesUsed() + 2 * RamUsageEstimator.NUM_BYTES_LONG; } return size; } @Override public LongValues getLongValues() { return new LongValues(values, minValue, missingValue); } @Override public DoubleValues getDoubleValues() { return new DoubleValues(values, minValue, missingValue); } static class LongValues extends org.elasticsearch.index.fielddata.LongValues { private final PackedInts.Mutable values; private final long minValue; private final long missingValue; LongValues(PackedInts.Mutable values, long minValue, long missingValue) { super(false); this.values = values; this.minValue = minValue; this.missingValue = missingValue; } @Override public int setDocument(int docId) { this.docId = docId; return values.get(docId) != missingValue ? 1 : 0; } @Override public long nextValue() { return minValue + values.get(docId); } } static class DoubleValues extends org.elasticsearch.index.fielddata.DoubleValues { private final PackedInts.Mutable values; private final long minValue; private final long missingValue; DoubleValues(PackedInts.Mutable values, long minValue, long missingValue) { super(false); this.values = values; this.minValue = minValue; this.missingValue = missingValue; } @Override public int setDocument(int docId) { this.docId = docId; return values.get(docId) != missingValue ? 1 : 0; } @Override public double nextValue() { return minValue + values.get(docId); } } } /** * Assumes all the values are "set", and docId is used as the index to the value array. */ public static class Single extends PackedArrayAtomicFieldData { private final PackedInts.Mutable values; private final long minValue; private final long numOrds; /** * Note, here, we assume that there is no offset by 1 from docId, so position 0 * is the value for docId 0. 
*/ public Single(PackedInts.Mutable values, long minValue, int numDocs, long numOrds) { super(numDocs); this.values = values; this.minValue = minValue; this.numOrds = numOrds; } @Override public boolean isMultiValued() { return false; } @Override public boolean isValuesOrdered() { return false; } @Override public long getNumberUniqueValues() { return numOrds; } @Override public long getMemorySizeInBytes() { if (size == -1) { size = values.ramBytesUsed(); } return size; } @Override public LongValues getLongValues() { return new LongValues(values, minValue); } @Override public DoubleValues getDoubleValues() { return new DoubleValues(values, minValue); } static class LongValues extends DenseLongValues { private final PackedInts.Mutable values; private final long minValue; LongValues(PackedInts.Mutable values, long minValue) { super(false); this.values = values; this.minValue = minValue; } @Override public long nextValue() { return minValue + values.get(docId); } } static class DoubleValues extends org.elasticsearch.index.fielddata.DoubleValues { private final PackedInts.Mutable values; private final long minValue; DoubleValues(PackedInts.Mutable values, long minValue) { super(false); this.values = values; this.minValue = minValue; } @Override public int setDocument(int docId) { this.docId = docId; return 1; } @Override public double nextValue() { return minValue + values.get(docId); } } } }
target: 1 (no label)
project: src_main_java_org_elasticsearch_index_fielddata_plain_PackedArrayAtomicFieldData.java

Row 490
func:
public class ODatabaseImport extends ODatabaseImpExpAbstract { public static final String EXPORT_IMPORT_MAP_NAME = "___exportImportRIDMap"; private Map<OPropertyImpl, String> linkedClasses = new HashMap<OPropertyImpl, String>(); private Map<OClass, String> superClasses = new HashMap<OClass, String>(); private OJSONReader jsonReader; private ORecordInternal<?> record; private boolean schemaImported = false; private int exporterVersion = -1; private ORID schemaRecordId; private ORID indexMgrRecordId; private boolean deleteRIDMapping = true; private OIndex<OIdentifiable> exportImportHashTable; private boolean preserveClusterIDs = true; private boolean migrateLinks = true; private boolean merge = false; private boolean rebuildIndexes = true; private Set<String> indexesToRebuild = new HashSet<String>(); public ODatabaseImport(final ODatabaseDocument database, final String iFileName, final OCommandOutputListener iListener) throws IOException { super(database, iFileName, iListener); InputStream inStream; final BufferedInputStream bf = new BufferedInputStream(new FileInputStream(fileName)); bf.mark(1024); try { inStream = new GZIPInputStream(bf, 16384); // 16KB } catch (Exception e) { bf.reset(); inStream = bf; } OMurmurHash3HashFunction<OIdentifiable> keyHashFunction = new OMurmurHash3HashFunction<OIdentifiable>(); keyHashFunction.setValueSerializer(OLinkSerializer.INSTANCE); jsonReader = new OJSONReader(new InputStreamReader(inStream)); database.declareIntent(new OIntentMassiveInsert()); } public ODatabaseImport(final ODatabaseDocument database, final InputStream iStream, final OCommandOutputListener iListener) throws IOException { super(database, "streaming", iListener); jsonReader = new OJSONReader(new InputStreamReader(iStream)); database.declareIntent(new OIntentMassiveInsert()); } @Override public ODatabaseImport setOptions(String iOptions) { super.setOptions(iOptions); return this; } @Override protected void parseSetting(final String option, final List<String> items) { if (option.equalsIgnoreCase("-deleteRIDMapping")) deleteRIDMapping = Boolean.parseBoolean(items.get(0)); else if (option.equalsIgnoreCase("-preserveClusterIDs")) preserveClusterIDs = Boolean.parseBoolean(items.get(0)); else if (option.equalsIgnoreCase("-merge")) merge = Boolean.parseBoolean(items.get(0)); else if (option.equalsIgnoreCase("-migrateLinks")) migrateLinks = Boolean.parseBoolean(items.get(0)); else if (option.equalsIgnoreCase("-rebuildIndexes")) rebuildIndexes = Boolean.parseBoolean(items.get(0)); else super.parseSetting(option, items); } public ODatabaseImport importDatabase() { try { listener.onMessage("\nStarted import of database '" + database.getURL() + "' from " + fileName + "..."); long time = System.currentTimeMillis(); jsonReader.readNext(OJSONReader.BEGIN_OBJECT); database.getLevel1Cache().setEnable(false); database.getLevel2Cache().setEnable(false); database.setMVCC(false); database.setValidationEnabled(false); database.setStatus(STATUS.IMPORTING); for (OIndex<?> index : database.getMetadata().getIndexManager().getIndexes()) { if (index.isAutomatic()) indexesToRebuild.add(index.getName().toLowerCase()); } if (!merge) removeDefaultNonSecurityClasses(); String tag; while (jsonReader.hasNext() && jsonReader.lastChar() != '}') { tag = jsonReader.readString(OJSONReader.FIELD_ASSIGNMENT); if (tag.equals("info")) importInfo(); else if (tag.equals("clusters")) importClusters(); else if (tag.equals("schema")) importSchema(); else if (tag.equals("records")) importRecords(); else if (tag.equals("indexes")) 
importIndexes(); else if (tag.equals("manualIndexes")) importManualIndexes(); } if (rebuildIndexes) rebuildIndexes(); database.getStorage().synch(); database.setStatus(STATUS.OPEN); if (isDeleteRIDMapping()) removeExportImportRIDsMap(); listener.onMessage("\n\nDatabase import completed in " + ((System.currentTimeMillis() - time)) + " ms"); } catch (Exception e) { System.err.println("Error on database import happened just before line " + jsonReader.getLineNumber() + ", column " + jsonReader.getColumnNumber()); e.printStackTrace(); throw new ODatabaseExportException("Error on importing database '" + database.getName() + "' from file: " + fileName, e); } finally { close(); } return this; } public void rebuildIndexes() { listener.onMessage("\nRebuild of stale indexes..."); for (String indexName : indexesToRebuild) { listener.onMessage("\nStart rebuild index " + indexName); database.command(new OCommandSQL("rebuild index " + indexName)).execute(); listener.onMessage("\nRebuild of index " + indexName + " is completed."); } listener.onMessage("\nStale indexes were rebuilt..."); } private void removeDefaultNonSecurityClasses() { listener.onMessage("\nNon merge mode (-merge=false): removing all default non security classes"); OSchema schema = database.getMetadata().getSchema(); Collection<OClass> classes = schema.getClasses(); final AbstractList<String> classesSortedByInheritance = new ArrayList<String>(); for (OClass dbClass : classes) { classesSortedByInheritance.add(dbClass.getName()); } for (OClass dbClass : classes) { OClass parentClass = dbClass.getSuperClass(); if (parentClass != null) { classesSortedByInheritance.remove(dbClass.getName()); final int parentIndex = classesSortedByInheritance.indexOf(parentClass.getName()); classesSortedByInheritance.add(parentIndex, dbClass.getName()); } } int removedClasses = 0; for (String className : classesSortedByInheritance) { if (!className.equalsIgnoreCase(ORole.CLASS_NAME) && !className.equalsIgnoreCase(OUser.CLASS_NAME) && !className.equalsIgnoreCase(OSecurityShared.IDENTITY_CLASSNAME)) { schema.dropClass(className); removedClasses++; listener.onMessage("\n- Class " + className + " was removed."); } } schema.save(); schema.reload(); listener.onMessage("\nRemoved " + removedClasses + " classes."); } private void importInfo() throws IOException, ParseException { listener.onMessage("\nImporting database info..."); jsonReader.readNext(OJSONReader.BEGIN_OBJECT); while (jsonReader.lastChar() != '}') { final String fieldName = jsonReader.readString(OJSONReader.FIELD_ASSIGNMENT); if (fieldName.equals("exporter-version")) exporterVersion = jsonReader.readInteger(OJSONReader.NEXT_IN_OBJECT); else if (fieldName.equals("schemaRecordId")) schemaRecordId = new ORecordId(jsonReader.readString(OJSONReader.NEXT_IN_OBJECT)); else if (fieldName.equals("indexMgrRecordId")) indexMgrRecordId = new ORecordId(jsonReader.readString(OJSONReader.NEXT_IN_OBJECT)); else jsonReader.readNext(OJSONReader.NEXT_IN_OBJECT); } jsonReader.readNext(OJSONReader.COMMA_SEPARATOR); if (schemaRecordId == null) schemaRecordId = new ORecordId(database.getStorage().getConfiguration().schemaRecordId); if (indexMgrRecordId == null) indexMgrRecordId = new ORecordId(database.getStorage().getConfiguration().indexMgrRecordId); listener.onMessage("OK"); } private void importManualIndexes() throws IOException, ParseException { listener.onMessage("\nImporting manual index entries..."); ODocument doc = new ODocument(); OIndexManagerProxy indexManager = database.getMetadata().getIndexManager(); // FORCE 
RELOADING indexManager.reload(); int n = 0; do { jsonReader.readNext(OJSONReader.BEGIN_OBJECT); jsonReader.readString(OJSONReader.FIELD_ASSIGNMENT); final String indexName = jsonReader.readString(OJSONReader.NEXT_IN_ARRAY); if (indexName == null || indexName.length() == 0) return; listener.onMessage("\n- Index '" + indexName + "'..."); final OIndex<?> index = database.getMetadata().getIndexManager().getIndex(indexName); long tot = 0; jsonReader.readNext(OJSONReader.BEGIN_COLLECTION); do { final String value = jsonReader.readString(OJSONReader.NEXT_IN_ARRAY).trim(); if (!value.isEmpty() && !indexName.equalsIgnoreCase(EXPORT_IMPORT_MAP_NAME)) { doc = (ODocument) ORecordSerializerJSON.INSTANCE.fromString(value, doc, null); doc.setLazyLoad(false); final OIdentifiable oldRid = doc.<OIdentifiable> field("rid"); final OIdentifiable newRid; if (!doc.<Boolean> field("binary")) { if (exportImportHashTable != null) newRid = exportImportHashTable.get(oldRid); else newRid = oldRid; index.put(doc.field("key"), newRid != null ? newRid.getIdentity() : oldRid.getIdentity()); } else { ORuntimeKeyIndexDefinition<?> runtimeKeyIndexDefinition = (ORuntimeKeyIndexDefinition<?>) index.getDefinition(); OBinarySerializer<?> binarySerializer = runtimeKeyIndexDefinition.getSerializer(); if (exportImportHashTable != null) newRid = exportImportHashTable.get(doc.<OIdentifiable> field("rid")).getIdentity(); else newRid = doc.<OIdentifiable> field("rid"); index.put(binarySerializer.deserialize(doc.<byte[]> field("key"), 0), newRid != null ? newRid : oldRid); } tot++; } } while (jsonReader.lastChar() == ','); if (index != null) { listener.onMessage("OK (" + tot + " entries)"); n++; } else listener.onMessage("ERR, the index wasn't found in configuration"); jsonReader.readNext(OJSONReader.END_OBJECT); jsonReader.readNext(OJSONReader.NEXT_IN_ARRAY); } while (jsonReader.lastChar() == ','); listener.onMessage("\nDone. 
Imported " + n + " indexes."); jsonReader.readNext(OJSONReader.NEXT_IN_OBJECT); } private void importSchema() throws IOException, ParseException { listener.onMessage("\nImporting database schema..."); jsonReader.readNext(OJSONReader.BEGIN_OBJECT); @SuppressWarnings("unused") int schemaVersion = jsonReader.readNext(OJSONReader.FIELD_ASSIGNMENT).checkContent("\"version\"") .readNumber(OJSONReader.ANY_NUMBER, true); jsonReader.readNext(OJSONReader.COMMA_SEPARATOR).readNext(OJSONReader.FIELD_ASSIGNMENT).checkContent("\"classes\"") .readNext(OJSONReader.BEGIN_COLLECTION); long classImported = 0; try { do { jsonReader.readNext(OJSONReader.BEGIN_OBJECT); final String className = jsonReader.readNext(OJSONReader.FIELD_ASSIGNMENT).checkContent("\"name\"") .readString(OJSONReader.COMMA_SEPARATOR); String next = jsonReader.readNext(OJSONReader.FIELD_ASSIGNMENT).getValue(); if (next.equals("\"id\"")) { // @COMPATIBILITY 1.0rc4 IGNORE THE ID next = jsonReader.readString(OJSONReader.COMMA_SEPARATOR); next = jsonReader.readNext(OJSONReader.FIELD_ASSIGNMENT).getValue(); } final int classDefClusterId; if (jsonReader.isContent("\"default-cluster-id\"")) { next = jsonReader.readString(OJSONReader.NEXT_IN_OBJECT); classDefClusterId = Integer.parseInt(next); } else classDefClusterId = database.getDefaultClusterId(); String classClusterIds = jsonReader.readNext(OJSONReader.FIELD_ASSIGNMENT).checkContent("\"cluster-ids\"") .readString(OJSONReader.END_COLLECTION, true).trim(); jsonReader.readNext(OJSONReader.NEXT_IN_OBJECT); OClassImpl cls = (OClassImpl) database.getMetadata().getSchema().getClass(className); if (cls != null) { if (cls.getDefaultClusterId() != classDefClusterId) cls.setDefaultClusterId(classDefClusterId); } else cls = (OClassImpl) database.getMetadata().getSchema().createClass(className, classDefClusterId); if (classClusterIds != null) { // REMOVE BRACES classClusterIds = classClusterIds.substring(1, classClusterIds.length() - 1); // ASSIGN OTHER CLUSTER IDS for (int i : OStringSerializerHelper.splitIntArray(classClusterIds)) { if (i != -1) cls.addClusterId(i); } } String value; while (jsonReader.lastChar() == ',') { jsonReader.readNext(OJSONReader.FIELD_ASSIGNMENT); value = jsonReader.getValue(); if (value.equals("\"strictMode\"")) { cls.setStrictMode(jsonReader.readBoolean(OJSONReader.NEXT_IN_OBJECT)); } else if (value.equals("\"abstract\"")) { cls.setAbstract(jsonReader.readBoolean(OJSONReader.NEXT_IN_OBJECT)); } else if (value.equals("\"oversize\"")) { final String oversize = jsonReader.readString(OJSONReader.NEXT_IN_OBJECT); cls.setOverSize(Float.parseFloat(oversize)); } else if (value.equals("\"short-name\"")) { final String shortName = jsonReader.readString(OJSONReader.NEXT_IN_OBJECT); cls.setShortName(shortName); } else if (value.equals("\"super-class\"")) { final String classSuper = jsonReader.readString(OJSONReader.NEXT_IN_OBJECT); superClasses.put(cls, classSuper); } else if (value.equals("\"properties\"")) { // GET PROPERTIES jsonReader.readNext(OJSONReader.BEGIN_COLLECTION); while (jsonReader.lastChar() != ']') { importProperty(cls); if (jsonReader.lastChar() == '}') jsonReader.readNext(OJSONReader.NEXT_IN_ARRAY); } jsonReader.readNext(OJSONReader.END_OBJECT); } } classImported++; jsonReader.readNext(OJSONReader.NEXT_IN_ARRAY); } while (jsonReader.lastChar() == ','); // REBUILD ALL THE INHERITANCE for (Map.Entry<OClass, String> entry : superClasses.entrySet()) entry.getKey().setSuperClass(database.getMetadata().getSchema().getClass(entry.getValue())); // SET ALL THE LINKED CLASSES for 
(Map.Entry<OPropertyImpl, String> entry : linkedClasses.entrySet()) { entry.getKey().setLinkedClass(database.getMetadata().getSchema().getClass(entry.getValue())); } database.getMetadata().getSchema().save(); listener.onMessage("OK (" + classImported + " classes)"); schemaImported = true; jsonReader.readNext(OJSONReader.END_OBJECT); jsonReader.readNext(OJSONReader.COMMA_SEPARATOR); } catch (Exception e) { e.printStackTrace(); listener.onMessage("ERROR (" + classImported + " entries): " + e); } } private void importProperty(final OClass iClass) throws IOException, ParseException { jsonReader.readNext(OJSONReader.NEXT_OBJ_IN_ARRAY); if (jsonReader.lastChar() == ']') return; final String propName = jsonReader.readNext(OJSONReader.FIELD_ASSIGNMENT).checkContent("\"name\"") .readString(OJSONReader.COMMA_SEPARATOR); String next = jsonReader.readNext(OJSONReader.FIELD_ASSIGNMENT).getValue(); if (next.equals("\"id\"")) { // @COMPATIBILITY 1.0rc4 IGNORE THE ID next = jsonReader.readString(OJSONReader.COMMA_SEPARATOR); next = jsonReader.readNext(OJSONReader.FIELD_ASSIGNMENT).getValue(); } next = jsonReader.checkContent("\"type\"").readString(OJSONReader.NEXT_IN_OBJECT); final OType type = OType.valueOf(next); String attrib; String value = null; String min = null; String max = null; String linkedClass = null; OType linkedType = null; boolean mandatory = false; boolean readonly = false; boolean notNull = false; Map<String, String> customFields = null; while (jsonReader.lastChar() == ',') { jsonReader.readNext(OJSONReader.FIELD_ASSIGNMENT); attrib = jsonReader.getValue(); if (!attrib.equals("\"customFields\"")) value = jsonReader.readString(OJSONReader.NEXT_IN_OBJECT); if (attrib.equals("\"min\"")) min = value; else if (attrib.equals("\"max\"")) max = value; else if (attrib.equals("\"linked-class\"")) linkedClass = value; else if (attrib.equals("\"mandatory\"")) mandatory = Boolean.parseBoolean(value); else if (attrib.equals("\"readonly\"")) readonly = Boolean.parseBoolean(value); else if (attrib.equals("\"not-null\"")) notNull = Boolean.parseBoolean(value); else if (attrib.equals("\"linked-type\"")) linkedType = OType.valueOf(value); else if (attrib.equals("\"customFields\"")) customFields = importCustomFields(); } OPropertyImpl prop = (OPropertyImpl) iClass.getProperty(propName); if (prop == null) // CREATE IT prop = (OPropertyImpl) iClass.createProperty(propName, type); prop.setMandatory(mandatory); prop.setReadonly(readonly); prop.setNotNull(notNull); if (min != null) prop.setMin(min); if (max != null) prop.setMax(max); if (linkedClass != null) linkedClasses.put(prop, linkedClass); if (linkedType != null) prop.setLinkedType(linkedType); if (customFields != null) { for (Map.Entry<String, String> entry : customFields.entrySet()) { prop.setCustom(entry.getKey(), entry.getValue()); } } } private Map<String, String> importCustomFields() throws ParseException, IOException { Map<String, String> result = new HashMap<String, String>(); jsonReader.readNext(OJSONReader.BEGIN_OBJECT); while (jsonReader.lastChar() != '}') { final String key = jsonReader.readString(OJSONReader.FIELD_ASSIGNMENT); final String value = jsonReader.readString(OJSONReader.NEXT_IN_OBJECT); result.put(key, value); } jsonReader.readString(OJSONReader.NEXT_IN_OBJECT); return result; } private long importClusters() throws ParseException, IOException { listener.onMessage("\nImporting clusters..."); long total = 0; jsonReader.readNext(OJSONReader.BEGIN_COLLECTION); boolean makeFullCheckPointAfterClusterCreation = false; if 
(database.getStorage() instanceof OLocalPaginatedStorage) { makeFullCheckPointAfterClusterCreation = ((OLocalPaginatedStorage) database.getStorage()) .isMakeFullCheckPointAfterClusterCreate(); ((OLocalPaginatedStorage) database.getStorage()).disableFullCheckPointAfterClusterCreate(); } boolean recreateManualIndex = false; if (exporterVersion <= 4) { removeDefaultClusters(); recreateManualIndex = true; } final Set<String> indexesToRebuild = new HashSet<String>(); @SuppressWarnings("unused") ORecordId rid = null; while (jsonReader.lastChar() != ']') { jsonReader.readNext(OJSONReader.BEGIN_OBJECT); String name = jsonReader.readNext(OJSONReader.FIELD_ASSIGNMENT).checkContent("\"name\"") .readString(OJSONReader.COMMA_SEPARATOR); if (name.length() == 0) name = null; if (name != null) // CHECK IF THE CLUSTER IS INCLUDED if (includeClusters != null) { if (!includeClusters.contains(name)) { jsonReader.readNext(OJSONReader.NEXT_IN_ARRAY); continue; } } else if (excludeClusters != null) { if (excludeClusters.contains(name)) { jsonReader.readNext(OJSONReader.NEXT_IN_ARRAY); continue; } } int id = jsonReader.readNext(OJSONReader.FIELD_ASSIGNMENT).checkContent("\"id\"").readInteger(OJSONReader.COMMA_SEPARATOR); String type = jsonReader.readNext(OJSONReader.FIELD_ASSIGNMENT).checkContent("\"type\"") .readString(OJSONReader.NEXT_IN_OBJECT); if (jsonReader.lastChar() == ',') { rid = new ORecordId(jsonReader.readNext(OJSONReader.FIELD_ASSIGNMENT).checkContent("\"rid\"") .readString(OJSONReader.NEXT_IN_OBJECT)); } else rid = null; listener.onMessage("\n- Creating cluster " + (name != null ? "'" + name + "'" : "NULL") + "..."); int clusterId = name != null ? database.getClusterIdByName(name) : -1; if (clusterId == -1) { // CREATE IT if (!preserveClusterIDs) clusterId = database.addCluster(type, name, null, null); else { clusterId = database.addCluster(type, name, id, null, null); assert clusterId == id; } } if (clusterId != id) { if (!preserveClusterIDs) { if (database.countClusterElements(clusterId - 1) == 0) { listener.onMessage("Found previous version: migrating old clusters..."); database.dropCluster(name, true); database.addCluster(type, "temp_" + clusterId, null, null); clusterId = database.addCluster(type, name, null, null); } else throw new OConfigurationException("Imported cluster '" + name + "' has id=" + clusterId + " different from the original: " + id + ". 
To continue the import drop the cluster '" + database.getClusterNameById(clusterId - 1) + "' that has " + database.countClusterElements(clusterId - 1) + " records"); } else { database.dropCluster(clusterId, false); database.addCluster(type, name, id, null, null); } } if (name != null && !(name.equalsIgnoreCase(OMetadataDefault.CLUSTER_MANUAL_INDEX_NAME) || name.equalsIgnoreCase(OMetadataDefault.CLUSTER_INTERNAL_NAME) || name .equalsIgnoreCase(OMetadataDefault.CLUSTER_INDEX_NAME))) { database.command(new OCommandSQL("truncate cluster " + name)).execute(); for (OIndex existingIndex : database.getMetadata().getIndexManager().getIndexes()) { if (existingIndex.getClusters().contains(name)) { indexesToRebuild.add(existingIndex.getName()); } } } listener.onMessage("OK, assigned id=" + clusterId); total++; jsonReader.readNext(OJSONReader.NEXT_IN_ARRAY); } jsonReader.readNext(OJSONReader.COMMA_SEPARATOR); listener.onMessage("\nRebuilding indexes of truncated clusters ..."); for (final String indexName : indexesToRebuild) database.getMetadata().getIndexManager().getIndex(indexName).rebuild(new OProgressListener() { @Override public void onBegin(Object iTask, long iTotal) { listener.onMessage("\nCluster content was truncated and index " + indexName + " will be rebuilt"); } @Override public boolean onProgress(Object iTask, long iCounter, float iPercent) { listener.onMessage(String.format("\nIndex %s is rebuilt on %f percent", indexName, iPercent)); return true; } @Override public void onCompletition(Object iTask, boolean iSucceed) { listener.onMessage("\nIndex " + indexName + " was successfully rebuilt."); } }); listener.onMessage("\nDone " + indexesToRebuild.size() + " indexes were rebuilt."); if (recreateManualIndex) { database.addCluster(OStorage.CLUSTER_TYPE.PHYSICAL.toString(), OMetadataDefault.CLUSTER_MANUAL_INDEX_NAME, null, null); database.getMetadata().getIndexManager().create(); listener.onMessage("\nManual index cluster was recreated."); } listener.onMessage("\nDone. Imported " + total + " clusters"); if (database.load(new ORecordId(database.getStorage().getConfiguration().indexMgrRecordId)) == null) { ODocument indexDocument = new ODocument(); indexDocument.save(OMetadataDefault.CLUSTER_INTERNAL_NAME); database.getStorage().getConfiguration().indexMgrRecordId = indexDocument.getIdentity().toString(); database.getStorage().getConfiguration().update(); } if (database.getStorage() instanceof OLocalPaginatedStorage && makeFullCheckPointAfterClusterCreation) ((OLocalPaginatedStorage) database.getStorage()).enableFullCheckPointAfterClusterCreate(); return total; } protected void removeDefaultClusters() { listener.onMessage("\nWARN: Exported database does not support manual index separation." + " Manual index cluster will be dropped."); // In v4 new cluster for manual indexes has been implemented. To keep database consistent we should shift back // all clusters and recreate cluster for manual indexes in the end. 
database.dropCluster(OMetadataDefault.CLUSTER_MANUAL_INDEX_NAME, true); final OSchema schema = database.getMetadata().getSchema(); if (schema.existsClass(OUser.CLASS_NAME)) schema.dropClass(OUser.CLASS_NAME); if (schema.existsClass(ORole.CLASS_NAME)) schema.dropClass(ORole.CLASS_NAME); if (schema.existsClass(OSecurityShared.RESTRICTED_CLASSNAME)) schema.dropClass(OSecurityShared.RESTRICTED_CLASSNAME); if (schema.existsClass(OFunction.CLASS_NAME)) schema.dropClass(OFunction.CLASS_NAME); if (schema.existsClass(OMVRBTreeRIDProvider.PERSISTENT_CLASS_NAME)) schema.dropClass(OMVRBTreeRIDProvider.PERSISTENT_CLASS_NAME); if (schema.existsClass(OClassTrigger.CLASSNAME)) schema.dropClass(OClassTrigger.CLASSNAME); schema.save(); database.dropCluster(OStorage.CLUSTER_DEFAULT_NAME, true); database.getStorage().setDefaultClusterId( database.addCluster(OStorage.CLUSTER_TYPE.PHYSICAL.toString(), OStorage.CLUSTER_DEFAULT_NAME, null, null)); // Starting from v4 schema has been moved to internal cluster. // Create a stub at #2:0 to prevent cluster position shifting. new ODocument().save(OStorage.CLUSTER_DEFAULT_NAME); database.getMetadata().getSecurity().create(); } private long importRecords() throws Exception { long total = 0; database.getMetadata().getIndexManager().dropIndex(EXPORT_IMPORT_MAP_NAME); exportImportHashTable = (OIndex<OIdentifiable>) database .getMetadata() .getIndexManager() .createIndex(EXPORT_IMPORT_MAP_NAME, OClass.INDEX_TYPE.DICTIONARY_HASH_INDEX.toString(), new OSimpleKeyIndexDefinition(OType.LINK), null, null); jsonReader.readNext(OJSONReader.BEGIN_COLLECTION); long totalRecords = 0; System.out.print("\nImporting records..."); ORID rid; int lastClusterId = -1; long clusterRecords = 0; while (jsonReader.lastChar() != ']') { rid = importRecord(); if (rid != null) { ++clusterRecords; if (lastClusterId == -1) { lastClusterId = rid.getClusterId(); // CHANGED CLUSTERID: DUMP STATISTICS System.out.print("\n- Importing records into cluster '" + database.getClusterNameById(lastClusterId) + "' (id=" + lastClusterId + "): "); } else if (rid.getClusterId() != lastClusterId || jsonReader.lastChar() == ']') { // CHANGED CLUSTERID: DUMP STATISTICS System.out.print(" = " + clusterRecords + " records"); clusterRecords = 0; lastClusterId = rid.getClusterId(); System.out.print("\n- Importing records into cluster '" + database.getClusterNameById(lastClusterId) + "' (id=" + lastClusterId + "): "); } else if (clusterRecords % 10000 == 0) // DUMP PROGRESS System.out.print("."); ++totalRecords; } record = null; } if (migrateLinks) migrateLinksInImportedDocuments(); listener.onMessage("\n\nDone. 
Imported " + totalRecords + " records\n"); jsonReader.readNext(OJSONReader.COMMA_SEPARATOR); return total; } private ORID importRecord() throws Exception { String value = jsonReader.readString(OJSONReader.END_OBJECT, true); // JUMP EMPTY RECORDS while (!value.isEmpty() && value.charAt(0) != '{') { value = value.substring(1); } record = null; try { record = ORecordSerializerJSON.INSTANCE.fromString(value, record, null); if (schemaImported && record.getIdentity().equals(schemaRecordId)) { // JUMP THE SCHEMA return null; } // CHECK IF THE CLUSTER IS INCLUDED if (includeClusters != null) { if (!includeClusters.contains(database.getClusterNameById(record.getIdentity().getClusterId()))) { jsonReader.readNext(OJSONReader.NEXT_IN_ARRAY); return null; } } else if (excludeClusters != null) { if (excludeClusters.contains(database.getClusterNameById(record.getIdentity().getClusterId()))) return null; } if (record.getIdentity().getClusterId() == 0 && record.getIdentity().getClusterPosition().longValue() == 1) // JUMP INTERNAL RECORDS return null; if (exporterVersion >= 3) { int oridsId = database.getClusterIdByName(OMVRBTreeRIDProvider.PERSISTENT_CLASS_NAME); int indexId = database.getClusterIdByName(OMetadataDefault.CLUSTER_INDEX_NAME); if (record.getIdentity().getClusterId() == indexId || record.getIdentity().getClusterId() == oridsId) // JUMP INDEX RECORDS return null; } final int manualIndexCluster = database.getClusterIdByName(OMetadataDefault.CLUSTER_MANUAL_INDEX_NAME); final int internalCluster = database.getClusterIdByName(OMetadataDefault.CLUSTER_INTERNAL_NAME); final int indexCluster = database.getClusterIdByName(OMetadataDefault.CLUSTER_INDEX_NAME); if (exporterVersion >= 4) { if (record.getIdentity().getClusterId() == manualIndexCluster) // JUMP INDEX RECORDS return null; } if (record.getIdentity().equals(indexMgrRecordId)) return null; final ORID rid = record.getIdentity(); final int clusterId = rid.getClusterId(); if ((clusterId != manualIndexCluster && clusterId != internalCluster && clusterId != indexCluster)) { record.getRecordVersion().copyFrom(OVersionFactory.instance().createVersion()); record.setDirty(); record.setIdentity(new ORecordId()); if (!preserveRids && record instanceof ODocument && ((ODocument) record).getSchemaClass() != null) record.save(); else record.save(database.getClusterNameById(clusterId)); if (!rid.equals(record.getIdentity())) // SAVE IT ONLY IF DIFFERENT exportImportHashTable.put(rid, record.getIdentity()); } } catch (Exception t) { if (record != null) System.err.println("Error importing record " + record.getIdentity() + ". Source line " + jsonReader.getLineNumber() + ", column " + jsonReader.getColumnNumber()); else System.err.println("Error importing record. 
Source line " + jsonReader.getLineNumber() + ", column " + jsonReader.getColumnNumber()); throw t; } finally { jsonReader.readNext(OJSONReader.NEXT_IN_ARRAY); } return record.getIdentity(); } private void importIndexes() throws IOException, ParseException { listener.onMessage("\nImporting indexes ..."); OIndexManagerProxy indexManager = database.getMetadata().getIndexManager(); indexManager.reload(); jsonReader.readNext(OJSONReader.BEGIN_COLLECTION); int n = 0; while (jsonReader.lastChar() != ']') { jsonReader.readNext(OJSONReader.BEGIN_OBJECT); String indexName = null; String indexType = null; Set<String> clustersToIndex = new HashSet<String>(); OIndexDefinition indexDefinition = null; while (jsonReader.lastChar() != '}') { final String fieldName = jsonReader.readString(OJSONReader.FIELD_ASSIGNMENT); if (fieldName.equals("name")) indexName = jsonReader.readString(OJSONReader.NEXT_IN_OBJECT); else if (fieldName.equals("type")) indexType = jsonReader.readString(OJSONReader.NEXT_IN_OBJECT); else if (fieldName.equals("clustersToIndex")) clustersToIndex = importClustersToIndex(); else if (fieldName.equals("definition")) indexDefinition = importIndexDefinition(); } jsonReader.readNext(OJSONReader.NEXT_IN_ARRAY); // drop automatically created indexes if (!indexName.equalsIgnoreCase(EXPORT_IMPORT_MAP_NAME)) { listener.onMessage("\n- Index '" + indexName + "'..."); indexManager.dropIndex(indexName); indexesToRebuild.remove(indexName.toLowerCase()); int[] clusterIdsToIndex = new int[clustersToIndex.size()]; int i = 0; for (final String clusterName : clustersToIndex) { clusterIdsToIndex[i] = database.getClusterIdByName(clusterName); i++; } indexManager.createIndex(indexName, indexType, indexDefinition, clusterIdsToIndex, null); n++; listener.onMessage("OK"); } } listener.onMessage("\nDone. 
Created " + n + " indexes."); jsonReader.readNext(OJSONReader.NEXT_IN_OBJECT); } private Set<String> importClustersToIndex() throws IOException, ParseException { final Set<String> clustersToIndex = new HashSet<String>(); jsonReader.readNext(OJSONReader.BEGIN_COLLECTION); while (jsonReader.lastChar() != ']') { final String clusterToIndex = jsonReader.readString(OJSONReader.NEXT_IN_ARRAY); clustersToIndex.add(clusterToIndex); } jsonReader.readString(OJSONReader.NEXT_IN_OBJECT); return clustersToIndex; } private OIndexDefinition importIndexDefinition() throws IOException, ParseException { jsonReader.readString(OJSONReader.BEGIN_OBJECT); jsonReader.readNext(OJSONReader.FIELD_ASSIGNMENT); final String className = jsonReader.readString(OJSONReader.NEXT_IN_OBJECT); jsonReader.readNext(OJSONReader.FIELD_ASSIGNMENT); final String value = jsonReader.readString(OJSONReader.END_OBJECT, true); final OIndexDefinition indexDefinition; final ODocument indexDefinitionDoc = (ODocument) ORecordSerializerJSON.INSTANCE.fromString(value, null, null); try { final Class<?> indexDefClass = Class.forName(className); indexDefinition = (OIndexDefinition) indexDefClass.getDeclaredConstructor().newInstance(); indexDefinition.fromStream(indexDefinitionDoc); } catch (final ClassNotFoundException e) { throw new IOException("Error during deserialization of index definition", e); } catch (final NoSuchMethodException e) { throw new IOException("Error during deserialization of index definition", e); } catch (final InvocationTargetException e) { throw new IOException("Error during deserialization of index definition", e); } catch (final InstantiationException e) { throw new IOException("Error during deserialization of index definition", e); } catch (final IllegalAccessException e) { throw new IOException("Error during deserialization of index definition", e); } jsonReader.readString(OJSONReader.NEXT_IN_OBJECT); return indexDefinition; } private void migrateLinksInImportedDocuments() throws IOException { listener.onMessage("\nStarted migration of links (-migrateLinks=true). 
Links are going to be updated according to new RIDs:"); long totalDocuments = 0; Collection<String> clusterNames = database.getClusterNames(); for (String clusterName : clusterNames) { if (OMetadataDefault.CLUSTER_INDEX_NAME.equals(clusterName) || OMetadataDefault.CLUSTER_INTERNAL_NAME.equals(clusterName) || OMetadataDefault.CLUSTER_MANUAL_INDEX_NAME.equals(clusterName)) continue; long documents = 0; listener.onMessage("\n- Cluster " + clusterName + "..."); int clusterId = database.getClusterIdByName(clusterName); OStorage storage = database.getStorage(); OPhysicalPosition[] positions = storage.ceilingPhysicalPositions(clusterId, new OPhysicalPosition( OClusterPositionFactory.INSTANCE.valueOf(0))); while (positions.length > 0) { for (OPhysicalPosition position : positions) { ORecord<?> record = database.load(new ORecordId(clusterId, position.clusterPosition)); if (record instanceof ODocument) { ODocument document = (ODocument) record; rewriteLinksInDocument(document); documents++; totalDocuments++; if (documents % 10000 == 0) listener.onMessage("\n" + documents + " documents were processed..."); } } positions = storage.higherPhysicalPositions(clusterId, positions[positions.length - 1]); } listener.onMessage(" Processed: " + documents); } listener.onMessage("\nTotal links updated: " + totalDocuments); } private void rewriteLinksInDocument(ODocument document) { RewritersFactory.INSTANCE.setExportImportHashTable(exportImportHashTable); DocumentRewriter documentRewriter = new DocumentRewriter(); document = documentRewriter.rewriteValue(document); if (document != null) document.save(); } public ODatabaseImport removeExportImportRIDsMap() { listener.onMessage("\nDeleting RID Mapping table..."); if (exportImportHashTable != null) { database.command(new OCommandSQL("drop index " + EXPORT_IMPORT_MAP_NAME)); exportImportHashTable = null; } listener.onMessage("OK\n"); return this; } public void close() { database.declareIntent(null); } public boolean isDeleteRIDMapping() { return deleteRIDMapping; } public void setDeleteRIDMapping(boolean deleteRIDMapping) { this.deleteRIDMapping = deleteRIDMapping; } public void setPreserveClusterIDs(boolean preserveClusterIDs) { this.preserveClusterIDs = preserveClusterIDs; } private static class RewritersFactory { public static final RewritersFactory INSTANCE = new RewritersFactory(); private OIndex<OIdentifiable> exportImportHashTable; @SuppressWarnings("unchecked") public <T> FieldRewriter<T> findRewriter(ODocument document, String fieldName, T value) { if (value == null) return new IdentityRewriter<T>(); OType fieldType = null; if (document != null) { OClass docClass = document.getSchemaClass(); if (docClass != null) { OProperty property = docClass.getProperty(fieldName); if (property != null) fieldType = property.getType(); } else { fieldType = document.fieldType(fieldName); } } if (fieldType == null) { if (value instanceof ODocument) return (FieldRewriter<T>) new DocumentRewriter(); else if (value instanceof List) return (FieldRewriter<T>) new ListRewriter(); else if (value instanceof Map) return (FieldRewriter<T>) new MapRewriter(); else if (value instanceof OMVRBTreeRIDSet) return (FieldRewriter<T>) new LinkSetRewriter(); else if (value instanceof ORID) return (FieldRewriter<T>) new LinkRewriter(exportImportHashTable); else if (value instanceof Set) return (FieldRewriter<T>) new SetRewriter(); else return new IdentityRewriter<T>(); } switch (fieldType) { case EMBEDDED: return (FieldRewriter<T>) new DocumentRewriter(); case LINKLIST: return 
(FieldRewriter<T>) new ListRewriter(); case LINKMAP: return (FieldRewriter<T>) new MapRewriter(); case LINKSET: return (FieldRewriter<T>) new LinkSetRewriter(); case LINK: return (FieldRewriter<T>) new LinkRewriter(exportImportHashTable); case EMBEDDEDLIST: return (FieldRewriter<T>) new ListRewriter(); case EMBEDDEDMAP: return (FieldRewriter<T>) new MapRewriter(); case EMBEDDEDSET: return (FieldRewriter<T>) new SetRewriter(); } return new IdentityRewriter<T>(); } private void setExportImportHashTable(OIndex<OIdentifiable> exportImportHashTable) { this.exportImportHashTable = exportImportHashTable; } } private interface FieldRewriter<T> { T rewriteValue(T value); } private static class IdentityRewriter<T> implements FieldRewriter<T> { @Override public T rewriteValue(T value) { return null; } } private static class ListRewriter implements FieldRewriter<List<?>> { @Override public List<?> rewriteValue(List<?> listValue) { boolean wasRewritten = false; List<Object> result = new ArrayList<Object>(listValue.size()); for (Object listItem : listValue) { FieldRewriter<Object> fieldRewriter = RewritersFactory.INSTANCE.findRewriter(null, null, listItem); Object rewrittenItem = fieldRewriter.rewriteValue(listItem); if (rewrittenItem != null) { wasRewritten = true; result.add(rewrittenItem); } else result.add(listItem); } if (!wasRewritten) return null; return result; } } private static class DocumentRewriter implements FieldRewriter<ODocument> { @Override public ODocument rewriteValue(ODocument documentValue) { boolean wasRewritten = false; documentValue.setLazyLoad(false); for (String fieldName : documentValue.fieldNames()) { Object fieldValue = documentValue.field(fieldName); FieldRewriter<Object> fieldRewriter = RewritersFactory.INSTANCE.findRewriter(documentValue, fieldName, fieldValue); Object newFieldValue = fieldRewriter.rewriteValue(fieldValue); if (newFieldValue != null) { documentValue.field(fieldName, newFieldValue); wasRewritten = true; } } if (wasRewritten) return documentValue; return null; } } private static class MapRewriter implements FieldRewriter<Map<String, Object>> { @Override public Map<String, Object> rewriteValue(Map<String, Object> mapValue) { boolean wasRewritten = false; Map<String, Object> result = new HashMap<String, Object>(); for (Map.Entry<String, Object> entry : mapValue.entrySet()) { String key = entry.getKey(); Object value = entry.getValue(); FieldRewriter<Object> fieldRewriter = RewritersFactory.INSTANCE.findRewriter(null, null, value); Object newValue = fieldRewriter.rewriteValue(value); if (newValue != null) { result.put(key, newValue); wasRewritten = true; } else result.put(key, value); } if (wasRewritten) return result; return null; } } private static class LinkSetRewriter implements FieldRewriter<OMVRBTreeRIDSet> { @Override public OMVRBTreeRIDSet rewriteValue(OMVRBTreeRIDSet setValue) { setValue.setAutoConvertToRecord(false); OMVRBTreeRIDSet result = new OMVRBTreeRIDSet(); result.setAutoConvertToRecord(false); boolean wasRewritten = false; for (OIdentifiable identifiable : setValue) { FieldRewriter<ORID> fieldRewriter = RewritersFactory.INSTANCE.findRewriter(null, null, identifiable.getIdentity()); ORID newRid = fieldRewriter.rewriteValue(identifiable.getIdentity()); if (newRid != null) { wasRewritten = true; result.add(newRid); } else result.add(identifiable); } if (wasRewritten) return result; result.clear(); return null; } } private static class SetRewriter implements FieldRewriter<Set<?>> { @Override public Set<?> rewriteValue(Set<?> setValue) { boolean 
wasRewritten = false; Set<Object> result = new HashSet<Object>(); for (Object item : setValue) { FieldRewriter<Object> fieldRewriter = RewritersFactory.INSTANCE.findRewriter(null, null, item); Object newItem = fieldRewriter.rewriteValue(item); if (newItem != null) { wasRewritten = true; result.add(newItem); } else result.add(item); } if (wasRewritten) return result; return null; } } private static class LinkRewriter implements FieldRewriter<ORID> { private final OIndex<OIdentifiable> exportImportHashTable; private LinkRewriter(OIndex<OIdentifiable> exportImportHashTable) { this.exportImportHashTable = exportImportHashTable; } @Override public ORID rewriteValue(ORID value) { if (!value.isPersistent()) return null; final OIdentifiable result = exportImportHashTable.get(value); return result != null ? result.getIdentity() : null; } } }
1no label
core_src_main_java_com_orientechnologies_orient_core_db_tool_ODatabaseImport.java
1,725
public class MapBuilder<K, V> { public static <K, V> MapBuilder<K, V> newMapBuilder() { return new MapBuilder<K, V>(); } public static <K, V> MapBuilder<K, V> newMapBuilder(Map<K, V> map) { return new MapBuilder<K, V>(map); } private Map<K, V> map = newHashMap(); public MapBuilder() { this.map = newHashMap(); } public MapBuilder(Map<K, V> map) { this.map = newHashMap(map); } public MapBuilder<K, V> putAll(Map<K, V> map) { this.map.putAll(map); return this; } public MapBuilder<K, V> put(K key, V value) { this.map.put(key, value); return this; } public MapBuilder<K, V> remove(K key) { this.map.remove(key); return this; } public MapBuilder<K, V> clear() { this.map.clear(); return this; } public V get(K key) { return map.get(key); } public boolean containsKey(K key) { return map.containsKey(key); } public boolean isEmpty() { return map.isEmpty(); } public Map<K, V> map() { return this.map; } public ImmutableMap<K, V> immutableMap() { return ImmutableMap.copyOf(map); } }
0true
src_main_java_org_elasticsearch_common_collect_MapBuilder.java
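A rough usage sketch for the MapBuilder record above (not taken from the source tree; it assumes the MapBuilder class and its Guava dependency are on the classpath, and every name other than MapBuilder's own API is illustrative):

import java.util.Map;

public class MapBuilderDemo {
    public static void main(String[] args) {
        // Each mutator returns the builder itself, so calls chain fluently.
        Map<String, Integer> limits = MapBuilder.<String, Integer>newMapBuilder()
                .put("maxRetries", 3)
                .put("timeoutMs", 500)
                .remove("maxRetries")
                .map(); // hands back the mutable backing map
        System.out.println(limits); // {timeoutMs=500}
    }
}

Calling immutableMap() instead would return a defensive Guava copy, the safer choice when the result escapes the builder.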
4,445
class IndexFieldCache implements IndexFieldDataCache, SegmentReader.CoreClosedListener { @Nullable private final IndexService indexService; final Index index; final FieldMapper.Names fieldNames; final FieldDataType fieldDataType; IndexFieldCache(@Nullable IndexService indexService, Index index, FieldMapper.Names fieldNames, FieldDataType fieldDataType) { this.indexService = indexService; this.index = index; this.fieldNames = fieldNames; this.fieldDataType = fieldDataType; } @Override public <FD extends AtomicFieldData, IFD extends IndexFieldData<FD>> FD load(final AtomicReaderContext context, final IFD indexFieldData) throws Exception { final Key key = new Key(this, context.reader().getCoreCacheKey()); //noinspection unchecked return (FD) cache.get(key, new Callable<AtomicFieldData>() { @Override public AtomicFieldData call() throws Exception { SegmentReaderUtils.registerCoreListener(context.reader(), IndexFieldCache.this); AtomicFieldData fieldData = indexFieldData.loadDirect(context); if (indexService != null) { ShardId shardId = ShardUtils.extractShardId(context.reader()); if (shardId != null) { IndexShard shard = indexService.shard(shardId.id()); if (shard != null) { key.listener = shard.fieldData(); } } } if (key.listener != null) { key.listener.onLoad(fieldNames, fieldDataType, fieldData); } return fieldData; } }); } @Override public void onClose(Object coreKey) { cache.invalidate(new Key(this, coreKey)); } @Override public void clear() { for (Key key : cache.asMap().keySet()) { if (key.indexCache.index.equals(index)) { cache.invalidate(key); } } } @Override public void clear(String fieldName) { for (Key key : cache.asMap().keySet()) { if (key.indexCache.index.equals(index)) { if (key.indexCache.fieldNames.fullName().equals(fieldName)) { cache.invalidate(key); } } } } @Override public void clear(Object coreCacheKey) { cache.invalidate(new Key(this, coreCacheKey)); } }
1no label
src_main_java_org_elasticsearch_indices_fielddata_cache_IndicesFieldDataCache.java
112
public class SimpleTxHook implements Synchronization { private volatile boolean gotBefore, gotAfter; @Override public void beforeCompletion() { gotBefore = true; } @Override public void afterCompletion( int status ) { gotAfter = true; } }
0true
community_kernel_src_test_java_org_neo4j_kernel_impl_transaction_TestJtaCompliance.java
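A minimal sketch of how JTA drives a Synchronization such as the SimpleTxHook above (the TransactionManager is environment-specific and assumed to be supplied by the caller):

import javax.transaction.Transaction;
import javax.transaction.TransactionManager;

public class TxHookDemo {
    public static void runOnce(TransactionManager tm) throws Exception {
        tm.begin();
        Transaction tx = tm.getTransaction();
        SimpleTxHook hook = new SimpleTxHook();
        // JTA calls beforeCompletion() just before the commit decision
        // and afterCompletion(status) once the outcome is known.
        tx.registerSynchronization(hook);
        tm.commit(); // both flags on the hook are now set
    }
}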
645
@Component("blAuthenticationSuccessRedirectStrategy") public class BroadleafAuthenticationSuccessRedirectStrategy implements RedirectStrategy { private String redirectPath="/redirect"; private RedirectStrategy redirectStrategy = new DefaultRedirectStrategy(); @Override public void sendRedirect(HttpServletRequest request, HttpServletResponse response, String url) throws IOException { if (BroadleafControllerUtility.isAjaxRequest(request)) { request.getSession().setAttribute("BLC_REDIRECT_URL", url); url = getRedirectPath(); } redirectStrategy.sendRedirect(request, response, url); } public String updateLoginErrorUrlForAjax(String url) { String blcAjax = BroadleafControllerUtility.BLC_AJAX_PARAMETER; if (url != null && url.indexOf("?") > 0) { url = url + "&" + blcAjax + "=true"; } else { url = url + "?" + blcAjax + "=true"; } return url; } public String getRedirectPath() { return redirectPath; } public void setRedirectPath(String redirectPath) { this.redirectPath = redirectPath; } public RedirectStrategy getRedirectStrategy() { return redirectStrategy; } public void setRedirectStrategy(RedirectStrategy redirectStrategy) { this.redirectStrategy = redirectStrategy; } }
1no label
core_broadleaf-framework-web_src_main_java_org_broadleafcommerce_common_web_security_BroadleafAuthenticationSuccessRedirectStrategy.java
471
public class AliasesExistRequestBuilder extends BaseAliasesRequestBuilder<AliasesExistResponse, AliasesExistRequestBuilder> { public AliasesExistRequestBuilder(IndicesAdminClient client, String... aliases) { super(client, aliases); } @Override protected void doExecute(ActionListener<AliasesExistResponse> listener) { ((IndicesAdminClient) client).aliasesExist(request, listener); } }
0true
src_main_java_org_elasticsearch_action_admin_indices_alias_exists_AliasesExistRequestBuilder.java
361
public static class GroupingTestCollator implements Collator<Map.Entry<String, List<Integer>>, Integer> { @Override public Integer collate(Iterable<Map.Entry<String, List<Integer>>> values) { int sum = 0; for (Map.Entry<String, List<Integer>> entry : values) { for (Integer value : entry.getValue()) { sum += value; } } return sum; } }
0true
hazelcast-client_src_test_java_com_hazelcast_client_mapreduce_ClientMapReduceTest.java
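A standalone sketch of what the collator above computes: it flattens the grouped integer lists and sums every element. It assumes GroupingTestCollator is visible from the calling code; the map contents are illustrative.

import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class CollatorDemo {
    public static void main(String[] args) {
        Map<String, List<Integer>> grouped = new HashMap<String, List<Integer>>();
        grouped.put("a", Arrays.asList(1, 2, 3));
        grouped.put("b", Arrays.asList(4, 5));
        // entrySet() is an Iterable<Map.Entry<...>>, matching the collate() signature
        int sum = new GroupingTestCollator().collate(grouped.entrySet());
        System.out.println(sum); // 15
    }
}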
626
indexEngine.getEntriesBetween(iRangeFrom, iRangeTo, inclusive, null, new OIndexEngine.EntriesResultListener() { @Override public boolean addResult(ODocument entry) { return entriesResultListener.addResult(entry); } });
0true
core_src_main_java_com_orientechnologies_orient_core_index_OIndexOneValue.java
783
public class TransactionalListProxy<E> extends AbstractTransactionalCollectionProxy<ListService, E> implements TransactionalList<E> { private final LinkedList<CollectionItem> list = new LinkedList<CollectionItem>(); public TransactionalListProxy(String name, TransactionSupport tx, NodeEngine nodeEngine, ListService service) { super(name, tx, nodeEngine, service); } @Override public String getServiceName() { return ListService.SERVICE_NAME; } @Override protected Collection<CollectionItem> getCollection() { return list; } }
0true
hazelcast_src_main_java_com_hazelcast_collection_txn_TransactionalListProxy.java
702
constructors[TXN_LIST_REMOVE] = new ConstructorFunction<Integer, Portable>() { public Portable createNew(Integer arg) { return new TxnListRemoveRequest(); } };
0true
hazelcast_src_main_java_com_hazelcast_collection_CollectionPortableHook.java
1,157
public interface IAtomicLong extends DistributedObject { /** * Returns the name of this IAtomicLong instance. * * @return name of this instance */ String getName(); /** * Atomically adds the given value to the current value. * * @param delta the value to add * @return the updated value */ long addAndGet(long delta); /** * Atomically sets the value to the given updated value * only if the current value {@code ==} the expected value. * * @param expect the expected value * @param update the new value * @return true if successful; or false if the actual value * was not equal to the expected value. */ boolean compareAndSet(long expect, long update); /** * Atomically decrements the current value by one. * * @return the updated value */ long decrementAndGet(); /** * Gets the current value. * * @return the current value */ long get(); /** * Atomically adds the given value to the current value. * * @param delta the value to add * @return the old value before the add */ long getAndAdd(long delta); /** * Atomically sets the given value and returns the old value. * * @param newValue the new value * @return the old value */ long getAndSet(long newValue); /** * Atomically increments the current value by one. * * @return the updated value */ long incrementAndGet(); /** * Atomically increments the current value by one. * * @return the old value */ long getAndIncrement(); /** * Atomically sets the given value. * * @param newValue the new value */ void set(long newValue); /** * Alters the currently stored value by applying a function on it. * * @param function the function * @throws IllegalArgumentException if function is null. * @since 3.2 */ void alter(IFunction<Long, Long> function); /** * Alters the currently stored value by applying a function on it and gets the result. * * @param function the function * @return the new value. * @throws IllegalArgumentException if function is null. * @since 3.2 */ long alterAndGet(IFunction<Long, Long> function); /** * Alters the currently stored value by applying a function on it and gets the old value. * * @param function the function * @return the old value * @throws IllegalArgumentException if function is null. * @since 3.2 */ long getAndAlter(IFunction<Long, Long> function); /** * Applies a function on the value; the actual stored value will not change. * * @param function the function * @return the result of the function application * @throws IllegalArgumentException if function is null. * @since 3.2 */ <R> R apply(IFunction<Long, R> function); }
0true
hazelcast_src_main_java_com_hazelcast_core_IAtomicLong.java
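A short usage sketch for the interface above, obtained through a Hazelcast instance (default cluster configuration assumed; the counter name is illustrative):

import com.hazelcast.core.Hazelcast;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.core.IAtomicLong;

public class AtomicLongDemo {
    public static void main(String[] args) {
        HazelcastInstance hz = Hazelcast.newHazelcastInstance();
        IAtomicLong counter = hz.getAtomicLong("page-hits");
        counter.set(10);
        long updated = counter.addAndGet(5);               // 15
        boolean swapped = counter.compareAndSet(15, 100);  // true; value is now 100
        System.out.println(updated + " " + swapped + " " + counter.get());
        hz.shutdown();
    }
}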
2,013
@Service("blCustomerService") public class CustomerServiceImpl implements CustomerService { private static final Log LOG = LogFactory.getLog(CustomerServiceImpl.class); @Resource(name="blCustomerDao") protected CustomerDao customerDao; @Resource(name="blIdGenerationService") protected IdGenerationService idGenerationService; @Resource(name="blCustomerForgotPasswordSecurityTokenDao") protected CustomerForgotPasswordSecurityTokenDao customerForgotPasswordSecurityTokenDao; @Resource(name="blPasswordEncoder") protected PasswordEncoder passwordEncoder; /** * Optional password salt to be used with the passwordEncoder * @deprecated utilize {@link #saltSource} instead so that it can be shared between this class as well as Spring's * authentication manager */ @Deprecated protected String salt; /** * Use a Salt Source ONLY if there's one configured */ @Autowired(required=false) @Qualifier("blSaltSource") protected SaltSource saltSource; @Resource(name="blRoleDao") protected RoleDao roleDao; @Resource(name="blEmailService") protected EmailService emailService; @Resource(name="blForgotPasswordEmailInfo") protected EmailInfo forgotPasswordEmailInfo; @Resource(name="blForgotUsernameEmailInfo") protected EmailInfo forgotUsernameEmailInfo; @Resource(name="blRegistrationEmailInfo") protected EmailInfo registrationEmailInfo; @Resource(name="blChangePasswordEmailInfo") protected EmailInfo changePasswordEmailInfo; protected int tokenExpiredMinutes = 30; protected int passwordTokenLength = 20; protected final List<PostRegistrationObserver> postRegisterListeners = new ArrayList<PostRegistrationObserver>(); protected List<PasswordUpdatedHandler> passwordResetHandlers = new ArrayList<PasswordUpdatedHandler>(); protected List<PasswordUpdatedHandler> passwordChangedHandlers = new ArrayList<PasswordUpdatedHandler>(); @Override public Customer saveCustomer(Customer customer) { return saveCustomer(customer, true); } @Override public Customer saveCustomer(Customer customer, boolean register) { if (register && !customer.isRegistered()) { customer.setRegistered(true); } if (customer.getUnencodedPassword() != null) { customer.setPassword(encodePassword(customer.getUnencodedPassword(), customer)); } // let's make sure they entered a new challenge answer (we will populate // the password field with hashed values so check that they have changed // id if (customer.getUnencodedChallengeAnswer() != null && !customer.getUnencodedChallengeAnswer().equals(customer.getChallengeAnswer())) { customer.setChallengeAnswer(encodePassword(customer.getUnencodedChallengeAnswer(), customer)); } return customerDao.save(customer); } protected String generateSecurePassword() { return RandomStringUtils.randomAlphanumeric(16); } @Override public Customer registerCustomer(Customer customer, String password, String passwordConfirm) { customer.setRegistered(true); // When unencodedPassword is set the save() will encode it if (customer.getId() == null) { customer.setId(findNextCustomerId()); } customer.setUnencodedPassword(password); Customer retCustomer = saveCustomer(customer); createRegisteredCustomerRoles(retCustomer); HashMap<String, Object> vars = new HashMap<String, Object>(); vars.put("customer", retCustomer); emailService.sendTemplateEmail(customer.getEmailAddress(), getRegistrationEmailInfo(), vars); notifyPostRegisterListeners(retCustomer); return retCustomer; } /** * Subclassed implementations can assign unique roles for various customer types * * @param customer */ protected void createRegisteredCustomerRoles(Customer customer) { Role 
role = roleDao.readRoleByName("ROLE_USER"); CustomerRole customerRole = new CustomerRoleImpl(); customerRole.setRole(role); customerRole.setCustomer(customer); roleDao.addRoleToCustomer(customerRole); } @Override public Customer readCustomerByEmail(String emailAddress) { return customerDao.readCustomerByEmail(emailAddress); } @Override public Customer changePassword(PasswordChange passwordChange) { Customer customer = readCustomerByUsername(passwordChange.getUsername()); customer.setUnencodedPassword(passwordChange.getNewPassword()); customer.setPasswordChangeRequired(passwordChange.getPasswordChangeRequired()); customer = saveCustomer(customer); for (PasswordUpdatedHandler handler : passwordChangedHandlers) { handler.passwordChanged(passwordChange, customer, passwordChange.getNewPassword()); } return customer; } @Override public Customer resetPassword(PasswordReset passwordReset) { Customer customer = readCustomerByUsername(passwordReset.getUsername()); String newPassword = PasswordUtils.generateTemporaryPassword(passwordReset.getPasswordLength()); customer.setUnencodedPassword(newPassword); customer.setPasswordChangeRequired(passwordReset.getPasswordChangeRequired()); customer = saveCustomer(customer); for (PasswordUpdatedHandler handler : passwordResetHandlers) { handler.passwordChanged(passwordReset, customer, newPassword); } return customer; } @Override public void addPostRegisterListener(PostRegistrationObserver postRegisterListeners) { this.postRegisterListeners.add(postRegisterListeners); } @Override public void removePostRegisterListener(PostRegistrationObserver postRegisterListeners) { if (this.postRegisterListeners.contains(postRegisterListeners)) { this.postRegisterListeners.remove(postRegisterListeners); } } protected void notifyPostRegisterListeners(Customer customer) { for (Iterator<PostRegistrationObserver> iter = postRegisterListeners.iterator(); iter.hasNext();) { PostRegistrationObserver listener = iter.next(); listener.processRegistrationEvent(customer); } } @Override public Customer createCustomer() { return createCustomerFromId(null); } @Override public Customer createCustomerFromId(Long customerId) { Customer customer = customerId != null ? readCustomerById(customerId) : null; if (customer == null) { customer = customerDao.create(); if (customerId != null) { customer.setId(customerId); } else { customer.setId(findNextCustomerId()); } } return customer; } /** * Allow customers to call from subclassed service. * @return */ @Override public Long findNextCustomerId() { return idGenerationService.findNextId("org.broadleafcommerce.profile.core.domain.Customer"); } @Override public Customer createNewCustomer() { return createCustomerFromId(null); } @Override public Customer readCustomerByUsername(String username) { return customerDao.readCustomerByUsername(username); } @Override public Customer readCustomerById(Long id) { return customerDao.readCustomerById(id); } public void setCustomerDao(CustomerDao customerDao) { this.customerDao = customerDao; } public void setPasswordEncoder(PasswordEncoder passwordEncoder) { this.passwordEncoder = passwordEncoder; } /** * * @deprecated use {@link #getSalt(Customer, String)} instead */ @Deprecated @Override public Object getSalt(Customer customer) { return getSalt(customer, ""); } /** * Optionally provide a salt based on a customer. 
By default, this returns * the salt property * * @param customer * @return * @see {@link CustomerServiceImpl#getSalt()} */ @Override public Object getSalt(Customer customer, String unencodedPassword) { Object salt = null; if (saltSource != null && customer != null) { salt = saltSource.getSalt(new CustomerUserDetails(customer.getId(), customer.getUsername(), unencodedPassword, new ArrayList<GrantedAuthority>())); } return salt; } @Override public String encodePassword(String clearText, Customer customer) { return passwordEncoder.encodePassword(clearText, getSalt(customer, clearText)); } @Override public boolean isPasswordValid(String rawPassword, String encodedPassword, Customer customer) { return passwordEncoder.isPasswordValid(encodedPassword, rawPassword, getSalt(customer, rawPassword)); } @Override @Deprecated public String getSalt() { return salt; } @Override @Deprecated public void setSalt(String salt) { this.salt = salt; } @Override public SaltSource getSaltSource() { return saltSource; } @Override public void setSaltSource(SaltSource saltSource) { this.saltSource = saltSource; } @Override public List<PasswordUpdatedHandler> getPasswordResetHandlers() { return passwordResetHandlers; } @Override public void setPasswordResetHandlers(List<PasswordUpdatedHandler> passwordResetHandlers) { this.passwordResetHandlers = passwordResetHandlers; } @Override public List<PasswordUpdatedHandler> getPasswordChangedHandlers() { return passwordChangedHandlers; } @Override public void setPasswordChangedHandlers(List<PasswordUpdatedHandler> passwordChangedHandlers) { this.passwordChangedHandlers = passwordChangedHandlers; } @Override public GenericResponse sendForgotUsernameNotification(String emailAddress) { GenericResponse response = new GenericResponse(); List<Customer> customers = null; if (emailAddress != null) { customers = customerDao.readCustomersByEmail(emailAddress); } if (customers == null || customers.isEmpty()) { response.addErrorCode("notFound"); } else { List<String> activeUsernames = new ArrayList<String>(); for (Customer customer: customers) { if (! customer.isDeactivated()) { activeUsernames.add(customer.getUsername()); } } if (activeUsernames.size() > 0) { HashMap<String, Object> vars = new HashMap<String, Object>(); vars.put("userNames", activeUsernames); emailService.sendTemplateEmail(emailAddress, getForgotUsernameEmailInfo(), vars); } else { // send inactive username found email. response.addErrorCode("inactiveUser"); } } return response; } @Override public GenericResponse sendForgotPasswordNotification(String username, String resetPasswordUrl) { GenericResponse response = new GenericResponse(); Customer customer = null; if (username != null) { customer = customerDao.readCustomerByUsername(username); } checkCustomer(customer,response); if (! 
response.getHasErrors()) { String token = PasswordUtils.generateTemporaryPassword(getPasswordTokenLength()); token = token.toLowerCase(); CustomerForgotPasswordSecurityToken fpst = new CustomerForgotPasswordSecurityTokenImpl(); fpst.setCustomerId(customer.getId()); fpst.setToken(encodePassword(token, null)); fpst.setCreateDate(SystemTime.asDate()); customerForgotPasswordSecurityTokenDao.saveToken(fpst); HashMap<String, Object> vars = new HashMap<String, Object>(); vars.put("token", token); if (!StringUtils.isEmpty(resetPasswordUrl)) { if (resetPasswordUrl.contains("?")) { resetPasswordUrl=resetPasswordUrl+"&token="+token; } else { resetPasswordUrl=resetPasswordUrl+"?token="+token; } } vars.put("resetPasswordUrl", resetPasswordUrl); emailService.sendTemplateEmail(customer.getEmailAddress(), getForgotPasswordEmailInfo(), vars); } return response; } @Override public GenericResponse checkPasswordResetToken(String token) { GenericResponse response = new GenericResponse(); checkPasswordResetToken(token, response); return response; } private CustomerForgotPasswordSecurityToken checkPasswordResetToken(String token, GenericResponse response) { if (token == null || "".equals(token)) { response.addErrorCode("invalidToken"); } CustomerForgotPasswordSecurityToken fpst = null; if (! response.getHasErrors()) { token = token.toLowerCase(); fpst = customerForgotPasswordSecurityTokenDao.readToken(encodePassword(token, null)); if (fpst == null) { response.addErrorCode("invalidToken"); } else if (fpst.isTokenUsedFlag()) { response.addErrorCode("tokenUsed"); } else if (isTokenExpired(fpst)) { response.addErrorCode("tokenExpired"); } } return fpst; } @Override public GenericResponse resetPasswordUsingToken(String username, String token, String password, String confirmPassword) { GenericResponse response = new GenericResponse(); Customer customer = null; if (username != null) { customer = customerDao.readCustomerByUsername(username); } checkCustomer(customer, response); checkPassword(password, confirmPassword, response); CustomerForgotPasswordSecurityToken fpst = checkPasswordResetToken(token, response); if (! response.getHasErrors()) { if (! customer.getId().equals(fpst.getCustomerId())) { if (LOG.isWarnEnabled()) { LOG.warn("Password reset attempt tried with mismatched customer and token " + customer.getId() + ", " + token); } response.addErrorCode("invalidToken"); } } if (! response.getHasErrors()) { customer.setUnencodedPassword(password); saveCustomer(customer); fpst.setTokenUsedFlag(true); customerForgotPasswordSecurityTokenDao.saveToken(fpst); } return response; } protected void checkCustomer(Customer customer, GenericResponse response) { if (customer == null) { response.addErrorCode("invalidCustomer"); } else if (customer.getEmailAddress() == null || "".equals(customer.getEmailAddress())) { response.addErrorCode("emailNotFound"); } else if (customer.isDeactivated()) { response.addErrorCode("inactiveUser"); } } protected void checkPassword(String password, String confirmPassword, GenericResponse response) { if (password == null || confirmPassword == null || "".equals(password) || "".equals(confirmPassword)) { response.addErrorCode("invalidPassword"); } else if (! 
password.equals(confirmPassword)) { response.addErrorCode("passwordMismatch"); } } protected boolean isTokenExpired(CustomerForgotPasswordSecurityToken fpst) { Date now = SystemTime.asDate(); long currentTimeInMillis = now.getTime(); long tokenSaveTimeInMillis = fpst.getCreateDate().getTime(); long minutesSinceSave = (currentTimeInMillis - tokenSaveTimeInMillis)/60000; return minutesSinceSave > tokenExpiredMinutes; } public int getTokenExpiredMinutes() { return tokenExpiredMinutes; } public void setTokenExpiredMinutes(int tokenExpiredMinutes) { this.tokenExpiredMinutes = tokenExpiredMinutes; } public int getPasswordTokenLength() { return passwordTokenLength; } public void setPasswordTokenLength(int passwordTokenLength) { this.passwordTokenLength = passwordTokenLength; } public EmailInfo getForgotPasswordEmailInfo() { return forgotPasswordEmailInfo; } public void setForgotPasswordEmailInfo(EmailInfo forgotPasswordEmailInfo) { this.forgotPasswordEmailInfo = forgotPasswordEmailInfo; } public EmailInfo getForgotUsernameEmailInfo() { return forgotUsernameEmailInfo; } public void setForgotUsernameEmailInfo(EmailInfo forgotUsernameEmailInfo) { this.forgotUsernameEmailInfo = forgotUsernameEmailInfo; } public EmailInfo getRegistrationEmailInfo() { return registrationEmailInfo; } public void setRegistrationEmailInfo(EmailInfo registrationEmailInfo) { this.registrationEmailInfo = registrationEmailInfo; } public EmailInfo getChangePasswordEmailInfo() { return changePasswordEmailInfo; } public void setChangePasswordEmailInfo(EmailInfo changePasswordEmailInfo) { this.changePasswordEmailInfo = changePasswordEmailInfo; } }
1no label
core_broadleaf-profile_src_main_java_org_broadleafcommerce_profile_core_service_CustomerServiceImpl.java
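The isTokenExpired() check in the CustomerServiceImpl record above reduces to plain millisecond arithmetic: elapsed milliseconds are divided down to whole minutes and compared to the limit. A minimal standalone sketch of that calculation, assuming a hypothetical 30-minute limit (the real value comes from the tokenExpiredMinutes field):

public class TokenExpiryDemo {
    public static void main(String[] args) {
        int tokenExpiredMinutes = 30; // hypothetical configured limit
        long tokenSaveTimeInMillis = System.currentTimeMillis() - 45 * 60000L; // token saved 45 minutes ago
        long minutesSinceSave = (System.currentTimeMillis() - tokenSaveTimeInMillis) / 60000;
        System.out.println(minutesSinceSave > tokenExpiredMinutes); // true -> token is expired
    }
}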
3,144
engine.recover(new Engine.RecoveryHandler() { @Override public void phase1(SnapshotIndexCommit snapshot) throws EngineException { } @Override public void phase2(Translog.Snapshot snapshot) throws EngineException { assertThat(snapshot.hasNext(), equalTo(true)); Translog.Create create = (Translog.Create) snapshot.next(); assertThat(snapshot.hasNext(), equalTo(false)); assertThat(create.source().toBytesArray(), equalTo(B_2)); // add for phase3 ParsedDocument doc3 = testParsedDocument("3", "3", "test", null, -1, -1, testDocumentWithTextField(), Lucene.STANDARD_ANALYZER, B_3, false); engine.create(new Engine.Create(null, newUid("3"), doc3)); } @Override public void phase3(Translog.Snapshot snapshot) throws EngineException { assertThat(snapshot.hasNext(), equalTo(true)); Translog.Create create = (Translog.Create) snapshot.next(); assertThat(snapshot.hasNext(), equalTo(false)); assertThat(create.source().toBytesArray(), equalTo(B_3)); } });
0true
src_test_java_org_elasticsearch_index_engine_internal_InternalEngineTests.java
1,527
public class PersistenceXMLParsingTest { /** * Test parsing a persistence descriptor with several entries * * @throws Exception */ @Test public void testFile1() throws Exception { URL locationUrl = getClass().getClassLoader().getResource("apache-aries/file1/META-INF/persistence.xml"); Collection<? extends PersistenceUnitInfo> parsedUnits = PersistenceXmlUtil.parse(locationUrl); assertNotNull("Persistence units shouldn't be null.", parsedUnits); assertEquals("An incorrect number of persistence units has been returned.", 4, parsedUnits.size()); Iterator<? extends PersistenceUnitInfo> iterator = parsedUnits.iterator(); assertPersistenceUnit(iterator.next(), new Rule[] { // new Rule("The schema version was incorrect", "1.0", "getPersistenceXMLSchemaVersion"), new Rule("The unit name was incorrect", "alpha", "getPersistenceUnitName"), new Rule("The transaction type was incorrect", null, "getTransactionType"), new Rule("The provider class name was incorrect", null, "getPersistenceProviderClassName"), new Rule("One or more mapping files were specified", EMPTY_LIST, "getMappingFileNames"), new Rule("One or more jar files were specified", EMPTY_LIST, "getJarFileUrls"), new Rule("One or more managed classes were specified", EMPTY_LIST, "getManagedClassNames"), new Rule("We should not exclude any classes", false, "excludeUnlistedClasses") }); assertPersistenceUnit( iterator.next(), new Rule[] { // new Rule("The schema version was incorrect", "1.0", "getPersistenceXMLSchemaVersion"), new Rule("The unit name was incorrect", "bravo", "getPersistenceUnitName"), new Rule("The transaction type was incorrect", JTA, "getTransactionType"), new Rule("The provider class name was incorrect", "bravo.persistence.provider", "getPersistenceProviderClassName"), new Rule("Incorrect mapping files were listed", asList("bravoMappingFile1.xml", "bravoMappingFile2.xml"), "getMappingFileNames"), new Rule("Incorrect jar URLs were listed", asURLList("bravoJarFile1.jar", "bravoJarFile2.jar"), "getJarFileUrls"), new Rule("Incorrect managed classes were listed", asList("bravoClass1", "bravoClass2"), "getManagedClassNames"), new Rule("We should not exclude any classes", true, "excludeUnlistedClasses"), new Rule("The properties should never be null", asProperty("some.prop", "prop.value", "some.other.prop", "another.prop.value"), "getProperties") }); assertPersistenceUnit(iterator.next(), new Rule[] { // new Rule("The schema version was incorrect", "1.0", "getPersistenceXMLSchemaVersion"), new Rule("The unit name was incorrect", "charlie", "getPersistenceUnitName"), new Rule("The transaction type was incorrect", PersistenceUnitTransactionType.RESOURCE_LOCAL, "getTransactionType"), new Rule("The provider class name was incorrect", "charlie.persistence.provider", "getPersistenceProviderClassName"), new Rule("One or more mapping files were specified", EMPTY_LIST, "getMappingFileNames"), new Rule("One or more jar files were specified", EMPTY_LIST, "getJarFileUrls"), new Rule("One or more managed classes were specified", EMPTY_LIST, "getManagedClassNames"), new Rule("We should not exclude any classes", true, "excludeUnlistedClasses") }); assertPersistenceUnit(iterator.next(), new Rule[] { // new Rule("The schema version was incorrect", "1.0", "getPersistenceXMLSchemaVersion"), new Rule("The unit name was incorrect", "delta", "getPersistenceUnitName"), new Rule("The transaction type was incorrect", PersistenceUnitTransactionType.RESOURCE_LOCAL, "getTransactionType"), new Rule("The provider class name was incorrect", 
"delta.persistence.provider", "getPersistenceProviderClassName"), new Rule("One or more mapping files were specified", EMPTY_LIST, "getMappingFileNames"), new Rule("One or more jar files were specified", EMPTY_LIST, "getJarFileUrls"), new Rule("One or more managed classes were specified", EMPTY_LIST, "getManagedClassNames"), new Rule("We should not exclude any classes", false, "excludeUnlistedClasses") }); } @Test public void testFile2() throws Exception { URL locationUrl = getClass().getClassLoader().getResource("apache-aries/file2/META-INF/persistence.xml"); Collection<? extends PersistenceUnitInfo> parsedUnits = PersistenceXmlUtil.parse(locationUrl); assertNotNull("Persistence units shouldn't be null.", parsedUnits); assertEquals("An incorrect number of persistence units has been returned.", 0, parsedUnits.size()); } @Test(expectedExceptions = PersistenceException.class) public void testFile3() throws Exception { URL locationUrl = getClass().getClassLoader().getResource("apache-aries/file3/META-INF/persistence.xml"); PersistenceXmlUtil.parse(locationUrl); fail("Parsing should not succeed"); } @Test public void testJPA2() throws Exception { URL locationUrl = getClass().getClassLoader().getResource("apache-aries/file22/META-INF/persistence.xml"); Collection<? extends PersistenceUnitInfo> parsedUnits = PersistenceXmlUtil.parse(locationUrl); assertNotNull("Persistence units shouldn't be null.", parsedUnits); assertEquals("An incorrect number of persistence units has been returned.", 2, parsedUnits.size()); Iterator<? extends PersistenceUnitInfo> iterator = parsedUnits.iterator(); // test defaults assertPersistenceUnit(iterator.next(), new Rule[] { // new Rule("The schema version was incorrect", "2.0", "getPersistenceXMLSchemaVersion"), new Rule("The unit name was incorrect", "default", "getPersistenceUnitName"), new Rule("Unexpected SharedCacheMode", SharedCacheMode.UNSPECIFIED, "getSharedCacheMode"), new Rule("Unexpected ValidationMode", ValidationMode.AUTO, "getValidationMode") }); assertPersistenceUnit(iterator.next(), new Rule[] { // new Rule("The schema version was incorrect", "2.0", "getPersistenceXMLSchemaVersion"), new Rule("The unit name was incorrect", "custom", "getPersistenceUnitName"), new Rule("Unexpected SharedCacheMode", SharedCacheMode.ENABLE_SELECTIVE, "getSharedCacheMode"), new Rule("Unexpected ValidationMode", ValidationMode.CALLBACK, "getValidationMode") }); } /** * Test parsing a persistence descriptor with several entries * * @throws Exception */ @SuppressWarnings("unchecked") @Test public void testReallyBigFile() throws Exception { URL locationUrl = getClass().getClassLoader().getResource("apache-aries/file24/META-INF/persistence.xml"); Collection<? 
extends PersistenceUnitInfo> parsedUnits = PersistenceXmlUtil.parse(locationUrl); assertNotNull("Persistence units shouldn't be null.", parsedUnits); assertEquals("An incorrect number of persistence units has been returned.", 33, parsedUnits.size()); List<OJPAPersistenceUnitInfo> units = new ArrayList<OJPAPersistenceUnitInfo>(); units.addAll((Collection<OJPAPersistenceUnitInfo>) parsedUnits); assertEquals("An incorrect number of units has been returned.", 33, units.size()); // prepare Collections.sort(units, new Comparator<OJPAPersistenceUnitInfo>() { @Override public int compare(OJPAPersistenceUnitInfo p1, OJPAPersistenceUnitInfo p2) { return Integer.valueOf(p1.getPersistenceUnitName()).compareTo(Integer.valueOf(p2.getPersistenceUnitName())); } }); for (int counter = 1; counter < units.size(); counter++) { assertPersistenceUnit(units.get(counter - 1), new Rule[] { // new Rule("The schema version was incorrect", "1.0", "getPersistenceXMLSchemaVersion"), new Rule("The unit name was incorrect", Integer.toString(counter), "getPersistenceUnitName"), new Rule("The transaction type was incorrect", JTA, "getTransactionType"), new Rule("The provider class name was incorrect", "provider." + counter, "getPersistenceProviderClassName"), new Rule("Incorrect mapping files were listed", asList("mappingFile." + counter), "getMappingFileNames"), new Rule("Incorrect jar URLs were listed", asURLList("jarFile." + counter), "getJarFileUrls"), new Rule("Incorrect managed classes were listed", asList("class." + counter), "getManagedClassNames"), new Rule("We should not exclude any classes", true, "excludeUnlistedClasses"), new Rule("The properties should never be null", asProperty("some.prop." + counter, "prop.value." + counter), "getProperties") }); } } @Test public void elementsPrefixedWithPersistenceNameSpaceShouldBeAccepted() throws Exception { URL locationUrl = getClass().getClassLoader().getResource("apache-aries/file26/META-INF/persistence.xml"); Collection<? extends PersistenceUnitInfo> parsedUnits = PersistenceXmlUtil.parse(locationUrl); assertNotNull("Persistence units shouldn't be null.", parsedUnits); assertEquals("An incorrect number of persistence units has been returned.", 1, parsedUnits.size()); } @Test(expectedExceptions = PersistenceException.class) public void elementsPrefixedWithWrongNameSpaceShouldBeRejected() throws Exception { URL locationUrl = getClass().getClassLoader().getResource("apache-aries/file27/META-INF/persistence.xml"); PersistenceXmlUtil.parse(locationUrl); fail("should throw"); } @Test(expectedExceptions = PersistenceException.class) public void testConfigWithoutXMLSchemaVersion() throws Exception { URL locationUrl = getClass().getClassLoader().getResource("orient/file1/META-INF/persistence.xml"); PersistenceXmlUtil.parse(locationUrl); fail("should throw"); } // ---------------- helpers class Rule { public String message; public Object expected; public String method; public Rule(String message, Object expected, String method) { this.message = message; this.expected = expected; this.method = method; } } public static void assertPersistenceUnit(PersistenceUnitInfo unit, Rule[] rules) throws IllegalAccessException, IllegalArgumentException, InvocationTargetException, NoSuchMethodException, SecurityException { for (int i = 0; i < rules.length; i++) { Method method = OJPAPersistenceUnitInfo.class.getDeclaredMethod(rules[i].method); String msg = "For unit name '" + (unit.getPersistenceUnitName() != null ? 
unit.getPersistenceUnitName() : "-empty-") + "': " + rules[i].message; assertEquals(msg, rules[i].expected, method.invoke(unit)); } } /** * @param a * array of strings * @return array of urls using @see com.orientechnologies.orient.object.jpa.parsing.OJPAPersistenceUnitInfo.initJarFile(String) */ public static List<URL> asURLList(String... a) { List<URL> list = new ArrayList<URL>(); for (int i = 0; i < a.length; i++) { list.add(OJPAPersistenceUnitInfo.initJarFile(a[i])); } return list; } /** * @param a * put 'key1, value1, key2, value2, ...' here * @return */ public static Properties asProperty(String... a) { Properties properties = new Properties(); for (int i = 0; i < a.length; i += 2) { properties.setProperty(a[i], a[i + 1]); } return properties; } }
0true
object_src_test_java_com_orientechnologies_orient_object_jpa_parsing_PersistenceXMLParsingTest.java
1,607
public enum RESULT_STRATEGY { ANY, MERGE, UNION }
0true
server_src_main_java_com_orientechnologies_orient_server_distributed_task_OAbstractRemoteTask.java
994
public interface OStreamSerializer { public byte[] toStream(Object iObject) throws IOException; public Object fromStream(byte[] iStream) throws IOException; public String getName(); }
0true
core_src_main_java_com_orientechnologies_orient_core_serialization_serializer_stream_OStreamSerializer.java
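A minimal sketch of one way to implement the OStreamSerializer contract above, round-tripping Strings as UTF-8 bytes; the class name and the "demo-string" serializer name are invented for illustration:

import java.io.IOException;
import java.nio.charset.Charset;

public class DemoStringSerializer implements OStreamSerializer {
    private static final Charset UTF8 = Charset.forName("UTF-8");

    public byte[] toStream(Object iObject) throws IOException {
        return iObject == null ? new byte[0] : iObject.toString().getBytes(UTF8);
    }

    public Object fromStream(byte[] iStream) throws IOException {
        return iStream == null ? null : new String(iStream, UTF8);
    }

    public String getName() {
        return "demo-string"; // hypothetical serializer name
    }
}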
807
getDatabase().getStorage().callInLock(new Callable<Object>() { public Object call() throws Exception { for (int id : clusterIds) { final OStorage storage = getDatabase().getStorage(); storage.getClusterById(id).truncate(); storage.getLevel2Cache().freeCluster(id); } for (OIndex<?> index : getClassIndexes()) { index.clear(); } return null; } }, true);
0true
core_src_main_java_com_orientechnologies_orient_core_metadata_schema_OClassImpl.java
273
public class SelectNoMembers implements MemberSelector { @Override public boolean select(Member member) { return false; } }
0true
hazelcast-client_src_test_java_com_hazelcast_client_executor_tasks_SelectNoMembers.java
2,976
public class IndexCacheModule extends AbstractModule { private final Settings settings; public IndexCacheModule(Settings settings) { this.settings = settings; } @Override protected void configure() { new FilterCacheModule(settings).configure(binder()); new IdCacheModule(settings).configure(binder()); new QueryParserCacheModule(settings).configure(binder()); new DocSetCacheModule(settings).configure(binder()); bind(IndexCache.class).asEagerSingleton(); } }
0true
src_main_java_org_elasticsearch_index_cache_IndexCacheModule.java
848
SHORT("Short", 2, new Class<?>[] { Short.class, Short.TYPE }, new Class<?>[] { Short.class, Number.class }) { },
0true
core_src_main_java_com_orientechnologies_orient_core_metadata_schema_OType.java
985
public class OSerializationThreadLocal extends ThreadLocal<Set<Integer>> { public static OSerializationThreadLocal INSTANCE = new OSerializationThreadLocal(); @Override protected Set<Integer> initialValue() { return new HashSet<Integer>(); } }
0true
core_src_main_java_com_orientechnologies_orient_core_serialization_serializer_record_OSerializationThreadLocal.java
121
public class JMSArchivedPageSubscriber implements MessageListener { @Resource(name = "blPageService") private PageService pageService; /* * (non-Javadoc) * @see javax.jms.MessageListener#onMessage(javax.jms.Message) */ @SuppressWarnings("unchecked") public void onMessage(Message message) { String basePageCacheKey = null; try { basePageCacheKey = ((TextMessage) message).getText(); pageService.removePageFromCache(basePageCacheKey); } catch (JMSException e) { throw new RuntimeException(e); } } }
0true
admin_broadleaf-contentmanagement-module_src_main_java_org_broadleafcommerce_cms_page_message_jms_JMSArchivedPageSubscriber.java
1,755
public static class FactorFixedSourceDistance implements FixedSourceDistance { private final double sourceLongitude; private final double a; private final double sinA; private final double cosA; public FactorFixedSourceDistance(double sourceLatitude, double sourceLongitude, DistanceUnit unit) { this.sourceLongitude = sourceLongitude; this.a = Math.toRadians(90D - sourceLatitude); this.sinA = Math.sin(a); this.cosA = Math.cos(a); } @Override public double calculate(double targetLatitude, double targetLongitude) { double longitudeDifference = targetLongitude - sourceLongitude; double c = Math.toRadians(90D - targetLatitude); return (cosA * Math.cos(c)) + (sinA * Math.sin(c) * Math.cos(Math.toRadians(longitudeDifference))); } }
0true
src_main_java_org_elasticsearch_common_geo_GeoDistance.java
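The calculate() method in the FactorFixedSourceDistance record above returns the cosine of the central angle between source and target rather than a distance in length units, so larger values mean closer points; that makes it a cheap, monotonic proxy suitable for comparisons. A self-contained sketch of the same spherical-law-of-cosines arithmetic:

public class FactorDistanceDemo {
    // Same formula as FactorFixedSourceDistance.calculate(), inlined for illustration.
    static double factor(double srcLat, double srcLon, double dstLat, double dstLon) {
        double a = Math.toRadians(90D - srcLat); // colatitude of the source
        double c = Math.toRadians(90D - dstLat); // colatitude of the target
        return Math.cos(a) * Math.cos(c)
                + Math.sin(a) * Math.sin(c) * Math.cos(Math.toRadians(dstLon - srcLon));
    }

    public static void main(String[] args) {
        System.out.println(factor(0, 0, 0, 1));  // ~0.99985 -> nearby points
        System.out.println(factor(0, 0, 0, 90)); // ~0.0 -> a quarter of the globe apart
    }
}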
1,300
@Test public class LocalPaginatedStorageSmallCacheBigRecordsCrashRestore { private ODatabaseDocumentTx baseDocumentTx; private ODatabaseDocumentTx testDocumentTx; private File buildDir; private final AtomicLong idGen = new AtomicLong(); private ExecutorService executorService = Executors.newCachedThreadPool(); private Process process; @BeforeClass public void beforeClass() throws Exception { OGlobalConfiguration.CACHE_LEVEL1_ENABLED.setValue(false); OGlobalConfiguration.CACHE_LEVEL1_SIZE.setValue(0); OGlobalConfiguration.CACHE_LEVEL2_ENABLED.setValue(false); OGlobalConfiguration.CACHE_LEVEL2_SIZE.setValue(0); String buildDirectory = System.getProperty("buildDirectory", "."); buildDirectory += "/localPaginatedStorageSmallCacheBigRecordsCrashRestore"; buildDir = new File(buildDirectory); if (buildDir.exists()) buildDir.delete(); buildDir.mkdir(); String javaExec = System.getProperty("java.home") + "/bin/java"; System.setProperty("ORIENTDB_HOME", buildDirectory); ProcessBuilder processBuilder = new ProcessBuilder(javaExec, "-Xmx2048m", "-classpath", System.getProperty("java.class.path"), "-DORIENTDB_HOME=" + buildDirectory, RemoteDBRunner.class.getName()); processBuilder.inheritIO(); process = processBuilder.start(); Thread.sleep(5000); } @AfterClass public void afterClass() { testDocumentTx.drop(); baseDocumentTx.drop(); Assert.assertTrue(buildDir.delete()); } @BeforeMethod public void beforeMethod() { baseDocumentTx = new ODatabaseDocumentTx("plocal:" + buildDir.getAbsolutePath() + "/baseLocalPaginatedStorageSmallCacheBigRecordsCrashRestore"); if (baseDocumentTx.exists()) { baseDocumentTx.open("admin", "admin"); baseDocumentTx.drop(); } baseDocumentTx.create(); testDocumentTx = new ODatabaseDocumentTx("remote:localhost:3500/testLocalPaginatedStorageSmallCacheBigRecordsCrashRestore"); testDocumentTx.open("admin", "admin"); } public void testDocumentCreation() throws Exception { createSchema(baseDocumentTx); createSchema(testDocumentTx); List<Future> futures = new ArrayList<Future>(); for (int i = 0; i < 2; i++) { futures.add(executorService.submit(new DataPropagationTask(baseDocumentTx, testDocumentTx))); } Thread.sleep(900000); long lastTs = System.currentTimeMillis(); process.destroy(); for (Future future : futures) { try { future.get(); } catch (Exception e) { e.printStackTrace(); } } testDocumentTx = new ODatabaseDocumentTx("plocal:" + buildDir.getAbsolutePath() + "/testLocalPaginatedStorageSmallCacheBigRecordsCrashRestore"); testDocumentTx.open("admin", "admin"); testDocumentTx.close(); testDocumentTx.open("admin", "admin"); compareDocuments(lastTs); } private void createSchema(ODatabaseDocumentTx dbDocumentTx) { ODatabaseRecordThreadLocal.INSTANCE.set(dbDocumentTx); OSchema schema = dbDocumentTx.getMetadata().getSchema(); if (!schema.existsClass("TestClass")) { OClass testClass = schema.createClass("TestClass"); testClass.createProperty("id", OType.LONG); testClass.createProperty("timestamp", OType.LONG); testClass.createProperty("stringValue", OType.STRING); testClass.createProperty("binaryValue", OType.BINARY); testClass.createIndex("idIndex", OClass.INDEX_TYPE.UNIQUE, "id"); schema.save(); } } private void compareDocuments(long lastTs) { long minTs = Long.MAX_VALUE; int clusterId = baseDocumentTx.getClusterIdByName("TestClass"); OStorage baseStorage = baseDocumentTx.getStorage(); OPhysicalPosition[] physicalPositions = baseStorage.ceilingPhysicalPositions(clusterId, new OPhysicalPosition( OClusterPositionFactory.INSTANCE.valueOf(0))); int recordsRestored = 0; int recordsTested = 0; 
while (physicalPositions.length > 0) { final ORecordId rid = new ORecordId(clusterId); for (OPhysicalPosition physicalPosition : physicalPositions) { rid.clusterPosition = physicalPosition.clusterPosition; ODatabaseRecordThreadLocal.INSTANCE.set(baseDocumentTx); ODocument baseDocument = baseDocumentTx.load(rid); ODatabaseRecordThreadLocal.INSTANCE.set(testDocumentTx); List<ODocument> testDocuments = testDocumentTx.query(new OSQLSynchQuery<ODocument>("select from TestClass where id = " + baseDocument.field("id"))); if (testDocuments.size() == 0) { if (((Long) baseDocument.field("timestamp")) < minTs) minTs = baseDocument.field("timestamp"); } else { ODocument testDocument = testDocuments.get(0); Assert.assertEquals(testDocument.field("id"), baseDocument.field("id")); Assert.assertEquals(testDocument.field("timestamp"), baseDocument.field("timestamp")); Assert.assertEquals(testDocument.field("stringValue"), baseDocument.field("stringValue")); Assert.assertEquals(testDocument.field("binaryValue"), baseDocument.field("binaryValue")); recordsRestored++; } recordsTested++; if (recordsTested % 10000 == 0) System.out.println(recordsTested + " were tested, " + recordsRestored + " were restored ..."); } physicalPositions = baseStorage.higherPhysicalPositions(clusterId, physicalPositions[physicalPositions.length - 1]); } System.out.println(recordsRestored + " records were restored. Total records " + recordsTested + ". Max interval for lost records " + (lastTs - minTs)); } public static final class RemoteDBRunner { public static void main(String[] args) throws Exception { OGlobalConfiguration.CACHE_LEVEL1_ENABLED.setValue(false); OGlobalConfiguration.CACHE_LEVEL1_SIZE.setValue(0); OGlobalConfiguration.CACHE_LEVEL2_ENABLED.setValue(false); OGlobalConfiguration.CACHE_LEVEL2_SIZE.setValue(0); OGlobalConfiguration.DISK_CACHE_SIZE.setValue(512); OServer server = OServerMain.create(); server.startup(RemoteDBRunner.class .getResourceAsStream("/com/orientechnologies/orient/core/storage/impl/local/paginated/db-create-big-records-config.xml")); server.activate(); while (true) ; } } public class DataPropagationTask implements Callable<Void> { private ODatabaseDocumentTx baseDB; private ODatabaseDocumentTx testDB; public DataPropagationTask(ODatabaseDocumentTx baseDB, ODatabaseDocumentTx testDocumentTx) { this.baseDB = new ODatabaseDocumentTx(baseDB.getURL()); this.testDB = new ODatabaseDocumentTx(testDocumentTx.getURL()); } @Override public Void call() throws Exception { Random random = new Random(); baseDB.open("admin", "admin"); testDB.open("admin", "admin"); try { while (true) { final ODocument document = new ODocument("TestClass"); document.field("id", idGen.getAndIncrement()); document.field("timestamp", System.currentTimeMillis()); document.field("stringValue", "sfe" + random.nextLong()); byte[] binaryValue = new byte[random.nextInt(2 * 65536) + 65537]; random.nextBytes(binaryValue); document.field("binaryValue", binaryValue); saveDoc(document); } } finally { baseDB.close(); testDB.close(); } } private void saveDoc(ODocument document) { ODatabaseRecordThreadLocal.INSTANCE.set(baseDB); ODocument testDoc = new ODocument(); document.copyTo(testDoc); document.save(); ODatabaseRecordThreadLocal.INSTANCE.set(testDB); testDoc.save(); ODatabaseRecordThreadLocal.INSTANCE.set(baseDB); } } }
1no label
server_src_test_java_com_orientechnologies_orient_core_storage_impl_local_paginated_LocalPaginatedStorageSmallCacheBigRecordsCrashRestore.java
2,274
public class PropertyPlaceholder { private final String placeholderPrefix; private final String placeholderSuffix; private final boolean ignoreUnresolvablePlaceholders; /** * Creates a new <code>PropertyPlaceholder</code> that uses the supplied prefix and suffix. Unresolvable * placeholders are ignored. * * @param placeholderPrefix the prefix that denotes the start of a placeholder. * @param placeholderSuffix the suffix that denotes the end of a placeholder. */ public PropertyPlaceholder(String placeholderPrefix, String placeholderSuffix) { this(placeholderPrefix, placeholderSuffix, true); } /** * Creates a new <code>PropertyPlaceholder</code> that uses the supplied prefix and suffix. * * @param placeholderPrefix the prefix that denotes the start of a placeholder. * @param placeholderSuffix the suffix that denotes the end of a placeholder. * @param ignoreUnresolvablePlaceholders indicates whether unresolvable placeholders should be ignored * (<code>true</code>) or cause an exception (<code>false</code>). */ public PropertyPlaceholder(String placeholderPrefix, String placeholderSuffix, boolean ignoreUnresolvablePlaceholders) { Preconditions.checkNotNull(placeholderPrefix, "Argument 'placeholderPrefix' must not be null."); Preconditions.checkNotNull(placeholderSuffix, "Argument 'placeholderSuffix' must not be null."); this.placeholderPrefix = placeholderPrefix; this.placeholderSuffix = placeholderSuffix; this.ignoreUnresolvablePlaceholders = ignoreUnresolvablePlaceholders; } /** * Replaces all placeholders of format <code>${name}</code> with the value returned from the supplied {@link * PlaceholderResolver}. * * @param value the value containing the placeholders to be replaced. * @param placeholderResolver the <code>PlaceholderResolver</code> to use for replacement. * @return the supplied value with placeholders replaced inline. */ public String replacePlaceholders(String value, PlaceholderResolver placeholderResolver) { Preconditions.checkNotNull(value, "Argument 'value' must not be null."); return parseStringValue(value, placeholderResolver, new HashSet<String>()); } protected String parseStringValue(String strVal, PlaceholderResolver placeholderResolver, Set<String> visitedPlaceholders) { StringBuilder buf = new StringBuilder(strVal); int startIndex = strVal.indexOf(this.placeholderPrefix); while (startIndex != -1) { int endIndex = findPlaceholderEndIndex(buf, startIndex); if (endIndex != -1) { String placeholder = buf.substring(startIndex + this.placeholderPrefix.length(), endIndex); if (!visitedPlaceholders.add(placeholder)) { throw new IllegalArgumentException( "Circular placeholder reference '" + placeholder + "' in property definitions"); } // Recursive invocation, parsing placeholders contained in the placeholder key. placeholder = parseStringValue(placeholder, placeholderResolver, visitedPlaceholders); // Now obtain the value for the fully resolved key... int defaultValueIdx = placeholder.indexOf(':'); String defaultValue = null; if (defaultValueIdx != -1) { defaultValue = placeholder.substring(defaultValueIdx + 1); placeholder = placeholder.substring(0, defaultValueIdx); } String propVal = placeholderResolver.resolvePlaceholder(placeholder); if (propVal == null) { propVal = defaultValue; } if (propVal == null && placeholderResolver.shouldIgnoreMissing(placeholder)) { propVal = ""; } if (propVal != null) { // Recursive invocation, parsing placeholders contained in the // previously resolved placeholder value. 
propVal = parseStringValue(propVal, placeholderResolver, visitedPlaceholders); buf.replace(startIndex, endIndex + this.placeholderSuffix.length(), propVal); startIndex = buf.indexOf(this.placeholderPrefix, startIndex + propVal.length()); } else if (this.ignoreUnresolvablePlaceholders) { // Proceed with unprocessed value. startIndex = buf.indexOf(this.placeholderPrefix, endIndex + this.placeholderSuffix.length()); } else { throw new IllegalArgumentException("Could not resolve placeholder '" + placeholder + "'"); } visitedPlaceholders.remove(placeholder); } else { startIndex = -1; } } return buf.toString(); } private int findPlaceholderEndIndex(CharSequence buf, int startIndex) { int index = startIndex + this.placeholderPrefix.length(); int withinNestedPlaceholder = 0; while (index < buf.length()) { if (Strings.substringMatch(buf, index, this.placeholderSuffix)) { if (withinNestedPlaceholder > 0) { withinNestedPlaceholder--; index = index + this.placeholderPrefix.length() - 1; } else { return index; } } else if (Strings.substringMatch(buf, index, this.placeholderPrefix)) { withinNestedPlaceholder++; index = index + this.placeholderPrefix.length(); } else { index++; } } return -1; } /** * Strategy interface used to resolve replacement values for placeholders contained in Strings. * * @see PropertyPlaceholder */ public static interface PlaceholderResolver { /** * Resolves the supplied placeholder name into the replacement value. * * @param placeholderName the name of the placeholder to resolve. * @return the replacement value or <code>null</code> if no replacement is to be made. */ String resolvePlaceholder(String placeholderName); boolean shouldIgnoreMissing(String placeholderName); } }
1no label
src_main_java_org_elasticsearch_common_property_PropertyPlaceholder.java
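A minimal usage sketch for the PropertyPlaceholder class above, assuming it and its nested PlaceholderResolver interface are on the classpath; the property names here are invented. Note how the ':' default kicks in when the resolver returns null:

import java.util.HashMap;
import java.util.Map;

public class PlaceholderDemo {
    public static void main(String[] args) {
        final Map<String, String> props = new HashMap<String, String>();
        props.put("cluster.name", "demo-cluster"); // hypothetical property

        PropertyPlaceholder placeholder = new PropertyPlaceholder("${", "}");
        String resolved = placeholder.replacePlaceholders(
                "node of ${cluster.name} on port ${port:9300}",
                new PropertyPlaceholder.PlaceholderResolver() {
                    public String resolvePlaceholder(String placeholderName) {
                        return props.get(placeholderName); // null falls back to the ':' default
                    }

                    public boolean shouldIgnoreMissing(String placeholderName) {
                        return false;
                    }
                });
        System.out.println(resolved); // node of demo-cluster on port 9300
    }
}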
1,745
public class LZFCompressor implements Compressor { static final byte[] LUCENE_HEADER = {'L', 'Z', 'F', 0}; public static final String TYPE = "lzf"; private ChunkDecoder decoder; public LZFCompressor() { if (Constants.SUN_OS) { this.decoder = ChunkDecoderFactory.safeInstance(); } else { this.decoder = ChunkDecoderFactory.optimalInstance(); } Loggers.getLogger(LZFCompressor.class).debug("using [{}] decoder", this.decoder.getClass().getSimpleName()); } @Override public String type() { return TYPE; } @Override public void configure(Settings settings) { String decoderType = settings.get("compress.lzf.decoder", null); if (decoderType != null) { if ("optimal".equalsIgnoreCase(decoderType)) { this.decoder = ChunkDecoderFactory.optimalInstance(); Loggers.getLogger(LZFCompressor.class).debug("using [{}] decoder", this.decoder.getClass().getSimpleName()); } else if ("safe".equalsIgnoreCase(decoderType)) { this.decoder = ChunkDecoderFactory.safeInstance(); Loggers.getLogger(LZFCompressor.class).debug("using [{}] decoder", this.decoder.getClass().getSimpleName()); } else { Loggers.getLogger(LZFCompressor.class).warn("decoder type not recognized [{}], still using [{}]", decoderType, this.decoder.getClass().getSimpleName()); } } } @Override public boolean isCompressed(BytesReference bytes) { return bytes.length() >= 3 && bytes.get(0) == LZFChunk.BYTE_Z && bytes.get(1) == LZFChunk.BYTE_V && (bytes.get(2) == LZFChunk.BLOCK_TYPE_COMPRESSED || bytes.get(2) == LZFChunk.BLOCK_TYPE_NON_COMPRESSED); } @Override public boolean isCompressed(byte[] data, int offset, int length) { return length >= 3 && data[offset] == LZFChunk.BYTE_Z && data[offset + 1] == LZFChunk.BYTE_V && (data[offset + 2] == LZFChunk.BLOCK_TYPE_COMPRESSED || data[offset + 2] == LZFChunk.BLOCK_TYPE_NON_COMPRESSED); } @Override public boolean isCompressed(ChannelBuffer buffer) { int offset = buffer.readerIndex(); return buffer.readableBytes() >= 3 && buffer.getByte(offset) == LZFChunk.BYTE_Z && buffer.getByte(offset + 1) == LZFChunk.BYTE_V && (buffer.getByte(offset + 2) == LZFChunk.BLOCK_TYPE_COMPRESSED || buffer.getByte(offset + 2) == LZFChunk.BLOCK_TYPE_NON_COMPRESSED); } @Override public boolean isCompressed(IndexInput in) throws IOException { long currentPointer = in.getFilePointer(); // since we have some metadata before the first compressed header, we check on our specific header if (in.length() - currentPointer < (LUCENE_HEADER.length)) { return false; } for (int i = 0; i < LUCENE_HEADER.length; i++) { if (in.readByte() != LUCENE_HEADER[i]) { in.seek(currentPointer); return false; } } in.seek(currentPointer); return true; } @Override public byte[] uncompress(byte[] data, int offset, int length) throws IOException { return decoder.decode(data, offset, length); } @Override public byte[] compress(byte[] data, int offset, int length) throws IOException { return LZFEncoder.encode(data, offset, length); } @Override public CompressedStreamInput streamInput(StreamInput in) throws IOException { return new LZFCompressedStreamInput(in, decoder); } @Override public CompressedStreamOutput streamOutput(StreamOutput out) throws IOException { return new LZFCompressedStreamOutput(out); } @Override public CompressedIndexInput indexInput(IndexInput in) throws IOException { return new LZFCompressedIndexInput(in, decoder); } }
0true
src_main_java_org_elasticsearch_common_compress_lzf_LZFCompressor.java
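The isCompressed() overloads above all perform the same magic-byte check on the LZF chunk header. A hedged standalone sketch of that check, using the literal values 'Z', 'V', 0 and 1 that the compress-lzf constants LZFChunk.BYTE_Z, BYTE_V, BLOCK_TYPE_NON_COMPRESSED and BLOCK_TYPE_COMPRESSED are understood to hold:

public class LzfHeaderDemo {
    static boolean looksLikeLzf(byte[] data) {
        return data.length >= 3
                && data[0] == 'Z'
                && data[1] == 'V'
                && (data[2] == 0 || data[2] == 1); // non-compressed or compressed block type
    }

    public static void main(String[] args) {
        System.out.println(looksLikeLzf(new byte[]{'Z', 'V', 1, 42})); // true
        System.out.println(looksLikeLzf(new byte[]{'P', 'K', 3, 4}));  // false (ZIP header)
    }
}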
3,593
public static class Defaults extends NumberFieldMapper.Defaults { public static final FieldType FIELD_TYPE = new FieldType(NumberFieldMapper.Defaults.FIELD_TYPE); static { FIELD_TYPE.freeze(); } public static final Long NULL_VALUE = null; }
0true
src_main_java_org_elasticsearch_index_mapper_core_LongFieldMapper.java
2,859
public class ReplicaSyncRetryResponse extends Operation implements PartitionAwareOperation, BackupOperation, UrgentSystemOperation { public ReplicaSyncRetryResponse() { } public void beforeRun() throws Exception { } public void run() throws Exception { final NodeEngineImpl nodeEngine = (NodeEngineImpl) getNodeEngine(); final InternalPartitionServiceImpl partitionService = (InternalPartitionServiceImpl) nodeEngine.getPartitionService(); final int partitionId = getPartitionId(); final int replicaIndex = getReplicaIndex(); partitionService.schedulePartitionReplicaSync(partitionId, replicaIndex, InternalPartitionService.REPLICA_SYNC_RETRY_DELAY); } public void afterRun() throws Exception { } public boolean returnsResponse() { return false; } public Object getResponse() { return null; } public boolean validatesTarget() { return true; } public void logError(Throwable e) { ReplicaErrorLogger.log(e, getLogger()); } protected void writeInternal(ObjectDataOutput out) throws IOException { } protected void readInternal(ObjectDataInput in) throws IOException { } @Override public String toString() { final StringBuilder sb = new StringBuilder(); sb.append("ReplicaSyncRetryResponse"); sb.append("{partition=").append(getPartitionId()); sb.append(", replica=").append(getReplicaIndex()); sb.append('}'); return sb.toString(); } }
1no label
hazelcast_src_main_java_com_hazelcast_partition_impl_ReplicaSyncRetryResponse.java
2,036
public class AddInterceptorOperation extends AbstractOperation { MapService mapService; String id; MapInterceptor mapInterceptor; String mapName; public AddInterceptorOperation(String id, MapInterceptor mapInterceptor, String mapName) { this.id = id; this.mapInterceptor = mapInterceptor; this.mapName = mapName; } public AddInterceptorOperation() { } public void run() { mapService = getService(); mapService.getMapContainer(mapName).addInterceptor(id, mapInterceptor); } @Override public boolean returnsResponse() { return true; } public Object getResponse() { return true; } @Override public void readInternal(ObjectDataInput in) throws IOException { super.readInternal(in); mapName = in.readUTF(); id = in.readUTF(); mapInterceptor = in.readObject(); } @Override public void writeInternal(ObjectDataOutput out) throws IOException { super.writeInternal(out); out.writeUTF(mapName); out.writeUTF(id); out.writeObject(mapInterceptor); } @Override public String toString() { return "AddInterceptorOperation{}"; } }
0true
hazelcast_src_main_java_com_hazelcast_map_operation_AddInterceptorOperation.java
1,032
execute(request, new ActionListener<Response>() { @Override public void onResponse(Response result) { try { channel.sendResponse(result); } catch (Throwable e) { onFailure(e); } } @Override public void onFailure(Throwable e) { try { channel.sendResponse(e); } catch (Exception e1) { logger.warn("failed to send response for get", e1); } } });
0true
src_main_java_org_elasticsearch_action_support_single_shard_TransportShardSingleOperationAction.java
2,011
binder.bind(mapKey).toProvider(new ProviderWithDependencies<Map<K, V>>() { public Map<K, V> get() { Map<K, V> map = new LinkedHashMap<K, V>(); for (Entry<K, Provider<V>> entry : mapProvider.get().entrySet()) { V value = entry.getValue().get(); K key = entry.getKey(); Multibinder.checkConfiguration(value != null, "Map injection failed due to null value for key \"%s\"", key); map.put(key, value); } return Collections.unmodifiableMap(map); } public Set<Dependency<?>> getDependencies() { return dependencies; } });
0true
src_main_java_org_elasticsearch_common_inject_multibindings_MapBinder.java
258
return Iterators.filter(rowIterator, new Predicate<Row>() { @Override public boolean apply(@Nullable Row row) { return row != null && !row.key.key.equals(exceptKey); } });
0true
titan-cassandra_src_main_java_com_thinkaurelius_titan_diskstorage_cassandra_embedded_CassandraEmbeddedKeyColumnValueStore.java
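The snippet above is the standard Guava pattern for lazy filtering: Iterators.filter wraps the source iterator and skips elements the predicate rejects as the caller advances. A self-contained sketch of the same pattern over plain Strings (Guava on the classpath is assumed):

import java.util.Arrays;
import java.util.Iterator;

import com.google.common.base.Predicate;
import com.google.common.collect.Iterators;

public class FilterDemo {
    public static void main(String[] args) {
        Iterator<String> rows = Arrays.asList("row-1", null, "skip-me", "row-2").iterator();
        Iterator<String> filtered = Iterators.filter(rows, new Predicate<String>() {
            @Override
            public boolean apply(String row) {
                return row != null && !row.equals("skip-me"); // mirrors the null/except-key guard above
            }
        });
        while (filtered.hasNext()) {
            System.out.println(filtered.next()); // row-1, row-2
        }
    }
}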
1,496
private class AddressDefinition extends InterfaceDefinition { InetAddress inetAddress; int port; private AddressDefinition() { } private AddressDefinition(final InetAddress inetAddress) { super(inetAddress.getHostAddress()); this.inetAddress = inetAddress; } private AddressDefinition(final String host, final InetAddress inetAddress) { super(host, inetAddress.getHostAddress()); this.inetAddress = inetAddress; } private AddressDefinition(final String host, final int port, final InetAddress inetAddress) { super(host, inetAddress.getHostAddress()); this.inetAddress = inetAddress; this.port = port; } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } if (!super.equals(o)){ return false; } AddressDefinition that = (AddressDefinition) o; if (port != that.port) { return false; } if (inetAddress != null ? !inetAddress.equals(that.inetAddress) : that.inetAddress != null) { return false; } return true; } @Override public int hashCode() { int result = super.hashCode(); result = 31 * result + (inetAddress != null ? inetAddress.hashCode() : 0); result = 31 * result + port; return result; } }
0true
hazelcast_src_main_java_com_hazelcast_instance_DefaultAddressPicker.java
271
public class MapPutRunnable implements Runnable, DataSerializable, HazelcastInstanceAware { private HazelcastInstance instance; public String mapName; public MapPutRunnable(){} public MapPutRunnable(String mapName) { this.mapName = mapName; } public void writeData(ObjectDataOutput out) throws IOException { out.writeUTF(mapName); } public void readData(ObjectDataInput in) throws IOException { mapName = in.readUTF(); } public void run() { Member member = instance.getCluster().getLocalMember(); IMap map = instance.getMap(mapName); map.put(member.getUuid(), member.getUuid()+"value"); } @Override public void setHazelcastInstance(HazelcastInstance hazelcastInstance) { instance = hazelcastInstance; } public String getMapName() { return mapName; } public void setMapName(String mapName) { this.mapName = mapName; } }
0true
hazelcast-client_src_test_java_com_hazelcast_client_executor_tasks_MapPutRunnable.java
479
private static class Data { final Entry[] array; final int size; Data(final Entry[] array, final int size) { Preconditions.checkArgument(size >= 0 && size <= array.length); this.array = array; this.size = size; assert isSorted(); } boolean isEmpty() { return size == 0; } int getIndex(StaticBuffer column) { return Arrays.binarySearch(array, 0, size, StaticArrayEntry.of(column)); } Entry get(int index) { return array[index]; } boolean isSorted() { for (int i = 1; i < size; i++) { if (!(array[i].compareTo(array[i - 1]) > 0)) return false; } return true; } }
0true
titan-core_src_main_java_com_thinkaurelius_titan_diskstorage_keycolumnvalue_inmemory_ColumnValueStore.java
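getIndex() above relies on Arrays.binarySearch over the sorted prefix of the entry array; when the key is absent, binarySearch encodes the insertion point as (-(insertionPoint) - 1). A generic sketch of decoding that return value:

import java.util.Arrays;

public class BinarySearchDemo {
    public static void main(String[] args) {
        int[] sorted = {10, 20, 30};
        System.out.println(Arrays.binarySearch(sorted, 0, sorted.length, 20)); // 1 (found)

        int miss = Arrays.binarySearch(sorted, 0, sorted.length, 25);
        System.out.println(miss);      // -3 (not found)
        System.out.println(-miss - 1); // 2 = index where 25 would be inserted
    }
}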
2,824
public final class CJKBigramFilterFactory extends AbstractTokenFilterFactory { private final int flags; private final boolean outputUnigrams; @Inject public CJKBigramFilterFactory(Index index, @IndexSettings Settings indexSettings, @Assisted String name, @Assisted Settings settings) { super(index, indexSettings, name, settings); outputUnigrams = settings.getAsBoolean("output_unigrams", false); final String[] asArray = settings.getAsArray("ignored_scripts"); Set<String> scripts = new HashSet<String>(Arrays.asList("han", "hiragana", "katakana", "hangul")); if (asArray != null) { scripts.removeAll(Arrays.asList(asArray)); } int flags = 0; for (String script : scripts) { if ("han".equals(script)) { flags |= CJKBigramFilter.HAN; } else if ("hiragana".equals(script)) { flags |= CJKBigramFilter.HIRAGANA; } else if ("katakana".equals(script)) { flags |= CJKBigramFilter.KATAKANA; } else if ("hangul".equals(script)) { flags |= CJKBigramFilter.HANGUL; } } this.flags = flags; } @Override public TokenStream create(TokenStream tokenStream) { return new CJKBigramFilter(tokenStream, flags, outputUnigrams); } }
0true
src_main_java_org_elasticsearch_index_analysis_CJKBigramFilterFactory.java
1,102
public class SecurityConfig { private boolean enabled = false; private CredentialsFactoryConfig memberCredentialsConfig = new CredentialsFactoryConfig(); private List<LoginModuleConfig> memberLoginModuleConfigs = new ArrayList<LoginModuleConfig>(); private List<LoginModuleConfig> clientLoginModuleConfigs = new ArrayList<LoginModuleConfig>(); private PermissionPolicyConfig clientPolicyConfig = new PermissionPolicyConfig(); private Set<PermissionConfig> clientPermissionConfigs = new HashSet<PermissionConfig>(); public boolean isEnabled() { return enabled; } public SecurityConfig setEnabled(boolean enabled) { this.enabled = enabled; return this; } public SecurityConfig addMemberLoginModuleConfig(LoginModuleConfig loginModuleConfig) { memberLoginModuleConfigs.add(loginModuleConfig); return this; } public SecurityConfig addClientLoginModuleConfig(LoginModuleConfig loginModuleConfig) { clientLoginModuleConfigs.add(loginModuleConfig); return this; } public SecurityConfig addClientPermissionConfig(PermissionConfig permissionConfig) { clientPermissionConfigs.add(permissionConfig); return this; } public List<LoginModuleConfig> getClientLoginModuleConfigs() { return clientLoginModuleConfigs; } public SecurityConfig setClientLoginModuleConfigs(List<LoginModuleConfig> loginModuleConfigs) { this.clientLoginModuleConfigs = loginModuleConfigs; return this; } public List<LoginModuleConfig> getMemberLoginModuleConfigs() { return memberLoginModuleConfigs; } public SecurityConfig setMemberLoginModuleConfigs(List<LoginModuleConfig> memberLoginModuleConfigs) { this.memberLoginModuleConfigs = memberLoginModuleConfigs; return this; } public PermissionPolicyConfig getClientPolicyConfig() { return clientPolicyConfig; } public SecurityConfig setClientPolicyConfig(PermissionPolicyConfig policyConfig) { this.clientPolicyConfig = policyConfig; return this; } public Set<PermissionConfig> getClientPermissionConfigs() { return clientPermissionConfigs; } public SecurityConfig setClientPermissionConfigs(Set<PermissionConfig> permissions) { this.clientPermissionConfigs = permissions; return this; } public CredentialsFactoryConfig getMemberCredentialsConfig() { return memberCredentialsConfig; } public SecurityConfig setMemberCredentialsConfig(CredentialsFactoryConfig credentialsFactoryConfig) { this.memberCredentialsConfig = credentialsFactoryConfig; return this; } @Override public String toString() { final StringBuilder sb = new StringBuilder(); sb.append("SecurityConfig"); sb.append("{enabled=").append(enabled); sb.append(", memberCredentialsConfig=").append(memberCredentialsConfig); sb.append(", memberLoginModuleConfigs=").append(memberLoginModuleConfigs); sb.append(", clientLoginModuleConfigs=").append(clientLoginModuleConfigs); sb.append(", clientPolicyConfig=").append(clientPolicyConfig); sb.append(", clientPermissionConfigs=").append(clientPermissionConfigs); sb.append('}'); return sb.toString(); } }
0true
hazelcast_src_main_java_com_hazelcast_config_SecurityConfig.java
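Since every setter in the SecurityConfig class above returns this, configuration calls chain fluently. A minimal usage sketch, assuming only the class above:

public class SecurityConfigDemo {
    public static void main(String[] args) {
        SecurityConfig config = new SecurityConfig()
                .setEnabled(true);

        System.out.println(config.isEnabled()); // true
        System.out.println(config);             // uses the toString() defined in the class above
    }
}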
1,740
map.addEntryListener(new EntryListener<Integer, Integer>() { @Override public void entryAdded(EntryEvent<Integer, Integer> event) { addCount.incrementAndGet(); if (event.getKey() == 1) { addKey1Sum.addAndGet(event.getValue()); } latch.countDown(); } @Override public void entryRemoved(EntryEvent<Integer, Integer> event) { removeCount.incrementAndGet(); if (event.getKey() == 1) { removeKey1Sum.addAndGet(event.getValue()); } latch.countDown(); } @Override public void entryUpdated(EntryEvent<Integer, Integer> event) { updateCount.incrementAndGet(); if (event.getKey() == 1) { updateKey1OldSum.addAndGet(event.getOldValue()); updateKey1Sum.addAndGet(event.getValue()); } latch.countDown(); } @Override public void entryEvicted(EntryEvent<Integer, Integer> event) { } }, true);
0true
hazelcast_src_test_java_com_hazelcast_map_EntryProcessorTest.java
2,996
static class FilterCacheFilterWrapper extends CachedFilter { private final Filter filter; private final WeightedFilterCache cache; FilterCacheFilterWrapper(Filter filter, WeightedFilterCache cache) { this.filter = filter; this.cache = cache; } @Override public DocIdSet getDocIdSet(AtomicReaderContext context, Bits acceptDocs) throws IOException { Object filterKey = filter; if (filter instanceof CacheKeyFilter) { filterKey = ((CacheKeyFilter) filter).cacheKey(); } FilterCacheKey cacheKey = new FilterCacheKey(context.reader().getCoreCacheKey(), filterKey); Cache<FilterCacheKey, DocIdSet> innerCache = cache.indicesFilterCache.cache(); DocIdSet cacheValue = innerCache.getIfPresent(cacheKey); if (cacheValue == null) { if (!cache.seenReaders.containsKey(context.reader().getCoreCacheKey())) { Boolean previous = cache.seenReaders.putIfAbsent(context.reader().getCoreCacheKey(), Boolean.TRUE); if (previous == null) { // we add a core closed listener only; for non core IndexReaders we rely on clear being called (percolator for example) if (context.reader() instanceof SegmentReader) { ((SegmentReader) context.reader()).addCoreClosedListener(cache); } } } // we can't pass down acceptedDocs provided, because we are caching the result, and acceptedDocs // might be specific to a query. We don't pass the live docs either because a cache built for a specific // generation of a segment might be reused by an older generation which has fewer deleted documents cacheValue = DocIdSets.toCacheable(context.reader(), filter.getDocIdSet(context, null)); // we might put the same one concurrently, that's fine, it will be replaced and the removal // will be called ShardId shardId = ShardUtils.extractShardId(context.reader()); if (shardId != null) { IndexShard shard = cache.indexService.shard(shardId.id()); if (shard != null) { cacheKey.removalListener = shard.filterCache(); shard.filterCache().onCached(DocIdSets.sizeInBytes(cacheValue)); } } innerCache.put(cacheKey, cacheValue); } // note, we don't wrap the return value with a BitsFilteredDocIdSet.wrap(docIdSet, acceptDocs) because // we rely on our custom XFilteredQuery to do the wrapping if needed, so we don't have to wrap each // filter on its own return DocIdSets.isEmpty(cacheValue) ? null : cacheValue; } public String toString() { return "cache(" + filter + ")"; } public boolean equals(Object o) { if (!(o instanceof FilterCacheFilterWrapper)) return false; return this.filter.equals(((FilterCacheFilterWrapper) o).filter); } public int hashCode() { return filter.hashCode() ^ 0x1117BF25; } }
0true
src_main_java_org_elasticsearch_index_cache_filter_weighted_WeightedFilterCache.java
1,477
public final class RoutingNodeIterator implements Iterator<MutableShardRouting>, Iterable<MutableShardRouting> { private final RoutingNode iterable; private MutableShardRouting shard; private final Iterator<MutableShardRouting> delegate; public RoutingNodeIterator(RoutingNode iterable) { this.delegate = iterable.mutableIterator(); this.iterable = iterable; } @Override public boolean hasNext() { return delegate.hasNext(); } @Override public MutableShardRouting next() { return shard = delegate.next(); } public void remove() { delegate.remove(); RoutingNodes.this.remove(shard); } @Override public Iterator<MutableShardRouting> iterator() { return iterable.iterator(); } public void moveToUnassigned() { iterator().remove(); unassigned().add(new MutableShardRouting(shard.index(), shard.id(), null, shard.primary(), ShardRoutingState.UNASSIGNED, shard.version() + 1)); } }
0true
src_main_java_org_elasticsearch_cluster_routing_RoutingNodes.java
999
public static class Order { public static final int General = 1000; public static final int Pricing = 2000; }
0true
core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_order_domain_FulfillmentGroupImpl.java
1,419
public final class HazelcastAccessor { private HazelcastAccessor(){} static final ILogger logger = Logger.getLogger(HazelcastAccessor.class); /** * Tries to extract <code>HazelcastInstance</code> from <code>Session</code>. * * @param session * @return Currently used <code>HazelcastInstance</code> or null if an error occurs. */ public static HazelcastInstance getHazelcastInstance(final Session session) { return getHazelcastInstance(session.getSessionFactory()); } /** * Tries to extract <code>HazelcastInstance</code> from <code>SessionFactory</code>. * * @param sessionFactory * @return Currently used <code>HazelcastInstance</code> or null if an error occurs. */ public static HazelcastInstance getHazelcastInstance(final SessionFactory sessionFactory) { if (!(sessionFactory instanceof SessionFactoryImplementor)) { logger.warning("SessionFactory is expected to be instance of SessionFactoryImplementor."); return null; } return getHazelcastInstance((SessionFactoryImplementor) sessionFactory); } /** * Tries to extract <code>HazelcastInstance</code> from <code>SessionFactoryImplementor</code>. * * @param sessionFactory * @return currently used <code>HazelcastInstance</code> or null if an error occurs. */ public static HazelcastInstance getHazelcastInstance(final SessionFactoryImplementor sessionFactory) { final Settings settings = sessionFactory.getSettings(); final RegionFactory rf = settings.getRegionFactory(); if (rf == null) { logger.severe("Hibernate 2nd level cache has not been enabled!"); return null; } if (rf instanceof AbstractHazelcastCacheRegionFactory) { return ((AbstractHazelcastCacheRegionFactory) rf).getHazelcastInstance(); } else { logger.warning("Current 2nd level cache implementation is not HazelcastCacheRegionFactory!"); } return null; } }
0true
hazelcast-hibernate_hazelcast-hibernate3_src_main_java_com_hazelcast_hibernate_instance_HazelcastAccessor.java
921
public abstract class TransportAction<Request extends ActionRequest, Response extends ActionResponse> extends AbstractComponent { protected final ThreadPool threadPool; protected TransportAction(Settings settings, ThreadPool threadPool) { super(settings); this.threadPool = threadPool; } public ActionFuture<Response> execute(Request request) throws ElasticsearchException { PlainActionFuture<Response> future = newFuture(); // since we don't have a listener, and we release a possible lock with the future // there is no need to execute it under a listener thread request.listenerThreaded(false); execute(request, future); return future; } public void execute(Request request, ActionListener<Response> listener) { if (request.listenerThreaded()) { listener = new ThreadedActionListener<Response>(threadPool, listener, logger); } ActionRequestValidationException validationException = request.validate(); if (validationException != null) { listener.onFailure(validationException); return; } try { doExecute(request, listener); } catch (Throwable e) { logger.trace("Error during transport action execution.", e); listener.onFailure(e); } } protected abstract void doExecute(Request request, ActionListener<Response> listener); static final class ThreadedActionListener<Response> implements ActionListener<Response> { private final ThreadPool threadPool; private final ActionListener<Response> listener; private final ESLogger logger; ThreadedActionListener(ThreadPool threadPool, ActionListener<Response> listener, ESLogger logger) { this.threadPool = threadPool; this.listener = listener; this.logger = logger; } @Override public void onResponse(final Response response) { try { threadPool.generic().execute(new Runnable() { @Override public void run() { try { listener.onResponse(response); } catch (Throwable e) { listener.onFailure(e); } } }); } catch (EsRejectedExecutionException ex) { logger.debug("Can not run threaded action, execution rejected [{}] running on current thread", listener); /* we don't care if that takes long since we are shutting down. But if we do not respond somebody could wait * for the response on the listener side which could be a remote machine so make sure we push it out there.*/ try { listener.onResponse(response); } catch (Throwable e) { listener.onFailure(e); } } } @Override public void onFailure(final Throwable e) { try { threadPool.generic().execute(new Runnable() { @Override public void run() { listener.onFailure(e); } }); } catch (EsRejectedExecutionException ex) { logger.debug("Can not run threaded action, execution rejected for listener [{}] running on current thread", listener); /* we don't care if that takes long since we are shutting down. But if we do not respond somebody could wait * for the response on the listener side which could be a remote machine so make sure we push it out there.*/ listener.onFailure(e); } } } }
0true
src_main_java_org_elasticsearch_action_support_TransportAction.java
796
private static class WoohaaException extends RuntimeException { }
0true
hazelcast_src_test_java_com_hazelcast_concurrent_atomiclong_AtomicLongTest.java
273
class LazyIndexUpdates implements IndexUpdates { private final NodeStore nodeStore; private final PropertyStore propertyStore; private final Collection<List<PropertyCommand>> propCommands; private final Map<Long, NodeCommand> nodeCommands; private Collection<NodePropertyUpdate> updates; public LazyIndexUpdates( NodeStore nodeStore, PropertyStore propertyStore, Collection<List<PropertyCommand>> propCommands, Map<Long, NodeCommand> nodeCommands ) { this.nodeStore = nodeStore; this.propertyStore = propertyStore; this.propCommands = propCommands; this.nodeCommands = nodeCommands; } @Override public Iterator<NodePropertyUpdate> iterator() { if ( updates == null ) { updates = gatherPropertyAndLabelUpdates(); } return updates.iterator(); } @Override public Set<Long> changedNodeIds() { Set<Long> nodeIds = new HashSet<>( nodeCommands.keySet() ); for ( List<PropertyCommand> propCmd : propCommands ) { PropertyRecord record = propCmd.get( 0 ).getAfter(); if ( record.isNodeSet() ) { nodeIds.add( record.getNodeId() ); } } return nodeIds; } private Collection<NodePropertyUpdate> gatherPropertyAndLabelUpdates() { Collection<NodePropertyUpdate> propertyUpdates = new HashSet<>(); Map<Pair<Long, Integer>, NodePropertyUpdate> propertyChanges = new HashMap<>(); gatherUpdatesFromPropertyCommands( propertyUpdates, propertyChanges ); gatherUpdatesFromNodeCommands( propertyUpdates, propertyChanges ); return propertyUpdates; } private void gatherUpdatesFromPropertyCommands( Collection<NodePropertyUpdate> updates, Map<Pair<Long, Integer>, NodePropertyUpdate> propertyLookup ) { for ( List<PropertyCommand> propertyCommands : propCommands ) { // Let the after-state of the first command here be representative of the whole group PropertyRecord representative = propertyCommands.get( 0 ).getAfter(); if ( !representative.isNodeSet() ) { // These changes weren't for a node, so skip them continue; } long nodeId = representative.getNodeId(); long[] nodeLabelsBefore, nodeLabelsAfter; NodeCommand nodeChanges = nodeCommands.get( nodeId ); if ( nodeChanges != null ) { nodeLabelsBefore = parseLabelsField( nodeChanges.getBefore() ).get( nodeStore ); nodeLabelsAfter = parseLabelsField( nodeChanges.getAfter() ).get( nodeStore ); } else { /* If the node doesn't exist here then we've most likely encountered this scenario: * - TX1: Node N exists and has property record P * - rotate log * - TX2: P gets changed * - TX3: N gets deleted (also P, but that's irrelevant for this scenario) * - N is persisted to disk for some reason * - crash * - recover * - TX2: P has changed and updates to indexes are gathered. As part of that it tries to read * the labels of N (which does not exist a.t.m.). * * We can actually (if we disregard any potential inconsistencies) just assume that * if this happens and we're in recovery mode that the node in question will be deleted * in an upcoming transaction, so just skip this update. 
*/ NodeRecord nodeRecord = nodeStore.getRecord( nodeId ); nodeLabelsBefore = nodeLabelsAfter = parseLabelsField( nodeRecord ).get( nodeStore ); } propertyStore.toLogicalUpdates( updates, Iterables.<PropertyRecordChange,PropertyCommand>cast( propertyCommands ), nodeLabelsBefore, nodeLabelsAfter ); } for ( NodePropertyUpdate update : updates ) { if ( update.getUpdateMode() == UpdateMode.CHANGED ) { propertyLookup.put( Pair.of( update.getNodeId(), update.getPropertyKeyId() ), update ); } } } private void gatherUpdatesFromNodeCommands( Collection<NodePropertyUpdate> propertyUpdates, Map<Pair<Long, Integer>, NodePropertyUpdate> propertyLookup ) { for ( NodeCommand nodeCommand : nodeCommands.values() ) { long nodeId = nodeCommand.getKey(); long[] labelsBefore = parseLabelsField( nodeCommand.getBefore() ).get( nodeStore ); long[] labelsAfter = parseLabelsField( nodeCommand.getAfter() ).get( nodeStore ); if ( nodeCommand.getMode() != Mode.UPDATE ) { // For created and deleted nodes rely on the updates from the perspective of properties to cover it all // otherwise we'll get duplicate update during recovery, or cannot load properties if deleted. continue; } LabelChangeSummary summary = new LabelChangeSummary( labelsBefore, labelsAfter ); Iterator<DefinedProperty> properties = nodeFullyLoadProperties( nodeId ); while ( properties.hasNext() ) { DefinedProperty property = properties.next(); int propertyKeyId = property.propertyKeyId(); if ( summary.hasAddedLabels() ) { Object value = property.value(); propertyUpdates.add( add( nodeId, propertyKeyId, value, summary.getAddedLabels() ) ); } if ( summary.hasRemovedLabels() ) { NodePropertyUpdate propertyChange = propertyLookup.get( Pair.of( nodeId, propertyKeyId ) ); Object value = propertyChange == null ? property.value() : propertyChange.getValueBefore(); propertyUpdates.add( remove( nodeId, propertyKeyId, value, summary.getRemovedLabels() ) ); } } } } private Iterator<DefinedProperty> nodeFullyLoadProperties( long nodeId ) { IteratingPropertyReceiver receiver = new IteratingPropertyReceiver(); NeoStoreTransaction.loadProperties( propertyStore, nodeCommands.get( nodeId ).getAfter().getNextProp(), receiver ); return receiver; } }
0true
community_kernel_src_main_java_org_neo4j_kernel_impl_nioneo_xa_LazyIndexUpdates.java
3,065
public class SnapshotDeletionPolicy extends AbstractESDeletionPolicy { private final IndexDeletionPolicy primary; private final ConcurrentMap<Long, SnapshotHolder> snapshots = ConcurrentCollections.newConcurrentMap(); private volatile List<SnapshotIndexCommit> commits; private final Object mutex = new Object(); private SnapshotIndexCommit lastCommit; /** * Constructs a new snapshot deletion policy that wraps the provided deletion policy. */ @Inject public SnapshotDeletionPolicy(@Named("actual") IndexDeletionPolicy primary) { super(((IndexShardComponent) primary).shardId(), ((IndexShardComponent) primary).indexSettings()); this.primary = primary; } /** * Called by Lucene. Same as {@link #onCommit(java.util.List)}. */ public void onInit(List<? extends IndexCommit> commits) throws IOException { if (!commits.isEmpty()) { // this might be empty if we create a new index. // the behavior changed in Lucene 4.4, which calls onInit even with an empty commits list. onCommit(commits); } } /** * Called by Lucene. Wraps the provided commits with {@link SnapshotIndexCommit} * and delegates to the wrapped deletion policy. */ public void onCommit(List<? extends IndexCommit> commits) throws IOException { assert !commits.isEmpty() : "Commits must not be empty"; synchronized (mutex) { List<SnapshotIndexCommit> snapshotCommits = wrapCommits(commits); primary.onCommit(snapshotCommits); // clean up snapshots whose respective counts are 0 (should not really happen) for (Iterator<SnapshotHolder> it = snapshots.values().iterator(); it.hasNext(); ) { SnapshotHolder holder = it.next(); if (holder.counter <= 0) { it.remove(); } } // build the current commits list (all the ones that are not deleted by the primary) List<SnapshotIndexCommit> newCommits = new ArrayList<SnapshotIndexCommit>(); for (SnapshotIndexCommit commit : snapshotCommits) { if (!commit.isDeleted()) { newCommits.add(commit); } } this.commits = newCommits; // the last commit that is not deleted this.lastCommit = newCommits.get(newCommits.size() - 1); } } /** * Snapshots all the current commits in the index. Make sure to call * {@link SnapshotIndexCommits#release()} to release it. */ public SnapshotIndexCommits snapshots() throws IOException { synchronized (mutex) { if (commits == null) { throw new IllegalStateException("Snapshot deletion policy has not been initialized yet..."); } List<SnapshotIndexCommit> result = new ArrayList<SnapshotIndexCommit>(commits.size()); for (SnapshotIndexCommit commit : commits) { result.add(snapshot(commit)); } return new SnapshotIndexCommits(result); } } /** * Returns a snapshot of the index (for the last commit point). Make * sure to call {@link SnapshotIndexCommit#release()} in order to release it. */ public SnapshotIndexCommit snapshot() throws IOException { synchronized (mutex) { if (lastCommit == null) { throw new IllegalStateException("Snapshot deletion policy has not been initialized yet..."); } return snapshot(lastCommit); } } @Override public IndexDeletionPolicy clone() { // Lucene IW makes a clone internally but since we hold on to this instance // the clone will just be the identity. See InternalEngine recovery for why we need this. return this; } /** * Helper method to snapshot a given commit. 
*/ private SnapshotIndexCommit snapshot(SnapshotIndexCommit commit) throws IOException { SnapshotHolder snapshotHolder = snapshots.get(commit.getGeneration()); if (snapshotHolder == null) { snapshotHolder = new SnapshotHolder(0); snapshots.put(commit.getGeneration(), snapshotHolder); } snapshotHolder.counter++; return new OneTimeReleaseSnapshotIndexCommit(this, commit); } /** * Returns <tt>true</tt> if the version has been snapshotted. */ boolean isHeld(long version) { SnapshotDeletionPolicy.SnapshotHolder holder = snapshots.get(version); return holder != null && holder.counter > 0; } /** * Releases the version provided. Returns <tt>true</tt> if the release was successful. */ boolean release(long version) { synchronized (mutex) { SnapshotDeletionPolicy.SnapshotHolder holder = snapshots.get(version); if (holder == null) { return false; } if (holder.counter <= 0) { snapshots.remove(version); return false; } if (--holder.counter == 0) { snapshots.remove(version); } return true; } } /** * A class that wraps a {@link SnapshotIndexCommit} and makes sure that release will only * be called once on it. */ private static class OneTimeReleaseSnapshotIndexCommit extends SnapshotIndexCommit { private volatile boolean released = false; OneTimeReleaseSnapshotIndexCommit(SnapshotDeletionPolicy deletionPolicy, IndexCommit cp) throws IOException { super(deletionPolicy, cp); } @Override public boolean release() { if (released) { return false; } released = true; return ((SnapshotIndexCommit) delegate).release(); } } private static class SnapshotHolder { int counter; private SnapshotHolder(int counter) { this.counter = counter; } } private List<SnapshotIndexCommit> wrapCommits(List<? extends IndexCommit> commits) throws IOException { final int count = commits.size(); List<SnapshotIndexCommit> snapshotCommits = new ArrayList<SnapshotIndexCommit>(count); for (int i = 0; i < count; i++) { snapshotCommits.add(new SnapshotIndexCommit(this, commits.get(i))); } return snapshotCommits; } }
0true
src_main_java_org_elasticsearch_index_deletionpolicy_SnapshotDeletionPolicy.java
880
public interface OfferHolder { Offer getOffer(); BroadleafCurrency getCurrency(); }
0true
core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_offer_service_discount_domain_OfferHolder.java
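The OfferHolder contract above is small enough that a usage sketch may help. The following is a hypothetical implementation, not a class from the Broadleaf codebase; it assumes only the two accessors declared in the interface and the usual Broadleaf domain packages for the imports.

import org.broadleafcommerce.common.currency.domain.BroadleafCurrency;
import org.broadleafcommerce.core.offer.domain.Offer;

// Hypothetical holder pairing an Offer with the currency it was priced in.
public class SimpleOfferHolder implements OfferHolder {

    private final Offer offer;
    private final BroadleafCurrency currency;

    public SimpleOfferHolder(Offer offer, BroadleafCurrency currency) {
        this.offer = offer;
        this.currency = currency;
    }

    @Override
    public Offer getOffer() {
        return offer;
    }

    @Override
    public BroadleafCurrency getCurrency() {
        return currency;
    }
}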
3,711
private class ManagedThread extends Thread { public ManagedThread(Runnable target) { super(threadGroup, target, threadName); } @Override public void run() { try { super.run(); } catch (OutOfMemoryError e) { OutOfMemoryErrorDispatcher.onOutOfMemory(e); } } }
1no label
hazelcast_src_main_java_com_hazelcast_util_executor_SingleExecutorThreadFactory.java
2,968
public class WordDelimiterTokenFilterFactoryTests extends ElasticsearchTokenStreamTestCase { @Test public void testDefault() throws IOException { AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settingsBuilder() .put("index.analysis.filter.my_word_delimiter.type", "word_delimiter") .build()); TokenFilterFactory tokenFilter = analysisService.tokenFilter("my_word_delimiter"); String source = "PowerShot 500-42 wi-fi wi-fi-4000 j2se O'Neil's"; String[] expected = new String[]{"Power", "Shot", "500", "42", "wi", "fi", "wi", "fi", "4000", "j", "2", "se", "O", "Neil"}; Tokenizer tokenizer = new WhitespaceTokenizer(TEST_VERSION_CURRENT, new StringReader(source)); assertTokenStreamContents(tokenFilter.create(tokenizer), expected); } @Test public void testCatenateWords() throws IOException { AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settingsBuilder() .put("index.analysis.filter.my_word_delimiter.type", "word_delimiter") .put("index.analysis.filter.my_word_delimiter.catenate_words", "true") .put("index.analysis.filter.my_word_delimiter.generate_word_parts", "false") .build()); TokenFilterFactory tokenFilter = analysisService.tokenFilter("my_word_delimiter"); String source = "PowerShot 500-42 wi-fi wi-fi-4000 j2se O'Neil's"; String[] expected = new String[]{"PowerShot", "500", "42", "wifi", "wifi", "4000", "j", "2", "se", "ONeil"}; Tokenizer tokenizer = new WhitespaceTokenizer(TEST_VERSION_CURRENT, new StringReader(source)); assertTokenStreamContents(tokenFilter.create(tokenizer), expected); } @Test public void testCatenateNumbers() throws IOException { AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settingsBuilder() .put("index.analysis.filter.my_word_delimiter.type", "word_delimiter") .put("index.analysis.filter.my_word_delimiter.generate_number_parts", "false") .put("index.analysis.filter.my_word_delimiter.catenate_numbers", "true") .build()); TokenFilterFactory tokenFilter = analysisService.tokenFilter("my_word_delimiter"); String source = "PowerShot 500-42 wi-fi wi-fi-4000 j2se O'Neil's"; String[] expected = new String[]{"Power", "Shot", "50042", "wi", "fi", "wi", "fi", "4000", "j", "2", "se", "O", "Neil"}; Tokenizer tokenizer = new WhitespaceTokenizer(TEST_VERSION_CURRENT, new StringReader(source)); assertTokenStreamContents(tokenFilter.create(tokenizer), expected); } @Test public void testCatenateAll() throws IOException { AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settingsBuilder() .put("index.analysis.filter.my_word_delimiter.type", "word_delimiter") .put("index.analysis.filter.my_word_delimiter.generate_word_parts", "false") .put("index.analysis.filter.my_word_delimiter.generate_number_parts", "false") .put("index.analysis.filter.my_word_delimiter.catenate_all", "true") .build()); TokenFilterFactory tokenFilter = analysisService.tokenFilter("my_word_delimiter"); String source = "PowerShot 500-42 wi-fi wi-fi-4000 j2se O'Neil's"; String[] expected = new String[]{"PowerShot", "50042", "wifi", "wifi4000", "j2se", "ONeil"}; Tokenizer tokenizer = new WhitespaceTokenizer(TEST_VERSION_CURRENT, new StringReader(source)); assertTokenStreamContents(tokenFilter.create(tokenizer), expected); } @Test public void testSplitOnCaseChange() throws IOException { AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settingsBuilder() .put("index.analysis.filter.my_word_delimiter.type", "word_delimiter") 
.put("index.analysis.filter.my_word_delimiter.split_on_case_change", "false") .build()); TokenFilterFactory tokenFilter = analysisService.tokenFilter("my_word_delimiter"); String source = "PowerShot"; String[] expected = new String[]{"PowerShot"}; Tokenizer tokenizer = new WhitespaceTokenizer(TEST_VERSION_CURRENT, new StringReader(source)); assertTokenStreamContents(tokenFilter.create(tokenizer), expected); } @Test public void testPreserveOriginal() throws IOException { AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settingsBuilder() .put("index.analysis.filter.my_word_delimiter.type", "word_delimiter") .put("index.analysis.filter.my_word_delimiter.preserve_original", "true") .build()); TokenFilterFactory tokenFilter = analysisService.tokenFilter("my_word_delimiter"); String source = "PowerShot 500-42 wi-fi wi-fi-4000 j2se O'Neil's"; String[] expected = new String[]{"PowerShot", "Power", "Shot", "500-42", "500", "42", "wi-fi", "wi", "fi", "wi-fi-4000", "wi", "fi", "4000", "j2se", "j", "2", "se", "O'Neil's", "O", "Neil"}; Tokenizer tokenizer = new WhitespaceTokenizer(TEST_VERSION_CURRENT, new StringReader(source)); assertTokenStreamContents(tokenFilter.create(tokenizer), expected); } @Test public void testStemEnglishPossessive() throws IOException { AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settingsBuilder() .put("index.analysis.filter.my_word_delimiter.type", "word_delimiter") .put("index.analysis.filter.my_word_delimiter.stem_english_possessive", "false") .build()); TokenFilterFactory tokenFilter = analysisService.tokenFilter("my_word_delimiter"); String source = "PowerShot 500-42 wi-fi wi-fi-4000 j2se O'Neil's"; String[] expected = new String[]{"Power", "Shot", "500", "42", "wi", "fi", "wi", "fi", "4000", "j", "2", "se", "O", "Neil", "s"}; Tokenizer tokenizer = new WhitespaceTokenizer(TEST_VERSION_CURRENT, new StringReader(source)); assertTokenStreamContents(tokenFilter.create(tokenizer), expected); } }
0true
src_test_java_org_elasticsearch_index_analysis_WordDelimiterTokenFilterFactoryTests.java
463
private static enum StaticEntryGetter implements StaticArrayEntry.GetColVal<Map.Entry<Integer, Long>, StaticBuffer> { INSTANCE, SCHEMA_INSTANCE; @Override public StaticBuffer getColumn(Map.Entry<Integer, Long> element) { ByteBuffer b = ByteBuffer.allocate(4); b.putInt(element.getKey()); return StaticArrayBuffer.of(b.array()); } @Override public StaticBuffer getValue(Map.Entry<Integer, Long> element) { ByteBuffer b = ByteBuffer.allocate(8); b.putLong(element.getValue()); return StaticArrayBuffer.of(b.array()); } @Override public EntryMetaData[] getMetaSchema(Map.Entry<Integer, Long> element) { if (this==INSTANCE) return StaticArrayEntry.EMPTY_SCHEMA; else return metaSchema; } @Override public Object getMetaData(Map.Entry<Integer, Long> element, EntryMetaData meta) { if (this==INSTANCE) throw new UnsupportedOperationException("Unsupported meta data: " + meta); else return metaData.get(meta); } }
0true
titan-test_src_test_java_com_thinkaurelius_titan_diskstorage_keycolumnvalue_StaticArrayEntryTest.java
1,534
public class UpdateNumberOfReplicasTests extends ElasticsearchAllocationTestCase { private final ESLogger logger = Loggers.getLogger(UpdateNumberOfReplicasTests.class); @Test public void testUpdateNumberOfReplicas() { AllocationService strategy = createAllocationService(settingsBuilder().put("cluster.routing.allocation.concurrent_recoveries", 10).build()); logger.info("Building initial routing table"); MetaData metaData = MetaData.builder() .put(IndexMetaData.builder("test").numberOfShards(1).numberOfReplicas(1)) .build(); RoutingTable routingTable = RoutingTable.builder() .addAsNew(metaData.index("test")) .build(); ClusterState clusterState = ClusterState.builder().metaData(metaData).routingTable(routingTable).build(); assertThat(routingTable.index("test").shards().size(), equalTo(1)); assertThat(routingTable.index("test").shard(0).size(), equalTo(2)); assertThat(routingTable.index("test").shard(0).shards().size(), equalTo(2)); assertThat(routingTable.index("test").shard(0).shards().get(0).state(), equalTo(UNASSIGNED)); assertThat(routingTable.index("test").shard(0).shards().get(1).state(), equalTo(UNASSIGNED)); assertThat(routingTable.index("test").shard(0).shards().get(0).currentNodeId(), nullValue()); assertThat(routingTable.index("test").shard(0).shards().get(1).currentNodeId(), nullValue()); logger.info("Adding two nodes and performing rerouting"); clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder().put(newNode("node1")).put(newNode("node2"))).build(); RoutingTable prevRoutingTable = routingTable; routingTable = strategy.reroute(clusterState).routingTable(); clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build(); logger.info("Start all the primary shards"); RoutingNodes routingNodes = clusterState.routingNodes(); prevRoutingTable = routingTable; routingTable = strategy.applyStartedShards(clusterState, routingNodes.shardsWithState(INITIALIZING)).routingTable(); clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build(); logger.info("Start all the replica shards"); routingNodes = clusterState.routingNodes(); prevRoutingTable = routingTable; routingTable = strategy.applyStartedShards(clusterState, routingNodes.shardsWithState(INITIALIZING)).routingTable(); clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build(); final String nodeHoldingPrimary = routingTable.index("test").shard(0).primaryShard().currentNodeId(); final String nodeHoldingReplica = routingTable.index("test").shard(0).replicaShards().get(0).currentNodeId(); assertThat(nodeHoldingPrimary, not(equalTo(nodeHoldingReplica))); assertThat(prevRoutingTable != routingTable, equalTo(true)); assertThat(routingTable.index("test").shards().size(), equalTo(1)); assertThat(routingTable.index("test").shard(0).size(), equalTo(2)); assertThat(routingTable.index("test").shard(0).shards().size(), equalTo(2)); assertThat(routingTable.index("test").shard(0).primaryShard().state(), equalTo(STARTED)); assertThat(routingTable.index("test").shard(0).primaryShard().currentNodeId(), equalTo(nodeHoldingPrimary)); assertThat(routingTable.index("test").shard(0).replicaShards().size(), equalTo(1)); assertThat(routingTable.index("test").shard(0).replicaShards().get(0).state(), equalTo(STARTED)); assertThat(routingTable.index("test").shard(0).replicaShards().get(0).currentNodeId(), equalTo(nodeHoldingReplica)); logger.info("add another replica"); routingNodes = clusterState.routingNodes(); prevRoutingTable = routingTable; routingTable = 
RoutingTable.builder(routingTable).updateNumberOfReplicas(2).build(); metaData = MetaData.builder(clusterState.metaData()).updateNumberOfReplicas(2).build(); clusterState = ClusterState.builder(clusterState).routingTable(routingTable).metaData(metaData).build(); assertThat(clusterState.metaData().index("test").numberOfReplicas(), equalTo(2)); assertThat(prevRoutingTable != routingTable, equalTo(true)); assertThat(routingTable.index("test").shards().size(), equalTo(1)); assertThat(routingTable.index("test").shard(0).size(), equalTo(3)); assertThat(routingTable.index("test").shard(0).primaryShard().state(), equalTo(STARTED)); assertThat(routingTable.index("test").shard(0).primaryShard().currentNodeId(), equalTo(nodeHoldingPrimary)); assertThat(routingTable.index("test").shard(0).replicaShards().size(), equalTo(2)); assertThat(routingTable.index("test").shard(0).replicaShards().get(0).state(), equalTo(STARTED)); assertThat(routingTable.index("test").shard(0).replicaShards().get(0).currentNodeId(), equalTo(nodeHoldingReplica)); assertThat(routingTable.index("test").shard(0).replicaShards().get(1).state(), equalTo(UNASSIGNED)); logger.info("Add another node and start the added replica"); clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder(clusterState.nodes()).put(newNode("node3"))).build(); prevRoutingTable = routingTable; routingTable = strategy.reroute(clusterState).routingTable(); clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build(); assertThat(prevRoutingTable != routingTable, equalTo(true)); assertThat(routingTable.index("test").shards().size(), equalTo(1)); assertThat(routingTable.index("test").shard(0).size(), equalTo(3)); assertThat(routingTable.index("test").shard(0).primaryShard().state(), equalTo(STARTED)); assertThat(routingTable.index("test").shard(0).primaryShard().currentNodeId(), equalTo(nodeHoldingPrimary)); assertThat(routingTable.index("test").shard(0).replicaShards().size(), equalTo(2)); assertThat(routingTable.index("test").shard(0).replicaShardsWithState(STARTED).size(), equalTo(1)); assertThat(routingTable.index("test").shard(0).replicaShardsWithState(STARTED).get(0).currentNodeId(), equalTo(nodeHoldingReplica)); assertThat(routingTable.index("test").shard(0).replicaShardsWithState(INITIALIZING).size(), equalTo(1)); assertThat(routingTable.index("test").shard(0).replicaShardsWithState(INITIALIZING).get(0).currentNodeId(), equalTo("node3")); routingNodes = clusterState.routingNodes(); prevRoutingTable = routingTable; routingTable = strategy.applyStartedShards(clusterState, routingNodes.shardsWithState(INITIALIZING)).routingTable(); clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build(); assertThat(prevRoutingTable != routingTable, equalTo(true)); assertThat(routingTable.index("test").shards().size(), equalTo(1)); assertThat(routingTable.index("test").shard(0).size(), equalTo(3)); assertThat(routingTable.index("test").shard(0).primaryShard().state(), equalTo(STARTED)); assertThat(routingTable.index("test").shard(0).primaryShard().currentNodeId(), equalTo(nodeHoldingPrimary)); assertThat(routingTable.index("test").shard(0).replicaShards().size(), equalTo(2)); assertThat(routingTable.index("test").shard(0).replicaShardsWithState(STARTED).size(), equalTo(2)); assertThat(routingTable.index("test").shard(0).replicaShardsWithState(STARTED).get(0).currentNodeId(), anyOf(equalTo(nodeHoldingReplica), equalTo("node3"))); 
assertThat(routingTable.index("test").shard(0).replicaShardsWithState(STARTED).get(1).currentNodeId(), anyOf(equalTo(nodeHoldingReplica), equalTo("node3"))); logger.info("now remove a replica"); routingNodes = clusterState.routingNodes(); prevRoutingTable = routingTable; routingTable = RoutingTable.builder(routingTable).updateNumberOfReplicas(1).build(); metaData = MetaData.builder(clusterState.metaData()).updateNumberOfReplicas(1).build(); clusterState = ClusterState.builder(clusterState).routingTable(routingTable).metaData(metaData).build(); assertThat(clusterState.metaData().index("test").numberOfReplicas(), equalTo(1)); assertThat(prevRoutingTable != routingTable, equalTo(true)); assertThat(routingTable.index("test").shards().size(), equalTo(1)); assertThat(routingTable.index("test").shard(0).size(), equalTo(2)); assertThat(routingTable.index("test").shard(0).primaryShard().state(), equalTo(STARTED)); assertThat(routingTable.index("test").shard(0).primaryShard().currentNodeId(), equalTo(nodeHoldingPrimary)); assertThat(routingTable.index("test").shard(0).replicaShards().size(), equalTo(1)); assertThat(routingTable.index("test").shard(0).replicaShards().get(0).state(), equalTo(STARTED)); assertThat(routingTable.index("test").shard(0).replicaShards().get(0).currentNodeId(), anyOf(equalTo(nodeHoldingReplica), equalTo("node3"))); logger.info("do a reroute, should remain the same"); prevRoutingTable = routingTable; routingTable = strategy.reroute(clusterState).routingTable(); clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build(); assertThat(prevRoutingTable != routingTable, equalTo(false)); } }
0true
src_test_java_org_elasticsearch_cluster_routing_allocation_UpdateNumberOfReplicasTests.java
793
ex.execute(new Runnable() { public void run() { instances[id] = nodeFactory.newHazelcastInstance(); instances[id].getAtomicLong(name).incrementAndGet(); countDownLatch.countDown(); } });
0true
hazelcast_src_test_java_com_hazelcast_concurrent_atomiclong_AtomicLongTest.java
30
final class NestedCompletionProposal implements ICompletionProposal, ICompletionProposalExtension2 { private final String op; private final int loc; private final int index; private final boolean basic; private final Declaration dec; NestedCompletionProposal(Declaration dec, int loc, int index, boolean basic, String op) { this.op = op; this.loc = loc; this.index = index; this.basic = basic; this.dec = dec; } public String getAdditionalProposalInfo() { return null; } @Override public void apply(IDocument document) { //the following awfulness is necessary because the //insertion point may have changed (and even its //text may have changed, since the proposal was //instantiated). try { IRegion li = document.getLineInformationOfOffset(loc); int endOfLine = li.getOffset() + li.getLength(); int startOfArgs = getFirstPosition(); int offset = findCharCount(index, document, loc+startOfArgs, endOfLine, ",;", "", true)+1; if (offset>0 && document.getChar(offset)==' ') { offset++; } int nextOffset = findCharCount(index+1, document, loc+startOfArgs, endOfLine, ",;", "", true); int middleOffset = findCharCount(1, document, offset, nextOffset, "=", "", true)+1; if (middleOffset>0 && document.getChar(middleOffset)=='>') { middleOffset++; } while (middleOffset>0 && document.getChar(middleOffset)==' ') { middleOffset++; } if (middleOffset>offset && middleOffset<nextOffset) { offset = middleOffset; } String str = getText(false); if (nextOffset==-1) { nextOffset = offset; } if (document.getChar(nextOffset)=='}') { str += " "; } document.replace(offset, nextOffset-offset, str); } catch (BadLocationException e) { e.printStackTrace(); } //adding imports drops us out of linked mode :( /*try { DocumentChange tc = new DocumentChange("imports", document); tc.setEdit(new MultiTextEdit()); HashSet<Declaration> decs = new HashSet<Declaration>(); CompilationUnit cu = cpc.getRootNode(); importDeclaration(decs, d, cu); if (d instanceof Functional) { List<ParameterList> pls = ((Functional) d).getParameterLists(); if (!pls.isEmpty()) { for (Parameter p: pls.get(0).getParameters()) { MethodOrValue pm = p.getModel(); if (pm instanceof Method) { for (ParameterList ppl: ((Method) pm).getParameterLists()) { for (Parameter pp: ppl.getParameters()) { importSignatureTypes(pp.getModel(), cu, decs); } } } } } } applyImports(tc, decs, cu, document); tc.perform(new NullProgressMonitor()); } catch (Exception e) { e.printStackTrace(); }*/ } private String getText(boolean description) { StringBuilder sb = new StringBuilder() .append(op).append(dec.getName(getUnit())); if (dec instanceof Functional && !basic) { appendPositionalArgs(dec, getUnit(), sb, false, description); } return sb.toString(); } @Override public Point getSelection(IDocument document) { return null; } @Override public String getDisplayString() { return getText(true); } @Override public Image getImage() { return getImageForDeclaration(dec); } @Override public IContextInformation getContextInformation() { return null; } @Override public void apply(ITextViewer viewer, char trigger, int stateMask, int offset) { apply(viewer.getDocument()); } @Override public void selected(ITextViewer viewer, boolean smartToggle) {} @Override public void unselected(ITextViewer viewer) {} @Override public boolean validate(IDocument document, int currentOffset, DocumentEvent event) { if (event==null) { return true; } else { try { IRegion li = document.getLineInformationOfOffset(loc); int endOfLine = li.getOffset() + li.getLength(); int startOfArgs = getFirstPosition(); int offset = 
findCharCount(index, document, loc+startOfArgs, endOfLine, ",;", "", true)+1; String content = document.get(offset, currentOffset - offset); int eq = content.indexOf("="); if (eq>0) { content = content.substring(eq+1); } String filter = content.trim().toLowerCase(); String decName = dec.getName(getUnit()); if ((op+decName).toLowerCase().startsWith(filter) || decName.toLowerCase().startsWith(filter)) { return true; } } catch (BadLocationException e) { // ignore concurrently modified document } return false; } } }
0true
plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_complete_InvocationCompletionProposal.java
2,596
private static class MasterPingRequest extends TransportRequest { private String nodeId; private String masterNodeId; private MasterPingRequest() { } private MasterPingRequest(String nodeId, String masterNodeId) { this.nodeId = nodeId; this.masterNodeId = masterNodeId; } @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); nodeId = in.readString(); masterNodeId = in.readString(); } @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeString(nodeId); out.writeString(masterNodeId); } }
1no label
src_main_java_org_elasticsearch_discovery_zen_fd_MasterFaultDetection.java
466
public interface SandBoxDao { public SandBox retrieve(Long id); public SandBox retrieveSandBoxByType(Site site, SandBoxType sandboxType); public SandBox retrieveNamedSandBox(Site site, SandBoxType sandboxType, String sandboxName); public SandBox persist(SandBox entity); public SandBox createSandBox(Site site, String sandBoxName, SandBoxType sandBoxType); }
0true
common_src_main_java_org_broadleafcommerce_common_sandbox_dao_SandBoxDao.java
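As an illustration of how the DAO above composes, a caller might resolve a named sandbox and fall back to creating it. The SandBoxResolver class below is a sketch under that assumption, not part of Broadleaf; only the SandBoxDao methods declared above are used.

// Hypothetical caller built solely on the SandBoxDao contract shown above.
public class SandBoxResolver {

    private final SandBoxDao sandBoxDao;

    public SandBoxResolver(SandBoxDao sandBoxDao) {
        this.sandBoxDao = sandBoxDao;
    }

    public SandBox findOrCreateNamedSandBox(Site site, String name, SandBoxType type) {
        SandBox sandBox = sandBoxDao.retrieveNamedSandBox(site, type, name);
        return sandBox != null ? sandBox : sandBoxDao.createSandBox(site, name, type);
    }
}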
236
public class CeylonTaskUtil { public static void addTaskAnnotation(CommonToken token, IAnnotationModel model) { List<Task> tasks = getTasks(token); if (tasks != null) { for (Task task : tasks) { Annotation annotation = new Annotation(TODO_ANNOTATION_TYPE, false, task.text); Position position = new Position(task.startIndex, task.text.length()); model.addAnnotation(annotation, position); } } } public static void addTaskMarkers(CommonToken token, IFile file) { List<Task> tasks = getTasks(token); if (tasks != null) { for (Task task : tasks) { Map<String, Object> attributes = new HashMap<String, Object>(); attributes.put(IMarker.SEVERITY, IMarker.SEVERITY_INFO); attributes.put(IMarker.PRIORITY, task.priority); attributes.put(IMarker.MESSAGE, task.text); attributes.put(IMarker.LINE_NUMBER, task.line); attributes.put(IMarker.CHAR_START, task.startIndex); attributes.put(IMarker.CHAR_END, task.startIndex + task.text.length()); attributes.put(IMarker.SOURCE_ID, CeylonBuilder.SOURCE); attributes.put(IMarker.USER_EDITABLE, false); try { file.createMarker(CeylonBuilder.TASK_MARKER_ID).setAttributes(attributes); } catch (Exception e) { e.printStackTrace(); } } } } public static List<Task> getTasks(CommonToken token) { List<Task> tasks = null; if (token.getType() == CeylonLexer.LINE_COMMENT || token.getType() == CeylonLexer.MULTI_COMMENT) { int line = token.getLine(); int startIndex = token.getStartIndex(); String[] parts = token.getText().split("(?=TODO|FIXME|XXX|\n|\\*/|/\\*)"); for (String part : parts) { int priority = -1; if (part.startsWith("TODO")) { priority = IMarker.PRIORITY_NORMAL; } else if (part.startsWith("XXX")) { priority = IMarker.PRIORITY_NORMAL; } else if (part.startsWith("FIXME")) { priority = IMarker.PRIORITY_HIGH; } else if (part.startsWith("\n")) { line++; } if (priority != -1) { Task task = new Task(part, priority, line, startIndex); if (tasks == null) { tasks = new ArrayList<Task>(); } tasks.add(task); } startIndex += part.length(); } } return tasks; } public static class Task { private final String text; private final int priority; private final int line; private final int startIndex; public Task(String text, int priority, int line, int startIndex) { this.text = text; this.priority = priority; this.line = line; this.startIndex = startIndex; } public String getText() { return text; } public int getPriority() { return priority; } public int getLine() { return line; } public int getStartIndex() { return startIndex; } } }
0true
plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_editor_CeylonTaskUtil.java
1,351
public class NodeMappingRefreshAction extends AbstractComponent { private final TransportService transportService; private final MetaDataMappingService metaDataMappingService; @Inject public NodeMappingRefreshAction(Settings settings, TransportService transportService, MetaDataMappingService metaDataMappingService) { super(settings); this.transportService = transportService; this.metaDataMappingService = metaDataMappingService; transportService.registerHandler(NodeMappingRefreshTransportHandler.ACTION, new NodeMappingRefreshTransportHandler()); } public void nodeMappingRefresh(final ClusterState state, final NodeMappingRefreshRequest request) throws ElasticsearchException { DiscoveryNodes nodes = state.nodes(); if (nodes.localNodeMaster()) { innerMappingRefresh(request); } else { transportService.sendRequest(state.nodes().masterNode(), NodeMappingRefreshTransportHandler.ACTION, request, EmptyTransportResponseHandler.INSTANCE_SAME); } } private void innerMappingRefresh(NodeMappingRefreshRequest request) { metaDataMappingService.refreshMapping(request.index(), request.indexUUID(), request.types()); } private class NodeMappingRefreshTransportHandler extends BaseTransportRequestHandler<NodeMappingRefreshRequest> { static final String ACTION = "cluster/nodeMappingRefresh"; @Override public NodeMappingRefreshRequest newInstance() { return new NodeMappingRefreshRequest(); } @Override public void messageReceived(NodeMappingRefreshRequest request, TransportChannel channel) throws Exception { innerMappingRefresh(request); channel.sendResponse(TransportResponse.Empty.INSTANCE); } @Override public String executor() { return ThreadPool.Names.SAME; } } public static class NodeMappingRefreshRequest extends TransportRequest { private String index; private String indexUUID = IndexMetaData.INDEX_UUID_NA_VALUE; private String[] types; private String nodeId; NodeMappingRefreshRequest() { } public NodeMappingRefreshRequest(String index, String indexUUID, String[] types, String nodeId) { this.index = index; this.indexUUID = indexUUID; this.types = types; this.nodeId = nodeId; } public String index() { return index; } public String indexUUID() { return indexUUID; } public String[] types() { return types; } public String nodeId() { return nodeId; } @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeString(index); out.writeStringArray(types); out.writeString(nodeId); out.writeString(indexUUID); } @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); index = in.readString(); types = in.readStringArray(); nodeId = in.readString(); indexUUID = in.readString(); } } }
0true
src_main_java_org_elasticsearch_cluster_action_index_NodeMappingRefreshAction.java
685
constructors[COLLECTION_CLEAR] = new ConstructorFunction<Integer, IdentifiedDataSerializable>() { public IdentifiedDataSerializable createNew(Integer arg) { return new CollectionClearOperation(); } };
0true
hazelcast_src_main_java_com_hazelcast_collection_CollectionDataSerializerHook.java
326
public enum Placement { PREPEND, APPEND, SPECIFIC }
0true
common_src_main_java_org_broadleafcommerce_common_extensibility_context_merge_Placement.java
2,160
public class ContextDocIdSet { public final AtomicReaderContext context; public final DocIdSet docSet; public ContextDocIdSet(AtomicReaderContext context, DocIdSet docSet) { this.context = context; this.docSet = docSet; } }
0true
src_main_java_org_elasticsearch_common_lucene_docset_ContextDocIdSet.java
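ContextDocIdSet is a plain value pair: a segment's reader context plus the matching doc IDs for that segment. The helper below sketches how such pairs are typically gathered per segment with the Lucene 4.x Filter API; it is illustrative only and not Elasticsearch's actual usage.

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.Filter;

// Illustrative sketch: evaluate a filter segment by segment and keep non-empty results.
public final class PerSegmentDocIdSets {

    public static List<ContextDocIdSet> collect(IndexReader reader, Filter filter) throws IOException {
        List<ContextDocIdSet> perSegment = new ArrayList<ContextDocIdSet>();
        for (AtomicReaderContext leaf : reader.leaves()) {
            DocIdSet docs = filter.getDocIdSet(leaf, leaf.reader().getLiveDocs());
            if (docs != null) {
                perSegment.add(new ContextDocIdSet(leaf, docs));
            }
        }
        return perSegment;
    }
}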
3,893
public class MissingFilterParser implements FilterParser { public static final String NAME = "missing"; @Inject public MissingFilterParser() { } @Override public String[] names() { return new String[]{NAME}; } @Override public Filter parse(QueryParseContext parseContext) throws IOException, QueryParsingException { XContentParser parser = parseContext.parser(); String fieldPattern = null; String filterName = null; boolean nullValue = false; boolean existence = true; XContentParser.Token token; String currentFieldName = null; while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); } else if (token.isValue()) { if ("field".equals(currentFieldName)) { fieldPattern = parser.text(); } else if ("null_value".equals(currentFieldName)) { nullValue = parser.booleanValue(); } else if ("existence".equals(currentFieldName)) { existence = parser.booleanValue(); } else if ("_name".equals(currentFieldName)) { filterName = parser.text(); } else { throw new QueryParsingException(parseContext.index(), "[missing] filter does not support [" + currentFieldName + "]"); } } } if (fieldPattern == null) { throw new QueryParsingException(parseContext.index(), "missing must be provided with a [field]"); } if (!existence && !nullValue) { throw new QueryParsingException(parseContext.index(), "missing must have either existence, or null_value, or both set to true"); } MapperService.SmartNameObjectMapper smartNameObjectMapper = parseContext.smartObjectMapper(fieldPattern); if (smartNameObjectMapper != null && smartNameObjectMapper.hasMapper()) { // automatically turn the object mapper into a pattern that covers all of its fields fieldPattern = fieldPattern + ".*"; } Set<String> fields = parseContext.simpleMatchToIndexNames(fieldPattern); if (fields.isEmpty()) { if (existence) { // if we ask for existence of fields, and we found none, then we should match on all return Queries.MATCH_ALL_FILTER; } return null; } Filter existenceFilter = null; Filter nullFilter = null; MapperService.SmartNameFieldMappers nonNullFieldMappers = null; if (existence) { XBooleanFilter boolFilter = new XBooleanFilter(); for (String field : fields) { MapperService.SmartNameFieldMappers smartNameFieldMappers = parseContext.smartFieldMappers(field); if (smartNameFieldMappers != null) { nonNullFieldMappers = smartNameFieldMappers; } Filter filter = null; if (smartNameFieldMappers != null && smartNameFieldMappers.hasMapper()) { filter = smartNameFieldMappers.mapper().rangeFilter(null, null, true, true, parseContext); } if (filter == null) { filter = new TermRangeFilter(field, null, null, true, true); } boolFilter.add(filter, BooleanClause.Occur.SHOULD); } // we always cache this one, it really does not change... (exists) // it's ok to cache under the fieldName cacheKey, since it's per segment and the mapping applies to this data on this segment... 
existenceFilter = parseContext.cacheFilter(boolFilter, new CacheKeyFilter.Key("$exists$" + fieldPattern)); existenceFilter = new NotFilter(existenceFilter); // cache the not filter as well, so it will be faster existenceFilter = parseContext.cacheFilter(existenceFilter, new CacheKeyFilter.Key("$missing$" + fieldPattern)); } if (nullValue) { for (String field : fields) { MapperService.SmartNameFieldMappers smartNameFieldMappers = parseContext.smartFieldMappers(field); if (smartNameFieldMappers != null && smartNameFieldMappers.hasMapper()) { nullFilter = smartNameFieldMappers.mapper().nullValueFilter(); if (nullFilter != null) { // cache the null filter as well, so it will be faster nullFilter = parseContext.cacheFilter(nullFilter, new CacheKeyFilter.Key("$null$" + fieldPattern)); } } } } Filter filter; if (nullFilter != null) { if (existenceFilter != null) { XBooleanFilter combined = new XBooleanFilter(); combined.add(existenceFilter, BooleanClause.Occur.SHOULD); combined.add(nullFilter, BooleanClause.Occur.SHOULD); // cache the combined filter as well, so it will be faster filter = parseContext.cacheFilter(combined, null); } else { filter = nullFilter; } } else { filter = existenceFilter; } if (filter == null) { return null; } filter = wrapSmartNameFilter(filter, nonNullFieldMappers, parseContext); if (filterName != null) { parseContext.addNamedFilter(filterName, filter); } return filter; } }
1no label
src_main_java_org_elasticsearch_index_query_MissingFilterParser.java
4,427
shardGatewayService.recover(indexShouldExists, new IndexShardGatewayService.RecoveryListener() { @Override public void onRecoveryDone() { shardStateAction.shardStarted(shardRouting, indexMetaData.getUUID(), "after recovery from gateway"); } @Override public void onIgnoreRecovery(String reason) { } @Override public void onRecoveryFailed(IndexShardGatewayRecoveryException e) { handleRecoveryFailure(indexService, indexMetaData, shardRouting, true, e); } });
1no label
src_main_java_org_elasticsearch_indices_cluster_IndicesClusterStateService.java
527
public class OStorageException extends OException { private static final long serialVersionUID = -2655748565531836968L; public OStorageException(String string) { super(string); } public OStorageException(String message, Throwable cause) { super(message, cause); } }
0true
core_src_main_java_com_orientechnologies_orient_core_exception_OStorageException.java
589
protected static final class RemovedValue { public static final RemovedValue INSTANCE = new RemovedValue(); }
0true
core_src_main_java_com_orientechnologies_orient_core_index_OIndexAbstract.java
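RemovedValue.INSTANCE is a tombstone: a shared sentinel stored in place of a real value to mark a pending removal in a change buffer. The helpers below sketch that pattern generically; the change-buffer map is hypothetical and not an actual field of OIndexAbstract.

import java.util.Map;

// Hedged sketch of the tombstone pattern implied by RemovedValue.INSTANCE.
final class RemovalBufferExample {

    static void bufferRemoval(Map<Object, Object> keyChanges, Object key) {
        keyChanges.put(key, RemovedValue.INSTANCE); // record the removal without applying it yet
    }

    static boolean isPendingRemoval(Map<Object, Object> keyChanges, Object key) {
        // A single shared instance makes an identity comparison sufficient.
        return keyChanges.get(key) == RemovedValue.INSTANCE;
    }
}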
1,626
public enum VARIABLES { DBNAME, DATE }
0true
server_src_main_java_com_orientechnologies_orient_server_handler_OAutomaticBackup.java
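The enum names suggest substitution variables for the automatic backup's target file name. The expansion below is an assumption about how such tokens could be applied; the ${...} syntax and the date format are illustrative and not taken from OAutomaticBackup.

import java.text.SimpleDateFormat;
import java.util.Date;

// Hypothetical expansion of VARIABLES tokens in a backup file-name pattern.
final class BackupNameExpander {

    static String expand(String pattern, String dbName) {
        return pattern
                .replace("${" + VARIABLES.DBNAME.name() + "}", dbName)
                .replace("${" + VARIABLES.DATE.name() + "}",
                        new SimpleDateFormat("yyyyMMdd-HHmmss").format(new Date()));
    }
}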
401
public class CreateSnapshotResponse extends ActionResponse implements ToXContent { @Nullable private SnapshotInfo snapshotInfo; CreateSnapshotResponse(@Nullable SnapshotInfo snapshotInfo) { this.snapshotInfo = snapshotInfo; } CreateSnapshotResponse() { } /** * Returns snapshot information if the snapshot was completed by the time this method returned, or null otherwise. * * @return snapshot information or null */ public SnapshotInfo getSnapshotInfo() { return snapshotInfo; } @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); snapshotInfo = SnapshotInfo.readOptionalSnapshotInfo(in); } @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeOptionalStreamable(snapshotInfo); } /** * Returns the HTTP status: * <p/> * <ul> * <li>{@link RestStatus#ACCEPTED} if the snapshot is still in progress</li> * <li>{@link RestStatus#OK} if the snapshot was successful or partially successful</li> * <li>{@link RestStatus#INTERNAL_SERVER_ERROR} if the snapshot failed completely</li> * </ul> * * @return HTTP status of the snapshot operation */ public RestStatus status() { if (snapshotInfo == null) { return RestStatus.ACCEPTED; } return snapshotInfo.status(); } static final class Fields { static final XContentBuilderString SNAPSHOT = new XContentBuilderString("snapshot"); static final XContentBuilderString ACCEPTED = new XContentBuilderString("accepted"); } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { if (snapshotInfo != null) { builder.field(Fields.SNAPSHOT); snapshotInfo.toXContent(builder, params); } else { builder.field(Fields.ACCEPTED, true); } return builder; } }
0true
src_main_java_org_elasticsearch_action_admin_cluster_snapshots_create_CreateSnapshotResponse.java
819
public static class Order { public static final int Codes = 1000; public static final int Advanced = 2000; }
0true
core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_offer_domain_OfferImpl.java
3,612
public static class Builder extends AbstractFieldMapper.Builder<Builder, StringFieldMapper> { protected String nullValue = Defaults.NULL_VALUE; protected int positionOffsetGap = Defaults.POSITION_OFFSET_GAP; protected NamedAnalyzer searchQuotedAnalyzer; protected int ignoreAbove = Defaults.IGNORE_ABOVE; public Builder(String name) { super(name, new FieldType(Defaults.FIELD_TYPE)); builder = this; } public Builder nullValue(String nullValue) { this.nullValue = nullValue; return this; } @Override public Builder searchAnalyzer(NamedAnalyzer searchAnalyzer) { super.searchAnalyzer(searchAnalyzer); if (searchQuotedAnalyzer == null) { searchQuotedAnalyzer = searchAnalyzer; } return this; } public Builder positionOffsetGap(int positionOffsetGap) { this.positionOffsetGap = positionOffsetGap; return this; } public Builder searchQuotedAnalyzer(NamedAnalyzer analyzer) { this.searchQuotedAnalyzer = analyzer; return builder; } public Builder ignoreAbove(int ignoreAbove) { this.ignoreAbove = ignoreAbove; return this; } @Override public StringFieldMapper build(BuilderContext context) { if (positionOffsetGap > 0) { indexAnalyzer = new NamedAnalyzer(indexAnalyzer, positionOffsetGap); searchAnalyzer = new NamedAnalyzer(searchAnalyzer, positionOffsetGap); searchQuotedAnalyzer = new NamedAnalyzer(searchQuotedAnalyzer, positionOffsetGap); } // if the field is not analyzed, then by default, we should omit norms and have docs only // index options, as probably what the user really wants // if they are set explicitly, we will use those values // we also change the values on the default field type so that toXContent emits what // differs from the defaults FieldType defaultFieldType = new FieldType(Defaults.FIELD_TYPE); if (fieldType.indexed() && !fieldType.tokenized()) { defaultFieldType.setOmitNorms(true); defaultFieldType.setIndexOptions(IndexOptions.DOCS_ONLY); if (!omitNormsSet && boost == Defaults.BOOST) { fieldType.setOmitNorms(true); } if (!indexOptionsSet) { fieldType.setIndexOptions(IndexOptions.DOCS_ONLY); } } defaultFieldType.freeze(); StringFieldMapper fieldMapper = new StringFieldMapper(buildNames(context), boost, fieldType, defaultFieldType, docValues, nullValue, indexAnalyzer, searchAnalyzer, searchQuotedAnalyzer, positionOffsetGap, ignoreAbove, postingsProvider, docValuesProvider, similarity, normsLoading, fieldDataSettings, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo); fieldMapper.includeInAll(includeInAll); return fieldMapper; } }
0true
src_main_java_org_elasticsearch_index_mapper_core_StringFieldMapper.java
3,140
Future<Object> future = executorService.submit(new Callable<Object>() { @Override public Object call() throws Exception { engine.flush(new Engine.Flush()); ParsedDocument doc2 = testParsedDocument("2", "2", "test", null, -1, -1, testDocumentWithTextField(), Lucene.STANDARD_ANALYZER, B_2, false); engine.create(new Engine.Create(null, newUid("2"), doc2)); engine.flush(new Engine.Flush()); ParsedDocument doc3 = testParsedDocument("3", "3", "test", null, -1, -1, testDocumentWithTextField(), Lucene.STANDARD_ANALYZER, B_3, false); engine.create(new Engine.Create(null, newUid("3"), doc3)); return null; } });
0true
src_test_java_org_elasticsearch_index_engine_internal_InternalEngineTests.java
104
public static class Group { public static class Name { public static final String Basic = "PageImpl_Basic"; public static final String Page = "PageImpl_Page"; public static final String Rules = "PageImpl_Rules"; } public static class Order { public static final int Basic = 1000; public static final int Page = 2000; public static final int Rules = 1000; } }
0true
admin_broadleaf-contentmanagement-module_src_main_java_org_broadleafcommerce_cms_page_domain_PageImpl.java
335
public class DoSomeTransactionsThenWait { public static void main( String[] args ) throws Exception { String storeDir = args[0]; GraphDatabaseService db = new GraphDatabaseFactory().newEmbeddedDatabase( storeDir ); int count = Integer.parseInt( args[1] ); for ( int i = 0; i < count; i++ ) { Transaction tx = db.beginTx(); try { db.createNode(); tx.success(); } finally { tx.finish(); } } touch( storeDir, "done" ); while ( true ) Thread.sleep( 1000 ); } private static void touch( String storeDir, String name ) throws Exception { new File( storeDir, name ).createNewFile(); } }
0true
community_kernel_src_test_java_org_neo4j_kernel_impl_transaction_DoSomeTransactionsThenWait.java
475
public class GetAliasesAction extends IndicesAction<GetAliasesRequest, GetAliasesResponse, GetAliasesRequestBuilder> { public static final GetAliasesAction INSTANCE = new GetAliasesAction(); public static final String NAME = "indices/get/aliases"; private GetAliasesAction() { super(NAME); } @Override public GetAliasesRequestBuilder newRequestBuilder(IndicesAdminClient client) { return new GetAliasesRequestBuilder(client); } @Override public GetAliasesResponse newResponse() { return new GetAliasesResponse(); } }
0true
src_main_java_org_elasticsearch_action_admin_indices_alias_get_GetAliasesAction.java
518
public class TransportIndicesExistsAction extends TransportMasterNodeReadOperationAction<IndicesExistsRequest, IndicesExistsResponse> { @Inject public TransportIndicesExistsAction(Settings settings, TransportService transportService, ClusterService clusterService, ThreadPool threadPool) { super(settings, transportService, clusterService, threadPool); } @Override protected String executor() { // lightweight in-memory check return ThreadPool.Names.SAME; } @Override protected String transportAction() { return IndicesExistsAction.NAME; } @Override protected IndicesExistsRequest newRequest() { return new IndicesExistsRequest(); } @Override protected IndicesExistsResponse newResponse() { return new IndicesExistsResponse(); } @Override protected void doExecute(IndicesExistsRequest request, ActionListener<IndicesExistsResponse> listener) { // don't call this since it will throw IndexMissingException //request.indices(clusterService.state().metaData().concreteIndices(request.indices())); super.doExecute(request, listener); } @Override protected ClusterBlockException checkBlock(IndicesExistsRequest request, ClusterState state) { return state.blocks().indicesBlockedException(ClusterBlockLevel.METADATA, request.indices()); } @Override protected void masterOperation(final IndicesExistsRequest request, final ClusterState state, final ActionListener<IndicesExistsResponse> listener) throws ElasticsearchException { boolean exists; try { // Similar to the previous behaviour, but now aliases and wildcards are supported as well. clusterService.state().metaData().concreteIndices(request.indices(), request.indicesOptions()); exists = true; } catch (IndexMissingException e) { exists = false; } listener.onResponse(new IndicesExistsResponse(exists)); } }
1no label
src_main_java_org_elasticsearch_action_admin_indices_exists_indices_TransportIndicesExistsAction.java
215
XPostingsHighlighter highlighter2 = new XPostingsHighlighter(Integer.MAX_VALUE - 1) { @Override protected char getMultiValuedSeparator(String field) { return HighlightUtils.PARAGRAPH_SEPARATOR; } @Override protected PassageFormatter getFormatter(String field) { return new CustomPassageFormatter("<b>", "</b>", new DefaultEncoder()); } };
0true
src_test_java_org_apache_lucene_search_postingshighlight_CustomPostingsHighlighterTests.java
812
public abstract class AbstractAlterOperation extends AtomicLongBackupAwareOperation { protected IFunction<Long, Long> function; protected long response; protected long backup; public AbstractAlterOperation() { } public AbstractAlterOperation(String name, IFunction<Long, Long> function) { super(name); this.function = function; } @Override public Object getResponse() { return response; } @Override protected void writeInternal(ObjectDataOutput out) throws IOException { super.writeInternal(out); out.writeObject(function); } @Override protected void readInternal(ObjectDataInput in) throws IOException { super.readInternal(in); function = in.readObject(); } @Override public Operation getBackupOperation() { return new SetBackupOperation(name, backup); } }
0true
hazelcast_src_main_java_com_hazelcast_concurrent_atomiclong_operations_AbstractAlterOperation.java
304
public class MergeEhCacheManagerFactoryBean extends EhCacheManagerFactoryBean implements ApplicationContextAware { private ApplicationContext applicationContext; @Override public void setApplicationContext(ApplicationContext applicationContext) throws BeansException { this.applicationContext = applicationContext; } @javax.annotation.Resource(name="blMergedCacheConfigLocations") protected Set<String> mergedCacheConfigLocations; protected List<Resource> configLocations; @Override public void destroy() { super.destroy(); try { CacheManager cacheManager = getObject(); Field cacheManagerTimer = CacheManager.class.getDeclaredField("cacheManagerTimer"); cacheManagerTimer.setAccessible(true); Object failSafeTimer = cacheManagerTimer.get(cacheManager); Field timer = failSafeTimer.getClass().getDeclaredField("timer"); timer.setAccessible(true); Object time = timer.get(failSafeTimer); Field thread = time.getClass().getDeclaredField("thread"); thread.setAccessible(true); Thread item = (Thread) thread.get(time); item.setContextClassLoader(Thread.currentThread().getContextClassLoader().getParent()); } catch (NoSuchFieldException e) { throw new RuntimeException(e); } catch (IllegalAccessException e) { throw new RuntimeException(e); } } @PostConstruct public void configureMergedItems() { List<Resource> temp = new ArrayList<Resource>(); if (mergedCacheConfigLocations != null && !mergedCacheConfigLocations.isEmpty()) { for (String location : mergedCacheConfigLocations) { temp.add(applicationContext.getResource(location)); } } if (configLocations != null && !configLocations.isEmpty()) { for (Resource resource : configLocations) { temp.add(resource); } } try { MergeXmlConfigResource merge = new MergeXmlConfigResource(); ResourceInputStream[] sources = new ResourceInputStream[temp.size()]; int j=0; for (Resource resource : temp) { sources[j] = new ResourceInputStream(resource.getInputStream(), resource.getURL().toString()); j++; } setConfigLocation(merge.getMergedConfigResource(sources)); } catch (Exception e) { throw new FatalBeanException("Unable to merge cache locations", e); } } public void setConfigLocations(List<Resource> configLocations) throws BeansException { this.configLocations = configLocations; } }
0true
common_src_main_java_org_broadleafcommerce_common_extensibility_cache_ehcache_MergeEhCacheManagerFactoryBean.java
5,085
transportService.sendRequest(node, SearchScanTransportHandler.ACTION, request, new BaseTransportResponseHandler<QuerySearchResult>() { @Override public QuerySearchResult newInstance() { return new QuerySearchResult(); } @Override public void handleResponse(QuerySearchResult response) { listener.onResult(response); } @Override public void handleException(TransportException exp) { listener.onFailure(exp); } @Override public String executor() { return ThreadPool.Names.SAME; } });
1no label
src_main_java_org_elasticsearch_search_action_SearchServiceTransportAction.java
836
EMBEDDEDSET("EmbeddedSet", 11, new Class<?>[] { Set.class }, new Class<?>[] { Set.class }) { },
0true
core_src_main_java_com_orientechnologies_orient_core_metadata_schema_OType.java
152
public class StructuredContentDTO implements Serializable { private static final long serialVersionUID = 1L; protected Long id; protected String contentName; protected String contentType; protected String localeCode; protected Integer priority; protected Long sandboxId; protected Map<String, Object> values = new HashMap<String, Object>(); protected String ruleExpression; protected List<ItemCriteriaDTO> itemCriteriaDTOList; public Long getId() { return id; } public void setId(Long id) { this.id = id; } public String getContentName() { return contentName; } public void setContentName(String contentName) { values.put("contentName", contentName); this.contentName = contentName; } public String getContentType() { return contentType; } public void setContentType(String contentType) { values.put("contentType", contentType); this.contentType = contentType; } public String getLocaleCode() { return localeCode; } public void setLocaleCode(String localeCode) { values.put("localeCode", localeCode); this.localeCode = localeCode; } public Integer getPriority() { return priority; } public void setPriority(Integer priority) { values.put("priority", priority); this.priority = priority; } public Long getSandboxId() { return sandboxId; } public void setSandboxId(Long sandboxId) { this.sandboxId = sandboxId; } public Map<String, Object> getValues() { return values; } public void setValues(Map<String, Object> values) { this.values = values; } public String getRuleExpression() { return ruleExpression; } public void setRuleExpression(String ruleExpression) { this.ruleExpression = ruleExpression; } public List<ItemCriteriaDTO> getItemCriteriaDTOList() { return itemCriteriaDTOList; } public void setItemCriteriaDTOList(List<ItemCriteriaDTO> itemCriteriaDTOList) { this.itemCriteriaDTOList = itemCriteriaDTOList; } }
0true
admin_broadleaf-contentmanagement-module_src_main_java_org_broadleafcommerce_cms_structure_dto_StructuredContentDTO.java
3,561
public static class Defaults extends NumberFieldMapper.Defaults { public static final FieldType FIELD_TYPE = new FieldType(NumberFieldMapper.Defaults.FIELD_TYPE); static { FIELD_TYPE.freeze(); } public static final Byte NULL_VALUE = null; }
0true
src_main_java_org_elasticsearch_index_mapper_core_ByteFieldMapper.java
2,058
public class MapEntrySetOperation extends AbstractMapOperation implements PartitionAwareOperation { Set<Map.Entry<Data, Data>> entrySet; public MapEntrySetOperation(String name) { super(name); } public MapEntrySetOperation() { } public void run() { RecordStore recordStore = mapService.getRecordStore(getPartitionId(), name); entrySet = recordStore.entrySetData(); if (mapContainer.getMapConfig().isStatisticsEnabled()) { ((MapService) getService()).getLocalMapStatsImpl(name).incrementOtherOperations(); } } @Override public Object getResponse() { return new MapEntrySet(entrySet); } }
0true
hazelcast_src_main_java_com_hazelcast_map_operation_MapEntrySetOperation.java
1,041
public static class Order { public static final int Advanced = 2000; }
0true
core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_order_domain_OrderItemImpl.java
138
class FindBodyVisitor extends Visitor { @Override public void visit(Tree.Body that) { super.visit(that); if (that.getStatements().contains(statement)) { for (Tree.Statement st: that.getStatements()) { if (st instanceof Tree.AttributeDeclaration) { Tree.AttributeDeclaration ad = (Tree.AttributeDeclaration) st; if (ad.getDeclarationModel().equals(dec) && ad.getSpecifierOrInitializerExpression()==null) { createJoinDeclarationProposal(proposals, spec, file, dec, that, that.getStatements().indexOf(st), ad); break; } } } } } }
0true
plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_correct_JoinDeclarationProposal.java