Unnamed: 0 (int64, 0 to 6.45k) | func (string, length 37 to 161k) | target (class label, 2 classes) | project (string, length 33 to 167)
---|---|---|---|
675 |
constructors[COLLECTION_ROLLBACK] = new ConstructorFunction<Integer, IdentifiedDataSerializable>() {
public IdentifiedDataSerializable createNew(Integer arg) {
return new CollectionRollbackOperation();
}
};
| 0 (true) | hazelcast_src_main_java_com_hazelcast_collection_CollectionDataSerializerHook.java |
104 |
public static class Group {
public static class Name {
public static final String Basic = "PageImpl_Basic";
public static final String Page = "PageImpl_Page";
public static final String Rules = "PageImpl_Rules";
}
public static class Order {
public static final int Basic = 1000;
public static final int Page = 2000;
public static final int Rules = 1000;
}
}
| 0 (true) | admin_broadleaf-contentmanagement-module_src_main_java_org_broadleafcommerce_cms_page_domain_PageImpl.java |
1,134 |
public class SuggestSearchBenchMark {
public static void main(String[] args) throws Exception {
int SEARCH_ITERS = 200;
Settings settings = settingsBuilder()
.put(SETTING_NUMBER_OF_SHARDS, 1)
.put(SETTING_NUMBER_OF_REPLICAS, 0)
.build();
Node[] nodes = new Node[1];
for (int i = 0; i < nodes.length; i++) {
nodes[i] = nodeBuilder().settings(settingsBuilder().put(settings).put("name", "node" + i)).node();
}
Client client = nodes[0].client();
try {
client.admin().indices().prepareCreate("test").setSettings(settings).addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1")
.startObject("_source").field("enabled", false).endObject()
.startObject("_all").field("enabled", false).endObject()
.startObject("_type").field("index", "no").endObject()
.startObject("_id").field("index", "no").endObject()
.startObject("properties")
.startObject("field").field("type", "string").field("index", "not_analyzed").field("omit_norms", true).endObject()
.endObject()
.endObject().endObject()).execute().actionGet();
ClusterHealthResponse clusterHealthResponse = client.admin().cluster().prepareHealth("test").setWaitForGreenStatus().execute().actionGet();
if (clusterHealthResponse.isTimedOut()) {
System.err.println("--> Timed out waiting for cluster health");
}
StopWatch stopWatch = new StopWatch().start();
long COUNT = SizeValue.parseSizeValue("10m").singles();
int BATCH = 100;
System.out.println("Indexing [" + COUNT + "] ...");
long ITERS = COUNT / BATCH;
long i = 1;
char character = 'a';
int idCounter = 0;
for (; i <= ITERS; i++) {
int termCounter = 0;
BulkRequestBuilder request = client.prepareBulk();
for (int j = 0; j < BATCH; j++) {
request.add(Requests.indexRequest("test").type("type1").id(Integer.toString(idCounter++)).source(source("prefix" + character + termCounter++)));
}
character++;
BulkResponse response = request.execute().actionGet();
if (response.hasFailures()) {
System.err.println("failures...");
}
}
System.out.println("Indexing took " + stopWatch.totalTime());
client.admin().indices().prepareRefresh().execute().actionGet();
System.out.println("Count: " + client.prepareCount().setQuery(matchAllQuery()).execute().actionGet().getCount());
} catch (Exception e) {
System.out.println("--> Index already exists, ignoring indexing phase, waiting for green");
ClusterHealthResponse clusterHealthResponse = client.admin().cluster().prepareHealth().setWaitForGreenStatus().setTimeout("10m").execute().actionGet();
if (clusterHealthResponse.isTimedOut()) {
System.err.println("--> Timed out waiting for cluster health");
}
client.admin().indices().prepareRefresh().execute().actionGet();
System.out.println("Count: " + client.prepareCount().setQuery(matchAllQuery()).execute().actionGet().getCount());
}
System.out.println("Warming up...");
char startChar = 'a';
for (int i = 0; i <= 20; i++) {
String term = "prefix" + startChar;
SearchResponse response = client.prepareSearch()
.setQuery(prefixQuery("field", term))
.addSuggestion(SuggestBuilder.termSuggestion("field").field("field").text(term).suggestMode("always"))
.execute().actionGet();
if (response.getHits().totalHits() == 0) {
System.err.println("No hits");
continue;
}
startChar++;
}
System.out.println("Starting benchmarking suggestions.");
startChar = 'a';
long timeTaken = 0;
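// Note: '<=' makes this loop run SEARCH_ITERS + 1 times, while the average printed below divides by SEARCH_ITERS.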
for (int i = 0; i <= SEARCH_ITERS; i++) {
String term = "prefix" + startChar;
SearchResponse response = client.prepareSearch()
.setQuery(matchQuery("field", term))
.addSuggestion(SuggestBuilder.termSuggestion("field").text(term).field("field").suggestMode("always"))
.execute().actionGet();
timeTaken += response.getTookInMillis();
if (response.getSuggest() == null) {
System.err.println("No suggestions");
continue;
}
List<? extends Option> options = response.getSuggest().getSuggestion("field").getEntries().get(0).getOptions();
if (options == null || options.isEmpty()) {
System.err.println("No suggestions");
}
startChar++;
}
System.out.println("Avg time taken without filter " + (timeTaken / SEARCH_ITERS));
client.close();
for (Node node : nodes) {
node.close();
}
}
private static XContentBuilder source(String nameValue) throws IOException {
return jsonBuilder().startObject()
.field("field", nameValue)
.endObject();
}
}
| 0 (true) | src_test_java_org_elasticsearch_benchmark_search_SuggestSearchBenchMark.java |
1,296 |
clusterService.submitStateUpdateTask("1", new ClusterStateUpdateTask() {
@Override
public ClusterState execute(ClusterState currentState) {
invoked1.countDown();
try {
block1.await();
} catch (InterruptedException e) {
fail();
}
return currentState;
}
@Override
public void onFailure(String source, Throwable t) {
invoked1.countDown();
fail();
}
});
| 0 (true) | src_test_java_org_elasticsearch_cluster_ClusterServiceTests.java |
632 |
public class IndexStatus implements Iterable<IndexShardStatus> {
private final String index;
private final Map<Integer, IndexShardStatus> indexShards;
IndexStatus(String index, ShardStatus[] shards) {
this.index = index;
Map<Integer, List<ShardStatus>> tmpIndexShards = Maps.newHashMap();
for (ShardStatus shard : shards) {
List<ShardStatus> lst = tmpIndexShards.get(shard.getShardRouting().id());
if (lst == null) {
lst = newArrayList();
tmpIndexShards.put(shard.getShardRouting().id(), lst);
}
lst.add(shard);
}
indexShards = Maps.newHashMap();
for (Map.Entry<Integer, List<ShardStatus>> entry : tmpIndexShards.entrySet()) {
indexShards.put(entry.getKey(), new IndexShardStatus(entry.getValue().get(0).getShardRouting().shardId(), entry.getValue().toArray(new ShardStatus[entry.getValue().size()])));
}
}
public String getIndex() {
return this.index;
}
/**
* A shard id to index shard status map (note, index shard status is the replication shard group that maps
* to the shard id).
*/
public Map<Integer, IndexShardStatus> getShards() {
return this.indexShards;
}
/**
* Returns only the primary shards store size in bytes.
*/
public ByteSizeValue getPrimaryStoreSize() {
long bytes = -1;
for (IndexShardStatus shard : this) {
if (shard.getPrimaryStoreSize() != null) {
if (bytes == -1) {
bytes = 0;
}
bytes += shard.getPrimaryStoreSize().bytes();
}
}
if (bytes == -1) {
return null;
}
return new ByteSizeValue(bytes);
}
/**
* Returns the full store size in bytes, of both primaries and replicas.
*/
public ByteSizeValue getStoreSize() {
long bytes = -1;
for (IndexShardStatus shard : this) {
if (shard.getStoreSize() != null) {
if (bytes == -1) {
bytes = 0;
}
bytes += shard.getStoreSize().bytes();
}
}
if (bytes == -1) {
return null;
}
return new ByteSizeValue(bytes);
}
public long getTranslogOperations() {
long translogOperations = -1;
for (IndexShardStatus shard : this) {
if (shard.getTranslogOperations() != -1) {
if (translogOperations == -1) {
translogOperations = 0;
}
translogOperations += shard.getTranslogOperations();
}
}
return translogOperations;
}
private transient DocsStatus docs;
public DocsStatus getDocs() {
if (docs != null) {
return docs;
}
DocsStatus docs = null;
for (IndexShardStatus shard : this) {
if (shard.getDocs() == null) {
continue;
}
if (docs == null) {
docs = new DocsStatus();
}
docs.numDocs += shard.getDocs().getNumDocs();
docs.maxDoc += shard.getDocs().getMaxDoc();
docs.deletedDocs += shard.getDocs().getDeletedDocs();
}
this.docs = docs;
return docs;
}
/**
* Total merges of this index.
*/
public MergeStats getMergeStats() {
MergeStats mergeStats = new MergeStats();
for (IndexShardStatus shard : this) {
mergeStats.add(shard.getMergeStats());
}
return mergeStats;
}
public RefreshStats getRefreshStats() {
RefreshStats refreshStats = new RefreshStats();
for (IndexShardStatus shard : this) {
refreshStats.add(shard.getRefreshStats());
}
return refreshStats;
}
public FlushStats getFlushStats() {
FlushStats flushStats = new FlushStats();
for (IndexShardStatus shard : this) {
flushStats.add(shard.getFlushStats());
}
return flushStats;
}
@Override
public Iterator<IndexShardStatus> iterator() {
return indexShards.values().iterator();
}
}
| 0 (true) | src_main_java_org_elasticsearch_action_admin_indices_status_IndexStatus.java |
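The size and translog accessors above share one idiom: -1 is a sentinel meaning "no shard reported a value", and the accumulator flips to 0 on the first real value. A minimal self-contained sketch of that idiom (the per-shard values are hypothetical, not the Elasticsearch API):
class SentinelAggregationSketch {
    public static void main(String[] args) {
        Long[] perShardBytes = { null, 100L, 250L }; // hypothetical: one shard has no data yet
        long bytes = -1; // sentinel: nothing reported so far
        for (Long shardBytes : perShardBytes) {
            if (shardBytes != null) {
                if (bytes == -1) bytes = 0; // first real value switches the accumulator on
                bytes += shardBytes;
            }
        }
        System.out.println(bytes == -1 ? "no data" : bytes + " bytes"); // prints "350 bytes"
    }
}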
97 |
final Thread thread = new Thread() {
public void run() {
try {
Thread.sleep(100);
} catch (InterruptedException e) {
e.printStackTrace();
}
hz1.getLifecycleService().terminate();
}
};
| 0 (true) | hazelcast-client_src_test_java_com_hazelcast_client_ClientIssueTest.java |
350 |
Thread t = new Thread(new Runnable() {
@Override
public void run() {
try {
Thread.sleep(request.delay.millis());
} catch (InterruptedException e) {
// ignore
}
final CountDownLatch latch = new CountDownLatch(nodesIds.length);
for (String nodeId : nodesIds) {
final DiscoveryNode node = state.nodes().get(nodeId);
if (node == null) {
logger.warn("[partial_cluster_shutdown]: no node to shutdown for node_id [{}]", nodeId);
latch.countDown();
continue;
}
logger.trace("[partial_cluster_shutdown]: sending shutdown request to [{}]", node);
transportService.sendRequest(node, NodeShutdownRequestHandler.ACTION, new NodeShutdownRequest(request), new EmptyTransportResponseHandler(ThreadPool.Names.SAME) {
@Override
public void handleResponse(TransportResponse.Empty response) {
logger.trace("[partial_cluster_shutdown]: received shutdown response from [{}]", node);
latch.countDown();
}
@Override
public void handleException(TransportException exp) {
logger.warn("[partial_cluster_shutdown]: received failed shutdown response from [{}]", exp, node);
latch.countDown();
}
});
}
try {
latch.await();
} catch (InterruptedException e) {
// ignore
}
logger.info("[partial_cluster_shutdown]: done shutting down [{}]", ((Object) nodesIds));
}
});
| 0 (true) | src_main_java_org_elasticsearch_action_admin_cluster_node_shutdown_TransportNodesShutdownAction.java |
752 |
public class MultiGetItemResponse implements Streamable {
private GetResponse response;
private MultiGetResponse.Failure failure;
MultiGetItemResponse() {
}
public MultiGetItemResponse(GetResponse response, MultiGetResponse.Failure failure) {
this.response = response;
this.failure = failure;
}
/**
* The index name of the document.
*/
public String getIndex() {
if (failure != null) {
return failure.getIndex();
}
return response.getIndex();
}
/**
* The type of the document.
*/
public String getType() {
if (failure != null) {
return failure.getType();
}
return response.getType();
}
/**
* The id of the document.
*/
public String getId() {
if (failure != null) {
return failure.getId();
}
return response.getId();
}
/**
* Is this a failed execution?
*/
public boolean isFailed() {
return failure != null;
}
/**
* The actual get response, <tt>null</tt> if it's a failure.
*/
public GetResponse getResponse() {
return this.response;
}
/**
* The failure if relevant.
*/
public MultiGetResponse.Failure getFailure() {
return this.failure;
}
public static MultiGetItemResponse readItemResponse(StreamInput in) throws IOException {
MultiGetItemResponse response = new MultiGetItemResponse();
response.readFrom(in);
return response;
}
@Override
public void readFrom(StreamInput in) throws IOException {
if (in.readBoolean()) {
failure = MultiGetResponse.Failure.readFailure(in);
} else {
response = new GetResponse();
response.readFrom(in);
}
}
@Override
public void writeTo(StreamOutput out) throws IOException {
if (failure != null) {
out.writeBoolean(true);
failure.writeTo(out);
} else {
out.writeBoolean(false);
response.writeTo(out);
}
}
}
| 0 (true) | src_main_java_org_elasticsearch_action_get_MultiGetItemResponse.java |
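readFrom/writeTo above frame a two-variant union with a leading boolean discriminator: true means a Failure follows, false means a GetResponse follows. A hedged sketch of the same framing over plain java.io streams (DataOutputStream/DataInputStream stand in for StreamOutput/StreamInput, and the String payloads are illustrative):
import java.io.*;
class BooleanDiscriminatorSketch {
    static void write(DataOutputStream out, String failure, String payload) throws IOException {
        if (failure != null) {
            out.writeBoolean(true);   // discriminator: failure branch
            out.writeUTF(failure);
        } else {
            out.writeBoolean(false);  // discriminator: success branch
            out.writeUTF(payload);
        }
    }
    static String read(DataInputStream in) throws IOException {
        return in.readBoolean() ? "failure=" + in.readUTF() : "response=" + in.readUTF();
    }
}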
352 |
public interface BroadleafClassTransformer extends ClassTransformer {
public void compileJPAProperties(Properties props, Object key) throws Exception;
}
| 0 (true) | common_src_main_java_org_broadleafcommerce_common_extensibility_jpa_convert_BroadleafClassTransformer.java |
75 |
public interface VertexList extends Iterable<TitanVertex> {
/**
* Returns the number of vertices in this list.
*
* @return Number of vertices in the list.
*/
public int size();
/**
* Returns the vertex at a given position in the list.
*
* @param pos Position for which to retrieve the vertex.
* @return TitanVertex at the given position
*/
public TitanVertex get(int pos);
/**
* Sorts this list according to vertex ids in increasing order.
* If the list is already sorted, invoking this method incurs no cost.
*
* @throws UnsupportedOperationException If not all vertices in this list have an id
*/
public void sort();
/**
* Whether this list of vertices is sorted by id in increasing order.
*
* @return true if the vertices are sorted by id in increasing order, false otherwise
*/
public boolean isSorted();
/**
* Returns a sub list of this list of vertices from the given position with the given number of vertices.
*
* @param fromPosition Position at which the sub list starts
* @param length Number of vertices in the sub list
* @return The sub list of vertices
*/
public VertexList subList(int fromPosition, int length);
/**
* Returns a list of ids of all vertices in this list of vertices in the same order of the original vertex list.
* <p/>
* Uses an efficient primitive variable-sized array.
*
* @return A list of idAuthorities of all vertices in this list of vertices in the same order of the original vertex list.
* @see AbstractLongList
*/
public LongArrayList getIDs();
/**
* Returns the id of the vertex at the specified position
*
* @param pos The position of the vertex in the list
* @return The id of that vertex
*/
public long getID(int pos);
}
| 0 (true) | titan-core_src_main_java_com_thinkaurelius_titan_core_VertexList.java |
61 |
public class AWSClient {
private String endpoint;
private final AwsConfig awsConfig;
public AWSClient(AwsConfig awsConfig) {
if (awsConfig == null) {
throw new IllegalArgumentException("AwsConfig is required!");
}
if (awsConfig.getAccessKey() == null) {
throw new IllegalArgumentException("AWS access key is required!");
}
if (awsConfig.getSecretKey() == null) {
throw new IllegalArgumentException("AWS secret key is required!");
}
this.awsConfig = awsConfig;
endpoint = awsConfig.getHostHeader();
}
public List<String> getPrivateIpAddresses() throws Exception {
return new DescribeInstances(awsConfig).execute(endpoint);
}
public void setEndpoint(String s) {
this.endpoint = s;
}
}
| 0 (true) | hazelcast-cloud_src_main_java_com_hazelcast_aws_AWSClient.java |
388 |
public class SupportLevelFilter extends Filter {
@Override
public int decide(LoggingEvent event) {
if(SupportLevel.SUPPORT.equals(event.getLevel())) {
return Filter.DENY;
}
return Filter.ACCEPT;
}
}
| 0 (true) | common_src_main_java_org_broadleafcommerce_common_logging_SupportLevelFilter.java |
508 |
public class OClassDictionary {
private static final OClassDictionary instance = new OClassDictionary();
public Class<?> getClassByCode(final char iType) {
switch (iType) {
case '0':
return ODocument.class;
// case '1':
// return ORecordColumn.class;
case '2':
return ORecordFlat.class;
case '3':
return ORecordBytes.class;
case '4':
return OClass.class;
case '5':
return OProperty.class;
case '6':
return OUser.class;
case '7':
return OStorageConfiguration.class;
case '8':
return OStoragePhysicalClusterConfigurationLocal.class;
case '9':
return OStorageDataConfiguration.class;
case 'a':
return OStorageClusterHoleConfiguration.class;
case 'b':
return OStorageDataHoleConfiguration.class;
case 'c':
return OStorageSegmentConfiguration.class;
case 'd':
return OStorageFileConfiguration.class;
case 'f':
return OStoragePhysicalClusterConfigurationLocal.class;
}
throw new OConfigurationException("Unsupported record type: " + iType);
}
public Character getCodeByClass(final Class<?> iClass) {
if (iClass.equals(ODocument.class))
return '0';
// if (iClass.equals(ORecordColumn.class))
// return '1';
if (iClass.equals(ORecordFlat.class))
return '2';
if (iClass.equals(ORecordBytes.class))
return '3';
if (iClass.equals(OClass.class))
return '4';
if (iClass.equals(OProperty.class))
return '5';
if (iClass.equals(OUser.class))
return '6';
if (iClass.equals(OStorageConfiguration.class))
return '7';
if (iClass.equals(OStoragePhysicalClusterConfigurationLocal.class))
return '8';
if (iClass.equals(OStorageDataConfiguration.class))
return '9';
if (iClass.equals(OStorageClusterHoleConfiguration.class))
return 'a';
if (iClass.equals(OStorageDataHoleConfiguration.class))
return 'b';
if (iClass.equals(OStorageSegmentConfiguration.class))
return 'c';
if (iClass.equals(OStorageFileConfiguration.class))
return 'd';
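// Note: the next branch is unreachable in practice; the same class already matched the '8' check above.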
if (iClass.equals(OStoragePhysicalClusterConfigurationLocal.class))
return 'f';
return null;
}
public static OClassDictionary instance() {
return instance;
}
}
| 0 (true) | core_src_main_java_com_orientechnologies_orient_core_entity_OClassDictionary.java |
93 |
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.METHOD)
public @interface ConsoleCommand {
String[] aliases() default {};
String description() default "";
boolean splitInWords() default true;
}
| 0 (true) | commons_src_main_java_com_orientechnologies_common_console_annotation_ConsoleCommand.java |
5,830 |
public class SearchContextHighlight {
private final List<Field> fields;
public SearchContextHighlight(List<Field> fields) {
this.fields = fields;
}
public List<Field> fields() {
return fields;
}
public static class Field {
// Fields that default to null or -1 are often set to their real default in HighlighterParseElement#parse
private final String field;
private int fragmentCharSize = -1;
private int numberOfFragments = -1;
private int fragmentOffset = -1;
private String encoder;
private String[] preTags;
private String[] postTags;
private Boolean scoreOrdered;
private Boolean highlightFilter;
private Boolean requireFieldMatch;
private String highlighterType;
private Boolean forceSource;
private String fragmenter;
private int boundaryMaxScan = -1;
private Character[] boundaryChars = null;
private Query highlightQuery;
private int noMatchSize = -1;
private Set<String> matchedFields;
private Map<String, Object> options;
private int phraseLimit = -1;
public Field(String field) {
this.field = field;
}
public String field() {
return field;
}
public int fragmentCharSize() {
return fragmentCharSize;
}
public void fragmentCharSize(int fragmentCharSize) {
this.fragmentCharSize = fragmentCharSize;
}
public int numberOfFragments() {
return numberOfFragments;
}
public void numberOfFragments(int numberOfFragments) {
this.numberOfFragments = numberOfFragments;
}
public int fragmentOffset() {
return fragmentOffset;
}
public void fragmentOffset(int fragmentOffset) {
this.fragmentOffset = fragmentOffset;
}
public String encoder() {
return encoder;
}
public void encoder(String encoder) {
this.encoder = encoder;
}
public String[] preTags() {
return preTags;
}
public void preTags(String[] preTags) {
this.preTags = preTags;
}
public String[] postTags() {
return postTags;
}
public void postTags(String[] postTags) {
this.postTags = postTags;
}
public Boolean scoreOrdered() {
return scoreOrdered;
}
public void scoreOrdered(boolean scoreOrdered) {
this.scoreOrdered = scoreOrdered;
}
public Boolean highlightFilter() {
return highlightFilter;
}
public void highlightFilter(boolean highlightFilter) {
this.highlightFilter = highlightFilter;
}
public Boolean requireFieldMatch() {
return requireFieldMatch;
}
public void requireFieldMatch(boolean requireFieldMatch) {
this.requireFieldMatch = requireFieldMatch;
}
public String highlighterType() {
return highlighterType;
}
public void highlighterType(String type) {
this.highlighterType = type;
}
public Boolean forceSource() {
return forceSource;
}
public void forceSource(boolean forceSource) {
this.forceSource = forceSource;
}
public String fragmenter() {
return fragmenter;
}
public void fragmenter(String fragmenter) {
this.fragmenter = fragmenter;
}
public int boundaryMaxScan() {
return boundaryMaxScan;
}
public void boundaryMaxScan(int boundaryMaxScan) {
this.boundaryMaxScan = boundaryMaxScan;
}
public Character[] boundaryChars() {
return boundaryChars;
}
public void boundaryChars(Character[] boundaryChars) {
this.boundaryChars = boundaryChars;
}
public Query highlightQuery() {
return highlightQuery;
}
public void highlightQuery(Query highlightQuery) {
this.highlightQuery = highlightQuery;
}
public int noMatchSize() {
return noMatchSize;
}
public void noMatchSize(int noMatchSize) {
this.noMatchSize = noMatchSize;
}
public int phraseLimit() {
return phraseLimit;
}
public void phraseLimit(int phraseLimit) {
this.phraseLimit = phraseLimit;
}
public Set<String> matchedFields() {
return matchedFields;
}
public void matchedFields(Set<String> matchedFields) {
this.matchedFields = matchedFields;
}
public Map<String, Object> options() {
return options;
}
public void options(Map<String, Object> options) {
this.options = options;
}
}
}
| 1 (no label) | src_main_java_org_elasticsearch_search_highlight_SearchContextHighlight.java |
72 |
public interface OSharedResource {
void acquireSharedLock();
void releaseSharedLock();
void acquireExclusiveLock();
void releaseExclusiveLock();
}
| 0 (true) | commons_src_main_java_com_orientechnologies_common_concur_resource_OSharedResource.java |
167 |
@Test(alwaysRun = true)
public abstract class SpeedTestGroup {
protected static final int TIME_WAIT = 1000;
protected List<SpeedTestAbstract> tests = new ArrayList<SpeedTestAbstract>();
protected HashMap<String, TreeMap<Long, String>> results = new HashMap<String, TreeMap<Long, String>>();
protected SpeedTestGroup() {
}
public void go() {
for (SpeedTestAbstract test : tests) {
test.data().go(test);
Runtime.getRuntime().gc();
try {
Thread.sleep(3000);
} catch (InterruptedException e) {
}
}
}
protected SpeedTestAbstract addTest(SpeedTestAbstract test) {
test.data.setTestGroup(this);
tests.add(test);
return test;
}
public void setResult(String iResultType, String iTestName, long iResult) {
TreeMap<Long, String> result = results.get(iResultType);
if (result == null) {
result = new TreeMap<Long, String>();
results.put(iResultType, result);
}
result.put(iResult, iTestName);
}
@AfterClass
protected void tearDown() throws Exception {
printResults();
}
protected void printResults() {
System.out.println("FINAL RESULTS (faster is the first one):");
int i;
for (Entry<String, TreeMap<Long, String>> result : results.entrySet()) {
System.out.println("+ " + result.getKey() + ":");
i = 1;
long refValue = 0;
for (Entry<Long, String> entry : result.getValue().entrySet())
if (i == 1) {
System.out.println(" " + i++ + ": " + entry.getValue() + " = " + entry.getKey());
refValue = entry.getKey();
} else
System.out.println(" " + i++ + ": " + entry.getValue() + " = " + entry.getKey() + " (+"
+ (entry.getKey() * 100 / refValue - 100) + "%)");
System.out.println();
}
}
}
| 0 (true) | commons_src_test_java_com_orientechnologies_common_test_SpeedTestGroup.java |
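A worked example of the relative-cost suffix printed above: with refValue = 200 and entry.getKey() = 250, the output is (250 * 100 / 200 - 100) = +25%, i.e. that test was 25% slower than the fastest one in the group.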
1,469 |
public static class Builder {
private ShardId shardId;
private final List<ShardRouting> shards;
private boolean primaryAllocatedPostApi;
public Builder(IndexShardRoutingTable indexShard) {
this.shardId = indexShard.shardId;
this.shards = newArrayList(indexShard.shards);
this.primaryAllocatedPostApi = indexShard.primaryAllocatedPostApi();
}
public Builder(ShardId shardId, boolean primaryAllocatedPostApi) {
this.shardId = shardId;
this.shards = newArrayList();
this.primaryAllocatedPostApi = primaryAllocatedPostApi;
}
public Builder addShard(ImmutableShardRouting shardEntry) {
for (ShardRouting shard : shards) {
// don't add two that map to the same node id
// we rely on the fact that a node does not have primary and backup of the same shard
if (shard.assignedToNode() && shardEntry.assignedToNode()
&& shard.currentNodeId().equals(shardEntry.currentNodeId())) {
return this;
}
}
shards.add(shardEntry);
return this;
}
public Builder removeShard(ShardRouting shardEntry) {
shards.remove(shardEntry);
return this;
}
public IndexShardRoutingTable build() {
// we can automatically set allocatedPostApi to true if the primary is active
if (!primaryAllocatedPostApi) {
for (ShardRouting shardRouting : shards) {
if (shardRouting.primary() && shardRouting.active()) {
primaryAllocatedPostApi = true;
}
}
}
return new IndexShardRoutingTable(shardId, ImmutableList.copyOf(shards), primaryAllocatedPostApi);
}
public static IndexShardRoutingTable readFrom(StreamInput in) throws IOException {
String index = in.readString();
return readFromThin(in, index);
}
public static IndexShardRoutingTable readFromThin(StreamInput in, String index) throws IOException {
int iShardId = in.readVInt();
boolean allocatedPostApi = in.readBoolean();
Builder builder = new Builder(new ShardId(index, iShardId), allocatedPostApi);
int size = in.readVInt();
for (int i = 0; i < size; i++) {
ImmutableShardRouting shard = ImmutableShardRouting.readShardRoutingEntry(in, index, iShardId);
builder.addShard(shard);
}
return builder.build();
}
public static void writeTo(IndexShardRoutingTable indexShard, StreamOutput out) throws IOException {
out.writeString(indexShard.shardId().index().name());
writeToThin(indexShard, out);
}
public static void writeToThin(IndexShardRoutingTable indexShard, StreamOutput out) throws IOException {
out.writeVInt(indexShard.shardId.id());
out.writeBoolean(indexShard.primaryAllocatedPostApi());
out.writeVInt(indexShard.shards.size());
for (ShardRouting entry : indexShard) {
entry.writeToThin(out);
}
}
}
| 1 (no label) | src_main_java_org_elasticsearch_cluster_routing_IndexShardRoutingTable.java |
93 |
DISJOINT {
@Override
public boolean evaluate(Object value, Object condition) {
Preconditions.checkArgument(condition instanceof Geoshape);
if (value == null) return false;
Preconditions.checkArgument(value instanceof Geoshape);
return ((Geoshape) value).disjoint((Geoshape) condition);
}
@Override
public String toString() {
return "disjoint";
}
@Override
public boolean hasNegation() {
return true;
}
@Override
public TitanPredicate negate() {
return INTERSECT;
}
},
| 0 (true) | titan-core_src_main_java_com_thinkaurelius_titan_core_attribute_Geo.java |
545 |
public class TableCreator {
protected Col[] cols;
protected int rowWidth;
protected StringBuilder sb;
protected int globalRowHeaderWidth = 20;
public TableCreator(Col[] cols) {
this.cols = cols;
this.rowWidth = calculateRowWidth();
this.sb = new StringBuilder("\r\n");
addSeparator();
addRow(cols);
addSeparator();
}
protected int calculateRowWidth() {
int length = 1;
for (Col col : cols) {
length += col.width + 3;
}
return length;
}
public TableCreator addSeparator() {
sb.append(StringUtils.leftPad("", rowWidth, '-')).append("\r\n");
return this;
}
public TableCreator addRow(Col[] cols) {
String[] data = new String[cols.length];
for (int i = 0; i < cols.length; i++) {
data[i] = cols[i].title;
}
return addRow(data);
}
public TableCreator addRow(Object[] data) {
if (data.length != cols.length) {
throw new IllegalArgumentException("Wrong number of data elements. Needed [" + cols.length + "] " +
"but received [" + data.length + "]");
}
sb.append('|');
for (int i = 0; i < data.length; i++) {
String trimmed = StringUtils.left(String.valueOf(data[i]), cols[i].width);
sb.append(' ').append(StringUtils.rightPad(trimmed, cols[i].width)).append(" |");
}
sb.append("\r\n");
return this;
}
public TableCreator addRow(String rowHeader, Object rowData) {
String trimmed = StringUtils.left(rowHeader, globalRowHeaderWidth);
sb.append("| ")
.append(StringUtils.rightPad(trimmed, globalRowHeaderWidth))
.append(StringUtils.rightPad(String.valueOf(rowData), rowWidth - globalRowHeaderWidth - 3))
.append("|\r\n");
return this;
}
public TableCreator withGlobalRowHeaderWidth(int width) {
this.globalRowHeaderWidth = width;
return this;
}
public String toString() {
return sb.toString();
}
public static class Col {
String title;
int width;
public Col(String title) {
this.title = title;
this.width = title.length();
}
public Col(String title, int width) {
this.title = title;
this.width = width;
}
}
}
| 0 (true) | common_src_main_java_org_broadleafcommerce_common_util_TableCreator.java |
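A hedged usage sketch of TableCreator (the column names, widths, and row values are arbitrary illustrations):
TableCreator.Col[] cols = {
    new TableCreator.Col("Name", 10),
    new TableCreator.Col("Qty", 5)
};
String table = new TableCreator(cols)                    // constructor renders the header between separators
    .addRow(new Object[] { "Widget", 3 })
    .addRow(new Object[] { "A very long name", 12 })     // values longer than the column width are truncated
    .addSeparator()
    .toString();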
91 |
mapClient.addEntryListener(new EntryAdapter<Integer, GenericEvent>() {
public void entryAdded(EntryEvent<Integer, GenericEvent> event) {
adds++;
}
public void entryEvicted(EntryEvent<Integer, GenericEvent> event) {
if (event.getValue() == null) evictionsNull++;
}
}, true);
| 0 (true) | hazelcast-client_src_test_java_com_hazelcast_client_ClientEntryListenerDisconnectTest.java |
336 |
public enum OPERATION_MODE {
SYNCHRONOUS, ASYNCHRONOUS, ASYNCHRONOUS_NOANSWER
}
| 0 (true) | core_src_main_java_com_orientechnologies_orient_core_db_ODatabaseComplex.java |
13 |
{
@Override
public boolean accept( Throwable item )
{
return !(item instanceof LifecycleException);
}
}));
| 1 (no label) | enterprise_ha_src_main_java_org_neo4j_kernel_ha_backup_HaBackupProvider.java |
576 |
public class OptimizeRequest extends BroadcastOperationRequest<OptimizeRequest> {
public static final class Defaults {
public static final boolean WAIT_FOR_MERGE = true;
public static final int MAX_NUM_SEGMENTS = -1;
public static final boolean ONLY_EXPUNGE_DELETES = false;
public static final boolean FLUSH = true;
}
private boolean waitForMerge = Defaults.WAIT_FOR_MERGE;
private int maxNumSegments = Defaults.MAX_NUM_SEGMENTS;
private boolean onlyExpungeDeletes = Defaults.ONLY_EXPUNGE_DELETES;
private boolean flush = Defaults.FLUSH;
/**
* Constructs an optimization request over one or more indices.
*
* @param indices The indices to optimize, no indices passed means all indices will be optimized.
*/
public OptimizeRequest(String... indices) {
super(indices);
}
public OptimizeRequest() {
}
/**
* Should the call block until the optimize completes. Defaults to <tt>true</tt>.
*/
public boolean waitForMerge() {
return waitForMerge;
}
/**
* Should the call block until the optimize completes. Defaults to <tt>true</tt>.
*/
public OptimizeRequest waitForMerge(boolean waitForMerge) {
this.waitForMerge = waitForMerge;
return this;
}
/**
* Will optimize the index down to <= maxNumSegments. By default, will cause the optimize
* process to optimize down to half the configured number of segments.
*/
public int maxNumSegments() {
return maxNumSegments;
}
/**
* Will optimize the index down to <= maxNumSegments. By default, will cause the optimize
* process to optimize down to half the configured number of segments.
*/
public OptimizeRequest maxNumSegments(int maxNumSegments) {
this.maxNumSegments = maxNumSegments;
return this;
}
/**
* Should the optimization only expunge deletes from the index, without full optimization.
* Defaults to full optimization (<tt>false</tt>).
*/
public boolean onlyExpungeDeletes() {
return onlyExpungeDeletes;
}
/**
* Should the optimization only expunge deletes from the index, without full optimization.
* Defaults to full optimization (<tt>false</tt>).
*/
public OptimizeRequest onlyExpungeDeletes(boolean onlyExpungeDeletes) {
this.onlyExpungeDeletes = onlyExpungeDeletes;
return this;
}
/**
* Should flush be performed after the optimization. Defaults to <tt>true</tt>.
*/
public boolean flush() {
return flush;
}
/**
* Should flush be performed after the optimization. Defaults to <tt>true</tt>.
*/
public OptimizeRequest flush(boolean flush) {
this.flush = flush;
return this;
}
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
waitForMerge = in.readBoolean();
maxNumSegments = in.readInt();
onlyExpungeDeletes = in.readBoolean();
flush = in.readBoolean();
}
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeBoolean(waitForMerge);
out.writeInt(maxNumSegments);
out.writeBoolean(onlyExpungeDeletes);
out.writeBoolean(flush);
}
}
| 0 (true) | src_main_java_org_elasticsearch_action_admin_indices_optimize_OptimizeRequest.java |
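A usage sketch relying on the defaults documented above (the index name is hypothetical):
OptimizeRequest request = new OptimizeRequest("logs-2014")
    .maxNumSegments(1)         // merge down to a single segment
    .onlyExpungeDeletes(false) // full optimization, not just delete expunging
    .flush(true);              // flush afterwards (the default)
// waitForMerge defaults to true, so execution blocks until merging completes.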
249 |
public class BroadleafCurrencyUtils {
public static final MathContext ROUND_FLOOR_MATH_CONTEXT = new MathContext(0, RoundingMode.FLOOR);
public static Money getMoney(BigDecimal amount, BroadleafCurrency currency) {
if (amount == null) {
return null;
}
if (currency != null) {
return new Money(amount, currency.getCurrencyCode());
} else {
return new Money(amount);
}
}
public static Money getMoney(BroadleafCurrency currency) {
if (currency != null) {
return new Money(0,currency.getCurrencyCode());
} else {
return new Money();
}
}
public static Currency getCurrency(Money money) {
if (money == null) {
return Money.defaultCurrency();
}
return (money.getCurrency() == null) ? Money.defaultCurrency() : money.getCurrency();
}
public static Currency getCurrency(BroadleafCurrency currency) {
return (currency == null) ? Money.defaultCurrency() : Currency.getInstance(currency.getCurrencyCode());
}
/**
* Returns the unit amount (e.g. .01 for US and all other 2 decimal currencies)
* @param difference
* @return
*/
public static Money getUnitAmount(Money difference) {
Currency currency = BroadleafCurrencyUtils.getCurrency(difference);
BigDecimal divisor = new BigDecimal(Math.pow(10, currency.getDefaultFractionDigits()));
BigDecimal unitAmount = new BigDecimal("1").divide(divisor);
if (difference.lessThan(BigDecimal.ZERO)) {
unitAmount = unitAmount.negate();
}
return new Money(unitAmount, currency);
}
/**
* Returns the unit amount (e.g. .01 for US and all other 2 decimal currencies)
* @param blCurrency
* @return
*/
public static Money getUnitAmount(BroadleafCurrency blCurrency) {
Currency currency = getCurrency(blCurrency);
BigDecimal divisor = new BigDecimal(Math.pow(10, currency.getDefaultFractionDigits()));
BigDecimal unitAmount = new BigDecimal("1").divide(divisor);
return new Money(unitAmount, currency);
}
/**
* Returns the remainder amount if the passed in totalAmount was divided by the
* quantity taking into account the normal unit of the currency (e.g. .01 for US).
* @param totalAmount
* @param quantity
* @return
*/
public static int calculateRemainder(Money totalAmount, int quantity) {
if (totalAmount == null || totalAmount.isZero() || quantity == 0) {
return 0;
}
// Use this to convert to a whole number (e.g. 1.05 becomes 105 in US currency).
BigDecimal multiplier = new BigDecimal(10).pow(totalAmount.getAmount().scale());
BigDecimal amount = totalAmount.getAmount().multiply(multiplier);
BigDecimal remainder = amount.remainder(new BigDecimal(quantity), ROUND_FLOOR_MATH_CONTEXT);
return remainder.toBigInteger().intValue();
}
}
| 0 (true) | common_src_main_java_org_broadleafcommerce_common_currency_util_BroadleafCurrencyUtils.java |
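A worked example of calculateRemainder, assuming a two-decimal default currency such as USD (Money's BigDecimal constructor is assumed here):
Money total = new Money(new BigDecimal("1.05")); // scale 2, so the multiplier below is 10^2
int remainder = BroadleafCurrencyUtils.calculateRemainder(total, 2);
// 1.05 * 100 = 105 as a whole number; 105 % 2 = 1, so remainder == 1:
// one .01 unit is left over when splitting 1.05 evenly across two quantities.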
151 |
private static class CommandVisitor implements CommandRecordVisitor
{
private final int localId;
private final Visitor visitor;
public CommandVisitor( int localId, Visitor visitor )
{
this.localId = localId;
this.visitor = visitor;
}
@Override
public void visitNode( NodeRecord record )
{
if ( !record.inUse() )
{
visitor.visitDeleteNode( localId, record.getId() );
}
else
{
visitor.visitUpdateNode( localId, record );
}
}
@Override
public void visitRelationship( RelationshipRecord record )
{
if ( !record.inUse() )
{
visitor.visitDeleteRelationship( localId, record.getId() );
}
else
{
visitor.visitUpdateRelationship( localId, record );
}
}
@Override
public void visitProperty( PropertyRecord record )
{
if ( !record.inUse() )
{
visitor.visitDeleteProperty( localId, record.getId() );
}
else
{
visitor.visitUpdateProperty( localId, record );
}
}
@Override
public void visitRelationshipTypeToken( RelationshipTypeTokenRecord record )
{
if ( !record.inUse() )
{
visitor.visitDeleteRelationshipTypeToken( localId, record.getId() );
}
else
{
visitor.visitUpdateRelationshipTypeToken( localId, record );
}
}
@Override
public void visitLabelToken( LabelTokenRecord record )
{
if ( !record.inUse() )
{
visitor.visitDeleteLabelToken( localId, record.getId() );
}
else
{
visitor.visitUpdateLabelToken( localId, record );
}
}
@Override
public void visitPropertyKeyToken( PropertyKeyTokenRecord record )
{
if ( !record.inUse() )
{
visitor.visitDeletePropertyKeyToken( localId, record.getId() );
}
else
{
visitor.visitUpdatePropertyKeyToken( localId, record );
}
}
@Override
public void visitNeoStore( NeoStoreRecord record )
{
if ( !record.inUse() )
{
visitor.visitDeleteNeoStore( localId, record.getId() );
}
else
{
visitor.visitUpdateNeoStore( localId, record );
}
}
@Override
public void visitSchemaRule( Collection<DynamicRecord> records )
{
if ( ! records.isEmpty() )
{
DynamicRecord first = records.iterator().next();
if ( !first.inUse() )
{
visitor.visitDeleteSchemaRule( localId, records, first.getId() );
}
else
{
visitor.visitUpdateSchemaRule( localId, records );
}
}
}
}
| 0 (true) | community_kernel_src_main_java_org_neo4j_kernel_impl_transaction_xaframework_TransactionReader.java |
20 |
@Controller("blAdminCatalogActionsController")
public class AdminCatalogActionsController extends AdminAbstractController {
@Resource(name = "blAdminCatalogService")
protected AdminCatalogService adminCatalogService;
/**
* Invokes a separate service to generate a list of Skus for a particular {@link Product} and that {@link Product}'s
* Product Options
*/
@RequestMapping(value = "product/{productId}/{skusFieldName}/generate-skus",
method = RequestMethod.GET,
produces = "application/json")
public @ResponseBody Map<String, Object> generateSkus(HttpServletRequest request, HttpServletResponse response, Model model,
@PathVariable(value = "productId") Long productId,
@PathVariable(value = "skusFieldName") String skusFieldName) {
HashMap<String, Object> result = new HashMap<String, Object>();
Integer skusGenerated = adminCatalogService.generateSkusFromProduct(productId);
//TODO: Externalize these messages to property files
if (skusGenerated == 0) {
result.put("message", "No Skus were generated. It is likely that each product option value permutation " +
"already has a Sku associated with it");
} else if (skusGenerated == -1) {
result.put("message", "This product has no Product Options configured to generate Skus from");
} else {
result.put("message", skusGenerated + " Skus have been generated from the configured product options");
}
String url = request.getRequestURL().toString();
url = url.substring(0, url.indexOf("/generate-skus"));
result.put("skusGenerated", skusGenerated);
result.put("listGridUrl", url);
return result;
}
}
| 0 (true) | admin_broadleaf-admin-module_src_main_java_org_broadleafcommerce_admin_web_controller_action_AdminCatalogActionsController.java |
451 |
public static class AdminPresentation {
public static final String FRIENDLYNAME = "friendlyName";
public static final String SECURITYLEVEL = "securityLevel";
public static final String ORDER = "order";
public static final String GRIDORDER = "gridOrder";
public static final String VISIBILITY = "visibility";
public static final String FIELDTYPE = "fieldType";
public static final String GROUP = "group";
public static final String GROUPORDER = "groupOrder";
public static final String GROUPCOLLAPSED = "groupCollapsed";
public static final String TAB = "tab";
public static final String TABORDER = "tabOrder";
public static final String LARGEENTRY = "largeEntry";
public static final String PROMINENT = "prominent";
public static final String COLUMNWIDTH = "columnWidth";
public static final String BROADLEAFENUMERATION = "broadleafEnumeration";
public static final String REQUIREDOVERRIDE = "requiredOverride";
public static final String EXCLUDED = "excluded";
public static final String TOOLTIP = "tooltip";
public static final String HELPTEXT = "helpText";
public static final String HINT = "hint";
public static final String SHOWIFPROPERTY = "showIfProperty";
public static final String CURRENCYCODEFIELD = "currencyCodeField";
public static final String RULEIDENTIFIER = "ruleIdentifier";
public static final String READONLY = "readOnly";
public static final String VALIDATIONCONFIGURATIONS = "validationConfigurations";
}
| 0 (true) | common_src_main_java_org_broadleafcommerce_common_presentation_override_PropertyType.java |
810 |
public abstract class ReadRequest extends PartitionClientRequest
implements Portable, SecureRequest {
protected String name;
public ReadRequest() {
}
public ReadRequest(String name) {
this.name = name;
}
@Override
protected int getPartition() {
ClientEngine clientEngine = getClientEngine();
Data key = clientEngine.getSerializationService().toData(name);
return clientEngine.getPartitionService().getPartitionId(key);
}
@Override
public String getServiceName() {
return AtomicLongService.SERVICE_NAME;
}
@Override
public int getFactoryId() {
return AtomicLongPortableHook.F_ID;
}
@Override
public void write(PortableWriter writer) throws IOException {
writer.writeUTF("n", name);
}
@Override
public void read(PortableReader reader) throws IOException {
name = reader.readUTF("n");
}
@Override
public Permission getRequiredPermission() {
return new AtomicLongPermission(name, ActionConstants.ACTION_READ);
}
}
| 0 (true) | hazelcast_src_main_java_com_hazelcast_concurrent_atomiclong_client_ReadRequest.java |
1,467 |
public class OSQLFunctionBothV extends OSQLFunctionMove {
public static final String NAME = "bothV";
public OSQLFunctionBothV() {
super(NAME, 0, 1);
}
@Override
protected Object move(final OrientBaseGraph graph, final OIdentifiable iRecord, final String[] iLabels) {
return e2v(graph, iRecord, Direction.BOTH, iLabels);
}
}
| 1 (no label) | graphdb_src_main_java_com_orientechnologies_orient_graph_sql_functions_OSQLFunctionBothV.java |
131 |
public class LongMaxUpdater extends Striped64 implements Serializable {
private static final long serialVersionUID = 7249069246863182397L;
/**
* Version of max for use in retryUpdate
*/
final long fn(long v, long x) { return v > x ? v : x; }
/**
* Creates a new instance with initial maximum of {@code
* Long.MIN_VALUE}.
*/
public LongMaxUpdater() {
base = Long.MIN_VALUE;
}
/**
* Updates the maximum to be at least the given value.
*
* @param x the value to update
*/
public void update(long x) {
Cell[] as; long b, v; HashCode hc; Cell a; int n;
if ((as = cells) != null ||
(b = base) < x && !casBase(b, x)) {
boolean uncontended = true;
int h = (hc = threadHashCode.get()).code;
if (as == null || (n = as.length) < 1 ||
(a = as[(n - 1) & h]) == null ||
((v = a.value) < x && !(uncontended = a.cas(v, x))))
retryUpdate(x, hc, uncontended);
}
}
/**
* Returns the current maximum. The returned value is
* <em>NOT</em> an atomic snapshot; invocation in the absence of
* concurrent updates returns an accurate result, but concurrent
* updates that occur while the value is being calculated might
* not be incorporated.
*
* @return the maximum
*/
public long max() {
Cell[] as = cells;
long max = base;
if (as != null) {
int n = as.length;
long v;
for (int i = 0; i < n; ++i) {
Cell a = as[i];
if (a != null && (v = a.value) > max)
max = v;
}
}
return max;
}
/**
* Resets variables maintaining updates to {@code Long.MIN_VALUE}.
* This method may be a useful alternative to creating a new
* updater, but is only effective if there are no concurrent
* updates. Because this method is intrinsically racy, it should
* only be used when it is known that no threads are concurrently
* updating.
*/
public void reset() {
internalReset(Long.MIN_VALUE);
}
/**
* Equivalent in effect to {@link #max} followed by {@link
* #reset}. This method may apply for example during quiescent
* points between multithreaded computations. If there are
* updates concurrent with this method, the returned value is
* <em>not</em> guaranteed to be the final value occurring before
* the reset.
*
* @return the maximum
*/
public long maxThenReset() {
Cell[] as = cells;
long max = base;
base = Long.MIN_VALUE;
if (as != null) {
int n = as.length;
for (int i = 0; i < n; ++i) {
Cell a = as[i];
if (a != null) {
long v = a.value;
a.value = Long.MIN_VALUE;
if (v > max)
max = v;
}
}
}
return max;
}
/**
* Returns the String representation of the {@link #max}.
* @return the String representation of the {@link #max}
*/
public String toString() {
return Long.toString(max());
}
/**
* Equivalent to {@link #max}.
*
* @return the maximum
*/
public long longValue() {
return max();
}
/**
* Returns the {@link #max} as an {@code int} after a narrowing
* primitive conversion.
*/
public int intValue() {
return (int)max();
}
/**
* Returns the {@link #max} as a {@code float}
* after a widening primitive conversion.
*/
public float floatValue() {
return (float)max();
}
/**
* Returns the {@link #max} as a {@code double} after a widening
* primitive conversion.
*/
public double doubleValue() {
return (double)max();
}
private void writeObject(java.io.ObjectOutputStream s)
throws java.io.IOException {
s.defaultWriteObject();
s.writeLong(max());
}
private void readObject(java.io.ObjectInputStream s)
throws java.io.IOException, ClassNotFoundException {
s.defaultReadObject();
busy = 0;
cells = null;
base = s.readLong();
}
}
| 0 (true) | src_main_java_jsr166e_LongMaxUpdater.java |
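A minimal usage sketch of the updater above:
LongMaxUpdater maxLatency = new LongMaxUpdater(); // initial maximum is Long.MIN_VALUE
maxLatency.update(42);
maxLatency.update(17);                     // no effect: 17 < current maximum
long observed = maxLatency.max();          // 42
long drained = maxLatency.maxThenReset();  // 42; the updater then restarts at Long.MIN_VALUE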
70 |
public class TxLog
{
public static final int MAX_RECORD_SIZE = 3 + Xid.MAXGTRIDSIZE + Xid.MAXBQUALSIZE;
public static final int LOG_ROTATION_THRESHOLD = 1000; // As a count of records
public static final int SCAN_WINDOW_SIZE = MAX_RECORD_SIZE * LOG_ROTATION_THRESHOLD;
private final ByteCounterMonitor bufferMonitor;
public static final byte NULL_BYTE = 0;
public static final byte TX_START = 1;
public static final byte BRANCH_ADD = 2;
public static final byte MARK_COMMIT = 3;
public static final byte TX_DONE = 4;
private final Collection<ByteArrayKey> activeTransactions = new HashSet<>();
private final FileSystemAbstraction fileSystem;
private File name = null;
private LogBuffer logBuffer;
private int recordCount = 0;
private static final class ByteArrayKey
{
private final byte[] bytes;
private ByteArrayKey( byte[] bytes )
{
this.bytes = bytes;
}
@Override
public int hashCode()
{
return Arrays.hashCode( bytes );
}
@Override
public boolean equals( Object obj )
{
return obj instanceof ByteArrayKey && Arrays.equals( bytes, ((ByteArrayKey)obj).bytes );
}
}
/**
* Initializes a transaction log using <CODE>filename</CODE>. If the file
* isn't empty the position will be set to size of file so new records will
* be appended.
*
* @param fileName
* Filename of file to use
* @param fileSystem
* The concrete FileSystemAbstraction to use.
* @param monitors {@link Monitors}.
* @throws IOException
* If unable to open file
*/
public TxLog( File fileName, FileSystemAbstraction fileSystem, Monitors monitors ) throws IOException
{
this.bufferMonitor = monitors.newMonitor( ByteCounterMonitor.class, getClass() );
if ( fileName == null )
{
throw new IllegalArgumentException( "Null filename" );
}
this.fileSystem = fileSystem;
StoreChannel fileChannel = fileSystem.open( fileName, "rw" );
fileChannel.position( fileChannel.size() );
logBuffer = new DirectMappedLogBuffer( fileChannel, bufferMonitor );
this.name = fileName;
recreateActiveTransactionState();
}
private void recreateActiveTransactionState() throws IOException
{
for ( List<Record> tx : getDanglingRecords() )
{
for ( Record record : tx )
{
if ( record.getType() == TX_START )
{
activeTransactions.add( new ByteArrayKey( record.getGlobalId() ) );
}
}
}
}
/**
* Returns the name of the transaction log.
*/
public String getName()
{
return name.getPath();
}
/**
* Returns the number of records (one of TX_START,BRANCH_ADD,MARK_COMMIT or
* TX_DONE) written since this instance was created or truncated.
*/
public int getRecordCount()
{
return recordCount;
}
/**
* Closes the file representing the transaction log.
*/
public synchronized void close() throws IOException
{
logBuffer.force();
logBuffer.getFileChannel().close();
}
/**
* Forces the log file (with metadata). Useful when switching log.
*/
public void force() throws IOException
{
logBuffer.force();
}
/**
* Truncates the file to zero size and sets the record count to zero.
*/
public synchronized void truncate() throws IOException
{
StoreChannel fileChannel = logBuffer.getFileChannel();
fileChannel.position( 0 );
fileChannel.truncate( 0 );
recordCount = 0;
logBuffer = new DirectMappedLogBuffer( fileChannel, bufferMonitor );
activeTransactions.clear();
}
/**
* Writes a <CODE>TX_START</CODE> record to the file.
*
* @param globalId
* The global id of the new transaction
* @throws IOException
* If unable to write
*/
// tx_start(byte)|gid_length(byte)|globalId
public synchronized void txStart( byte globalId[] ) throws IOException
{
assertNotNull( globalId, "global id" );
if ( !activeTransactions.add( new ByteArrayKey( globalId ) ) )
{
throw new IllegalStateException( "Global ID " + Arrays.toString( globalId ) + " already started" );
}
byte globalIdSize = (byte) globalId.length;
logBuffer.put( TX_START ).put( globalIdSize ).put( globalId );
recordCount++;
}
private void assertNotNull( Object obj, String name )
{
if ( obj == null )
{
throw new IllegalArgumentException( "Null " + name );
}
}
/**
* Writes a <CODE>BRANCH_ADD</CODE> record to the file.
*
* @param globalId
* The global id of the transaction
* @param branchId
* The branch id for the enlisted resource
* @throws IOException
* If unable to write
*/
// add_branch(byte)|gid_length(byte)|bid_length(byte)|globalId|branchId
public synchronized void addBranch( byte globalId[], byte branchId[] )
throws IOException
{
assertNotNull( globalId, "global id" );
assertNotNull( branchId, "branch id" );
assertActive( globalId );
byte globalIdSize = (byte) globalId.length;
byte branchIdSize = (byte) branchId.length;
logBuffer.put( BRANCH_ADD ).put( globalIdSize ).put( branchIdSize ).put( globalId ).put( branchId );
recordCount++;
}
private void assertActive( byte[] globalId )
{
if ( !activeTransactions.contains( new ByteArrayKey( globalId ) ) )
{
throw new IllegalStateException( "Global ID " + Arrays.toString( globalId ) + " not active" );
}
}
/**
* Writes a <CODE>MARK_COMMIT</CODE> record to the file and forces the
* file to disk.
*
* @param globalId
* The global id of the transaction
* @throws IOException
* If unable to write
*/
// mark_committing(byte)|gid_length(byte)|globalId
// forces
public synchronized void markAsCommitting( byte globalId[], ForceMode forceMode ) throws IOException
{
assertNotNull( globalId, "global id" );
assertActive( globalId );
byte globalIdSize = (byte) globalId.length;
logBuffer.put( MARK_COMMIT ).put( globalIdSize ).put( globalId );
forceMode.force( logBuffer );
recordCount++;
}
/**
* Writes a <CODE>TX_DONE</CODE> record to the file.
*
* @param globalId
* The global id of the transaction completed
* @throws IOException
* If unable to write
*/
// tx_done(byte)|gid_length(byte)|globalId
public synchronized void txDone( byte globalId[] ) throws IOException
{
assertNotNull( globalId, "global id" );
if ( !activeTransactions.remove( new ByteArrayKey( globalId ) ) )
{
throw new IllegalStateException( "Global ID " + Arrays.toString( globalId ) + " not active" );
}
byte globalIdSize = (byte) globalId.length;
logBuffer.put( TX_DONE ).put( globalIdSize ).put( globalId );
recordCount++;
}
/**
* Made public for testing only.
* <p>
* Wraps a transaction record in the tx log file.
*/
public static class Record
{
private byte type = 0;
private byte globalId[] = null;
private byte branchId[] = null;
private int seqNr = -1;
Record( byte type, byte globalId[], byte branchId[], int seqNr )
{
if ( type < 1 || type > 4 )
{
throw new IllegalArgumentException( "Illegal type: " + type );
}
this.type = type;
this.globalId = globalId;
this.branchId = branchId;
this.seqNr = seqNr;
}
public byte getType()
{
return type;
}
public byte[] getGlobalId()
{
return globalId;
}
public byte[] getBranchId()
{
return branchId;
}
public int getSequenceNumber()
{
return seqNr;
}
@Override
public String toString()
{
XidImpl xid = new XidImpl( globalId, branchId == null ? new byte[0] : branchId );
int size = 1 + sizeOf( globalId ) + sizeOf( branchId );
return "TxLogRecord[" + typeName() + "," + xid + "," + seqNr + "," + size + "]";
}
private int sizeOf( byte[] id )
{
// If id is null it means this record type doesn't have it. TX_START/MARK_COMMIT/TX_DONE
// only has the global id, whereas BRANCH_ADD has got both the global and branch ids.
if ( id == null )
{
return 0;
}
// The length of the array (1 byte) + the actual array
return 1 + id.length;
}
String typeName()
{
switch ( type )
{
case TX_START:
return "TX_START";
case BRANCH_ADD:
return "BRANCH_ADD";
case MARK_COMMIT:
return "MARK_COMMIT";
case TX_DONE:
return "TX_DONE";
default:
return "<unknown type>";
}
}
}
void writeRecord( Record record, ForceMode forceMode ) throws IOException
{
switch ( record.getType() )
{
case TX_START:
txStart( record.getGlobalId() );
break;
case BRANCH_ADD:
addBranch( record.getGlobalId(), record.getBranchId() );
break;
case MARK_COMMIT:
markAsCommitting( record.getGlobalId(), forceMode );
break;
default:
// TX_DONE should never be passed in here
throw new IOException( "Illegal record type[" + record.getType() + "]" );
}
}
/**
* Returns a collection of lists, where each list contains the dangling
* records (transactions that haven't been completed yet) grouped by
* global transaction id.
*/
public synchronized Iterable<List<Record>> getDanglingRecords()
throws IOException
{
StoreChannel fileChannel = logBuffer.getFileChannel();
ByteBuffer buffer = ByteBuffer.allocateDirect(SCAN_WINDOW_SIZE);
readFileIntoBuffer( fileChannel, buffer, 0 );
// next record position
long nextPosition = 0;
// holds possible dangling records
int seqNr = 0;
Map<Xid,List<Record>> recordMap = new HashMap<>();
while ( buffer.hasRemaining() )
{
byte recordType = buffer.get();
int recordSize;
switch ( recordType )
{
case TX_START:
recordSize = readTxStartRecordInto( recordMap, buffer, seqNr++ );
break;
case BRANCH_ADD:
recordSize = readBranchAddRecordInto( recordMap, buffer, seqNr++ );
break;
case MARK_COMMIT:
recordSize = readMarkCommitRecordInto( recordMap, buffer, seqNr++ );
break;
case TX_DONE:
recordSize = readTxDoneAndRemoveTransactionFrom( recordMap, buffer );
break;
case NULL_BYTE:
// We accept and ignore arbitrary null-bytes in between records.
// I'm not sure where they come from, though. A challenge for another day.
// For now we just make sure to increment nextPosition, so we skip over
// them in case we want to move our buffer window.
recordSize = 1;
break;
default:
throw new IOException( "Unknown type: " + recordType );
}
if ( recordSize == 0 )
{
// Getting a record size of 0 means that read* methods found an incomplete or empty byte stream.
break;
}
nextPosition += recordSize;
// Reposition the scan window if we're getting to the end of it and there is more bytes in the
// channel to be read.
if ( buffer.remaining() < MAX_RECORD_SIZE && (fileChannel.size() - nextPosition) > buffer.remaining() )
{
readFileIntoBuffer( fileChannel, buffer, nextPosition );
}
}
return recordMap.values();
}
private void readFileIntoBuffer( StoreChannel fileChannel, ByteBuffer buffer, long nextPosition ) throws IOException
{
buffer.clear();
fileChannel.position( nextPosition );
fileChannel.read( buffer );
buffer.flip();
}
/**
* Read a TX_START record from the buffer, attach the given sequence number and store it in the recordMap.
* Returns the size of the record in bytes, or 0 if the byte stream is incomplete or empty.
*/
private static int readTxStartRecordInto(Map<Xid, List<Record>> recordMap, ByteBuffer buffer, int seqNr)
throws IOException
{
if ( !buffer.hasRemaining() )
{
return 0;
}
byte globalId[] = new byte[buffer.get()];
if ( buffer.remaining() < globalId.length )
{
return 0;
}
buffer.get(globalId);
Xid xid = new XidImpl( globalId, new byte[0] );
if ( recordMap.containsKey( xid ) )
{
throw new IOException( "Tx start for same xid[" + xid + "] found twice" );
}
List<Record> recordList = new LinkedList<>();
recordList.add( new Record( TX_START, globalId, null, seqNr ) );
recordMap.put( xid, recordList );
return 2 + globalId.length;
}
/**
* Same as {@link #readTxStartRecordInto}, but for BRANCH_ADD records.
*/
private static int readBranchAddRecordInto( Map<Xid, List<Record>> recordMap, ByteBuffer buffer, int seqNr)
throws IOException
{
if ( buffer.remaining() < 2 )
{
return 0;
}
byte globalId[] = new byte[buffer.get()];
byte branchId[] = new byte[buffer.get()];
if ( buffer.remaining() < globalId.length + branchId.length )
{
return 0;
}
buffer.get( globalId );
buffer.get( branchId );
Xid xid = new XidImpl( globalId, new byte[0] );
if ( !recordMap.containsKey( xid ) )
{
throw new IOException( String.format(
"Branch[%s] found for [%s] but no record list found in map",
UTF8.decode( branchId ), xid ) );
}
recordMap.get( xid ).add( new Record( BRANCH_ADD, globalId, branchId, seqNr ) );
return 3 + globalId.length + branchId.length;
}
/**
* Same as {@link #readTxStartRecordInto}, but for MARK_COMMIT records.
*/
private static int readMarkCommitRecordInto( Map<Xid, List<Record>> recordMap, ByteBuffer buffer, int seqNr)
throws IOException
{
if ( !buffer.hasRemaining() )
{
return 0;
}
byte[] globalId = new byte[buffer.get()];
if ( buffer.remaining() < globalId.length )
{
return 0;
}
buffer.get( globalId );
Xid xid = new XidImpl( globalId, new byte[0] );
if ( !recordMap.containsKey( xid ) )
{
throw new IOException(
"Committing xid[" + xid + "] mark found but no record list found in map" );
}
recordMap.get( xid ).add( new Record( MARK_COMMIT, globalId, null, seqNr ) );
return 2 + globalId.length;
}
/**
* Read a TX_DONE record from the given buffer, and removes the associated transaction from the given recordMap.
* Returns the size of the TX_DONE record in bytes, or 0 if the byte stream is incomplete or empty.
*/
private static int readTxDoneAndRemoveTransactionFrom( Map<Xid, List<Record>> recordMap, ByteBuffer buffer )
throws IOException
{
if ( !buffer.hasRemaining() )
{
return 0;
}
byte[] globalId = new byte[buffer.get()];
if ( buffer.remaining() < globalId.length )
{
return 0;
}
buffer.get( globalId );
Xid xid = new XidImpl( globalId, new byte[0] );
if ( !recordMap.containsKey( xid ) )
{
throw new IOException(
"Done marker for xid[" + xid + "] found but no record list found in map" );
}
recordMap.remove( xid );
return 2 + globalId.length;
}
/**
* Switches log file. Copies the dangling records in the current log file to the
* <CODE>newFile</CODE> and then makes the switch closing the old log file.
*
* @param newFile
* The filename of the new file to switch to
* @throws IOException
* If unable to switch log file
*/
public synchronized void switchToLogFile( File newFile )
throws IOException
{
if ( newFile == null )
{
throw new IllegalArgumentException( "Null filename" );
}
// copy all dangling records from current log to new log
force();
Iterable<List<Record>> itr = getDanglingRecords();
close();
List<Record> records = new ArrayList<>();
for ( List<Record> tx : itr )
{
records.addAll( tx );
}
Collections.sort( records, new Comparator<Record>()
{
@Override
public int compare( Record r1, Record r2 )
{
return Integer.compare( r1.getSequenceNumber(), r2.getSequenceNumber() );
}
} );
Iterator<Record> recordItr = records.iterator();
StoreChannel fileChannel = fileSystem.open( newFile, "rw" );
fileChannel.position( fileChannel.size() );
logBuffer = new DirectMappedLogBuffer( fileChannel, bufferMonitor );
name = newFile;
truncate();
while ( recordItr.hasNext() )
{
Record record = recordItr.next();
writeRecord( record, ForceMode.forced );
}
force();
}
}
| 0true
|
community_kernel_src_main_java_org_neo4j_kernel_impl_transaction_TxLog.java
|
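The TxLog readers above all assume the same length-prefixed record layout: one type byte, one length byte, then the payload. A minimal self-contained sketch of that layout (illustrative only, not the Neo4j implementation; the TX_START type value here is hypothetical):

import java.nio.ByteBuffer;

final class RecordLayoutSketch {
    static final byte TX_START = 1; // hypothetical type value, for illustration only

    // Writes: [type byte][length byte][globalId bytes]
    static ByteBuffer writeTxStart(byte[] globalId) {
        ByteBuffer buf = ByteBuffer.allocate(2 + globalId.length);
        buf.put(TX_START);
        buf.put((byte) globalId.length);
        buf.put(globalId);
        buf.flip();
        return buf;
    }

    // Mirrors readTxStartRecordInto: the caller has already consumed the type
    // byte in its switch, so we read the length byte and payload and return the
    // full record size (type + length + payload), or 0 if the stream is incomplete.
    static int readTxStart(ByteBuffer buf) {
        if (!buf.hasRemaining()) {
            return 0;
        }
        int length = buf.get() & 0xFF; // treat the length byte as unsigned
        if (buf.remaining() < length) {
            return 0;
        }
        byte[] globalId = new byte[length];
        buf.get(globalId);
        return 2 + length;
    }
}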
61 |
class AddParenthesesProposal extends CorrectionProposal {
AddParenthesesProposal(Declaration dec, int offset,
TextFileChange change) {
super("Add empty parameter list to '" + dec.getName() + "'" +
(dec.getContainer() instanceof TypeDeclaration?
" in '" + ((TypeDeclaration) dec.getContainer()).getName() + "'" : ""),
change, new Region(offset, 0));
}
static void addAddParenthesesProposal(ProblemLocation problem, IFile file,
Collection<ICompletionProposal> proposals, Node node) {
Tree.Declaration decNode = (Tree.Declaration) node;
Node n = getBeforeParenthesisNode(decNode);
if (n!=null) {
int offset = n.getStopIndex();
TextFileChange change = new TextFileChange("Add Empty Parameter List", file);
change.setEdit(new InsertEdit(offset+1, "()"));
proposals.add(new AddParenthesesProposal(decNode.getDeclarationModel(),
offset+2, change));
}
}
private static Node getBeforeParenthesisNode(Tree.Declaration decNode) {
Node n = decNode.getIdentifier();
if (decNode instanceof Tree.TypeDeclaration) {
Tree.TypeParameterList tpl = ((Tree.TypeDeclaration) decNode).getTypeParameterList();
if (tpl!=null) {
n = tpl;
}
}
if (decNode instanceof Tree.AnyMethod) {
Tree.TypeParameterList tpl = ((Tree.AnyMethod) decNode).getTypeParameterList();
if (tpl!=null) {
n = tpl;
}
}
return n;
}
}
| 0true
|
plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_correct_AddParenthesesProposal.java
|
5,976 |
return new LookupFactory() {
@Override
public Lookup getLookup(FieldMapper<?> mapper, CompletionSuggestionContext suggestionContext) {
AnalyzingSuggestHolder analyzingSuggestHolder = lookupMap.get(mapper.names().indexName());
if (analyzingSuggestHolder == null) {
return null;
}
int flags = analyzingSuggestHolder.preserveSep ? XAnalyzingSuggester.PRESERVE_SEP : 0;
XAnalyzingSuggester suggester;
if (suggestionContext.isFuzzy()) {
suggester = new XFuzzySuggester(mapper.indexAnalyzer(), mapper.searchAnalyzer(), flags,
analyzingSuggestHolder.maxSurfaceFormsPerAnalyzedForm, analyzingSuggestHolder.maxGraphExpansions,
suggestionContext.getFuzzyEditDistance(), suggestionContext.isFuzzyTranspositions(),
suggestionContext.getFuzzyPrefixLength(), suggestionContext.getFuzzyMinLength(), suggestionContext.isFuzzyUnicodeAware(),
analyzingSuggestHolder.fst, analyzingSuggestHolder.hasPayloads,
analyzingSuggestHolder.maxAnalyzedPathsForOneInput, analyzingSuggestHolder.sepLabel, analyzingSuggestHolder.payloadSep, analyzingSuggestHolder.endByte,
analyzingSuggestHolder.holeCharacter);
} else {
suggester = new XAnalyzingSuggester(mapper.indexAnalyzer(), mapper.searchAnalyzer(), flags,
analyzingSuggestHolder.maxSurfaceFormsPerAnalyzedForm, analyzingSuggestHolder.maxGraphExpansions,
analyzingSuggestHolder.preservePositionIncrements, analyzingSuggestHolder.fst, analyzingSuggestHolder.hasPayloads,
analyzingSuggestHolder.maxAnalyzedPathsForOneInput, analyzingSuggestHolder.sepLabel, analyzingSuggestHolder.payloadSep, analyzingSuggestHolder.endByte,
analyzingSuggestHolder.holeCharacter);
}
return suggester;
}
@Override
public CompletionStats stats(String... fields) {
long sizeInBytes = 0;
ObjectLongOpenHashMap<String> completionFields = null;
if (fields != null && fields.length > 0) {
completionFields = new ObjectLongOpenHashMap<String>(fields.length);
}
for (Map.Entry<String, AnalyzingSuggestHolder> entry : lookupMap.entrySet()) {
sizeInBytes += entry.getValue().fst.sizeInBytes();
if (fields == null || fields.length == 0) {
continue;
}
for (String field : fields) {
// support for getting fields by regex as in fielddata
if (Regex.simpleMatch(field, entry.getKey())) {
long fstSize = entry.getValue().fst.sizeInBytes();
completionFields.addTo(field, fstSize);
}
}
}
return new CompletionStats(sizeInBytes, completionFields);
}
@Override
AnalyzingSuggestHolder getAnalyzingSuggestHolder(FieldMapper<?> mapper) {
return lookupMap.get(mapper.names().indexName());
}
@Override
public long ramBytesUsed() {
return ramBytesUsed;
}
};
| 1no label
|
src_main_java_org_elasticsearch_search_suggest_completion_AnalyzingCompletionLookupProvider.java
|
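The stats method above filters completion fields with Regex.simpleMatch-style wildcard patterns. A hedged stand-in for that check, supporting a single '*' anywhere in the pattern (not Elasticsearch's actual implementation, which handles multiple wildcards):

final class WildcardSketch {
    static boolean simpleMatch(String pattern, String value) {
        int star = pattern.indexOf('*');
        if (star == -1) {
            return pattern.equals(value); // no wildcard: exact match
        }
        String prefix = pattern.substring(0, star);
        String suffix = pattern.substring(star + 1);
        // '*' absorbs anything between a fixed prefix and suffix
        return value.length() >= prefix.length() + suffix.length()
                && value.startsWith(prefix)
                && value.endsWith(suffix);
    }
}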
72 |
@SuppressWarnings("serial")
static final class MapReduceEntriesToLongTask<K,V>
extends BulkTask<K,V,Long> {
final ObjectToLong<Map.Entry<K,V>> transformer;
final LongByLongToLong reducer;
final long basis;
long result;
MapReduceEntriesToLongTask<K,V> rights, nextRight;
MapReduceEntriesToLongTask
(BulkTask<K,V,?> p, int b, int i, int f, Node<K,V>[] t,
MapReduceEntriesToLongTask<K,V> nextRight,
ObjectToLong<Map.Entry<K,V>> transformer,
long basis,
LongByLongToLong reducer) {
super(p, b, i, f, t); this.nextRight = nextRight;
this.transformer = transformer;
this.basis = basis; this.reducer = reducer;
}
public final Long getRawResult() { return result; }
public final void compute() {
final ObjectToLong<Map.Entry<K,V>> transformer;
final LongByLongToLong reducer;
if ((transformer = this.transformer) != null &&
(reducer = this.reducer) != null) {
long r = this.basis;
for (int i = baseIndex, f, h; batch > 0 &&
(h = ((f = baseLimit) + i) >>> 1) > i;) {
addToPendingCount(1);
(rights = new MapReduceEntriesToLongTask<K,V>
(this, batch >>>= 1, baseLimit = h, f, tab,
rights, transformer, r, reducer)).fork();
}
for (Node<K,V> p; (p = advance()) != null; )
r = reducer.apply(r, transformer.apply(p));
result = r;
CountedCompleter<?> c;
for (c = firstComplete(); c != null; c = c.nextComplete()) {
@SuppressWarnings("unchecked") MapReduceEntriesToLongTask<K,V>
t = (MapReduceEntriesToLongTask<K,V>)c,
s = t.rights;
while (s != null) {
t.result = reducer.apply(t.result, s.result);
s = t.rights = s.nextRight;
}
}
}
}
}
| 0true
|
src_main_java_jsr166e_ConcurrentHashMapV8.java
|
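The fork/join task above implements a parallel transformer-plus-reducer fold over map entries. On JDK 8+ the same shape can be sketched with parallel streams (an illustrative equivalent, not the jsr166e API):

import java.util.concurrent.ConcurrentHashMap;

final class EntriesToLongSketch {
    // transformer: entry -> long; basis: 0L; reducer: Long::sum
    static long sumValueLengths(ConcurrentHashMap<String, String> map) {
        return map.entrySet()
                .parallelStream()
                .mapToLong(e -> e.getValue().length())
                .reduce(0L, Long::sum);
    }
}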
81 |
public final class ClientEndpoint implements Client {
private final ClientEngineImpl clientEngine;
private final Connection conn;
private final ConcurrentMap<String, TransactionContext> transactionContextMap
= new ConcurrentHashMap<String, TransactionContext>();
private final List<Runnable> removeListenerActions = Collections.synchronizedList(new LinkedList<Runnable>());
private final SocketAddress socketAddress;
private String uuid;
private LoginContext loginContext;
private ClientPrincipal principal;
private boolean firstConnection;
private volatile boolean authenticated;
ClientEndpoint(ClientEngineImpl clientEngine, Connection conn, String uuid) {
this.clientEngine = clientEngine;
this.conn = conn;
if (conn instanceof TcpIpConnection) {
TcpIpConnection tcpIpConnection = (TcpIpConnection) conn;
socketAddress = tcpIpConnection.getSocketChannelWrapper().socket().getRemoteSocketAddress();
} else {
socketAddress = null;
}
this.uuid = uuid;
}
Connection getConnection() {
return conn;
}
@Override
public String getUuid() {
return uuid;
}
public boolean live() {
return conn.live();
}
void setLoginContext(LoginContext loginContext) {
this.loginContext = loginContext;
}
public Subject getSubject() {
return loginContext != null ? loginContext.getSubject() : null;
}
public boolean isFirstConnection() {
return firstConnection;
}
void authenticated(ClientPrincipal principal, boolean firstConnection) {
this.principal = principal;
this.uuid = principal.getUuid();
this.firstConnection = firstConnection;
this.authenticated = true;
}
public boolean isAuthenticated() {
return authenticated;
}
public ClientPrincipal getPrincipal() {
return principal;
}
@Override
public InetSocketAddress getSocketAddress() {
return (InetSocketAddress) socketAddress;
}
@Override
public ClientType getClientType() {
switch (conn.getType()) {
case JAVA_CLIENT:
return ClientType.JAVA;
case CSHARP_CLIENT:
return ClientType.CSHARP;
case CPP_CLIENT:
return ClientType.CPP;
case PYTHON_CLIENT:
return ClientType.PYTHON;
case RUBY_CLIENT:
return ClientType.RUBY;
case BINARY_CLIENT:
return ClientType.OTHER;
default:
throw new IllegalArgumentException("Invalid connection type: " + conn.getType());
}
}
public TransactionContext getTransactionContext(String txnId) {
final TransactionContext transactionContext = transactionContextMap.get(txnId);
if (transactionContext == null) {
throw new TransactionException("No transaction context found for txnId:" + txnId);
}
return transactionContext;
}
public void setTransactionContext(TransactionContext transactionContext) {
transactionContextMap.put(transactionContext.getTxnId(), transactionContext);
}
public void removeTransactionContext(String txnId) {
transactionContextMap.remove(txnId);
}
public void setListenerRegistration(final String service, final String topic, final String id) {
removeListenerActions.add(new Runnable() {
@Override
public void run() {
EventService eventService = clientEngine.getEventService();
eventService.deregisterListener(service, topic, id);
}
});
}
public void setDistributedObjectListener(final String id) {
removeListenerActions.add(new Runnable() {
@Override
public void run() {
clientEngine.getProxyService().removeProxyListener(id);
}
});
}
public void clearAllListeners() {
for (Runnable removeAction : removeListenerActions) {
try {
removeAction.run();
} catch (Exception e) {
getLogger().warning("Exception during destroy action", e);
}
}
removeListenerActions.clear();
}
void destroy() throws LoginException {
for (Runnable removeAction : removeListenerActions) {
try {
removeAction.run();
} catch (Exception e) {
getLogger().warning("Exception during destroy action", e);
}
}
LoginContext lc = loginContext;
if (lc != null) {
lc.logout();
}
for (TransactionContext context : transactionContextMap.values()) {
Transaction transaction = TransactionAccessor.getTransaction(context);
if (context.isXAManaged() && transaction.getState() == PREPARED) {
TransactionManagerServiceImpl transactionManager =
(TransactionManagerServiceImpl) clientEngine.getTransactionManagerService();
transactionManager.addTxBackupLogForClientRecovery(transaction);
} else {
try {
context.rollbackTransaction();
} catch (HazelcastInstanceNotActiveException e) {
getLogger().finest(e);
} catch (Exception e) {
getLogger().warning(e);
}
}
}
authenticated = false;
}
private ILogger getLogger() {
return clientEngine.getLogger(getClass());
}
public void sendResponse(Object response, int callId) {
boolean isError = false;
Object clientResponseObject;
if (response == null) {
clientResponseObject = ClientEngineImpl.NULL;
} else if (response instanceof Throwable) {
isError = true;
ClientExceptionConverter converter = ClientExceptionConverters.get(getClientType());
clientResponseObject = converter.convert((Throwable) response);
} else {
clientResponseObject = response;
}
ClientResponse clientResponse = new ClientResponse(clientEngine.toData(clientResponseObject), isError, callId);
clientEngine.sendResponse(this, clientResponse);
}
public void sendEvent(Object event, int callId) {
Data data = clientEngine.toData(event);
clientEngine.sendResponse(this, new ClientResponse(data, callId, true));
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder("ClientEndpoint{");
sb.append("conn=").append(conn);
sb.append(", uuid='").append(uuid).append('\'');
sb.append(", firstConnection=").append(firstConnection);
sb.append(", authenticated=").append(authenticated);
sb.append('}');
return sb.toString();
}
}
| 1no label
|
hazelcast_src_main_java_com_hazelcast_client_ClientEndpoint.java
|
2,627 |
public final class BinaryClassDefinitionProxy extends BinaryClassDefinition implements ClassDefinition {
public BinaryClassDefinitionProxy(int factoryId, int classId, int version, byte[] binary) {
this.classId = classId;
this.version = version;
this.factoryId = factoryId;
setBinary(binary);
}
public ClassDefinition toReal(SerializationContext context) throws IOException {
final ClassDefinition cd = context.lookup(factoryId, classId, version);
return cd != null ? cd : context.createClassDefinition(factoryId, getBinary());
}
public FieldDefinition get(String name) {
throw new UnsupportedOperationException();
}
public FieldDefinition get(int fieldIndex) {
throw new UnsupportedOperationException();
}
public boolean hasField(String fieldName) {
throw new UnsupportedOperationException();
}
public Set<String> getFieldNames() {
throw new UnsupportedOperationException();
}
public FieldType getFieldType(String fieldName) {
throw new UnsupportedOperationException();
}
public int getFieldClassId(String fieldName) {
throw new UnsupportedOperationException();
}
public int getFieldCount() {
throw new UnsupportedOperationException();
}
public void writeData(ObjectDataOutput out) throws IOException {
throw new UnsupportedOperationException();
}
public void readData(ObjectDataInput in) throws IOException {
throw new UnsupportedOperationException();
}
}
| 1no label
|
hazelcast_src_main_java_com_hazelcast_nio_serialization_BinaryClassDefinitionProxy.java
|
1,580 |
public class ClassMetadata implements Serializable {
private static final long serialVersionUID = 1L;
private String ceilingType;
private ClassTree polymorphicEntities;
private Property[] properties;
private String currencyCode = "USD";
private Map<String, Property> pMap = null;
public Map<String, Property> getPMap() {
if (pMap == null) {
pMap = BLCMapUtils.keyedMap(properties, new TypedClosure<String, Property>() {
@Override
public String getKey(Property value) {
return value.getName();
}
});
}
return pMap;
}
public String getCeilingType() {
return ceilingType;
}
public void setCeilingType(String type) {
this.ceilingType = type;
}
public ClassTree getPolymorphicEntities() {
return polymorphicEntities;
}
public void setPolymorphicEntities(ClassTree polymorphicEntities) {
this.polymorphicEntities = polymorphicEntities;
}
public Property[] getProperties() {
return properties;
}
public void setProperties(Property[] properties) {
this.properties = properties;
}
public String getCurrencyCode() {
return currencyCode;
}
public void setCurrencyCode(String currencyCode) {
this.currencyCode = currencyCode;
}
}
| 1no label
|
admin_broadleaf-open-admin-platform_src_main_java_org_broadleafcommerce_openadmin_dto_ClassMetadata.java
|
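getPMap above lazily indexes the properties array by name via BLCMapUtils.keyedMap. A generic sketch of what such a helper plausibly does (assumed behavior for illustration, not Broadleaf's actual implementation):

import java.util.HashMap;
import java.util.Map;
import java.util.function.Function;

final class KeyedMapSketch {
    // Builds a lookup map from an array by deriving each element's key,
    // e.g. keyedMap(properties, Property::getName).
    static <K, V> Map<K, V> keyedMap(V[] values, Function<V, K> keyFn) {
        Map<K, V> map = new HashMap<>();
        if (values != null) {
            for (V value : values) {
                map.put(keyFn.apply(value), value);
            }
        }
        return map;
    }
}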
645 |
@Component("blAuthenticationSuccessRedirectStrategy")
public class BroadleafAuthenticationSuccessRedirectStrategy implements RedirectStrategy {
private String redirectPath="/redirect";
private RedirectStrategy redirectStrategy = new DefaultRedirectStrategy();
@Override
public void sendRedirect(HttpServletRequest request, HttpServletResponse response, String url) throws IOException {
if (BroadleafControllerUtility.isAjaxRequest(request)) {
request.getSession().setAttribute("BLC_REDIRECT_URL", url);
url = getRedirectPath();
}
redirectStrategy.sendRedirect(request, response, url);
}
public String updateLoginErrorUrlForAjax(String url) {
if (url == null) {
return null;
}
String blcAjax = BroadleafControllerUtility.BLC_AJAX_PARAMETER;
if (url.indexOf('?') >= 0) {
url = url + "&" + blcAjax + "=true";
} else {
url = url + "?" + blcAjax + "=true";
}
return url;
}
public String getRedirectPath() {
return redirectPath;
}
public void setRedirectPath(String redirectPath) {
this.redirectPath = redirectPath;
}
public RedirectStrategy getRedirectStrategy() {
return redirectStrategy;
}
public void setRedirectStrategy(RedirectStrategy redirectStrategy) {
this.redirectStrategy = redirectStrategy;
}
}
| 1no label
|
core_broadleaf-framework-web_src_main_java_org_broadleafcommerce_common_web_security_BroadleafAuthenticationSuccessRedirectStrategy.java
|
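updateLoginErrorUrlForAjax chooses '?' or '&' based on whether the URL already carries a query string. A small null-safe sketch of that decision in isolation (illustrative, not a Broadleaf utility):

final class QueryParamSketch {
    // Appends name=true, using '&' when a query string is already present.
    static String appendFlag(String url, String name) {
        if (url == null) {
            return null;
        }
        char separator = url.indexOf('?') >= 0 ? '&' : '?';
        return url + separator + name + "=true";
    }
}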
155 |
public abstract class TransactionInterceptorProvider extends Service
{
private final String name;
public TransactionInterceptorProvider( String name )
{
super( name );
this.name = name;
}
/**
* Returns the name of this provider
*
* @return The name of this provider
*/
public final String name()
{
return name;
}
/**
* Creates a TransactionInterceptor with the given datasource and options.
* It is possible for this method to return null, signifying that the
* options passed did not allow for instantiation.
*
* @param ds The datasource the TransactionInterceptor will communicate with
* @param options The options to instantiate the
* interceptor with - e.g. "false" to prevent instantiation
* @return An implementation of TransactionInterceptor or null if the
* options say so.
*/
public abstract TransactionInterceptor create( XaDataSource ds,
String options, DependencyResolver dependencyResolver );
/**
* Creates a TransactionInterceptor with the given datasource and options
* and the given TransactionInterceptor as the next in the chain.
* It is possible for this method to return null, signifying that the
* options passed did not allow for instantiation.
*
* @param ds The datasource the TransactionInterceptor will communicate with
* @param options The options to instantiate the
* interceptor with - e.g. "false" to prevent instantiation
* @param next The next interceptor in the chain - can be null
* @return An implementation of TransactionInterceptor or null if the
* options say so.
*/
public abstract TransactionInterceptor create( TransactionInterceptor next,
XaDataSource ds, String options, DependencyResolver dependencyResolver );
}
| 0true
|
community_kernel_src_main_java_org_neo4j_kernel_impl_transaction_xaframework_TransactionInterceptorProvider.java
|
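The second factory method above builds interceptors that delegate to a "next" interceptor, forming a chain. A minimal sketch of that chaining contract (hypothetical interface, not Neo4j's TransactionInterceptor API):

interface InterceptorSketch {
    void intercept(String event);
}

final class ChainedInterceptorSketch implements InterceptorSketch {
    private final InterceptorSketch next; // null terminates the chain

    ChainedInterceptorSketch(InterceptorSketch next) {
        this.next = next;
    }

    @Override
    public void intercept(String event) {
        // do this interceptor's own work here, then delegate downstream
        if (next != null) {
            next.intercept(event);
        }
    }
}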
892 |
public class TransportSearchScrollScanAction extends AbstractComponent {
private final ThreadPool threadPool;
private final ClusterService clusterService;
private final SearchServiceTransportAction searchService;
private final SearchPhaseController searchPhaseController;
@Inject
public TransportSearchScrollScanAction(Settings settings, ThreadPool threadPool, ClusterService clusterService,
SearchServiceTransportAction searchService, SearchPhaseController searchPhaseController) {
super(settings);
this.threadPool = threadPool;
this.clusterService = clusterService;
this.searchService = searchService;
this.searchPhaseController = searchPhaseController;
}
public void execute(SearchScrollRequest request, ParsedScrollId scrollId, ActionListener<SearchResponse> listener) {
new AsyncAction(request, scrollId, listener).start();
}
private class AsyncAction {
private final SearchScrollRequest request;
private final ActionListener<SearchResponse> listener;
private final ParsedScrollId scrollId;
private final DiscoveryNodes nodes;
private volatile AtomicArray<ShardSearchFailure> shardFailures;
private final AtomicArray<QueryFetchSearchResult> queryFetchResults;
private final AtomicInteger successfulOps;
private final AtomicInteger counter;
private final long startTime = System.currentTimeMillis();
private AsyncAction(SearchScrollRequest request, ParsedScrollId scrollId, ActionListener<SearchResponse> listener) {
this.request = request;
this.listener = listener;
this.scrollId = scrollId;
this.nodes = clusterService.state().nodes();
this.successfulOps = new AtomicInteger(scrollId.getContext().length);
this.counter = new AtomicInteger(scrollId.getContext().length);
this.queryFetchResults = new AtomicArray<QueryFetchSearchResult>(scrollId.getContext().length);
}
protected final ShardSearchFailure[] buildShardFailures() {
if (shardFailures == null) {
return ShardSearchFailure.EMPTY_ARRAY;
}
List<AtomicArray.Entry<ShardSearchFailure>> entries = shardFailures.asList();
ShardSearchFailure[] failures = new ShardSearchFailure[entries.size()];
for (int i = 0; i < failures.length; i++) {
failures[i] = entries.get(i).value;
}
return failures;
}
// we do our best to return the shard failures, but it's ok if it's not fully concurrently safe
// we simply try and return as much as possible
protected final void addShardFailure(final int shardIndex, ShardSearchFailure failure) {
if (shardFailures == null) {
shardFailures = new AtomicArray<ShardSearchFailure>(scrollId.getContext().length);
}
shardFailures.set(shardIndex, failure);
}
public void start() {
if (scrollId.getContext().length == 0) {
final InternalSearchResponse internalResponse = new InternalSearchResponse(new InternalSearchHits(InternalSearchHits.EMPTY, Long.parseLong(this.scrollId.getAttributes().get("total_hits")), 0.0f), null, null, null, false);
listener.onResponse(new SearchResponse(internalResponse, request.scrollId(), 0, 0, 0l, buildShardFailures()));
return;
}
int localOperations = 0;
Tuple<String, Long>[] context = scrollId.getContext();
for (int i = 0; i < context.length; i++) {
Tuple<String, Long> target = context[i];
DiscoveryNode node = nodes.get(target.v1());
if (node != null) {
if (nodes.localNodeId().equals(node.id())) {
localOperations++;
} else {
executePhase(i, node, target.v2());
}
} else {
if (logger.isDebugEnabled()) {
logger.debug("Node [" + target.v1() + "] not available for scroll request [" + scrollId.getSource() + "]");
}
successfulOps.decrementAndGet();
if (counter.decrementAndGet() == 0) {
finishHim();
}
}
}
if (localOperations > 0) {
if (request.operationThreading() == SearchOperationThreading.SINGLE_THREAD) {
threadPool.executor(ThreadPool.Names.SEARCH).execute(new Runnable() {
@Override
public void run() {
Tuple<String, Long>[] context1 = scrollId.getContext();
for (int i = 0; i < context1.length; i++) {
Tuple<String, Long> target = context1[i];
DiscoveryNode node = nodes.get(target.v1());
if (node != null && nodes.localNodeId().equals(node.id())) {
executePhase(i, node, target.v2());
}
}
}
});
} else {
boolean localAsync = request.operationThreading() == SearchOperationThreading.THREAD_PER_SHARD;
Tuple<String, Long>[] context1 = scrollId.getContext();
for (int i = 0; i < context1.length; i++) {
final Tuple<String, Long> target = context1[i];
final int shardIndex = i;
final DiscoveryNode node = nodes.get(target.v1());
if (node != null && nodes.localNodeId().equals(node.id())) {
try {
if (localAsync) {
threadPool.executor(ThreadPool.Names.SEARCH).execute(new Runnable() {
@Override
public void run() {
executePhase(shardIndex, node, target.v2());
}
});
} else {
executePhase(shardIndex, node, target.v2());
}
} catch (Throwable t) {
onPhaseFailure(t, target.v2(), shardIndex);
}
}
}
}
}
}
void executePhase(final int shardIndex, DiscoveryNode node, final long searchId) {
searchService.sendExecuteScan(node, internalScrollSearchRequest(searchId, request), new SearchServiceListener<QueryFetchSearchResult>() {
@Override
public void onResult(QueryFetchSearchResult result) {
queryFetchResults.set(shardIndex, result);
if (counter.decrementAndGet() == 0) {
finishHim();
}
}
@Override
public void onFailure(Throwable t) {
onPhaseFailure(t, searchId, shardIndex);
}
});
}
void onPhaseFailure(Throwable t, long searchId, int shardIndex) {
if (logger.isDebugEnabled()) {
logger.debug("[{}] Failed to execute query phase", t, searchId);
}
addShardFailure(shardIndex, new ShardSearchFailure(t));
successfulOps.decrementAndGet();
if (counter.decrementAndGet() == 0) {
finishHim();
}
}
private void finishHim() {
try {
innerFinishHim();
} catch (Throwable e) {
ReduceSearchPhaseException failure = new ReduceSearchPhaseException("fetch", "", e, buildShardFailures());
if (logger.isDebugEnabled()) {
logger.debug("failed to reduce search", failure);
}
listener.onFailure(failure);
}
}
private void innerFinishHim() throws IOException {
int numberOfHits = 0;
for (AtomicArray.Entry<QueryFetchSearchResult> entry : queryFetchResults.asList()) {
numberOfHits += entry.value.queryResult().topDocs().scoreDocs.length;
}
ScoreDoc[] docs = new ScoreDoc[numberOfHits];
int counter = 0;
for (AtomicArray.Entry<QueryFetchSearchResult> entry : queryFetchResults.asList()) {
ScoreDoc[] scoreDocs = entry.value.queryResult().topDocs().scoreDocs;
for (ScoreDoc scoreDoc : scoreDocs) {
scoreDoc.shardIndex = entry.index;
docs[counter++] = scoreDoc;
}
}
final InternalSearchResponse internalResponse = searchPhaseController.merge(docs, queryFetchResults, queryFetchResults);
((InternalSearchHits) internalResponse.hits()).totalHits = Long.parseLong(this.scrollId.getAttributes().get("total_hits"));
for (AtomicArray.Entry<QueryFetchSearchResult> entry : queryFetchResults.asList()) {
if (entry.value.queryResult().topDocs().scoreDocs.length < entry.value.queryResult().size()) {
// we got fewer docs than requested for this round, so this shard is done scrolling; remove it
queryFetchResults.set(entry.index, null);
}
}
String scrollId = null;
if (request.scroll() != null) {
// we rebuild the scroll id since we remove shards that we finished scrolling on
scrollId = TransportSearchHelper.buildScrollId(this.scrollId.getType(), queryFetchResults, this.scrollId.getAttributes()); // continue moving the total_hits
}
listener.onResponse(new SearchResponse(internalResponse, scrollId, this.scrollId.getContext().length, successfulOps.get(),
System.currentTimeMillis() - startTime, buildShardFailures()));
}
}
}
| 0true
|
src_main_java_org_elasticsearch_action_search_type_TransportSearchScrollScanAction.java
|
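innerFinishHim above flattens per-shard top docs into one array while stamping each ScoreDoc with its shard index, so later phases can route results back to the owning shard. The core move, isolated (a sketch over Lucene's ScoreDoc, not the full merge):

import org.apache.lucene.search.ScoreDoc;

final class ShardDocFlattenSketch {
    static ScoreDoc[] flatten(ScoreDoc[][] perShard) {
        int total = 0;
        for (ScoreDoc[] docs : perShard) {
            total += docs.length;
        }
        ScoreDoc[] all = new ScoreDoc[total];
        int i = 0;
        for (int shard = 0; shard < perShard.length; shard++) {
            for (ScoreDoc doc : perShard[shard]) {
                doc.shardIndex = shard; // remember which shard produced this hit
                all[i++] = doc;
            }
        }
        return all;
    }
}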
84 |
protected enum RESULT {
OK, ERROR, EXIT
};
| 0true
|
commons_src_main_java_com_orientechnologies_common_console_OConsoleApplication.java
|
191 |
public class ClientProperties {
public static final String PROP_CONNECTION_TIMEOUT = "hazelcast.client.connection.timeout";
public static final String PROP_CONNECTION_TIMEOUT_DEFAULT = "5000";
public static final String PROP_HEARTBEAT_INTERVAL = "hazelcast.client.heartbeat.interval";
public static final String PROP_HEARTBEAT_INTERVAL_DEFAULT = "10000";
public static final String PROP_MAX_FAILED_HEARTBEAT_COUNT = "hazelcast.client.max.failed.heartbeat.count";
public static final String PROP_MAX_FAILED_HEARTBEAT_COUNT_DEFAULT = "3";
public static final String PROP_RETRY_COUNT = "hazelcast.client.retry.count";
public static final String PROP_RETRY_COUNT_DEFAULT = "20";
public static final String PROP_RETRY_WAIT_TIME = "hazelcast.client.retry.wait.time";
public static final String PROP_RETRY_WAIT_TIME_DEFAULT = "250";
public final ClientProperty CONNECTION_TIMEOUT;
public final ClientProperty HEARTBEAT_INTERVAL;
public final ClientProperty MAX_FAILED_HEARTBEAT_COUNT;
public final ClientProperty RETRY_COUNT;
public final ClientProperty RETRY_WAIT_TIME;
public ClientProperties(ClientConfig clientConfig) {
CONNECTION_TIMEOUT = new ClientProperty(clientConfig, PROP_CONNECTION_TIMEOUT, PROP_CONNECTION_TIMEOUT_DEFAULT);
HEARTBEAT_INTERVAL = new ClientProperty(clientConfig, PROP_HEARTBEAT_INTERVAL, PROP_HEARTBEAT_INTERVAL_DEFAULT);
MAX_FAILED_HEARTBEAT_COUNT = new ClientProperty(clientConfig, PROP_MAX_FAILED_HEARTBEAT_COUNT, PROP_MAX_FAILED_HEARTBEAT_COUNT_DEFAULT);
RETRY_COUNT = new ClientProperty(clientConfig, PROP_RETRY_COUNT, PROP_RETRY_COUNT_DEFAULT);
RETRY_WAIT_TIME = new ClientProperty(clientConfig, PROP_RETRY_WAIT_TIME, PROP_RETRY_WAIT_TIME_DEFAULT);
}
public static class ClientProperty {
private final String name;
private final String value;
ClientProperty(ClientConfig config, String name) {
this(config, name, (String) null);
}
ClientProperty(ClientConfig config, String name, ClientProperty defaultValue) {
this(config, name, defaultValue != null ? defaultValue.getString() : null);
}
ClientProperty(ClientConfig config, String name, String defaultValue) {
this.name = name;
String configValue = (config != null) ? config.getProperty(name) : null;
if (configValue != null) {
value = configValue;
} else if (System.getProperty(name) != null) {
value = System.getProperty(name);
} else {
value = defaultValue;
}
}
public String getName() {
return this.name;
}
public String getValue() {
return value;
}
public int getInteger() {
return Integer.parseInt(this.value);
}
public byte getByte() {
return Byte.parseByte(this.value);
}
public boolean getBoolean() {
return Boolean.valueOf(this.value);
}
public String getString() {
return value;
}
public long getLong() {
return Long.parseLong(this.value);
}
@Override
public String toString() {
return "ClientProperty [name=" + this.name + ", value=" + this.value + "]";
}
}
}
| 1no label
|
hazelcast-client_src_main_java_com_hazelcast_client_config_ClientProperties.java
|
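The ClientProperty constructor above resolves a value by precedence: explicit client config, then JVM system property, then the built-in default. That order, distilled into a standalone helper (illustrative only):

final class PropertyResolutionSketch {
    static String resolve(String configValue, String name, String defaultValue) {
        if (configValue != null) {
            return configValue; // explicit config wins
        }
        String systemValue = System.getProperty(name);
        return systemValue != null ? systemValue : defaultValue;
    }
}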
4,223 |
public abstract class FsDirectoryService extends AbstractIndexShardComponent implements DirectoryService, StoreRateLimiting.Listener, StoreRateLimiting.Provider {
protected final FsIndexStore indexStore;
private final CounterMetric rateLimitingTimeInNanos = new CounterMetric();
public FsDirectoryService(ShardId shardId, @IndexSettings Settings indexSettings, IndexStore indexStore) {
super(shardId, indexSettings);
this.indexStore = (FsIndexStore) indexStore;
}
@Override
public final long throttleTimeInNanos() {
return rateLimitingTimeInNanos.count();
}
@Override
public final StoreRateLimiting rateLimiting() {
return indexStore.rateLimiting();
}
protected final LockFactory buildLockFactory() throws IOException {
String fsLock = componentSettings.get("lock", componentSettings.get("fs_lock", "native"));
LockFactory lockFactory = NoLockFactory.getNoLockFactory();
if (fsLock.equals("native")) {
// TODO LUCENE MONITOR: this is not needed in next Lucene version
lockFactory = new NativeFSLockFactory();
} else if (fsLock.equals("simple")) {
lockFactory = new SimpleFSLockFactory();
} else if (fsLock.equals("none")) {
lockFactory = NoLockFactory.getNoLockFactory();
}
return lockFactory;
}
@Override
public final void renameFile(Directory dir, String from, String to) throws IOException {
final FSDirectory fsDirectory = DirectoryUtils.getLeaf(dir, FSDirectory.class);
if (fsDirectory == null) {
throw new ElasticsearchIllegalArgumentException("Can not rename file on non-filesystem based directory ");
}
File directory = fsDirectory.getDirectory();
File old = new File(directory, from);
File nu = new File(directory, to);
if (nu.exists()) {
if (!nu.delete()) {
throw new IOException("Cannot delete " + nu);
}
}
if (!old.exists()) {
throw new FileNotFoundException("Can't rename from [" + from + "] to [" + to + "], from does not exist");
}
boolean renamed = false;
for (int i = 0; i < 3; i++) {
if (old.renameTo(nu)) {
renamed = true;
break;
}
try {
Thread.sleep(100);
} catch (InterruptedException e) {
throw new InterruptedIOException(e.getMessage());
}
}
if (!renamed) {
throw new IOException("Failed to rename, from [" + from + "], to [" + to + "]");
}
}
@Override
public final void fullDelete(Directory dir) throws IOException {
final FSDirectory fsDirectory = DirectoryUtils.getLeaf(dir, FSDirectory.class);
if (fsDirectory == null) {
throw new ElasticsearchIllegalArgumentException("Can not fully delete on non-filesystem based directory");
}
FileSystemUtils.deleteRecursively(fsDirectory.getDirectory());
// if we are the last ones, delete also the actual index
String[] list = fsDirectory.getDirectory().getParentFile().list();
if (list == null || list.length == 0) {
FileSystemUtils.deleteRecursively(fsDirectory.getDirectory().getParentFile());
}
}
@Override
public Directory[] build() throws IOException {
File[] locations = indexStore.shardIndexLocations(shardId);
Directory[] dirs = new Directory[locations.length];
for (int i = 0; i < dirs.length; i++) {
FileSystemUtils.mkdirs(locations[i]);
FSDirectory wrapped = newFSDirectory(locations[i], buildLockFactory());
dirs[i] = new RateLimitedFSDirectory(wrapped, this, this) ;
}
return dirs;
}
protected abstract FSDirectory newFSDirectory(File location, LockFactory lockFactory) throws IOException;
@Override
public final void onPause(long nanos) {
rateLimitingTimeInNanos.inc(nanos);
}
}
| 1no label
|
src_main_java_org_elasticsearch_index_store_fs_FsDirectoryService.java
|
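renameFile above retries File.renameTo up to three times because the old java.io API only reports failure as a boolean. With java.nio.file the same operation can be sketched as one call that throws a descriptive IOException instead (an alternative approach, not the Elasticsearch code):

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;

final class RenameSketch {
    static void rename(Path directory, String from, String to) throws IOException {
        // Replaces the destination if it already exists, matching the
        // delete-then-rename behavior of the method above.
        Files.move(directory.resolve(from), directory.resolve(to),
                StandardCopyOption.REPLACE_EXISTING);
    }
}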
660 |
public class OSBTreeIndexEngine<V> extends OSharedResourceAdaptiveExternal implements OIndexEngine<V> {
public static final String DATA_FILE_EXTENSION = ".sbt";
private ORID identity;
private OSBTree<Object, V> sbTree;
public OSBTreeIndexEngine() {
super(OGlobalConfiguration.ENVIRONMENT_CONCURRENT.getValueAsBoolean(), OGlobalConfiguration.MVRBTREE_TIMEOUT
.getValueAsInteger(), true);
}
@Override
public void init() {
}
@Override
public void flush() {
acquireSharedLock();
try {
sbTree.flush();
} finally {
releaseSharedLock();
}
}
@Override
public void create(String indexName, OIndexDefinition indexDefinition, String clusterIndexName,
OStreamSerializer valueSerializer, boolean isAutomatic) {
acquireExclusiveLock();
try {
final OBinarySerializer keySerializer;
if (indexDefinition != null) {
if (indexDefinition instanceof ORuntimeKeyIndexDefinition) {
sbTree = new OSBTree<Object, V>(DATA_FILE_EXTENSION, 1,
OGlobalConfiguration.INDEX_DURABLE_IN_NON_TX_MODE.getValueAsBoolean());
keySerializer = ((ORuntimeKeyIndexDefinition) indexDefinition).getSerializer();
} else {
if (indexDefinition.getTypes().length > 1) {
keySerializer = OCompositeKeySerializer.INSTANCE;
} else {
keySerializer = OBinarySerializerFactory.INSTANCE.getObjectSerializer(indexDefinition.getTypes()[0]);
}
sbTree = new OSBTree<Object, V>(DATA_FILE_EXTENSION, indexDefinition.getTypes().length,
OGlobalConfiguration.INDEX_DURABLE_IN_NON_TX_MODE.getValueAsBoolean());
}
} else {
sbTree = new OSBTree<Object, V>(DATA_FILE_EXTENSION, 1,
OGlobalConfiguration.INDEX_DURABLE_IN_NON_TX_MODE.getValueAsBoolean());
keySerializer = new OSimpleKeySerializer();
}
final ORecordBytes identityRecord = new ORecordBytes();
ODatabaseRecord database = getDatabase();
final OStorageLocalAbstract storageLocalAbstract = (OStorageLocalAbstract) database.getStorage().getUnderlying();
database.save(identityRecord, clusterIndexName);
identity = identityRecord.getIdentity();
sbTree.create(indexName, keySerializer, (OBinarySerializer<V>) valueSerializer,
indexDefinition != null ? indexDefinition.getTypes() : null, storageLocalAbstract);
} finally {
releaseExclusiveLock();
}
}
@Override
public void delete() {
acquireSharedLock();
try {
sbTree.delete();
} finally {
releaseSharedLock();
}
}
@Override
public void deleteWithoutLoad(String indexName) {
acquireExclusiveLock();
try {
final ODatabaseRecord database = getDatabase();
final OStorageLocalAbstract storageLocalAbstract = (OStorageLocalAbstract) database.getStorage().getUnderlying();
sbTree = new OSBTree<Object, V>(DATA_FILE_EXTENSION, 1, OGlobalConfiguration.INDEX_DURABLE_IN_NON_TX_MODE.getValueAsBoolean());
sbTree.deleteWithoutLoad(indexName, storageLocalAbstract);
} finally {
releaseExclusiveLock();
}
}
@Override
public void load(ORID indexRid, String indexName, OIndexDefinition indexDefinition, boolean isAutomatic) {
acquireExclusiveLock();
try {
final int keySize;
if (indexDefinition == null || indexDefinition instanceof ORuntimeKeyIndexDefinition)
keySize = 1;
else
keySize = indexDefinition.getTypes().length;
sbTree = new OSBTree<Object, V>(DATA_FILE_EXTENSION, keySize,
OGlobalConfiguration.INDEX_DURABLE_IN_NON_TX_MODE.getValueAsBoolean());
ODatabaseRecord database = getDatabase();
final OStorageLocalAbstract storageLocalAbstract = (OStorageLocalAbstract) database.getStorage().getUnderlying();
sbTree.load(indexName, indexDefinition != null ? indexDefinition.getTypes() : null, storageLocalAbstract);
} finally {
releaseExclusiveLock();
}
}
@Override
public boolean contains(Object key) {
acquireSharedLock();
try {
return sbTree.get(key) != null;
} finally {
releaseSharedLock();
}
}
@Override
public boolean remove(Object key) {
acquireSharedLock();
try {
return sbTree.remove(key) != null;
} finally {
releaseSharedLock();
}
}
@Override
public ORID getIdentity() {
acquireSharedLock();
try {
return identity;
} finally {
releaseSharedLock();
}
}
@Override
public void clear() {
acquireSharedLock();
try {
sbTree.clear();
} finally {
releaseSharedLock();
}
}
@Override
public Iterator<Map.Entry<Object, V>> iterator() {
acquireSharedLock();
try {
return new OSBTreeMapEntryIterator<Object, V>(sbTree);
} finally {
releaseSharedLock();
}
}
@Override
public Iterator<Map.Entry<Object, V>> inverseIterator() {
acquireSharedLock();
try {
return new OSBTreeInverseMapEntryIterator<Object, V>(sbTree);
} finally {
releaseSharedLock();
}
}
@Override
public Iterator<V> valuesIterator() {
acquireSharedLock();
try {
return new Iterator<V>() {
private final OSBTreeMapEntryIterator<Object, V> entryIterator = new OSBTreeMapEntryIterator<Object, V>(sbTree);
@Override
public boolean hasNext() {
return entryIterator.hasNext();
}
@Override
public V next() {
return entryIterator.next().getValue();
}
@Override
public void remove() {
entryIterator.remove();
}
};
} finally {
releaseSharedLock();
}
}
@Override
public Iterator<V> inverseValuesIterator() {
acquireSharedLock();
try {
return new Iterator<V>() {
private final OSBTreeInverseMapEntryIterator<Object, V> entryIterator = new OSBTreeInverseMapEntryIterator<Object, V>(
sbTree);
@Override
public boolean hasNext() {
return entryIterator.hasNext();
}
@Override
public V next() {
return entryIterator.next().getValue();
}
@Override
public void remove() {
entryIterator.remove();
}
};
} finally {
releaseSharedLock();
}
}
@Override
public Iterable<Object> keys() {
acquireSharedLock();
try {
return new Iterable<Object>() {
@Override
public Iterator<Object> iterator() {
return new Iterator<Object>() {
final OSBTreeMapEntryIterator<Object, V> entryIterator = new OSBTreeMapEntryIterator<Object, V>(sbTree);
@Override
public boolean hasNext() {
return entryIterator.hasNext();
}
@Override
public Object next() {
return entryIterator.next().getKey();
}
@Override
public void remove() {
entryIterator.remove();
}
};
}
};
} finally {
releaseSharedLock();
}
}
@Override
public void unload() {
}
@Override
public void startTransaction() {
}
@Override
public void stopTransaction() {
}
@Override
public void afterTxRollback() {
}
@Override
public void afterTxCommit() {
}
@Override
public void closeDb() {
}
@Override
public void close() {
acquireSharedLock();
try {
sbTree.close();
} finally {
releaseSharedLock();
}
}
@Override
public void beforeTxBegin() {
}
@Override
public V get(Object key) {
acquireSharedLock();
try {
return sbTree.get(key);
} finally {
releaseSharedLock();
}
}
@Override
public void put(Object key, V value) {
acquireSharedLock();
try {
sbTree.put(key, value);
} finally {
releaseSharedLock();
}
}
@Override
public void getValuesBetween(Object rangeFrom, boolean fromInclusive, Object rangeTo, boolean toInclusive,
final ValuesTransformer<V> transformer, final ValuesResultListener valuesResultListener) {
acquireSharedLock();
try {
sbTree.loadEntriesBetween(rangeFrom, fromInclusive, rangeTo, toInclusive, new OSBTree.RangeResultListener<Object, V>() {
@Override
public boolean addResult(Map.Entry<Object, V> entry) {
return addToResult(transformer, valuesResultListener, entry.getValue());
}
});
} finally {
releaseSharedLock();
}
}
@Override
public void getValuesMajor(Object fromKey, boolean isInclusive, final ValuesTransformer<V> transformer,
final ValuesResultListener valuesResultListener) {
acquireSharedLock();
try {
sbTree.loadEntriesMajor(fromKey, isInclusive, new OSBTree.RangeResultListener<Object, V>() {
@Override
public boolean addResult(Map.Entry<Object, V> entry) {
return addToResult(transformer, valuesResultListener, entry.getValue());
}
});
} finally {
releaseSharedLock();
}
}
@Override
public void getValuesMinor(Object toKey, boolean isInclusive, final ValuesTransformer<V> transformer,
final ValuesResultListener valuesResultListener) {
acquireSharedLock();
try {
sbTree.loadEntriesMinor(toKey, isInclusive, new OSBTree.RangeResultListener<Object, V>() {
@Override
public boolean addResult(Map.Entry<Object, V> entry) {
return addToResult(transformer, valuesResultListener, entry.getValue());
}
});
} finally {
releaseSharedLock();
}
}
@Override
public void getEntriesMajor(Object fromKey, boolean isInclusive, final ValuesTransformer<V> transformer,
final EntriesResultListener entriesResultListener) {
acquireSharedLock();
try {
sbTree.loadEntriesMajor(fromKey, isInclusive, new OTreeInternal.RangeResultListener<Object, V>() {
@Override
public boolean addResult(Map.Entry<Object, V> entry) {
final Object key = entry.getKey();
final V value = entry.getValue();
return addToEntriesResult(transformer, entriesResultListener, key, value);
}
});
} finally {
releaseSharedLock();
}
}
@Override
public void getEntriesMinor(Object toKey, boolean isInclusive, final ValuesTransformer<V> transformer,
final EntriesResultListener entriesResultListener) {
acquireSharedLock();
try {
sbTree.loadEntriesMinor(toKey, isInclusive, new OTreeInternal.RangeResultListener<Object, V>() {
@Override
public boolean addResult(Map.Entry<Object, V> entry) {
final Object key = entry.getKey();
final V value = entry.getValue();
return addToEntriesResult(transformer, entriesResultListener, key, value);
}
});
} finally {
releaseSharedLock();
}
}
@Override
public void getEntriesBetween(Object rangeFrom, Object rangeTo, boolean inclusive, final ValuesTransformer<V> transformer,
final EntriesResultListener entriesResultListener) {
acquireSharedLock();
try {
sbTree.loadEntriesBetween(rangeFrom, inclusive, rangeTo, inclusive, new OTreeInternal.RangeResultListener<Object, V>() {
@Override
public boolean addResult(Map.Entry<Object, V> entry) {
final Object key = entry.getKey();
final V value = entry.getValue();
return addToEntriesResult(transformer, entriesResultListener, key, value);
}
});
} finally {
releaseSharedLock();
}
}
@Override
public long size(final ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
if (transformer == null)
return sbTree.size();
else {
final ItemsCounter<V> counter = new ItemsCounter<V>(transformer, -1);
final Object firstKey = sbTree.firstKey();
final Object lastKey = sbTree.lastKey();
if (firstKey != null && lastKey != null) {
sbTree.loadEntriesBetween(firstKey, true, lastKey, true, counter);
return counter.count;
}
return 0;
}
} finally {
releaseSharedLock();
}
}
@Override
public long count(Object rangeFrom, boolean fromInclusive, Object rangeTo, boolean toInclusive, int maxValuesToFetch,
ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
final ItemsCounter<V> itemsCounter = new ItemsCounter<V>(transformer, maxValuesToFetch);
if (rangeTo != null)
sbTree.loadEntriesBetween(rangeFrom, fromInclusive, rangeTo, toInclusive, itemsCounter);
else
sbTree.loadEntriesMajor(rangeFrom, fromInclusive, itemsCounter);
return itemsCounter.count;
} finally {
releaseSharedLock();
}
}
@Override
public boolean hasRangeQuerySupport() {
return true;
}
private ODatabaseRecord getDatabase() {
return ODatabaseRecordThreadLocal.INSTANCE.get();
}
private boolean addToResult(ValuesTransformer<V> transformer, ValuesResultListener resultListener, V value) {
if (transformer != null) {
Collection<OIdentifiable> transformResult = transformer.transformFromValue(value);
for (OIdentifiable transformedValue : transformResult) {
boolean cont = resultListener.addResult(transformedValue);
if (!cont)
return false;
}
return true;
} else
return resultListener.addResult((OIdentifiable) value);
}
private boolean addToEntriesResult(ValuesTransformer<V> transformer, EntriesResultListener entriesResultListener, Object key,
V value) {
if (transformer != null) {
Collection<OIdentifiable> transformResult = transformer.transformFromValue(value);
for (OIdentifiable transformedValue : transformResult) {
final ODocument document = new ODocument();
document.field("key", key);
document.field("rid", transformedValue.getIdentity());
document.unsetDirty();
boolean cont = entriesResultListener.addResult(document);
if (!cont)
return false;
}
return true;
} else {
final ODocument document = new ODocument();
document.field("key", key);
document.field("rid", ((OIdentifiable) value).getIdentity());
document.unsetDirty();
return entriesResultListener.addResult(document);
}
}
private static final class ItemsCounter<V> implements OSBTree.RangeResultListener<Object, V> {
private final ValuesTransformer<V> valuesTransformer;
private final int maxValuesToFetch;
private ItemsCounter(ValuesTransformer<V> valuesTransformer, int maxValuesToFetch) {
this.valuesTransformer = valuesTransformer;
this.maxValuesToFetch = maxValuesToFetch;
}
private int count;
@Override
public boolean addResult(Map.Entry<Object, V> entry) {
if (valuesTransformer != null)
count += valuesTransformer.transformFromValue(entry.getValue()).size();
else
count++;
if (maxValuesToFetch > 0 && count >= maxValuesToFetch)
return false;
return true;
}
}
}
| 1no label
|
core_src_main_java_com_orientechnologies_orient_core_index_engine_OSBTreeIndexEngine.java
|
1,424 |
clusterService.submitStateUpdateTask("refresh-mapping [" + index + "][" + Arrays.toString(types) + "]", Priority.HIGH, new ClusterStateUpdateTask() {
@Override
public void onFailure(String source, Throwable t) {
logger.warn("failure during [{}]", t, source);
}
@Override
public ClusterState execute(ClusterState currentState) throws Exception {
return executeRefreshOrUpdate(currentState, insertOrder);
}
});
| 0true
|
src_main_java_org_elasticsearch_cluster_metadata_MetaDataMappingService.java
|
282 |
@RunWith(HazelcastParallelClassRunner.class)
@Category(QuickTest.class)
public class ClientRandomLBTest {
@AfterClass
public static void destroy() {
HazelcastClient.shutdownAll();
Hazelcast.shutdownAll();
}
@Test
public void testRandomLB_withoutMembers() {
RandomLB lb = new RandomLB();
Member m = lb.next();
assertNull(m);
}
@Test
public void testRandomLB_withMembers() {
RandomLB randomLB = new RandomLB();
TestHazelcastInstanceFactory factory = new TestHazelcastInstanceFactory(1);
HazelcastInstance server = factory.newHazelcastInstance();
Cluster cluster = server.getCluster();
ClientConfig clientConfig = new ClientConfig();
clientConfig.setLoadBalancer(randomLB);
randomLB.init(cluster, clientConfig);
Member member = cluster.getLocalMember();
Member nextMember = randomLB.next();
assertEquals(member, nextMember);
}
}
| 0true
|
hazelcast-client_src_test_java_com_hazelcast_client_loadBalancer_ClientRandomLBTest.java
|
1,127 |
public static class Factory implements NativeScriptFactory {
@Override
public ExecutableScript newScript(@Nullable Map<String, Object> params) {
return new NativeConstantScoreScript();
}
}
| 0true
|
src_test_java_org_elasticsearch_benchmark_scripts_score_script_NativeConstantScoreScript.java
|
335 |
public interface ODatabaseComplex<T extends Object> extends ODatabase, OUserObject2RecordHandler {
public enum OPERATION_MODE {
SYNCHRONOUS, ASYNCHRONOUS, ASYNCHRONOUS_NOANSWER
}
/**
* Creates a new entity instance.
*
* @return The new instance.
*/
public <RET extends Object> RET newInstance();
/**
* Returns the Dictionary manual index.
*
* @return ODictionary instance
*/
public ODictionary<T> getDictionary();
/**
* Returns the current user logged into the database.
*
* @see OSecurity
*/
public OUser getUser();
/**
* Set user for current database instance
*/
public void setUser(OUser user);
/**
* Loads the entity and return it.
*
* @param iObject
* The entity to load. If the entity was already loaded it will be reloaded and all the changes will be lost.
* @return
*/
public <RET extends T> RET load(T iObject);
/**
* Loads a record using a fetch plan.
*
* @param iObject
* Record to load
* @param iFetchPlan
* Fetch plan used
* @return The record received
*/
public <RET extends T> RET load(T iObject, String iFetchPlan);
/**
* Loads a record using a fetch plan.
*
* @param iObject
* Record to load
* @param iFetchPlan
* Fetch plan used
* @return The record received
*/
public <RET extends T> RET load(T iObject, String iFetchPlan, boolean iIgnoreCache, boolean loadTombstone);
/**
* Loads a record using a fetch plan.
*
* @param iObject
* Record to load
* @param iFetchPlan
* Fetch plan used
* @param iIgnoreCache
* Ignore cache or use it
* @return The record received
*/
public <RET extends T> RET load(T iObject, String iFetchPlan, boolean iIgnoreCache);
/**
* Force the reloading of the entity.
*
* @param iObject
* The entity to reload. If the entity was already loaded, it will be reloaded and all the changes will be lost.
* @param iFetchPlan
* Fetch plan used
* @param iIgnoreCache
* Ignore cache or use it
* @return The loaded entity
*/
public <RET extends T> RET reload(final T iObject, String iFetchPlan, boolean iIgnoreCache);
/**
* Loads the entity by the Record ID.
*
* @param iRecordId
* The unique record id of the entity to load.
* @return The loaded entity
*/
public <RET extends T> RET load(ORID iRecordId);
/**
* Loads the entity by the Record ID using a fetch plan.
*
* @param iRecordId
* The unique record id of the entity to load.
* @param iFetchPlan
* Fetch plan used
* @return The loaded entity
*/
public <RET extends T> RET load(ORID iRecordId, String iFetchPlan);
/**
* Loads the entity by the Record ID using a fetch plan and specifying if the cache must be ignored.
*
* @param iRecordId
* The unique record id of the entity to load.
* @param iFetchPlan
* Fetch plan used
* @param iIgnoreCache
* Ignore cache or use it
* @return The loaded entity
*/
public <RET extends T> RET load(ORID iRecordId, String iFetchPlan, boolean iIgnoreCache);
public <RET extends T> RET load(ORID iRecordId, String iFetchPlan, boolean iIgnoreCache, boolean loadTombstone);
/**
* Saves an entity in synchronous mode. If the entity is not dirty, then the operation will be ignored. For custom entity
* implementations, make sure to set the entity as dirty.
*
* @param iObject
* The entity to save
* @return The saved entity.
*/
public <RET extends T> RET save(T iObject);
/**
* Saves an entity specifying the mode. If the entity is not dirty, then the operation will be ignored. For custom entity
* implementations, make sure to set the entity as dirty. If the cluster does not exist, an error will be thrown.
*
*
* @param iObject
* The entity to save
* @param iMode
* Mode of save: synchronous (default) or asynchronous
* @param iForceCreate
* Flag that indicates that record should be created. If record with current rid already exists, exception is thrown
* @param iRecordCreatedCallback
* @param iRecordUpdatedCallback
*/
public <RET extends T> RET save(T iObject, OPERATION_MODE iMode, boolean iForceCreate,
ORecordCallback<? extends Number> iRecordCreatedCallback, ORecordCallback<ORecordVersion> iRecordUpdatedCallback);
/**
* Saves an entity in the specified cluster in synchronous mode. If the entity is not dirty, then the operation will be ignored.
* For custom entity implementations, make sure to set the entity as dirty. If the cluster does not exist, an error will be thrown.
*
* @param iObject
* The entity to save
* @param iClusterName
* Name of the cluster where to save
* @return The saved entity.
*/
public <RET extends T> RET save(T iObject, String iClusterName);
public boolean updatedReplica(T iObject);
/**
* Saves an entity in the specified cluster specifying the mode. If the entity is not dirty, then the operation will be ignored.
* For custom entity implementations, make sure to set the entity as dirty. If the cluster does not exist, an error will be thrown.
*
*
* @param iObject
* The entity to save
* @param iClusterName
* Name of the cluster where to save
* @param iMode
* Mode of save: synchronous (default) or asynchronous
* @param iForceCreate
* Flag that indicates that record should be created. If record with current rid already exists, exception is thrown
* @param iRecordCreatedCallback
* @param iRecordUpdatedCallback
*/
public <RET extends T> RET save(T iObject, String iClusterName, OPERATION_MODE iMode, boolean iForceCreate,
ORecordCallback<? extends Number> iRecordCreatedCallback, ORecordCallback<ORecordVersion> iRecordUpdatedCallback);
/**
* Deletes an entity from the database in synchronous mode.
*
* @param iObject
* The entity to delete.
* @return The Database instance itself giving a "fluent interface". Useful to call multiple methods in chain.
*/
public ODatabaseComplex<T> delete(T iObject);
/**
* Deletes the entity with the received RID from the database.
*
* @param iRID
* The RecordID to delete.
* @return The Database instance itself giving a "fluent interface". Useful to call multiple methods in chain.
*/
public ODatabaseComplex<T> delete(ORID iRID);
/**
* Deletes the entity with the received RID from the database.
*
* @param iRID
* The RecordID to delete.
* @param iVersion
* for MVCC
* @return The Database instance itself giving a "fluent interface". Useful to call multiple methods in chain.
*/
public ODatabaseComplex<T> delete(ORID iRID, ORecordVersion iVersion);
public ODatabaseComplex<T> cleanOutRecord(ORID rid, ORecordVersion version);
/**
* Returns the active transaction. Cannot be null. If no transaction is active, then an OTransactionNoTx instance is returned.
*
* @return OTransaction implementation
*/
public OTransaction getTransaction();
/**
* Begins a new transaction. By default the type is OPTIMISTIC. If a previous transaction was started, it will be rolled back
* and closed before the new one starts. A transaction once begun has to be closed by calling the {@link #commit()} or
* {@link #rollback()}.
*
* @return The Database instance itself giving a "fluent interface". Useful to call multiple methods in chain.
*/
public ODatabaseComplex<T> begin();
/**
* Begins a new transaction specifying the transaction type. If a previous transaction was started, it will be rolled back
* and closed before the new one starts. A transaction once begun has to be closed by calling the {@link #commit()} or
* {@link #rollback()}.
*
* @return The Database instance itself giving a "fluent interface". Useful to call multiple methods in chain.
*/
public ODatabaseComplex<T> begin(TXTYPE iStatus);
/**
* Attaches a transaction as current.
*
* @return The Database instance itself giving a "fluent interface". Useful to call multiple methods in chain.
*/
public ODatabaseComplex<T> begin(OTransaction iTx) throws OTransactionException;
/**
* Commits the current transaction. The approach is all or nothing. All changes will be permanent following the storage type. If
* the operation succeeds, all the entities changed inside the transaction context become effective. If the operation fails, all
* the changed entities will be restored in the datastore. In-memory instances are not guaranteed to be restored as well.
*
* @return
*/
public ODatabaseComplex<T> commit() throws OTransactionException;
/**
* Aborts the currently running transaction. All the pending changed entities will be restored in the datastore. In-memory
* instances are not guaranteed to be restored as well.
*
* @return
*/
public ODatabaseComplex<T> rollback() throws OTransactionException;
/**
* Execute a query against the database.
*
* @param iCommand
* Query command
* @param iArgs
* Optional parameters to bind to the query
* @return List of POJOs
*/
public <RET extends List<?>> RET query(final OQuery<?> iCommand, final Object... iArgs);
/**
* Execute a command against the database. A command can be a SQL statement or a Procedure. If the OStorage used is remote
* (OStorageRemote) then the command will be executed remotely and the result returned back to the calling client.
*
* @param iCommand
* Command request to execute.
* @return The same Command request received as parameter.
* @see OStorageRemote
*/
public <RET extends OCommandRequest> RET command(OCommandRequest iCommand);
/**
* Return the OMetadata instance. Cannot be null.
*
* @return The OMetadata instance.
*/
public OMetadata getMetadata();
/**
* Returns the database owner. Used by wrapped instances to know the upper-level ODatabase instance.
*
* @return Returns the database owner.
*/
public ODatabaseComplex<?> getDatabaseOwner();
/**
* Internal. Sets the database owner.
*/
public ODatabaseComplex<?> setDatabaseOwner(ODatabaseComplex<?> iOwner);
/**
* Returns the underlying database. Used by wrapper instances to know the lower-level ODatabase instance.
*
* @return The underlying ODatabase implementation.
*/
public <DB extends ODatabase> DB getUnderlying();
/**
* Internal method. Don't call it directly unless you're building an internal component.
*/
public void setInternal(ATTRIBUTES attribute, Object iValue);
/**
* Registers a hook to listen to all record events.
*
* @param iHookImpl
* ORecordHook implementation
* @return The Database instance itself giving a "fluent interface". Useful to call multiple methods in chain.
*/
public <DB extends ODatabaseComplex<?>> DB registerHook(ORecordHook iHookImpl);
public <DB extends ODatabaseComplex<?>> DB registerHook(final ORecordHook iHookImpl, HOOK_POSITION iPosition);
/**
* Retrieves all the registered hooks.
*
* @return A not-null unmodifiable set of ORecordHook instances. If there are no hooks registered, the Set is empty.
*/
public Set<ORecordHook> getHooks();
/**
* Unregisters a previously registered hook.
*
* @param iHookImpl
* ORecordHook implementation
* @return The Database instance itself giving a "fluent interface". Useful to call multiple methods in chain.
*/
public <DB extends ODatabaseComplex<?>> DB unregisterHook(ORecordHook iHookImpl);
/**
* Invokes the callback on all the configured hooks.
*
* @param iObject
* The object passed; it changes based on the Database implementation: records for {@link ODatabaseRecord} implementations
* and POJOs for {@link ODatabaseObject} implementations.
* @return True if the input record is changed, otherwise false
*/
public RESULT callbackHooks(TYPE iType, OIdentifiable iObject);
/**
* Returns whether Multi-Version Concurrency Control is enabled. If enabled, the record version is checked against the
* stored record before each update and delete.
*
* @return true if enabled, otherwise false
* @see ODatabaseRecord#setMVCC(boolean)
*/
public boolean isMVCC();
/**
* Enables or disables Multi-Version Concurrency Control. If enabled, the record version is checked against the stored
* record before each update and delete.
*
* @param iValue
* @see ODatabaseRecord#isMVCC()
* @return The Database instance itself giving a "fluent interface". Useful to call multiple methods in chain.
*/
public <DB extends ODatabaseComplex<?>> DB setMVCC(boolean iValue);
public String getType();
}
| 0true
|
core_src_main_java_com_orientechnologies_orient_core_db_ODatabaseComplex.java
|
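A minimal usage sketch of the transaction lifecycle documented by the interface above. `db` stands for any concrete ODatabaseComplex implementation, and the cluster name "customers" is hypothetical; the save overload used is the one declared in the record.
void saveAtomically(ODatabaseComplex<Object> db, Object entity) {
    db.begin(); // OPTIMISTIC by default
    try {
        db.save(entity, "customers", OPERATION_MODE.SYNCHRONOUS, false, null, null);
        db.commit(); // all or nothing
    } catch (RuntimeException e) {
        db.rollback(); // pending changes are restored in the datastore
        throw e;
    }
}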
786 |
public final class AtomicLongDataSerializerHook implements DataSerializerHook {
public static final int F_ID = FactoryIdHelper.getFactoryId(FactoryIdHelper.ATOMIC_LONG_DS_FACTORY, -17);
public static final int ADD_BACKUP = 0;
public static final int ADD_AND_GET = 1;
public static final int ALTER = 2;
public static final int ALTER_AND_GET = 3;
public static final int APPLY = 4;
public static final int COMPARE_AND_SET = 5;
public static final int GET = 6;
public static final int GET_AND_SET = 7;
public static final int GET_AND_ALTER = 8;
public static final int GET_AND_ADD = 9;
public static final int SET_OPERATION = 10;
public static final int SET_BACKUP = 11;
public static final int REPLICATION = 12;
@Override
public int getFactoryId() {
return F_ID;
}
@Override
public DataSerializableFactory createFactory() {
return new DataSerializableFactory() {
@Override
public IdentifiedDataSerializable create(int typeId) {
switch (typeId) {
case ADD_BACKUP:
return new AddBackupOperation();
case ADD_AND_GET:
return new AddAndGetOperation();
case ALTER:
return new AlterOperation();
case ALTER_AND_GET:
return new AlterAndGetOperation();
case APPLY:
return new ApplyOperation();
case COMPARE_AND_SET:
return new CompareAndSetOperation();
case GET:
return new GetOperation();
case GET_AND_SET:
return new GetAndSetOperation();
case GET_AND_ALTER:
return new GetAndAlterOperation();
case GET_AND_ADD:
return new GetAndAddOperation();
case SET_OPERATION:
return new SetOperation();
case SET_BACKUP:
return new SetBackupOperation();
case REPLICATION:
return new AtomicLongReplicationOperation();
default:
return null;
}
}
};
}
}
| 0true
|
hazelcast_src_main_java_com_hazelcast_concurrent_atomiclong_AtomicLongDataSerializerHook.java
|
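A hedged sketch of how the factory above resolves type ids during deserialization; the serialization service that normally drives this is assumed and omitted.
DataSerializableFactory factory = new AtomicLongDataSerializerHook().createFactory();
IdentifiedDataSerializable op = factory.create(AtomicLongDataSerializerHook.ADD_AND_GET);
// op is an empty AddAndGetOperation, ready to be populated via readData(in)
assert op instanceof AddAndGetOperation;
assert factory.create(42) == null; // unknown type ids fall through to the default branch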
4,676 |
abstract class QueryCollector extends Collector {
final IndexFieldData<?> idFieldData;
final IndexSearcher searcher;
final ConcurrentMap<HashedBytesRef, Query> queries;
final ESLogger logger;
final Lucene.ExistsCollector collector = new Lucene.ExistsCollector();
final HashedBytesRef spare = new HashedBytesRef(new BytesRef());
BytesValues values;
final List<Collector> facetCollectors = new ArrayList<Collector>();
final Collector facetAndAggregatorCollector;
QueryCollector(ESLogger logger, PercolateContext context) {
this.logger = logger;
this.queries = context.percolateQueries();
this.searcher = context.docSearcher();
final FieldMapper<?> idMapper = context.mapperService().smartNameFieldMapper(IdFieldMapper.NAME);
this.idFieldData = context.fieldData().getForField(idMapper);
if (context.facets() != null) {
for (SearchContextFacets.Entry entry : context.facets().entries()) {
if (entry.isGlobal()) {
continue; // not supported for now
}
Collector collector = entry.getFacetExecutor().collector();
if (entry.getFilter() != null) {
if (collector instanceof NestedFacetExecutor.Collector) {
collector = new NestedFacetExecutor.Collector((NestedFacetExecutor.Collector) collector, entry.getFilter());
} else {
collector = new FilteredCollector(collector, entry.getFilter());
}
}
facetCollectors.add(collector);
}
}
List<Collector> collectors = new ArrayList<Collector>(facetCollectors);
if (context.aggregations() != null) {
AggregationContext aggregationContext = new AggregationContext(context);
context.aggregations().aggregationContext(aggregationContext);
List<Aggregator> aggregatorCollectors = new ArrayList<Aggregator>();
Aggregator[] aggregators = context.aggregations().factories().createTopLevelAggregators(aggregationContext);
for (int i = 0; i < aggregators.length; i++) {
if (!(aggregators[i] instanceof GlobalAggregator)) {
Aggregator aggregator = aggregators[i];
if (aggregator.shouldCollect()) {
aggregatorCollectors.add(aggregator);
}
}
}
context.aggregations().aggregators(aggregators);
if (!aggregatorCollectors.isEmpty()) {
collectors.add(new AggregationPhase.AggregationsCollector(aggregatorCollectors, aggregationContext));
}
}
int size = collectors.size();
if (size == 0) {
facetAndAggregatorCollector = null;
} else if (size == 1) {
facetAndAggregatorCollector = collectors.get(0);
} else {
facetAndAggregatorCollector = MultiCollector.wrap(collectors.toArray(new Collector[collectors.size()]));
}
}
@Override
public void setScorer(Scorer scorer) throws IOException {
if (facetAndAggregatorCollector != null) {
facetAndAggregatorCollector.setScorer(scorer);
}
}
@Override
public void setNextReader(AtomicReaderContext context) throws IOException {
// we use the UID because id might not be indexed
values = idFieldData.load(context).getBytesValues(true);
if (facetAndAggregatorCollector != null) {
facetAndAggregatorCollector.setNextReader(context);
}
}
@Override
public boolean acceptsDocsOutOfOrder() {
return true;
}
static Match match(ESLogger logger, PercolateContext context, HighlightPhase highlightPhase) {
return new Match(logger, context, highlightPhase);
}
static Count count(ESLogger logger, PercolateContext context) {
return new Count(logger, context);
}
static MatchAndScore matchAndScore(ESLogger logger, PercolateContext context, HighlightPhase highlightPhase) {
return new MatchAndScore(logger, context, highlightPhase);
}
static MatchAndSort matchAndSort(ESLogger logger, PercolateContext context) {
return new MatchAndSort(logger, context);
}
protected final Query getQuery(int doc) {
final int numValues = values.setDocument(doc);
if (numValues == 0) {
return null;
}
assert numValues == 1;
spare.reset(values.nextValue(), values.currentValueHash());
return queries.get(spare);
}
final static class Match extends QueryCollector {
final PercolateContext context;
final HighlightPhase highlightPhase;
final List<BytesRef> matches = new ArrayList<BytesRef>();
final List<Map<String, HighlightField>> hls = new ArrayList<Map<String, HighlightField>>();
final boolean limit;
final int size;
long counter = 0;
Match(ESLogger logger, PercolateContext context, HighlightPhase highlightPhase) {
super(logger, context);
this.limit = context.limit;
this.size = context.size;
this.context = context;
this.highlightPhase = highlightPhase;
}
@Override
public void collect(int doc) throws IOException {
final Query query = getQuery(doc);
if (query == null) {
// log???
return;
}
// run the query
try {
collector.reset();
if (context.highlight() != null) {
context.parsedQuery(new ParsedQuery(query, ImmutableMap.<String, Filter>of()));
context.hitContext().cache().clear();
}
searcher.search(query, collector);
if (collector.exists()) {
if (!limit || counter < size) {
matches.add(values.copyShared());
if (context.highlight() != null) {
highlightPhase.hitExecute(context, context.hitContext());
hls.add(context.hitContext().hit().getHighlightFields());
}
}
counter++;
if (facetAndAggregatorCollector != null) {
facetAndAggregatorCollector.collect(doc);
}
}
} catch (IOException e) {
logger.warn("[" + spare.bytes.utf8ToString() + "] failed to execute query", e);
}
}
long counter() {
return counter;
}
List<BytesRef> matches() {
return matches;
}
List<Map<String, HighlightField>> hls() {
return hls;
}
}
final static class MatchAndSort extends QueryCollector {
private final TopScoreDocCollector topDocsCollector;
MatchAndSort(ESLogger logger, PercolateContext context) {
super(logger, context);
// TODO: Use TopFieldCollector.create(...) for ascending and descending scoring?
topDocsCollector = TopScoreDocCollector.create(context.size, false);
}
@Override
public void collect(int doc) throws IOException {
final Query query = getQuery(doc);
if (query == null) {
// log???
return;
}
// run the query
try {
collector.reset();
searcher.search(query, collector);
if (collector.exists()) {
topDocsCollector.collect(doc);
if (facetAndAggregatorCollector != null) {
facetAndAggregatorCollector.collect(doc);
}
}
} catch (IOException e) {
logger.warn("[" + spare.bytes.utf8ToString() + "] failed to execute query", e);
}
}
@Override
public void setNextReader(AtomicReaderContext context) throws IOException {
super.setNextReader(context);
topDocsCollector.setNextReader(context);
}
@Override
public void setScorer(Scorer scorer) throws IOException {
topDocsCollector.setScorer(scorer);
}
TopDocs topDocs() {
return topDocsCollector.topDocs();
}
}
final static class MatchAndScore extends QueryCollector {
final PercolateContext context;
final HighlightPhase highlightPhase;
final List<BytesRef> matches = new ArrayList<BytesRef>();
final List<Map<String, HighlightField>> hls = new ArrayList<Map<String, HighlightField>>();
// TODO: Use thread local in order to cache the scores lists?
final FloatArrayList scores = new FloatArrayList();
final boolean limit;
final int size;
long counter = 0;
private Scorer scorer;
MatchAndScore(ESLogger logger, PercolateContext context, HighlightPhase highlightPhase) {
super(logger, context);
this.limit = context.limit;
this.size = context.size;
this.context = context;
this.highlightPhase = highlightPhase;
}
@Override
public void collect(int doc) throws IOException {
final Query query = getQuery(doc);
if (query == null) {
// log???
return;
}
// run the query
try {
collector.reset();
if (context.highlight() != null) {
context.parsedQuery(new ParsedQuery(query, ImmutableMap.<String, Filter>of()));
context.hitContext().cache().clear();
}
searcher.search(query, collector);
if (collector.exists()) {
if (!limit || counter < size) {
matches.add(values.copyShared());
scores.add(scorer.score());
if (context.highlight() != null) {
highlightPhase.hitExecute(context, context.hitContext());
hls.add(context.hitContext().hit().getHighlightFields());
}
}
counter++;
if (facetAndAggregatorCollector != null) {
facetAndAggregatorCollector.collect(doc);
}
}
} catch (IOException e) {
logger.warn("[" + spare.bytes.utf8ToString() + "] failed to execute query", e);
}
}
@Override
public void setScorer(Scorer scorer) throws IOException {
this.scorer = scorer;
}
long counter() {
return counter;
}
List<BytesRef> matches() {
return matches;
}
FloatArrayList scores() {
return scores;
}
List<Map<String, HighlightField>> hls() {
return hls;
}
}
final static class Count extends QueryCollector {
private long counter = 0;
Count(ESLogger logger, PercolateContext context) {
super(logger, context);
}
@Override
public void collect(int doc) throws IOException {
final Query query = getQuery(doc);
if (query == null) {
// log???
return;
}
// run the query
try {
collector.reset();
searcher.search(query, collector);
if (collector.exists()) {
counter++;
if (facetAndAggregatorCollector != null) {
facetAndAggregatorCollector.collect(doc);
}
}
} catch (IOException e) {
logger.warn("[" + spare.bytes.utf8ToString() + "] failed to execute query", e);
}
}
long counter() {
return counter;
}
}
}
| 1no label
|
src_main_java_org_elasticsearch_percolator_QueryCollector.java
|
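The control flow all four collectors above share, reduced to a sketch; idOf and onMatch are hypothetical stand-ins for getQuery(doc)'s id lookup and the per-collector match handling.
// inside collect(int doc):
Query query = queries.get(idOf(doc));   // registered percolator query for this doc's id
if (query == null) {
    return;                             // no query registered under that id
}
collector.reset();
searcher.search(query, collector);      // run it against the single-document searcher
if (collector.exists()) {
    onMatch(doc);                       // Match records the id, Count increments, MatchAndScore also scores
}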
810 |
public class TransportShardMultiPercolateAction extends TransportShardSingleOperationAction<TransportShardMultiPercolateAction.Request, TransportShardMultiPercolateAction.Response> {
private final PercolatorService percolatorService;
@Inject
public TransportShardMultiPercolateAction(Settings settings, ThreadPool threadPool, ClusterService clusterService, TransportService transportService, PercolatorService percolatorService) {
super(settings, threadPool, clusterService, transportService);
this.percolatorService = percolatorService;
}
@Override
protected String transportAction() {
return "mpercolate/shard";
}
@Override
protected String executor() {
return ThreadPool.Names.PERCOLATE;
}
@Override
protected Request newRequest() {
return new Request();
}
@Override
protected Response newResponse() {
return new Response();
}
@Override
protected ClusterBlockException checkGlobalBlock(ClusterState state, Request request) {
return state.blocks().globalBlockedException(ClusterBlockLevel.READ);
}
@Override
protected ClusterBlockException checkRequestBlock(ClusterState state, Request request) {
return state.blocks().indexBlockedException(ClusterBlockLevel.READ, request.index());
}
@Override
protected ShardIterator shards(ClusterState state, Request request) throws ElasticsearchException {
return clusterService.operationRouting().getShards(
clusterService.state(), request.index(), request.shardId(), request.preference
);
}
@Override
protected Response shardOperation(Request request, int shardId) throws ElasticsearchException {
// TODO: Look into combining the shard req's docs into one in memory index.
Response response = new Response();
response.items = new ArrayList<Response.Item>(request.items.size());
for (Request.Item item : request.items) {
Response.Item responseItem;
int slot = item.slot;
try {
responseItem = new Response.Item(slot, percolatorService.percolate(item.request));
} catch (Throwable t) {
if (TransportActions.isShardNotAvailableException(t)) {
throw (ElasticsearchException) t;
} else {
logger.debug("[{}][{}] failed to multi percolate", t, request.index(), request.shardId());
responseItem = new Response.Item(slot, new StringText(ExceptionsHelper.detailedMessage(t)));
}
}
response.items.add(responseItem);
}
return response;
}
public static class Request extends SingleShardOperationRequest {
private int shardId;
private String preference;
private List<Item> items;
public Request() {
}
public Request(String concreteIndex, int shardId, String preference) {
this.index = concreteIndex;
this.shardId = shardId;
this.preference = preference;
this.items = new ArrayList<Item>();
}
public int shardId() {
return shardId;
}
public void add(Item item) {
items.add(item);
}
public List<Item> items() {
return items;
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
shardId = in.readVInt();
preference = in.readOptionalString();
int size = in.readVInt();
items = new ArrayList<Item>(size);
for (int i = 0; i < size; i++) {
int slot = in.readVInt();
PercolateShardRequest shardRequest = new PercolateShardRequest(index(), shardId);
shardRequest.documentType(in.readString());
shardRequest.source(in.readBytesReference());
shardRequest.docSource(in.readBytesReference());
shardRequest.onlyCount(in.readBoolean());
Item item = new Item(slot, shardRequest);
items.add(item);
}
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeVInt(shardId);
out.writeOptionalString(preference);
out.writeVInt(items.size());
for (Item item : items) {
out.writeVInt(item.slot);
out.writeString(item.request.documentType());
out.writeBytesReference(item.request.source());
out.writeBytesReference(item.request.docSource());
out.writeBoolean(item.request.onlyCount());
}
}
public static class Item {
private final int slot;
private final PercolateShardRequest request;
public Item(int slot, PercolateShardRequest request) {
this.slot = slot;
this.request = request;
}
public int slot() {
return slot;
}
public PercolateShardRequest request() {
return request;
}
}
}
public static class Response extends ActionResponse {
private List<Item> items;
public List<Item> items() {
return items;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeVInt(items.size());
for (Item item : items) {
out.writeVInt(item.slot);
if (item.response != null) {
out.writeBoolean(true);
item.response.writeTo(out);
} else {
out.writeBoolean(false);
out.writeText(item.error);
}
}
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
int size = in.readVInt();
items = new ArrayList<Item>(size);
for (int i = 0; i < size; i++) {
int slot = in.readVInt();
if (in.readBoolean()) {
PercolateShardResponse shardResponse = new PercolateShardResponse();
shardResponse.readFrom(in);
items.add(new Item(slot, shardResponse));
} else {
items.add(new Item(slot, in.readText()));
}
}
}
public static class Item {
private final int slot;
private final PercolateShardResponse response;
private final Text error;
public Item(Integer slot, PercolateShardResponse response) {
this.slot = slot;
this.response = response;
this.error = null;
}
public Item(Integer slot, Text error) {
this.slot = slot;
this.error = error;
this.response = null;
}
public int slot() {
return slot;
}
public PercolateShardResponse response() {
return response;
}
public Text error() {
return error;
}
public boolean failed() {
return error != null;
}
}
}
}
| 0true
|
src_main_java_org_elasticsearch_action_percolate_TransportShardMultiPercolateAction.java
|
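A hedged, self-contained sketch of the wire symmetry Request.readFrom/writeTo rely on above: every field written must be read back in the same order. Plain java.io streams stand in for StreamOutput/StreamInput.
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;

final class SlotItem {
    final int slot;
    final String documentType;
    final boolean onlyCount;

    SlotItem(int slot, String documentType, boolean onlyCount) {
        this.slot = slot;
        this.documentType = documentType;
        this.onlyCount = onlyCount;
    }

    void writeTo(DataOutput out) throws IOException {
        out.writeInt(slot);            // order matters:
        out.writeUTF(documentType);    // readFrom must mirror it exactly
        out.writeBoolean(onlyCount);
    }

    static SlotItem readFrom(DataInput in) throws IOException {
        return new SlotItem(in.readInt(), in.readUTF(), in.readBoolean());
    }
}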
31 |
@Service("blOrderFieldService")
public class OrderFieldServiceImpl extends AbstractRuleBuilderFieldService {
@Override
public void init() {
fields.add(new FieldData.Builder()
.label("rule_orderCurrenceIsDefault")
.name("currency.defaultFlag")
.operators("blcOperators_Boolean")
.options("[]")
.type(SupportedFieldType.BOOLEAN)
.build());
fields.add(new FieldData.Builder()
.label("rule_orderCurrencyCode")
.name("currency.currencyCode")
.operators("blcOperators_Text")
.options("[]")
.type(SupportedFieldType.STRING)
.build());
fields.add(new FieldData.Builder()
.label("rule_orderCurrencyName")
.name("currency.friendlyName")
.operators("blcOperators_Text")
.options("[]")
.type(SupportedFieldType.STRING)
.build());
fields.add(new FieldData.Builder()
.label("rule_orderLocaleIsDefault")
.name("locale.defaultFlag")
.operators("blcOperators_Boolean")
.options("[]")
.type(SupportedFieldType.BOOLEAN)
.build());
fields.add(new FieldData.Builder()
.label("rule_orderLocaleCode")
.name("locale.localeCode")
.operators("blcOperators_Text")
.options("[]")
.type(SupportedFieldType.STRING)
.build());
fields.add(new FieldData.Builder()
.label("rule_orderLocaleName")
.name("locale.friendlyName")
.operators("blcOperators_Text")
.options("[]")
.type(SupportedFieldType.STRING)
.build());
fields.add(new FieldData.Builder()
.label("rule_orderSubtotal")
.name("subTotal")
.operators("blcOperators_Numeric")
.options("[]")
.type(SupportedFieldType.MONEY)
.build());
}
@Override
public String getName() {
return RuleIdentifier.ORDER;
}
@Override
public String getDtoClassName() {
return "org.broadleafcommerce.core.order.domain.OrderImpl";
}
}
| 0true
|
admin_broadleaf-admin-module_src_main_java_org_broadleafcommerce_admin_web_rulebuilder_service_OrderFieldServiceImpl.java
|
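Extending init() above follows the same builder pattern; a hedged sketch with a hypothetical label key and property path — real values depend on the Order domain and the message bundle.
fields.add(new FieldData.Builder()
        .label("rule_orderEmailAddress")   // hypothetical message key
        .name("emailAddress")              // hypothetical property on OrderImpl
        .operators("blcOperators_Text")
        .options("[]")
        .type(SupportedFieldType.STRING)
        .build());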
1,132 |
public class NativePayloadSumScoreScript extends AbstractSearchScript {
public static final String NATIVE_PAYLOAD_SUM_SCRIPT_SCORE = "native_payload_sum_script_score";
String field = null;
String[] terms = null;
public static class Factory implements NativeScriptFactory {
@Override
public ExecutableScript newScript(@Nullable Map<String, Object> params) {
return new NativePayloadSumScoreScript(params);
}
}
private NativePayloadSumScoreScript(Map<String, Object> params) {
// params holds a single entry: field name -> list of terms whose payloads are summed
field = params.keySet().iterator().next();
ArrayList<String> arrayList = (ArrayList<String>) params.get(field);
terms = arrayList.toArray(new String[arrayList.size()]);
}
@Override
public Object run() {
float score = 0;
IndexField indexField = indexLookup().get(field);
for (int i = 0; i < terms.length; i++) {
IndexFieldTerm indexFieldTerm = indexField.get(terms[i], IndexLookup.FLAG_PAYLOADS | IndexLookup.FLAG_CACHE);
for (TermPosition pos : indexFieldTerm) {
score += pos.payloadAsFloat(0);
}
}
return score;
}
}
| 0true
|
src_test_java_org_elasticsearch_benchmark_scripts_score_script_NativePayloadSumScoreScript.java
|
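A hedged sketch of the params shape the private constructor above expects: a single entry mapping the field name to the list of terms whose payloads are summed. Field and term values are illustrative.
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;

Map<String, Object> params = new HashMap<String, Object>();
params.put("body", new ArrayList<String>(Arrays.asList("foo", "bar")));
ExecutableScript script = new NativePayloadSumScoreScript.Factory().newScript(params);
// run() now sums payloadAsFloat(0) over every position of "foo" and "bar" in field "body"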
626 |
public class DocsStatus {
long numDocs = 0;
long maxDoc = 0;
long deletedDocs = 0;
/**
* The number of docs.
*/
public long getNumDocs() {
return numDocs;
}
/**
* The max doc.
*/
public long getMaxDoc() {
return maxDoc;
}
/**
* The number of deleted docs in the index.
*/
public long getDeletedDocs() {
return deletedDocs;
}
}
| 0true
|
src_main_java_org_elasticsearch_action_admin_indices_status_DocsStatus.java
|
1,342 |
public static class MappingUpdatedResponse extends ActionResponse {
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
}
}
| 0true
|
src_main_java_org_elasticsearch_cluster_action_index_MappingUpdatedAction.java
|
1,298 |
clusterService.submitStateUpdateTask("1", new ClusterStateUpdateTask() {
@Override
public ClusterState execute(ClusterState currentState) {
invoked3.countDown();
try {
block2.await();
} catch (InterruptedException e) {
fail();
}
return currentState;
}
@Override
public void onFailure(String source, Throwable t) {
invoked3.countDown();
fail();
}
});
| 0true
|
src_test_java_org_elasticsearch_cluster_ClusterServiceTests.java
|
431 |
public enum ForeignKeyRestrictionType {
ID_EQ,
COLLECTION_SIZE_EQ
}
| 0true
|
common_src_main_java_org_broadleafcommerce_common_presentation_client_ForeignKeyRestrictionType.java
|
4,477 |
shard.recover(new Engine.RecoveryHandler() {
@Override
public void phase1(final SnapshotIndexCommit snapshot) throws ElasticsearchException {
long totalSize = 0;
long existingTotalSize = 0;
try {
StopWatch stopWatch = new StopWatch().start();
for (String name : snapshot.getFiles()) {
StoreFileMetaData md = shard.store().metaData(name);
boolean useExisting = false;
if (request.existingFiles().containsKey(name)) {
// we don't compute checksum for segments, so always recover them
if (!name.startsWith("segments") && md.isSame(request.existingFiles().get(name))) {
response.phase1ExistingFileNames.add(name);
response.phase1ExistingFileSizes.add(md.length());
existingTotalSize += md.length();
useExisting = true;
if (logger.isTraceEnabled()) {
logger.trace("[{}][{}] recovery [phase1] to {}: not recovering [{}], exists in local store and has checksum [{}], size [{}]", request.shardId().index().name(), request.shardId().id(), request.targetNode(), name, md.checksum(), md.length());
}
}
}
if (!useExisting) {
if (request.existingFiles().containsKey(name)) {
logger.trace("[{}][{}] recovery [phase1] to {}: recovering [{}], exists in local store, but is different: remote [{}], local [{}]", request.shardId().index().name(), request.shardId().id(), request.targetNode(), name, request.existingFiles().get(name), md);
} else {
logger.trace("[{}][{}] recovery [phase1] to {}: recovering [{}], does not exists in remote", request.shardId().index().name(), request.shardId().id(), request.targetNode(), name);
}
response.phase1FileNames.add(name);
response.phase1FileSizes.add(md.length());
}
totalSize += md.length();
}
response.phase1TotalSize = totalSize;
response.phase1ExistingTotalSize = existingTotalSize;
logger.trace("[{}][{}] recovery [phase1] to {}: recovering_files [{}] with total_size [{}], reusing_files [{}] with total_size [{}]", request.shardId().index().name(), request.shardId().id(), request.targetNode(), response.phase1FileNames.size(), new ByteSizeValue(totalSize), response.phase1ExistingFileNames.size(), new ByteSizeValue(existingTotalSize));
RecoveryFilesInfoRequest recoveryInfoFilesRequest = new RecoveryFilesInfoRequest(request.recoveryId(), request.shardId(), response.phase1FileNames, response.phase1FileSizes,
response.phase1ExistingFileNames, response.phase1ExistingFileSizes, response.phase1TotalSize, response.phase1ExistingTotalSize);
transportService.submitRequest(request.targetNode(), RecoveryTarget.Actions.FILES_INFO, recoveryInfoFilesRequest, TransportRequestOptions.options().withTimeout(internalActionTimeout), EmptyTransportResponseHandler.INSTANCE_SAME).txGet();
final CountDownLatch latch = new CountDownLatch(response.phase1FileNames.size());
final AtomicReference<Throwable> lastException = new AtomicReference<Throwable>();
int fileIndex = 0;
for (final String name : response.phase1FileNames) {
ThreadPoolExecutor pool;
long fileSize = response.phase1FileSizes.get(fileIndex);
if (fileSize > recoverySettings.SMALL_FILE_CUTOFF_BYTES) {
pool = recoverySettings.concurrentStreamPool();
} else {
pool = recoverySettings.concurrentSmallFileStreamPool();
}
pool.execute(new Runnable() {
@Override
public void run() {
IndexInput indexInput = null;
try {
final int BUFFER_SIZE = (int) recoverySettings.fileChunkSize().bytes();
byte[] buf = new byte[BUFFER_SIZE];
StoreFileMetaData md = shard.store().metaData(name);
// TODO: maybe use IOContext.READONCE?
indexInput = shard.store().openInputRaw(name, IOContext.READ);
boolean shouldCompressRequest = recoverySettings.compress();
if (CompressorFactory.isCompressed(indexInput)) {
shouldCompressRequest = false;
}
long len = indexInput.length();
long readCount = 0;
while (readCount < len) {
if (shard.state() == IndexShardState.CLOSED) { // check if the shard got closed on us
throw new IndexShardClosedException(shard.shardId());
}
int toRead = readCount + BUFFER_SIZE > len ? (int) (len - readCount) : BUFFER_SIZE;
long position = indexInput.getFilePointer();
if (recoverySettings.rateLimiter() != null) {
recoverySettings.rateLimiter().pause(toRead);
}
indexInput.readBytes(buf, 0, toRead, false);
BytesArray content = new BytesArray(buf, 0, toRead);
transportService.submitRequest(request.targetNode(), RecoveryTarget.Actions.FILE_CHUNK, new RecoveryFileChunkRequest(request.recoveryId(), request.shardId(), name, position, len, md.checksum(), content),
TransportRequestOptions.options().withCompress(shouldCompressRequest).withType(TransportRequestOptions.Type.RECOVERY).withTimeout(internalActionTimeout), EmptyTransportResponseHandler.INSTANCE_SAME).txGet();
readCount += toRead;
}
} catch (Throwable e) {
lastException.set(e);
} finally {
IOUtils.closeWhileHandlingException(indexInput);
latch.countDown();
}
}
});
fileIndex++;
}
latch.await();
if (lastException.get() != null) {
throw lastException.get();
}
// now, set the clean files request
Set<String> snapshotFiles = Sets.newHashSet(snapshot.getFiles());
transportService.submitRequest(request.targetNode(), RecoveryTarget.Actions.CLEAN_FILES, new RecoveryCleanFilesRequest(request.recoveryId(), shard.shardId(), snapshotFiles), TransportRequestOptions.options().withTimeout(internalActionTimeout), EmptyTransportResponseHandler.INSTANCE_SAME).txGet();
stopWatch.stop();
logger.trace("[{}][{}] recovery [phase1] to {}: took [{}]", request.shardId().index().name(), request.shardId().id(), request.targetNode(), stopWatch.totalTime());
response.phase1Time = stopWatch.totalTime().millis();
} catch (Throwable e) {
throw new RecoverFilesRecoveryException(request.shardId(), response.phase1FileNames.size(), new ByteSizeValue(totalSize), e);
}
}
@Override
public void phase2(Translog.Snapshot snapshot) throws ElasticsearchException {
if (shard.state() == IndexShardState.CLOSED) {
throw new IndexShardClosedException(request.shardId());
}
logger.trace("[{}][{}] recovery [phase2] to {}: start", request.shardId().index().name(), request.shardId().id(), request.targetNode());
StopWatch stopWatch = new StopWatch().start();
transportService.submitRequest(request.targetNode(), RecoveryTarget.Actions.PREPARE_TRANSLOG, new RecoveryPrepareForTranslogOperationsRequest(request.recoveryId(), request.shardId()), TransportRequestOptions.options().withTimeout(internalActionTimeout), EmptyTransportResponseHandler.INSTANCE_SAME).txGet();
stopWatch.stop();
response.startTime = stopWatch.totalTime().millis();
logger.trace("[{}][{}] recovery [phase2] to {}: start took [{}]", request.shardId().index().name(), request.shardId().id(), request.targetNode(), stopWatch.totalTime());
logger.trace("[{}][{}] recovery [phase2] to {}: sending transaction log operations", request.shardId().index().name(), request.shardId().id(), request.targetNode());
stopWatch = new StopWatch().start();
int totalOperations = sendSnapshot(snapshot);
stopWatch.stop();
logger.trace("[{}][{}] recovery [phase2] to {}: took [{}]", request.shardId().index().name(), request.shardId().id(), request.targetNode(), stopWatch.totalTime());
response.phase2Time = stopWatch.totalTime().millis();
response.phase2Operations = totalOperations;
}
@Override
public void phase3(Translog.Snapshot snapshot) throws ElasticsearchException {
if (shard.state() == IndexShardState.CLOSED) {
throw new IndexShardClosedException(request.shardId());
}
logger.trace("[{}][{}] recovery [phase3] to {}: sending transaction log operations", request.shardId().index().name(), request.shardId().id(), request.targetNode());
StopWatch stopWatch = new StopWatch().start();
int totalOperations = sendSnapshot(snapshot);
transportService.submitRequest(request.targetNode(), RecoveryTarget.Actions.FINALIZE, new RecoveryFinalizeRecoveryRequest(request.recoveryId(), request.shardId()), TransportRequestOptions.options().withTimeout(internalActionLongTimeout), EmptyTransportResponseHandler.INSTANCE_SAME).txGet();
if (request.markAsRelocated()) {
// TODO what happens if the recovery process fails afterwards, we need to mark this back to started
try {
shard.relocated("to " + request.targetNode());
} catch (IllegalIndexShardStateException e) {
// we can ignore this exception since, on the other node, when it moved to phase3
// it will also send shard started, which might cause the index shard we work against
// to be closed by the time we get to the relocated method
}
}
stopWatch.stop();
logger.trace("[{}][{}] recovery [phase3] to {}: took [{}]", request.shardId().index().name(), request.shardId().id(), request.targetNode(), stopWatch.totalTime());
response.phase3Time = stopWatch.totalTime().millis();
response.phase3Operations = totalOperations;
}
private int sendSnapshot(Translog.Snapshot snapshot) throws ElasticsearchException {
int ops = 0;
long size = 0;
int totalOperations = 0;
List<Translog.Operation> operations = Lists.newArrayList();
while (snapshot.hasNext()) {
if (shard.state() == IndexShardState.CLOSED) {
throw new IndexShardClosedException(request.shardId());
}
Translog.Operation operation = snapshot.next();
operations.add(operation);
ops += 1;
size += operation.estimateSize();
totalOperations++;
if (ops >= recoverySettings.translogOps() || size >= recoverySettings.translogSize().bytes()) {
// don't throttle translog, since we lock for phase3 indexing, so we need to move it as
// fast as possible. Note, since we index docs to replicas while the index files are recovered
// the lock can potentially be removed, in which case, it might make sense to re-enable
// throttling in this phase
// if (recoverySettings.rateLimiter() != null) {
// recoverySettings.rateLimiter().pause(size);
// }
RecoveryTranslogOperationsRequest translogOperationsRequest = new RecoveryTranslogOperationsRequest(request.recoveryId(), request.shardId(), operations);
transportService.submitRequest(request.targetNode(), RecoveryTarget.Actions.TRANSLOG_OPS, translogOperationsRequest, TransportRequestOptions.options().withCompress(recoverySettings.compress()).withType(TransportRequestOptions.Type.RECOVERY).withTimeout(internalActionLongTimeout), EmptyTransportResponseHandler.INSTANCE_SAME).txGet();
ops = 0;
size = 0;
operations.clear();
}
}
// send the leftover
if (!operations.isEmpty()) {
RecoveryTranslogOperationsRequest translogOperationsRequest = new RecoveryTranslogOperationsRequest(request.recoveryId(), request.shardId(), operations);
transportService.submitRequest(request.targetNode(), RecoveryTarget.Actions.TRANSLOG_OPS, translogOperationsRequest, TransportRequestOptions.options().withCompress(recoverySettings.compress()).withType(TransportRequestOptions.Type.RECOVERY).withTimeout(internalActionLongTimeout), EmptyTransportResponseHandler.INSTANCE_SAME).txGet();
}
return totalOperations;
}
});
| 1no label
|
src_main_java_org_elasticsearch_indices_recovery_RecoverySource.java
|
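phase1 above streams each file in fixed-size chunks; a hedged, library-free sketch of that loop where a plain OutputStream stands in for the FILE_CHUNK transport request and the rate limiter is omitted.
import java.io.EOFException;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;

final class ChunkedSend {
    static void sendInChunks(File file, int bufferSize, OutputStream transport) throws IOException {
        byte[] buf = new byte[bufferSize];
        InputStream in = new FileInputStream(file);
        try {
            long len = file.length();
            long sent = 0;
            while (sent < len) {
                int toRead = (int) Math.min(bufferSize, len - sent);
                int read = in.read(buf, 0, toRead);
                if (read < 0) {
                    throw new EOFException(file + " truncated");
                }
                transport.write(buf, 0, read); // stands in for submitRequest(FILE_CHUNK, ...)
                sent += read;
            }
        } finally {
            in.close();
        }
    }
}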
449 |
@Deprecated
public @interface AdminPresentationToOneLookupOverride {
/**
* The name of the property whose AdminPresentationToOneLookup annotation should be overwritten
*
* @return the name of the property that should be overwritten
*/
String name();
/**
* The AdminPresentationToOneLookup to overwrite the property with
*
* @return the AdminPresentation being mapped to the attribute
*/
AdminPresentationToOneLookup value();
}
| 0true
|
common_src_main_java_org_broadleafcommerce_common_presentation_override_AdminPresentationToOneLookupOverride.java
|
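A hedged sketch of applying the override on an entity subclass; the property name and target class are hypothetical, and @AdminPresentationToOneLookup is assumed to carry usable defaults.
@AdminPresentationToOneLookupOverride(
    name = "defaultCategory",
    value = @AdminPresentationToOneLookup())
public class MyProductImpl extends ProductImpl {
    // inherits everything; only the lookup presentation of defaultCategory changes
}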
853 |
private class TransportHandler extends BaseTransportRequestHandler<SearchScrollRequest> {
@Override
public SearchScrollRequest newInstance() {
return new SearchScrollRequest();
}
@Override
public void messageReceived(SearchScrollRequest request, final TransportChannel channel) throws Exception {
// no need for a threaded listener
request.listenerThreaded(false);
execute(request, new ActionListener<SearchResponse>() {
@Override
public void onResponse(SearchResponse result) {
try {
channel.sendResponse(result);
} catch (Throwable e) {
onFailure(e);
}
}
@Override
public void onFailure(Throwable e) {
try {
channel.sendResponse(e);
} catch (Exception e1) {
logger.warn("Failed to send response for search", e1);
}
}
});
}
@Override
public String executor() {
return ThreadPool.Names.SAME;
}
}
| 0true
|
src_main_java_org_elasticsearch_action_search_TransportSearchScrollAction.java
|
3,780 |
public abstract class MergeSchedulerProvider<T extends MergeScheduler> extends AbstractIndexShardComponent implements IndexShardComponent {
public static interface FailureListener {
void onFailedMerge(MergePolicy.MergeException e);
}
/**
* Listener for events before/after single merges. Called on the merge thread.
*/
public static interface Listener {
/**
* A callback before a merge is going to execute. Note, any logic here will block the merge
* till it's done.
*/
void beforeMerge(OnGoingMerge merge);
/**
* A callback after a merge has executed. Note, any logic here will block the merge
* thread.
*/
void afterMerge(OnGoingMerge merge);
}
private final ThreadPool threadPool;
private final CopyOnWriteArrayList<FailureListener> failureListeners = new CopyOnWriteArrayList<FailureListener>();
private final CopyOnWriteArrayList<Listener> listeners = new CopyOnWriteArrayList<Listener>();
private final boolean notifyOnMergeFailure;
protected MergeSchedulerProvider(ShardId shardId, @IndexSettings Settings indexSettings, ThreadPool threadPool) {
super(shardId, indexSettings);
this.threadPool = threadPool;
this.notifyOnMergeFailure = componentSettings.getAsBoolean("notify_on_failure", true);
}
public void addFailureListener(FailureListener listener) {
failureListeners.add(listener);
}
public void addListener(Listener listener) {
listeners.add(listener);
}
public void removeListener(Listener listener) {
listeners.remove(listener);
}
protected void failedMerge(final MergePolicy.MergeException e) {
if (!notifyOnMergeFailure) {
return;
}
for (final FailureListener failureListener : failureListeners) {
threadPool.generic().execute(new Runnable() {
@Override
public void run() {
failureListener.onFailedMerge(e);
}
});
}
}
protected void beforeMerge(OnGoingMerge merge) {
for (Listener listener : listeners) {
listener.beforeMerge(merge);
}
}
protected void afterMerge(OnGoingMerge merge) {
for (Listener listener : listeners) {
listener.afterMerge(merge);
}
}
public abstract T newMergeScheduler();
public abstract MergeStats stats();
public abstract Set<OnGoingMerge> onGoingMerges();
}
| 1no label
|
src_main_java_org_elasticsearch_index_merge_scheduler_MergeSchedulerProvider.java
|
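A hedged sketch of a Listener registration that times each merge; `provider` and the java.util.concurrent imports are assumed context. The map keyed by OnGoingMerge keeps concurrent merges from clobbering each other, and everything stays cheap because both callbacks block the merge thread.
final ConcurrentMap<OnGoingMerge, Long> starts = new ConcurrentHashMap<OnGoingMerge, Long>();
provider.addListener(new MergeSchedulerProvider.Listener() {
    @Override
    public void beforeMerge(OnGoingMerge merge) {
        starts.put(merge, System.nanoTime());
    }
    @Override
    public void afterMerge(OnGoingMerge merge) {
        Long start = starts.remove(merge);
        long tookMillis = start == null ? -1 : (System.nanoTime() - start) / 1000000;
        // record tookMillis somewhere cheap; heavy work here stalls merging
    }
});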
695 |
public static interface Listener {
/**
* Callback before the bulk is executed.
*/
void beforeBulk(long executionId, BulkRequest request);
/**
* Callback after a successful execution of a bulk request.
*/
void afterBulk(long executionId, BulkRequest request, BulkResponse response);
/**
* Callback after a failed execution of a bulk request.
*/
void afterBulk(long executionId, BulkRequest request, Throwable failure);
}
| 0true
|
src_main_java_org_elasticsearch_action_bulk_BulkProcessor.java
|
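A hedged sketch of implementing the Listener above; BulkResponse.hasFailures() and BulkRequest.numberOfActions() are part of the public API, while the handling shown in the comments is up to the caller.
BulkProcessor.Listener listener = new BulkProcessor.Listener() {
    @Override
    public void beforeBulk(long executionId, BulkRequest request) {
        // about to ship request.numberOfActions() actions under this executionId
    }
    @Override
    public void afterBulk(long executionId, BulkRequest request, BulkResponse response) {
        if (response.hasFailures()) {
            // per-item failures: inspect response.buildFailureMessage() and retry or alert
        }
    }
    @Override
    public void afterBulk(long executionId, BulkRequest request, Throwable failure) {
        // the whole bulk failed before any item executed, e.g. a transport error
    }
};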
617 |
public class IndicesStatsAction extends IndicesAction<IndicesStatsRequest, IndicesStatsResponse, IndicesStatsRequestBuilder> {
public static final IndicesStatsAction INSTANCE = new IndicesStatsAction();
public static final String NAME = "indices/stats";
private IndicesStatsAction() {
super(NAME);
}
@Override
public IndicesStatsResponse newResponse() {
return new IndicesStatsResponse();
}
@Override
public IndicesStatsRequestBuilder newRequestBuilder(IndicesAdminClient client) {
return new IndicesStatsRequestBuilder(client);
}
}
| 0true
|
src_main_java_org_elasticsearch_action_admin_indices_stats_IndicesStatsAction.java
|
346 |
public class TransportNodesShutdownAction extends TransportMasterNodeOperationAction<NodesShutdownRequest, NodesShutdownResponse> {
private final Node node;
private final ClusterName clusterName;
private final boolean disabled;
private final TimeValue delay;
@Inject
public TransportNodesShutdownAction(Settings settings, TransportService transportService, ClusterService clusterService, ThreadPool threadPool,
Node node, ClusterName clusterName) {
super(settings, transportService, clusterService, threadPool);
this.node = node;
this.clusterName = clusterName;
this.disabled = settings.getAsBoolean("action.disable_shutdown", componentSettings.getAsBoolean("disabled", false));
this.delay = componentSettings.getAsTime("delay", TimeValue.timeValueMillis(200));
this.transportService.registerHandler(NodeShutdownRequestHandler.ACTION, new NodeShutdownRequestHandler());
}
@Override
protected String executor() {
return ThreadPool.Names.GENERIC;
}
@Override
protected String transportAction() {
return NodesShutdownAction.NAME;
}
@Override
protected NodesShutdownRequest newRequest() {
return new NodesShutdownRequest();
}
@Override
protected NodesShutdownResponse newResponse() {
return new NodesShutdownResponse();
}
@Override
protected void processBeforeDelegationToMaster(NodesShutdownRequest request, ClusterState state) {
String[] nodesIds = request.nodesIds;
if (nodesIds != null) {
for (int i = 0; i < nodesIds.length; i++) {
// replace the _local one, since it loses its meaning when going over to the master...
if ("_local".equals(nodesIds[i])) {
nodesIds[i] = state.nodes().localNodeId();
}
}
}
}
@Override
protected void masterOperation(final NodesShutdownRequest request, final ClusterState state, final ActionListener<NodesShutdownResponse> listener) throws ElasticsearchException {
if (disabled) {
throw new ElasticsearchIllegalStateException("Shutdown is disabled");
}
final ObjectOpenHashSet<DiscoveryNode> nodes = new ObjectOpenHashSet<DiscoveryNode>();
if (state.nodes().isAllNodes(request.nodesIds)) {
logger.info("[cluster_shutdown]: requested, shutting down in [{}]", request.delay);
nodes.addAll(state.nodes().dataNodes().values());
nodes.addAll(state.nodes().masterNodes().values());
Thread t = new Thread(new Runnable() {
@Override
public void run() {
try {
Thread.sleep(request.delay.millis());
} catch (InterruptedException e) {
// ignore
}
// first, stop the cluster service
logger.trace("[cluster_shutdown]: stopping the cluster service so no re-routing will occur");
clusterService.stop();
final CountDownLatch latch = new CountDownLatch(nodes.size());
for (ObjectCursor<DiscoveryNode> cursor : nodes) {
final DiscoveryNode node = cursor.value;
if (node.id().equals(state.nodes().masterNodeId())) {
// don't shutdown the master yet...
latch.countDown();
} else {
logger.trace("[cluster_shutdown]: sending shutdown request to [{}]", node);
transportService.sendRequest(node, NodeShutdownRequestHandler.ACTION, new NodeShutdownRequest(request), new EmptyTransportResponseHandler(ThreadPool.Names.SAME) {
@Override
public void handleResponse(TransportResponse.Empty response) {
logger.trace("[cluster_shutdown]: received shutdown response from [{}]", node);
latch.countDown();
}
@Override
public void handleException(TransportException exp) {
logger.warn("[cluster_shutdown]: received failed shutdown response from [{}]", exp, node);
latch.countDown();
}
});
}
}
try {
latch.await();
} catch (InterruptedException e) {
// ignore
}
logger.info("[cluster_shutdown]: done shutting down all nodes except master, proceeding to master");
// now, kill the master
logger.trace("[cluster_shutdown]: shutting down the master [{}]", state.nodes().masterNode());
transportService.sendRequest(state.nodes().masterNode(), NodeShutdownRequestHandler.ACTION, new NodeShutdownRequest(request), new EmptyTransportResponseHandler(ThreadPool.Names.SAME) {
@Override
public void handleResponse(TransportResponse.Empty response) {
logger.trace("[cluster_shutdown]: received shutdown response from master");
}
@Override
public void handleException(TransportException exp) {
logger.warn("[cluster_shutdown]: received failed shutdown response master", exp);
}
});
}
});
t.start();
} else {
final String[] nodesIds = state.nodes().resolveNodesIds(request.nodesIds);
logger.info("[partial_cluster_shutdown]: requested, shutting down [{}] in [{}]", nodesIds, request.delay);
for (String nodeId : nodesIds) {
final DiscoveryNode node = state.nodes().get(nodeId);
if (node != null) {
nodes.add(node);
}
}
Thread t = new Thread(new Runnable() {
@Override
public void run() {
try {
Thread.sleep(request.delay.millis());
} catch (InterruptedException e) {
// ignore
}
final CountDownLatch latch = new CountDownLatch(nodesIds.length);
for (String nodeId : nodesIds) {
final DiscoveryNode node = state.nodes().get(nodeId);
if (node == null) {
logger.warn("[partial_cluster_shutdown]: no node to shutdown for node_id [{}]", nodeId);
latch.countDown();
continue;
}
logger.trace("[partial_cluster_shutdown]: sending shutdown request to [{}]", node);
transportService.sendRequest(node, NodeShutdownRequestHandler.ACTION, new NodeShutdownRequest(request), new EmptyTransportResponseHandler(ThreadPool.Names.SAME) {
@Override
public void handleResponse(TransportResponse.Empty response) {
logger.trace("[partial_cluster_shutdown]: received shutdown response from [{}]", node);
latch.countDown();
}
@Override
public void handleException(TransportException exp) {
logger.warn("[partial_cluster_shutdown]: received failed shutdown response from [{}]", exp, node);
latch.countDown();
}
});
}
try {
latch.await();
} catch (InterruptedException e) {
// ignore
}
logger.info("[partial_cluster_shutdown]: done shutting down [{}]", ((Object) nodesIds));
}
});
t.start();
}
listener.onResponse(new NodesShutdownResponse(clusterName, nodes.toArray(DiscoveryNode.class)));
}
private class NodeShutdownRequestHandler extends BaseTransportRequestHandler<NodeShutdownRequest> {
static final String ACTION = "/cluster/nodes/shutdown/node";
@Override
public NodeShutdownRequest newInstance() {
return new NodeShutdownRequest();
}
@Override
public String executor() {
return ThreadPool.Names.SAME;
}
@Override
public void messageReceived(final NodeShutdownRequest request, TransportChannel channel) throws Exception {
if (disabled) {
throw new ElasticsearchIllegalStateException("Shutdown is disabled");
}
logger.info("shutting down in [{}]", delay);
Thread t = new Thread(new Runnable() {
@Override
public void run() {
try {
Thread.sleep(delay.millis());
} catch (InterruptedException e) {
// ignore
}
if (!request.exit) {
logger.info("initiating requested shutdown (no exit)...");
try {
node.close();
} catch (Exception e) {
logger.warn("Failed to shutdown", e);
}
return;
}
boolean shutdownWithWrapper = false;
if (System.getProperty("elasticsearch-service") != null) {
try {
Class wrapperManager = settings.getClassLoader().loadClass("org.tanukisoftware.wrapper.WrapperManager");
logger.info("initiating requested shutdown (using service)");
wrapperManager.getMethod("stopAndReturn", int.class).invoke(null, 0);
shutdownWithWrapper = true;
} catch (Throwable e) {
logger.error("failed to initial shutdown on service wrapper", e);
}
}
if (!shutdownWithWrapper) {
logger.info("initiating requested shutdown...");
try {
node.close();
} catch (Exception e) {
logger.warn("Failed to shutdown", e);
} finally {
// make sure we initiate the shutdown hooks, so the Bootstrap#main thread will exit
System.exit(0);
}
}
}
});
t.start();
channel.sendResponse(TransportResponse.Empty.INSTANCE);
}
}
static class NodeShutdownRequest extends TransportRequest {
boolean exit;
NodeShutdownRequest() {
}
NodeShutdownRequest(NodesShutdownRequest request) {
super(request);
this.exit = request.exit();
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
exit = in.readBoolean();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeBoolean(exit);
}
}
}
| 1no label
|
src_main_java_org_elasticsearch_action_admin_cluster_node_shutdown_TransportNodesShutdownAction.java
|
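Both shutdown paths above reuse one pattern: count down a latch on every response or failure, then await before proceeding. A hedged, stand-alone sketch with a hypothetical sendShutdown transport call.
final CountDownLatch latch = new CountDownLatch(nodes.size());
for (final DiscoveryNode node : nodes) {
    sendShutdown(node, new Runnable() {   // hypothetical async transport call
        @Override
        public void run() {
            latch.countDown();            // invoked on response OR failure
        }
    });
}
try {
    latch.await();                        // proceed only once every node answered
} catch (InterruptedException e) {
    // ignore, matching the code above
}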
557 |
public class TransportGetFieldMappingsAction extends TransportClusterInfoAction<GetFieldMappingsRequest, GetFieldMappingsResponse> {
private final IndicesService indicesService;
@Inject
public TransportGetFieldMappingsAction(Settings settings, TransportService transportService, ClusterService clusterService,
IndicesService indicesService, ThreadPool threadPool) {
super(settings, transportService, clusterService, threadPool);
this.indicesService = indicesService;
}
@Override
protected String transportAction() {
return GetFieldMappingsAction.NAME;
}
@Override
protected GetFieldMappingsRequest newRequest() {
return new GetFieldMappingsRequest();
}
@Override
protected GetFieldMappingsResponse newResponse() {
return new GetFieldMappingsResponse();
}
@Override
protected void doMasterOperation(final GetFieldMappingsRequest request, final ClusterState state, final ActionListener<GetFieldMappingsResponse> listener) throws ElasticsearchException {
listener.onResponse(new GetFieldMappingsResponse(findMappings(request.indices(), request.types(), request.fields(), request.includeDefaults())));
}
private ImmutableMap<String, ImmutableMap<String, ImmutableMap<String, FieldMappingMetaData>>> findMappings(String[] concreteIndices,
final String[] types,
final String[] fields,
boolean includeDefaults) {
assert types != null;
assert concreteIndices != null;
if (concreteIndices.length == 0) {
return ImmutableMap.of();
}
boolean isProbablySingleFieldRequest = concreteIndices.length == 1 && types.length == 1 && fields.length == 1;
ImmutableMap.Builder<String, ImmutableMap<String, ImmutableMap<String, FieldMappingMetaData>>> indexMapBuilder = ImmutableMap.builder();
Sets.SetView<String> intersection = Sets.intersection(Sets.newHashSet(concreteIndices), indicesService.indices());
for (String index : intersection) {
IndexService indexService = indicesService.indexService(index);
Collection<String> typeIntersection;
if (types.length == 0) {
typeIntersection = indexService.mapperService().types();
} else {
typeIntersection = Collections2.filter(indexService.mapperService().types(), new Predicate<String>() {
@Override
public boolean apply(String type) {
return Regex.simpleMatch(types, type);
}
});
}
MapBuilder<String, ImmutableMap<String, FieldMappingMetaData>> typeMappings = new MapBuilder<String, ImmutableMap<String, FieldMappingMetaData>>();
for (String type : typeIntersection) {
DocumentMapper documentMapper = indexService.mapperService().documentMapper(type);
ImmutableMap<String, FieldMappingMetaData> fieldMapping = findFieldMappingsByType(documentMapper, fields, includeDefaults, isProbablySingleFieldRequest);
if (!fieldMapping.isEmpty()) {
typeMappings.put(type, fieldMapping);
}
}
if (!typeMappings.isEmpty()) {
indexMapBuilder.put(index, typeMappings.immutableMap());
}
}
return indexMapBuilder.build();
}
private static final ToXContent.Params includeDefaultsParams = new ToXContent.Params() {
final static String INCLUDE_DEFAULTS = "include_defaults";
@Override
public String param(String key) {
if (INCLUDE_DEFAULTS.equals(key)) {
return "true";
}
return null;
}
@Override
public String param(String key, String defaultValue) {
if (INCLUDE_DEFAULTS.equals(key)) {
return "true";
}
return defaultValue;
}
@Override
public boolean paramAsBoolean(String key, boolean defaultValue) {
if (INCLUDE_DEFAULTS.equals(key)) {
return true;
}
return defaultValue;
}
public Boolean paramAsBoolean(String key, Boolean defaultValue) {
if (INCLUDE_DEFAULTS.equals(key)) {
return true;
}
return defaultValue;
}
@Override @Deprecated
public Boolean paramAsBooleanOptional(String key, Boolean defaultValue) {
return paramAsBoolean(key, defaultValue);
}
};
private ImmutableMap<String, FieldMappingMetaData> findFieldMappingsByType(DocumentMapper documentMapper, String[] fields,
boolean includeDefaults, boolean isProbablySingleFieldRequest) throws ElasticsearchException {
MapBuilder<String, FieldMappingMetaData> fieldMappings = new MapBuilder<String, FieldMappingMetaData>();
ImmutableList<FieldMapper> allFieldMappers = documentMapper.mappers().mappers();
for (String field : fields) {
if (Regex.isMatchAllPattern(field)) {
for (FieldMapper fieldMapper : allFieldMappers) {
addFieldMapper(fieldMapper.names().fullName(), fieldMapper, fieldMappings, includeDefaults);
}
} else if (Regex.isSimpleMatchPattern(field)) {
// go through the field mappers 3 times, to make sure we give preference to the resolve order: full name, index name, name.
// also make sure we only store each mapper once.
boolean[] resolved = new boolean[allFieldMappers.size()];
for (int i = 0; i < allFieldMappers.size(); i++) {
FieldMapper fieldMapper = allFieldMappers.get(i);
if (Regex.simpleMatch(field, fieldMapper.names().fullName())) {
addFieldMapper(fieldMapper.names().fullName(), fieldMapper, fieldMappings, includeDefaults);
resolved[i] = true;
}
}
for (int i = 0; i < allFieldMappers.size(); i++) {
if (resolved[i]) {
continue;
}
FieldMapper fieldMapper = allFieldMappers.get(i);
if (Regex.simpleMatch(field, fieldMapper.names().indexName())) {
addFieldMapper(fieldMapper.names().indexName(), fieldMapper, fieldMappings, includeDefaults);
resolved[i] = true;
}
}
for (int i = 0; i < allFieldMappers.size(); i++) {
if (resolved[i]) {
continue;
}
FieldMapper fieldMapper = allFieldMappers.get(i);
if (Regex.simpleMatch(field, fieldMapper.names().name())) {
addFieldMapper(fieldMapper.names().name(), fieldMapper, fieldMappings, includeDefaults);
resolved[i] = true;
}
}
} else {
// not a pattern
FieldMapper fieldMapper = documentMapper.mappers().smartNameFieldMapper(field);
if (fieldMapper != null) {
addFieldMapper(field, fieldMapper, fieldMappings, includeDefaults);
} else if (isProbablySingleFieldRequest) {
fieldMappings.put(field, FieldMappingMetaData.NULL);
}
}
}
return fieldMappings.immutableMap();
}
private void addFieldMapper(String field, FieldMapper fieldMapper, MapBuilder<String, FieldMappingMetaData> fieldMappings, boolean includeDefaults) {
if (fieldMappings.containsKey(field)) {
return;
}
try {
XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
builder.startObject();
fieldMapper.toXContent(builder, includeDefaults ? includeDefaultsParams : ToXContent.EMPTY_PARAMS);
builder.endObject();
fieldMappings.put(field, new FieldMappingMetaData(fieldMapper.names().fullName(), builder.bytes()));
} catch (IOException e) {
throw new ElasticsearchException("failed to serialize XContent of field [" + field + "]", e);
}
}
}
| 1no label
|
src_main_java_org_elasticsearch_action_admin_indices_mapping_get_TransportGetFieldMappingsAction.java
|
86 |
{
@Override
public void run()
{
getAllProperties( node );
getAllProperties( relationship );
}
private void getAllProperties( PropertyContainer entity )
{
for ( String key : entity.getPropertyKeys() )
{
entity.getProperty( key );
}
}
};
| 0true
|
community_kernel_src_test_java_org_neo4j_kernel_impl_transaction_ReadTransactionLogWritingTest.java
|
14 |
class BodyVisitor extends Visitor {
Node node, currentBody, result;
BodyVisitor(Node node, Node root) {
this.node = node;
currentBody = root;
}
@Override
public void visitAny(Node that) {
if (that==node) {
result = currentBody;
}
else {
Node cb = currentBody;
if (that instanceof Tree.Body) {
currentBody = that;
}
if (that instanceof Tree.NamedArgumentList) {
currentBody = that;
}
super.visitAny(that);
currentBody = cb;
}
}
}
| 0true
|
plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_complete_CeylonCompletionProcessor.java
|
265 |
public interface EmailTrackingManager {
public Long createTrackedEmail(String emailAddress, String type, String extraValue);
public void recordOpen (Long emailId, Map<String, String> extraValues);
public void recordClick(Long emailId , Map<String, String> parameterMap, String customerId, Map<String, String> extraValues);
}
| 0true
|
common_src_main_java_org_broadleafcommerce_common_email_service_EmailTrackingManager.java
|
77 |
private class State
{
private final GraphDatabaseService graphDb;
private Transaction tx;
public State( GraphDatabaseService graphDb )
{
this.graphDb = graphDb;
}
}
| 0true
|
community_kernel_src_test_java_org_neo4j_kernel_impl_transaction_TestManualAcquireLock.java
|
717 |
public class CollectionRemoveOperation extends CollectionBackupAwareOperation {
private Data value;
private long itemId = -1;
public CollectionRemoveOperation() {
}
public CollectionRemoveOperation(String name, Data value) {
super(name);
this.value = value;
}
@Override
public void beforeRun() throws Exception {
}
@Override
public void run() throws Exception {
response = false;
final CollectionItem item = getOrCreateContainer().remove(value);
if (item != null) {
response = true;
itemId = item.getItemId();
}
}
@Override
public void afterRun() throws Exception {
if (itemId != -1) {
publishEvent(ItemEventType.REMOVED, value);
}
}
@Override
public boolean shouldBackup() {
return itemId != -1;
}
@Override
public Operation getBackupOperation() {
return new CollectionRemoveBackupOperation(name, itemId);
}
@Override
public int getId() {
return CollectionDataSerializerHook.COLLECTION_REMOVE;
}
@Override
protected void writeInternal(ObjectDataOutput out) throws IOException {
super.writeInternal(out);
value.writeData(out);
}
@Override
protected void readInternal(ObjectDataInput in) throws IOException {
super.readInternal(in);
value = new Data();
value.readData(in);
}
}
| 0true
|
hazelcast_src_main_java_com_hazelcast_collection_CollectionRemoveOperation.java
|
59 |
{
@Override
public void bytesWritten( long numberOfBytes )
{
bytesWritten.addAndGet( numberOfBytes );
}
@Override
public void bytesRead( long numberOfBytes )
{
}
}, TxLog.class.getName() );
| 0true
|
community_kernel_src_test_java_org_neo4j_kernel_impl_transaction_TestTxLogMonitoring.java
|
773 |
public class CollectionReserveAddOperation extends CollectionOperation {
String transactionId;
Data value;
public CollectionReserveAddOperation() {
}
public CollectionReserveAddOperation(String name, String transactionId, Data value) {
super(name);
this.transactionId = transactionId;
this.value = value;
}
@Override
public int getId() {
return CollectionDataSerializerHook.COLLECTION_RESERVE_ADD;
}
@Override
public void beforeRun() throws Exception {
}
@Override
public void run() throws Exception {
response = getOrCreateContainer().reserveAdd(transactionId, value);
}
@Override
public void afterRun() throws Exception {
}
@Override
protected void writeInternal(ObjectDataOutput out) throws IOException {
super.writeInternal(out);
out.writeUTF(transactionId);
out.writeObject(value);
}
@Override
protected void readInternal(ObjectDataInput in) throws IOException {
super.readInternal(in);
transactionId = in.readUTF();
value = in.readObject();
}
}
| 0true
|
hazelcast_src_main_java_com_hazelcast_collection_txn_CollectionReserveAddOperation.java
|
970 |
public abstract class OStringSerializerHelper {
public static final char RECORD_SEPARATOR = ',';
public static final String CLASS_SEPARATOR = "@";
public static final char LINK = ORID.PREFIX;
public static final char EMBEDDED_BEGIN = '(';
public static final char EMBEDDED_END = ')';
public static final char LIST_BEGIN = '[';
public static final char LIST_END = ']';
public static final char SET_BEGIN = '<';
public static final char SET_END = '>';
public static final char MAP_BEGIN = '{';
public static final char MAP_END = '}';
public static final char BINARY_BEGINEND = '_';
public static final char CUSTOM_TYPE = '^';
public static final char ENTRY_SEPARATOR = ':';
public static final char PARAMETER_NAMED = ':';
public static final char PARAMETER_POSITIONAL = '?';
public static final char[] PARAMETER_SEPARATOR = new char[] { ',' };
public static final char[] PARAMETER_EXT_SEPARATOR = new char[] { ' ', '.' };
public static final char[] DEFAULT_IGNORE_CHARS = new char[] { '\n', '\r', ' ' };
public static final char[] DEFAULT_FIELD_SEPARATOR = new char[] { ',', ' ' };
public static final char COLLECTION_SEPARATOR = ',';
public static final String LINKSET_PREFIX = "" + SET_BEGIN + LINK + CLASS_SEPARATOR;
public static Object fieldTypeFromStream(final ODocument iDocument, OType iType, final Object iValue) {
if (iValue == null)
return null;
if (iType == null)
iType = OType.EMBEDDED;
switch (iType) {
case STRING:
if (iValue instanceof String) {
final String s = (String) iValue;
return decode(s.substring(1, s.length() - 1));
}
return iValue.toString();
case INTEGER:
if (iValue instanceof Integer)
return iValue;
return new Integer(getStringContent(iValue));
case BOOLEAN:
if (iValue instanceof Boolean)
return iValue;
return new Boolean(getStringContent(iValue));
case DECIMAL:
if (iValue instanceof BigDecimal)
return iValue;
return new BigDecimal(getStringContent(iValue));
case FLOAT:
if (iValue instanceof Float)
return iValue;
return new Float(getStringContent(iValue));
case LONG:
if (iValue instanceof Long)
return iValue;
return new Long(getStringContent(iValue));
case DOUBLE:
if (iValue instanceof Double)
return iValue;
return new Double(getStringContent(iValue));
case SHORT:
if (iValue instanceof Short)
return iValue;
return new Short(getStringContent(iValue));
case BYTE:
if (iValue instanceof Byte)
return iValue;
return new Byte(getStringContent(iValue));
case BINARY:
return getBinaryContent(iValue);
case DATE:
case DATETIME:
if (iValue instanceof Date)
return iValue;
return new Date(Long.parseLong(getStringContent(iValue)));
case LINK:
if (iValue instanceof ORID)
return iValue.toString();
else if (iValue instanceof String)
return new ORecordId((String) iValue);
else
return ((ORecord<?>) iValue).getIdentity().toString();
case EMBEDDED:
// EMBEDDED
return OStringSerializerAnyStreamable.INSTANCE.fromStream((String) iValue);
case EMBEDDEDMAP:
// RECORD
final String value = (String) iValue;
return ORecordSerializerSchemaAware2CSV.INSTANCE.embeddedMapFromStream(iDocument, null, value, null);
}
throw new IllegalArgumentException("Type " + iType + " does not support converting value: " + iValue);
}
public static List<String> smartSplit(final String iSource, final char iRecordSeparator, final char... iJumpChars) {
return smartSplit(iSource, new char[] { iRecordSeparator }, 0, -1, false, true, false, iJumpChars);
}
public static List<String> smartSplit(final String iSource, final char iRecordSeparator, final boolean iConsiderSets,
final char... iJumpChars) {
return smartSplit(iSource, new char[] { iRecordSeparator }, 0, -1, false, true, iConsiderSets, iJumpChars);
}
public static List<String> smartSplit(final String iSource, final char[] iRecordSeparator, int beginIndex, final int endIndex,
final boolean iStringSeparatorExtended, boolean iConsiderBraces, boolean iConsiderSets, final char... iJumpChars) {
final StringBuilder buffer = new StringBuilder();
final ArrayList<String> parts = new ArrayList<String>();
if (iSource != null && !iSource.isEmpty()) {
while ((beginIndex = parse(iSource, buffer, beginIndex, endIndex, iRecordSeparator, iStringSeparatorExtended,
iConsiderBraces, iConsiderSets, iJumpChars)) > -1) {
parts.add(buffer.toString());
buffer.setLength(0);
}
if (buffer.length() > 0 || isCharPresent(iSource.charAt(iSource.length() - 1), iRecordSeparator))
parts.add(buffer.toString());
}
return parts;
}
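  /**
   * Consumes a single item from iSource into iBuffer, honoring nesting of embedded
   * parenthesis, lists, sets, maps, link parts and quoted strings. Returns the index
   * just past the separator that ended the item, or -1 once the input is exhausted.
   */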
public static int parse(final String iSource, final StringBuilder iBuffer, final int beginIndex, final int endIndex,
final char[] iSeparator, final boolean iStringSeparatorExtended, final boolean iConsiderBraces, final boolean iConsiderSets,
final char... iJumpChars) {
char stringBeginChar = ' ';
boolean encodeMode = false;
int insideParenthesis = 0;
int insideList = 0;
int insideSet = 0;
int insideMap = 0;
int insideLinkPart = 0;
final int max = endIndex > -1 ? endIndex + 1 : iSource.length();
final char[] buffer = new char[max - beginIndex];
iSource.getChars(beginIndex, max, buffer, 0);
iBuffer.ensureCapacity(max);
// JUMP FIRST CHARS
int i = 0;
for (; i < buffer.length; ++i) {
final char c = buffer[i];
if (!isCharPresent(c, iJumpChars))
break;
}
for (; i < buffer.length; ++i) {
final char c = buffer[i];
if (stringBeginChar == ' ') {
// OUTSIDE A STRING
if (iConsiderBraces)
if (c == LIST_BEGIN)
insideList++;
else if (c == LIST_END) {
if (!isCharPresent(c, iSeparator)) {
if (insideList == 0)
throw new OSerializationException("Found invalid " + LIST_END
+ " character. Ensure it is opened and closed correctly.");
insideList--;
}
} else if (c == EMBEDDED_BEGIN) {
insideParenthesis++;
} else if (c == EMBEDDED_END) {
// if (!isCharPresent(c, iRecordSeparator)) {
if (insideParenthesis == 0)
throw new OSerializationException("Found invalid " + EMBEDDED_END
+ " character. Ensure it is opened and closed correctly.");
// }
insideParenthesis--;
} else if (c == MAP_BEGIN) {
insideMap++;
} else if (c == MAP_END) {
if (!isCharPresent(c, iSeparator)) {
if (insideMap == 0)
throw new OSerializationException("Found invalid " + MAP_END
+ " character. Ensure it is opened and closed correctly.");
insideMap--;
}
} else if (c == LINK)
// FIRST PART OF LINK
insideLinkPart = 1;
else if (insideLinkPart == 1 && c == ORID.SEPARATOR)
// SECOND PART OF LINK
insideLinkPart = 2;
else if (iConsiderSets)
if (c == SET_BEGIN)
insideSet++;
else if (c == SET_END) {
if (!isCharPresent(c, iSeparator)) {
if (insideSet == 0)
throw new OSerializationException("Found invalid " + SET_END
+ " character. Ensure it is opened and closed correctly.");
insideSet--;
}
}
if (insideLinkPart > 0 && c != '-' && !Character.isDigit(c) && c != ORID.SEPARATOR && c != LINK)
insideLinkPart = 0;
if ((c == '"' || iStringSeparatorExtended && c == '\'') && !encodeMode) {
// START STRING
stringBeginChar = c;
}
if (insideParenthesis == 0 && insideList == 0 && insideSet == 0 && insideMap == 0 && insideLinkPart == 0) {
// OUTSIDE A PARAMS/COLLECTION/MAP
if (isCharPresent(c, iSeparator)) {
// SEPARATOR (OUTSIDE A STRING): PUSH
return beginIndex + i + 1;
}
}
if (iJumpChars.length > 0) {
if (isCharPresent(c, iJumpChars))
continue;
}
} else {
// INSIDE A STRING
if ((c == '"' || iStringSeparatorExtended && c == '\'') && !encodeMode) {
// CLOSE THE STRING ?
if (stringBeginChar == c) {
// SAME CHAR AS THE BEGIN OF THE STRING: CLOSE IT AND PUSH
stringBeginChar = ' ';
}
}
}
if (c == '\\' && !encodeMode) {
// ESCAPE CHARS
final char nextChar = buffer[i + 1];
if (nextChar == 'u') {
i = OStringParser.readUnicode(buffer, i + 2, iBuffer);
continue;
} else if (nextChar == 'n') {
iBuffer.append("\n");
i++;
continue;
} else if (nextChar == 'r') {
iBuffer.append("\r");
i++;
continue;
} else if (nextChar == 't') {
iBuffer.append("\t");
i++;
continue;
} else if (nextChar == 'f') {
iBuffer.append("\f");
i++;
continue;
} else
encodeMode = true;
} else
encodeMode = false;
if (c != '\\' && encodeMode) {
encodeMode = false;
}
iBuffer.append(c);
}
return -1;
}
public static boolean isCharPresent(final char iCharacter, final char[] iCharacters) {
final int len = iCharacters.length;
for (int i = 0; i < len; ++i) {
if (iCharacter == iCharacters[i]) {
return true;
}
}
return false;
}
public static List<String> split(final String iSource, final char iRecordSeparator, final char... iJumpCharacters) {
return split(iSource, 0, iSource.length(), iRecordSeparator, iJumpCharacters);
}
public static Collection<String> split(final Collection<String> iParts, final String iSource, final char iRecordSeparator,
final char... iJumpCharacters) {
return split(iParts, iSource, 0, iSource.length(), iRecordSeparator, iJumpCharacters);
}
public static List<String> split(final String iSource, final int iStartPosition, final int iEndPosition,
final char iRecordSeparator, final char... iJumpCharacters) {
    return (List<String>) split(new ArrayList<String>(), iSource, iStartPosition, iEndPosition, iRecordSeparator,
        iJumpCharacters);
}
public static Collection<String> split(final Collection<String> iParts, final String iSource, final int iStartPosition,
final int iEndPosition, final char iRecordSeparator, final char... iJumpCharacters) {
return split(iParts, iSource, iStartPosition, iEndPosition, String.valueOf(iRecordSeparator), iJumpCharacters);
}
public static Collection<String> split(final Collection<String> iParts, final String iSource, final int iStartPosition,
int iEndPosition, final String iRecordSeparators, final char... iJumpCharacters) {
if (iEndPosition == -1)
iEndPosition = iSource.length();
final StringBuilder buffer = new StringBuilder();
for (int i = iStartPosition; i < iEndPosition; ++i) {
char c = iSource.charAt(i);
if (iRecordSeparators.indexOf(c) > -1) {
iParts.add(buffer.toString());
buffer.setLength(0);
} else {
if (iJumpCharacters.length > 0 && buffer.length() == 0) {
// CHECK IF IT'S A CHAR TO JUMP
if (!isCharPresent(c, iJumpCharacters)) {
buffer.append(c);
}
} else
buffer.append(c);
}
}
if (iJumpCharacters.length > 0 && buffer.length() > 0) {
// CHECK THE END OF LAST ITEM IF NEED TO CUT THE CHARS TO JUMP
char b;
int newSize = 0;
boolean found;
for (int i = buffer.length() - 1; i >= 0; --i) {
b = buffer.charAt(i);
found = false;
for (char j : iJumpCharacters) {
if (j == b) {
found = true;
++newSize;
break;
}
}
if (!found)
break;
}
if (newSize > 0)
buffer.setLength(buffer.length() - newSize);
}
iParts.add(buffer.toString());
return iParts;
}
public static String joinIntArray(int[] iArray) {
final StringBuilder ids = new StringBuilder();
for (int id : iArray) {
if (ids.length() > 0)
ids.append(RECORD_SEPARATOR);
ids.append(id);
}
return ids.toString();
}
public static int[] splitIntArray(final String iInput) {
final List<String> items = split(iInput, RECORD_SEPARATOR);
final int[] values = new int[items.size()];
for (int i = 0; i < items.size(); ++i) {
values[i] = Integer.parseInt(items.get(i).trim());
}
return values;
}
public static boolean contains(final String iText, final char iSeparator) {
if (iText == null)
return false;
final int max = iText.length();
for (int i = 0; i < max; ++i) {
if (iText.charAt(i) == iSeparator)
return true;
}
return false;
}
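  /**
   * Extracts the items of a collection literal from iText into iCollection, unescaping
   * character escapes on the way, and returns the index of the closing delimiter, or -1
   * if the collection is absent or unbalanced.
   */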
public static int getCollection(final String iText, final int iStartPosition, final Collection<String> iCollection) {
return getCollection(iText, iStartPosition, iCollection, LIST_BEGIN, LIST_END, COLLECTION_SEPARATOR);
}
public static int getCollection(final String iText, final int iStartPosition, final Collection<String> iCollection,
final char iCollectionBegin, final char iCollectionEnd, final char iCollectionSeparator) {
final StringBuilder buffer = new StringBuilder();
int openPos = iText.indexOf(iCollectionBegin, iStartPosition);
if (openPos == -1)
return -1;
boolean escape = false;
int currentPos, deep;
int maxPos = iText.length() - 1;
for (currentPos = openPos + 1, deep = 1; deep > 0; currentPos++) {
if (currentPos > maxPos)
return -1;
char c = iText.charAt(currentPos);
if (buffer.length() == 0 && c == ' ')
continue;
if (c == iCollectionBegin) {
// BEGIN
buffer.append(c);
deep++;
} else if (c == iCollectionEnd) {
// END
if (deep > 1)
buffer.append(c);
deep--;
} else if (c == iCollectionSeparator) {
// SEPARATOR
if (deep > 1) {
buffer.append(c);
} else {
iCollection.add(buffer.toString().trim());
buffer.setLength(0);
}
} else {
// COLLECT
if (!escape && c == '\\' && (currentPos + 1 <= maxPos)) {
// ESCAPE CHARS
final char nextChar = iText.charAt(currentPos + 1);
if (nextChar == 'u') {
currentPos = OStringParser.readUnicode(iText, currentPos + 2, buffer);
} else if (nextChar == 'n') {
buffer.append("\n");
currentPos++;
} else if (nextChar == 'r') {
buffer.append("\r");
currentPos++;
} else if (nextChar == 't') {
buffer.append("\t");
currentPos++;
} else if (nextChar == 'f') {
buffer.append("\f");
currentPos++;
} else
escape = true;
continue;
}
buffer.append(c);
}
}
if (buffer.length() > 0)
iCollection.add(buffer.toString().trim());
return --currentPos;
}
public static int getParameters(final String iText, final int iBeginPosition, int iEndPosition, final List<String> iParameters) {
iParameters.clear();
final int openPos = iText.indexOf(EMBEDDED_BEGIN, iBeginPosition);
if (openPos == -1 || (iEndPosition > -1 && openPos > iEndPosition))
return iBeginPosition;
final StringBuilder buffer = new StringBuilder();
parse(iText, buffer, openPos, iEndPosition, PARAMETER_EXT_SEPARATOR, true, true, false);
if (buffer.length() == 0)
return iBeginPosition;
final String t = buffer.substring(1, buffer.length() - 1).trim();
final List<String> pars = smartSplit(t, PARAMETER_SEPARATOR, 0, -1, true, true, false);
for (int i = 0; i < pars.size(); ++i)
iParameters.add(pars.get(i).trim());
return iBeginPosition + buffer.length();
}
public static int getEmbedded(final String iText, final int iBeginPosition, int iEndPosition, final StringBuilder iEmbedded) {
final int openPos = iText.indexOf(EMBEDDED_BEGIN, iBeginPosition);
if (openPos == -1 || (iEndPosition > -1 && openPos > iEndPosition))
return iBeginPosition;
final StringBuilder buffer = new StringBuilder();
parse(iText, buffer, openPos, iEndPosition, PARAMETER_EXT_SEPARATOR, true, true, false);
if (buffer.length() == 0)
return iBeginPosition;
final String t = buffer.substring(1, buffer.length() - 1).trim();
iEmbedded.append(t);
return iBeginPosition + buffer.length();
}
public static List<String> getParameters(final String iText) {
final List<String> params = new ArrayList<String>();
try {
getParameters(iText, 0, -1, params);
} catch (Exception e) {
throw new OCommandSQLParsingException("Error on reading parameters in: " + iText);
}
return params;
}
public static Map<String, String> getMap(final String iText) {
int openPos = iText.indexOf(MAP_BEGIN);
if (openPos == -1)
return Collections.emptyMap();
int closePos = iText.indexOf(MAP_END, openPos + 1);
if (closePos == -1)
return Collections.emptyMap();
final List<String> entries = smartSplit(iText.substring(openPos + 1, closePos), COLLECTION_SEPARATOR);
if (entries.size() == 0)
return Collections.emptyMap();
Map<String, String> map = new HashMap<String, String>();
List<String> entry;
for (String item : entries) {
if (item != null && !item.isEmpty()) {
entry = OStringSerializerHelper.split(item, OStringSerializerHelper.ENTRY_SEPARATOR);
map.put((String) fieldTypeFromStream(null, OType.STRING, entry.get(0)), entry.get(1));
}
}
return map;
}
/**
* Transforms, only if needed, the source string escaping the characters \ and ".
*
* @param iText
* Input String
* @return Modified string if needed, otherwise the same input object
* @see OStringSerializerHelper#decode(String)
*/
  public static String encode(final String iText) {
    int pos = -1;
    final int textSize = iText.length();
    for (int i = 0; i < textSize; ++i) {
      final char c = iText.charAt(i);
      if (c == '"' || c == '\\') {
        pos = i;
        break;
      }
    }
    if (pos > -1) {
      // CHANGE THE INPUT STRING
      final StringBuilder buffer = new StringBuilder(textSize);
      char c;
      for (int i = 0; i < textSize; ++i) {
        c = iText.charAt(i);
        if (c == '"' || c == '\\')
          buffer.append('\\');
        buffer.append(c);
      }
      return buffer.toString();
    }
    return iText;
  }
/**
* Transforms, only if needed, the source string un-escaping the characters \ and ".
*
* @param iText
* Input String
* @return Modified string if needed, otherwise the same input object
* @see OStringSerializerHelper#encode(String)
*/
public static String decode(final String iText) {
int pos = -1;
final int textSize = iText.length();
for (int i = 0; i < textSize; ++i)
if (iText.charAt(i) == '"' || iText.charAt(i) == '\\') {
pos = i;
break;
}
if (pos == -1)
// NOT FOUND, RETURN THE SAME STRING (AVOID COPIES)
return iText;
// CHANGE THE INPUT STRING
final StringBuilder buffer = new StringBuilder(textSize);
buffer.append(iText.substring(0, pos));
boolean escaped = false;
for (int i = pos; i < textSize; ++i) {
final char c = iText.charAt(i);
if (escaped)
escaped = false;
else if (c == '\\') {
escaped = true;
continue;
}
buffer.append(c);
}
return buffer.toString();
}
public static OClass getRecordClassName(final String iValue, OClass iLinkedClass) {
// EXTRACT THE CLASS NAME
final int classSeparatorPos = OStringParser.indexOfOutsideStrings(iValue, OStringSerializerHelper.CLASS_SEPARATOR.charAt(0), 0,
-1);
if (classSeparatorPos > -1) {
final String className = iValue.substring(0, classSeparatorPos);
final ODatabaseRecord database = ODatabaseRecordThreadLocal.INSTANCE.get();
if (className != null && database != null)
iLinkedClass = database.getMetadata().getSchema().getClass(className);
}
return iLinkedClass;
}
public static String getStringContent(final Object iValue) {
// MOVED
return OIOUtils.getStringContent(iValue);
}
/**
* Returns the binary representation of a content. If it's a String a Base64 decoding is applied.
*/
public static byte[] getBinaryContent(final Object iValue) {
if (iValue == null)
return null;
else if (iValue instanceof OBinary)
return ((OBinary) iValue).toByteArray();
else if (iValue instanceof byte[])
return (byte[]) iValue;
else if (iValue instanceof String) {
String s = (String) iValue;
      if (s.length() > 1
          && (s.charAt(0) == BINARY_BEGINEND && s.charAt(s.length() - 1) == BINARY_BEGINEND
              || s.charAt(0) == '\'' && s.charAt(s.length() - 1) == '\''))
// @COMPATIBILITY 1.0rc7-SNAPSHOT ' TO SUPPORT OLD DATABASES
s = s.substring(1, s.length() - 1);
      // IN CASE OF JSON BINARY IMPORT THIS EXCEPTION IS WRONG
// else
// throw new IllegalArgumentException("Not binary type: " + iValue);
return OBase64Utils.decode(s);
} else
throw new IllegalArgumentException("Cannot parse binary as the same type as the value (class=" + iValue.getClass().getName()
+ "): " + iValue);
}
/**
* Checks if a string contains alphanumeric only characters.
*
* @param iContent
* String to check
* @return true is all the content is alphanumeric, otherwise false
*/
public static boolean isAlphanumeric(final String iContent) {
final int tot = iContent.length();
for (int i = 0; i < tot; ++i) {
if (!Character.isLetterOrDigit(iContent.charAt(i)))
return false;
}
return true;
}
public static String removeQuotationMarks(final String iValue) {
if (iValue != null
&& iValue.length() > 1
&& (iValue.charAt(0) == '\'' && iValue.charAt(iValue.length() - 1) == '\'' || iValue.charAt(0) == '"'
&& iValue.charAt(iValue.length() - 1) == '"'))
return iValue.substring(1, iValue.length() - 1);
return iValue;
}
public static boolean startsWithIgnoreCase(final String iFirst, final String iSecond) {
if (iFirst == null)
throw new IllegalArgumentException("Origin string to compare is null");
if (iSecond == null)
throw new IllegalArgumentException("String to match is null");
final int iSecondLength = iSecond.length();
if (iSecondLength > iFirst.length())
return false;
for (int i = 0; i < iSecondLength; ++i) {
if (Character.toUpperCase(iFirst.charAt(i)) != Character.toUpperCase(iSecond.charAt(i)))
return false;
}
return true;
}
public static int indexOf(final String iSource, final int iBegin, char... iChars) {
if (iChars.length == 1)
// ONE CHAR: USE JAVA INDEXOF
return iSource.indexOf(iChars[0], iBegin);
final int len = iSource.length();
for (int i = iBegin; i < len; ++i) {
for (int k = 0; k < iChars.length; ++k) {
final char c = iSource.charAt(i);
if (c == iChars[k])
return i;
}
}
return -1;
}
/**
* Finds the end of a block delimited by 2 chars.
*/
public static final int findEndBlock(final String iOrigin, final char iBeginChar, final char iEndChar, final int iBeginOffset) {
int inc = 0;
for (int i = iBeginOffset; i < iOrigin.length(); i++) {
char c = iOrigin.charAt(i);
if (c == '\'') {
// skip to text end
int tend = i;
while (true) {
tend = iOrigin.indexOf('\'', tend + 1);
if (tend < 0) {
throw new OCommandSQLParsingException("Could not find end of text area.", iOrigin, i);
}
if (iOrigin.charAt(tend - 1) == '\\') {
// inner quote, skip it
continue;
} else {
break;
}
}
i = tend;
continue;
}
if (c != iBeginChar && c != iEndChar)
continue;
if (c == iBeginChar) {
inc++;
} else if (c == iEndChar) {
inc--;
if (inc == 0) {
return i;
}
}
}
return -1;
}
public static int getLowerIndexOf(final String iText, final int iBeginOffset, final String... iToSearch) {
int lowest = -1;
for (String toSearch : iToSearch) {
int index = iText.indexOf(toSearch, iBeginOffset);
if (index > -1 && (lowest == -1 || index < lowest))
lowest = index;
}
return lowest;
}
  public static int getHigherIndexOf(final String iText, final int iBeginOffset, final String... iToSearch) {
    int highest = -1;
    for (String toSearch : iToSearch) {
      int index = iText.indexOf(toSearch, iBeginOffset);
      if (index > -1 && (highest == -1 || index > highest))
        highest = index;
    }
    return highest;
  }
}
| 1no label
|
core_src_main_java_com_orientechnologies_orient_core_serialization_serializer_OStringSerializerHelper.java
|
87 |
public class ODFACommandStream implements OCommandStream {
public static final int BUFFER_SIZE = 1024;
private Reader reader;
private CharBuffer buffer;
private final Set<Character> separators = new HashSet<Character>(Arrays.asList(';', '\n'));
private int position;
private int start;
private int end;
private StringBuilder partialResult;
private State state;
public ODFACommandStream(String commands) {
reader = new StringReader(commands);
init();
}
public ODFACommandStream(File file) throws FileNotFoundException {
reader = new BufferedReader(new FileReader(file));
init();
}
private void init() {
buffer = CharBuffer.allocate(BUFFER_SIZE);
buffer.flip();
}
@Override
public boolean hasNext() {
try {
fillBuffer();
return buffer.hasRemaining();
} catch (IOException e) {
throw new RuntimeException(e);
}
}
private void fillBuffer() throws IOException {
if (!buffer.hasRemaining()) {
buffer.clear();
reader.read(buffer);
buffer.flip();
}
}
@Override
public String nextCommand() {
try {
fillBuffer();
partialResult = new StringBuilder();
state = State.S;
start = 0;
end = -1;
position = 0;
Symbol s = null;
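      // Run the DFA until it accepts (state E); 'start'/'end' track the command
      // boundaries relative to the buffer's current position.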
while (state != State.E) {
s = nextSymbol();
final State newState = transition(state, s);
if (state == State.S && newState != State.S)
start = position;
if (newState == State.A)
end = position;
if (newState == State.F)
throw new IllegalStateException("Unexpected end of file");
state = newState;
position++;
}
if (s == Symbol.EOF) {
position--;
if (end == -1) {
start = 0;
end = 0;
}
}
final String result;
if (partialResult.length() > 0) {
if (end > 0) {
result = partialResult.append(buffer.subSequence(start, end + 1).toString()).toString();
} else {
partialResult.setLength(partialResult.length() + end + 1);
result = partialResult.toString();
}
} else {
result = buffer.subSequence(start, end + 1).toString();
}
buffer.position(buffer.position() + position);
return result;
} catch (IOException e) {
throw new RuntimeException(e);
}
}
private Symbol nextSymbol() throws IOException {
Symbol s;
if (buffer.position() + position < buffer.limit()) {
s = symbol(buffer.charAt(position));
} else {
buffer.compact();
int read = reader.read(buffer);
buffer.flip();
if (read == 0) {
// There is something in source, but buffer is full
if (state != State.S)
partialResult.append(buffer.subSequence(start, position).toString());
start = 0;
end = end - position;
buffer.clear();
read = reader.read(buffer);
buffer.flip();
position = 0;
}
if (read == -1) {
s = Symbol.EOF;
} else {
s = symbol(buffer.charAt(position));
}
}
return s;
}
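  // DFA transition table. States: S = skipping leading whitespace/separators,
  // A = inside command text, B = inside a single-quoted string, C = inside a
  // double-quoted string, D = whitespace after command text, E = command accepted,
  // F = unexpected end of input (EOF inside a quoted string).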
private State transition(State s, Symbol c) {
switch (s) {
case S:
switch (c) {
case LATTER:
return State.A;
case WS:
return State.S;
case AP:
return State.B;
case QT:
return State.C;
case SEP:
return State.S;
case EOF:
return State.E;
}
break;
case A:
case D:
switch (c) {
case LATTER:
return State.A;
case WS:
return State.D;
case AP:
return State.B;
case QT:
return State.C;
case SEP:
return State.E;
case EOF:
return State.E;
}
break;
case B:
switch (c) {
case LATTER:
return State.B;
case WS:
return State.B;
case AP:
return State.A;
case QT:
return State.B;
case SEP:
return State.B;
case EOF:
return State.F;
}
break;
case C:
switch (c) {
case LATTER:
return State.C;
case WS:
return State.C;
case AP:
return State.C;
case QT:
return State.A;
case SEP:
return State.C;
case EOF:
return State.F;
}
break;
case E:
return State.E;
case F:
return State.F;
}
throw new IllegalStateException();
}
@Override
public void close() {
try {
reader.close();
} catch (IOException e) {
throw new RuntimeException(e);
}
}
public Symbol symbol(Character c) {
if (c.equals('\''))
return Symbol.AP;
if (c.equals('"'))
return Symbol.QT;
if (separators.contains(c))
return Symbol.SEP;
if (Character.isWhitespace(c))
return Symbol.WS;
return Symbol.LATTER;
}
private enum State {
S, A, B, C, D, E, F
}
private enum Symbol {
LATTER, WS, QT, AP, SEP, EOF
}
}
| 1no label
|
commons_src_main_java_com_orientechnologies_common_console_ODFACommandStream.java
|
1,410 |
Job registerCeylonModules = new Job("Load the Ceylon Metamodel for plugin dependencies") {
protected IStatus run(IProgressMonitor monitor) {
Activator.loadBundleAsModule(bundleContext.getBundle());
return Status.OK_STATUS;
};
};
| 1no label
|
plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_ui_CeylonPlugin.java
|
629 |
private class LifecycleCountingListener implements LifecycleListener {
Map<LifecycleState, AtomicInteger> counter = new ConcurrentHashMap<LifecycleState, AtomicInteger>();
BlockingQueue<LifecycleState> eventQueue = new LinkedBlockingQueue<LifecycleState>();
LifecycleCountingListener() {
for (LifecycleEvent.LifecycleState state : LifecycleEvent.LifecycleState.values()) {
counter.put(state, new AtomicInteger(0));
}
}
public void stateChanged(LifecycleEvent event) {
counter.get(event.getState()).incrementAndGet();
eventQueue.offer(event.getState());
}
int getCount(LifecycleEvent.LifecycleState state) {
return counter.get(state).get();
}
boolean waitFor(LifecycleEvent.LifecycleState state, int seconds) {
long remainingMillis = TimeUnit.SECONDS.toMillis(seconds);
while (remainingMillis >= 0) {
LifecycleEvent.LifecycleState received = null;
try {
long now = Clock.currentTimeMillis();
received = eventQueue.poll(remainingMillis, TimeUnit.MILLISECONDS);
remainingMillis -= (Clock.currentTimeMillis() - now);
} catch (InterruptedException e) {
return false;
}
if (received != null && received == state) {
return true;
}
}
return false;
}
}
| 0true
|
hazelcast_src_test_java_com_hazelcast_cluster_SplitBrainHandlerTest.java
|
541 |
public class PrepareTransactionRequest extends BaseTransactionRequest {
public PrepareTransactionRequest() {
}
@Override
protected Object innerCall() throws Exception {
ClientEndpoint endpoint = getEndpoint();
TransactionContext transactionContext = endpoint.getTransactionContext(txnId);
Transaction transaction = TransactionAccessor.getTransaction(transactionContext);
transaction.prepare();
return null;
}
@Override
public String getServiceName() {
return ClientEngineImpl.SERVICE_NAME;
}
@Override
public int getFactoryId() {
return ClientTxnPortableHook.F_ID;
}
@Override
public int getClassId() {
return ClientTxnPortableHook.PREPARE;
}
@Override
public Permission getRequiredPermission() {
return new TransactionPermission();
}
}
| 0true
|
hazelcast_src_main_java_com_hazelcast_client_txn_PrepareTransactionRequest.java
|
1,044 |
public class MultiTermVectorsResponse extends ActionResponse implements Iterable<MultiTermVectorsItemResponse>, ToXContent {
/**
* Represents a failure.
*/
public static class Failure implements Streamable {
private String index;
private String type;
private String id;
private String message;
Failure() {
}
public Failure(String index, String type, String id, String message) {
this.index = index;
this.type = type;
this.id = id;
this.message = message;
}
/**
* The index name of the action.
*/
public String getIndex() {
return this.index;
}
/**
* The type of the action.
*/
public String getType() {
return type;
}
/**
* The id of the action.
*/
public String getId() {
return id;
}
/**
* The failure message.
*/
public String getMessage() {
return this.message;
}
public static Failure readFailure(StreamInput in) throws IOException {
Failure failure = new Failure();
failure.readFrom(in);
return failure;
}
@Override
public void readFrom(StreamInput in) throws IOException {
index = in.readString();
type = in.readOptionalString();
id = in.readString();
message = in.readString();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(index);
out.writeOptionalString(type);
out.writeString(id);
out.writeString(message);
}
}
private MultiTermVectorsItemResponse[] responses;
MultiTermVectorsResponse() {
}
public MultiTermVectorsResponse(MultiTermVectorsItemResponse[] responses) {
this.responses = responses;
}
public MultiTermVectorsItemResponse[] getResponses() {
return this.responses;
}
@Override
public Iterator<MultiTermVectorsItemResponse> iterator() {
return Iterators.forArray(responses);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.startArray(Fields.DOCS);
for (MultiTermVectorsItemResponse response : responses) {
if (response.isFailed()) {
builder.startObject();
Failure failure = response.getFailure();
builder.field(Fields._INDEX, failure.getIndex());
builder.field(Fields._TYPE, failure.getType());
builder.field(Fields._ID, failure.getId());
builder.field(Fields.ERROR, failure.getMessage());
builder.endObject();
} else {
TermVectorResponse getResponse = response.getResponse();
getResponse.toXContent(builder, params);
}
}
builder.endArray();
builder.endObject();
return builder;
}
static final class Fields {
static final XContentBuilderString DOCS = new XContentBuilderString("docs");
static final XContentBuilderString _INDEX = new XContentBuilderString("_index");
static final XContentBuilderString _TYPE = new XContentBuilderString("_type");
static final XContentBuilderString _ID = new XContentBuilderString("_id");
static final XContentBuilderString ERROR = new XContentBuilderString("error");
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
responses = new MultiTermVectorsItemResponse[in.readVInt()];
for (int i = 0; i < responses.length; i++) {
responses[i] = MultiTermVectorsItemResponse.readItemResponse(in);
}
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeVInt(responses.length);
for (MultiTermVectorsItemResponse response : responses) {
response.writeTo(out);
}
}
}
| 0true
|
src_main_java_org_elasticsearch_action_termvector_MultiTermVectorsResponse.java
|
674 |
public class GetWarmersAction extends IndicesAction<GetWarmersRequest, GetWarmersResponse, GetWarmersRequestBuilder> {
public static final GetWarmersAction INSTANCE = new GetWarmersAction();
public static final String NAME = "warmers/get";
private GetWarmersAction() {
super(NAME);
}
@Override
public GetWarmersRequestBuilder newRequestBuilder(IndicesAdminClient client) {
return new GetWarmersRequestBuilder((InternalGenericClient) client);
}
@Override
public GetWarmersResponse newResponse() {
return new GetWarmersResponse();
}
}
| 0true
|
src_main_java_org_elasticsearch_action_admin_indices_warmer_get_GetWarmersAction.java
|
74 |
public class CeylonCorrectionProcessor extends QuickAssistAssistant
implements IQuickAssistProcessor {
private CeylonEditor editor; //may only be used for quick assists!!!
private Tree.CompilationUnit model;
private IFile file; //may only be used for markers!
public CeylonCorrectionProcessor(CeylonEditor editor) {
this.editor = editor;
setQuickAssistProcessor(this);
}
public CeylonCorrectionProcessor(IMarker marker) {
IFileEditorInput input = MarkerUtils.getInput(marker);
if (input!=null) {
file = input.getFile();
IProject project = file.getProject();
IJavaProject javaProject = JavaCore.create(project);
TypeChecker tc = getProjectTypeChecker(project);
if (tc!=null) {
try {
for (IPackageFragmentRoot pfr: javaProject.getPackageFragmentRoots()) {
if (pfr.getPath().isPrefixOf(file.getFullPath())) {
IPath relPath = file.getFullPath().makeRelativeTo(pfr.getPath());
model = tc.getPhasedUnitFromRelativePath(relPath.toString())
.getCompilationUnit();
}
}
}
catch (JavaModelException e) {
e.printStackTrace();
}
}
}
setQuickAssistProcessor(this);
}
private IFile getFile() {
if (editor!=null &&
editor.getEditorInput() instanceof FileEditorInput) {
FileEditorInput input = (FileEditorInput) editor.getEditorInput();
if (input!=null) {
return input.getFile();
}
}
return file;
}
private Tree.CompilationUnit getRootNode() {
if (editor!=null) {
return editor.getParseController().getRootNode();
}
else if (model!=null) {
            return model;
}
else {
return null;
}
}
@Override
public String getErrorMessage() {
return null;
}
private void collectProposals(IQuickAssistInvocationContext context,
IAnnotationModel model, Collection<Annotation> annotations,
boolean addQuickFixes, boolean addQuickAssists,
Collection<ICompletionProposal> proposals) {
ArrayList<ProblemLocation> problems = new ArrayList<ProblemLocation>();
// collect problem locations and corrections from marker annotations
for (Annotation curr: annotations) {
if (curr instanceof CeylonAnnotation) {
ProblemLocation problemLocation =
getProblemLocation((CeylonAnnotation) curr, model);
if (problemLocation != null) {
problems.add(problemLocation);
}
}
}
if (problems.isEmpty() && addQuickFixes) {
for (Annotation curr: annotations) {
if (curr instanceof SimpleMarkerAnnotation) {
collectMarkerProposals((SimpleMarkerAnnotation) curr, proposals);
}
}
}
ProblemLocation[] problemLocations =
problems.toArray(new ProblemLocation[problems.size()]);
Arrays.sort(problemLocations);
if (addQuickFixes) {
collectCorrections(context, problemLocations, proposals);
}
if (addQuickAssists) {
collectAssists(context, problemLocations, proposals);
}
}
private static ProblemLocation getProblemLocation(CeylonAnnotation annotation,
IAnnotationModel model) {
int problemId = annotation.getId();
if (problemId != -1) {
Position pos = model.getPosition((Annotation) annotation);
if (pos != null) {
return new ProblemLocation(pos.getOffset(), pos.getLength(),
annotation); // java problems all handled by the quick assist processors
}
}
return null;
}
private void collectAssists(IQuickAssistInvocationContext context,
ProblemLocation[] locations, Collection<ICompletionProposal> proposals) {
if (proposals.isEmpty()) {
addProposals(context, editor, proposals);
}
}
private static void collectMarkerProposals(SimpleMarkerAnnotation annotation,
Collection<ICompletionProposal> proposals) {
IMarker marker = annotation.getMarker();
IMarkerResolution[] res = IDE.getMarkerHelpRegistry().getResolutions(marker);
if (res.length > 0) {
for (int i = 0; i < res.length; i++) {
proposals.add(new CeylonMarkerResolutionProposal(res[i], marker));
}
}
}
@Override
public ICompletionProposal[] computeQuickAssistProposals(IQuickAssistInvocationContext context) {
ArrayList<ICompletionProposal> proposals = new ArrayList<ICompletionProposal>();
ISourceViewer viewer = context.getSourceViewer();
List<Annotation> annotations =
getAnnotationsForLine(viewer, getLine(context, viewer));
collectProposals(context, viewer.getAnnotationModel(),
annotations, true, true, proposals);
return proposals.toArray(new ICompletionProposal[proposals.size()]);
}
private int getLine(IQuickAssistInvocationContext context, ISourceViewer viewer) {
try {
return viewer.getDocument().getLineOfOffset(context.getOffset());
}
catch (BadLocationException e) {
e.printStackTrace();
return 0;
}
}
public void collectCorrections(IQuickAssistInvocationContext context,
ProblemLocation location, Collection<ICompletionProposal> proposals) {
Tree.CompilationUnit rootNode = getRootNode();
if (rootNode!=null) {
addProposals(context, location, getFile(),
rootNode, proposals);
}
}
private void collectCorrections(IQuickAssistInvocationContext context,
ProblemLocation[] locations, Collection<ICompletionProposal> proposals) {
ISourceViewer viewer = context.getSourceViewer();
Tree.CompilationUnit rootNode = getRootNode();
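        // Prefer fixes for the problem nearest before the current selection; if none
        // yields proposals, fall back to the first fixable problem in the file.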
for (int i=locations.length-1; i>=0; i--) {
ProblemLocation loc = locations[i];
if (loc.getOffset()<=viewer.getSelectedRange().x) {
for (int j=i; j>=0; j--) {
ProblemLocation location = locations[j];
if (location.getOffset()!=loc.getOffset()) {
break;
}
addProposals(context, location, getFile(),
rootNode, proposals);
}
if (!proposals.isEmpty()) {
viewer.setSelectedRange(loc.getOffset(),
loc.getLength());
return;
}
}
}
for (int i=0; i<locations.length; i++) {
ProblemLocation loc = locations[i];
for (int j=i; j<locations.length; j++) {
ProblemLocation location = locations[j];
if (location.getOffset()!=loc.getOffset()) break;
addProposals(context, location, getFile(),
rootNode, proposals);
}
if (!proposals.isEmpty()) {
viewer.setSelectedRange(loc.getOffset(),
loc.getLength());
return;
}
}
}
public static boolean canFix(IMarker marker) {
try {
if (marker.getType().equals(PROBLEM_MARKER_ID)) {
return marker.getAttribute(MarkerCreator.ERROR_CODE_KEY,0)>0;
}
else {
return false;
}
}
catch (CoreException e) {
return false;
}
}
@Override
public boolean canFix(Annotation annotation) {
if (annotation instanceof CeylonAnnotation) {
return ((CeylonAnnotation) annotation).getId()>0;
}
else if (annotation instanceof MarkerAnnotation) {
return canFix(((MarkerAnnotation) annotation).getMarker());
}
else {
return false;
}
}
@Override
public boolean canAssist(IQuickAssistInvocationContext context) {
//oops, all this is totally useless, because
//this method never gets called :-/
/*Tree.CompilationUnit cu = (CompilationUnit) context.getModel()
.getAST(new NullMessageHandler(), new NullProgressMonitor());
return CeylonSourcePositionLocator.findNode(cu, context.getOffset(),
context.getOffset()+context.getLength()) instanceof Tree.Term;*/
return true;
}
private void addProposals(IQuickAssistInvocationContext context,
ProblemLocation problem, IFile file,
Tree.CompilationUnit rootNode,
Collection<ICompletionProposal> proposals) {
if (file==null) return;
IProject project = file.getProject();
TypeChecker tc = getProjectTypeChecker(project);
int offset = problem.getOffset();
Node node = Nodes.findNode(rootNode, offset,
offset + problem.getLength());
switch ( problem.getProblemId() ) {
case 100:
addDeclareLocalProposal(rootNode, node, proposals, file, editor);
//fall through:
case 102:
if (tc!=null) {
addImportProposals(rootNode, node, proposals, file);
}
addCreateEnumProposal(rootNode, node, problem, proposals,
project, tc, file);
addCreationProposals(rootNode, node, problem, proposals,
project, tc, file);
if (tc!=null) {
addChangeReferenceProposals(rootNode, node, problem, proposals, file);
}
break;
case 101:
addCreateParameterProposals(rootNode, node, problem, proposals,
project, tc, file);
if (tc!=null) {
addChangeReferenceProposals(rootNode, node, problem, proposals, file);
}
break;
case 200:
addSpecifyTypeProposal(rootNode, node, proposals, null);
break;
case 300:
addRefineFormalMembersProposal(proposals, node, rootNode, false);
addMakeAbstractDecProposal(proposals, project, node);
break;
case 350:
addRefineFormalMembersProposal(proposals, node, rootNode, true);
addMakeAbstractDecProposal(proposals, project, node);
break;
case 310:
addMakeAbstractDecProposal(proposals, project, node);
break;
case 320:
addRemoveAnnotationProposal(node, "formal", proposals, project);
break;
case 400:
addMakeSharedProposal(proposals, project, node);
break;
case 500:
case 510:
addMakeDefaultProposal(proposals, project, node);
break;
case 600:
addMakeActualDecProposal(proposals, project, node);
break;
case 701:
addMakeSharedDecProposal(proposals, project, node);
addRemoveAnnotationDecProposal(proposals, "actual", project, node);
break;
case 702:
addMakeSharedDecProposal(proposals, project, node);
addRemoveAnnotationDecProposal(proposals, "formal", project, node);
break;
case 703:
addMakeSharedDecProposal(proposals, project, node);
addRemoveAnnotationDecProposal(proposals, "default", project, node);
break;
case 710:
case 711:
addMakeSharedProposal(proposals, project, node);
break;
case 712:
addExportModuleImportProposal(proposals, project, node);
break;
case 713:
addMakeSharedProposalForSupertypes(proposals, project, node);
break;
case 714:
addExportModuleImportProposalForSupertypes(proposals, project, node, rootNode);
break;
        case 800:
        case 803:
        case 804:
            addMakeVariableProposal(proposals, project, node);
            break;
case 801:
addMakeVariableDecProposal(proposals, project, rootNode, node);
break;
case 802:
break;
case 905:
addMakeContainerAbstractProposal(proposals, project, node);
break;
case 1100:
addMakeContainerAbstractProposal(proposals, project, node);
addRemoveAnnotationDecProposal(proposals, "formal", project, node);
break;
case 1101:
addRemoveAnnotationDecProposal(proposals, "formal", project, node);
//TODO: replace body with ;
break;
case 1000:
addAddParenthesesProposal(problem, file, proposals, node);
addChangeDeclarationProposal(problem, file, proposals, node);
break;
case 1050:
addFixAliasProposal(proposals, file, problem);
break;
case 1200:
case 1201:
addRemoveAnnotationDecProposal(proposals, "shared", project, node);
break;
case 1300:
case 1301:
addMakeRefinedSharedProposal(proposals, project, node);
addRemoveAnnotationDecProposal(proposals, "actual", project, node);
break;
case 1302:
case 1312:
case 1307:
addRemoveAnnotationDecProposal(proposals, "formal", project, node);
break;
case 1303:
case 1313:
addRemoveAnnotationDecProposal(proposals, "default", project, node);
break;
case 1400:
case 1401:
addMakeFormalDecProposal(proposals, project, node);
break;
case 1450:
addMakeFormalDecProposal(proposals, project, node);
addParameterProposals(proposals, file, rootNode, node, null);
addInitializerProposals(proposals, file, rootNode, node);
break;
case 1500:
addRemoveAnnotationDecProposal(proposals, "variable", project, node);
break;
case 1600:
addRemoveAnnotationDecProposal(proposals, "abstract", project, node);
break;
case 2000:
addCreateParameterProposals(rootNode, node, problem, proposals,
project, tc, file);
break;
case 2100:
addChangeTypeProposals(rootNode, node, problem, proposals, project);
addSatisfiesProposals(rootNode, node, proposals, project);
break;
case 2102:
addChangeTypeArgProposals(rootNode, node, problem, proposals, project);
addSatisfiesProposals(rootNode, node, proposals, project);
break;
case 2101:
addEllipsisToSequenceParameterProposal(rootNode, node, proposals, file);
break;
case 3000:
addAssignToLocalProposal(rootNode, proposals, node, offset);
addAssignToForProposal(rootNode, proposals, node, offset);
addAssignToIfExistsProposal(rootNode, proposals, node, offset);
addAssignToIfNonemptyProposal(rootNode, proposals, node, offset);
addAssignToTryProposal(rootNode, proposals, node, offset);
addAssignToIfIsProposal(rootNode, proposals, node, offset);
addPrintProposal(rootNode, proposals, node, offset);
break;
case 3100:
addShadowReferenceProposal(file, rootNode, proposals, node);
break;
case 3101:
case 3102:
addShadowSwitchReferenceProposal(file, rootNode, proposals, node);
break;
case 5001:
case 5002:
addChangeIdentifierCaseProposal(node, proposals, file);
break;
case 6000:
addFixMultilineStringIndentation(proposals, file, rootNode, node);
break;
case 7000:
addModuleImportProposals(proposals, project, tc, node);
break;
case 8000:
addRenameDescriptorProposal(rootNode, context, problem, proposals, file);
//TODO: figure out some other way to get a Shell!
if (context.getSourceViewer()!=null) {
addMoveDirProposal(file, rootNode, project, proposals,
context.getSourceViewer().getTextWidget().getShell());
}
break;
case 9000:
addChangeRefiningTypeProposal(file, rootNode, proposals, node);
break;
case 9100:
case 9200:
addChangeRefiningParametersProposal(file, rootNode, proposals, node);
break;
}
}
private void addProposals(IQuickAssistInvocationContext context,
CeylonEditor editor, Collection<ICompletionProposal> proposals) {
if (editor==null) return;
IDocument doc = context.getSourceViewer().getDocument();
IProject project = EditorUtil.getProject(editor.getEditorInput());
IFile file = EditorUtil.getFile(editor.getEditorInput());
Tree.CompilationUnit rootNode = editor.getParseController().getRootNode();
if (rootNode!=null) {
Node node = Nodes.findNode(rootNode, context.getOffset(),
context.getOffset() + context.getLength());
int currentOffset = editor.getSelection().getOffset();
RenameProposal.add(proposals, editor);
InlineDeclarationProposal.add(proposals, editor);
ChangeParametersProposal.add(proposals, editor);
ExtractValueProposal.add(proposals, editor, node);
ExtractFunctionProposal.add(proposals, editor, node);
ExtractParameterProposal.add(proposals, editor, node);
CollectParametersProposal.add(proposals, editor);
MoveOutProposal.add(proposals, editor, node);
MakeReceiverProposal.add(proposals, editor, node);
InvertBooleanProposal.add(proposals, editor);
addAssignToLocalProposal(rootNode, proposals, node, currentOffset);
addAssignToForProposal(rootNode, proposals, node, currentOffset);
addAssignToIfExistsProposal(rootNode, proposals, node, currentOffset);
addAssignToIfNonemptyProposal(rootNode, proposals, node, currentOffset);
addAssignToTryProposal(rootNode, proposals, node, currentOffset);
addAssignToIfIsProposal(rootNode, proposals, node, currentOffset);
addPrintProposal(rootNode, proposals, node, currentOffset);
addConvertToNamedArgumentsProposal(proposals, file, rootNode,
editor, currentOffset);
addConvertToPositionalArgumentsProposal(proposals, file, rootNode,
editor, currentOffset);
Tree.Statement statement = findStatement(rootNode, node);
Tree.Declaration declaration = findDeclaration(rootNode, node);
Tree.NamedArgument argument = findArgument(rootNode, node);
Tree.ImportMemberOrType imp = findImport(rootNode, node);
addVerboseRefinementProposal(proposals, file, statement, rootNode);
addAnnotationProposals(proposals, project, declaration,
doc, currentOffset);
addTypingProposals(proposals, file, rootNode, node, declaration, editor);
addAnonymousFunctionProposals(editor, proposals, doc, file, rootNode,
currentOffset);
addDeclarationProposals(editor, proposals, doc, file, rootNode,
declaration, currentOffset);
addConvertToClassProposal(proposals, declaration, editor);
addAssertExistsDeclarationProposals(proposals, doc, file, rootNode, declaration);
addSplitDeclarationProposals(proposals, doc, file, rootNode, declaration);
addJoinDeclarationProposal(proposals, rootNode, statement, file);
addParameterProposals(proposals, file, rootNode, declaration, editor);
addArgumentProposals(proposals, doc, file, argument);
addUseAliasProposal(imp, proposals, editor);
addRenameAliasProposal(imp, proposals, editor);
addRemoveAliasProposal(imp, proposals, file, editor);
addRenameVersionProposals(node, proposals, rootNode, editor);
addConvertToIfElseProposal(doc, proposals, file, statement);
addConvertToThenElseProposal(rootNode, doc, proposals, file, statement);
addReverseIfElseProposal(doc, proposals, file, statement, rootNode);
addConvertGetterToMethodProposal(proposals, editor, file, statement);
addConvertMethodToGetterProposal(proposals, editor, file, statement);
addThrowsAnnotationProposal(proposals, statement, rootNode, file, doc);
MoveToNewUnitProposal.add(proposals, editor);
MoveToUnitProposal.add(proposals, editor);
addRefineFormalMembersProposal(proposals, node, rootNode, false);
addConvertToVerbatimProposal(proposals, file, rootNode, node, doc);
addConvertFromVerbatimProposal(proposals, file, rootNode, node, doc);
addConvertToConcatenationProposal(proposals, file, rootNode, node, doc);
addConvertToInterpolationProposal(proposals, file, rootNode, node, doc);
addExpandTypeProposal(editor, statement, file, doc, proposals);
}
}
private void addAnnotationProposals(Collection<ICompletionProposal> proposals,
IProject project, Tree.Declaration decNode, IDocument doc, int offset) {
if (decNode!=null) {
try {
Node in = Nodes.getIdentifyingNode(decNode);
if (in==null ||
doc.getLineOfOffset(in.getStartIndex())!=
doc.getLineOfOffset(offset)) {
return;
}
}
catch (BadLocationException e) {
e.printStackTrace();
}
Declaration d = decNode.getDeclarationModel();
if (d!=null) {
if (decNode instanceof Tree.AttributeDeclaration) {
addMakeVariableDecProposal(proposals, project, decNode);
}
if ((d.isClassOrInterfaceMember()||d.isToplevel()) &&
!d.isShared()) {
addMakeSharedDecProposal(proposals, project, decNode);
}
if (d.isClassOrInterfaceMember() &&
!d.isDefault() && !d.isFormal()) {
if (decNode instanceof Tree.AnyClass) {
addMakeDefaultDecProposal(proposals, project, decNode);
}
else if (decNode instanceof Tree.AnyAttribute) {
addMakeDefaultDecProposal(proposals, project, decNode);
}
else if (decNode instanceof Tree.AnyMethod) {
addMakeDefaultDecProposal(proposals, project, decNode);
}
if (decNode instanceof Tree.ClassDefinition) {
addMakeFormalDecProposal(proposals, project, decNode);
}
else if (decNode instanceof Tree.AttributeDeclaration) {
if (((Tree.AttributeDeclaration) decNode).getSpecifierOrInitializerExpression()==null) {
addMakeFormalDecProposal(proposals, project, decNode);
}
}
else if (decNode instanceof Tree.MethodDeclaration) {
if (((Tree.MethodDeclaration) decNode).getSpecifierExpression()==null) {
addMakeFormalDecProposal(proposals, project, decNode);
}
}
}
}
}
}
private static void addAnonymousFunctionProposals(CeylonEditor editor,
Collection<ICompletionProposal> proposals, IDocument doc,
IFile file, Tree.CompilationUnit cu,
final int currentOffset) {
class FindAnonFunctionVisitor extends Visitor {
Tree.FunctionArgument result;
public void visit(Tree.FunctionArgument that) {
if (currentOffset>=that.getStartIndex() &&
currentOffset<=that.getStopIndex()+1) {
result = that;
}
super.visit(that);
}
}
FindAnonFunctionVisitor v = new FindAnonFunctionVisitor();
v.visit(cu);
Tree.FunctionArgument fun = v.result;
if (fun!=null) {
if (fun.getExpression()!=null) {
addConvertToBlockProposal(doc, proposals, file, fun);
}
if (fun.getBlock()!=null) {
addConvertToSpecifierProposal(doc, proposals, file,
fun.getBlock(), true);
}
}
}
private static void addDeclarationProposals(CeylonEditor editor,
Collection<ICompletionProposal> proposals, IDocument doc,
IFile file, Tree.CompilationUnit cu,
Tree.Declaration decNode, int currentOffset) {
if (decNode==null) return;
if (decNode.getAnnotationList()!=null) {
Integer stopIndex = decNode.getAnnotationList().getStopIndex();
if (stopIndex!=null && currentOffset<=stopIndex+1) {
return;
}
}
if (decNode instanceof Tree.TypedDeclaration) {
Tree.TypedDeclaration tdn = (Tree.TypedDeclaration) decNode;
if (tdn.getType()!=null) {
Integer stopIndex = tdn.getType().getStopIndex();
if (stopIndex!=null && currentOffset<=stopIndex+1) {
return;
}
}
}
if (decNode instanceof Tree.AttributeDeclaration) {
Tree.AttributeDeclaration attDecNode = (Tree.AttributeDeclaration) decNode;
Tree.SpecifierOrInitializerExpression se =
attDecNode.getSpecifierOrInitializerExpression();
if (se instanceof Tree.LazySpecifierExpression) {
addConvertToBlockProposal(doc, proposals, file, decNode);
}
else {
addConvertToGetterProposal(doc, proposals, file, attDecNode);
}
}
if (decNode instanceof Tree.MethodDeclaration) {
Tree.SpecifierOrInitializerExpression se =
((Tree.MethodDeclaration) decNode).getSpecifierExpression();
if (se instanceof Tree.LazySpecifierExpression) {
addConvertToBlockProposal(doc, proposals, file, decNode);
}
}
if (decNode instanceof Tree.AttributeSetterDefinition) {
Tree.SpecifierOrInitializerExpression se =
((Tree.AttributeSetterDefinition) decNode).getSpecifierExpression();
if (se instanceof Tree.LazySpecifierExpression) {
addConvertToBlockProposal(doc, proposals, file, decNode);
}
Tree.Block b = ((Tree.AttributeSetterDefinition) decNode).getBlock();
if (b!=null) {
addConvertToSpecifierProposal(doc, proposals, file, b);
}
}
if (decNode instanceof Tree.AttributeGetterDefinition) {
Tree.Block b = ((Tree.AttributeGetterDefinition) decNode).getBlock();
if (b!=null) {
addConvertToSpecifierProposal(doc, proposals, file, b);
}
}
if (decNode instanceof Tree.MethodDefinition) {
Tree.Block b = ((Tree.MethodDefinition) decNode).getBlock();
if (b!=null) {
addConvertToSpecifierProposal(doc, proposals, file, b);
}
}
}
private void addArgumentProposals(Collection<ICompletionProposal> proposals,
IDocument doc, IFile file, Tree.StatementOrArgument node) {
if (node instanceof Tree.MethodArgument) {
Tree.MethodArgument ma = (Tree.MethodArgument) node;
Tree.SpecifierOrInitializerExpression se =
ma.getSpecifierExpression();
if (se instanceof Tree.LazySpecifierExpression) {
addConvertToBlockProposal(doc, proposals, file, node);
}
Tree.Block b = ma.getBlock();
if (b!=null) {
addConvertToSpecifierProposal(doc, proposals, file, b);
}
}
if (node instanceof Tree.AttributeArgument) {
Tree.AttributeArgument aa = (Tree.AttributeArgument) node;
Tree.SpecifierOrInitializerExpression se =
aa.getSpecifierExpression();
if (se instanceof Tree.LazySpecifierExpression) {
addConvertToBlockProposal(doc, proposals, file, node);
}
Tree.Block b = aa.getBlock();
if (b!=null) {
addConvertToSpecifierProposal(doc, proposals, file, b);
}
}
if (node instanceof Tree.SpecifiedArgument) {
Tree.SpecifiedArgument sa = (Tree.SpecifiedArgument) node;
addFillInArgumentNameProposal(proposals, doc, file, sa);
}
}
private void addCreationProposals(Tree.CompilationUnit cu, final Node node,
ProblemLocation problem, Collection<ICompletionProposal> proposals,
IProject project, TypeChecker tc, IFile file) {
if (node instanceof Tree.MemberOrTypeExpression) {
addCreateProposals(cu, node, proposals, project, file);
}
else if (node instanceof Tree.SimpleType) {
class FindExtendedTypeExpressionVisitor extends Visitor {
Tree.InvocationExpression invocationExpression;
@Override
public void visit(Tree.ExtendedType that) {
super.visit(that);
if (that.getType()==node) {
invocationExpression = that.getInvocationExpression();
}
}
}
FindExtendedTypeExpressionVisitor v = new FindExtendedTypeExpressionVisitor();
v.visit(cu);
if (v.invocationExpression!=null) {
addCreateProposals(cu, v.invocationExpression.getPrimary(),
proposals, project, file);
}
}
//TODO: should we add this stuff back in??
/*else if (node instanceof Tree.BaseType) {
Tree.BaseType bt = (Tree.BaseType) node;
String brokenName = bt.getIdentifier().getText();
String idef = "interface " + brokenName + " {}";
String idesc = "interface '" + brokenName + "'";
String cdef = "class " + brokenName + "() {}";
String cdesc = "class '" + brokenName + "()'";
//addCreateLocalProposals(proposals, project, idef, idesc, INTERFACE, cu, bt);
addCreateLocalProposals(proposals, project, cdef, cdesc, CLASS, cu, bt, null, null);
addCreateToplevelProposals(proposals, project, idef, idesc, INTERFACE, cu, bt, null, null);
addCreateToplevelProposals(proposals, project, cdef, cdesc, CLASS, cu, bt, null, null);
CreateInNewUnitProposal.addCreateToplevelProposal(proposals, idef, idesc,
INTERFACE, file, brokenName, null, null);
CreateInNewUnitProposal.addCreateToplevelProposal(proposals, cdef, cdesc,
CLASS, file, brokenName, null, null);
}*/
if (node instanceof Tree.BaseType) {
Tree.BaseType bt = (Tree.BaseType) node;
String brokenName = bt.getIdentifier().getText();
addCreateTypeParameterProposal(proposals, project, cu, bt, brokenName);
}
}
}
| 0true
|
plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_correct_CeylonCorrectionProcessor.java
|
508 |
public class DeleteIndexAction extends IndicesAction<DeleteIndexRequest, DeleteIndexResponse, DeleteIndexRequestBuilder> {
public static final DeleteIndexAction INSTANCE = new DeleteIndexAction();
public static final String NAME = "indices/delete";
private DeleteIndexAction() {
super(NAME);
}
@Override
public DeleteIndexResponse newResponse() {
return new DeleteIndexResponse();
}
@Override
public DeleteIndexRequestBuilder newRequestBuilder(IndicesAdminClient client) {
return new DeleteIndexRequestBuilder(client);
}
}
| 0true
|
src_main_java_org_elasticsearch_action_admin_indices_delete_DeleteIndexAction.java
|
45 |
public class PackageCompletions {
static final class PackageDescriptorProposal extends CompletionProposal {
PackageDescriptorProposal(int offset, String prefix, String packageName) {
super(offset, prefix, PACKAGE,
"package " + packageName,
"package " + packageName + ";");
}
@Override
protected boolean qualifiedNameIsPath() {
return true;
}
}
static final class PackageProposal extends CompletionProposal {
private final boolean withBody;
private final int len;
private final Package p;
private final String completed;
private final CeylonParseController cpc;
PackageProposal(int offset, String prefix, boolean withBody,
int len, Package p, String completed,
CeylonParseController cpc) {
super(offset, prefix, PACKAGE, completed,
completed.substring(len));
this.withBody = withBody;
this.len = len;
this.p = p;
this.completed = completed;
this.cpc = cpc;
}
@Override
public Point getSelection(IDocument document) {
if (withBody) {
return new Point(offset+completed.length()-prefix.length()-len-5, 3);
}
else {
return new Point(offset+completed.length()-prefix.length()-len, 0);
}
}
@Override
public void apply(IDocument document) {
super.apply(document);
if (withBody &&
EditorsUI.getPreferenceStore()
.getBoolean(LINKED_MODE)) {
final LinkedModeModel linkedModeModel = new LinkedModeModel();
final Point selection = getSelection(document);
List<ICompletionProposal> proposals = new ArrayList<ICompletionProposal>();
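                    // Offer each shared, resolvable, non-overloaded member of the package as a linked-mode proposal.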
for (final Declaration d: p.getMembers()) {
if (Util.isResolvable(d) && d.isShared() &&
!isOverloadedVersion(d)) {
proposals.add(new ICompletionProposal() {
@Override
public Point getSelection(IDocument document) {
return null;
}
@Override
public Image getImage() {
return getImageForDeclaration(d);
}
@Override
public String getDisplayString() {
return d.getName();
}
@Override
public IContextInformation getContextInformation() {
return null;
}
@Override
public String getAdditionalProposalInfo() {
return null;
}
@Override
public void apply(IDocument document) {
try {
document.replace(selection.x, selection.y,
d.getName());
}
catch (BadLocationException e) {
e.printStackTrace();
}
linkedModeModel.exit(ILinkedModeListener.UPDATE_CARET);
}
});
}
}
if (!proposals.isEmpty()) {
ProposalPosition linkedPosition =
new ProposalPosition(document, selection.x, selection.y, 0,
proposals.toArray(NO_COMPLETIONS));
try {
LinkedMode.addLinkedPosition(linkedModeModel, linkedPosition);
LinkedMode.installLinkedMode((CeylonEditor) EditorUtil.getCurrentEditor(),
document, linkedModeModel, this, new LinkedMode.NullExitPolicy(),
-1, 0);
}
catch (BadLocationException ble) {
ble.printStackTrace();
}
}
}
}
@Override
public String getAdditionalProposalInfo() {
return getDocumentationFor(cpc, p);
}
@Override
protected boolean qualifiedNameIsPath() {
return true;
}
}
static void addPackageCompletions(CeylonParseController cpc,
int offset, String prefix, Tree.ImportPath path, Node node,
List<ICompletionProposal> result, boolean withBody) {
String fullPath = fullPath(offset, prefix, path);
addPackageCompletions(offset, prefix, node, result, fullPath.length(),
fullPath+prefix, cpc, withBody);
}
private static void addPackageCompletions(final int offset, final String prefix,
Node node, List<ICompletionProposal> result, final int len, String pfp,
final CeylonParseController cpc, final boolean withBody) {
//TODO: someday it would be nice to propose from all packages
// and auto-add the module dependency!
/*TypeChecker tc = CeylonBuilder.getProjectTypeChecker(cpc.getProject().getRawProject());
if (tc!=null) {
for (Module m: tc.getContext().getModules().getListOfModules()) {*/
//Set<Package> packages = new HashSet<Package>();
Unit unit = node.getUnit();
if (unit!=null) { //a null unit can occur if we have not finished parsing the file
Module module = unit.getPackage().getModule();
for (final Package p: module.getAllPackages()) {
//if (!packages.contains(p)) {
//packages.add(p);
//if ( p.getModule().equals(module) || p.isShared() ) {
final String pkg = escapePackageName(p);
if (!pkg.isEmpty() && pkg.startsWith(pfp)) {
boolean already = false;
if (!pfp.equals(pkg)) {
//don't add already imported packages, unless
//it is an exact match to the typed path
for (ImportList il: node.getUnit().getImportLists()) {
if (il.getImportedScope()==p) {
already = true;
break;
}
}
}
if (!already) {
result.add(new PackageProposal(offset, prefix, withBody,
len, p, pkg + (withBody ? " { ... }" : ""), cpc));
}
}
//}
}
}
}
static void addPackageDescriptorCompletion(CeylonParseController cpc,
int offset, String prefix, List<ICompletionProposal> result) {
if (!"package".startsWith(prefix)) return;
IFile file = cpc.getProject().getFile(cpc.getPath());
String packageName = getPackageName(file);
if (packageName!=null) {
result.add(new PackageDescriptorProposal(offset, prefix, packageName));
}
}
static void addCurrentPackageNameCompletion(CeylonParseController cpc,
int offset, String prefix, List<ICompletionProposal> result) {
IFile file = cpc.getProject().getFile(cpc.getPath());
String moduleName = getPackageName(file);
if (moduleName!=null) {
result.add(new CompletionProposal(offset, prefix,
isModuleDescriptor(cpc) ? MODULE : PACKAGE,
moduleName, moduleName));
}
}
}
| 0true
|
plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_complete_PackageCompletions.java
|
625 |
public class BroadleafControllerUtility {
protected static final Log LOG = LogFactory.getLog(BroadleafControllerUtility.class);
public static final String BLC_REDIRECT_ATTRIBUTE = "blc_redirect";
public static final String BLC_AJAX_PARAMETER = "blcAjax";
/**
* A helper method that returns whether or not the given request was invoked via an AJAX call
*
* Returns true if the request contains the XMLHttpRequest header or a blcAjax=true parameter.
*
* @param request
* @return - whether or not it was an AJAX request
*/
public static boolean isAjaxRequest(HttpServletRequest request) {
String ajaxParameter = request.getParameter(BLC_AJAX_PARAMETER);
String requestedWithHeader = request.getHeader("X-Requested-With");
        boolean result = "true".equals(ajaxParameter)
                || "XMLHttpRequest".equals(requestedWithHeader);
if (LOG.isTraceEnabled()) {
StringBuilder sb = new StringBuilder()
.append("Request URL: [").append(request.getServletPath()).append("]")
.append(" - ")
.append("ajaxParam: [").append(String.valueOf(ajaxParameter)).append("]")
.append(" - ")
.append("X-Requested-With: [").append(requestedWithHeader).append("]")
.append(" - ")
.append("Returning: [").append(result).append("]");
LOG.trace(sb.toString());
}
return result;
}
}
| 0true
|
common_src_main_java_org_broadleafcommerce_common_web_controller_BroadleafControllerUtility.java
|
298 |
new Thread() {
public void run() {
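                // Force-release the lock from a different thread, then signal the waiting test.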
l.forceUnlock();
latch.countDown();
}
}.start();
| 0true
|
hazelcast-client_src_test_java_com_hazelcast_client_lock_ClientLockTest.java
|
252 |
public final class RateLimitedFSDirectory extends FilterDirectory{
private final StoreRateLimiting.Provider rateLimitingProvider;
private final StoreRateLimiting.Listener rateListener;
public RateLimitedFSDirectory(FSDirectory wrapped, StoreRateLimiting.Provider rateLimitingProvider,
StoreRateLimiting.Listener rateListener) {
super(wrapped);
this.rateLimitingProvider = rateLimitingProvider;
this.rateListener = rateListener;
}
@Override
public IndexOutput createOutput(String name, IOContext context) throws IOException {
final IndexOutput output = in.createOutput(name, context);
StoreRateLimiting rateLimiting = rateLimitingProvider.rateLimiting();
StoreRateLimiting.Type type = rateLimiting.getType();
RateLimiter limiter = rateLimiting.getRateLimiter();
if (type == StoreRateLimiting.Type.NONE || limiter == null) {
return output;
}
if (context.context == Context.MERGE) {
            // we are merging, and the type is either MERGE or ALL, so rate limit...
return new RateLimitedIndexOutput(limiter, rateListener, output);
}
if (type == StoreRateLimiting.Type.ALL) {
return new RateLimitedIndexOutput(limiter, rateListener, output);
}
// we shouldn't really get here...
return output;
}
@Override
public void close() throws IOException {
in.close();
}
@Override
public String toString() {
StoreRateLimiting rateLimiting = rateLimitingProvider.rateLimiting();
StoreRateLimiting.Type type = rateLimiting.getType();
RateLimiter limiter = rateLimiting.getRateLimiter();
if (type == StoreRateLimiting.Type.NONE || limiter == null) {
return StoreUtils.toString(in);
} else {
return "rate_limited(" + StoreUtils.toString(in) + ", type=" + type.name() + ", rate=" + limiter.getMbPerSec() + ")";
}
}
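    // Wraps an IndexOutput, pausing on the shared RateLimiter (and reporting each pause
    // to the listener) before flushing a buffer.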
static final class RateLimitedIndexOutput extends BufferedIndexOutput {
private final IndexOutput delegate;
private final BufferedIndexOutput bufferedDelegate;
private final RateLimiter rateLimiter;
private final StoreRateLimiting.Listener rateListener;
RateLimitedIndexOutput(final RateLimiter rateLimiter, final StoreRateLimiting.Listener rateListener, final IndexOutput delegate) {
super(delegate instanceof BufferedIndexOutput ? ((BufferedIndexOutput) delegate).getBufferSize() : BufferedIndexOutput.DEFAULT_BUFFER_SIZE);
if (delegate instanceof BufferedIndexOutput) {
bufferedDelegate = (BufferedIndexOutput) delegate;
this.delegate = delegate;
} else {
this.delegate = delegate;
bufferedDelegate = null;
}
this.rateLimiter = rateLimiter;
this.rateListener = rateListener;
}
@Override
protected void flushBuffer(byte[] b, int offset, int len) throws IOException {
rateListener.onPause(rateLimiter.pause(len));
if (bufferedDelegate != null) {
bufferedDelegate.flushBuffer(b, offset, len);
} else {
delegate.writeBytes(b, offset, len);
}
}
@Override
public long length() throws IOException {
return delegate.length();
}
@Override
public void seek(long pos) throws IOException {
flush();
delegate.seek(pos);
}
@Override
public void flush() throws IOException {
try {
super.flush();
} finally {
delegate.flush();
}
}
@Override
public void setLength(long length) throws IOException {
delegate.setLength(length);
}
@Override
public void close() throws IOException {
try {
super.close();
} finally {
delegate.close();
}
}
}
}
| 1no label
|
src_main_java_org_apache_lucene_store_RateLimitedFSDirectory.java
|
48 |
{
@Override
public void masterIsElected( HighAvailabilityMemberChangeEvent event )
{
}
@Override
public void masterIsAvailable( HighAvailabilityMemberChangeEvent event )
{
if ( event.getOldState().equals( HighAvailabilityMemberState.TO_MASTER ) && event.getNewState().equals(
HighAvailabilityMemberState.MASTER ) )
{
doAfterRecoveryAndStartup( true );
}
}
@Override
public void slaveIsAvailable( HighAvailabilityMemberChangeEvent event )
{
if ( event.getOldState().equals( HighAvailabilityMemberState.TO_SLAVE ) && event.getNewState().equals(
HighAvailabilityMemberState.SLAVE ) )
{
doAfterRecoveryAndStartup( false );
}
}
@Override
public void instanceStops( HighAvailabilityMemberChangeEvent event )
{
}
private void doAfterRecoveryAndStartup( boolean isMaster )
{
try
{
synchronized ( xaDataSourceManager )
{
HighlyAvailableGraphDatabase.this.doAfterRecoveryAndStartup( isMaster );
}
}
catch ( Throwable throwable )
{
msgLog.error( "Post recovery error", throwable );
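                // Post-recovery startup failed: bounce the member state machine so the instance can retry.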
try
{
memberStateMachine.stop();
}
catch ( Throwable throwable1 )
{
msgLog.warn( "Could not stop", throwable1 );
}
try
{
memberStateMachine.start();
}
catch ( Throwable throwable1 )
{
msgLog.warn( "Could not start", throwable1 );
}
}
}
} );
| 1no label
|
enterprise_ha_src_main_java_org_neo4j_kernel_ha_HighlyAvailableGraphDatabase.java
|
627 |
c4.addListenerConfig(new ListenerConfig(new LifecycleListener() {
public void stateChanged(final LifecycleEvent event) {
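            // Count down once this instance has merged back into the cluster.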
if (event.getState() == LifecycleState.MERGED) {
latch.countDown();
}
}
}));
| 0true
|
hazelcast_src_test_java_com_hazelcast_cluster_SplitBrainHandlerTest.java
|
44 |
public class InvalidIDException extends TitanException {
public InvalidIDException(String msg) {
super(msg);
}
public InvalidIDException(String msg, Throwable cause) {
super(msg, cause);
}
public InvalidIDException(Throwable cause) {
super(cause);
}
}
| 0true
|
titan-core_src_main_java_com_thinkaurelius_titan_core_InvalidIDException.java
|
96 |
class ODirectMemoryFactory {
public static final ODirectMemoryFactory INSTANCE = new ODirectMemoryFactory();
private static final ODirectMemory directMemory;
static {
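        // Prefer the JNA-based implementation; fall back to sun.misc.Unsafe when JNA is unavailable.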
ODirectMemory localDirectMemory = null;
try {
            // Class.forName throws ClassNotFoundException when JNA is absent, so no null check is needed.
            final Class<?> jnaClass = Class.forName("com.orientechnologies.nio.OJNADirectMemory");
            localDirectMemory = (ODirectMemory) jnaClass.newInstance();
} catch (Exception e) {
// ignore
}
if (localDirectMemory == null) {
try {
                // Probe for sun.misc.Unsafe; Class.forName throws when it is unavailable.
                Class.forName("sun.misc.Unsafe");
                localDirectMemory = OUnsafeMemory.INSTANCE;
                OLogManager.instance().warn(
                    ODirectMemoryFactory.class,
                    "Sun Unsafe direct memory implementation is going to be used, "
                        + "this implementation is not stable so please use the JNA version instead.");
} catch (Exception e) {
// ignore
}
}
directMemory = localDirectMemory;
}
public ODirectMemory directMemory() {
return directMemory;
}
}
| 1no label
|
commons_src_main_java_com_orientechnologies_common_directmemory_ODirectMemoryFactory.java
|
464 |
public class CeylonNavigatorLabelProvider extends
CeylonLabelProvider implements ICommonLabelProvider {
ICommonContentExtensionSite extensionSite;
public CeylonNavigatorLabelProvider() {
super(true); // small images
}
@Override
public StyledString getStyledText(Object element) {
if (element instanceof ExternalModuleNode) {
ExternalModuleNode externalModule = (ExternalModuleNode) element;
            JDTModule jdtModule = externalModule.getModule();
            String name = super.getStyledText(jdtModule).toString();
StyledString moduleText = new StyledString(name);
if (jdtModule != null) {
moduleText.append(" - " + jdtModule.getVersion(), QUALIFIER_STYLER);
}
return moduleText;
}
if (element instanceof SourceModuleNode) {
JDTModule module = ((SourceModuleNode) element).getModule();
if (module==null) {
return new StyledString(((SourceModuleNode) element).getElementName());
}
else {
String name = super.getStyledText(module).toString();
StyledString result = new StyledString(name);
                if (module.isDefaultModule()) {
                    result = result.insert('(', 0).append(')');
}
return result;
}
}
if (element instanceof RepositoryNode) {
RepositoryNode repoNode = (RepositoryNode) element;
String stringToDisplay = getRepositoryString(repoNode);
return new StyledString(stringToDisplay);
}
if (element instanceof Package || element instanceof IPackageFragment) {
return new StyledString(super.getStyledText(element).getString());
}
if (element instanceof CeylonArchiveFileStore) {
CeylonArchiveFileStore archiveFileStore = (CeylonArchiveFileStore)element;
if (archiveFileStore.getParent() == null) {
return new StyledString("Ceylon Sources").append(" - " + archiveFileStore.getArchivePath().toOSString(), QUALIFIER_STYLER);
}
return new StyledString(archiveFileStore.getName());
}
if (element instanceof JarPackageFragmentRoot) {
JarPackageFragmentRoot jpfr = (JarPackageFragmentRoot) element;
if (ArtifactContext.CAR.substring(1).equalsIgnoreCase(jpfr.getPath().getFileExtension())) {
return new StyledString("Java Binaries").append(" - " + jpfr.getPath().toOSString(), QUALIFIER_STYLER);
} else {
return getJavaNavigatorLabelProvider().getStyledText(element);
}
}
if (element instanceof IProject || element instanceof IJavaProject) {
return getJavaNavigatorLabelProvider().getStyledText(element);
}
StyledString styledString = super.getStyledText(element);
if (styledString.getString().equals("<something>")) {
StyledString javaResult = getJavaNavigatorLabelProvider().getStyledText(element);
if (! javaResult.getString().trim().isEmpty()) {
return javaResult;
}
}
return styledString;
}
private String getRepositoryString(RepositoryNode repoNode) {
String displayString = repoNode.getDisplayString();
String stringToDisplay = null;
if (Constants.REPO_URL_CEYLON.equals(displayString)) {
stringToDisplay = "Herd Modules";
}
if (stringToDisplay == null && JDKRepository.JDK_REPOSITORY_DISPLAY_STRING.equals(displayString)) {
stringToDisplay = "JDK Modules";
}
if (stringToDisplay == null && CeylonBuilder.getInterpolatedCeylonSystemRepo(repoNode.project).equals(displayString)) {
stringToDisplay = "System Modules";
}
if (stringToDisplay == null && CeylonBuilder.getCeylonModulesOutputDirectory(repoNode.project).getAbsolutePath().equals(displayString)) {
stringToDisplay = "Output Modules";
}
if (stringToDisplay == null && CeylonProjectConfig.get(repoNode.project).getMergedRepositories().getCacheRepoDir().getAbsolutePath().equals(displayString)) {
stringToDisplay = "Cached Modules";
}
if (stringToDisplay == null && CeylonProjectConfig.get(repoNode.project).getMergedRepositories().getUserRepoDir().getAbsolutePath().equals(displayString)) {
stringToDisplay = "User Modules";
}
if (stringToDisplay == null) {
try {
for (IProject referencedProject: repoNode.project.getReferencedProjects()) {
if (referencedProject.isOpen() && CeylonNature.isEnabled(referencedProject)) {
if (CeylonBuilder.getCeylonModulesOutputDirectory(referencedProject).getAbsolutePath().equals(displayString)) {
stringToDisplay = "Modules of Referenced Project : " + referencedProject.getName() + "";
break;
}
}
}
} catch (CoreException e) {
}
}
if (stringToDisplay == null) {
for (Repositories.Repository repo : CeylonProjectConfig.get(repoNode.project).getMergedRepositories().getLocalLookupRepositories()) {
if (repo.getUrl().startsWith("./") && repo.getUrl().length() > 2) {
IPath relativePath = Path.fromPortableString(repo.getUrl().substring(2));
IFolder folder = repoNode.project.getFolder(relativePath);
if (folder.exists() && folder.getLocation().toFile().getAbsolutePath().equals(displayString)) {
stringToDisplay = "Local Repository : " + relativePath.toString() + "";
break;
}
}
}
}
if (stringToDisplay == null && NodeUtils.UNKNOWN_REPOSITORY.equals(displayString)) {
stringToDisplay = "Unknown Repository";
}
if (stringToDisplay == null) {
stringToDisplay = displayString;
}
return stringToDisplay;
}
@Override
public Image getImage(Object element) {
JavaNavigatorLabelProvider javaProvider = getJavaNavigatorLabelProvider();
if (element instanceof IProject || element instanceof IJavaProject) {
Image javaContributedImage = javaProvider.getImage(element);
if (javaContributedImage != null) {
return javaContributedImage;
}
}
if (element instanceof IPackageFragment &&
! CeylonBuilder.isInSourceFolder((IPackageFragment)element)) {
return javaProvider.getImage(element);
}
if (element instanceof ExternalModuleNode) {
return super.getImage(((ExternalModuleNode)element).getModule());
}
if (element instanceof SourceModuleNode) {
int decorationAttributes = 0;
for (Object child : getContentProvider().getChildren(element)) {
if (!hasPipelinedChildren(child)) {
continue;
}
int childValue = getDecorationAttributes(child);
if ((childValue & ERROR) != 0) {
decorationAttributes = ERROR;
break;
}
if ((childValue & WARNING) != 0) {
decorationAttributes = WARNING;
}
}
JDTModule module = ((SourceModuleNode)element).getModule();
if (module==null) {
return getDecoratedImage(CEYLON_MODULE, decorationAttributes, true);
}
else {
return getDecoratedImage(getImageKey(module), decorationAttributes, true);
}
}
if (element instanceof CeylonArchiveFileStore) {
CeylonArchiveFileStore archiveFileStore = (CeylonArchiveFileStore)element;
if (archiveFileStore.getParent() != null
&& ! archiveFileStore.fetchInfo().isDirectory()) {
IFolder sourceArchiveFolder = ExternalSourceArchiveManager.getExternalSourceArchiveManager().getSourceArchive(archiveFileStore.getArchivePath());
if (sourceArchiveFolder != null && sourceArchiveFolder.exists()) {
IResource file = sourceArchiveFolder.findMember(archiveFileStore.getEntryPath());
if (file instanceof IFile) {
element = file;
}
}
}
}
if (element instanceof IFile) {
if (! CeylonBuilder.isCeylon((IFile) element)) {
return javaProvider.getImage(element);
}
}
return super.getImage(element);
}
private boolean hasPipelinedChildren(Object child) {
return getContentProvider().hasPipelinedChildren(child,
getJavaNavigatorContentProvider().hasChildren(child));
}
@Override
protected String getImageKey(Object element) {
if (element instanceof RepositoryNode) {
return RUNTIME_OBJ;
}
if (element instanceof IPackageFragment) {
return CEYLON_PACKAGE;
}
if (element instanceof CeylonArchiveFileStore) {
CeylonArchiveFileStore archiveFileStore = (CeylonArchiveFileStore)element;
if (archiveFileStore.getParent() == null) {
return CEYLON_SOURCE_ARCHIVE;
} else {
if (archiveFileStore.fetchInfo().isDirectory()) {
return CEYLON_PACKAGE;
} else {
return CEYLON_FILE;
}
}
}
if (element instanceof JarPackageFragmentRoot) {
return CEYLON_BINARY_ARCHIVE;
}
return super.getImageKey(element);
}
@Override
public void restoreState(IMemento aMemento) {
// TODO Auto-generated method stub
}
@Override
public void saveState(IMemento aMemento) {
// TODO Auto-generated method stub
}
@Override
public String getDescription(Object anElement) {
if (anElement instanceof RepositoryNode) {
Repository repo = ((RepositoryNode)anElement).getRepository();
if (repo != null) {
return "Repository path : " + repo.getDisplayString();
}
}
if (anElement instanceof CeylonArchiveFileStore) {
CeylonArchiveFileStore archive = (CeylonArchiveFileStore)anElement;
if (archive.getParent() == null) {
return archive.getArchivePath().toOSString();
}
}
return null;
}
@Override
public void init(ICommonContentExtensionSite aConfig) {
extensionSite = aConfig;
}
private INavigatorContentExtension getJavaNavigatorExtension() {
@SuppressWarnings("unchecked")
Set<INavigatorContentExtension> set = extensionSite.getService().findContentExtensionsByTriggerPoint(JavaCore.create(ResourcesPlugin.getWorkspace().getRoot()));
for (INavigatorContentExtension extension : set) {
if (extension.getDescriptor().equals(extensionSite.getExtension().getDescriptor().getOverriddenDescriptor())) {
return extension;
}
}
return null;
}
private JavaNavigatorLabelProvider getJavaNavigatorLabelProvider() {
INavigatorContentExtension javaExtension = getJavaNavigatorExtension();
if (javaExtension != null) {
return (JavaNavigatorLabelProvider) javaExtension.getLabelProvider();
}
return null;
}
private JavaNavigatorContentProvider getJavaNavigatorContentProvider() {
INavigatorContentExtension javaExtension = getJavaNavigatorExtension();
if (javaExtension != null) {
return (JavaNavigatorContentProvider) javaExtension.getContentProvider();
}
return null;
}
private CeylonNavigatorContentProvider getContentProvider() {
return (CeylonNavigatorContentProvider) extensionSite.getExtension().getContentProvider();
}
}
| 1no label
|
plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_navigator_CeylonNavigatorLabelProvider.java
|
1,112 |
public class OSQLFunctionAverage extends OSQLFunctionMathAbstract {
public static final String NAME = "avg";
private Number sum;
private int total = 0;
public OSQLFunctionAverage() {
super(NAME, 1, -1);
}
public Object execute(OIdentifiable iCurrentRecord, Object iCurrentResult, final Object[] iParameters, OCommandContext iContext) {
if (iParameters.length == 1) {
if (iParameters[0] instanceof Number)
sum((Number) iParameters[0]);
else if (OMultiValue.isMultiValue(iParameters[0]))
for (Object n : OMultiValue.getMultiValueIterable(iParameters[0]))
sum((Number) n);
} else {
sum = null;
for (int i = 0; i < iParameters.length; ++i)
sum((Number) iParameters[i]);
}
return getResult();
}
protected void sum(Number value) {
if (value != null) {
total++;
if (sum == null)
// FIRST TIME
sum = value;
else
sum = OType.increment(sum, value);
}
}
public String getSyntax() {
return "Syntax error: avg(<field> [,<field>*])";
}
@Override
public Object getResult() {
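    // In distributed mode, return the partial sum and count so partial results can be merged later.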
if (returnDistributedResult()) {
final Map<String, Object> doc = new HashMap<String, Object>();
doc.put("sum", sum);
doc.put("total", total);
return doc;
} else {
if (sum instanceof Integer)
return sum.intValue() / total;
else if (sum instanceof Long)
return sum.longValue() / total;
else if (sum instanceof Float)
return sum.floatValue() / total;
else if (sum instanceof Double)
return sum.doubleValue() / total;
else if (sum instanceof BigDecimal)
return ((BigDecimal) sum).divide(new BigDecimal(total));
}
return null;
}
@SuppressWarnings("unchecked")
@Override
public Object mergeDistributedResult(List<Object> resultsToMerge) {
Number sum = null;
int total = 0;
for (Object iParameter : resultsToMerge) {
final Map<String, Object> item = (Map<String, Object>) iParameter;
if (sum == null)
sum = (Number) item.get("sum");
else
sum = OType.increment(sum, (Number) item.get("sum"));
total += (Integer) item.get("total");
}
if (sum instanceof Integer)
return sum.intValue() / total;
else if (sum instanceof Long)
return sum.longValue() / total;
else if (sum instanceof Float)
return sum.floatValue() / total;
else if (sum instanceof Double)
return sum.doubleValue() / total;
else if (sum instanceof BigDecimal)
return ((BigDecimal) sum).divide(new BigDecimal(total));
return null;
}
@Override
public boolean aggregateResults() {
return configuredParameters.length == 1;
}
}
| 1no label
|
core_src_main_java_com_orientechnologies_orient_core_sql_functions_math_OSQLFunctionAverage.java
|
1,104 |
SINGLE_VALUES_DENSE_ENUM {
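    // One value per document, drawn uniformly from a dense 16-value range.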
    @Override
    public int numValues() {
return 1;
}
@Override
public long nextValue() {
return RANDOM.nextInt(16);
}
},
| 0true
|
src_test_java_org_elasticsearch_benchmark_fielddata_LongFieldDataBenchmark.java
|
1,520 |
public class RebalanceAfterActiveTests extends ElasticsearchAllocationTestCase {
private final ESLogger logger = Loggers.getLogger(RebalanceAfterActiveTests.class);
@Test
public void testRebalanceOnlyAfterAllShardsAreActive() {
AllocationService strategy = createAllocationService(settingsBuilder()
.put("cluster.routing.allocation.concurrent_recoveries", 10)
.put("cluster.routing.allocation.allow_rebalance", "always")
.put("cluster.routing.allocation.cluster_concurrent_rebalance", -1)
.build());
logger.info("Building initial routing table");
MetaData metaData = MetaData.builder()
.put(IndexMetaData.builder("test").numberOfShards(5).numberOfReplicas(1))
.build();
RoutingTable routingTable = RoutingTable.builder()
.addAsNew(metaData.index("test"))
.build();
ClusterState clusterState = ClusterState.builder().metaData(metaData).routingTable(routingTable).build();
assertThat(routingTable.index("test").shards().size(), equalTo(5));
for (int i = 0; i < routingTable.index("test").shards().size(); i++) {
assertThat(routingTable.index("test").shard(i).shards().size(), equalTo(2));
assertThat(routingTable.index("test").shard(i).shards().get(0).state(), equalTo(UNASSIGNED));
assertThat(routingTable.index("test").shard(i).shards().get(1).state(), equalTo(UNASSIGNED));
assertThat(routingTable.index("test").shard(i).shards().get(0).currentNodeId(), nullValue());
assertThat(routingTable.index("test").shard(i).shards().get(1).currentNodeId(), nullValue());
}
logger.info("start two nodes and fully start the shards");
clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder().put(newNode("node1")).put(newNode("node2"))).build();
RoutingTable prevRoutingTable = routingTable;
routingTable = strategy.reroute(clusterState).routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
for (int i = 0; i < routingTable.index("test").shards().size(); i++) {
assertThat(routingTable.index("test").shard(i).shards().size(), equalTo(2));
assertThat(routingTable.index("test").shard(i).primaryShard().state(), equalTo(INITIALIZING));
assertThat(routingTable.index("test").shard(i).replicaShards().get(0).state(), equalTo(UNASSIGNED));
}
logger.info("start all the primary shards, replicas will start initializing");
RoutingNodes routingNodes = clusterState.routingNodes();
prevRoutingTable = routingTable;
routingTable = strategy.applyStartedShards(clusterState, routingNodes.shardsWithState(INITIALIZING)).routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
routingNodes = clusterState.routingNodes();
for (int i = 0; i < routingTable.index("test").shards().size(); i++) {
assertThat(routingTable.index("test").shard(i).shards().size(), equalTo(2));
assertThat(routingTable.index("test").shard(i).primaryShard().state(), equalTo(STARTED));
assertThat(routingTable.index("test").shard(i).replicaShards().get(0).state(), equalTo(INITIALIZING));
}
logger.info("now, start 8 more nodes, and check that no rebalancing/relocation have happened");
clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder(clusterState.nodes())
.put(newNode("node3")).put(newNode("node4")).put(newNode("node5")).put(newNode("node6")).put(newNode("node7")).put(newNode("node8")).put(newNode("node9")).put(newNode("node10")))
.build();
prevRoutingTable = routingTable;
routingTable = strategy.reroute(clusterState).routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
routingNodes = clusterState.routingNodes();
for (int i = 0; i < routingTable.index("test").shards().size(); i++) {
assertThat(routingTable.index("test").shard(i).shards().size(), equalTo(2));
assertThat(routingTable.index("test").shard(i).primaryShard().state(), equalTo(STARTED));
assertThat(routingTable.index("test").shard(i).replicaShards().get(0).state(), equalTo(INITIALIZING));
}
logger.info("start the replica shards, rebalancing should start");
routingNodes = clusterState.routingNodes();
prevRoutingTable = routingTable;
routingTable = strategy.applyStartedShards(clusterState, routingNodes.shardsWithState(INITIALIZING)).routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
routingNodes = clusterState.routingNodes();
        // rebalancing kicks in: half the shards start relocating to the new nodes
assertThat(routingTable.shardsWithState(STARTED).size(), equalTo(5));
assertThat(routingTable.shardsWithState(RELOCATING).size(), equalTo(5));
logger.info("complete relocation, other half of relocation should happen");
routingNodes = clusterState.routingNodes();
prevRoutingTable = routingTable;
routingTable = strategy.applyStartedShards(clusterState, routingNodes.shardsWithState(INITIALIZING)).routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
routingNodes = clusterState.routingNodes();
// we now only relocate 3, since 2 remain where they are!
assertThat(routingTable.shardsWithState(STARTED).size(), equalTo(7));
assertThat(routingTable.shardsWithState(RELOCATING).size(), equalTo(3));
logger.info("complete relocation, thats it!");
routingNodes = clusterState.routingNodes();
prevRoutingTable = routingTable;
routingTable = strategy.applyStartedShards(clusterState, routingNodes.shardsWithState(INITIALIZING)).routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
routingNodes = clusterState.routingNodes();
assertThat(routingTable.shardsWithState(STARTED).size(), equalTo(10));
        // make sure the shards ended up evenly distributed, one per node
for (RoutingNode routingNode : routingNodes) {
assertThat(routingNode.size(), equalTo(1));
}
}
}
| 0true
|
src_test_java_org_elasticsearch_cluster_routing_allocation_RebalanceAfterActiveTests.java
|
58 |
public class HttpPostCommandParser implements CommandParser {
public TextCommand parser(SocketTextReader socketTextReader, String cmd, int space) {
StringTokenizer st = new StringTokenizer(cmd);
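        // Skip the HTTP method token; the next token, if present, is the request URI.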
st.nextToken();
String uri = null;
if (st.hasMoreTokens()) {
uri = st.nextToken();
} else {
return new ErrorCommand(ERROR_CLIENT);
}
return new HttpPostCommand(socketTextReader, uri);
}
}
| 0true
|
hazelcast_src_main_java_com_hazelcast_ascii_rest_HttpPostCommandParser.java
|
164 |
public interface SpeedTest {
public void cycle() throws Exception;
public void init() throws Exception;
public void deinit() throws Exception;
public void beforeCycle() throws Exception;
public void afterCycle() throws Exception;
}
| 0true
|
commons_src_test_java_com_orientechnologies_common_test_SpeedTest.java
|
3,547 |
public static class MultiFields {
public static MultiFields empty() {
return new MultiFields(Defaults.PATH_TYPE, ImmutableOpenMap.<String, Mapper>of());
}
public static class Builder {
private final ImmutableOpenMap.Builder<String, Mapper.Builder> mapperBuilders = ImmutableOpenMap.builder();
private ContentPath.Type pathType = Defaults.PATH_TYPE;
public Builder pathType(ContentPath.Type pathType) {
this.pathType = pathType;
return this;
}
public Builder add(Mapper.Builder builder) {
mapperBuilders.put(builder.name(), builder);
return this;
}
@SuppressWarnings("unchecked")
public MultiFields build(AbstractFieldMapper.Builder mainFieldBuilder, BuilderContext context) {
if (pathType == Defaults.PATH_TYPE && mapperBuilders.isEmpty()) {
return empty();
} else if (mapperBuilders.isEmpty()) {
return new MultiFields(pathType, ImmutableOpenMap.<String, Mapper>of());
} else {
ContentPath.Type origPathType = context.path().pathType();
context.path().pathType(pathType);
context.path().add(mainFieldBuilder.name());
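                // Reuse the raw builder map, replacing each Mapper.Builder value with its built Mapper.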
ImmutableOpenMap.Builder mapperBuilders = this.mapperBuilders;
for (ObjectObjectCursor<String, Mapper.Builder> cursor : this.mapperBuilders) {
String key = cursor.key;
Mapper.Builder value = cursor.value;
mapperBuilders.put(key, value.build(context));
}
context.path().remove();
context.path().pathType(origPathType);
ImmutableOpenMap.Builder<String, Mapper> mappers = mapperBuilders.cast();
return new MultiFields(pathType, mappers.build());
}
}
}
private final ContentPath.Type pathType;
private volatile ImmutableOpenMap<String, Mapper> mappers;
public MultiFields(ContentPath.Type pathType, ImmutableOpenMap<String, Mapper> mappers) {
this.pathType = pathType;
this.mappers = mappers;
// we disable the all in multi-field mappers
for (ObjectCursor<Mapper> cursor : mappers.values()) {
Mapper mapper = cursor.value;
if (mapper instanceof AllFieldMapper.IncludeInAll) {
((AllFieldMapper.IncludeInAll) mapper).unsetIncludeInAll();
}
}
}
public void parse(AbstractFieldMapper mainField, ParseContext context) throws IOException {
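        // Parse each sub-field under the main field's path, restoring the original path type afterwards.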
if (mappers.isEmpty()) {
return;
}
ContentPath.Type origPathType = context.path().pathType();
context.path().pathType(pathType);
context.path().add(mainField.name());
for (ObjectCursor<Mapper> cursor : mappers.values()) {
cursor.value.parse(context);
}
context.path().remove();
context.path().pathType(origPathType);
}
// No need for locking, because locking is taken care of in ObjectMapper#merge and DocumentMapper#merge
public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
AbstractFieldMapper mergeWithMultiField = (AbstractFieldMapper) mergeWith;
List<FieldMapper> newFieldMappers = null;
ImmutableOpenMap.Builder<String, Mapper> newMappersBuilder = null;
for (ObjectCursor<Mapper> cursor : mergeWithMultiField.multiFields.mappers.values()) {
Mapper mergeWithMapper = cursor.value;
Mapper mergeIntoMapper = mappers.get(mergeWithMapper.name());
if (mergeIntoMapper == null) {
// no mapping, simply add it if not simulating
if (!mergeContext.mergeFlags().simulate()) {
// we disable the all in multi-field mappers
if (mergeWithMapper instanceof AllFieldMapper.IncludeInAll) {
((AllFieldMapper.IncludeInAll) mergeWithMapper).unsetIncludeInAll();
}
if (newMappersBuilder == null) {
newMappersBuilder = ImmutableOpenMap.builder(mappers);
}
newMappersBuilder.put(mergeWithMapper.name(), mergeWithMapper);
if (mergeWithMapper instanceof AbstractFieldMapper) {
if (newFieldMappers == null) {
newFieldMappers = new ArrayList<FieldMapper>(2);
}
newFieldMappers.add((FieldMapper) mergeWithMapper);
}
}
} else {
mergeIntoMapper.merge(mergeWithMapper, mergeContext);
}
}
// first add all field mappers
if (newFieldMappers != null) {
mergeContext.docMapper().addFieldMappers(newFieldMappers);
}
// now publish mappers
if (newMappersBuilder != null) {
mappers = newMappersBuilder.build();
}
}
public void traverse(FieldMapperListener fieldMapperListener) {
for (ObjectCursor<Mapper> cursor : mappers.values()) {
cursor.value.traverse(fieldMapperListener);
}
}
public void close() {
for (ObjectCursor<Mapper> cursor : mappers.values()) {
cursor.value.close();
}
}
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
if (pathType != Defaults.PATH_TYPE) {
builder.field("path", pathType.name().toLowerCase(Locale.ROOT));
}
if (!mappers.isEmpty()) {
builder.startObject("fields");
for (ObjectCursor<Mapper> cursor : mappers.values()) {
cursor.value.toXContent(builder, params);
}
builder.endObject();
}
return builder;
}
}
| 1no label
|
src_main_java_org_elasticsearch_index_mapper_core_AbstractFieldMapper.java
|
2,580 |
class ApplySettings implements NodeSettingsService.Listener {
@Override
public void onRefreshSettings(Settings settings) {
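        // React to dynamic updates of the discovery.zen.minimum_master_nodes setting.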
int minimumMasterNodes = settings.getAsInt("discovery.zen.minimum_master_nodes", ZenDiscovery.this.electMaster.minimumMasterNodes());
if (minimumMasterNodes != ZenDiscovery.this.electMaster.minimumMasterNodes()) {
logger.info("updating discovery.zen.minimum_master_nodes from [{}] to [{}]", ZenDiscovery.this.electMaster.minimumMasterNodes(), minimumMasterNodes);
handleMinimumMasterNodesChanged(minimumMasterNodes);
}
}
}
| 1no label
|
src_main_java_org_elasticsearch_discovery_zen_ZenDiscovery.java
|
5,312 |
public static class Bucket extends InternalTerms.Bucket {
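    // A terms bucket keyed by the raw UTF-8 bytes of the term.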
final BytesRef termBytes;
public Bucket(BytesRef term, long docCount, InternalAggregations aggregations) {
super(docCount, aggregations);
this.termBytes = term;
}
@Override
public String getKey() {
return termBytes.utf8ToString();
}
@Override
public Text getKeyAsText() {
return new BytesText(new BytesArray(termBytes));
}
@Override
public Number getKeyAsNumber() {
// this method is needed for scripted numeric faceting
return Double.parseDouble(termBytes.utf8ToString());
}
@Override
int compareTerm(Terms.Bucket other) {
return BytesRef.getUTF8SortedAsUnicodeComparator().compare(termBytes, ((Bucket) other).termBytes);
}
}
| 1no label
|
src_main_java_org_elasticsearch_search_aggregations_bucket_terms_StringTerms.java
|
94 |
@Target(ElementType.PARAMETER)
@Retention(RetentionPolicy.RUNTIME)
public @interface ConsoleParameter {
String name() default "";
String description() default "";
boolean optional() default false;
}
| 0true
|
commons_src_main_java_com_orientechnologies_common_console_annotation_ConsoleParameter.java
|