Unnamed: 0 (int64, 0–6.45k) | func (string, lengths 29–253k) | target (class label, 2 classes) | project (string, lengths 36–167)
---|---|---|---|
410 | public class ClientClusterProxy implements Cluster {
private final ClientClusterServiceImpl clusterService;
public ClientClusterProxy(ClientClusterServiceImpl clusterService) {
this.clusterService = clusterService;
}
@Override
public String addMembershipListener(MembershipListener listener) {
return clusterService.addMembershipListenerWithInit(listener);
}
@Override
public boolean removeMembershipListener(String registrationId) {
return clusterService.removeMembershipListener(registrationId);
}
@Override
public Set<Member> getMembers() {
final Collection<MemberImpl> members = clusterService.getMemberList();
return members != null ? new LinkedHashSet<Member>(members) : Collections.<Member>emptySet();
}
@Override
public Member getLocalMember() {
throw new UnsupportedOperationException("Client has no local member!");
}
@Override
public long getClusterTime() {
return clusterService.getClusterTime();
}
} | 0true
| hazelcast-client_src_main_java_com_hazelcast_client_proxy_ClientClusterProxy.java |
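For context, a minimal usage sketch for the proxy above, as seen from client code (it assumes a reachable Hazelcast cluster with default configuration; the proxy is obtained indirectly through `HazelcastInstance.getCluster()`):

```java
import com.hazelcast.client.HazelcastClient;
import com.hazelcast.core.Cluster;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.core.Member;

public class ClientClusterExample {
    public static void main(String[] args) {
        HazelcastInstance client = HazelcastClient.newHazelcastClient();
        Cluster cluster = client.getCluster(); // backed by ClientClusterProxy on the client side
        for (Member member : cluster.getMembers()) {
            System.out.println(member);
        }
        System.out.println("cluster time: " + cluster.getClusterTime());
        // cluster.getLocalMember() would throw UnsupportedOperationException here:
        // as shown above, a client is not itself a member of the cluster.
        client.shutdown();
    }
}
```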
3,988 | public class FunctionScoreQueryBuilder extends BaseQueryBuilder implements BoostableQueryBuilder<FunctionScoreQueryBuilder> {
private final QueryBuilder queryBuilder;
private final FilterBuilder filterBuilder;
private Float boost;
private Float maxBoost;
private String scoreMode;
private String boostMode;
private ArrayList<FilterBuilder> filters = new ArrayList<FilterBuilder>();
private ArrayList<ScoreFunctionBuilder> scoreFunctions = new ArrayList<ScoreFunctionBuilder>();
public FunctionScoreQueryBuilder(QueryBuilder queryBuilder) {
this.queryBuilder = queryBuilder;
this.filterBuilder = null;
}
public FunctionScoreQueryBuilder(FilterBuilder filterBuilder) {
this.filterBuilder = filterBuilder;
this.queryBuilder = null;
}
public FunctionScoreQueryBuilder() {
this.filterBuilder = null;
this.queryBuilder = null;
}
public FunctionScoreQueryBuilder(ScoreFunctionBuilder scoreFunctionBuilder) {
queryBuilder = null;
filterBuilder = null;
this.filters.add(null);
this.scoreFunctions.add(scoreFunctionBuilder);
}
public FunctionScoreQueryBuilder add(FilterBuilder filter, ScoreFunctionBuilder scoreFunctionBuilder) {
this.filters.add(filter);
this.scoreFunctions.add(scoreFunctionBuilder);
return this;
}
public FunctionScoreQueryBuilder add(ScoreFunctionBuilder scoreFunctionBuilder) {
this.filters.add(null);
this.scoreFunctions.add(scoreFunctionBuilder);
return this;
}
public FunctionScoreQueryBuilder scoreMode(String scoreMode) {
this.scoreMode = scoreMode;
return this;
}
public FunctionScoreQueryBuilder boostMode(String boostMode) {
this.boostMode = boostMode;
return this;
}
public FunctionScoreQueryBuilder boostMode(CombineFunction combineFunction) {
this.boostMode = combineFunction.getName();
return this;
}
public FunctionScoreQueryBuilder maxBoost(float maxBoost) {
this.maxBoost = maxBoost;
return this;
}
/**
* Sets the boost for this query. Documents matching this query will (in
* addition to the normal weightings) have their score multiplied by the
* boost provided.
*/
public FunctionScoreQueryBuilder boost(float boost) {
this.boost = boost;
return this;
}
@Override
protected void doXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(FunctionScoreQueryParser.NAME);
if (queryBuilder != null) {
builder.field("query");
queryBuilder.toXContent(builder, params);
} else if (filterBuilder != null) {
builder.field("filter");
filterBuilder.toXContent(builder, params);
}
// If there is only one function without a filter, we later want to
// create a FunctionScoreQuery.
// For this, we only build the scoreFunction. This will be translated to
// a FunctionScoreQuery in the parser.
if (filters.size() == 1 && filters.get(0) == null) {
scoreFunctions.get(0).toXContent(builder, params);
} else { // in all other cases we build the format needed for a
// FiltersFunctionScoreQuery
builder.startArray("functions");
for (int i = 0; i < filters.size(); i++) {
builder.startObject();
if (filters.get(i) != null) {
builder.field("filter");
filters.get(i).toXContent(builder, params);
}
scoreFunctions.get(i).toXContent(builder, params);
builder.endObject();
}
builder.endArray();
}
if (scoreMode != null) {
builder.field("score_mode", scoreMode);
}
if (boostMode != null) {
builder.field("boost_mode", boostMode);
}
if (maxBoost != null) {
builder.field("max_boost", maxBoost);
}
if (boost != null) {
builder.field("boost", boost);
}
builder.endObject();
}
} | 1no label
| src_main_java_org_elasticsearch_index_query_functionscore_FunctionScoreQueryBuilder.java |
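A minimal sketch of driving the fluent API above (assumes an Elasticsearch 1.x classpath; `QueryBuilders.matchAllQuery()` is a standard factory, and the mode strings are those the parser accepts):

```java
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder;

public class FunctionScoreExample {
    public static void main(String[] args) {
        // Every setter returns `this`, so the builder chains fluently.
        FunctionScoreQueryBuilder query =
                new FunctionScoreQueryBuilder(QueryBuilders.matchAllQuery())
                        .scoreMode("sum")       // how scores of multiple functions combine
                        .boostMode("multiply")  // how the function score combines with the query score
                        .maxBoost(10.0f)
                        .boost(2.0f);
        // BaseQueryBuilder's toString() renders the JSON produced by doXContent(...) above.
        System.out.println(query.toString());
    }
}
```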
0 | public class AndroidBuildHookProvider implements ICeylonBuildHookProvider {
private static final class AndroidCeylonBuildHook extends CeylonBuildHook {
public static final String CEYLON_GENERATED_ARCHIVES_PREFIX = "ceylonGenerated-";
public static final String CEYLON_GENERATED_CLASSES_ARCHIVE = CEYLON_GENERATED_ARCHIVES_PREFIX + "CeylonClasses.jar";
public static final String ANDROID_LIBS_DIRECTORY = "libs";
public static final String[] ANDROID_PROVIDED_PACKAGES = new String[] {"android.app"};
public static final String[] UNNECESSARY_CEYLON_RUNTIME_LIBRARIES = new String[] {"org.jboss.modules",
"com.redhat.ceylon.module-resolver",
"com.redhat.ceylon.common"};
boolean areModulesChanged = false;
boolean hasAndroidNature = false;
boolean isReentrantBuild = false;
boolean isFullBuild = false;
WeakReference<IProgressMonitor> monitorRef = null;
WeakReference<IProject> projectRef = null;
private IProgressMonitor getMonitor() {
if (monitorRef != null) {
return monitorRef.get();
}
return null;
}
private IProject getProject() {
if (projectRef != null) {
return projectRef.get();
}
return null;
}
@Override
protected void startBuild(int kind, @SuppressWarnings("rawtypes") Map args,
IProject project, IBuildConfiguration config, IBuildContext context, IProgressMonitor monitor) throws CoreException {
try {
hasAndroidNature = project.hasNature("com.android.ide.eclipse.adt.AndroidNature");
} catch (CoreException e) {
hasAndroidNature = false;
}
areModulesChanged = false;
monitorRef = new WeakReference<IProgressMonitor>(monitor);
projectRef = new WeakReference<IProject>(project);
isReentrantBuild = args.containsKey(CeylonBuilder.BUILDER_ID + ".reentrant");
if (hasAndroidNature) {
IJavaProject javaProject = JavaCore.create(project);
boolean ceylonCPCFound = false;
IMarker[] buildMarkers = project.findMarkers(IJavaModelMarker.BUILDPATH_PROBLEM_MARKER, true, DEPTH_ZERO);
for (IMarker m: buildMarkers) {
if (CeylonAndroidPlugin.PLUGIN_ID.equals(m.getAttribute(IMarker.SOURCE_ID))) {
m.delete();
}
}
for (IClasspathEntry entry : javaProject.getRawClasspath()) {
if (CeylonClasspathUtil.isProjectModulesClasspathContainer(entry.getPath())) {
ceylonCPCFound = true;
} else {
IPath containerPath = entry.getPath();
int size = containerPath.segmentCount();
if (size > 0) {
if (containerPath.segment(0).equals("com.android.ide.eclipse.adt.LIBRARIES") ||
containerPath.segment(0).equals("com.android.ide.eclipse.adt.DEPENDENCIES")) {
if (! ceylonCPCFound) {
//if the ClassPathContainer is missing, add an error
IMarker marker = project.createMarker(IJavaModelMarker.BUILDPATH_PROBLEM_MARKER);
marker.setAttribute(IMarker.SOURCE_ID, CeylonAndroidPlugin.PLUGIN_ID);
marker.setAttribute(IMarker.MESSAGE, "Invalid Java Build Path for project " + project.getName() + " : " +
"The Ceylon libraries should be set before the Android libraries in the Java Build Path. " +
"Move down the 'Android Private Libraries' and 'Android Dependencies' after the Ceylon Libraries " +
"in the 'Order and Export' tab of the 'Java Build Path' properties page.");
marker.setAttribute(IMarker.PRIORITY, IMarker.PRIORITY_HIGH);
marker.setAttribute(IMarker.SEVERITY, IMarker.SEVERITY_ERROR);
marker.setAttribute(IMarker.LOCATION, "Java Build Path Order");
throw new CoreException(new Status(IStatus.CANCEL, CeylonAndroidPlugin.PLUGIN_ID, IResourceStatus.OK,
"Build cancelled because of invalid build path", null));
}
}
}
}
}
}
}
@Override
protected void deltasAnalyzed(List<IResourceDelta> currentDeltas,
BooleanHolder sourceModified,
BooleanHolder mustDoFullBuild,
BooleanHolder mustResolveClasspathContainer,
boolean mustContinueBuild) {
if (mustContinueBuild && hasAndroidNature) {
CeylonBuilder.waitForUpToDateJavaModel(10000, getProject(), getMonitor());
}
}
@Override
protected void setAndRefreshClasspathContainer() {
areModulesChanged = true;
}
@Override
protected void doFullBuild() {
isFullBuild = true;
}
@Override
protected void afterGeneratingBinaries() {
IProject project = getProject();
if (project == null) {
return;
}
if (! isReentrantBuild && hasAndroidNature) {
try {
File libsDirectory = project.findMember(ANDROID_LIBS_DIRECTORY).getLocation().toFile();
if (!libsDirectory.exists()) {
libsDirectory.mkdirs();
}
Files.walkFileTree(java.nio.file.FileSystems.getDefault().getPath(libsDirectory.getAbsolutePath()), new SimpleFileVisitor<Path>() {
@Override
public FileVisitResult visitFile(Path path, BasicFileAttributes attrs) throws IOException
{
if (areModulesChanged || isFullBuild ?
path.getFileName().toString().startsWith(CEYLON_GENERATED_ARCHIVES_PREFIX) :
path.getFileName().toString().equals(CEYLON_GENERATED_CLASSES_ARCHIVE)) {
try {
Files.delete(path);
} catch(IOException ioe) {
CeylonAndroidPlugin.logError("Could not delete a ceylon jar from the android libs directory", ioe);
}
}
return FileVisitResult.CONTINUE;
}
});
final List<IFile> filesToAddInArchive = new LinkedList<>();
final IFolder ceylonOutputFolder = CeylonBuilder.getCeylonClassesOutputFolder(project);
ceylonOutputFolder.refreshLocal(DEPTH_INFINITE, getMonitor());
ceylonOutputFolder.accept(new IResourceVisitor() {
@Override
public boolean visit(IResource resource) throws CoreException {
if (resource instanceof IFile) {
filesToAddInArchive.add((IFile)resource);
}
return true;
}
});
if (! filesToAddInArchive.isEmpty()) {
JarPackageData jarPkgData = new JarPackageData();
jarPkgData.setBuildIfNeeded(false);
jarPkgData.setOverwrite(true);
jarPkgData.setGenerateManifest(true);
jarPkgData.setExportClassFiles(true);
jarPkgData.setCompress(true);
jarPkgData.setJarLocation(project.findMember("libs").getLocation().append(CEYLON_GENERATED_CLASSES_ARCHIVE).makeAbsolute());
jarPkgData.setElements(filesToAddInArchive.toArray());
JarWriter3 jarWriter = null;
try {
jarWriter = new JarWriter3(jarPkgData, null);
for (IFile fileToAdd : filesToAddInArchive) {
jarWriter.write(fileToAdd, fileToAdd.getFullPath().makeRelativeTo(ceylonOutputFolder.getFullPath()));
}
} finally {
if (jarWriter != null) {
jarWriter.close();
}
}
}
if (isFullBuild || areModulesChanged) {
List<Path> jarsToCopyToLib = new LinkedList<>();
IJavaProject javaProject = JavaCore.create(project);
List<IClasspathContainer> cpContainers = CeylonClasspathUtil.getCeylonClasspathContainers(javaProject);
if (cpContainers != null) {
for (IClasspathContainer cpc : cpContainers) {
for (IClasspathEntry cpe : cpc.getClasspathEntries()) {
if (cpe.getEntryKind() == IClasspathEntry.CPE_LIBRARY) {
Path path = FileSystems.getDefault().getPath(cpe.getPath().toOSString());
if (! Files.isDirectory(path, LinkOption.NOFOLLOW_LINKS) &&
Files.exists(path, LinkOption.NOFOLLOW_LINKS)) {
boolean isAndroidProvidedJar = false;
providerPackageFound:
for (IPackageFragmentRoot root : javaProject.getAllPackageFragmentRoots()) {
if (javaProject.isOnClasspath(root) &&
cpe.equals(root.getResolvedClasspathEntry())) {
for (String providedPackage : ANDROID_PROVIDED_PACKAGES) {
if (root.getPackageFragment(providedPackage).exists()) {
isAndroidProvidedJar = true;
break providerPackageFound;
}
}
}
}
if (! isAndroidProvidedJar) {
jarsToCopyToLib.add(path);
}
}
}
}
}
}
for (String runtimeJar : CeylonPlugin.getRuntimeRequiredJars()) {
boolean isNecessary = true;
for (String unnecessaryRuntime : UNNECESSARY_CEYLON_RUNTIME_LIBRARIES) {
if (runtimeJar.contains(unnecessaryRuntime + "-")) {
isNecessary = false;
break;
}
}
if (isNecessary) {
jarsToCopyToLib.add(FileSystems.getDefault().getPath(runtimeJar));
}
}
for (Path archive : jarsToCopyToLib) {
String newName = CEYLON_GENERATED_ARCHIVES_PREFIX + archive.getFileName();
if (newName.endsWith(ArtifactContext.CAR)) {
newName = newName.replaceFirst("\\.car$", "\\.jar");
}
Path destinationPath = FileSystems.getDefault().getPath(project.findMember(ANDROID_LIBS_DIRECTORY).getLocation().toOSString(), newName);
try {
Files.copy(archive, destinationPath);
} catch (IOException e) {
CeylonAndroidPlugin.logError("Could not copy a ceylon jar to the android libs directory", e);
}
}
}
project.findMember(ANDROID_LIBS_DIRECTORY).refreshLocal(DEPTH_INFINITE, getMonitor());
} catch (Exception e) {
CeylonAndroidPlugin.logError("Error during the generation of ceylon-derived archives for Android", e);
}
}
}
@Override
protected void endBuild() {
areModulesChanged = false;
hasAndroidNature = false;
isReentrantBuild = false;
isFullBuild = false;
monitorRef = null;
projectRef = null;
}
}
private static CeylonBuildHook buildHook = new AndroidCeylonBuildHook();
@Override
public CeylonBuildHook getHook() {
return buildHook;
}
} | 0true
| plugins_com.redhat.ceylon.eclipse.android.plugin_src_com_redhat_ceylon_eclipse_android_plugin_AndroidBuildHookProvider.java |
671 | public class DeleteWarmerResponse extends AcknowledgedResponse {
DeleteWarmerResponse() {
super();
}
DeleteWarmerResponse(boolean acknowledged) {
super(acknowledged);
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
readAcknowledged(in);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
writeAcknowledged(out);
}
} | 0true
| src_main_java_org_elasticsearch_action_admin_indices_warmer_delete_DeleteWarmerResponse.java |
2,845 | @SuppressWarnings("unchecked")
private static class PartitionTable {
final Set<Integer>[] partitions = new Set[InternalPartition.MAX_REPLICA_COUNT];
Set<Integer> getPartitions(int index) {
check(index);
Set<Integer> set = partitions[index];
if (set == null) {
set = new HashSet<Integer>();
partitions[index] = set;
}
return set;
}
boolean add(int index, Integer partitionId) {
return getPartitions(index).add(partitionId);
}
boolean contains(int index, Integer partitionId) {
return getPartitions(index).contains(partitionId);
}
boolean contains(Integer partitionId) {
for (Set<Integer> set : partitions) {
if (set != null && set.contains(partitionId)) {
return true;
}
}
return false;
}
boolean remove(int index, Integer partitionId) {
return getPartitions(index).remove(partitionId);
}
int size(int index) {
return getPartitions(index).size();
}
void reset() {
for (Set<Integer> set : partitions) {
if (set != null) {
set.clear();
}
}
}
private void check(int index) {
if (index < 0 || index >= InternalPartition.MAX_REPLICA_COUNT) {
throw new ArrayIndexOutOfBoundsException(index);
}
}
} | 1no label
| hazelcast_src_main_java_com_hazelcast_partition_impl_PartitionStateGeneratorImpl.java |
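`PartitionTable` is private to `PartitionStateGeneratorImpl`, so it cannot be used directly; the following standalone sketch re-creates its lazy "one set per replica index" pattern (the replica count of 7 mirrors Hazelcast's `InternalPartition.MAX_REPLICA_COUNT`, an assumption here):

```java
import java.util.HashSet;
import java.util.Set;

public class PartitionTableSketch {
    static final int MAX_REPLICA_COUNT = 7; // assumed value of InternalPartition.MAX_REPLICA_COUNT

    @SuppressWarnings("unchecked")
    private final Set<Integer>[] partitions = new Set[MAX_REPLICA_COUNT];

    Set<Integer> getPartitions(int index) {
        if (index < 0 || index >= MAX_REPLICA_COUNT) {
            throw new ArrayIndexOutOfBoundsException(index);
        }
        Set<Integer> set = partitions[index];
        if (set == null) {               // allocate lazily, on first access
            set = new HashSet<Integer>();
            partitions[index] = set;
        }
        return set;
    }

    public static void main(String[] args) {
        PartitionTableSketch table = new PartitionTableSketch();
        table.getPartitions(0).add(42);  // replica 0 (the owner) holds partition 42
        System.out.println(table.getPartitions(0).contains(42)); // true
        System.out.println(table.getPartitions(1).contains(42)); // false: backup set is empty
    }
}
```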
950 | threadPool.executor(executor).execute(new Runnable() {
@Override
public void run() {
try {
masterOperation(request, clusterService.state(), listener);
} catch (Throwable e) {
listener.onFailure(e);
}
}
}); | 0true
| src_main_java_org_elasticsearch_action_support_master_TransportMasterNodeOperationAction.java |
963 | public final class IsLockedRequest extends AbstractIsLockedRequest
implements RetryableRequest {
public IsLockedRequest() {
}
public IsLockedRequest(Data key) {
super(key);
}
public IsLockedRequest(Data key, long threadId) {
super(key, threadId);
}
@Override
protected InternalLockNamespace getNamespace() {
String name = getName();
return new InternalLockNamespace(name);
}
@Override
public int getFactoryId() {
return LockPortableHook.FACTORY_ID;
}
@Override
public int getClassId() {
return LockPortableHook.IS_LOCKED;
}
@Override
public Permission getRequiredPermission() {
String name = getName();
return new LockPermission(name, ActionConstants.ACTION_READ);
}
} | 0true
| hazelcast_src_main_java_com_hazelcast_concurrent_lock_client_IsLockedRequest.java |
916 | while (makeDbCall(iMyDb, new ODbRelatedCall<Boolean>() {
public Boolean call() {
return myIterator.hasNext();
}
})) { | 0true
| core_src_main_java_com_orientechnologies_orient_core_record_impl_ODocumentHelper.java |
1,376 | public static class Builder {
private String alias;
private CompressedString filter;
private String indexRouting;
private String searchRouting;
public Builder(String alias) {
this.alias = alias;
}
public Builder(AliasMetaData aliasMetaData) {
this(aliasMetaData.alias());
filter = aliasMetaData.filter();
indexRouting = aliasMetaData.indexRouting();
searchRouting = aliasMetaData.searchRouting();
}
public String alias() {
return alias;
}
public Builder filter(CompressedString filter) {
this.filter = filter;
return this;
}
public Builder filter(String filter) {
if (!Strings.hasLength(filter)) {
this.filter = null;
return this;
}
try {
XContentParser parser = XContentFactory.xContent(filter).createParser(filter);
try {
filter(parser.mapOrdered());
} finally {
parser.close();
}
return this;
} catch (IOException e) {
throw new ElasticsearchGenerationException("Failed to generate [" + filter + "]", e);
}
}
public Builder filter(Map<String, Object> filter) {
if (filter == null || filter.isEmpty()) {
this.filter = null;
return this;
}
try {
XContentBuilder builder = XContentFactory.jsonBuilder().map(filter);
this.filter = new CompressedString(builder.bytes());
return this;
} catch (IOException e) {
throw new ElasticsearchGenerationException("Failed to build json for alias request", e);
}
}
public Builder filter(XContentBuilder filterBuilder) {
try {
return filter(filterBuilder.string());
} catch (IOException e) {
throw new ElasticsearchGenerationException("Failed to build json for alias request", e);
}
}
public Builder routing(String routing) {
this.indexRouting = routing;
this.searchRouting = routing;
return this;
}
public Builder indexRouting(String indexRouting) {
this.indexRouting = indexRouting;
return this;
}
public Builder searchRouting(String searchRouting) {
this.searchRouting = searchRouting;
return this;
}
public AliasMetaData build() {
return new AliasMetaData(alias, filter, indexRouting, searchRouting);
}
public static void toXContent(AliasMetaData aliasMetaData, XContentBuilder builder, ToXContent.Params params) throws IOException {
builder.startObject(aliasMetaData.alias(), XContentBuilder.FieldCaseConversion.NONE);
boolean binary = params.paramAsBoolean("binary", false);
if (aliasMetaData.filter() != null) {
if (binary) {
builder.field("filter", aliasMetaData.filter.compressed());
} else {
byte[] data = aliasMetaData.filter().uncompressed();
XContentParser parser = XContentFactory.xContent(data).createParser(data);
Map<String, Object> filter = parser.mapOrdered();
parser.close();
builder.field("filter", filter);
}
}
if (aliasMetaData.indexRouting() != null) {
builder.field("index_routing", aliasMetaData.indexRouting());
}
if (aliasMetaData.searchRouting() != null) {
builder.field("search_routing", aliasMetaData.searchRouting());
}
builder.endObject();
}
public static AliasMetaData fromXContent(XContentParser parser) throws IOException {
Builder builder = new Builder(parser.currentName());
String currentFieldName = null;
XContentParser.Token token = parser.nextToken();
if (token == null) {
// no data...
return builder.build();
}
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.START_OBJECT) {
if ("filter".equals(currentFieldName)) {
Map<String, Object> filter = parser.mapOrdered();
builder.filter(filter);
}
} else if (token == XContentParser.Token.VALUE_EMBEDDED_OBJECT) {
if ("filter".equals(currentFieldName)) {
builder.filter(new CompressedString(parser.binaryValue()));
}
} else if (token == XContentParser.Token.VALUE_STRING) {
if ("routing".equals(currentFieldName)) {
builder.routing(parser.text());
} else if ("index_routing".equals(currentFieldName) || "indexRouting".equals(currentFieldName)) {
builder.indexRouting(parser.text());
} else if ("search_routing".equals(currentFieldName) || "searchRouting".equals(currentFieldName)) {
builder.searchRouting(parser.text());
}
}
}
return builder.build();
}
public static void writeTo(AliasMetaData aliasMetaData, StreamOutput out) throws IOException {
out.writeString(aliasMetaData.alias());
if (aliasMetaData.filter() != null) {
out.writeBoolean(true);
aliasMetaData.filter.writeTo(out);
} else {
out.writeBoolean(false);
}
if (aliasMetaData.indexRouting() != null) {
out.writeBoolean(true);
out.writeString(aliasMetaData.indexRouting());
} else {
out.writeBoolean(false);
}
if (aliasMetaData.searchRouting() != null) {
out.writeBoolean(true);
out.writeString(aliasMetaData.searchRouting());
} else {
out.writeBoolean(false);
}
}
public static AliasMetaData readFrom(StreamInput in) throws IOException {
String alias = in.readString();
CompressedString filter = null;
if (in.readBoolean()) {
filter = CompressedString.readCompressedString(in);
}
String indexRouting = null;
if (in.readBoolean()) {
indexRouting = in.readString();
}
String searchRouting = null;
if (in.readBoolean()) {
searchRouting = in.readString();
}
return new AliasMetaData(alias, filter, indexRouting, searchRouting);
}
} | 0true
| src_main_java_org_elasticsearch_cluster_metadata_AliasMetaData.java |
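A minimal sketch of building alias metadata with the builder above (the alias name, field names, and filter body are hypothetical; `filter(String)` parses the JSON and stores it compressed, as shown):

```java
import org.elasticsearch.cluster.metadata.AliasMetaData;

public class AliasMetaDataExample {
    public static void main(String[] args) {
        AliasMetaData alias = new AliasMetaData.Builder("sales-2014")
                .filter("{\"term\": {\"year\": 2014}}") // parsed, normalized, then compressed
                .indexRouting("1")                      // routing used for index operations
                .searchRouting("1,2")                   // routing used for search operations
                .build();
        System.out.println(alias.alias());
    }
}
```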
198 | public interface Authenticator {
void auth(ClientConnection connection) throws AuthenticationException, IOException;
} | 0true
| hazelcast-client_src_main_java_com_hazelcast_client_connection_Authenticator.java |
19 | private class NetworkMessageSender
extends SimpleChannelHandler
{
@Override
public void channelConnected( ChannelHandlerContext ctx, ChannelStateEvent e ) throws Exception
{
Channel ctxChannel = ctx.getChannel();
openedChannel( getURI( (InetSocketAddress) ctxChannel.getRemoteAddress() ), ctxChannel );
channels.add( ctxChannel );
}
@Override
public void messageReceived( ChannelHandlerContext ctx, MessageEvent event ) throws Exception
{
final Message message = (Message) event.getMessage();
msgLog.debug( "Received: " + message );
receiver.receive( message );
}
@Override
public void channelClosed( ChannelHandlerContext ctx, ChannelStateEvent e ) throws Exception
{
closedChannel( ctx.getChannel() );
channels.remove( ctx.getChannel() );
}
@Override
public void exceptionCaught( ChannelHandlerContext ctx, ExceptionEvent e ) throws Exception
{
Throwable cause = e.getCause();
if ( ! ( cause instanceof ConnectException || cause instanceof RejectedExecutionException ) )
{
msgLog.error( "Receive exception:", cause );
}
}
} | 1no label
| enterprise_cluster_src_main_java_org_neo4j_cluster_com_NetworkSender.java |
1,024 | public static class Name {
public static final String General = "OrderImpl_Order";
} | 0true
| core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_order_domain_OrderImpl.java |
2,689 | public abstract class BlobStoreGateway extends SharedStorageGateway {
private BlobStore blobStore;
private ByteSizeValue chunkSize;
private BlobPath basePath;
private ImmutableBlobContainer metaDataBlobContainer;
private boolean compress;
private volatile int currentIndex;
protected BlobStoreGateway(Settings settings, ThreadPool threadPool, ClusterService clusterService) {
super(settings, threadPool, clusterService);
}
protected void initialize(BlobStore blobStore, ClusterName clusterName, @Nullable ByteSizeValue defaultChunkSize) throws IOException {
this.blobStore = blobStore;
this.chunkSize = componentSettings.getAsBytesSize("chunk_size", defaultChunkSize);
this.basePath = BlobPath.cleanPath().add(clusterName.value());
this.metaDataBlobContainer = blobStore.immutableBlobContainer(basePath.add("metadata"));
this.currentIndex = findLatestIndex();
this.compress = componentSettings.getAsBoolean("compress", true);
logger.debug("Latest metadata found at index [" + currentIndex + "]");
}
@Override
public String toString() {
return type() + "://" + blobStore + "/" + basePath;
}
public BlobStore blobStore() {
return blobStore;
}
public BlobPath basePath() {
return basePath;
}
public ByteSizeValue chunkSize() {
return this.chunkSize;
}
@Override
public void reset() throws Exception {
blobStore.delete(BlobPath.cleanPath());
}
@Override
public MetaData read() throws GatewayException {
try {
this.currentIndex = findLatestIndex();
} catch (IOException e) {
throw new GatewayException("Failed to find latest metadata to read from", e);
}
if (currentIndex == -1)
return null;
String metaData = "metadata-" + currentIndex;
try {
return readMetaData(metaDataBlobContainer.readBlobFully(metaData));
} catch (GatewayException e) {
throw e;
} catch (Exception e) {
throw new GatewayException("Failed to read metadata [" + metaData + "] from gateway", e);
}
}
public CommitPoint findCommitPoint(String index, int shardId) throws IOException {
BlobPath path = BlobStoreIndexGateway.shardPath(basePath, index, shardId);
ImmutableBlobContainer container = blobStore.immutableBlobContainer(path);
ImmutableMap<String, BlobMetaData> blobs = container.listBlobs();
List<CommitPoint> commitPointsList = Lists.newArrayList();
for (BlobMetaData md : blobs.values()) {
if (md.length() == 0) { // a commit point that was not flushed yet...
continue;
}
if (md.name().startsWith("commit-")) {
try {
commitPointsList.add(CommitPoints.fromXContent(container.readBlobFully(md.name())));
} catch (Exception e) {
logger.warn("failed to read commit point at path {} with name [{}]", e, path, md.name());
}
}
}
CommitPoints commitPoints = new CommitPoints(commitPointsList);
if (commitPoints.commits().isEmpty()) {
return null;
}
return commitPoints.commits().get(0);
}
@Override
protected void delete(IndexMetaData indexMetaData) throws ElasticsearchException {
BlobPath indexPath = basePath().add("indices").add(indexMetaData.index());
blobStore.delete(indexPath);
}
@Override
public void write(MetaData metaData) throws GatewayException {
final String newMetaData = "metadata-" + (currentIndex + 1);
try {
BytesStreamOutput bStream = new BytesStreamOutput();
StreamOutput stream = bStream;
if (compress) {
stream = CompressorFactory.defaultCompressor().streamOutput(stream);
}
XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON, stream);
builder.startObject();
MetaData.Builder.toXContent(metaData, builder, ToXContent.EMPTY_PARAMS);
builder.endObject();
builder.close();
metaDataBlobContainer.writeBlob(newMetaData, bStream.bytes().streamInput(), bStream.bytes().length());
} catch (IOException e) {
throw new GatewayException("Failed to write metadata [" + newMetaData + "]", e);
}
currentIndex++;
try {
metaDataBlobContainer.deleteBlobsByFilter(new BlobContainer.BlobNameFilter() {
@Override
public boolean accept(String blobName) {
return blobName.startsWith("metadata-") && !newMetaData.equals(blobName);
}
});
} catch (IOException e) {
logger.debug("Failed to delete old metadata, will do it next time", e);
}
}
private int findLatestIndex() throws IOException {
ImmutableMap<String, BlobMetaData> blobs = metaDataBlobContainer.listBlobsByPrefix("metadata-");
int index = -1;
for (BlobMetaData md : blobs.values()) {
if (logger.isTraceEnabled()) {
logger.trace("[findLatestMetadata]: Processing [" + md.name() + "]");
}
String name = md.name();
int fileIndex = Integer.parseInt(name.substring(name.indexOf('-') + 1));
if (fileIndex >= index) {
// try and read the meta data
byte[] data = null;
try {
data = metaDataBlobContainer.readBlobFully(name);
readMetaData(data);
index = fileIndex;
} catch (IOException e) {
logger.warn("[findLatestMetadata]: failed to read metadata from [{}], data_length [{}] ignoring...", e, name, data == null ? "na" : data.length);
}
}
}
return index;
}
private MetaData readMetaData(byte[] data) throws IOException {
XContentParser parser = null;
try {
parser = XContentHelper.createParser(data, 0, data.length);
return MetaData.Builder.fromXContent(parser);
} finally {
if (parser != null) {
parser.close();
}
}
}
} | 0true
| src_main_java_org_elasticsearch_gateway_blobstore_BlobStoreGateway.java |
810 | public class TransportShardMultiPercolateAction extends TransportShardSingleOperationAction<TransportShardMultiPercolateAction.Request, TransportShardMultiPercolateAction.Response> {
private final PercolatorService percolatorService;
@Inject
public TransportShardMultiPercolateAction(Settings settings, ThreadPool threadPool, ClusterService clusterService, TransportService transportService, PercolatorService percolatorService) {
super(settings, threadPool, clusterService, transportService);
this.percolatorService = percolatorService;
}
@Override
protected String transportAction() {
return "mpercolate/shard";
}
@Override
protected String executor() {
return ThreadPool.Names.PERCOLATE;
}
@Override
protected Request newRequest() {
return new Request();
}
@Override
protected Response newResponse() {
return new Response();
}
@Override
protected ClusterBlockException checkGlobalBlock(ClusterState state, Request request) {
return state.blocks().globalBlockedException(ClusterBlockLevel.READ);
}
@Override
protected ClusterBlockException checkRequestBlock(ClusterState state, Request request) {
return state.blocks().indexBlockedException(ClusterBlockLevel.READ, request.index());
}
@Override
protected ShardIterator shards(ClusterState state, Request request) throws ElasticsearchException {
return clusterService.operationRouting().getShards(
clusterService.state(), request.index(), request.shardId(), request.preference
);
}
@Override
protected Response shardOperation(Request request, int shardId) throws ElasticsearchException {
// TODO: Look into combining the shard request's docs into one in-memory index.
Response response = new Response();
response.items = new ArrayList<Response.Item>(request.items.size());
for (Request.Item item : request.items) {
Response.Item responseItem;
int slot = item.slot;
try {
responseItem = new Response.Item(slot, percolatorService.percolate(item.request));
} catch (Throwable t) {
if (TransportActions.isShardNotAvailableException(t)) {
throw (ElasticsearchException) t;
} else {
logger.debug("[{}][{}] failed to multi percolate", t, request.index(), request.shardId());
responseItem = new Response.Item(slot, new StringText(ExceptionsHelper.detailedMessage(t)));
}
}
response.items.add(responseItem);
}
return response;
}
public static class Request extends SingleShardOperationRequest {
private int shardId;
private String preference;
private List<Item> items;
public Request() {
}
public Request(String concreteIndex, int shardId, String preference) {
this.index = concreteIndex;
this.shardId = shardId;
this.preference = preference;
this.items = new ArrayList<Item>();
}
public int shardId() {
return shardId;
}
public void add(Item item) {
items.add(item);
}
public List<Item> items() {
return items;
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
shardId = in.readVInt();
preference = in.readOptionalString();
int size = in.readVInt();
items = new ArrayList<Item>(size);
for (int i = 0; i < size; i++) {
int slot = in.readVInt();
PercolateShardRequest shardRequest = new PercolateShardRequest(index(), shardId);
shardRequest.documentType(in.readString());
shardRequest.source(in.readBytesReference());
shardRequest.docSource(in.readBytesReference());
shardRequest.onlyCount(in.readBoolean());
Item item = new Item(slot, shardRequest);
items.add(item);
}
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeVInt(shardId);
out.writeOptionalString(preference);
out.writeVInt(items.size());
for (Item item : items) {
out.writeVInt(item.slot);
out.writeString(item.request.documentType());
out.writeBytesReference(item.request.source());
out.writeBytesReference(item.request.docSource());
out.writeBoolean(item.request.onlyCount());
}
}
public static class Item {
private final int slot;
private final PercolateShardRequest request;
public Item(int slot, PercolateShardRequest request) {
this.slot = slot;
this.request = request;
}
public int slot() {
return slot;
}
public PercolateShardRequest request() {
return request;
}
}
}
public static class Response extends ActionResponse {
private List<Item> items;
public List<Item> items() {
return items;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeVInt(items.size());
for (Item item : items) {
out.writeVInt(item.slot);
if (item.response != null) {
out.writeBoolean(true);
item.response.writeTo(out);
} else {
out.writeBoolean(false);
out.writeText(item.error);
}
}
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
int size = in.readVInt();
items = new ArrayList<Item>(size);
for (int i = 0; i < size; i++) {
int slot = in.readVInt();
if (in.readBoolean()) {
PercolateShardResponse shardResponse = new PercolateShardResponse();
shardResponse.readFrom(in);
items.add(new Item(slot, shardResponse));
} else {
items.add(new Item(slot, in.readText()));
}
}
}
public static class Item {
private final int slot;
private final PercolateShardResponse response;
private final Text error;
public Item(Integer slot, PercolateShardResponse response) {
this.slot = slot;
this.response = response;
this.error = null;
}
public Item(Integer slot, Text error) {
this.slot = slot;
this.error = error;
this.response = null;
}
public int slot() {
return slot;
}
public PercolateShardResponse response() {
return response;
}
public Text error() {
return error;
}
public boolean failed() {
return error != null;
}
}
}
} | 0true
| src_main_java_org_elasticsearch_action_percolate_TransportShardMultiPercolateAction.java |
135 | (new java.security.PrivilegedExceptionAction<sun.misc.Unsafe>() {
public sun.misc.Unsafe run() throws Exception {
Class<sun.misc.Unsafe> k = sun.misc.Unsafe.class;
for (java.lang.reflect.Field f : k.getDeclaredFields()) {
f.setAccessible(true);
Object x = f.get(null);
if (k.isInstance(x))
return k.cast(x);
}
throw new NoSuchFieldError("the Unsafe");
}}); | 0true
| src_main_java_jsr166e_StampedLock.java |
2,817 | public class ArabicNormalizationFilterFactory extends AbstractTokenFilterFactory {
@Inject
public ArabicNormalizationFilterFactory(Index index, @IndexSettings Settings indexSettings, @Assisted String name, @Assisted Settings settings) {
super(index, indexSettings, name, settings);
}
@Override
public TokenStream create(TokenStream tokenStream) {
return new ArabicNormalizationFilter(tokenStream);
}
} | 0true
| src_main_java_org_elasticsearch_index_analysis_ArabicNormalizationFilterFactory.java |
3,310 | static final class DoubleValues extends DenseDoubleValues {
private final BigDoubleArrayList values;
DoubleValues(BigDoubleArrayList values) {
super(false);
this.values = values;
}
@Override
public double nextValue() {
return values.get(docId);
}
} | 0true
| src_main_java_org_elasticsearch_index_fielddata_plain_DoubleArrayAtomicFieldData.java |
1,745 | private static class FetchDeSerializedCount implements EntryProcessor<String, MyObject> {
@Override
public Object process(Map.Entry<String, MyObject> entry) {
return entry.getValue().deserializedCount;
}
@Override
public EntryBackupProcessor<String, MyObject> getBackupProcessor() {
return null;
}
} | 0true
| hazelcast_src_test_java_com_hazelcast_map_EntryProcessorTest.java |
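The processor above is private to the test; a sketch of how such a processor is applied in practice (assuming `FetchDeSerializedCount` and `MyObject` were promoted to top-level, serializable classes):

```java
import com.hazelcast.core.Hazelcast;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.core.IMap;

public class EntryProcessorUsage {
    public static void main(String[] args) {
        HazelcastInstance hz = Hazelcast.newHazelcastInstance();
        IMap<String, MyObject> map = hz.getMap("test");
        map.put("key", new MyObject());
        // executeOnKey serializes the processor to the member owning "key" and
        // runs it there; returning null from getBackupProcessor() means backup
        // replicas are left untouched.
        Object deserializedCount = map.executeOnKey("key", new FetchDeSerializedCount());
        System.out.println(deserializedCount);
        hz.shutdown();
    }
}
```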
474 | public abstract class ClientProxy implements DistributedObject {
protected final String instanceName;
private final String serviceName;
private final String objectName;
private volatile ClientContext context;
protected ClientProxy(String instanceName, String serviceName, String objectName) {
this.instanceName = instanceName;
this.serviceName = serviceName;
this.objectName = objectName;
}
protected final String listen(ClientRequest registrationRequest, Object partitionKey, EventHandler handler) {
return ListenerUtil.listen(context, registrationRequest, partitionKey, handler);
}
protected final String listen(ClientRequest registrationRequest, EventHandler handler) {
return ListenerUtil.listen(context, registrationRequest, null, handler);
}
protected final boolean stopListening(BaseClientRemoveListenerRequest request, String registrationId) {
return ListenerUtil.stopListening(context, request, registrationId);
}
protected final ClientContext getContext() {
final ClientContext ctx = context;
if (ctx == null) {
throw new DistributedObjectDestroyedException(serviceName, objectName);
}
return ctx;
}
protected final void setContext(ClientContext context) {
this.context = context;
}
@Deprecated
public final Object getId() {
return objectName;
}
public final String getName() {
return objectName;
}
public String getPartitionKey() {
return StringPartitioningStrategy.getPartitionKey(getName());
}
public final String getServiceName() {
return serviceName;
}
public final void destroy() {
onDestroy();
ClientDestroyRequest request = new ClientDestroyRequest(objectName, getServiceName());
try {
context.getInvocationService().invokeOnRandomTarget(request).get();
} catch (Exception e) {
throw ExceptionUtil.rethrow(e);
}
context.removeProxy(this);
context = null;
}
protected abstract void onDestroy();
protected void onShutdown() {
}
protected <T> T invoke(ClientRequest req, Object key) {
try {
final Future future = getInvocationService().invokeOnKeyOwner(req, key);
Object result = future.get();
return toObject(result);
} catch (Exception e) {
throw ExceptionUtil.rethrow(e);
}
}
protected <T> T invokeInterruptibly(ClientRequest req, Object key) throws InterruptedException {
try {
final Future future = getInvocationService().invokeOnKeyOwner(req, key);
Object result = future.get();
return toObject(result);
} catch (Exception e) {
throw ExceptionUtil.rethrowAllowInterrupted(e);
}
}
private ClientInvocationService getInvocationService() {
return getContext().getInvocationService();
}
protected <T> T invoke(ClientRequest req) {
try {
final Future future = getInvocationService().invokeOnRandomTarget(req);
Object result = future.get();
return toObject(result);
} catch (Exception e) {
throw ExceptionUtil.rethrow(e);
}
}
protected <T> T invoke(ClientRequest req, Address address) {
try {
final Future future = getInvocationService().invokeOnTarget(req, address);
Object result = future.get();
return toObject(result);
} catch (Exception e) {
throw ExceptionUtil.rethrow(e);
}
}
protected Data toData(Object o) {
return getContext().getSerializationService().toData(o);
}
protected <T> T toObject(Object data) {
return getContext().getSerializationService().toObject(data);
}
protected void throwExceptionIfNull(Object o) {
if (o == null) {
throw new NullPointerException("Object is null");
}
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
ClientProxy that = (ClientProxy) o;
if (!instanceName.equals(that.instanceName)) {
return false;
}
if (!objectName.equals(that.objectName)) {
return false;
}
if (!serviceName.equals(that.serviceName)) {
return false;
}
return true;
}
@Override
public int hashCode() {
int result = instanceName.hashCode();
result = 31 * result + serviceName.hashCode();
result = 31 * result + objectName.hashCode();
return result;
}
} | 1no label
| hazelcast-client_src_main_java_com_hazelcast_client_spi_ClientProxy.java |
484 | return new RecordIterator<Entry>() {
private final Iterator<Entry> items = currentRow.getValue().getSlice(keySlice, transaction).iterator();
@Override
public boolean hasNext() {
ensureOpen();
return items.hasNext();
}
@Override
public Entry next() {
ensureOpen();
return items.next();
}
@Override
public void close() {
isClosed = true;
}
@Override
public void remove() {
throw new UnsupportedOperationException("Column removal not supported");
}
}; | 0true
| titan-core_src_main_java_com_thinkaurelius_titan_diskstorage_keycolumnvalue_inmemory_InMemoryKeyColumnValueStore.java |
4,183 | public class IndexShardSnapshotAndRestoreService extends AbstractIndexShardComponent {
private final InternalIndexShard indexShard;
private final RepositoriesService repositoriesService;
private final RestoreService restoreService;
@Inject
public IndexShardSnapshotAndRestoreService(ShardId shardId, @IndexSettings Settings indexSettings, IndexShard indexShard, RepositoriesService repositoriesService, RestoreService restoreService) {
super(shardId, indexSettings);
this.indexShard = (InternalIndexShard) indexShard;
this.repositoriesService = repositoriesService;
this.restoreService = restoreService;
}
/**
* Creates shard snapshot
*
* @param snapshotId snapshot id
* @param snapshotStatus snapshot status
*/
public void snapshot(final SnapshotId snapshotId, final IndexShardSnapshotStatus snapshotStatus) {
IndexShardRepository indexShardRepository = repositoriesService.indexShardRepository(snapshotId.getRepository());
if (!indexShard.routingEntry().primary()) {
throw new IndexShardSnapshotFailedException(shardId, "snapshot should be performed only on primary");
}
if (indexShard.routingEntry().relocating()) {
// do not snapshot while primaries are relocating, so we won't get conflicts
throw new IndexShardSnapshotFailedException(shardId, "cannot snapshot while relocating");
}
if (indexShard.state() == IndexShardState.CREATED || indexShard.state() == IndexShardState.RECOVERING) {
// shard has just been created, or still recovering
throw new IndexShardSnapshotFailedException(shardId, "shard didn't fully recover yet");
}
try {
SnapshotIndexCommit snapshotIndexCommit = indexShard.snapshotIndex();
try {
indexShardRepository.snapshot(snapshotId, shardId, snapshotIndexCommit, snapshotStatus);
if (logger.isDebugEnabled()) {
StringBuilder sb = new StringBuilder();
sb.append("snapshot (").append(snapshotId.getSnapshot()).append(") completed to ").append(indexShardRepository).append(", took [").append(TimeValue.timeValueMillis(snapshotStatus.time())).append("]\n");
sb.append(" index : version [").append(snapshotStatus.indexVersion()).append("], number_of_files [").append(snapshotStatus.numberOfFiles()).append("] with total_size [").append(new ByteSizeValue(snapshotStatus.totalSize())).append("]\n");
logger.debug(sb.toString());
}
} finally {
snapshotIndexCommit.release();
}
} catch (SnapshotFailedEngineException e) {
throw e;
} catch (IndexShardSnapshotFailedException e) {
throw e;
} catch (Throwable e) {
throw new IndexShardSnapshotFailedException(shardId, "Failed to snapshot", e);
}
}
/**
* Restores shard from {@link RestoreSource} associated with this shard in routing table
*
* @param recoveryStatus recovery status
*/
public void restore(final RecoveryStatus recoveryStatus) {
RestoreSource restoreSource = indexShard.routingEntry().restoreSource();
if (restoreSource == null) {
throw new IndexShardRestoreFailedException(shardId, "empty restore source");
}
if (logger.isTraceEnabled()) {
logger.trace("[{}] restoring shard [{}]", restoreSource.snapshotId(), shardId);
}
try {
IndexShardRepository indexShardRepository = repositoriesService.indexShardRepository(restoreSource.snapshotId().getRepository());
ShardId snapshotShardId = shardId;
if (!shardId.getIndex().equals(restoreSource.index())) {
snapshotShardId = new ShardId(restoreSource.index(), shardId.id());
}
indexShardRepository.restore(restoreSource.snapshotId(), shardId, snapshotShardId, recoveryStatus);
restoreService.indexShardRestoreCompleted(restoreSource.snapshotId(), shardId);
} catch (Throwable t) {
throw new IndexShardRestoreFailedException(shardId, "restore failed", t);
}
}
} | 1no label
| src_main_java_org_elasticsearch_index_snapshots_IndexShardSnapshotAndRestoreService.java |
2,293 | public enum Recyclers {
;
/** Return a {@link Recycler} that never recycles entries. */
public static <T> Recycler<T> none(Recycler.C<T> c) {
return new NoneRecycler<T>(c);
}
/** Return a concurrent recycler based on a deque. */
public static <T> Recycler<T> concurrentDeque(Recycler.C<T> c, int limit) {
return new ConcurrentDequeRecycler<T>(c, limit);
}
/** Return a recycler based on a deque. */
public static <T> Recycler<T> deque(Recycler.C<T> c, int limit) {
return new DequeRecycler<T>(c, Queues.<T>newArrayDeque(), limit);
}
/** Return a recycler based on a deque. */
public static <T> Recycler.Factory<T> dequeFactory(final Recycler.C<T> c, final int limit) {
return new Recycler.Factory<T>() {
@Override
public Recycler<T> build() {
return deque(c, limit);
}
};
}
/** Wrap two recyclers and forward calls to <code>smallObjectRecycler</code> when <code>size < minSize</code> and to
* <code>defaultRecycler</code> otherwise. */
public static <T> Recycler<T> sizing(final Recycler<T> defaultRecycler, final Recycler<T> smallObjectRecycler, final int minSize) {
return new FilterRecycler<T>() {
@Override
protected Recycler<T> getDelegate() {
return defaultRecycler;
}
@Override
public Recycler.V<T> obtain(int sizing) {
if (sizing > 0 && sizing < minSize) {
return smallObjectRecycler.obtain(sizing);
}
return super.obtain(sizing);
}
@Override
public void close() {
defaultRecycler.close();
smallObjectRecycler.close();
}
};
}
/** Create a thread-local recycler, where each thread will have its own instance, created through the provided factory. */
public static <T> Recycler<T> threadLocal(final Recycler.Factory<T> factory) {
return new FilterRecycler<T>() {
private final CloseableThreadLocal<Recycler<T>> recyclers;
{
recyclers = new CloseableThreadLocal<Recycler<T>>() {
@Override
protected Recycler<T> initialValue() {
return factory.build();
}
};
}
@Override
protected Recycler<T> getDelegate() {
return recyclers.get();
}
@Override
public void close() {
recyclers.close();
}
};
}
/** Create a recycler that is wrapped inside a soft reference, so that it cannot cause {@link OutOfMemoryError}s. */
public static <T> Recycler<T> soft(final Recycler.Factory<T> factory) {
return new FilterRecycler<T>() {
SoftReference<Recycler<T>> ref;
{
ref = new SoftReference<Recycler<T>>(null);
}
@Override
protected Recycler<T> getDelegate() {
Recycler<T> recycler = ref.get();
if (recycler == null) {
recycler = factory.build();
ref = new SoftReference<Recycler<T>>(recycler);
}
return recycler;
}
};
}
/** Create a recycler that wraps data in a SoftReference.
* @see #soft(org.elasticsearch.common.recycler.Recycler.Factory) */
public static <T> Recycler.Factory<T> softFactory(final Recycler.Factory<T> factory) {
return new Recycler.Factory<T>() {
@Override
public Recycler<T> build() {
return soft(factory);
}
};
}
/** Wrap the provided recycler so that calls to {@link Recycler#obtain()} and {@link Recycler.V#release()} are protected by
* a lock. */
public static <T> Recycler<T> locked(final Recycler<T> recycler) {
return new FilterRecycler<T>() {
private final Object lock;
{
this.lock = new Object();
}
@Override
protected Recycler<T> getDelegate() {
return recycler;
}
@Override
public org.elasticsearch.common.recycler.Recycler.V<T> obtain(int sizing) {
synchronized (lock) {
return super.obtain(sizing);
}
}
@Override
public org.elasticsearch.common.recycler.Recycler.V<T> obtain() {
synchronized (lock) {
return super.obtain();
}
}
@Override
protected Recycler.V<T> wrap(final Recycler.V<T> delegate) {
return new Recycler.V<T>() {
@Override
public boolean release() throws ElasticsearchException {
synchronized (lock) {
return delegate.release();
}
}
@Override
public T v() {
return delegate.v();
}
@Override
public boolean isRecycled() {
return delegate.isRecycled();
}
};
}
};
}
/** Create a concurrent implementation that can support concurrent access from <code>concurrencyLevel</code> threads with little contention. */
public static <T> Recycler<T> concurrent(final Recycler.Factory<T> factory, final int concurrencyLevel) {
if (concurrencyLevel < 1) {
throw new ElasticsearchIllegalArgumentException("concurrencyLevel must be >= 1");
}
if (concurrencyLevel == 1) {
return locked(factory.build());
}
return new FilterRecycler<T>() {
private final Recycler<T>[] recyclers;
{
@SuppressWarnings("unchecked")
final Recycler<T>[] recyclers = new Recycler[concurrencyLevel];
this.recyclers = recyclers;
for (int i = 0; i < concurrencyLevel; ++i) {
recyclers[i] = locked(factory.build());
}
}
final int slot() {
final long id = Thread.currentThread().getId();
// don't trust Thread.hashCode to have equiprobable low bits
int slot = (int) MurmurHash3.hash(id);
// make positive, otherwise % may return negative numbers
slot &= 0x7FFFFFFF;
slot %= concurrencyLevel;
return slot;
}
@Override
protected Recycler<T> getDelegate() {
return recyclers[slot()];
}
@Override
public void close() {
for (Recycler<T> recycler : recyclers) {
recycler.close();
}
}
};
}
public static <T> Recycler<T> concurrent(final Recycler.Factory<T> factory) {
return concurrent(factory, Runtime.getRuntime().availableProcessors());
}
} | 0true
| src_main_java_org_elasticsearch_common_recycler_Recyclers.java |
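The `slot()` trick in `concurrent(...)` above deserves a note: thread ids are mixed through a hash before the modulo because their low bits are not equiprobable, and the result is masked positive because `%` can return negatives in Java. A standalone sketch of the same logic (a Stafford mix64 finalizer stands in for the `MurmurHash3.hash(id)` call used in the original):

```java
public class SlotSelection {
    // Stafford "variant 13" 64-bit finalizer, used here as a stand-in mixer.
    static long mix64(long z) {
        z = (z ^ (z >>> 30)) * 0xbf58476d1ce4e5b9L;
        z = (z ^ (z >>> 27)) * 0x94d049bb133111ebL;
        return z ^ (z >>> 31);
    }

    static int slot(int concurrencyLevel) {
        final long id = Thread.currentThread().getId();
        int slot = (int) mix64(id); // mix: raw thread ids cluster in the low bits
        slot &= 0x7FFFFFFF;         // force non-negative before the modulo
        return slot % concurrencyLevel;
    }

    public static void main(String[] args) {
        System.out.println(slot(8)); // stable per thread, spread across 0..7
    }
}
```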
2,073 | public class MultipleEntryBackupOperation extends AbstractMapOperation implements BackupOperation, PartitionAwareOperation {
private Set<Data> keys;
private EntryBackupProcessor backupProcessor;
public MultipleEntryBackupOperation() {
}
public MultipleEntryBackupOperation(String name, Set<Data> keys, EntryBackupProcessor backupProcessor) {
super(name);
this.backupProcessor = backupProcessor;
this.keys = keys;
}
@Override
public void run() throws Exception {
final InternalPartitionService partitionService = getNodeEngine().getPartitionService();
final RecordStore recordStore = mapService.getRecordStore(getPartitionId(), name);
MapEntrySimple entry;
for (Data key : keys) {
if (partitionService.getPartitionId(key) != getPartitionId())
continue;
Object objectKey = mapService.toObject(key);
final Map.Entry<Data, Object> mapEntry = recordStore.getMapEntry(key);
final Object valueBeforeProcess = mapService.toObject(mapEntry.getValue());
entry = new MapEntrySimple(objectKey, valueBeforeProcess);
backupProcessor.processBackup(entry);
if (entry.getValue() == null) {
recordStore.removeBackup(key);
} else {
recordStore.putBackup(key, entry.getValue());
}
}
}
@Override
public boolean returnsResponse() {
return true;
}
@Override
protected void readInternal(ObjectDataInput in) throws IOException {
super.readInternal(in);
backupProcessor = in.readObject();
int size = in.readInt();
keys = new HashSet<Data>(size);
for (int i = 0; i < size; i++) {
Data key = new Data();
key.readData(in);
keys.add(key);
}
}
@Override
protected void writeInternal(ObjectDataOutput out) throws IOException {
super.writeInternal(out);
out.writeObject(backupProcessor);
out.writeInt(keys.size());
for (Data key : keys) {
key.writeData(out);
}
}
@Override
public Object getResponse() {
return true;
}
@Override
public String toString() {
return "MultipleEntryBackupOperation{}";
}
} | 1no label
| hazelcast_src_main_java_com_hazelcast_map_operation_MultipleEntryBackupOperation.java |
797 | public class OFunction {
public static final String CLASS_NAME = "OFunction";
protected ODocument document;
/**
* Creates a new function.
*/
public OFunction() {
document = new ODocument(CLASS_NAME);
setLanguage("SQL");
}
/**
* Creates a new function wrapping the saved document.
*
* @param iDocument
* Document to assign
*/
public OFunction(final ODocument iDocument) {
document = iDocument;
}
/**
* Loads a function.
*
* @param iRid
* RID of the function to load
*/
public OFunction(final ORecordId iRid) {
document = ODatabaseRecordThreadLocal.INSTANCE.get().load(iRid);
}
public String getName() {
return document.field("name");
}
public OFunction setName(final String iName) {
document.field("name", iName);
return this;
}
public String getCode() {
return document.field("code");
}
public OFunction setCode(final String iCode) {
document.field("code", iCode);
saveChanges();
return this;
}
public String getLanguage() {
return document.field("language");
}
public OFunction setLanguage(final String iLanguage) {
document.field("language", iLanguage);
return this;
}
public List<String> getParameters() {
return document.field("parameters");
}
public OFunction setParameters(final List<String> iParameters) {
document.field("parameters", iParameters);
return this;
}
public boolean isIdempotent() {
final Boolean idempotent = document.field("idempotent");
return idempotent != null && idempotent;
}
public OFunction setIdempotent(final boolean iIdempotent) {
document.field("idempotent", iIdempotent);
saveChanges();
return this;
}
public Object execute(final Object... iArgs) {
return executeInContext(null, iArgs);
}
public Object executeInContext(final OCommandContext iContext, final Object... iArgs) {
final OCommandExecutorFunction command = new OCommandExecutorFunction();
command.parse(new OCommandFunction(getName()));
final List<String> params = getParameters();
// CONVERT PARAMETERS INTO A MAP
Map<Object, Object> args = null;
if (iArgs.length > 0) {
args = new LinkedHashMap<Object, Object>();
for (int i = 0; i < iArgs.length; ++i) {
// final Object argValue = ORecordSerializerStringAbstract.getTypeValue(iArgs[i].toString());
final Object argValue = iArgs[i];
if (params != null && i < params.size())
args.put(params.get(i), argValue);
else
args.put("param" + i, argValue);
}
}
return command.executeInContext(iContext, args);
}
public Object execute(final Map<Object, Object> iArgs) {
final long start = Orient.instance().getProfiler().startChrono();
final OCommandExecutorScript command = new OCommandExecutorScript();
command.parse(new OCommandScript(getLanguage(), getCode()));
final Object result = command.execute(iArgs);
if (Orient.instance().getProfiler().isRecording())
Orient
.instance()
.getProfiler()
.stopChrono("db." + ODatabaseRecordThreadLocal.INSTANCE.get().getName() + ".function.execute",
"Time to execute a function", start, "db.*.function.execute");
return result;
}
public ORID getId() {
return document.getIdentity();
}
@Override
public String toString() {
return getName();
}
/**
* Save pending changes if any.
*/
private void saveChanges() {
document.save();
}
} | 1no label
| core_src_main_java_com_orientechnologies_orient_core_metadata_function_OFunction.java |
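A minimal sketch of creating and invoking a function through OrientDB's function library (the in-memory URL, function name, and JavaScript body are placeholders, and `createFunction` on the metadata's function library is an assumption about the surrounding API):

```java
import java.util.Arrays;

import com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx;
import com.orientechnologies.orient.core.metadata.function.OFunction;

public class OFunctionExample {
    public static void main(String[] args) {
        ODatabaseDocumentTx db = new ODatabaseDocumentTx("memory:demo").create();
        try {
            OFunction sum = db.getMetadata().getFunctionLibrary().createFunction("sum");
            sum.setLanguage("javascript");
            sum.setCode("return parseInt(a) + parseInt(b);"); // setCode() also saves pending changes
            sum.setParameters(Arrays.asList("a", "b"));
            sum.setIdempotent(true);
            System.out.println(sum.execute(3, 4)); // positional args are mapped to "a" and "b"
        } finally {
            db.drop();
        }
    }
}
```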
262 | public enum TIMEOUT_STRATEGY {
RETURN, EXCEPTION
} | 0true
| core_src_main_java_com_orientechnologies_orient_core_command_OCommandContext.java |
997 | execute(request, new ActionListener<Response>() {
@Override
public void onResponse(Response result) {
try {
channel.sendResponse(result);
} catch (Throwable e) {
onFailure(e);
}
}
@Override
public void onFailure(Throwable e) {
try {
channel.sendResponse(e);
} catch (Throwable e1) {
logger.warn("Failed to send response for " + transportAction, e1);
}
}
}); | 0true
| src_main_java_org_elasticsearch_action_support_replication_TransportShardReplicationOperationAction.java |
2,656 | static class UnicastPingRequest extends TransportRequest {
int id;
TimeValue timeout;
PingResponse pingResponse;
UnicastPingRequest() {
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
id = in.readInt();
timeout = readTimeValue(in);
pingResponse = readPingResponse(in);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeInt(id);
timeout.writeTo(out);
pingResponse.writeTo(out);
}
} | 0true
| src_main_java_org_elasticsearch_discovery_zen_ping_unicast_UnicastZenPing.java |
632 | public class NullBroadleafVariableExpression implements BroadleafVariableExpression {
@Override
public String getName() {
return null;
}
} | 0true
| common_src_main_java_org_broadleafcommerce_common_web_expression_NullBroadleafVariableExpression.java |
2,634 | threadPool.schedule(TimeValue.timeValueMillis(timeout.millis() / 2), ThreadPool.Names.GENERIC, new Runnable() {
@Override
public void run() {
try {
sendPingRequest(id);
} catch (Exception e) {
logger.warn("[{}] failed to send second ping request", e, id);
}
}
}); | 0true
| src_main_java_org_elasticsearch_discovery_zen_ping_multicast_MulticastZenPing.java |
2 | public final class OAlwaysLessKey implements Comparable<Comparable<?>> {
public int compareTo(Comparable<?> o) {
return -1;
}
} | 0true
| commons_src_main_java_com_orientechnologies_common_collection_OAlwaysLessKey.java |
3,106 | public class FlushNotAllowedEngineException extends EngineException {
public FlushNotAllowedEngineException(ShardId shardId, String msg) {
super(shardId, msg);
}
@Override
public RestStatus status() {
return RestStatus.SERVICE_UNAVAILABLE;
}
} | 0true
| src_main_java_org_elasticsearch_index_engine_FlushNotAllowedEngineException.java |
372 | public static class GroupingTestMapper
implements Mapper<Integer, Integer, String, Integer> {
@Override
public void map(Integer key, Integer value, Context<String, Integer> collector) {
collector.emit(String.valueOf(key % 4), value);
}
} | 0true
| hazelcast-client_src_test_java_com_hazelcast_client_mapreduce_DistributedMapperClientMapReduceTest.java |
1,985 | map.addEntryListener(new EntryAdapter() {
@Override
public void entryAdded(EntryEvent event) {
latch.countDown();
}
}, true); | 0true
| hazelcast_src_test_java_com_hazelcast_map_mapstore_MapStoreTest.java |
5,221 | public class HistogramParser implements Aggregator.Parser {
@Override
public String type() {
return InternalHistogram.TYPE.name();
}
@Override
public AggregatorFactory parse(String aggregationName, XContentParser parser, SearchContext context) throws IOException {
ValuesSourceConfig<NumericValuesSource> config = new ValuesSourceConfig<NumericValuesSource>(NumericValuesSource.class);
String field = null;
String script = null;
String scriptLang = null;
Map<String, Object> scriptParams = null;
boolean keyed = false;
long minDocCount = 1;
InternalOrder order = (InternalOrder) InternalOrder.KEY_ASC;
long interval = -1;
boolean assumeSorted = false;
String format = null;
XContentParser.Token token;
String currentFieldName = null;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.VALUE_STRING) {
if ("field".equals(currentFieldName)) {
field = parser.text();
} else if ("script".equals(currentFieldName)) {
script = parser.text();
} else if ("lang".equals(currentFieldName)) {
scriptLang = parser.text();
} else if ("format".equals(currentFieldName)) {
format = parser.text();
} else {
throw new SearchParseException(context, "Unknown key for a " + token + " in aggregation [" + aggregationName + "]: [" + currentFieldName + "].");
}
} else if (token == XContentParser.Token.VALUE_NUMBER) {
if ("interval".equals(currentFieldName)) {
interval = parser.longValue();
} else if ("min_doc_count".equals(currentFieldName) || "minDocCount".equals(currentFieldName)) {
minDocCount = parser.longValue();
} else {
throw new SearchParseException(context, "Unknown key for a " + token + " in aggregation [" + aggregationName + "]: [" + currentFieldName + "].");
}
} else if (token == XContentParser.Token.VALUE_BOOLEAN) {
if ("keyed".equals(currentFieldName)) {
keyed = parser.booleanValue();
} else if ("script_values_sorted".equals(currentFieldName)) {
assumeSorted = parser.booleanValue();
} else {
throw new SearchParseException(context, "Unknown key for a " + token + " in aggregation [" + aggregationName + "]: [" + currentFieldName + "].");
}
} else if (token == XContentParser.Token.START_OBJECT) {
if ("params".equals(currentFieldName)) {
scriptParams = parser.map();
} else if ("order".equals(currentFieldName)) {
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.VALUE_STRING) {
String dir = parser.text();
boolean asc = "asc".equals(dir);
if (!asc && !"desc".equals(dir)) {
throw new SearchParseException(context, "Unknown order direction [" + dir + "] in aggregation [" + aggregationName + "]. Should be either [asc] or [desc]");
}
order = resolveOrder(currentFieldName, asc);
}
}
} else {
throw new SearchParseException(context, "Unknown key for a " + token + " in aggregation [" + aggregationName + "]: [" + currentFieldName + "].");
}
} else {
throw new SearchParseException(context, "Unexpected token " + token + " in aggregation [" + aggregationName + "].");
}
}
if (interval < 0) {
throw new SearchParseException(context, "Missing required field [interval] for histogram aggregation [" + aggregationName + "]");
}
Rounding rounding = new Rounding.Interval(interval);
if (script != null) {
config.script(context.scriptService().search(context.lookup(), scriptLang, script, scriptParams));
}
if (!assumeSorted) {
// we need values to be sorted and unique for efficiency
config.ensureSorted(true);
}
if (field == null) {
return new HistogramAggregator.Factory(aggregationName, config, rounding, order, keyed, minDocCount, InternalHistogram.FACTORY);
}
FieldMapper<?> mapper = context.smartNameFieldMapper(field);
if (mapper == null) {
config.unmapped(true);
return new HistogramAggregator.Factory(aggregationName, config, rounding, order, keyed, minDocCount, InternalHistogram.FACTORY);
}
IndexFieldData<?> indexFieldData = context.fieldData().getForField(mapper);
config.fieldContext(new FieldContext(field, indexFieldData));
if (format != null) {
config.formatter(new ValueFormatter.Number.Pattern(format));
}
return new HistogramAggregator.Factory(aggregationName, config, rounding, order, keyed, minDocCount, InternalHistogram.FACTORY);
}
static InternalOrder resolveOrder(String key, boolean asc) {
if ("_key".equals(key)) {
return (InternalOrder) (asc ? InternalOrder.KEY_ASC : InternalOrder.KEY_DESC);
}
if ("_count".equals(key)) {
return (InternalOrder) (asc ? InternalOrder.COUNT_ASC : InternalOrder.COUNT_DESC);
}
int i = key.indexOf('.');
if (i < 0) {
return new InternalOrder.Aggregation(key, null, asc);
}
return new InternalOrder.Aggregation(key.substring(0, i), key.substring(i + 1), asc);
}
} | 1no label
| src_main_java_org_elasticsearch_search_aggregations_bucket_histogram_HistogramParser.java |
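The parser above is the server-side counterpart of the histogram aggregation DSL. A client-side sketch for the 1.x-era Java API that produces a body this parser would accept; "price" is a hypothetical numeric field.

import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.bucket.histogram.HistogramBuilder;

public class HistogramRequestDemo {
    public static void main(String[] args) {
        // buckets documents into fixed-width intervals of 50 over the "price" field
        HistogramBuilder prices = AggregationBuilders.histogram("prices")
                .field("price")
                .interval(50);
        // attach 'prices' to a SearchRequestBuilder via addAggregation(...)
    }
}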
2,493 | public interface XContentGenerator {
XContentType contentType();
void usePrettyPrint();
void usePrintLineFeedAtEnd();
void writeStartArray() throws IOException;
void writeEndArray() throws IOException;
void writeStartObject() throws IOException;
void writeEndObject() throws IOException;
void writeFieldName(String name) throws IOException;
void writeFieldName(XContentString name) throws IOException;
void writeString(String text) throws IOException;
void writeString(char[] text, int offset, int len) throws IOException;
void writeUTF8String(byte[] text, int offset, int length) throws IOException;
void writeBinary(byte[] data, int offset, int len) throws IOException;
void writeBinary(byte[] data) throws IOException;
void writeNumber(int v) throws IOException;
void writeNumber(long v) throws IOException;
void writeNumber(double d) throws IOException;
void writeNumber(float f) throws IOException;
void writeBoolean(boolean state) throws IOException;
void writeNull() throws IOException;
void writeStringField(String fieldName, String value) throws IOException;
void writeStringField(XContentString fieldName, String value) throws IOException;
void writeBooleanField(String fieldName, boolean value) throws IOException;
void writeBooleanField(XContentString fieldName, boolean value) throws IOException;
void writeNullField(String fieldName) throws IOException;
void writeNullField(XContentString fieldName) throws IOException;
void writeNumberField(String fieldName, int value) throws IOException;
void writeNumberField(XContentString fieldName, int value) throws IOException;
void writeNumberField(String fieldName, long value) throws IOException;
void writeNumberField(XContentString fieldName, long value) throws IOException;
void writeNumberField(String fieldName, double value) throws IOException;
void writeNumberField(XContentString fieldName, double value) throws IOException;
void writeNumberField(String fieldName, float value) throws IOException;
void writeNumberField(XContentString fieldName, float value) throws IOException;
void writeBinaryField(String fieldName, byte[] data) throws IOException;
void writeBinaryField(XContentString fieldName, byte[] data) throws IOException;
void writeArrayFieldStart(String fieldName) throws IOException;
void writeArrayFieldStart(XContentString fieldName) throws IOException;
void writeObjectFieldStart(String fieldName) throws IOException;
void writeObjectFieldStart(XContentString fieldName) throws IOException;
void writeRawField(String fieldName, byte[] content, OutputStream bos) throws IOException;
void writeRawField(String fieldName, byte[] content, int offset, int length, OutputStream bos) throws IOException;
void writeRawField(String fieldName, InputStream content, OutputStream bos) throws IOException;
void writeRawField(String fieldName, BytesReference content, OutputStream bos) throws IOException;
void copyCurrentStructure(XContentParser parser) throws IOException;
void flush() throws IOException;
void close() throws IOException;
} | 0true
| src_main_java_org_elasticsearch_common_xcontent_XContentGenerator.java |
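XContentGenerator is the low-level writer interface; application code normally drives it through XContentBuilder, which forwards to methods like writeStartObject and writeFieldName above. A small sketch, with placeholder field names and values:

import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;

public class XContentDemo {
    public static void main(String[] args) throws Exception {
        XContentBuilder builder = XContentFactory.jsonBuilder();
        builder.startObject()
                .field("user", "kimchy")
                .field("age", 42)
                .endObject();
        System.out.println(builder.string()); // {"user":"kimchy","age":42}
    }
}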
869 | public class OProcessorManager extends ODynamicFactory<String, OProcessor> {
private static OProcessorManager instance = new OProcessorManager();
public static OProcessorManager getInstance() {
return instance;
}
public Object process(final String iType, final Object iContent, final OCommandContext iContext, ODocument iOutput,
final boolean iReadOnly) {
final OProcessor t = registry.get(iType);
if (t == null)
throw new OProcessException("Cannot find processor type '" + iType + "'");
return t.process(null, iContent, iContext, iOutput, iReadOnly);
}
} | 0true
| core_src_main_java_com_orientechnologies_orient_core_processor_OProcessorManager.java |
3,052 | public class PostingFormats {
private static final ImmutableMap<String, PreBuiltPostingsFormatProvider.Factory> builtInPostingFormats;
static {
MapBuilder<String, PreBuiltPostingsFormatProvider.Factory> buildInPostingFormatsX = MapBuilder.newMapBuilder();
// add default ones
for (String luceneName : PostingsFormat.availablePostingsFormats()) {
buildInPostingFormatsX.put(luceneName, new PreBuiltPostingsFormatProvider.Factory(PostingsFormat.forName(luceneName)));
}
final Elasticsearch090PostingsFormat defaultFormat = new Elasticsearch090PostingsFormat();
buildInPostingFormatsX.put("direct", new PreBuiltPostingsFormatProvider.Factory("direct", PostingsFormat.forName("Direct")));
buildInPostingFormatsX.put("memory", new PreBuiltPostingsFormatProvider.Factory("memory", PostingsFormat.forName("Memory")));
// LUCENE UPGRADE: Need to change this to the relevant ones on a lucene upgrade
buildInPostingFormatsX.put("pulsing", new PreBuiltPostingsFormatProvider.Factory("pulsing", PostingsFormat.forName("Pulsing41")));
buildInPostingFormatsX.put(PostingsFormatService.DEFAULT_FORMAT, new PreBuiltPostingsFormatProvider.Factory(PostingsFormatService.DEFAULT_FORMAT, defaultFormat));
buildInPostingFormatsX.put("bloom_pulsing", new PreBuiltPostingsFormatProvider.Factory("bloom_pulsing", wrapInBloom(PostingsFormat.forName("Pulsing41"))));
buildInPostingFormatsX.put("bloom_default", new PreBuiltPostingsFormatProvider.Factory("bloom_default", wrapInBloom(PostingsFormat.forName("Lucene41"))));
builtInPostingFormats = buildInPostingFormatsX.immutableMap();
}
public static final boolean luceneBloomFilter = false;
static PostingsFormat wrapInBloom(PostingsFormat delegate) {
if (luceneBloomFilter) {
return new BloomFilteringPostingsFormat(delegate, new BloomFilterLucenePostingsFormatProvider.CustomBloomFilterFactory());
}
return new BloomFilterPostingsFormat(delegate, BloomFilter.Factory.DEFAULT);
}
public static PostingsFormatProvider.Factory getAsFactory(String name) {
return builtInPostingFormats.get(name);
}
public static PostingsFormatProvider getAsProvider(String name) {
final PreBuiltPostingsFormatProvider.Factory factory = builtInPostingFormats.get(name);
return factory == null ? null : factory.get();
}
public static ImmutableCollection<PreBuiltPostingsFormatProvider.Factory> listFactories() {
return builtInPostingFormats.values();
}
} | 0true
| src_main_java_org_elasticsearch_index_codec_postingsformat_PostingFormats.java |
638 | public abstract class AbstractCollectionProxyImpl<S extends RemoteService, E> extends AbstractDistributedObject<S>
implements InitializingObject {
protected final String name;
protected final int partitionId;
protected AbstractCollectionProxyImpl(String name, NodeEngine nodeEngine, S service) {
super(nodeEngine, service);
this.name = name;
this.partitionId = nodeEngine.getPartitionService().getPartitionId(getNameAsPartitionAwareData());
}
@Override
public void initialize() {
final NodeEngine nodeEngine = getNodeEngine();
CollectionConfig config = getConfig(nodeEngine);
final List<ItemListenerConfig> itemListenerConfigs = config.getItemListenerConfigs();
for (ItemListenerConfig itemListenerConfig : itemListenerConfigs) {
ItemListener listener = itemListenerConfig.getImplementation();
if (listener == null && itemListenerConfig.getClassName() != null) {
try {
listener = ClassLoaderUtil.newInstance(nodeEngine.getConfigClassLoader(), itemListenerConfig.getClassName());
} catch (Exception e) {
throw ExceptionUtil.rethrow(e);
}
}
if (listener != null) {
if (listener instanceof HazelcastInstanceAware) {
((HazelcastInstanceAware) listener).setHazelcastInstance(nodeEngine.getHazelcastInstance());
}
addItemListener(listener, itemListenerConfig.isIncludeValue());
}
}
}
protected abstract CollectionConfig getConfig(NodeEngine nodeEngine);
@Override
public String getName() {
return name;
}
public boolean add(E e) {
throwExceptionIfNull(e);
final Data value = getNodeEngine().toData(e);
final CollectionAddOperation operation = new CollectionAddOperation(name, value);
final Boolean result = invoke(operation);
return result;
}
public boolean remove(Object o) {
throwExceptionIfNull(o);
final Data value = getNodeEngine().toData(o);
final CollectionRemoveOperation operation = new CollectionRemoveOperation(name, value);
final Boolean result = invoke(operation);
return result;
}
public int size() {
final CollectionSizeOperation operation = new CollectionSizeOperation(name);
final Integer result = invoke(operation);
return result;
}
public boolean isEmpty() {
return size() == 0;
}
public boolean contains(Object o) {
throwExceptionIfNull(o);
Set<Data> valueSet = new HashSet<Data>(1);
valueSet.add(getNodeEngine().toData(o));
final CollectionContainsOperation operation = new CollectionContainsOperation(name, valueSet);
final Boolean result = invoke(operation);
return result;
}
public boolean containsAll(Collection<?> c) {
throwExceptionIfNull(c);
Set<Data> valueSet = new HashSet<Data>(c.size());
final NodeEngine nodeEngine = getNodeEngine();
for (Object o : c) {
throwExceptionIfNull(o);
valueSet.add(nodeEngine.toData(o));
}
final CollectionContainsOperation operation = new CollectionContainsOperation(name, valueSet);
final Boolean result = invoke(operation);
return result;
}
public boolean addAll(Collection<? extends E> c) {
throwExceptionIfNull(c);
List<Data> valueList = new ArrayList<Data>(c.size());
final NodeEngine nodeEngine = getNodeEngine();
for (E e : c) {
throwExceptionIfNull(e);
valueList.add(nodeEngine.toData(e));
}
final CollectionAddAllOperation operation = new CollectionAddAllOperation(name, valueList);
final Boolean result = invoke(operation);
return result;
}
public boolean retainAll(Collection<?> c) {
return compareAndRemove(true, c);
}
public boolean removeAll(Collection<?> c) {
return compareAndRemove(false, c);
}
private boolean compareAndRemove(boolean retain, Collection<?> c) {
throwExceptionIfNull(c);
Set<Data> valueSet = new HashSet<Data>(c.size());
final NodeEngine nodeEngine = getNodeEngine();
for (Object o : c) {
throwExceptionIfNull(o);
valueSet.add(nodeEngine.toData(o));
}
final CollectionCompareAndRemoveOperation operation = new CollectionCompareAndRemoveOperation(name, retain, valueSet);
final Boolean result = invoke(operation);
return result;
}
public void clear() {
final CollectionClearOperation operation = new CollectionClearOperation(name);
invoke(operation);
}
public Iterator<E> iterator() {
return getAll().iterator();
}
public Object[] toArray() {
return getAll().toArray();
}
public <T> T[] toArray(T[] a) {
return getAll().toArray(a);
}
private Collection<E> getAll() {
final CollectionGetAllOperation operation = new CollectionGetAllOperation(name);
final SerializableCollection result = invoke(operation);
final Collection<Data> collection = result.getCollection();
final List<E> list = new ArrayList<E>(collection.size());
final NodeEngine nodeEngine = getNodeEngine();
for (Data data : collection) {
list.add(nodeEngine.<E>toObject(data));
}
return list;
}
public String addItemListener(ItemListener<E> listener, boolean includeValue) {
final EventService eventService = getNodeEngine().getEventService();
final CollectionEventFilter filter = new CollectionEventFilter(includeValue);
final EventRegistration registration = eventService.registerListener(getServiceName(), name, filter, listener);
return registration.getId();
}
public boolean removeItemListener(String registrationId) {
EventService eventService = getNodeEngine().getEventService();
return eventService.deregisterListener(getServiceName(), name, registrationId);
}
protected <T> T invoke(CollectionOperation operation) {
final NodeEngine nodeEngine = getNodeEngine();
try {
Future f = nodeEngine.getOperationService().invokeOnPartition(getServiceName(), operation, partitionId);
return nodeEngine.toObject(f.get());
} catch (Throwable throwable) {
throw ExceptionUtil.rethrow(throwable);
}
}
protected void throwExceptionIfNull(Object o) {
if (o == null) {
throw new NullPointerException("Object is null");
}
}
} | 1no label
| hazelcast_src_main_java_com_hazelcast_collection_AbstractCollectionProxyImpl.java |
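Concrete subclasses of the proxy above back Hazelcast's distributed ISet and IList; every operation serializes its arguments to Data and is invoked on the partition that owns the collection's name. A usage sketch with a placeholder collection name:

import com.hazelcast.core.Hazelcast;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.core.ISet;

public class CollectionProxyDemo {
    public static void main(String[] args) {
        HazelcastInstance hz = Hazelcast.newHazelcastInstance();
        ISet<String> set = hz.getSet("demo"); // ops route to the partition owning "demo"
        set.add("a");
        System.out.println(set.contains("a")); // true
        System.out.println(set.size());        // 1
        hz.shutdown();
    }
}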
182 | private static class NullFunction implements IFunction<String,String> {
@Override
public String apply(String input) {
return null;
}
} | 0true
| hazelcast-client_src_test_java_com_hazelcast_client_atomicreference_ClientAtomicReferenceTest.java |
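NullFunction exercises a corner of IAtomicReference.alter: a function returning null clears the stored value. A self-contained sketch of that behavior; the reference name is a placeholder and the inline function mirrors NullFunction above.

import com.hazelcast.core.Hazelcast;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.core.IAtomicReference;
import com.hazelcast.core.IFunction;

public class AlterToNullDemo {
    public static void main(String[] args) {
        HazelcastInstance hz = Hazelcast.newHazelcastInstance();
        IAtomicReference<String> ref = hz.getAtomicReference("demo");
        ref.set("hello");
        ref.alter(new IFunction<String, String>() {
            @Override
            public String apply(String input) {
                return null; // same behavior as NullFunction above
            }
        });
        System.out.println(ref.isNull()); // true
        hz.shutdown();
    }
}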
229 | private static final class CompletionListener
implements ICompletionListener {
private CeylonEditor editor;
private CeylonCompletionProcessor processor;
private CompletionListener(CeylonEditor editor,
CeylonCompletionProcessor processor) {
this.editor = editor;
this.processor = processor;
}
@Override
public void selectionChanged(ICompletionProposal proposal,
boolean smartToggle) {}
@Override
public void assistSessionStarted(ContentAssistEvent event) {
if (editor!=null) {
editor.pauseBackgroundParsing();
}
processor.sessionStarted();
/*try {
editor.getSite().getWorkbenchWindow().run(true, true, new Warmup());
}
catch (Exception e) {}*/
}
@Override
public void assistSessionEnded(ContentAssistEvent event) {
if (editor!=null) {
editor.unpauseBackgroundParsing();
editor.scheduleParsing();
}
}
} | 0true
| plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_editor_CeylonSourceViewerConfiguration.java |
2,083 | public class FileChannelInputStream extends InputStream {
private final FileChannel channel;
private long position;
private long length;
private ByteBuffer bb = null;
private byte[] bs = null; // Invoker's previous array
private byte[] b1 = null;
private long markPosition;
/**
* @param channel The channel to read from
* @param position The position to start reading from
* @param length The length to read
*/
public FileChannelInputStream(FileChannel channel, long position, long length) {
this.channel = channel;
this.position = position;
this.markPosition = position;
this.length = position + length; // easier to work with total length
}
@Override
public int read() throws IOException {
if (b1 == null) {
b1 = new byte[1];
}
int n = read(b1);
if (n == 1) {
return b1[0] & 0xff;
}
return -1;
}
@Override
public int read(byte[] bs, int off, int len) throws IOException {
if (len == 0) {
return 0;
}
if ((length - position) < len) {
len = (int) (length - position);
}
if (len == 0) {
return -1;
}
ByteBuffer bb = ((this.bs == bs) ? this.bb : ByteBuffer.wrap(bs));
bb.limit(Math.min(off + len, bb.capacity()));
bb.position(off);
this.bb = bb;
this.bs = bs;
int read = channel.read(bb, position);
if (read > 0) {
position += read;
}
return read;
}
@Override
public boolean markSupported() {
return true;
}
@Override
public void mark(int readlimit) {
this.markPosition = position;
}
@Override
public void reset() throws IOException {
position = markPosition;
}
} | 0true
| src_main_java_org_elasticsearch_common_io_FileChannelInputStream.java |
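Because the class above uses positional channel.read(bb, position), it never moves the channel's own cursor, so several streams can read disjoint ranges of one channel. A usage sketch, assuming the class above is on the classpath; the file name and offsets are placeholders.

import java.io.RandomAccessFile;
import java.nio.channels.FileChannel;

public class RangeReadDemo {
    public static void main(String[] args) throws Exception {
        RandomAccessFile raf = new RandomAccessFile("data.bin", "r");
        try {
            FileChannel channel = raf.getChannel();
            // stream over bytes [100, 200) of the file
            FileChannelInputStream in = new FileChannelInputStream(channel, 100, 100);
            byte[] buf = new byte[100];
            int n = in.read(buf); // the channel's position is untouched
            System.out.println("read " + n + " bytes");
        } finally {
            raf.close();
        }
    }
}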
6,449 | public class TribeService extends AbstractLifecycleComponent<TribeService> {
public static final ClusterBlock TRIBE_METADATA_BLOCK = new ClusterBlock(10, "tribe node, metadata not allowed", false, false, RestStatus.BAD_REQUEST, ClusterBlockLevel.METADATA);
public static final ClusterBlock TRIBE_WRITE_BLOCK = new ClusterBlock(11, "tribe node, write not allowed", false, false, RestStatus.BAD_REQUEST, ClusterBlockLevel.WRITE);
public static Settings processSettings(Settings settings) {
if (settings.get(TRIBE_NAME) != null) {
// if it's a node client started by this service as a tribe, remove any tribe group setting
// to avoid recursive configuration
ImmutableSettings.Builder sb = ImmutableSettings.builder().put(settings);
for (String s : settings.getAsMap().keySet()) {
if (s.startsWith("tribe.") && !s.equals(TRIBE_NAME)) {
sb.remove(s);
}
}
return sb.build();
}
Map<String, Settings> nodesSettings = settings.getGroups("tribe", true);
if (nodesSettings.isEmpty()) {
return settings;
}
// it's a tribe-configured node..., force settings
ImmutableSettings.Builder sb = ImmutableSettings.builder().put(settings);
sb.put("node.client", true); // this node should just act as a node client
sb.put("discovery.type", "local"); // a tribe node should not use zen discovery
sb.put("discovery.initial_state_timeout", 0); // nothing is going to be discovered, since no master will be elected
if (sb.get("cluster.name") == null) {
sb.put("cluster.name", "tribe_" + Strings.randomBase64UUID()); // make sure it won't join other tribe nodes in the same JVM
}
sb.put("gateway.type", "none"); // we shouldn't store anything locally...
sb.put(TransportMasterNodeReadOperationAction.FORCE_LOCAL_SETTING, true);
return sb.build();
}
public static final String TRIBE_NAME = "tribe.name";
private final ClusterService clusterService;
private final List<InternalNode> nodes = Lists.newCopyOnWriteArrayList();
@Inject
public TribeService(Settings settings, ClusterService clusterService) {
super(settings);
this.clusterService = clusterService;
Map<String, Settings> nodesSettings = settings.getGroups("tribe", true);
for (Map.Entry<String, Settings> entry : nodesSettings.entrySet()) {
ImmutableSettings.Builder sb = ImmutableSettings.builder().put(entry.getValue());
sb.put("node.name", settings.get("name") + "/" + entry.getKey());
sb.put(TRIBE_NAME, entry.getKey());
if (sb.get("http.enabled") == null) {
sb.put("http.enabled", false);
}
nodes.add((InternalNode) NodeBuilder.nodeBuilder().settings(sb).client(true).build());
}
if (!nodes.isEmpty()) {
// remove the initial election / recovery blocks since we are not going to have a
// master elected in this single tribe node local "cluster"
clusterService.removeInitialStateBlock(Discovery.NO_MASTER_BLOCK);
clusterService.removeInitialStateBlock(GatewayService.STATE_NOT_RECOVERED_BLOCK);
if (settings.getAsBoolean("tribe.blocks.write", false)) {
clusterService.addInitialStateBlock(TRIBE_WRITE_BLOCK);
}
if (settings.getAsBoolean("tribe.blocks.metadata", false)) {
clusterService.addInitialStateBlock(TRIBE_METADATA_BLOCK);
}
for (InternalNode node : nodes) {
node.injector().getInstance(ClusterService.class).add(new TribeClusterStateListener(node));
}
}
}
@Override
protected void doStart() throws ElasticsearchException {
final CountDownLatch latch = new CountDownLatch(1);
clusterService.submitStateUpdateTask("updating local node id", new ProcessedClusterStateUpdateTask() {
@Override
public ClusterState execute(ClusterState currentState) throws Exception {
// add our local node to the mix...
return ClusterState.builder(currentState)
.nodes(DiscoveryNodes.builder(currentState.nodes()).put(clusterService.localNode()).localNodeId(clusterService.localNode().id()))
.build();
}
@Override
public void onFailure(String source, Throwable t) {
try {
logger.error("{}", t, source);
} finally {
latch.countDown();
}
}
@Override
public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) {
latch.countDown();
}
});
try {
latch.await();
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
throw new ElasticsearchIllegalStateException("Interrupted while starting [" + this.getClass().getSimpleName()+ "]", e);
}
for (InternalNode node : nodes) {
try {
node.start();
} catch (Throwable e) {
// calling close is safe for non-started nodes, we can just iterate over all
for (InternalNode otherNode : nodes) {
try {
otherNode.close();
} catch (Throwable t) {
logger.warn("failed to close node {} on failed start", otherNode, t);
}
}
if (e instanceof RuntimeException) {
throw (RuntimeException) e;
}
throw new ElasticsearchException(e.getMessage(), e);
}
}
}
@Override
protected void doStop() throws ElasticsearchException {
for (InternalNode node : nodes) {
try {
node.stop();
} catch (Throwable t) {
logger.warn("failed to stop node {}", t, node);
}
}
}
@Override
protected void doClose() throws ElasticsearchException {
for (InternalNode node : nodes) {
try {
node.close();
} catch (Throwable t) {
logger.warn("failed to close node {}", t, node);
}
}
}
class TribeClusterStateListener implements ClusterStateListener {
private final InternalNode tribeNode;
private final String tribeName;
TribeClusterStateListener(InternalNode tribeNode) {
this.tribeNode = tribeNode;
this.tribeName = tribeNode.settings().get(TRIBE_NAME);
}
@Override
public void clusterChanged(final ClusterChangedEvent event) {
logger.debug("[{}] received cluster event, [{}]", tribeName, event.source());
clusterService.submitStateUpdateTask("cluster event from " + tribeName + ", " + event.source(), new ClusterStateUpdateTask() {
@Override
public ClusterState execute(ClusterState currentState) throws Exception {
ClusterState tribeState = event.state();
DiscoveryNodes.Builder nodes = DiscoveryNodes.builder(currentState.nodes());
// -- merge nodes
// go over existing nodes, and see if they need to be removed
for (DiscoveryNode discoNode : currentState.nodes()) {
String markedTribeName = discoNode.attributes().get(TRIBE_NAME);
if (markedTribeName != null && markedTribeName.equals(tribeName)) {
if (tribeState.nodes().get(discoNode.id()) == null) {
logger.info("[{}] removing node [{}]", tribeName, discoNode);
nodes.remove(discoNode.id());
}
}
}
// go over tribe nodes, and see if they need to be added
for (DiscoveryNode tribe : tribeState.nodes()) {
if (currentState.nodes().get(tribe.id()) == null) {
// a new node, add it, but also add the tribe name to the attributes
ImmutableMap<String, String> tribeAttr = MapBuilder.newMapBuilder(tribe.attributes()).put(TRIBE_NAME, tribeName).immutableMap();
DiscoveryNode discoNode = new DiscoveryNode(tribe.name(), tribe.id(), tribe.getHostName(), tribe.getHostAddress(), tribe.address(), tribeAttr, tribe.version());
logger.info("[{}] adding node [{}]", tribeName, discoNode);
nodes.put(discoNode);
}
}
// -- merge metadata
MetaData.Builder metaData = MetaData.builder(currentState.metaData());
RoutingTable.Builder routingTable = RoutingTable.builder(currentState.routingTable());
// go over existing indices, and see if they need to be removed
for (IndexMetaData index : currentState.metaData()) {
String markedTribeName = index.settings().get(TRIBE_NAME);
if (markedTribeName != null && markedTribeName.equals(tribeName)) {
IndexMetaData tribeIndex = tribeState.metaData().index(index.index());
if (tribeIndex == null) {
logger.info("[{}] removing index [{}]", tribeName, index.index());
metaData.remove(index.index());
routingTable.remove(index.index());
} else {
// always make sure to update the metadata and routing table, in case
// there are changes in them (new mapping, shards moving from initializing to started)
routingTable.add(tribeState.routingTable().index(index.index()));
Settings tribeSettings = ImmutableSettings.builder().put(tribeIndex.settings()).put(TRIBE_NAME, tribeName).build();
metaData.put(IndexMetaData.builder(tribeIndex).settings(tribeSettings));
}
}
}
// go over tribe indices, and see if they need to be added
for (IndexMetaData tribeIndex : tribeState.metaData()) {
if (!currentState.metaData().hasIndex(tribeIndex.index())) {
// a new index, add it, and add the tribe name as a setting
logger.info("[{}] adding index [{}]", tribeName, tribeIndex.index());
Settings tribeSettings = ImmutableSettings.builder().put(tribeIndex.settings()).put(TRIBE_NAME, tribeName).build();
metaData.put(IndexMetaData.builder(tribeIndex).settings(tribeSettings));
routingTable.add(tribeState.routingTable().index(tribeIndex.index()));
}
}
return ClusterState.builder(currentState).nodes(nodes).metaData(metaData).routingTable(routingTable).build();
}
@Override
public void onFailure(String source, Throwable t) {
logger.warn("failed to process [{}]", t, source);
}
});
}
}
} | 1no label
| src_main_java_org_elasticsearch_tribe_TribeService.java |
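A tribe node is configured purely through tribe.* settings groups, which processSettings above rewrites into a local, non-master client node. A configuration sketch for the 1.x-era API; the tribe names and cluster names are placeholders.

import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.node.Node;
import org.elasticsearch.node.NodeBuilder;

public class TribeNodeDemo {
    public static void main(String[] args) {
        Settings settings = ImmutableSettings.builder()
                .put("tribe.t1.cluster.name", "cluster_one")
                .put("tribe.t2.cluster.name", "cluster_two")
                .build();
        // the node joins both clusters as a client and merges their states
        Node tribeNode = NodeBuilder.nodeBuilder().settings(settings).node();
    }
}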
1,328 | public class OAtomicUnitStartRecord extends OOperationUnitRecord implements OWALRecord {
private OLogSequenceNumber lsn;
private boolean isRollbackSupported;
public OAtomicUnitStartRecord() {
}
public OAtomicUnitStartRecord(boolean isRollbackSupported, OOperationUnitId unitId) {
super(unitId);
this.isRollbackSupported = isRollbackSupported;
}
public boolean isRollbackSupported() {
return isRollbackSupported;
}
@Override
public int toStream(byte[] content, int offset) {
offset = super.toStream(content, offset);
content[offset] = isRollbackSupported ? (byte) 1 : 0;
offset++;
return offset;
}
@Override
public int fromStream(byte[] content, int offset) {
offset = super.fromStream(content, offset);
isRollbackSupported = content[offset] > 0;
offset++;
return offset;
}
@Override
public int serializedSize() {
return super.serializedSize() + OByteSerializer.BYTE_SIZE;
}
@Override
public boolean isUpdateMasterRecord() {
return false;
}
@Override
public OLogSequenceNumber getLsn() {
return lsn;
}
@Override
public void setLsn(OLogSequenceNumber lsn) {
this.lsn = lsn;
}
@Override
public String toString() {
return "OAtomicUnitStartRecord{" + "lsn=" + lsn + ", isRollbackSupported=" + isRollbackSupported + '}';
}
} | 0true
| core_src_main_java_com_orientechnologies_orient_core_storage_impl_local_paginated_wal_OAtomicUnitStartRecord.java |
308 | public class ClusterHealthResponse extends ActionResponse implements Iterable<ClusterIndexHealth>, ToXContent {
private String clusterName;
int numberOfNodes = 0;
int numberOfDataNodes = 0;
int activeShards = 0;
int relocatingShards = 0;
int activePrimaryShards = 0;
int initializingShards = 0;
int unassignedShards = 0;
boolean timedOut = false;
ClusterHealthStatus status = ClusterHealthStatus.RED;
private List<String> validationFailures;
Map<String, ClusterIndexHealth> indices = Maps.newHashMap();
ClusterHealthResponse() {
}
public ClusterHealthResponse(String clusterName, List<String> validationFailures) {
this.clusterName = clusterName;
this.validationFailures = validationFailures;
}
public ClusterHealthResponse(String clusterName, String[] concreteIndices, ClusterState clusterState) {
this.clusterName = clusterName;
RoutingTableValidation validation = clusterState.routingTable().validate(clusterState.metaData());
validationFailures = validation.failures();
numberOfNodes = clusterState.nodes().size();
numberOfDataNodes = clusterState.nodes().dataNodes().size();
for (String index : concreteIndices) {
IndexRoutingTable indexRoutingTable = clusterState.routingTable().index(index);
IndexMetaData indexMetaData = clusterState.metaData().index(index);
if (indexRoutingTable == null) {
continue;
}
ClusterIndexHealth indexHealth = new ClusterIndexHealth(indexMetaData, indexRoutingTable);
indices.put(indexHealth.getIndex(), indexHealth);
}
status = ClusterHealthStatus.GREEN;
for (ClusterIndexHealth indexHealth : indices.values()) {
activePrimaryShards += indexHealth.activePrimaryShards;
activeShards += indexHealth.activeShards;
relocatingShards += indexHealth.relocatingShards;
initializingShards += indexHealth.initializingShards;
unassignedShards += indexHealth.unassignedShards;
if (indexHealth.getStatus() == ClusterHealthStatus.RED) {
status = ClusterHealthStatus.RED;
} else if (indexHealth.getStatus() == ClusterHealthStatus.YELLOW && status != ClusterHealthStatus.RED) {
status = ClusterHealthStatus.YELLOW;
}
}
if (!validationFailures.isEmpty()) {
status = ClusterHealthStatus.RED;
} else if (clusterState.blocks().hasGlobalBlock(RestStatus.SERVICE_UNAVAILABLE)) {
status = ClusterHealthStatus.RED;
}
}
public String getClusterName() {
return clusterName;
}
/**
* The validation failures on the cluster level (without index validation failures).
*/
public List<String> getValidationFailures() {
return this.validationFailures;
}
/**
* All the validation failures, including index level validation failures.
*/
public List<String> getAllValidationFailures() {
List<String> allFailures = newArrayList(getValidationFailures());
for (ClusterIndexHealth indexHealth : indices.values()) {
allFailures.addAll(indexHealth.getValidationFailures());
}
return allFailures;
}
public int getActiveShards() {
return activeShards;
}
public int getRelocatingShards() {
return relocatingShards;
}
public int getActivePrimaryShards() {
return activePrimaryShards;
}
public int getInitializingShards() {
return initializingShards;
}
public int getUnassignedShards() {
return unassignedShards;
}
public int getNumberOfNodes() {
return this.numberOfNodes;
}
public int getNumberOfDataNodes() {
return this.numberOfDataNodes;
}
/**
* <tt>true</tt> if the waitForXXX has timed out and did not match.
*/
public boolean isTimedOut() {
return this.timedOut;
}
public ClusterHealthStatus getStatus() {
return status;
}
public Map<String, ClusterIndexHealth> getIndices() {
return indices;
}
@Override
public Iterator<ClusterIndexHealth> iterator() {
return indices.values().iterator();
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
clusterName = in.readString();
activePrimaryShards = in.readVInt();
activeShards = in.readVInt();
relocatingShards = in.readVInt();
initializingShards = in.readVInt();
unassignedShards = in.readVInt();
numberOfNodes = in.readVInt();
numberOfDataNodes = in.readVInt();
status = ClusterHealthStatus.fromValue(in.readByte());
int size = in.readVInt();
for (int i = 0; i < size; i++) {
ClusterIndexHealth indexHealth = readClusterIndexHealth(in);
indices.put(indexHealth.getIndex(), indexHealth);
}
timedOut = in.readBoolean();
size = in.readVInt();
if (size == 0) {
validationFailures = ImmutableList.of();
} else {
validationFailures = newArrayList(); // must be initialized here: the no-arg deserialization constructor leaves it null
for (int i = 0; i < size; i++) {
validationFailures.add(in.readString());
}
}
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeString(clusterName);
out.writeVInt(activePrimaryShards);
out.writeVInt(activeShards);
out.writeVInt(relocatingShards);
out.writeVInt(initializingShards);
out.writeVInt(unassignedShards);
out.writeVInt(numberOfNodes);
out.writeVInt(numberOfDataNodes);
out.writeByte(status.value());
out.writeVInt(indices.size());
for (ClusterIndexHealth indexHealth : this) {
indexHealth.writeTo(out);
}
out.writeBoolean(timedOut);
out.writeVInt(validationFailures.size());
for (String failure : validationFailures) {
out.writeString(failure);
}
}
@Override
public String toString() {
StringBuilder builder = new StringBuilder("ClusterHealthResponse - status [").append(status).append("]")
.append("\ntimedOut [").append(timedOut).append("]")
.append("\nclustername [").append(clusterName).append("]")
.append("\nnumberOfNodes [").append(numberOfNodes).append("]")
.append("\nnumberOfDataNodes [").append(numberOfDataNodes).append("]")
.append("\nactiveShards [").append(activeShards).append("]")
.append("\nrelocatingShards [").append(relocatingShards).append("]")
.append("\nactivePrimaryShards [").append(activePrimaryShards).append("]")
.append("\ninitializingShards [").append(initializingShards).append("]")
.append("\nvalidationFailures ").append(validationFailures)
.append("\nindices:");
for (Map.Entry<String, ClusterIndexHealth> indexEntry : indices.entrySet()) {
builder.append(" [").append(indexEntry.getKey()).append("][").append(indexEntry.getValue().status).append("]");
}
return builder.toString();
}
static final class Fields {
static final XContentBuilderString CLUSTER_NAME = new XContentBuilderString("cluster_name");
static final XContentBuilderString STATUS = new XContentBuilderString("status");
static final XContentBuilderString TIMED_OUT = new XContentBuilderString("timed_out");
static final XContentBuilderString NUMBER_OF_NODES = new XContentBuilderString("number_of_nodes");
static final XContentBuilderString NUMBER_OF_DATA_NODES = new XContentBuilderString("number_of_data_nodes");
static final XContentBuilderString ACTIVE_PRIMARY_SHARDS = new XContentBuilderString("active_primary_shards");
static final XContentBuilderString ACTIVE_SHARDS = new XContentBuilderString("active_shards");
static final XContentBuilderString RELOCATING_SHARDS = new XContentBuilderString("relocating_shards");
static final XContentBuilderString INITIALIZING_SHARDS = new XContentBuilderString("initializing_shards");
static final XContentBuilderString UNASSIGNED_SHARDS = new XContentBuilderString("unassigned_shards");
static final XContentBuilderString VALIDATION_FAILURES = new XContentBuilderString("validation_failures");
static final XContentBuilderString INDICES = new XContentBuilderString("indices");
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.field(Fields.CLUSTER_NAME, getClusterName());
builder.field(Fields.STATUS, getStatus().name().toLowerCase(Locale.ROOT));
builder.field(Fields.TIMED_OUT, isTimedOut());
builder.field(Fields.NUMBER_OF_NODES, getNumberOfNodes());
builder.field(Fields.NUMBER_OF_DATA_NODES, getNumberOfDataNodes());
builder.field(Fields.ACTIVE_PRIMARY_SHARDS, getActivePrimaryShards());
builder.field(Fields.ACTIVE_SHARDS, getActiveShards());
builder.field(Fields.RELOCATING_SHARDS, getRelocatingShards());
builder.field(Fields.INITIALIZING_SHARDS, getInitializingShards());
builder.field(Fields.UNASSIGNED_SHARDS, getUnassignedShards());
String level = params.param("level", "cluster");
boolean outputIndices = "indices".equals(level) || "shards".equals(level);
if (!getValidationFailures().isEmpty()) {
builder.startArray(Fields.VALIDATION_FAILURES);
for (String validationFailure : getValidationFailures()) {
builder.value(validationFailure);
}
// if we don't print index level information, still print the index validation failures
// so we know why the status is red
if (!outputIndices) {
for (ClusterIndexHealth indexHealth : indices.values()) {
builder.startObject(indexHealth.getIndex());
if (!indexHealth.getValidationFailures().isEmpty()) {
builder.startArray(Fields.VALIDATION_FAILURES);
for (String validationFailure : indexHealth.getValidationFailures()) {
builder.value(validationFailure);
}
builder.endArray();
}
builder.endObject();
}
}
builder.endArray();
}
if (outputIndices) {
builder.startObject(Fields.INDICES);
for (ClusterIndexHealth indexHealth : indices.values()) {
builder.startObject(indexHealth.getIndex(), XContentBuilder.FieldCaseConversion.NONE);
indexHealth.toXContent(builder, params);
builder.endObject();
}
builder.endObject();
}
return builder;
}
} | 0true
| src_main_java_org_elasticsearch_action_admin_cluster_health_ClusterHealthResponse.java |
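The response above is what the cluster health API returns; a common client-side pattern is to block until the cluster reaches at least yellow. A hedged sketch in which the caller supplies an already-connected Client:

import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.unit.TimeValue;

public class HealthCheck {
    static void waitForYellow(Client client) {
        ClusterHealthResponse health = client.admin().cluster()
                .prepareHealth()
                .setWaitForYellowStatus()
                .setTimeout(TimeValue.timeValueSeconds(30))
                .get();
        System.out.println(health.getStatus() + ", timedOut=" + health.isTimedOut());
    }
}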
2,202 | static class InternalFilter extends MultiTermQueryWrapperFilter<RegexpQuery> {
public InternalFilter(Term term) {
super(new RegexpQuery(term));
}
public InternalFilter(Term term, int flags) {
super(new RegexpQuery(term, flags));
}
} | 0true
| src_main_java_org_elasticsearch_common_lucene_search_RegexpFilter.java |
1,006 | @Entity
@Inheritance(strategy = InheritanceType.JOINED)
@Table(name = "BLC_FULFILLMENT_OPTION")
@Cache(usage = CacheConcurrencyStrategy.READ_WRITE, region = "blOrderElements")
@AdminPresentationMergeOverrides(
{
@AdminPresentationMergeOverride(name = "", mergeEntries =
@AdminPresentationMergeEntry(propertyType = PropertyType.AdminPresentation.READONLY,
booleanOverrideValue = true))
}
)
@AdminPresentationClass(friendlyName = "Base Fulfillment Option")
public class FulfillmentOptionImpl implements FulfillmentOption {
private static final long serialVersionUID = 1L;
@Id
@GeneratedValue(generator= "FulfillmentOptionId")
@GenericGenerator(
name="FulfillmentOptionId",
strategy="org.broadleafcommerce.common.persistence.IdOverrideTableGenerator",
parameters = {
@Parameter(name="segment_value", value="FulfillmentOptionImpl"),
@Parameter(name="entity_name", value="org.broadleafcommerce.core.order.domain.FulfillmentOptionImpl")
}
)
@Column(name = "FULFILLMENT_OPTION_ID")
protected Long id;
@Column(name = "NAME")
@AdminPresentation(friendlyName = "FulfillmentOptionImpl_name",
order = Presentation.FieldOrder.NAME, prominent = true, gridOrder = 1000, translatable = true)
protected String name;
@Lob
@Type(type = "org.hibernate.type.StringClobType")
@Column(name = "LONG_DESCRIPTION", length = Integer.MAX_VALUE - 1)
@AdminPresentation(friendlyName = "FulfillmentOptionImpl_longDescription",
order = Presentation.FieldOrder.DESCRIPTION, translatable = true)
protected String longDescription;
@Column(name = "USE_FLAT_RATES")
@AdminPresentation(friendlyName = "FulfillmentOptionImpl_useFlatRates",
order = Presentation.FieldOrder.FLATRATES)
protected Boolean useFlatRates = true;
@Column(name = "FULFILLMENT_TYPE", nullable = false)
protected String fulfillmentType;
@Column(name = "TAX_CODE", nullable = true)
protected String taxCode;
@Column(name = "TAXABLE")
protected Boolean taxable = false;
@Override
public Long getId() {
return id;
}
@Override
public void setId(Long id) {
this.id = id;
}
@Override
public String getName() {
return DynamicTranslationProvider.getValue(this, "name", name);
}
@Override
public void setName(String name) {
this.name = name;
}
@Override
public String getLongDescription() {
return DynamicTranslationProvider.getValue(this, "longDescription", longDescription);
}
@Override
public void setLongDescription(String longDescription) {
this.longDescription = longDescription;
}
@Override
public Boolean getUseFlatRates() {
return useFlatRates;
}
@Override
public void setUseFlatRates(Boolean useFlatRates) {
this.useFlatRates = useFlatRates;
}
@Override
public FulfillmentType getFulfillmentType() {
return FulfillmentType.getInstance(fulfillmentType);
}
@Override
public void setFulfillmentType(FulfillmentType fulfillmentType) {
this.fulfillmentType = (fulfillmentType == null) ? null : fulfillmentType.getType();
}
@Override
public boolean equals(Object o) {
return EqualsBuilder.reflectionEquals(this, o);
}
@Override
public int hashCode() {
return HashCodeBuilder.reflectionHashCode(this);
}
@Override
public Boolean getTaxable() {
return this.taxable;
}
@Override
public void setTaxable(Boolean taxable) {
this.taxable = taxable;
}
@Override
public void setTaxCode(String taxCode) {
this.taxCode = taxCode;
}
@Override
public String getTaxCode() {
return this.taxCode;
}
public static class Presentation {
public static class Group {
public static class Name {
}
public static class Order {
}
}
public static class FieldOrder {
public static final int NAME = 1000;
public static final int DESCRIPTION = 2000;
public static final int FLATRATES = 9000;
}
}
} | 0true
| core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_order_domain_FulfillmentOptionImpl.java |
1,069 | public class OIndexSearchResult {
final Map<String, Object> fieldValuePairs = new HashMap<String, Object>(8);
final OQueryOperator lastOperator;
final OSQLFilterItemField.FieldChain lastField;
final Object lastValue;
OIndexSearchResult(final OQueryOperator lastOperator, final OSQLFilterItemField.FieldChain field, final Object value) {
this.lastOperator = lastOperator;
lastField = field;
lastValue = value;
}
/**
* Combines two query subsets into one. This operation is valid only if the {@link #canBeMerged(OIndexSearchResult)} method
* returns <code>true</code> for the same passed-in parameter.
*
* @param searchResult
* Query subset to merge.
* @return A new instance that represents the merged query.
*/
OIndexSearchResult merge(final OIndexSearchResult searchResult) {
final OQueryOperator operator;
final OIndexSearchResult result;
if (searchResult.lastOperator instanceof OQueryOperatorEquals) {
result = new OIndexSearchResult(this.lastOperator, lastField, lastValue);
result.fieldValuePairs.putAll(searchResult.fieldValuePairs);
result.fieldValuePairs.putAll(fieldValuePairs);
result.fieldValuePairs.put(searchResult.lastField.getItemName(0), searchResult.lastValue);
} else {
operator = searchResult.lastOperator;
result = new OIndexSearchResult(operator, searchResult.lastField, searchResult.lastValue);
result.fieldValuePairs.putAll(searchResult.fieldValuePairs);
result.fieldValuePairs.putAll(fieldValuePairs);
result.fieldValuePairs.put(lastField.getItemName(0), lastValue);
}
return result;
}
/**
* @param searchResult
* Query subset that is going to be merged with the given one.
* @return <code>true</code> if two query subsets can be merged.
*/
boolean canBeMerged(final OIndexSearchResult searchResult) {
if (lastField.isLong() || searchResult.lastField.isLong()) {
return false;
}
return isIndexEqualityOperator(lastOperator) || isIndexEqualityOperator(searchResult.lastOperator);
}
List<String> fields() {
final List<String> result = new ArrayList<String>(fieldValuePairs.size() + 1);
result.addAll(fieldValuePairs.keySet());
result.add(lastField.getItemName(0));
return result;
}
int getFieldCount() {
return fieldValuePairs.size() + 1;
}
public static boolean isIndexEqualityOperator(OQueryOperator queryOperator) {
return queryOperator instanceof OQueryOperatorEquals || queryOperator instanceof OQueryOperatorContains
|| queryOperator instanceof OQueryOperatorContainsKey || queryOperator instanceof OQueryOperatorContainsValue;
}
} | 1no label
| core_src_main_java_com_orientechnologies_orient_core_sql_OIndexSearchResult.java |
1,405 | @XmlRootElement(name = "order")
@XmlAccessorType(value = XmlAccessType.FIELD)
public class OrderWrapper extends BaseWrapper implements APIWrapper<Order> {
@XmlElement
protected Long id;
@XmlElement
protected String status;
@XmlElement
protected Money totalTax;
@XmlElement
protected Money totalShipping;
@XmlElement
protected Money subTotal;
@XmlElement
protected Money total;
@XmlElement
protected CustomerWrapper customer;
@XmlElement(name = "orderItem")
@XmlElementWrapper(name = "orderItems")
protected List<OrderItemWrapper> orderItems;
@XmlElement(name = "fulfillmentGroup")
@XmlElementWrapper(name = "fulfillmentGroups")
protected List<FulfillmentGroupWrapper> fulfillmentGroups;
@XmlElement(name = "paymentInfo")
@XmlElementWrapper(name = "paymentInfos")
protected List<PaymentInfoWrapper> paymentInfos;
@XmlElement(name = "orderAdjustment")
@XmlElementWrapper(name = "orderAdjustments")
protected List<AdjustmentWrapper> orderAdjustments;
@XmlElement(name = "orderAttribute")
@XmlElementWrapper(name = "orderAttributes")
protected List<OrderAttributeWrapper> orderAttributes;
@Override
public void wrapDetails(Order model, HttpServletRequest request) {
this.id = model.getId();
this.status = model.getStatus().getType();
this.totalTax = model.getTotalTax();
this.totalShipping = model.getTotalShipping();
this.subTotal = model.getSubTotal();
this.total = model.getTotal();
if (model.getOrderItems() != null && !model.getOrderItems().isEmpty()) {
this.orderItems = new ArrayList<OrderItemWrapper>();
for (OrderItem orderItem : model.getOrderItems()) {
OrderItemWrapper orderItemWrapper = (OrderItemWrapper) context.getBean(OrderItemWrapper.class.getName());
orderItemWrapper.wrapSummary(orderItem, request);
this.orderItems.add(orderItemWrapper);
}
}
if (model.getFulfillmentGroups() != null && !model.getFulfillmentGroups().isEmpty()) {
this.fulfillmentGroups = new ArrayList<FulfillmentGroupWrapper>();
for (FulfillmentGroup fulfillmentGroup : model.getFulfillmentGroups()) {
FulfillmentGroupWrapper fulfillmentGroupWrapper = (FulfillmentGroupWrapper) context.getBean(FulfillmentGroupWrapper.class.getName());
fulfillmentGroupWrapper.wrapSummary(fulfillmentGroup, request);
this.fulfillmentGroups.add(fulfillmentGroupWrapper);
}
}
if (model.getPaymentInfos() != null && !model.getPaymentInfos().isEmpty()) {
this.paymentInfos = new ArrayList<PaymentInfoWrapper>();
for (PaymentInfo paymentInfo : model.getPaymentInfos()) {
PaymentInfoWrapper paymentInfoWrapper = (PaymentInfoWrapper) context.getBean(PaymentInfoWrapper.class.getName());
paymentInfoWrapper.wrapSummary(paymentInfo, request);
this.paymentInfos.add(paymentInfoWrapper);
}
}
if (model.getOrderAdjustments() != null && !model.getOrderAdjustments().isEmpty()) {
this.orderAdjustments = new ArrayList<AdjustmentWrapper>();
for (OrderAdjustment orderAdjustment : model.getOrderAdjustments()) {
AdjustmentWrapper orderAdjustmentWrapper = (AdjustmentWrapper) context.getBean(AdjustmentWrapper.class.getName());
orderAdjustmentWrapper.wrapSummary(orderAdjustment, request);
this.orderAdjustments.add(orderAdjustmentWrapper);
}
}
if (model.getOrderAttributes() != null && !model.getOrderAttributes().isEmpty()) {
Map<String, OrderAttribute> itemAttributes = model.getOrderAttributes();
this.orderAttributes = new ArrayList<OrderAttributeWrapper>();
Set<String> keys = itemAttributes.keySet();
for (String key : keys) {
OrderAttributeWrapper orderAttributeWrapper =
(OrderAttributeWrapper) context.getBean(OrderAttributeWrapper.class.getName());
orderAttributeWrapper.wrapSummary(itemAttributes.get(key), request);
this.orderAttributes.add(orderAttributeWrapper);
}
}
CustomerWrapper customerWrapper = (CustomerWrapper) context.getBean(CustomerWrapper.class.getName());
customerWrapper.wrapDetails(model.getCustomer(), request);
this.customer = customerWrapper;
}
@Override
public void wrapSummary(Order model, HttpServletRequest request) {
wrapDetails(model, request);
}
} | 0true
| core_broadleaf-framework-web_src_main_java_org_broadleafcommerce_core_web_api_wrapper_OrderWrapper.java |
5,760 | public class FetchPhase implements SearchPhase {
private final FetchSubPhase[] fetchSubPhases;
@Inject
public FetchPhase(HighlightPhase highlightPhase, ScriptFieldsFetchSubPhase scriptFieldsPhase, PartialFieldsFetchSubPhase partialFieldsPhase,
MatchedQueriesFetchSubPhase matchedQueriesPhase, ExplainFetchSubPhase explainPhase, VersionFetchSubPhase versionPhase,
FetchSourceSubPhase fetchSourceSubPhase, FieldDataFieldsFetchSubPhase fieldDataFieldsFetchSubPhase) {
this.fetchSubPhases = new FetchSubPhase[]{scriptFieldsPhase, partialFieldsPhase, matchedQueriesPhase, explainPhase, highlightPhase,
fetchSourceSubPhase, versionPhase, fieldDataFieldsFetchSubPhase};
}
@Override
public Map<String, ? extends SearchParseElement> parseElements() {
ImmutableMap.Builder<String, SearchParseElement> parseElements = ImmutableMap.builder();
parseElements.put("fields", new FieldsParseElement());
for (FetchSubPhase fetchSubPhase : fetchSubPhases) {
parseElements.putAll(fetchSubPhase.parseElements());
}
return parseElements.build();
}
@Override
public void preProcess(SearchContext context) {
}
public void execute(SearchContext context) {
FieldsVisitor fieldsVisitor;
List<String> extractFieldNames = null;
if (!context.hasFieldNames()) {
if (context.hasPartialFields()) {
// partial fields need the source, so fetch it
fieldsVisitor = new UidAndSourceFieldsVisitor();
} else {
// no fields specified, default to return source if no explicit indication
if (!context.hasScriptFields() && !context.hasFetchSourceContext()) {
context.fetchSourceContext(new FetchSourceContext(true));
}
fieldsVisitor = context.sourceRequested() ? new UidAndSourceFieldsVisitor() : new JustUidFieldsVisitor();
}
} else if (context.fieldNames().isEmpty()) {
if (context.sourceRequested()) {
fieldsVisitor = new UidAndSourceFieldsVisitor();
} else {
fieldsVisitor = new JustUidFieldsVisitor();
}
} else {
boolean loadAllStored = false;
Set<String> fieldNames = null;
for (String fieldName : context.fieldNames()) {
if (fieldName.equals("*")) {
loadAllStored = true;
continue;
}
if (fieldName.equals(SourceFieldMapper.NAME)) {
if (context.hasFetchSourceContext()) {
context.fetchSourceContext().fetchSource(true);
} else {
context.fetchSourceContext(new FetchSourceContext(true));
}
continue;
}
FieldMappers x = context.smartNameFieldMappers(fieldName);
if (x == null) {
// Only fail if we know it is an object field; missing paths / fields shouldn't fail.
if (context.smartNameObjectMapper(fieldName) != null) {
throw new ElasticsearchIllegalArgumentException("field [" + fieldName + "] isn't a leaf field");
}
} else if (x.mapper().fieldType().stored()) {
if (fieldNames == null) {
fieldNames = new HashSet<String>();
}
fieldNames.add(x.mapper().names().indexName());
} else {
if (extractFieldNames == null) {
extractFieldNames = newArrayList();
}
extractFieldNames.add(fieldName);
}
}
if (loadAllStored) {
fieldsVisitor = new AllFieldsVisitor(); // load everything, including _source
} else if (fieldNames != null) {
boolean loadSource = extractFieldNames != null || context.sourceRequested();
fieldsVisitor = new CustomFieldsVisitor(fieldNames, loadSource);
} else if (extractFieldNames != null || context.sourceRequested()) {
fieldsVisitor = new UidAndSourceFieldsVisitor();
} else {
fieldsVisitor = new JustUidFieldsVisitor();
}
}
InternalSearchHit[] hits = new InternalSearchHit[context.docIdsToLoadSize()];
FetchSubPhase.HitContext hitContext = new FetchSubPhase.HitContext();
for (int index = 0; index < context.docIdsToLoadSize(); index++) {
int docId = context.docIdsToLoad()[context.docIdsToLoadFrom() + index];
loadStoredFields(context, fieldsVisitor, docId);
fieldsVisitor.postProcess(context.mapperService());
Map<String, SearchHitField> searchFields = null;
if (!fieldsVisitor.fields().isEmpty()) {
searchFields = new HashMap<String, SearchHitField>(fieldsVisitor.fields().size());
for (Map.Entry<String, List<Object>> entry : fieldsVisitor.fields().entrySet()) {
searchFields.put(entry.getKey(), new InternalSearchHitField(entry.getKey(), entry.getValue()));
}
}
DocumentMapper documentMapper = context.mapperService().documentMapper(fieldsVisitor.uid().type());
Text typeText;
if (documentMapper == null) {
typeText = new StringAndBytesText(fieldsVisitor.uid().type());
} else {
typeText = documentMapper.typeText();
}
InternalSearchHit searchHit = new InternalSearchHit(docId, fieldsVisitor.uid().id(), typeText, searchFields);
hits[index] = searchHit;
int readerIndex = ReaderUtil.subIndex(docId, context.searcher().getIndexReader().leaves());
AtomicReaderContext subReaderContext = context.searcher().getIndexReader().leaves().get(readerIndex);
int subDoc = docId - subReaderContext.docBase;
// go over and extract fields that are not mapped / stored
context.lookup().setNextReader(subReaderContext);
context.lookup().setNextDocId(subDoc);
if (fieldsVisitor.source() != null) {
context.lookup().source().setNextSource(fieldsVisitor.source());
}
if (extractFieldNames != null) {
for (String extractFieldName : extractFieldNames) {
List<Object> values = context.lookup().source().extractRawValues(extractFieldName);
if (!values.isEmpty()) {
if (searchHit.fieldsOrNull() == null) {
searchHit.fields(new HashMap<String, SearchHitField>(2));
}
SearchHitField hitField = searchHit.fields().get(extractFieldName);
if (hitField == null) {
hitField = new InternalSearchHitField(extractFieldName, new ArrayList<Object>(2));
searchHit.fields().put(extractFieldName, hitField);
}
for (Object value : values) {
hitField.values().add(value);
}
}
}
}
hitContext.reset(searchHit, subReaderContext, subDoc, context.searcher().getIndexReader(), docId, fieldsVisitor);
for (FetchSubPhase fetchSubPhase : fetchSubPhases) {
if (fetchSubPhase.hitExecutionNeeded(context)) {
fetchSubPhase.hitExecute(context, hitContext);
}
}
}
for (FetchSubPhase fetchSubPhase : fetchSubPhases) {
if (fetchSubPhase.hitsExecutionNeeded(context)) {
fetchSubPhase.hitsExecute(context, hits);
}
}
context.fetchResult().hits(new InternalSearchHits(hits, context.queryResult().topDocs().totalHits, context.queryResult().topDocs().getMaxScore()));
}
private void loadStoredFields(SearchContext context, FieldsVisitor fieldVisitor, int docId) {
fieldVisitor.reset();
try {
context.searcher().doc(docId, fieldVisitor);
} catch (IOException e) {
throw new FetchPhaseExecutionException(context, "Failed to fetch doc id [" + docId + "]", e);
}
}
} | 1no label
| src_main_java_org_elasticsearch_search_fetch_FetchPhase.java |
357 | public class ODatabaseDocumentPool extends ODatabasePoolBase<ODatabaseDocumentTx> {
private static ODatabaseDocumentPool globalInstance = new ODatabaseDocumentPool();
public ODatabaseDocumentPool() {
super();
}
public ODatabaseDocumentPool(final String iURL, final String iUserName, final String iUserPassword) {
super(iURL, iUserName, iUserPassword);
}
public static ODatabaseDocumentPool global() {
globalInstance.setup();
return globalInstance;
}
public static ODatabaseDocumentPool global(final int iPoolMin, final int iPoolMax) {
globalInstance.setup(iPoolMin, iPoolMax);
return globalInstance;
}
@Override
protected ODatabaseDocumentTx createResource(Object owner, String iDatabaseName, Object... iAdditionalArgs) {
return new ODatabaseDocumentTxPooled((ODatabaseDocumentPool) owner, iDatabaseName, (String) iAdditionalArgs[0],
(String) iAdditionalArgs[1]);
}
} | 0true
| core_src_main_java_com_orientechnologies_orient_core_db_document_ODatabaseDocumentPool.java |
876 | public class CountDownLatchProxy extends AbstractDistributedObject<CountDownLatchService> implements ICountDownLatch {
private final String name;
private final int partitionId;
public CountDownLatchProxy(String name, NodeEngine nodeEngine) {
super(nodeEngine, null);
this.name = name;
Data nameAsPartitionAwareData = getNameAsPartitionAwareData();
partitionId = nodeEngine.getPartitionService().getPartitionId(nameAsPartitionAwareData);
}
@Override
public String getName() {
return name;
}
@Override
public boolean await(long timeout, TimeUnit unit) throws InterruptedException {
AwaitOperation op = new AwaitOperation(name, getTimeInMillis(timeout, unit));
Future<Boolean> f = invoke(op);
try {
return f.get();
} catch (ExecutionException e) {
throw rethrowAllowInterrupted(e);
}
}
private static long getTimeInMillis(long time, TimeUnit timeunit) {
return timeunit != null ? timeunit.toMillis(time) : time;
}
@Override
public void countDown() {
CountDownOperation op = new CountDownOperation(name);
InternalCompletableFuture f = invoke(op);
f.getSafely();
}
@Override
public int getCount() {
GetCountOperation op = new GetCountOperation(name);
InternalCompletableFuture<Integer> f = invoke(op);
return f.getSafely();
}
@Override
public boolean trySetCount(int count) {
SetCountOperation op = new SetCountOperation(name, count);
InternalCompletableFuture<Boolean> f = invoke(op);
return f.getSafely();
}
private InternalCompletableFuture invoke(Operation op) {
NodeEngine nodeEngine = getNodeEngine();
OperationService operationService = nodeEngine.getOperationService();
return operationService.invokeOnPartition(CountDownLatchService.SERVICE_NAME, op, partitionId);
}
@Override
public String getServiceName() {
return CountDownLatchService.SERVICE_NAME;
}
@Override
public String toString() {
final StringBuilder sb = new StringBuilder("ICountDownLatch{");
sb.append("name='").append(name).append('\'');
sb.append('}');
return sb.toString();
}
} | 0true
| hazelcast_src_main_java_com_hazelcast_concurrent_countdownlatch_CountDownLatchProxy.java |
1,289 | es.execute(new Runnable() {
public void run() {
try {
while (true) {
int key = (int) (random.nextFloat() * entryCount);
int operation = ((int) (random.nextFloat() * 100));
if (operation < getPercentage) {
map.get(String.valueOf(key));
stats.gets.incrementAndGet();
} else if (operation < getPercentage + putPercentage) {
map.put(String.valueOf(key), createValue());
stats.puts.incrementAndGet();
} else {
map.remove(String.valueOf(key));
stats.removes.incrementAndGet();
}
}
} catch (HazelcastInstanceNotActiveException e) {
e.printStackTrace();
} catch (Exception e) {
e.printStackTrace();
}
}
}); | 0true
| hazelcast_src_main_java_com_hazelcast_examples_SimpleMapTest.java |
1,633 | public interface SandBoxItemDao {
public SandBoxItem retrieveById(Long id);
public SandBoxItem retrieveBySandboxAndTemporaryItemId(Long sandBoxId, SandBoxItemType type, Long tempItemId);
public SandBoxItem addSandBoxItem(Long sandBox, SandBoxOperationType operationType, SandBoxItemType itemType, String description, Long temporaryId, Long originalId);
public SandBoxItem addSandBoxItem(Long sbox, SandBoxOperationType operationType, SandBoxItemType itemType, String description, String groupDescription, Long temporaryId, Long originalId);
public SandBoxItem updateSandBoxItem(SandBoxItem sandBoxItem);
public List<SandBoxItem> retrieveSandBoxItemsForSandbox(Long sandBox);
public void delete(SandBoxItem sandBoxItem);
public List<SandBoxItem> retrieveSandBoxItemsByTypeForSandbox(Long sandBox, SandBoxItemType itemType);
public List<SandBoxItem> retrieveByGroupName(Long sandBoxId, String groupName);
public List<SandBoxItem> retrieveSandBoxItemsByTypesForSandbox(Long sandBox, List<SandBoxItemType> sandBoxItemTypes);
} | 0true
| admin_broadleaf-open-admin-platform_src_main_java_org_broadleafcommerce_openadmin_server_dao_SandBoxItemDao.java |
879 | new Thread() {
public void run() {
for (int i = 1; i < k; i++) {
try {
sleep(100);
} catch (InterruptedException e) {
e.printStackTrace();
}
final ICountDownLatch l = instances[i].getCountDownLatch("test");
l.countDown();
assertEquals(k - 1 - i, l.getCount());
}
}
}.start(); | 0true
| hazelcast_src_test_java_com_hazelcast_concurrent_countdownlatch_CountDownLatchTest.java |
1,424 | clusterService.submitStateUpdateTask("refresh-mapping [" + index + "][" + Arrays.toString(types) + "]", Priority.HIGH, new ClusterStateUpdateTask() {
@Override
public void onFailure(String source, Throwable t) {
logger.warn("failure during [{}]", t, source);
}
@Override
public ClusterState execute(ClusterState currentState) throws Exception {
return executeRefreshOrUpdate(currentState, insertOrder);
}
}); | 0true
| src_main_java_org_elasticsearch_cluster_metadata_MetaDataMappingService.java |
824 | static final class Fields {
static final XContentBuilderString RESPONSES = new XContentBuilderString("responses");
static final XContentBuilderString ERROR = new XContentBuilderString("error");
} | 0true
| src_main_java_org_elasticsearch_action_search_MultiSearchResponse.java |
671 | CollectionUtils.filter(filteredFeaturedProducts, new Predicate() {
@Override
public boolean evaluate(Object arg) {
return 'Y' != ((Status) ((FeaturedProduct) arg).getProduct()).getArchived();
}
}); | 0true
| core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_catalog_domain_CategoryImpl.java |
102 | public class ODynamicFactory<K, V> {
protected final Map<K, V> registry = new LinkedHashMap<K, V>();
public V get(final K iKey) {
return registry.get(iKey);
}
public void register(final K iKey, final V iValue) {
registry.put(iKey, iValue);
}
public void unregister(final K iKey) {
registry.remove(iKey);
}
public void unregisterAll() {
registry.clear();
}
} | 0true
| commons_src_main_java_com_orientechnologies_common_factory_ODynamicFactory.java |
2,541 | abstract class AbstractIOSelector extends Thread implements IOSelector {
private static final int TIMEOUT = 3;
private static final int WAIT_TIME = 5000;
protected final ILogger logger;
protected final Queue<Runnable> selectorQueue = new ConcurrentLinkedQueue<Runnable>();
protected final IOService ioService;
protected final int waitTime;
protected final Selector selector;
protected boolean live = true;
private final CountDownLatch shutdownLatch = new CountDownLatch(1);
protected AbstractIOSelector(IOService ioService, String tname) {
super(ioService.getThreadGroup(), tname);
this.ioService = ioService;
this.logger = ioService.getLogger(getClass().getName());
// WARNING: This value has significant effect on idle CPU usage!
this.waitTime = WAIT_TIME;
Selector selectorTemp = null;
try {
selectorTemp = Selector.open();
} catch (final IOException e) {
handleSelectorException(e);
}
this.selector = selectorTemp;
}
public final void shutdown() {
selectorQueue.clear();
try {
addTask(new Runnable() {
public void run() {
live = false;
shutdownLatch.countDown();
}
});
interrupt();
} catch (Throwable ignored) {
}
}
public final void awaitShutdown() {
try {
shutdownLatch.await(TIMEOUT, TimeUnit.SECONDS);
} catch (InterruptedException ignored) {
}
}
public final void addTask(Runnable runnable) {
selectorQueue.add(runnable);
}
private void processSelectionQueue() {
//noinspection WhileLoopSpinsOnField
while (live) {
final Runnable runnable = selectorQueue.poll();
if (runnable == null) {
return;
}
runnable.run();
}
}
public final void run() {
try {
//noinspection WhileLoopSpinsOnField
while (live) {
processSelectionQueue();
if (!live || isInterrupted()) {
if (logger.isFinestEnabled()) {
logger.finest(getName() + " is interrupted!");
}
live = false;
return;
}
int selectedKeyCount;
try {
selectedKeyCount = selector.select(waitTime);
} catch (Throwable e) {
logger.warning(e.toString());
continue;
}
if (selectedKeyCount == 0) {
continue;
}
final Set<SelectionKey> setSelectedKeys = selector.selectedKeys();
final Iterator<SelectionKey> it = setSelectedKeys.iterator();
while (it.hasNext()) {
final SelectionKey sk = it.next();
try {
it.remove();
handleSelectionKey(sk);
} catch (Throwable e) {
handleSelectorException(e);
}
}
}
} catch (OutOfMemoryError e) {
ioService.onOutOfMemory(e);
} catch (Throwable e) {
logger.warning("Unhandled exception in " + getName(), e);
} finally {
try {
if (logger.isFinestEnabled()) {
logger.finest("Closing selector " + getName());
}
selector.close();
} catch (final Exception ignored) {
}
}
}
protected abstract void handleSelectionKey(SelectionKey sk);
private void handleSelectorException(final Throwable e) {
String msg = "Selector exception at " + getName() + ", cause= " + e.toString();
logger.warning(msg, e);
if (e instanceof OutOfMemoryError) {
ioService.onOutOfMemory((OutOfMemoryError) e);
}
}
public final Selector getSelector() {
return selector;
}
public final void wakeup() {
selector.wakeup();
}
} | 1no label
| hazelcast_src_main_java_com_hazelcast_nio_AbstractIOSelector.java |
863 | public class OUser extends ODocumentWrapper {
private static final long serialVersionUID = 1L;
public static final String ADMIN = "admin";
public static final String CLASS_NAME = "OUser";
public enum STATUSES {
SUSPENDED, ACTIVE
}
// AVOID THE INVOCATION OF SETTER
protected Set<ORole> roles = new HashSet<ORole>();
/**
* Constructor used in unmarshalling.
*/
public OUser() {
}
public OUser(final String iName) {
super(CLASS_NAME);
document.field("name", iName);
setAccountStatus(STATUSES.ACTIVE);
}
public OUser(String iUserName, final String iUserPassword) {
super("OUser");
document.field("name", iUserName);
setPassword(iUserPassword);
setAccountStatus(STATUSES.ACTIVE);
}
/**
* Create the user by reading the source document.
*/
public OUser(final ODocument iSource) {
fromStream(iSource);
}
@Override
@OAfterDeserialization
public void fromStream(final ODocument iSource) {
if (document != null)
return;
document = iSource;
roles = new HashSet<ORole>();
final Collection<ODocument> loadedRoles = iSource.field("roles");
if (loadedRoles != null)
for (final ODocument d : loadedRoles) {
final ORole role = document.getDatabase().getMetadata().getSecurity().getRole((String) d.field("name"));
if (role == null) {
OLogManager.instance().warn(this, "User '%s' declares to have the role '%s' but it does not exist in the database, skipping it",
getName(), d.field("name"));
document.getDatabase().getMetadata().getSecurity().repair();
} else
roles.add(role);
}
}
/**
* Checks if the user has permission to access the requested resource for the requested operation.
*
* @param iResource
* Requested resource
* @param iOperation
* Requested operation
* @return The role that has granted the permission if any, otherwise an OSecurityAccessException is raised
* @exception OSecurityAccessException
*/
public ORole allow(final String iResource, final int iOperation) {
if (roles == null || roles.isEmpty())
throw new OSecurityAccessException(document.getDatabase().getName(), "User '" + document.field("name")
+ "' has no role defined");
final ORole role = checkIfAllowed(iResource, iOperation);
if (role == null)
throw new OSecurityAccessException(document.getDatabase().getName(), "User '" + document.field("name")
+ "' has no permission to execute the operation '" + ORole.permissionToString(iOperation)
+ "' against the resource: " + iResource);
return role;
}
/**
* Checks if the user has permission to access the requested resource for the requested operation.
*
* @param iResource
* Requested resource
* @param iOperation
* Requested operation
* @return The role that has granted the permission if any, otherwise null
*/
public ORole checkIfAllowed(final String iResource, final int iOperation) {
for (ORole r : roles) {
if (r == null)
OLogManager.instance().warn(this,
"User '%s' has a null role, bypassing it. Consider fixing this user's roles before continuing", getName());
else if (r.allow(iResource, iOperation))
return r;
}
return null;
}
/**
* Checks if a rule was defined for the user.
*
* @param iResource
* Requested resource
* @return True if a rule is defined, otherwise false
*/
public boolean isRuleDefined(final String iResource) {
for (ORole r : roles)
if (r == null)
OLogManager.instance().warn(this,
"User '%s' has a null role, bypassing it. Consider fixing this user's roles before continuing", getName());
else if (r.hasRule(iResource))
return true;
return false;
}
public boolean checkPassword(final String iPassword) {
return OSecurityManager.instance().check(iPassword, (String) document.field("password"));
}
public String getName() {
return document.field("name");
}
public OUser setName(final String iName) {
document.field("name", iName);
return this;
}
public String getPassword() {
return document.field("password");
}
public OUser setPassword(final String iPassword) {
document.field("password", iPassword);
return this;
}
public static final String encryptPassword(final String iPassword) {
return OSecurityManager.instance().digest2String(iPassword, true);
}
public STATUSES getAccountStatus() {
final String status = (String) document.field("status");
if (status == null)
throw new OSecurityException("User '" + getName() + "' has no status");
return STATUSES.valueOf(status);
}
public void setAccountStatus(STATUSES accountStatus) {
document.field("status", accountStatus);
}
public Set<ORole> getRoles() {
return roles;
}
public OUser addRole(final String iRole) {
if (iRole != null)
addRole(document.getDatabase().getMetadata().getSecurity().getRole(iRole));
return this;
}
public OUser addRole(final ORole iRole) {
if (iRole != null)
roles.add(iRole);
final HashSet<ODocument> persistentRoles = new HashSet<ODocument>();
for (ORole r : roles) {
persistentRoles.add(r.toStream());
}
document.field("roles", persistentRoles);
return this;
}
@Override
@SuppressWarnings("unchecked")
public OUser save() {
document.save(OUser.class.getSimpleName());
return this;
}
@Override
public String toString() {
return getName();
}
} | 1no label
| core_src_main_java_com_orientechnologies_orient_core_metadata_security_OUser.java |
3,475 | public static enum Type {
JUST_NAME,
FULL,
} | 0true
| src_main_java_org_elasticsearch_index_mapper_ContentPath.java |
759 | public class ListProxyImpl<E> extends AbstractCollectionProxyImpl<ListService, E> implements IList<E> {
protected ListProxyImpl(String name, NodeEngine nodeEngine, ListService service) {
super(name, nodeEngine, service);
}
@Override
protected CollectionConfig getConfig(NodeEngine nodeEngine) {
return nodeEngine.getConfig().findListConfig(name);
}
@Override
public void add(int index, E e) {
throwExceptionIfNull(e);
final Data value = getNodeEngine().toData(e);
final ListAddOperation operation = new ListAddOperation(name, index, value);
invoke(operation);
}
@Override
public E get(int index) {
final ListGetOperation operation = new ListGetOperation(name, index);
return invoke(operation);
}
@Override
public E set(int index, E element) {
throwExceptionIfNull(element);
final Data value = getNodeEngine().toData(element);
final ListSetOperation operation = new ListSetOperation(name, index, value);
return invoke(operation);
}
@Override
public E remove(int index) {
final ListRemoveOperation operation = new ListRemoveOperation(name, index);
return invoke(operation);
}
@Override
public int indexOf(Object o) {
return indexOfInternal(false, o);
}
@Override
public int lastIndexOf(Object o) {
return indexOfInternal(true, o);
}
private int indexOfInternal(boolean last, Object o) {
throwExceptionIfNull(o);
final Data value = getNodeEngine().toData(o);
final ListIndexOfOperation operation = new ListIndexOfOperation(name, last, value);
final Integer result = invoke(operation);
return result;
}
@Override
public boolean addAll(int index, Collection<? extends E> c) {
throwExceptionIfNull(c);
List<Data> valueList = new ArrayList<Data>(c.size());
final NodeEngine nodeEngine = getNodeEngine();
for (E e : c) {
throwExceptionIfNull(e);
valueList.add(nodeEngine.toData(e));
}
final ListAddAllOperation operation = new ListAddAllOperation(name, index, valueList);
final Boolean result = invoke(operation);
return result;
}
@Override
public ListIterator<E> listIterator() {
return listIterator(0);
}
@Override
public ListIterator<E> listIterator(int index) {
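// Note: subList(-1, -1) is used below (and in toArray) as a sentinel that asks
// ListSubOperation for the entire list rather than a real index range; this is
// an assumption inferred from its usage throughout this proxy.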
final List<E> list = subList(-1, -1);
return list.listIterator(index);
}
@Override
public List<E> subList(int fromIndex, int toIndex) {
final ListSubOperation operation = new ListSubOperation(name, fromIndex, toIndex);
final SerializableCollection result = invoke(operation);
final Collection<Data> collection = result.getCollection();
final List<E> list = new ArrayList<E>(collection.size());
final NodeEngine nodeEngine = getNodeEngine();
for (Data data : collection) {
list.add(nodeEngine.<E>toObject(data));
}
return list;
}
@Override
public Iterator<E> iterator() {
return listIterator(0);
}
@Override
public Object[] toArray() {
return subList(-1, -1).toArray();
}
@Override
public <T> T[] toArray(T[] a) {
throwExceptionIfNull(a);
return subList(-1, -1).toArray(a);
}
@Override
public String getServiceName() {
return ListService.SERVICE_NAME;
}
} | 0true
| hazelcast_src_main_java_com_hazelcast_collection_list_ListProxyImpl.java |
114 | public interface PageTemplate extends Serializable {
public Long getId();
public void setId(Long id);
public String getTemplateName();
public void setTemplateName(String templateName);
public String getTemplateDescription();
public void setTemplateDescription(String templateDescription);
public String getTemplatePath();
public void setTemplatePath(String templatePath);
public Locale getLocale();
public void setLocale(Locale locale);
public List<FieldGroup> getFieldGroups();
public void setFieldGroups(List<FieldGroup> fieldGroups);
} | 0true
| admin_broadleaf-contentmanagement-module_src_main_java_org_broadleafcommerce_cms_page_domain_PageTemplate.java |
184 | private class IDStressor implements Runnable {
private final int numRounds;
private final int numPartitions;
private final int maxIterations;
private final IDAuthority authority;
private final List<ConcurrentLinkedQueue<IDBlock>> allocatedBlocks;
private static final long sleepMS = 250L;
private IDStressor(int numRounds, int numPartitions, int maxIterations,
IDAuthority authority, List<ConcurrentLinkedQueue<IDBlock>> ids) {
this.numRounds = numRounds;
this.numPartitions = numPartitions;
this.maxIterations = maxIterations;
this.authority = authority;
this.allocatedBlocks = ids;
}
@Override
public void run() {
try {
runInterruptible();
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
}
private void runInterruptible() throws InterruptedException {
int iterations = 0;
long[] lastStart = new long[numPartitions];
for (int i = 0; i < numPartitions; i++)
lastStart[i] = Long.MIN_VALUE;
for (int j = 0; j < numRounds; j++) {
for (int p = 0; p < numPartitions; p++) {
if (maxIterations < ++iterations) {
throwIterationsExceededException();
}
final IDBlock block = allocate(p);
if (null == block) {
Thread.sleep(sleepMS);
p--;
} else {
allocatedBlocks.get(p).add(block);
if (hasEmptyUid) {
long start = block.getId(0);
Assert.assertTrue("Previous block start "
+ lastStart[p] + " exceeds next block start "
+ start, lastStart[p] <= start);
lastStart[p] = start;
}
}
}
}
}
private IDBlock allocate(int partitionIndex) {
IDBlock block;
try {
block = authority.getIDBlock(partitionIndex,partitionIndex,GET_ID_BLOCK_TIMEOUT);
} catch (BackendException e) {
log.error("Unexpected exception while getting ID block", e);
return null;
}
/*
* This is not guaranteed in the consistentkey implementation.
* Writers of ID block claims in that implementation delete their
* writes if they take too long. A peek can see this short-lived
* block claim even though a subsequent getblock does not.
*/
// Assert.assertTrue(nextId <= block[0]);
if (hasEmptyUid) assertEquals(block.getId(0)+ blockSize-1, block.getId(blockSize-1));
log.trace("Obtained ID block {}", block);
return block;
}
private boolean throwIterationsExceededException() {
throw new RuntimeException(
"Exceeded maximum ID allocation iteration count ("
+ maxIterations + "); too many timeouts?");
}
} | 0true
| titan-test_src_main_java_com_thinkaurelius_titan_diskstorage_IDAuthorityTest.java |
22 | static abstract class NavigableSubMap<K, V> extends AbstractMap<K, V> implements ONavigableMap<K, V>, java.io.Serializable {
/**
* The backing map.
*/
final OMVRBTree<K, V> m;
/**
* Endpoints are represented as triples (fromStart, lo, loInclusive) and (toEnd, hi, hiInclusive). If fromStart is true, then
* the low (absolute) bound is the start of the backing map, and the other values are ignored. Otherwise, if loInclusive is
* true, lo is the inclusive bound, else lo is the exclusive bound. Similarly for the upper bound.
*/
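// Illustrative example: a view created via subMap(5, true, 10, false) is represented as
// (fromStart=false, lo=5, loInclusive=true, toEnd=false, hi=10, hiInclusive=false), while
// headMap(10, false) sets fromStart=true, so lo and loInclusive are ignored.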
final K lo, hi;
final boolean fromStart, toEnd;
final boolean loInclusive, hiInclusive;
NavigableSubMap(final OMVRBTree<K, V> m, final boolean fromStart, K lo, final boolean loInclusive, final boolean toEnd, K hi,
final boolean hiInclusive) {
if (!fromStart && !toEnd) {
if (m.compare(lo, hi) > 0)
throw new IllegalArgumentException("fromKey > toKey");
} else {
if (!fromStart) // type check
m.compare(lo, lo);
if (!toEnd)
m.compare(hi, hi);
}
this.m = m;
this.fromStart = fromStart;
this.lo = lo;
this.loInclusive = loInclusive;
this.toEnd = toEnd;
this.hi = hi;
this.hiInclusive = hiInclusive;
}
// internal utilities
final boolean tooLow(final Object key) {
if (!fromStart) {
int c = m.compare(key, lo);
if (c < 0 || (c == 0 && !loInclusive))
return true;
}
return false;
}
final boolean tooHigh(final Object key) {
if (!toEnd) {
int c = m.compare(key, hi);
if (c > 0 || (c == 0 && !hiInclusive))
return true;
}
return false;
}
final boolean inRange(final Object key) {
return !tooLow(key) && !tooHigh(key);
}
final boolean inClosedRange(final Object key) {
return (fromStart || m.compare(key, lo) >= 0) && (toEnd || m.compare(hi, key) >= 0);
}
final boolean inRange(final Object key, final boolean inclusive) {
return inclusive ? inRange(key) : inClosedRange(key);
}
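// Note (rationale, stated as an assumption): inRange(key, false) deliberately falls back to
// the closed-range check because an exclusive endpoint passed to this view's subMap/headMap/
// tailMap may legally equal the view's own bound, even when that bound is itself exclusive.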
/*
* Absolute versions of relation operations. Subclasses map to these using like-named "sub" versions that invert senses for
* descending maps
*/
final OMVRBTreeEntryPosition<K, V> absLowest() {
OMVRBTreeEntry<K, V> e = (fromStart ? m.getFirstEntry() : (loInclusive ? m.getCeilingEntry(lo,
PartialSearchMode.LOWEST_BOUNDARY) : m.getHigherEntry(lo)));
return (e == null || tooHigh(e.getKey())) ? null : new OMVRBTreeEntryPosition<K, V>(e);
}
final OMVRBTreeEntryPosition<K, V> absHighest() {
OMVRBTreeEntry<K, V> e = (toEnd ? m.getLastEntry() : (hiInclusive ? m.getFloorEntry(hi, PartialSearchMode.HIGHEST_BOUNDARY)
: m.getLowerEntry(hi)));
return (e == null || tooLow(e.getKey())) ? null : new OMVRBTreeEntryPosition<K, V>(e);
}
final OMVRBTreeEntryPosition<K, V> absCeiling(K key) {
if (tooLow(key))
return absLowest();
OMVRBTreeEntry<K, V> e = m.getCeilingEntry(key, PartialSearchMode.NONE);
return (e == null || tooHigh(e.getKey())) ? null : new OMVRBTreeEntryPosition<K, V>(e);
}
final OMVRBTreeEntryPosition<K, V> absHigher(K key) {
if (tooLow(key))
return absLowest();
OMVRBTreeEntry<K, V> e = m.getHigherEntry(key);
return (e == null || tooHigh(e.getKey())) ? null : new OMVRBTreeEntryPosition<K, V>(e);
}
final OMVRBTreeEntryPosition<K, V> absFloor(K key) {
if (tooHigh(key))
return absHighest();
OMVRBTreeEntry<K, V> e = m.getFloorEntry(key, PartialSearchMode.NONE);
return (e == null || tooLow(e.getKey())) ? null : new OMVRBTreeEntryPosition<K, V>(e);
}
final OMVRBTreeEntryPosition<K, V> absLower(K key) {
if (tooHigh(key))
return absHighest();
OMVRBTreeEntry<K, V> e = m.getLowerEntry(key);
return (e == null || tooLow(e.getKey())) ? null : new OMVRBTreeEntryPosition<K, V>(e);
}
/** Returns the absolute high fence for ascending traversal */
final OMVRBTreeEntryPosition<K, V> absHighFence() {
return (toEnd ? null : new OMVRBTreeEntryPosition<K, V>(hiInclusive ? m.getHigherEntry(hi) : m.getCeilingEntry(hi,
PartialSearchMode.LOWEST_BOUNDARY)));
}
/** Returns the absolute low fence for descending traversal */
final OMVRBTreeEntryPosition<K, V> absLowFence() {
return (fromStart ? null : new OMVRBTreeEntryPosition<K, V>(loInclusive ? m.getLowerEntry(lo) : m.getFloorEntry(lo,
PartialSearchMode.HIGHEST_BOUNDARY)));
}
// Abstract methods defined in ascending vs descending classes
// These relay to the appropriate absolute versions
abstract OMVRBTreeEntry<K, V> subLowest();
abstract OMVRBTreeEntry<K, V> subHighest();
abstract OMVRBTreeEntry<K, V> subCeiling(K key);
abstract OMVRBTreeEntry<K, V> subHigher(K key);
abstract OMVRBTreeEntry<K, V> subFloor(K key);
abstract OMVRBTreeEntry<K, V> subLower(K key);
/** Returns ascending iterator from the perspective of this submap */
abstract OLazyIterator<K> keyIterator();
/** Returns descending iterator from the perspective of this submap */
abstract OLazyIterator<K> descendingKeyIterator();
// public methods
@Override
public boolean isEmpty() {
return (fromStart && toEnd) ? m.isEmpty() : entrySet().isEmpty();
}
@Override
public int size() {
return (fromStart && toEnd) ? m.size() : entrySet().size();
}
@Override
public final boolean containsKey(Object key) {
return inRange(key) && m.containsKey(key);
}
@Override
public final V put(K key, V value) {
if (!inRange(key))
throw new IllegalArgumentException("key out of range");
return m.put(key, value);
}
@Override
public final V get(Object key) {
return !inRange(key) ? null : m.get(key);
}
@Override
public final V remove(Object key) {
return !inRange(key) ? null : m.remove(key);
}
public final Map.Entry<K, V> ceilingEntry(K key) {
return exportEntry(subCeiling(key));
}
public final K ceilingKey(K key) {
return keyOrNull(subCeiling(key));
}
public final Map.Entry<K, V> higherEntry(K key) {
return exportEntry(subHigher(key));
}
public final K higherKey(K key) {
return keyOrNull(subHigher(key));
}
public final Map.Entry<K, V> floorEntry(K key) {
return exportEntry(subFloor(key));
}
public final K floorKey(K key) {
return keyOrNull(subFloor(key));
}
public final Map.Entry<K, V> lowerEntry(K key) {
return exportEntry(subLower(key));
}
public final K lowerKey(K key) {
return keyOrNull(subLower(key));
}
public final K firstKey() {
return key(subLowest());
}
public final K lastKey() {
return key(subHighest());
}
public final Map.Entry<K, V> firstEntry() {
return exportEntry(subLowest());
}
public final Map.Entry<K, V> lastEntry() {
return exportEntry(subHighest());
}
public final Map.Entry<K, V> pollFirstEntry() {
OMVRBTreeEntry<K, V> e = subLowest();
Map.Entry<K, V> result = exportEntry(e);
if (e != null)
m.deleteEntry(e);
return result;
}
public final Map.Entry<K, V> pollLastEntry() {
OMVRBTreeEntry<K, V> e = subHighest();
Map.Entry<K, V> result = exportEntry(e);
if (e != null)
m.deleteEntry(e);
return result;
}
// Views
transient ONavigableMap<K, V> descendingMapView = null;
transient EntrySetView entrySetView = null;
transient KeySet<K> navigableKeySetView = null;
@SuppressWarnings("rawtypes")
public final ONavigableSet<K> navigableKeySet() {
KeySet<K> nksv = navigableKeySetView;
return (nksv != null) ? nksv : (navigableKeySetView = new OMVRBTree.KeySet(this));
}
@Override
public final Set<K> keySet() {
return navigableKeySet();
}
public ONavigableSet<K> descendingKeySet() {
return descendingMap().navigableKeySet();
}
public final SortedMap<K, V> subMap(final K fromKey, final K toKey) {
return subMap(fromKey, true, toKey, false);
}
public final SortedMap<K, V> headMap(final K toKey) {
return headMap(toKey, false);
}
public final SortedMap<K, V> tailMap(final K fromKey) {
return tailMap(fromKey, true);
}
// View classes
abstract class EntrySetView extends AbstractSet<Map.Entry<K, V>> {
private transient int size = -1, sizeModCount;
@Override
public int size() {
if (fromStart && toEnd)
return m.size();
if (size == -1 || sizeModCount != m.modCount) {
sizeModCount = m.modCount;
size = 0;
Iterator<?> i = iterator();
while (i.hasNext()) {
size++;
i.next();
}
}
return size;
}
@Override
public boolean isEmpty() {
OMVRBTreeEntryPosition<K, V> n = absLowest();
return n == null || tooHigh(n.getKey());
}
@Override
public boolean contains(final Object o) {
if (!(o instanceof OMVRBTreeEntry))
return false;
final OMVRBTreeEntry<K, V> entry = (OMVRBTreeEntry<K, V>) o;
final K key = entry.getKey();
if (!inRange(key))
return false;
V nodeValue = m.get(key);
return nodeValue != null && valEquals(nodeValue, entry.getValue());
}
@Override
public boolean remove(final Object o) {
if (!(o instanceof OMVRBTreeEntry))
return false;
final OMVRBTreeEntry<K, V> entry = (OMVRBTreeEntry<K, V>) o;
final K key = entry.getKey();
if (!inRange(key))
return false;
final OMVRBTreeEntry<K, V> node = m.getEntry(key, PartialSearchMode.NONE);
if (node != null && valEquals(node.getValue(), entry.getValue())) {
m.deleteEntry(node);
return true;
}
return false;
}
}
/**
* Iterators for SubMaps
*/
abstract class SubMapIterator<T> implements OLazyIterator<T> {
OMVRBTreeEntryPosition<K, V> lastReturned;
OMVRBTreeEntryPosition<K, V> next;
final K fenceKey;
int expectedModCount;
SubMapIterator(final OMVRBTreeEntryPosition<K, V> first, final OMVRBTreeEntryPosition<K, V> fence) {
expectedModCount = m.modCount;
lastReturned = null;
next = first;
fenceKey = fence == null ? null : fence.getKey();
}
public final boolean hasNext() {
if (next != null) {
final K k = next.getKey();
return k != fenceKey && !k.equals(fenceKey);
}
return false;
}
final OMVRBTreeEntryPosition<K, V> nextEntry() {
final OMVRBTreeEntryPosition<K, V> e;
if (next != null)
e = new OMVRBTreeEntryPosition<K, V>(next);
else
e = null;
if (e == null || e.entry == null)
throw new NoSuchElementException();
final K k = e.getKey();
if (k == fenceKey || k.equals(fenceKey))
throw new NoSuchElementException();
if (m.modCount != expectedModCount)
throw new ConcurrentModificationException();
next.assign(OMVRBTree.next(e));
lastReturned = e;
return e;
}
final OMVRBTreeEntryPosition<K, V> prevEntry() {
final OMVRBTreeEntryPosition<K, V> e;
if (next != null)
e = new OMVRBTreeEntryPosition<K, V>(next);
else
e = null;
if (e == null || e.entry == null)
throw new NoSuchElementException();
final K k = e.getKey();
if (k == fenceKey || k.equals(fenceKey))
throw new NoSuchElementException();
if (m.modCount != expectedModCount)
throw new ConcurrentModificationException();
next.assign(OMVRBTree.previous(e));
lastReturned = e;
return e;
}
final public T update(final T iValue) {
if (lastReturned == null)
throw new IllegalStateException();
if (m.modCount != expectedModCount)
throw new ConcurrentModificationException();
return (T) lastReturned.entry.setValue((V) iValue);
}
final void removeAscending() {
if (lastReturned == null)
throw new IllegalStateException();
if (m.modCount != expectedModCount)
throw new ConcurrentModificationException();
// deleted entries are replaced by their successors
if (lastReturned.entry.getLeft() != null && lastReturned.entry.getRight() != null)
next = lastReturned;
m.deleteEntry(lastReturned.entry);
lastReturned = null;
expectedModCount = m.modCount;
}
final void removeDescending() {
if (lastReturned == null)
throw new IllegalStateException();
if (m.modCount != expectedModCount)
throw new ConcurrentModificationException();
m.deleteEntry(lastReturned.entry);
lastReturned = null;
expectedModCount = m.modCount;
}
}
final class SubMapEntryIterator extends SubMapIterator<Map.Entry<K, V>> {
SubMapEntryIterator(final OMVRBTreeEntryPosition<K, V> first, final OMVRBTreeEntryPosition<K, V> fence) {
super(first, fence);
}
public Map.Entry<K, V> next() {
final Map.Entry<K, V> e = OMVRBTree.exportEntry(next);
nextEntry();
return e;
}
public void remove() {
removeAscending();
}
}
final class SubMapKeyIterator extends SubMapIterator<K> {
SubMapKeyIterator(final OMVRBTreeEntryPosition<K, V> first, final OMVRBTreeEntryPosition<K, V> fence) {
super(first, fence);
}
public K next() {
return nextEntry().getKey();
}
public void remove() {
removeAscending();
}
}
final class DescendingSubMapEntryIterator extends SubMapIterator<Map.Entry<K, V>> {
DescendingSubMapEntryIterator(final OMVRBTreeEntryPosition<K, V> last, final OMVRBTreeEntryPosition<K, V> fence) {
super(last, fence);
}
public Map.Entry<K, V> next() {
final Map.Entry<K, V> e = OMVRBTree.exportEntry(next);
prevEntry();
return e;
}
public void remove() {
removeDescending();
}
}
final class DescendingSubMapKeyIterator extends SubMapIterator<K> {
DescendingSubMapKeyIterator(final OMVRBTreeEntryPosition<K, V> last, final OMVRBTreeEntryPosition<K, V> fence) {
super(last, fence);
}
public K next() {
return prevEntry().getKey();
}
public void remove() {
removeDescending();
}
}
} | 0true
| commons_src_main_java_com_orientechnologies_common_collection_OMVRBTree.java |
65 | {
@Override
@SuppressWarnings("deprecation")
public TxIdGenerator getTxIdGenerator()
{
return TxIdGenerator.DEFAULT;
}
}; | 0true
| community_kernel_src_test_java_org_neo4j_kernel_impl_transaction_TestXaFramework.java |
133 | public interface StructuredContentField extends Serializable {
/**
* Gets the primary key.
*
* @return the primary key
*/
@Nullable
public Long getId();
/**
* Sets the primary key.
*
* @param id the new primary key
*/
public void setId(@Nullable Long id);
/**
* Returns the fieldKey associated with this field. The key used for a
* <code>StructuredContentField</code> is determined by the associated
* {@link org.broadleafcommerce.cms.field.domain.FieldDefinition} that was used by the
* Content Management System to create this instance.
*
* As an example, a <code>StructuredContentType</code> might be configured to contain a
* field definition with a key of "targetUrl".
*
* @return the key associated with this item
* @see org.broadleafcommerce.cms.field.domain.FieldDefinition
*/
@Nonnull
public String getFieldKey();
/**
* Sets the fieldKey.
* @param fieldKey
* @see org.broadleafcommerce.cms.field.domain.FieldDefinition
*/
public void setFieldKey(@Nonnull String fieldKey);
/**
* Returns the parent <code>StructuredContent</code> item to which this
* field belongs.
*
* @return
*/
@Nonnull
public StructuredContent getStructuredContent();
/**
* Sets the parent <code>StructuredContent</code> item.
* @param structuredContent
*/
public void setStructuredContent(@Nonnull StructuredContent structuredContent);
/**
* Builds a copy of this item. Used by the content management system when an
* item is edited.
*
* @return a copy of this item
*/
@Nonnull
public StructuredContentField cloneEntity();
/**
* Sets the value for this custom field.
*
* @param value
*/
public void setValue(@Nonnull String value);
/**
* Returns the value of this custom field.
* @return
*/
@Nonnull
public String getValue();
/**
* Returns audit information for this content item.
*
* @return
*/
@Nullable
public AdminAuditable getAuditable();
/**
* Sets audit information for this content item. Default implementations automatically
* populate this data during persistence.
*
* @param auditable
*/
public void setAuditable(@Nullable AdminAuditable auditable);
} | 0true
| admin_broadleaf-contentmanagement-module_src_main_java_org_broadleafcommerce_cms_structure_domain_StructuredContentField.java |
547 | @Test
public class NodeIdTest {
public void testOneOneAddValues() {
ONodeId one = ONodeId.valueOf(1);
ONodeId two = one.add(one);
String result = two.toHexString();
Assert.assertEquals(result, "000000000000000000000000000000000000000000000002");
}
public void testAddOverflowValue() {
ONodeId one = ONodeId.valueOf(0xFFFFFFFFFL);
ONodeId two = one.add(ONodeId.ONE);
String result = two.toHexString();
Assert.assertEquals(result, "000000000000000000000000000000000000001000000000");
}
public void testAddOverflowValues() {
ONodeId one = ONodeId.valueOf(0xFFFFFFFFFL);
ONodeId two = one.add(ONodeId.valueOf(0xFFFFFFFFFL));
String result = two.toHexString();
Assert.assertEquals(result, "000000000000000000000000000000000000001ffffffffe");
}
public void testAddOverflow() {
ONodeId one = ONodeId.MAX_VALUE;
ONodeId two = one.add(ONodeId.ONE);
String result = two.toHexString();
Assert.assertEquals(result, "000000000000000000000000000000000000000000000000");
}
public void testAddSamePositiveAndNegativeNumbers() {
ONodeId one = ONodeId.parseString("1234567895623");
ONodeId two = ONodeId.parseString("-1234567895623");
ONodeId result = one.add(two);
Assert.assertEquals(result, ONodeId.ZERO);
}
public void testAddPositiveMoreNegativeNumbers() {
ONodeId one = ONodeId.parseString("12358971234567895622");
ONodeId two = ONodeId.parseString("-1234567895623");
ONodeId result = one.add(two);
Assert.assertEquals(result, ONodeId.parseString("12358969999999999999"));
}
public void testAddPositiveLessNegativeNumbers() {
ONodeId one = ONodeId.parseString("1234567895623");
ONodeId two = ONodeId.parseString("-12358971234567895622");
ONodeId result = one.add(two);
Assert.assertEquals(result, ONodeId.parseString("-12358969999999999999"));
}
public void testAddToZeroPositive() {
ONodeId one = ONodeId.parseString("1234567895623");
ONodeId two = ONodeId.parseString("0");
ONodeId result = one.add(two);
Assert.assertEquals(result, ONodeId.parseString("1234567895623"));
}
public void testAddToZeroNegative() {
ONodeId one = ONodeId.parseString("-1234567895623");
ONodeId two = ONodeId.parseString("0");
ONodeId result = one.add(two);
Assert.assertEquals(result, ONodeId.parseString("-1234567895623"));
}
public void testAddZeroToPositive() {
ONodeId two = ONodeId.ZERO.add(ONodeId.parseString("1234567895623"));
Assert.assertEquals(two, ONodeId.parseString("1234567895623"));
}
public void testAddZeroToNegative() {
ONodeId two = ONodeId.ZERO.add(ONodeId.parseString("-1234567895623"));
Assert.assertEquals(two, ONodeId.parseString("-1234567895623"));
}
public void testSubtractTwoMinusOne() {
ONodeId one = ONodeId.valueOf(2);
ONodeId two = one.subtract(ONodeId.ONE);
String result = two.toHexString();
Assert.assertEquals(result, "000000000000000000000000000000000000000000000001");
}
public void testSubtractOverflowValue() {
ONodeId one = ONodeId.valueOf(0xF0000000L);
ONodeId two = one.subtract(ONodeId.ONE);
String result = two.toHexString();
Assert.assertEquals(result, "0000000000000000000000000000000000000000efffffff");
}
public void testSubtractOverflowValueTwo() {
ONodeId one = ONodeId.valueOf(0xF0000000L);
ONodeId two = ONodeId.ONE.subtract(one);
String result = two.toHexString();
Assert.assertEquals(result, "-0000000000000000000000000000000000000000efffffff");
}
public void testSubtractToNegativeResult() {
ONodeId one = ONodeId.ZERO;
ONodeId two = one.subtract(ONodeId.ONE);
String result = two.toHexString();
Assert.assertEquals(result, "-000000000000000000000000000000000000000000000001");
}
public void testSubtractZero() {
ONodeId one = ONodeId.parseString("451234567894123456987465");
ONodeId two = one.subtract(ONodeId.ZERO);
Assert.assertEquals(two, ONodeId.parseString("451234567894123456987465"));
}
public void testSubtractFromZeroPositive() {
ONodeId one = ONodeId.parseString("451234567894123456987465");
ONodeId two = ONodeId.ZERO.subtract(one);
Assert.assertEquals(two, ONodeId.parseString("-451234567894123456987465"));
}
public void testSubtractFromZeroNegative() {
ONodeId one = ONodeId.parseString("-451234567894123456987465");
ONodeId two = ONodeId.ZERO.subtract(one);
Assert.assertEquals(two, ONodeId.parseString("451234567894123456987465"));
}
public void testSubtractZeroFromZero() {
ONodeId two = ONodeId.ZERO.subtract(ONodeId.ZERO);
Assert.assertEquals(two, ONodeId.ZERO);
}
public void testSubtractFromNegativePositive() {
ONodeId one = ONodeId.parseString("-99999999999999");
ONodeId two = ONodeId.parseString("10");
ONodeId result = one.subtract(two);
Assert.assertEquals(result, ONodeId.parseString("-100000000000009"));
}
public void testSubtractFromPositiveNegative() {
ONodeId one = ONodeId.parseString("99999999999999");
ONodeId two = ONodeId.parseString("-10");
ONodeId result = one.subtract(two);
Assert.assertEquals(result, ONodeId.parseString("100000000000009"));
}
public void testSubtractSamePositiveNumbers() {
ONodeId one = ONodeId.parseString("1245796317821536854785");
ONodeId two = ONodeId.parseString("1245796317821536854785");
ONodeId result = one.subtract(two);
Assert.assertEquals(result, ONodeId.ZERO);
}
public void testSubtractSameNegativeNumbers() {
ONodeId one = ONodeId.parseString("-1245796317821536854785");
ONodeId two = ONodeId.parseString("-1245796317821536854785");
ONodeId result = one.subtract(two);
Assert.assertEquals(result, ONodeId.ZERO);
}
public void testSubtractFromMaxNegativeOnePositive() {
ONodeId result = ONodeId.MIN_VALUE.subtract(ONodeId.ONE);
Assert.assertEquals(result, ONodeId.ZERO);
}
public void testSubtractFromMaxPositiveOneNegative() {
ONodeId result = ONodeId.MAX_VALUE.subtract(ONodeId.parseString("-1"));
Assert.assertEquals(result, ONodeId.ZERO);
}
public void testMultiplyTwoAndFive() {
ONodeId one = ONodeId.valueOf(2);
ONodeId two = one.multiply(5);
String result = two.toHexString();
Assert.assertEquals(result, "00000000000000000000000000000000000000000000000a");
}
public void testMultiplyOnZero() {
ONodeId one = ONodeId.valueOf(2);
ONodeId two = one.multiply(0);
Assert.assertEquals(two, ONodeId.ZERO);
}
public void testMultiplyOverflowNumbers() {
ONodeId one = ONodeId.valueOf(0xFFFFFFFFFL);
ONodeId two = one.multiply(26);
String result = two.toHexString();
Assert.assertEquals(result, "000000000000000000000000000000000000019fffffffe6");
}
public void testLeftShift2Bits() {
ONodeId nodeOne = ONodeId.valueOf(0xFFFFFFFFDL);
ONodeId two = nodeOne.shiftLeft(2);
String result = two.toHexString();
Assert.assertEquals(result, "000000000000000000000000000000000000003ffffffff4");
}
public void testLeftShift32Bits() {
ONodeId nodeOne = ONodeId.valueOf(0xFFFFFFFFDL);
ONodeId two = nodeOne.shiftLeft(32);
String result = two.toHexString();
Assert.assertEquals(result, "0000000000000000000000000000000ffffffffd00000000");
}
public void testLeftShift34Bits() {
ONodeId nodeOne = ONodeId.valueOf(0xFFFFFFFFDL);
ONodeId two = nodeOne.shiftLeft(34);
String result = two.toHexString();
Assert.assertEquals(result, "0000000000000000000000000000003ffffffff400000000");
}
public void testLeftShiftTillZero() {
ONodeId nodeOne = ONodeId.valueOf(0xFFFFFFFFDL);
ONodeId two = nodeOne.shiftLeft(192);
Assert.assertEquals(two, ONodeId.ZERO);
}
public void testLeftShiftTillZeroTwo() {
ONodeId nodeOne = ONodeId.parseHexSting("0000000000000000000000000000003ffffffff400000000");
ONodeId two = nodeOne.shiftLeft(160);
Assert.assertEquals(two, ONodeId.ZERO);
}
public void testRightShift2Bits() {
ONodeId nodeOne = ONodeId.valueOf(0xAAAFFFFFFFFDL);
ONodeId two = nodeOne.shiftRight(2);
String result = two.toHexString();
Assert.assertEquals(result, "0000000000000000000000000000000000002aabffffffff");
}
public void testRightShift32Bits() {
ONodeId nodeOne = ONodeId.valueOf(0xAAAFFFFFFFFDL);
ONodeId two = nodeOne.shiftRight(32);
String result = two.toHexString();
Assert.assertEquals(result, "00000000000000000000000000000000000000000000aaaf");
}
public void testRightShift34Bits() {
ONodeId nodeOne = ONodeId.valueOf(0xAAAFFFFFFFFDL);
ONodeId two = nodeOne.shiftRight(34);
String result = two.toHexString();
Assert.assertEquals(result, "000000000000000000000000000000000000000000002aab");
}
public void testRightShiftTillZero() {
ONodeId nodeOne = ONodeId.valueOf(0xFFFFFFFFDL);
ONodeId two = nodeOne.shiftRight(192);
Assert.assertEquals(two, ONodeId.ZERO);
}
public void testRightShiftTillZeroTwo() {
ONodeId nodeOne = ONodeId.parseHexSting("0000000000000000000000000000003ffffffff400000000");
ONodeId two = nodeOne.shiftRight(72);
Assert.assertEquals(two, ONodeId.ZERO);
}
public void testIntValue() {
final ONodeId nodeId = ONodeId.valueOf(0xAAAFFFFFFFFDL);
Assert.assertEquals(0xFFFFFFFD, nodeId.intValue());
}
public void testToValueOfFromString() {
final ONodeId nodeId = ONodeId.parseHexSting("00123456789abcdef0000000000123000000000000002aab");
Assert.assertEquals(nodeId.toHexString(), "00123456789abcdef0000000000123000000000000002aab");
}
public void testToStream() {
final ONodeId nodeId = ONodeId.parseHexSting("00123456789abcdef0000000000123000000000000002aab");
byte[] expectedResult = new byte[25];
expectedResult[0] = 0;
expectedResult[1] = 0x12;
expectedResult[2] = 0x34;
expectedResult[3] = 0x56;
expectedResult[4] = 0x78;
expectedResult[5] = (byte) 0x9A;
expectedResult[6] = (byte) 0xBC;
expectedResult[7] = (byte) 0xDE;
expectedResult[8] = (byte) 0xF0;
expectedResult[9] = (byte) 0x00;
expectedResult[10] = (byte) 0x00;
expectedResult[11] = (byte) 0x00;
expectedResult[12] = (byte) 0x00;
expectedResult[13] = (byte) 0x01;
expectedResult[14] = (byte) 0x23;
expectedResult[15] = (byte) 0x00;
expectedResult[16] = (byte) 0x00;
expectedResult[17] = (byte) 0x00;
expectedResult[18] = (byte) 0x00;
expectedResult[19] = (byte) 0x00;
expectedResult[20] = (byte) 0x00;
expectedResult[21] = (byte) 0x00;
expectedResult[22] = (byte) 0x2a;
expectedResult[23] = (byte) 0xab;
expectedResult[24] = (byte) 1;
Assert.assertEquals(nodeId.toStream(), expectedResult);
}
public void testChunksToByteArray() {
final ONodeId nodeId = ONodeId.parseHexSting("00123456789abcdef0000000000123000000000000002aab");
byte[] expectedResult = new byte[24];
expectedResult[0] = 0;
expectedResult[1] = 0x12;
expectedResult[2] = 0x34;
expectedResult[3] = 0x56;
expectedResult[4] = 0x78;
expectedResult[5] = (byte) 0x9A;
expectedResult[6] = (byte) 0xBC;
expectedResult[7] = (byte) 0xDE;
expectedResult[8] = (byte) 0xF0;
expectedResult[9] = (byte) 0x00;
expectedResult[10] = (byte) 0x00;
expectedResult[11] = (byte) 0x00;
expectedResult[12] = (byte) 0x00;
expectedResult[13] = (byte) 0x01;
expectedResult[14] = (byte) 0x23;
expectedResult[15] = (byte) 0x00;
expectedResult[16] = (byte) 0x00;
expectedResult[17] = (byte) 0x00;
expectedResult[18] = (byte) 0x00;
expectedResult[19] = (byte) 0x00;
expectedResult[20] = (byte) 0x00;
expectedResult[21] = (byte) 0x00;
expectedResult[22] = (byte) 0x2a;
expectedResult[23] = (byte) 0xab;
Assert.assertEquals(nodeId.chunksToByteArray(), expectedResult);
}
public void testLongValuePositive() {
final ONodeId nodeId = ONodeId.parseHexSting("00123456789abcdef000000000012300ecffaabb12342aab");
Assert.assertEquals(nodeId.longValue(), 0x6cffaabb12342aabL);
}
public void testLongValueNegative() {
final ONodeId nodeId = ONodeId.parseHexSting("-00123456789abcdef000000000012300ecffaabb12342aab");
Assert.assertEquals(nodeId.longValue(), -0x6cffaabb12342aabL);
}
public void testIntValuePositive() {
final ONodeId nodeId = ONodeId.parseHexSting("00123456789abcdef000000000012300ecffaabb12342aab");
Assert.assertEquals(nodeId.intValue(), 0x12342aab);
}
public void testIntValueNegative() {
final ONodeId nodeId = ONodeId.parseHexSting("-00123456789abcdef000000000012300ecffaabb12342aab");
Assert.assertEquals(nodeId.intValue(), -0x12342aab);
}
public void testFromStreamPositive() {
final ONodeId nodeId = ONodeId.parseString("1343412555467812");
final byte[] content = nodeId.toStream();
final ONodeId deserializedNodeId = ONodeId.fromStream(content, 0);
Assert.assertEquals(nodeId, deserializedNodeId);
}
public void testFromStreamNegative() {
final ONodeId nodeId = ONodeId.parseString("-1343412555467812");
final byte[] content = nodeId.toStream();
final ONodeId deserializedNodeId = ONodeId.fromStream(content, 0);
Assert.assertEquals(nodeId, deserializedNodeId);
}
public void testFromStreamZero() {
final ONodeId nodeId = ONodeId.parseString("0");
final byte[] content = nodeId.toStream();
final ONodeId deserializedNodeId = ONodeId.fromStream(content, 0);
Assert.assertEquals(nodeId, deserializedNodeId);
}
public void testFromStreamPositiveWithOffset() {
final ONodeId nodeId = ONodeId.parseString("1343412555467812");
final byte[] content = nodeId.toStream();
final byte[] contentWithOffset = new byte[content.length + 10];
System.arraycopy(content, 0, contentWithOffset, 5, content.length);
final ONodeId deserializedNodeId = ONodeId.fromStream(contentWithOffset, 5);
Assert.assertEquals(nodeId, deserializedNodeId);
}
public void testFromStreamNegativeWithOffset() {
final ONodeId nodeId = ONodeId.parseString("-1343412555467812");
final byte[] content = nodeId.toStream();
final byte[] contentWithOffset = new byte[content.length + 10];
System.arraycopy(content, 0, contentWithOffset, 5, content.length);
final ONodeId deserializedNodeId = ONodeId.fromStream(contentWithOffset, 5);
Assert.assertEquals(nodeId, deserializedNodeId);
}
public void testFromStreamZeroWithOffset() {
final ONodeId nodeId = ONodeId.parseString("0");
final byte[] content = nodeId.toStream();
final byte[] contentWithOffset = new byte[content.length + 10];
System.arraycopy(content, 0, contentWithOffset, 5, content.length);
final ONodeId deserializedNodeId = ONodeId.fromStream(contentWithOffset, 5);
Assert.assertEquals(nodeId, deserializedNodeId);
}
public void testCompareToRIDNodeIdCompatibility() {
final TreeSet<ONodeId> nodeIds = new TreeSet<ONodeId>();
final TreeSet<ORecordId> recordIds = new TreeSet<ORecordId>();
for (int i = 0; i < 10000; i++) {
final ONodeId nodeId = ONodeId.generateUniqueId();
nodeIds.add(nodeId);
recordIds.add(new ORecordId(1, new OClusterPositionNodeId(nodeId)));
}
final Iterator<ORecordId> recordIdIterator = recordIds.iterator();
for (final ONodeId nodeId : nodeIds) {
final ORecordId recordId = recordIdIterator.next();
Assert.assertEquals(recordId, new ORecordId(1, new OClusterPositionNodeId(nodeId)));
}
Assert.assertFalse(recordIdIterator.hasNext());
}
public void testNodeIdSerialization() throws Exception {
final ByteArrayOutputStream out = new ByteArrayOutputStream();
final ObjectOutputStream objectOutputStream = new ObjectOutputStream(out);
final List<ONodeId> serializedNodes = new ArrayList<ONodeId>();
for (int i = 0; i < 10000; i++) {
final ONodeId nodeId = ONodeId.generateUniqueId();
objectOutputStream.writeObject(nodeId);
serializedNodes.add(nodeId);
}
objectOutputStream.close();
byte[] serializedContent = out.toByteArray();
final ByteArrayInputStream in = new ByteArrayInputStream(serializedContent);
final ObjectInputStream objectInputStream = new ObjectInputStream(in);
for (ONodeId nodeId : serializedNodes) {
final ONodeId deserializedNodeId = (ONodeId) objectInputStream.readObject();
Assert.assertEquals(deserializedNodeId, nodeId);
}
Assert.assertEquals(objectInputStream.available(), 0);
}
} | 0true
| core_src_test_java_com_orientechnologies_orient_core_id_NodeIdTest.java |
2,042 | public class ClearOperation extends AbstractMapOperation implements BackupAwareOperation, PartitionAwareOperation {
boolean shouldBackup = true;
public ClearOperation() {
}
public ClearOperation(String name) {
super(name);
}
public void run() {
// near-cache clear will be called multiple times by each clear operation,
// but it's still preferred to send a separate operation to clear near-cache.
mapService.clearNearCache(name);
final RecordStore recordStore = mapService.getExistingRecordStore(getPartitionId(), name);
//if there is no recordStore, then there is nothing to clear.
if(recordStore == null) {
shouldBackup = false;
return;
}
recordStore.clear();
}
public boolean shouldBackup() {
return shouldBackup;
}
public int getSyncBackupCount() {
return mapService.getMapContainer(name).getBackupCount();
}
public int getAsyncBackupCount() {
return mapService.getMapContainer(name).getAsyncBackupCount();
}
@Override
public boolean returnsResponse() {
return true;
}
public Operation getBackupOperation() {
ClearBackupOperation clearBackupOperation = new ClearBackupOperation(name);
clearBackupOperation.setServiceName(SERVICE_NAME);
return clearBackupOperation;
}
@Override
public String toString() {
return "ClearOperation{" +
'}';
}
} | 0true
| hazelcast_src_main_java_com_hazelcast_map_operation_ClearOperation.java |
3,547 | public static class MultiFields {
public static MultiFields empty() {
return new MultiFields(Defaults.PATH_TYPE, ImmutableOpenMap.<String, Mapper>of());
}
public static class Builder {
private final ImmutableOpenMap.Builder<String, Mapper.Builder> mapperBuilders = ImmutableOpenMap.builder();
private ContentPath.Type pathType = Defaults.PATH_TYPE;
public Builder pathType(ContentPath.Type pathType) {
this.pathType = pathType;
return this;
}
public Builder add(Mapper.Builder builder) {
mapperBuilders.put(builder.name(), builder);
return this;
}
@SuppressWarnings("unchecked")
public MultiFields build(AbstractFieldMapper.Builder mainFieldBuilder, BuilderContext context) {
if (pathType == Defaults.PATH_TYPE && mapperBuilders.isEmpty()) {
return empty();
} else if (mapperBuilders.isEmpty()) {
return new MultiFields(pathType, ImmutableOpenMap.<String, Mapper>of());
} else {
ContentPath.Type origPathType = context.path().pathType();
context.path().pathType(pathType);
context.path().add(mainFieldBuilder.name());
ImmutableOpenMap.Builder mapperBuilders = this.mapperBuilders;
for (ObjectObjectCursor<String, Mapper.Builder> cursor : this.mapperBuilders) {
String key = cursor.key;
Mapper.Builder value = cursor.value;
mapperBuilders.put(key, value.build(context));
}
context.path().remove();
context.path().pathType(origPathType);
ImmutableOpenMap.Builder<String, Mapper> mappers = mapperBuilders.cast();
return new MultiFields(pathType, mappers.build());
}
}
}
private final ContentPath.Type pathType;
private volatile ImmutableOpenMap<String, Mapper> mappers;
public MultiFields(ContentPath.Type pathType, ImmutableOpenMap<String, Mapper> mappers) {
this.pathType = pathType;
this.mappers = mappers;
// we disable the all in multi-field mappers
for (ObjectCursor<Mapper> cursor : mappers.values()) {
Mapper mapper = cursor.value;
if (mapper instanceof AllFieldMapper.IncludeInAll) {
((AllFieldMapper.IncludeInAll) mapper).unsetIncludeInAll();
}
}
}
public void parse(AbstractFieldMapper mainField, ParseContext context) throws IOException {
if (mappers.isEmpty()) {
return;
}
ContentPath.Type origPathType = context.path().pathType();
context.path().pathType(pathType);
context.path().add(mainField.name());
for (ObjectCursor<Mapper> cursor : mappers.values()) {
cursor.value.parse(context);
}
context.path().remove();
context.path().pathType(origPathType);
}
// No need for locking, because locking is taken care of in ObjectMapper#merge and DocumentMapper#merge
public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
AbstractFieldMapper mergeWithMultiField = (AbstractFieldMapper) mergeWith;
List<FieldMapper> newFieldMappers = null;
ImmutableOpenMap.Builder<String, Mapper> newMappersBuilder = null;
for (ObjectCursor<Mapper> cursor : mergeWithMultiField.multiFields.mappers.values()) {
Mapper mergeWithMapper = cursor.value;
Mapper mergeIntoMapper = mappers.get(mergeWithMapper.name());
if (mergeIntoMapper == null) {
// no mapping, simply add it if not simulating
if (!mergeContext.mergeFlags().simulate()) {
// we disable the all in multi-field mappers
if (mergeWithMapper instanceof AllFieldMapper.IncludeInAll) {
((AllFieldMapper.IncludeInAll) mergeWithMapper).unsetIncludeInAll();
}
if (newMappersBuilder == null) {
newMappersBuilder = ImmutableOpenMap.builder(mappers);
}
newMappersBuilder.put(mergeWithMapper.name(), mergeWithMapper);
if (mergeWithMapper instanceof AbstractFieldMapper) {
if (newFieldMappers == null) {
newFieldMappers = new ArrayList<FieldMapper>(2);
}
newFieldMappers.add((FieldMapper) mergeWithMapper);
}
}
} else {
mergeIntoMapper.merge(mergeWithMapper, mergeContext);
}
}
// first add all field mappers
if (newFieldMappers != null) {
mergeContext.docMapper().addFieldMappers(newFieldMappers);
}
// now publish mappers
if (newMappersBuilder != null) {
mappers = newMappersBuilder.build();
}
}
public void traverse(FieldMapperListener fieldMapperListener) {
for (ObjectCursor<Mapper> cursor : mappers.values()) {
cursor.value.traverse(fieldMapperListener);
}
}
public void close() {
for (ObjectCursor<Mapper> cursor : mappers.values()) {
cursor.value.close();
}
}
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
if (pathType != Defaults.PATH_TYPE) {
builder.field("path", pathType.name().toLowerCase(Locale.ROOT));
}
if (!mappers.isEmpty()) {
builder.startObject("fields");
for (ObjectCursor<Mapper> cursor : mappers.values()) {
cursor.value.toXContent(builder, params);
}
builder.endObject();
}
return builder;
}
} | 1no label
| src_main_java_org_elasticsearch_index_mapper_core_AbstractFieldMapper.java |
190 | public class KeyColumnValueStoreUtil {
public static void delete(KeyColumnValueStore store, StoreTransaction txn, long key, String col) throws BackendException {
StaticBuffer k = longToByteBuffer(key);
StaticBuffer c = stringToByteBuffer(col);
store.mutate(k, KeyColumnValueStore.NO_ADDITIONS, Arrays.asList(c), txn);
}
public static String get(KeyColumnValueStore store, StoreTransaction txn, long key, String col) throws BackendException {
StaticBuffer k = longToByteBuffer(key);
StaticBuffer c = stringToByteBuffer(col);
StaticBuffer valBytes = KCVSUtil.get(store, k, c, txn);
if (null == valBytes)
return null;
return byteBufferToString(valBytes);
}
public static void insert(KeyColumnValueStore store, StoreTransaction txn, long key, String col, String val) throws BackendException {
StaticBuffer k = longToByteBuffer(key);
StaticBuffer c = stringToByteBuffer(col);
StaticBuffer v = stringToByteBuffer(val);
store.mutate(k, Arrays.<Entry>asList(StaticArrayEntry.of(c, v)), KeyColumnValueStore.NO_DELETIONS, txn);
}
// TODO rename as "bufferToString" after syntax errors are resolved
public static String byteBufferToString(StaticBuffer b) {
try {
ByteBuffer bb = b.asByteBuffer();
return new String(bb.array(), bb.position() + bb.arrayOffset(), bb.remaining(), "UTF-8");
} catch (UnsupportedEncodingException e) {
throw new RuntimeException(e);
}
}
// TODO rename as "stringToBuffer" after syntax errors are resolved
public static StaticBuffer stringToByteBuffer(String s) {
byte[] b;
try {
b = s.getBytes("UTF-8");
} catch (UnsupportedEncodingException e) {
throw new RuntimeException(e);
}
ByteBuffer bb = ByteBuffer.allocate(b.length);
bb.put(b);
bb.flip();
return StaticArrayBuffer.of(bb);
}
// TODO rename as "longToBuffer" after syntax errors are resolved
public static StaticBuffer longToByteBuffer(long l) {
return new WriteByteBuffer(8).putLong(l).getStaticBuffer();
}
public static long bufferToLong(StaticBuffer b) {
return b.getLong(0);
}
} | 0true
| titan-test_src_main_java_com_thinkaurelius_titan_diskstorage_KeyColumnValueStoreUtil.java |
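The helpers above convert strings and longs to buffers and back. A self-contained sketch of the same round trip using only java.nio, without Titan's StaticBuffer type:

import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;

// Hypothetical round trip illustrating the helpers above: encode a String to
// UTF-8 into a flipped ByteBuffer, then decode it back using the same
// array offset/position arithmetic as byteBufferToString.
public final class BufferRoundTrip {
    static ByteBuffer stringToBuffer(String s) {
        byte[] bytes = s.getBytes(StandardCharsets.UTF_8);
        ByteBuffer bb = ByteBuffer.allocate(bytes.length);
        bb.put(bytes);
        bb.flip(); // position = 0, limit = length: ready for reading
        return bb;
    }

    static String bufferToString(ByteBuffer bb) {
        return new String(bb.array(), bb.position() + bb.arrayOffset(), bb.remaining(),
                StandardCharsets.UTF_8);
    }

    public static void main(String[] args) {
        System.out.println(bufferToString(stringToBuffer("row-key"))); // row-key
    }
}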
92 | public class StaticAssetStorageServiceImplTest extends TestCase {
/**
     * For example, if the URL is /product/myproductimage.jpg, then the MD5 would be
     * 35ec52a8dbd8cf3e2c650495001fe55f, resulting in the following file on the filesystem:
     * {assetFileSystemPath}/35/ec/myproductimage.jpg.
     *
     * If there is a "siteId" in the BroadleafRequestContext then the site is also distributed
     * using a similar algorithm, but the system attempts to keep images for sites in their own
     * directory, resulting in an extra two folders required to reach any given product. So, for
     * the site with id 125, the system hashes a site-specific token to build the extra
     * site directory ("site-125" under its two-character hash prefix).
     *
     * So, in this case, with the above product URL in site 125, the full path on the
     * filesystem will be:
     *
     * {assetFileSystemPath}/7f/site-125/35/ec/myproductimage.jpg.
* @throws Exception
*/
public void testGenerateStorageFileName() throws Exception {
StaticAssetStorageServiceImpl staticAssetStorageService = new StaticAssetStorageServiceImpl();
staticAssetStorageService.assetFileSystemPath = "/test";
staticAssetStorageService.assetServerMaxGeneratedDirectories = 2;
String fileName = staticAssetStorageService.generateStorageFileName("/product/myproductimage.jpg", false);
assertTrue(fileName.equals("/test/35/ec/myproductimage.jpg"));
BroadleafRequestContext brc = new BroadleafRequestContext();
BroadleafRequestContext.setBroadleafRequestContext(brc);
Site site = new SiteImpl();
site.setId(125L);
brc.setSite(site);
// try with site specific directory
fileName = staticAssetStorageService.generateStorageFileName("/product/myproductimage.jpg", false);
assertTrue(fileName.equals("/test/7f/site-125/35/ec/myproductimage.jpg"));
// try with 3 max generated directories
staticAssetStorageService.assetServerMaxGeneratedDirectories = 3;
fileName = staticAssetStorageService.generateStorageFileName("/product/myproductimage.jpg", false);
assertTrue(fileName.equals("/test/7f/site-125/35/ec/52/myproductimage.jpg"));
staticAssetStorageService.assetServerMaxGeneratedDirectories = 2;
fileName = staticAssetStorageService.generateStorageFileName("testwithoutslash", false);
assertTrue(fileName.equals("/test/7f/site-125/e9/90/testwithoutslash"));
}
/**
* Will throw an exception because the string being uploaded is too long.
* @throws Exception
*/
public void testUploadFileThatIsTooLarge() throws Exception {
StaticAssetStorageServiceImpl staticAssetStorageService = new StaticAssetStorageServiceImpl();
staticAssetStorageService.assetFileSystemPath = System.getProperty("java.io.tmpdir");
staticAssetStorageService.assetServerMaxGeneratedDirectories = 2;
String str = "This string is too long";
staticAssetStorageService.maxUploadableFileSize = str.length() - 1;
// convert String into InputStream
InputStream is = new ByteArrayInputStream(str.getBytes());
MockMultipartFile mpf = new MockMultipartFile("Test File", is);
StaticAsset staticAsset = new StaticAssetImpl();
staticAsset.setFileExtension(".jpg");
staticAsset.setFullUrl("/product/myproduct.jpg");
staticAsset.setStorageType(StorageType.FILESYSTEM);
// Remember this, we may need to delete this file.
String fileName = staticAssetStorageService.generateStorageFileName(staticAsset, false);
boolean exceptionThrown = false;
try {
staticAssetStorageService.createStaticAssetStorageFromFile(mpf, staticAsset);
} catch (Exception e) {
exceptionThrown = true;
}
assertTrue("Service call threw an exception", exceptionThrown);
File f = new File(staticAssetStorageService.assetFileSystemPath + fileName);
if (f.exists()) {
f.delete();
}
}
/**
* Tests uploading a file that is an allowed size.
* @throws Exception
*/
public void testUploadFileThatIsAllowedSize() throws Exception {
StaticAssetStorageServiceImpl staticAssetStorageService = new StaticAssetStorageServiceImpl();
staticAssetStorageService.assetFileSystemPath = System.getProperty("java.io.tmpdir");
staticAssetStorageService.assetServerMaxGeneratedDirectories = 2;
String str = "This string is not too long.";
staticAssetStorageService.maxUploadableFileSize = str.length();
// convert String into InputStream
InputStream is = new ByteArrayInputStream(str.getBytes());
MockMultipartFile mpf = new MockMultipartFile("Test File", is);
StaticAsset staticAsset = new StaticAssetImpl();
staticAsset.setFileExtension(".jpg");
staticAsset.setFullUrl("/product/myproduct.jpg");
staticAsset.setStorageType(StorageType.FILESYSTEM);
// Remember this, we may need to delete this file.
String fileName = staticAssetStorageService.generateStorageFileName(staticAsset, false);
boolean exceptionThrown = false;
try {
staticAssetStorageService.createStaticAssetStorageFromFile(mpf, staticAsset);
} catch (Exception e) {
exceptionThrown = true;
}
assertFalse("Service call threw an exception", exceptionThrown);
File f = new File(fileName);
if (f.exists()) {
f.delete();
}
}
} | 0true
| admin_broadleaf-contentmanagement-module_src_test_java_org_broadleafcommerce_cms_file_service_StaticAssetStorageServiceImplTest.java |
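A hedged sketch of the directory-sharding scheme the javadoc and assertions describe: hash the asset URL, then use two-character slices of the hex digest as nested directory names. This illustrates the idea only and is not Broadleaf's actual implementation:

import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;

// Builds {basePath}/{h0h1}/{h2h3}/.../{fileName} from the MD5 hex digest of
// the full URL, one two-character slice per generated directory level.
public final class AssetPathSharder {
    static String shardedPath(String basePath, String fullUrl, int maxDirs)
            throws NoSuchAlgorithmException {
        byte[] digest = MessageDigest.getInstance("MD5")
                .digest(fullUrl.getBytes(StandardCharsets.UTF_8));
        StringBuilder hex = new StringBuilder();
        for (byte b : digest) {
            hex.append(String.format("%02x", b));
        }
        StringBuilder path = new StringBuilder(basePath);
        for (int i = 0; i < maxDirs; i++) {
            path.append('/').append(hex, i * 2, i * 2 + 2); // two hex chars per level
        }
        String fileName = fullUrl.substring(fullUrl.lastIndexOf('/') + 1);
        return path.append('/').append(fileName).toString();
    }

    public static void main(String[] args) throws NoSuchAlgorithmException {
        // Given the javadoc's digest 35ec52a8... for "/product/myproductimage.jpg",
        // this yields /test/35/ec/myproductimage.jpg, matching the first assertion above.
        System.out.println(shardedPath("/test", "/product/myproductimage.jpg", 2));
    }
}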
323 | public class NodeInfo extends NodeOperationResponse {
@Nullable
private ImmutableMap<String, String> serviceAttributes;
private Version version;
private Build build;
@Nullable
private Settings settings;
@Nullable
private OsInfo os;
@Nullable
private ProcessInfo process;
@Nullable
private JvmInfo jvm;
@Nullable
private ThreadPoolInfo threadPool;
@Nullable
private NetworkInfo network;
@Nullable
private TransportInfo transport;
@Nullable
private HttpInfo http;
@Nullable
private PluginsInfo plugins;
NodeInfo() {
}
public NodeInfo(Version version, Build build, DiscoveryNode node, @Nullable ImmutableMap<String, String> serviceAttributes, @Nullable Settings settings,
@Nullable OsInfo os, @Nullable ProcessInfo process, @Nullable JvmInfo jvm, @Nullable ThreadPoolInfo threadPool, @Nullable NetworkInfo network,
@Nullable TransportInfo transport, @Nullable HttpInfo http, @Nullable PluginsInfo plugins) {
super(node);
this.version = version;
this.build = build;
this.serviceAttributes = serviceAttributes;
this.settings = settings;
this.os = os;
this.process = process;
this.jvm = jvm;
this.threadPool = threadPool;
this.network = network;
this.transport = transport;
this.http = http;
this.plugins = plugins;
}
/**
* System's hostname. <code>null</code> in case of UnknownHostException
*/
@Nullable
public String getHostname() {
return getNode().getHostName();
}
/**
* The current ES version
*/
public Version getVersion() {
return version;
}
/**
* The build version of the node.
*/
public Build getBuild() {
return this.build;
}
/**
* The service attributes of the node.
*/
@Nullable
public ImmutableMap<String, String> getServiceAttributes() {
return this.serviceAttributes;
}
/**
* The settings of the node.
*/
@Nullable
public Settings getSettings() {
return this.settings;
}
/**
* Operating System level information.
*/
@Nullable
public OsInfo getOs() {
return this.os;
}
/**
* Process level information.
*/
@Nullable
public ProcessInfo getProcess() {
return process;
}
/**
* JVM level information.
*/
@Nullable
public JvmInfo getJvm() {
return jvm;
}
@Nullable
public ThreadPoolInfo getThreadPool() {
return this.threadPool;
}
/**
* Network level information.
*/
@Nullable
public NetworkInfo getNetwork() {
return network;
}
@Nullable
public TransportInfo getTransport() {
return transport;
}
@Nullable
public HttpInfo getHttp() {
return http;
}
@Nullable
public PluginsInfo getPlugins() {
return this.plugins;
}
public static NodeInfo readNodeInfo(StreamInput in) throws IOException {
NodeInfo nodeInfo = new NodeInfo();
nodeInfo.readFrom(in);
return nodeInfo;
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
version = Version.readVersion(in);
build = Build.readBuild(in);
if (in.readBoolean()) {
ImmutableMap.Builder<String, String> builder = ImmutableMap.builder();
int size = in.readVInt();
for (int i = 0; i < size; i++) {
builder.put(in.readString(), in.readString());
}
serviceAttributes = builder.build();
}
if (in.readBoolean()) {
settings = ImmutableSettings.readSettingsFromStream(in);
}
if (in.readBoolean()) {
os = OsInfo.readOsInfo(in);
}
if (in.readBoolean()) {
process = ProcessInfo.readProcessInfo(in);
}
if (in.readBoolean()) {
jvm = JvmInfo.readJvmInfo(in);
}
if (in.readBoolean()) {
threadPool = ThreadPoolInfo.readThreadPoolInfo(in);
}
if (in.readBoolean()) {
network = NetworkInfo.readNetworkInfo(in);
}
if (in.readBoolean()) {
transport = TransportInfo.readTransportInfo(in);
}
if (in.readBoolean()) {
http = HttpInfo.readHttpInfo(in);
}
if (in.readBoolean()) {
plugins = PluginsInfo.readPluginsInfo(in);
}
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeVInt(version.id);
Build.writeBuild(build, out);
if (getServiceAttributes() == null) {
out.writeBoolean(false);
} else {
out.writeBoolean(true);
out.writeVInt(serviceAttributes.size());
for (Map.Entry<String, String> entry : serviceAttributes.entrySet()) {
out.writeString(entry.getKey());
out.writeString(entry.getValue());
}
}
if (settings == null) {
out.writeBoolean(false);
} else {
out.writeBoolean(true);
ImmutableSettings.writeSettingsToStream(settings, out);
}
if (os == null) {
out.writeBoolean(false);
} else {
out.writeBoolean(true);
os.writeTo(out);
}
if (process == null) {
out.writeBoolean(false);
} else {
out.writeBoolean(true);
process.writeTo(out);
}
if (jvm == null) {
out.writeBoolean(false);
} else {
out.writeBoolean(true);
jvm.writeTo(out);
}
if (threadPool == null) {
out.writeBoolean(false);
} else {
out.writeBoolean(true);
threadPool.writeTo(out);
}
if (network == null) {
out.writeBoolean(false);
} else {
out.writeBoolean(true);
network.writeTo(out);
}
if (transport == null) {
out.writeBoolean(false);
} else {
out.writeBoolean(true);
transport.writeTo(out);
}
if (http == null) {
out.writeBoolean(false);
} else {
out.writeBoolean(true);
http.writeTo(out);
}
if (plugins == null) {
out.writeBoolean(false);
} else {
out.writeBoolean(true);
plugins.writeTo(out);
}
}
} | 0true
| src_main_java_org_elasticsearch_action_admin_cluster_node_info_NodeInfo.java |
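NodeInfo's readFrom/writeTo pair writes a presence flag before every nullable section. A minimal stand-alone sketch of that wire pattern; the java.io stream types are simplified stand-ins for Elasticsearch's StreamInput/StreamOutput:

import java.io.*;

// Presence-flag encoding: a boolean precedes each optional field, and the
// reader consumes the payload only when the flag is true.
public final class OptionalFieldCodec {
    static void write(DataOutput out, String optional) throws IOException {
        if (optional == null) {
            out.writeBoolean(false);
        } else {
            out.writeBoolean(true);
            out.writeUTF(optional);
        }
    }

    static String read(DataInput in) throws IOException {
        return in.readBoolean() ? in.readUTF() : null;
    }

    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        write(new DataOutputStream(bytes), "os-info");
        String decoded = read(new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())));
        System.out.println(decoded); // os-info
    }
}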
1,590 | public class RebalanceOnlyWhenActiveAllocationDecider extends AllocationDecider {
@Inject
public RebalanceOnlyWhenActiveAllocationDecider(Settings settings) {
super(settings);
}
@Override
public Decision canRebalance(ShardRouting shardRouting, RoutingAllocation allocation) {
        // it's ok to check for active here, since during relocation a shard is split into two entries
        // in the routing nodes: one relocating and one initializing
if (!allocation.routingNodes().allReplicasActive(shardRouting)) {
return allocation.decision(Decision.NO, "not all replicas are active in cluster");
}
return allocation.decision(Decision.YES, "all replicas are active in cluster");
}
} | 0true
| src_main_java_org_elasticsearch_cluster_routing_allocation_decider_RebalanceOnlyWhenActiveAllocationDecider.java |
1,343 | Future future = es.submit("default", new Callable<String>() {
@Override
public String call() {
try {
latch1.await(30, TimeUnit.SECONDS);
return "success";
} catch (Exception e) {
throw new RuntimeException(e);
}
}
}); | 0true
| hazelcast_src_test_java_com_hazelcast_executor_ExecutorServiceTest.java |
1,818 | constructors[GET] = new ConstructorFunction<Integer, IdentifiedDataSerializable>() {
public IdentifiedDataSerializable createNew(Integer arg) {
return new GetOperation();
}
}; | 0true
| hazelcast_src_main_java_com_hazelcast_map_MapDataSerializerHook.java |
4,677 | final static class Count extends QueryCollector {
private long counter = 0;
Count(ESLogger logger, PercolateContext context) {
super(logger, context);
}
@Override
public void collect(int doc) throws IOException {
final Query query = getQuery(doc);
if (query == null) {
                // no percolate query registered for this doc id; skip it (TODO: log?)
return;
}
// run the query
try {
collector.reset();
searcher.search(query, collector);
if (collector.exists()) {
counter++;
if (facetAndAggregatorCollector != null) {
facetAndAggregatorCollector.collect(doc);
}
}
} catch (IOException e) {
logger.warn("[" + spare.bytes.utf8ToString() + "] failed to execute query", e);
}
}
long counter() {
return counter;
}
} | 1no label
| src_main_java_org_elasticsearch_percolator_QueryCollector.java |
3,727 | public static enum MatchType {
SIMPLE,
REGEX;
public static MatchType fromString(String value) {
if ("simple".equals(value)) {
return SIMPLE;
} else if ("regex".equals(value)) {
return REGEX;
}
throw new ElasticsearchIllegalArgumentException("No matching pattern matched on [" + value + "]");
}
} | 0true
| src_main_java_org_elasticsearch_index_mapper_object_DynamicTemplate.java |
228 | assertTrueEventually(new AssertTask() {
public void run() throws Exception {
assertEquals(1, map.size());
}
}); | 0true
| hazelcast-client_src_test_java_com_hazelcast_client_executor_ClientExecutorServiceExecuteTest.java |
561 | public class PutMappingAction extends IndicesAction<PutMappingRequest, PutMappingResponse, PutMappingRequestBuilder> {
public static final PutMappingAction INSTANCE = new PutMappingAction();
public static final String NAME = "indices/mapping/put";
private PutMappingAction() {
super(NAME);
}
@Override
public PutMappingResponse newResponse() {
return new PutMappingResponse();
}
@Override
public PutMappingRequestBuilder newRequestBuilder(IndicesAdminClient client) {
return new PutMappingRequestBuilder(client);
}
} | 0true
| src_main_java_org_elasticsearch_action_admin_indices_mapping_put_PutMappingAction.java |
1,830 | @Service("blEntityValidatorService")
public class EntityValidatorServiceImpl implements EntityValidatorService, ApplicationContextAware {
@Resource(name = "blGlobalEntityPropertyValidators")
protected List<GlobalPropertyValidator> globalEntityValidators;
protected ApplicationContext applicationContext;
@Override
public void validate(Entity entity, Serializable instance, Map<String, FieldMetadata> propertiesMetadata,
boolean validateUnsubmittedProperties) {
List<String> types = getTypeHierarchy(entity);
//validate each individual property according to their validation configuration
for (Entry<String, FieldMetadata> metadataEntry : propertiesMetadata.entrySet()) {
FieldMetadata metadata = metadataEntry.getValue();
//Don't test this field if it was not inherited from our polymorphic type (or supertype)
if (types.contains(metadata.getInheritedFromType())) {
Property property = entity.getPMap().get(metadataEntry.getKey());
// This property should be set to false only in the case where we are adding a member to a collection
                // whose type is a lookup. In this case, we don't have the properties from the target in our entity,
// and we don't need to validate them.
if (!validateUnsubmittedProperties && property == null) {
continue;
}
//for radio buttons, it's possible that the entity property was never populated in the first place from the POST
//and so it will be null
String propertyName = metadataEntry.getKey();
String propertyValue = (property == null) ? null : property.getValue();
if (metadata instanceof BasicFieldMetadata) {
//First execute the global field validators
if (CollectionUtils.isNotEmpty(globalEntityValidators)) {
for (GlobalPropertyValidator validator : globalEntityValidators) {
PropertyValidationResult result = validator.validate(entity,
instance,
propertiesMetadata,
(BasicFieldMetadata)metadata,
propertyName,
propertyValue);
if (!result.isValid()) {
entity.addValidationError(propertyName, result.getErrorMessage());
}
}
}
//Now execute the validators configured for this particular field
Map<String, Map<String, String>> validations =
((BasicFieldMetadata) metadata).getValidationConfigurations();
for (Map.Entry<String, Map<String, String>> validation : validations.entrySet()) {
String validationImplementation = validation.getKey();
Map<String, String> configuration = validation.getValue();
PropertyValidator validator = null;
//attempt bean resolution to find the validator
if (applicationContext.containsBean(validationImplementation)) {
validator = applicationContext.getBean(validationImplementation, PropertyValidator.class);
}
//not a bean, attempt to instantiate the class
if (validator == null) {
try {
validator = (PropertyValidator) Class.forName(validationImplementation).newInstance();
} catch (Exception e) {
//do nothing
}
}
if (validator == null) {
throw new PersistenceException("Could not find validator: " + validationImplementation +
" for property: " + propertyName);
}
PropertyValidationResult result = validator.validate(entity,
instance,
propertiesMetadata,
configuration,
(BasicFieldMetadata)metadata,
propertyName,
propertyValue);
if (!result.isValid()) {
entity.addValidationError(propertyName, result.getErrorMessage());
}
}
}
}
}
}
protected List<String> getTypeHierarchy(Entity entity) {
List<String> types = new ArrayList<String>();
Class<?> myType;
try {
myType = Class.forName(entity.getType()[0]);
} catch (ClassNotFoundException e) {
throw new RuntimeException(e);
}
types.add(myType.getName());
boolean eof = false;
while (!eof) {
myType = myType.getSuperclass();
if (myType != null && !myType.getName().equals(Object.class.getName())) {
types.add(myType.getName());
} else {
eof = true;
}
}
return types;
}
@Override
public List<GlobalPropertyValidator> getGlobalEntityValidators() {
return globalEntityValidators;
}
@Override
public void setGlobalEntityValidators(List<GlobalPropertyValidator> globalEntityValidators) {
this.globalEntityValidators = globalEntityValidators;
}
@Override
public void setApplicationContext(ApplicationContext applicationContext) throws BeansException {
this.applicationContext = applicationContext;
}
} | 1no label
| admin_broadleaf-open-admin-platform_src_main_java_org_broadleafcommerce_openadmin_server_service_persistence_validation_EntityValidatorServiceImpl.java |
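The validator lookup above tries bean resolution first and falls back to reflective instantiation of the configured class name. A sketch of that resolve-then-instantiate fallback, with a plain Map standing in for the Spring ApplicationContext:

import java.util.Map;

// Resolve-then-instantiate: check the registry first, then reflectively
// construct the class; a null result after both attempts is an error.
final class ValidatorResolver {
    static Object resolve(Map<String, Object> registry, String impl) {
        Object validator = registry.get(impl); // bean lookup stand-in
        if (validator == null) {
            try {
                validator = Class.forName(impl).getDeclaredConstructor().newInstance();
            } catch (Exception e) {
                // swallowed, as in the original; null is handled below
            }
        }
        if (validator == null) {
            throw new IllegalStateException("Could not find validator: " + impl);
        }
        return validator;
    }
}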
723 | ItemListener listener = new ItemListener() {
public void itemAdded(ItemEvent item) {
latchAdd.countDown();
}
public void itemRemoved(ItemEvent item) {
latchRemove.countDown();
}
}; | 0true
| hazelcast_src_test_java_com_hazelcast_collection_ListTest.java |
2,194 | public class MultiPhrasePrefixQueryTests extends ElasticsearchTestCase {
@Test
public void simpleTests() throws Exception {
IndexWriter writer = new IndexWriter(new RAMDirectory(), new IndexWriterConfig(Lucene.VERSION, Lucene.STANDARD_ANALYZER));
Document doc = new Document();
doc.add(new Field("field", "aaa bbb ccc ddd", TextField.TYPE_NOT_STORED));
writer.addDocument(doc);
IndexReader reader = DirectoryReader.open(writer, true);
IndexSearcher searcher = new IndexSearcher(reader);
MultiPhrasePrefixQuery query = new MultiPhrasePrefixQuery();
query.add(new Term("field", "aa"));
assertThat(Lucene.count(searcher, query), equalTo(1l));
query = new MultiPhrasePrefixQuery();
query.add(new Term("field", "aaa"));
query.add(new Term("field", "bb"));
assertThat(Lucene.count(searcher, query), equalTo(1l));
query = new MultiPhrasePrefixQuery();
query.setSlop(1);
query.add(new Term("field", "aaa"));
query.add(new Term("field", "cc"));
assertThat(Lucene.count(searcher, query), equalTo(1l));
query = new MultiPhrasePrefixQuery();
query.setSlop(1);
query.add(new Term("field", "xxx"));
assertThat(Lucene.count(searcher, query), equalTo(0l));
}
} | 0true
| src_test_java_org_elasticsearch_common_lucene_search_MultiPhrasePrefixQueryTests.java |
2,975 | public class IndexCache extends AbstractIndexComponent implements CloseableComponent, ClusterStateListener {
private final FilterCache filterCache;
private final QueryParserCache queryParserCache;
private final IdCache idCache;
private final DocSetCache docSetCache;
private ClusterService clusterService;
@Inject
public IndexCache(Index index, @IndexSettings Settings indexSettings, FilterCache filterCache, QueryParserCache queryParserCache, IdCache idCache,
DocSetCache docSetCache) {
super(index, indexSettings);
this.filterCache = filterCache;
this.queryParserCache = queryParserCache;
this.idCache = idCache;
this.docSetCache = docSetCache;
}
@Inject(optional = true)
public void setClusterService(@Nullable ClusterService clusterService) {
this.clusterService = clusterService;
if (clusterService != null) {
clusterService.add(this);
}
}
public FilterCache filter() {
return filterCache;
}
public DocSetCache docSet() {
return this.docSetCache;
}
public IdCache idCache() {
return this.idCache;
}
public QueryParserCache queryParserCache() {
return this.queryParserCache;
}
@Override
public void close() throws ElasticsearchException {
filterCache.close();
idCache.close();
queryParserCache.close();
docSetCache.clear("close");
if (clusterService != null) {
clusterService.remove(this);
}
}
public void clear(IndexReader reader) {
filterCache.clear(reader);
idCache.clear(reader);
docSetCache.clear(reader);
}
public void clear(String reason) {
filterCache.clear(reason);
idCache.clear();
queryParserCache.clear();
docSetCache.clear(reason);
}
@Override
public void clusterChanged(ClusterChangedEvent event) {
// clear the query parser cache if the metadata (mappings) changed...
if (event.metaDataChanged()) {
queryParserCache.clear();
}
}
} | 0true
| src_main_java_org_elasticsearch_index_cache_IndexCache.java |
211 | protected class NavigateNextSubWordAction extends NextSubWordAction {
/**
* Creates a new navigate next sub-word action.
*/
public NavigateNextSubWordAction() {
super(ST.WORD_NEXT);
}
@Override
protected void setCaretPosition(final int position) {
getTextWidget().setCaretOffset(modelOffset2WidgetOffset(getSourceViewer(), position));
}
} | 0true
| plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_editor_CeylonEditor.java |
60 | public class TestTxLogMonitoring
{
private static final TargetDirectory target = TargetDirectory.forTest( TestTxLogMonitoring.class );
@Test
public void shouldCountBytesWritten() throws Exception
{
// Given
File directory = target.cleanDirectory( "shouldCountBytesWritten" );
File theLogFile = new File( directory, "theLog" );
Monitors monitors = new Monitors();
TxLog txLog = new TxLog( theLogFile, new DefaultFileSystemAbstraction(), monitors );
final AtomicLong bytesWritten = new AtomicLong();
monitors.addMonitorListener( new ByteCounterMonitor()
{
@Override
public void bytesWritten( long numberOfBytes )
{
bytesWritten.addAndGet( numberOfBytes );
}
@Override
public void bytesRead( long numberOfBytes )
{
}
}, TxLog.class.getName() );
byte[] globalId = {1, 2, 3};
// When
txLog.txStart( globalId );
txLog.addBranch( globalId, new byte[]{4,5,6} );
txLog.close();
// Then
assertTrue( bytesWritten.get() > 0 );
assertEquals( theLogFile.length(), bytesWritten.get() );
}
} | 0true
| community_kernel_src_test_java_org_neo4j_kernel_impl_transaction_TestTxLogMonitoring.java |
698 | public class BulkRequestTests extends ElasticsearchTestCase {
@Test
public void testSimpleBulk1() throws Exception {
String bulkAction = copyToStringFromClasspath("/org/elasticsearch/action/bulk/simple-bulk.json");
// translate Windows line endings (\r\n) to standard ones (\n)
if (Constants.WINDOWS) {
bulkAction = Strings.replace(bulkAction, "\r\n", "\n");
}
BulkRequest bulkRequest = new BulkRequest();
bulkRequest.add(bulkAction.getBytes(Charsets.UTF_8), 0, bulkAction.length(), true, null, null);
assertThat(bulkRequest.numberOfActions(), equalTo(3));
assertThat(((IndexRequest) bulkRequest.requests().get(0)).source().toBytes(), equalTo(new BytesArray("{ \"field1\" : \"value1\" }").toBytes()));
assertThat(bulkRequest.requests().get(1), instanceOf(DeleteRequest.class));
assertThat(((IndexRequest) bulkRequest.requests().get(2)).source().toBytes(), equalTo(new BytesArray("{ \"field1\" : \"value3\" }").toBytes()));
}
@Test
public void testSimpleBulk2() throws Exception {
String bulkAction = copyToStringFromClasspath("/org/elasticsearch/action/bulk/simple-bulk2.json");
BulkRequest bulkRequest = new BulkRequest();
bulkRequest.add(bulkAction.getBytes(Charsets.UTF_8), 0, bulkAction.length(), true, null, null);
assertThat(bulkRequest.numberOfActions(), equalTo(3));
}
@Test
public void testSimpleBulk3() throws Exception {
String bulkAction = copyToStringFromClasspath("/org/elasticsearch/action/bulk/simple-bulk3.json");
BulkRequest bulkRequest = new BulkRequest();
bulkRequest.add(bulkAction.getBytes(Charsets.UTF_8), 0, bulkAction.length(), true, null, null);
assertThat(bulkRequest.numberOfActions(), equalTo(3));
}
@Test
public void testSimpleBulk4() throws Exception {
String bulkAction = copyToStringFromClasspath("/org/elasticsearch/action/bulk/simple-bulk4.json");
BulkRequest bulkRequest = new BulkRequest();
bulkRequest.add(bulkAction.getBytes(Charsets.UTF_8), 0, bulkAction.length(), true, null, null);
assertThat(bulkRequest.numberOfActions(), equalTo(4));
assertThat(((UpdateRequest) bulkRequest.requests().get(0)).id(), equalTo("1"));
assertThat(((UpdateRequest) bulkRequest.requests().get(0)).retryOnConflict(), equalTo(2));
assertThat(((UpdateRequest) bulkRequest.requests().get(0)).doc().source().toUtf8(), equalTo("{\"field\":\"value\"}"));
assertThat(((UpdateRequest) bulkRequest.requests().get(1)).id(), equalTo("0"));
assertThat(((UpdateRequest) bulkRequest.requests().get(1)).type(), equalTo("type1"));
assertThat(((UpdateRequest) bulkRequest.requests().get(1)).index(), equalTo("index1"));
assertThat(((UpdateRequest) bulkRequest.requests().get(1)).script(), equalTo("counter += param1"));
assertThat(((UpdateRequest) bulkRequest.requests().get(1)).scriptLang(), equalTo("js"));
assertThat(((UpdateRequest) bulkRequest.requests().get(1)).scriptParams().size(), equalTo(1));
assertThat(((Integer) ((UpdateRequest) bulkRequest.requests().get(1)).scriptParams().get("param1")), equalTo(1));
assertThat(((UpdateRequest) bulkRequest.requests().get(1)).upsertRequest().source().toUtf8(), equalTo("{\"counter\":1}"));
}
@Test
public void testBulkAllowExplicitIndex() throws Exception {
String bulkAction = copyToStringFromClasspath("/org/elasticsearch/action/bulk/simple-bulk.json");
try {
new BulkRequest().add(new BytesArray(bulkAction.getBytes(Charsets.UTF_8)), true, null, null, false);
fail();
} catch (Exception e) {
}
bulkAction = copyToStringFromClasspath("/org/elasticsearch/action/bulk/simple-bulk5.json");
new BulkRequest().add(new BytesArray(bulkAction.getBytes(Charsets.UTF_8)), true, "test", null, false);
}
} | 0true
| src_test_java_org_elasticsearch_action_bulk_BulkRequestTests.java |
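For context, the newline-delimited payloads these tests parse pair an action metadata line with an optional source line. The values below are hypothetical, not the exact contents of simple-bulk.json:

// Illustrative bulk payload: an index action with its source line, then a
// delete action (which carries no source). Each line ends with a newline.
public final class BulkPayloadExample {
    static final String PAYLOAD =
            "{ \"index\"  : { \"_index\" : \"test\", \"_type\" : \"type1\", \"_id\" : \"1\" } }\n"
            + "{ \"field1\" : \"value1\" }\n"
            + "{ \"delete\" : { \"_index\" : \"test\", \"_type\" : \"type1\", \"_id\" : \"2\" } }\n";

    public static void main(String[] args) {
        System.out.print(PAYLOAD); // two actions: index (with source) and delete
    }
}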
3,610 | final class TransactionImpl implements Transaction, TransactionSupport {
private static final ThreadLocal<Boolean> THREAD_FLAG = new ThreadLocal<Boolean>();
private static final int ROLLBACK_TIMEOUT_MINUTES = 5;
private static final int COMMIT_TIMEOUT_MINUTES = 5;
private final TransactionManagerServiceImpl transactionManagerService;
private final NodeEngine nodeEngine;
private final List<TransactionLog> txLogs = new LinkedList<TransactionLog>();
private final Map<Object, TransactionLog> txLogMap = new HashMap<Object, TransactionLog>();
private final String txnId;
private Long threadId;
private final long timeoutMillis;
private final int durability;
private final TransactionType transactionType;
private final String txOwnerUuid;
private final boolean checkThreadAccess;
private State state = NO_TXN;
private long startTime;
private Address[] backupAddresses;
private SerializableXID xid;
public TransactionImpl(TransactionManagerServiceImpl transactionManagerService, NodeEngine nodeEngine,
TransactionOptions options, String txOwnerUuid) {
this.transactionManagerService = transactionManagerService;
this.nodeEngine = nodeEngine;
this.txnId = UuidUtil.buildRandomUuidString();
this.timeoutMillis = options.getTimeoutMillis();
this.durability = options.getDurability();
this.transactionType = options.getTransactionType();
this.txOwnerUuid = txOwnerUuid == null ? nodeEngine.getLocalMember().getUuid() : txOwnerUuid;
this.checkThreadAccess = txOwnerUuid != null;
}
// used by tx backups
TransactionImpl(TransactionManagerServiceImpl transactionManagerService, NodeEngine nodeEngine,
String txnId, List<TransactionLog> txLogs, long timeoutMillis, long startTime, String txOwnerUuid) {
this.transactionManagerService = transactionManagerService;
this.nodeEngine = nodeEngine;
this.txnId = txnId;
this.timeoutMillis = timeoutMillis;
this.startTime = startTime;
this.durability = 0;
this.transactionType = TransactionType.TWO_PHASE;
this.txLogs.addAll(txLogs);
this.state = PREPARED;
this.txOwnerUuid = txOwnerUuid;
this.checkThreadAccess = false;
}
public void setXid(SerializableXID xid) {
this.xid = xid;
}
public SerializableXID getXid() {
return xid;
}
@Override
public String getTxnId() {
return txnId;
}
public TransactionType getTransactionType() {
return transactionType;
}
@Override
public void addTransactionLog(TransactionLog transactionLog) {
if (state != Transaction.State.ACTIVE) {
throw new TransactionNotActiveException("Transaction is not active!");
}
checkThread();
        // there should be just one tx log per key, so if an older one exists we remove it
if (transactionLog instanceof KeyAwareTransactionLog) {
KeyAwareTransactionLog keyAwareTransactionLog = (KeyAwareTransactionLog) transactionLog;
TransactionLog removed = txLogMap.remove(keyAwareTransactionLog.getKey());
if (removed != null) {
txLogs.remove(removed);
}
}
txLogs.add(transactionLog);
if (transactionLog instanceof KeyAwareTransactionLog) {
KeyAwareTransactionLog keyAwareTransactionLog = (KeyAwareTransactionLog) transactionLog;
txLogMap.put(keyAwareTransactionLog.getKey(), keyAwareTransactionLog);
}
}
public TransactionLog getTransactionLog(Object key) {
return txLogMap.get(key);
}
public List<TransactionLog> getTxLogs() {
return txLogs;
}
public void removeTransactionLog(Object key) {
TransactionLog removed = txLogMap.remove(key);
if (removed != null) {
txLogs.remove(removed);
}
}
private void checkThread() {
if (!checkThreadAccess && threadId != null && threadId.longValue() != Thread.currentThread().getId()) {
throw new IllegalStateException("Transaction cannot span multiple threads!");
}
}
public void begin() throws IllegalStateException {
if (state == ACTIVE) {
throw new IllegalStateException("Transaction is already active");
}
if (THREAD_FLAG.get() != null) {
throw new IllegalStateException("Nested transactions are not allowed!");
}
//init caller thread
        if (threadId == null) {
threadId = Thread.currentThread().getId();
setThreadFlag(Boolean.TRUE);
}
startTime = Clock.currentTimeMillis();
backupAddresses = transactionManagerService.pickBackupAddresses(durability);
if (durability > 0 && backupAddresses != null && transactionType == TransactionType.TWO_PHASE) {
List<Future> futures = startTxBackup();
awaitTxBackupCompletion(futures);
}
state = ACTIVE;
}
private void awaitTxBackupCompletion(List<Future> futures) {
for (Future future : futures) {
try {
future.get(timeoutMillis, TimeUnit.MILLISECONDS);
} catch (MemberLeftException e) {
nodeEngine.getLogger(Transaction.class).warning("Member left while replicating tx begin: " + e);
} catch (Throwable e) {
if (e instanceof ExecutionException) {
e = e.getCause() != null ? e.getCause() : e;
}
if (e instanceof TargetNotMemberException) {
nodeEngine.getLogger(Transaction.class).warning("Member left while replicating tx begin: " + e);
} else {
throw ExceptionUtil.rethrow(e);
}
}
}
}
private List<Future> startTxBackup() {
final OperationService operationService = nodeEngine.getOperationService();
List<Future> futures = new ArrayList<Future>(backupAddresses.length);
for (Address backupAddress : backupAddresses) {
if (nodeEngine.getClusterService().getMember(backupAddress) != null) {
final Future f = operationService.invokeOnTarget(TransactionManagerServiceImpl.SERVICE_NAME,
new BeginTxBackupOperation(txOwnerUuid, txnId, xid), backupAddress);
futures.add(f);
}
}
return futures;
}
private void setThreadFlag(Boolean flag) {
if (!checkThreadAccess) {
THREAD_FLAG.set(flag);
}
}
public void prepare() throws TransactionException {
if (state != ACTIVE) {
throw new TransactionNotActiveException("Transaction is not active");
}
checkThread();
checkTimeout();
try {
final List<Future> futures = new ArrayList<Future>(txLogs.size());
state = PREPARING;
for (TransactionLog txLog : txLogs) {
futures.add(txLog.prepare(nodeEngine));
}
for (Future future : futures) {
future.get(timeoutMillis, TimeUnit.MILLISECONDS);
}
futures.clear();
state = PREPARED;
if (durability > 0) {
replicateTxnLog();
}
} catch (Throwable e) {
throw ExceptionUtil.rethrow(e, TransactionException.class);
}
}
private void replicateTxnLog() throws InterruptedException, ExecutionException, java.util.concurrent.TimeoutException {
final List<Future> futures = new ArrayList<Future>(txLogs.size());
final OperationService operationService = nodeEngine.getOperationService();
for (Address backupAddress : backupAddresses) {
if (nodeEngine.getClusterService().getMember(backupAddress) != null) {
final Future f = operationService.invokeOnTarget(TransactionManagerServiceImpl.SERVICE_NAME,
new ReplicateTxOperation(txLogs, txOwnerUuid, txnId, timeoutMillis, startTime),
backupAddress);
futures.add(f);
}
}
for (Future future : futures) {
future.get(timeoutMillis, TimeUnit.MILLISECONDS);
}
futures.clear();
}
public void commit() throws TransactionException, IllegalStateException {
try {
if (transactionType.equals(TransactionType.TWO_PHASE) && state != PREPARED) {
throw new IllegalStateException("Transaction is not prepared");
}
if (transactionType.equals(TransactionType.LOCAL) && state != ACTIVE) {
throw new IllegalStateException("Transaction is not active");
}
checkThread();
checkTimeout();
try {
final List<Future> futures = new ArrayList<Future>(txLogs.size());
state = COMMITTING;
for (TransactionLog txLog : txLogs) {
futures.add(txLog.commit(nodeEngine));
}
for (Future future : futures) {
try {
future.get(COMMIT_TIMEOUT_MINUTES, TimeUnit.MINUTES);
} catch (Throwable e) {
nodeEngine.getLogger(getClass()).warning("Error during commit!", e);
}
}
state = COMMITTED;
// purge tx backup
purgeTxBackups();
} catch (Throwable e) {
state = COMMIT_FAILED;
throw ExceptionUtil.rethrow(e, TransactionException.class);
}
} finally {
setThreadFlag(null);
}
}
private void checkTimeout() throws TransactionException {
if (startTime + timeoutMillis < Clock.currentTimeMillis()) {
throw new TransactionException("Transaction is timed-out!");
}
}
public void rollback() throws IllegalStateException {
try {
if (state == NO_TXN || state == ROLLED_BACK) {
throw new IllegalStateException("Transaction is not active");
}
checkThread();
state = ROLLING_BACK;
try {
rollbackTxBackup();
final List<Future> futures = new ArrayList<Future>(txLogs.size());
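                // roll back tx logs in reverse order of registration, newest first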
final ListIterator<TransactionLog> iter = txLogs.listIterator(txLogs.size());
while (iter.hasPrevious()) {
final TransactionLog txLog = iter.previous();
futures.add(txLog.rollback(nodeEngine));
}
for (Future future : futures) {
try {
future.get(ROLLBACK_TIMEOUT_MINUTES, TimeUnit.MINUTES);
} catch (Throwable e) {
nodeEngine.getLogger(getClass()).warning("Error during rollback!", e);
}
}
// purge tx backup
purgeTxBackups();
} catch (Throwable e) {
throw ExceptionUtil.rethrow(e);
} finally {
state = ROLLED_BACK;
}
} finally {
setThreadFlag(null);
}
}
private void rollbackTxBackup() {
final OperationService operationService = nodeEngine.getOperationService();
final List<Future> futures = new ArrayList<Future>(txLogs.size());
// rollback tx backup
if (durability > 0 && transactionType.equals(TransactionType.TWO_PHASE)) {
for (Address backupAddress : backupAddresses) {
if (nodeEngine.getClusterService().getMember(backupAddress) != null) {
final Future f = operationService.invokeOnTarget(TransactionManagerServiceImpl.SERVICE_NAME,
new RollbackTxBackupOperation(txnId), backupAddress);
futures.add(f);
}
}
for (Future future : futures) {
try {
future.get(timeoutMillis, TimeUnit.MILLISECONDS);
} catch (Throwable e) {
nodeEngine.getLogger(getClass()).warning("Error during tx rollback backup!", e);
}
}
futures.clear();
}
}
public void setRollbackOnly() {
state = ROLLING_BACK;
}
private void purgeTxBackups() {
if (durability > 0 && transactionType.equals(TransactionType.TWO_PHASE)) {
final OperationService operationService = nodeEngine.getOperationService();
for (Address backupAddress : backupAddresses) {
if (nodeEngine.getClusterService().getMember(backupAddress) != null) {
try {
operationService.invokeOnTarget(TransactionManagerServiceImpl.SERVICE_NAME,
new PurgeTxBackupOperation(txnId), backupAddress);
} catch (Throwable e) {
nodeEngine.getLogger(getClass()).warning("Error during purging backups!", e);
}
}
}
}
}
public long getStartTime() {
return startTime;
}
public String getOwnerUuid() {
return txOwnerUuid;
}
public State getState() {
return state;
}
public long getTimeoutMillis() {
return timeoutMillis;
}
@Override
public String toString() {
final StringBuilder sb = new StringBuilder();
sb.append("Transaction");
sb.append("{txnId='").append(txnId).append('\'');
sb.append(", state=").append(state);
sb.append(", txType=").append(transactionType);
sb.append(", timeoutMillis=").append(timeoutMillis);
sb.append('}');
return sb.toString();
}
} | 1no label
| hazelcast_src_main_java_com_hazelcast_transaction_impl_TransactionImpl.java |
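TransactionImpl above implements a two-phase lifecycle: begin, then prepare (phase one, with logs replicated to backups), then commit (phase two), rolling back on failure. A hedged sketch of that control flow; the Tx interface is a stand-in for the Transaction types above:

// Two-phase commit driver: prepare must succeed on all participants before
// commit; any failure triggers rollback, which undoes logs in reverse order.
interface Tx {
    void begin();
    void prepare() throws Exception;
    void commit() throws Exception;
    void rollback();
}

final class TwoPhaseRunner {
    static void run(Tx tx, Runnable work) throws Exception {
        tx.begin();
        try {
            work.run();
            tx.prepare(); // phase one: all participants must acknowledge
            tx.commit();  // phase two: apply changes and purge backups
        } catch (Exception e) {
            tx.rollback(); // as in TransactionImpl#rollback
            throw e;
        }
    }
}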
3,721 | public class MultiFieldsIntegrationTests extends ElasticsearchIntegrationTest {
@Test
public void testMultiFields() throws Exception {
assertAcked(
client().admin().indices().prepareCreate("my-index")
.addMapping("my-type", createTypeSource())
);
GetMappingsResponse getMappingsResponse = client().admin().indices().prepareGetMappings("my-index").get();
MappingMetaData mappingMetaData = getMappingsResponse.mappings().get("my-index").get("my-type");
assertThat(mappingMetaData, not(nullValue()));
Map<String, Object> mappingSource = mappingMetaData.sourceAsMap();
Map titleFields = ((Map) XContentMapValues.extractValue("properties.title.fields", mappingSource));
assertThat(titleFields.size(), equalTo(1));
assertThat(titleFields.get("not_analyzed"), notNullValue());
assertThat(((Map)titleFields.get("not_analyzed")).get("index").toString(), equalTo("not_analyzed"));
client().prepareIndex("my-index", "my-type", "1")
.setSource("title", "Multi fields")
.setRefresh(true)
.get();
SearchResponse searchResponse = client().prepareSearch("my-index")
.setQuery(matchQuery("title", "multi"))
.get();
assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
searchResponse = client().prepareSearch("my-index")
.setQuery(matchQuery("title.not_analyzed", "Multi fields"))
.get();
assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
assertAcked(
client().admin().indices().preparePutMapping("my-index").setType("my-type")
.setSource(createPutMappingSource())
.setIgnoreConflicts(true) // If updated with multi-field type, we need to ignore failures.
);
getMappingsResponse = client().admin().indices().prepareGetMappings("my-index").get();
mappingMetaData = getMappingsResponse.mappings().get("my-index").get("my-type");
assertThat(mappingMetaData, not(nullValue()));
mappingSource = mappingMetaData.sourceAsMap();
assertThat(((Map) XContentMapValues.extractValue("properties.title", mappingSource)).size(), equalTo(2));
titleFields = ((Map) XContentMapValues.extractValue("properties.title.fields", mappingSource));
assertThat(titleFields.size(), equalTo(2));
assertThat(titleFields.get("not_analyzed"), notNullValue());
assertThat(((Map)titleFields.get("not_analyzed")).get("index").toString(), equalTo("not_analyzed"));
assertThat(titleFields.get("uncased"), notNullValue());
assertThat(((Map)titleFields.get("uncased")).get("analyzer").toString(), equalTo("whitespace"));
client().prepareIndex("my-index", "my-type", "1")
.setSource("title", "Multi fields")
.setRefresh(true)
.get();
searchResponse = client().prepareSearch("my-index")
.setQuery(matchQuery("title.uncased", "Multi"))
.get();
assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
}
@Test
public void testGeoPointMultiField() throws Exception {
assertAcked(
client().admin().indices().prepareCreate("my-index")
.addMapping("my-type", createMappingSource("geo_point"))
);
GetMappingsResponse getMappingsResponse = client().admin().indices().prepareGetMappings("my-index").get();
MappingMetaData mappingMetaData = getMappingsResponse.mappings().get("my-index").get("my-type");
assertThat(mappingMetaData, not(nullValue()));
Map<String, Object> mappingSource = mappingMetaData.sourceAsMap();
Map aField = ((Map) XContentMapValues.extractValue("properties.a", mappingSource));
assertThat(aField.size(), equalTo(2));
assertThat(aField.get("type").toString(), equalTo("geo_point"));
assertThat(aField.get("fields"), notNullValue());
Map bField = ((Map) XContentMapValues.extractValue("properties.a.fields.b", mappingSource));
assertThat(bField.size(), equalTo(2));
assertThat(bField.get("type").toString(), equalTo("string"));
assertThat(bField.get("index").toString(), equalTo("not_analyzed"));
client().prepareIndex("my-index", "my-type", "1").setSource("a", "51,19").setRefresh(true).get();
CountResponse countResponse = client().prepareCount("my-index")
.setQuery(constantScoreQuery(geoDistanceFilter("a").point(51, 19).distance(50, DistanceUnit.KILOMETERS)))
.get();
assertThat(countResponse.getCount(), equalTo(1l));
countResponse = client().prepareCount("my-index").setQuery(matchQuery("a.b", "51,19")).get();
assertThat(countResponse.getCount(), equalTo(1l));
}
@Test
public void testTokenCountMultiField() throws Exception {
assertAcked(
client().admin().indices().prepareCreate("my-index")
.addMapping("my-type", XContentFactory.jsonBuilder().startObject().startObject("my-type")
.startObject("properties")
.startObject("a")
.field("type", "token_count")
.field("analyzer", "simple")
.startObject("fields")
.startObject("b")
.field("type", "string")
.field("index", "not_analyzed")
.endObject()
.endObject()
.endObject()
.endObject()
.endObject().endObject())
);
GetMappingsResponse getMappingsResponse = client().admin().indices().prepareGetMappings("my-index").get();
MappingMetaData mappingMetaData = getMappingsResponse.mappings().get("my-index").get("my-type");
assertThat(mappingMetaData, not(nullValue()));
Map<String, Object> mappingSource = mappingMetaData.sourceAsMap();
Map aField = ((Map) XContentMapValues.extractValue("properties.a", mappingSource));
assertThat(aField.size(), equalTo(3));
assertThat(aField.get("type").toString(), equalTo("token_count"));
assertThat(aField.get("fields"), notNullValue());
Map bField = ((Map) XContentMapValues.extractValue("properties.a.fields.b", mappingSource));
assertThat(bField.size(), equalTo(2));
assertThat(bField.get("type").toString(), equalTo("string"));
assertThat(bField.get("index").toString(), equalTo("not_analyzed"));
client().prepareIndex("my-index", "my-type", "1").setSource("a", "my tokens").setRefresh(true).get();
CountResponse countResponse = client().prepareCount("my-index").setQuery(matchQuery("a.b", "my tokens")).get();
assertThat(countResponse.getCount(), equalTo(1l));
}
@Test
public void testCompletionMultiField() throws Exception {
assertAcked(
client().admin().indices().prepareCreate("my-index")
.addMapping("my-type", createMappingSource("completion"))
);
GetMappingsResponse getMappingsResponse = client().admin().indices().prepareGetMappings("my-index").get();
MappingMetaData mappingMetaData = getMappingsResponse.mappings().get("my-index").get("my-type");
assertThat(mappingMetaData, not(nullValue()));
Map<String, Object> mappingSource = mappingMetaData.sourceAsMap();
Map aField = ((Map) XContentMapValues.extractValue("properties.a", mappingSource));
assertThat(aField.size(), equalTo(7));
assertThat(aField.get("type").toString(), equalTo("completion"));
assertThat(aField.get("fields"), notNullValue());
Map bField = ((Map) XContentMapValues.extractValue("properties.a.fields.b", mappingSource));
assertThat(bField.size(), equalTo(2));
assertThat(bField.get("type").toString(), equalTo("string"));
assertThat(bField.get("index").toString(), equalTo("not_analyzed"));
client().prepareIndex("my-index", "my-type", "1").setSource("a", "complete me").setRefresh(true).get();
CountResponse countResponse = client().prepareCount("my-index").setQuery(matchQuery("a.b", "complete me")).get();
assertThat(countResponse.getCount(), equalTo(1l));
}
@Test
public void testIpMultiField() throws Exception {
assertAcked(
client().admin().indices().prepareCreate("my-index")
.addMapping("my-type", createMappingSource("ip"))
);
GetMappingsResponse getMappingsResponse = client().admin().indices().prepareGetMappings("my-index").get();
MappingMetaData mappingMetaData = getMappingsResponse.mappings().get("my-index").get("my-type");
assertThat(mappingMetaData, not(nullValue()));
Map<String, Object> mappingSource = mappingMetaData.sourceAsMap();
Map aField = ((Map) XContentMapValues.extractValue("properties.a", mappingSource));
assertThat(aField.size(), equalTo(2));
assertThat(aField.get("type").toString(), equalTo("ip"));
assertThat(aField.get("fields"), notNullValue());
Map bField = ((Map) XContentMapValues.extractValue("properties.a.fields.b", mappingSource));
assertThat(bField.size(), equalTo(2));
assertThat(bField.get("type").toString(), equalTo("string"));
assertThat(bField.get("index").toString(), equalTo("not_analyzed"));
client().prepareIndex("my-index", "my-type", "1").setSource("a", "127.0.0.1").setRefresh(true).get();
CountResponse countResponse = client().prepareCount("my-index").setQuery(matchQuery("a.b", "127.0.0.1")).get();
assertThat(countResponse.getCount(), equalTo(1l));
}
private XContentBuilder createMappingSource(String fieldType) throws IOException {
return XContentFactory.jsonBuilder().startObject().startObject("my-type")
.startObject("properties")
.startObject("a")
.field("type", fieldType)
.startObject("fields")
.startObject("b")
.field("type", "string")
.field("index", "not_analyzed")
.endObject()
.endObject()
.endObject()
.endObject()
.endObject().endObject();
}
private XContentBuilder createTypeSource() throws IOException {
if (randomBoolean()) {
return XContentFactory.jsonBuilder().startObject().startObject("my-type")
.startObject("properties")
.startObject("title")
.field("type", "string")
.startObject("fields")
.startObject("not_analyzed")
.field("type", "string")
.field("index", "not_analyzed")
.endObject()
.endObject()
.endObject()
.endObject()
.endObject().endObject();
} else {
return XContentFactory.jsonBuilder().startObject().startObject("my-type")
.startObject("properties")
.startObject("title")
.field("type", "multi_field")
.startObject("fields")
.startObject("title")
.field("type", "string")
.endObject()
.startObject("not_analyzed")
.field("type", "string")
.field("index", "not_analyzed")
.endObject()
.endObject()
.endObject()
.endObject()
.endObject().endObject();
}
}
private XContentBuilder createPutMappingSource() throws IOException {
if (randomBoolean()) {
return XContentFactory.jsonBuilder().startObject().startObject("my-type")
.startObject("properties")
.startObject("title")
.field("type", "string")
.startObject("fields")
.startObject("uncased")
.field("type", "string")
.field("analyzer", "whitespace")
.endObject()
.endObject()
.endObject()
.endObject()
.endObject().endObject();
} else {
return XContentFactory.jsonBuilder().startObject().startObject("my-type")
.startObject("properties")
.startObject("title")
.field("type", "multi_field")
.startObject("fields")
.startObject("uncased")
.field("type", "string")
.field("analyzer", "whitespace")
.endObject()
.endObject()
.endObject()
.endObject()
.endObject().endObject();
}
}
} | 0true
| src_test_java_org_elasticsearch_index_mapper_multifield_MultiFieldsIntegrationTests.java |
3,625 | public static final Mapper.TypeParser multiFieldConverterTypeParser = new Mapper.TypeParser() {
@Override
public Mapper.Builder<?, ?> parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
ContentPath.Type pathType = null;
AbstractFieldMapper.Builder mainFieldBuilder = null;
List<AbstractFieldMapper.Builder> fields = null;
String firstType = null;
for (Map.Entry<String, Object> entry : node.entrySet()) {
String fieldName = Strings.toUnderscoreCase(entry.getKey());
Object fieldNode = entry.getValue();
if (fieldName.equals("path")) {
pathType = parsePathType(name, fieldNode.toString());
} else if (fieldName.equals("fields")) {
Map<String, Object> fieldsNode = (Map<String, Object>) fieldNode;
for (Map.Entry<String, Object> entry1 : fieldsNode.entrySet()) {
String propName = entry1.getKey();
Map<String, Object> propNode = (Map<String, Object>) entry1.getValue();
String type;
Object typeNode = propNode.get("type");
if (typeNode != null) {
type = typeNode.toString();
if (firstType == null) {
firstType = type;
}
} else {
throw new MapperParsingException("No type specified for property [" + propName + "]");
}
Mapper.TypeParser typeParser = parserContext.typeParser(type);
if (typeParser == null) {
throw new MapperParsingException("No handler for type [" + type + "] declared on field [" + fieldName + "]");
}
if (propName.equals(name)) {
mainFieldBuilder = (AbstractFieldMapper.Builder) typeParser.parse(propName, propNode, parserContext);
} else {
if (fields == null) {
fields = new ArrayList<AbstractFieldMapper.Builder>(2);
}
fields.add((AbstractFieldMapper.Builder) typeParser.parse(propName, propNode, parserContext));
}
}
}
}
if (mainFieldBuilder == null) {
if (fields == null) {
                    // No fields at all were specified in multi_field, so let's return a non-indexed string field.
return new StringFieldMapper.Builder(name).index(false);
}
Mapper.TypeParser typeParser = parserContext.typeParser(firstType);
if (typeParser == null) {
// The first multi field's type is unknown
mainFieldBuilder = new StringFieldMapper.Builder(name).index(false);
} else {
Mapper.Builder substitute = typeParser.parse(name, Collections.<String, Object>emptyMap(), parserContext);
if (substitute instanceof AbstractFieldMapper.Builder) {
mainFieldBuilder = ((AbstractFieldMapper.Builder) substitute).index(false);
} else {
// The first multi isn't a core field type
mainFieldBuilder = new StringFieldMapper.Builder(name).index(false);
}
}
}
if (fields != null && pathType != null) {
for (Mapper.Builder field : fields) {
mainFieldBuilder.addMultiField(field);
}
mainFieldBuilder.multiFieldPathType(pathType);
} else if (fields != null) {
for (Mapper.Builder field : fields) {
mainFieldBuilder.addMultiField(field);
}
} else if (pathType != null) {
mainFieldBuilder.multiFieldPathType(pathType);
}
return mainFieldBuilder;
}
}; | 0true
| src_main_java_org_elasticsearch_index_mapper_core_TypeParsers.java |
147 | final class MoveDirProposal implements ICompletionProposal {
private final Shell shell;
private final String pn;
private final String cpn;
private final IPath sourceDir;
private final IProject project;
MoveDirProposal(Shell shell, String pn, String cpn,
IPath sourceDir, IProject project) {
this.shell = shell;
this.pn = pn;
this.cpn = cpn;
this.sourceDir = sourceDir;
this.project = project;
}
@Override
public Point getSelection(IDocument document) {
return null;
}
@Override
public Image getImage() {
return MINOR_CHANGE; //TODO!!!!!
}
@Override
public String getDisplayString() {
return "Rename and move to '" + pn + "'";
}
@Override
public IContextInformation getContextInformation() {
return null;
}
@Override
public String getAdditionalProposalInfo() {
return null;
}
@Override
public void apply(IDocument document) {
IPackageFragment pfr = (IPackageFragment) JavaCore.create(project.getFolder(sourceDir.append(cpn.replace('.', '/'))));
RenamePackageProcessor processor = new RenamePackageProcessor(pfr);
processor.setNewElementName(pn);
new RefactoringStarter().activate(new RenamePackageWizard(new RenameRefactoring(processor)),
shell, "Rename Package", 4);
}
static void addMoveDirProposal(final IFile file, final Tree.CompilationUnit cu,
final IProject project, Collection<ICompletionProposal> proposals,
final Shell shell) {
Tree.ImportPath importPath;
if (!cu.getPackageDescriptors().isEmpty()) {
importPath = cu.getPackageDescriptors().get(0).getImportPath();
}
else if (!cu.getModuleDescriptors().isEmpty()) {
importPath = cu.getModuleDescriptors().get(0).getImportPath();
}
else {
return;
}
final String pn = formatPath(importPath.getIdentifiers());
final String cpn = cu.getUnit().getPackage().getNameAsString();
final IPath sourceDir = file.getProjectRelativePath()
.removeLastSegments(file.getProjectRelativePath().segmentCount()-1);
// final IPath relPath = sourceDir.append(pn.replace('.', '/'));
// final IPath newPath = project.getFullPath().append(relPath);
// if (!project.exists(newPath)) {
proposals.add(new MoveDirProposal(shell, pn, cpn, sourceDir, project));
// }
}
} | 0true
| plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_correct_MoveDirProposal.java |
727 | public class ChangeParametersRefactoring extends AbstractRefactoring {
private static class FindInvocationsVisitor extends Visitor {
private Declaration declaration;
private final Set<Tree.PositionalArgumentList> posResults =
new HashSet<Tree.PositionalArgumentList>();
private final Set<Tree.NamedArgumentList> namedResults =
new HashSet<Tree.NamedArgumentList>();
Set<Tree.PositionalArgumentList> getPositionalArgLists() {
return posResults;
}
Set<Tree.NamedArgumentList> getNamedArgLists() {
return namedResults;
}
private FindInvocationsVisitor(Declaration declaration) {
this.declaration=declaration;
}
@Override
public void visit(Tree.InvocationExpression that) {
super.visit(that);
Tree.Primary primary = that.getPrimary();
if (primary instanceof Tree.MemberOrTypeExpression) {
if (((Tree.MemberOrTypeExpression) primary).getDeclaration()
.refines(declaration)) {
Tree.PositionalArgumentList pal = that.getPositionalArgumentList();
if (pal!=null) {
posResults.add(pal);
}
Tree.NamedArgumentList nal = that.getNamedArgumentList();
if (nal!=null) {
namedResults.add(nal);
}
}
}
}
}
private static class FindArgumentsVisitor extends Visitor {
private Declaration declaration;
private final Set<Tree.MethodArgument> results =
new HashSet<Tree.MethodArgument>();
Set<Tree.MethodArgument> getResults() {
return results;
}
private FindArgumentsVisitor(Declaration declaration) {
this.declaration=declaration;
}
@Override
public void visit(Tree.MethodArgument that) {
super.visit(that);
Parameter p = that.getParameter();
if (p!=null && p.getModel().equals(declaration)) {
results.add(that);
}
}
}
private List<Integer> order = new ArrayList<Integer>();
private List<Boolean> defaulted = new ArrayList<Boolean>();
private final Declaration declaration;
private final List<Parameter> parameters;
private final Map<MethodOrValue,String> defaultArgs =
new HashMap<MethodOrValue,String>();
private final Map<MethodOrValue,String> originalDefaultArgs =
new HashMap<MethodOrValue,String>();
private final Map<MethodOrValue,String> paramLists =
new HashMap<MethodOrValue,String>();
public Map<MethodOrValue,String> getDefaultArgs() {
return defaultArgs;
}
public List<Parameter> getParameters() {
return parameters;
}
public Node getNode() {
return node;
}
public List<Integer> getOrder() {
return order;
}
public List<Boolean> getDefaulted() {
return defaulted;
}
public ChangeParametersRefactoring(IEditorPart textEditor) {
super(textEditor);
if (rootNode!=null) {
Referenceable refDec =
getReferencedExplicitDeclaration(node, rootNode);
if (refDec instanceof Functional &&
refDec instanceof Declaration) {
refDec = ((Declaration) refDec).getRefinedDeclaration();
List<ParameterList> pls =
((Functional) refDec).getParameterLists();
if (pls.isEmpty()) {
declaration = null;
parameters = null;
}
else {
declaration = (Declaration) refDec;
parameters = pls.get(0).getParameters();
for (int i=0; i<parameters.size(); i++) {
order.add(i);
defaulted.add(parameters.get(i).isDefaulted());
}
Node decNode = getReferencedNode(refDec,
editor.getParseController());
Tree.ParameterList pl=null;
if (decNode instanceof Tree.AnyClass) {
pl = ((Tree.AnyClass) decNode).getParameterList();
}
else if (decNode instanceof Tree.AnyMethod) {
pl = ((Tree.AnyMethod) decNode).getParameterLists().get(0);
}
if (pl!=null) {
for (Tree.Parameter p: pl.getParameters()) {
Tree.SpecifierOrInitializerExpression sie =
getDefaultArgSpecifier(p);
if (sie!=null) {
defaultArgs.put(p.getParameterModel().getModel(),
toString(sie.getExpression()));
}
if (p instanceof Tree.FunctionalParameterDeclaration) {
Tree.MethodDeclaration pd = (Tree.MethodDeclaration)
((Tree.FunctionalParameterDeclaration) p).getTypedDeclaration();
paramLists.put(p.getParameterModel().getModel(),
toString(pd.getParameterLists().get(0)));
}
}
originalDefaultArgs.putAll(defaultArgs);
}
}
}
else {
declaration = null;
parameters = null;
}
}
else {
declaration = null;
parameters = null;
}
}
@Override
public boolean isEnabled() {
return declaration instanceof Functional &&
project != null &&
inSameProject(declaration);
}
public int getCount() {
return declaration==null ?
0 : countDeclarationOccurrences();
}
@Override
int countReferences(Tree.CompilationUnit cu) {
FindInvocationsVisitor frv = new FindInvocationsVisitor(declaration);
FindRefinementsVisitor fdv = new FindRefinementsVisitor(declaration);
FindArgumentsVisitor fav = new FindArgumentsVisitor(declaration);
cu.visit(frv);
cu.visit(fdv);
cu.visit(fav);
return frv.getPositionalArgLists().size() +
fdv.getDeclarationNodes().size() +
fav.getResults().size();
}
public String getName() {
return "Change Parameter List";
}
public RefactoringStatus checkInitialConditions(IProgressMonitor pm)
throws CoreException, OperationCanceledException {
// Check parameters retrieved from editor context
return new RefactoringStatus();
}
public RefactoringStatus checkFinalConditions(IProgressMonitor pm)
throws CoreException, OperationCanceledException {
RefactoringStatus result = new RefactoringStatus();
boolean foundDefaulted = false;
for (int index=0; index<defaulted.size(); index++) {
if (defaulted.get(index)) {
foundDefaulted = true;
}
else {
if (foundDefaulted) {
result.addWarning("defaulted parameters occur before required parameters");
break;
}
}
}
for (int index=0; index<defaulted.size(); index++) {
Parameter p = parameters.get(order.get(index));
if (defaulted.get(index)) {
String arg = defaultArgs.get(p.getModel());
if (arg==null || arg.isEmpty()) {
result.addWarning("missing default argument for " + p.getName());
}
}
/*else if (p.isDefaulted()) {
String arg = originalDefaultArgs.get(p.getModel());
if (arg==null || arg.isEmpty()) {
result.addWarning("missing argument to inline for " + p.getName());
}
}*/
}
return result;
}
public CompositeChange createChange(IProgressMonitor pm) throws CoreException,
OperationCanceledException {
List<PhasedUnit> units = getAllUnits();
pm.beginTask(getName(), units.size());
CompositeChange cc = new CompositeChange(getName());
int i=0;
for (PhasedUnit pu: units) {
if (searchInFile(pu)) {
TextFileChange tfc = newTextFileChange(pu);
refactorInFile(tfc, cc, pu.getCompilationUnit());
pm.worked(i++);
}
}
if (searchInEditor()) {
DocumentChange dc = newDocumentChange();
Tree.CompilationUnit cu =
editor.getParseController().getRootNode();
refactorInFile(dc, cc, cu);
pm.worked(i++);
}
pm.done();
return cc;
}
private void refactorInFile(TextChange tfc,
CompositeChange cc, Tree.CompilationUnit root) {
tfc.setEdit(new MultiTextEdit());
if (declaration!=null) {
int requiredParams=-1;
for (int i=0; i<defaulted.size(); i++) {
Parameter p = parameters.get(order.get(i));
if (!defaulted.get(i) ||
defaultHasChanged(p)) {
if (i>requiredParams) {
requiredParams = i;
}
}
}
FindInvocationsVisitor fiv =
new FindInvocationsVisitor(declaration);
root.visit(fiv);
for (Tree.PositionalArgumentList pal: fiv.getPositionalArgLists()) {
List<Tree.PositionalArgument> pas = pal.getPositionalArguments();
int existingArgs=0;
for (int i=0; i<pas.size(); i++) {
Parameter p = pas.get(i).getParameter();
if (p!=null) {
int newLoc = order.indexOf(i);
if (newLoc>existingArgs) {
existingArgs = newLoc;
}
}
}
Tree.PositionalArgument[] args =
new Tree.PositionalArgument[Math.max(requiredParams+1, existingArgs+1)];
for (int i=0; i<pas.size(); i++) {
args[order.indexOf(i)] = pas.get(i);
}
tfc.addEdit(reorderEdit(pal, args));
}
for (Tree.NamedArgumentList nal: fiv.getNamedArgLists()) {
List<Tree.NamedArgument> nas = nal.getNamedArguments();
for (int i=0; i<defaulted.size(); i++) {
Parameter p = parameters.get(order.get(i));
if (!defaulted.get(i) || defaultHasChanged(p)) {
boolean found = false;
for (Tree.NamedArgument na: nas) {
Parameter nap = na.getParameter();
if (nap!=null &&
nap.getModel().equals(p.getModel())) {
found=true;
break;
}
}
if (!found) {
tfc.addEdit(new InsertEdit(nal.getStopIndex(),
getInlinedNamedArg(p) + "; "));
}
}
}
}
FindRefinementsVisitor frv =
new FindRefinementsVisitor(declaration);
root.visit(frv);
for (Tree.StatementOrArgument decNode: frv.getDeclarationNodes()) {
boolean actual;
Tree.ParameterList pl;
if (decNode instanceof Tree.AnyMethod) {
Tree.AnyMethod m = (Tree.AnyMethod) decNode;
pl = m.getParameterLists().get(0);
actual = m.getDeclarationModel().isActual();
}
else if (decNode instanceof Tree.AnyClass) {
Tree.AnyClass c = (Tree.AnyClass) decNode;
pl = c.getParameterList();
actual = c.getDeclarationModel().isActual();
}
else if (decNode instanceof Tree.SpecifierStatement) {
Tree.Term bme = ((Tree.SpecifierStatement) decNode).getBaseMemberExpression();
if (bme instanceof Tree.ParameterizedExpression) {
pl = ((Tree.ParameterizedExpression) bme).getParameterLists().get(0);
actual = true;
}
else {
continue;
}
}
else {
continue;
}
List<Tree.Parameter> ps = pl.getParameters();
int size = ps.size();
Tree.Parameter[] params = new Tree.Parameter[size];
boolean[] defaulted = new boolean[size];
for (int i=0; i<size; i++) {
int index = order.indexOf(i);
params[index] = ps.get(i);
defaulted[index] = !actual && this.defaulted.get(index);
}
tfc.addEdit(reorderEdit(pl, params, defaulted));
}
FindArgumentsVisitor fav = new FindArgumentsVisitor(declaration);
root.visit(fav);
for (Tree.MethodArgument decNode: fav.getResults()) {
Tree.ParameterList pl = decNode.getParameterLists().get(0);
List<Tree.Parameter> ps = pl.getParameters();
int size = ps.size();
Tree.Parameter[] params = new Tree.Parameter[size];
for (int i=0; i<size; i++) {
params[order.indexOf(i)] = ps.get(i);
}
tfc.addEdit(reorderEdit(pl, params));
}
}
if (tfc.getEdit().hasChildren()) {
cc.add(tfc);
}
}
boolean defaultHasChanged(Parameter p) {
return p.isDefaulted() &&
//the default arg has been modified
!defaultArgs.get(p.getModel())
.equals(originalDefaultArgs.get(p.getModel()));
}
public ReplaceEdit reorderEdit(Node list,
Tree.PositionalArgument[] arguments) {
StringBuilder sb = new StringBuilder("(");
for (int i=0; i<arguments.length; i++) {
Tree.PositionalArgument elem = arguments[i];
String argString;
if (elem==null) {
Parameter p = parameters.get(order.get(i));
argString = getInlinedArg(p);
}
else {
argString = toString(elem);
}
sb.append(argString).append(", ");
}
sb.setLength(sb.length()-2);
sb.append(")");
return new ReplaceEdit(getNodeStartOffset(list),
getNodeLength(list),
sb.toString());
}
public ReplaceEdit reorderEdit(Node list,
Tree.Parameter[] parameters) {
StringBuilder sb = new StringBuilder("(");
for (int i=0; i<parameters.length; i++) {
Tree.Parameter parameter = parameters[i];
sb.append(toString(parameter)).append(", ");
}
sb.setLength(sb.length()-2);
sb.append(")");
return new ReplaceEdit(getNodeStartOffset(list),
getNodeLength(list),
sb.toString());
}
public ReplaceEdit reorderEdit(Node list,
Tree.Parameter[] parameters,
boolean[] defaulted) {
StringBuilder sb = new StringBuilder("(");
for (int i=0; i<parameters.length; i++) {
Tree.Parameter parameter = parameters[i];
String paramString = toString(parameter);
//first remove the default arg
Node sie = getDefaultArgSpecifier(parameter);
if (sie!=null) {
int loc = sie.getStartIndex()-parameter.getStartIndex();
paramString = paramString.substring(0,loc).trim();
}
if (defaulted[i]) {
//now add the new default arg
//TODO: this results in incorrectly-typed
// code for void functional parameters
Parameter p = parameter.getParameterModel();
paramString = paramString + getSpecifier(parameter) +
getNewDefaultArg(p);
}
sb.append(paramString).append(", ");
}
sb.setLength(sb.length()-2);
sb.append(")");
return new ReplaceEdit(getNodeStartOffset(list),
getNodeLength(list),
sb.toString());
}
private static String getSpecifier(Tree.Parameter parameter) {
if (parameter instanceof Tree.FunctionalParameterDeclaration) {
return " => ";
}
else {
return " = ";
}
}
private String getInlinedArg(Parameter p) {
String argString = originalDefaultArgs.get(p.getModel());
if (argString==null || argString.isEmpty()) {
argString = "nothing";
}
String params = paramLists.get(p.getModel());
if (params!=null) {
argString = params + " => " + argString;
}
return argString;
}
private String getInlinedNamedArg(Parameter p) {
String argString = originalDefaultArgs.get(p.getModel());
if (argString==null || argString.isEmpty()) {
argString = "nothing";
}
String paramList = paramLists.get(p.getModel());
if (paramList==null) {
return p.getName() + " = " + argString;
}
else {
return "function " + p.getName() + paramList +
" => " + argString;
}
}
private String getNewDefaultArg(Parameter p) {
String argString = defaultArgs.get(p.getModel());
if (argString==null || argString.isEmpty()) {
argString = "nothing";
}
return argString;
}
public Declaration getDeclaration() {
return declaration;
}
} | 1no label
| plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_refactor_ChangeParametersRefactoring.java |
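Throughout the refactoring above, order encodes a permutation: slot i of the new parameter list holds the parameter that used to sit at order.get(i), so an element at old position i lands at order.indexOf(i). A self-contained sketch of that mapping (the argument names are illustrative):

import java.util.Arrays;
import java.util.List;

public class ReorderDemo {
    public static void main(String[] args) {
        // new slot i takes the old parameter at order.get(i)
        List<Integer> order = Arrays.asList(2, 0, 1);
        String[] oldArgs = {"a", "b", "c"};
        String[] newArgs = new String[oldArgs.length];
        for (int i = 0; i < oldArgs.length; i++) {
            // old position i moves to slot order.indexOf(i)
            newArgs[order.indexOf(i)] = oldArgs[i];
        }
        System.out.println(Arrays.toString(newArgs)); // [c, a, b]
    }
}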
45 | public class OByteArrayComparator implements Comparator<byte[]> {
public static final OByteArrayComparator INSTANCE = new OByteArrayComparator();
public int compare(final byte[] arrayOne, final byte[] arrayTwo) {
final int lenDiff = arrayOne.length - arrayTwo.length;
if (lenDiff != 0)
return lenDiff;
for (int i = 0; i < arrayOne.length; i++) {
final int valOne = arrayOne[i] & 0xFF;
final int valTwo = arrayTwo[i] & 0xFF;
final int diff = valOne - valTwo;
if (diff != 0)
return diff;
}
return 0;
}
} | 0true
| commons_src_main_java_com_orientechnologies_common_comparator_OByteArrayComparator.java |
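Note the ordering this comparator defines: shorter arrays always sort before longer ones, and arrays of equal length compare byte-by-byte as unsigned values (the & 0xFF masks). A small demonstration, assuming the class above is on the classpath:

import java.util.Arrays;
import com.orientechnologies.common.comparator.OByteArrayComparator;

public class ByteArrayOrderDemo {
    public static void main(String[] args) {
        byte[][] keys = {
            {0x01, (byte) 0xFF},  // 0xFF compares as 255, not -1
            {0x02},               // shorter, so it sorts first
            {0x01, 0x02}
        };
        Arrays.sort(keys, OByteArrayComparator.INSTANCE);
        for (byte[] k : keys)
            System.out.println(Arrays.toString(k));
        // [2]
        // [1, 2]
        // [1, -1]   <- printed as signed, but ordered as unsigned
    }
}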
144 | public class TitanCleanup {
/**
* Clears out the entire graph. This will delete ALL of the data stored in this graph and the data will NOT be
* recoverable. This method is intended only for development and testing use.
*
* @param graph
* @throws IllegalArgumentException if the graph has not been shut down
* @throws com.thinkaurelius.titan.core.TitanException if clearing the storage is unsuccessful
*/
public static final void clear(TitanGraph graph) {
Preconditions.checkNotNull(graph);
Preconditions.checkArgument(graph instanceof StandardTitanGraph,"Invalid graph instance detected: %s",graph.getClass());
StandardTitanGraph g = (StandardTitanGraph)graph;
Preconditions.checkArgument(!g.isOpen(),"Graph needs to be shut down before it can be cleared.");
final GraphDatabaseConfiguration config = g.getConfiguration();
BackendOperation.execute(new Callable<Boolean>(){
@Override
public Boolean call() throws Exception {
config.getBackend().clearStorage();
return true;
}
@Override
public String toString() { return "ClearBackend"; }
},new StandardDuration(20, TimeUnit.SECONDS));
}
} | 0true
| titan-core_src_main_java_com_thinkaurelius_titan_core_util_TitanCleanup.java |
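Typical test-only usage, per the Javadoc above: the graph must be shut down first, and the deletion is irreversible. A hedged sketch assuming the standard TitanFactory entry point; the configuration path is illustrative:

import com.thinkaurelius.titan.core.TitanFactory;
import com.thinkaurelius.titan.core.TitanGraph;
import com.thinkaurelius.titan.core.util.TitanCleanup;

public class ClearGraphDemo {
    public static void main(String[] args) {
        TitanGraph graph = TitanFactory.open("conf/titan.properties"); // illustrative path
        // ... exercise the graph in a test ...
        graph.shutdown();          // clear() refuses to run on an open graph
        TitanCleanup.clear(graph); // irreversibly deletes ALL stored data
    }
}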
245 | @SuppressWarnings("serial")
static final class OLinkedHashMapCache extends OLimitedMap<ORID, ORecordInternal<?>> {
public OLinkedHashMapCache(final int initialCapacity, final float loadFactor, final int limit) {
super(initialCapacity, loadFactor, limit);
}
void removeEldest(final int amount) {
final ORID[] victims = new ORID[amount];
final int skip = size() - amount;
int skipped = 0;
int selected = 0;
for (Map.Entry<ORID, ORecordInternal<?>> entry : entrySet()) {
if (entry.getValue().isDirty() || entry.getValue().isPinned() == Boolean.TRUE || skipped++ < skip)
continue;
victims[selected++] = entry.getKey();
}
for (ORID id : victims)
remove(id);
}
} | 0true
| core_src_main_java_com_orientechnologies_orient_core_cache_ODefaultCache.java |
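removeEldest walks the map in its iteration order, passes over the first size - amount eligible entries (and anything dirty or pinned), and collects the remainder as eviction victims. The same skip-window pattern over a plain LinkedHashMap, as a standalone sketch; the Predicate stands in for the dirty/pinned checks:

import java.util.*;
import java.util.function.Predicate;

public class EvictOldestDemo {
    static <K, V> void evictOldest(LinkedHashMap<K, V> map, int amount,
                                   Predicate<V> evictable) {
        List<K> victims = new ArrayList<>(amount);
        int skip = map.size() - amount;
        int skipped = 0;
        for (Map.Entry<K, V> e : map.entrySet()) {
            // short-circuit mirrors the original: skipped only advances
            // for entries that were actually eligible for eviction
            if (!evictable.test(e.getValue()) || skipped++ < skip)
                continue;
            victims.add(e.getKey());
        }
        victims.forEach(map::remove);
    }

    public static void main(String[] args) {
        LinkedHashMap<String, Integer> cache = new LinkedHashMap<>();
        for (int i = 0; i < 5; i++) cache.put("k" + i, i);
        evictOldest(cache, 2, v -> true);
        System.out.println(cache); // {k0=0, k1=1, k2=2}: the last two eligible entries were evicted
    }
}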
1,531 | public final class PersistenceXmlUtil {
/**
* URI for the JPA persistence namespace
*/
public static final String PERSISTENCE_NS_URI = "http://java.sun.com/xml/ns/persistence";
private final static SchemaFactory schemaFactory = SchemaFactory.newInstance(W3C_XML_SCHEMA_NS_URI);
private final static SAXParserFactory parserFactory = SAXParserFactory.newInstance();
static {
parserFactory.setNamespaceAware(true);
}
/** The persistence xml root */
public static final String PERSISTENCE_XML_ROOT = "META-INF/";
public static final String PERSISTENCE_XML_BASE_NAME = "persistence.xml";
/** The persistence XSD location */
public static final String PERSISTENCE_XSD_DIR = PERSISTENCE_XML_ROOT + "persistence/";
/** The persistence XML location */
public static final String PERSISTENCE_XML = PERSISTENCE_XML_ROOT + PERSISTENCE_XML_BASE_NAME;
private PersistenceXmlUtil() {
}
/**
 * Find the persistence unit with the given name among already-parsed units.
 *
 * @param unitName the persistence unit name to look for
 * @param units the previously parsed persistence units
 * @return the matching persistence unit, or null if none was found
 */
public static PersistenceUnitInfo findPersistenceUnit(String unitName, Collection<? extends PersistenceUnitInfo> units) {
if (units == null || unitName == null) {
return null;
}
for (PersistenceUnitInfo unit : units) {
if (unitName.equals(unit.getPersistenceUnitName())) {
return unit;
}
}
return null;
}
/**
* Parse the persistence.xml file referenced by the given URL.
*
* @param persistenceXml URL of the persistence.xml file to parse
* @return A collection of parsed persistence units.
* @throws IOException
* @throws SAXException
* @throws ParserConfigurationException
*/
public static Collection<? extends PersistenceUnitInfo> parse(URL persistenceXml) {
InputStream is = null;
try {
// Buffer the InputStream so we can mark it, though we'll be in
// trouble if we have to read more than 8192 characters before finding
// the schema!
is = new BufferedInputStream(persistenceXml.openStream());
JPAVersion jpaVersion = getSchemaVersion(is);
Schema schema = getSchema(jpaVersion);
if (schema == null) {
throw new PersistenceException("Schema is unknown");
}
// Get back to the beginning of the stream
is = new BufferedInputStream(persistenceXml.openStream());
parserFactory.setNamespaceAware(true);
int endIndex = persistenceXml.getPath().length() - PERSISTENCE_XML_BASE_NAME.length();
URL persistenceXmlRoot = new URL("file://" + persistenceXml.getFile().substring(0, endIndex));
return getPersistenceUnits(is, persistenceXmlRoot, jpaVersion);
} catch (Exception e) {
throw new PersistenceException("Something goes wrong while parsing persistence.xml", e);
} finally {
if (is != null)
try {
is.close();
} catch (IOException e) {
// No logging necessary, just consume
}
}
}
public static Schema getSchema(JPAVersion version) throws SAXException {
String schemaPath = PERSISTENCE_XSD_DIR + version.getFilename();
InputStream inputStream = PersistenceXmlUtil.class.getClassLoader().getResourceAsStream(schemaPath);
return schemaFactory.newSchema(new StreamSource(inputStream));
}
public static JPAVersion getSchemaVersion(InputStream is) throws ParserConfigurationException, SAXException, IOException {
SchemaLocatingHandler schemaHandler = parse(is, new SchemaLocatingHandler());
return JPAVersion.parse(schemaHandler.getVersion());
}
public static Collection<? extends PersistenceUnitInfo> getPersistenceUnits(InputStream is, URL xmlRoot, JPAVersion version)
throws ParserConfigurationException, SAXException, IOException {
JPAHandler handler = new JPAHandler(xmlRoot, version);
return parse(is, handler).getPersistenceUnits();
}
/**
* @param is the XML input stream to parse
* @param handler
* @return handler for chained calls
* @throws ParserConfigurationException
* @throws SAXException
* @throws IOException
*/
protected static <T extends DefaultHandler> T parse(InputStream is, T handler) throws ParserConfigurationException, SAXException,
IOException {
try {
SAXParser parser = parserFactory.newSAXParser();
parser.parse(is, handler);
} catch (StopSAXParser e) {
// This is not really an exception, but a way to work out which
// version of the persistence schema to use in validation
}
return handler;
}
/**
* @param uri
* @param element
* @param attributes
* @return XML Schema Version or null
* @throws SAXException
*/
public static String parseSchemaVersion(String uri, PersistenceXml element, Attributes attributes) throws SAXException {
if (PERSISTENCE_NS_URI.equals(uri) && TAG_PERSISTENCE == element) {
return attributes.getValue(ATTR_SCHEMA_VERSION.toString());
}
return null;
}
} | 0true
| object_src_main_java_com_orientechnologies_orient_object_jpa_parsing_PersistenceXmlUtil.java |
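Putting the helpers above together: resolve a persistence.xml on the classpath, parse it, and pick one unit by name. A sketch using only the API shown above; the unit name is illustrative:

import java.net.URL;
import java.util.Collection;
import javax.persistence.spi.PersistenceUnitInfo;

public class ParseDemo {
    public static void main(String[] args) {
        URL persistenceXml = ParseDemo.class.getClassLoader()
                .getResource(PersistenceXmlUtil.PERSISTENCE_XML); // META-INF/persistence.xml
        Collection<? extends PersistenceUnitInfo> units =
                PersistenceXmlUtil.parse(persistenceXml);
        PersistenceUnitInfo unit =
                PersistenceXmlUtil.findPersistenceUnit("my-unit", units);
        System.out.println(unit == null ? "not found" : unit.getPersistenceUnitName());
    }
}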
1,542 | public static class Map extends Mapper<NullWritable, FaunusVertex, NullWritable, Text> {
private boolean isVertex;
private final Text textWritable = new Text();
private SafeMapperOutputs outputs;
@Override
public void setup(final Mapper.Context context) throws IOException, InterruptedException {
this.isVertex = context.getConfiguration().getClass(CLASS, Element.class, Element.class).equals(Vertex.class);
this.outputs = new SafeMapperOutputs(context);
if (!context.getConfiguration().getBoolean(Tokens.TITAN_HADOOP_PIPELINE_TRACK_PATHS, false))
throw new IllegalStateException(PathMap.class.getSimpleName() + " requires that paths be enabled");
}
@Override
public void map(final NullWritable key, final FaunusVertex value, final Mapper<NullWritable, FaunusVertex, NullWritable, Text>.Context context) throws IOException, InterruptedException {
if (this.isVertex && value.hasPaths()) {
for (final List<FaunusPathElement.MicroElement> path : value.getPaths()) {
this.textWritable.set(path.toString());
this.outputs.write(Tokens.SIDEEFFECT, NullWritable.get(), this.textWritable);
}
DEFAULT_COMPAT.incrementContextCounter(context, Counters.VERTICES_PROCESSED, 1L);
} else {
long edgesProcessed = 0;
for (final Edge e : value.getEdges(Direction.OUT)) {
final StandardFaunusEdge edge = (StandardFaunusEdge) e;
if (edge.hasPaths()) {
for (final List<FaunusPathElement.MicroElement> path : edge.getPaths()) {
this.textWritable.set(path.toString());
this.outputs.write(Tokens.SIDEEFFECT, NullWritable.get(), this.textWritable);
}
edgesProcessed++;
}
}
DEFAULT_COMPAT.incrementContextCounter(context, Counters.OUT_EDGES_PROCESSED, edgesProcessed);
}
this.outputs.write(Tokens.GRAPH, NullWritable.get(), value);
}
@Override
public void cleanup(final Mapper<NullWritable, FaunusVertex, NullWritable, Text>.Context context) throws IOException, InterruptedException {
this.outputs.close();
}
} | 1no label
| titan-hadoop-parent_titan-hadoop-core_src_main_java_com_thinkaurelius_titan_hadoop_mapreduce_transform_PathMap.java |
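A note on the mapper above: it routes tracked paths to the SIDEEFFECT output and always forwards the vertex itself to the GRAPH output, so the side-effect stream can be consumed without disturbing the graph stream. Building a runnable Hadoop example would require a full MapReduce harness, so none is sketched here.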
1,446 | public class OGremlinEngineThreadLocal extends ThreadLocal<ScriptEngine> {
public static OGremlinEngineThreadLocal INSTANCE = new OGremlinEngineThreadLocal();
public ScriptEngine get(final OrientBaseGraph iGraph) {
ScriptEngine engine = super.get();
if (engine != null) {
final OrientBaseGraph currGraph = (OrientBaseGraph) engine.getBindings(ScriptContext.ENGINE_SCOPE).get("g");
if (currGraph == iGraph || (currGraph != null && currGraph.getRawGraph().getURL().equals(iGraph.getRawGraph().getURL())))
// REUSE IT
return engine;
}
// CREATE A NEW ONE
engine = new GremlinGroovyScriptEngine();
engine.getBindings(ScriptContext.ENGINE_SCOPE).put("g", iGraph);
set(engine);
return engine;
}
public ScriptEngine getIfDefined() {
return super.get();
}
public boolean isDefined() {
return super.get() != null;
}
} | 0true
| graphdb_src_main_java_com_orientechnologies_orient_graph_gremlin_OGremlinEngineThreadLocal.java |
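The thread-local caches one Gremlin engine per thread and rebinds it only when the graph changes, so repeated evaluations on the same graph avoid engine construction. A hedged usage sketch; the graph URL and script are illustrative:

import javax.script.ScriptEngine;
import com.orientechnologies.orient.graph.gremlin.OGremlinEngineThreadLocal;
import com.tinkerpop.blueprints.impls.orient.OrientGraph;

public class GremlinDemo {
    public static void main(String[] args) throws Exception {
        OrientGraph graph = new OrientGraph("memory:demo");
        try {
            ScriptEngine engine = OGremlinEngineThreadLocal.INSTANCE.get(graph);
            Object count = engine.eval("g.V.count()"); // 'g' was bound by get()
            System.out.println(count);
        } finally {
            graph.shutdown();
        }
    }
}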
487 | public class ODatabaseExport extends ODatabaseImpExpAbstract {
protected OJSONWriter writer;
protected long recordExported;
public static final int VERSION = 6;
public ODatabaseExport(final ODatabaseRecord iDatabase, final String iFileName, final OCommandOutputListener iListener)
throws IOException {
super(iDatabase, iFileName, iListener);
if (fileName == null)
throw new IllegalArgumentException("file name missing");
if (!fileName.endsWith(".gz")) {
fileName += ".gz";
}
final File f = new File(fileName);
f.mkdirs();
if (f.exists())
f.delete();
writer = new OJSONWriter(new OutputStreamWriter(new GZIPOutputStream(new FileOutputStream(fileName), 16384))); // 16KB
writer.beginObject();
iDatabase.getLevel1Cache().setEnable(false);
iDatabase.getLevel2Cache().setEnable(false);
}
public ODatabaseExport(final ODatabaseRecord iDatabase, final OutputStream iOutputStream, final OCommandOutputListener iListener)
throws IOException {
super(iDatabase, "streaming", iListener);
writer = new OJSONWriter(new OutputStreamWriter(iOutputStream));
writer.beginObject();
iDatabase.getLevel1Cache().setEnable(false);
iDatabase.getLevel2Cache().setEnable(false);
}
@Override
public ODatabaseExport setOptions(final String s) {
super.setOptions(s);
return this;
}
public ODatabaseExport exportDatabase() {
try {
listener.onMessage("\nStarted export of database '" + database.getName() + "' to " + fileName + "...");
database.getLevel1Cache().setEnable(false);
database.getLevel2Cache().setEnable(false);
long time = System.currentTimeMillis();
if (includeInfo)
exportInfo();
if (includeClusterDefinitions)
exportClusters();
if (includeSchema)
exportSchema();
if (includeRecords)
exportRecords();
if (includeIndexDefinitions)
exportIndexDefinitions();
if (includeManualIndexes)
exportManualIndexes();
listener.onMessage("\n\nDatabase export completed in " + (System.currentTimeMillis() - time) + "ms");
writer.flush();
} catch (Exception e) {
e.printStackTrace();
throw new ODatabaseExportException("Error on exporting database '" + database.getName() + "' to: " + fileName, e);
} finally {
close();
}
return this;
}
public long exportRecords() throws IOException {
long totalFoundRecords = 0;
long totalExportedRecords = 0;
int level = 1;
listener.onMessage("\nExporting records...");
writer.beginCollection(level, true, "records");
int exportedClusters = 0;
int maxClusterId = getMaxClusterId();
for (int i = 0; exportedClusters <= maxClusterId; ++i) {
String clusterName = database.getClusterNameById(i);
exportedClusters++;
long clusterExportedRecordsTot = 0;
if (clusterName != null) {
// CHECK IF THE CLUSTER IS INCLUDED
if (includeClusters != null) {
if (!includeClusters.contains(clusterName.toUpperCase()))
continue;
} else if (excludeClusters != null) {
if (excludeClusters.contains(clusterName.toUpperCase()))
continue;
}
if (excludeClusters != null && excludeClusters.contains(clusterName.toUpperCase()))
continue;
clusterExportedRecordsTot = database.countClusterElements(clusterName);
} else if (includeClusters != null && !includeClusters.isEmpty())
continue;
listener.onMessage("\n- Cluster " + (clusterName != null ? "'" + clusterName + "'" : "NULL") + " (id=" + i + ")...");
long clusterExportedRecordsCurrent = 0;
if (clusterName != null) {
ORecordInternal<?> rec = null;
try {
for (ORecordIteratorCluster<ORecordInternal<?>> it = database.browseCluster(clusterName); it.hasNext();) {
rec = it.next();
if (rec instanceof ODocument) {
// CHECK IF THE CLASS OF THE DOCUMENT IS INCLUDED
ODocument doc = (ODocument) rec;
final String className = doc.getClassName() != null ? doc.getClassName().toUpperCase() : null;
if (includeClasses != null) {
if (!includeClasses.contains(className))
continue;
} else if (excludeClasses != null) {
if (excludeClasses.contains(className))
continue;
}
} else if (includeClasses != null && !includeClasses.isEmpty())
continue;
if (exportRecord(clusterExportedRecordsTot, clusterExportedRecordsCurrent, rec))
clusterExportedRecordsCurrent++;
}
} catch (IOException e) {
OLogManager.instance().error(this, "\nError on exporting record %s because of I/O problems", e, rec.getIdentity());
// RE-THROW THE EXCEPTION UP
throw e;
} catch (OIOException e) {
OLogManager.instance().error(this, "\nError on exporting record %s because of I/O problems", e, rec.getIdentity());
// RE-THROW THE EXCEPTION UP
throw e;
} catch (Throwable t) {
if (rec != null) {
final byte[] buffer = rec.toStream();
OLogManager
.instance()
.error(
this,
"\nError on exporting record %s. It seems corrupted; size: %d bytes, raw content (as string):\n==========\n%s\n==========",
t, rec.getIdentity(), buffer.length, new String(buffer));
}
}
}
listener.onMessage("OK (records=" + clusterExportedRecordsCurrent + "/" + clusterExportedRecordsTot + ")");
totalExportedRecords += clusterExportedRecordsCurrent;
totalFoundRecords += clusterExportedRecordsTot;
}
writer.endCollection(level, true);
listener.onMessage("\n\nDone. Exported " + totalExportedRecords + " of total " + totalFoundRecords + " records\n");
return totalFoundRecords;
}
public void close() {
database.declareIntent(null);
if (writer == null)
return;
try {
writer.endObject();
writer.close();
writer = null;
} catch (IOException e) {
}
}
private void exportClusters() throws IOException {
listener.onMessage("\nExporting clusters...");
writer.beginCollection(1, true, "clusters");
int exportedClusters = 0;
int maxClusterId = getMaxClusterId();
for (int clusterId = 0; clusterId <= maxClusterId; ++clusterId) {
final String clusterName = database.getClusterNameById(clusterId);
// exclude removed clusters
if (clusterName == null)
continue;
// CHECK IF THE CLUSTER IS INCLUDED
if (includeClusters != null) {
if (!includeClusters.contains(clusterName.toUpperCase()))
continue;
} else if (excludeClusters != null) {
if (excludeClusters.contains(clusterName.toUpperCase()))
continue;
}
writer.beginObject(2, true, null);
writer.writeAttribute(0, false, "name", clusterName);
writer.writeAttribute(0, false, "id", clusterId);
writer.writeAttribute(0, false, "type", database.getClusterType(clusterName));
exportedClusters++;
writer.endObject(2, false);
}
listener.onMessage("OK (" + exportedClusters + " clusters)");
writer.endCollection(1, true);
}
protected int getMaxClusterId() {
int totalCluster = -1;
for (String clusterName : database.getClusterNames()) {
if (database.getClusterIdByName(clusterName) > totalCluster)
totalCluster = database.getClusterIdByName(clusterName);
}
return totalCluster;
}
private void exportInfo() throws IOException {
listener.onMessage("\nExporting database info...");
writer.beginObject(1, true, "info");
writer.writeAttribute(2, true, "name", database.getName().replace('\\', '/'));
writer.writeAttribute(2, true, "default-cluster-id", database.getDefaultClusterId());
writer.writeAttribute(2, true, "exporter-version", VERSION);
writer.writeAttribute(2, true, "engine-version", OConstants.ORIENT_VERSION);
final String engineBuild = OConstants.getBuildNumber();
if (engineBuild != null)
writer.writeAttribute(2, true, "engine-build", engineBuild);
writer.writeAttribute(2, true, "storage-config-version", OStorageConfiguration.CURRENT_VERSION);
writer.writeAttribute(2, true, "schema-version", OSchemaShared.CURRENT_VERSION_NUMBER);
writer.writeAttribute(2, true, "mvrbtree-version", OMVRBTreeMapProvider.CURRENT_PROTOCOL_VERSION);
writer.writeAttribute(2, true, "schemaRecordId", database.getStorage().getConfiguration().schemaRecordId);
writer.writeAttribute(2, true, "indexMgrRecordId", database.getStorage().getConfiguration().indexMgrRecordId);
writer.endObject(1, true);
listener.onMessage("OK");
}
private void exportIndexDefinitions() throws IOException {
listener.onMessage("\nExporting index info...");
writer.beginCollection(1, true, "indexes");
final OIndexManagerProxy indexManager = database.getMetadata().getIndexManager();
indexManager.reload();
final Collection<? extends OIndex<?>> indexes = indexManager.getIndexes();
for (OIndex<?> index : indexes) {
if (index.getName().equals(ODatabaseImport.EXPORT_IMPORT_MAP_NAME))
continue;
listener.onMessage("\n- Index " + index.getName() + "...");
writer.beginObject(2, true, null);
writer.writeAttribute(3, true, "name", index.getName());
writer.writeAttribute(3, true, "type", index.getType());
if (!index.getClusters().isEmpty())
writer.writeAttribute(3, true, "clustersToIndex", index.getClusters());
if (index.getDefinition() != null) {
writer.beginObject(4, true, "definition");
writer.writeAttribute(5, true, "defClass", index.getDefinition().getClass().getName());
writer.writeAttribute(5, true, "stream", index.getDefinition().toStream());
writer.endObject(4, true);
}
writer.endObject(2, true);
listener.onMessage("OK");
}
writer.endCollection(1, true);
listener.onMessage("\nOK (" + indexes.size() + " indexes)");
}
@SuppressWarnings({ "rawtypes", "unchecked" })
private void exportManualIndexes() throws IOException {
listener.onMessage("\nExporting manual indexes content...");
final OIndexManagerProxy indexManager = database.getMetadata().getIndexManager();
indexManager.reload();
final Collection<? extends OIndex<?>> indexes = indexManager.getIndexes();
ODocument exportEntry = new ODocument();
int manualIndexes = 0;
writer.beginCollection(1, true, "manualIndexes");
for (OIndex<?> index : indexes) {
if (index.getName().equals(ODatabaseImport.EXPORT_IMPORT_MAP_NAME))
continue;
if (!index.isAutomatic()) {
listener.onMessage("\n- Exporting index " + index.getName() + " ...");
writer.beginObject(2, true, null);
writer.writeAttribute(3, true, "name", index.getName());
List<ODocument> indexContent = database.query(new OSQLSynchQuery<ODocument>("select from index:" + index.getName()));
writer.beginCollection(3, true, "content");
int i = 0;
for (ODocument indexEntry : indexContent) {
if (i > 0)
writer.append(",");
final OIndexDefinition indexDefinition = index.getDefinition();
exportEntry.reset();
exportEntry.setLazyLoad(false);
if (indexDefinition instanceof ORuntimeKeyIndexDefinition
&& ((ORuntimeKeyIndexDefinition) indexDefinition).getSerializer() != null) {
final OBinarySerializer binarySerializer = ((ORuntimeKeyIndexDefinition) indexDefinition).getSerializer();
final int dataSize = binarySerializer.getObjectSize(indexEntry.field("key"));
final byte[] binaryContent = new byte[dataSize];
binarySerializer.serialize(indexEntry.field("key"), binaryContent, 0);
exportEntry.field("binary", true);
exportEntry.field("key", binaryContent);
} else {
exportEntry.field("binary", false);
exportEntry.field("key", indexEntry.field("key"));
}
exportEntry.field("rid", indexEntry.field("rid"));
i++;
writer.append(exportEntry.toJSON());
final long percent = indexContent.size() / 10;
if (percent > 0 && (i % percent) == 0)
listener.onMessage(".");
}
writer.endCollection(3, true);
writer.endObject(2, true);
listener.onMessage("OK (entries=" + index.getSize() + ")");
manualIndexes++;
}
}
writer.endCollection(1, true);
listener.onMessage("\nOK (" + manualIndexes + " manual indexes)");
}
private void exportSchema() throws IOException {
listener.onMessage("\nExporting schema...");
writer.beginObject(1, true, "schema");
OSchemaProxy s = (OSchemaProxy) database.getMetadata().getSchema();
writer.writeAttribute(2, true, "version", s.getVersion());
if (!s.getClasses().isEmpty()) {
writer.beginCollection(2, true, "classes");
final List<OClass> classes = new ArrayList<OClass>(s.getClasses());
Collections.sort(classes);
for (OClass cls : classes) {
writer.beginObject(3, true, null);
writer.writeAttribute(0, false, "name", cls.getName());
writer.writeAttribute(0, false, "default-cluster-id", cls.getDefaultClusterId());
writer.writeAttribute(0, false, "cluster-ids", cls.getClusterIds());
if (((OClassImpl) cls).getOverSizeInternal() > 1)
writer.writeAttribute(0, false, "oversize", ((OClassImpl) cls).getOverSizeInternal());
if (cls.isStrictMode())
writer.writeAttribute(0, false, "strictMode", cls.isStrictMode());
if (cls.getSuperClass() != null)
writer.writeAttribute(0, false, "super-class", cls.getSuperClass().getName());
if (cls.getShortName() != null)
writer.writeAttribute(0, false, "short-name", cls.getShortName());
if (cls.isAbstract())
writer.writeAttribute(0, false, "abstract", cls.isAbstract());
if (!cls.properties().isEmpty()) {
writer.beginCollection(4, true, "properties");
final List<OProperty> properties = new ArrayList<OProperty>(cls.declaredProperties());
Collections.sort(properties);
for (OProperty p : properties) {
writer.beginObject(5, true, null);
writer.writeAttribute(0, false, "name", p.getName());
writer.writeAttribute(0, false, "type", p.getType().toString());
if (p.isMandatory())
writer.writeAttribute(0, false, "mandatory", p.isMandatory());
if (p.isReadonly())
writer.writeAttribute(0, false, "readonly", p.isReadonly());
if (p.isNotNull())
writer.writeAttribute(0, false, "not-null", p.isNotNull());
if (p.getLinkedClass() != null)
writer.writeAttribute(0, false, "linked-class", p.getLinkedClass().getName());
if (p.getLinkedType() != null)
writer.writeAttribute(0, false, "linked-type", p.getLinkedType().toString());
if (p.getMin() != null)
writer.writeAttribute(0, false, "min", p.getMin());
if (p.getMax() != null)
writer.writeAttribute(0, false, "max", p.getMax());
if (((OPropertyImpl) p).getCustomInternal() != null)
writer.writeAttribute(0, false, "customFields", ((OPropertyImpl) p).getCustomInternal());
writer.endObject(0, false);
}
writer.endCollection(4, true);
}
writer.endObject(3, true);
}
writer.endCollection(2, true);
}
writer.endObject(1, true);
listener.onMessage("OK (" + s.getClasses().size() + " classes)");
}
private boolean exportRecord(long recordTot, long recordNum, ORecordInternal<?> rec) throws IOException {
if (rec != null)
try {
if (rec.getIdentity().isValid())
rec.reload();
if (useLineFeedForRecords)
writer.append("\n");
if (recordExported > 0)
writer.append(",");
writer.append(rec.toJSON("rid,type,version,class,attribSameRow,keepTypes,alwaysFetchEmbedded,dateAsLong"));
recordExported++;
recordNum++;
if (recordTot > 10 && (recordNum + 1) % (recordTot / 10) == 0)
listener.onMessage(".");
return true;
} catch (Throwable t) {
if (rec != null) {
final byte[] buffer = rec.toStream();
OLogManager
.instance()
.error(
this,
"\nError on exporting record %s. It seems corrupted; size: %d bytes, raw content (as string):\n==========\n%s\n==========",
t, rec.getIdentity(), buffer.length, new String(buffer));
}
}
return false;
}
} | 1no label
| core_src_main_java_com_orientechnologies_orient_core_db_tool_ODatabaseExport.java |
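End to end, the exporter is used as below: open a database, construct the exporter with a listener for progress messages, and call exportDatabase(), which writes a gzipped JSON file and closes its own writer. A sketch assuming the OrientDB 1.x-era API this class belongs to; paths and credentials are illustrative:

import java.io.IOException;
import com.orientechnologies.orient.core.command.OCommandOutputListener;
import com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx;
import com.orientechnologies.orient.core.db.tool.ODatabaseExport;

public class ExportDemo {
    public static void main(String[] args) throws IOException {
        ODatabaseDocumentTx db =
                new ODatabaseDocumentTx("plocal:/tmp/demo").open("admin", "admin");
        try {
            OCommandOutputListener listener = new OCommandOutputListener() {
                public void onMessage(String text) { System.out.print(text); }
            };
            // ".gz" is appended automatically if missing
            new ODatabaseExport(db, "/tmp/demo-export", listener).exportDatabase();
        } finally {
            db.close();
        }
    }
}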
4,459 | class Listener extends IndicesLifecycle.Listener {
@Override
public void afterIndexShardCreated(IndexShard indexShard) {
synchronized (mutex) {
shardsIndicesStatus.put(indexShard.shardId(), new ShardIndexingStatus());
shardsCreatedOrDeleted.set(true);
}
}
@Override
public void afterIndexShardClosed(ShardId shardId) {
synchronized (mutex) {
shardsIndicesStatus.remove(shardId);
shardsCreatedOrDeleted.set(true);
}
}
} | 1no label
| src_main_java_org_elasticsearch_indices_memory_IndexingMemoryController.java |
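The listener's job is bookkeeping: under a shared mutex it keeps a per-shard status map in sync and raises a flag that a periodic memory-rebalancing check consumes. The same pattern reduced to plain Java (names are illustrative):

import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.atomic.AtomicBoolean;

public class ShardTrackerDemo {
    private final Object mutex = new Object();
    private final Map<String, Object> status = new HashMap<>();
    private final AtomicBoolean changed = new AtomicBoolean();

    void onCreated(String shardId) {
        synchronized (mutex) {
            status.put(shardId, new Object());
            changed.set(true); // a periodic checker observes this flag
        }
    }

    void onClosed(String shardId) {
        synchronized (mutex) {
            status.remove(shardId);
            changed.set(true);
        }
    }

    public static void main(String[] args) {
        ShardTrackerDemo t = new ShardTrackerDemo();
        t.onCreated("index1[0]");
        t.onClosed("index1[0]");
        System.out.println(t.changed.get()); // true: the checker would rebalance
    }
}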