Unnamed: 0
int64 0
6.45k
| func
stringlengths 29
253k
| target
class label 2
classes | project
stringlengths 36
167
|
---|---|---|---|
1,220 | final class CompileErrorReporter implements
DiagnosticListener<JavaFileObject> {
private IProject project;
private boolean errorReported;
private List<IFolder> sourceDirectories;
public CompileErrorReporter(IProject project) {
this.project = project;
sourceDirectories = CeylonBuilder.getSourceFolders(project);
}
public void failed() {
if (!errorReported) {
setupMarker(project, null);
}
}
public void failed(final ExitState exitState) {
Diagnostic<? extends JavaFileObject> diagnostic = null;
if (exitState.javacExitCode == Main.EXIT_ABNORMAL) {
diagnostic = new Diagnostic<JavaFileObject>() {
@Override
public javax.tools.Diagnostic.Kind getKind() {
return javax.tools.Diagnostic.Kind.ERROR;
}
@Override
public JavaFileObject getSource() {
return null;
}
@Override
public long getPosition() {
return 0;
}
@Override
public long getStartPosition() {
return 0;
}
@Override
public long getEndPosition() {
return 0;
}
@Override
public long getLineNumber() {
return 0;
}
@Override
public long getColumnNumber() {
return 0;
}
@Override
public String getCode() {
return null;
}
@Override
public String getMessage(Locale locale) {
return "The Ceylon Java backend compiler failed abnormally" +
(exitState.ceylonCodegenExceptionCount > 0 ? "\n with " + exitState.ceylonCodegenExceptionCount + " code generation exceptions" : "") +
(exitState.ceylonCodegenErroneousCount > 0 ? "\n with " + exitState.ceylonCodegenErroneousCount + " erroneous code generations" : "") +
(exitState.ceylonCodegenGarbageCount > 0 ? "\n with " + exitState.ceylonCodegenGarbageCount + " malformed Javac tree cases" : "") +
(exitState.abortingException != null ? "\n with a throwable : " + exitState.abortingException.toString() : "") +
"";
}
};
}
if (!errorReported || diagnostic != null) {
setupMarker(project, diagnostic);
}
}
@Override
public void report(Diagnostic<? extends JavaFileObject> diagnostic) {
errorReported = true;
JavaFileObject source = diagnostic.getSource();
if (source == null) {
// no source file
if (!diagnostic.toString().startsWith("Note: Created module")) {
setupMarker(project, diagnostic);
}
}
else {
IPath absolutePath = new Path(source.getName());
IFile file = null;
for (IFolder sourceDirectory : sourceDirectories) {
IPath sourceDirPath = sourceDirectory.getLocation();
if (sourceDirPath.isPrefixOf(absolutePath)) {
IResource r = sourceDirectory.findMember(absolutePath.makeRelativeTo(sourceDirPath));
if (r instanceof IFile) {
file = (IFile) r;
}
}
}
if (file == null) {
file = getWorkspace().getRoot()
.getFileForLocation(new Path(source.getName()));
}
if(file != null) {
if (CeylonBuilder.isCeylon(file)){
try {
for (IMarker m: file.findMarkers(PROBLEM_MARKER_ID, true, DEPTH_ZERO)) {
int sev = ((Integer) m.getAttribute(IMarker.SEVERITY)).intValue();
if (sev==IMarker.SEVERITY_ERROR) {
return;
}
}
}
catch (CoreException e) {
e.printStackTrace();
}
setupMarker(file, diagnostic);
}
if (CeylonBuilder.isJava(file)){
try {
for (IMarker m: file.findMarkers(JAVA_MODEL_PROBLEM_MARKER, false, DEPTH_ZERO)) {
int sev = ((Integer) m.getAttribute(IMarker.SEVERITY)).intValue();
if (sev==IMarker.SEVERITY_ERROR) {
return;
}
}
}
catch (CoreException e) {
e.printStackTrace();
}
setupMarker(file, diagnostic);
}
}else{
setupMarker(project, diagnostic);
}
}
}
private void setupMarker(IResource resource, Diagnostic<? extends JavaFileObject> diagnostic) {
try {
long line = diagnostic==null ? -1 : diagnostic.getLineNumber();
String markerId = PROBLEM_MARKER_ID + ".backend";
if (resource instanceof IFile) {
if (CeylonBuilder.isJava((IFile)resource)) {
markerId = JAVA_MODEL_PROBLEM_MARKER;
}
// if (line<0) {
//TODO: use the Symbol to get a location for the javac error
// String name = ((Symbol)((JCDiagnostic) diagnostic).getArgs()[0]).name.toString();
// Declaration member = CeylonBuilder.getPackage((IFile)resource).getDirectMember(name, null, false);
// }
}
IMarker marker = resource.createMarker(markerId);
if (line>=0) {
//Javac doesn't have line number info for certain errors
marker.setAttribute(IMarker.LINE_NUMBER, (int) line);
marker.setAttribute(IMarker.CHAR_START,
(int) diagnostic.getStartPosition());
marker.setAttribute(IMarker.CHAR_END,
(int) diagnostic.getEndPosition());
}
if (markerId.equals(JAVA_MODEL_PROBLEM_MARKER)) {
marker.setAttribute(IMarker.SOURCE_ID, PLUGIN_ID);
}
String message = diagnostic==null ?
"unexplained compilation problem" :
diagnostic.getMessage(Locale.getDefault());
marker.setAttribute(IMarker.MESSAGE, message);
marker.setAttribute(IMarker.PRIORITY, IMarker.PRIORITY_HIGH);
switch (diagnostic==null ? Diagnostic.Kind.ERROR : diagnostic.getKind()) {
case ERROR:
marker.setAttribute(IMarker.SEVERITY, IMarker.SEVERITY_ERROR);
break;
case WARNING:
case MANDATORY_WARNING:
marker.setAttribute(IMarker.SEVERITY, IMarker.SEVERITY_WARNING);
break;
default:
marker.setAttribute(IMarker.SEVERITY, IMarker.SEVERITY_INFO);
}
}
catch (CoreException ce) {
ce.printStackTrace();
}
}
} | 1no label
| plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_core_builder_CompileErrorReporter.java |
3,154 | public abstract class AbstractNumericFieldDataTests extends AbstractFieldDataImplTests {
protected abstract FieldDataType getFieldDataType();
@Test
public void testSingleValueAllSetNumber() throws Exception {
fillSingleValueAllSet();
IndexNumericFieldData indexFieldData = getForField("value");
AtomicNumericFieldData fieldData = indexFieldData.load(refreshReader());
assertThat(fieldData.getNumDocs(), equalTo(3));
LongValues longValues = fieldData.getLongValues();
assertThat(longValues.isMultiValued(), equalTo(false));
assertThat(longValues.setDocument(0), equalTo(1));
assertThat(longValues.nextValue(), equalTo(2l));
assertThat(longValues.setDocument(1), equalTo(1));
assertThat(longValues.nextValue(), equalTo(1l));
assertThat(longValues.setDocument(2), equalTo(1));
assertThat(longValues.nextValue(), equalTo(3l));
DoubleValues doubleValues = fieldData.getDoubleValues();
assertThat(doubleValues.isMultiValued(), equalTo(false));
assertThat(1, equalTo(doubleValues.setDocument(0)));
assertThat(doubleValues.nextValue(), equalTo(2d));
assertThat(1, equalTo(doubleValues.setDocument(1)));
assertThat(doubleValues.nextValue(), equalTo(1d));
assertThat(1, equalTo(doubleValues.setDocument(2)));
assertThat(doubleValues.nextValue(), equalTo(3d));
IndexSearcher searcher = new IndexSearcher(readerContext.reader());
TopFieldDocs topDocs;
topDocs = searcher.search(new MatchAllDocsQuery(), 10,
new Sort(new SortField("value", indexFieldData.comparatorSource(null, SortMode.MIN))));
assertThat(topDocs.totalHits, equalTo(3));
assertThat(topDocs.scoreDocs[0].doc, equalTo(1));
assertThat(topDocs.scoreDocs[1].doc, equalTo(0));
assertThat(topDocs.scoreDocs[2].doc, equalTo(2));
topDocs = searcher.search(new MatchAllDocsQuery(), 10,
new Sort(new SortField("value", indexFieldData.comparatorSource(null, SortMode.MAX), true)));
assertThat(topDocs.totalHits, equalTo(3));
assertThat(topDocs.scoreDocs[0].doc, equalTo(2));
assertThat(topDocs.scoreDocs[1].doc, equalTo(0));
assertThat(topDocs.scoreDocs[2].doc, equalTo(1));
}
@Test
public void testSingleValueWithMissingNumber() throws Exception {
fillSingleValueWithMissing();
IndexNumericFieldData indexFieldData = getForField("value");
AtomicNumericFieldData fieldData = indexFieldData.load(refreshReader());
assertThat(fieldData.getNumDocs(), equalTo(3));
LongValues longValues = fieldData.getLongValues();
assertThat(longValues.isMultiValued(), equalTo(false));
assertThat(longValues.setDocument(0), equalTo(1));
assertThat(longValues.nextValue(), equalTo(2l));
assertThat(longValues.setDocument(1), equalTo(0));
assertThat(longValues.setDocument(2), equalTo(1));
assertThat(longValues.nextValue(), equalTo(3l));
DoubleValues doubleValues = fieldData.getDoubleValues();
assertThat(doubleValues.isMultiValued(), equalTo(false));
assertThat(1, equalTo(doubleValues.setDocument(0)));
assertThat(doubleValues.nextValue(), equalTo(2d));
assertThat(0, equalTo(doubleValues.setDocument(1)));
assertThat(1, equalTo(doubleValues.setDocument(2)));
assertThat(doubleValues.nextValue(), equalTo(3d));
IndexSearcher searcher = new IndexSearcher(readerContext.reader());
TopFieldDocs topDocs;
topDocs = searcher.search(new MatchAllDocsQuery(), 10,
new Sort(new SortField("value", indexFieldData.comparatorSource(null, SortMode.MIN)))); // defaults to _last
assertThat(topDocs.totalHits, equalTo(3));
assertThat(topDocs.scoreDocs[0].doc, equalTo(0));
assertThat(topDocs.scoreDocs[1].doc, equalTo(2));
assertThat(topDocs.scoreDocs[2].doc, equalTo(1));
topDocs = searcher.search(new MatchAllDocsQuery(), 10,
new Sort(new SortField("value", indexFieldData.comparatorSource(null, SortMode.MAX), true))); // defaults to _last
assertThat(topDocs.totalHits, equalTo(3));
assertThat(topDocs.scoreDocs[0].doc, equalTo(2));
assertThat(topDocs.scoreDocs[1].doc, equalTo(0));
assertThat(topDocs.scoreDocs[2].doc, equalTo(1));
topDocs = searcher.search(new MatchAllDocsQuery(), 10,
new Sort(new SortField("value", indexFieldData.comparatorSource("_first", SortMode.MIN))));
assertThat(topDocs.totalHits, equalTo(3));
assertThat(topDocs.scoreDocs[0].doc, equalTo(1));
assertThat(topDocs.scoreDocs[1].doc, equalTo(0));
assertThat(topDocs.scoreDocs[2].doc, equalTo(2));
topDocs = searcher.search(new MatchAllDocsQuery(), 10,
new Sort(new SortField("value", indexFieldData.comparatorSource("_first", SortMode.MAX), true)));
assertThat(topDocs.totalHits, equalTo(3));
assertThat(topDocs.scoreDocs[0].doc, equalTo(1));
assertThat(topDocs.scoreDocs[1].doc, equalTo(2));
assertThat(topDocs.scoreDocs[2].doc, equalTo(0));
topDocs = searcher.search(new MatchAllDocsQuery(), 10,
new Sort(new SortField("value", indexFieldData.comparatorSource("1", SortMode.MIN))));
assertThat(topDocs.totalHits, equalTo(3));
assertThat(topDocs.scoreDocs[0].doc, equalTo(1));
assertThat(topDocs.scoreDocs[1].doc, equalTo(0));
assertThat(topDocs.scoreDocs[2].doc, equalTo(2));
topDocs = searcher.search(new MatchAllDocsQuery(), 10,
new Sort(new SortField("value", indexFieldData.comparatorSource("1", SortMode.MAX), true)));
assertThat(topDocs.totalHits, equalTo(3));
assertThat(topDocs.scoreDocs[0].doc, equalTo(2));
assertThat(topDocs.scoreDocs[1].doc, equalTo(0));
assertThat(topDocs.scoreDocs[2].doc, equalTo(1));
}
@Test
public void testMultiValueAllSetNumber() throws Exception {
fillMultiValueAllSet();
IndexNumericFieldData indexFieldData = getForField("value");
AtomicNumericFieldData fieldData = indexFieldData.load(refreshReader());
assertThat(fieldData.getNumDocs(), equalTo(3));
LongValues longValues = fieldData.getLongValues();
assertThat(longValues.isMultiValued(), equalTo(true));
assertThat(longValues.setDocument(0), equalTo(2));
assertThat(longValues.nextValue(), equalTo(2l));
assertThat(longValues.nextValue(), equalTo(4l));
assertThat(longValues.setDocument(1), equalTo(1));
assertThat(longValues.nextValue(), equalTo(1l));
assertThat(longValues.setDocument(2), equalTo(1));
assertThat(longValues.nextValue(), equalTo(3l));
DoubleValues doubleValues = fieldData.getDoubleValues();
assertThat(doubleValues.isMultiValued(), equalTo(true));
assertThat(2, equalTo(doubleValues.setDocument(0)));
assertThat(doubleValues.nextValue(), equalTo(2d));
assertThat(doubleValues.nextValue(), equalTo(4d));
assertThat(1, equalTo(doubleValues.setDocument(1)));
assertThat(doubleValues.nextValue(), equalTo(1d));
assertThat(1, equalTo(doubleValues.setDocument(2)));
assertThat(doubleValues.nextValue(), equalTo(3d));
}
@Test
public void testMultiValueWithMissingNumber() throws Exception {
fillMultiValueWithMissing();
IndexNumericFieldData indexFieldData = getForField("value");
AtomicNumericFieldData fieldData = indexFieldData.load(refreshReader());
assertThat(fieldData.getNumDocs(), equalTo(3));
LongValues longValues = fieldData.getLongValues();
assertThat(longValues.isMultiValued(), equalTo(true));
assertThat(longValues.setDocument(0), equalTo(2));
assertThat(longValues.nextValue(), equalTo(2l));
assertThat(longValues.nextValue(), equalTo(4l));
assertThat(longValues.setDocument(1), equalTo(0));
assertThat(longValues.setDocument(2), equalTo(1));
assertThat(longValues.nextValue(), equalTo(3l));
DoubleValues doubleValues = fieldData.getDoubleValues();
assertThat(doubleValues.isMultiValued(), equalTo(true));
assertThat(2, equalTo(doubleValues.setDocument(0)));
assertThat(doubleValues.nextValue(), equalTo(2d));
assertThat(doubleValues.nextValue(), equalTo(4d));
assertThat(0, equalTo(doubleValues.setDocument(1)));
assertThat(1, equalTo(doubleValues.setDocument(2)));
assertThat(doubleValues.nextValue(), equalTo(3d));
}
@Test
public void testMissingValueForAll() throws Exception {
fillAllMissing();
IndexNumericFieldData indexFieldData = getForField("value");
AtomicNumericFieldData fieldData = indexFieldData.load(refreshReader());
assertThat(fieldData.getNumDocs(), equalTo(3));
// long values
LongValues longValues = fieldData.getLongValues();
assertThat(longValues.isMultiValued(), equalTo(false));
assertThat(longValues.setDocument(0), equalTo(0));
assertThat(longValues.setDocument(1), equalTo(0));
assertThat(longValues.setDocument(2), equalTo(0));
// double values
DoubleValues doubleValues = fieldData.getDoubleValues();
assertThat(doubleValues.isMultiValued(), equalTo(false));
assertThat(0, equalTo(doubleValues.setDocument(0)));
assertThat(0, equalTo(doubleValues.setDocument(1)));
assertThat(0, equalTo(doubleValues.setDocument(2)));
}
protected void fillAllMissing() throws Exception {
Document d = new Document();
d.add(new StringField("_id", "1", Field.Store.NO));
writer.addDocument(d);
d = new Document();
d.add(new StringField("_id", "2", Field.Store.NO));
writer.addDocument(d);
d = new Document();
d.add(new StringField("_id", "3", Field.Store.NO));
writer.addDocument(d);
}
@Test
public void testSortMultiValuesFields() throws Exception {
fillExtendedMvSet();
IndexFieldData indexFieldData = getForField("value");
IndexSearcher searcher = new IndexSearcher(DirectoryReader.open(writer, true));
TopFieldDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10,
new Sort(new SortField("value", indexFieldData.comparatorSource(null, SortMode.MIN)))); // defaults to _last
assertThat(topDocs.totalHits, equalTo(8));
assertThat(topDocs.scoreDocs.length, equalTo(8));
assertThat(topDocs.scoreDocs[0].doc, equalTo(7));
assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).intValue(), equalTo(-10));
assertThat(topDocs.scoreDocs[1].doc, equalTo(0));
assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[1]).fields[0]).intValue(), equalTo(2));
assertThat(topDocs.scoreDocs[2].doc, equalTo(2));
assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[2]).fields[0]).intValue(), equalTo(3));
assertThat(topDocs.scoreDocs[3].doc, equalTo(3));
assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[3]).fields[0]).intValue(), equalTo(4));
assertThat(topDocs.scoreDocs[4].doc, equalTo(4));
assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[4]).fields[0]).intValue(), equalTo(6));
assertThat(topDocs.scoreDocs[5].doc, equalTo(6));
assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[5]).fields[0]).intValue(), equalTo(8));
assertThat(topDocs.scoreDocs[6].doc, equalTo(1));
// assertThat(((FieldDoc) topDocs.scoreDocs[6]).fields[0], equalTo(null));
assertThat(topDocs.scoreDocs[7].doc, equalTo(5));
// assertThat(((FieldDoc) topDocs.scoreDocs[7]).fields[0], equalTo(null));
topDocs = searcher.search(new MatchAllDocsQuery(), 10,
new Sort(new SortField("value", indexFieldData.comparatorSource(null, SortMode.MAX), true))); // defaults to _last
assertThat(topDocs.totalHits, equalTo(8));
assertThat(topDocs.scoreDocs.length, equalTo(8));
assertThat(topDocs.scoreDocs[0].doc, equalTo(6));
assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).intValue(), equalTo(10));
assertThat(topDocs.scoreDocs[1].doc, equalTo(4));
assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[1]).fields[0]).intValue(), equalTo(8));
assertThat(topDocs.scoreDocs[2].doc, equalTo(3));
assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[2]).fields[0]).intValue(), equalTo(6));
assertThat(topDocs.scoreDocs[3].doc, equalTo(0));
assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[3]).fields[0]).intValue(), equalTo(4));
assertThat(topDocs.scoreDocs[4].doc, equalTo(2));
assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[4]).fields[0]).intValue(), equalTo(3));
assertThat(topDocs.scoreDocs[5].doc, equalTo(7));
assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[5]).fields[0]).intValue(), equalTo(-8));
assertThat(topDocs.scoreDocs[6].doc, equalTo(1));
// assertThat(((FieldDoc) topDocs.scoreDocs[6]).fields[0], equalTo(null));
assertThat(topDocs.scoreDocs[7].doc, equalTo(5));
// assertThat(((FieldDoc) topDocs.scoreDocs[7]).fields[0], equalTo(null));
searcher = new IndexSearcher(DirectoryReader.open(writer, true));
topDocs = searcher.search(new MatchAllDocsQuery(), 10,
new Sort(new SortField("value", indexFieldData.comparatorSource(null, SortMode.SUM)))); // defaults to _last
assertThat(topDocs.totalHits, equalTo(8));
assertThat(topDocs.scoreDocs.length, equalTo(8));
assertThat(topDocs.scoreDocs[0].doc, equalTo(7));
assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).intValue(), equalTo(-27));
assertThat(topDocs.scoreDocs[1].doc, equalTo(2));
assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[1]).fields[0]).intValue(), equalTo(3));
assertThat(topDocs.scoreDocs[2].doc, equalTo(0));
assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[2]).fields[0]).intValue(), equalTo(6));
assertThat(topDocs.scoreDocs[3].doc, equalTo(3));
assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[3]).fields[0]).intValue(), equalTo(15));
assertThat(topDocs.scoreDocs[4].doc, equalTo(4));
assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[4]).fields[0]).intValue(), equalTo(21));
assertThat(topDocs.scoreDocs[5].doc, equalTo(6));
assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[5]).fields[0]).intValue(), equalTo(27));
assertThat(topDocs.scoreDocs[6].doc, equalTo(1));
// assertThat(((FieldDoc) topDocs.scoreDocs[6]).fields[0], equalTo(null));
assertThat(topDocs.scoreDocs[7].doc, equalTo(5));
// assertThat(((FieldDoc) topDocs.scoreDocs[7]).fields[0], equalTo(null));
searcher = new IndexSearcher(DirectoryReader.open(writer, true));
topDocs = searcher.search(new MatchAllDocsQuery(), 10,
new Sort(new SortField("value", indexFieldData.comparatorSource(null, SortMode.SUM), true))); // defaults to _last
assertThat(topDocs.totalHits, equalTo(8));
assertThat(topDocs.scoreDocs.length, equalTo(8));
assertThat(topDocs.scoreDocs[0].doc, equalTo(6));
assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).intValue(), equalTo(27));
assertThat(topDocs.scoreDocs[1].doc, equalTo(4));
assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[1]).fields[0]).intValue(), equalTo(21));
assertThat(topDocs.scoreDocs[2].doc, equalTo(3));
assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[2]).fields[0]).intValue(), equalTo(15));
assertThat(topDocs.scoreDocs[3].doc, equalTo(0));
assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[3]).fields[0]).intValue(), equalTo(6));
assertThat(topDocs.scoreDocs[4].doc, equalTo(2));
assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[4]).fields[0]).intValue(), equalTo(3));
assertThat(topDocs.scoreDocs[5].doc, equalTo(7));
assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[5]).fields[0]).intValue(), equalTo(-27));
assertThat(topDocs.scoreDocs[6].doc, equalTo(1));
// assertThat(((FieldDoc) topDocs.scoreDocs[6]).fields[0], equalTo(null));
assertThat(topDocs.scoreDocs[7].doc, equalTo(5));
// assertThat(((FieldDoc) topDocs.scoreDocs[7]).fields[0], equalTo(null));
searcher = new IndexSearcher(DirectoryReader.open(writer, true));
topDocs = searcher.search(new MatchAllDocsQuery(), 10,
new Sort(new SortField("value", indexFieldData.comparatorSource(null, SortMode.AVG)))); // defaults to _last
assertThat(topDocs.totalHits, equalTo(8));
assertThat(topDocs.scoreDocs.length, equalTo(8));
assertThat(topDocs.scoreDocs[0].doc, equalTo(7));
assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).intValue(), equalTo(-9));
assertThat(topDocs.scoreDocs[1].doc, equalTo(0));
assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[1]).fields[0]).intValue(), equalTo(3));
assertThat(topDocs.scoreDocs[2].doc, equalTo(2));
assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[2]).fields[0]).intValue(), equalTo(3));
assertThat(topDocs.scoreDocs[3].doc, equalTo(3));
assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[3]).fields[0]).intValue(), equalTo(5));
assertThat(topDocs.scoreDocs[4].doc, equalTo(4));
assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[4]).fields[0]).intValue(), equalTo(7));
assertThat(topDocs.scoreDocs[5].doc, equalTo(6));
assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[5]).fields[0]).intValue(), equalTo(9));
assertThat(topDocs.scoreDocs[6].doc, equalTo(1));
// assertThat(((FieldDoc) topDocs.scoreDocs[6]).fields[0], equalTo(null));
assertThat(topDocs.scoreDocs[7].doc, equalTo(5));
// assertThat(((FieldDoc) topDocs.scoreDocs[7]).fields[0], equalTo(null));
searcher = new IndexSearcher(DirectoryReader.open(writer, true));
topDocs = searcher.search(new MatchAllDocsQuery(), 10,
new Sort(new SortField("value", indexFieldData.comparatorSource(null, SortMode.AVG), true))); // defaults to _last
assertThat(topDocs.totalHits, equalTo(8));
assertThat(topDocs.scoreDocs.length, equalTo(8));
assertThat(topDocs.scoreDocs[0].doc, equalTo(6));
assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).intValue(), equalTo(9));
assertThat(topDocs.scoreDocs[1].doc, equalTo(4));
assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[1]).fields[0]).intValue(), equalTo(7));
assertThat(topDocs.scoreDocs[2].doc, equalTo(3));
assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[2]).fields[0]).intValue(), equalTo(5));
assertThat(topDocs.scoreDocs[3].doc, equalTo(0));
assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[3]).fields[0]).intValue(), equalTo(3));
assertThat(topDocs.scoreDocs[4].doc, equalTo(2));
assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[4]).fields[0]).intValue(), equalTo(3));
assertThat(topDocs.scoreDocs[5].doc, equalTo(7));
assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[5]).fields[0]).intValue(), equalTo(-9));
assertThat(topDocs.scoreDocs[6].doc, equalTo(1));
// assertThat(((FieldDoc) topDocs.scoreDocs[6]).fields[0], equalTo(null));
assertThat(topDocs.scoreDocs[7].doc, equalTo(5));
// assertThat(((FieldDoc) topDocs.scoreDocs[7]).fields[0], equalTo(null));
topDocs = searcher.search(new MatchAllDocsQuery(), 10,
new Sort(new SortField("value", indexFieldData.comparatorSource("_first", SortMode.MIN))));
assertThat(topDocs.totalHits, equalTo(8));
assertThat(topDocs.scoreDocs.length, equalTo(8));
assertThat(topDocs.scoreDocs[0].doc, equalTo(1));
assertThat(topDocs.scoreDocs[1].doc, equalTo(5));
assertThat(topDocs.scoreDocs[2].doc, equalTo(7));
assertThat(topDocs.scoreDocs[3].doc, equalTo(0));
assertThat(topDocs.scoreDocs[4].doc, equalTo(2));
assertThat(topDocs.scoreDocs[5].doc, equalTo(3));
assertThat(topDocs.scoreDocs[6].doc, equalTo(4));
assertThat(topDocs.scoreDocs[7].doc, equalTo(6));
topDocs = searcher.search(new MatchAllDocsQuery(), 10,
new Sort(new SortField("value", indexFieldData.comparatorSource("_first", SortMode.MAX), true)));
assertThat(topDocs.totalHits, equalTo(8));
assertThat(topDocs.scoreDocs.length, equalTo(8));
assertThat(topDocs.scoreDocs[0].doc, equalTo(1));
assertThat(topDocs.scoreDocs[1].doc, equalTo(5));
assertThat(topDocs.scoreDocs[2].doc, equalTo(6));
assertThat(topDocs.scoreDocs[3].doc, equalTo(4));
assertThat(topDocs.scoreDocs[4].doc, equalTo(3));
assertThat(topDocs.scoreDocs[5].doc, equalTo(0));
assertThat(topDocs.scoreDocs[6].doc, equalTo(2));
assertThat(topDocs.scoreDocs[7].doc, equalTo(7));
topDocs = searcher.search(new MatchAllDocsQuery(), 10,
new Sort(new SortField("value", indexFieldData.comparatorSource("-9", SortMode.MIN))));
assertThat(topDocs.totalHits, equalTo(8));
assertThat(topDocs.scoreDocs.length, equalTo(8));
assertThat(topDocs.scoreDocs[0].doc, equalTo(7));
assertThat(topDocs.scoreDocs[1].doc, equalTo(1));
assertThat(topDocs.scoreDocs[2].doc, equalTo(5));
assertThat(topDocs.scoreDocs[3].doc, equalTo(0));
assertThat(topDocs.scoreDocs[4].doc, equalTo(2));
assertThat(topDocs.scoreDocs[5].doc, equalTo(3));
assertThat(topDocs.scoreDocs[6].doc, equalTo(4));
assertThat(topDocs.scoreDocs[7].doc, equalTo(6));
topDocs = searcher.search(new MatchAllDocsQuery(), 10,
new Sort(new SortField("value", indexFieldData.comparatorSource("9", SortMode.MAX), true)));
assertThat(topDocs.totalHits, equalTo(8));
assertThat(topDocs.scoreDocs.length, equalTo(8));
assertThat(topDocs.scoreDocs[0].doc, equalTo(6));
assertThat(topDocs.scoreDocs[1].doc, equalTo(1));
assertThat(topDocs.scoreDocs[2].doc, equalTo(5));
assertThat(topDocs.scoreDocs[3].doc, equalTo(4));
assertThat(topDocs.scoreDocs[4].doc, equalTo(3));
assertThat(topDocs.scoreDocs[5].doc, equalTo(0));
assertThat(topDocs.scoreDocs[6].doc, equalTo(2));
assertThat(topDocs.scoreDocs[7].doc, equalTo(7));
}
} | 0true
| src_test_java_org_elasticsearch_index_fielddata_AbstractNumericFieldDataTests.java |
2,090 | public class PutAllOperation extends AbstractMapOperation implements PartitionAwareOperation, BackupAwareOperation {
private MapEntrySet entrySet;
private boolean initialLoad = false;
private List<Map.Entry<Data, Data>> backupEntrySet;
private List<RecordInfo> backupRecordInfos;
public PutAllOperation(String name, MapEntrySet entrySet) {
super(name);
this.entrySet = entrySet;
}
public PutAllOperation(String name, MapEntrySet entrySet, boolean initialLoad) {
super(name);
this.entrySet = entrySet;
this.initialLoad = initialLoad;
}
public PutAllOperation() {
}
public void run() {
backupRecordInfos = new ArrayList<RecordInfo>();
backupEntrySet = new ArrayList<Map.Entry<Data, Data>>();
int partitionId = getPartitionId();
RecordStore recordStore = mapService.getRecordStore(partitionId, name);
Set<Map.Entry<Data, Data>> entries = entrySet.getEntrySet();
InternalPartitionService partitionService = getNodeEngine().getPartitionService();
Set<Data> keysToInvalidate = new HashSet<Data>();
for (Map.Entry<Data, Data> entry : entries) {
Data dataKey = entry.getKey();
Data dataValue = entry.getValue();
if (partitionId == partitionService.getPartitionId(dataKey)) {
Data dataOldValue = null;
if (initialLoad) {
recordStore.putFromLoad(dataKey, dataValue, -1);
} else {
dataOldValue = mapService.toData(recordStore.put(dataKey, dataValue, -1));
}
mapService.interceptAfterPut(name, dataValue);
EntryEventType eventType = dataOldValue == null ? EntryEventType.ADDED : EntryEventType.UPDATED;
mapService.publishEvent(getCallerAddress(), name, eventType, dataKey, dataOldValue, dataValue);
keysToInvalidate.add(dataKey);
if (mapContainer.getWanReplicationPublisher() != null && mapContainer.getWanMergePolicy() != null) {
Record record = recordStore.getRecord(dataKey);
final SimpleEntryView entryView = mapService.createSimpleEntryView(dataKey, mapService.toData(dataValue), record);
mapService.publishWanReplicationUpdate(name, entryView);
}
backupEntrySet.add(entry);
RecordInfo replicationInfo = mapService.createRecordInfo(recordStore.getRecord(dataKey));
backupRecordInfos.add(replicationInfo);
}
}
invalidateNearCaches(keysToInvalidate);
}
protected final void invalidateNearCaches(Set<Data> keys) {
if (mapService.isNearCacheAndInvalidationEnabled(name)) {
mapService.invalidateAllNearCaches(name, keys);
}
}
@Override
public Object getResponse() {
return true;
}
@Override
public String toString() {
return "PutAllOperation{" +
'}';
}
@Override
protected void writeInternal(ObjectDataOutput out) throws IOException {
super.writeInternal(out);
out.writeObject(entrySet);
out.writeBoolean(initialLoad);
}
@Override
protected void readInternal(ObjectDataInput in) throws IOException {
super.readInternal(in);
entrySet = in.readObject();
initialLoad = in.readBoolean();
}
@Override
public boolean shouldBackup() {
return !backupEntrySet.isEmpty();
}
public final int getAsyncBackupCount() {
return mapContainer.getAsyncBackupCount();
}
public final int getSyncBackupCount() {
return mapContainer.getBackupCount();
}
@Override
public Operation getBackupOperation() {
return new PutAllBackupOperation(name, backupEntrySet, backupRecordInfos);
}
} | 1no label
| hazelcast_src_main_java_com_hazelcast_map_operation_PutAllOperation.java |
1,490 | @RunWith(HazelcastSerialClassRunner.class)
@Category(QuickTest.class)
public class HibernateSerializationHookNonAvailableTest {
private static final Field ORIGINAL;
private static final Field TYPE_MAP;
private static final Method GET_SERIALIZATION_SERVICE;
private static final ClassLoader FILTERING_CLASS_LOADER;
static {
try {
List<String> excludes = Arrays.asList(new String[]{"org.hibernate"});
FILTERING_CLASS_LOADER = new FilteringClassLoader(excludes, "com.hazelcast");
String hazelcastInstanceImplClassName = "com.hazelcast.instance.HazelcastInstanceImpl";
Class<?> hazelcastInstanceImplClass = FILTERING_CLASS_LOADER.loadClass(hazelcastInstanceImplClassName);
GET_SERIALIZATION_SERVICE = hazelcastInstanceImplClass.getMethod("getSerializationService");
String hazelcastInstanceProxyClassName = "com.hazelcast.instance.HazelcastInstanceProxy";
Class<?> hazelcastInstanceProxyClass = FILTERING_CLASS_LOADER.loadClass(hazelcastInstanceProxyClassName);
ORIGINAL = hazelcastInstanceProxyClass.getDeclaredField("original");
ORIGINAL.setAccessible(true);
String serializationServiceImplClassName = "com.hazelcast.nio.serialization.SerializationServiceImpl";
Class<?> serializationServiceImplClass = FILTERING_CLASS_LOADER.loadClass(serializationServiceImplClassName);
TYPE_MAP = serializationServiceImplClass.getDeclaredField("typeMap");
TYPE_MAP.setAccessible(true);
} catch (Exception e) {
throw new RuntimeException(e);
}
}
@Test
public void testAutoregistrationOnHibernate4NonAvailable()
throws Exception {
Thread thread = Thread.currentThread();
ClassLoader tccl = thread.getContextClassLoader();
try {
thread.setContextClassLoader(FILTERING_CLASS_LOADER);
Class<?> configClazz = FILTERING_CLASS_LOADER.loadClass("com.hazelcast.config.Config");
Object config = configClazz.newInstance();
Method setClassLoader = configClazz.getDeclaredMethod("setClassLoader", ClassLoader.class);
setClassLoader.invoke(config, FILTERING_CLASS_LOADER);
Class<?> hazelcastClazz = FILTERING_CLASS_LOADER.loadClass("com.hazelcast.core.Hazelcast");
Method newHazelcastInstance = hazelcastClazz.getDeclaredMethod("newHazelcastInstance", configClazz);
Object hz = newHazelcastInstance.invoke(hazelcastClazz, config);
Object impl = ORIGINAL.get(hz);
Object serializationService = GET_SERIALIZATION_SERVICE.invoke(impl);
ConcurrentMap<Class, ?> typeMap = (ConcurrentMap<Class, ?>) TYPE_MAP.get(serializationService);
boolean cacheKeySerializerFound = false;
boolean cacheEntrySerializerFound = false;
for (Class clazz : typeMap.keySet()) {
if (clazz == CacheKey.class) {
cacheKeySerializerFound = true;
} else if (clazz == CacheEntry.class) {
cacheEntrySerializerFound = true;
}
}
assertFalse("CacheKey serializer found", cacheKeySerializerFound);
assertFalse("CacheEntry serializer found", cacheEntrySerializerFound);
} finally {
thread.setContextClassLoader(tccl);
}
}
} | 0true
| hazelcast-hibernate_hazelcast-hibernate4_src_test_java_com_hazelcast_hibernate_serialization_HibernateSerializationHookNonAvailableTest.java |
/**
 * Binding annotation that distinguishes multiple bindings of the same type
 * by a string name. Usable on fields, parameters, and methods.
 */
@Retention(RUNTIME)
@Target({ElementType.FIELD, ElementType.PARAMETER, ElementType.METHOD})
@BindingAnnotation
public @interface Named {
    /** The name identifying this binding. */
    String value();
}
| src_main_java_org_elasticsearch_common_inject_name_Named.java |
/**
 * Callable that flushes every dirty write group belonging to one file to
 * disk, then syncs the file. Groups whose pages are currently locked by
 * other users are skipped (left dirty for a later flush) rather than
 * blocked on.
 */
private final class FileFlushTask implements Callable<Void> {
// id of the file whose write groups are to be flushed
private final long fileId;
private FileFlushTask(long fileId) {
this.fileId = fileId;
}
@Override
public Void call() throws Exception {
// keys order by (fileId, groupIndex), so this inclusive sub-map covers
// exactly the write groups of this file
final GroupKey firstKey = new GroupKey(fileId, 0);
final GroupKey lastKey = new GroupKey(fileId, Long.MAX_VALUE);
NavigableMap<GroupKey, WriteGroup> subMap = writeGroups.subMap(firstKey, true, lastKey, true);
Iterator<Map.Entry<GroupKey, WriteGroup>> entryIterator = subMap.entrySet().iterator();
groupsLoop: while (entryIterator.hasNext()) {
Map.Entry<GroupKey, WriteGroup> entry = entryIterator.next();
final WriteGroup writeGroup = entry.getValue();
final GroupKey groupKey = entry.getKey();
// exclusive group lock: no one may add/remove pages of this group
// while it is being flushed
lockManager.acquireLock(Thread.currentThread(), groupKey, OLockManager.LOCK.EXCLUSIVE);
try {
int flushedPages = 0;
// a write group holds up to 16 consecutive pages of the file
for (int i = 0; i < 16; i++) {
OCachePointer pagePointer = writeGroup.pages[i];
if (pagePointer != null) {
// if any page is in use, abandon the whole group (it stays
// in the dirty map) and move on to the next group
if (!pagePointer.tryAcquireExclusiveLock())
continue groupsLoop;
try {
// absolute page index = groupIndex * 16 + slot within group
flushPage(groupKey.fileId, (groupKey.groupIndex << 4) + i, pagePointer.getDataPointer());
flushedPages++;
} finally {
pagePointer.releaseExclusiveLock();
}
}
}
// every page of the group was flushed: drop the cache references,
// shrink the cache size counter, and remove the group from the
// dirty map (still under the group lock)
for (OCachePointer pagePointer : writeGroup.pages)
if (pagePointer != null)
pagePointer.decrementReferrer();
cacheSize.addAndGet(-flushedPages);
entryIterator.remove();
} finally {
lockManager.releaseLock(Thread.currentThread(), entry.getKey(), OLockManager.LOCK.EXCLUSIVE);
}
}
// durably persist the flushed pages
files.get(fileId).synch();
return null;
}
} | 0true
| core_src_main_java_com_orientechnologies_orient_core_index_hashindex_local_cache_OWOWCache.java |
432 | public class ClusterStatsIndices implements ToXContent, Streamable {
private int indexCount;
private ShardStats shards;
private DocsStats docs;
private StoreStats store;
private FieldDataStats fieldData;
private FilterCacheStats filterCache;
private IdCacheStats idCache;
private CompletionStats completion;
private SegmentsStats segments;
private PercolateStats percolate;
private ClusterStatsIndices() {
}
public ClusterStatsIndices(ClusterStatsNodeResponse[] nodeResponses) {
ObjectObjectOpenHashMap<String, ShardStats> countsPerIndex = new ObjectObjectOpenHashMap<String, ShardStats>();
this.docs = new DocsStats();
this.store = new StoreStats();
this.fieldData = new FieldDataStats();
this.filterCache = new FilterCacheStats();
this.idCache = new IdCacheStats();
this.completion = new CompletionStats();
this.segments = new SegmentsStats();
this.percolate = new PercolateStats();
for (ClusterStatsNodeResponse r : nodeResponses) {
for (org.elasticsearch.action.admin.indices.stats.ShardStats shardStats : r.shardsStats()) {
ShardStats indexShardStats = countsPerIndex.get(shardStats.getIndex());
if (indexShardStats == null) {
indexShardStats = new ShardStats();
countsPerIndex.put(shardStats.getIndex(), indexShardStats);
}
indexShardStats.total++;
CommonStats shardCommonStats = shardStats.getStats();
if (shardStats.getShardRouting().primary()) {
indexShardStats.primaries++;
docs.add(shardCommonStats.docs);
}
store.add(shardCommonStats.store);
fieldData.add(shardCommonStats.fieldData);
filterCache.add(shardCommonStats.filterCache);
idCache.add(shardCommonStats.idCache);
completion.add(shardCommonStats.completion);
segments.add(shardCommonStats.segments);
percolate.add(shardCommonStats.percolate);
}
}
shards = new ShardStats();
indexCount = countsPerIndex.size();
for (ObjectObjectCursor<String, ShardStats> indexCountsCursor : countsPerIndex) {
shards.addIndexShardCount(indexCountsCursor.value);
}
}
public int getIndexCount() {
return indexCount;
}
public ShardStats getShards() {
return this.shards;
}
public DocsStats getDocs() {
return docs;
}
public StoreStats getStore() {
return store;
}
public FieldDataStats getFieldData() {
return fieldData;
}
public FilterCacheStats getFilterCache() {
return filterCache;
}
public IdCacheStats getIdCache() {
return idCache;
}
public CompletionStats getCompletion() {
return completion;
}
public SegmentsStats getSegments() {
return segments;
}
public PercolateStats getPercolate() {
return percolate;
}
@Override
public void readFrom(StreamInput in) throws IOException {
indexCount = in.readVInt();
shards = ShardStats.readShardStats(in);
docs = DocsStats.readDocStats(in);
store = StoreStats.readStoreStats(in);
fieldData = FieldDataStats.readFieldDataStats(in);
filterCache = FilterCacheStats.readFilterCacheStats(in);
idCache = IdCacheStats.readIdCacheStats(in);
completion = CompletionStats.readCompletionStats(in);
segments = SegmentsStats.readSegmentsStats(in);
percolate = PercolateStats.readPercolateStats(in);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeVInt(indexCount);
shards.writeTo(out);
docs.writeTo(out);
store.writeTo(out);
fieldData.writeTo(out);
filterCache.writeTo(out);
idCache.writeTo(out);
completion.writeTo(out);
segments.writeTo(out);
percolate.writeTo(out);
}
public static ClusterStatsIndices readIndicesStats(StreamInput in) throws IOException {
ClusterStatsIndices indicesStats = new ClusterStatsIndices();
indicesStats.readFrom(in);
return indicesStats;
}
static final class Fields {
static final XContentBuilderString COUNT = new XContentBuilderString("count");
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.field(Fields.COUNT, indexCount);
shards.toXContent(builder, params);
docs.toXContent(builder, params);
store.toXContent(builder, params);
fieldData.toXContent(builder, params);
filterCache.toXContent(builder, params);
idCache.toXContent(builder, params);
completion.toXContent(builder, params);
segments.toXContent(builder, params);
percolate.toXContent(builder, params);
return builder;
}
public static class ShardStats implements ToXContent, Streamable {
int indices;
int total;
int primaries;
// min/max
int minIndexShards = -1;
int maxIndexShards = -1;
int minIndexPrimaryShards = -1;
int maxIndexPrimaryShards = -1;
double minIndexReplication = -1;
double totalIndexReplication = 0;
double maxIndexReplication = -1;
public ShardStats() {
}
/**
* number of indices in the cluster
*/
public int getIndices() {
return this.indices;
}
/**
* total number of shards in the cluster
*/
public int getTotal() {
return this.total;
}
/**
* total number of primary shards in the cluster
*/
public int getPrimaries() {
return this.primaries;
}
/**
* returns how many *redundant* copies of the data the cluster holds - running with no replicas will return 0
*/
public double getReplication() {
if (primaries == 0) {
return 0;
}
return (((double) (total - primaries)) / primaries);
}
/**
* the maximum number of shards (primary+replicas) an index has
*/
public int getMaxIndexShards() {
return this.maxIndexShards;
}
/**
* the minimum number of shards (primary+replicas) an index has
*/
public int getMinIndexShards() {
return this.minIndexShards;
}
/**
* average number of shards (primary+replicas) across the indices
*/
public double getAvgIndexShards() {
if (this.indices == 0) {
return -1;
}
return ((double) this.total) / this.indices;
}
/**
* the maximum number of primary shards an index has
*/
public int getMaxIndexPrimaryShards() {
return this.maxIndexPrimaryShards;
}
/**
* the minimum number of primary shards an index has
*/
public int getMinIndexPrimaryShards() {
return this.minIndexPrimaryShards;
}
/**
* the average number primary shards across the indices
*/
public double getAvgIndexPrimaryShards() {
if (this.indices == 0) {
return -1;
}
return ((double) this.primaries) / this.indices;
}
/**
* minimum replication factor across the indices. See {@link #getReplication}
*/
public double getMinIndexReplication() {
return this.minIndexReplication;
}
/**
* average replication factor across the indices. See {@link #getReplication}
*/
public double getAvgIndexReplication() {
if (indices == 0) {
return -1;
}
return this.totalIndexReplication / this.indices;
}
/**
* maximum replication factor across the indices. See {@link #getReplication
*/
public double getMaxIndexReplication() {
return this.maxIndexReplication;
}
public void addIndexShardCount(ShardStats indexShardCount) {
this.indices++;
this.primaries += indexShardCount.primaries;
this.total += indexShardCount.total;
this.totalIndexReplication += indexShardCount.getReplication();
if (this.indices == 1) {
// first index, uninitialized.
minIndexPrimaryShards = indexShardCount.primaries;
maxIndexPrimaryShards = indexShardCount.primaries;
minIndexShards = indexShardCount.total;
maxIndexShards = indexShardCount.total;
minIndexReplication = indexShardCount.getReplication();
maxIndexReplication = minIndexReplication;
} else {
minIndexShards = Math.min(minIndexShards, indexShardCount.total);
minIndexPrimaryShards = Math.min(minIndexPrimaryShards, indexShardCount.primaries);
minIndexReplication = Math.min(minIndexReplication, indexShardCount.getReplication());
maxIndexShards = Math.max(maxIndexShards, indexShardCount.total);
maxIndexPrimaryShards = Math.max(maxIndexPrimaryShards, indexShardCount.primaries);
maxIndexReplication = Math.max(maxIndexReplication, indexShardCount.getReplication());
}
}
public static ShardStats readShardStats(StreamInput in) throws IOException {
ShardStats c = new ShardStats();
c.readFrom(in);
return c;
}
@Override
public void readFrom(StreamInput in) throws IOException {
indices = in.readVInt();
total = in.readVInt();
primaries = in.readVInt();
minIndexShards = in.readVInt();
maxIndexShards = in.readVInt();
minIndexPrimaryShards = in.readVInt();
maxIndexPrimaryShards = in.readVInt();
minIndexReplication = in.readDouble();
totalIndexReplication = in.readDouble();
maxIndexReplication = in.readDouble();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeVInt(indices);
out.writeVInt(total);
out.writeVInt(primaries);
out.writeVInt(minIndexShards);
out.writeVInt(maxIndexShards);
out.writeVInt(minIndexPrimaryShards);
out.writeVInt(maxIndexPrimaryShards);
out.writeDouble(minIndexReplication);
out.writeDouble(totalIndexReplication);
out.writeDouble(maxIndexReplication);
}
static final class Fields {
static final XContentBuilderString SHARDS = new XContentBuilderString("shards");
static final XContentBuilderString TOTAL = new XContentBuilderString("total");
static final XContentBuilderString PRIMARIES = new XContentBuilderString("primaries");
static final XContentBuilderString REPLICATION = new XContentBuilderString("replication");
static final XContentBuilderString MIN = new XContentBuilderString("min");
static final XContentBuilderString MAX = new XContentBuilderString("max");
static final XContentBuilderString AVG = new XContentBuilderString("avg");
static final XContentBuilderString INDEX = new XContentBuilderString("index");
}
private void addIntMinMax(XContentBuilderString field, int min, int max, double avg, XContentBuilder builder) throws IOException {
builder.startObject(field);
builder.field(Fields.MIN, min);
builder.field(Fields.MAX, max);
builder.field(Fields.AVG, avg);
builder.endObject();
}
private void addDoubleMinMax(XContentBuilderString field, double min, double max, double avg, XContentBuilder builder) throws IOException {
builder.startObject(field);
builder.field(Fields.MIN, min);
builder.field(Fields.MAX, max);
builder.field(Fields.AVG, avg);
builder.endObject();
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(Fields.SHARDS);
if (indices > 0) {
builder.field(Fields.TOTAL, total);
builder.field(Fields.PRIMARIES, primaries);
builder.field(Fields.REPLICATION, getReplication());
builder.startObject(Fields.INDEX);
addIntMinMax(Fields.SHARDS, minIndexShards, maxIndexShards, getAvgIndexShards(), builder);
addIntMinMax(Fields.PRIMARIES, minIndexPrimaryShards, maxIndexPrimaryShards, getAvgIndexPrimaryShards(), builder);
addDoubleMinMax(Fields.REPLICATION, minIndexReplication, maxIndexReplication, getAvgIndexReplication(), builder);
builder.endObject();
}
builder.endObject();
return builder;
}
@Override
public String toString() {
return "total [" + total + "] primaries [" + primaries + "]";
}
}
} | 0true
| src_main_java_org_elasticsearch_action_admin_cluster_stats_ClusterStatsIndices.java |
194 | private class LockStressor implements Runnable {
private final KeyColumnValueStoreManager manager;
private final KeyColumnValueStore store;
private final CountDownLatch doneLatch;
private final int opCount;
private final StaticBuffer toLock;
private int succeeded = 0;
private int temporaryFailures = 0;
private LockStressor(KeyColumnValueStoreManager manager,
KeyColumnValueStore store, CountDownLatch doneLatch, int opCount, StaticBuffer toLock) {
this.manager = manager;
this.store = store;
this.doneLatch = doneLatch;
this.opCount = opCount;
this.toLock = toLock;
}
@Override
public void run() {
// Catch & log exceptions
for (int opIndex = 0; opIndex < opCount; opIndex++) {
StoreTransaction tx = null;
try {
tx = newTransaction(manager);
store.acquireLock(toLock, toLock, null, tx);
store.mutate(toLock, ImmutableList.<Entry>of(), Arrays.asList(toLock), tx);
tx.commit();
succeeded++;
} catch (TemporaryLockingException e) {
temporaryFailures++;
} catch (Throwable t) {
log.error("Unexpected locking-related exception on iteration " + (opIndex + 1) + "/" + opCount, t);
}
}
/*
* This latch is the only thing guaranteeing that succeeded's true
* value is observable by other threads once we're done with run()
* and the latch's await() method returns.
*/
doneLatch.countDown();
}
} | 0true
| titan-test_src_main_java_com_thinkaurelius_titan_diskstorage_LockKeyColumnValueStoreTest.java |
263 | public class EmbeddedMultiWriteStoreTest extends MultiWriteKeyColumnValueStoreTest {
@Override
public KeyColumnValueStoreManager openStorageManager() throws BackendException {
return new CassandraEmbeddedStoreManager(CassandraStorageSetup.getEmbeddedConfiguration(getClass().getSimpleName()));
}
} | 0true
| titan-cassandra_src_test_java_com_thinkaurelius_titan_diskstorage_cassandra_embedded_EmbeddedMultiWriteStoreTest.java |
872 | public class TransportSearchQueryAndFetchAction extends TransportSearchTypeAction {
@Inject
public TransportSearchQueryAndFetchAction(Settings settings, ThreadPool threadPool, ClusterService clusterService,
SearchServiceTransportAction searchService, SearchPhaseController searchPhaseController) {
super(settings, threadPool, clusterService, searchService, searchPhaseController);
}
@Override
protected void doExecute(SearchRequest searchRequest, ActionListener<SearchResponse> listener) {
new AsyncAction(searchRequest, listener).start();
}
private class AsyncAction extends BaseAsyncAction<QueryFetchSearchResult> {
private AsyncAction(SearchRequest request, ActionListener<SearchResponse> listener) {
super(request, listener);
}
@Override
protected String firstPhaseName() {
return "query_fetch";
}
@Override
protected void sendExecuteFirstPhase(DiscoveryNode node, ShardSearchRequest request, SearchServiceListener<QueryFetchSearchResult> listener) {
searchService.sendExecuteFetch(node, request, listener);
}
@Override
protected void moveToSecondPhase() throws Exception {
try {
innerFinishHim();
} catch (Throwable e) {
ReduceSearchPhaseException failure = new ReduceSearchPhaseException("merge", "", e, buildShardFailures());
if (logger.isDebugEnabled()) {
logger.debug("failed to reduce search", failure);
}
listener.onFailure(failure);
}
}
private void innerFinishHim() throws IOException {
sortedShardList = searchPhaseController.sortDocs(firstResults);
final InternalSearchResponse internalResponse = searchPhaseController.merge(sortedShardList, firstResults, firstResults);
String scrollId = null;
if (request.scroll() != null) {
scrollId = buildScrollId(request.searchType(), firstResults, null);
}
listener.onResponse(new SearchResponse(internalResponse, scrollId, expectedSuccessfulOps, successulOps.get(), buildTookInMillis(), buildShardFailures()));
}
}
} | 0true
| src_main_java_org_elasticsearch_action_search_type_TransportSearchQueryAndFetchAction.java |
3,660 | public class BoostFieldMapper extends NumberFieldMapper<Float> implements InternalMapper, RootMapper {
public static final String CONTENT_TYPE = "_boost";
public static final String NAME = "_boost";
public static class Defaults extends NumberFieldMapper.Defaults {
public static final String NAME = "_boost";
public static final Float NULL_VALUE = null;
public static final FieldType FIELD_TYPE = new FieldType(NumberFieldMapper.Defaults.FIELD_TYPE);
static {
FIELD_TYPE.setIndexed(false);
FIELD_TYPE.setStored(false);
}
}
public static class Builder extends NumberFieldMapper.Builder<Builder, BoostFieldMapper> {
protected Float nullValue = Defaults.NULL_VALUE;
public Builder(String name) {
super(name, new FieldType(Defaults.FIELD_TYPE));
builder = this;
}
public Builder nullValue(float nullValue) {
this.nullValue = nullValue;
return this;
}
@Override
public BoostFieldMapper build(BuilderContext context) {
return new BoostFieldMapper(name, buildIndexName(context),
precisionStep, boost, fieldType, docValues, nullValue, postingsProvider, docValuesProvider, fieldDataSettings, context.indexSettings());
}
}
public static class TypeParser implements Mapper.TypeParser {
@Override
public Mapper.Builder parse(String fieldName, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
String name = node.get("name") == null ? BoostFieldMapper.Defaults.NAME : node.get("name").toString();
BoostFieldMapper.Builder builder = MapperBuilders.boost(name);
parseNumberField(builder, name, node, parserContext);
for (Map.Entry<String, Object> entry : node.entrySet()) {
String propName = Strings.toUnderscoreCase(entry.getKey());
Object propNode = entry.getValue();
if (propName.equals("null_value")) {
builder.nullValue(nodeFloatValue(propNode));
}
}
return builder;
}
}
private final Float nullValue;
public BoostFieldMapper() {
this(Defaults.NAME, Defaults.NAME);
}
protected BoostFieldMapper(String name, String indexName) {
this(name, indexName, Defaults.PRECISION_STEP, Defaults.BOOST, new FieldType(Defaults.FIELD_TYPE), null,
Defaults.NULL_VALUE, null, null, null, ImmutableSettings.EMPTY);
}
protected BoostFieldMapper(String name, String indexName, int precisionStep, float boost, FieldType fieldType, Boolean docValues, Float nullValue,
PostingsFormatProvider postingsProvider, DocValuesFormatProvider docValuesProvider, @Nullable Settings fieldDataSettings, Settings indexSettings) {
super(new Names(name, indexName, indexName, name), precisionStep, boost, fieldType, docValues, Defaults.IGNORE_MALFORMED, Defaults.COERCE,
NumericFloatAnalyzer.buildNamedAnalyzer(precisionStep), NumericFloatAnalyzer.buildNamedAnalyzer(Integer.MAX_VALUE),
postingsProvider, docValuesProvider, null, null, fieldDataSettings, indexSettings, MultiFields.empty(), null);
this.nullValue = nullValue;
}
@Override
public FieldType defaultFieldType() {
return Defaults.FIELD_TYPE;
}
@Override
public FieldDataType defaultFieldDataType() {
return new FieldDataType("float");
}
@Override
public boolean hasDocValues() {
return false;
}
@Override
protected int maxPrecisionStep() {
return 32;
}
@Override
public Float value(Object value) {
if (value == null) {
return null;
}
if (value instanceof Number) {
return ((Number) value).floatValue();
}
if (value instanceof BytesRef) {
return Numbers.bytesToFloat((BytesRef) value);
}
return Float.parseFloat(value.toString());
}
@Override
public BytesRef indexedValueForSearch(Object value) {
int intValue = NumericUtils.floatToSortableInt(parseValue(value));
BytesRef bytesRef = new BytesRef();
NumericUtils.intToPrefixCoded(intValue, precisionStep(), bytesRef);
return bytesRef;
}
private float parseValue(Object value) {
if (value instanceof Number) {
return ((Number) value).floatValue();
}
if (value instanceof BytesRef) {
return Float.parseFloat(((BytesRef) value).utf8ToString());
}
return Float.parseFloat(value.toString());
}
@Override
public Query fuzzyQuery(String value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) {
float iValue = Float.parseFloat(value);
float iSim = fuzziness.asFloat();
return NumericRangeQuery.newFloatRange(names.indexName(), precisionStep,
iValue - iSim,
iValue + iSim,
true, true);
}
@Override
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) {
return NumericRangeQuery.newFloatRange(names.indexName(), precisionStep,
lowerTerm == null ? null : parseValue(lowerTerm),
upperTerm == null ? null : parseValue(upperTerm),
includeLower, includeUpper);
}
@Override
public Filter rangeFilter(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) {
return NumericRangeFilter.newFloatRange(names.indexName(), precisionStep,
lowerTerm == null ? null : parseValue(lowerTerm),
upperTerm == null ? null : parseValue(upperTerm),
includeLower, includeUpper);
}
@Override
public Filter rangeFilter(IndexFieldDataService fieldData, Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) {
return NumericRangeFieldDataFilter.newFloatRange((IndexNumericFieldData) fieldData.getForField(this),
lowerTerm == null ? null : parseValue(lowerTerm),
upperTerm == null ? null : parseValue(upperTerm),
includeLower, includeUpper);
}
@Override
public Filter nullValueFilter() {
if (nullValue == null) {
return null;
}
return NumericRangeFilter.newFloatRange(names.indexName(), precisionStep,
nullValue,
nullValue,
true, true);
}
@Override
public void preParse(ParseContext context) throws IOException {
}
@Override
public void postParse(ParseContext context) throws IOException {
}
@Override
public void validate(ParseContext context) throws MapperParsingException {
}
@Override
public boolean includeInObject() {
return true;
}
@Override
public void parse(ParseContext context) throws IOException {
// we override parse since we want to handle cases where it is not indexed and not stored (the default)
float value = parseFloatValue(context);
if (!Float.isNaN(value)) {
context.docBoost(value);
}
super.parse(context);
}
@Override
protected void innerParseCreateField(ParseContext context, List<Field> fields) throws IOException {
final float value = parseFloatValue(context);
if (Float.isNaN(value)) {
return;
}
context.docBoost(value);
fields.add(new FloatFieldMapper.CustomFloatNumericField(this, value, fieldType));
}
private float parseFloatValue(ParseContext context) throws IOException {
float value;
if (context.parser().currentToken() == XContentParser.Token.VALUE_NULL) {
if (nullValue == null) {
return Float.NaN;
}
value = nullValue;
} else {
value = context.parser().floatValue(coerce.value());
}
return value;
}
@Override
protected String contentType() {
return CONTENT_TYPE;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
boolean includeDefaults = params.paramAsBoolean("include_defaults", false);
// all are defaults, don't write it at all
if (!includeDefaults && name().equals(Defaults.NAME) && nullValue == null &&
fieldType.indexed() == Defaults.FIELD_TYPE.indexed() &&
fieldType.stored() == Defaults.FIELD_TYPE.stored() &&
customFieldDataSettings == null) {
return builder;
}
builder.startObject(contentType());
if (includeDefaults || !name().equals(Defaults.NAME)) {
builder.field("name", name());
}
if (includeDefaults || nullValue != null) {
builder.field("null_value", nullValue);
}
if (includeDefaults || fieldType.indexed() != Defaults.FIELD_TYPE.indexed()) {
builder.field("index", fieldType.indexed());
}
if (includeDefaults || fieldType.stored() != Defaults.FIELD_TYPE.stored()) {
builder.field("store", fieldType.stored());
}
if (customFieldDataSettings != null) {
builder.field("fielddata", (Map) customFieldDataSettings.getAsMap());
} else if (includeDefaults) {
builder.field("fielddata", (Map) fieldDataType.getSettings().getAsMap());
}
builder.endObject();
return builder;
}
@Override
public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
// do nothing here, no merging, but also no exception
}
} | 1no label
| src_main_java_org_elasticsearch_index_mapper_internal_BoostFieldMapper.java |
524 | Runnable incrementor = new Runnable() {
public void run() {
try {
getKeyForUpdateLatch.await(30, TimeUnit.SECONDS);
boolean result = map.tryPut(key, value, 0, TimeUnit.SECONDS);
tryPutResult.set(result);
afterTryPutResult.countDown();
} catch (Exception e) {
}
}
}; | 0true
| hazelcast-client_src_test_java_com_hazelcast_client_txn_ClientTxnMapTest.java |
3,752 | public class MergeStats implements Streamable, ToXContent {
private long total;
private long totalTimeInMillis;
private long totalNumDocs;
private long totalSizeInBytes;
private long current;
private long currentNumDocs;
private long currentSizeInBytes;
public MergeStats() {
}
public void add(long totalMerges, long totalMergeTime, long totalNumDocs, long totalSizeInBytes, long currentMerges, long currentNumDocs, long currentSizeInBytes) {
this.total += totalMerges;
this.totalTimeInMillis += totalMergeTime;
this.totalNumDocs += totalNumDocs;
this.totalSizeInBytes += totalSizeInBytes;
this.current += currentMerges;
this.currentNumDocs += currentNumDocs;
this.currentSizeInBytes += currentSizeInBytes;
}
public void add(MergeStats mergeStats) {
if (mergeStats == null) {
return;
}
this.total += mergeStats.total;
this.totalTimeInMillis += mergeStats.totalTimeInMillis;
this.totalNumDocs += mergeStats.totalNumDocs;
this.totalSizeInBytes += mergeStats.totalSizeInBytes;
this.current += mergeStats.current;
this.currentNumDocs += mergeStats.currentNumDocs;
this.currentSizeInBytes += mergeStats.currentSizeInBytes;
}
/**
* The total number of merges executed.
*/
public long getTotal() {
return this.total;
}
/**
* The total time merges have been executed (in milliseconds).
*/
public long getTotalTimeInMillis() {
return this.totalTimeInMillis;
}
/**
* The total time merges have been executed.
*/
public TimeValue getTotalTime() {
return new TimeValue(totalTimeInMillis);
}
public long getTotalNumDocs() {
return this.totalNumDocs;
}
public long getTotalSizeInBytes() {
return this.totalSizeInBytes;
}
public ByteSizeValue getTotalSize() {
return new ByteSizeValue(totalSizeInBytes);
}
/**
* The current number of merges executing.
*/
public long getCurrent() {
return this.current;
}
public long getCurrentNumDocs() {
return this.currentNumDocs;
}
public long getCurrentSizeInBytes() {
return this.currentSizeInBytes;
}
public ByteSizeValue getCurrentSize() {
return new ByteSizeValue(currentSizeInBytes);
}
public static MergeStats readMergeStats(StreamInput in) throws IOException {
MergeStats stats = new MergeStats();
stats.readFrom(in);
return stats;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(Fields.MERGES);
builder.field(Fields.CURRENT, current);
builder.field(Fields.CURRENT_DOCS, currentNumDocs);
builder.byteSizeField(Fields.CURRENT_SIZE_IN_BYTES, Fields.CURRENT_SIZE, currentSizeInBytes);
builder.field(Fields.TOTAL, total);
builder.timeValueField(Fields.TOTAL_TIME_IN_MILLIS, Fields.TOTAL_TIME, totalTimeInMillis);
builder.field(Fields.TOTAL_DOCS, totalNumDocs);
builder.byteSizeField(Fields.TOTAL_SIZE_IN_BYTES, Fields.TOTAL_SIZE, totalSizeInBytes);
builder.endObject();
return builder;
}
static final class Fields {
static final XContentBuilderString MERGES = new XContentBuilderString("merges");
static final XContentBuilderString CURRENT = new XContentBuilderString("current");
static final XContentBuilderString CURRENT_DOCS = new XContentBuilderString("current_docs");
static final XContentBuilderString CURRENT_SIZE = new XContentBuilderString("current_size");
static final XContentBuilderString CURRENT_SIZE_IN_BYTES = new XContentBuilderString("current_size_in_bytes");
static final XContentBuilderString TOTAL = new XContentBuilderString("total");
static final XContentBuilderString TOTAL_TIME = new XContentBuilderString("total_time");
static final XContentBuilderString TOTAL_TIME_IN_MILLIS = new XContentBuilderString("total_time_in_millis");
static final XContentBuilderString TOTAL_DOCS = new XContentBuilderString("total_docs");
static final XContentBuilderString TOTAL_SIZE = new XContentBuilderString("total_size");
static final XContentBuilderString TOTAL_SIZE_IN_BYTES = new XContentBuilderString("total_size_in_bytes");
}
@Override
public void readFrom(StreamInput in) throws IOException {
total = in.readVLong();
totalTimeInMillis = in.readVLong();
totalNumDocs = in.readVLong();
totalSizeInBytes = in.readVLong();
current = in.readVLong();
currentNumDocs = in.readVLong();
currentSizeInBytes = in.readVLong();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeVLong(total);
out.writeVLong(totalTimeInMillis);
out.writeVLong(totalNumDocs);
out.writeVLong(totalSizeInBytes);
out.writeVLong(current);
out.writeVLong(currentNumDocs);
out.writeVLong(currentSizeInBytes);
}
} | 0true
| src_main_java_org_elasticsearch_index_merge_MergeStats.java |
448 | public class KeyRange {
private final StaticBuffer start;
private final StaticBuffer end;
public KeyRange(StaticBuffer start, StaticBuffer end) {
this.start = start;
this.end = end;
}
@Override
public String toString() {
return String.format("KeyRange(left: %s, right: %s)", start, end);
}
public StaticBuffer getAt(int position) {
switch(position) {
case 0: return start;
case 1: return end;
default: throw new IndexOutOfBoundsException("Exceed length of 2: " + position);
}
}
public StaticBuffer getStart() {
return start;
}
public StaticBuffer getEnd() {
return end;
}
} | 0true
| titan-core_src_main_java_com_thinkaurelius_titan_diskstorage_keycolumnvalue_KeyRange.java |
3,694 | public static class Builder extends NumberFieldMapper.Builder<Builder, TimestampFieldMapper> {
private EnabledAttributeMapper enabledState = EnabledAttributeMapper.UNSET_DISABLED;
private String path = Defaults.PATH;
private FormatDateTimeFormatter dateTimeFormatter = Defaults.DATE_TIME_FORMATTER;
public Builder() {
super(Defaults.NAME, new FieldType(Defaults.FIELD_TYPE));
}
public Builder enabled(EnabledAttributeMapper enabledState) {
this.enabledState = enabledState;
return builder;
}
public Builder path(String path) {
this.path = path;
return builder;
}
public Builder dateTimeFormatter(FormatDateTimeFormatter dateTimeFormatter) {
this.dateTimeFormatter = dateTimeFormatter;
return builder;
}
@Override
public TimestampFieldMapper build(BuilderContext context) {
boolean roundCeil = Defaults.ROUND_CEIL;
if (context.indexSettings() != null) {
Settings settings = context.indexSettings();
roundCeil = settings.getAsBoolean("index.mapping.date.round_ceil", settings.getAsBoolean("index.mapping.date.parse_upper_inclusive", Defaults.ROUND_CEIL));
}
return new TimestampFieldMapper(fieldType, docValues, enabledState, path, dateTimeFormatter, roundCeil,
ignoreMalformed(context), coerce(context), postingsProvider, docValuesProvider, normsLoading, fieldDataSettings, context.indexSettings());
}
} | 1no label
| src_main_java_org_elasticsearch_index_mapper_internal_TimestampFieldMapper.java |
742 | public class ProductOptionValidationType implements Serializable, BroadleafEnumerationType {
private static final long serialVersionUID = 1L;
// Registry of every instance created, keyed by type code; LinkedHashMap keeps
// registration order. NOTE(review): the map is not synchronized — this assumes
// instances are only registered during class initialization; confirm.
private static final Map<String, ProductOptionValidationType> TYPES = new LinkedHashMap<String, ProductOptionValidationType>();
public static final ProductOptionValidationType REGEX = new ProductOptionValidationType("REGEX", "Regular Expression");
// Looks up a previously registered instance by its type code; null if unknown.
public static ProductOptionValidationType getInstance(final String type) {
return TYPES.get(type);
}
private String type;
private String friendlyType;
// No-arg constructor required for serialization/persistence frameworks.
public ProductOptionValidationType() {
//do nothing
}
// NOTE(review): setType publishes "this" into the static TYPES map before the
// constructor finishes (a "this"-escape); hedged as acceptable here since
// registration happens from the static REGEX initializer above.
public ProductOptionValidationType(final String type, final String friendlyType) {
this.friendlyType = friendlyType;
setType(type);
}
@Override
public String getType() {
return type;
}
@Override
public String getFriendlyType() {
return friendlyType;
}
// Stores the code and registers this instance the first time the code is seen.
private void setType(final String type) {
this.type = type;
if (!TYPES.containsKey(type)) {
TYPES.put(type, this);
}
}
// equals/hashCode are based solely on the type code.
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((type == null) ? 0 : type.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
ProductOptionValidationType other = (ProductOptionValidationType) obj;
if (type == null) {
if (other.type != null)
return false;
} else if (!type.equals(other.type))
return false;
return true;
}
} | 1no label
| core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_catalog_service_type_ProductOptionValidationType.java |
237 | public static class Task {
// Immutable value object describing one task occurrence in a source file:
// its text, priority, the line it appears on, and its start offset.
private final String text;
private final int priority;
private final int line;
private final int startIndex;
public Task(String text, int priority, int line, int startIndex) {
this.text = text;
this.priority = priority;
this.line = line;
this.startIndex = startIndex;
}
// Raw task text — presumably the matched TODO/FIXME marker content; confirm
// against the creating code.
public String getText() {
return text;
}
public int getPriority() {
return priority;
}
// 1-based or 0-based line number — not determinable from this class alone.
public int getLine() {
return line;
}
public int getStartIndex() {
return startIndex;
}
} | 0true
| plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_editor_CeylonTaskUtil.java |
3,101 | public class EngineCreationFailureException extends EngineException {
// Thrown when an engine cannot be created for the given shard; wraps the
// underlying cause and carries the shard id via the EngineException parent.
public EngineCreationFailureException(ShardId shardId, String msg, Throwable cause) {
super(shardId, msg, cause);
}
} | 0true
| src_main_java_org_elasticsearch_index_engine_EngineCreationFailureException.java |
2,489 | public final class XContentBuilder implements BytesStream {
public static enum FieldCaseConversion {
/**
* No conversion will occur.
*/
NONE,
/**
* Camel Case will be converted to Underscore casing.
*/
UNDERSCORE,
/**
* Underscore will be converted to Camel case.
*/
CAMELCASE
}
public final static DateTimeFormatter defaultDatePrinter = ISODateTimeFormat.dateTime().withZone(DateTimeZone.UTC);
protected static FieldCaseConversion globalFieldCaseConversion = FieldCaseConversion.NONE;
public static void globalFieldCaseConversion(FieldCaseConversion globalFieldCaseConversion) {
XContentBuilder.globalFieldCaseConversion = globalFieldCaseConversion;
}
public static XContentBuilder builder(XContent xContent) throws IOException {
return new XContentBuilder(xContent, new BytesStreamOutput());
}
private XContentGenerator generator;
private final OutputStream bos;
private FieldCaseConversion fieldCaseConversion = globalFieldCaseConversion;
private StringBuilder cachedStringBuilder;
private boolean humanReadable = false;
/**
* Constructs a new builder using the provided xcontent and an OutputStream. Make sure
* to call {@link #close()} when the builder is done with.
*/
public XContentBuilder(XContent xContent, OutputStream bos) throws IOException {
this.bos = bos;
this.generator = xContent.createGenerator(bos);
}
public XContentBuilder fieldCaseConversion(FieldCaseConversion fieldCaseConversion) {
this.fieldCaseConversion = fieldCaseConversion;
return this;
}
public XContentType contentType() {
return generator.contentType();
}
public XContentBuilder prettyPrint() {
generator.usePrettyPrint();
return this;
}
public XContentBuilder lfAtEnd() {
generator.usePrintLineFeedAtEnd();
return this;
}
public XContentBuilder humanReadable(boolean humanReadable) {
this.humanReadable = humanReadable;
return this;
}
public boolean humanReadable() {
return this.humanReadable;
}
public XContentBuilder field(String name, ToXContent xContent) throws IOException {
field(name);
xContent.toXContent(this, ToXContent.EMPTY_PARAMS);
return this;
}
public XContentBuilder field(String name, ToXContent xContent, ToXContent.Params params) throws IOException {
field(name);
xContent.toXContent(this, params);
return this;
}
public XContentBuilder startObject(String name) throws IOException {
field(name);
startObject();
return this;
}
public XContentBuilder startObject(String name, FieldCaseConversion conversion) throws IOException {
field(name, conversion);
startObject();
return this;
}
public XContentBuilder startObject(XContentBuilderString name) throws IOException {
field(name);
startObject();
return this;
}
public XContentBuilder startObject(XContentBuilderString name, FieldCaseConversion conversion) throws IOException {
field(name, conversion);
startObject();
return this;
}
public XContentBuilder startObject() throws IOException {
generator.writeStartObject();
return this;
}
public XContentBuilder endObject() throws IOException {
generator.writeEndObject();
return this;
}
public XContentBuilder array(String name, String... values) throws IOException {
startArray(name);
for (String value : values) {
value(value);
}
endArray();
return this;
}
public XContentBuilder array(XContentBuilderString name, String... values) throws IOException {
startArray(name);
for (String value : values) {
value(value);
}
endArray();
return this;
}
public XContentBuilder array(String name, Object... values) throws IOException {
startArray(name);
for (Object value : values) {
value(value);
}
endArray();
return this;
}
public XContentBuilder array(XContentBuilderString name, Object... values) throws IOException {
startArray(name);
for (Object value : values) {
value(value);
}
endArray();
return this;
}
public XContentBuilder startArray(String name, FieldCaseConversion conversion) throws IOException {
field(name, conversion);
startArray();
return this;
}
public XContentBuilder startArray(String name) throws IOException {
field(name);
startArray();
return this;
}
public XContentBuilder startArray(XContentBuilderString name) throws IOException {
field(name);
startArray();
return this;
}
public XContentBuilder startArray() throws IOException {
generator.writeStartArray();
return this;
}
public XContentBuilder endArray() throws IOException {
generator.writeEndArray();
return this;
}
// Writes a field name from a pre-computed XContentBuilderString, honouring the
// builder-level case-conversion setting.
public XContentBuilder field(XContentBuilderString name) throws IOException {
if (fieldCaseConversion == FieldCaseConversion.UNDERSCORE) {
generator.writeFieldName(name.underscore());
} else if (fieldCaseConversion == FieldCaseConversion.CAMELCASE) {
generator.writeFieldName(name.camelCase());
} else {
// NONE also emits the underscore form: pre-computed names default to
// underscore casing (unlike the String overload, which passes through).
generator.writeFieldName(name.underscore());
}
return this;
}
public XContentBuilder field(XContentBuilderString name, FieldCaseConversion conversion) throws IOException {
if (conversion == FieldCaseConversion.UNDERSCORE) {
generator.writeFieldName(name.underscore());
} else if (conversion == FieldCaseConversion.CAMELCASE) {
generator.writeFieldName(name.camelCase());
} else {
generator.writeFieldName(name.underscore());
}
return this;
}
/**
 * Writes a raw String field name, first applying this builder's configured
 * case conversion (underscore or camel case). The shared StringBuilder is
 * allocated lazily and reused across conversions.
 */
public XContentBuilder field(String name) throws IOException {
switch (fieldCaseConversion) {
case UNDERSCORE:
if (cachedStringBuilder == null) {
cachedStringBuilder = new StringBuilder();
}
name = Strings.toUnderscoreCase(name, cachedStringBuilder);
break;
case CAMELCASE:
if (cachedStringBuilder == null) {
cachedStringBuilder = new StringBuilder();
}
name = Strings.toCamelCase(name, cachedStringBuilder);
break;
default:
// NONE: pass the name through unchanged
break;
}
generator.writeFieldName(name);
return this;
}
public XContentBuilder field(String name, FieldCaseConversion conversion) throws IOException {
if (conversion == FieldCaseConversion.UNDERSCORE) {
if (cachedStringBuilder == null) {
cachedStringBuilder = new StringBuilder();
}
name = Strings.toUnderscoreCase(name, cachedStringBuilder);
} else if (conversion == FieldCaseConversion.CAMELCASE) {
if (cachedStringBuilder == null) {
cachedStringBuilder = new StringBuilder();
}
name = Strings.toCamelCase(name, cachedStringBuilder);
}
generator.writeFieldName(name);
return this;
}
public XContentBuilder field(String name, char[] value, int offset, int length) throws IOException {
field(name);
if (value == null) {
generator.writeNull();
} else {
generator.writeString(value, offset, length);
}
return this;
}
public XContentBuilder field(XContentBuilderString name, char[] value, int offset, int length) throws IOException {
field(name);
if (value == null) {
generator.writeNull();
} else {
generator.writeString(value, offset, length);
}
return this;
}
public XContentBuilder field(String name, String value) throws IOException {
field(name);
if (value == null) {
generator.writeNull();
} else {
generator.writeString(value);
}
return this;
}
public XContentBuilder field(String name, String value, FieldCaseConversion conversion) throws IOException {
field(name, conversion);
if (value == null) {
generator.writeNull();
} else {
generator.writeString(value);
}
return this;
}
public XContentBuilder field(XContentBuilderString name, String value) throws IOException {
field(name);
if (value == null) {
generator.writeNull();
} else {
generator.writeString(value);
}
return this;
}
public XContentBuilder field(XContentBuilderString name, String value, FieldCaseConversion conversion) throws IOException {
field(name, conversion);
if (value == null) {
generator.writeNull();
} else {
generator.writeString(value);
}
return this;
}
public XContentBuilder field(String name, Integer value) throws IOException {
field(name);
if (value == null) {
generator.writeNull();
} else {
generator.writeNumber(value.intValue());
}
return this;
}
public XContentBuilder field(XContentBuilderString name, Integer value) throws IOException {
field(name);
if (value == null) {
generator.writeNull();
} else {
generator.writeNumber(value.intValue());
}
return this;
}
public XContentBuilder field(String name, int value) throws IOException {
field(name);
generator.writeNumber(value);
return this;
}
public XContentBuilder field(XContentBuilderString name, int value) throws IOException {
field(name);
generator.writeNumber(value);
return this;
}
public XContentBuilder field(String name, Long value) throws IOException {
field(name);
if (value == null) {
generator.writeNull();
} else {
generator.writeNumber(value.longValue());
}
return this;
}
public XContentBuilder field(XContentBuilderString name, Long value) throws IOException {
field(name);
if (value == null) {
generator.writeNull();
} else {
generator.writeNumber(value.longValue());
}
return this;
}
public XContentBuilder field(String name, long value) throws IOException {
field(name);
generator.writeNumber(value);
return this;
}
public XContentBuilder field(XContentBuilderString name, long value) throws IOException {
field(name);
generator.writeNumber(value);
return this;
}
public XContentBuilder field(String name, Float value) throws IOException {
field(name);
if (value == null) {
generator.writeNull();
} else {
generator.writeNumber(value.floatValue());
}
return this;
}
public XContentBuilder field(XContentBuilderString name, Float value) throws IOException {
field(name);
if (value == null) {
generator.writeNull();
} else {
generator.writeNumber(value.floatValue());
}
return this;
}
public XContentBuilder field(String name, float value) throws IOException {
field(name);
generator.writeNumber(value);
return this;
}
public XContentBuilder field(XContentBuilderString name, float value) throws IOException {
field(name);
generator.writeNumber(value);
return this;
}
public XContentBuilder field(String name, Double value) throws IOException {
field(name);
if (value == null) {
generator.writeNull();
} else {
generator.writeNumber(value);
}
return this;
}
public XContentBuilder field(XContentBuilderString name, Double value) throws IOException {
field(name);
if (value == null) {
generator.writeNull();
} else {
generator.writeNumber(value);
}
return this;
}
public XContentBuilder field(String name, double value) throws IOException {
field(name);
generator.writeNumber(value);
return this;
}
public XContentBuilder field(XContentBuilderString name, double value) throws IOException {
field(name);
generator.writeNumber(value);
return this;
}
/**
 * Writes a BigDecimal field using the value's own scale, HALF_UP rounding,
 * rendered as a double (see the 5-arg overload). Unlike every other
 * boxed-number overload in this class (Integer/Long/Float/Double), the
 * original dereferenced {@code value.scale()} before any null check and so
 * threw NPE on null; a null value is now written as JSON null for
 * consistency.
 */
public XContentBuilder field(String name, BigDecimal value) throws IOException {
if (value == null) {
field(name);
generator.writeNull();
return this;
}
return field(name, value, value.scale(), RoundingMode.HALF_UP, true);
}
public XContentBuilder field(XContentBuilderString name, BigDecimal value) throws IOException {
if (value == null) {
field(name);
generator.writeNull();
return this;
}
return field(name, value, value.scale(), RoundingMode.HALF_UP, true);
}
public XContentBuilder field(String name, BigDecimal value, int scale, RoundingMode rounding, boolean toDouble) throws IOException {
field(name);
if (toDouble) {
try {
generator.writeNumber(value.setScale(scale, rounding).doubleValue());
} catch (ArithmeticException e) {
generator.writeString(value.toEngineeringString());
}
} else {
generator.writeString(value.toEngineeringString());
}
return this;
}
public XContentBuilder field(XContentBuilderString name, BigDecimal value, int scale, RoundingMode rounding, boolean toDouble) throws IOException {
field(name);
if (toDouble) {
try {
generator.writeNumber(value.setScale(scale, rounding).doubleValue());
} catch (ArithmeticException e) {
generator.writeString(value.toEngineeringString());
}
} else {
generator.writeString(value.toEngineeringString());
}
return this;
}
public XContentBuilder field(String name, BytesReference value) throws IOException {
field(name);
if (!value.hasArray()) {
value = value.toBytesArray();
}
generator.writeBinary(value.array(), value.arrayOffset(), value.length());
return this;
}
public XContentBuilder field(XContentBuilderString name, BytesReference value) throws IOException {
field(name);
if (!value.hasArray()) {
value = value.toBytesArray();
}
generator.writeBinary(value.array(), value.arrayOffset(), value.length());
return this;
}
public XContentBuilder field(XContentBuilderString name, BytesRef value) throws IOException {
field(name);
generator.writeUTF8String(value.bytes, value.offset, value.length);
return this;
}
public XContentBuilder field(String name, Text value) throws IOException {
field(name);
if (value.hasBytes() && value.bytes().hasArray()) {
generator.writeUTF8String(value.bytes().array(), value.bytes().arrayOffset(), value.bytes().length());
return this;
}
if (value.hasString()) {
generator.writeString(value.string());
return this;
}
// TODO: TextBytesOptimization we can use a buffer here to convert it? maybe add a request to jackson to support InputStream as well?
BytesArray bytesArray = value.bytes().toBytesArray();
generator.writeUTF8String(bytesArray.array(), bytesArray.arrayOffset(), bytesArray.length());
return this;
}
public XContentBuilder field(XContentBuilderString name, Text value) throws IOException {
field(name);
if (value.hasBytes() && value.bytes().hasArray()) {
generator.writeUTF8String(value.bytes().array(), value.bytes().arrayOffset(), value.bytes().length());
return this;
}
if (value.hasString()) {
generator.writeString(value.string());
return this;
}
// TODO: TextBytesOptimization we can use a buffer here to convert it? maybe add a request to jackson to support InputStream as well?
BytesArray bytesArray = value.bytes().toBytesArray();
generator.writeUTF8String(bytesArray.array(), bytesArray.arrayOffset(), bytesArray.length());
return this;
}
public XContentBuilder field(String name, byte[] value, int offset, int length) throws IOException {
field(name);
generator.writeBinary(value, offset, length);
return this;
}
public XContentBuilder field(String name, Map<String, Object> value) throws IOException {
field(name);
value(value);
return this;
}
public XContentBuilder field(XContentBuilderString name, Map<String, Object> value) throws IOException {
field(name);
value(value);
return this;
}
public XContentBuilder field(String name, Iterable value) throws IOException {
startArray(name);
for (Object o : value) {
value(o);
}
endArray();
return this;
}
public XContentBuilder field(XContentBuilderString name, Iterable value) throws IOException {
startArray(name);
for (Object o : value) {
value(o);
}
endArray();
return this;
}
public XContentBuilder field(String name, String... value) throws IOException {
startArray(name);
for (String o : value) {
value(o);
}
endArray();
return this;
}
public XContentBuilder field(XContentBuilderString name, String... value) throws IOException {
startArray(name);
for (String o : value) {
value(o);
}
endArray();
return this;
}
public XContentBuilder field(String name, Object... value) throws IOException {
startArray(name);
for (Object o : value) {
value(o);
}
endArray();
return this;
}
public XContentBuilder field(XContentBuilderString name, Object... value) throws IOException {
startArray(name);
for (Object o : value) {
value(o);
}
endArray();
return this;
}
public XContentBuilder field(String name, int... value) throws IOException {
startArray(name);
for (Object o : value) {
value(o);
}
endArray();
return this;
}
// Writes a sub-range of the int array as a JSON array.
// NOTE(review): the loop runs i from offset to length (exclusive), so "length"
// behaves as an END INDEX rather than an element count, and the assertion
// (value.length > length) rejects length == value.length (full-array writes).
// Confirm callers' expectations before changing either.
public XContentBuilder field(XContentBuilderString name, int offset, int length, int... value) throws IOException {
assert ((offset >= 0) && (value.length > length));
startArray(name);
for (int i = offset; i < length; i++) {
value(value[i]);
}
endArray();
return this;
}
public XContentBuilder field(XContentBuilderString name, int... value) throws IOException {
startArray(name);
for (Object o : value) {
value(o);
}
endArray();
return this;
}
public XContentBuilder field(String name, long... value) throws IOException {
startArray(name);
for (Object o : value) {
value(o);
}
endArray();
return this;
}
public XContentBuilder field(XContentBuilderString name, long... value) throws IOException {
startArray(name);
for (Object o : value) {
value(o);
}
endArray();
return this;
}
public XContentBuilder field(String name, float... value) throws IOException {
startArray(name);
for (Object o : value) {
value(o);
}
endArray();
return this;
}
public XContentBuilder field(XContentBuilderString name, float... value) throws IOException {
startArray(name);
for (Object o : value) {
value(o);
}
endArray();
return this;
}
public XContentBuilder field(String name, double... value) throws IOException {
startArray(name);
for (Object o : value) {
value(o);
}
endArray();
return this;
}
public XContentBuilder field(XContentBuilderString name, double... value) throws IOException {
startArray(name);
for (Object o : value) {
value(o);
}
endArray();
return this;
}
public XContentBuilder field(String name, Object value) throws IOException {
field(name);
writeValue(value);
return this;
}
public XContentBuilder field(XContentBuilderString name, Object value) throws IOException {
field(name);
writeValue(value);
return this;
}
public XContentBuilder value(Object value) throws IOException {
writeValue(value);
return this;
}
public XContentBuilder field(String name, boolean value) throws IOException {
field(name);
generator.writeBoolean(value);
return this;
}
public XContentBuilder field(XContentBuilderString name, boolean value) throws IOException {
field(name);
generator.writeBoolean(value);
return this;
}
public XContentBuilder field(String name, byte[] value) throws IOException {
field(name);
if (value == null) {
generator.writeNull();
} else {
generator.writeBinary(value);
}
return this;
}
public XContentBuilder field(XContentBuilderString name, byte[] value) throws IOException {
field(name);
return value(value);
}
public XContentBuilder field(String name, ReadableInstant date) throws IOException {
field(name);
return value(date);
}
public XContentBuilder field(XContentBuilderString name, ReadableInstant date) throws IOException {
field(name);
return value(date);
}
public XContentBuilder field(String name, ReadableInstant date, DateTimeFormatter formatter) throws IOException {
field(name);
return value(date, formatter);
}
public XContentBuilder field(XContentBuilderString name, ReadableInstant date, DateTimeFormatter formatter) throws IOException {
field(name);
return value(date, formatter);
}
public XContentBuilder field(String name, Date date) throws IOException {
field(name);
return value(date);
}
public XContentBuilder field(XContentBuilderString name, Date date) throws IOException {
field(name);
return value(date);
}
public XContentBuilder field(String name, Date date, DateTimeFormatter formatter) throws IOException {
field(name);
return value(date, formatter);
}
public XContentBuilder field(XContentBuilderString name, Date date, DateTimeFormatter formatter) throws IOException {
field(name);
return value(date, formatter);
}
public XContentBuilder nullField(String name) throws IOException {
generator.writeNullField(name);
return this;
}
public XContentBuilder nullField(XContentBuilderString name) throws IOException {
field(name);
generator.writeNull();
return this;
}
public XContentBuilder nullValue() throws IOException {
generator.writeNull();
return this;
}
public XContentBuilder rawField(String fieldName, byte[] content) throws IOException {
generator.writeRawField(fieldName, content, bos);
return this;
}
public XContentBuilder rawField(String fieldName, byte[] content, int offset, int length) throws IOException {
generator.writeRawField(fieldName, content, offset, length, bos);
return this;
}
public XContentBuilder rawField(String fieldName, InputStream content) throws IOException {
generator.writeRawField(fieldName, content, bos);
return this;
}
public XContentBuilder rawField(String fieldName, BytesReference content) throws IOException {
generator.writeRawField(fieldName, content, bos);
return this;
}
public XContentBuilder timeValueField(XContentBuilderString rawFieldName, XContentBuilderString readableFieldName, TimeValue timeValue) throws IOException {
if (humanReadable) {
field(readableFieldName, timeValue.toString());
}
field(rawFieldName, timeValue.millis());
return this;
}
public XContentBuilder timeValueField(XContentBuilderString rawFieldName, XContentBuilderString readableFieldName, long rawTime) throws IOException {
if (humanReadable) {
field(readableFieldName, new TimeValue(rawTime).toString());
}
field(rawFieldName, rawTime);
return this;
}
public XContentBuilder byteSizeField(XContentBuilderString rawFieldName, XContentBuilderString readableFieldName, ByteSizeValue byteSizeValue) throws IOException {
if (humanReadable) {
field(readableFieldName, byteSizeValue.toString());
}
field(rawFieldName, byteSizeValue.bytes());
return this;
}
public XContentBuilder byteSizeField(XContentBuilderString rawFieldName, XContentBuilderString readableFieldName, long rawSize) throws IOException {
if (humanReadable) {
field(readableFieldName, new ByteSizeValue(rawSize).toString());
}
field(rawFieldName, rawSize);
return this;
}
public XContentBuilder value(Boolean value) throws IOException {
if (value == null) {
return nullValue();
}
return value(value.booleanValue());
}
public XContentBuilder value(boolean value) throws IOException {
generator.writeBoolean(value);
return this;
}
public XContentBuilder value(ReadableInstant date) throws IOException {
return value(date, defaultDatePrinter);
}
public XContentBuilder value(ReadableInstant date, DateTimeFormatter dateTimeFormatter) throws IOException {
if (date == null) {
return nullValue();
}
return value(dateTimeFormatter.print(date));
}
public XContentBuilder value(Date date) throws IOException {
return value(date, defaultDatePrinter);
}
public XContentBuilder value(Date date, DateTimeFormatter dateTimeFormatter) throws IOException {
if (date == null) {
return nullValue();
}
return value(dateTimeFormatter.print(date.getTime()));
}
public XContentBuilder value(Integer value) throws IOException {
if (value == null) {
return nullValue();
}
return value(value.intValue());
}
public XContentBuilder value(int value) throws IOException {
generator.writeNumber(value);
return this;
}
public XContentBuilder value(Long value) throws IOException {
if (value == null) {
return nullValue();
}
return value(value.longValue());
}
public XContentBuilder value(long value) throws IOException {
generator.writeNumber(value);
return this;
}
public XContentBuilder value(Float value) throws IOException {
if (value == null) {
return nullValue();
}
return value(value.floatValue());
}
public XContentBuilder value(float value) throws IOException {
generator.writeNumber(value);
return this;
}
public XContentBuilder value(Double value) throws IOException {
if (value == null) {
return nullValue();
}
return value(value.doubleValue());
}
public XContentBuilder value(double value) throws IOException {
generator.writeNumber(value);
return this;
}
public XContentBuilder value(String value) throws IOException {
if (value == null) {
return nullValue();
}
generator.writeString(value);
return this;
}
public XContentBuilder value(byte[] value) throws IOException {
if (value == null) {
return nullValue();
}
generator.writeBinary(value);
return this;
}
public XContentBuilder value(byte[] value, int offset, int length) throws IOException {
if (value == null) {
return nullValue();
}
generator.writeBinary(value, offset, length);
return this;
}
public XContentBuilder value(BytesReference value) throws IOException {
if (value == null) {
return nullValue();
}
if (!value.hasArray()) {
value = value.toBytesArray();
}
generator.writeBinary(value.array(), value.arrayOffset(), value.length());
return this;
}
public XContentBuilder value(Text value) throws IOException {
if (value == null) {
return nullValue();
}
if (value.hasBytes() && value.bytes().hasArray()) {
generator.writeUTF8String(value.bytes().array(), value.bytes().arrayOffset(), value.bytes().length());
return this;
}
if (value.hasString()) {
generator.writeString(value.string());
return this;
}
BytesArray bytesArray = value.bytes().toBytesArray();
generator.writeUTF8String(bytesArray.array(), bytesArray.arrayOffset(), bytesArray.length());
return this;
}
public XContentBuilder map(Map<String, Object> map) throws IOException {
if (map == null) {
return nullValue();
}
writeMap(map);
return this;
}
public XContentBuilder value(Map<String, Object> map) throws IOException {
if (map == null) {
return nullValue();
}
writeMap(map);
return this;
}
public XContentBuilder value(Iterable value) throws IOException {
if (value == null) {
return nullValue();
}
startArray();
for (Object o : value) {
value(o);
}
endArray();
return this;
}
public XContentBuilder copyCurrentStructure(XContentParser parser) throws IOException {
generator.copyCurrentStructure(parser);
return this;
}
public XContentBuilder flush() throws IOException {
generator.flush();
return this;
}
/**
 * Closes the underlying generator. An IOException on close is deliberately
 * swallowed: close() is invoked from best-effort finalization paths such as
 * bytes() and string() below, where a close failure must not mask the result.
 */
public void close() {
try {
generator.close();
} catch (IOException e) {
// ignore
}
}
public XContentGenerator generator() {
return this.generator;
}
public OutputStream stream() {
return this.bos;
}
@Override
public BytesReference bytes() {
close();
return ((BytesStream) bos).bytes();
}
/**
* Returns the actual stream used.
*/
public BytesStream bytesStream() throws IOException {
close();
return (BytesStream) bos;
}
/**
* Returns a string representation of the builder (only applicable for text based xcontent).
* <p/>
* <p>Only applicable when the builder is constructed with {@link FastByteArrayOutputStream}.
*/
public String string() throws IOException {
close();
BytesArray bytesArray = bytes().toBytesArray();
return new String(bytesArray.array(), bytesArray.arrayOffset(), bytesArray.length(), Charsets.UTF_8);
}
/**
 * Serializes a map as a JSON object, one field per entry. Value rendering is
 * delegated entirely to writeValue, which already emits JSON null for null
 * values, so no separate null branch is needed here.
 */
private void writeMap(Map<String, Object> map) throws IOException {
generator.writeStartObject();
for (Map.Entry<String, Object> entry : map.entrySet()) {
field(entry.getKey());
writeValue(entry.getValue());
}
generator.writeEndObject();
}
/**
 * Writes an arbitrary Java value, dispatching on its runtime type: strings,
 * boxed primitives, GeoPoint, maps, iterables, object and primitive arrays,
 * dates, binary blobs, Text and ToXContent implementors are rendered
 * natively; any other type falls back to toString() (useful for enums,
 * DistanceUnit, ...). Null writes JSON null.
 */
private void writeValue(Object value) throws IOException {
if (value == null) {
generator.writeNull();
return;
}
Class type = value.getClass();
if (type == String.class) {
generator.writeString((String) value);
} else if (type == Integer.class) {
generator.writeNumber(((Integer) value).intValue());
} else if (type == Long.class) {
generator.writeNumber(((Long) value).longValue());
} else if (type == Float.class) {
generator.writeNumber(((Float) value).floatValue());
} else if (type == Double.class) {
generator.writeNumber(((Double) value).doubleValue());
} else if (type == Short.class) {
generator.writeNumber(((Short) value).shortValue());
} else if (type == Boolean.class) {
generator.writeBoolean(((Boolean) value).booleanValue());
} else if (type == GeoPoint.class) {
generator.writeStartObject();
generator.writeNumberField("lat", ((GeoPoint) value).lat());
generator.writeNumberField("lon", ((GeoPoint) value).lon());
generator.writeEndObject();
} else if (value instanceof Map) {
writeMap((Map) value);
} else if (value instanceof Iterable) {
generator.writeStartArray();
for (Object v : (Iterable) value) {
writeValue(v);
}
generator.writeEndArray();
} else if (value instanceof Object[]) {
generator.writeStartArray();
for (Object v : (Object[]) value) {
writeValue(v);
}
generator.writeEndArray();
} else if (type == byte[].class) {
generator.writeBinary((byte[]) value);
} else if (value instanceof Date) {
generator.writeString(XContentBuilder.defaultDatePrinter.print(((Date) value).getTime()));
} else if (value instanceof Calendar) {
generator.writeString(XContentBuilder.defaultDatePrinter.print((((Calendar) value)).getTimeInMillis()));
} else if (value instanceof ReadableInstant) {
generator.writeString(XContentBuilder.defaultDatePrinter.print((((ReadableInstant) value)).getMillis()));
} else if (value instanceof BytesReference) {
BytesReference bytes = (BytesReference) value;
if (!bytes.hasArray()) {
bytes = bytes.toBytesArray();
}
generator.writeBinary(bytes.array(), bytes.arrayOffset(), bytes.length());
} else if (value instanceof Text) {
Text text = (Text) value;
if (text.hasBytes() && text.bytes().hasArray()) {
generator.writeUTF8String(text.bytes().array(), text.bytes().arrayOffset(), text.bytes().length());
} else if (text.hasString()) {
generator.writeString(text.string());
} else {
BytesArray bytesArray = text.bytes().toBytesArray();
generator.writeUTF8String(bytesArray.array(), bytesArray.arrayOffset(), bytesArray.length());
}
} else if (value instanceof ToXContent) {
((ToXContent) value).toXContent(this, ToXContent.EMPTY_PARAMS);
} else if (value instanceof double[]) {
generator.writeStartArray();
for (double v : (double[]) value) {
generator.writeNumber(v);
}
generator.writeEndArray();
} else if (value instanceof long[]) {
generator.writeStartArray();
for (long v : (long[]) value) {
generator.writeNumber(v);
}
generator.writeEndArray();
} else if (value instanceof int[]) {
generator.writeStartArray();
for (int v : (int[]) value) {
generator.writeNumber(v);
}
generator.writeEndArray();
} else if (value instanceof float[]) {
generator.writeStartArray();
for (float v : (float[]) value) {
generator.writeNumber(v);
}
generator.writeEndArray();
} else if (value instanceof short[]) {
generator.writeStartArray();
// FIX: iterate with a short loop variable (widened to int by writeNumber)
// instead of the previous float variable, which serialized short values as
// floating-point numbers (e.g. 5 was emitted as 5.0)
for (short v : (short[]) value) {
generator.writeNumber(v);
}
generator.writeEndArray();
} else {
// if this is a "value" object, like enum, DistanceUnit, ..., just toString it
// yea, it can be misleading when toString a Java class, but really, jackson should be used in that case
generator.writeString(value.toString());
}
}
} | 0true
| src_main_java_org_elasticsearch_common_xcontent_XContentBuilder.java |
209 | @SuppressWarnings("unchecked")
public class OStorageRemoteThread implements OStorageProxy {
// Serial generator for client-side session ids; decrementAndGet yields strictly
// negative values — presumably placeholders until a real server session id is
// assigned via setSessionId; confirm against OStorageRemote.
private static AtomicInteger sessionSerialId = new AtomicInteger(-1);
// Shared remote storage this per-thread proxy delegates every call to.
private final OStorageRemote delegate;
private String serverURL;
private int sessionId;
public OStorageRemoteThread(final OStorageRemote iSharedStorage) {
delegate = iSharedStorage;
serverURL = null;
// negative id marks a session not yet established on the server
sessionId = sessionSerialId.decrementAndGet();
}
// Attaches this proxy to an already-known session id.
public OStorageRemoteThread(final OStorageRemote iSharedStorage, final int iSessionId) {
delegate = iSharedStorage;
serverURL = null;
sessionId = iSessionId;
}
public void open(final String iUserName, final String iUserPassword, final Map<String, Object> iOptions) {
pushSession();
try {
delegate.open(iUserName, iUserPassword, iOptions);
} finally {
popSession();
}
}
@Override
public boolean isDistributed() {
return delegate.isDistributed();
}
public void create(final Map<String, Object> iOptions) {
pushSession();
try {
delegate.create(iOptions);
} finally {
popSession();
}
}
public void close(boolean iForce) {
pushSession();
try {
delegate.close(iForce);
Orient.instance().unregisterStorage(this);
} finally {
popSession();
}
}
public boolean dropCluster(final String iClusterName, final boolean iTruncate) {
pushSession();
try {
return delegate.dropCluster(iClusterName, iTruncate);
} finally {
popSession();
}
}
public int getUsers() {
pushSession();
try {
return delegate.getUsers();
} finally {
popSession();
}
}
public int addUser() {
pushSession();
try {
return delegate.addUser();
} finally {
popSession();
}
}
public OSharedResourceAdaptiveExternal getLock() {
pushSession();
try {
return delegate.getLock();
} finally {
popSession();
}
}
public void setSessionId(final String iServerURL, final int iSessionId) {
serverURL = iServerURL;
sessionId = iSessionId;
delegate.setSessionId(serverURL, iSessionId);
}
public void reload() {
pushSession();
try {
delegate.reload();
} finally {
popSession();
}
}
public boolean exists() {
pushSession();
try {
return delegate.exists();
} finally {
popSession();
}
}
public int removeUser() {
pushSession();
try {
return delegate.removeUser();
} finally {
popSession();
}
}
public void close() {
pushSession();
try {
delegate.close();
} finally {
popSession();
}
}
public void delete() {
pushSession();
try {
delegate.delete();
Orient.instance().unregisterStorage(this);
} finally {
popSession();
}
}
@Override
public OStorage getUnderlying() {
return delegate;
}
public Set<String> getClusterNames() {
pushSession();
try {
return delegate.getClusterNames();
} finally {
popSession();
}
}
@Override
public void backup(OutputStream out, Map<String, Object> options, final Callable<Object> callable) throws IOException {
throw new UnsupportedOperationException("backup");
}
@Override
public void restore(InputStream in, Map<String, Object> options, final Callable<Object> callable) throws IOException {
throw new UnsupportedOperationException("restore");
}
// Creates a record through the shared remote storage within this thread's session.
// NOTE(review): the caller-supplied iRecordVersion is discarded and a freshly
// created version is sent instead -- presumably deliberate for the remote
// protocol, but confirm before relying on the parameter.
public OStorageOperationResult<OPhysicalPosition> createRecord(final int iDataSegmentId, final ORecordId iRid,
    final byte[] iContent, ORecordVersion iRecordVersion, final byte iRecordType, final int iMode,
    ORecordCallback<OClusterPosition> iCallback) {
  pushSession();
  try {
    return delegate.createRecord(iDataSegmentId, iRid, iContent, OVersionFactory.instance().createVersion(), iRecordType, iMode,
        iCallback);
  } finally {
    popSession();
  }
}
// Reads a record through the shared remote storage within this thread's session.
// NOTE(review): the iCallback argument is dropped -- null is forwarded to the
// delegate instead. Confirm whether asynchronous read callbacks are
// intentionally unsupported on this proxy.
public OStorageOperationResult<ORawBuffer> readRecord(final ORecordId iRid, final String iFetchPlan, boolean iIgnoreCache,
    ORecordCallback<ORawBuffer> iCallback, boolean loadTombstones) {
  pushSession();
  try {
    return delegate.readRecord(iRid, iFetchPlan, iIgnoreCache, null, loadTombstones);
  } finally {
    popSession();
  }
}
public OStorageOperationResult<ORecordVersion> updateRecord(final ORecordId iRid, final byte[] iContent,
final ORecordVersion iVersion, final byte iRecordType, final int iMode, ORecordCallback<ORecordVersion> iCallback) {
pushSession();
try {
return delegate.updateRecord(iRid, iContent, iVersion, iRecordType, iMode, iCallback);
} finally {
popSession();
}
}
public OStorageOperationResult<Boolean> deleteRecord(final ORecordId iRid, final ORecordVersion iVersion, final int iMode,
ORecordCallback<Boolean> iCallback) {
pushSession();
try {
return delegate.deleteRecord(iRid, iVersion, iMode, iCallback);
} finally {
popSession();
}
}
@Override
public boolean updateReplica(int dataSegmentId, ORecordId rid, byte[] content, ORecordVersion recordVersion, byte recordType)
throws IOException {
pushSession();
try {
return delegate.updateReplica(dataSegmentId, rid, content, recordVersion, recordType);
} finally {
popSession();
}
}
@Override
public ORecordMetadata getRecordMetadata(ORID rid) {
pushSession();
try {
return delegate.getRecordMetadata(rid);
} finally {
popSession();
}
}
@Override
public <V> V callInRecordLock(Callable<V> iCallable, ORID rid, boolean iExclusiveLock) {
pushSession();
try {
return delegate.callInRecordLock(iCallable, rid, iExclusiveLock);
} finally {
popSession();
}
}
@Override
public boolean cleanOutRecord(ORecordId recordId, ORecordVersion recordVersion, int iMode, ORecordCallback<Boolean> callback) {
pushSession();
try {
return delegate.cleanOutRecord(recordId, recordVersion, iMode, callback);
} finally {
popSession();
}
}
public long count(final int iClusterId) {
pushSession();
try {
return delegate.count(iClusterId);
} finally {
popSession();
}
}
@Override
public long count(int iClusterId, boolean countTombstones) {
pushSession();
try {
return delegate.count(iClusterId, countTombstones);
} finally {
popSession();
}
}
@Override
public long count(int[] iClusterIds, boolean countTombstones) {
pushSession();
try {
return delegate.count(iClusterIds, countTombstones);
} finally {
popSession();
}
}
public String toString() {
pushSession();
try {
return delegate.toString();
} finally {
popSession();
}
}
public OClusterPosition[] getClusterDataRange(final int iClusterId) {
pushSession();
try {
return delegate.getClusterDataRange(iClusterId);
} finally {
popSession();
}
}
@Override
public OPhysicalPosition[] higherPhysicalPositions(int currentClusterId, OPhysicalPosition physicalPosition) {
pushSession();
try {
return delegate.higherPhysicalPositions(currentClusterId, physicalPosition);
} finally {
popSession();
}
}
@Override
public OPhysicalPosition[] lowerPhysicalPositions(int currentClusterId, OPhysicalPosition physicalPosition) {
pushSession();
try {
return delegate.lowerPhysicalPositions(currentClusterId, physicalPosition);
} finally {
popSession();
}
}
@Override
public OPhysicalPosition[] ceilingPhysicalPositions(int clusterId, OPhysicalPosition physicalPosition) {
pushSession();
try {
return delegate.ceilingPhysicalPositions(clusterId, physicalPosition);
} finally {
popSession();
}
}
@Override
public OPhysicalPosition[] floorPhysicalPositions(int clusterId, OPhysicalPosition physicalPosition) {
pushSession();
try {
return delegate.floorPhysicalPositions(clusterId, physicalPosition);
} finally {
popSession();
}
}
public long getSize() {
pushSession();
try {
return delegate.getSize();
} finally {
popSession();
}
}
public long countRecords() {
pushSession();
try {
return delegate.countRecords();
} finally {
popSession();
}
}
public long count(final int[] iClusterIds) {
pushSession();
try {
return delegate.count(iClusterIds);
} finally {
popSession();
}
}
public Object command(final OCommandRequestText iCommand) {
pushSession();
try {
return delegate.command(iCommand);
} finally {
popSession();
}
}
// Commits the transaction through the shared remote storage.
// NOTE(review): the callback parameter is dropped (null is forwarded) --
// confirm whether commit callbacks are intentionally unsupported remotely.
public void commit(final OTransaction iTx, Runnable callback) {
  pushSession();
  try {
    delegate.commit(iTx, null);
  } finally {
    popSession();
  }
}
public void rollback(OTransaction iTx) {
pushSession();
try {
delegate.rollback(iTx);
} finally {
popSession();
}
}
public int getClusterIdByName(final String iClusterName) {
pushSession();
try {
return delegate.getClusterIdByName(iClusterName);
} finally {
popSession();
}
}
public String getClusterTypeByName(final String iClusterName) {
pushSession();
try {
return delegate.getClusterTypeByName(iClusterName);
} finally {
popSession();
}
}
public int getDefaultClusterId() {
pushSession();
try {
return delegate.getDefaultClusterId();
} finally {
popSession();
}
}
public void setDefaultClusterId(final int defaultClusterId) {
pushSession();
try {
delegate.setDefaultClusterId(defaultClusterId);
} finally {
popSession();
}
}
/**
 * Adds a cluster through the shared remote storage within this thread's session.
 * Forwards the caller's {@code forceListBased} flag instead of hard-coding
 * {@code false}, consistent with the addCluster overload taking iRequestedId.
 */
public int addCluster(final String iClusterType, final String iClusterName, final String iLocation,
    final String iDataSegmentName, boolean forceListBased, final Object... iArguments) {
  pushSession();
  try {
    return delegate.addCluster(iClusterType, iClusterName, iLocation, iDataSegmentName, forceListBased, iArguments);
  } finally {
    popSession();
  }
}
public int addCluster(String iClusterType, String iClusterName, int iRequestedId, String iLocation, String iDataSegmentName,
boolean forceListBased, Object... iParameters) {
pushSession();
try {
return delegate
.addCluster(iClusterType, iClusterName, iRequestedId, iLocation, iDataSegmentName, forceListBased, iParameters);
} finally {
popSession();
}
}
public boolean dropCluster(final int iClusterId, final boolean iTruncate) {
pushSession();
try {
return delegate.dropCluster(iClusterId, iTruncate);
} finally {
popSession();
}
}
public ODataSegment getDataSegmentById(final int iDataSegmentId) {
return delegate.getDataSegmentById(iDataSegmentId);
}
public int getDataSegmentIdByName(final String iDataSegmentName) {
return delegate.getDataSegmentIdByName(iDataSegmentName);
}
public int addDataSegment(final String iDataSegmentName) {
pushSession();
try {
return delegate.addDataSegment(iDataSegmentName);
} finally {
popSession();
}
}
public int addDataSegment(final String iSegmentName, final String iSegmentFileName) {
pushSession();
try {
return delegate.addDataSegment(iSegmentName, iSegmentFileName);
} finally {
popSession();
}
}
public boolean dropDataSegment(final String iSegmentName) {
pushSession();
try {
return delegate.dropDataSegment(iSegmentName);
} finally {
popSession();
}
}
public void synch() {
pushSession();
try {
delegate.synch();
} finally {
popSession();
}
}
public String getPhysicalClusterNameById(final int iClusterId) {
pushSession();
try {
return delegate.getPhysicalClusterNameById(iClusterId);
} finally {
popSession();
}
}
// Returns the number of clusters.
// NOTE(review): delegates to getClusterMap() despite the different name --
// presumably that method returns the cluster count; confirm against
// OStorageRemote's API.
public int getClusters() {
  pushSession();
  try {
    return delegate.getClusterMap();
  } finally {
    popSession();
  }
}
public Collection<OCluster> getClusterInstances() {
pushSession();
try {
return delegate.getClusterInstances();
} finally {
popSession();
}
}
public OCluster getClusterById(final int iId) {
pushSession();
try {
return delegate.getClusterById(iId);
} finally {
popSession();
}
}
public long getVersion() {
pushSession();
try {
return delegate.getVersion();
} finally {
popSession();
}
}
public boolean isPermanentRequester() {
pushSession();
try {
return delegate.isPermanentRequester();
} finally {
popSession();
}
}
public void updateClusterConfiguration(final byte[] iContent) {
pushSession();
try {
delegate.updateClusterConfiguration(iContent);
} finally {
popSession();
}
}
public OStorageConfiguration getConfiguration() {
pushSession();
try {
return delegate.getConfiguration();
} finally {
popSession();
}
}
public boolean isClosed() {
return delegate.isClosed();
}
public boolean checkForRecordValidity(final OPhysicalPosition ppos) {
pushSession();
try {
return delegate.checkForRecordValidity(ppos);
} finally {
popSession();
}
}
public String getName() {
pushSession();
try {
return delegate.getName();
} finally {
popSession();
}
}
public String getURL() {
return delegate.getURL();
}
public void beginResponse(final OChannelBinaryAsynchClient iNetwork) throws IOException {
pushSession();
try {
delegate.beginResponse(iNetwork);
} finally {
popSession();
}
}
public OLevel2RecordCache getLevel2Cache() {
return delegate.getLevel2Cache();
}
public boolean existsResource(final String iName) {
return delegate.existsResource(iName);
}
public synchronized <T> T getResource(final String iName, final Callable<T> iCallback) {
return (T) delegate.getResource(iName, iCallback);
}
public <T> T removeResource(final String iName) {
return (T) delegate.removeResource(iName);
}
public ODocument getClusterConfiguration() {
return delegate.getClusterConfiguration();
}
protected void handleException(final OChannelBinaryAsynchClient iNetwork, final String iMessage, final Exception iException) {
delegate.handleException(iNetwork, iMessage, iException);
}
public <V> V callInLock(final Callable<V> iCallable, final boolean iExclusiveLock) {
return delegate.callInLock(iCallable, iExclusiveLock);
}
public ORemoteServerEventListener getRemoteServerEventListener() {
return delegate.getAsynchEventListener();
}
public void setRemoteServerEventListener(final ORemoteServerEventListener iListener) {
delegate.setAsynchEventListener(iListener);
}
public void removeRemoteServerEventListener() {
delegate.removeRemoteServerEventListener();
}
public static int getNextConnectionId() {
return sessionSerialId.decrementAndGet();
}
@Override
public void checkForClusterPermissions(final String iClusterName) {
delegate.checkForClusterPermissions(iClusterName);
}
public STATUS getStatus() {
return delegate.getStatus();
}
@Override
public String getType() {
return delegate.getType();
}
@Override
public boolean equals(final Object iOther) {
  // A proxy equals itself and the shared storage it wraps.
  // NOTE(review): equals is overridden without hashCode, and equality with
  // the delegate is not symmetric (delegate.equals(this) may differ) --
  // confirm callers depend on this identity scheme before changing it.
  return iOther == this || iOther == delegate;
}

// Installs this thread's session id/URL on the shared delegate before a call.
protected void pushSession() {
  delegate.setSessionId(serverURL, sessionId);
}

// Captures the (possibly updated) session id/URL back from the delegate after a call.
protected void popSession() {
  serverURL = delegate.getServerURL();
  sessionId = delegate.getSessionId();
}
} | 1no label
| client_src_main_java_com_orientechnologies_orient_client_remote_OStorageRemoteThread.java |
/**
 * Index store backed by a plain ConcurrentHashMap: indexed values are kept in
 * no particular order, so range and comparison queries scan every distinct
 * value (linear in the number of distinct values).
 * Thread-safety relies on the underlying concurrent maps; compound operations
 * are not atomic across maps (see removeIndex).
 */
public class UnsortedIndexStore implements IndexStore {
    // value -> (indexKey -> entry); one inner map per distinct indexed value.
    private final ConcurrentMap<Comparable, ConcurrentMap<Data, QueryableEntry>> mapRecords
            = new ConcurrentHashMap<Comparable, ConcurrentMap<Data, QueryableEntry>>(1000);

    @Override
    public void getSubRecordsBetween(MultiResultSet results, Comparable from, Comparable to) {
        int trend = from.compareTo(to);
        if (trend == 0) {
            // Degenerate range [x, x]: only the records for that single value.
            ConcurrentMap<Data, QueryableEntry> records = mapRecords.get(from);
            if (records != null) {
                results.addResultSet(records);
            }
            return;
        }
        if (trend < 0) {
            // Normalize so 'from' is the upper bound and 'to' the lower one,
            // matching the comparisons in the scan below.
            Comparable oldFrom = from;
            from = to;
            to = oldFrom;
        }
        // Unordered store: scan all distinct values; bounds are inclusive.
        Set<Comparable> values = mapRecords.keySet();
        for (Comparable value : values) {
            if (value.compareTo(from) <= 0 && value.compareTo(to) >= 0) {
                ConcurrentMap<Data, QueryableEntry> records = mapRecords.get(value);
                if (records != null) {
                    results.addResultSet(records);
                }
            }
        }
    }

    @Override
    public void getSubRecords(MultiResultSet results, ComparisonType comparisonType, Comparable searchedValue) {
        // Linear scan over all distinct values; keep those satisfying the comparison.
        Set<Comparable> values = mapRecords.keySet();
        for (Comparable value : values) {
            boolean valid;
            int result = value.compareTo(searchedValue);
            switch (comparisonType) {
                case LESSER:
                    valid = result < 0;
                    break;
                case LESSER_EQUAL:
                    valid = result <= 0;
                    break;
                case GREATER:
                    valid = result > 0;
                    break;
                case GREATER_EQUAL:
                    valid = result >= 0;
                    break;
                case NOT_EQUAL:
                    valid = result != 0;
                    break;
                default:
                    throw new IllegalStateException("Unrecognized comparisonType:" + comparisonType);
            }
            if (valid) {
                ConcurrentMap<Data, QueryableEntry> records = mapRecords.get(value);
                if (records != null) {
                    results.addResultSet(records);
                }
            }
        }
    }

    @Override
    public void newIndex(Comparable newValue, QueryableEntry record) {
        Data indexKey = record.getIndexKey();
        ConcurrentMap<Data, QueryableEntry> records = mapRecords.get(newValue);
        if (records == null) {
            // First entry for this value: publish a new inner map, resolving
            // races with concurrent writers via putIfAbsent.
            records = new ConcurrentHashMap<Data, QueryableEntry>();
            ConcurrentMap<Data, QueryableEntry> existing = mapRecords.putIfAbsent(newValue, records);
            if (existing != null) {
                records = existing;
            }
        }
        records.put(indexKey, record);
    }

    @Override
    public ConcurrentMap<Data, QueryableEntry> getRecordMap(Comparable indexValue) {
        // May return null when no entry has this value.
        return mapRecords.get(indexValue);
    }

    @Override
    public void removeIndex(Comparable oldValue, Data indexKey) {
        ConcurrentMap<Data, QueryableEntry> records = mapRecords.get(oldValue);
        if (records != null) {
            records.remove(indexKey);
            if (records.size() == 0) {
                // NOTE(review): check-then-act -- a concurrent newIndex() for
                // the same value can insert between this emptiness check and
                // the remove, losing its entry. Confirm whether callers
                // serialize index mutations.
                mapRecords.remove(oldValue);
            }
        }
    }

    @Override
    public Set<QueryableEntry> getRecords(Comparable value) {
        // NOTE(review): mapRecords.get(value) may be null for an unknown
        // value; assumes SingleResultSet tolerates a null map -- confirm.
        return new SingleResultSet(mapRecords.get(value));
    }

    @Override
    public void getRecords(MultiResultSet results, Set<Comparable> values) {
        for (Comparable value : values) {
            ConcurrentMap<Data, QueryableEntry> records = mapRecords.get(value);
            if (records != null) {
                results.addResultSet(records);
            }
        }
    }

    @Override
    public void clear() {
        mapRecords.clear();
    }

    @Override
    public String toString() {
        return "UnsortedIndexStore{"
                + "mapRecords=" + mapRecords.size()
                + '}';
    }
}
| hazelcast_src_main_java_com_hazelcast_query_impl_UnsortedIndexStore.java |
/**
 * SQL INTERSECT function. With a single argument it acts as an aggregate
 * across records (stateful: accumulates into the inherited 'context' set);
 * with multiple arguments it intersects them in-line per record (stateless).
 */
public class OSQLFunctionIntersect extends OSQLFunctionMultiValueAbstract<Set<Object>> {
  public static final String NAME = "intersect";

  public OSQLFunctionIntersect() {
    // Between 1 and unlimited parameters.
    super(NAME, 1, -1);
  }

  public Object execute(final OIdentifiable iCurrentRecord, Object iCurrentResult, final Object[] iParameters,
      OCommandContext iContext) {
    Object value = iParameters[0];

    // Resolve context variables against the current record.
    if (value instanceof OSQLFilterItemVariable)
      value = ((OSQLFilterItemVariable) value).getValue(iCurrentRecord, iContext);

    if (value == null)
      return Collections.emptySet();

    // Normalize a scalar to a single-element collection.
    if (!(value instanceof Collection<?>))
      value = Arrays.asList(value);

    final Collection<?> coll = (Collection<?>) value;

    if (iParameters.length == 1) {
      // AGGREGATION MODE (STATEFULL)
      if (context == null) {
        // ADD ALL THE ITEMS OF THE FIRST COLLECTION
        context = new HashSet<Object>(coll);
      } else {
        // INTERSECT IT AGAINST THE CURRENT COLLECTION
        context.retainAll(coll);
      }
      // Aggregate result is presumably retrieved later by the aggregation
      // framework from 'context' -- confirm against the base class.
      return null;
    } else {
      // IN-LINE MODE (STATELESS)
      final HashSet<Object> result = new HashSet<Object>(coll);
      for (int i = 1; i < iParameters.length; ++i) {
        value = iParameters[i];
        if (value instanceof OSQLFilterItemVariable)
          value = ((OSQLFilterItemVariable) value).getValue(iCurrentRecord, iContext);

        if (value != null) {
          if (!(value instanceof Collection<?>))
            // CONVERT IT INTO A COLLECTION
            value = Arrays.asList(value);
          result.retainAll((Collection<?>) value);
        } else
          // A null argument empties the intersection.
          result.clear();
      }
      return result;
    }
  }

  public String getSyntax() {
    return "Syntax error: intersect(<field>*)";
  }

  /** Merges per-node partial results by intersecting them (distributed queries). */
  @SuppressWarnings("unchecked")
  @Override
  public Object mergeDistributedResult(List<Object> resultsToMerge) {
    final Collection<Object> result = new HashSet<Object>();
    if (!resultsToMerge.isEmpty()) {
      // Seed with the first node's items...
      final Collection<Object> items = (Collection<Object>) resultsToMerge.get(0);
      if (items != null) {
        result.addAll(items);
      }
    }
    // ...then retain only what every other node also returned.
    for (int i = 1; i < resultsToMerge.size(); i++) {
      final Collection<Object> items = (Collection<Object>) resultsToMerge.get(i);
      if (items != null) {
        result.retainAll(items);
      }
    }
    return result;
  }
}
| core_src_main_java_com_orientechnologies_orient_core_sql_functions_coll_OSQLFunctionIntersect.java |
// Configuration surface for log-backed XA data sources; re-exposes the
// graph-database setting so subclasses read it through their own
// Configuration type.
public static abstract class Configuration
{
    // TODO This config should be split into a boolean and a string (keep_logical_logs vs kept_logical_logs)
    public static final Setting<String> keep_logical_logs = GraphDatabaseSettings.keep_logical_logs;
}
| community_kernel_src_main_java_org_neo4j_kernel_impl_transaction_xaframework_LogBackedXaDataSource.java |
// Load-test op: queue containment check with a 100-byte payload; exercises
// serialization + lookup. contains() on a freshly allocated byte[] is
// effectively always false. The trailing int is presumably a selection
// weight -- confirm against addOperation's signature.
addOperation(operations, new Runnable() {
    public void run() {
        IQueue q = hazelcast.getQueue("myQ");
        q.contains(new byte[100]);
    }
}, 1);
| hazelcast_src_main_java_com_hazelcast_examples_AllTest.java |
/**
 * Base class for rule-builder field services: exposes the set of fields (with
 * their operators and options) the admin rule builder can target for a given
 * DTO class. Subclasses supply the DTO class name and register their fields
 * in init(); registration is validated against the entity metadata.
 */
public abstract class AbstractRuleBuilderFieldService implements RuleBuilderFieldService, ApplicationContextAware, InitializingBean {

    protected DynamicEntityDao dynamicEntityDao;
    protected ApplicationContext applicationContext;
    // Replaced by a validating proxy in setFields().
    protected List<FieldData> fields = new ArrayList<FieldData>();

    @Override
    public void setApplicationContext(ApplicationContext applicationContext) throws BeansException {
        this.applicationContext = applicationContext;
    }

    /** Builds the wrapper DTO consumed by the rule-builder UI, localizing labels. */
    @Override
    public FieldWrapper buildFields() {
        FieldWrapper wrapper = new FieldWrapper();
        for (FieldData field : getFields()) {
            FieldDTO fieldDTO = new FieldDTO();
            fieldDTO.setLabel(field.getFieldLabel());
            //translate the label to display
            String label = field.getFieldLabel();
            BroadleafRequestContext context = BroadleafRequestContext.getBroadleafRequestContext();
            MessageSource messages = context.getMessageSource();
            // Falls back to the raw label key when no message is defined.
            label = messages.getMessage(label, null, label, context.getJavaLocale());
            fieldDTO.setLabel(label);
            fieldDTO.setName(field.getFieldName());
            fieldDTO.setOperators(field.getOperators());
            fieldDTO.setOptions(field.getOptions());
            wrapper.getFields().add(fieldDTO);
        }
        return wrapper;
    }

    /** Returns the registered field type for the given name, or null if unknown. */
    @Override
    public SupportedFieldType getSupportedFieldType(String fieldName) {
        SupportedFieldType type = null;
        if (fieldName != null) {
            for (FieldData field : getFields()) {
                if (fieldName.equals(field.getFieldName())){
                    return field.getFieldType();
                }
            }
        }
        return type;
    }

    /** Returns the registered secondary field type for the given name, or null. */
    @Override
    public SupportedFieldType getSecondaryFieldType(String fieldName) {
        SupportedFieldType type = null;
        if (fieldName != null) {
            for (FieldData field : getFields()) {
                if (fieldName.equals(field.getFieldName())){
                    return field.getSecondaryFieldType();
                }
            }
        }
        return type;
    }

    // NOTE(review): unlike buildFields(), this returns the raw label without
    // message-source translation -- confirm whether that asymmetry is intended.
    @Override
    public FieldDTO getField(String fieldName) {
        for (FieldData field : getFields()) {
            if (field.getFieldName().equals(fieldName)) {
                FieldDTO fieldDTO = new FieldDTO();
                fieldDTO.setLabel(field.getFieldLabel());
                fieldDTO.setName(field.getFieldName());
                fieldDTO.setOperators(field.getOperators());
                fieldDTO.setOptions(field.getOptions());
                return fieldDTO;
            }
        }
        return null;
    }

    @Override
    public List<FieldData> getFields() {
        return fields;
    }

    // Wraps the given list in a dynamic proxy that validates each FieldData
    // added via add/addAll: the declared field name must resolve on the DTO
    // class (or a registered subclass) through the entity metadata.
    @Override
    @SuppressWarnings("unchecked")
    public void setFields(final List<FieldData> fields) {
        List<FieldData> proxyFields = (List<FieldData>) Proxy.newProxyInstance(getClass().getClassLoader(), new Class<?>[]{List.class}, new InvocationHandler() {
            @Override
            public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
                if (method.getName().equals("add")) {
                    FieldData fieldData = (FieldData) args[0];
                    testFieldName(fieldData);
                }
                if (method.getName().equals("addAll")) {
                    Collection<FieldData> addCollection = (Collection<FieldData>) args[0];
                    Iterator<FieldData> itr = addCollection.iterator();
                    while (itr.hasNext()) {
                        FieldData fieldData = itr.next();
                        testFieldName(fieldData);
                    }
                }
                return method.invoke(fields, args);
            }

            // Throws IllegalArgumentException when the field name cannot be
            // resolved on the DTO class or any of its registered subclasses.
            private void testFieldName(FieldData fieldData) throws ClassNotFoundException {
                if (!StringUtils.isEmpty(fieldData.getFieldName()) && dynamicEntityDao != null) {
                    Class<?>[] dtos = dynamicEntityDao.getAllPolymorphicEntitiesFromCeiling(Class.forName(getDtoClassName()));
                    if (ArrayUtils.isEmpty(dtos)) {
                        dtos = new Class<?>[]{Class.forName(getDtoClassName())};
                    }
                    Field field = null;
                    for (Class<?> dto : dtos) {
                        field = dynamicEntityDao.getFieldManager().getField(dto, fieldData.getFieldName());
                        if (field != null) {
                            break;
                        }
                    }
                    if (field == null) {
                        throw new IllegalArgumentException("Unable to find the field declared in FieldData (" + fieldData.getFieldName() + ") on the target class (" + getDtoClassName() + "), or any registered entity class that derives from it.");
                    }
                }
            }
        });
        this.fields = proxyFields;
    }

    // Shallow clone: the new instance shares this service's field list.
    @Override
    public RuleBuilderFieldService clone() throws CloneNotSupportedException {
        try {
            RuleBuilderFieldService clone = this.getClass().newInstance();
            clone.setFields(this.fields);
            return clone;
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

    /** Fully-qualified class name of the DTO this service describes. */
    public abstract String getDtoClassName();

    /** Registers this service's FieldData entries (called from afterPropertiesSet). */
    public abstract void init();

    @Override
    public void afterPropertiesSet() throws Exception {
        // This bean only is valid when the following bean is active. (admin)
        if (applicationContext.containsBean(DynamicEntityRemoteService.DEFAULTPERSISTENCEMANAGERREF)) {
            PersistenceManager persistenceManager = (PersistenceManager) applicationContext.getBean(DynamicEntityRemoteService.DEFAULTPERSISTENCEMANAGERREF);
            persistenceManager.setTargetMode(TargetModeType.SANDBOX);
            dynamicEntityDao = persistenceManager.getDynamicEntityDao();
            setFields(new ArrayList<FieldData>());

            // This cannot be null during startup as we do not want to remove the null safety checks in a multi-tenant env.
            boolean contextWasNull = false;
            if (BroadleafRequestContext.getBroadleafRequestContext() == null) {
                BroadleafRequestContext brc = new BroadleafRequestContext();
                brc.setIgnoreSite(true);
                BroadleafRequestContext.setBroadleafRequestContext(brc);
                contextWasNull = true;
            }

            try {
                init();
            } finally {
                // Only clear the context if this method installed it.
                if (contextWasNull) {
                    BroadleafRequestContext.setBroadleafRequestContext(null);
                }
            }
        }
    }
}
| admin_broadleaf-open-admin-platform_src_main_java_org_broadleafcommerce_openadmin_web_rulebuilder_service_AbstractRuleBuilderFieldService.java |
// Load-test op: remove a random key in [0, SIZE) from "myMap". The trailing
// int is presumably a selection weight -- confirm against addOperation's
// signature.
addOperation(operations, new Runnable() {
    public void run() {
        IMap map = hazelcast.getMap("myMap");
        map.remove(random.nextInt(SIZE));
    }
}, 10);
| hazelcast_src_main_java_com_hazelcast_examples_AllTest.java |
1,609 | public class OCopyDatabaseChunkTask extends OAbstractReplicatedTask {
private static final long serialVersionUID = 1L;
private String databaseName;
private boolean lastChunk = false;
private byte[] chunkContent;
public OCopyDatabaseChunkTask() {
}
public OCopyDatabaseChunkTask(final byte[] chunk) {
chunkContent = chunk;
}
@Override
public Object execute(final OServer iServer, ODistributedServerManager iManager, final ODatabaseDocumentTx database)
throws Exception {
ODistributedServerLog.warn(this, iManager.getLocalNodeName(), getNodeSource(), DIRECTION.OUT,
"writing database %s in chunk to disk size=%d...", database.getName(), chunkContent.length);
final File f = new File("importDatabase/" + database.getName());
final FileOutputStream out = new FileOutputStream(f, true);
try {
final ByteArrayInputStream in = new ByteArrayInputStream(chunkContent);
try {
OIOUtils.copyStream(in, out, chunkContent.length);
} finally {
in.close();
}
} finally {
out.close();
}
if (lastChunk)
try {
ODistributedServerLog.warn(this, iManager.getLocalNodeName(), getNodeSource(), DIRECTION.OUT, "importing database %s...",
database.getName());
final ODatabaseImport importDb = new ODatabaseImport(database, f.getAbsolutePath(), null);
try {
importDb.importDatabase();
} finally {
ODistributedServerLog.warn(this, iManager.getLocalNodeName(), getNodeSource(), DIRECTION.OUT,
"database %s imported correctly", database.getName());
importDb.close();
}
} finally {
OFileUtils.deleteRecursively(new File("importDatabase"));
}
return Boolean.TRUE;
}
public QUORUM_TYPE getQuorumType() {
return QUORUM_TYPE.NONE;
}
@Override
public String getPayload() {
return null;
}
@Override
public OFixUpdateRecordTask getFixTask(ODistributedRequest iRequest, ODistributedResponse iBadResponse, ODistributedResponse iGoodResponse) {
return null;
}
@Override
public String getName() {
return "deploy_db";
}
@Override
public void writeExternal(final ObjectOutput out) throws IOException {
out.writeUTF(databaseName);
out.write(chunkContent);
out.writeBoolean(lastChunk);
}
@Override
public void readExternal(final ObjectInput in) throws IOException, ClassNotFoundException {
databaseName = in.readUTF();
in.read(chunkContent);
lastChunk = in.readBoolean();
}
} | 1no label
| server_src_main_java_com_orientechnologies_orient_server_distributed_task_OCopyDatabaseChunkTask.java |
/**
 * Parses the {@code has_child} filter: matches parent documents having at
 * least one child of the given type satisfying an inner query or filter.
 */
public class HasChildFilterParser implements FilterParser {

    public static final String NAME = "has_child";

    @Inject
    public HasChildFilterParser() {
    }

    @Override
    public String[] names() {
        // Registered under both snake_case and camelCase spellings.
        return new String[]{NAME, Strings.toCamelCase(NAME)};
    }

    @Override
    public Filter parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
        XContentParser parser = parseContext.parser();

        Query query = null;
        boolean queryFound = false;
        String childType = null;
        int shortCircuitParentDocSet = 8192; // Tests show a cut of point between 8192 and 16384.
        String filterName = null;
        String currentFieldName = null;
        XContentParser.Token token;
        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
            if (token == XContentParser.Token.FIELD_NAME) {
                currentFieldName = parser.currentName();
            } else if (token == XContentParser.Token.START_OBJECT) {
                if ("query".equals(currentFieldName)) {
                    // TODO we need to set the type, but, `query` can come before `type`...
                    // since we switch types, make sure we change the context
                    // Temporarily narrow the parse context to the child type
                    // (null-safe: childType may not have been seen yet).
                    String[] origTypes = QueryParseContext.setTypesWithPrevious(childType == null ? null : new String[]{childType});
                    try {
                        query = parseContext.parseInnerQuery();
                        queryFound = true;
                    } finally {
                        QueryParseContext.setTypes(origTypes);
                    }
                } else if ("filter".equals(currentFieldName)) {
                    // TODO handle `filter` element before `type` element...
                    String[] origTypes = QueryParseContext.setTypesWithPrevious(childType == null ? null : new String[]{childType});
                    try {
                        // An inner filter is adapted to a constant-score query.
                        Filter innerFilter = parseContext.parseInnerFilter();
                        query = new XConstantScoreQuery(innerFilter);
                        queryFound = true;
                    } finally {
                        QueryParseContext.setTypes(origTypes);
                    }
                } else {
                    throw new QueryParsingException(parseContext.index(), "[has_child] filter does not support [" + currentFieldName + "]");
                }
            } else if (token.isValue()) {
                if ("type".equals(currentFieldName) || "child_type".equals(currentFieldName) || "childType".equals(currentFieldName)) {
                    childType = parser.text();
                } else if ("_scope".equals(currentFieldName)) {
                    throw new QueryParsingException(parseContext.index(), "the [_scope] support in [has_child] filter has been removed, use a filter as a facet_filter in the relevant global facet");
                } else if ("_name".equals(currentFieldName)) {
                    filterName = parser.text();
                } else if ("_cache".equals(currentFieldName)) {
                    // noop to be backwards compatible
                } else if ("_cache_key".equals(currentFieldName) || "_cacheKey".equals(currentFieldName)) {
                    // noop to be backwards compatible
                } else if ("short_circuit_cutoff".equals(currentFieldName)) {
                    shortCircuitParentDocSet = parser.intValue();
                } else {
                    throw new QueryParsingException(parseContext.index(), "[has_child] filter does not support [" + currentFieldName + "]");
                }
            }
        }
        if (!queryFound) {
            throw new QueryParsingException(parseContext.index(), "[has_child] filter requires 'query' field");
        }
        if (query == null) {
            // The inner query element parsed to nothing: no-op filter.
            return null;
        }
        if (childType == null) {
            throw new QueryParsingException(parseContext.index(), "[has_child] filter requires 'type' field");
        }

        DocumentMapper childDocMapper = parseContext.mapperService().documentMapper(childType);
        if (childDocMapper == null) {
            throw new QueryParsingException(parseContext.index(), "No mapping for for type [" + childType + "]");
        }
        if (!childDocMapper.parentFieldMapper().active()) {
            throw new QueryParsingException(parseContext.index(), "Type [" + childType + "] does not have parent mapping");
        }
        String parentType = childDocMapper.parentFieldMapper().type();

        // wrap the query with type query
        query = new XFilteredQuery(query, parseContext.cacheFilter(childDocMapper.typeFilter(), null));

        DocumentMapper parentDocMapper = parseContext.mapperService().documentMapper(parentType);
        if (parentDocMapper == null) {
            throw new QueryParsingException(parseContext.index(), "[has_child] Type [" + childType + "] points to a non existent parent type [" + parentType + "]");
        }

        // Exclude nested documents from the parent set when the parent mapping uses nested objects.
        Filter nonNestedDocsFilter = null;
        if (parentDocMapper.hasNestedObjects()) {
            nonNestedDocsFilter = parseContext.cacheFilter(NonNestedDocsFilter.INSTANCE, null);
        }

        Filter parentFilter = parseContext.cacheFilter(parentDocMapper.typeFilter(), null);
        Query childrenConstantScoreQuery = new ChildrenConstantScoreQuery(query, parentType, childType, parentFilter, shortCircuitParentDocSet, nonNestedDocsFilter);

        if (filterName != null) {
            parseContext.addNamedFilter(filterName, new CustomQueryWrappingFilter(childrenConstantScoreQuery));
        }

        // delete-by-query needs a different wrapper around the child query.
        boolean deleteByQuery = "delete_by_query".equals(SearchContext.current().source());
        if (deleteByQuery) {
            return new DeleteByQueryWrappingFilter(childrenConstantScoreQuery);
        } else {
            return new CustomQueryWrappingFilter(childrenConstantScoreQuery);
        }
    }
}
| src_main_java_org_elasticsearch_index_query_HasChildFilterParser.java |
/**
 * Admin-presentation metadata: tab/group names and ordering constants
 * referenced from the admin UI annotations of this entity. Order values are
 * relative within their tab/group, so the same number may legitimately
 * recur across groups (e.g. RETAIL=1000 after PHONE=8000) -- presumably
 * intentional; confirm against the annotation usage.
 */
public static class Presentation {
    public static class Tab {
        public static class Name {
            public static final String Items = "FulfillmentGroupImpl_Items_Tab";
            public static final String Pricing = "FulfillmentGroupImpl_Pricing_Tab";
            public static final String Address = "FulfillmentGroupImpl_Address_Tab";
            public static final String Advanced = "FulfillmentGroupImpl_Advanced_Tab";
        }
        public static class Order {
            public static final int Items = 2000;
            public static final int Pricing = 3000;
            public static final int Address = 4000;
            public static final int Advanced = 5000;
        }
    }
    public static class Group {
        public static class Name {
            public static final String Pricing = "FulfillmentGroupImpl_Pricing";
        }
        public static class Order {
            public static final int General = 1000;
            public static final int Pricing = 2000;
        }
    }
    public static class FieldOrder {
        public static final int REFNUMBER = 3000;
        public static final int STATUS = 4000;
        public static final int TYPE = 5000;
        public static final int DELIVERINSTRUCTION = 6000;
        public static final int PRIMARY = 7000;
        public static final int PHONE = 8000;
        public static final int RETAIL = 1000;
        public static final int SALE = 2000;
        public static final int PRICE = 3000;
        public static final int ITEMTAX = 4000;
        public static final int FEETAX = 5000;
        public static final int FGTAX = 6000;
        public static final int TOTALTAX = 7000;
        public static final int MERCHANDISETOTAL = 8000;
        public static final int TOTAL = 9000;
        public static final int TAXABLE = 10000;
    }
}
| core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_order_domain_FulfillmentGroupImpl.java |
2,287 | public class NoneRecyclerTests extends AbstractRecyclerTests {
@Override
protected Recycler<byte[]> newRecycler() {
return Recyclers.none(RECYCLER_C);
}
} | 0true
| src_test_java_org_elasticsearch_common_recycler_NoneRecyclerTests.java |
1,225 | public interface OStorageProxy extends OStorage {
} | 0true
| core_src_main_java_com_orientechnologies_orient_core_storage_OStorageProxy.java |
1,818 | return new ConstructionProxy<T>() {
public T newInstance(Object... arguments) throws InvocationTargetException {
try {
return constructor.newInstance(arguments);
} catch (InstantiationException e) {
throw new AssertionError(e); // shouldn't happen, we know this is a concrete type
} catch (IllegalAccessException e) {
throw new AssertionError(e); // a security manager is blocking us, we're hosed
}
}
public InjectionPoint getInjectionPoint() {
return injectionPoint;
}
public Constructor<T> getConstructor() {
return constructor;
}
}; | 0true
| src_main_java_org_elasticsearch_common_inject_DefaultConstructionProxyFactory.java |
1,342 | public class OUpdatePageRecord extends OAbstractPageWALRecord {
private OPageChanges pageChanges;
private OLogSequenceNumber prevLsn;
public OUpdatePageRecord() {
}
public OUpdatePageRecord(long pageIndex, long fileId, OOperationUnitId operationUnitId, OPageChanges pageChanges,
OLogSequenceNumber prevLsn) {
super(pageIndex, fileId, operationUnitId);
this.pageChanges = pageChanges;
this.prevLsn = prevLsn;
assert prevLsn != null;
}
public OPageChanges getChanges() {
return pageChanges;
}
public OLogSequenceNumber getPrevLsn() {
return prevLsn;
}
@Override
public int serializedSize() {
int serializedSize = super.serializedSize();
serializedSize += 2 * OLongSerializer.LONG_SIZE;
serializedSize += pageChanges.serializedSize();
return serializedSize;
}
@Override
public int toStream(byte[] content, int offset) {
offset = super.toStream(content, offset);
OLongSerializer.INSTANCE.serializeNative(prevLsn.getPosition(), content, offset);
offset += OLongSerializer.LONG_SIZE;
OLongSerializer.INSTANCE.serializeNative(prevLsn.getSegment(), content, offset);
offset += OLongSerializer.LONG_SIZE;
offset = pageChanges.toStream(content, offset);
return offset;
}
@Override
public int fromStream(byte[] content, int offset) {
offset = super.fromStream(content, offset);
long position = OLongSerializer.INSTANCE.deserializeNative(content, offset);
offset += OLongSerializer.LONG_SIZE;
long segment = OLongSerializer.INSTANCE.deserializeNative(content, offset);
offset += OLongSerializer.LONG_SIZE;
prevLsn = new OLogSequenceNumber(segment, position);
pageChanges = new OPageChanges();
offset = pageChanges.fromStream(content, offset);
return offset;
}
@Override
public boolean isUpdateMasterRecord() {
return false;
}
@Override
public boolean equals(Object o) {
if (this == o)
return true;
if (o == null || getClass() != o.getClass())
return false;
if (!super.equals(o))
return false;
OUpdatePageRecord that = (OUpdatePageRecord) o;
if (!prevLsn.equals(that.prevLsn))
return false;
return true;
}
@Override
public int hashCode() {
int result = super.hashCode();
result = 31 * result + prevLsn.hashCode();
return result;
}
@Override
public String toString() {
return "OUpdatePageRecord{" + "pageChanges=" + pageChanges + ", prevLsn=" + prevLsn + '}';
}
} | 0true
| core_src_main_java_com_orientechnologies_orient_core_storage_impl_local_paginated_wal_OUpdatePageRecord.java |
728 | public class CollectionAddAllRequest extends CollectionRequest {
protected List<Data> valueList;
public CollectionAddAllRequest() {
}
public CollectionAddAllRequest(String name, List<Data> valueList) {
super(name);
this.valueList = valueList;
}
@Override
protected Operation prepareOperation() {
return new CollectionAddAllOperation(name, valueList);
}
@Override
public int getClassId() {
return CollectionPortableHook.COLLECTION_ADD_ALL;
}
public void write(PortableWriter writer) throws IOException {
super.write(writer);
final ObjectDataOutput out = writer.getRawDataOutput();
out.writeInt(valueList.size());
for (Data value : valueList) {
value.writeData(out);
}
}
public void read(PortableReader reader) throws IOException {
super.read(reader);
final ObjectDataInput in = reader.getRawDataInput();
final int size = in.readInt();
valueList = new ArrayList<Data>(size);
for (int i = 0; i < size; i++) {
final Data value = new Data();
value.readData(in);
valueList.add(value);
}
}
@Override
public String getRequiredAction() {
return ActionConstants.ACTION_ADD;
}
} | 0true
| hazelcast_src_main_java_com_hazelcast_collection_client_CollectionAddAllRequest.java |
678 | public static class Name {
public static final String General = "CategoryImpl_Category_Description";
public static final String ActiveDateRange = "CategoryImpl_Active_Date_Range";
public static final String Advanced = "CategoryImpl_Advanced";
} | 0true
| core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_catalog_domain_CategoryImpl.java |
1,462 | public abstract class AbstractAccountController extends BroadleafAbstractController {
@Resource(name="blOrderService")
protected OrderService orderService;
@Resource(name="blCatalogService")
protected CatalogService catalogService;
} | 0true
| core_broadleaf-framework-web_src_main_java_org_broadleafcommerce_core_web_controller_account_AbstractAccountController.java |
845 | return new IAnswer<OrderItemPriceDetailAdjustment>() {
@Override
public OrderItemPriceDetailAdjustment answer() throws Throwable {
return new OrderItemPriceDetailAdjustmentImpl();
}
}; | 0true
| core_broadleaf-framework_src_test_java_org_broadleafcommerce_core_offer_service_OfferDataItemProvider.java |
2,363 | private class SingleExecutionProcessor
implements Runnable {
@Override
public void run() {
try {
RequestPartitionResult result = mapReduceService
.processRequest(supervisor.getJobOwner(), new RequestMemberIdAssignment(name, jobId), name);
// JobSupervisor doesn't exists anymore on jobOwner, job done?
if (result.getResultState() == NO_SUPERVISOR) {
return;
} else if (result.getResultState() == NO_MORE_PARTITIONS) {
return;
}
int partitionId = result.getPartitionId();
KeyValueSource<KeyIn, ValueIn> delegate = keyValueSource;
if (supervisor.getConfiguration().isCommunicateStats()) {
delegate = new KeyValueSourceFacade<KeyIn, ValueIn>(keyValueSource, supervisor);
}
delegate.reset();
if (delegate.open(nodeEngine)) {
DefaultContext<KeyOut, ValueOut> context = supervisor.getOrCreateContext(MapCombineTask.this);
processMapping(partitionId, context, delegate);
delegate.close();
finalizeMapping(partitionId, context);
} else {
// Partition assignment might not be ready yet, postpone the processing and retry later
postponePartitionProcessing(partitionId);
}
} catch (Throwable t) {
handleProcessorThrowable(t);
}
}
} | 1no label
| hazelcast_src_main_java_com_hazelcast_mapreduce_impl_task_MapCombineTask.java |
133 | class InlineDeclarationProposal implements ICompletionProposal,
ICompletionProposalExtension6 {
private final InlineRefactoringAction action;
public InlineDeclarationProposal(CeylonEditor editor) {
action = new InlineRefactoringAction(editor);
}
@Override
public Point getSelection(IDocument doc) {
return null;
}
@Override
public Image getImage() {
return CeylonLabelProvider.COMPOSITE_CHANGE;
}
@Override
public String getDisplayString() {
return "Inline '" + action.currentName() + "'";
}
@Override
public IContextInformation getContextInformation() {
return null;
}
@Override
public String getAdditionalProposalInfo() {
return null;
}
@Override
public void apply(IDocument doc) {
action.run();
}
boolean isEnabled() {
return action.isEnabled();
}
public static void add(Collection<ICompletionProposal> proposals, CeylonEditor editor) {
InlineDeclarationProposal prop = new InlineDeclarationProposal(editor);
if (prop.isEnabled()) {
proposals.add(prop);
}
}
@Override
public StyledString getStyledDisplayString() {
return Highlights.styleProposal(getDisplayString(), false);
}
} | 0true
| plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_correct_InlineDeclarationProposal.java |
260 | {
@Override
public boolean visit( XaCommand element )
{
if ( element instanceof PropertyCommand )
{
// THEN
PropertyCommand propertyCommand = (PropertyCommand) element;
verifyPropertyRecord( propertyCommand.getBefore() );
verifyPropertyRecord( propertyCommand.getAfter() );
return true;
}
return false;
}
private void verifyPropertyRecord( PropertyRecord record )
{
if ( record.getPrevProp() != Record.NO_NEXT_PROPERTY.intValue() )
{
for ( PropertyBlock block : record.getPropertyBlocks() )
{
assertTrue( block.isLight() );
}
}
}
}; | 0true
| community_kernel_src_test_java_org_neo4j_kernel_impl_nioneo_xa_WriteTransactionTest.java |
1,771 | map.addEntryListener(new EntryAdapter() {
@Override
public void entryEvicted(EntryEvent event) {
super.entryEvicted(event);
latch.countDown();
}
}, false); | 0true
| hazelcast_src_test_java_com_hazelcast_map_EvictionTest.java |
1,732 | public interface EntryProcessor<K, V> extends Serializable {
/**
* Process the entry without worrying about concurrency.
* <p/>
*
* @param entry entry to be processes
* @return result of the process
*/
Object process(Map.Entry<K, V> entry);
/**
* Get the entry processor to be applied to backup entries.
* <p/>
*
* @return back up processor
*/
EntryBackupProcessor<K, V> getBackupProcessor();
} | 0true
| hazelcast_src_main_java_com_hazelcast_map_EntryProcessor.java |
164 | {
int doneRecordCount = 0;
@Override
public boolean accept( LogEntry item )
{
//System.out.println(item);
if( item instanceof LogEntry.Done)
{
doneRecordCount++;
// Accept everything except the second done record we find
if( doneRecordCount == 2)
{
brokenTxIdentifier.set( item.getIdentifier() );
return false;
}
}
// Not a done record, not our concern
return true;
}
}; | 0true
| community_kernel_src_test_java_org_neo4j_kernel_impl_transaction_xaframework_TestPartialTransactionCopier.java |
137 | public interface TitanManagement extends TitanConfiguration, SchemaManager {
/*
##################### RELATION TYPE INDEX ##########################
*/
/**
* Identical to {@link #buildEdgeIndex(com.thinkaurelius.titan.core.EdgeLabel, String, com.tinkerpop.blueprints.Direction, com.thinkaurelius.titan.core.Order, com.thinkaurelius.titan.core.RelationType...)}
* with default sort order {@link Order#ASC}.
*
* @param label
* @param name
* @param direction
* @param sortKeys
* @return the created {@link RelationTypeIndex}
*/
public RelationTypeIndex buildEdgeIndex(EdgeLabel label, String name, Direction direction, RelationType... sortKeys);
/**
* Creates a {@link RelationTypeIndex} for the provided edge label. That means, that all edges of that label will be
* indexed according to this index definition which will speed up certain vertex-centric queries.
* <p/>
* An indexed is defined by its name, the direction in which the index should be created (can be restricted to one
* direction or both), the sort order and - most importantly - the sort keys which define the index key.
*
* @param label
* @param name
* @param direction
* @param sortOrder
* @param sortKeys
* @return the created {@link RelationTypeIndex}
*/
public RelationTypeIndex buildEdgeIndex(EdgeLabel label, String name, Direction direction, Order sortOrder, RelationType... sortKeys);
/**
* Identical to {@link #buildPropertyIndex(com.thinkaurelius.titan.core.PropertyKey, String, com.thinkaurelius.titan.core.Order, com.thinkaurelius.titan.core.RelationType...)}
* with default sort order {@link Order#ASC}.
*
* @param key
* @param name
* @param sortKeys
* @return the created {@link RelationTypeIndex}
*/
public RelationTypeIndex buildPropertyIndex(PropertyKey key, String name, RelationType... sortKeys);
/**
* Creates a {@link RelationTypeIndex} for the provided property key. That means, that all properties of that key will be
* indexed according to this index definition which will speed up certain vertex-centric queries.
* <p/>
* An indexed is defined by its name, the sort order and - most importantly - the sort keys which define the index key.
*
* @param key
* @param name
* @param sortOrder
* @param sortKeys
* @return the created {@link RelationTypeIndex}
*/
public RelationTypeIndex buildPropertyIndex(PropertyKey key, String name, Order sortOrder, RelationType... sortKeys);
/**
* Whether a {@link RelationTypeIndex} with the given name has been defined for the provided {@link RelationType}
* @param type
* @param name
* @return
*/
public boolean containsRelationIndex(RelationType type, String name);
/**
* Returns the {@link RelationTypeIndex} with the given name for the provided {@link RelationType} or null
* if it does not exist
*
* @param type
* @param name
* @return
*/
public RelationTypeIndex getRelationIndex(RelationType type, String name);
/**
* Returns an {@link Iterable} over all {@link RelationTypeIndex}es defined for the provided {@link RelationType}
* @param type
* @return
*/
public Iterable<RelationTypeIndex> getRelationIndexes(RelationType type);
/*
##################### GRAPH INDEX ##########################
*/
/**
* Whether the graph has a graph index defined with the given name.
*
* @param name
* @return
*/
public boolean containsGraphIndex(String name);
/**
* Returns the graph index with the given name or null if it does not exist
*
* @param name
* @return
*/
public TitanGraphIndex getGraphIndex(String name);
/**
* Returns all graph indexes that index the given element type.
*
* @param elementType
* @return
*/
public Iterable<TitanGraphIndex> getGraphIndexes(final Class<? extends Element> elementType);
/**
* Returns an {@link IndexBuilder} to add a graph index to this Titan graph. The index to-be-created
* has the provided name and indexes elements of the given type.
*
* @param indexName
* @param elementType
* @return
*/
public IndexBuilder buildIndex(String indexName, Class<? extends Element> elementType);
public void addIndexKey(final TitanGraphIndex index, final PropertyKey key, Parameter... parameters);
/**
* Builder for {@link TitanGraphIndex}. Allows for the configuration of a graph index prior to its construction.
*/
public interface IndexBuilder {
/**
* Adds the given key to the composite key of this index
*
* @param key
* @return this IndexBuilder
*/
public IndexBuilder addKey(PropertyKey key);
/**
* Adds the given key and associated parameters to the composite key of this index
* @param key
* @param parameters
* @return this IndexBuilder
*/
public IndexBuilder addKey(PropertyKey key, Parameter... parameters);
/**
* Restricts this index to only those elements that have the provided schemaType. If this graph index indexes
* vertices, then the argument is expected to be a vertex label and only vertices with that label will be indexed.
* Likewise, for edges and properties only those with the matching relation type will be indexed.
*
* @param schemaType
* @return this IndexBuilder
*/
public IndexBuilder indexOnly(TitanSchemaType schemaType);
/**
* Makes this a unique index for the configured element type,
* i.e. an index key can be associated with at most one element in the graph.
*
* @return this IndexBuilder
*/
public IndexBuilder unique();
/**
* Builds a composite index according to the specification
*
* @return the created composite {@link TitanGraphIndex}
*/
public TitanGraphIndex buildCompositeIndex();
/**
* Builds a mixed index according to the specification against the backend index with the given name (i.e.
* the name under which that index is configured in the graph configuration)
*
* @param backingIndex the name of the mixed index
* @return the created mixed {@link TitanGraphIndex}
*/
public TitanGraphIndex buildMixedIndex(String backingIndex);
}
/*
##################### CONSISTENCY SETTING ##########################
*/
/**
* Retrieves the consistency modifier for the given {@link TitanSchemaElement}. If none has been explicitly
* defined, {@link ConsistencyModifier#DEFAULT} is returned.
*
* @param element
* @return
*/
public ConsistencyModifier getConsistency(TitanSchemaElement element);
/**
* Sets the consistency modifier for the given {@link TitanSchemaElement}. Note, that only {@link RelationType}s
* and composite graph indexes allow changing of the consistency level.
*
* @param element
* @param consistency
*/
public void setConsistency(TitanSchemaElement element, ConsistencyModifier consistency);
/**
* Retrieves the time-to-live for the given {@link TitanSchemaType} as a {@link Duration}.
* If none has been explicitly defined, a zero-length {@link Duration} is returned.
*
* @param type
* @return
*/
public Duration getTTL(TitanSchemaType type);
/**
* Sets the time-to-live for the given {@link TitanSchemaType}. The most granular time unit used for TTL values
* is seconds. Any argument will be rounded to seconds if it is more granular than that.
*
* @param type the affected type
* @param ttl time-to-live
* @param unit time unit of the specified ttl
*/
public void setTTL(TitanSchemaType type, int ttl, TimeUnit unit);
/*
##################### SCHEMA UPDATE ##########################
*/
/**
* Changes the name of a {@link TitanSchemaElement} to the provided new name.
* The new name must be valid and not already in use, otherwise an {@link IllegalArgumentException} is thrown.
*
* @param element
* @param newName
*/
public void changeName(TitanSchemaElement element, String newName);
/**
* Updates the provided index according to the given {@link SchemaAction}
*
* @param index
* @param updateAction
*/
public void updateIndex(TitanIndex index, SchemaAction updateAction);
/*
##################### CLUSTER MANAGEMENT ##########################
*/
/**
* Returns a set of unique instance ids for all Titan instances that are currently
* part of this graph cluster.
*
* @return
*/
public Set<String> getOpenInstances();
/**
* Forcefully removes a Titan instance from this graph cluster as identified by its name.
* <p/>
* This method should be used with great care and only in cases where a Titan instance
* has been abnormally terminated (i.e. killed instead of properly shut-down). If this happens, the instance
* will continue to be listed as an open instance which means that 1) a new instance with the same id cannot
* be started and 2) schema updates will fail because the killed instance cannot acknowledge the schema update.
*
* <p/>
* Throws an exception if the instance is not part of this cluster or if the instance has
* been started after the start of this management transaction which is indicative of the instance
* having been restarted successfully.
*
* @param instanceId
*/
public void forceCloseInstance(String instanceId);
/**
* Returns an iterable over all defined types that have the given clazz (either {@link EdgeLabel} which returns all labels,
* {@link PropertyKey} which returns all keys, or {@link RelationType} which returns all types).
*
* @param clazz {@link RelationType} or sub-interface
* @param <T>
* @return Iterable over all types for the given category (label, key, or both)
*/
public <T extends RelationType> Iterable<T> getRelationTypes(Class<T> clazz);
/**
* Returns an {@link Iterable} over all defined {@link VertexLabel}s.
*
* @return
*/
public Iterable<VertexLabel> getVertexLabels();
/**
* Whether this management transaction is open or has been closed (i.e. committed or rolled-back)
* @return
*/
public boolean isOpen();
/**
* Commits this management transaction and persists all schema changes. Closes this transaction.
* @see com.thinkaurelius.titan.core.TitanTransaction#commit()
*/
public void commit();
/**
* Closes this management transaction and discards all changes.
* @see com.thinkaurelius.titan.core.TitanTransaction#rollback()
*/
public void rollback();
} | 0true
| titan-core_src_main_java_com_thinkaurelius_titan_core_schema_TitanManagement.java |
1,176 | public class OQueryOperatorContainsText extends OQueryTargetOperator {
private boolean ignoreCase = true;
public OQueryOperatorContainsText(final boolean iIgnoreCase) {
super("CONTAINSTEXT", 5, false);
ignoreCase = iIgnoreCase;
}
public OQueryOperatorContainsText() {
super("CONTAINSTEXT", 5, false);
}
@Override
public String getSyntax() {
return "<left> CONTAINSTEXT[( noignorecase ] )] <right>";
}
/**
* This is executed on non-indexed fields.
*/
@Override
public Object evaluateRecord(final OIdentifiable iRecord, ODocument iCurrentResult, final OSQLFilterCondition iCondition,
final Object iLeft, final Object iRight, OCommandContext iContext) {
if (iLeft == null || iRight == null)
return false;
return iLeft.toString().indexOf(iRight.toString()) > -1;
}
@SuppressWarnings({ "unchecked", "deprecation" })
@Override
public Collection<OIdentifiable> filterRecords(final ODatabaseComplex<?> iDatabase, final List<String> iTargetClasses,
final OSQLFilterCondition iCondition, final Object iLeft, final Object iRight) {
final String fieldName;
if (iCondition.getLeft() instanceof OSQLFilterItemField)
fieldName = iCondition.getLeft().toString();
else
fieldName = iCondition.getRight().toString();
final String fieldValue;
if (iCondition.getLeft() instanceof OSQLFilterItemField)
fieldValue = iCondition.getRight().toString();
else
fieldValue = iCondition.getLeft().toString();
final String className = iTargetClasses.get(0);
final OProperty prop = iDatabase.getMetadata().getSchema().getClass(className).getProperty(fieldName);
if (prop == null)
// NO PROPERTY DEFINED
return null;
OIndex<?> fullTextIndex = null;
for (final OIndex<?> indexDefinition : prop.getIndexes()) {
if (indexDefinition instanceof OIndexFullText) {
fullTextIndex = indexDefinition;
break;
}
}
if (fullTextIndex == null) {
return null;
}
return (Collection<OIdentifiable>) fullTextIndex.get(fieldValue);
}
public boolean isIgnoreCase() {
return ignoreCase;
}
@Override
public OIndexReuseType getIndexReuseType(final Object iLeft, final Object iRight) {
return OIndexReuseType.INDEX_METHOD;
}
@Override
public Object executeIndexQuery(OCommandContext iContext, OIndex<?> index, INDEX_OPERATION_TYPE iOperationType,
List<Object> keyParams, IndexResultListener resultListener, int fetchLimit) {
final OIndexDefinition indexDefinition = index.getDefinition();
if (indexDefinition.getParamCount() > 1)
return null;
final OIndex<?> internalIndex = index.getInternal();
final Object result;
if (internalIndex instanceof OIndexFullText) {
final Object indexResult = index.get(indexDefinition.createValue(keyParams));
if (indexResult instanceof Collection)
result = indexResult;
else if (indexResult == null)
result = Collections.emptyList();
else
result = Collections.singletonList((OIdentifiable) indexResult);
} else
return null;
updateProfiler(iContext, internalIndex, keyParams, indexDefinition);
if (iOperationType == INDEX_OPERATION_TYPE.COUNT)
return ((Collection<?>) result).size();
return result;
}
@Override
public ORID getBeginRidRange(Object iLeft, Object iRight) {
return null;
}
@Override
public ORID getEndRidRange(Object iLeft, Object iRight) {
return null;
}
} | 1no label
| core_src_main_java_com_orientechnologies_orient_core_sql_operator_OQueryOperatorContainsText.java |
193 | public class GeneralCLibrary implements CLibrary {
public static native Pointer memmove(Pointer dest, Pointer src, NativeLong len);
static {
Native.register(Platform.C_LIBRARY_NAME);
}
@Override
public void memoryMove(long src, long dest, long len) {
memmove(new Pointer(dest), new Pointer(src), new NativeLong(len));
}
} | 0true
| nativeos_src_main_java_com_orientechnologies_nio_GeneralCLibrary.java |
93 | DISJOINT {
@Override
public boolean evaluate(Object value, Object condition) {
Preconditions.checkArgument(condition instanceof Geoshape);
if (value == null) return false;
Preconditions.checkArgument(value instanceof Geoshape);
return ((Geoshape) value).disjoint((Geoshape) condition);
}
@Override
public String toString() {
return "disjoint";
}
@Override
public boolean hasNegation() {
return true;
}
@Override
public TitanPredicate negate() {
return INTERSECT;
}
}, | 0true
| titan-core_src_main_java_com_thinkaurelius_titan_core_attribute_Geo.java |
333 | fUpdateJob= new UIJob(display, PackagesMessages.PackageExplorerContentProvider_update_job_description) {
@Override
public IStatus runInUIThread(IProgressMonitor monitor) {
TreeViewer viewer= fViewer;
if (viewer != null && viewer.isBusy()) {
schedule(100); // reschedule when viewer is busy: bug 184991
} else {
runPendingUpdates();
}
return Status.OK_STATUS;
}
}; | 0true
| plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_explorer_PackageExplorerContentProvider.java |
1,955 | public interface InternalFactory<T> {
/**
* ES:
* An factory that returns a pre created instance.
*/
public static class Instance<T> implements InternalFactory<T> {
private final T object;
public Instance(T object) {
this.object = object;
}
@Override
public T get(Errors errors, InternalContext context, Dependency<?> dependency) throws ErrorsException {
return object;
}
@Override
public String toString() {
return object.toString();
}
}
/**
* Creates an object to be injected.
*
* @param context of this injection
* @return instance to be injected
* @throws org.elasticsearch.common.inject.internal.ErrorsException
* if a value cannot be provided
*/
T get(Errors errors, InternalContext context, Dependency<?> dependency)
throws ErrorsException;
} | 0true
| src_main_java_org_elasticsearch_common_inject_internal_InternalFactory.java |
1,295 | Files.walkFileTree(testStoragePath, new SimpleFileVisitor<Path>() {
@Override
public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
Path fileToCopy = copyTo.resolve(testStoragePath.relativize(file));
if (fileToCopy.endsWith("baseLocalPaginatedStorageRestoreFromWALAndAddAdditionalRecords.wmr"))
fileToCopy = fileToCopy.getParent().resolve("testLocalPaginatedStorageRestoreFromWALAndAddAdditionalRecords.wmr");
else if (fileToCopy.endsWith("baseLocalPaginatedStorageRestoreFromWALAndAddAdditionalRecords.0.wal"))
fileToCopy = fileToCopy.getParent().resolve("testLocalPaginatedStorageRestoreFromWALAndAddAdditionalRecords.0.wal");
Files.copy(file, fileToCopy);
return FileVisitResult.CONTINUE;
}
}); | 0true
| core_src_test_java_com_orientechnologies_orient_core_storage_impl_local_paginated_LocalPaginatedStorageRestoreFromWALAndAddAdditionalRecords.java |
1,442 | public static class Factory implements MetaData.Custom.Factory<RestoreMetaData> {
/**
* {@inheritDoc}
*/
@Override
public String type() {
return TYPE;
}
/**
* {@inheritDoc}
*/
@Override
public RestoreMetaData readFrom(StreamInput in) throws IOException {
Entry[] entries = new Entry[in.readVInt()];
for (int i = 0; i < entries.length; i++) {
SnapshotId snapshotId = SnapshotId.readSnapshotId(in);
State state = State.fromValue(in.readByte());
int indices = in.readVInt();
ImmutableList.Builder<String> indexBuilder = ImmutableList.builder();
for (int j = 0; j < indices; j++) {
indexBuilder.add(in.readString());
}
ImmutableMap.Builder<ShardId, ShardRestoreStatus> builder = ImmutableMap.<ShardId, ShardRestoreStatus>builder();
int shards = in.readVInt();
for (int j = 0; j < shards; j++) {
ShardId shardId = ShardId.readShardId(in);
ShardRestoreStatus shardState = ShardRestoreStatus.readShardRestoreStatus(in);
builder.put(shardId, shardState);
}
entries[i] = new Entry(snapshotId, state, indexBuilder.build(), builder.build());
}
return new RestoreMetaData(entries);
}
/**
* {@inheritDoc}
*/
@Override
public void writeTo(RestoreMetaData repositories, StreamOutput out) throws IOException {
out.writeVInt(repositories.entries().size());
for (Entry entry : repositories.entries()) {
entry.snapshotId().writeTo(out);
out.writeByte(entry.state().value());
out.writeVInt(entry.indices().size());
for (String index : entry.indices()) {
out.writeString(index);
}
out.writeVInt(entry.shards().size());
for (Map.Entry<ShardId, ShardRestoreStatus> shardEntry : entry.shards().entrySet()) {
shardEntry.getKey().writeTo(out);
shardEntry.getValue().writeTo(out);
}
}
}
/**
* {@inheritDoc}
*/
@Override
public RestoreMetaData fromXContent(XContentParser parser) throws IOException {
throw new UnsupportedOperationException();
}
/**
* {@inheritDoc}
*/
@Override
public void toXContent(RestoreMetaData customIndexMetaData, XContentBuilder builder, ToXContent.Params params) throws IOException {
builder.startArray("snapshots");
for (Entry entry : customIndexMetaData.entries()) {
toXContent(entry, builder, params);
}
builder.endArray();
}
/**
* Serializes single restore operation
*
* @param entry restore operation metadata
* @param builder XContent builder
* @param params serialization parameters
* @throws IOException
*/
public void toXContent(Entry entry, XContentBuilder builder, ToXContent.Params params) throws IOException {
builder.startObject();
builder.field("snapshot", entry.snapshotId().getSnapshot());
builder.field("repository", entry.snapshotId().getRepository());
builder.field("state", entry.state());
builder.startArray("indices");
{
for (String index : entry.indices()) {
builder.value(index);
}
}
builder.endArray();
builder.startArray("shards");
{
for (Map.Entry<ShardId, ShardRestoreStatus> shardEntry : entry.shards.entrySet()) {
ShardId shardId = shardEntry.getKey();
ShardRestoreStatus status = shardEntry.getValue();
builder.startObject();
{
builder.field("index", shardId.getIndex());
builder.field("shard", shardId.getId());
builder.field("state", status.state());
}
builder.endObject();
}
}
builder.endArray();
builder.endObject();
}
/**
* {@inheritDoc}
*/
@Override
public boolean isPersistent() {
return false;
}
} | 0true
| src_main_java_org_elasticsearch_cluster_metadata_RestoreMetaData.java |
2,819 | public class ArmenianAnalyzerProvider extends AbstractIndexAnalyzerProvider<ArmenianAnalyzer> {
private final ArmenianAnalyzer analyzer;
@Inject
public ArmenianAnalyzerProvider(Index index, @IndexSettings Settings indexSettings, Environment env, @Assisted String name, @Assisted Settings settings) {
super(index, indexSettings, name, settings);
analyzer = new ArmenianAnalyzer(version,
Analysis.parseStopWords(env, settings, ArmenianAnalyzer.getDefaultStopSet(), version),
Analysis.parseStemExclusion(settings, CharArraySet.EMPTY_SET, version));
}
@Override
public ArmenianAnalyzer get() {
return this.analyzer;
}
} | 0true
| src_main_java_org_elasticsearch_index_analysis_ArmenianAnalyzerProvider.java |
926 | public abstract class BroadcastOperationRequest<T extends BroadcastOperationRequest> extends ActionRequest<T> {
protected String[] indices;
private BroadcastOperationThreading operationThreading = BroadcastOperationThreading.THREAD_PER_SHARD;
private IndicesOptions indicesOptions = IndicesOptions.strict();
protected BroadcastOperationRequest() {
}
protected BroadcastOperationRequest(String[] indices) {
this.indices = indices;
}
public String[] indices() {
return indices;
}
@SuppressWarnings("unchecked")
public final T indices(String... indices) {
this.indices = indices;
return (T) this;
}
@Override
public ActionRequestValidationException validate() {
return null;
}
/**
* Controls the operation threading model.
*/
public BroadcastOperationThreading operationThreading() {
return operationThreading;
}
/**
* Controls the operation threading model.
*/
@SuppressWarnings("unchecked")
public final T operationThreading(BroadcastOperationThreading operationThreading) {
this.operationThreading = operationThreading;
return (T) this;
}
/**
* Controls the operation threading model.
*/
public T operationThreading(String operationThreading) {
return operationThreading(BroadcastOperationThreading.fromString(operationThreading, this.operationThreading));
}
public IndicesOptions indicesOptions() {
return indicesOptions;
}
@SuppressWarnings("unchecked")
public final T indicesOptions(IndicesOptions indicesOptions) {
this.indicesOptions = indicesOptions;
return (T) this;
}
protected void beforeStart() {
}
protected void beforeLocalFork() {
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeStringArrayNullable(indices);
out.writeByte(operationThreading.id());
indicesOptions.writeIndicesOptions(out);
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
indices = in.readStringArray();
operationThreading = BroadcastOperationThreading.fromId(in.readByte());
indicesOptions = IndicesOptions.readIndicesOptions(in);
}
} | 1no label
| src_main_java_org_elasticsearch_action_support_broadcast_BroadcastOperationRequest.java |
629 | @SuppressWarnings("unchecked")
public class OIndexRemoteMultiValue extends OIndexRemote<Collection<OIdentifiable>> {
protected final static String QUERY_GET = "select EXPAND( rid ) from index:%s where key = ?";
public OIndexRemoteMultiValue(final String iName, final String iWrappedType, final ORID iRid,
final OIndexDefinition iIndexDefinition, final ODocument iConfiguration, final Set<String> clustersToIndex) {
super(iName, iWrappedType, iRid, iIndexDefinition, iConfiguration, clustersToIndex);
}
public Collection<OIdentifiable> get(final Object iKey) {
final OCommandRequest cmd = formatCommand(QUERY_GET, name);
return (Collection<OIdentifiable>) getDatabase().command(cmd).execute(iKey);
}
public Iterator<Entry<Object, Collection<OIdentifiable>>> iterator() {
final OCommandRequest cmd = formatCommand(QUERY_ENTRIES, name);
final Collection<ODocument> result = getDatabase().command(cmd).execute();
final Map<Object, Collection<OIdentifiable>> map = new LinkedHashMap<Object, Collection<OIdentifiable>>();
for (final ODocument d : result) {
Collection<OIdentifiable> rids = map.get(d.field("key"));
if (rids == null) {
rids = new HashSet<OIdentifiable>();
map.put(d.field("key"), rids);
}
rids.add((OIdentifiable) d.field("rid", OType.LINK));
}
return map.entrySet().iterator();
}
public Iterator<Entry<Object, Collection<OIdentifiable>>> inverseIterator() {
final OCommandRequest cmd = formatCommand(QUERY_ENTRIES, name);
final List<ODocument> result = getDatabase().command(cmd).execute();
final Map<Object, Collection<OIdentifiable>> map = new LinkedHashMap<Object, Collection<OIdentifiable>>();
for (ListIterator<ODocument> it = result.listIterator(); it.hasPrevious();) {
ODocument d = it.previous();
Collection<OIdentifiable> rids = map.get(d.field("key"));
if (rids == null) {
rids = new HashSet<OIdentifiable>();
map.put(d.field("key"), rids);
}
rids.add((OIdentifiable) d.field("rid", OType.LINK));
}
return map.entrySet().iterator();
}
public Iterator<OIdentifiable> valuesIterator() {
throw new UnsupportedOperationException();
}
public Iterator<OIdentifiable> valuesInverseIterator() {
throw new UnsupportedOperationException();
}
@Override
public boolean supportsOrderedIterations() {
return false;
}
} | 0true
| core_src_main_java_com_orientechnologies_orient_core_index_OIndexRemoteMultiValue.java |
657 | constructors[COLLECTION_ADD_ALL_BACKUP] = new ConstructorFunction<Integer, IdentifiedDataSerializable>() {
public IdentifiedDataSerializable createNew(Integer arg) {
return new CollectionAddAllBackupOperation();
}
}; | 0true
| hazelcast_src_main_java_com_hazelcast_collection_CollectionDataSerializerHook.java |
290 | public class NoShardAvailableActionException extends IndexShardException {
public NoShardAvailableActionException(ShardId shardId) {
super(shardId, null);
}
public NoShardAvailableActionException(ShardId shardId, String msg) {
super(shardId, msg);
}
public NoShardAvailableActionException(ShardId shardId, String msg, Throwable cause) {
super(shardId, msg, cause);
}
@Override
public RestStatus status() {
return RestStatus.SERVICE_UNAVAILABLE;
}
} | 0true
| src_main_java_org_elasticsearch_action_NoShardAvailableActionException.java |
354 | public enum RUN_MODE {
DEFAULT, RUNNING_DISTRIBUTED
} | 0true
| core_src_main_java_com_orientechnologies_orient_core_db_OScenarioThreadLocal.java |
598 | ex.execute(new Runnable() {
public void run() {
try {
Thread.sleep(random.nextInt(10) * 1000);
final Config config = new Config();
config.setProperty("hazelcast.wait.seconds.before.join", "5");
final NetworkConfig networkConfig = config.getNetworkConfig();
networkConfig.getJoin().getMulticastConfig().setEnabled(false);
TcpIpConfig tcpIpConfig = networkConfig.getJoin().getTcpIpConfig();
tcpIpConfig.setEnabled(true);
int port = 12301;
networkConfig.setPortAutoIncrement(false);
networkConfig.setPort(port + seed);
for (int i = 0; i < count; i++) {
tcpIpConfig.addMember("127.0.0.1:" + (port + i));
}
HazelcastInstance h = Hazelcast.newHazelcastInstance(config);
mapOfInstances.put(seed, h);
latch.countDown();
} catch (Exception e) {
e.printStackTrace();
}
}
}); | 0true
| hazelcast_src_test_java_com_hazelcast_cluster_JoinStressTest.java |
1,628 | public static final Validator EMPTY = new Validator() {
@Override
public String validate(String setting, String value) {
return null;
}
}; | 0true
| src_main_java_org_elasticsearch_cluster_settings_Validator.java |
4,223 | public abstract class FsDirectoryService extends AbstractIndexShardComponent implements DirectoryService, StoreRateLimiting.Listener, StoreRateLimiting.Provider {
protected final FsIndexStore indexStore;
private final CounterMetric rateLimitingTimeInNanos = new CounterMetric();
public FsDirectoryService(ShardId shardId, @IndexSettings Settings indexSettings, IndexStore indexStore) {
super(shardId, indexSettings);
this.indexStore = (FsIndexStore) indexStore;
}
@Override
public final long throttleTimeInNanos() {
return rateLimitingTimeInNanos.count();
}
@Override
public final StoreRateLimiting rateLimiting() {
return indexStore.rateLimiting();
}
protected final LockFactory buildLockFactory() throws IOException {
String fsLock = componentSettings.get("lock", componentSettings.get("fs_lock", "native"));
LockFactory lockFactory = NoLockFactory.getNoLockFactory();
if (fsLock.equals("native")) {
// TODO LUCENE MONITOR: this is not needed in next Lucene version
lockFactory = new NativeFSLockFactory();
} else if (fsLock.equals("simple")) {
lockFactory = new SimpleFSLockFactory();
} else if (fsLock.equals("none")) {
lockFactory = NoLockFactory.getNoLockFactory();
}
return lockFactory;
}
@Override
public final void renameFile(Directory dir, String from, String to) throws IOException {
final FSDirectory fsDirectory = DirectoryUtils.getLeaf(dir, FSDirectory.class);
if (fsDirectory == null) {
throw new ElasticsearchIllegalArgumentException("Can not rename file on non-filesystem based directory ");
}
File directory = fsDirectory.getDirectory();
File old = new File(directory, from);
File nu = new File(directory, to);
if (nu.exists())
if (!nu.delete())
throw new IOException("Cannot delete " + nu);
if (!old.exists()) {
throw new FileNotFoundException("Can't rename from [" + from + "] to [" + to + "], from does not exists");
}
boolean renamed = false;
for (int i = 0; i < 3; i++) {
if (old.renameTo(nu)) {
renamed = true;
break;
}
try {
Thread.sleep(100);
} catch (InterruptedException e) {
throw new InterruptedIOException(e.getMessage());
}
}
if (!renamed) {
throw new IOException("Failed to rename, from [" + from + "], to [" + to + "]");
}
}
@Override
public final void fullDelete(Directory dir) throws IOException {
final FSDirectory fsDirectory = DirectoryUtils.getLeaf(dir, FSDirectory.class);
if (fsDirectory == null) {
throw new ElasticsearchIllegalArgumentException("Can not fully delete on non-filesystem based directory");
}
FileSystemUtils.deleteRecursively(fsDirectory.getDirectory());
// if we are the last ones, delete also the actual index
String[] list = fsDirectory.getDirectory().getParentFile().list();
if (list == null || list.length == 0) {
FileSystemUtils.deleteRecursively(fsDirectory.getDirectory().getParentFile());
}
}
@Override
public Directory[] build() throws IOException {
File[] locations = indexStore.shardIndexLocations(shardId);
Directory[] dirs = new Directory[locations.length];
for (int i = 0; i < dirs.length; i++) {
FileSystemUtils.mkdirs(locations[i]);
FSDirectory wrapped = newFSDirectory(locations[i], buildLockFactory());
dirs[i] = new RateLimitedFSDirectory(wrapped, this, this) ;
}
return dirs;
}
protected abstract FSDirectory newFSDirectory(File location, LockFactory lockFactory) throws IOException;
@Override
public final void onPause(long nanos) {
rateLimitingTimeInNanos.inc(nanos);
}
} | 1no label
| src_main_java_org_elasticsearch_index_store_fs_FsDirectoryService.java |
282 | public class CTConnectionFactory implements KeyedPoolableObjectFactory<String, CTConnection> {
private static final Logger log = LoggerFactory.getLogger(CTConnectionFactory.class);
private static final long SCHEMA_WAIT_MAX = 5000L;
private static final long SCHEMA_WAIT_INCREMENT = 25L;
private final AtomicReference<Config> cfgRef;
private CTConnectionFactory(Config config) {
this.cfgRef = new AtomicReference<Config>(config);
}
@Override
public void activateObject(String key, CTConnection c) throws Exception {
// Do nothing, as in passivateObject
}
@Override
public void destroyObject(String key, CTConnection c) throws Exception {
TTransport t = c.getTransport();
if (t.isOpen()) {
t.close();
log.trace("Closed transport {}", t);
} else {
log.trace("Not closing transport {} (already closed)", t);
}
}
@Override
public CTConnection makeObject(String key) throws Exception {
CTConnection conn = makeRawConnection();
Cassandra.Client client = conn.getClient();
client.set_keyspace(key);
return conn;
}
/**
* Create a Cassandra-Thrift connection, but do not attempt to
* set a keyspace on the connection.
*
* @return A CTConnection ready to talk to a Cassandra cluster
* @throws TTransportException on any Thrift transport failure
*/
public CTConnection makeRawConnection() throws TTransportException {
final Config cfg = cfgRef.get();
String hostname = cfg.getRandomHost();
log.debug("Creating TSocket({}, {}, {}, {}, {})", hostname, cfg.port, cfg.username, cfg.password, cfg.timeoutMS);
TSocket socket;
if (null != cfg.sslTruststoreLocation && !cfg.sslTruststoreLocation.isEmpty()) {
TSSLTransportFactory.TSSLTransportParameters params = new TSSLTransportFactory.TSSLTransportParameters() {{
setTrustStore(cfg.sslTruststoreLocation, cfg.sslTruststorePassword);
}};
socket = TSSLTransportFactory.getClientSocket(hostname, cfg.port, cfg.timeoutMS, params);
} else {
socket = new TSocket(hostname, cfg.port, cfg.timeoutMS);
}
TTransport transport = new TFramedTransport(socket, cfg.frameSize);
log.trace("Created transport {}", transport);
TBinaryProtocol protocol = new TBinaryProtocol(transport);
Cassandra.Client client = new Cassandra.Client(protocol);
if (!transport.isOpen()) {
transport.open();
}
if (cfg.username != null) {
Map<String, String> credentials = new HashMap<String, String>() {{
put(IAuthenticator.USERNAME_KEY, cfg.username);
put(IAuthenticator.PASSWORD_KEY, cfg.password);
}};
try {
client.login(new AuthenticationRequest(credentials));
} catch (Exception e) { // TTransportException will propagate authentication/authorization failure
throw new TTransportException(e);
}
}
return new CTConnection(transport, client, cfg);
}
@Override
public void passivateObject(String key, CTConnection o) throws Exception {
// Do nothing, as in activateObject
}
@Override
public boolean validateObject(String key, CTConnection c) {
Config curCfg = cfgRef.get();
boolean isSameConfig = c.getConfig().equals(curCfg);
if (log.isDebugEnabled()) {
if (isSameConfig) {
log.trace("Validated {} by configuration {}", c, curCfg);
} else {
log.trace("Rejected {}; current config is {}; rejected connection config is {}",
c, curCfg, c.getConfig());
}
}
return isSameConfig && c.isOpen();
}
public static class Config {
// this is to keep backward compatibility with JDK 1.6, can be changed to ThreadLocalRandom once we fully switch
private static final ThreadLocal<Random> THREAD_LOCAL_RANDOM = new ThreadLocal<Random>() {
@Override
public Random initialValue() {
return new Random();
}
};
private final String[] hostnames;
private final int port;
private final String username;
private final String password;
private int timeoutMS;
private int frameSize;
private String sslTruststoreLocation;
private String sslTruststorePassword;
private boolean isBuilt;
public Config(String[] hostnames, int port, String username, String password) {
this.hostnames = hostnames;
this.port = port;
this.username = username;
this.password = password;
}
// TODO: we don't really need getters/setters here as all of the fields are final and immutable
public String getHostname() {
return hostnames[0];
}
public int getPort() {
return port;
}
public String getRandomHost() {
return hostnames.length == 1 ? hostnames[0] : hostnames[THREAD_LOCAL_RANDOM.get().nextInt(hostnames.length)];
}
public Config setTimeoutMS(int timeoutMS) {
checkIfAlreadyBuilt();
this.timeoutMS = timeoutMS;
return this;
}
public Config setFrameSize(int frameSize) {
checkIfAlreadyBuilt();
this.frameSize = frameSize;
return this;
}
public Config setSSLTruststoreLocation(String location) {
checkIfAlreadyBuilt();
this.sslTruststoreLocation = location;
return this;
}
public Config setSSLTruststorePassword(String password) {
checkIfAlreadyBuilt();
this.sslTruststorePassword = password;
return this;
}
public CTConnectionFactory build() {
isBuilt = true;
return new CTConnectionFactory(this);
}
public void checkIfAlreadyBuilt() {
if (isBuilt)
throw new IllegalStateException("Can't accept modifications when used with built factory.");
}
@Override
public String toString() {
return "Config[hostnames=" + StringUtils.join(hostnames, ',') + ", port=" + port
+ ", timeoutMS=" + timeoutMS + ", frameSize=" + frameSize
+ "]";
}
}
} | 1no label
| titan-cassandra_src_main_java_com_thinkaurelius_titan_diskstorage_cassandra_thrift_thriftpool_CTConnectionFactory.java |
314 | public class TransportClusterHealthAction extends TransportMasterNodeReadOperationAction<ClusterHealthRequest, ClusterHealthResponse> {
private final ClusterName clusterName;
@Inject
public TransportClusterHealthAction(Settings settings, TransportService transportService, ClusterService clusterService, ThreadPool threadPool,
ClusterName clusterName) {
super(settings, transportService, clusterService, threadPool);
this.clusterName = clusterName;
}
@Override
protected String executor() {
// we block here...
return ThreadPool.Names.GENERIC;
}
@Override
protected String transportAction() {
return ClusterHealthAction.NAME;
}
@Override
protected ClusterHealthRequest newRequest() {
return new ClusterHealthRequest();
}
@Override
protected ClusterHealthResponse newResponse() {
return new ClusterHealthResponse();
}
@Override
protected void masterOperation(final ClusterHealthRequest request, final ClusterState unusedState, final ActionListener<ClusterHealthResponse> listener) throws ElasticsearchException {
long endTime = System.currentTimeMillis() + request.timeout().millis();
if (request.waitForEvents() != null) {
final CountDownLatch latch = new CountDownLatch(1);
clusterService.submitStateUpdateTask("cluster_health (wait_for_events [" + request.waitForEvents() + "])", request.waitForEvents(), new ProcessedClusterStateUpdateTask() {
@Override
public ClusterState execute(ClusterState currentState) {
return currentState;
}
@Override
public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) {
latch.countDown();
}
@Override
public void onFailure(String source, Throwable t) {
logger.error("unexpected failure during [{}]", t, source);
}
});
try {
latch.await(request.timeout().millis(), TimeUnit.MILLISECONDS);
} catch (InterruptedException e) {
// ignore
}
}
int waitFor = 5;
if (request.waitForStatus() == null) {
waitFor--;
}
if (request.waitForRelocatingShards() == -1) {
waitFor--;
}
if (request.waitForActiveShards() == -1) {
waitFor--;
}
if (request.waitForNodes().isEmpty()) {
waitFor--;
}
if (request.indices().length == 0) { // check that they actually exists in the meta data
waitFor--;
}
if (waitFor == 0) {
// no need to wait for anything
ClusterState clusterState = clusterService.state();
listener.onResponse(clusterHealth(request, clusterState));
return;
}
while (true) {
int waitForCounter = 0;
ClusterState clusterState = clusterService.state();
ClusterHealthResponse response = clusterHealth(request, clusterState);
if (request.waitForStatus() != null && response.getStatus().value() <= request.waitForStatus().value()) {
waitForCounter++;
}
if (request.waitForRelocatingShards() != -1 && response.getRelocatingShards() <= request.waitForRelocatingShards()) {
waitForCounter++;
}
if (request.waitForActiveShards() != -1 && response.getActiveShards() >= request.waitForActiveShards()) {
waitForCounter++;
}
if (request.indices().length > 0) {
try {
clusterState.metaData().concreteIndices(request.indices());
waitForCounter++;
} catch (IndexMissingException e) {
response.status = ClusterHealthStatus.RED; // no indices, make sure its RED
// missing indices, wait a bit more...
}
}
if (!request.waitForNodes().isEmpty()) {
if (request.waitForNodes().startsWith(">=")) {
int expected = Integer.parseInt(request.waitForNodes().substring(2));
if (response.getNumberOfNodes() >= expected) {
waitForCounter++;
}
} else if (request.waitForNodes().startsWith("ge(")) {
int expected = Integer.parseInt(request.waitForNodes().substring(3, request.waitForNodes().length() - 1));
if (response.getNumberOfNodes() >= expected) {
waitForCounter++;
}
} else if (request.waitForNodes().startsWith("<=")) {
int expected = Integer.parseInt(request.waitForNodes().substring(2));
if (response.getNumberOfNodes() <= expected) {
waitForCounter++;
}
} else if (request.waitForNodes().startsWith("le(")) {
int expected = Integer.parseInt(request.waitForNodes().substring(3, request.waitForNodes().length() - 1));
if (response.getNumberOfNodes() <= expected) {
waitForCounter++;
}
} else if (request.waitForNodes().startsWith(">")) {
int expected = Integer.parseInt(request.waitForNodes().substring(1));
if (response.getNumberOfNodes() > expected) {
waitForCounter++;
}
} else if (request.waitForNodes().startsWith("gt(")) {
int expected = Integer.parseInt(request.waitForNodes().substring(3, request.waitForNodes().length() - 1));
if (response.getNumberOfNodes() > expected) {
waitForCounter++;
}
} else if (request.waitForNodes().startsWith("<")) {
int expected = Integer.parseInt(request.waitForNodes().substring(1));
if (response.getNumberOfNodes() < expected) {
waitForCounter++;
}
} else if (request.waitForNodes().startsWith("lt(")) {
int expected = Integer.parseInt(request.waitForNodes().substring(3, request.waitForNodes().length() - 1));
if (response.getNumberOfNodes() < expected) {
waitForCounter++;
}
} else {
int expected = Integer.parseInt(request.waitForNodes());
if (response.getNumberOfNodes() == expected) {
waitForCounter++;
}
}
}
if (waitForCounter == waitFor) {
listener.onResponse(response);
return;
}
if (System.currentTimeMillis() > endTime) {
response.timedOut = true;
listener.onResponse(response);
return;
}
try {
Thread.sleep(200);
} catch (InterruptedException e) {
response.timedOut = true;
listener.onResponse(response);
return;
}
}
}
private ClusterHealthResponse clusterHealth(ClusterHealthRequest request, ClusterState clusterState) {
if (logger.isTraceEnabled()) {
logger.trace("Calculating health based on state version [{}]", clusterState.version());
}
String[] concreteIndices;
try {
concreteIndices = clusterState.metaData().concreteIndicesIgnoreMissing(request.indices());
} catch (IndexMissingException e) {
// one of the specified indices is not there - treat it as RED.
ClusterHealthResponse response = new ClusterHealthResponse(clusterName.value(), Strings.EMPTY_ARRAY, clusterState);
response.status = ClusterHealthStatus.RED;
return response;
}
return new ClusterHealthResponse(clusterName.value(), concreteIndices, clusterState);
}
} | 0true
| src_main_java_org_elasticsearch_action_admin_cluster_health_TransportClusterHealthAction.java |
380 | public interface ODetachable {
/**
* Detaches the object.
*
* @return true if the object has been fully detached, otherwise false
*/
public boolean detach();
} | 0true
| core_src_main_java_com_orientechnologies_orient_core_db_record_ODetachable.java |
1,549 | @Component("blDefaultErrorHandler")
public class DefaultErrorHandler implements ErrorHandler {
private static final Log LOG = LogFactory.getLog(DefaultErrorHandler.class);
@SuppressWarnings("unused")
private String name;
protected List<String> unloggedExceptionClasses = new ArrayList<String>();
/* (non-Javadoc)
* @see org.broadleafcommerce.core.workflow.ErrorHandler#handleError(org.broadleafcommerce.core.workflow.ProcessContext, java.lang.Throwable)
*/
public void handleError(ProcessContext context, Throwable th) throws WorkflowException {
context.stopProcess();
boolean shouldLog = true;
Throwable cause = th;
while (true) {
if (unloggedExceptionClasses.contains(cause.getClass().getName())) {
shouldLog = false;
break;
}
cause = cause.getCause();
if (cause == null) {
break;
}
}
if (shouldLog) {
LOG.error("An error occurred during the workflow", th);
}
throw new WorkflowException(th);
}
/* (non-Javadoc)
* @see org.springframework.beans.factory.BeanNameAware#setBeanName(java.lang.String)
*/
public void setBeanName(String name) {
this.name = name;
}
public List<String> getUnloggedExceptionClasses() {
return unloggedExceptionClasses;
}
public void setUnloggedExceptionClasses(List<String> unloggedExceptionClasses) {
this.unloggedExceptionClasses = unloggedExceptionClasses;
}
} | 0true
| core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_workflow_DefaultErrorHandler.java |
898 | protected abstract class BaseAsyncAction<FirstResult extends SearchPhaseResult> {
protected final ActionListener<SearchResponse> listener;
protected final GroupShardsIterator shardsIts;
protected final SearchRequest request;
protected final ClusterState clusterState;
protected final DiscoveryNodes nodes;
protected final int expectedSuccessfulOps;
private final int expectedTotalOps;
protected final AtomicInteger successulOps = new AtomicInteger();
private final AtomicInteger totalOps = new AtomicInteger();
protected final AtomicArray<FirstResult> firstResults;
private volatile AtomicArray<ShardSearchFailure> shardFailures;
private final Object shardFailuresMutex = new Object();
protected volatile ScoreDoc[] sortedShardList;
protected final long startTime = System.currentTimeMillis();
protected BaseAsyncAction(SearchRequest request, ActionListener<SearchResponse> listener) {
this.request = request;
this.listener = listener;
this.clusterState = clusterService.state();
nodes = clusterState.nodes();
clusterState.blocks().globalBlockedRaiseException(ClusterBlockLevel.READ);
String[] concreteIndices = clusterState.metaData().concreteIndices(request.indices(), request.indicesOptions());
for (String index : concreteIndices) {
clusterState.blocks().indexBlockedRaiseException(ClusterBlockLevel.READ, index);
}
Map<String, Set<String>> routingMap = clusterState.metaData().resolveSearchRouting(request.routing(), request.indices());
shardsIts = clusterService.operationRouting().searchShards(clusterState, request.indices(), concreteIndices, routingMap, request.preference());
expectedSuccessfulOps = shardsIts.size();
// we need to add 1 for non active partition, since we count it in the total!
expectedTotalOps = shardsIts.totalSizeWith1ForEmpty();
firstResults = new AtomicArray<FirstResult>(shardsIts.size());
}
public void start() {
if (expectedSuccessfulOps == 0) {
// no search shards to search on, bail with empty response (it happens with search across _all with no indices around and consistent with broadcast operations)
listener.onResponse(new SearchResponse(InternalSearchResponse.EMPTY, null, 0, 0, System.currentTimeMillis() - startTime, ShardSearchFailure.EMPTY_ARRAY));
return;
}
request.beforeStart();
// count the local operations, and perform the non local ones
int localOperations = 0;
int shardIndex = -1;
for (final ShardIterator shardIt : shardsIts) {
shardIndex++;
final ShardRouting shard = shardIt.firstOrNull();
if (shard != null) {
if (shard.currentNodeId().equals(nodes.localNodeId())) {
localOperations++;
} else {
// do the remote operation here, the localAsync flag is not relevant
performFirstPhase(shardIndex, shardIt);
}
} else {
// really, no shards active in this group
onFirstPhaseResult(shardIndex, null, null, shardIt, new NoShardAvailableActionException(shardIt.shardId()));
}
}
// we have local operations, perform them now
if (localOperations > 0) {
if (request.operationThreading() == SearchOperationThreading.SINGLE_THREAD) {
request.beforeLocalFork();
threadPool.executor(ThreadPool.Names.SEARCH).execute(new Runnable() {
@Override
public void run() {
int shardIndex = -1;
for (final ShardIterator shardIt : shardsIts) {
shardIndex++;
final ShardRouting shard = shardIt.firstOrNull();
if (shard != null) {
if (shard.currentNodeId().equals(nodes.localNodeId())) {
performFirstPhase(shardIndex, shardIt);
}
}
}
}
});
} else {
boolean localAsync = request.operationThreading() == SearchOperationThreading.THREAD_PER_SHARD;
if (localAsync) {
request.beforeLocalFork();
}
shardIndex = -1;
for (final ShardIterator shardIt : shardsIts) {
shardIndex++;
final int fShardIndex = shardIndex;
final ShardRouting shard = shardIt.firstOrNull();
if (shard != null) {
if (shard.currentNodeId().equals(nodes.localNodeId())) {
if (localAsync) {
try {
threadPool.executor(ThreadPool.Names.SEARCH).execute(new Runnable() {
@Override
public void run() {
performFirstPhase(fShardIndex, shardIt);
}
});
} catch (Throwable t) {
onFirstPhaseResult(shardIndex, shard, shard.currentNodeId(), shardIt, t);
}
} else {
performFirstPhase(fShardIndex, shardIt);
}
}
}
}
}
}
}
void performFirstPhase(final int shardIndex, final ShardIterator shardIt) {
performFirstPhase(shardIndex, shardIt, shardIt.nextOrNull());
}
void performFirstPhase(final int shardIndex, final ShardIterator shardIt, final ShardRouting shard) {
if (shard == null) {
// no more active shards... (we should not really get here, but just for safety)
onFirstPhaseResult(shardIndex, null, null, shardIt, new NoShardAvailableActionException(shardIt.shardId()));
} else {
final DiscoveryNode node = nodes.get(shard.currentNodeId());
if (node == null) {
onFirstPhaseResult(shardIndex, shard, null, shardIt, new NoShardAvailableActionException(shardIt.shardId()));
} else {
String[] filteringAliases = clusterState.metaData().filteringAliases(shard.index(), request.indices());
sendExecuteFirstPhase(node, internalSearchRequest(shard, shardsIts.size(), request, filteringAliases, startTime), new SearchServiceListener<FirstResult>() {
@Override
public void onResult(FirstResult result) {
onFirstPhaseResult(shardIndex, shard, result, shardIt);
}
@Override
public void onFailure(Throwable t) {
onFirstPhaseResult(shardIndex, shard, node.id(), shardIt, t);
}
});
}
}
}
void onFirstPhaseResult(int shardIndex, ShardRouting shard, FirstResult result, ShardIterator shardIt) {
result.shardTarget(new SearchShardTarget(shard.currentNodeId(), shard.index(), shard.id()));
processFirstPhaseResult(shardIndex, shard, result);
// increment all the "future" shards to update the total ops since we some may work and some may not...
// and when that happens, we break on total ops, so we must maintain them
int xTotalOps = totalOps.addAndGet(shardIt.remaining() + 1);
successulOps.incrementAndGet();
if (xTotalOps == expectedTotalOps) {
try {
innerMoveToSecondPhase();
} catch (Throwable e) {
if (logger.isDebugEnabled()) {
logger.debug(shardIt.shardId() + ": Failed to execute [" + request + "] while moving to second phase", e);
}
listener.onFailure(new ReduceSearchPhaseException(firstPhaseName(), "", e, buildShardFailures()));
}
}
}
void onFirstPhaseResult(final int shardIndex, @Nullable ShardRouting shard, @Nullable String nodeId, final ShardIterator shardIt, Throwable t) {
// we always add the shard failure for a specific shard instance
// we do make sure to clean it on a successful response from a shard
SearchShardTarget shardTarget = new SearchShardTarget(nodeId, shardIt.shardId().getIndex(), shardIt.shardId().getId());
addShardFailure(shardIndex, shardTarget, t);
if (totalOps.incrementAndGet() == expectedTotalOps) {
if (logger.isDebugEnabled()) {
if (t != null && !TransportActions.isShardNotAvailableException(t)) {
if (shard != null) {
logger.debug(shard.shortSummary() + ": Failed to execute [" + request + "]", t);
} else {
logger.debug(shardIt.shardId() + ": Failed to execute [" + request + "]", t);
}
}
}
if (successulOps.get() == 0) {
if (logger.isDebugEnabled()) {
logger.debug("All shards failed for phase: [{}]", firstPhaseName(), t);
}
// no successful ops, raise an exception
listener.onFailure(new SearchPhaseExecutionException(firstPhaseName(), "all shards failed", buildShardFailures()));
} else {
try {
innerMoveToSecondPhase();
} catch (Throwable e) {
listener.onFailure(new ReduceSearchPhaseException(firstPhaseName(), "", e, buildShardFailures()));
}
}
} else {
final ShardRouting nextShard = shardIt.nextOrNull();
final boolean lastShard = nextShard == null;
// trace log this exception
if (logger.isTraceEnabled() && t != null) {
logger.trace(executionFailureMsg(shard, shardIt, request, lastShard), t);
}
if (!lastShard) {
try {
threadPool.executor(ThreadPool.Names.SEARCH).execute(new Runnable() {
@Override
public void run() {
performFirstPhase(shardIndex, shardIt, nextShard);
}
});
} catch (Throwable t1) {
onFirstPhaseResult(shardIndex, shard, shard.currentNodeId(), shardIt, t1);
}
} else {
// no more shards active, add a failure
if (logger.isDebugEnabled() && !logger.isTraceEnabled()) { // do not double log this exception
if (t != null && !TransportActions.isShardNotAvailableException(t)) {
logger.debug(executionFailureMsg(shard, shardIt, request, lastShard), t);
}
}
}
}
}
private String executionFailureMsg(@Nullable ShardRouting shard, final ShardIterator shardIt, SearchRequest request, boolean lastShard) {
if (shard != null) {
return shard.shortSummary() + ": Failed to execute [" + request + "] lastShard [" + lastShard + "]";
} else {
return shardIt.shardId() + ": Failed to execute [" + request + "] lastShard [" + lastShard + "]";
}
}
/**
* Builds how long it took to execute the search.
*/
protected final long buildTookInMillis() {
return System.currentTimeMillis() - startTime;
}
protected final ShardSearchFailure[] buildShardFailures() {
AtomicArray<ShardSearchFailure> shardFailures = this.shardFailures;
if (shardFailures == null) {
return ShardSearchFailure.EMPTY_ARRAY;
}
List<AtomicArray.Entry<ShardSearchFailure>> entries = shardFailures.asList();
ShardSearchFailure[] failures = new ShardSearchFailure[entries.size()];
for (int i = 0; i < failures.length; i++) {
failures[i] = entries.get(i).value;
}
return failures;
}
/**
 * Records a failure for the shard at {@code shardIndex} so it can be reported
 * in the final response. Shard-not-available errors are deliberately dropped.
 */
protected final void addShardFailure(final int shardIndex, @Nullable SearchShardTarget shardTarget, Throwable t) {
    // we don't aggregate shard failures on non active shards (but do keep the header counts right)
    if (TransportActions.isShardNotAvailableException(t)) {
        return;
    }
    // lazily create shard failures, so we can early build the empty shard failure list in most cases (no failures)
    // NOTE(review): double-checked lazy init — its safety depends on the
    // shardFailures field being declared volatile; confirm at the declaration.
    if (shardFailures == null) {
        synchronized (shardFailuresMutex) {
            if (shardFailures == null) {
                shardFailures = new AtomicArray<ShardSearchFailure>(shardsIts.size());
            }
        }
    }
    ShardSearchFailure failure = shardFailures.get(shardIndex);
    if (failure == null) {
        // First failure seen for this shard index.
        shardFailures.set(shardIndex, new ShardSearchFailure(t, shardTarget));
    } else {
        // the failure is already present, try and not override it with an exception that is less meaningless
        // for example, getting illegal shard state
        if (TransportActions.isReadOverrideException(t)) {
            shardFailures.set(shardIndex, new ShardSearchFailure(t, shardTarget));
        }
    }
}
/**
 * Releases shard targets that are not used in the docsIdsToLoad.
 */
protected void releaseIrrelevantSearchContexts(AtomicArray<? extends QuerySearchResultProvider> queryResults,
                                               AtomicArray<IntArrayList> docIdsToLoad) {
    if (docIdsToLoad == null) {
        return;
    }
    // we only release search context that we did not fetch from if we are not scrolling
    if (request.scroll() != null) {
        return;
    }
    for (AtomicArray.Entry<? extends QuerySearchResultProvider> entry : queryResults.asList()) {
        if (docIdsToLoad.get(entry.index) != null) {
            // This context still has hits to fetch; keep it alive.
            continue;
        }
        DiscoveryNode node = nodes.get(entry.value.queryResult().shardTarget().nodeId());
        if (node != null) { // should not happen (==null) but safeguard anyhow
            searchService.sendFreeContext(node, entry.value.queryResult().id(), request);
        }
    }
}
/** Dispatches the first-phase request for one shard to the given node, reporting the outcome via the listener. */
protected abstract void sendExecuteFirstPhase(DiscoveryNode node, ShardSearchRequest request, SearchServiceListener<FirstResult> listener);
/** Stores a successful first-phase result and clears any earlier failure recorded for the same shard index. */
protected final void processFirstPhaseResult(int shardIndex, ShardRouting shard, FirstResult result) {
    firstResults.set(shardIndex, result);
    // clean a previous error on this shard group (note, this code will be serialized on the same shardIndex value level
    // so its ok concurrency wise to miss potentially the shard failures being created because of another failure
    // in the #addShardFailure, because by definition, it will happen on *another* shardIndex
    AtomicArray<ShardSearchFailure> shardFailures = this.shardFailures;
    if (shardFailures != null) {
        shardFailures.set(shardIndex, null);
    }
}
/**
 * Optionally traces which shard targets produced a first-phase result, then
 * hands control to the phase-specific {@link #moveToSecondPhase()}.
 */
final void innerMoveToSecondPhase() throws Exception {
    if (logger.isTraceEnabled()) {
        // Build a comma-separated list of successful shard targets (nulls are failures).
        StringBuilder targets = new StringBuilder();
        String separator = "";
        for (int i = 0; i < firstResults.length(); i++) {
            FirstResult result = firstResults.get(i);
            if (result == null) {
                continue; // failure
            }
            targets.append(separator);
            targets.append(result.shardTarget());
            separator = ",";
        }
        logger.trace("Moving to second phase, based on results from: {} (cluster state version: {})", targets, clusterState.version());
    }
    moveToSecondPhase();
}
/** Runs the next search phase once all first-phase shard results (or failures) are in. */
protected abstract void moveToSecondPhase() throws Exception;
/** Name identifying the first phase of this search type. */
protected abstract String firstPhaseName();
} | 1no label
| src_main_java_org_elasticsearch_action_search_type_TransportSearchTypeAction.java |
702 | public static class Presentation {
public static class Tab {
public static class Name {
public static final String Marketing = "ProductImpl_Marketing_Tab";
public static final String Media = "SkuImpl_Media_Tab";
public static final String ProductOptions = "ProductImpl_Product_Options_Tab";
public static final String Inventory = "ProductImpl_Inventory_Tab";
public static final String Shipping = "ProductImpl_Shipping_Tab";
public static final String Advanced = "ProductImpl_Advanced_Tab";
}
public static class Order {
public static final int Marketing = 2000;
public static final int Media = 3000;
public static final int ProductOptions = 4000;
public static final int Inventory = 5000;
public static final int Shipping = 6000;
public static final int Advanced = 7000;
}
}
public static class Group {
public static class Name {
public static final String General = "ProductImpl_Product_Description";
public static final String Price = "SkuImpl_Price";
public static final String ActiveDateRange = "ProductImpl_Product_Active_Date_Range";
public static final String Advanced = "ProductImpl_Advanced";
public static final String Inventory = "SkuImpl_Sku_Inventory";
public static final String Badges = "ProductImpl_Badges";
public static final String Shipping = "ProductWeight_Shipping";
public static final String Financial = "ProductImpl_Financial";
}
public static class Order {
public static final int General = 1000;
public static final int Price = 2000;
public static final int ActiveDateRange = 3000;
public static final int Advanced = 1000;
public static final int Inventory = 1000;
public static final int Badges = 1000;
public static final int Shipping = 1000;
}
}
public static class FieldOrder {
public static final int NAME = 1000;
public static final int SHORT_DESCRIPTION = 2000;
public static final int PRIMARY_MEDIA = 3000;
public static final int LONG_DESCRIPTION = 4000;
public static final int DEFAULT_CATEGORY = 5000;
public static final int MANUFACTURER = 6000;
public static final int URL = 7000;
}
} | 0true
| core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_catalog_domain_ProductImpl.java |
1,073 | public class MaxSizeConfigReadOnly extends MaxSizeConfig {
public MaxSizeConfigReadOnly(MaxSizeConfig config) {
super(config);
}
public MaxSizeConfig setSize(int size) {
throw new UnsupportedOperationException("This config is read-only");
}
public MaxSizeConfig setMaxSizePolicy(MaxSizePolicy maxSizePolicy) {
throw new UnsupportedOperationException("This config is read-only");
}
} | 0true
| hazelcast_src_main_java_com_hazelcast_config_MaxSizeConfigReadOnly.java |
// Fragment: privileged lookup of the sun.misc.Unsafe singleton. Scans Unsafe's
// declared static fields reflectively (instead of calling Unsafe.getUnsafe(),
// which rejects callers not loaded by the boot classloader) and returns the
// first field whose value is itself an Unsafe instance.
146 | (new java.security.PrivilegedExceptionAction<sun.misc.Unsafe>() {
    public sun.misc.Unsafe run() throws Exception {
        Class<sun.misc.Unsafe> k = sun.misc.Unsafe.class;
        for (java.lang.reflect.Field f : k.getDeclaredFields()) {
            f.setAccessible(true);
            Object x = f.get(null);
            if (k.isInstance(x))
                return k.cast(x);
        }
        // No suitable field found — environment does not expose Unsafe this way.
        throw new NoSuchFieldError("the Unsafe");
    }}); | 0true
| src_main_java_jsr166e_extra_AtomicDouble.java |
/**
 * Runtime-visible marker annotation applicable only to fields.
 * NOTE(review): presumably designates the field holding the record's version
 * number for object-to-document mapping — confirm against the binding code.
 */
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.FIELD)
@Documented
public @interface OVersion {
}
| core_src_main_java_com_orientechnologies_orient_core_annotation_OVersion.java |
// Signals that a named configuration resource could not be resolved to a
// concrete location. Plain delegating constructors; no extra state.
2,674 | public class FailedToResolveConfigException extends ElasticsearchException {
    /**
     * @param msg description of the configuration that could not be resolved
     */
    public FailedToResolveConfigException(String msg) {
        super(msg);
    }
    /**
     * @param msg   description of the configuration that could not be resolved
     * @param cause underlying failure encountered during resolution
     */
    public FailedToResolveConfigException(String msg, Throwable cause) {
        super(msg, cause);
    }
} | 0true
| src_main_java_org_elasticsearch_env_FailedToResolveConfigException.java |
// Fragment: map literal built via double-brace initialization (an anonymous
// HashMap subclass with an instance initializer). NOTE(review): this idiom
// creates an extra class and captures the enclosing instance; acceptable in a
// test fixture, avoid in production code.
2,532 | new HashMap<String, Object>() {{
    put("field", "value");
    put("field2", "value2");
}}); | 0true
| src_test_java_org_elasticsearch_common_xcontent_support_XContentMapValuesTests.java |
// Listens for dynamic cluster-settings refreshes and applies a changed TTL purge
// interval to the enclosing IndicesTTLService instance.
4,520 | class ApplySettings implements NodeSettingsService.Listener {
    @Override
    public void onRefreshSettings(Settings settings) {
        // Fall back to the currently configured interval when the setting is absent.
        TimeValue interval = settings.getAsTime(INDICES_TTL_INTERVAL, IndicesTTLService.this.interval);
        if (!interval.equals(IndicesTTLService.this.interval)) {
            // Log and adopt the new value only on an actual change.
            logger.info("updating indices.ttl.interval from [{}] to [{}]", IndicesTTLService.this.interval, interval);
            IndicesTTLService.this.interval = interval;
        }
    }
} | 1no label
| src_main_java_org_elasticsearch_indices_ttl_IndicesTTLService.java |
// Exercises Joda-Time parsing/printing behaviors that Elasticsearch date handling
// relies on: multi-pattern parsers, ISO formats, custom patterns, and rounding.
2,544 | public class SimpleJodaTests extends ElasticsearchTestCase {
    // A builder with several parsers accepts any registered pattern (prints with the given printer).
    @Test
    public void testMultiParsers() {
        DateTimeFormatterBuilder builder = new DateTimeFormatterBuilder();
        DateTimeParser[] parsers = new DateTimeParser[3];
        parsers[0] = DateTimeFormat.forPattern("MM/dd/yyyy").withZone(DateTimeZone.UTC).getParser();
        parsers[1] = DateTimeFormat.forPattern("MM-dd-yyyy").withZone(DateTimeZone.UTC).getParser();
        parsers[2] = DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ss").withZone(DateTimeZone.UTC).getParser();
        builder.append(DateTimeFormat.forPattern("MM/dd/yyyy").withZone(DateTimeZone.UTC).getPrinter(), parsers);
        DateTimeFormatter formatter = builder.toFormatter();
        // Must not throw: matches the third registered parser.
        formatter.parseMillis("2009-11-15 14:12:12");
    }
    @Test
    public void testIsoDateFormatDateTimeNoMillisUTC() {
        DateTimeFormatter formatter = ISODateTimeFormat.dateTimeNoMillis().withZone(DateTimeZone.UTC);
        long millis = formatter.parseMillis("1970-01-01T00:00:00Z");
        assertThat(millis, equalTo(0l));
    }
    // parseInto only overwrites the fields present in the input; the preset
    // time-of-day fields of the MutableDateTime survive a date-only parse.
    @Test
    public void testUpperBound() {
        MutableDateTime dateTime = new MutableDateTime(3000, 12, 31, 23, 59, 59, 999, DateTimeZone.UTC);
        DateTimeFormatter formatter = ISODateTimeFormat.dateOptionalTimeParser().withZone(DateTimeZone.UTC);
        String value = "2000-01-01";
        int i = formatter.parseInto(dateTime, value, 0);
        assertThat(i, equalTo(value.length()));
        assertThat(dateTime.toString(), equalTo("2000-01-01T23:59:59.999Z"));
    }
    // dateOptionalTimeParser accepts progressively less specific inputs down to a bare year.
    @Test
    public void testIsoDateFormatDateOptionalTimeUTC() {
        DateTimeFormatter formatter = ISODateTimeFormat.dateOptionalTimeParser().withZone(DateTimeZone.UTC);
        long millis = formatter.parseMillis("1970-01-01T00:00:00Z");
        assertThat(millis, equalTo(0l));
        millis = formatter.parseMillis("1970-01-01T00:00:00.001Z");
        assertThat(millis, equalTo(1l));
        millis = formatter.parseMillis("1970-01-01T00:00:00.1Z");
        assertThat(millis, equalTo(100l));
        millis = formatter.parseMillis("1970-01-01T00:00:00.1");
        assertThat(millis, equalTo(100l));
        millis = formatter.parseMillis("1970-01-01T00:00:00");
        assertThat(millis, equalTo(0l));
        millis = formatter.parseMillis("1970-01-01");
        assertThat(millis, equalTo(0l));
        millis = formatter.parseMillis("1970");
        assertThat(millis, equalTo(0l));
        try {
            formatter.parseMillis("1970 kuku");
            fail("formatting should fail");
        } catch (IllegalArgumentException e) {
            // all is well
        }
        // test offset in format
        millis = formatter.parseMillis("1970-01-01T00:00:00-02:00");
        assertThat(millis, equalTo(TimeValue.timeValueHours(2).millis()));
    }
    // ISO parser, a raw Joda pattern, and the ES Joda wrapper all agree on epoch.
    @Test
    public void testIsoVsCustom() {
        DateTimeFormatter formatter = ISODateTimeFormat.dateOptionalTimeParser().withZone(DateTimeZone.UTC);
        long millis = formatter.parseMillis("1970-01-01T00:00:00");
        assertThat(millis, equalTo(0l));
        formatter = DateTimeFormat.forPattern("yyyy/MM/dd HH:mm:ss").withZone(DateTimeZone.UTC);
        millis = formatter.parseMillis("1970/01/01 00:00:00");
        assertThat(millis, equalTo(0l));
        FormatDateTimeFormatter formatter2 = Joda.forPattern("yyyy/MM/dd HH:mm:ss");
        millis = formatter2.parser().parseMillis("1970/01/01 00:00:00");
        assertThat(millis, equalTo(0l));
    }
    // Round trip: print with full ISO writer, re-parse with the optional-time parser.
    @Test
    public void testWriteAndParse() {
        DateTimeFormatter dateTimeWriter = ISODateTimeFormat.dateTime().withZone(DateTimeZone.UTC);
        DateTimeFormatter formatter = ISODateTimeFormat.dateOptionalTimeParser().withZone(DateTimeZone.UTC);
        Date date = new Date();
        assertThat(formatter.parseMillis(dateTimeWriter.print(date.getTime())), equalTo(date.getTime()));
    }
    // Slashes in a pattern must not be treated as the multi-format separator.
    @Test
    public void testSlashInFormat() {
        FormatDateTimeFormatter formatter = Joda.forPattern("MM/yyyy");
        formatter.parser().parseMillis("01/2001");
        formatter = Joda.forPattern("yyyy/MM/dd HH:mm:ss");
        long millis = formatter.parser().parseMillis("1970/01/01 00:00:00");
        formatter.printer().print(millis);
        try {
            // Input missing the mandatory time component must be rejected.
            millis = formatter.parser().parseMillis("1970/01/01");
            fail();
        } catch (IllegalArgumentException e) {
            // it really can't parse this one
        }
    }
    // "||"-separated pattern lists: first pattern is the printer, all are parsers.
    @Test
    public void testMultipleFormats() {
        FormatDateTimeFormatter formatter = Joda.forPattern("yyyy/MM/dd HH:mm:ss||yyyy/MM/dd");
        long millis = formatter.parser().parseMillis("1970/01/01 00:00:00");
        assertThat("1970/01/01 00:00:00", is(formatter.printer().print(millis)));
    }
    // Mixing named formats and raw patterns in any order must be accepted.
    @Test
    public void testMultipleDifferentFormats() {
        FormatDateTimeFormatter formatter = Joda.forPattern("yyyy/MM/dd HH:mm:ss||yyyy/MM/dd");
        String input = "1970/01/01 00:00:00";
        long millis = formatter.parser().parseMillis(input);
        assertThat(input, is(formatter.printer().print(millis)));
        Joda.forPattern("yyyy/MM/dd HH:mm:ss||yyyy/MM/dd||dateOptionalTime");
        Joda.forPattern("dateOptionalTime||yyyy/MM/dd HH:mm:ss||yyyy/MM/dd");
        Joda.forPattern("yyyy/MM/dd HH:mm:ss||dateOptionalTime||yyyy/MM/dd");
        Joda.forPattern("date_time||date_time_no_millis");
        Joda.forPattern(" date_time || date_time_no_millis");
    }
    // Bad or empty pattern specs must fail fast with a descriptive message.
    @Test
    public void testInvalidPatterns() {
        expectInvalidPattern("does_not_exist_pattern", "Invalid format: [does_not_exist_pattern]: Illegal pattern component: o");
        expectInvalidPattern("OOOOO", "Invalid format: [OOOOO]: Illegal pattern component: OOOOO");
        expectInvalidPattern(null, "No date pattern provided");
        expectInvalidPattern("", "No date pattern provided");
        expectInvalidPattern(" ", "No date pattern provided");
        expectInvalidPattern("||date_time_no_millis", "No date pattern provided");
        expectInvalidPattern("date_time_no_millis||", "No date pattern provided");
    }
    // Helper: asserts Joda.forPattern rejects the pattern with the given message fragment.
    private void expectInvalidPattern(String pattern, String errorMessage) {
        try {
            Joda.forPattern(pattern);
            fail("Pattern " + pattern + " should have thrown an exception but did not");
        } catch (IllegalArgumentException e) {
            assertThat(e.getMessage(), containsString(errorMessage));
        }
    }
    // Field-level roundFloor truncates to the start of month/hour/day respectively.
    @Test
    public void testRounding() {
        long TIME = utcTimeInMillis("2009-02-03T01:01:01");
        MutableDateTime time = new MutableDateTime(DateTimeZone.UTC);
        time.setMillis(TIME);
        assertThat(time.monthOfYear().roundFloor().toString(), equalTo("2009-02-01T00:00:00.000Z"));
        time.setMillis(TIME);
        assertThat(time.hourOfDay().roundFloor().toString(), equalTo("2009-02-03T01:00:00.000Z"));
        time.setMillis(TIME);
        assertThat(time.dayOfMonth().roundFloor().toString(), equalTo("2009-02-03T00:00:00.000Z"));
    }
    // setRounding makes every subsequent setMillis snap to the configured field floor.
    @Test
    public void testRoundingSetOnTime() {
        MutableDateTime time = new MutableDateTime(DateTimeZone.UTC);
        time.setRounding(time.getChronology().monthOfYear(), MutableDateTime.ROUND_FLOOR);
        time.setMillis(utcTimeInMillis("2009-02-03T01:01:01"));
        assertThat(time.toString(), equalTo("2009-02-01T00:00:00.000Z"));
        assertThat(time.getMillis(), equalTo(utcTimeInMillis("2009-02-01T00:00:00.000Z")));
        time.setMillis(utcTimeInMillis("2009-05-03T01:01:01"));
        assertThat(time.toString(), equalTo("2009-05-01T00:00:00.000Z"));
        assertThat(time.getMillis(), equalTo(utcTimeInMillis("2009-05-01T00:00:00.000Z")));
        time = new MutableDateTime(DateTimeZone.UTC);
        time.setRounding(time.getChronology().dayOfMonth(), MutableDateTime.ROUND_FLOOR);
        time.setMillis(utcTimeInMillis("2009-02-03T01:01:01"));
        assertThat(time.toString(), equalTo("2009-02-03T00:00:00.000Z"));
        assertThat(time.getMillis(), equalTo(utcTimeInMillis("2009-02-03T00:00:00.000Z")));
        time.setMillis(utcTimeInMillis("2009-02-02T23:01:01"));
        assertThat(time.toString(), equalTo("2009-02-02T00:00:00.000Z"));
        assertThat(time.getMillis(), equalTo(utcTimeInMillis("2009-02-02T00:00:00.000Z")));
        time = new MutableDateTime(DateTimeZone.UTC);
        time.setRounding(time.getChronology().weekOfWeekyear(), MutableDateTime.ROUND_FLOOR);
        time.setMillis(utcTimeInMillis("2011-05-05T01:01:01"));
        assertThat(time.toString(), equalTo("2011-05-02T00:00:00.000Z"));
        assertThat(time.getMillis(), equalTo(utcTimeInMillis("2011-05-02T00:00:00.000Z")));
    }
    // Rounding happens in the instance's own zone, so a -02:00 zone floors to a
    // different instant than UTC for the same wall-clock input.
    @Test
    public void testRoundingWithTimeZone() {
        MutableDateTime time = new MutableDateTime(DateTimeZone.UTC);
        time.setZone(DateTimeZone.forOffsetHours(-2));
        time.setRounding(time.getChronology().dayOfMonth(), MutableDateTime.ROUND_FLOOR);
        MutableDateTime utcTime = new MutableDateTime(DateTimeZone.UTC);
        utcTime.setRounding(utcTime.getChronology().dayOfMonth(), MutableDateTime.ROUND_FLOOR);
        time.setMillis(utcTimeInMillis("2009-02-03T01:01:01"));
        utcTime.setMillis(utcTimeInMillis("2009-02-03T01:01:01"));
        assertThat(time.toString(), equalTo("2009-02-02T00:00:00.000-02:00"));
        assertThat(utcTime.toString(), equalTo("2009-02-03T00:00:00.000Z"));
        // the time is on the 2nd, and utcTime is on the 3rd, but, because time already encapsulates
        // time zone, the millis diff is not 24, but 22 hours
        assertThat(time.getMillis(), equalTo(utcTime.getMillis() - TimeValue.timeValueHours(22).millis()));
        time.setMillis(utcTimeInMillis("2009-02-04T01:01:01"));
        utcTime.setMillis(utcTimeInMillis("2009-02-04T01:01:01"));
        assertThat(time.toString(), equalTo("2009-02-03T00:00:00.000-02:00"));
        assertThat(utcTime.toString(), equalTo("2009-02-04T00:00:00.000Z"));
        assertThat(time.getMillis(), equalTo(utcTime.getMillis() - TimeValue.timeValueHours(22).millis()));
    }
    // Helper: parses an ISO timestamp as UTC epoch millis.
    private long utcTimeInMillis(String time) {
        return ISODateTimeFormat.dateOptionalTimeParser().withZone(DateTimeZone.UTC).parseMillis(time);
    }
} | 0true
| src_test_java_org_elasticsearch_deps_joda_SimpleJodaTests.java |
// Worker for the storage modification-lock stress test: after all workers are
// released together via countDownLatch, each cycle acquires the modification
// lock, increments the shared counter a random number of times with short
// sleeps, and releases the lock in a finally block.
1,227 | private final class Counter implements Callable<Void> {
    private final Random random = new Random();
    public Void call() throws Exception {
        // Block until the test driver releases all workers simultaneously.
        countDownLatch.await();
        for (int n = 0; n < CYCLES_COUNT; n++) {
            final int modificationsCount = random.nextInt(255);
            modificationLock.requestModificationLock();
            try {
                for (int i = 0; i < modificationsCount; i++) {
                    counter.incrementAndGet();
                    // Small sleep widens the window for lock-contention interleavings.
                    Thread.sleep(random.nextInt(5));
                }
            } finally {
                // Always release, even when interrupted mid-cycle.
                modificationLock.releaseModificationLock();
            }
        }
        return null;
    }
} | 0true
| core_src_test_java_com_orientechnologies_orient_core_storage_StorageModificationLockTest.java |
// Helpers for the AWS EC2 describe-instances flow: building the request query
// string and extracting private IP addresses from the XML response while
// applying the configured instance-state, tag and security-group filters.
67 | public class CloudyUtility {
    final static ILogger logger = Logger.getLogger(CloudyUtility.class);
    private CloudyUtility() {
    }
    // Builds a "?k=v&k=v" query string from the attribute map, URL-encoding both sides.
    public static String getQueryString(Map<String, String> attributes) {
        StringBuilder query = new StringBuilder();
        for (Iterator<Map.Entry<String,String>> iterator = attributes.entrySet().iterator(); iterator.hasNext(); ) {
            final Map.Entry<String,String> entry = iterator.next();
            final String value = entry.getValue();
            query.append(AwsURLEncoder.urlEncode(entry.getKey())).append("=").append(AwsURLEncoder.urlEncode(value)).append("&");
        }
        String result = query.toString();
        // NOTE(review): result is never null here (StringBuilder.toString());
        // only the empty check is meaningful. Prefix '?' and drop the trailing '&'.
        if (result != null && !result.equals(""))
            result = "?" + result.substring(0, result.length() - 1);
        return result;
    }
    // Parses the describe-instances XML response into a list of private IPs.
    public static Object unmarshalTheResponse(InputStream stream, AwsConfig awsConfig) throws IOException {
        Object o = parse(stream, awsConfig);
        return o;
    }
    // Walks reservationset/item/instancesset and collects matching private IP
    // addresses. Any parse failure is logged and yields an empty list (best effort).
    private static Object parse(InputStream in, AwsConfig awsConfig) {
        final DocumentBuilder builder;
        try {
            builder = DocumentBuilderFactory.newInstance().newDocumentBuilder();
            Document doc = builder.parse(in);
            Element element = doc.getDocumentElement();
            NodeHolder elementNodeHolder = new NodeHolder(element);
            List<String> names = new ArrayList<String>();
            List<NodeHolder> reservationset = elementNodeHolder.getSubNodes("reservationset");
            for (NodeHolder reservation : reservationset) {
                List<NodeHolder> items = reservation.getSubNodes("item");
                for (NodeHolder item : items) {
                    NodeHolder instancesset = item.getSub("instancesset");
                    names.addAll(instancesset.getList("privateipaddress", awsConfig));
                }
            }
            return names;
        } catch (Exception e) {
            // Deliberately best-effort: a malformed response degrades to "no members found".
            logger.warning(e);
        }
        return new ArrayList<String>();
    }
    // Null-tolerant wrapper around a DOM node with case-normalized child lookups.
    static class NodeHolder {
        Node node;
        public NodeHolder(Node node) {
            this.node = node;
        }
        // Returns the first child with the given (cleaned) name; a holder wrapping
        // null when absent, so lookups can be chained safely.
        public NodeHolder getSub(String name) {
            if (node != null) {
                for (org.w3c.dom.Node node : new AbstractXmlConfigHelper.IterableNodeList(this.node.getChildNodes())) {
                    String nodeName = cleanNodeName(node.getNodeName());
                    if (name.equals(nodeName)) {
                        return new NodeHolder(node);
                    }
                }
            }
            return new NodeHolder(null);
        }
        // Returns all direct children with the given (cleaned) name.
        public List<NodeHolder> getSubNodes(String name) {
            List<NodeHolder> list = new ArrayList<NodeHolder>();
            if (node != null) {
                for (org.w3c.dom.Node node : new AbstractXmlConfigHelper.IterableNodeList(this.node.getChildNodes())) {
                    String nodeName = cleanNodeName(node.getNodeName());
                    if (name.equals(nodeName)) {
                        list.add(new NodeHolder(node));
                    }
                }
            }
            return list;
        }
        // For each <item> child: reads the value named by 'name' (e.g. the private
        // IP) and keeps it only when the instance is running and matches the
        // configured tag and security-group filters.
        public List<String> getList(String name, AwsConfig awsConfig) {
            List<String> list = new ArrayList<String>();
            if (node == null) return list;
            for (org.w3c.dom.Node node : new AbstractXmlConfigHelper.IterableNodeList(this.node.getChildNodes())) {
                String nodeName = cleanNodeName(node.getNodeName());
                if (!"item".equals(nodeName)) continue;
                final NodeHolder nodeHolder = new NodeHolder(node);
                final String state = getState(nodeHolder);
                final String ip = getIp(name, nodeHolder);
                final String instanceName = getInstanceName(nodeHolder);
                if (ip != null) {
                    if (!acceptState(state)) {
                        logger.finest(format("Ignoring EC2 instance [%s][%s] reason: the instance is not running but %s", instanceName, ip, state));
                    } else if (!acceptTag(awsConfig, node)) {
                        logger.finest(format("Ignoring EC2 instance [%s][%s] reason: tag-key/tag-value don't match", instanceName, ip));
                    } else if (!acceptGroupName(awsConfig, node)) {
                        logger.finest(format("Ignoring EC2 instance [%s][%s] reason: security-group-name doesn't match", instanceName, ip));
                    } else {
                        list.add(ip);
                        logger.finest(format("Accepting EC2 instance [%s][%s]",instanceName, ip));
                    }
                }
            }
            return list;
        }
        // Only running instances are eligible for discovery.
        private boolean acceptState(String state) {
            return "running".equals(state);
        }
        private static String getState(NodeHolder nodeHolder) {
            final NodeHolder instancestate = nodeHolder.getSub("instancestate");
            return instancestate.getSub("name").getNode().getFirstChild().getNodeValue();
        }
        // Reads the value of the "Name" tag from the instance's tagset, or null.
        private static String getInstanceName(NodeHolder nodeHolder) {
            final NodeHolder tagSetNode = nodeHolder.getSub("tagset");
            if (tagSetNode.getNode() == null) {
                return null;
            }
            final NodeList childNodes = tagSetNode.getNode().getChildNodes();
            for (int k = 0; k < childNodes.getLength(); k++) {
                Node item = childNodes.item(k);
                if (!item.getNodeName().equals("item")) continue;
                NodeHolder itemHolder = new NodeHolder(item);
                final Node keyNode = itemHolder.getSub("key").getNode();
                if (keyNode == null || keyNode.getFirstChild() == null) continue;
                final String nodeValue = keyNode.getFirstChild().getNodeValue();
                if (!"Name".equals(nodeValue)) continue;
                final Node valueNode = itemHolder.getSub("value").getNode();
                if (valueNode == null || valueNode.getFirstChild() == null) continue;
                return valueNode.getFirstChild().getNodeValue();
            }
            return null;
        }
        private static String getIp(String name, NodeHolder nodeHolder) {
            final Node node1 = nodeHolder.getSub(name).getNode();
            return node1 == null ? null : node1.getFirstChild().getNodeValue();
        }
        private boolean acceptTag(AwsConfig awsConfig, Node node) {
            return applyTagFilter(node, awsConfig.getTagKey(), awsConfig.getTagValue());
        }
        private boolean acceptGroupName(AwsConfig awsConfig, Node node) {
            return applyFilter(node, awsConfig.getSecurityGroupName(), "groupset", "groupname");
        }
        // Generic filter: empty filter accepts everything; otherwise at least one
        // <set>/<item>/<filterField> value must equal the filter.
        private boolean applyFilter(Node node, String filter, String set, String filterField) {
            if (nullOrEmpty(filter)) {
                return true;
            } else {
                for (NodeHolder group : new NodeHolder(node).getSub(set).getSubNodes("item")) {
                    NodeHolder nh = group.getSub(filterField);
                    if (nh != null && nh.getNode().getFirstChild() != null && filter.equals(nh.getNode().getFirstChild().getNodeValue())) {
                        return true;
                    }
                }
                return false;
            }
        }
        // Tag filter: key must match; value only checked when a value is configured.
        private boolean applyTagFilter(Node node, String keyExpected, String valueExpected) {
            if (nullOrEmpty(keyExpected)) {
                return true;
            } else {
                for (NodeHolder group : new NodeHolder(node).getSub("tagset").getSubNodes("item")) {
                    if (keyEquals(keyExpected, group) &&
                            (nullOrEmpty(valueExpected) || valueEquals(valueExpected, group))) {
                        return true;
                    }
                }
                return false;
            }
        }
        private boolean valueEquals(String valueExpected, NodeHolder group) {
            NodeHolder nhValue = group.getSub("value");
            return nhValue != null && nhValue.getNode().getFirstChild() != null && valueExpected.equals(nhValue.getNode().getFirstChild().getNodeValue());
        }
        private boolean nullOrEmpty(String keyExpected) {
            return keyExpected == null || keyExpected.equals("");
        }
        private boolean keyEquals(String keyExpected, NodeHolder group) {
            NodeHolder nhKey = group.getSub("key");
            return nhKey != null && nhKey.getNode().getFirstChild() != null && keyExpected.equals(nhKey.getNode().getFirstChild().getNodeValue());
        }
        public Node getNode() {
            return node;
        }
    }
} | 1no label
| hazelcast-cloud_src_main_java_com_hazelcast_aws_utility_CloudyUtility.java |
// Geo predicate constant: true when 'value' lies within the 'condition' shape.
// Enum header is outside this view; this is the constant's body only.
94 | WITHIN {
    @Override
    public boolean evaluate(Object value, Object condition) {
        // Condition must always be a Geoshape; a null value never matches.
        Preconditions.checkArgument(condition instanceof Geoshape);
        if (value == null) return false;
        Preconditions.checkArgument(value instanceof Geoshape);
        return ((Geoshape) value).within((Geoshape) condition);
    }
    @Override
    public String toString() {
        return "within";
    }
    // No complementary predicate is defined for WITHIN.
    @Override
    public boolean hasNegation() {
        return false;
    }
    @Override
    public TitanPredicate negate() {
        throw new UnsupportedOperationException();
    }
}; | 0true
| titan-core_src_main_java_com_thinkaurelius_titan_core_attribute_Geo.java |
// Fragment: registers a callback that stashes either the response or the
// failure into 'reference' and releases 'latch2' in both outcomes, so the
// waiting test thread can assert on whichever arrived.
// NOTE(review): ExecutionCallback is used as a raw type here — parameterize if the API allows.
1,350 | completableFuture.andThen(new ExecutionCallback() {
    @Override
    public void onResponse(Object response) {
        reference.set(response);
        latch2.countDown();
    }
    @Override
    public void onFailure(Throwable t) {
        reference.set(t);
        latch2.countDown();
    }
}); | 0true
| hazelcast_src_test_java_com_hazelcast_executor_ExecutorServiceTest.java |
// Client request that registers a cluster MembershipListener for the calling
// endpoint. Member added/removed/attribute-change events are forwarded to the
// client as ClientMembershipEvents; the immediate reply is the serialized
// snapshot of the current member list.
633 | public final class AddMembershipListenerRequest extends CallableClientRequest implements RetryableRequest {
    public AddMembershipListenerRequest() {
    }
    @Override
    public Object call() throws Exception {
        final ClusterServiceImpl service = getService();
        final ClientEndpoint endpoint = getEndpoint();
        final String registrationId = service.addMembershipListener(new MembershipListener() {
            @Override
            public void memberAdded(MembershipEvent membershipEvent) {
                // Forward only while the client connection is still alive.
                if (endpoint.live()) {
                    final MemberImpl member = (MemberImpl) membershipEvent.getMember();
                    endpoint.sendEvent(new ClientMembershipEvent(member, MembershipEvent.MEMBER_ADDED), getCallId());
                }
            }
            @Override
            public void memberRemoved(MembershipEvent membershipEvent) {
                if (endpoint.live()) {
                    final MemberImpl member = (MemberImpl) membershipEvent.getMember();
                    endpoint.sendEvent(new ClientMembershipEvent(member, MembershipEvent.MEMBER_REMOVED), getCallId());
                }
            }
            public void memberAttributeChanged(MemberAttributeEvent memberAttributeEvent) {
                if (endpoint.live()) {
                    // Repackage the attribute change so it can travel to the client.
                    final MemberImpl member = (MemberImpl) memberAttributeEvent.getMember();
                    final String uuid = member.getUuid();
                    final MemberAttributeOperationType op = memberAttributeEvent.getOperationType();
                    final String key = memberAttributeEvent.getKey();
                    final Object value = memberAttributeEvent.getValue();
                    final MemberAttributeChange memberAttributeChange = new MemberAttributeChange(uuid, op, key, value);
                    endpoint.sendEvent(new ClientMembershipEvent(member, memberAttributeChange), getCallId());
                }
            }
        });
        // Track the registration on the endpoint so it is cleaned up on disconnect.
        final String name = ClusterServiceImpl.SERVICE_NAME;
        endpoint.setListenerRegistration(name, name, registrationId);
        // Reply with the current member list, serialized member by member.
        final Collection<MemberImpl> memberList = service.getMemberList();
        final Collection<Data> response = new ArrayList<Data>(memberList.size());
        final SerializationService serializationService = getClientEngine().getSerializationService();
        for (MemberImpl member : memberList) {
            response.add(serializationService.toData(member));
        }
        return new SerializableCollection(response);
    }
    public String getServiceName() {
        return ClusterServiceImpl.SERVICE_NAME;
    }
    public int getFactoryId() {
        return ClientPortableHook.ID;
    }
    public int getClassId() {
        return ClientPortableHook.MEMBERSHIP_LISTENER;
    }
    @Override
    public Permission getRequiredPermission() {
        // null: no permission check is enforced for this request.
        return null;
    }
} | 0true
| hazelcast_src_main_java_com_hazelcast_cluster_client_AddMembershipListenerRequest.java |
2,811 | public class AnalysisTestsHelper {
public static AnalysisService createAnalysisServiceFromClassPath(String resource) {
Settings settings = ImmutableSettings.settingsBuilder()
.loadFromClasspath(resource).build();
return createAnalysisServiceFromSettings(settings);
}
public static AnalysisService createAnalysisServiceFromSettings(
Settings settings) {
Index index = new Index("test");
Injector parentInjector = new ModulesBuilder().add(new SettingsModule(settings),
new EnvironmentModule(new Environment(settings)), new IndicesAnalysisModule()).createInjector();
AnalysisModule analysisModule = new AnalysisModule(settings,
parentInjector.getInstance(IndicesAnalysisService.class));
Injector injector = new ModulesBuilder().add(new IndexSettingsModule(index, settings),
new IndexNameModule(index), analysisModule).createChildInjector(parentInjector);
return injector.getInstance(AnalysisService.class);
}
} | 0true
| src_test_java_org_elasticsearch_index_analysis_AnalysisTestsHelper.java |
/**
 * Configuration for symmetric encryption: enabled flag, PBE salt/password,
 * iteration count, cipher algorithm, and an optional raw key.
 * Setters return {@code this} for fluent chaining. The raw key is defensively
 * copied on both read and write so callers cannot mutate internal state.
 * {@code toString()} intentionally omits the salt and password.
 */
public class SymmetricEncryptionConfig {

    private boolean enabled = false;
    private String salt = "thesalt";
    private String password = "thepassword";
    private int iterationCount = 19;
    private String algorithm = "PBEWithMD5AndDES";
    private byte[] key = null;

    /** @return whether symmetric encryption is enabled */
    public boolean isEnabled() {
        return enabled;
    }

    public SymmetricEncryptionConfig setEnabled(boolean enabled) {
        this.enabled = enabled;
        return this;
    }

    /** @return the PBE salt */
    public String getSalt() {
        return salt;
    }

    public SymmetricEncryptionConfig setSalt(String salt) {
        this.salt = salt;
        return this;
    }

    /** @return the PBE password */
    public String getPassword() {
        return password;
    }

    public SymmetricEncryptionConfig setPassword(String password) {
        this.password = password;
        return this;
    }

    /** @return the PBE iteration count */
    public int getIterationCount() {
        return iterationCount;
    }

    public SymmetricEncryptionConfig setIterationCount(int iterationCount) {
        this.iterationCount = iterationCount;
        return this;
    }

    /** @return the cipher algorithm name */
    public String getAlgorithm() {
        return algorithm;
    }

    public SymmetricEncryptionConfig setAlgorithm(String algorithm) {
        this.algorithm = algorithm;
        return this;
    }

    /** @return a defensive copy of the raw key, or {@code null} when unset */
    public byte[] getKey() {
        if (key == null) {
            return null;
        }
        return Arrays.copyOf(key, key.length);
    }

    /** Stores a defensive copy of the given raw key ({@code null} clears it). */
    public SymmetricEncryptionConfig setKey(byte[] key) {
        if (key == null) {
            this.key = null;
        } else {
            this.key = Arrays.copyOf(key, key.length);
        }
        return this;
    }

    @Override
    public String toString() {
        // Salt and password are deliberately excluded from the rendering.
        return "SymmetricEncryptionConfig{"
                + "enabled=" + enabled
                + ", iterationCount=" + iterationCount
                + ", algorithm='" + algorithm + '\''
                + ", key=" + Arrays.toString(key)
                + '}';
    }
}
| hazelcast_src_main_java_com_hazelcast_config_SymmetricEncryptionConfig.java |
1,511 | public class FilterRoutingTests extends ElasticsearchAllocationTestCase {
private final ESLogger logger = Loggers.getLogger(FilterRoutingTests.class);
// Verifies that cluster-level include/exclude allocation filters confine all
// shards (primaries and replicas) to the nodes tagged with the included values.
@Test
public void testClusterFilters() {
    AllocationService strategy = createAllocationService(settingsBuilder()
            .put("cluster.routing.allocation.include.tag1", "value1,value2")
            .put("cluster.routing.allocation.exclude.tag1", "value3,value4")
            .build());
    logger.info("Building initial routing table");
    MetaData metaData = MetaData.builder()
            .put(IndexMetaData.builder("test").numberOfShards(2).numberOfReplicas(1))
            .build();
    RoutingTable routingTable = RoutingTable.builder()
            .addAsNew(metaData.index("test"))
            .build();
    ClusterState clusterState = ClusterState.builder().metaData(metaData).routingTable(routingTable).build();
    logger.info("--> adding four nodes and performing rerouting");
    // One node per tag value; only node1/node2 satisfy the include filter.
    clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder()
            .put(newNode("node1", ImmutableMap.of("tag1", "value1")))
            .put(newNode("node2", ImmutableMap.of("tag1", "value2")))
            .put(newNode("node3", ImmutableMap.of("tag1", "value3")))
            .put(newNode("node4", ImmutableMap.of("tag1", "value4")))
    ).build();
    routingTable = strategy.reroute(clusterState).routingTable();
    clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
    assertThat(clusterState.routingNodes().shardsWithState(INITIALIZING).size(), equalTo(2));
    logger.info("--> start the shards (primaries)");
    routingTable = strategy.applyStartedShards(clusterState, clusterState.routingNodes().shardsWithState(INITIALIZING)).routingTable();
    clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
    logger.info("--> start the shards (replicas)");
    routingTable = strategy.applyStartedShards(clusterState, clusterState.routingNodes().shardsWithState(INITIALIZING)).routingTable();
    clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
    logger.info("--> make sure shards are only allocated on tag1 with value1 and value2");
    List<MutableShardRouting> startedShards = clusterState.routingNodes().shardsWithState(ShardRoutingState.STARTED);
    assertThat(startedShards.size(), equalTo(4));
    for (MutableShardRouting startedShard : startedShards) {
        assertThat(startedShard.currentNodeId(), Matchers.anyOf(equalTo("node1"), equalTo("node2")));
    }
}
@Test
public void testIndexFilters() {
AllocationService strategy = createAllocationService(settingsBuilder()
.build());
logger.info("Building initial routing table");
MetaData metaData = MetaData.builder()
.put(IndexMetaData.builder("test").settings(settingsBuilder()
.put("index.number_of_shards", 2)
.put("index.number_of_replicas", 1)
.put("index.routing.allocation.include.tag1", "value1,value2")
.put("index.routing.allocation.exclude.tag1", "value3,value4")
.build()))
.build();
RoutingTable routingTable = RoutingTable.builder()
.addAsNew(metaData.index("test"))
.build();
ClusterState clusterState = ClusterState.builder().metaData(metaData).routingTable(routingTable).build();
logger.info("--> adding two nodes and performing rerouting");
clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder()
.put(newNode("node1", ImmutableMap.of("tag1", "value1")))
.put(newNode("node2", ImmutableMap.of("tag1", "value2")))
.put(newNode("node3", ImmutableMap.of("tag1", "value3")))
.put(newNode("node4", ImmutableMap.of("tag1", "value4")))
).build();
routingTable = strategy.reroute(clusterState).routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
assertThat(clusterState.routingNodes().shardsWithState(INITIALIZING).size(), equalTo(2));
logger.info("--> start the shards (primaries)");
routingTable = strategy.applyStartedShards(clusterState, clusterState.routingNodes().shardsWithState(INITIALIZING)).routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
logger.info("--> start the shards (replicas)");
routingTable = strategy.applyStartedShards(clusterState, clusterState.routingNodes().shardsWithState(INITIALIZING)).routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
logger.info("--> make sure shards are only allocated on tag1 with value1 and value2");
List<MutableShardRouting> startedShards = clusterState.routingNodes().shardsWithState(ShardRoutingState.STARTED);
assertThat(startedShards.size(), equalTo(4));
for (MutableShardRouting startedShard : startedShards) {
assertThat(startedShard.currentNodeId(), Matchers.anyOf(equalTo("node1"), equalTo("node2")));
}
logger.info("--> switch between value2 and value4, shards should be relocating");
metaData = MetaData.builder()
.put(IndexMetaData.builder("test").settings(settingsBuilder()
.put("index.number_of_shards", 2)
.put("index.number_of_replicas", 1)
.put("index.routing.allocation.include.tag1", "value1,value4")
.put("index.routing.allocation.exclude.tag1", "value2,value3")
.build()))
.build();
clusterState = ClusterState.builder(clusterState).metaData(metaData).build();
routingTable = strategy.reroute(clusterState).routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
assertThat(clusterState.routingNodes().shardsWithState(ShardRoutingState.STARTED).size(), equalTo(2));
assertThat(clusterState.routingNodes().shardsWithState(ShardRoutingState.RELOCATING).size(), equalTo(2));
logger.info("--> finish relocation");
routingTable = strategy.applyStartedShards(clusterState, clusterState.routingNodes().shardsWithState(INITIALIZING)).routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
startedShards = clusterState.routingNodes().shardsWithState(ShardRoutingState.STARTED);
assertThat(startedShards.size(), equalTo(4));
for (MutableShardRouting startedShard : startedShards) {
assertThat(startedShard.currentNodeId(), Matchers.anyOf(equalTo("node1"), equalTo("node4")));
}
}
} | 0true
| src_test_java_org_elasticsearch_cluster_routing_allocation_FilterRoutingTests.java |
571 | public class ODefaultIndexFactory implements OIndexFactory {
public static final String SBTREE_ALGORITHM = "SBTREE";
public static final String MVRBTREE_ALGORITHM = "MVRBTREE";
public static final String MVRBTREE_VALUE_CONTAINER = "MVRBTREESET";
public static final String SBTREEBONSAI_VALUE_CONTAINER = "SBTREEBONSAISET";
public static final String NONE_VALUE_CONTAINER = "NONE";
private static final Set<String> TYPES;
static {
final Set<String> types = new HashSet<String>();
types.add(OClass.INDEX_TYPE.UNIQUE.toString());
types.add(OClass.INDEX_TYPE.NOTUNIQUE.toString());
types.add(OClass.INDEX_TYPE.FULLTEXT.toString());
types.add(OClass.INDEX_TYPE.DICTIONARY.toString());
TYPES = Collections.unmodifiableSet(types);
}
/**
* Index types :
* <ul>
* <li>UNIQUE</li>
* <li>NOTUNIQUE</li>
* <li>FULLTEXT</li>
* <li>DICTIONARY</li>
* </ul>
*/
public Set<String> getTypes() {
return TYPES;
}
public OIndexInternal<?> createIndex(ODatabaseRecord database, String indexType, String algorithm, String valueContainerAlgorithm)
throws OConfigurationException {
if (valueContainerAlgorithm == null) {
if (OClass.INDEX_TYPE.NOTUNIQUE.toString().equals(indexType)
|| OClass.INDEX_TYPE.NOTUNIQUE_HASH_INDEX.toString().equals(indexType)
|| OClass.INDEX_TYPE.FULLTEXT_HASH_INDEX.toString().equals(indexType)
|| OClass.INDEX_TYPE.FULLTEXT.toString().equals(indexType))
valueContainerAlgorithm = MVRBTREE_VALUE_CONTAINER;
else
valueContainerAlgorithm = NONE_VALUE_CONTAINER;
}
if ((database.getStorage().getType().equals(OEngineLocalPaginated.NAME) || database.getStorage().getType()
.equals(OEngineLocal.NAME))
&& valueContainerAlgorithm.equals(ODefaultIndexFactory.MVRBTREE_VALUE_CONTAINER)
&& OGlobalConfiguration.INDEX_NOTUNIQUE_USE_SBTREE_CONTAINER_BY_DEFAULT.getValueAsBoolean()) {
OLogManager
.instance()
.warn(
this,
"Index was created using %s as values container. "
+ "This container is deprecated and is not supported any more. To avoid this message please drop and recreate indexes or perform DB export/import.",
valueContainerAlgorithm);
}
if (SBTREE_ALGORITHM.equals(algorithm))
return createSBTreeIndex(indexType, valueContainerAlgorithm);
if (MVRBTREE_ALGORITHM.equals(algorithm) || algorithm == null)
return createMRBTreeIndex(indexType, valueContainerAlgorithm);
throw new OConfigurationException("Unsupported type : " + indexType);
}
private OIndexInternal<?> createMRBTreeIndex(String indexType, String valueContainerAlgorithm) {
if (OClass.INDEX_TYPE.UNIQUE.toString().equals(indexType)) {
return new OIndexUnique(indexType, MVRBTREE_ALGORITHM, new OMVRBTreeIndexEngine<OIdentifiable>(), valueContainerAlgorithm);
} else if (OClass.INDEX_TYPE.NOTUNIQUE.toString().equals(indexType)) {
return new OIndexNotUnique(indexType, MVRBTREE_ALGORITHM, new OMVRBTreeIndexEngine<Set<OIdentifiable>>(),
valueContainerAlgorithm);
} else if (OClass.INDEX_TYPE.FULLTEXT.toString().equals(indexType)) {
return new OIndexFullText(indexType, MVRBTREE_ALGORITHM, new OMVRBTreeIndexEngine<Set<OIdentifiable>>(),
valueContainerAlgorithm);
} else if (OClass.INDEX_TYPE.DICTIONARY.toString().equals(indexType)) {
return new OIndexDictionary(indexType, MVRBTREE_ALGORITHM, new OMVRBTreeIndexEngine<OIdentifiable>(), valueContainerAlgorithm);
}
throw new OConfigurationException("Unsupported type : " + indexType);
}
private OIndexInternal<?> createSBTreeIndex(String indexType, String valueContainerAlgorithm) {
if (OClass.INDEX_TYPE.UNIQUE.toString().equals(indexType)) {
return new OIndexUnique(indexType, SBTREE_ALGORITHM, new OSBTreeIndexEngine<OIdentifiable>(), valueContainerAlgorithm);
} else if (OClass.INDEX_TYPE.NOTUNIQUE.toString().equals(indexType)) {
return new OIndexNotUnique(indexType, SBTREE_ALGORITHM, new OSBTreeIndexEngine<Set<OIdentifiable>>(), valueContainerAlgorithm);
} else if (OClass.INDEX_TYPE.FULLTEXT.toString().equals(indexType)) {
return new OIndexFullText(indexType, SBTREE_ALGORITHM, new OSBTreeIndexEngine<Set<OIdentifiable>>(), valueContainerAlgorithm);
} else if (OClass.INDEX_TYPE.DICTIONARY.toString().equals(indexType)) {
return new OIndexDictionary(indexType, SBTREE_ALGORITHM, new OSBTreeIndexEngine<OIdentifiable>(), valueContainerAlgorithm);
}
throw new OConfigurationException("Unsupported type : " + indexType);
}
} | 1no label
| core_src_main_java_com_orientechnologies_orient_core_index_ODefaultIndexFactory.java |
265 | public interface EmailTrackingManager {
public Long createTrackedEmail(String emailAddress, String type, String extraValue);
public void recordOpen (Long emailId, Map<String, String> extraValues);
public void recordClick(Long emailId , Map<String, String> parameterMap, String customerId, Map<String, String> extraValues);
} | 0true
| common_src_main_java_org_broadleafcommerce_common_email_service_EmailTrackingManager.java |
1,336 | final ExecutionCallback callback = new ExecutionCallback() {
public void onResponse(Object response) {
if (response == null)
count.incrementAndGet();
latch.countDown();
}
public void onFailure(Throwable t) {
}
}; | 0true
| hazelcast_src_test_java_com_hazelcast_executor_ExecutorServiceTest.java |
125 | final EntryAdapter<String, String> listener = new EntryAdapter<String, String>() {
public void onEntryEvent(EntryEvent<String, String> event) {
latch.countDown();
}
}; | 0true
| hazelcast-client_src_test_java_com_hazelcast_client_ClientReconnectTest.java |
116 | public class NullPageDTO extends PageDTO {
} | 0true
| admin_broadleaf-contentmanagement-module_src_main_java_org_broadleafcommerce_cms_page_dto_NullPageDTO.java |
1,528 | @Component("blRemoveFacetValuesLinkProcessor")
public class RemoveFacetValuesLinkProcessor extends AbstractAttributeModifierAttrProcessor {
/**
* Sets the name of this processor to be used in Thymeleaf template
*/
public RemoveFacetValuesLinkProcessor() {
super("removefacetvalueslink");
}
@Override
public int getPrecedence() {
return 10000;
}
@Override
@SuppressWarnings("unchecked")
protected Map<String, String> getModifiedAttributeValues(Arguments arguments, Element element, String attributeName) {
Map<String, String> attrs = new HashMap<String, String>();
BroadleafRequestContext blcContext = BroadleafRequestContext.getBroadleafRequestContext();
HttpServletRequest request = blcContext.getRequest();
String baseUrl = request.getRequestURL().toString();
Map<String, String[]> params = new HashMap<String, String[]>(request.getParameterMap());
SearchFacetDTO facet = (SearchFacetDTO) StandardExpressionProcessor.processExpression(arguments, element.getAttributeValue(attributeName));
String key = facet.getFacet().getField().getAbbreviation();
params.remove(key);
params.remove(ProductSearchCriteria.PAGE_NUMBER);
String url = ProcessorUtils.getUrl(baseUrl, params);
attrs.put("href", url);
return attrs;
}
@Override
protected ModificationType getModificationType(Arguments arguments, Element element, String attributeName, String newAttributeName) {
return ModificationType.SUBSTITUTION;
}
@Override
protected boolean removeAttributeIfEmpty(Arguments arguments, Element element, String attributeName, String newAttributeName) {
return true;
}
@Override
protected boolean recomputeProcessorsAfterExecution(Arguments arguments, Element element, String attributeName) {
return false;
}
} | 0true
| core_broadleaf-framework-web_src_main_java_org_broadleafcommerce_core_web_processor_RemoveFacetValuesLinkProcessor.java |
313 | public class ClusterShardHealth implements Streamable {
private int shardId;
ClusterHealthStatus status = ClusterHealthStatus.RED;
int activeShards = 0;
int relocatingShards = 0;
int initializingShards = 0;
int unassignedShards = 0;
boolean primaryActive = false;
private ClusterShardHealth() {
}
ClusterShardHealth(int shardId) {
this.shardId = shardId;
}
public int getId() {
return shardId;
}
public ClusterHealthStatus getStatus() {
return status;
}
public int getRelocatingShards() {
return relocatingShards;
}
public int getActiveShards() {
return activeShards;
}
public boolean isPrimaryActive() {
return primaryActive;
}
public int getInitializingShards() {
return initializingShards;
}
public int getUnassignedShards() {
return unassignedShards;
}
static ClusterShardHealth readClusterShardHealth(StreamInput in) throws IOException {
ClusterShardHealth ret = new ClusterShardHealth();
ret.readFrom(in);
return ret;
}
@Override
public void readFrom(StreamInput in) throws IOException {
shardId = in.readVInt();
status = ClusterHealthStatus.fromValue(in.readByte());
activeShards = in.readVInt();
relocatingShards = in.readVInt();
initializingShards = in.readVInt();
unassignedShards = in.readVInt();
primaryActive = in.readBoolean();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeVInt(shardId);
out.writeByte(status.value());
out.writeVInt(activeShards);
out.writeVInt(relocatingShards);
out.writeVInt(initializingShards);
out.writeVInt(unassignedShards);
out.writeBoolean(primaryActive);
}
} | 0true
| src_main_java_org_elasticsearch_action_admin_cluster_health_ClusterShardHealth.java |
2,380 | MEGA {
@Override
public long toSingles(long size) {
return x(size, C2 / C0, MAX / (C2 / C0));
}
@Override
public long toKilo(long size) {
return x(size, C2 / C1, MAX / (C2 / C1));
}
@Override
public long toMega(long size) {
return size;
}
@Override
public long toGiga(long size) {
return size / (C3 / C2);
}
@Override
public long toTera(long size) {
return size / (C4 / C2);
}
@Override
public long toPeta(long size) {
return size / (C5 / C2);
}
}, | 0true
| src_main_java_org_elasticsearch_common_unit_SizeUnit.java |
3,845 | public class GeoPolygonFilterParser implements FilterParser {
public static final String NAME = "geo_polygon";
public static final String POINTS = "points";
@Inject
public GeoPolygonFilterParser() {
}
@Override
public String[] names() {
return new String[]{NAME, "geoPolygon"};
}
@Override
public Filter parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
XContentParser parser = parseContext.parser();
boolean cache = false;
CacheKeyFilter.Key cacheKey = null;
String fieldName = null;
List<GeoPoint> shell = Lists.newArrayList();
boolean normalizeLon = true;
boolean normalizeLat = true;
String filterName = null;
String currentFieldName = null;
XContentParser.Token token;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.START_OBJECT) {
fieldName = currentFieldName;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.START_ARRAY) {
if(POINTS.equals(currentFieldName)) {
while((token = parser.nextToken()) != Token.END_ARRAY) {
shell.add(GeoPoint.parse(parser));
}
} else {
throw new QueryParsingException(parseContext.index(), "[geo_polygon] filter does not support [" + currentFieldName + "]");
}
}
}
} else if (token.isValue()) {
if ("_name".equals(currentFieldName)) {
filterName = parser.text();
} else if ("_cache".equals(currentFieldName)) {
cache = parser.booleanValue();
} else if ("_cache_key".equals(currentFieldName) || "_cacheKey".equals(currentFieldName)) {
cacheKey = new CacheKeyFilter.Key(parser.text());
} else if ("normalize".equals(currentFieldName)) {
normalizeLat = parser.booleanValue();
normalizeLon = parser.booleanValue();
} else {
throw new QueryParsingException(parseContext.index(), "[geo_polygon] filter does not support [" + currentFieldName + "]");
}
}
}
if (shell.isEmpty()) {
throw new QueryParsingException(parseContext.index(), "no points defined for geo_polygon filter");
} else {
if(shell.size() < 3) {
throw new QueryParsingException(parseContext.index(), "to few points defined for geo_polygon filter");
}
GeoPoint start = shell.get(0);
if(!start.equals(shell.get(shell.size()-1))) {
shell.add(start);
}
if(shell.size() < 4) {
throw new QueryParsingException(parseContext.index(), "to few points defined for geo_polygon filter");
}
}
if (normalizeLat || normalizeLon) {
for (GeoPoint point : shell) {
GeoUtils.normalizePoint(point, normalizeLat, normalizeLon);
}
}
MapperService.SmartNameFieldMappers smartMappers = parseContext.smartFieldMappers(fieldName);
if (smartMappers == null || !smartMappers.hasMapper()) {
throw new QueryParsingException(parseContext.index(), "failed to find geo_point field [" + fieldName + "]");
}
FieldMapper<?> mapper = smartMappers.mapper();
if (!(mapper instanceof GeoPointFieldMapper)) {
throw new QueryParsingException(parseContext.index(), "field [" + fieldName + "] is not a geo_point field");
}
IndexGeoPointFieldData<?> indexFieldData = parseContext.fieldData().getForField(mapper);
Filter filter = new GeoPolygonFilter(indexFieldData, shell.toArray(new GeoPoint[shell.size()]));
if (cache) {
filter = parseContext.cacheFilter(filter, cacheKey);
}
filter = wrapSmartNameFilter(filter, smartMappers, parseContext);
if (filterName != null) {
parseContext.addNamedFilter(filterName, filter);
}
return filter;
}
} | 1no label
| src_main_java_org_elasticsearch_index_query_GeoPolygonFilterParser.java |
1,559 | public class MoveAllocationCommand implements AllocationCommand {
public static final String NAME = "move";
public static class Factory implements AllocationCommand.Factory<MoveAllocationCommand> {
@Override
public MoveAllocationCommand readFrom(StreamInput in) throws IOException {
return new MoveAllocationCommand(ShardId.readShardId(in), in.readString(), in.readString());
}
@Override
public void writeTo(MoveAllocationCommand command, StreamOutput out) throws IOException {
command.shardId().writeTo(out);
out.writeString(command.fromNode());
out.writeString(command.toNode());
}
@Override
public MoveAllocationCommand fromXContent(XContentParser parser) throws IOException {
String index = null;
int shardId = -1;
String fromNode = null;
String toNode = null;
String currentFieldName = null;
XContentParser.Token token;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token.isValue()) {
if ("index".equals(currentFieldName)) {
index = parser.text();
} else if ("shard".equals(currentFieldName)) {
shardId = parser.intValue();
} else if ("from_node".equals(currentFieldName) || "fromNode".equals(currentFieldName)) {
fromNode = parser.text();
} else if ("to_node".equals(currentFieldName) || "toNode".equals(currentFieldName)) {
toNode = parser.text();
} else {
throw new ElasticsearchParseException("[move] command does not support field [" + currentFieldName + "]");
}
} else {
throw new ElasticsearchParseException("[move] command does not support complex json tokens [" + token + "]");
}
}
if (index == null) {
throw new ElasticsearchParseException("[move] command missing the index parameter");
}
if (shardId == -1) {
throw new ElasticsearchParseException("[move] command missing the shard parameter");
}
if (fromNode == null) {
throw new ElasticsearchParseException("[move] command missing the from_node parameter");
}
if (toNode == null) {
throw new ElasticsearchParseException("[move] command missing the to_node parameter");
}
return new MoveAllocationCommand(new ShardId(index, shardId), fromNode, toNode);
}
@Override
public void toXContent(MoveAllocationCommand command, XContentBuilder builder, ToXContent.Params params) throws IOException {
builder.startObject();
builder.field("index", command.shardId().index());
builder.field("shard", command.shardId().id());
builder.field("from_node", command.fromNode());
builder.field("to_node", command.toNode());
builder.endObject();
}
}
private final ShardId shardId;
private final String fromNode;
private final String toNode;
public MoveAllocationCommand(ShardId shardId, String fromNode, String toNode) {
this.shardId = shardId;
this.fromNode = fromNode;
this.toNode = toNode;
}
@Override
public String name() {
return NAME;
}
public ShardId shardId() {
return this.shardId;
}
public String fromNode() {
return this.fromNode;
}
public String toNode() {
return this.toNode;
}
@Override
public void execute(RoutingAllocation allocation) throws ElasticsearchException {
DiscoveryNode fromDiscoNode = allocation.nodes().resolveNode(fromNode);
DiscoveryNode toDiscoNode = allocation.nodes().resolveNode(toNode);
boolean found = false;
for (MutableShardRouting shardRouting : allocation.routingNodes().node(fromDiscoNode.id())) {
if (!shardRouting.shardId().equals(shardId)) {
continue;
}
found = true;
// TODO we can possibly support also relocating cases, where we cancel relocation and move...
if (!shardRouting.started()) {
throw new ElasticsearchIllegalArgumentException("[move_allocation] can't move " + shardId + ", shard is not started (state = " + shardRouting.state() + "]");
}
RoutingNode toRoutingNode = allocation.routingNodes().node(toDiscoNode.id());
Decision decision = allocation.deciders().canAllocate(shardRouting, toRoutingNode, allocation);
if (decision.type() == Decision.Type.NO) {
throw new ElasticsearchIllegalArgumentException("[move_allocation] can't move " + shardId + ", from " + fromDiscoNode + ", to " + toDiscoNode + ", since its not allowed, reason: " + decision);
}
if (decision.type() == Decision.Type.THROTTLE) {
// its being throttled, maybe have a flag to take it into account and fail? for now, just do it since the "user" wants it...
}
allocation.routingNodes().assign(new MutableShardRouting(shardRouting.index(), shardRouting.id(),
toRoutingNode.nodeId(), shardRouting.currentNodeId(), shardRouting.restoreSource(),
shardRouting.primary(), ShardRoutingState.INITIALIZING, shardRouting.version() + 1), toRoutingNode.nodeId());
allocation.routingNodes().relocate(shardRouting, toRoutingNode.nodeId());
}
if (!found) {
throw new ElasticsearchIllegalArgumentException("[move_allocation] can't move " + shardId + ", failed to find it on node " + fromDiscoNode);
}
}
} | 0true
| src_main_java_org_elasticsearch_cluster_routing_allocation_command_MoveAllocationCommand.java |
3,538 | public class CompoundTypesTests extends ElasticsearchTestCase {
@Test
public void testStringType() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties")
.startObject("field1").field("type", "string").endObject()
.endObject()
.endObject().endObject().string();
DocumentMapper defaultMapper = MapperTestUtils.newParser().parse(mapping);
ParsedDocument doc = defaultMapper.parse("type", "1", XContentFactory.jsonBuilder()
.startObject()
.field("field1", "value1")
.field("field2", "value2")
.bytes());
assertThat(doc.rootDoc().get("field1"), equalTo("value1"));
assertThat((double) doc.rootDoc().getField("field1").boost(), closeTo(1.0d, 0.000001d));
assertThat(doc.rootDoc().get("field2"), equalTo("value2"));
doc = defaultMapper.parse("type", "1", XContentFactory.jsonBuilder()
.startObject()
.startObject("field1").field("value", "value1").field("boost", 2.0f).endObject()
.field("field2", "value2")
.bytes());
assertThat(doc.rootDoc().get("field1"), equalTo("value1"));
assertThat((double) doc.rootDoc().getField("field1").boost(), closeTo(2.0d, 0.000001d));
assertThat(doc.rootDoc().get("field2"), equalTo("value2"));
doc = defaultMapper.parse("type", "1", XContentFactory.jsonBuilder()
.startObject()
.field("field1", "value1")
.field("field2", "value2")
.bytes());
assertThat(doc.rootDoc().get("field1"), equalTo("value1"));
assertThat((double) doc.rootDoc().getField("field1").boost(), closeTo(1.0d, 0.000001d));
assertThat(doc.rootDoc().get("field2"), equalTo("value2"));
}
} | 0true
| src_test_java_org_elasticsearch_index_mapper_compound_CompoundTypesTests.java |
383 | static class CountDownValueNotNullListener extends MyEntryListener{
public CountDownValueNotNullListener(int latchCount){
super(latchCount);
}
public CountDownValueNotNullListener(int addlatchCount, int removeLatchCount){
super(addlatchCount, removeLatchCount);
}
public void entryAdded(EntryEvent event) {
if(event.getValue() != null){
addLatch.countDown();
}
}
public void entryRemoved(EntryEvent event) {
if(event.getValue() != null){
removeLatch.countDown();
}
}
public void entryUpdated(EntryEvent event) {
if(event.getValue() != null){
updateLatch.countDown();
}
}
public void entryEvicted(EntryEvent event) {
if(event.getValue() != null){
evictLatch.countDown();
}
}
} | 0true
| hazelcast-client_src_test_java_com_hazelcast_client_multimap_ClientMultiMapListenersTest.java |
1,646 | public class ThreadDumpRequest implements ConsoleRequest {
private boolean dumpDeadlocks;
private Address target;
public ThreadDumpRequest() {
}
public ThreadDumpRequest(Address target, boolean dumpDeadlocks) {
this.target = target;
this.dumpDeadlocks = dumpDeadlocks;
}
@Override
public int getType() {
return ConsoleRequestConstants.REQUEST_TYPE_GET_THREAD_DUMP;
}
@Override
public void writeResponse(ManagementCenterService mcs, ObjectDataOutput dos) throws Exception {
String threadDump = (String) mcs.callOnAddress(target, new ThreadDumpOperation(dumpDeadlocks));
if (threadDump != null) {
dos.writeBoolean(true);
writeLongString(dos, threadDump);
} else {
dos.writeBoolean(false);
}
}
@Override
public String readResponse(ObjectDataInput in) throws IOException {
if (in.readBoolean()) {
return readLongString(in);
} else {
return null;
}
}
@Override
public void writeData(ObjectDataOutput out) throws IOException {
target.writeData(out);
out.writeBoolean(dumpDeadlocks);
}
@Override
public void readData(ObjectDataInput in) throws IOException {
target = new Address();
target.readData(in);
dumpDeadlocks = in.readBoolean();
}
} | 0true
| hazelcast_src_main_java_com_hazelcast_management_request_ThreadDumpRequest.java |
79 | public class ClientDestroyRequest extends CallableClientRequest implements Portable, RetryableRequest, SecureRequest {
private String name;
private String serviceName;
public ClientDestroyRequest() {
}
public ClientDestroyRequest(String name, String serviceName) {
this.name = name;
this.serviceName = serviceName;
}
@Override
public Object call() throws Exception {
ProxyService proxyService = getClientEngine().getProxyService();
proxyService.destroyDistributedObject(getServiceName(), name);
return null;
}
@Override
public String getServiceName() {
return serviceName;
}
@Override
public int getFactoryId() {
return ClientPortableHook.ID;
}
@Override
public int getClassId() {
return ClientPortableHook.DESTROY_PROXY;
}
@Override
public void write(PortableWriter writer) throws IOException {
writer.writeUTF("n", name);
writer.writeUTF("s", serviceName);
}
@Override
public void read(PortableReader reader) throws IOException {
name = reader.readUTF("n");
serviceName = reader.readUTF("s");
}
@Override
public Permission getRequiredPermission() {
return getPermission(name, serviceName, ActionConstants.ACTION_DESTROY);
}
} | 0true
| hazelcast_src_main_java_com_hazelcast_client_ClientDestroyRequest.java |
1,294 | new OCommandOutputListener() {
@Override
public void onMessage(String text) {
System.out.println(text);
}
}); | 0true
| core_src_test_java_com_orientechnologies_orient_core_storage_impl_local_paginated_LocalPaginatedStorageRestoreFromWALAndAddAdditionalRecords.java |
422 | static final class Fields {
static final XContentBuilderString SNAPSHOT = new XContentBuilderString("snapshot");
static final XContentBuilderString ACCEPTED = new XContentBuilderString("accepted");
} | 0true
| src_main_java_org_elasticsearch_action_admin_cluster_snapshots_restore_RestoreSnapshotResponse.java |
2,864 | public class GermanAnalyzerProvider extends AbstractIndexAnalyzerProvider<GermanAnalyzer> {
private final GermanAnalyzer analyzer;
@Inject
public GermanAnalyzerProvider(Index index, @IndexSettings Settings indexSettings, Environment env, @Assisted String name, @Assisted Settings settings) {
super(index, indexSettings, name, settings);
analyzer = new GermanAnalyzer(version,
Analysis.parseStopWords(env, settings, GermanAnalyzer.getDefaultStopSet(), version),
Analysis.parseStemExclusion(settings, CharArraySet.EMPTY_SET, version));
}
@Override
public GermanAnalyzer get() {
return this.analyzer;
}
} | 0true
| src_main_java_org_elasticsearch_index_analysis_GermanAnalyzerProvider.java |
3,681 | public class SizeFieldMapper extends IntegerFieldMapper implements RootMapper {
public static final String NAME = "_size";
public static final String CONTENT_TYPE = "_size";
public static class Defaults extends IntegerFieldMapper.Defaults {
public static final String NAME = CONTENT_TYPE;
public static final EnabledAttributeMapper ENABLED_STATE = EnabledAttributeMapper.DISABLED;
public static final FieldType SIZE_FIELD_TYPE = new FieldType(IntegerFieldMapper.Defaults.FIELD_TYPE);
static {
SIZE_FIELD_TYPE.freeze();
}
}
public static class Builder extends NumberFieldMapper.Builder<Builder, IntegerFieldMapper> {
protected EnabledAttributeMapper enabledState = EnabledAttributeMapper.UNSET_DISABLED;
public Builder() {
super(Defaults.NAME, new FieldType(Defaults.SIZE_FIELD_TYPE));
builder = this;
}
public Builder enabled(EnabledAttributeMapper enabled) {
this.enabledState = enabled;
return builder;
}
@Override
public SizeFieldMapper build(BuilderContext context) {
return new SizeFieldMapper(enabledState, fieldType, postingsProvider, docValuesProvider, fieldDataSettings, context.indexSettings());
}
}
public static class TypeParser implements Mapper.TypeParser {
@Override
public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
SizeFieldMapper.Builder builder = size();
for (Map.Entry<String, Object> entry : node.entrySet()) {
String fieldName = Strings.toUnderscoreCase(entry.getKey());
Object fieldNode = entry.getValue();
if (fieldName.equals("enabled")) {
builder.enabled(nodeBooleanValue(fieldNode) ? EnabledAttributeMapper.ENABLED : EnabledAttributeMapper.DISABLED);
} else if (fieldName.equals("store")) {
builder.store(parseStore(fieldName, fieldNode.toString()));
}
}
return builder;
}
}
private EnabledAttributeMapper enabledState;
public SizeFieldMapper() {
this(Defaults.ENABLED_STATE, new FieldType(Defaults.SIZE_FIELD_TYPE), null, null, null, ImmutableSettings.EMPTY);
}
public SizeFieldMapper(EnabledAttributeMapper enabled, FieldType fieldType, PostingsFormatProvider postingsProvider,
DocValuesFormatProvider docValuesProvider, @Nullable Settings fieldDataSettings, Settings indexSettings) {
super(new Names(Defaults.NAME), Defaults.PRECISION_STEP, Defaults.BOOST, fieldType, null, Defaults.NULL_VALUE,
Defaults.IGNORE_MALFORMED, Defaults.COERCE, postingsProvider, docValuesProvider, null, null, fieldDataSettings,
indexSettings, MultiFields.empty(), null);
this.enabledState = enabled;
}
@Override
public boolean hasDocValues() {
return false;
}
@Override
protected String contentType() {
return Defaults.NAME;
}
public boolean enabled() {
return this.enabledState.enabled;
}
@Override
public void validate(ParseContext context) throws MapperParsingException {
}
@Override
public void preParse(ParseContext context) throws IOException {
}
@Override
public void postParse(ParseContext context) throws IOException {
// we post parse it so we get the size stored, possibly compressed (source will be preParse)
super.parse(context);
}
@Override
public void parse(ParseContext context) throws IOException {
// nothing to do here, we call the parent in postParse
}
@Override
public boolean includeInObject() {
return false;
}
@Override
protected void innerParseCreateField(ParseContext context, List<Field> fields) throws IOException {
if (!enabledState.enabled) {
return;
}
if (context.flyweight()) {
return;
}
fields.add(new CustomIntegerNumericField(this, context.source().length(), fieldType));
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
boolean includeDefaults = params.paramAsBoolean("include_defaults", false);
// all are defaults, no need to write it at all
if (!includeDefaults && enabledState == Defaults.ENABLED_STATE && fieldType().stored() == Defaults.SIZE_FIELD_TYPE.stored()) {
return builder;
}
builder.startObject(contentType());
if (includeDefaults || enabledState != Defaults.ENABLED_STATE) {
builder.field("enabled", enabledState.enabled);
}
if (includeDefaults || fieldType().stored() != Defaults.SIZE_FIELD_TYPE.stored() && enabledState.enabled) {
builder.field("store", fieldType().stored());
}
builder.endObject();
return builder;
}
@Override
public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
SizeFieldMapper sizeFieldMapperMergeWith = (SizeFieldMapper) mergeWith;
if (!mergeContext.mergeFlags().simulate()) {
if (sizeFieldMapperMergeWith.enabledState != enabledState && !sizeFieldMapperMergeWith.enabledState.unset()) {
this.enabledState = sizeFieldMapperMergeWith.enabledState;
}
}
}
} | 1no label
| src_main_java_org_elasticsearch_index_mapper_internal_SizeFieldMapper.java |
636 | public final class ClientPingRequest extends CallableClientRequest {
public Object call() throws Exception {
return null;
}
public String getServiceName() {
return null;
}
public int getFactoryId() {
return ClientPortableHook.ID;
}
public int getClassId() {
return ClientPortableHook.CLIENT_PING;
}
@Override
public Permission getRequiredPermission() {
return null;
}
} | 0true
| hazelcast_src_main_java_com_hazelcast_cluster_client_ClientPingRequest.java |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.