Unnamed: 0 (int64, 0-6.45k) | func (string, lengths 29-253k) | target (class label, 2 classes) | project (string, lengths 36-167)
---|---|---|---|
1,031 |
public interface ConfigBuilder {
/**
* Builds Config object.
*
* @return Built Config object
*/
Config build();
}
| 0 (true)
|
hazelcast_src_main_java_com_hazelcast_config_ConfigBuilder.java
|
1,088 |
public enum MemberGroupType {
HOST_AWARE, CUSTOM, PER_MEMBER
}
| 0 (true)
|
hazelcast_src_main_java_com_hazelcast_config_PartitionGroupConfig.java
|
2,039 |
Factory<Field> FIELDS = new Factory<Field>() {
public Field[] getMembers(Class<?> type) {
return type.getDeclaredFields();
}
public InjectionPoint create(TypeLiteral<?> typeLiteral, Field member, Errors errors) {
return new InjectionPoint(typeLiteral, member);
}
};
| 0 (true)
|
src_main_java_org_elasticsearch_common_inject_spi_InjectionPoint.java
|
167 |
public class TestLogPruning
{
private GraphDatabaseAPI db;
private FileSystemAbstraction fs;
@After
public void after() throws Exception
{
if ( db != null )
{
db.shutdown();
}
}
@Test
public void noPruning() throws Exception
{
newDb( "true" );
for ( int i = 0; i < 100; i++ )
{
doTransaction();
rotate();
assertEquals( i+1, logCount() );
}
}
@Test
public void pruneByFileSize() throws Exception
{
// Given
int size = 1050;
newDb( size + " size" );
doTransaction();
rotate();
long sizeOfOneLog = fs.getFileSize( neoDataSource()
.getXaContainer().getLogicalLog().getFileName( 0 ) );
int filesNeededToExceedPruneLimit = (int) Math.ceil( (double) size / (double) sizeOfOneLog );
// When
for ( int i = 1; i < filesNeededToExceedPruneLimit*2; i++ )
{
doTransaction();
rotate();
// Then
assertEquals( Math.min( i+1, filesNeededToExceedPruneLimit ), logCount() );
}
}
private NeoStoreXaDataSource neoDataSource()
{
return db.getDependencyResolver().resolveDependency( XaDataSourceManager.class ).getNeoStoreDataSource();
}
@Test
public void pruneByFileCount() throws Exception
{
int logsToKeep = 5;
newDb( logsToKeep + " files" );
for ( int i = 0; i < logsToKeep*2; i++ )
{
doTransaction();
rotate();
assertEquals( Math.min( i+1, logsToKeep ), logCount() );
}
}
@Test
public void pruneByTransactionCount() throws Exception
{
int transactionsToKeep = 100;
int txsPerLog = transactionsToKeep/10;
newDb( transactionsToKeep + " txs" );
for ( int i = 0; i < transactionsToKeep/txsPerLog*3; i++ )
{
for ( int j = 0; j < txsPerLog; j++ )
{
doTransaction();
}
rotate();
assertEquals( Math.min( i+1, transactionsToKeep/txsPerLog ), logCount() );
}
}
private GraphDatabaseAPI newDb( String logPruning )
{
GraphDatabaseAPI db = new ImpermanentGraphDatabase( stringMap( keep_logical_logs.name(), logPruning ) )
{
@Override
protected FileSystemAbstraction createFileSystemAbstraction()
{
return (fs = super.createFileSystemAbstraction());
}
};
this.db = db;
return db;
}
private void doTransaction()
{
Transaction tx = db.beginTx();
try
{
db.createNode();
tx.success();
}
finally
{
tx.finish();
}
}
private void rotate() throws Exception
{
neoDataSource().rotateLogicalLog();
}
private int logCount()
{
XaLogicalLog log = neoDataSource().getXaContainer().getLogicalLog();
int count = 0;
for ( long i = log.getHighestLogVersion()-1; i >= 0; i-- )
{
if ( fs.fileExists( log.getFileName( i ) ) )
{
count++;
}
else
{
break;
}
}
return count;
}
}
| 0 (true)
|
community_kernel_src_test_java_org_neo4j_kernel_impl_transaction_xaframework_TestLogPruning.java
|
2,035 |
public class AddIndexOperationFactory implements OperationFactory {
String name;
String attributeName;
boolean ordered;
public AddIndexOperationFactory() {
}
public AddIndexOperationFactory(String name, String attributeName, boolean ordered) {
this.name = name;
this.attributeName = attributeName;
this.ordered = ordered;
}
@Override
public Operation createOperation() {
return new AddIndexOperation(name, attributeName, ordered);
}
@Override
public void writeData(ObjectDataOutput out) throws IOException {
out.writeUTF(name);
out.writeUTF(attributeName);
out.writeBoolean(ordered);
}
@Override
public void readData(ObjectDataInput in) throws IOException {
name = in.readUTF();
attributeName = in.readUTF();
ordered = in.readBoolean();
}
}
| 0 (true)
|
hazelcast_src_main_java_com_hazelcast_map_operation_AddIndexOperationFactory.java
|
1,222 |
addOperation(operations, new Runnable() {
public void run() {
IQueue q = hazelcast.getQueue("myQ");
q.remainingCapacity();
}
}, 1);
| 0 (true)
|
hazelcast_src_main_java_com_hazelcast_examples_AllTest.java
|
943 |
public class OScheduler implements Runnable {
public final static String CLASSNAME = "OSchedule";
public static String PROP_NAME = "name";
public static String PROP_RULE = "rule";
public static String PROP_ARGUMENTS = "arguments";
public static String PROP_STATUS = "status";
public static String PROP_FUNC = "function";
public static String PROP_STARTTIME = "starttime";
public static String PROP_STARTED = "start";
private String name;
private String rule;
private Map<Object, Object> iArgs;
private String status;
private OFunction function;
private Date startTime;
private ODocument document;
private ODatabaseRecord db;
private boolean started;
private boolean isRunning = false;
public OScheduler(ODocument doc) {
this.name = doc.field(PROP_NAME);
this.rule = doc.field(PROP_RULE);
this.iArgs = doc.field(PROP_ARGUMENTS);
this.status = doc.field(PROP_STATUS);
// this.runAtStart = doc.field(PROP_RUN_ON_START) == null ? false : ((Boolean)doc.field(PROP_RUN_ON_START));
this.started = doc.field(PROP_STARTED) == null ? false : ((Boolean) doc.field(PROP_STARTED));
ODocument funcDoc = doc.field(PROP_FUNC);
if (funcDoc != null)
function = new OFunction(funcDoc);
else
throw new OCommandScriptException("function cannot be null");
this.startTime = doc.field(PROP_STARTTIME);
this.document = doc;
this.db = doc.getDatabase();
}
public String getSchedulingRule() {
return this.rule;
}
public String getSchduleName() {
return this.name;
}
public boolean isStarted() {
return this.started;
}
public void setStarted(boolean started) {
this.started = started;
}
public String getStatus() {
return status;
}
public Map<Object, Object> arguments() {
return this.iArgs;
}
public OFunction getFunction() {
return this.function;
}
public Date getStartTime() {
return this.startTime;
}
public void setStatus(String status) {
this.status = status;
}
public boolean isRunning() {
return this.isRunning;
}
public void resetDocument(ODocument doc) {
this.document = doc;
this.name = doc.field(PROP_NAME);
this.rule = doc.field(PROP_RULE);
this.iArgs = doc.field(PROP_ARGUMENTS);
this.status = doc.field(PROP_STATUS);
this.started = doc.field(PROP_STARTED) == null ? false : ((Boolean) doc.field(PROP_STARTED));
ODocument funcDoc = doc.field(PROP_FUNC);
if (funcDoc != null)
function = new OFunction(funcDoc);
else
throw new OCommandScriptException("function cannot be null");
this.startTime = doc.field(PROP_STARTTIME);
this.db = doc.getDatabase();
}
public String toString() {
String str = "OSchedule <name:" + this.name + ",rule:" + this.rule + ",current status:" + this.status + ",func:"
+ this.function.getName() + ",start:" + this.isStarted() + ">";
return str;
}
@Override
public void run() {
isRunning = true;
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss:SSS");
Date date = new Date(System.currentTimeMillis());
OLogManager.instance().warn(this, "execute : " + this.toString() + " at " + sdf.format(date));
ODatabaseRecordThreadLocal.INSTANCE.set(db);
this.document.field(PROP_STATUS, SCHEDULER_STATUS.RUNNING);
this.document.field(PROP_STARTTIME, System.currentTimeMillis());
this.document.save();
OScriptManager scriptManager = null;
Bindings binding = null;
try {
if (this.function == null)
return;
if (db != null && !(db instanceof ODatabaseRecordTx))
db = db.getUnderlying();
scriptManager = Orient.instance().getScriptManager();
final ScriptEngine scriptEngine = scriptManager.getEngine(this.function.getLanguage());
binding = scriptEngine.getBindings(ScriptContext.ENGINE_SCOPE);
for (OScriptInjection i : scriptManager.getInjections())
i.bind(binding);
binding.put("doc", this.document);
if (db != null)
binding.put("db", new OScriptDocumentDatabaseWrapper((ODatabaseRecordTx) db));
binding.put("orient", new OScriptOrientWrapper(db));
if (iArgs != null) {
for (Entry<Object, Object> a : iArgs.entrySet()) {
binding.put(a.getKey().toString(), a.getValue());
}
binding.put("params", iArgs.values().toArray());
} else {
binding.put("params", new Object[0]);
}
if (this.function.getLanguage() == null)
throw new OConfigurationException("Database function '" + this.function.getName() + "' has no language");
final String funcStr = scriptManager.getFunctionDefinition(this.function);
if (funcStr != null) {
try {
scriptEngine.eval(funcStr);
} catch (ScriptException e) {
scriptManager.getErrorMessage(e, funcStr);
}
}
if (scriptEngine instanceof Invocable) {
final Invocable invocableEngine = (Invocable) scriptEngine;
Object[] args = null;
if (iArgs != null) {
args = new Object[iArgs.size()];
int i = 0;
for (Entry<Object, Object> arg : iArgs.entrySet())
args[i++] = arg.getValue();
}
invocableEngine.invokeFunction(this.function.getName(), args);
}
} catch (ScriptException e) {
throw new OCommandScriptException("Error on execution of the script", this.function.getName(), e.getColumnNumber(), e);
} catch (NoSuchMethodException e) {
throw new OCommandScriptException("Error on execution of the script", this.function.getName(), 0, e);
} catch (OCommandScriptException e) {
throw e;
} catch (Exception ex) {
throw new OCommandScriptException("Unknown Exception", this.function.getName(), 0, ex);
} finally {
if (scriptManager != null && binding != null)
scriptManager.unbind(binding);
OLogManager.instance().warn(this, "Job : " + this.toString() + " Finished!");
isRunning = false;
this.document.field(PROP_STATUS, SCHEDULER_STATUS.WAITING);
this.document.save();
}
}
}
| 0 (true)
|
core_src_main_java_com_orientechnologies_orient_core_schedule_OScheduler.java
|
1,763 |
assertTrueEventually(new AssertTask() {
@Override
public void run() throws Exception {
assertEquals(expectedEntryCountAfterIdleEviction, map.size());
}
});
| 0 (true)
|
hazelcast_src_test_java_com_hazelcast_map_EvictionTest.java
|
200 |
public static class Order {
public static final int Audit = 99000;
}
| 0 (true)
|
common_src_main_java_org_broadleafcommerce_common_audit_Auditable.java
|
99 |
LifecycleListener listener = new LifecycleListener() {
public void stateChanged(LifecycleEvent event) {
final LifecycleState state = list.poll();
if (state != null && state.equals(event.getState())) {
latch.countDown();
}
}
};
| 0 (true)
|
hazelcast-client_src_test_java_com_hazelcast_client_ClientIssueTest.java
|
651 |
private final class KeysIterable implements Iterable<Object> {
@Override
public Iterator<Object> iterator() {
final EntriesIterator entriesIterator = new EntriesIterator();
return new Iterator<Object>() {
@Override
public boolean hasNext() {
return entriesIterator.hasNext();
}
@Override
public Object next() {
return entriesIterator.next().getKey();
}
@Override
public void remove() {
entriesIterator.remove();
}
};
}
}
| 0 (true)
|
core_src_main_java_com_orientechnologies_orient_core_index_engine_OLocalHashTableIndexEngine.java
|
1,131 |
public class OSQLMethodAppend extends OAbstractSQLMethod {
public static final String NAME = "append";
public OSQLMethodAppend() {
super(NAME, 1);
}
@Override
public Object execute(OIdentifiable iRecord, OCommandContext iContext, Object ioResult, Object[] iMethodParams) {
final Object v = getParameterValue(iRecord, iMethodParams[0].toString());
if (v != null) {
ioResult = ioResult != null ? ioResult.toString() + v : null;
}
return ioResult;
}
}
| 0 (true)
|
core_src_main_java_com_orientechnologies_orient_core_sql_method_misc_OSQLMethodAppend.java
|
1,017 |
class AsyncSingleAction {
private final ActionListener<Response> listener;
private final Request request;
private ShardIterator shardIt;
private DiscoveryNodes nodes;
private final AtomicBoolean operationStarted = new AtomicBoolean();
private AsyncSingleAction(Request request, ActionListener<Response> listener) {
this.request = request;
this.listener = listener;
}
public void start() {
start(false);
}
public boolean start(final boolean fromClusterEvent) throws ElasticsearchException {
final ClusterState clusterState = clusterService.state();
nodes = clusterState.nodes();
try {
ClusterBlockException blockException = checkGlobalBlock(clusterState, request);
if (blockException != null) {
if (blockException.retryable()) {
retry(fromClusterEvent, blockException);
return false;
} else {
throw blockException;
}
}
// check if we need to execute, and if not, return
if (!resolveRequest(clusterState, request, listener)) {
return true;
}
blockException = checkRequestBlock(clusterState, request);
if (blockException != null) {
if (blockException.retryable()) {
retry(fromClusterEvent, blockException);
return false;
} else {
throw blockException;
}
}
shardIt = shards(clusterState, request);
} catch (Throwable e) {
listener.onFailure(e);
return true;
}
// no shardIt, might be in the case between index gateway recovery and shardIt initialization
if (shardIt.size() == 0) {
retry(fromClusterEvent, null);
return false;
}
// this transport only make sense with an iterator that returns a single shard routing (like primary)
assert shardIt.size() == 1;
ShardRouting shard = shardIt.nextOrNull();
assert shard != null;
if (!shard.active()) {
retry(fromClusterEvent, null);
return false;
}
if (!operationStarted.compareAndSet(false, true)) {
return true;
}
request.shardId = shardIt.shardId().id();
if (shard.currentNodeId().equals(nodes.localNodeId())) {
request.beforeLocalFork();
try {
threadPool.executor(executor).execute(new Runnable() {
@Override
public void run() {
try {
shardOperation(request, listener);
} catch (Throwable e) {
if (retryOnFailure(e)) {
operationStarted.set(false);
// we already marked it as started when we executed it (removed the listener) so pass false
// to re-add to the cluster listener
retry(false, null);
} else {
listener.onFailure(e);
}
}
}
});
} catch (Throwable e) {
if (retryOnFailure(e)) {
retry(fromClusterEvent, null);
} else {
listener.onFailure(e);
}
}
} else {
DiscoveryNode node = nodes.get(shard.currentNodeId());
transportService.sendRequest(node, transportAction, request, transportOptions(), new BaseTransportResponseHandler<Response>() {
@Override
public Response newInstance() {
return newResponse();
}
@Override
public String executor() {
return ThreadPool.Names.SAME;
}
@Override
public void handleResponse(Response response) {
listener.onResponse(response);
}
@Override
public void handleException(TransportException exp) {
// if we got disconnected from the node, or the node / shard is not in the right state (being closed)
if (exp.unwrapCause() instanceof ConnectTransportException || exp.unwrapCause() instanceof NodeClosedException ||
retryOnFailure(exp)) {
operationStarted.set(false);
// we already marked it as started when we executed it (removed the listener) so pass false
// to re-add to the cluster listener
retry(false, null);
} else {
listener.onFailure(exp);
}
}
});
}
return true;
}
void retry(final boolean fromClusterEvent, final @Nullable Throwable failure) {
if (!fromClusterEvent) {
// make it threaded operation so we fork on the discovery listener thread
request.beforeLocalFork();
clusterService.add(request.timeout(), new TimeoutClusterStateListener() {
@Override
public void postAdded() {
if (start(true)) {
// if we managed to start and perform the operation on the primary, we can remove this listener
clusterService.remove(this);
}
}
@Override
public void onClose() {
clusterService.remove(this);
listener.onFailure(new NodeClosedException(nodes.localNode()));
}
@Override
public void clusterChanged(ClusterChangedEvent event) {
if (start(true)) {
// if we managed to start and perform the operation on the primary, we can remove this listener
clusterService.remove(this);
}
}
@Override
public void onTimeout(TimeValue timeValue) {
// just to be on the safe side, see if we can start it now?
if (start(true)) {
clusterService.remove(this);
return;
}
clusterService.remove(this);
Throwable listenFailure = failure;
if (listenFailure == null) {
if (shardIt == null) {
listenFailure = new UnavailableShardsException(new ShardId(request.index(), -1), "Timeout waiting for [" + timeValue + "], request: " + request.toString());
} else {
listenFailure = new UnavailableShardsException(shardIt.shardId(), "[" + shardIt.size() + "] shardIt, [" + shardIt.sizeActive() + "] active : Timeout waiting for [" + timeValue + "], request: " + request.toString());
}
}
listener.onFailure(listenFailure);
}
});
}
}
}
| 0 (true)
|
src_main_java_org_elasticsearch_action_support_single_instance_TransportInstanceSingleOperationAction.java
|
635 |
public class OIndexUnique extends OIndexOneValue {
public OIndexUnique(String typeId, String algorithm, OIndexEngine<OIdentifiable> engine, String valueContainerAlgorithm) {
super(typeId, algorithm, engine, valueContainerAlgorithm);
}
@Override
public OIndexOneValue put(Object key, final OIdentifiable iSingleValue) {
checkForRebuild();
key = getCollatingValue(key);
modificationLock.requestModificationLock();
try {
acquireExclusiveLock();
try {
checkForKeyType(key);
final OIdentifiable value = indexEngine.get(key);
if (value != null) {
// CHECK IF THE ID IS THE SAME OF CURRENT: THIS IS THE UPDATE CASE
if (!value.equals(iSingleValue))
throw new ORecordDuplicatedException(String.format(
"Cannot index record %s: found duplicated key '%s' in index '%s' previously assigned to the record %s",
iSingleValue.getIdentity(), key, getName(), value.getIdentity()), value.getIdentity());
else
return this;
}
if (!iSingleValue.getIdentity().isPersistent())
((ORecord<?>) iSingleValue.getRecord()).save();
indexEngine.put(key, iSingleValue.getIdentity());
return this;
} finally {
releaseExclusiveLock();
}
} finally {
modificationLock.releaseModificationLock();
}
}
@Override
protected void putInSnapshot(Object key, OIdentifiable value, Map<Object, Object> snapshot) {
key = getCollatingValue(key);
Object snapshotValue = snapshot.get(key);
if (snapshotValue == null) {
final OIdentifiable storedValue = indexEngine.get(key);
final Set<OIdentifiable> values = new LinkedHashSet<OIdentifiable>();
if (storedValue != null)
values.add(storedValue.getIdentity());
values.add(value.getIdentity());
snapshot.put(key, values);
} else if (snapshotValue instanceof Set) {
final Set<OIdentifiable> values = (Set<OIdentifiable>) snapshotValue;
values.add(value.getIdentity());
} else {
final Set<OIdentifiable> values = new LinkedHashSet<OIdentifiable>();
values.add(value);
snapshot.put(key, values);
}
}
@Override
protected void removeFromSnapshot(Object key, OIdentifiable value, Map<Object, Object> snapshot) {
key = getCollatingValue(key);
Object snapshotValue = snapshot.get(key);
if (snapshotValue instanceof Set) {
final Set<OIdentifiable> values = (Set<OIdentifiable>) snapshotValue;
if (values.isEmpty())
snapshot.put(key, RemovedValue.INSTANCE);
else
values.remove(value);
} else
snapshot.put(key, RemovedValue.INSTANCE);
}
@Override
protected void commitSnapshot(Map<Object, Object> snapshot) {
for (Map.Entry<Object, Object> snapshotEntry : snapshot.entrySet()) {
Object key = snapshotEntry.getKey();
checkForKeyType(key);
Object snapshotValue = snapshotEntry.getValue();
if (snapshotValue instanceof Set) {
Set<OIdentifiable> values = (Set<OIdentifiable>) snapshotValue;
if (values.isEmpty())
continue;
final Iterator<OIdentifiable> valuesIterator = values.iterator();
if (values.size() > 1) {
final OIdentifiable valueOne = valuesIterator.next();
final OIdentifiable valueTwo = valuesIterator.next();
throw new ORecordDuplicatedException(String.format(
"Cannot index record %s: found duplicated key '%s' in index '%s' previously assigned to the record %s",
valueTwo.getIdentity(), key, getName(), valueOne.getIdentity()), valueOne.getIdentity());
}
final OIdentifiable value = valuesIterator.next();
indexEngine.put(key, value.getIdentity());
} else if (snapshotValue.equals(RemovedValue.INSTANCE))
indexEngine.remove(key);
else
assert false : "Provided value can not be committed";
}
}
@Override
public boolean canBeUsedInEqualityOperators() {
return true;
}
@Override
public boolean supportsOrderedIterations() {
return indexEngine.hasRangeQuerySupport();
}
}
| 1 (no label)
|
core_src_main_java_com_orientechnologies_orient_core_index_OIndexUnique.java
|
299 |
public class OTraverseMultiValueDepthFirstProcess extends OTraverseAbstractProcess<Iterator<Object>> {
protected Object value;
protected int index = -1;
public OTraverseMultiValueDepthFirstProcess(final OTraverse iCommand, final Iterator<Object> iTarget) {
super(iCommand, iTarget);
}
public OIdentifiable process() {
while (target.hasNext()) {
value = target.next();
index++;
if (value instanceof OIdentifiable) {
final ORecord<?> rec = ((OIdentifiable) value).getRecord();
if (rec instanceof ODocument) {
final OTraverseRecordProcess subProcess = new OTraverseRecordProcess(command, (ODocument) rec);
final OIdentifiable subValue = subProcess.process();
if (subValue != null)
return subValue;
}
}
}
return drop();
}
@Override
public String getStatus() {
return toString();
}
@Override
public String toString() {
return "[idx:" + index + "]";
}
}
| 0 (true)
|
core_src_main_java_com_orientechnologies_orient_core_command_traverse_OTraverseMultiValueDepthFirstProcess.java
|
2,346 |
public class StringAndBytesText implements Text {
public static final Text[] EMPTY_ARRAY = new Text[0];
public static Text[] convertFromStringArray(String[] strings) {
if (strings.length == 0) {
return EMPTY_ARRAY;
}
Text[] texts = new Text[strings.length];
for (int i = 0; i < strings.length; i++) {
texts[i] = new StringAndBytesText(strings[i]);
}
return texts;
}
private BytesReference bytes;
private String text;
private int hash;
public StringAndBytesText(BytesReference bytes) {
this.bytes = bytes;
}
public StringAndBytesText(String text) {
this.text = text;
}
@Override
public boolean hasBytes() {
return bytes != null;
}
@Override
public BytesReference bytes() {
if (bytes == null) {
bytes = new BytesArray(text.getBytes(Charsets.UTF_8));
}
return bytes;
}
@Override
public boolean hasString() {
return text != null;
}
@Override
public String string() {
// TODO: we can optimize the conversion based on the bytes reference API similar to UnicodeUtil
if (text == null) {
if (!bytes.hasArray()) {
bytes = bytes.toBytesArray();
}
text = new String(bytes.array(), bytes.arrayOffset(), bytes.length(), Charsets.UTF_8);
}
return text;
}
@Override
public String toString() {
return string();
}
@Override
public int hashCode() {
if (hash == 0) {
hash = bytes().hashCode();
}
return hash;
}
@Override
public boolean equals(Object obj) {
return bytes().equals(((Text) obj).bytes());
}
@Override
public int compareTo(Text text) {
return UTF8SortedAsUnicodeComparator.utf8SortedAsUnicodeSortOrder.compare(bytes(), text.bytes());
}
}
| 0 (true)
|
src_main_java_org_elasticsearch_common_text_StringAndBytesText.java
|
255 |
public interface OCollate extends Comparator<Object> {
public String getName();
public Object transform(Object obj);
}
| 0 (true)
|
core_src_main_java_com_orientechnologies_orient_core_collate_OCollate.java
|
2,547 |
public class VectorHighlighterTests extends ElasticsearchTestCase {
@Test
public void testVectorHighlighter() throws Exception {
Directory dir = new RAMDirectory();
IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(Lucene.VERSION, Lucene.STANDARD_ANALYZER));
Document document = new Document();
document.add(new TextField("_id", "1", Field.Store.YES));
document.add(new Field("content", "the big bad dog", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
indexWriter.addDocument(document);
IndexReader reader = DirectoryReader.open(indexWriter, true);
IndexSearcher searcher = new IndexSearcher(reader);
TopDocs topDocs = searcher.search(new TermQuery(new Term("_id", "1")), 1);
assertThat(topDocs.totalHits, equalTo(1));
FastVectorHighlighter highlighter = new FastVectorHighlighter();
String fragment = highlighter.getBestFragment(highlighter.getFieldQuery(new TermQuery(new Term("content", "bad"))),
reader, topDocs.scoreDocs[0].doc, "content", 30);
assertThat(fragment, notNullValue());
assertThat(fragment, equalTo("the big <b>bad</b> dog"));
}
@Test
public void testVectorHighlighterPrefixQuery() throws Exception {
Directory dir = new RAMDirectory();
IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(Lucene.VERSION, Lucene.STANDARD_ANALYZER));
Document document = new Document();
document.add(new TextField("_id", "1", Field.Store.YES));
document.add(new Field("content", "the big bad dog", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
indexWriter.addDocument(document);
IndexReader reader = DirectoryReader.open(indexWriter, true);
IndexSearcher searcher = new IndexSearcher(reader);
TopDocs topDocs = searcher.search(new TermQuery(new Term("_id", "1")), 1);
assertThat(topDocs.totalHits, equalTo(1));
FastVectorHighlighter highlighter = new FastVectorHighlighter();
PrefixQuery prefixQuery = new PrefixQuery(new Term("content", "ba"));
assertThat(prefixQuery.getRewriteMethod().getClass().getName(), equalTo(PrefixQuery.CONSTANT_SCORE_AUTO_REWRITE_DEFAULT.getClass().getName()));
String fragment = highlighter.getBestFragment(highlighter.getFieldQuery(prefixQuery),
reader, topDocs.scoreDocs[0].doc, "content", 30);
assertThat(fragment, nullValue());
prefixQuery.setRewriteMethod(PrefixQuery.SCORING_BOOLEAN_QUERY_REWRITE);
Query rewriteQuery = prefixQuery.rewrite(reader);
fragment = highlighter.getBestFragment(highlighter.getFieldQuery(rewriteQuery),
reader, topDocs.scoreDocs[0].doc, "content", 30);
assertThat(fragment, notNullValue());
// now check with the custom field query
prefixQuery = new PrefixQuery(new Term("content", "ba"));
assertThat(prefixQuery.getRewriteMethod().getClass().getName(), equalTo(PrefixQuery.CONSTANT_SCORE_AUTO_REWRITE_DEFAULT.getClass().getName()));
fragment = highlighter.getBestFragment(new CustomFieldQuery(prefixQuery, reader, highlighter),
reader, topDocs.scoreDocs[0].doc, "content", 30);
assertThat(fragment, notNullValue());
}
@Test
public void testVectorHighlighterNoStore() throws Exception {
Directory dir = new RAMDirectory();
IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(Lucene.VERSION, Lucene.STANDARD_ANALYZER));
Document document = new Document();
document.add(new TextField("_id", "1", Field.Store.YES));
document.add(new Field("content", "the big bad dog", Field.Store.NO, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
indexWriter.addDocument(document);
IndexReader reader = DirectoryReader.open(indexWriter, true);
IndexSearcher searcher = new IndexSearcher(reader);
TopDocs topDocs = searcher.search(new TermQuery(new Term("_id", "1")), 1);
assertThat(topDocs.totalHits, equalTo(1));
FastVectorHighlighter highlighter = new FastVectorHighlighter();
String fragment = highlighter.getBestFragment(highlighter.getFieldQuery(new TermQuery(new Term("content", "bad"))),
reader, topDocs.scoreDocs[0].doc, "content", 30);
assertThat(fragment, nullValue());
}
@Test
public void testVectorHighlighterNoTermVector() throws Exception {
Directory dir = new RAMDirectory();
IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(Lucene.VERSION, Lucene.STANDARD_ANALYZER));
Document document = new Document();
document.add(new TextField("_id", "1", Field.Store.YES));
document.add(new Field("content", "the big bad dog", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.NO));
indexWriter.addDocument(document);
IndexReader reader = DirectoryReader.open(indexWriter, true);
IndexSearcher searcher = new IndexSearcher(reader);
TopDocs topDocs = searcher.search(new TermQuery(new Term("_id", "1")), 1);
assertThat(topDocs.totalHits, equalTo(1));
FastVectorHighlighter highlighter = new FastVectorHighlighter();
String fragment = highlighter.getBestFragment(highlighter.getFieldQuery(new TermQuery(new Term("content", "bad"))),
reader, topDocs.scoreDocs[0].doc, "content", 30);
assertThat(fragment, nullValue());
}
}
| 0 (true)
|
src_test_java_org_elasticsearch_deps_lucene_VectorHighlighterTests.java
|
1,952 |
private final LoadingCache<K, Object> delegate = CacheBuilder.newBuilder().build(new CacheLoader<K, Object>() {
@Override
public Object load(K key) throws Exception {
Errors errors = new Errors();
V result = null;
try {
result = FailableCache.this.create(key, errors);
} catch (ErrorsException e) {
errors.merge(e.getErrors());
}
return errors.hasErrors() ? errors : result;
}
});
| 0 (true)
|
src_main_java_org_elasticsearch_common_inject_internal_FailableCache.java
|
3,583 |
public static class Defaults extends NumberFieldMapper.Defaults {
public static final FieldType FIELD_TYPE = new FieldType(NumberFieldMapper.Defaults.FIELD_TYPE);
static {
FIELD_TYPE.freeze();
}
public static final Float NULL_VALUE = null;
}
| 0 (true)
|
src_main_java_org_elasticsearch_index_mapper_core_FloatFieldMapper.java
|
1,120 |
public class OSQLFunctionDate extends OSQLFunctionAbstract {
public static final String NAME = "date";
private Date date;
private SimpleDateFormat format;
/**
* Get the date at construction to have the same date for all the iteration.
*/
public OSQLFunctionDate() {
super(NAME, 0, 3);
date = new Date();
}
public Object execute(final OIdentifiable iCurrentRecord, final Object iCurrentResult, final Object[] iParameters,
OCommandContext iContext) {
if (iParameters.length == 0)
return date;
if (iParameters[0] instanceof Number)
return new Date(((Number) iParameters[0]).longValue());
if (format == null) {
if (iParameters.length > 1) {
format = new SimpleDateFormat((String) iParameters[1]);
format.setTimeZone(ODateHelper.getDatabaseTimeZone());
} else
format = ODatabaseRecordThreadLocal.INSTANCE.get().getStorage().getConfiguration().getDateTimeFormatInstance();
if (iParameters.length == 3)
format.setTimeZone(TimeZone.getTimeZone(iParameters[2].toString()));
}
try {
return format.parse((String) iParameters[0]);
} catch (ParseException e) {
throw new OQueryParsingException("Error on formatting date '" + iParameters[0] + "' using the format: " + format, e);
}
}
public boolean aggregateResults(final Object[] configuredParameters) {
return false;
}
public String getSyntax() {
return "Syntax error: date([<date-as-string>] [,<format>] [,<timezone>])";
}
@Override
public Object getResult() {
format = null;
return null;
}
}
| 1 (no label)
|
core_src_main_java_com_orientechnologies_orient_core_sql_functions_misc_OSQLFunctionDate.java
|
2,321 |
static class UTCTimeZoneRoundingFloor extends TimeZoneRounding {
final static byte ID = 2;
private DateTimeUnit unit;
UTCTimeZoneRoundingFloor() { // for serialization
}
UTCTimeZoneRoundingFloor(DateTimeUnit unit) {
this.unit = unit;
}
@Override
public byte id() {
return ID;
}
@Override
public long roundKey(long utcMillis) {
return unit.field().roundFloor(utcMillis);
}
@Override
public long valueForKey(long key) {
return key;
}
@Override
public long nextRoundingValue(long value) {
return unit.field().roundCeiling(value + 1);
}
@Override
public void readFrom(StreamInput in) throws IOException {
unit = DateTimeUnit.resolve(in.readByte());
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeByte(unit.id());
}
}
| 0 (true)
|
src_main_java_org_elasticsearch_common_rounding_TimeZoneRounding.java
|
2,034 |
public interface ExposedBinding<T> extends Binding<T>, HasDependencies {
/**
* Returns the enclosed environment that holds the original binding.
*/
PrivateElements getPrivateElements();
/**
* Unsupported. Always throws {@link UnsupportedOperationException}.
*/
void applyTo(Binder binder);
}
| 0 (true)
|
src_main_java_org_elasticsearch_common_inject_spi_ExposedBinding.java
|
982 |
public class ORecordSerializerRaw implements ORecordSerializer {
public static final String NAME = "ORecordDocumentRaw";
public ORecordInternal<?> fromStream(final byte[] iSource) {
return new ORecordBytes(iSource);
}
public ORecordInternal<?> fromStream(final byte[] iSource, final ORecordInternal<?> iRecord, String[] iFields) {
final ORecordBytes record = (ORecordBytes) iRecord;
record.fromStream(iSource);
record.reset(iSource);
return record;
}
public byte[] toStream(final ORecordInternal<?> iSource, boolean iOnlyDelta) {
try {
return iSource.toStream();
} catch (Exception e) {
OLogManager.instance().error(this, "Error on unmarshalling object in binary format: " + iSource.getIdentity(), e,
OSerializationException.class);
}
return null;
}
}
| 0 (true)
|
core_src_main_java_com_orientechnologies_orient_core_serialization_serializer_record_ORecordSerializerRaw.java
|
2,563 |
firstMaster.clusterService.submitStateUpdateTask("local-disco-receive(from node[" + localNode + "])", new ProcessedClusterStateUpdateTask() {
@Override
public ClusterState execute(ClusterState currentState) {
DiscoveryNodes.Builder nodesBuilder = DiscoveryNodes.builder();
for (LocalDiscovery discovery : clusterGroups.get(clusterName).members()) {
nodesBuilder.put(discovery.localNode);
}
nodesBuilder.localNodeId(master.localNode().id()).masterNodeId(master.localNode().id());
return ClusterState.builder(currentState).nodes(nodesBuilder).build();
}
@Override
public void onFailure(String source, Throwable t) {
logger.error("unexpected failure during [{}]", t, source);
}
@Override
public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) {
sendInitialStateEventIfNeeded();
}
});
| 1 (no label)
|
src_main_java_org_elasticsearch_discovery_local_LocalDiscovery.java
|
637 |
static final class Fields {
static final XContentBuilderString INDICES = new XContentBuilderString("indices");
static final XContentBuilderString INDEX = new XContentBuilderString("index");
static final XContentBuilderString PRIMARY_SIZE = new XContentBuilderString("primary_size");
static final XContentBuilderString PRIMARY_SIZE_IN_BYTES = new XContentBuilderString("primary_size_in_bytes");
static final XContentBuilderString SIZE = new XContentBuilderString("size");
static final XContentBuilderString SIZE_IN_BYTES = new XContentBuilderString("size_in_bytes");
static final XContentBuilderString TRANSLOG = new XContentBuilderString("translog");
static final XContentBuilderString OPERATIONS = new XContentBuilderString("operations");
static final XContentBuilderString DOCS = new XContentBuilderString("docs");
static final XContentBuilderString NUM_DOCS = new XContentBuilderString("num_docs");
static final XContentBuilderString MAX_DOC = new XContentBuilderString("max_doc");
static final XContentBuilderString DELETED_DOCS = new XContentBuilderString("deleted_docs");
static final XContentBuilderString SHARDS = new XContentBuilderString("shards");
static final XContentBuilderString ROUTING = new XContentBuilderString("routing");
static final XContentBuilderString STATE = new XContentBuilderString("state");
static final XContentBuilderString PRIMARY = new XContentBuilderString("primary");
static final XContentBuilderString NODE = new XContentBuilderString("node");
static final XContentBuilderString RELOCATING_NODE = new XContentBuilderString("relocating_node");
static final XContentBuilderString SHARD = new XContentBuilderString("shard");
static final XContentBuilderString ID = new XContentBuilderString("id");
static final XContentBuilderString PEER_RECOVERY = new XContentBuilderString("peer_recovery");
static final XContentBuilderString STAGE = new XContentBuilderString("stage");
static final XContentBuilderString START_TIME_IN_MILLIS = new XContentBuilderString("start_time_in_millis");
static final XContentBuilderString TIME = new XContentBuilderString("time");
static final XContentBuilderString TIME_IN_MILLIS = new XContentBuilderString("time_in_millis");
static final XContentBuilderString PROGRESS = new XContentBuilderString("progress");
static final XContentBuilderString REUSED_SIZE = new XContentBuilderString("reused_size");
static final XContentBuilderString REUSED_SIZE_IN_BYTES = new XContentBuilderString("reused_size_in_bytes");
static final XContentBuilderString EXPECTED_RECOVERED_SIZE = new XContentBuilderString("expected_recovered_size");
static final XContentBuilderString EXPECTED_RECOVERED_SIZE_IN_BYTES = new XContentBuilderString("expected_recovered_size_in_bytes");
static final XContentBuilderString RECOVERED_SIZE = new XContentBuilderString("recovered_size");
static final XContentBuilderString RECOVERED_SIZE_IN_BYTES = new XContentBuilderString("recovered_size_in_bytes");
static final XContentBuilderString RECOVERED = new XContentBuilderString("recovered");
static final XContentBuilderString GATEWAY_RECOVERY = new XContentBuilderString("gateway_recovery");
static final XContentBuilderString GATEWAY_SNAPSHOT = new XContentBuilderString("gateway_snapshot");
static final XContentBuilderString EXPECTED_OPERATIONS = new XContentBuilderString("expected_operations");
}
| 0 (true)
|
src_main_java_org_elasticsearch_action_admin_indices_status_IndicesStatusResponse.java
|
174 |
public class AdditionalAnnotationCreator implements TreeLifecycleListener {
public static final String TODO_ANNOTATION_TYPE = PLUGIN_ID + ".todo";
private CeylonEditor editor;
CeylonInitializerAnnotation initializerAnnotation;
public AdditionalAnnotationCreator(CeylonEditor editor) {
this.editor = editor;
((IPostSelectionProvider) editor.getSelectionProvider())
.addPostSelectionChangedListener(new SelectionListener());
}
@Override
public Stage getStage() {
return TYPE_ANALYSIS;
}
@Override
public void update(CeylonParseController parseController, IProgressMonitor monitor) {
final CeylonParseController cpc = parseController;
if (cpc.getStage().ordinal() >= getStage().ordinal()) {
final Tree.CompilationUnit rootNode = cpc.getRootNode();
List<CommonToken> tokens = cpc.getTokens();
if (rootNode == null) {
return;
}
final IAnnotationModel model = editor.getDocumentProvider()
.getAnnotationModel(editor.getEditorInput());
if (model==null) {
return;
}
for (@SuppressWarnings("unchecked")
Iterator<Annotation> iter = model.getAnnotationIterator();
iter.hasNext();) {
Annotation a = iter.next();
if (a instanceof RefinementAnnotation ||
a.getType().equals(TODO_ANNOTATION_TYPE)) {
model.removeAnnotation(a);
}
}
//model.addAnnotation(new DefaultRangeIndicator(), new Position(50, 100));
new Visitor() {
@Override
public void visit(Tree.Declaration that) {
super.visit(that);
Declaration dec = that.getDeclarationModel();
if (dec!=null) {
if (dec.isActual()) {
addRefinementAnnotation(model, that,
that.getIdentifier(), dec);
}
}
}
@Override
public void visit(Tree.SpecifierStatement that) {
super.visit(that);
if (that.getRefinement()) {
Declaration dec = that.getDeclaration();
if (dec!=null) {
if (dec.isActual()) {
addRefinementAnnotation(model, that,
that.getBaseMemberExpression(), dec);
}
}
}
}
}.visit(rootNode);
for (CommonToken token : tokens) {
int type = token.getType();
if (type == CeylonLexer.LINE_COMMENT ||
type == CeylonLexer.MULTI_COMMENT) {
addTaskAnnotation(token, model);
}
}
}
}
public static Declaration getRefinedDeclaration(Declaration declaration) {
//Reproduces the algorithm used to build the type hierarchy
//first walk up the superclass hierarchy
if (declaration.isClassOrInterfaceMember() && declaration.isShared()) {
TypeDeclaration dec = (TypeDeclaration) declaration.getContainer();
List<ProducedType> signature = getSignature(declaration);
while (dec!=null) {
ClassOrInterface superDec = dec.getExtendedTypeDeclaration();
if (superDec!=null) {
Declaration superMemberDec =
superDec.getDirectMember(declaration.getName(), signature, false);
if (superMemberDec!=null &&
superMemberDec.getRefinedDeclaration()!=null &&
declaration.getRefinedDeclaration()!=null &&
!isAbstraction(superMemberDec) &&
superMemberDec.getRefinedDeclaration()
.equals(declaration.getRefinedDeclaration())) {
return superMemberDec;
}
}
dec = superDec;
}
//now look at the very top of the hierarchy, even if it is an interface
Declaration refinedDeclaration = declaration.getRefinedDeclaration();
if (refinedDeclaration!=null &&
!declaration.equals(refinedDeclaration)) {
List<Declaration> directlyInheritedMembers =
getInterveningRefinements(declaration.getName(), signature,
refinedDeclaration,
(TypeDeclaration) declaration.getContainer(),
(TypeDeclaration) refinedDeclaration.getContainer());
directlyInheritedMembers.remove(refinedDeclaration);
//TODO: do something for the case of
// multiple intervening interfaces?
if (directlyInheritedMembers.size()==1) {
//exactly one intervening interface
return directlyInheritedMembers.get(0);
}
else {
//no intervening interfaces
return refinedDeclaration;
}
}
}
return null;
}
private void addRefinementAnnotation(IAnnotationModel model,
Tree.StatementOrArgument that, Node line, Declaration dec) {
Declaration refined = getRefinedDeclaration(dec);
if (refined!=null) {
Declaration container = (Declaration) refined.getContainer();
Unit unit = that.getUnit();
String description =
"refines " + container.getName(unit) +
"." + refined.getName(unit);
RefinementAnnotation ra = new RefinementAnnotation(description,
refined, line.getToken().getLine());
model.addAnnotation(ra,
new Position(Nodes.getStartOffset(that),
Nodes.getLength(that)));
}
}
/**
* Updates the highlighted range in the vertical ruler
* (the blue bar indicating the current containing
* declaration).
*/
class SelectionListener implements ISelectionChangedListener {
@Override
public void selectionChanged(SelectionChangedEvent event) {
final CeylonParseController cpc = editor.getParseController();
if (cpc.getRootNode()==null) return;
Node node = Nodes.findScope(cpc.getRootNode(), (ITextSelection) event.getSelection());
if (node!=null) {
editor.setHighlightRange(node.getStartIndex(),
node.getStopIndex()-node.getStartIndex()+1, false);
}
else {
editor.resetHighlightRange();
}
IAnnotationModel model= editor.getDocumentProvider()
.getAnnotationModel(editor.getEditorInput());
if (model!=null) {
model.removeAnnotation(initializerAnnotation);
}
initializerAnnotation = null;
if (node!=null) {
node.visit(new InitializerVisitor());
if (initializerAnnotation!=null) {
model.addAnnotation(initializerAnnotation,
initializerAnnotation.getInitializerPosition());
}
}
}
}
class InitializerVisitor extends Visitor {
@Override
public void visit(Tree.ClassDefinition that) {
if (that.getClassBody()==null||that.getIdentifier()==null) return;
createAnnotation(that, that.getClassBody(), that.getIdentifier().getText());
}
@Override
public void visit(Tree.ObjectDefinition that) {
if (that.getClassBody()==null||that.getIdentifier()==null) return;
createAnnotation(that, that.getClassBody(), that.getIdentifier().getText());
}
private void createAnnotation(Node that, Tree.ClassBody body, String name) {
// int offset = editor.getSelection().getOffset();
// if (offset>that.getStartIndex()&&offset<that.getStopIndex()) {
Tree.Statement les = getLastExecutableStatement(body);
if (les != null) {
int startIndex = body.getStartIndex() + 2;
int stopIndex = les.getStopIndex();
Position initializerPosition = new Position(startIndex, stopIndex - startIndex + 1);
initializerAnnotation = new CeylonInitializerAnnotation(name, initializerPosition, 1);
}
// }
}
}
}
| 0 (true)
|
plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_editor_AdditionalAnnotationCreator.java
|
3,685 |
public class SourceFieldMapper extends AbstractFieldMapper<byte[]> implements InternalMapper, RootMapper {
public static final String NAME = "_source";
public static final String CONTENT_TYPE = "_source";
public static class Defaults extends AbstractFieldMapper.Defaults {
public static final String NAME = SourceFieldMapper.NAME;
public static final boolean ENABLED = true;
public static final long COMPRESS_THRESHOLD = -1;
public static final String FORMAT = null; // default format is to use the one provided
public static final FieldType FIELD_TYPE = new FieldType(AbstractFieldMapper.Defaults.FIELD_TYPE);
static {
FIELD_TYPE.setIndexed(false);
FIELD_TYPE.setStored(true);
FIELD_TYPE.setOmitNorms(true);
FIELD_TYPE.setIndexOptions(IndexOptions.DOCS_ONLY);
FIELD_TYPE.freeze();
}
}
public static class Builder extends Mapper.Builder<Builder, SourceFieldMapper> {
private boolean enabled = Defaults.ENABLED;
private long compressThreshold = Defaults.COMPRESS_THRESHOLD;
private Boolean compress = null;
private String format = Defaults.FORMAT;
private String[] includes = null;
private String[] excludes = null;
public Builder() {
super(Defaults.NAME);
}
public Builder enabled(boolean enabled) {
this.enabled = enabled;
return this;
}
public Builder compress(boolean compress) {
this.compress = compress;
return this;
}
public Builder compressThreshold(long compressThreshold) {
this.compressThreshold = compressThreshold;
return this;
}
public Builder format(String format) {
this.format = format;
return this;
}
public Builder includes(String[] includes) {
this.includes = includes;
return this;
}
public Builder excludes(String[] excludes) {
this.excludes = excludes;
return this;
}
@Override
public SourceFieldMapper build(BuilderContext context) {
return new SourceFieldMapper(name, enabled, format, compress, compressThreshold, includes, excludes);
}
}
public static class TypeParser implements Mapper.TypeParser {
@Override
public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
SourceFieldMapper.Builder builder = source();
for (Map.Entry<String, Object> entry : node.entrySet()) {
String fieldName = Strings.toUnderscoreCase(entry.getKey());
Object fieldNode = entry.getValue();
if (fieldName.equals("enabled")) {
builder.enabled(nodeBooleanValue(fieldNode));
} else if (fieldName.equals("compress") && fieldNode != null) {
builder.compress(nodeBooleanValue(fieldNode));
} else if (fieldName.equals("compress_threshold") && fieldNode != null) {
if (fieldNode instanceof Number) {
builder.compressThreshold(((Number) fieldNode).longValue());
builder.compress(true);
} else {
builder.compressThreshold(ByteSizeValue.parseBytesSizeValue(fieldNode.toString()).bytes());
builder.compress(true);
}
} else if ("format".equals(fieldName)) {
builder.format(nodeStringValue(fieldNode, null));
} else if (fieldName.equals("includes")) {
List<Object> values = (List<Object>) fieldNode;
String[] includes = new String[values.size()];
for (int i = 0; i < includes.length; i++) {
includes[i] = values.get(i).toString();
}
builder.includes(includes);
} else if (fieldName.equals("excludes")) {
List<Object> values = (List<Object>) fieldNode;
String[] excludes = new String[values.size()];
for (int i = 0; i < excludes.length; i++) {
excludes[i] = values.get(i).toString();
}
builder.excludes(excludes);
}
}
return builder;
}
}
private final boolean enabled;
private Boolean compress;
private long compressThreshold;
private String[] includes;
private String[] excludes;
private String format;
private XContentType formatContentType;
public SourceFieldMapper() {
this(Defaults.NAME, Defaults.ENABLED, Defaults.FORMAT, null, -1, null, null);
}
protected SourceFieldMapper(String name, boolean enabled, String format, Boolean compress, long compressThreshold,
String[] includes, String[] excludes) {
super(new Names(name, name, name, name), Defaults.BOOST, new FieldType(Defaults.FIELD_TYPE), null,
Lucene.KEYWORD_ANALYZER, Lucene.KEYWORD_ANALYZER, null, null, null, null, null, null); // Only stored.
this.enabled = enabled;
this.compress = compress;
this.compressThreshold = compressThreshold;
this.includes = includes;
this.excludes = excludes;
this.format = format;
this.formatContentType = format == null ? null : XContentType.fromRestContentType(format);
}
public boolean enabled() {
return this.enabled;
}
public String[] excludes() {
return this.excludes != null ? this.excludes : Strings.EMPTY_ARRAY;
}
public String[] includes() {
return this.includes != null ? this.includes : Strings.EMPTY_ARRAY;
}
@Override
public FieldType defaultFieldType() {
return Defaults.FIELD_TYPE;
}
@Override
public FieldDataType defaultFieldDataType() {
return null;
}
@Override
public boolean hasDocValues() {
return false;
}
@Override
public void preParse(ParseContext context) throws IOException {
super.parse(context);
}
@Override
public void postParse(ParseContext context) throws IOException {
}
@Override
public void parse(ParseContext context) throws IOException {
// nothing to do here, we will call it in pre parse
}
@Override
public void validate(ParseContext context) throws MapperParsingException {
}
@Override
public boolean includeInObject() {
return false;
}
@Override
protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
if (!enabled) {
return;
}
if (!fieldType.stored()) {
return;
}
if (context.flyweight()) {
return;
}
BytesReference source = context.source();
boolean filtered = (includes != null && includes.length > 0) || (excludes != null && excludes.length > 0);
if (filtered) {
// we don't update the context source if we filter, we want to keep it as is...
Tuple<XContentType, Map<String, Object>> mapTuple = XContentHelper.convertToMap(source, true);
Map<String, Object> filteredSource = XContentMapValues.filter(mapTuple.v2(), includes, excludes);
BytesStreamOutput bStream = new BytesStreamOutput();
StreamOutput streamOutput = bStream;
if (compress != null && compress && (compressThreshold == -1 || source.length() > compressThreshold)) {
streamOutput = CompressorFactory.defaultCompressor().streamOutput(bStream);
}
XContentType contentType = formatContentType;
if (contentType == null) {
contentType = mapTuple.v1();
}
XContentBuilder builder = XContentFactory.contentBuilder(contentType, streamOutput).map(filteredSource);
builder.close();
source = bStream.bytes();
} else if (compress != null && compress && !CompressorFactory.isCompressed(source)) {
if (compressThreshold == -1 || source.length() > compressThreshold) {
BytesStreamOutput bStream = new BytesStreamOutput();
XContentType contentType = XContentFactory.xContentType(source);
if (formatContentType != null && formatContentType != contentType) {
XContentBuilder builder = XContentFactory.contentBuilder(formatContentType, CompressorFactory.defaultCompressor().streamOutput(bStream));
builder.copyCurrentStructure(XContentFactory.xContent(contentType).createParser(source));
builder.close();
} else {
StreamOutput streamOutput = CompressorFactory.defaultCompressor().streamOutput(bStream);
source.writeTo(streamOutput);
streamOutput.close();
}
source = bStream.bytes();
// update the data in the context, so it can be compressed and stored compressed outside...
context.source(source);
}
} else if (formatContentType != null) {
// see if we need to convert the content type
Compressor compressor = CompressorFactory.compressor(source);
if (compressor != null) {
CompressedStreamInput compressedStreamInput = compressor.streamInput(source.streamInput());
XContentType contentType = XContentFactory.xContentType(compressedStreamInput);
compressedStreamInput.resetToBufferStart();
if (contentType != formatContentType) {
// we need to reread and store back, compressed....
BytesStreamOutput bStream = new BytesStreamOutput();
StreamOutput streamOutput = CompressorFactory.defaultCompressor().streamOutput(bStream);
XContentBuilder builder = XContentFactory.contentBuilder(formatContentType, streamOutput);
builder.copyCurrentStructure(XContentFactory.xContent(contentType).createParser(compressedStreamInput));
builder.close();
source = bStream.bytes();
// update the data in the context, so we store it in the translog in this format
context.source(source);
} else {
compressedStreamInput.close();
}
} else {
XContentType contentType = XContentFactory.xContentType(source);
if (contentType != formatContentType) {
// we need to reread and store back
// we need to reread and store back, compressed....
BytesStreamOutput bStream = new BytesStreamOutput();
XContentBuilder builder = XContentFactory.contentBuilder(formatContentType, bStream);
builder.copyCurrentStructure(XContentFactory.xContent(contentType).createParser(source));
builder.close();
source = bStream.bytes();
// update the data in the context, so we store it in the translog in this format
context.source(source);
}
}
}
assert source.hasArray();
fields.add(new StoredField(names().indexName(), source.array(), source.arrayOffset(), source.length()));
}
@Override
public byte[] value(Object value) {
if (value == null) {
return null;
}
BytesReference bValue;
if (value instanceof BytesRef) {
bValue = new BytesArray((BytesRef) value);
} else {
bValue = (BytesReference) value;
}
try {
return CompressorFactory.uncompressIfNeeded(bValue).toBytes();
} catch (IOException e) {
throw new ElasticsearchParseException("failed to decompress source", e);
}
}
@Override
protected String contentType() {
return CONTENT_TYPE;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
boolean includeDefaults = params.paramAsBoolean("include_defaults", false);
// all are defaults, no need to write it at all
if (!includeDefaults && enabled == Defaults.ENABLED && compress == null && compressThreshold == -1 && includes == null && excludes == null) {
return builder;
}
builder.startObject(contentType());
if (includeDefaults || enabled != Defaults.ENABLED) {
builder.field("enabled", enabled);
}
if (includeDefaults || !Objects.equal(format, Defaults.FORMAT)) {
builder.field("format", format);
}
if (compress != null) {
builder.field("compress", compress);
} else if (includeDefaults) {
builder.field("compress", false);
}
if (compressThreshold != -1) {
builder.field("compress_threshold", new ByteSizeValue(compressThreshold).toString());
} else if (includeDefaults) {
builder.field("compress_threshold", -1);
}
if (includes != null) {
builder.field("includes", includes);
} else if (includeDefaults) {
builder.field("includes", Strings.EMPTY_ARRAY);
}
if (excludes != null) {
builder.field("excludes", excludes);
} else if (includeDefaults) {
builder.field("excludes", Strings.EMPTY_ARRAY);
}
builder.endObject();
return builder;
}
@Override
public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
SourceFieldMapper sourceMergeWith = (SourceFieldMapper) mergeWith;
if (!mergeContext.mergeFlags().simulate()) {
if (sourceMergeWith.compress != null) {
this.compress = sourceMergeWith.compress;
}
if (sourceMergeWith.compressThreshold != -1) {
this.compressThreshold = sourceMergeWith.compressThreshold;
}
if (sourceMergeWith.includes != null) {
this.includes = sourceMergeWith.includes;
}
if (sourceMergeWith.excludes != null) {
this.excludes = sourceMergeWith.excludes;
}
}
}
}
| 1 (no label)
|
src_main_java_org_elasticsearch_index_mapper_internal_SourceFieldMapper.java
|
710 |
static class WriteResult {
final Object response;
final long preVersion;
final Tuple<String, String> mappingToUpdate;
final Engine.IndexingOperation op;
WriteResult(Object response, long preVersion, Tuple<String, String> mappingToUpdate, Engine.IndexingOperation op) {
this.response = response;
this.preVersion = preVersion;
this.mappingToUpdate = mappingToUpdate;
this.op = op;
}
@SuppressWarnings("unchecked")
<T> T response() {
return (T) response;
}
}
| 0 (true)
|
src_main_java_org_elasticsearch_action_bulk_TransportShardBulkAction.java
|
1,402 |
public interface AccessDelegate<T extends HazelcastRegion> {
/**
* Get the wrapped cache region
*
* @return The underlying region
*/
T getHazelcastRegion();
/**
* Attempt to retrieve an object from the cache. Mainly used in attempting
* to resolve entities/collections from the second level cache.
*
* @param key The key of the item to be retrieved.
* @param txTimestamp a timestamp prior to the transaction start time
* @return the cached object or <tt>null</tt>
* @throws org.hibernate.cache.CacheException
* Propogated from underlying {@link org.hibernate.cache.spi.Region}
*/
Object get(Object key, long txTimestamp) throws CacheException;
/**
* Called after an item has been inserted (before the transaction completes),
* instead of calling evict().
* This method is used by "synchronous" concurrency strategies.
*
* @param key The item key
* @param value The item
* @param version The item's version value
* @return Were the contents of the cache actual changed by this operation?
* @throws org.hibernate.cache.CacheException
* Propogated from underlying {@link org.hibernate.cache.spi.Region}
*/
boolean insert(Object key, Object value, Object version) throws CacheException;
/**
* Called after an item has been inserted (after the transaction completes),
* instead of calling release().
* This method is used by "asynchronous" concurrency strategies.
*
* @param key The item key
* @param value The item
* @param version The item's version value
* @return Were the contents of the cache actually changed by this operation?
* @throws org.hibernate.cache.CacheException
* Propagated from underlying {@link org.hibernate.cache.spi.Region}
*/
boolean afterInsert(Object key, Object value, Object version) throws CacheException;
/**
* Called after an item has been updated (before the transaction completes),
* instead of calling evict(). This method is used by "synchronous" concurrency
* strategies.
*
* @param key The item key
* @param value The item
* @param currentVersion The item's current version value
* @param previousVersion The item's previous version value
* @return Were the contents of the cache actually changed by this operation?
* @throws org.hibernate.cache.CacheException
* Propagated from underlying {@link org.hibernate.cache.spi.Region}
*/
boolean update(Object key, Object value, Object currentVersion, Object previousVersion) throws CacheException;
/**
* Called after an item has been updated (after the transaction completes),
* instead of calling release(). This method is used by "asynchronous"
* concurrency strategies.
*
* @param key The item key
* @param value The item
* @param currentVersion The item's current version value
* @param previousVersion The item's previous version value
* @param lock The lock previously obtained from {@link #lockItem}
* @return Were the contents of the cache actually changed by this operation?
* @throws org.hibernate.cache.CacheException
* Propagated from underlying {@link org.hibernate.cache.spi.Region}
*/
boolean afterUpdate(Object key, Object value, Object currentVersion, Object previousVersion, SoftLock lock)
throws CacheException;
/**
* Attempt to cache an object, after loading from the database.
*
* @param key The item key
* @param value The item
* @param txTimestamp a timestamp prior to the transaction start time
* @param version the item version number
* @return <tt>true</tt> if the object was successfully cached
* @throws org.hibernate.cache.CacheException
* Propagated from underlying {@link org.hibernate.cache.spi.Region}
*/
boolean putFromLoad(Object key, Object value, long txTimestamp, Object version) throws CacheException;
/**
* Attempt to cache an object, after loading from the database, explicitly
* specifying the minimalPut behavior.
*
* @param key The item key
* @param value The item
* @param txTimestamp a timestamp prior to the transaction start time
* @param version the item version number
* @param minimalPutOverride Explicit minimalPut flag
* @return <tt>true</tt> if the object was successfully cached
* @throws org.hibernate.cache.CacheException
* Propagated from underlying {@link org.hibernate.cache.spi.Region}
*/
boolean putFromLoad(Object key, Object value, long txTimestamp, Object version, boolean minimalPutOverride)
throws CacheException;
/**
* Called after an item has become stale (before the transaction completes).
* This method is used by "synchronous" concurrency strategies.
*
* @param key The key of the item to remove
* @throws org.hibernate.cache.CacheException
* Propagated from underlying {@link org.hibernate.cache.spi.Region}
*/
void remove(Object key) throws CacheException;
/**
* Called to evict data from the entire region
*
* @throws org.hibernate.cache.CacheException
* Propagated from underlying {@link org.hibernate.cache.spi.Region}
*/
void removeAll() throws CacheException;
/**
* Forcibly evict an item from the cache immediately without regard for transaction
* isolation.
*
* @param key The key of the item to remove
* @throws org.hibernate.cache.CacheException
* Propagated from underlying {@link org.hibernate.cache.spi.Region}
*/
void evict(Object key) throws CacheException;
/**
* Forcibly evict all items from the cache immediately without regard for transaction
* isolation.
*
* @throws org.hibernate.cache.CacheException
* Propagated from underlying {@link org.hibernate.cache.spi.Region}
*/
void evictAll() throws CacheException;
/**
* We are going to attempt to update/delete the keyed object. This
* method is used by "asynchronous" concurrency strategies.
* <p/>
* The returned object must be passed back to release(), to release the
* lock. Concurrency strategies which do not support client-visible
* locks may silently return null.
*
* @param key The key of the item to lock
* @param version The item's current version value
* @return A representation of our lock on the item; or null.
* @throws org.hibernate.cache.CacheException
* Propagated from underlying {@link org.hibernate.cache.spi.Region}
*/
SoftLock lockItem(Object key, Object version) throws CacheException;
/**
* Lock the entire region
*
* @return A representation of our lock on the item; or null.
* @throws org.hibernate.cache.CacheException
* Propagated from underlying {@link org.hibernate.cache.spi.Region}
*/
SoftLock lockRegion() throws CacheException;
/**
* Called when we have finished the attempted update/delete (which may or
* may not have been successful), after transaction completion. This method
* is used by "asynchronous" concurrency strategies.
*
* @param key The item key
* @param lock The lock previously obtained from {@link #lockItem}
* @throws org.hibernate.cache.CacheException
* Propagated from underlying {@link org.hibernate.cache.spi.Region}
*/
void unlockItem(Object key, SoftLock lock) throws CacheException;
/**
* Called after we have finished the attempted invalidation of the entire
* region
*
* @param lock The lock previously obtained from {@link #lockRegion}
* @throws org.hibernate.cache.CacheException
* Propagated from underlying {@link org.hibernate.cache.spi.Region}
*/
void unlockRegion(SoftLock lock) throws CacheException;
}
| 0true
|
hazelcast-hibernate_hazelcast-hibernate4_src_main_java_com_hazelcast_hibernate_access_AccessDelegate.java
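A minimal caller-side sketch of the AccessDelegate contract above. The helper class, the parameter values, and the import paths for HazelcastRegion and SoftLock are assumptions made for illustration; only the AccessDelegate calls themselves come from the interface.
import com.hazelcast.hibernate.access.AccessDelegate;
import com.hazelcast.hibernate.region.HazelcastRegion;
import org.hibernate.cache.spi.access.SoftLock;
class AccessDelegateUsageSketch {
    // Read-through: consult the cache first, then cache the freshly loaded value.
    static Object readThrough(AccessDelegate<? extends HazelcastRegion> delegate,
                              Object key, long txTimestamp, Object loadedValue, Object version) {
        Object cached = delegate.get(key, txTimestamp);
        if (cached != null) {
            return cached;
        }
        delegate.putFromLoad(key, loadedValue, txTimestamp, version);
        return loadedValue;
    }
    // "Asynchronous" update path: lock the item, update the database elsewhere,
    // publish the new value after the transaction, then release the lock.
    static void updateThrough(AccessDelegate<? extends HazelcastRegion> delegate,
                              Object key, Object newValue, Object newVersion, Object oldVersion) {
        SoftLock lock = delegate.lockItem(key, oldVersion);
        try {
            delegate.afterUpdate(key, newValue, newVersion, oldVersion, lock);
        } finally {
            delegate.unlockItem(key, lock);
        }
    }
}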
|
491 |
public class SourceModuleNode extends PackageFragment implements ModuleNode {
private IPackageFragmentRoot sourceFolder;
private IPackageFragment mainPackageFragment;
private Set<IPackageFragment> packageFragments = new LinkedHashSet<>();
private List<IFile> resourceChildren = new ArrayList<>();
protected String moduleSignature;
public List<IFile> getResourceChildren() {
return resourceChildren;
}
public IPackageFragmentRoot getSourceFolder() {
return sourceFolder;
}
public static SourceModuleNode createSourceModuleNode(IPackageFragmentRoot sourceFolder, String moduleSignature) {
JDTModule module = moduleFromSignatureAndProject(moduleSignature, sourceFolder.getJavaProject().getProject());
String[] packageName;
if (module.isDefaultModule()) {
packageName = new String[] {""};
} else {
packageName = module.getName().toArray(new String[0]);
}
return new SourceModuleNode(sourceFolder, moduleSignature, packageName);
}
private SourceModuleNode(IPackageFragmentRoot sourceFolder, String moduleSignature, String[] packageName) {
super((PackageFragmentRoot)sourceFolder, packageName);
this.moduleSignature = moduleSignature;
this.sourceFolder = sourceFolder;
mainPackageFragment = this;
}
public IProject getProject() {
return sourceFolder.getJavaProject().getProject();
}
public IPackageFragment getMainPackageFragment() {
return mainPackageFragment;
}
public Collection<IPackageFragment> getPackageFragments() {
return packageFragments;
}
private static JDTModule moduleFromSignatureAndProject(String signature, IProject project) {
Modules modules = CeylonBuilder.getProjectModules(project);
if (modules != null) {
for (Module module : modules.getListOfModules()) {
if (! (module instanceof JDTModule)) {
continue;
}
JDTModule jdtModule = (JDTModule) module;
if (jdtModule.isProjectModule() || jdtModule.isDefaultModule()) {
if (jdtModule.getSignature().equals(signature)) {
return jdtModule;
}
}
}
}
return null;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime
* result
+ ((moduleSignature == null) ? 0 : moduleSignature
.hashCode());
result = prime * result
+ ((sourceFolder == null) ? 0 : sourceFolder.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
SourceModuleNode other = (SourceModuleNode) obj;
if (moduleSignature == null) {
if (other.moduleSignature != null)
return false;
} else if (!moduleSignature.equals(other.moduleSignature))
return false;
if (sourceFolder == null) {
if (other.sourceFolder != null)
return false;
} else if (!sourceFolder.equals(other.sourceFolder))
return false;
return true;
}
@Override
public JDTModule getModule() {
return moduleFromSignatureAndProject(moduleSignature, getProject());
}
}
| 1no label
|
plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_navigator_SourceModuleNode.java
|
234 |
public abstract class OAbstractRecordCache {
protected OCache underlying;
protected String profilerPrefix = "noname";
protected String profilerMetadataPrefix = "noname";
protected int excludedCluster = -1;
/**
* Create cache backed by given implementation
*
* @param impl
* actual implementation of cache
*/
public OAbstractRecordCache(final OCache impl) {
underlying = impl;
}
/**
* Tell whether cache is enabled
*
* @return {@code true} if cache enabled at call time, otherwise - {@code false}
*/
public boolean isEnabled() {
return underlying.isEnabled();
}
/**
* Switch cache state between enabled and disabled
*
* @param enable
* pass {@code true} to enable, otherwise - {@code false}
*/
public void setEnable(final boolean enable) {
if (enable)
underlying.enable();
else
underlying.disable();
}
/**
* Remove record with specified identifier
*
* @param rid
* unique identifier of record
* @return record stored in cache if any, otherwise - {@code null}
*/
public ORecordInternal<?> freeRecord(final ORID rid) {
return underlying.remove(rid);
}
/**
* Remove all records belonging to specified cluster
*
* @param cid
* identifier of cluster
*/
public void freeCluster(final int cid) {
final Set<ORID> toRemove = new HashSet<ORID>(underlying.size() / 2);
final Set<ORID> keys = new HashSet<ORID>(underlying.keys());
for (final ORID id : keys)
if (id.getClusterId() == cid)
toRemove.add(id);
for (final ORID ridToRemove : toRemove)
underlying.remove(ridToRemove);
}
/**
* Remove record entry
*
* @param rid
* unique record identifier
*/
public void deleteRecord(final ORID rid) {
underlying.remove(rid);
}
/**
* Clear the entire cache by removing all the entries
*/
public void clear() {
underlying.clear();
}
/**
* Total number of cached entries
*
* @return non-negative integer
*/
public int getSize() {
return underlying.size();
}
/**
* Maximum number of items cache should keep
*
* @return non-negative integer
*/
public int getMaxSize() {
return underlying.limit();
}
/**
* All operations running at cache initialization stage
*/
public void startup() {
underlying.startup();
Orient.instance().getProfiler()
.registerHookValue(profilerPrefix + "enabled", "Cache enabled", METRIC_TYPE.ENABLED, new OProfilerHookValue() {
public Object getValue() {
return isEnabled();
}
}, profilerMetadataPrefix + "enabled");
Orient.instance().getProfiler()
.registerHookValue(profilerPrefix + "current", "Number of entries in cache", METRIC_TYPE.SIZE, new OProfilerHookValue() {
public Object getValue() {
return getSize();
}
}, profilerMetadataPrefix + "current");
Orient
.instance()
.getProfiler()
.registerHookValue(profilerPrefix + "max", "Maximum number of entries in cache", METRIC_TYPE.SIZE,
new OProfilerHookValue() {
public Object getValue() {
return getMaxSize();
}
}, profilerMetadataPrefix + "max");
}
/**
* All operations running at cache destruction stage
*/
public void shutdown() {
underlying.shutdown();
if (Orient.instance().getProfiler() != null) {
Orient.instance().getProfiler().unregisterHookValue(profilerPrefix + "enabled");
Orient.instance().getProfiler().unregisterHookValue(profilerPrefix + "current");
Orient.instance().getProfiler().unregisterHookValue(profilerPrefix + "max");
}
}
}
| 0true
|
core_src_main_java_com_orientechnologies_orient_core_cache_OAbstractRecordCache.java
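A minimal subclass sketch for the abstract cache above. Concrete caches typically just pick an OCache implementation and the profiler prefixes and inherit everything else; the class name, the prefixes, and the OCache import path are illustrative assumptions.
import com.orientechnologies.orient.core.cache.OAbstractRecordCache;
import com.orientechnologies.orient.core.cache.OCache;
public class ODemoRecordCache extends OAbstractRecordCache {
    public ODemoRecordCache(final OCache impl) {
        super(impl);
        // Prefixes used when registering the profiler hooks in startup().
        profilerPrefix = "db.demo.cache.demo.";
        profilerMetadataPrefix = "db.*.cache.demo.";
    }
}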
|
792 |
public class PercolateAction extends Action<PercolateRequest, PercolateResponse, PercolateRequestBuilder> {
public static final PercolateAction INSTANCE = new PercolateAction();
public static final String NAME = "percolate";
private PercolateAction() {
super(NAME);
}
@Override
public PercolateResponse newResponse() {
return new PercolateResponse();
}
@Override
public PercolateRequestBuilder newRequestBuilder(Client client) {
return new PercolateRequestBuilder(client);
}
}
| 0true
|
src_main_java_org_elasticsearch_action_percolate_PercolateAction.java
|
868 |
public class OrderItemQualifierAnswer implements IAnswer<OrderItemQualifier> {
@Override
public OrderItemQualifier answer() throws Throwable {
return new OrderItemQualifierImpl();
}
}
| 0true
|
core_broadleaf-framework_src_test_java_org_broadleafcommerce_core_offer_service_OfferServiceTest.java
|
1,442 |
public class TestLoadGraph {
private static final String INPUT_FILE = "target/test-classes/graph-example-2.xml";
private static final String DBURL = "plocal:target/databases/GratefulDeadConcerts";
private String inputFile = INPUT_FILE;
private String dbURL = DBURL;
public static void main(final String[] args) throws Exception {
new TestLoadGraph(args).testImport();
}
public TestLoadGraph() {
inputFile = INPUT_FILE;
dbURL = DBURL;
}
public TestLoadGraph(final String[] args) {
inputFile = args.length > 0 ? args[0] : INPUT_FILE;
dbURL = args.length > 1 ? args[1] : DBURL;
}
@Test
public void testImport() throws IOException, FileNotFoundException {
OGlobalConfiguration.STORAGE_KEEP_OPEN.setValue(false);
ODatabaseDocumentTx db = new ODatabaseDocumentTx(DBURL);
ODatabaseHelper.deleteDatabase(db, "plocal");
OrientBaseGraph g = new OrientGraphNoTx(dbURL);
System.out.println("Importing graph from file '" + inputFile + "' into database: " + g + "...");
final long startTime = System.currentTimeMillis();
GraphMLReader.inputGraph(g, new FileInputStream(inputFile), 10000, null, null, null);
System.out.println("Imported in " + (System.currentTimeMillis() - startTime) + "ms. Vertexes: " + g.countVertices());
g.shutdown();
}
}
| 0true
|
graphdb_src_test_java_com_orientechnologies_orient_graph_blueprints_TestLoadGraph.java
|
1,452 |
public class TimestampsRegionCache extends LocalRegionCache implements RegionCache {
public TimestampsRegionCache(final String name, final HazelcastInstance hazelcastInstance) {
super(name, hazelcastInstance, null);
}
@Override
public boolean put(Object key, Object value, Object currentVersion) {
return update(key, value, currentVersion, null, null);
}
@Override
protected MessageListener<Object> createMessageListener() {
return new MessageListener<Object>() {
public void onMessage(final Message<Object> message) {
final Timestamp ts = (Timestamp) message.getMessageObject();
final Object key = ts.getKey();
for (;;) {
final Value value = cache.get(key);
final Long current = value != null ? (Long) value.getValue() : null;
if (current != null) {
if (ts.getTimestamp() > current) {
if (cache.replace(key, value, new Value(value.getVersion(),
ts.getTimestamp(), Clock.currentTimeMillis()))) {
return;
}
} else {
return;
}
} else {
if (cache.putIfAbsent(key, new Value(null, ts.getTimestamp(),
Clock.currentTimeMillis())) == null) {
return;
}
}
}
}
};
}
@Override
protected Object createMessage(final Object key, final Object value, final Object currentVersion) {
return new Timestamp(key, (Long) value);
}
final void cleanup() {
}
}
| 0true
|
hazelcast-hibernate_hazelcast-hibernate4_src_main_java_com_hazelcast_hibernate_local_TimestampsRegionCache.java
|
466 |
public class IndicesAliasesRequestBuilder extends AcknowledgedRequestBuilder<IndicesAliasesRequest, IndicesAliasesResponse, IndicesAliasesRequestBuilder> {
public IndicesAliasesRequestBuilder(IndicesAdminClient indicesClient) {
super((InternalIndicesAdminClient) indicesClient, new IndicesAliasesRequest());
}
/**
* Adds an alias to the index.
*
* @param index The index
* @param alias The alias
*/
public IndicesAliasesRequestBuilder addAlias(String index, String alias) {
request.addAlias(alias, index);
return this;
}
/**
* Adds an alias to the index.
*
* @param indices The indices
* @param alias The alias
*/
public IndicesAliasesRequestBuilder addAlias(String[] indices, String alias) {
request.addAlias(alias, indices);
return this;
}
/**
* Adds an alias to the index.
*
* @param index The index
* @param alias The alias
* @param filter The filter
*/
public IndicesAliasesRequestBuilder addAlias(String index, String alias, String filter) {
AliasActions action = new AliasActions(AliasAction.Type.ADD, index, alias).filter(filter);
request.addAliasAction(action);
return this;
}
/**
* Adds an alias to the index.
*
* @param indices The indices
* @param alias The alias
* @param filter The filter
*/
public IndicesAliasesRequestBuilder addAlias(String indices[], String alias, String filter) {
AliasActions action = new AliasActions(AliasAction.Type.ADD, indices, alias).filter(filter);
request.addAliasAction(action);
return this;
}
/**
* Adds an alias to the index.
*
* @param indices The indices
* @param alias The alias
* @param filter The filter
*/
public IndicesAliasesRequestBuilder addAlias(String[] indices, String alias, Map<String, Object> filter) {
request.addAlias(alias, filter, indices);
return this;
}
/**
* Adds an alias to the index.
*
* @param index The index
* @param alias The alias
* @param filter The filter
*/
public IndicesAliasesRequestBuilder addAlias(String index, String alias, Map<String, Object> filter) {
request.addAlias(alias, filter, index);
return this;
}
/**
* Adds an alias to the index.
*
* @param indices The indices
* @param alias The alias
* @param filterBuilder The filter
*/
public IndicesAliasesRequestBuilder addAlias(String indices[], String alias, FilterBuilder filterBuilder) {
request.addAlias(alias, filterBuilder, indices);
return this;
}
/**
* Adds an alias to the index.
*
* @param index The index
* @param alias The alias
* @param filterBuilder The filter
*/
public IndicesAliasesRequestBuilder addAlias(String index, String alias, FilterBuilder filterBuilder) {
request.addAlias(alias, filterBuilder, index);
return this;
}
/**
* Removes an alias from the index.
*
* @param index The index
* @param alias The alias
*/
public IndicesAliasesRequestBuilder removeAlias(String index, String alias) {
request.removeAlias(index, alias);
return this;
}
/**
* Removes aliases from the index.
*
* @param indices The indices
* @param aliases The aliases
*/
public IndicesAliasesRequestBuilder removeAlias(String[] indices, String... aliases) {
request.removeAlias(indices, aliases);
return this;
}
/**
* Removes aliases from the index.
*
* @param index The index
* @param aliases The aliases
*/
public IndicesAliasesRequestBuilder removeAlias(String index, String[] aliases) {
request.removeAlias(index, aliases);
return this;
}
@Override
protected void doExecute(ActionListener<IndicesAliasesResponse> listener) {
((IndicesAdminClient) client).aliases(request, listener);
}
/**
* Adds an alias action to the request.
*
* @param aliasAction The alias action
*/
public IndicesAliasesRequestBuilder addAliasAction(AliasAction aliasAction) {
request.addAliasAction(aliasAction);
return this;
}
/**
* Adds an alias action to the request.
*
* @param action The alias action
*/
public IndicesAliasesRequestBuilder addAliasAction(
AliasActions action) {
request.addAliasAction(action);
return this;
}
}
| 0true
|
src_main_java_org_elasticsearch_action_admin_indices_alias_IndicesAliasesRequestBuilder.java
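A usage sketch for the builder above, assuming a connected Client; prepareAliases() is the usual entry point on the indices admin client, and the index, alias, and field names are placeholders.
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.index.query.FilterBuilders;
class AliasUsageSketch {
    static boolean swapAliases(Client client) {
        IndicesAliasesResponse response = client.admin().indices().prepareAliases()
                .addAlias("orders-2014", "orders")                                  // plain alias
                .addAlias("orders-2014", "big-orders",
                        FilterBuilders.rangeFilter("amount").gte(1000))             // filtered alias
                .removeAlias("orders-2013", "orders")
                .execute().actionGet();
        return response.isAcknowledged();
    }
}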
|
61 |
public class TestTxLog
{
private void assertEqualByteArray( byte a[], byte b[] )
{
assertTrue( a.length == b.length );
for ( int i = 0; i < a.length; i++ )
{
assertEquals( a[i], b[i] );
}
}
private File path()
{
String path = AbstractNeo4jTestCase.getStorePath( "txlog" );
File file = new File( path );
file.mkdirs();
return file;
}
private File file( String name )
{
return new File( path(), name);
}
private File txFile()
{
return file( "tx_test_log.tx" );
}
private File tmpFile() throws IOException
{
File file = File.createTempFile( "tx_test_log.tx.", ".tmp", path() );
file.deleteOnExit();
return file;
}
@Test
public void testTxLog() throws IOException
{
File file = txFile();
if ( file.exists() )
{
file.delete();
}
try
{
TxLog txLog = new TxLog( txFile(), new DefaultFileSystemAbstraction(), new Monitors() );
assertTrue( !txLog.getDanglingRecords().iterator().hasNext() );
byte globalId[] = new byte[64];
byte branchId[] = new byte[45];
txLog.txStart( globalId );
txLog.addBranch( globalId, branchId );
assertEquals( 2, txLog.getRecordCount() );
// Force here because we're using DirectMappedLogBuffer
txLog.force();
// List lists[] = txLog.getDanglingRecords();
List<?> lists[] = getRecordLists( txLog.getDanglingRecords() );
assertEquals( 1, lists.length );
List<?> records = lists[0];
assertEquals( 2, records.size() );
TxLog.Record record = (TxLog.Record) records.get( 0 );
assertEquals( TxLog.TX_START, record.getType() );
assertEqualByteArray( globalId, record.getGlobalId() );
assertTrue( null == record.getBranchId() );
record = (TxLog.Record) records.get( 1 );
assertEquals( TxLog.BRANCH_ADD, record.getType() );
assertEqualByteArray( globalId, record.getGlobalId() );
assertEqualByteArray( branchId, record.getBranchId() );
txLog.markAsCommitting( globalId, ForceMode.unforced );
assertEquals( 3, txLog.getRecordCount() );
txLog.close();
txLog = new TxLog( txFile(), new DefaultFileSystemAbstraction(), new Monitors() );
assertEquals( 0, txLog.getRecordCount() );
lists = getRecordLists( txLog.getDanglingRecords() );
assertEquals( 1, lists.length );
records = lists[0];
assertEquals( 3, records.size() );
record = (TxLog.Record) records.get( 0 );
assertEquals( TxLog.TX_START, record.getType() );
assertEqualByteArray( globalId, record.getGlobalId() );
assertTrue( null == record.getBranchId() );
record = (TxLog.Record) records.get( 1 );
assertEquals( TxLog.BRANCH_ADD, record.getType() );
assertEqualByteArray( globalId, record.getGlobalId() );
assertEqualByteArray( branchId, record.getBranchId() );
record = (TxLog.Record) records.get( 2 );
assertEquals( TxLog.MARK_COMMIT, record.getType() );
assertEqualByteArray( globalId, record.getGlobalId() );
assertTrue( null == record.getBranchId() );
txLog.txDone( globalId );
// Force here because we're using DirectMappedLogBuffer
txLog.force();
assertEquals( 1, txLog.getRecordCount() );
assertEquals( 0,
getRecordLists( txLog.getDanglingRecords() ).length );
txLog.close();
txLog = new TxLog( txFile(), new DefaultFileSystemAbstraction(), new Monitors() );
assertEquals( 0,
getRecordLists( txLog.getDanglingRecords() ).length );
txLog.close();
}
finally
{
file = txFile();
if ( file.exists() )
{
file.delete();
}
}
}
private List<?>[] getRecordLists( Iterable<List<Record>> danglingRecords )
{
List<List<?>> list = new ArrayList<>();
for ( List<Record> txs : danglingRecords )
{
list.add( txs );
}
return list.toArray( new List[list.size()] );
}
@Test
public void testTruncateTxLog() throws IOException
{
File file = txFile();
if ( file.exists() )
{
file.delete();
}
try
{
TxLog txLog = new TxLog( txFile(), fs, new Monitors() );
byte globalId[] = new byte[64];
byte branchId[] = new byte[45];
txLog.txStart( globalId );
txLog.addBranch( globalId, branchId );
txLog.markAsCommitting( globalId, ForceMode.unforced );
txLog.truncate();
assertEquals( 0,
getRecordLists( txLog.getDanglingRecords() ).length );
txLog.close();
txLog = new TxLog( txFile(), new DefaultFileSystemAbstraction(), new Monitors() );
txLog.txStart( globalId );
txLog.addBranch( globalId, branchId );
txLog.markAsCommitting( globalId, ForceMode.unforced );
txLog.close();
txLog = new TxLog( txFile(), new DefaultFileSystemAbstraction(), new Monitors() );
assertEquals( 1,
getRecordLists( txLog.getDanglingRecords() ).length );
txLog.truncate();
assertEquals( 0,
getRecordLists( txLog.getDanglingRecords() ).length );
}
finally
{
file = txFile();
if ( file.exists() )
{
file.delete();
}
}
}
@Test
public void logFilesInflatedWithZerosShouldStillBeAbleToRotate() throws IOException
{
// Given
File logFile = tmpFile();
File rotationTarget = tmpFile();
zeroPad( logFile, fs, TxLog.SCAN_WINDOW_SIZE / 2 );
TxLog log = new TxLog( logFile, fs, new Monitors() );
writeStartRecords( log, 2000, 0 );
log.force();
// When
log.switchToLogFile( rotationTarget );
// Then
assertThat( log.getRecordCount(), is( 2000 ) );
}
@Test
public void logFilesInflatedWithZerosShouldNotSkipLastEntries() throws IOException
{
// Given
File logFile = tmpFile();
TxLog log = new TxLog( logFile, fs, new Monitors() );
writeStartRecords( log, 1, 0);
log.force();
log.close();
// And given we then pad it enough that records will misalign with the scan window size
zeroPad( logFile, fs, TxLog.SCAN_WINDOW_SIZE - 2 );
// And add more records
log = new TxLog( logFile, fs, new Monitors() );
writeStartRecords( log, 1, 1 );
log.force();
File rotationTarget = tmpFile();
// When
log.switchToLogFile( rotationTarget );
// Then
assertThat( log.getRecordCount(), is( 2 ) );
}
private void writeStartRecords( TxLog log, int numberOfStartRecords, int startId ) throws IOException
{
for (int i = startId; i < numberOfStartRecords + startId; i++)
{
log.txStart( globalId( i ) );
}
}
private byte[] globalId( int i )
{
globalIdBuffer.putInt(0, i);
byte[] bytes = new byte[ Xid.MAXGTRIDSIZE ];
globalIdBuffer.position( 0 );
globalIdBuffer.get( bytes );
return bytes;
}
private final ByteBuffer globalIdBuffer = ByteBuffer.allocate( Xid.MAXGTRIDSIZE );
private final DefaultFileSystemAbstraction fs = new DefaultFileSystemAbstraction();
private void zeroPad( File logFile, DefaultFileSystemAbstraction fileSystem, int numberOfNulls ) throws IOException
{
StoreChannel ch = fileSystem.open(logFile, "rw");
ch.position( ch.size() );
ch.write( ByteBuffer.allocate( numberOfNulls ));
ch.force(false);
ch.close();
}
}
| 0true
|
community_kernel_src_test_java_org_neo4j_kernel_impl_transaction_TestTxLog.java
|
962 |
public interface OrderMultishipOptionDao {
/**
* Saves a given OrderMultishipOption. Note that the method will return the new
* saved instance from Hibernate
*
* @param orderMultishipOption the OrderMultishipOption to save
* @return the saved instance from Hibernate
*/
public OrderMultishipOption save(final OrderMultishipOption orderMultishipOption);
/**
* Returns all associated OrderMultishipOptions to the given order
*
* @param orderId the order's id to find OrderMultishipOptions for
* @return the associated OrderMultishipOptions
*/
public List<OrderMultishipOption> readOrderMultishipOptions(Long orderId);
/**
* Returns all associated OrderMultishipOptions to the given OrderItem
*
* @param orderItemId the order item's id to find OrderMultishipOptions for
* @return the associated OrderMultishipOptions
*/
public List<OrderMultishipOption> readOrderItemOrderMultishipOptions(Long orderItemId);
/**
* Creates a new OrderMultishipOption instance.
*
* The default Broadleaf implementation uses the EntityConfiguration to create
* the appropriate implementation class based on the current configuration
*
* @return the OrderMultishipOption that was just created
*/
public OrderMultishipOption create();
/**
* Removes all of the OrderMultishipOptions in the list permanently
*
* @param options the options to delete
*/
public void deleteAll(List<OrderMultishipOption> options);
}
| 0true
|
core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_order_dao_OrderMultishipOptionDao.java
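A sketch of how a service layer might drive this DAO. The surrounding class, the injected field, and the domain-class import path are assumptions; the DAO calls themselves match the interface above.
import java.util.List;
import org.broadleafcommerce.core.order.dao.OrderMultishipOptionDao;
import org.broadleafcommerce.core.order.domain.OrderMultishipOption;
class MultishipOptionServiceSketch {
    private OrderMultishipOptionDao orderMultishipOptionDao; // assumed to be injected
    // Replace whatever options an order currently has with a single fresh one.
    OrderMultishipOption resetOptions(Long orderId) {
        List<OrderMultishipOption> existing =
                orderMultishipOptionDao.readOrderMultishipOptions(orderId);
        orderMultishipOptionDao.deleteAll(existing);
        OrderMultishipOption option = orderMultishipOptionDao.create();
        // ... populate the option (order, order item, address) before saving ...
        return orderMultishipOptionDao.save(option);
    }
}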
|
1,474 |
public class RoutingException extends ElasticsearchException {
public RoutingException(String message) {
super(message);
}
public RoutingException(String message, Throwable cause) {
super(message, cause);
}
}
| 0true
|
src_main_java_org_elasticsearch_cluster_routing_RoutingException.java
|
755 |
public class CheckoutSeed implements CheckoutResponse {
private Map<PaymentInfo, Referenced> infos;
private Order order;
private PaymentResponse paymentResponse = new PaymentResponseImpl();
private Map<String, Object> userDefinedFields;
public CheckoutSeed(Order order, Map<PaymentInfo, Referenced> infos, Map<String, Object> userDefinedFields) {
this.order = order;
this.infos = infos;
this.userDefinedFields = userDefinedFields;
}
public Map<PaymentInfo, Referenced> getInfos() {
return infos;
}
public Order getOrder() {
return order;
}
public void setOrder(Order order) {
this.order = order;
}
public PaymentResponse getPaymentResponse() {
return paymentResponse;
}
public Map<String, Object> getUserDefinedFields() {
return userDefinedFields;
}
}
| 0true
|
core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_checkout_service_workflow_CheckoutSeed.java
|
532 |
public interface OFetchContext {
public void onBeforeFetch(final ORecordSchemaAware<?> iRootRecord) throws OFetchException;
public void onAfterFetch(final ORecordSchemaAware<?> iRootRecord) throws OFetchException;
public void onBeforeArray(final ORecordSchemaAware<?> iRootRecord, final String iFieldName, final Object iUserObject,
final OIdentifiable[] iArray) throws OFetchException;
public void onAfterArray(final ORecordSchemaAware<?> iRootRecord, final String iFieldName, final Object iUserObject)
throws OFetchException;
public void onBeforeCollection(final ORecordSchemaAware<?> iRootRecord, final String iFieldName, final Object iUserObject,
final Collection<?> iCollection) throws OFetchException;
public void onAfterCollection(final ORecordSchemaAware<?> iRootRecord, final String iFieldName, final Object iUserObject)
throws OFetchException;
public void onBeforeMap(final ORecordSchemaAware<?> iRootRecord, final String iFieldName, final Object iUserObject)
throws OFetchException;
public void onAfterMap(final ORecordSchemaAware<?> iRootRecord, final String iFieldName, final Object iUserObject)
throws OFetchException;
public void onBeforeDocument(final ORecordSchemaAware<?> iRecord, final ORecordSchemaAware<?> iDocument, final String iFieldName,
final Object iUserObject) throws OFetchException;
public void onAfterDocument(final ORecordSchemaAware<?> iRootRecord, final ORecordSchemaAware<?> iDocument,
final String iFieldName, final Object iUserObject) throws OFetchException;
public void onBeforeStandardField(final Object iFieldValue, final String iFieldName, final Object iUserObject);
public void onAfterStandardField(final Object iFieldValue, final String iFieldName, final Object iUserObject);
public boolean fetchEmbeddedDocuments();
}
| 0true
|
core_src_main_java_com_orientechnologies_orient_core_fetch_OFetchContext.java
|
963 |
public class OGZIPCompression implements OCompression {
public static final String NAME = "gzip";
public static final OGZIPCompression INSTANCE = new OGZIPCompression();
@Override
public byte[] compress(final byte[] content) {
try {
final byte[] result;
final OMemoryStream memoryOutputStream = new OMemoryStream();
final GZIPOutputStream gzipOutputStream = new GZIPOutputStream(memoryOutputStream, 16384); // 16KB
try {
gzipOutputStream.write(content);
gzipOutputStream.finish();
result = memoryOutputStream.toByteArray();
} finally {
gzipOutputStream.close();
}
return result;
} catch (IOException ioe) {
throw new IllegalStateException("Exception during data compression.", ioe);
}
}
@Override
public byte[] uncompress(byte[] content) {
try {
final OMemoryInputStream memoryInputStream = new OMemoryInputStream(content);
final GZIPInputStream gzipInputStream = new GZIPInputStream(memoryInputStream, 16384); // 16KB
try {
final byte[] buffer = new byte[1024];
byte[] result = new byte[1024];
int bytesRead;
int len = 0;
while ((bytesRead = gzipInputStream.read(buffer, 0, buffer.length)) > -1) {
if (len + bytesRead > result.length) {
int newSize = 2 * result.length;
if (newSize < len + bytesRead)
newSize = Integer.MAX_VALUE;
final byte[] oldResult = result;
result = new byte[newSize];
System.arraycopy(oldResult, 0, result, 0, oldResult.length);
}
System.arraycopy(buffer, 0, result, len, bytesRead);
len += bytesRead;
}
return Arrays.copyOf(result, len);
} finally {
gzipInputStream.close();
}
} catch (IOException ioe) {
throw new IllegalStateException("Exception during data uncompression.", ioe);
}
}
@Override
public String name() {
return NAME;
}
}
| 0true
|
core_src_main_java_com_orientechnologies_orient_core_serialization_compression_impl_OGZIPCompression.java
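A round-trip sketch using the singleton above; the payload string is arbitrary.
import java.nio.charset.Charset;
import java.util.Arrays;
import com.orientechnologies.orient.core.serialization.compression.impl.OGZIPCompression;
class GzipRoundTripSketch {
    public static void main(String[] args) {
        byte[] original = "OrientDB gzip round trip".getBytes(Charset.forName("UTF-8"));
        byte[] packed = OGZIPCompression.INSTANCE.compress(original);
        byte[] unpacked = OGZIPCompression.INSTANCE.uncompress(packed);
        System.out.println(Arrays.equals(original, unpacked)); // expected: true
    }
}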
|
189 |
static final class Identifier extends Run {
/*
* @see org.eclipse.jdt.internal.ui.text.JavaBreakIterator.Run#isValid(char)
*/
@Override
protected boolean isValid(char ch) {
return Character.isJavaIdentifierPart(ch);
}
}
| 0true
|
plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_editor_CeylonBreakIterator.java
|
626 |
public class BroadleafRedirectController {
public String redirect(HttpServletRequest request, HttpServletResponse response, Model model) {
String path = (String) request.getSession().getAttribute("BLC_REDIRECT_URL");
if (path == null) {
path = request.getContextPath();
}
return "ajaxredirect:" + path;
}
}
| 1no label
|
common_src_main_java_org_broadleafcommerce_common_web_controller_BroadleafRedirectController.java
|
338 |
runnables.add(new Runnable() {
public void run() {
fViewer.refresh(root, true);
// trigger a synthetic selection change so that actions refresh their
// enabled state.
fViewer.setSelection(fViewer.getSelection());
}
});
| 0true
|
plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_explorer_PackageExplorerContentProvider.java
|
2,024 |
public abstract class DefaultBindingTargetVisitor<T, V> implements BindingTargetVisitor<T, V> {
/**
* Default visit implementation. Returns {@code null}.
*/
protected V visitOther(Binding<? extends T> binding) {
return null;
}
public V visit(InstanceBinding<? extends T> instanceBinding) {
return visitOther(instanceBinding);
}
public V visit(ProviderInstanceBinding<? extends T> providerInstanceBinding) {
return visitOther(providerInstanceBinding);
}
public V visit(ProviderKeyBinding<? extends T> providerKeyBinding) {
return visitOther(providerKeyBinding);
}
public V visit(LinkedKeyBinding<? extends T> linkedKeyBinding) {
return visitOther(linkedKeyBinding);
}
public V visit(ExposedBinding<? extends T> exposedBinding) {
return visitOther(exposedBinding);
}
public V visit(UntargettedBinding<? extends T> untargettedBinding) {
return visitOther(untargettedBinding);
}
public V visit(ConstructorBinding<? extends T> constructorBinding) {
return visitOther(constructorBinding);
}
public V visit(ConvertedConstantBinding<? extends T> convertedConstantBinding) {
return visitOther(convertedConstantBinding);
}
// javac says it's an error to cast ProviderBinding<? extends T> to Binding<? extends T>
@SuppressWarnings("unchecked")
public V visit(ProviderBinding<? extends T> providerBinding) {
return visitOther((Binding<? extends T>) providerBinding);
}
}
| 0true
|
src_main_java_org_elasticsearch_common_inject_spi_DefaultBindingTargetVisitor.java
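A small subclass sketch: override only the cases of interest and let visitOther() supply the fallback. The visitor name is made up; it simply reports whether a binding targets a pre-built instance, and would typically be applied via binding.acceptTargetVisitor(...).
import org.elasticsearch.common.inject.Binding;
import org.elasticsearch.common.inject.spi.DefaultBindingTargetVisitor;
import org.elasticsearch.common.inject.spi.InstanceBinding;
class IsInstanceBindingVisitor<T> extends DefaultBindingTargetVisitor<T, Boolean> {
    @Override
    public Boolean visit(InstanceBinding<? extends T> instanceBinding) {
        return true;
    }
    @Override
    protected Boolean visitOther(Binding<? extends T> binding) {
        return false; // every non-instance binding falls through to here
    }
}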
|
217 |
READ_UNCOMMITTED {
@Override
void configure(TransactionConfig cfg) {
cfg.setReadUncommitted(true);
}
}, READ_COMMITTED {
| 0true
|
titan-berkeleyje_src_main_java_com_thinkaurelius_titan_diskstorage_berkeleyje_BerkeleyJEStoreManager.java
|
63 |
public class DescribeInstances {
private final EC2RequestSigner rs;
private final AwsConfig awsConfig;
public DescribeInstances(AwsConfig awsConfig) {
if (awsConfig == null) {
throw new IllegalArgumentException("AwsConfig is required!");
}
if (awsConfig.getAccessKey() == null) {
throw new IllegalArgumentException("AWS access key is required!");
}
rs = new EC2RequestSigner(awsConfig.getSecretKey());
attributes.put("Action", this.getClass().getSimpleName());
attributes.put("Version", DOC_VERSION);
attributes.put("SignatureVersion", SIGNATURE_VERSION);
attributes.put("SignatureMethod", SIGNATURE_METHOD);
attributes.put("AWSAccessKeyId", awsConfig.getAccessKey());
attributes.put("Timestamp", getFormattedTimestamp());
this.awsConfig = awsConfig;
}
/**
* Formats date as ISO 8601 timestamp
*/
private String getFormattedTimestamp() {
SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'");
df.setTimeZone(TimeZone.getTimeZone("UTC"));
return df.format(new Date());
}
private Map<String, String> attributes = new HashMap<String, String>();
public String getQueryString() {
return CloudyUtility.getQueryString(attributes);
}
public Map<String, String> getAttributes() {
return attributes;
}
public void putSignature(String value) {
attributes.put("Signature", value);
}
public <T> T execute(String endpoint) throws Exception {
rs.sign(this, endpoint);
Object result = callService(endpoint);
return (T) result;
}
public Object callService(String endpoint) throws Exception {
String query = getQueryString();
URL url = new URL("https", endpoint, -1, "/" + query);
HttpURLConnection httpConnection = (HttpURLConnection) (url.openConnection());
httpConnection.setRequestMethod(GET);
httpConnection.setDoOutput(true);
httpConnection.connect();
Object response = unmarshalTheResponse(httpConnection.getInputStream(), awsConfig);
return response;
}
}
| 0true
|
hazelcast-cloud_src_main_java_com_hazelcast_aws_impl_DescribeInstances.java
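A minimal call sketch, assuming a populated AwsConfig. The endpoint string is illustrative, and the Map element types are an assumption about how the response is unmarshalled.
import java.util.Map;
import com.hazelcast.aws.impl.DescribeInstances;
import com.hazelcast.config.AwsConfig;
class DescribeInstancesSketch {
    static Map<String, String> listInstances(AwsConfig awsConfig) throws Exception {
        DescribeInstances request = new DescribeInstances(awsConfig);
        // Region endpoint is a placeholder; pick the one matching the configured region.
        return request.execute("ec2.us-east-1.amazonaws.com");
    }
}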
|
1,815 |
constructors[MAP_STATS] = new ConstructorFunction<Integer, IdentifiedDataSerializable>() {
public IdentifiedDataSerializable createNew(Integer arg) {
return new LocalMapStatsImpl();
}
};
| 0true
|
hazelcast_src_main_java_com_hazelcast_map_MapDataSerializerHook.java
|
2,855 |
@SuppressWarnings("deprecation")
public class EdgeNGramTokenizerFactory extends AbstractTokenizerFactory {
private final int minGram;
private final int maxGram;
private final Lucene43EdgeNGramTokenizer.Side side;
private final CharMatcher matcher;
protected org.elasticsearch.Version esVersion;
@Inject
public EdgeNGramTokenizerFactory(Index index, @IndexSettings Settings indexSettings, @Assisted String name, @Assisted Settings settings) {
super(index, indexSettings, name, settings);
this.minGram = settings.getAsInt("min_gram", NGramTokenizer.DEFAULT_MIN_NGRAM_SIZE);
this.maxGram = settings.getAsInt("max_gram", NGramTokenizer.DEFAULT_MAX_NGRAM_SIZE);
this.side = Lucene43EdgeNGramTokenizer.Side.getSide(settings.get("side", Lucene43EdgeNGramTokenizer.DEFAULT_SIDE.getLabel()));
this.matcher = parseTokenChars(settings.getAsArray("token_chars"));
this.esVersion = indexSettings.getAsVersion(IndexMetaData.SETTING_VERSION_CREATED, org.elasticsearch.Version.CURRENT);
}
@Override
public Tokenizer create(Reader reader) {
if (version.onOrAfter(Version.LUCENE_43) && esVersion.onOrAfter(org.elasticsearch.Version.V_0_90_2)) {
/*
* We added this in 0.90.2 but 0.90.1 used LUCENE_43 already so we can not rely on the lucene version.
* Yet if somebody uses 0.90.2 or higher with a prev. lucene version we should also use the deprecated version.
*/
if (side == Lucene43EdgeNGramTokenizer.Side.BACK) {
throw new ElasticsearchIllegalArgumentException("side=back is not supported anymore. Please fix your analysis chain or use"
+ " an older compatibility version (<=4.2) but beware that it might cause highlighting bugs."
+ " To obtain the same behavior as the previous version please use \"edgeNGram\" filter which still supports side=back"
+ " in combination with a \"keyword\" tokenizer");
}
final Version version = this.version == Version.LUCENE_43 ? Version.LUCENE_44 : this.version; // always use 4.4 or higher
if (matcher == null) {
return new EdgeNGramTokenizer(version, reader, minGram, maxGram);
} else {
return new EdgeNGramTokenizer(version, reader, minGram, maxGram) {
@Override
protected boolean isTokenChar(int chr) {
return matcher.isTokenChar(chr);
}
};
}
} else {
return new Lucene43EdgeNGramTokenizer(version, reader, side, minGram, maxGram);
}
}
}
| 0true
|
src_main_java_org_elasticsearch_index_analysis_EdgeNGramTokenizerFactory.java
|
769 |
@Test
public class ClassIteratorTest {
private static final boolean RECREATE_DATABASE = true;
private static ODatabaseDocumentTx db = null;
private Set<String> names;
@BeforeMethod
public void setUp() throws Exception {
initializeDatabase();
// Insert some data
names = new HashSet<String>();
names.add("Adam");
names.add("Bob");
names.add("Calvin");
names.add("Daniel");
for (String name : names){
createPerson(name);
}
}
@AfterClass
public void tearDown() throws Exception {
if (!db.isClosed())
db.close();
}
@Test
public void testIteratorShouldReuseRecordWithoutNPE() {
// Use class iterator.
// browseClass() returns all documents in RecordID order
// (including subclasses, which shouldn't exist for Person)
final ORecordIteratorClass<ODocument> personIter = db.browseClass("Person");
// Setting this to true causes the bug. Setting to false it works fine.
personIter.setReuseSameRecord(true);
int docNum = 0;
// Explicit iterator loop.
while (personIter.hasNext()) {
final ODocument personDoc = personIter.next();
Assert.assertTrue(names.contains(personDoc.field("First")));
Assert.assertTrue(names.remove(personDoc.field("First")));
System.out.printf("Doc %d: %s\n", docNum++, personDoc.toString());
}
Assert.assertTrue(names.isEmpty());
}
@Test
public void testIteratorShouldReuseRecordWithoutNPEUsingForEach() throws Exception {
// Use class iterator.
// browseClass() returns all documents in RecordID order
// (including subclasses, which shouldn't exist for Person)
final ORecordIteratorClass<ODocument> personIter = db.browseClass("Person");
// Setting this to true causes the bug. Setting to false it works fine.
personIter.setReuseSameRecord(true);
// Shorthand iterator loop.
int docNum = 0;
for (final ODocument personDoc : personIter){
Assert.assertTrue(names.contains(personDoc.field("First")));
Assert.assertTrue(names.remove(personDoc.field("First")));
System.out.printf("Doc %d: %s\n", docNum++, personDoc.toString());
}
Assert.assertTrue(names.isEmpty());
}
private static void initializeDatabase() {
db = new ODatabaseDocumentTx("memory:temp");
if (db.exists() && RECREATE_DATABASE) {
db.open("admin", "admin");
db.drop();
System.out.println("Dropped database.");
}
if (!db.exists()) {
db.create();
System.out.println("Created database.");
final OSchema schema = db.getMetadata().getSchema();
// Create Person class
final OClass personClass = schema.createClass("Person");
personClass.createProperty("First", OType.STRING).setMandatory(true).setNotNull(true).setMin(
"1");
System.out.println("Created schema.");
} else {
db.open("admin", "admin");
}
}
private static void createPerson(final String first) {
// Create Person document
final ODocument personDoc = db.newInstance("Person");
personDoc.field("First", first);
personDoc.save();
}
}
| 0true
|
core_src_test_java_com_orientechnologies_orient_core_iterator_ClassIteratorTest.java
|
453 |
public static class AdminPresentationCollection {
public static final String FRIENDLYNAME = "friendlyName";
public static final String SECURITYLEVEL = "securityLevel";
public static final String EXCLUDED = "excluded";
public static final String READONLY = "readOnly";
public static final String USESERVERSIDEINSPECTIONCACHE = "useServerSideInspectionCache";
public static final String ADDTYPE = "addType";
public static final String MANYTOFIELD = "manyToField";
public static final String ORDER = "order";
public static final String TAB = "tab";
public static final String TABORDER = "tabOrder";
public static final String CUSTOMCRITERIA = "customCriteria";
public static final String OPERATIONTYPES = "operationTypes";
public static final String SHOWIFPROPERTY = "showIfProperty";
public static final String CURRENCYCODEFIELD = "currencyCodeField";
}
| 0true
|
common_src_main_java_org_broadleafcommerce_common_presentation_override_PropertyType.java
|
1,385 |
public final class HazelcastTimestamper {
private HazelcastTimestamper(){}
public static long nextTimestamp(HazelcastInstance instance) {
return instance.getCluster().getClusterTime(); // System time in ms.
}
public static int getTimeout(HazelcastInstance instance, String regionName) {
try {
final MapConfig cfg = instance.getConfig().findMapConfig(regionName);
if (cfg.getTimeToLiveSeconds() > 0) {
return cfg.getTimeToLiveSeconds() * 1000; // TTL in ms.
}
} catch (UnsupportedOperationException ignored) {
// HazelcastInstance is instance of HazelcastClient.
}
return CacheEnvironment.getDefaultCacheTimeoutInMillis();
}
public static long getMaxOperationTimeout(HazelcastInstance instance) {
String maxOpTimeoutProp = null;
try {
Config config = instance.getConfig();
maxOpTimeoutProp = config.getProperty(GroupProperties.PROP_OPERATION_CALL_TIMEOUT_MILLIS);
} catch (UnsupportedOperationException ignored) {
// HazelcastInstance is instance of HazelcastClient.
}
if (maxOpTimeoutProp != null) {
return Long.parseLong(maxOpTimeoutProp);
}
return Long.MAX_VALUE;
}
}
| 0true
|
hazelcast-hibernate_hazelcast-hibernate3_src_main_java_com_hazelcast_hibernate_HazelcastTimestamper.java
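A usage sketch, assuming a locally started HazelcastInstance; the region name is a placeholder.
import com.hazelcast.core.Hazelcast;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.hibernate.HazelcastTimestamper;
class TimestamperSketch {
    public static void main(String[] args) {
        HazelcastInstance instance = Hazelcast.newHazelcastInstance();
        long ts = HazelcastTimestamper.nextTimestamp(instance);                          // cluster time in ms
        int timeoutMs = HazelcastTimestamper.getTimeout(instance, "com.example.Person"); // region name is illustrative
        long maxOpTimeout = HazelcastTimestamper.getMaxOperationTimeout(instance);
        System.out.println(ts + " / " + timeoutMs + " / " + maxOpTimeout);
        instance.getLifecycleService().shutdown();
    }
}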
|
2,485 |
public static class DelegatingMapParams extends MapParams {
private final Params delegate;
public DelegatingMapParams(Map<String, String> params, Params delegate) {
super(params);
this.delegate = delegate;
}
@Override
public String param(String key) {
return super.param(key, delegate.param(key));
}
@Override
public String param(String key, String defaultValue) {
return super.param(key, delegate.param(key, defaultValue));
}
@Override
public boolean paramAsBoolean(String key, boolean defaultValue) {
return super.paramAsBoolean(key, delegate.paramAsBoolean(key, defaultValue));
}
@Override
public Boolean paramAsBoolean(String key, Boolean defaultValue) {
return super.paramAsBoolean(key, delegate.paramAsBoolean(key, defaultValue));
}
@Override @Deprecated
public Boolean paramAsBooleanOptional(String key, Boolean defaultValue) {
return super.paramAsBooleanOptional(key, delegate.paramAsBooleanOptional(key, defaultValue));
}
}
| 0true
|
src_main_java_org_elasticsearch_common_xcontent_ToXContent.java
|
172 |
public abstract class OSoftThread extends Thread implements OService {
private volatile boolean shutdownFlag;
public OSoftThread() {
}
public OSoftThread(final ThreadGroup iThreadGroup) {
super(iThreadGroup, OSoftThread.class.getSimpleName());
setDaemon(true);
}
public OSoftThread(final String name) {
super(name);
setDaemon(true);
}
public OSoftThread(final ThreadGroup group, final String name) {
super(group, name);
setDaemon(true);
}
protected abstract void execute() throws Exception;
public void startup() {
}
public void shutdown() {
}
public void sendShutdown() {
shutdownFlag = true;
}
@Override
public void run() {
startup();
while (!shutdownFlag && !isInterrupted()) {
try {
beforeExecution();
execute();
afterExecution();
} catch (Throwable t) {
t.printStackTrace();
}
}
shutdown();
}
/**
* Pauses current thread until iTime timeout or a wake up by another thread.
*
* @param iTime
* @return true if the timeout has been reached, otherwise false. False is the case of a wake-up by another thread.
*/
public static boolean pauseCurrentThread(long iTime) {
try {
if (iTime <= 0)
iTime = Long.MAX_VALUE;
Thread.sleep(iTime);
return true;
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
return false;
}
}
protected void beforeExecution() throws InterruptedException {
return;
}
protected void afterExecution() throws InterruptedException {
return;
}
}
| 0true
|
commons_src_main_java_com_orientechnologies_common_thread_OSoftThread.java
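A minimal concrete subclass sketch; the class name, the work done per iteration, and the pause interval are illustrative. A caller starts it with start() and stops it later with sendShutdown().
import com.orientechnologies.common.thread.OSoftThread;
class DemoPollerThread extends OSoftThread {
    DemoPollerThread() {
        super("DemoPollerThread");
    }
    @Override
    protected void execute() throws Exception {
        // One unit of work per loop iteration, then back off briefly.
        System.out.println("polling...");
        pauseCurrentThread(1000);
    }
}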
|
322 |
public class IllegalResourceException extends RuntimeException
{
public IllegalResourceException()
{
super();
}
public IllegalResourceException( String message )
{
super( message );
}
public IllegalResourceException( String message, Throwable cause )
{
super( message, cause );
}
public IllegalResourceException( Throwable cause )
{
super( cause );
}
}
| 0true
|
community_kernel_src_main_java_org_neo4j_kernel_impl_transaction_IllegalResourceException.java
|
615 |
public class MulticastJoiner extends AbstractJoiner {
private final AtomicInteger currentTryCount = new AtomicInteger(0);
private final AtomicInteger maxTryCount;
public MulticastJoiner(Node node) {
super(node);
maxTryCount = new AtomicInteger(calculateTryCount());
}
@Override
public void doJoin(AtomicBoolean joined) {
int tryCount = 0;
long joinStartTime = Clock.currentTimeMillis();
long maxJoinMillis = node.getGroupProperties().MAX_JOIN_SECONDS.getInteger() * 1000;
while (node.isActive() && !joined.get() && (Clock.currentTimeMillis() - joinStartTime < maxJoinMillis)) {
Address masterAddressNow = getTargetAddress();
if (masterAddressNow == null) {
masterAddressNow = findMasterWithMulticast();
}
node.setMasterAddress(masterAddressNow);
String msg = "Joining to master node: " + node.getMasterAddress();
logger.finest(msg);
systemLogService.logJoin(msg);
if (node.getMasterAddress() == null || node.getThisAddress().equals(node.getMasterAddress())) {
TcpIpConfig tcpIpConfig = config.getNetworkConfig().getJoin().getTcpIpConfig();
if (tcpIpConfig != null && tcpIpConfig.isEnabled()) {
doTCP(joined);
} else {
node.setAsMaster();
}
return;
}
if (++tryCount > 49) {
failedJoiningToMaster(true, tryCount);
}
if (!node.getMasterAddress().equals(node.getThisAddress())) {
connectAndSendJoinRequest(node.getMasterAddress());
} else {
node.setMasterAddress(null);
tryCount = 0;
}
try {
//noinspection BusyWait
Thread.sleep(500L);
} catch (InterruptedException ignored) {
}
}
}
private void doTCP(AtomicBoolean joined) {
node.setMasterAddress(null);
logger.finest("Multicast couldn't find cluster. Trying TCP/IP");
new TcpIpJoiner(node).join(joined);
}
@Override
public void searchForOtherClusters() {
final BlockingQueue<JoinMessage> q = new LinkedBlockingQueue<JoinMessage>();
MulticastListener listener = new MulticastListener() {
public void onMessage(Object msg) {
systemLogService.logJoin("MulticastListener onMessage " + msg);
if (msg != null && msg instanceof JoinMessage) {
JoinMessage joinRequest = (JoinMessage) msg;
if (node.getThisAddress() != null && !node.getThisAddress().equals(joinRequest.getAddress())) {
q.add(joinRequest);
}
}
}
};
node.multicastService.addMulticastListener(listener);
node.multicastService.send(node.createJoinRequest());
systemLogService.logJoin("Sent multicast join request");
try {
JoinMessage joinInfo = q.poll(3, TimeUnit.SECONDS);
if (joinInfo != null) {
if (joinInfo.getMemberCount() == 1) {
// if the other cluster has just a single member, it may be a newly starting node
// instead of a split node.
// Wait 2 times 'WAIT_SECONDS_BEFORE_JOIN' seconds before processing merge JoinRequest.
Thread.sleep(node.groupProperties.WAIT_SECONDS_BEFORE_JOIN.getInteger() * 1000L * 2);
}
if (shouldMerge(joinInfo)) {
logger.warning(node.getThisAddress() + " is merging [multicast] to " + joinInfo.getAddress());
startClusterMerge(joinInfo.getAddress());
}
}
} catch (InterruptedException ignored) {
} catch (Exception e) {
if (logger != null) {
logger.warning(e);
}
} finally {
node.multicastService.removeMulticastListener(listener);
}
}
@Override
public String getType() {
return "multicast";
}
private boolean connectAndSendJoinRequest(Address masterAddress) {
if (masterAddress == null || masterAddress.equals(node.getThisAddress())) {
throw new IllegalArgumentException();
}
Connection conn = node.connectionManager.getOrConnect(masterAddress);
if (logger.isFinestEnabled()) {
logger.finest("Master connection " + conn);
}
systemLogService.logJoin("Master connection " + conn);
if (conn != null) {
return node.clusterService.sendJoinRequest(masterAddress, true);
} else {
if (logger.isFinestEnabled()) {
logger.finest("Connecting to master node: " + masterAddress);
}
return false;
}
}
private static final int publishInterval = 100;
private Address findMasterWithMulticast() {
try {
JoinRequest joinRequest = node.createJoinRequest();
while (node.isActive() && currentTryCount.incrementAndGet() <= maxTryCount.get()) {
joinRequest.setTryCount(currentTryCount.get());
node.multicastService.send(joinRequest);
if (node.getMasterAddress() == null) {
//noinspection BusyWait
Thread.sleep(publishInterval);
} else {
return node.getMasterAddress();
}
}
} catch (final Exception e) {
if (logger != null) {
logger.warning(e);
}
} finally {
currentTryCount.set(0);
}
return null;
}
private int calculateTryCount() {
final NetworkConfig networkConfig = config.getNetworkConfig();
int timeoutSeconds = networkConfig.getJoin().getMulticastConfig().getMulticastTimeoutSeconds();
int tryCountCoefficient = 1000 / publishInterval;
int tryCount = timeoutSeconds * tryCountCoefficient;
String host = node.getThisAddress().getHost();
int lastDigits;
try {
lastDigits = Integer.parseInt(host.substring(host.lastIndexOf('.') + 1));
} catch (NumberFormatException e) {
lastDigits = RandomPicker.getInt(512);
}
lastDigits = lastDigits % 100;
int portDiff = node.getThisAddress().getPort() - networkConfig.getPort();
tryCount += lastDigits + portDiff * timeoutSeconds * 3;
return tryCount;
}
public void onReceivedJoinRequest(JoinRequest joinRequest) {
if (joinRequest.getUuid().compareTo(node.localMember.getUuid()) < 0) {
maxTryCount.incrementAndGet();
}
}
}
| 1no label
|
hazelcast_src_main_java_com_hazelcast_cluster_MulticastJoiner.java
|
1,161 |
public interface ICondition extends Condition {
/**
* {@inheritDoc}
*/
void await() throws InterruptedException;
/**
* {@inheritDoc}
*/
void awaitUninterruptibly();
/**
* {@inheritDoc}
*/
long awaitNanos(long nanosTimeout) throws InterruptedException;
/**
* {@inheritDoc}
*/
boolean await(long time, TimeUnit unit) throws InterruptedException;
/**
* {@inheritDoc}
*/
boolean awaitUntil(Date deadline) throws InterruptedException;
/**
* {@inheritDoc}
*/
void signal();
/**
* {@inheritDoc}
*/
void signalAll();
}
| 0true
|
hazelcast_src_main_java_com_hazelcast_core_ICondition.java
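A usage sketch mirroring the java.util.concurrent Condition idiom, assuming an ILock obtained from a running Hazelcast instance; the lock and condition names are placeholders.
import java.util.concurrent.TimeUnit;
import com.hazelcast.core.Hazelcast;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.core.ICondition;
import com.hazelcast.core.ILock;
class ConditionSketch {
    public static void main(String[] args) throws InterruptedException {
        HazelcastInstance hz = Hazelcast.newHazelcastInstance();
        ILock lock = hz.getLock("demo-lock");
        ICondition notEmpty = lock.newCondition("not-empty");
        lock.lock();
        try {
            // Wait up to 10 seconds for another member to call signal()/signalAll().
            boolean signalled = notEmpty.await(10, TimeUnit.SECONDS);
            System.out.println("signalled=" + signalled);
        } finally {
            lock.unlock();
        }
        hz.getLifecycleService().shutdown();
    }
}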
|
1,093 |
public abstract class OSQLFunctionAbstract implements OSQLFunction {
protected String name;
protected int minParams;
protected int maxParams;
public OSQLFunctionAbstract(final String iName, final int iMinParams, final int iMaxParams) {
this.name = iName;
this.minParams = iMinParams;
this.maxParams = iMaxParams;
}
@Override
public String getName() {
return name;
}
@Override
public int getMinParams() {
return minParams;
}
@Override
public int getMaxParams() {
return maxParams;
}
@Override
public String toString() {
return name + "()";
}
@Override
public void config(final Object[] iConfiguredParameters) {
}
@Override
public boolean aggregateResults() {
return false;
}
@Override
public boolean filterResult() {
return false;
}
@Override
public Object getResult() {
return null;
}
@Override
public void setResult(final Object iResult) {
}
@Override
public boolean shouldMergeDistributedResult() {
return false;
}
@Override
public Object mergeDistributedResult(List<Object> resultsToMerge) {
throw new IllegalStateException("By default SQL function execution result can not be merged");
}
protected boolean returnDistributedResult() {
return ODatabaseRecordThreadLocal.INSTANCE.get().getStorage() instanceof OAutoshardedStorage;
}
protected long getDistributedStorageId() {
return ((OAutoshardedStorage) ODatabaseRecordThreadLocal.INSTANCE.get().getStorage()).getStorageId();
}
}
| 0true
|
core_src_main_java_com_orientechnologies_orient_core_sql_functions_OSQLFunctionAbstract.java
|
3,421 |
public class ProxyServiceImpl
implements ProxyService, PostJoinAwareService, EventPublishingService<DistributedObjectEventPacket, Object> {
static final String SERVICE_NAME = "hz:core:proxyService";
private final NodeEngineImpl nodeEngine;
private final ConcurrentMap<String, ProxyRegistry> registries = new ConcurrentHashMap<String, ProxyRegistry>();
private final ConcurrentMap<String, DistributedObjectListener> listeners = new ConcurrentHashMap<String, DistributedObjectListener>();
private final ILogger logger;
ProxyServiceImpl(NodeEngineImpl nodeEngine) {
this.nodeEngine = nodeEngine;
this.logger = nodeEngine.getLogger(ProxyService.class.getName());
}
void init() {
nodeEngine.getEventService().registerListener(SERVICE_NAME, SERVICE_NAME, new Object());
}
private final ConstructorFunction<String, ProxyRegistry> registryConstructor = new ConstructorFunction<String, ProxyRegistry>() {
public ProxyRegistry createNew(String serviceName) {
return new ProxyRegistry(serviceName);
}
};
@Override
public int getProxyCount() {
int count = 0;
for (ProxyRegistry registry : registries.values()) {
count += registry.getProxyCount();
}
return count;
}
@Override
public void initializeDistributedObject(String serviceName, String name) {
if (serviceName == null) {
throw new NullPointerException("Service name is required!");
}
if (name == null) {
throw new NullPointerException("Object name is required!");
}
ProxyRegistry registry = getOrPutIfAbsent(registries, serviceName, registryConstructor);
registry.createProxy(name, true, true);
}
@Override
public DistributedObject getDistributedObject(String serviceName, String name) {
if (serviceName == null) {
throw new NullPointerException("Service name is required!");
}
if (name == null) {
throw new NullPointerException("Object name is required!");
}
ProxyRegistry registry = getOrPutIfAbsent(registries, serviceName, registryConstructor);
return registry.getOrCreateProxy(name, true, true);
}
@Override
public void destroyDistributedObject(String serviceName, String name) {
if (serviceName == null) {
throw new NullPointerException("Service name is required!");
}
if (name == null) {
throw new NullPointerException("Object name is required!");
}
Collection<MemberImpl> members = nodeEngine.getClusterService().getMemberList();
Collection<Future> calls = new ArrayList<Future>(members.size());
for (MemberImpl member : members) {
if (member.localMember()) {
continue;
}
Future f = nodeEngine.getOperationService()
.createInvocationBuilder(SERVICE_NAME, new DistributedObjectDestroyOperation(serviceName, name),
member.getAddress()).setTryCount(10).invoke();
calls.add(f);
}
destroyLocalDistributedObject(serviceName, name, true);
for (Future f : calls) {
try {
f.get(3, TimeUnit.SECONDS);
} catch (Exception e) {
logger.finest(e);
}
}
}
private void destroyLocalDistributedObject(String serviceName, String name, boolean fireEvent) {
ProxyRegistry registry = registries.get(serviceName);
if (registry != null) {
registry.destroyProxy(name, fireEvent);
}
final RemoteService service = nodeEngine.getService(serviceName);
if (service != null) {
service.destroyDistributedObject(name);
}
Throwable cause = new DistributedObjectDestroyedException(serviceName, name);
nodeEngine.waitNotifyService.cancelWaitingOps(serviceName, name, cause);
}
@Override
public Collection<DistributedObject> getDistributedObjects(String serviceName) {
if (serviceName == null) {
throw new NullPointerException("Service name is required!");
}
Collection<DistributedObject> objects = new LinkedList<DistributedObject>();
ProxyRegistry registry = registries.get(serviceName);
if (registry != null) {
Collection<DistributedObjectFuture> futures = registry.proxies.values();
for (DistributedObjectFuture future : futures) {
objects.add(future.get());
}
}
return objects;
}
@Override
public Collection<DistributedObject> getAllDistributedObjects() {
Collection<DistributedObject> objects = new LinkedList<DistributedObject>();
for (ProxyRegistry registry : registries.values()) {
Collection<DistributedObjectFuture> futures = registry.proxies.values();
for (DistributedObjectFuture future : futures) {
objects.add(future.get());
}
}
return objects;
}
@Override
public String addProxyListener(DistributedObjectListener distributedObjectListener) {
final String id = UuidUtil.buildRandomUuidString();
listeners.put(id, distributedObjectListener);
return id;
}
@Override
public boolean removeProxyListener(String registrationId) {
return listeners.remove(registrationId) != null;
}
@Override
public void dispatchEvent(final DistributedObjectEventPacket eventPacket, Object ignore) {
final String serviceName = eventPacket.getServiceName();
if (eventPacket.getEventType() == CREATED) {
try {
final ProxyRegistry registry = getOrPutIfAbsent(registries, serviceName, registryConstructor);
if (!registry.contains(eventPacket.getName())) {
registry.createProxy(eventPacket.getName(), false,
true); // listeners will be called if proxy is created here.
}
} catch (HazelcastInstanceNotActiveException ignored) {
}
} else {
final ProxyRegistry registry = registries.get(serviceName);
if (registry != null) {
registry.destroyProxy(eventPacket.getName(), false);
}
}
}
@Override
public Operation getPostJoinOperation() {
Collection<ProxyInfo> proxies = new LinkedList<ProxyInfo>();
for (ProxyRegistry registry : registries.values()) {
for (DistributedObjectFuture future : registry.proxies.values()) {
DistributedObject distributedObject = future.get();
if (distributedObject instanceof InitializingObject) {
proxies.add(new ProxyInfo(registry.serviceName, distributedObject.getName()));
}
}
}
return proxies.isEmpty() ? null : new PostJoinProxyOperation(proxies);
}
private class ProxyRegistry {
final String serviceName;
final RemoteService service;
final ConcurrentMap<String, DistributedObjectFuture> proxies = new ConcurrentHashMap<String, DistributedObjectFuture>();
private ProxyRegistry(String serviceName) {
this.serviceName = serviceName;
this.service = nodeEngine.getService(serviceName);
if (service == null) {
if (nodeEngine.isActive()) {
throw new IllegalArgumentException("Unknown service: " + serviceName);
} else {
throw new HazelcastInstanceNotActiveException();
}
}
}
/**
* Retrieves a DistributedObject proxy or creates it if it's not available
*
* @param name name of the proxy object
* @param publishEvent true if a DistributedObjectEvent should be fired
* @param initialize true if proxy object should be initialized
* @return a DistributedObject instance
*/
DistributedObject getOrCreateProxy(final String name, boolean publishEvent, boolean initialize) {
DistributedObjectFuture proxyFuture = proxies.get(name);
if (proxyFuture == null) {
if (!nodeEngine.isActive()) {
throw new HazelcastInstanceNotActiveException();
}
proxyFuture = createProxy(name, publishEvent, initialize);
if (proxyFuture == null) {
// warning; recursive call! I (@mdogan) do not think this will ever cause a stack overflow..
return getOrCreateProxy(name, publishEvent, initialize);
}
}
return proxyFuture.get();
}
/**
* Creates a DistributedObject proxy if it's not created yet
*
* @param name name of the proxy object
* @param publishEvent true if a DistributedObjectEvent should be fired
* @param initialize true if proxy object should be initialized
* @return a DistributedObject instance if it's created by this method, null otherwise
*/
DistributedObjectFuture createProxy(final String name, boolean publishEvent, boolean initialize) {
if (!proxies.containsKey(name)) {
if (!nodeEngine.isActive()) {
throw new HazelcastInstanceNotActiveException();
}
DistributedObjectFuture proxyFuture = new DistributedObjectFuture();
if (proxies.putIfAbsent(name, proxyFuture) == null) {
DistributedObject proxy = service.createDistributedObject(name);
if (initialize && proxy instanceof InitializingObject) {
try {
((InitializingObject) proxy).initialize();
} catch (Exception e) {
logger.warning("Error while initializing proxy: " + proxy, e);
}
}
nodeEngine.eventService.executeEvent(new ProxyEventProcessor(CREATED, serviceName, proxy));
if (publishEvent) {
publish(new DistributedObjectEventPacket(CREATED, serviceName, name));
}
proxyFuture.set(proxy);
return proxyFuture;
}
}
return null;
}
void destroyProxy(String name, boolean publishEvent) {
final DistributedObjectFuture proxyFuture = proxies.remove(name);
if (proxyFuture != null) {
DistributedObject proxy = proxyFuture.get();
nodeEngine.eventService.executeEvent(new ProxyEventProcessor(DESTROYED, serviceName, proxy));
if (publishEvent) {
publish(new DistributedObjectEventPacket(DESTROYED, serviceName, name));
}
}
}
private void publish(DistributedObjectEventPacket event) {
final EventService eventService = nodeEngine.getEventService();
final Collection<EventRegistration> registrations = eventService.getRegistrations(SERVICE_NAME, SERVICE_NAME);
eventService.publishEvent(SERVICE_NAME, registrations, event, event.getName().hashCode());
}
private boolean contains(String name) {
return proxies.containsKey(name);
}
void destroy() {
for (DistributedObjectFuture future : proxies.values()) {
DistributedObject distributedObject = future.get();
if (distributedObject instanceof AbstractDistributedObject) {
((AbstractDistributedObject) distributedObject).invalidate();
}
}
proxies.clear();
}
public int getProxyCount() {
return proxies.size();
}
}
private static class DistributedObjectFuture {
volatile DistributedObject proxy;
DistributedObject get() {
if (proxy == null) {
boolean interrupted = false;
synchronized (this) {
while (proxy == null) {
try {
wait();
} catch (InterruptedException e) {
interrupted = true;
}
}
}
if (interrupted) {
Thread.currentThread().interrupt();
}
}
return proxy;
}
void set(DistributedObject o) {
if (o == null) {
throw new IllegalArgumentException();
}
synchronized (this) {
proxy = o;
notifyAll();
}
}
}
private class ProxyEventProcessor
implements StripedRunnable {
final EventType type;
final String serviceName;
final DistributedObject object;
private ProxyEventProcessor(EventType eventType, String serviceName, DistributedObject object) {
this.type = eventType;
this.serviceName = serviceName;
this.object = object;
}
@Override
public void run() {
DistributedObjectEvent event = new DistributedObjectEvent(type, serviceName, object);
for (DistributedObjectListener listener : listeners.values()) {
if (EventType.CREATED.equals(type)) {
listener.distributedObjectCreated(event);
} else if (EventType.DESTROYED.equals(type)) {
listener.distributedObjectDestroyed(event);
}
}
}
@Override
public int getKey() {
return object.getId().hashCode();
}
}
public static class DistributedObjectDestroyOperation
extends AbstractOperation {
private String serviceName;
private String name;
public DistributedObjectDestroyOperation() {
}
public DistributedObjectDestroyOperation(String serviceName, String name) {
this.serviceName = serviceName;
this.name = name;
}
@Override
public void run()
throws Exception {
ProxyServiceImpl proxyService = getService();
proxyService.destroyLocalDistributedObject(serviceName, name, false);
}
@Override
public boolean returnsResponse() {
return true;
}
@Override
public Object getResponse() {
return Boolean.TRUE;
}
@Override
protected void writeInternal(ObjectDataOutput out)
throws IOException {
super.writeInternal(out);
out.writeUTF(serviceName);
out.writeObject(name); // writing as object for backward-compatibility
}
@Override
protected void readInternal(ObjectDataInput in)
throws IOException {
super.readInternal(in);
serviceName = in.readUTF();
name = in.readObject();
}
}
public static class PostJoinProxyOperation
extends AbstractOperation {
private Collection<ProxyInfo> proxies;
public PostJoinProxyOperation() {
}
public PostJoinProxyOperation(Collection<ProxyInfo> proxies) {
this.proxies = proxies;
}
@Override
public void run()
throws Exception {
if (proxies != null && proxies.size() > 0) {
NodeEngine nodeEngine = getNodeEngine();
ProxyServiceImpl proxyService = getService();
for (ProxyInfo proxy : proxies) {
final ProxyRegistry registry = getOrPutIfAbsent(proxyService.registries, proxy.serviceName,
proxyService.registryConstructor);
DistributedObjectFuture future = registry.createProxy(proxy.objectName, false, false);
if (future != null) {
final DistributedObject object = future.get();
if (object instanceof InitializingObject) {
nodeEngine.getExecutionService().execute(ExecutionService.SYSTEM_EXECUTOR, new Runnable() {
public void run() {
try {
((InitializingObject) object).initialize();
} catch (Exception e) {
getLogger().warning("Error while initializing proxy: " + object, e);
}
}
});
}
}
}
}
}
@Override
public String getServiceName() {
return ProxyServiceImpl.SERVICE_NAME;
}
@Override
public boolean returnsResponse() {
return false;
}
@Override
protected void writeInternal(ObjectDataOutput out)
throws IOException {
super.writeInternal(out);
int len = proxies != null ? proxies.size() : 0;
out.writeInt(len);
if (len > 0) {
for (ProxyInfo proxy : proxies) {
out.writeUTF(proxy.serviceName);
out.writeObject(proxy.objectName); // writing as object for backward-compatibility
}
}
}
@Override
protected void readInternal(ObjectDataInput in)
throws IOException {
super.readInternal(in);
int len = in.readInt();
if (len > 0) {
proxies = new ArrayList<ProxyInfo>(len);
for (int i = 0; i < len; i++) {
ProxyInfo proxy = new ProxyInfo(in.readUTF(), (String) in.readObject());
proxies.add(proxy);
}
}
}
}
private static class ProxyInfo {
final String serviceName;
final String objectName;
private ProxyInfo(String serviceName, String objectName) {
this.serviceName = serviceName;
this.objectName = objectName;
}
@Override
public String toString() {
final StringBuilder sb = new StringBuilder("ProxyInfo{");
sb.append("serviceName='").append(serviceName).append('\'');
sb.append(", objectName='").append(objectName).append('\'');
sb.append('}');
return sb.toString();
}
}
void shutdown() {
for (ProxyRegistry registry : registries.values()) {
registry.destroy();
}
registries.clear();
listeners.clear();
}
}
| 1no label
|
hazelcast_src_main_java_com_hazelcast_spi_impl_ProxyServiceImpl.java
|
419 |
}, new TxJob() {
@Override
public void run(IndexTransaction tx) {
//do nothing
}
});
| 0true
|
titan-test_src_main_java_com_thinkaurelius_titan_diskstorage_indexing_IndexProviderTest.java
|
11 |
public class StorageSetup {
//############ UTILITIES #############
public static final String getHomeDir(String subdir) {
String homedir = System.getProperty("titan.testdir");
if (null == homedir) {
homedir = "target" + File.separator + "db";
}
if (subdir!=null && !StringUtils.isEmpty(subdir)) homedir += File.separator + subdir;
File homefile = new File(homedir);
if (!homefile.exists()) homefile.mkdirs();
return homedir;
}
public static final String getHomeDir() {
return getHomeDir(null);
}
public static final File getHomeDirFile() {
return getHomeDirFile(null);
}
public static final File getHomeDirFile(String subdir) {
return new File(getHomeDir(subdir));
}
public static final void deleteHomeDir() {
deleteHomeDir(null);
}
public static final void deleteHomeDir(String subdir) {
File homeDirFile = getHomeDirFile(subdir);
// Make directory if it doesn't exist
if (!homeDirFile.exists())
homeDirFile.mkdirs();
boolean success = IOUtils.deleteFromDirectory(homeDirFile);
if (!success) throw new IllegalStateException("Could not remove " + homeDirFile);
}
public static TitanGraph getInMemoryGraph() {
return TitanFactory.open(buildConfiguration().set(STORAGE_BACKEND,"inmemory"));
}
public static WriteConfiguration addPermanentCache(ModifiableConfiguration conf) {
conf.set(DB_CACHE, true);
conf.set(DB_CACHE_TIME, 0L);
return conf.getConfiguration();
}
public static ModifiableConfiguration getConfig(WriteConfiguration config) {
return new ModifiableConfiguration(ROOT_NS,config, BasicConfiguration.Restriction.NONE);
}
public static BasicConfiguration getConfig(ReadConfiguration config) {
return new BasicConfiguration(ROOT_NS,config, BasicConfiguration.Restriction.NONE);
}
}
| 0true
|
titan-test_src_main_java_com_thinkaurelius_titan_StorageSetup.java
|
2,792 |
public class IndexAliasesServiceTests extends ElasticsearchTestCase {
public static IndexAliasesService newIndexAliasesService() {
return new IndexAliasesService(new Index("test"), ImmutableSettings.Builder.EMPTY_SETTINGS, newIndexQueryParserService());
}
public static IndexQueryParserService newIndexQueryParserService() {
Injector injector = new ModulesBuilder().add(
new IndicesQueriesModule(),
new CacheRecyclerModule(ImmutableSettings.Builder.EMPTY_SETTINGS),
new CodecModule(ImmutableSettings.Builder.EMPTY_SETTINGS),
new IndexSettingsModule(new Index("test"), ImmutableSettings.Builder.EMPTY_SETTINGS),
new IndexNameModule(new Index("test")),
new IndexQueryParserModule(ImmutableSettings.Builder.EMPTY_SETTINGS),
new AnalysisModule(ImmutableSettings.Builder.EMPTY_SETTINGS),
new SimilarityModule(ImmutableSettings.Builder.EMPTY_SETTINGS),
new ScriptModule(ImmutableSettings.Builder.EMPTY_SETTINGS),
new SettingsModule(ImmutableSettings.Builder.EMPTY_SETTINGS),
new IndexEngineModule(ImmutableSettings.Builder.EMPTY_SETTINGS),
new IndexCacheModule(ImmutableSettings.Builder.EMPTY_SETTINGS),
new FunctionScoreModule(),
new AbstractModule() {
@Override
protected void configure() {
bind(ClusterService.class).toProvider(Providers.of((ClusterService) null));
bind(CircuitBreakerService.class).to(DummyCircuitBreakerService.class);
}
}
).createInjector();
return injector.getInstance(IndexQueryParserService.class);
}
public static CompressedString filter(FilterBuilder filterBuilder) throws IOException {
XContentBuilder builder = XContentFactory.jsonBuilder();
filterBuilder.toXContent(builder, ToXContent.EMPTY_PARAMS);
builder.close();
return new CompressedString(builder.string());
}
@Test
public void testFilteringAliases() throws Exception {
IndexAliasesService indexAliasesService = newIndexAliasesService();
indexAliasesService.add("cats", filter(termFilter("animal", "cat")));
indexAliasesService.add("dogs", filter(termFilter("animal", "dog")));
indexAliasesService.add("all", null);
assertThat(indexAliasesService.hasAlias("cats"), equalTo(true));
assertThat(indexAliasesService.hasAlias("dogs"), equalTo(true));
assertThat(indexAliasesService.hasAlias("turtles"), equalTo(false));
assertThat(indexAliasesService.aliasFilter("cats").toString(), equalTo("cache(animal:cat)"));
assertThat(indexAliasesService.aliasFilter("cats", "dogs").toString(), equalTo("BooleanFilter(cache(animal:cat) cache(animal:dog))"));
// Non-filtering alias should turn off all filters because filters are ORed
assertThat(indexAliasesService.aliasFilter("all"), nullValue());
assertThat(indexAliasesService.aliasFilter("cats", "all"), nullValue());
assertThat(indexAliasesService.aliasFilter("all", "cats"), nullValue());
indexAliasesService.add("cats", filter(termFilter("animal", "feline")));
indexAliasesService.add("dogs", filter(termFilter("animal", "canine")));
assertThat(indexAliasesService.aliasFilter("dogs", "cats").toString(), equalTo("BooleanFilter(cache(animal:canine) cache(animal:feline))"));
}
@Test
public void testAliasFilters() throws Exception {
IndexAliasesService indexAliasesService = newIndexAliasesService();
indexAliasesService.add("cats", filter(termFilter("animal", "cat")));
indexAliasesService.add("dogs", filter(termFilter("animal", "dog")));
assertThat(indexAliasesService.aliasFilter(), nullValue());
assertThat(indexAliasesService.aliasFilter("dogs").toString(), equalTo("cache(animal:dog)"));
assertThat(indexAliasesService.aliasFilter("dogs", "cats").toString(), equalTo("BooleanFilter(cache(animal:dog) cache(animal:cat))"));
indexAliasesService.add("cats", filter(termFilter("animal", "feline")));
indexAliasesService.add("dogs", filter(termFilter("animal", "canine")));
assertThat(indexAliasesService.aliasFilter("dogs", "cats").toString(), equalTo("BooleanFilter(cache(animal:canine) cache(animal:feline))"));
}
@Test(expected = InvalidAliasNameException.class)
public void testRemovedAliasFilter() throws Exception {
IndexAliasesService indexAliasesService = newIndexAliasesService();
indexAliasesService.add("cats", filter(termFilter("animal", "cat")));
indexAliasesService.remove("cats");
indexAliasesService.aliasFilter("cats");
}
@Test
public void testUnknownAliasFilter() throws Exception {
IndexAliasesService indexAliasesService = newIndexAliasesService();
indexAliasesService.add("cats", filter(termFilter("animal", "cat")));
indexAliasesService.add("dogs", filter(termFilter("animal", "dog")));
try {
indexAliasesService.aliasFilter("unknown");
fail();
} catch (InvalidAliasNameException e) {
// all is well
}
}
}
| 0true
|
src_test_java_org_elasticsearch_index_aliases_IndexAliasesServiceTests.java
|
2,873 |
public class ItalianAnalyzerProvider extends AbstractIndexAnalyzerProvider<ItalianAnalyzer> {
private final ItalianAnalyzer analyzer;
@Inject
public ItalianAnalyzerProvider(Index index, @IndexSettings Settings indexSettings, Environment env, @Assisted String name, @Assisted Settings settings) {
super(index, indexSettings, name, settings);
analyzer = new ItalianAnalyzer(version,
Analysis.parseStopWords(env, settings, ItalianAnalyzer.getDefaultStopSet(), version),
Analysis.parseStemExclusion(settings, CharArraySet.EMPTY_SET, version));
}
@Override
public ItalianAnalyzer get() {
return this.analyzer;
}
}
| 0true
|
src_main_java_org_elasticsearch_index_analysis_ItalianAnalyzerProvider.java
|
837 |
EMBEDDEDMAP("EmbeddedMap", 12, new Class<?>[] { Map.class }, new Class<?>[] { Map.class }) {
},
| 0true
|
core_src_main_java_com_orientechnologies_orient_core_metadata_schema_OType.java
|
13 |
{
@Override
public boolean accept( Throwable item )
{
return !(item instanceof LifecycleException);
}
}));
| 1no label
|
enterprise_ha_src_main_java_org_neo4j_kernel_ha_backup_HaBackupProvider.java
|
186 |
@Component("blContentProcessor")
public class ContentProcessor extends AbstractModelVariableModifierProcessor {
protected final Log LOG = LogFactory.getLog(getClass());
public static final String REQUEST_DTO = "blRequestDTO";
public static final String BLC_RULE_MAP_PARAM = "blRuleMap";
@Resource(name = "blStructuredContentService")
protected StructuredContentService structuredContentService;
@Resource(name = "blStaticAssetService")
protected StaticAssetService staticAssetService;
/**
* Sets the name of this processor to be used in Thymeleaf template
*/
public ContentProcessor() {
super("content");
}
public ContentProcessor(String elementName) {
super(elementName);
}
@Override
public int getPrecedence() {
return 10000;
}
/**
* Returns the value of the named attribute on the given element, or the supplied default when the attribute is not present.
* @param element the element to read the attribute from
* @param valueName the name of the attribute to look up
* @param defaultValue the value to return when the attribute is absent
* @return the attribute value, or {@code defaultValue} if the attribute is not set
*/
protected String getAttributeValue(Element element, String valueName, String defaultValue) {
String returnValue = element.getAttributeValue(valueName);
if (returnValue == null) {
return defaultValue;
} else {
return returnValue;
}
}
@Override
protected void modifyModelAttributes(Arguments arguments, Element element) {
String contentType = element.getAttributeValue("contentType");
String contentName = element.getAttributeValue("contentName");
String maxResultsStr = element.getAttributeValue("maxResults");
Integer maxResults = null;
if (maxResultsStr != null) {
maxResults = Ints.tryParse(maxResultsStr);
}
if (maxResults == null) {
maxResults = Integer.MAX_VALUE;
}
String contentListVar = getAttributeValue(element, "contentListVar", "contentList");
String contentItemVar = getAttributeValue(element, "contentItemVar", "contentItem");
String numResultsVar = getAttributeValue(element, "numResultsVar", "numResults");
String fieldFilters = element.getAttributeValue("fieldFilters");
String sortField = element.getAttributeValue("sortField");
IWebContext context = (IWebContext) arguments.getContext();
HttpServletRequest request = context.getHttpServletRequest();
BroadleafRequestContext blcContext = BroadleafRequestContext.getBroadleafRequestContext();
Map<String, Object> mvelParameters = buildMvelParameters(request, arguments, element);
SandBox currentSandbox = blcContext.getSandbox();
List<StructuredContentDTO> contentItems;
StructuredContentType structuredContentType = structuredContentService.findStructuredContentTypeByName(contentType);
Locale locale = blcContext.getLocale();
contentItems = getContentItems(contentName, maxResults, request, mvelParameters, currentSandbox, structuredContentType, locale, arguments, element);
if (contentItems.size() > 0) {
List<Map<String,String>> contentItemFields = new ArrayList<Map<String, String>>();
for (StructuredContentDTO item : contentItems) {
if (StringUtils.isNotEmpty(fieldFilters)) {
AssignationSequence assignments = StandardExpressionProcessor.parseAssignationSequence(arguments, fieldFilters, false);
boolean valid = true;
for (Assignation assignment : assignments) {
if (ObjectUtils.notEqual(StandardExpressionProcessor.executeExpression(arguments, assignment.getRight()),
item.getValues().get(assignment.getLeft().getValue()))) {
valid = false;
break;
}
}
if (valid) {
contentItemFields.add(item.getValues());
}
} else {
contentItemFields.add(item.getValues());
}
}
addToModel(arguments, contentItemVar, contentItemFields.get(0));
addToModel(arguments, contentListVar, contentItemFields);
addToModel(arguments, numResultsVar, contentItems.size());
} else {
if (LOG.isInfoEnabled()) {
LOG.info("**************************The contentItems is null*************************");
}
addToModel(arguments, contentItemVar, null);
addToModel(arguments, contentListVar, null);
addToModel(arguments, numResultsVar, 0);
}
}
/**
* @param contentName name of the content to be looked up (can be null)
* @param maxResults maximum results to return
* @param request servlet request
* @param mvelParameters values that should be considered when filtering the content list by rules
* @param currentSandbox current sandbox being used
* @param structuredContentType the type of content that should be returned
* @param locale current locale
* @param arguments Thymeleaf Arguments passed into the tag
* @param element element context that this Thymeleaf processor is being executed in
* @return the list of StructuredContentDTO items matching the lookup criteria
*/
protected List<StructuredContentDTO> getContentItems(String contentName, Integer maxResults, HttpServletRequest request,
Map<String, Object> mvelParameters,
SandBox currentSandbox,
StructuredContentType structuredContentType,
Locale locale,
Arguments arguments,
Element element) {
List<StructuredContentDTO> contentItems;
if (structuredContentType == null) {
contentItems = structuredContentService.lookupStructuredContentItemsByName(currentSandbox, contentName, locale, maxResults, mvelParameters, isSecure(request));
} else {
if (contentName == null || "".equals(contentName)) {
contentItems = structuredContentService.lookupStructuredContentItemsByType(currentSandbox, structuredContentType, locale, maxResults, mvelParameters, isSecure(request));
} else {
contentItems = structuredContentService.lookupStructuredContentItemsByName(currentSandbox, structuredContentType, contentName, locale, maxResults, mvelParameters, isSecure(request));
}
}
return contentItems;
}
/**
* Builds the map of parameters that MVEL uses when evaluating the content targeting rules.
*
* @param request the current servlet request
* @param arguments the Thymeleaf arguments for the current execution
* @param element the element this processor is executing on
* @return a map of objects made available to the MVEL rule expressions
*/
protected Map<String, Object> buildMvelParameters(HttpServletRequest request, Arguments arguments, Element element) {
TimeZone timeZone = BroadleafRequestContext.getBroadleafRequestContext().getTimeZone();
final TimeDTO timeDto;
if (timeZone != null) {
timeDto = new TimeDTO(SystemTime.asCalendar(timeZone));
} else {
timeDto = new TimeDTO();
}
RequestDTO requestDto = (RequestDTO) request.getAttribute(REQUEST_DTO);
Map<String, Object> mvelParameters = new HashMap<String, Object>();
mvelParameters.put("time", timeDto);
mvelParameters.put("request", requestDto);
String productString = element.getAttributeValue("product");
if (productString != null) {
Object product = StandardExpressionProcessor.processExpression(arguments, productString);
if (product != null) {
mvelParameters.put("product", product);
}
}
String categoryString = element.getAttributeValue("category");
if (categoryString != null) {
Object category = StandardExpressionProcessor.processExpression(arguments, categoryString);
if (category != null) {
mvelParameters.put("category", category);
}
}
@SuppressWarnings("unchecked")
Map<String,Object> blcRuleMap = (Map<String,Object>) request.getAttribute(BLC_RULE_MAP_PARAM);
if (blcRuleMap != null) {
for (String mapKey : blcRuleMap.keySet()) {
mvelParameters.put(mapKey, blcRuleMap.get(mapKey));
}
}
return mvelParameters;
}
public boolean isSecure(HttpServletRequest request) {
boolean secure = false;
if (request != null) {
secure = ("HTTPS".equalsIgnoreCase(request.getScheme()) || request.isSecure());
}
return secure;
}
}
| 0true
|
admin_broadleaf-contentmanagement-module_src_main_java_org_broadleafcommerce_cms_web_processor_ContentProcessor.java
|
320 |
public class MergeManager {
/**
* Additional merge points may be added by the caller. Also default merge points
* may be overriden to change their current behavior. This is accomplished by
* specifying the system property denoted by the key MergeManager.MERGE_DEFINITION_SYSTEM_PROPERTY
* with a value stating the fully qualified path of user-created property file. Please refer
* to the default properties file located at org/broadleafcommerce/profile/extensibility/context/merge/default.properties
* for more details.
*
*/
public static final String MERGE_DEFINITION_SYSTEM_PROPERTY = "org.broadleafcommerce.extensibility.context.merge.handlers.merge.properties";
private static final Log LOG = LogFactory.getLog(MergeManager.class);
private static DocumentBuilder builder;
static {
DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
try {
builder = dbf.newDocumentBuilder();
} catch (ParserConfigurationException e) {
LOG.error("Unable to create document builder", e);
throw new RuntimeException(e);
}
}
private MergeHandler[] handlers;
public MergeManager() throws MergeManagerSetupException {
try {
Properties props = loadProperties();
setHandlers(props);
} catch (IOException e) {
throw new MergeManagerSetupException(e);
} catch (ClassNotFoundException e) {
throw new MergeManagerSetupException(e);
} catch (IllegalAccessException e) {
throw new MergeManagerSetupException(e);
} catch (InstantiationException e) {
throw new MergeManagerSetupException(e);
}
}
/**
* Merge 2 xml document streams together into a final resulting stream. During
* the merge, various merge business rules are followed based on configuration
* defined for various merge points.
*
* @param stream1
* @param stream2
* @return the stream representing the merged document
* @throws org.broadleafcommerce.common.extensibility.context.merge.exceptions.MergeException
*/
public ResourceInputStream merge(ResourceInputStream stream1, ResourceInputStream stream2) throws MergeException {
try {
Document doc1 = builder.parse(stream1);
Document doc2 = builder.parse(stream2);
List<Node> exhaustedNodes = new ArrayList<Node>();
//process any defined handlers
for (MergeHandler handler : this.handlers) {
if (LOG.isDebugEnabled()) {
LOG.debug("Processing handler: " + handler.getXPath());
}
MergePoint point = new MergePoint(handler, doc1, doc2);
Node[] list = point.merge(exhaustedNodes);
if (list != null) {
Collections.addAll(exhaustedNodes, list);
}
}
TransformerFactory tFactory = TransformerFactory.newInstance();
Transformer xmlTransformer = tFactory.newTransformer();
xmlTransformer.setOutputProperty(OutputKeys.VERSION, "1.0");
xmlTransformer.setOutputProperty(OutputKeys.ENCODING, "UTF-8");
xmlTransformer.setOutputProperty(OutputKeys.METHOD, "xml");
xmlTransformer.setOutputProperty(OutputKeys.INDENT, "yes");
DOMSource source = new DOMSource(doc1);
ByteArrayOutputStream baos = new ByteArrayOutputStream();
BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(baos));
StreamResult result = new StreamResult(writer);
xmlTransformer.transform(source, result);
byte[] itemArray = baos.toByteArray();
return new ResourceInputStream(new ByteArrayInputStream(itemArray), stream2.getName(), stream1.getNames());
} catch (Exception e) {
throw new MergeException(e);
}
}
private void setHandlers(Properties props) throws ClassNotFoundException, IllegalAccessException, InstantiationException {
ArrayList<MergeHandler> handlers = new ArrayList<MergeHandler>();
String[] keys = props.keySet().toArray(new String[props.keySet().size()]);
for (String key : keys) {
if (key.startsWith("handler.")) {
MergeHandler temp = (MergeHandler) Class.forName(props.getProperty(key)).newInstance();
String name = key.substring(8, key.length());
temp.setName(name);
String priority = props.getProperty("priority." + name);
if (priority != null) {
temp.setPriority(Integer.parseInt(priority));
}
String xpath = props.getProperty("xpath." + name);
if (priority != null) {
temp.setXPath(xpath);
}
handlers.add(temp);
}
}
MergeHandler[] explodedView = {};
explodedView = handlers.toArray(explodedView);
Comparator<Object> nameCompare = new Comparator<Object>() {
public int compare(Object arg0, Object arg1) {
return ((MergeHandler) arg0).getName().compareTo(((MergeHandler) arg1).getName());
}
};
Arrays.sort(explodedView, nameCompare);
ArrayList<MergeHandler> finalHandlers = new ArrayList<MergeHandler>();
for (MergeHandler temp : explodedView) {
if (temp.getName().contains(".")) {
final String parentName = temp.getName().substring(0, temp.getName().lastIndexOf("."));
int pos = Arrays.binarySearch(explodedView, new MergeHandlerAdapter() {
@Override
public String getName() {
return parentName;
}
}, nameCompare);
if (pos >= 0) {
MergeHandler[] parentHandlers = explodedView[pos].getChildren();
MergeHandler[] newHandlers = new MergeHandler[parentHandlers.length + 1];
System.arraycopy(parentHandlers, 0, newHandlers, 0, parentHandlers.length);
newHandlers[newHandlers.length - 1] = temp;
Arrays.sort(newHandlers);
explodedView[pos].setChildren(newHandlers);
}
} else {
finalHandlers.add(temp);
}
}
this.handlers = new MergeHandler[0];
this.handlers = finalHandlers.toArray(this.handlers);
Arrays.sort(this.handlers);
}
private Properties loadProperties() throws IOException {
Properties defaultProperties = new Properties();
defaultProperties.load(MergeManager.class.getResourceAsStream("default.properties"));
Properties props;
String overrideFileClassPath = System.getProperty(MERGE_DEFINITION_SYSTEM_PROPERTY);
if (overrideFileClassPath != null) {
props = new Properties(defaultProperties);
props.load(MergeManager.class.getClassLoader().getResourceAsStream(overrideFileClassPath));
} else {
props = defaultProperties;
}
return props;
}
public String serialize(InputStream in) {
InputStreamReader reader = null;
int temp;
StringBuilder item = new StringBuilder();
boolean eof = false;
try {
reader = new InputStreamReader(in);
while (!eof) {
temp = reader.read();
if (temp == -1) {
eof = true;
} else {
item.append((char) temp);
}
}
} catch (IOException e) {
LOG.error("Unable to merge source and patch locations", e);
} finally {
if (reader != null) {
try{ reader.close(); } catch (Throwable e) {
LOG.error("Unable to merge source and patch locations", e);
}
}
}
return item.toString();
}
}
| 0true
|
common_src_main_java_org_broadleafcommerce_common_extensibility_context_merge_MergeManager.java
|
3,080 |
static class Create implements IndexingOperation {
private final DocumentMapper docMapper;
private final Term uid;
private final ParsedDocument doc;
private long version = Versions.MATCH_ANY;
private VersionType versionType = VersionType.INTERNAL;
private Origin origin = Origin.PRIMARY;
private long startTime;
private long endTime;
public Create(DocumentMapper docMapper, Term uid, ParsedDocument doc) {
this.docMapper = docMapper;
this.uid = uid;
this.doc = doc;
}
@Override
public DocumentMapper docMapper() {
return this.docMapper;
}
@Override
public Type opType() {
return Type.CREATE;
}
public Create origin(Origin origin) {
this.origin = origin;
return this;
}
@Override
public Origin origin() {
return this.origin;
}
@Override
public ParsedDocument parsedDoc() {
return this.doc;
}
public Term uid() {
return this.uid;
}
public String type() {
return this.doc.type();
}
public String id() {
return this.doc.id();
}
public String routing() {
return this.doc.routing();
}
public long timestamp() {
return this.doc.timestamp();
}
public long ttl() {
return this.doc.ttl();
}
public long version() {
return this.version;
}
public Create version(long version) {
this.version = version;
this.doc.version().setLongValue(version);
return this;
}
public VersionType versionType() {
return this.versionType;
}
public Create versionType(VersionType versionType) {
this.versionType = versionType;
return this;
}
public String parent() {
return this.doc.parent();
}
@Override
public List<Document> docs() {
return this.doc.docs();
}
public Analyzer analyzer() {
return this.doc.analyzer();
}
public BytesReference source() {
return this.doc.source();
}
public Create startTime(long startTime) {
this.startTime = startTime;
return this;
}
/**
* Returns operation start time in nanoseconds.
*/
public long startTime() {
return this.startTime;
}
public Create endTime(long endTime) {
this.endTime = endTime;
return this;
}
/**
* Returns operation end time in nanoseconds.
*/
public long endTime() {
return this.endTime;
}
}
| 0true
|
src_main_java_org_elasticsearch_index_engine_Engine.java
|
1,611 |
public interface MetadataVisitor {
public void visit(BasicFieldMetadata metadata);
public void visit(BasicCollectionMetadata metadata);
public void visit(AdornedTargetCollectionMetadata metadata);
public void visit(MapMetadata metadata);
}
| 0true
|
admin_broadleaf-open-admin-platform_src_main_java_org_broadleafcommerce_openadmin_dto_visitor_MetadataVisitor.java
|
697 |
constructors[LIST_ADD] = new ConstructorFunction<Integer, Portable>() {
public Portable createNew(Integer arg) {
return new ListAddRequest();
}
};
| 0true
|
hazelcast_src_main_java_com_hazelcast_collection_CollectionPortableHook.java
|
4,939 |
public class RestClearScrollAction extends BaseRestHandler {
@Inject
public RestClearScrollAction(Settings settings, Client client, RestController controller) {
super(settings, client);
controller.registerHandler(DELETE, "/_search/scroll", this);
controller.registerHandler(DELETE, "/_search/scroll/{scroll_id}", this);
}
@Override
public void handleRequest(final RestRequest request, final RestChannel channel) {
String scrollIds = request.param("scroll_id");
ClearScrollRequest clearRequest = new ClearScrollRequest();
clearRequest.setScrollIds(Arrays.asList(splitScrollIds(scrollIds)));
client.clearScroll(clearRequest, new ActionListener<ClearScrollResponse>() {
@Override
public void onResponse(ClearScrollResponse response) {
try {
XContentBuilder builder = restContentBuilder(request);
builder.startObject();
builder.endObject();
channel.sendResponse(new XContentRestResponse(request, OK, builder));
} catch (Throwable e) {
onFailure(e);
}
}
@Override
public void onFailure(Throwable e) {
try {
channel.sendResponse(new XContentThrowableRestResponse(request, e));
} catch (IOException e1) {
logger.error("Failed to send failure response", e1);
}
}
});
}
public static String[] splitScrollIds(String scrollIds) {
if (scrollIds == null) {
return Strings.EMPTY_ARRAY;
}
return Strings.splitStringByCommaToArray(scrollIds);
}
}
| 1no label
|
src_main_java_org_elasticsearch_rest_action_search_RestClearScrollAction.java
|
1,155 |
public interface SecurePaymentInfoDao {
public BankAccountPaymentInfo findBankAccountInfo(String referenceNumber);
public CreditCardPaymentInfo findCreditCardInfo(String referenceNumber);
public GiftCardPaymentInfo findGiftCardInfo(String referenceNumber);
public Referenced save(Referenced securePaymentInfo);
public BankAccountPaymentInfo createBankAccountPaymentInfo();
public GiftCardPaymentInfo createGiftCardPaymentInfo();
public CreditCardPaymentInfo createCreditCardPaymentInfo();
public void delete(Referenced securePaymentInfo);
}
| 0true
|
core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_payment_dao_SecurePaymentInfoDao.java
|
2,451 |
executor.execute(new Runnable() {
@Override
public void run() {
executed3.set(true);
}
});
| 0true
|
src_test_java_org_elasticsearch_common_util_concurrent_EsExecutorsTests.java
|
553 |
public class WeightUnitOfMeasureType implements Serializable, BroadleafEnumerationType {
private static final long serialVersionUID = 1L;
private static final Map<String, WeightUnitOfMeasureType> TYPES = new LinkedHashMap<String, WeightUnitOfMeasureType>();
public static final WeightUnitOfMeasureType POUNDS = new WeightUnitOfMeasureType("POUNDS", "Pounds");
public static final WeightUnitOfMeasureType KILOGRAMS = new WeightUnitOfMeasureType("KILOGRAMS", "Kilograms");
public static WeightUnitOfMeasureType getInstance(final String type) {
return TYPES.get(type);
}
private String type;
private String friendlyType;
public WeightUnitOfMeasureType() {
//do nothing
}
public WeightUnitOfMeasureType(final String type, final String friendlyType) {
this.friendlyType = friendlyType;
setType(type);
}
public String getType() {
return type;
}
public String getFriendlyType() {
return friendlyType;
}
private void setType(final String type) {
this.type = type;
if (!TYPES.containsKey(type)){
TYPES.put(type, this);
}
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((type == null) ? 0 : type.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
WeightUnitOfMeasureType other = (WeightUnitOfMeasureType) obj;
if (type == null) {
if (other.type != null)
return false;
} else if (!type.equals(other.type))
return false;
return true;
}
}
| 1no label
|
common_src_main_java_org_broadleafcommerce_common_util_WeightUnitOfMeasureType.java
|
3,716 |
@Ignore("No tests?")
public class SimpleIpMappingTests extends ElasticsearchTestCase {
// No Longer enabled...
// @Test public void testAutoIpDetection() throws Exception {
// String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
// .startObject("properties").endObject()
// .endObject().endObject().string();
//
// XContentDocumentMapper defaultMapper = MapperTests.newParser().parse(mapping);
//
// ParsedDocument doc = defaultMapper.parse("type", "1", XContentFactory.jsonBuilder()
// .startObject()
// .field("ip1", "127.0.0.1")
// .field("ip2", "0.1")
// .field("ip3", "127.0.0.1.2")
// .endObject()
// .copiedBytes());
//
// assertThat(doc.doc().getFieldable("ip1"), notNullValue());
// assertThat(doc.doc().get("ip1"), nullValue()); // its numeric
// assertThat(doc.doc().get("ip2"), equalTo("0.1"));
// assertThat(doc.doc().get("ip3"), equalTo("127.0.0.1.2"));
// }
}
| 0true
|
src_test_java_org_elasticsearch_index_mapper_ip_SimpleIpMappingTests.java
|
2,075 |
public class MultipleEntryOperationFactory implements OperationFactory {
private String name;
private Set<Data> keys;
private EntryProcessor entryProcessor;
public MultipleEntryOperationFactory() {
}
public MultipleEntryOperationFactory(String name, Set<Data> keys, EntryProcessor entryProcessor) {
this.name = name;
this.keys = keys;
this.entryProcessor = entryProcessor;
}
@Override
public Operation createOperation() {
return new MultipleEntryOperation(name, keys, entryProcessor);
}
@Override
public void writeData(ObjectDataOutput out) throws IOException {
out.writeUTF(name);
out.writeInt(keys.size());
for (Data key : keys) {
key.writeData(out);
}
out.writeObject(entryProcessor);
}
@Override
public void readData(ObjectDataInput in) throws IOException {
this.name = in.readUTF();
int size = in.readInt();
this.keys = new HashSet<Data>(size);
for (int i = 0; i < size; i++) {
Data key = new Data();
key.readData(in);
keys.add(key);
}
this.entryProcessor = in.readObject();
}
}
| 0true
|
hazelcast_src_main_java_com_hazelcast_map_operation_MultipleEntryOperationFactory.java
|
359 |
public class Filter {
String name;
String condition;
String entityImplementationClassName;
List<String> indexColumnNames;
public String getCondition() {
return condition;
}
public void setCondition(String condition) {
this.condition = condition;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getEntityImplementationClassName() {
return entityImplementationClassName;
}
public void setEntityImplementationClassName(String entityImplementationClassName) {
this.entityImplementationClassName = entityImplementationClassName;
}
public List<String> getIndexColumnNames() {
return indexColumnNames;
}
public void setIndexColumnNames(List<String> indexColumnNames) {
this.indexColumnNames = indexColumnNames;
}
}
| 0true
|
common_src_main_java_org_broadleafcommerce_common_filter_Filter.java
|
529 |
public class EnvironmentFactoryBean implements FactoryBean {
private String className;
public EnvironmentFactoryBean(String className) {
this.className = className;
}
public Object getObject() throws Exception {
return Class.forName(className).newInstance();
}
@SuppressWarnings("unchecked")
public Class getObjectType() {
try {
return Class.forName(className);
} catch (ClassNotFoundException e) {
throw new RuntimeException(e);
}
}
public boolean isSingleton() {
return false;
}
}
| 0true
|
common_src_main_java_org_broadleafcommerce_common_util_EnvironmentFactoryBean.java
|
158 |
@Service("blContentDefaultRuleProcessor")
public class StructuredContentDefaultRuleProcessor extends AbstractStructuredContentRuleProcessor {
private static final Log LOG = LogFactory.getLog(StructuredContentDefaultRuleProcessor.class);
/**
* Returns true if all of the rules associated with the passed in <code>StructuredContent</code>
* item match based on the passed in vars.
*
* Also returns true if no rules are present for the passed in item.
*
* @param sc - a structured content item to test
* @param vars - a map of objects used by the rule MVEL expressions
* @return the result of the rule checks
*/
public boolean checkForMatch(StructuredContentDTO sc, Map<String, Object> vars) {
String ruleExpression = sc.getRuleExpression();
if (ruleExpression != null) {
if (LOG.isTraceEnabled()) {
LOG.trace("Processing content rule for StructuredContent with id " + sc.getId() +". Value = " + ruleExpression);
}
boolean result = executeExpression(ruleExpression, vars);
if (! result) {
if (LOG.isDebugEnabled()) {
LOG.debug("Content failed to pass rule and will not be included for StructuredContent with id " + sc.getId() +". Value = " + ruleExpression);
}
}
return result;
} else {
// If no rule found, then consider this a match.
return true;
}
}
}
| 0true
|
admin_broadleaf-contentmanagement-module_src_main_java_org_broadleafcommerce_cms_structure_service_StructuredContentDefaultRuleProcessor.java
|
1,639 |
@Component("blBasicFieldMetadataProvider")
@Scope("prototype")
public class BasicFieldMetadataProvider extends FieldMetadataProviderAdapter {
private static final Log LOG = LogFactory.getLog(BasicFieldMetadataProvider.class);
protected boolean canHandleFieldForConfiguredMetadata(AddMetadataRequest addMetadataRequest, Map<String, FieldMetadata> metadata) {
AdminPresentation annot = addMetadataRequest.getRequestedField().getAnnotation(AdminPresentation.class);
return annot != null;
}
protected boolean canHandleAnnotationOverride(OverrideViaAnnotationRequest overrideViaAnnotationRequest, Map<String, FieldMetadata> metadata) {
AdminPresentationOverrides myOverrides = overrideViaAnnotationRequest.getRequestedEntity().getAnnotation(AdminPresentationOverrides.class);
AdminPresentationMergeOverrides myMergeOverrides = overrideViaAnnotationRequest.getRequestedEntity().getAnnotation(AdminPresentationMergeOverrides.class);
return (myOverrides != null && (!ArrayUtils.isEmpty(myOverrides.value()) || !ArrayUtils.isEmpty(myOverrides
.toOneLookups()) || !ArrayUtils.isEmpty(myOverrides.dataDrivenEnums()))) ||
myMergeOverrides != null;
}
@Override
public FieldProviderResponse addMetadata(AddMetadataRequest addMetadataRequest, Map<String, FieldMetadata> metadata) {
if (!canHandleFieldForConfiguredMetadata(addMetadataRequest, metadata)) {
return FieldProviderResponse.NOT_HANDLED;
}
AdminPresentation annot = addMetadataRequest.getRequestedField().getAnnotation(AdminPresentation.class);
FieldInfo info = buildFieldInfo(addMetadataRequest.getRequestedField());
FieldMetadataOverride override = constructBasicMetadataOverride(annot, addMetadataRequest.getRequestedField().getAnnotation(AdminPresentationToOneLookup.class),
addMetadataRequest.getRequestedField().getAnnotation(AdminPresentationDataDrivenEnumeration.class));
buildBasicMetadata(addMetadataRequest.getParentClass(), addMetadataRequest.getTargetClass(), metadata, info, override, addMetadataRequest.getDynamicEntityDao());
setClassOwnership(addMetadataRequest.getParentClass(), addMetadataRequest.getTargetClass(), metadata, info);
return FieldProviderResponse.HANDLED;
}
@Override
public FieldProviderResponse overrideViaAnnotation(OverrideViaAnnotationRequest overrideViaAnnotationRequest, Map<String, FieldMetadata> metadata) {
if (!canHandleAnnotationOverride(overrideViaAnnotationRequest, metadata)) {
return FieldProviderResponse.NOT_HANDLED;
}
Map<String, AdminPresentationOverride> presentationOverrides = new LinkedHashMap<String, AdminPresentationOverride>();
Map<String, AdminPresentationToOneLookupOverride> presentationToOneLookupOverrides = new LinkedHashMap<String, AdminPresentationToOneLookupOverride>();
Map<String, AdminPresentationDataDrivenEnumerationOverride> presentationDataDrivenEnumerationOverrides = new LinkedHashMap<String, AdminPresentationDataDrivenEnumerationOverride>();
AdminPresentationOverrides myOverrides = overrideViaAnnotationRequest.getRequestedEntity().getAnnotation(AdminPresentationOverrides.class);
if (myOverrides != null) {
for (AdminPresentationOverride myOverride : myOverrides.value()) {
presentationOverrides.put(myOverride.name(), myOverride);
}
for (AdminPresentationToOneLookupOverride myOverride : myOverrides.toOneLookups()) {
presentationToOneLookupOverrides.put(myOverride.name(), myOverride);
}
for (AdminPresentationDataDrivenEnumerationOverride myOverride : myOverrides.dataDrivenEnums()) {
presentationDataDrivenEnumerationOverrides.put(myOverride.name(), myOverride);
}
}
for (String propertyName : presentationOverrides.keySet()) {
for (String key : metadata.keySet()) {
if (StringUtils.isEmpty(propertyName) || key.startsWith(propertyName)) {
buildAdminPresentationOverride(overrideViaAnnotationRequest.getPrefix(), overrideViaAnnotationRequest.getParentExcluded(), metadata, presentationOverrides, propertyName, key, overrideViaAnnotationRequest.getDynamicEntityDao());
}
}
}
for (String propertyName : presentationToOneLookupOverrides.keySet()) {
for (String key : metadata.keySet()) {
if (key.startsWith(propertyName)) {
buildAdminPresentationToOneLookupOverride(metadata, presentationToOneLookupOverrides, propertyName, key);
}
}
}
for (String propertyName : presentationDataDrivenEnumerationOverrides.keySet()) {
for (String key : metadata.keySet()) {
if (key.startsWith(propertyName)) {
buildAdminPresentationDataDrivenEnumerationOverride(metadata, presentationDataDrivenEnumerationOverrides, propertyName, key,
overrideViaAnnotationRequest.getDynamicEntityDao());
}
}
}
AdminPresentationMergeOverrides myMergeOverrides = overrideViaAnnotationRequest.getRequestedEntity().
getAnnotation(AdminPresentationMergeOverrides.class);
if (myMergeOverrides != null) {
for (AdminPresentationMergeOverride override : myMergeOverrides.value()) {
String propertyName = override.name();
Map<String, FieldMetadata> loopMap = new HashMap<String, FieldMetadata>();
loopMap.putAll(metadata);
for (Map.Entry<String, FieldMetadata> entry : loopMap.entrySet()) {
if (entry.getKey().startsWith(propertyName) || StringUtils.isEmpty(propertyName)) {
FieldMetadata targetMetadata = entry.getValue();
if (targetMetadata instanceof BasicFieldMetadata) {
BasicFieldMetadata serverMetadata = (BasicFieldMetadata) targetMetadata;
if (serverMetadata.getTargetClass() != null) {
try {
Class<?> targetClass = Class.forName(serverMetadata.getTargetClass());
Class<?> parentClass = null;
if (serverMetadata.getOwningClass() != null) {
parentClass = Class.forName(serverMetadata.getOwningClass());
}
String fieldName = serverMetadata.getFieldName();
Field field = overrideViaAnnotationRequest.getDynamicEntityDao().getFieldManager()
.getField(targetClass, fieldName);
Map<String, FieldMetadata> temp = new HashMap<String, FieldMetadata>(1);
temp.put(fieldName, serverMetadata);
FieldInfo info;
if (field != null) {
info = buildFieldInfo(field);
} else {
info = new FieldInfo();
info.setName(fieldName);
}
FieldMetadataOverride fieldMetadataOverride = overrideMergeMetadata(override);
if (serverMetadata.getExcluded() != null && serverMetadata.getExcluded() &&
(fieldMetadataOverride.getExcluded() == null || fieldMetadataOverride.getExcluded())) {
continue;
}
buildBasicMetadata(parentClass, targetClass, temp, info, fieldMetadataOverride,
overrideViaAnnotationRequest.getDynamicEntityDao());
serverMetadata = (BasicFieldMetadata) temp.get(fieldName);
metadata.put(entry.getKey(), serverMetadata);
} catch (Exception e) {
throw new RuntimeException(e);
}
}
}
}
}
}
}
return FieldProviderResponse.HANDLED;
}
@Override
public FieldProviderResponse overrideViaXml(OverrideViaXmlRequest overrideViaXmlRequest, Map<String, FieldMetadata> metadata) {
Map<String, FieldMetadataOverride> overrides = getTargetedOverride(overrideViaXmlRequest.getDynamicEntityDao(), overrideViaXmlRequest.getRequestedConfigKey(), overrideViaXmlRequest.getRequestedCeilingEntity());
if (overrides != null) {
for (String propertyName : overrides.keySet()) {
final FieldMetadataOverride localMetadata = overrides.get(propertyName);
for (String key : metadata.keySet()) {
if (key.equals(propertyName)) {
try {
if (metadata.get(key) instanceof BasicFieldMetadata) {
BasicFieldMetadata serverMetadata = (BasicFieldMetadata) metadata.get(key);
if (serverMetadata.getTargetClass() != null) {
Class<?> targetClass = Class.forName(serverMetadata.getTargetClass());
Class<?> parentClass = null;
if (serverMetadata.getOwningClass() != null) {
parentClass = Class.forName(serverMetadata.getOwningClass());
}
String fieldName = serverMetadata.getFieldName();
Field field = overrideViaXmlRequest.getDynamicEntityDao().getFieldManager().getField(targetClass, fieldName);
Map<String, FieldMetadata> temp = new HashMap<String, FieldMetadata>(1);
temp.put(field.getName(), serverMetadata);
FieldInfo info = buildFieldInfo(field);
buildBasicMetadata(parentClass, targetClass, temp, info, localMetadata,
overrideViaXmlRequest.getDynamicEntityDao());
serverMetadata = (BasicFieldMetadata) temp.get(field.getName());
metadata.put(key, serverMetadata);
if (overrideViaXmlRequest.getParentExcluded()) {
if (LOG.isDebugEnabled()) {
LOG.debug("applyMetadataOverrides:Excluding " + key + "because the parent was excluded");
}
serverMetadata.setExcluded(true);
}
}
}
} catch (Exception e) {
throw new RuntimeException(e);
}
}
}
}
}
return FieldProviderResponse.HANDLED;
}
protected void buildAdminPresentationToOneLookupOverride(Map<String, FieldMetadata> mergedProperties, Map<String, AdminPresentationToOneLookupOverride> presentationOverrides, String propertyName, String key) {
AdminPresentationToOneLookupOverride override = presentationOverrides.get(propertyName);
if (override != null) {
AdminPresentationToOneLookup annot = override.value();
if (annot != null) {
if (!(mergedProperties.get(key) instanceof BasicFieldMetadata)) {
return;
}
BasicFieldMetadata metadata = (BasicFieldMetadata) mergedProperties.get(key);
metadata.setFieldType(SupportedFieldType.ADDITIONAL_FOREIGN_KEY);
metadata.setExplicitFieldType(SupportedFieldType.ADDITIONAL_FOREIGN_KEY);
metadata.setLookupDisplayProperty(annot.lookupDisplayProperty());
metadata.setForcePopulateChildProperties(annot.forcePopulateChildProperties());
if (!StringUtils.isEmpty(annot.lookupDisplayProperty())) {
metadata.setForeignKeyDisplayValueProperty(annot.lookupDisplayProperty());
}
metadata.setCustomCriteria(annot.customCriteria());
metadata.setUseServerSideInspectionCache(annot.useServerSideInspectionCache());
}
}
}
protected void buildAdminPresentationDataDrivenEnumerationOverride(Map<String, FieldMetadata> mergedProperties, Map<String, AdminPresentationDataDrivenEnumerationOverride> presentationOverrides, String propertyName, String key, DynamicEntityDao dynamicEntityDao) {
AdminPresentationDataDrivenEnumerationOverride override = presentationOverrides.get(propertyName);
if (override != null) {
AdminPresentationDataDrivenEnumeration annot = override.value();
if (annot != null) {
if (!(mergedProperties.get(key) instanceof BasicFieldMetadata)) {
return;
}
BasicFieldMetadata metadata = (BasicFieldMetadata) mergedProperties.get(key);
metadata.setFieldType(SupportedFieldType.DATA_DRIVEN_ENUMERATION);
metadata.setExplicitFieldType(SupportedFieldType.DATA_DRIVEN_ENUMERATION);
metadata.setOptionListEntity(annot.optionListEntity().getName());
if (metadata.getOptionListEntity().equals(DataDrivenEnumerationValueImpl.class.getName())) {
metadata.setOptionValueFieldName("key");
metadata.setOptionDisplayFieldName("display");
} else if (metadata.getOptionListEntity() == null && (StringUtils.isEmpty(metadata.getOptionValueFieldName()) || StringUtils.isEmpty(metadata.getOptionDisplayFieldName()))) {
throw new IllegalArgumentException("Problem setting up data driven enumeration for ("+propertyName+"). The optionListEntity, optionValueFieldName and optionDisplayFieldName properties must all be included if not using DataDrivenEnumerationValueImpl as the optionListEntity.");
} else {
metadata.setOptionValueFieldName(annot.optionValueFieldName());
metadata.setOptionDisplayFieldName(annot.optionDisplayFieldName());
}
if (!ArrayUtils.isEmpty(annot.optionFilterParams())) {
String[][] params = new String[annot.optionFilterParams().length][3];
for (int j=0;j<params.length;j++) {
params[j][0] = annot.optionFilterParams()[j].param();
params[j][1] = annot.optionFilterParams()[j].value();
params[j][2] = String.valueOf(annot.optionFilterParams()[j].paramType());
}
metadata.setOptionFilterParams(params);
} else {
metadata.setOptionFilterParams(new String[][]{});
}
if (!StringUtils.isEmpty(metadata.getOptionListEntity())) {
buildDataDrivenList(metadata, dynamicEntityDao);
}
}
}
}
protected void buildAdminPresentationOverride(String prefix, Boolean isParentExcluded, Map<String, FieldMetadata> mergedProperties, Map<String, AdminPresentationOverride> presentationOverrides, String propertyName, String key, DynamicEntityDao dynamicEntityDao) {
AdminPresentationOverride override = presentationOverrides.get(propertyName);
if (override != null) {
AdminPresentation annot = override.value();
if (annot != null) {
String testKey = prefix + key;
if ((testKey.startsWith(propertyName + ".") || testKey.equals(propertyName)) && annot.excluded()) {
FieldMetadata metadata = mergedProperties.get(key);
if (LOG.isDebugEnabled()) {
LOG.debug("buildAdminPresentationOverride:Excluding " + key + "because an override annotation declared "+ testKey + " to be excluded");
}
metadata.setExcluded(true);
return;
}
if ((testKey.startsWith(propertyName + ".") || testKey.equals(propertyName)) && !annot.excluded()) {
FieldMetadata metadata = mergedProperties.get(key);
if (!isParentExcluded) {
if (LOG.isDebugEnabled()) {
LOG.debug("buildAdminPresentationOverride:Showing " + key + "because an override annotation declared " + testKey + " to not be excluded");
}
metadata.setExcluded(false);
}
}
if (!(mergedProperties.get(key) instanceof BasicFieldMetadata)) {
return;
}
BasicFieldMetadata serverMetadata = (BasicFieldMetadata) mergedProperties.get(key);
if (serverMetadata.getTargetClass() != null) {
try {
Class<?> targetClass = Class.forName(serverMetadata.getTargetClass());
Class<?> parentClass = null;
if (serverMetadata.getOwningClass() != null) {
parentClass = Class.forName(serverMetadata.getOwningClass());
}
String fieldName = serverMetadata.getFieldName();
Field field = dynamicEntityDao.getFieldManager().getField(targetClass, fieldName);
FieldMetadataOverride localMetadata = constructBasicMetadataOverride(annot, null, null);
//do not include the previous metadata - we want to construct a fresh metadata from the override annotation
Map<String, FieldMetadata> temp = new HashMap<String, FieldMetadata>(1);
FieldInfo info = buildFieldInfo(field);
buildBasicMetadata(parentClass, targetClass, temp, info, localMetadata, dynamicEntityDao);
BasicFieldMetadata result = (BasicFieldMetadata) temp.get(field.getName());
result.setInheritedFromType(serverMetadata.getInheritedFromType());
result.setAvailableToTypes(serverMetadata.getAvailableToTypes());
result.setFieldType(serverMetadata.getFieldType());
result.setSecondaryType(serverMetadata.getSecondaryType());
result.setLength(serverMetadata.getLength());
result.setScale(serverMetadata.getScale());
result.setPrecision(serverMetadata.getPrecision());
result.setRequired(serverMetadata.getRequired());
result.setUnique(serverMetadata.getUnique());
result.setForeignKeyCollection(serverMetadata.getForeignKeyCollection());
result.setMutable(serverMetadata.getMutable());
result.setMergedPropertyType(serverMetadata.getMergedPropertyType());
mergedProperties.put(key, result);
if (isParentExcluded) {
if (LOG.isDebugEnabled()) {
LOG.debug("buildAdminPresentationOverride:Excluding " + key + "because the parent was excluded");
}
serverMetadata.setExcluded(true);
}
} catch (Exception e) {
throw new RuntimeException(e);
}
}
}
}
}
protected FieldMetadataOverride overrideMergeMetadata(AdminPresentationMergeOverride merge) {
FieldMetadataOverride fieldMetadataOverride = new FieldMetadataOverride();
Map<String, AdminPresentationMergeEntry> overrideValues = getAdminPresentationEntries(merge.mergeEntries());
for (Map.Entry<String, AdminPresentationMergeEntry> entry : overrideValues.entrySet()) {
String stringValue = entry.getValue().overrideValue();
if (entry.getKey().equals(PropertyType.AdminPresentation.FRIENDLYNAME)) {
fieldMetadataOverride.setFriendlyName(stringValue);
} else if (entry.getKey().equals(PropertyType.AdminPresentation.SECURITYLEVEL)) {
fieldMetadataOverride.setSecurityLevel(stringValue);
} else if (entry.getKey().equals(PropertyType.AdminPresentation.GROUP)) {
fieldMetadataOverride.setGroup(stringValue);
} else if (entry.getKey().equals(PropertyType.AdminPresentation.TAB)) {
fieldMetadataOverride.setTab(stringValue);
} else if (entry.getKey().equals(PropertyType.AdminPresentation.COLUMNWIDTH)) {
fieldMetadataOverride.setColumnWidth(stringValue);
} else if (entry.getKey().equals(PropertyType.AdminPresentation.BROADLEAFENUMERATION)) {
fieldMetadataOverride.setBroadleafEnumeration(stringValue);
} else if (entry.getKey().equals(PropertyType.AdminPresentation.TOOLTIP)) {
fieldMetadataOverride.setTooltip(stringValue);
} else if (entry.getKey().equals(PropertyType.AdminPresentation.HELPTEXT)) {
fieldMetadataOverride.setHelpText(stringValue);
} else if (entry.getKey().equals(PropertyType.AdminPresentation.HINT)) {
fieldMetadataOverride.setHint(stringValue);
} else if (entry.getKey().equals(PropertyType.AdminPresentation.SHOWIFPROPERTY)) {
fieldMetadataOverride.setShowIfProperty(stringValue);
} else if (entry.getKey().equals(PropertyType.AdminPresentation.CURRENCYCODEFIELD)) {
fieldMetadataOverride.setCurrencyCodeField(stringValue);
} else if (entry.getKey().equals(PropertyType.AdminPresentation.RULEIDENTIFIER)) {
fieldMetadataOverride.setRuleIdentifier(stringValue);
} else if (entry.getKey().equals(PropertyType.AdminPresentation.ORDER)) {
fieldMetadataOverride.setOrder(StringUtils.isEmpty(stringValue)?entry.getValue().intOverrideValue():
Integer.parseInt(stringValue));
} else if (entry.getKey().equals(PropertyType.AdminPresentation.GRIDORDER)) {
fieldMetadataOverride.setGridOrder(StringUtils.isEmpty(stringValue)?entry.getValue().intOverrideValue():
Integer.parseInt(stringValue));
} else if (entry.getKey().equals(PropertyType.AdminPresentation.VISIBILITY)) {
fieldMetadataOverride.setVisibility(VisibilityEnum.valueOf(stringValue));
} else if (entry.getKey().equals(PropertyType.AdminPresentation.FIELDTYPE)) {
fieldMetadataOverride.setFieldType(SupportedFieldType.valueOf(stringValue));
} else if (entry.getKey().equals(PropertyType.AdminPresentation.GROUPORDER)) {
fieldMetadataOverride.setGroupOrder(StringUtils.isEmpty(stringValue)?entry.getValue().intOverrideValue():
Integer.parseInt(stringValue));
} else if (entry.getKey().equals(PropertyType.AdminPresentation.GROUPCOLLAPSED)) {
fieldMetadataOverride.setGroupCollapsed(StringUtils.isEmpty(stringValue)?entry.getValue().booleanOverrideValue():
Boolean.parseBoolean(stringValue));
} else if (entry.getKey().equals(PropertyType.AdminPresentation.TABORDER)) {
fieldMetadataOverride.setTabOrder(StringUtils.isEmpty(stringValue)?entry.getValue().intOverrideValue():
Integer.parseInt(stringValue));
} else if (entry.getKey().equals(PropertyType.AdminPresentation.LARGEENTRY)) {
fieldMetadataOverride.setLargeEntry(StringUtils.isEmpty(stringValue)?entry.getValue().booleanOverrideValue():
Boolean.parseBoolean(stringValue));
} else if (entry.getKey().equals(PropertyType.AdminPresentation.PROMINENT)) {
fieldMetadataOverride.setProminent(StringUtils.isEmpty(stringValue)?entry.getValue().booleanOverrideValue():
Boolean.parseBoolean(stringValue));
} else if (entry.getKey().equals(PropertyType.AdminPresentation.READONLY)) {
fieldMetadataOverride.setReadOnly(StringUtils.isEmpty(stringValue)?entry.getValue().booleanOverrideValue():
Boolean.parseBoolean(stringValue));
} else if (entry.getKey().equals(PropertyType.AdminPresentation.REQUIREDOVERRIDE)) {
if (RequiredOverride.IGNORED!=RequiredOverride.valueOf(stringValue)) {
fieldMetadataOverride.setRequiredOverride(RequiredOverride.REQUIRED==RequiredOverride.valueOf(stringValue));
}
} else if (entry.getKey().equals(PropertyType.AdminPresentation.EXCLUDED)) {
fieldMetadataOverride.setExcluded(StringUtils.isEmpty(stringValue)?entry.getValue().booleanOverrideValue():
Boolean.parseBoolean(stringValue));
} else if (entry.getKey().equals(PropertyType.AdminPresentation.VALIDATIONCONFIGURATIONS)) {
processValidationAnnotations(entry.getValue().validationConfigurations(), fieldMetadataOverride);
} else if (entry.getKey().equals(PropertyType.AdminPresentationToOneLookup.LOOKUPDISPLAYPROPERTY)) {
fieldMetadataOverride.setLookupDisplayProperty(stringValue);
fieldMetadataOverride.setForeignKeyDisplayValueProperty(stringValue);
} else if (entry.getKey().equals(PropertyType.AdminPresentationToOneLookup.FORCEPOPULATECHILDPROPERTIES)) {
fieldMetadataOverride.setForcePopulateChildProperties(StringUtils.isEmpty(stringValue)?entry.getValue().booleanOverrideValue():
Boolean.parseBoolean(stringValue));
} else if (entry.getKey().equals(PropertyType.AdminPresentationToOneLookup.USESERVERSIDEINSPECTIONCACHE)) {
fieldMetadataOverride.setUseServerSideInspectionCache(StringUtils.isEmpty(stringValue)?
entry.getValue().booleanOverrideValue():Boolean.parseBoolean(stringValue));
} else if (entry.getKey().equals(PropertyType.AdminPresentationToOneLookup.LOOKUPTYPE)) {
fieldMetadataOverride.setLookupType(LookupType.valueOf(stringValue));
} else if (entry.getKey().equals(PropertyType.AdminPresentationToOneLookup.CUSTOMCRITERIA)) {
fieldMetadataOverride.setCustomCriteria(entry.getValue().stringArrayOverrideValue());
} else if (entry.getKey().equals(PropertyType.AdminPresentationDataDrivenEnumeration.OPTIONLISTENTITY)) {
fieldMetadataOverride.setOptionListEntity(stringValue);
} else if (entry.getKey().equals(PropertyType.AdminPresentationDataDrivenEnumeration.OPTIONVALUEFIELDNAME)) {
fieldMetadataOverride.setOptionValueFieldName(stringValue);
} else if (entry.getKey().equals(PropertyType.AdminPresentationDataDrivenEnumeration.OPTIONDISPLAYFIELDNAME)) {
fieldMetadataOverride.setOptionDisplayFieldName(stringValue);
} else if (entry.getKey().equals(PropertyType.AdminPresentationDataDrivenEnumeration.OPTIONCANEDITVALUES)) {
fieldMetadataOverride.setOptionCanEditValues(StringUtils.isEmpty(stringValue) ? entry.getValue()
.booleanOverrideValue() : Boolean.parseBoolean(stringValue));
} else if (entry.getKey().equals(PropertyType.AdminPresentationDataDrivenEnumeration.OPTIONFILTERPARAMS)) {
OptionFilterParam[] optionFilterParams = entry.getValue().optionFilterParams();
String[][] params = new String[optionFilterParams.length][3];
for (int j=0;j<params.length;j++) {
params[j][0] = optionFilterParams[j].param();
params[j][1] = optionFilterParams[j].value();
params[j][2] = String.valueOf(optionFilterParams[j].paramType());
}
fieldMetadataOverride.setOptionFilterValues(params);
} else {
if (LOG.isDebugEnabled()) {
LOG.debug("Unrecognized type: " + entry.getKey() + ". Not setting on basic field.");
}
}
}
return fieldMetadataOverride;
}
protected FieldMetadataOverride constructBasicMetadataOverride(AdminPresentation annot, AdminPresentationToOneLookup toOneLookup,
AdminPresentationDataDrivenEnumeration dataDrivenEnumeration) {
if (annot != null) {
FieldMetadataOverride override = new FieldMetadataOverride();
override.setBroadleafEnumeration(annot.broadleafEnumeration());
override.setColumnWidth(annot.columnWidth());
override.setExplicitFieldType(annot.fieldType());
override.setFieldType(annot.fieldType());
override.setGroup(annot.group());
override.setGroupCollapsed(annot.groupCollapsed());
override.setGroupOrder(annot.groupOrder());
override.setTab(annot.tab());
override.setRuleIdentifier(annot.ruleIdentifier());
override.setTabOrder(annot.tabOrder());
override.setHelpText(annot.helpText());
override.setHint(annot.hint());
override.setLargeEntry(annot.largeEntry());
override.setFriendlyName(annot.friendlyName());
override.setSecurityLevel(annot.securityLevel());
override.setOrder(annot.order());
override.setGridOrder(annot.gridOrder());
override.setVisibility(annot.visibility());
override.setProminent(annot.prominent());
override.setReadOnly(annot.readOnly());
override.setShowIfProperty(annot.showIfProperty());
override.setCurrencyCodeField(annot.currencyCodeField());
override.setRuleIdentifier(annot.ruleIdentifier());
override.setTranslatable(annot.translatable());
if (annot.validationConfigurations().length != 0) {
processValidationAnnotations(annot.validationConfigurations(), override);
}
if (annot.requiredOverride()!= RequiredOverride.IGNORED) {
override.setRequiredOverride(annot.requiredOverride()==RequiredOverride.REQUIRED);
}
override.setExcluded(annot.excluded());
override.setTooltip(annot.tooltip());
//the following annotations are complimentary to AdminPresentation
if (toOneLookup != null) {
override.setExplicitFieldType(SupportedFieldType.ADDITIONAL_FOREIGN_KEY);
override.setFieldType(SupportedFieldType.ADDITIONAL_FOREIGN_KEY);
override.setLookupDisplayProperty(toOneLookup.lookupDisplayProperty());
override.setForcePopulateChildProperties(toOneLookup.forcePopulateChildProperties());
override.setCustomCriteria(toOneLookup.customCriteria());
override.setUseServerSideInspectionCache(toOneLookup.useServerSideInspectionCache());
override.setToOneLookupCreatedViaAnnotation(true);
override.setLookupType(toOneLookup.lookupType());
}
if (dataDrivenEnumeration != null) {
override.setExplicitFieldType(SupportedFieldType.DATA_DRIVEN_ENUMERATION);
override.setFieldType(SupportedFieldType.DATA_DRIVEN_ENUMERATION);
override.setOptionCanEditValues(dataDrivenEnumeration.optionCanEditValues());
override.setOptionDisplayFieldName(dataDrivenEnumeration.optionDisplayFieldName());
if (!ArrayUtils.isEmpty(dataDrivenEnumeration.optionFilterParams())) {
Serializable[][] params = new Serializable[dataDrivenEnumeration.optionFilterParams().length][3];
for (int j=0;j<params.length;j++) {
params[j][0] = dataDrivenEnumeration.optionFilterParams()[j].param();
params[j][1] = dataDrivenEnumeration.optionFilterParams()[j].value();
params[j][2] = dataDrivenEnumeration.optionFilterParams()[j].paramType();
}
override.setOptionFilterValues(params);
}
override.setOptionListEntity(dataDrivenEnumeration.optionListEntity().getName());
override.setOptionValueFieldName(dataDrivenEnumeration.optionValueFieldName());
}
return override;
}
throw new IllegalArgumentException("AdminPresentation annotation not found on field");
}
protected void processValidationAnnotations(ValidationConfiguration[] configurations, FieldMetadataOverride override) {
for (ValidationConfiguration configuration : configurations) {
ConfigurationItem[] items = configuration.configurationItems();
Map<String, String> itemMap = new HashMap<String, String>();
for (ConfigurationItem item : items) {
itemMap.put(item.itemName(), item.itemValue());
}
if (override.getValidationConfigurations() == null) {
override.setValidationConfigurations(new LinkedHashMap<String, Map<String, String>>(5));
}
override.getValidationConfigurations().put(configuration.validationImplementation(), itemMap);
}
}
protected void buildBasicMetadata(Class<?> parentClass, Class<?> targetClass, Map<String, FieldMetadata> attributes,
FieldInfo field, FieldMetadataOverride basicFieldMetadata, DynamicEntityDao dynamicEntityDao) {
BasicFieldMetadata serverMetadata = (BasicFieldMetadata) attributes.get(field.getName());
BasicFieldMetadata metadata;
if (serverMetadata != null) {
metadata = serverMetadata;
} else {
metadata = new BasicFieldMetadata();
}
metadata.setName(field.getName());
metadata.setTargetClass(targetClass.getName());
metadata.setFieldName(field.getName());
if (basicFieldMetadata.getFieldType() != null) {
metadata.setFieldType(basicFieldMetadata.getFieldType());
}
if (basicFieldMetadata.getFriendlyName() != null) {
metadata.setFriendlyName(basicFieldMetadata.getFriendlyName());
}
if (basicFieldMetadata.getSecurityLevel() != null) {
metadata.setSecurityLevel(basicFieldMetadata.getSecurityLevel());
}
if (basicFieldMetadata.getVisibility() != null) {
metadata.setVisibility(basicFieldMetadata.getVisibility());
}
if (basicFieldMetadata.getOrder() != null) {
metadata.setOrder(basicFieldMetadata.getOrder());
}
if (basicFieldMetadata.getGridOrder() != null) {
metadata.setGridOrder(basicFieldMetadata.getGridOrder());
}
if (basicFieldMetadata.getExplicitFieldType() != null) {
metadata.setExplicitFieldType(basicFieldMetadata.getExplicitFieldType());
}
if (metadata.getExplicitFieldType()==SupportedFieldType.ADDITIONAL_FOREIGN_KEY) {
//this is a lookup - exclude the fields on this OneToOne or ManyToOne field
//metadata.setExcluded(true);
if (basicFieldMetadata.getForcePopulateChildProperties() == null || !basicFieldMetadata.getForcePopulateChildProperties()) {
metadata.setChildrenExcluded(true);
}
//metadata.setVisibility(VisibilityEnum.GRID_HIDDEN);
} else {
if (basicFieldMetadata.getExcluded()!=null) {
if (LOG.isDebugEnabled()) {
if (basicFieldMetadata.getExcluded()) {
LOG.debug("buildBasicMetadata:Excluding " + field.getName() + " because it was explicitly declared in config");
} else {
LOG.debug("buildBasicMetadata:Showing " + field.getName() + " because it was explicitly declared in config");
}
}
metadata.setExcluded(basicFieldMetadata.getExcluded());
}
}
if (basicFieldMetadata.getGroup()!=null) {
metadata.setGroup(basicFieldMetadata.getGroup());
}
if (basicFieldMetadata.getGroupOrder()!=null) {
metadata.setGroupOrder(basicFieldMetadata.getGroupOrder());
}
if (basicFieldMetadata.getGroupCollapsed()!=null) {
metadata.setGroupCollapsed(basicFieldMetadata.getGroupCollapsed());
}
if (basicFieldMetadata.getTab() != null) {
metadata.setTab(basicFieldMetadata.getTab());
}
if (basicFieldMetadata.getTabOrder() != null) {
metadata.setTabOrder(basicFieldMetadata.getTabOrder());
}
if (basicFieldMetadata.isLargeEntry()!=null) {
metadata.setLargeEntry(basicFieldMetadata.isLargeEntry());
}
if (basicFieldMetadata.isProminent()!=null) {
metadata.setProminent(basicFieldMetadata.isProminent());
}
if (basicFieldMetadata.getColumnWidth()!=null) {
metadata.setColumnWidth(basicFieldMetadata.getColumnWidth());
}
if (basicFieldMetadata.getBroadleafEnumeration()!=null) {
metadata.setBroadleafEnumeration(basicFieldMetadata.getBroadleafEnumeration());
}
if (!StringUtils.isEmpty(metadata.getBroadleafEnumeration()) && metadata.getFieldType()==SupportedFieldType.BROADLEAF_ENUMERATION) {
try {
setupBroadleafEnumeration(metadata.getBroadleafEnumeration(), metadata, dynamicEntityDao);
} catch (Exception e) {
throw new RuntimeException(e);
}
}
if (basicFieldMetadata.getReadOnly()!=null) {
metadata.setReadOnly(basicFieldMetadata.getReadOnly());
}
if (basicFieldMetadata.getTooltip()!=null) {
metadata.setTooltip(basicFieldMetadata.getTooltip());
}
if (basicFieldMetadata.getHelpText()!=null) {
metadata.setHelpText(basicFieldMetadata.getHelpText());
}
if (basicFieldMetadata.getHint()!=null) {
metadata.setHint(basicFieldMetadata.getHint());
}
if (basicFieldMetadata.getShowIfProperty()!=null) {
metadata.setShowIfProperty(basicFieldMetadata.getShowIfProperty());
}
if (basicFieldMetadata.getCurrencyCodeField()!=null) {
metadata.setCurrencyCodeField(basicFieldMetadata.getCurrencyCodeField());
}
if (basicFieldMetadata.getLookupDisplayProperty()!=null) {
metadata.setLookupDisplayProperty(basicFieldMetadata.getLookupDisplayProperty());
metadata.setForeignKeyDisplayValueProperty(basicFieldMetadata.getLookupDisplayProperty());
}
if (basicFieldMetadata.getForcePopulateChildProperties()!=null) {
metadata.setForcePopulateChildProperties(basicFieldMetadata.getForcePopulateChildProperties());
}
if (basicFieldMetadata.getCustomCriteria() != null) {
metadata.setCustomCriteria(basicFieldMetadata.getCustomCriteria());
}
if (basicFieldMetadata.getUseServerSideInspectionCache() != null) {
metadata.setUseServerSideInspectionCache(basicFieldMetadata.getUseServerSideInspectionCache());
}
if (basicFieldMetadata.getToOneLookupCreatedViaAnnotation()!=null) {
metadata.setToOneLookupCreatedViaAnnotation(basicFieldMetadata.getToOneLookupCreatedViaAnnotation());
}
if (basicFieldMetadata.getOptionListEntity()!=null) {
metadata.setOptionListEntity(basicFieldMetadata.getOptionListEntity());
}
if (metadata.getOptionListEntity() != null && metadata.getOptionListEntity().equals(DataDrivenEnumerationValueImpl.class.getName())) {
metadata.setOptionValueFieldName("key");
metadata.setOptionDisplayFieldName("display");
} else {
if (basicFieldMetadata.getOptionValueFieldName()!=null) {
metadata.setOptionValueFieldName(basicFieldMetadata.getOptionValueFieldName());
}
if (basicFieldMetadata.getOptionDisplayFieldName()!=null) {
metadata.setOptionDisplayFieldName(basicFieldMetadata.getOptionDisplayFieldName());
}
}
if (!StringUtils.isEmpty(metadata.getOptionListEntity()) && (StringUtils.isEmpty(metadata.getOptionValueFieldName()) || StringUtils.isEmpty(metadata.getOptionDisplayFieldName()))) {
throw new IllegalArgumentException("Problem setting up data driven enumeration for ("+field.getName()+"). The optionListEntity, optionValueFieldName and optionDisplayFieldName properties must all be included if not using DataDrivenEnumerationValueImpl as the optionListEntity.");
}
if (basicFieldMetadata.getOptionFilterValues() != null) {
String[][] options = new String[basicFieldMetadata.getOptionFilterValues().length][3];
int j = 0;
for (Serializable[] option : basicFieldMetadata.getOptionFilterValues()) {
options[j][0] = String.valueOf(option[0]);
options[j][1] = String.valueOf(option[1]);
options[j][2] = String.valueOf(option[2]);
j++;
}
metadata.setOptionFilterParams(options);
}
if (!StringUtils.isEmpty(metadata.getOptionListEntity())) {
buildDataDrivenList(metadata, dynamicEntityDao);
}
if (basicFieldMetadata.getRequiredOverride()!=null) {
metadata.setRequiredOverride(basicFieldMetadata.getRequiredOverride());
}
if (basicFieldMetadata.getValidationConfigurations()!=null) {
metadata.setValidationConfigurations(basicFieldMetadata.getValidationConfigurations());
}
if ((basicFieldMetadata.getFieldType() == SupportedFieldType.RULE_SIMPLE ||
basicFieldMetadata.getFieldType() == SupportedFieldType.RULE_WITH_QUANTITY)
&& basicFieldMetadata.getRuleIdentifier() == null) {
throw new IllegalArgumentException("ruleIdentifier property must be set on AdminPresentation when the fieldType is RULE_SIMPLE or RULE_WITH_QUANTITY");
}
if (basicFieldMetadata.getRuleIdentifier()!=null) {
metadata.setRuleIdentifier(basicFieldMetadata.getRuleIdentifier());
}
if (basicFieldMetadata.getLookupType()!=null) {
metadata.setLookupType(basicFieldMetadata.getLookupType());
}
if (basicFieldMetadata.getTranslatable() != null) {
metadata.setTranslatable(basicFieldMetadata.getTranslatable());
}
if (basicFieldMetadata.getIsDerived() != null) {
metadata.setDerived(basicFieldMetadata.getIsDerived());
}
attributes.put(field.getName(), metadata);
}
protected void buildDataDrivenList(BasicFieldMetadata metadata, DynamicEntityDao dynamicEntityDao) {
try {
Criteria criteria = dynamicEntityDao.createCriteria(Class.forName(metadata.getOptionListEntity()));
if (metadata.getOptionListEntity().equals(DataDrivenEnumerationValueImpl.class.getName())) {
criteria.add(Restrictions.eq("hidden", false));
}
if (metadata.getOptionFilterParams() != null) {
for (String[] param : metadata.getOptionFilterParams()) {
Criteria current = criteria;
String key = param[0];
if (!key.equals(".ignore")) {
if (key.contains(".")) {
String[] parts = key.split("\\.");
for (int j = 0; j < parts.length - 1; j++) {
current = current.createCriteria(parts[j], parts[j]);
}
}
current.add(Restrictions.eq(key, convertType(param[1], OptionFilterParamType.valueOf(param[2]))));
}
}
}
List results = criteria.list();
String[][] enumerationValues = new String[results.size()][2];
int j = 0;
for (Object param : results) {
enumerationValues[j][1] = String.valueOf(dynamicEntityDao.getFieldManager().getFieldValue(param, metadata.getOptionDisplayFieldName()));
enumerationValues[j][0] = String.valueOf(dynamicEntityDao.getFieldManager().getFieldValue(param, metadata.getOptionValueFieldName()));
j++;
}
if (!CollectionUtils.isEmpty(results) && metadata.getOptionListEntity().equals(DataDrivenEnumerationValueImpl.class.getName())) {
metadata.setOptionCanEditValues((Boolean) dynamicEntityDao.getFieldManager().getFieldValue(results.get(0), "type.modifiable"));
}
metadata.setEnumerationValues(enumerationValues);
} catch (Exception e) {
throw new RuntimeException(e);
}
}
@Override
public int getOrder() {
return FieldMetadataProvider.BASIC;
}
}
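Illustrative aside, not part of the provider above: the code repeatedly flattens OptionFilterParam entries into a String[][] of {param, value, paramType} triples before handing them to the metadata. A minimal standalone sketch of that conversion follows; the OptionParam type and the sample values are hypothetical stand-ins for the real annotation.

import java.util.Arrays;

public class OptionFilterParamSketch {

    // Hypothetical stand-in for the OptionFilterParam annotation used by the provider above.
    static class OptionParam {
        final String param;
        final String value;
        final String paramType;
        OptionParam(String param, String value, String paramType) {
            this.param = param;
            this.value = value;
            this.paramType = paramType;
        }
    }

    // Flattens each entry into a {param, value, paramType} triple, mirroring the loops above.
    static String[][] flatten(OptionParam[] params) {
        String[][] result = new String[params.length][3];
        for (int j = 0; j < params.length; j++) {
            result[j][0] = params[j].param;
            result[j][1] = params[j].value;
            result[j][2] = params[j].paramType;
        }
        return result;
    }

    public static void main(String[] args) {
        OptionParam[] params = {
                new OptionParam("type.key", "PRODUCT_OPTION", "STRING"),
                new OptionParam("hidden", "false", "BOOLEAN")
        };
        System.out.println(Arrays.deepToString(flatten(params)));
    }
}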
| 0true
|
admin_broadleaf-open-admin-platform_src_main_java_org_broadleafcommerce_openadmin_server_dao_provider_metadata_BasicFieldMetadataProvider.java
|
2,316 |
static class FactorTimeZoneRounding extends TimeZoneRounding {
final static byte ID = 7;
private TimeZoneRounding timeZoneRounding;
private float factor;
FactorTimeZoneRounding() { // for serialization
}
FactorTimeZoneRounding(TimeZoneRounding timeZoneRounding, float factor) {
this.timeZoneRounding = timeZoneRounding;
this.factor = factor;
}
@Override
public byte id() {
return ID;
}
@Override
public long roundKey(long utcMillis) {
return timeZoneRounding.roundKey((long) (factor * utcMillis));
}
@Override
public long valueForKey(long key) {
return timeZoneRounding.valueForKey(key);
}
@Override
public long nextRoundingValue(long value) {
return timeZoneRounding.nextRoundingValue(value);
}
@Override
public void readFrom(StreamInput in) throws IOException {
timeZoneRounding = (TimeZoneRounding) Rounding.Streams.read(in);
factor = in.readFloat();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
Rounding.Streams.write(timeZoneRounding, out);
out.writeFloat(factor);
}
}
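Illustrative aside: FactorTimeZoneRounding scales the incoming timestamp by a constant factor and then delegates to the wrapped rounding, so roundKey(t) is effectively inner.roundKey((long) (factor * t)). The standalone sketch below reproduces that arithmetic with a plain fixed-interval rounding standing in for the wrapped TimeZoneRounding; the class name and values are made up.

public class FactorRoundingSketch {

    // Stand-in for the wrapped rounding: buckets values by a fixed interval.
    static long intervalRoundKey(long utcMillis, long interval) {
        return utcMillis / interval; // key is the bucket ordinal
    }

    static long intervalValueForKey(long key, long interval) {
        return key * interval; // start of the bucket
    }

    // Mirrors FactorTimeZoneRounding#roundKey: scale first, then delegate.
    static long factorRoundKey(long utcMillis, float factor, long interval) {
        return intervalRoundKey((long) (factor * utcMillis), interval);
    }

    public static void main(String[] args) {
        long hour = 3_600_000L;
        long t = 7_450_000L;                      // some timestamp in millis
        long key = factorRoundKey(t, 0.5f, hour); // scaled to 3,725,000, which falls in bucket 1
        System.out.println("key=" + key + " bucketStart=" + intervalValueForKey(key, hour));
    }
}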
| 0true
|
src_main_java_org_elasticsearch_common_rounding_TimeZoneRounding.java
|
5,831 |
public static class Field {
// Fields that default to null or -1 are often set to their real default in HighlighterParseElement#parse
private final String field;
private int fragmentCharSize = -1;
private int numberOfFragments = -1;
private int fragmentOffset = -1;
private String encoder;
private String[] preTags;
private String[] postTags;
private Boolean scoreOrdered;
private Boolean highlightFilter;
private Boolean requireFieldMatch;
private String highlighterType;
private Boolean forceSource;
private String fragmenter;
private int boundaryMaxScan = -1;
private Character[] boundaryChars = null;
private Query highlightQuery;
private int noMatchSize = -1;
private Set<String> matchedFields;
private Map<String, Object> options;
private int phraseLimit = -1;
public Field(String field) {
this.field = field;
}
public String field() {
return field;
}
public int fragmentCharSize() {
return fragmentCharSize;
}
public void fragmentCharSize(int fragmentCharSize) {
this.fragmentCharSize = fragmentCharSize;
}
public int numberOfFragments() {
return numberOfFragments;
}
public void numberOfFragments(int numberOfFragments) {
this.numberOfFragments = numberOfFragments;
}
public int fragmentOffset() {
return fragmentOffset;
}
public void fragmentOffset(int fragmentOffset) {
this.fragmentOffset = fragmentOffset;
}
public String encoder() {
return encoder;
}
public void encoder(String encoder) {
this.encoder = encoder;
}
public String[] preTags() {
return preTags;
}
public void preTags(String[] preTags) {
this.preTags = preTags;
}
public String[] postTags() {
return postTags;
}
public void postTags(String[] postTags) {
this.postTags = postTags;
}
public Boolean scoreOrdered() {
return scoreOrdered;
}
public void scoreOrdered(boolean scoreOrdered) {
this.scoreOrdered = scoreOrdered;
}
public Boolean highlightFilter() {
return highlightFilter;
}
public void highlightFilter(boolean highlightFilter) {
this.highlightFilter = highlightFilter;
}
public Boolean requireFieldMatch() {
return requireFieldMatch;
}
public void requireFieldMatch(boolean requireFieldMatch) {
this.requireFieldMatch = requireFieldMatch;
}
public String highlighterType() {
return highlighterType;
}
public void highlighterType(String type) {
this.highlighterType = type;
}
public Boolean forceSource() {
return forceSource;
}
public void forceSource(boolean forceSource) {
this.forceSource = forceSource;
}
public String fragmenter() {
return fragmenter;
}
public void fragmenter(String fragmenter) {
this.fragmenter = fragmenter;
}
public int boundaryMaxScan() {
return boundaryMaxScan;
}
public void boundaryMaxScan(int boundaryMaxScan) {
this.boundaryMaxScan = boundaryMaxScan;
}
public Character[] boundaryChars() {
return boundaryChars;
}
public void boundaryChars(Character[] boundaryChars) {
this.boundaryChars = boundaryChars;
}
public Query highlightQuery() {
return highlightQuery;
}
public void highlightQuery(Query highlightQuery) {
this.highlightQuery = highlightQuery;
}
public int noMatchSize() {
return noMatchSize;
}
public void noMatchSize(int noMatchSize) {
this.noMatchSize = noMatchSize;
}
public int phraseLimit() {
return phraseLimit;
}
public void phraseLimit(int phraseLimit) {
this.phraseLimit = phraseLimit;
}
public Set<String> matchedFields() {
return matchedFields;
}
public void matchedFields(Set<String> matchedFields) {
this.matchedFields = matchedFields;
}
public Map<String, Object> options() {
return options;
}
public void options(Map<String, Object> options) {
this.options = options;
}
}
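Illustrative aside: the Field holder above is a plain mutable bag of per-field highlight settings, normally populated by HighlighterParseElement. A hedged usage sketch follows; the import path is inferred from this row's file name, and the chosen values (tags, fragment sizes, the "fvh" highlighter name) are examples rather than recommendations.

import org.elasticsearch.search.highlight.SearchContextHighlight.Field;

public class HighlightFieldUsageSketch {

    // Assembles per-field highlight settings the way a parse element might.
    static Field titleHighlight() {
        Field field = new Field("title");
        field.fragmentCharSize(150);              // ~150 characters per fragment
        field.numberOfFragments(3);               // return at most three fragments
        field.preTags(new String[] { "<em>" });
        field.postTags(new String[] { "</em>" });
        field.requireFieldMatch(true);            // only highlight matches on this field
        field.highlighterType("fvh");             // assumed highlighter name
        return field;
    }
}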
| 1no label
|
src_main_java_org_elasticsearch_search_highlight_SearchContextHighlight.java
|
1,418 |
public static class PutResponse {
private final boolean acknowledged;
private final IndexTemplateMetaData template;
public PutResponse(boolean acknowledged, IndexTemplateMetaData template) {
this.acknowledged = acknowledged;
this.template = template;
}
public boolean acknowledged() {
return acknowledged;
}
public IndexTemplateMetaData template() {
return template;
}
}
| 0true
|
src_main_java_org_elasticsearch_cluster_metadata_MetaDataIndexTemplateService.java
|
855 |
public class ODatabaseSecurityResources {
public final static String ALL = "*";
public final static String DATABASE = "database";
public final static String SCHEMA = "database.schema";
public final static String CLASS = "database.class";
public final static String ALL_CLASSES = "database.class.*";
public final static String CLUSTER = "database.cluster";
public final static String ALL_CLUSTERS = "database.cluster.*";
public final static String COMMAND = "database.command";
public final static String FUNCTION = "database.function";
public final static String DATABASE_CONFIG = "database.config";
public final static String BYPASS_RESTRICTED = "database.bypassRestricted";
public final static String RECORD_HOOK = "database.hook.record";
public final static String SERVER_ADMIN = "server.admin";
}
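Illustrative aside: these constants are dotted resource prefixes; a concrete permission is typically the prefix plus a specific target (for example database.cluster plus a cluster name), with * acting as a wildcard. The matching sketch below only illustrates that naming convention and is not OrientDB's actual security check.

public class ResourceMatchSketch {

    // Returns true when a granted resource covers the requested one,
    // treating "*" and a "<prefix>.*" grant as wildcards.
    static boolean covers(String granted, String requested) {
        if (granted.equals("*") || granted.equals(requested)) {
            return true;
        }
        return granted.endsWith(".*")
                && requested.startsWith(granted.substring(0, granted.length() - 1));
    }

    public static void main(String[] args) {
        System.out.println(covers("database.cluster.*", "database.cluster.internal")); // true
        System.out.println(covers("database.class", "database.class"));                // true
        System.out.println(covers("database.command", "database.function"));           // false
    }
}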
| 0true
|
core_src_main_java_com_orientechnologies_orient_core_metadata_security_ODatabaseSecurityResources.java
|
321 |
public class JOTMTransactionManager extends AbstractTransactionManager
{
@Override
public int getEventIdentifier()
{
return 0;
}
public static class Provider extends TransactionManagerProvider
{
public Provider()
{
super( NAME );
}
@Override
public AbstractTransactionManager loadTransactionManager(
String txLogDir, XaDataSourceManager xaDataSourceManager, KernelPanicEventGenerator kpe,
RemoteTxHook rollbackHook, StringLogger msgLog,
FileSystemAbstraction fileSystem, TransactionStateFactory stateFactory )
{
return new JOTMTransactionManager( xaDataSourceManager, stateFactory );
}
}
public static final String NAME = "JOTM";
private final TransactionManager current;
private final Jotm jotm;
private final XaDataSourceManager xaDataSourceManager;
private final Map<Transaction, TransactionState> states = new HashMap<>();
private final TransactionStateFactory stateFactory;
private JOTMTransactionManager( XaDataSourceManager xaDataSourceManager, TransactionStateFactory stateFactory )
{
this.xaDataSourceManager = xaDataSourceManager;
this.stateFactory = stateFactory;
Registry registry = null;
try
{
registry = LocateRegistry.getRegistry( 1099 );
}
catch ( RemoteException re )
{
// Nothing yet, we can still create it.
}
if ( registry == null )
{
try
{
LocateRegistry.createRegistry( 1099 );
}
catch ( RemoteException re )
{
// Something is fishy here, plus it is impossible to continue.
// So we die.
throw new Error( re );
}
}
try
{
jotm = new Jotm( true, false );
current = jotm.getTransactionManager();
}
catch ( NamingException ne )
{
throw new Error( "Error during JOTM creation", ne );
}
}
/**
* Starts the registry and binds a JOTM instance to it. Registers the
* resource adapters declared by the neo data source manager to get ready
* for possible recovery.
*/
@Override
public void init()
{
}
@Override
public void begin() throws NotSupportedException, SystemException
{
current.begin();
Transaction tx = getTransaction();
states.put( tx, stateFactory.create( tx ) );
}
@Override
public void commit() throws RollbackException, HeuristicMixedException,
HeuristicRollbackException, SecurityException,
IllegalStateException, SystemException
{
current.commit();
}
@Override
public int getStatus() throws SystemException
{
return current.getStatus();
}
@Override
public Transaction getTransaction() throws SystemException
{
if ( current == null )
{
return null;
}
return current.getTransaction();
}
@Override
public void resume( Transaction arg0 ) throws InvalidTransactionException,
IllegalStateException, SystemException
{
current.resume( arg0 );
}
@Override
public void rollback() throws IllegalStateException, SecurityException,
SystemException
{
current.rollback();
}
@Override
public void setRollbackOnly() throws IllegalStateException, SystemException
{
current.setRollbackOnly();
}
@Override
public void setTransactionTimeout( int arg0 ) throws SystemException
{
current.setTransactionTimeout( arg0 );
}
@Override
public Transaction suspend() throws SystemException
{
return current.suspend();
}
@Override
public void start() throws Throwable
{
}
/**
* Stops the JOTM instance.
*/
@Override
public void stop()
{
jotm.stop();
}
@Override
public void shutdown() throws Throwable
{
}
public Jotm getJotmTxManager()
{
return jotm;
}
@Override
public void doRecovery() throws Throwable
{
TransactionResourceManager trm = new TransactionResourceManager()
{
@Override
public void returnXAResource( String rmName, XAResource rmXares )
{
}
};
try
{
for ( XaDataSource xaDs : xaDataSourceManager.getAllRegisteredDataSources() )
{
Current.getTransactionRecovery().registerResourceManager( xaDs.getName(),
xaDs.getXaConnection().getXaResource(), xaDs.getName(), trm );
}
Current.getTransactionRecovery().startResourceManagerRecovery();
}
catch ( XAException e )
{
throw new Error( "Error registering xa datasource", e );
}
}
@Override
public TransactionState getTransactionState()
{
try
{
TransactionState state = states.get( getTransaction() );
return state != null ? state : TransactionState.NO_STATE;
}
catch ( SystemException e )
{
throw new RuntimeException( e );
}
}
}
| 0true
|
community_kernel_src_test_java_org_neo4j_kernel_impl_transaction_JOTMTransactionManager.java
|
2,899 |
public class NumericDoubleTokenizer extends NumericTokenizer {
public NumericDoubleTokenizer(Reader reader, int precisionStep, char[] buffer) throws IOException {
super(reader, new NumericTokenStream(precisionStep), buffer, null);
}
@Override
protected void setValue(NumericTokenStream tokenStream, String value) {
tokenStream.setDoubleValue(Double.parseDouble(value));
}
}
| 0true
|
src_main_java_org_elasticsearch_index_analysis_NumericDoubleTokenizer.java
|
371 |
public class HBaseMultiWriteStoreTest extends MultiWriteKeyColumnValueStoreTest {
@BeforeClass
public static void startHBase() throws IOException {
HBaseStorageSetup.startHBase();
}
@AfterClass
public static void stopHBase() {
// Workaround for https://issues.apache.org/jira/browse/HBASE-10312
if (VersionInfo.getVersion().startsWith("0.96"))
HBaseStorageSetup.killIfRunning();
}
public KeyColumnValueStoreManager openStorageManager() throws BackendException {
return new HBaseStoreManager(HBaseStorageSetup.getHBaseConfiguration());
}
}
| 0true
|
titan-hbase-parent_titan-hbase-core_src_test_java_com_thinkaurelius_titan_diskstorage_hbase_HBaseMultiWriteStoreTest.java
|
2,541 |
public class SimpleCountTests extends ElasticsearchIntegrationTest {
@Test
public void testCountRandomPreference() throws InterruptedException, ExecutionException {
client().admin().indices().prepareCreate("test").setSettings(ImmutableSettings.settingsBuilder().put("index.number_of_shards", between(1, 3))).get();
indexRandom(true, client().prepareIndex("test", "type", "1").setSource("field", "value"),
client().prepareIndex("test", "type", "2").setSource("field", "value"),
client().prepareIndex("test", "type", "3").setSource("field", "value"),
client().prepareIndex("test", "type", "4").setSource("field", "value"),
client().prepareIndex("test", "type", "5").setSource("field", "value"),
client().prepareIndex("test", "type", "6").setSource("field", "value"));
int iters = atLeast(10);
for (int i = 0; i < iters; i++) {
// id is not indexed, but let's see that we automatically convert to
CountResponse countResponse = client().prepareCount().setQuery(QueryBuilders.matchAllQuery()).setPreference(randomUnicodeOfLengthBetween(0, 4)).get();
assertHitCount(countResponse, 6l);
}
}
@Test
public void simpleIpTests() throws Exception {
client().admin().indices().prepareCreate("test").setSettings(ImmutableSettings.settingsBuilder().put("index.number_of_shards", 1)).execute().actionGet();
client().admin().indices().preparePutMapping("test").setType("type1")
.setSource(XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
.startObject("from").field("type", "ip").endObject()
.startObject("to").field("type", "ip").endObject()
.endObject().endObject().endObject())
.execute().actionGet();
client().prepareIndex("test", "type1", "1").setSource("from", "192.168.0.5", "to", "192.168.0.10").setRefresh(true).execute().actionGet();
CountResponse countResponse = client().prepareCount()
.setQuery(boolQuery().must(rangeQuery("from").lt("192.168.0.7")).must(rangeQuery("to").gt("192.168.0.7")))
.execute().actionGet();
assertHitCount(countResponse, 1l);
}
@Test
public void simpleIdTests() {
client().admin().indices().prepareCreate("test").setSettings(ImmutableSettings.settingsBuilder().put("index.number_of_shards", 1)).execute().actionGet();
client().prepareIndex("test", "type", "XXX1").setSource("field", "value").setRefresh(true).execute().actionGet();
// id is not indexed, but let's see that we automatically convert to
CountResponse countResponse = client().prepareCount().setQuery(QueryBuilders.termQuery("_id", "XXX1")).execute().actionGet();
assertHitCount(countResponse, 1l);
countResponse = client().prepareCount().setQuery(QueryBuilders.queryString("_id:XXX1")).execute().actionGet();
assertHitCount(countResponse, 1l);
// id is not indexed, but we can automatically support prefix as well
countResponse = client().prepareCount().setQuery(QueryBuilders.prefixQuery("_id", "XXX")).execute().actionGet();
assertHitCount(countResponse, 1l);
countResponse = client().prepareCount().setQuery(QueryBuilders.queryString("_id:XXX*").lowercaseExpandedTerms(false)).execute().actionGet();
assertHitCount(countResponse, 1l);
}
@Test
public void simpleDateMathTests() throws Exception {
prepareCreate("test").setSettings(ImmutableSettings.settingsBuilder()).execute().actionGet();
client().prepareIndex("test", "type1", "1").setSource("field", "2010-01-05T02:00").execute().actionGet();
client().prepareIndex("test", "type1", "2").setSource("field", "2010-01-06T02:00").execute().actionGet();
ensureGreen();
refresh();
CountResponse countResponse = client().prepareCount("test").setQuery(QueryBuilders.rangeQuery("field").gte("2010-01-03||+2d").lte("2010-01-04||+2d")).execute().actionGet();
assertNoFailures(countResponse);
assertHitCount(countResponse, 2l);
countResponse = client().prepareCount("test").setQuery(QueryBuilders.queryString("field:[2010-01-03||+2d TO 2010-01-04||+2d]")).execute().actionGet();
assertHitCount(countResponse, 2l);
}
@Test
public void localDependentDateTests() throws Exception {
prepareCreate("test")
.addMapping("type1",
jsonBuilder().startObject()
.startObject("type1")
.startObject("properties")
.startObject("date_field")
.field("type", "date")
.field("format", "E, d MMM yyyy HH:mm:ss Z")
.field("locale", "de")
.endObject()
.endObject()
.endObject()
.endObject())
.execute().actionGet();
ensureGreen();
for (int i = 0; i < 10; i++) {
client().prepareIndex("test", "type1", "" + i).setSource("date_field", "Mi, 06 Dez 2000 02:55:00 -0800").execute().actionGet();
client().prepareIndex("test", "type1", "" + (10 + i)).setSource("date_field", "Do, 07 Dez 2000 02:55:00 -0800").execute().actionGet();
}
client().admin().indices().prepareRefresh().execute().actionGet();
for (int i = 0; i < 10; i++) {
CountResponse countResponse = client().prepareCount("test")
.setQuery(QueryBuilders.rangeQuery("date_field").gte("Di, 05 Dez 2000 02:55:00 -0800").lte("Do, 07 Dez 2000 00:00:00 -0800"))
.execute().actionGet();
assertHitCount(countResponse, 10l);
countResponse = client().prepareCount("test")
.setQuery(QueryBuilders.rangeQuery("date_field").gte("Di, 05 Dez 2000 02:55:00 -0800").lte("Fr, 08 Dez 2000 00:00:00 -0800"))
.execute().actionGet();
assertHitCount(countResponse, 20l);
}
}
}
| 0true
|
src_test_java_org_elasticsearch_count_simple_SimpleCountTests.java
|
563 |
abstract class AbstractClusterOperation extends AbstractOperation implements JoinOperation {
@Override
public boolean returnsResponse() {
return false;
}
@Override
public final String getServiceName() {
return ClusterServiceImpl.SERVICE_NAME;
}
}
| 0true
|
hazelcast_src_main_java_com_hazelcast_cluster_AbstractClusterOperation.java
|
371 |
@Service("blTranslationService")
public class TranslationServiceImpl implements TranslationService {
protected static final Log LOG = LogFactory.getLog(TranslationServiceImpl.class);
@Resource(name = "blTranslationDao")
protected TranslationDao dao;
protected Cache cache;
@Override
@Transactional("blTransactionManager")
public Translation save(Translation translation) {
return dao.save(translation);
}
@Override
@Transactional("blTransactionManager")
public Translation save(String entityType, String entityId, String fieldName, String localeCode,
String translatedValue) {
TranslatedEntity te = getEntityType(entityType);
Translation translation = getTranslation(te, entityId, fieldName, localeCode);
if (translation == null) {
translation = dao.create();
translation.setEntityType(te);
translation.setEntityId(entityId);
translation.setFieldName(fieldName);
translation.setLocaleCode(localeCode);
}
translation.setTranslatedValue(translatedValue);
return save(translation);
}
@Override
public Translation findTranslationById(Long id) {
return dao.readTranslationById(id);
}
@Override
@Transactional("blTransactionManager")
public Translation update(Long translationId, String localeCode, String translatedValue) {
Translation t = dao.readTranslationById(translationId);
// Check to see if there is another translation that matches this updated one. We'll remove it if it exists
Translation t2 = dao.readTranslation(t.getEntityType(), t.getEntityId(), t.getFieldName(), localeCode);
if (t2 != null && t != t2) {
dao.delete(t2);
}
t.setLocaleCode(localeCode);
t.setTranslatedValue(translatedValue);
return save(t);
}
@Override
@Transactional("blTransactionManager")
public void deleteTranslationById(Long translationId) {
Translation t = dao.readTranslationById(translationId);
dao.delete(t);
}
@Override
public Translation getTranslation(TranslatedEntity entity, String entityId, String fieldName, String localeCode) {
return dao.readTranslation(entity, entityId, fieldName, localeCode);
}
@Override
public List<Translation> getTranslations(String ceilingEntityClassname, String entityId, String property) {
TranslatedEntity entityType = getEntityType(ceilingEntityClassname);
return dao.readTranslations(entityType, entityId, property);
}
@Override
public String getTranslatedValue(Object entity, String property, Locale locale) {
// Attempt to get a translated value for this property to override the default value
TranslatedEntity entityType = getEntityType(entity);
String entityId = getEntityId(entity, entityType);
String localeCode = locale.getLanguage();
String localeCountryCode = localeCode;
if (StringUtils.isNotBlank(locale.getCountry())) {
localeCountryCode += "_" + locale.getCountry();
}
Translation translation;
// First, we'll try to look up a country language combo (en_GB), utilizing the cache
String countryCacheKey = getCacheKey(entityType, entityId, property, localeCountryCode);
Element countryValue = getCache().get(countryCacheKey);
if (countryValue != null) {
translation = (Translation) countryValue.getObjectValue();
} else {
translation = getTranslation(entityType, entityId, property, localeCountryCode);
if (translation == null) {
translation = new TranslationImpl();
}
getCache().put(new Element(countryCacheKey, translation));
}
// If we don't find one, let's try just the language (en), again utilizing the cache
if (translation.getTranslatedValue()==null) {
String nonCountryCacheKey = getCacheKey(entityType, entityId, property, localeCode);
Element nonCountryValue = getCache().get(nonCountryCacheKey);
if (nonCountryValue != null) {
translation = (Translation) nonCountryValue.getObjectValue();
} else {
translation = getTranslation(entityType, entityId, property, localeCode);
if (translation == null) {
translation = new TranslationImpl();
}
getCache().put(new Element(nonCountryCacheKey, translation));
}
}
// If we have a match on a translation, use that instead of what we found on the entity.
if (StringUtils.isNotBlank(translation.getTranslatedValue())) {
return translation.getTranslatedValue();
}
return null;
}
protected TranslatedEntity getEntityType(Class<?> entityClass) {
for (Entry<String, TranslatedEntity> entry : TranslatedEntity.getTypes().entrySet()) {
try {
Class<?> clazz = Class.forName(entry.getKey());
if (clazz.isAssignableFrom(entityClass)) {
return entry.getValue();
}
} catch (ClassNotFoundException e) {
throw new IllegalArgumentException("TranslatedEntity type was not set to a known class", e);
}
}
throw new IllegalArgumentException(entityClass.getName() + " is not a known translatable class");
}
protected TranslatedEntity getEntityType(Object entity) {
return getEntityType(entity.getClass());
}
protected TranslatedEntity getEntityType(String className) {
try {
Class<?> clazz = Class.forName(className);
return getEntityType(clazz);
} catch (ClassNotFoundException e) {
throw new IllegalArgumentException(className + " is not a known translatable class");
}
}
protected String getEntityId(Object entity, TranslatedEntity entityType) {
Map<String, Object> idMetadata = dao.getIdPropertyMetadata(entityType);
String idProperty = (String) idMetadata.get("name");
Type idType = (Type) idMetadata.get("type");
if (!(idType instanceof LongType || idType instanceof StringType)) {
throw new UnsupportedOperationException("Only ID types of String and Long are currently supported");
}
Object idValue = null;
try {
idValue = PropertyUtils.getProperty(entity, idProperty);
} catch (Exception e) {
throw new RuntimeException("Error reading id property", e);
}
if (idType instanceof StringType) {
return (String) idValue;
} else if (idType instanceof LongType) {
return String.valueOf(idValue);
}
throw new IllegalArgumentException(String.format("Could not retrieve value for id property. Object: [%s], " +
"ID Property: [%s], ID Type: [%s]", entity, idProperty, idType));
}
protected String getCacheKey(TranslatedEntity entityType, String entityId, String property, String localeCode) {
return StringUtils.join(new String[] { entityType.getFriendlyType(), entityId, property, localeCode }, "|");
}
protected Cache getCache() {
if (cache == null) {
cache = CacheManager.getInstance().getCache("blTranslationElements");
}
return cache;
}
}
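Illustrative aside: getTranslatedValue above probes a country-qualified locale key first (for example en_GB) and then falls back to the bare language (en), caching whatever Translation it finds, possibly an empty one, under each cache key it builds. The standalone sketch below reproduces only the key derivation; the separator and ordering follow getCacheKey, while the sample values are invented.

import java.util.Locale;

public class TranslationKeySketch {

    // Mirrors getCacheKey: friendlyType|entityId|property|localeCode joined with '|'.
    static String cacheKey(String friendlyType, String entityId, String property, String localeCode) {
        return String.join("|", friendlyType, entityId, property, localeCode);
    }

    public static void main(String[] args) {
        Locale locale = new Locale("en", "GB");
        String language = locale.getLanguage();                  // "en"
        String languageCountry = locale.getCountry().isEmpty()
                ? language
                : language + "_" + locale.getCountry();          // "en_GB"

        // Lookup order used by getTranslatedValue: country-qualified key first, then language-only.
        System.out.println(cacheKey("Product", "42", "name", languageCountry));
        System.out.println(cacheKey("Product", "42", "name", language));
    }
}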
| 0true
|
common_src_main_java_org_broadleafcommerce_common_i18n_service_TranslationServiceImpl.java
|
3,013 |
public class SimpleIdReaderTypeCache implements IdReaderTypeCache {
private final String type;
private final ObjectIntOpenHashMap<HashedBytesArray> idToDoc;
private final HashedBytesArray[] docIdToId;
private final HashedBytesArray[] parentIdsValues;
private final int[] parentIdsOrdinals;
private long sizeInBytes = -1;
public SimpleIdReaderTypeCache(String type, ObjectIntOpenHashMap<HashedBytesArray> idToDoc, HashedBytesArray[] docIdToId,
HashedBytesArray[] parentIdsValues, int[] parentIdsOrdinals) {
this.type = type;
this.idToDoc = idToDoc;
this.docIdToId = docIdToId;
this.parentIdsValues = parentIdsValues;
this.parentIdsOrdinals = parentIdsOrdinals;
}
public String type() {
return this.type;
}
public HashedBytesArray parentIdByDoc(int docId) {
return parentIdsValues[parentIdsOrdinals[docId]];
}
public int docById(HashedBytesArray uid) {
return idToDoc.getOrDefault(uid, -1);
}
public HashedBytesArray idByDoc(int docId) {
return docIdToId[docId];
}
public long sizeInBytes() {
if (sizeInBytes == -1) {
sizeInBytes = computeSizeInBytes();
}
return sizeInBytes;
}
/**
* Returns an already stored instance if one exists; otherwise returns the given instance.
*/
public HashedBytesArray canReuse(HashedBytesArray id) {
if (idToDoc.containsKey(id)) {
// we can use #lkey() since this is called from a synchronized block
return idToDoc.lkey();
} else {
return id;
}
}
long computeSizeInBytes() {
long sizeInBytes = 0;
// Ignore type field
// sizeInBytes += ((type.length() * RamUsage.NUM_BYTES_CHAR) + (3 * RamUsage.NUM_BYTES_INT)) + RamUsage.NUM_BYTES_OBJECT_HEADER;
sizeInBytes += RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + (idToDoc.values.length * RamUsageEstimator.NUM_BYTES_INT);
sizeInBytes += RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + (idToDoc.allocated.length);
final boolean[] states = idToDoc.allocated;
final Object[] keys = idToDoc.keys;
for (int i = 0; i < states.length; i++) {
if (states[i]) {
HashedBytesArray bytesArray = (HashedBytesArray) keys[i];
if (bytesArray != null) {
sizeInBytes += RamUsageEstimator.NUM_BYTES_OBJECT_HEADER + (bytesArray.length() + RamUsageEstimator.NUM_BYTES_INT);
} else {
sizeInBytes += RamUsageEstimator.NUM_BYTES_OBJECT_REF;
}
}
}
// The docIdToId array contains references to idToDoc for this segment or other segments, so we can use OBJECT_REF
sizeInBytes += RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + (RamUsageEstimator.NUM_BYTES_OBJECT_REF * docIdToId.length);
for (HashedBytesArray bytesArray : parentIdsValues) {
if (bytesArray == null) {
sizeInBytes += RamUsageEstimator.NUM_BYTES_OBJECT_REF;
} else {
sizeInBytes += RamUsageEstimator.NUM_BYTES_OBJECT_HEADER + (bytesArray.length() + RamUsageEstimator.NUM_BYTES_INT);
}
}
sizeInBytes += RamUsageEstimator.sizeOf(parentIdsOrdinals);
return sizeInBytes;
}
}
| 0true
|
src_main_java_org_elasticsearch_index_cache_id_simple_SimpleIdReaderTypeCache.java
|
134 |
@RunWith(HazelcastSerialClassRunner.class)
public abstract class ClientTestSupport extends HazelcastTestSupport {
@Rule
public final ClientTestResource clientResource = new ClientTestResource(createConfig());
protected final HazelcastInstance getInstance() {
return clientResource.instance;
}
protected final SimpleClient getClient() {
return clientResource.client;
}
protected abstract Config createConfig();
public static final class ClientTestResource extends ExternalResource {
private final Config config;
private HazelcastInstance instance;
private SimpleClient client;
public ClientTestResource(Config config) {
this.config = config;
}
protected void before() throws Throwable {
instance = new TestHazelcastInstanceFactory(1).newHazelcastInstance(config);
client = newClient(TestUtil.getNode(instance));
client.auth();
}
protected void after() {
try {
client.close();
} catch (IOException e) {
e.printStackTrace();
}
instance.shutdown();
}
}
public static SimpleClient newClient(Node node) throws IOException {
if (node.isActive()) {
if (TestEnvironment.isMockNetwork()) {
ClientEngineImpl engine = node.clientEngine;
return new MockSimpleClient(engine);
} else {
return new SocketSimpleClient(node);
}
}
throw new IllegalArgumentException("Node is not active: " + node.getThisAddress());
}
}
| 0true
|
hazelcast_src_test_java_com_hazelcast_client_ClientTestSupport.java
|
1,598 |
public class OperationTypes implements Serializable {
private static final long serialVersionUID = 1L;
private OperationType fetchType = OperationType.BASIC;
private OperationType removeType = OperationType.BASIC;
private OperationType addType = OperationType.BASIC;
private OperationType updateType = OperationType.BASIC;
private OperationType inspectType = OperationType.BASIC;
public OperationTypes() {
//do nothing
}
public OperationTypes(OperationType fetchType, OperationType removeType, OperationType addType, OperationType updateType, OperationType inspectType) {
this.removeType = removeType;
this.addType = addType;
this.updateType = updateType;
this.fetchType = fetchType;
this.inspectType = inspectType;
}
/**
* How should the system execute a removal of this item.
* <p/>
* OperationType BASIC will result in the item being removed based on its primary key
* OperationType NONDESTRUCTIVEREMOVE will result in the item being removed from the containing list in the containing entity. This
* is useful when you don't want the item to actually be deleted, but simply removed from the parent collection.
* OperationType ADORNEDTARGETLIST will result in a join structure being deleted (not either of the associated entities).
* org.broadleafcommerce.core.catalog.domain.CategoryProductXrefImpl is an example of a join structure entity.
* OperationType MAP will result in the item being removed from the requisite map in the containing entity.
*
* @return the type of remove operation
*/
public OperationType getRemoveType() {
return removeType;
}
/**
* How should the system execute a removal of this item.
* <p/>
* OperationType BASIC will result in the item being removed based on its primary key
* OperationType NONDESTRUCTIVEREMOVE will result in the item being removed from the containing list in the containing entity. This
* is useful when you don't want the item to actually be deleted, but simply removed from the parent collection.
* OperationType ADORNEDTARGETLIST will result in a join structure being deleted (not either of the associated entities).
* org.broadleafcommerce.core.catalog.domain.CategoryProductXrefImpl is an example of a join structure entity.
* OperationType MAP will result in the item being removed from the requisite map in the containing entity.
*
* @param removeType
*/
public void setRemoveType(OperationType removeType) {
this.removeType = removeType;
}
/**
* How should the system execute an addition for this item
* <p/>
* OperationType BASIC will result in the item being inserted
* OperationType NONDESTRUCTIVEREMOVE is not supported and will result in the same behavior as BASIC. Note, any foreign key associations in the
* persistence perspective (@see PersistencePerspective) will be honored during the BASIC based add.
* OperationType ADORNEDTARGETLIST will result in a join structure entity being added (not either of the associated entities).
* org.broadleafcommerce.core.catalog.domain.CategoryProductXrefImpl is an example of a join structure entity.
* OperationType MAP will result in the item being added to the requisite map in the containing entity.
*
* @return the type of the add operation
*/
public OperationType getAddType() {
return addType;
}
/**
* How should the system execute an addition for this item
* <p/>
* OperationType BASIC will result in the item being inserted
* OperationType NONDESTRUCTIVEREMOVE is not supported and will result in the same behavior as BASIC. Note, any foreign key associations in the
* persistence perspective (@see PersistencePerspective) will be honored during the BASIC based add.
* OperationType ADORNEDTARGETLIST will result in a join structure entity being added (not either of the associated entities).
* org.broadleafcommerce.core.catalog.domain.CategoryProductXrefImpl is an example of a join structure entity.
* OperationType MAP will result in the item being added to the requisite map in the containing entity.
*
* @param addType
*/
public void setAddType(OperationType addType) {
this.addType = addType;
}
/**
* How should the system execute an update for this item
* <p/>
* OperationType BASIC will result in the item being updated based on its primary key
* OperationType NONDESTRUCTIVEREMOVE is not supported and will result in the same behavior as BASIC. Note, any foreign key associations in the
* persistence perspective (@see PersistencePerspective) will be honored during the BASIC based update.
* OperationType ADORNEDTARGETLIST will result in a join structure entity being updated (not either of the associated entities).
* org.broadleafcommerce.core.catalog.domain.CategoryProductXrefImpl is an example of a join structure entity.
* OperationType MAP will result in the item being updated to the requisite map in the containing entity.
*
* @return the type of the update operation
*/
public OperationType getUpdateType() {
return updateType;
}
/**
* How should the system execute an update for this item
* <p/>
* OperationType BASIC will result in the item being updated based on its primary key
* OperationType NONDESTRUCTIVEREMOVE is not supported and will result in the same behavior as BASIC. Note, any foreign key associations in the
* persistence perspective (@see PersistencePerspective) will be honored during the BASIC based update.
* OperationType ADORNEDTARGETLIST will result in a join structure entity being updated (not either of the associated entities).
* org.broadleafcommerce.core.catalog.domain.CategoryProductXrefImpl is an example of a join structure entity.
* OperationType MAP will result in the item being updated to the requisite map in the containing entity.
*
* @param updateType
*/
public void setUpdateType(OperationType updateType) {
this.updateType = updateType;
}
/**
* How should the system execute a fetch
* <p/>
     * OperationType BASIC will result in a search for items matching one or more basic properties
     * OperationType FOREIGNKEY is not supported and will result in the same behavior as BASIC. Note, any foreign key associations will be included
     * as part of the query.
     * OperationType ADORNEDTARGETLIST will result in a search for items that match one of the associations in a join structure. For example, CategoryProductXrefImpl
     * is used in an AdornedTargetList fetch to retrieve all products for a particular category.
     * OperationType MAP will result in retrieval of all map entries for the requisite map in the containing entity.
*
* @return the type of the fetch operation
*/
public OperationType getFetchType() {
return fetchType;
}
/**
* How should the system execute a fetch
* <p/>
     * OperationType BASIC will result in a search for items matching one or more basic properties
     * OperationType FOREIGNKEY is not supported and will result in the same behavior as BASIC. Note, any foreign key associations will be included
     * as part of the query.
     * OperationType ADORNEDTARGETLIST will result in a search for items that match one of the associations in a join structure. For example, CategoryProductXrefImpl
     * is used in an AdornedTargetList fetch to retrieve all products for a particular category.
     * OperationType MAP will result in retrieval of all map entries for the requisite map in the containing entity.
*
* @param fetchType
*/
public void setFetchType(OperationType fetchType) {
this.fetchType = fetchType;
}
/**
* OperationType values are generally ignored for inspect and should be defined as BASIC for consistency in most circumstances.
* This API is meant to support future persistence modules where specialized inspect phase management may be required.
*
* @return the type of the inspect operation
*/
public OperationType getInspectType() {
return inspectType;
}
/**
* OperationType values are generally ignored for inspect and should be defined as BASIC for consistency in most circumstances.
* This API is meant to support future persistence modules where specialized inspect phase management may be required.
*
* @param inspectType
*/
public void setInspectType(OperationType inspectType) {
this.inspectType = inspectType;
}
public OperationTypes cloneOperationTypes() {
OperationTypes operationTypes = new OperationTypes();
operationTypes.setAddType(addType);
operationTypes.setFetchType(fetchType);
operationTypes.setInspectType(inspectType);
operationTypes.setRemoveType(removeType);
operationTypes.setUpdateType(updateType);
return operationTypes;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (!(o instanceof OperationTypes)) return false;
OperationTypes that = (OperationTypes) o;
if (addType != that.addType) return false;
if (fetchType != that.fetchType) return false;
if (inspectType != that.inspectType) return false;
if (removeType != that.removeType) return false;
if (updateType != that.updateType) return false;
return true;
}
@Override
public int hashCode() {
int result = fetchType != null ? fetchType.hashCode() : 0;
result = 31 * result + (removeType != null ? removeType.hashCode() : 0);
result = 31 * result + (addType != null ? addType.hashCode() : 0);
result = 31 * result + (updateType != null ? updateType.hashCode() : 0);
result = 31 * result + (inspectType != null ? inspectType.hashCode() : 0);
return result;
}
}
| 1no label
|
admin_broadleaf-open-admin-platform_src_main_java_org_broadleafcommerce_openadmin_dto_OperationTypes.java
|
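The javadoc above spells out how each OperationType value steers a persistence operation; a minimal, hypothetical configuration sketch follows (the enum values are the ones named in the comments above, and the surrounding admin wiring is assumed):
OperationTypes operationTypes = new OperationTypes();
operationTypes.setAddType(OperationType.ADORNEDTARGETLIST);    // add the join (xref) entity, not either associated entity
operationTypes.setUpdateType(OperationType.ADORNEDTARGETLIST); // update the join entity
operationTypes.setRemoveType(OperationType.ADORNEDTARGETLIST); // remove the join entity
operationTypes.setFetchType(OperationType.ADORNEDTARGETLIST);  // fetch items through the join associations
operationTypes.setInspectType(OperationType.BASIC);            // inspect type is generally ignored
OperationTypes copy = operationTypes.cloneOperationTypes();    // field-by-field defensive copy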
1,324 |
@ClusterScope(scope=Scope.TEST, numNodes=0)
public class NoMasterNodeTests extends ElasticsearchIntegrationTest {
@Test
public void testNoMasterActions() throws Exception {
Settings settings = settingsBuilder()
.put("discovery.type", "zen")
.put("action.auto_create_index", false)
.put("discovery.zen.minimum_master_nodes", 2)
.put("discovery.zen.ping_timeout", "200ms")
.put("discovery.initial_state_timeout", "500ms")
.put("index.number_of_shards", 1)
.build();
TimeValue timeout = TimeValue.timeValueMillis(200);
cluster().startNode(settings);
// start a second node, create an index, and then shut it down so we have no master block
cluster().startNode(settings);
createIndex("test");
client().admin().cluster().prepareHealth("test").setWaitForGreenStatus().execute().actionGet();
cluster().stopRandomNode();
assertThat(awaitBusy(new Predicate<Object>() {
public boolean apply(Object o) {
ClusterState state = client().admin().cluster().prepareState().setLocal(true).execute().actionGet().getState();
return state.blocks().hasGlobalBlock(Discovery.NO_MASTER_BLOCK);
}
}), equalTo(true));
try {
client().prepareGet("test", "type1", "1").execute().actionGet();
fail("Expected ClusterBlockException");
} catch (ClusterBlockException e) {
assertThat(e.status(), equalTo(RestStatus.SERVICE_UNAVAILABLE));
}
try {
client().prepareMultiGet().add("test", "type1", "1").execute().actionGet();
fail("Expected ClusterBlockException");
} catch (ClusterBlockException e) {
assertThat(e.status(), equalTo(RestStatus.SERVICE_UNAVAILABLE));
}
try {
PercolateSourceBuilder percolateSource = new PercolateSourceBuilder();
percolateSource.percolateDocument().setDoc(new HashMap());
client().preparePercolate()
.setIndices("test").setDocumentType("type1")
.setSource(percolateSource).execute().actionGet();
fail("Expected ClusterBlockException");
} catch (ClusterBlockException e) {
assertThat(e.status(), equalTo(RestStatus.SERVICE_UNAVAILABLE));
}
long now = System.currentTimeMillis();
try {
client().prepareUpdate("test", "type1", "1").setScript("test script").setTimeout(timeout).execute().actionGet();
fail("Expected ClusterBlockException");
} catch (ClusterBlockException e) {
assertThat(System.currentTimeMillis() - now, greaterThan(timeout.millis() - 50));
assertThat(e.status(), equalTo(RestStatus.SERVICE_UNAVAILABLE));
}
try {
client().admin().indices().prepareAnalyze("test", "this is a test").execute().actionGet();
fail("Expected ClusterBlockException");
} catch (ClusterBlockException e) {
assertThat(e.status(), equalTo(RestStatus.SERVICE_UNAVAILABLE));
}
try {
client().prepareCount("test").execute().actionGet();
fail("Expected ClusterBlockException");
} catch (ClusterBlockException e) {
assertThat(e.status(), equalTo(RestStatus.SERVICE_UNAVAILABLE));
}
now = System.currentTimeMillis();
try {
client().prepareIndex("test", "type1", "1").setSource(XContentFactory.jsonBuilder().startObject().endObject()).setTimeout(timeout).execute().actionGet();
fail("Expected ClusterBlockException");
} catch (ClusterBlockException e) {
assertThat(System.currentTimeMillis() - now, greaterThan(timeout.millis() - 50));
assertThat(e.status(), equalTo(RestStatus.SERVICE_UNAVAILABLE));
}
}
}
| 0true
|
src_test_java_org_elasticsearch_cluster_NoMasterNodeTests.java
|
880 |
private class AsyncAction extends BaseAsyncAction<QuerySearchResult> {
private AsyncAction(SearchRequest request, ActionListener<SearchResponse> listener) {
super(request, listener);
}
@Override
protected String firstPhaseName() {
return "init_scan";
}
@Override
protected void sendExecuteFirstPhase(DiscoveryNode node, ShardSearchRequest request, SearchServiceListener<QuerySearchResult> listener) {
searchService.sendExecuteScan(node, request, listener);
}
@Override
protected void moveToSecondPhase() throws Exception {
final InternalSearchResponse internalResponse = searchPhaseController.merge(SearchPhaseController.EMPTY_DOCS, firstResults, (AtomicArray<? extends FetchSearchResultProvider>) AtomicArray.empty());
String scrollId = null;
if (request.scroll() != null) {
scrollId = buildScrollId(request.searchType(), firstResults, ImmutableMap.of("total_hits", Long.toString(internalResponse.hits().totalHits())));
}
listener.onResponse(new SearchResponse(internalResponse, scrollId, expectedSuccessfulOps, successulOps.get(), buildTookInMillis(), buildShardFailures()));
}
}
| 0true
|
src_main_java_org_elasticsearch_action_search_type_TransportSearchScanAction.java
|
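For context, the AsyncAction above only runs the init_scan phase and hands back a scroll id; a hedged sketch of the client-side counterpart follows (index name, scroll timeout, and page size are made up):
// Start a SCAN-type search, then pull the first page with the scroll id that
// moveToSecondPhase() encodes above.
SearchResponse scan = client.prepareSearch("my_index")
        .setSearchType(SearchType.SCAN)
        .setScroll(TimeValue.timeValueMinutes(1))
        .setSize(100)
        .execute().actionGet();
SearchResponse firstPage = client.prepareSearchScroll(scan.getScrollId())
        .setScroll(TimeValue.timeValueMinutes(1))
        .execute().actionGet();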
2,081 |
public class FastCharArrayWriter extends Writer {
/**
* The buffer where data is stored.
*/
protected char buf[];
/**
* The number of chars in the buffer.
*/
protected int count;
/**
* Creates a new CharArrayWriter.
*/
public FastCharArrayWriter() {
this(32);
}
/**
* Creates a new CharArrayWriter with the specified initial size.
*
* @param initialSize an int specifying the initial buffer size.
* @throws IllegalArgumentException if initialSize is negative
*/
public FastCharArrayWriter(int initialSize) {
if (initialSize < 0) {
throw new IllegalArgumentException("Negative initial size: "
+ initialSize);
}
buf = new char[initialSize];
}
/**
* Writes a character to the buffer.
*/
public void write(int c) {
int newcount = count + 1;
if (newcount > buf.length) {
buf = Arrays.copyOf(buf, Math.max(buf.length << 1, newcount));
}
buf[count] = (char) c;
count = newcount;
}
/**
* Writes characters to the buffer.
*
* @param c the data to be written
* @param off the start offset in the data
* @param len the number of chars that are written
*/
public void write(char c[], int off, int len) {
if ((off < 0) || (off > c.length) || (len < 0) ||
((off + len) > c.length) || ((off + len) < 0)) {
throw new IndexOutOfBoundsException();
} else if (len == 0) {
return;
}
int newcount = count + len;
if (newcount > buf.length) {
buf = Arrays.copyOf(buf, Math.max(buf.length << 1, newcount));
}
System.arraycopy(c, off, buf, count, len);
count = newcount;
}
/**
* Write a portion of a string to the buffer.
*
* @param str String to be written from
* @param off Offset from which to start reading characters
* @param len Number of characters to be written
*/
public void write(String str, int off, int len) {
int newcount = count + len;
if (newcount > buf.length) {
buf = Arrays.copyOf(buf, Math.max(buf.length << 1, newcount));
}
str.getChars(off, off + len, buf, count);
count = newcount;
}
/**
* Writes the contents of the buffer to another character stream.
*
* @param out the output stream to write to
* @throws java.io.IOException If an I/O error occurs.
*/
public void writeTo(Writer out) throws IOException {
out.write(buf, 0, count);
}
/**
* Appends the specified character sequence to this writer.
* <p/>
* <p> An invocation of this method of the form <tt>out.append(csq)</tt>
* behaves in exactly the same way as the invocation
* <p/>
* <pre>
* out.write(csq.toString()) </pre>
*
* <p> Depending on the specification of <tt>toString</tt> for the
* character sequence <tt>csq</tt>, the entire sequence may not be
* appended. For instance, invoking the <tt>toString</tt> method of a
* character buffer will return a subsequence whose content depends upon
* the buffer's position and limit.
*
* @param csq The character sequence to append. If <tt>csq</tt> is
* <tt>null</tt>, then the four characters <tt>"null"</tt> are
* appended to this writer.
* @return This writer
* @since 1.5
*/
public FastCharArrayWriter append(CharSequence csq) {
String s = (csq == null ? "null" : csq.toString());
write(s, 0, s.length());
return this;
}
/**
* Appends a subsequence of the specified character sequence to this writer.
* <p/>
* <p> An invocation of this method of the form <tt>out.append(csq, start,
* end)</tt> when <tt>csq</tt> is not <tt>null</tt>, behaves in
* exactly the same way as the invocation
* <p/>
* <pre>
* out.write(csq.subSequence(start, end).toString()) </pre>
*
* @param csq The character sequence from which a subsequence will be
* appended. If <tt>csq</tt> is <tt>null</tt>, then characters
* will be appended as if <tt>csq</tt> contained the four
* characters <tt>"null"</tt>.
* @param start The index of the first character in the subsequence
* @param end The index of the character following the last character in the
* subsequence
* @return This writer
* @throws IndexOutOfBoundsException If <tt>start</tt> or <tt>end</tt> are negative, <tt>start</tt>
* is greater than <tt>end</tt>, or <tt>end</tt> is greater than
* <tt>csq.length()</tt>
* @since 1.5
*/
public FastCharArrayWriter append(CharSequence csq, int start, int end) {
String s = (csq == null ? "null" : csq).subSequence(start, end).toString();
write(s, 0, s.length());
return this;
}
/**
* Appends the specified character to this writer.
* <p/>
* <p> An invocation of this method of the form <tt>out.append(c)</tt>
* behaves in exactly the same way as the invocation
* <p/>
* <pre>
* out.write(c) </pre>
*
* @param c The 16-bit character to append
* @return This writer
* @since 1.5
*/
public FastCharArrayWriter append(char c) {
write(c);
return this;
}
/**
* Resets the buffer so that you can use it again without
* throwing away the already allocated buffer.
*/
public void reset() {
count = 0;
}
/**
* Returns a copy of the input data.
*
* @return an array of chars copied from the input data.
*/
    public char[] toCharArray() {
return Arrays.copyOf(buf, count);
}
/**
     * Returns the underlying char array. Note, use {@link #size()} in order to know the size
     * of the actual content within the array.
*/
public char[] unsafeCharArray() {
return buf;
}
/**
* Returns the current size of the buffer.
*
* @return an int representing the current size of the buffer.
*/
public int size() {
return count;
}
/**
* Converts input data to a string.
*
* @return the string.
*/
public String toString() {
return new String(buf, 0, count);
}
/**
* Converts the input data to a string with trimmed whitespaces.
*/
    public String toStringTrim() {
        int st = 0;
        int len = count;
        char[] val = buf; /* avoid getfield opcode */
        // skip leading whitespace
        while ((st < len) && (val[st] <= ' ')) {
            st++;
        }
        // skip trailing whitespace, checking the actual last character
        while ((st < len) && (val[len - 1] <= ' ')) {
            len--;
        }
        return new String(buf, st, len - st);
    }
/**
* Flush the stream.
*/
public void flush() {
}
/**
* Close the stream. This method does not release the buffer, since its
* contents might still be required. Note: Invoking this method in this class
* will have no effect.
*/
public void close() {
}
}
| 0true
|
src_main_java_org_elasticsearch_common_io_FastCharArrayWriter.java
|
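A short usage sketch of the writer above (the input values are illustrative):
// Buffer characters in memory, read them back, then reuse the same allocation.
FastCharArrayWriter writer = new FastCharArrayWriter(64);
writer.append("  hello, ").append("world").append('!');
String text = writer.toString();        // "  hello, world!"
String trimmed = writer.toStringTrim(); // "hello, world!" with surrounding whitespace stripped
char[] copy = writer.toCharArray();     // copy of the first size() chars
writer.reset();                         // count back to 0; the buffer is kept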
594 |
public class IndicesSegmentsAction extends IndicesAction<IndicesSegmentsRequest, IndicesSegmentResponse, IndicesSegmentsRequestBuilder> {
public static final IndicesSegmentsAction INSTANCE = new IndicesSegmentsAction();
public static final String NAME = "indices/segments";
private IndicesSegmentsAction() {
super(NAME);
}
@Override
public IndicesSegmentResponse newResponse() {
return new IndicesSegmentResponse();
}
@Override
public IndicesSegmentsRequestBuilder newRequestBuilder(IndicesAdminClient client) {
return new IndicesSegmentsRequestBuilder(client);
}
}
| 0true
|
src_main_java_org_elasticsearch_action_admin_indices_segments_IndicesSegmentsAction.java
|
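A hedged sketch of how this action is normally reached from client code; the prepareSegments shortcut on the indices admin client and the index name are assumptions here:
// Hypothetical invocation: the request builder returned by newRequestBuilder()
// is usually obtained through the admin client rather than constructed directly.
IndicesSegmentResponse segments = client.admin().indices()
        .prepareSegments("my_index")
        .execute().actionGet();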
3,206 |
SHORT(16, false, SortField.Type.INT, Short.MIN_VALUE, Short.MAX_VALUE) {
@Override
public long toLong(BytesRef indexForm) {
return INT.toLong(indexForm);
}
@Override
public void toIndexForm(Number number, BytesRef bytes) {
INT.toIndexForm(number, bytes);
}
@Override
public Number toNumber(BytesRef indexForm) {
return INT.toNumber(indexForm);
}
},
| 0true
|
src_main_java_org_elasticsearch_index_fielddata_IndexNumericFieldData.java
|