Unnamed: 0 (int64, 0–6.45k) | func (string, 37–143k chars) | target (class label, 2 classes) | project (string, 33–157 chars)
---|---|---|---|
1,284 |
@Test
public class LocalPaginatedStorageCreateCrashRestore {
private ODatabaseDocumentTx baseDocumentTx;
private ODatabaseDocumentTx testDocumentTx;
private File buildDir;
private final AtomicLong idGen = new AtomicLong();
private ExecutorService executorService = Executors.newCachedThreadPool();
private Process process;
@BeforeClass
public void beforeClass() throws Exception {
OGlobalConfiguration.CACHE_LEVEL1_ENABLED.setValue(false);
OGlobalConfiguration.CACHE_LEVEL1_SIZE.setValue(0);
OGlobalConfiguration.CACHE_LEVEL2_ENABLED.setValue(false);
OGlobalConfiguration.CACHE_LEVEL2_SIZE.setValue(0);
String buildDirectory = System.getProperty("buildDirectory", ".");
buildDirectory += "/localPaginatedStorageCreateCrashRestore";
buildDir = new File(buildDirectory);
if (buildDir.exists())
buildDir.delete();
buildDir.mkdir();
String javaExec = System.getProperty("java.home") + "/bin/java";
System.setProperty("ORIENTDB_HOME", buildDirectory);
ProcessBuilder processBuilder = new ProcessBuilder(javaExec, "-Xmx2048m", "-classpath", System.getProperty("java.class.path"),
"-DORIENTDB_HOME=" + buildDirectory, RemoteDBRunner.class.getName());
processBuilder.inheritIO();
process = processBuilder.start();
Thread.sleep(5000);
}
@AfterClass
public void afterClass() {
testDocumentTx.drop();
baseDocumentTx.drop();
Assert.assertTrue(buildDir.delete());
}
@BeforeMethod
public void beforeMethod() {
baseDocumentTx = new ODatabaseDocumentTx("plocal:" + buildDir.getAbsolutePath() + "/baseLocalPaginatedStorageCrashRestore");
if (baseDocumentTx.exists()) {
baseDocumentTx.open("admin", "admin");
baseDocumentTx.drop();
}
baseDocumentTx.create();
testDocumentTx = new ODatabaseDocumentTx("remote:localhost:3500/testLocalPaginatedStorageCrashRestore");
testDocumentTx.open("admin", "admin");
}
public void testDocumentCreation() throws Exception {
createSchema(baseDocumentTx);
createSchema(testDocumentTx);
List<Future> futures = new ArrayList<Future>();
for (int i = 0; i < 5; i++) {
futures.add(executorService.submit(new DataPropagationTask(baseDocumentTx, testDocumentTx)));
}
Thread.sleep(150000);
long lastTs = System.currentTimeMillis();
process.destroy();
for (Future future : futures) {
try {
future.get();
} catch (Exception e) {
e.printStackTrace();
}
}
testDocumentTx = new ODatabaseDocumentTx("plocal:" + buildDir.getAbsolutePath() + "/testLocalPaginatedStorageCrashRestore");
testDocumentTx.open("admin", "admin");
testDocumentTx.close();
testDocumentTx.open("admin", "admin");
compareDocuments(lastTs);
}
private void createSchema(ODatabaseDocumentTx dbDocumentTx) {
ODatabaseRecordThreadLocal.INSTANCE.set(dbDocumentTx);
OSchema schema = dbDocumentTx.getMetadata().getSchema();
if (!schema.existsClass("TestClass")) {
OClass testClass = schema.createClass("TestClass");
testClass.createProperty("id", OType.LONG);
testClass.createProperty("timestamp", OType.LONG);
testClass.createProperty("stringValue", OType.STRING);
testClass.createIndex("idIndex", OClass.INDEX_TYPE.UNIQUE, "id");
schema.save();
}
}
private void compareDocuments(long lastTs) {
long minTs = Long.MAX_VALUE;
int clusterId = baseDocumentTx.getClusterIdByName("TestClass");
OStorage baseStorage = baseDocumentTx.getStorage();
OPhysicalPosition[] physicalPositions = baseStorage.ceilingPhysicalPositions(clusterId, new OPhysicalPosition(
OClusterPositionFactory.INSTANCE.valueOf(0)));
int recordsRestored = 0;
int recordsTested = 0;
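// Walk the base cluster page by page via ceiling/higher physical positions and check
// that each base record also survived in the crash-restored test database.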
while (physicalPositions.length > 0) {
final ORecordId rid = new ORecordId(clusterId);
for (OPhysicalPosition physicalPosition : physicalPositions) {
rid.clusterPosition = physicalPosition.clusterPosition;
ODatabaseRecordThreadLocal.INSTANCE.set(baseDocumentTx);
ODocument baseDocument = baseDocumentTx.load(rid);
ODatabaseRecordThreadLocal.INSTANCE.set(testDocumentTx);
List<ODocument> testDocuments = testDocumentTx.query(new OSQLSynchQuery<ODocument>("select from TestClass where id = "
+ baseDocument.field("id")));
if (testDocuments.size() == 0) {
if (((Long) baseDocument.field("timestamp")) < minTs)
minTs = baseDocument.field("timestamp");
} else {
ODocument testDocument = testDocuments.get(0);
Assert.assertEquals(testDocument.field("id"), baseDocument.field("id"));
Assert.assertEquals(testDocument.field("timestamp"), baseDocument.field("timestamp"));
Assert.assertEquals(testDocument.field("stringValue"), baseDocument.field("stringValue"));
recordsRestored++;
}
recordsTested++;
if (recordsTested % 10000 == 0)
System.out.println(recordsTested + " were tested, " + recordsRestored + " were restored ...");
}
physicalPositions = baseStorage.higherPhysicalPositions(clusterId, physicalPositions[physicalPositions.length - 1]);
}
System.out.println(recordsRestored + " records were restored. Total records " + recordsTested
+ ". Max interval for lost records " + (lastTs - minTs));
}
public static final class RemoteDBRunner {
public static void main(String[] args) throws Exception {
OGlobalConfiguration.CACHE_LEVEL1_ENABLED.setValue(false);
OGlobalConfiguration.CACHE_LEVEL1_SIZE.setValue(0);
OGlobalConfiguration.CACHE_LEVEL2_ENABLED.setValue(false);
OGlobalConfiguration.CACHE_LEVEL2_SIZE.setValue(0);
OServer server = OServerMain.create();
server.startup(RemoteDBRunner.class
.getResourceAsStream("/com/orientechnologies/orient/core/storage/impl/local/paginated/db-create-config.xml"));
server.activate();
while (true)
;
}
}
public class DataPropagationTask implements Callable<Void> {
private ODatabaseDocumentTx baseDB;
private ODatabaseDocumentTx testDB;
public DataPropagationTask(ODatabaseDocumentTx baseDB, ODatabaseDocumentTx testDocumentTx) {
this.baseDB = new ODatabaseDocumentTx(baseDB.getURL());
this.testDB = new ODatabaseDocumentTx(testDocumentTx.getURL());
}
@Override
public Void call() throws Exception {
Random random = new Random();
baseDB.open("admin", "admin");
testDB.open("admin", "admin");
try {
while (true) {
final ODocument document = new ODocument("TestClass");
document.field("id", idGen.incrementAndGet());
document.field("timestamp", System.currentTimeMillis());
document.field("stringValue", "sfe" + random.nextLong());
saveDoc(document);
}
} finally {
baseDB.close();
testDB.close();
}
}
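// Writes the same document to both databases by switching the thread-local database
// context: first the base (plocal) storage, then the remote test storage.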
private void saveDoc(ODocument document) {
ODatabaseRecordThreadLocal.INSTANCE.set(baseDB);
ODocument testDoc = new ODocument();
document.copyTo(testDoc);
document.save();
ODatabaseRecordThreadLocal.INSTANCE.set(testDB);
testDoc.save();
ODatabaseRecordThreadLocal.INSTANCE.set(baseDB);
}
}
}
| 1 (no label) | server_src_test_java_com_orientechnologies_orient_core_storage_impl_local_paginated_LocalPaginatedStorageCreateCrashRestore.java |
147 |
public class OBinaryTypeSerializer implements OBinarySerializer<byte[]> {
private static final OBinaryConverter CONVERTER = OBinaryConverterFactory.getConverter();
public static final OBinaryTypeSerializer INSTANCE = new OBinaryTypeSerializer();
public static final byte ID = 17;
public int getObjectSize(int length) {
return length + OIntegerSerializer.INT_SIZE;
}
public int getObjectSize(byte[] object, Object... hints) {
return object.length + OIntegerSerializer.INT_SIZE;
}
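// Serialized layout: a 4-byte length prefix followed by the raw byte content.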
public void serialize(byte[] object, byte[] stream, int startPosition, Object... hints) {
int len = object.length;
OIntegerSerializer.INSTANCE.serialize(len, stream, startPosition);
System.arraycopy(object, 0, stream, startPosition + OIntegerSerializer.INT_SIZE, len);
}
public byte[] deserialize(byte[] stream, int startPosition) {
int len = OIntegerSerializer.INSTANCE.deserialize(stream, startPosition);
return Arrays.copyOfRange(stream, startPosition + OIntegerSerializer.INT_SIZE, startPosition + OIntegerSerializer.INT_SIZE
+ len);
}
public int getObjectSize(byte[] stream, int startPosition) {
return OIntegerSerializer.INSTANCE.deserialize(stream, startPosition) + OIntegerSerializer.INT_SIZE;
}
public int getObjectSizeNative(byte[] stream, int startPosition) {
return CONVERTER.getInt(stream, startPosition, ByteOrder.nativeOrder()) + OIntegerSerializer.INT_SIZE;
}
public void serializeNative(byte[] object, byte[] stream, int startPosition, Object... hints) {
int len = object.length;
CONVERTER.putInt(stream, startPosition, len, ByteOrder.nativeOrder());
System.arraycopy(object, 0, stream, startPosition + OIntegerSerializer.INT_SIZE, len);
}
public byte[] deserializeNative(byte[] stream, int startPosition) {
int len = CONVERTER.getInt(stream, startPosition, ByteOrder.nativeOrder());
return Arrays.copyOfRange(stream, startPosition + OIntegerSerializer.INT_SIZE, startPosition + OIntegerSerializer.INT_SIZE
+ len);
}
@Override
public void serializeInDirectMemory(byte[] object, ODirectMemoryPointer pointer, long offset, Object... hints) {
int len = object.length;
pointer.setInt(offset, len);
offset += OIntegerSerializer.INT_SIZE;
pointer.set(offset, object, 0, len);
}
@Override
public byte[] deserializeFromDirectMemory(ODirectMemoryPointer pointer, long offset) {
int len = pointer.getInt(offset);
offset += OIntegerSerializer.INT_SIZE;
return pointer.get(offset, len);
}
@Override
public int getObjectSizeInDirectMemory(ODirectMemoryPointer pointer, long offset) {
return pointer.getInt(offset) + OIntegerSerializer.INT_SIZE;
}
public byte getId() {
return ID;
}
public boolean isFixedLength() {
return false;
}
public int getFixedLength() {
return 0;
}
@Override
public byte[] preprocess(byte[] value, Object... hints) {
return value;
}
}
| 0 (true) | commons_src_main_java_com_orientechnologies_common_serialization_types_OBinaryTypeSerializer.java |
35 |
static final class ThenRun extends Completion {
final CompletableFuture<?> src;
final Runnable fn;
final CompletableFuture<Void> dst;
final Executor executor;
ThenRun(CompletableFuture<?> src,
Runnable fn,
CompletableFuture<Void> dst,
Executor executor) {
this.src = src; this.fn = fn; this.dst = dst;
this.executor = executor;
}
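// Fires at most once: the compareAndSet from 0 to 1 claims this completion before
// the action is run, either inline or on the supplied executor.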
public final void run() {
final CompletableFuture<?> a;
final Runnable fn;
final CompletableFuture<Void> dst;
Object r; Throwable ex;
if ((dst = this.dst) != null &&
(fn = this.fn) != null &&
(a = this.src) != null &&
(r = a.result) != null &&
compareAndSet(0, 1)) {
if (r instanceof AltResult)
ex = ((AltResult)r).ex;
else
ex = null;
Executor e = executor;
if (ex == null) {
try {
if (e != null)
e.execute(new AsyncRun(fn, dst));
else
fn.run();
} catch (Throwable rex) {
ex = rex;
}
}
if (e == null || ex != null)
dst.internalComplete(null, ex);
}
}
private static final long serialVersionUID = 5232453952276885070L;
}
| 0 (true) | src_main_java_jsr166e_CompletableFuture.java |
209 |
@SuppressWarnings("unchecked")
public class OStorageRemoteThread implements OStorageProxy {
private static AtomicInteger sessionSerialId = new AtomicInteger(-1);
private final OStorageRemote delegate;
private String serverURL;
private int sessionId;
public OStorageRemoteThread(final OStorageRemote iSharedStorage) {
delegate = iSharedStorage;
serverURL = null;
sessionId = sessionSerialId.decrementAndGet();
}
public OStorageRemoteThread(final OStorageRemote iSharedStorage, final int iSessionId) {
delegate = iSharedStorage;
serverURL = null;
sessionId = iSessionId;
}
public void open(final String iUserName, final String iUserPassword, final Map<String, Object> iOptions) {
pushSession();
try {
delegate.open(iUserName, iUserPassword, iOptions);
} finally {
popSession();
}
}
@Override
public boolean isDistributed() {
return delegate.isDistributed();
}
public void create(final Map<String, Object> iOptions) {
pushSession();
try {
delegate.create(iOptions);
} finally {
popSession();
}
}
public void close(boolean iForce) {
pushSession();
try {
delegate.close(iForce);
Orient.instance().unregisterStorage(this);
} finally {
popSession();
}
}
public boolean dropCluster(final String iClusterName, final boolean iTruncate) {
pushSession();
try {
return delegate.dropCluster(iClusterName, iTruncate);
} finally {
popSession();
}
}
public int getUsers() {
pushSession();
try {
return delegate.getUsers();
} finally {
popSession();
}
}
public int addUser() {
pushSession();
try {
return delegate.addUser();
} finally {
popSession();
}
}
public OSharedResourceAdaptiveExternal getLock() {
pushSession();
try {
return delegate.getLock();
} finally {
popSession();
}
}
public void setSessionId(final String iServerURL, final int iSessionId) {
serverURL = iServerURL;
sessionId = iSessionId;
delegate.setSessionId(serverURL, iSessionId);
}
public void reload() {
pushSession();
try {
delegate.reload();
} finally {
popSession();
}
}
public boolean exists() {
pushSession();
try {
return delegate.exists();
} finally {
popSession();
}
}
public int removeUser() {
pushSession();
try {
return delegate.removeUser();
} finally {
popSession();
}
}
public void close() {
pushSession();
try {
delegate.close();
} finally {
popSession();
}
}
public void delete() {
pushSession();
try {
delegate.delete();
Orient.instance().unregisterStorage(this);
} finally {
popSession();
}
}
@Override
public OStorage getUnderlying() {
return delegate;
}
public Set<String> getClusterNames() {
pushSession();
try {
return delegate.getClusterNames();
} finally {
popSession();
}
}
@Override
public void backup(OutputStream out, Map<String, Object> options, final Callable<Object> callable) throws IOException {
throw new UnsupportedOperationException("backup");
}
@Override
public void restore(InputStream in, Map<String, Object> options, final Callable<Object> callable) throws IOException {
throw new UnsupportedOperationException("restore");
}
public OStorageOperationResult<OPhysicalPosition> createRecord(final int iDataSegmentId, final ORecordId iRid,
final byte[] iContent, ORecordVersion iRecordVersion, final byte iRecordType, final int iMode,
ORecordCallback<OClusterPosition> iCallback) {
pushSession();
try {
return delegate.createRecord(iDataSegmentId, iRid, iContent, OVersionFactory.instance().createVersion(), iRecordType, iMode,
iCallback);
} finally {
popSession();
}
}
public OStorageOperationResult<ORawBuffer> readRecord(final ORecordId iRid, final String iFetchPlan, boolean iIgnoreCache,
ORecordCallback<ORawBuffer> iCallback, boolean loadTombstones) {
pushSession();
try {
return delegate.readRecord(iRid, iFetchPlan, iIgnoreCache, null, loadTombstones);
} finally {
popSession();
}
}
public OStorageOperationResult<ORecordVersion> updateRecord(final ORecordId iRid, final byte[] iContent,
final ORecordVersion iVersion, final byte iRecordType, final int iMode, ORecordCallback<ORecordVersion> iCallback) {
pushSession();
try {
return delegate.updateRecord(iRid, iContent, iVersion, iRecordType, iMode, iCallback);
} finally {
popSession();
}
}
public OStorageOperationResult<Boolean> deleteRecord(final ORecordId iRid, final ORecordVersion iVersion, final int iMode,
ORecordCallback<Boolean> iCallback) {
pushSession();
try {
return delegate.deleteRecord(iRid, iVersion, iMode, iCallback);
} finally {
popSession();
}
}
@Override
public boolean updateReplica(int dataSegmentId, ORecordId rid, byte[] content, ORecordVersion recordVersion, byte recordType)
throws IOException {
pushSession();
try {
return delegate.updateReplica(dataSegmentId, rid, content, recordVersion, recordType);
} finally {
popSession();
}
}
@Override
public ORecordMetadata getRecordMetadata(ORID rid) {
pushSession();
try {
return delegate.getRecordMetadata(rid);
} finally {
popSession();
}
}
@Override
public <V> V callInRecordLock(Callable<V> iCallable, ORID rid, boolean iExclusiveLock) {
pushSession();
try {
return delegate.callInRecordLock(iCallable, rid, iExclusiveLock);
} finally {
popSession();
}
}
@Override
public boolean cleanOutRecord(ORecordId recordId, ORecordVersion recordVersion, int iMode, ORecordCallback<Boolean> callback) {
pushSession();
try {
return delegate.cleanOutRecord(recordId, recordVersion, iMode, callback);
} finally {
popSession();
}
}
public long count(final int iClusterId) {
pushSession();
try {
return delegate.count(iClusterId);
} finally {
popSession();
}
}
@Override
public long count(int iClusterId, boolean countTombstones) {
pushSession();
try {
return delegate.count(iClusterId, countTombstones);
} finally {
popSession();
}
}
@Override
public long count(int[] iClusterIds, boolean countTombstones) {
pushSession();
try {
return delegate.count(iClusterIds, countTombstones);
} finally {
popSession();
}
}
public String toString() {
pushSession();
try {
return delegate.toString();
} finally {
popSession();
}
}
public OClusterPosition[] getClusterDataRange(final int iClusterId) {
pushSession();
try {
return delegate.getClusterDataRange(iClusterId);
} finally {
popSession();
}
}
@Override
public OPhysicalPosition[] higherPhysicalPositions(int currentClusterId, OPhysicalPosition physicalPosition) {
pushSession();
try {
return delegate.higherPhysicalPositions(currentClusterId, physicalPosition);
} finally {
popSession();
}
}
@Override
public OPhysicalPosition[] lowerPhysicalPositions(int currentClusterId, OPhysicalPosition physicalPosition) {
pushSession();
try {
return delegate.lowerPhysicalPositions(currentClusterId, physicalPosition);
} finally {
popSession();
}
}
@Override
public OPhysicalPosition[] ceilingPhysicalPositions(int clusterId, OPhysicalPosition physicalPosition) {
pushSession();
try {
return delegate.ceilingPhysicalPositions(clusterId, physicalPosition);
} finally {
popSession();
}
}
@Override
public OPhysicalPosition[] floorPhysicalPositions(int clusterId, OPhysicalPosition physicalPosition) {
pushSession();
try {
return delegate.floorPhysicalPositions(clusterId, physicalPosition);
} finally {
popSession();
}
}
public long getSize() {
pushSession();
try {
return delegate.getSize();
} finally {
popSession();
}
}
public long countRecords() {
pushSession();
try {
return delegate.countRecords();
} finally {
popSession();
}
}
public long count(final int[] iClusterIds) {
pushSession();
try {
return delegate.count(iClusterIds);
} finally {
popSession();
}
}
public Object command(final OCommandRequestText iCommand) {
pushSession();
try {
return delegate.command(iCommand);
} finally {
popSession();
}
}
public void commit(final OTransaction iTx, Runnable callback) {
pushSession();
try {
delegate.commit(iTx, null);
} finally {
popSession();
}
}
public void rollback(OTransaction iTx) {
pushSession();
try {
delegate.rollback(iTx);
} finally {
popSession();
}
}
public int getClusterIdByName(final String iClusterName) {
pushSession();
try {
return delegate.getClusterIdByName(iClusterName);
} finally {
popSession();
}
}
public String getClusterTypeByName(final String iClusterName) {
pushSession();
try {
return delegate.getClusterTypeByName(iClusterName);
} finally {
popSession();
}
}
public int getDefaultClusterId() {
pushSession();
try {
return delegate.getDefaultClusterId();
} finally {
popSession();
}
}
public void setDefaultClusterId(final int defaultClusterId) {
pushSession();
try {
delegate.setDefaultClusterId(defaultClusterId);
} finally {
popSession();
}
}
public int addCluster(final String iClusterType, final String iClusterName, final String iLocation,
final String iDataSegmentName, boolean forceListBased, final Object... iArguments) {
pushSession();
try {
return delegate.addCluster(iClusterType, iClusterName, iLocation, iDataSegmentName, false, iArguments);
} finally {
popSession();
}
}
public int addCluster(String iClusterType, String iClusterName, int iRequestedId, String iLocation, String iDataSegmentName,
boolean forceListBased, Object... iParameters) {
pushSession();
try {
return delegate
.addCluster(iClusterType, iClusterName, iRequestedId, iLocation, iDataSegmentName, forceListBased, iParameters);
} finally {
popSession();
}
}
public boolean dropCluster(final int iClusterId, final boolean iTruncate) {
pushSession();
try {
return delegate.dropCluster(iClusterId, iTruncate);
} finally {
popSession();
}
}
public ODataSegment getDataSegmentById(final int iDataSegmentId) {
return delegate.getDataSegmentById(iDataSegmentId);
}
public int getDataSegmentIdByName(final String iDataSegmentName) {
return delegate.getDataSegmentIdByName(iDataSegmentName);
}
public int addDataSegment(final String iDataSegmentName) {
pushSession();
try {
return delegate.addDataSegment(iDataSegmentName);
} finally {
popSession();
}
}
public int addDataSegment(final String iSegmentName, final String iSegmentFileName) {
pushSession();
try {
return delegate.addDataSegment(iSegmentName, iSegmentFileName);
} finally {
popSession();
}
}
public boolean dropDataSegment(final String iSegmentName) {
pushSession();
try {
return delegate.dropDataSegment(iSegmentName);
} finally {
popSession();
}
}
public void synch() {
pushSession();
try {
delegate.synch();
} finally {
popSession();
}
}
public String getPhysicalClusterNameById(final int iClusterId) {
pushSession();
try {
return delegate.getPhysicalClusterNameById(iClusterId);
} finally {
popSession();
}
}
public int getClusters() {
pushSession();
try {
return delegate.getClusterMap();
} finally {
popSession();
}
}
public Collection<OCluster> getClusterInstances() {
pushSession();
try {
return delegate.getClusterInstances();
} finally {
popSession();
}
}
public OCluster getClusterById(final int iId) {
pushSession();
try {
return delegate.getClusterById(iId);
} finally {
popSession();
}
}
public long getVersion() {
pushSession();
try {
return delegate.getVersion();
} finally {
popSession();
}
}
public boolean isPermanentRequester() {
pushSession();
try {
return delegate.isPermanentRequester();
} finally {
popSession();
}
}
public void updateClusterConfiguration(final byte[] iContent) {
pushSession();
try {
delegate.updateClusterConfiguration(iContent);
} finally {
popSession();
}
}
public OStorageConfiguration getConfiguration() {
pushSession();
try {
return delegate.getConfiguration();
} finally {
popSession();
}
}
public boolean isClosed() {
return delegate.isClosed();
}
public boolean checkForRecordValidity(final OPhysicalPosition ppos) {
pushSession();
try {
return delegate.checkForRecordValidity(ppos);
} finally {
popSession();
}
}
public String getName() {
pushSession();
try {
return delegate.getName();
} finally {
popSession();
}
}
public String getURL() {
return delegate.getURL();
}
public void beginResponse(final OChannelBinaryAsynchClient iNetwork) throws IOException {
pushSession();
try {
delegate.beginResponse(iNetwork);
} finally {
popSession();
}
}
public OLevel2RecordCache getLevel2Cache() {
return delegate.getLevel2Cache();
}
public boolean existsResource(final String iName) {
return delegate.existsResource(iName);
}
public synchronized <T> T getResource(final String iName, final Callable<T> iCallback) {
return (T) delegate.getResource(iName, iCallback);
}
public <T> T removeResource(final String iName) {
return (T) delegate.removeResource(iName);
}
public ODocument getClusterConfiguration() {
return delegate.getClusterConfiguration();
}
protected void handleException(final OChannelBinaryAsynchClient iNetwork, final String iMessage, final Exception iException) {
delegate.handleException(iNetwork, iMessage, iException);
}
public <V> V callInLock(final Callable<V> iCallable, final boolean iExclusiveLock) {
return delegate.callInLock(iCallable, iExclusiveLock);
}
public ORemoteServerEventListener getRemoteServerEventListener() {
return delegate.getAsynchEventListener();
}
public void setRemoteServerEventListener(final ORemoteServerEventListener iListener) {
delegate.setAsynchEventListener(iListener);
}
public void removeRemoteServerEventListener() {
delegate.removeRemoteServerEventListener();
}
public static int getNextConnectionId() {
return sessionSerialId.decrementAndGet();
}
@Override
public void checkForClusterPermissions(final String iClusterName) {
delegate.checkForClusterPermissions(iClusterName);
}
public STATUS getStatus() {
return delegate.getStatus();
}
@Override
public String getType() {
return delegate.getType();
}
@Override
public boolean equals(final Object iOther) {
return iOther == this || iOther == delegate;
}
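// Every delegated call is bracketed by pushSession()/popSession(): the per-thread
// session id is installed on the shared storage before the call and read back afterwards.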
protected void pushSession() {
delegate.setSessionId(serverURL, sessionId);
}
protected void popSession() {
serverURL = delegate.getServerURL();
sessionId = delegate.getSessionId();
}
}
| 1 (no label) | client_src_main_java_com_orientechnologies_orient_client_remote_OStorageRemoteThread.java |
533 |
@Deprecated
public class GatewaySnapshotRequestBuilder extends BroadcastOperationRequestBuilder<GatewaySnapshotRequest, GatewaySnapshotResponse, GatewaySnapshotRequestBuilder> {
public GatewaySnapshotRequestBuilder(IndicesAdminClient indicesClient) {
super((InternalIndicesAdminClient) indicesClient, new GatewaySnapshotRequest());
}
@Override
protected void doExecute(ActionListener<GatewaySnapshotResponse> listener) {
((IndicesAdminClient) client).gatewaySnapshot(request, listener);
}
}
| 0 (true) | src_main_java_org_elasticsearch_action_admin_indices_gateway_snapshot_GatewaySnapshotRequestBuilder.java |
764 |
@Test(enabled = false)
public class OSBTreeBonsaiWAL extends OSBTreeBonsaiTest {
private String buildDirectory;
private String actualStorageDir;
private String expectedStorageDir;
private OWriteAheadLog writeAheadLog;
private ODiskCache actualDiskCache;
private ODiskCache expectedDiskCache;
private OLocalPaginatedStorage actualStorage;
private OSBTreeBonsai<Integer, OIdentifiable> expectedSBTree;
@BeforeClass
@Override
public void beforeClass() {
}
@AfterClass
@Override
public void afterClass() {
}
@BeforeMethod
public void beforeMethod() throws IOException {
buildDirectory = System.getProperty("buildDirectory", ".");
buildDirectory += "/sbtreeWithWALTest";
createExpectedSBTree();
createActualSBTree();
}
@AfterMethod
@Override
public void afterMethod() throws Exception {
sbTree.delete();
expectedSBTree.delete();
actualDiskCache.delete();
expectedDiskCache.delete();
writeAheadLog.delete();
Assert.assertTrue(new File(actualStorageDir).delete());
Assert.assertTrue(new File(expectedStorageDir).delete());
Assert.assertTrue(new File(buildDirectory).delete());
}
private void createActualSBTree() throws IOException {
actualStorage = mock(OLocalPaginatedStorage.class);
OStorageConfiguration storageConfiguration = mock(OStorageConfiguration.class);
storageConfiguration.clusters = new ArrayList<OStorageClusterConfiguration>();
storageConfiguration.fileTemplate = new OStorageSegmentConfiguration();
actualStorageDir = buildDirectory + "/sbtreeWithWALTestActual";
when(actualStorage.getStoragePath()).thenReturn(actualStorageDir);
when(actualStorage.getName()).thenReturn("sbtreeWithWALTestActual");
File buildDir = new File(buildDirectory);
if (!buildDir.exists())
buildDir.mkdirs();
File actualStorageDirFile = new File(actualStorageDir);
if (!actualStorageDirFile.exists())
actualStorageDirFile.mkdirs();
writeAheadLog = new OWriteAheadLog(6000, -1, 10 * 1024L * OWALPage.PAGE_SIZE, 100L * 1024 * 1024 * 1024, actualStorage);
actualDiskCache = new OReadWriteDiskCache(400L * 1024 * 1024 * 1024, 1648L * 1024 * 1024,
OGlobalConfiguration.DISK_CACHE_PAGE_SIZE.getValueAsInteger() * 1024, 1000000, 100, actualStorage, null, false, false);
OStorageVariableParser variableParser = new OStorageVariableParser(actualStorageDir);
when(actualStorage.getStorageTransaction()).thenReturn(null);
when(actualStorage.getDiskCache()).thenReturn(actualDiskCache);
when(actualStorage.getWALInstance()).thenReturn(writeAheadLog);
when(actualStorage.getVariableParser()).thenReturn(variableParser);
when(actualStorage.getConfiguration()).thenReturn(storageConfiguration);
when(actualStorage.getMode()).thenReturn("rw");
when(storageConfiguration.getDirectory()).thenReturn(actualStorageDir);
sbTree = new OSBTreeBonsai<Integer, OIdentifiable>(".sbt", 1, false);
sbTree.create("actualSBTree", OIntegerSerializer.INSTANCE, OLinkSerializer.INSTANCE, actualStorage);
}
private void createExpectedSBTree() {
final OLocalPaginatedStorage expectedStorage = mock(OLocalPaginatedStorage.class);
OStorageConfiguration storageConfiguration = mock(OStorageConfiguration.class);
storageConfiguration.clusters = new ArrayList<OStorageClusterConfiguration>();
storageConfiguration.fileTemplate = new OStorageSegmentConfiguration();
expectedStorageDir = buildDirectory + "/sbtreeWithWALTestExpected";
when(expectedStorage.getStoragePath()).thenReturn(expectedStorageDir);
when(expectedStorage.getName()).thenReturn("sbtreeWithWALTestExpected");
File buildDir = new File(buildDirectory);
if (!buildDir.exists())
buildDir.mkdirs();
File expectedStorageDirFile = new File(expectedStorageDir);
if (!expectedStorageDirFile.exists())
expectedStorageDirFile.mkdirs();
expectedDiskCache = new OReadWriteDiskCache(400L * 1024 * 1024 * 1024, 1648L * 1024 * 1024,
OGlobalConfiguration.DISK_CACHE_PAGE_SIZE.getValueAsInteger() * 1024, 1000000, 100, expectedStorage, null, false, false);
OStorageVariableParser variableParser = new OStorageVariableParser(expectedStorageDir);
when(expectedStorage.getStorageTransaction()).thenReturn(null);
when(expectedStorage.getDiskCache()).thenReturn(expectedDiskCache);
when(expectedStorage.getWALInstance()).thenReturn(null);
when(expectedStorage.getVariableParser()).thenReturn(variableParser);
when(expectedStorage.getConfiguration()).thenReturn(storageConfiguration);
when(expectedStorage.getMode()).thenReturn("rw");
when(storageConfiguration.getDirectory()).thenReturn(expectedStorageDir);
expectedSBTree = new OSBTreeBonsai<Integer, OIdentifiable>(".sbt", 1, false);
expectedSBTree.create("expectedSBTree", OIntegerSerializer.INSTANCE, OLinkSerializer.INSTANCE, expectedStorage);
}
@Override
public void testKeyPut() throws Exception {
super.testKeyPut();
assertFileRestoreFromWAL();
}
@Override
public void testKeyPutRandomUniform() throws Exception {
super.testKeyPutRandomUniform();
assertFileRestoreFromWAL();
}
@Override
public void testKeyPutRandomGaussian() throws Exception {
super.testKeyPutRandomGaussian();
assertFileRestoreFromWAL();
}
@Override
public void testKeyDeleteRandomUniform() throws Exception {
super.testKeyDeleteRandomUniform();
assertFileRestoreFromWAL();
}
@Override
public void testKeyDeleteRandomGaussian() throws Exception {
super.testKeyDeleteRandomGaussian();
assertFileRestoreFromWAL();
}
@Override
public void testKeyDelete() throws Exception {
super.testKeyDelete();
assertFileRestoreFromWAL();
}
@Override
public void testKeyAddDelete() throws Exception {
super.testKeyAddDelete();
assertFileRestoreFromWAL();
}
@Override
public void testAddKeyValuesInTwoBucketsAndMakeFirstEmpty() throws Exception {
super.testAddKeyValuesInTwoBucketsAndMakeFirstEmpty();
assertFileRestoreFromWAL();
}
@Override
public void testAddKeyValuesInTwoBucketsAndMakeLastEmpty() throws Exception {
super.testAddKeyValuesInTwoBucketsAndMakeLastEmpty();
assertFileRestoreFromWAL();
}
@Override
public void testAddKeyValuesAndRemoveFirstMiddleAndLastPages() throws Exception {
super.testAddKeyValuesAndRemoveFirstMiddleAndLastPages();
assertFileRestoreFromWAL();
}
@Test(enabled = false)
@Override
public void testValuesMajor() {
super.testValuesMajor();
}
@Test(enabled = false)
@Override
public void testValuesMinor() {
super.testValuesMinor();
}
@Test(enabled = false)
@Override
public void testValuesBetween() {
super.testValuesBetween();
}
private void assertFileRestoreFromWAL() throws IOException {
sbTree.close();
writeAheadLog.close();
expectedSBTree.close();
actualDiskCache.clear();
restoreDataFromWAL();
expectedDiskCache.clear();
assertFileContentIsTheSame(expectedSBTree.getName(), sbTree.getName());
}
private void restoreDataFromWAL() throws IOException {
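// Replay the WAL against the expected disk cache: records are buffered until an
// atomic-unit-end record is reached, then every page update in the unit is applied.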
OWriteAheadLog log = new OWriteAheadLog(4, -1, 10 * 1024L * OWALPage.PAGE_SIZE, 100L * 1024 * 1024 * 1024, actualStorage);
OLogSequenceNumber lsn = log.begin();
List<OWALRecord> atomicUnit = new ArrayList<OWALRecord>();
boolean atomicChangeIsProcessed = false;
while (lsn != null) {
OWALRecord walRecord = log.read(lsn);
atomicUnit.add(walRecord);
if (!atomicChangeIsProcessed) {
Assert.assertTrue(walRecord instanceof OAtomicUnitStartRecord);
atomicChangeIsProcessed = true;
} else if (walRecord instanceof OAtomicUnitEndRecord) {
atomicChangeIsProcessed = false;
for (OWALRecord restoreRecord : atomicUnit) {
if (restoreRecord instanceof OAtomicUnitStartRecord || restoreRecord instanceof OAtomicUnitEndRecord)
continue;
final OUpdatePageRecord updatePageRecord = (OUpdatePageRecord) restoreRecord;
final long fileId = updatePageRecord.getFileId();
final long pageIndex = updatePageRecord.getPageIndex();
if (!expectedDiskCache.isOpen(fileId))
expectedDiskCache.openFile(fileId);
final OCacheEntry cacheEntry = expectedDiskCache.load(fileId, pageIndex, true);
final OCachePointer cachePointer = cacheEntry.getCachePointer();
cachePointer.acquireExclusiveLock();
try {
ODurablePage durablePage = new ODurablePage(cachePointer.getDataPointer(), ODurablePage.TrackMode.NONE);
durablePage.restoreChanges(updatePageRecord.getChanges());
durablePage.setLsn(updatePageRecord.getLsn());
cacheEntry.markDirty();
} finally {
cachePointer.releaseExclusiveLock();
expectedDiskCache.release(cacheEntry);
}
}
atomicUnit.clear();
} else {
Assert.assertTrue(walRecord instanceof OUpdatePageRecord);
}
lsn = log.next(lsn);
}
Assert.assertTrue(atomicUnit.isEmpty());
log.close();
}
private void assertFileContentIsTheSame(String expectedBTree, String actualBTree) throws IOException {
File expectedFile = new File(expectedStorageDir, expectedBTree + ".sbt");
RandomAccessFile fileOne = new RandomAccessFile(expectedFile, "r");
RandomAccessFile fileTwo = new RandomAccessFile(new File(actualStorageDir, actualBTree + ".sbt"), "r");
Assert.assertEquals(fileOne.length(), fileTwo.length());
byte[] expectedContent = new byte[OClusterPage.PAGE_SIZE];
byte[] actualContent = new byte[OClusterPage.PAGE_SIZE];
fileOne.seek(OAbstractFile.HEADER_SIZE);
fileTwo.seek(OAbstractFile.HEADER_SIZE);
int bytesRead = fileOne.read(expectedContent);
while (bytesRead >= 0) {
fileTwo.readFully(actualContent, 0, bytesRead);
Assert.assertEquals(expectedContent, actualContent);
expectedContent = new byte[OClusterPage.PAGE_SIZE];
actualContent = new byte[OClusterPage.PAGE_SIZE];
bytesRead = fileOne.read(expectedContent);
}
fileOne.close();
fileTwo.close();
}
}
| 1 (no label) | core_src_test_java_com_orientechnologies_orient_core_index_sbtreebonsai_local_OSBTreeBonsaiWAL.java |
163 |
@SuppressWarnings("unchecked")
public class OSynchEventAdapter<RESOURCE_TYPE, RESPONSE_TYPE> {
protected final LinkedHashMap<RESOURCE_TYPE, Object[]> queue = new LinkedHashMap<RESOURCE_TYPE, Object[]>();
public OSynchEventAdapter() {
}
public void registerCallbackCurrentThread(final RESOURCE_TYPE iResource) {
queue.put(iResource, new Object[] { iResource, null });
}
/**
* Wait forever until the requested resource is unlocked.
*/
public RESPONSE_TYPE waitForResource(final RESOURCE_TYPE iResource) {
return getValue(iResource, 0);
}
/**
* Wait until the requested resource is unlocked. The current thread sleeps until the timeout expires or it is woken up by an unlock.
*/
public synchronized RESPONSE_TYPE getValue(final RESOURCE_TYPE iResource, final long iTimeout) {
if (OLogManager.instance().isDebugEnabled())
OLogManager.instance().debug(
this,
"Thread [" + Thread.currentThread().getId() + "] is waiting for the resource " + iResource
+ (iTimeout <= 0 ? " forever" : " until " + iTimeout + "ms"));
synchronized (iResource) {
try {
iResource.wait(iTimeout);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
throw new OLockException("Thread interrupted while waiting for resource '" + iResource + "'");
}
}
Object[] value = queue.remove(iResource);
return (RESPONSE_TYPE) (value != null ? value[1] : null);
}
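/**
* Delivers a value for the given resource and wakes up the thread waiting on it.
*/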
public void setValue(final RESOURCE_TYPE iResource, final Object iValue) {
final Object[] waiter = queue.get(iResource);
if (waiter == null)
return;
synchronized (waiter[0]) {
waiter[1] = iValue;
waiter[0].notifyAll();
}
}
}
| 0 (true) | commons_src_main_java_com_orientechnologies_common_synch_OSynchEventAdapter.java |
562 |
public class PutMappingClusterStateUpdateRequest extends IndicesClusterStateUpdateRequest<PutMappingClusterStateUpdateRequest> {
private String type;
private String source;
private boolean ignoreConflicts = false;
PutMappingClusterStateUpdateRequest() {
}
public String type() {
return type;
}
public PutMappingClusterStateUpdateRequest type(String type) {
this.type = type;
return this;
}
public String source() {
return source;
}
public PutMappingClusterStateUpdateRequest source(String source) {
this.source = source;
return this;
}
public boolean ignoreConflicts() {
return ignoreConflicts;
}
public PutMappingClusterStateUpdateRequest ignoreConflicts(boolean ignoreConflicts) {
this.ignoreConflicts = ignoreConflicts;
return this;
}
}
| 0 (true) | src_main_java_org_elasticsearch_action_admin_indices_mapping_put_PutMappingClusterStateUpdateRequest.java |
29 |
public class BerkeleyBlueprintsTest extends TitanBlueprintsTest {
private static final String DEFAULT_SUBDIR = "standard";
private static final Logger log =
LoggerFactory.getLogger(BerkeleyBlueprintsTest.class);
@Override
public Graph generateGraph() {
return generateGraph(DEFAULT_SUBDIR);
}
@Override
public void beforeOpeningGraph(String uid) {
String dir = BerkeleyStorageSetup.getHomeDir(uid);
log.debug("Cleaning directory {} before opening it for the first time", dir);
try {
BerkeleyJEStoreManager s = new BerkeleyJEStoreManager(BerkeleyStorageSetup.getBerkeleyJEConfiguration(dir));
s.clearStorage();
s.close();
File dirFile = new File(dir);
Assert.assertFalse(dirFile.exists() && dirFile.listFiles().length > 0);
} catch (BackendException e) {
throw new RuntimeException(e);
}
}
@Override
public TitanGraph openGraph(String uid) {
String dir = BerkeleyStorageSetup.getHomeDir(uid);
return TitanFactory.open(BerkeleyStorageSetup.getBerkeleyJEConfiguration(dir));
}
@Override
public void extraCleanUp(String uid) throws BackendException {
String dir = BerkeleyStorageSetup.getHomeDir(uid);
BerkeleyJEStoreManager s = new BerkeleyJEStoreManager(BerkeleyStorageSetup.getBerkeleyJEConfiguration(dir));
s.clearStorage();
s.close();
File dirFile = new File(dir);
Assert.assertFalse(dirFile.exists() && dirFile.listFiles().length > 0);
}
@Override
public boolean supportsMultipleGraphs() {
return true;
}
@Override
public void beforeSuite() {
//Nothing
}
@Override
public void afterSuite() {
synchronized (openGraphs) {
for (String dir : openGraphs.keySet()) {
File dirFile = new File(dir);
Assert.assertFalse(dirFile.exists() && dirFile.listFiles().length > 0);
}
}
}
}
| 0 (true) | titan-berkeleyje_src_test_java_com_thinkaurelius_titan_blueprints_BerkeleyBlueprintsTest.java |
90 |
public class ODFACommandStreamTest {
@Test
public void testNextCommand() throws Exception {
test("one;two", "one", "two");
}
@Test
public void testNextCommandQuotes() throws Exception {
test("Select 'one;'; Select \"t;w;o\"", "Select 'one;'", "Select \"t;w;o\"");
}
@Test
public void testNextCommandSeparatorAtTheEnd() throws Exception {
test("one;two;", "one", "two");
}
@Test
public void testNextCommandWhitespaces() throws Exception {
test("\tone ; two ", "one", "two");
}
private void test(String source, String... expectedResults) {
final ODFACommandStream stream = new ODFACommandStream(source);
for (String expectedResult : expectedResults) {
Assert.assertTrue(stream.hasNext());
String result = stream.nextCommand();
Assert.assertEquals(result, expectedResult);
}
Assert.assertFalse(stream.hasNext());
}
}
| 0 (true) | commons_src_test_java_com_orientechnologies_common_console_ODFACommandStreamTest.java |
95 |
public class Geoshape {
private static final SpatialContext CTX = SpatialContext.GEO;
/**
* The Type of a shape: a point, box, circle, or polygon
*/
public enum Type {
POINT, BOX, CIRCLE, POLYGON;
}
//coordinates[0] = latitudes, coordinates[1] = longitudes
private final float[][] coordinates;
private Geoshape() {
coordinates = null;
}
private Geoshape(final float[][] coordinates) {
Preconditions.checkArgument(coordinates!=null && coordinates.length==2);
Preconditions.checkArgument(coordinates[0].length==coordinates[1].length && coordinates[0].length>0);
for (int i=0;i<coordinates[0].length;i++) {
if (Float.isNaN(coordinates[0][i])) Preconditions.checkArgument(i==1 && coordinates.length==2 && coordinates[1][i]>0);
else Preconditions.checkArgument(isValidCoordinate(coordinates[0][i],coordinates[1][i]));
}
this.coordinates=coordinates;
}
@Override
public int hashCode() {
return new HashCodeBuilder().append(coordinates[0]).append(coordinates[1]).toHashCode();
}
@Override
public boolean equals(Object other) {
if (this==other) return true;
else if (other==null) return false;
else if (!getClass().isInstance(other)) return false;
Geoshape oth = (Geoshape)other;
Preconditions.checkArgument(coordinates.length==2 && oth.coordinates.length==2);
for (int i=0;i<coordinates.length;i++) {
if (coordinates[i].length!=oth.coordinates[i].length) return false;
for (int j=0;j<coordinates[i].length;j++) {
if (Float.isNaN(coordinates[i][j]) && Float.isNaN(oth.coordinates[i][j])) continue;
if (coordinates[i][j]!=oth.coordinates[i][j]) return false;
}
}
return true;
}
@Override
public String toString() {
Type type = getType();
StringBuilder s = new StringBuilder();
s.append(type.toString().toLowerCase());
switch (type) {
case POINT:
s.append(getPoint().toString());
break;
case CIRCLE:
s.append(getPoint().toString()).append(":").append(getRadius());
break;
default:
s.append("[");
for (int i=0;i<size();i++) {
if (i>0) s.append(",");
s.append(getPoint(i));
}
s.append("]");
}
return s.toString();
}
/**
* Returns the {@link Type} of this geoshape.
*
* @return
*/
public Type getType() {
if (coordinates[0].length==1) return Type.POINT;
else if (coordinates[0].length>2) return Type.POLYGON;
else { //coordinates[0].length==2
if (Float.isNaN(coordinates[0][1])) return Type.CIRCLE;
else return Type.BOX;
}
}
/**
* Returns the number of points comprising this geoshape. A point and a circle have only one point (the center of the circle),
* a box has two points (the south-west and north-east corners) and a polygon has a variable number of points (>=3).
*
* @return
*/
public int size() {
switch(getType()) {
case POINT: return 1;
case CIRCLE: return 1;
case BOX: return 2;
case POLYGON: return coordinates[0].length;
default: throw new IllegalStateException("Unrecognized type: " + getType());
}
}
/**
* Returns the point at the given position. The position must be smaller than {@link #size()}.
*
* @param position
* @return
*/
public Point getPoint(int position) {
if (position<0 || position>=size()) throw new ArrayIndexOutOfBoundsException("Invalid position: " + position);
return new Point(coordinates[0][position],coordinates[1][position]);
}
/**
* Returns the singleton point of this shape. Only applicable for point and circle shapes.
*
* @return
*/
public Point getPoint() {
Preconditions.checkArgument(size()==1,"Shape does not have a single point");
return getPoint(0);
}
/**
* Returns the radius in kilometers of this circle. Only applicable to circle shapes.
* @return
*/
public float getRadius() {
Preconditions.checkArgument(getType()==Type.CIRCLE,"This shape is not a circle");
return coordinates[1][1];
}
private SpatialRelation getSpatialRelation(Geoshape other) {
Preconditions.checkNotNull(other);
return convert2Spatial4j().relate(other.convert2Spatial4j());
}
public boolean intersect(Geoshape other) {
SpatialRelation r = getSpatialRelation(other);
return r==SpatialRelation.INTERSECTS || r==SpatialRelation.CONTAINS || r==SpatialRelation.WITHIN;
}
public boolean within(Geoshape outer) {
return getSpatialRelation(outer)==SpatialRelation.WITHIN;
}
public boolean disjoint(Geoshape other) {
return getSpatialRelation(other)==SpatialRelation.DISJOINT;
}
/**
* Converts this shape into its equivalent Spatial4j {@link Shape}.
* @return
*/
public Shape convert2Spatial4j() {
switch(getType()) {
case POINT: return getPoint().getSpatial4jPoint();
case CIRCLE: return CTX.makeCircle(getPoint(0).getSpatial4jPoint(), DistanceUtils.dist2Degrees(getRadius(), DistanceUtils.EARTH_MEAN_RADIUS_KM));
case BOX: return CTX.makeRectangle(getPoint(0).getSpatial4jPoint(),getPoint(1).getSpatial4jPoint());
case POLYGON: throw new UnsupportedOperationException("Not yet supported");
default: throw new IllegalStateException("Unrecognized type: " + getType());
}
}
/**
* Constructs a point from its latitude and longitude information
* @param latitude
* @param longitude
* @return
*/
public static final Geoshape point(final float latitude, final float longitude) {
Preconditions.checkArgument(isValidCoordinate(latitude,longitude),"Invalid coordinate provided");
return new Geoshape(new float[][]{ new float[]{latitude}, new float[]{longitude}});
}
/**
* Constructs a point from its latitude and longitude information
* @param latitude
* @param longitude
* @return
*/
public static final Geoshape point(final double latitude, final double longitude) {
return point((float)latitude,(float)longitude);
}
/**
* Constructs a circle from a given center point and a radius in kilometer
* @param latitude
* @param longitude
* @param radiusInKM
* @return
*/
public static final Geoshape circle(final float latitude, final float longitude, final float radiusInKM) {
Preconditions.checkArgument(isValidCoordinate(latitude,longitude),"Invalid coordinate provided");
Preconditions.checkArgument(radiusInKM>0,"Invalid radius provided [%s]",radiusInKM);
return new Geoshape(new float[][]{ new float[]{latitude, Float.NaN}, new float[]{longitude, radiusInKM}});
}
/**
* Constructs a circle from a given center point and a radius in kilometer
* @param latitude
* @param longitude
* @param radiusInKM
* @return
*/
public static final Geoshape circle(final double latitude, final double longitude, final double radiusInKM) {
return circle((float)latitude,(float)longitude,(float)radiusInKM);
}
/**
* Constructs a new box shape which is identified by its south-west and north-east corner points
* @param southWestLatitude
* @param southWestLongitude
* @param northEastLatitude
* @param northEastLongitude
* @return
*/
public static final Geoshape box(final float southWestLatitude, final float southWestLongitude,
final float northEastLatitude, final float northEastLongitude) {
Preconditions.checkArgument(isValidCoordinate(southWestLatitude,southWestLongitude),"Invalid south-west coordinate provided");
Preconditions.checkArgument(isValidCoordinate(northEastLatitude,northEastLongitude),"Invalid north-east coordinate provided");
return new Geoshape(new float[][]{ new float[]{southWestLatitude, northEastLatitude}, new float[]{southWestLongitude, northEastLongitude}});
}
/**
* Constructs a new box shape which is identified by its south-west and north-east corner points
* @param southWestLatitude
* @param southWestLongitude
* @param northEastLatitude
* @param northEastLongitude
* @return
*/
public static final Geoshape box(final double southWestLatitude, final double southWestLongitude,
final double northEastLatitude, final double northEastLongitude) {
return box((float)southWestLatitude,(float)southWestLongitude,(float)northEastLatitude,(float)northEastLongitude);
}
/**
* Whether the given coordinates mark a point on earth.
* @param latitude
* @param longitude
* @return
*/
public static final boolean isValidCoordinate(final float latitude, final float longitude) {
return latitude>=-90.0 && latitude<=90.0 && longitude>=-180.0 && longitude<=180.0;
}
/**
* A single point representation. A point is identified by its coordinates on the earth's sphere, expressed as
* latitude and longitude.
*/
public static final class Point {
private final float longitude;
private final float latitude;
/**
* Constructs a point with the given latitude and longitude
* @param latitude Between -90 and 90 degrees
* @param longitude Between -180 and 180 degrees
*/
Point(float latitude, float longitude) {
this.longitude = longitude;
this.latitude = latitude;
}
/**
* Longitude of this point
* @return
*/
public float getLongitude() {
return longitude;
}
/**
* Latitude of this point
* @return
*/
public float getLatitude() {
return latitude;
}
private com.spatial4j.core.shape.Point getSpatial4jPoint() {
return CTX.makePoint(longitude,latitude);
}
/**
* Returns the distance to another point in kilometers
*
* @param other Point
* @return
*/
public double distance(Point other) {
return DistanceUtils.degrees2Dist(CTX.getDistCalc().distance(getSpatial4jPoint(),other.getSpatial4jPoint()),DistanceUtils.EARTH_MEAN_RADIUS_KM);
}
@Override
public String toString() {
return "["+latitude+","+longitude+"]";
}
@Override
public int hashCode() {
return new HashCodeBuilder().append(latitude).append(longitude).toHashCode();
}
@Override
public boolean equals(Object other) {
if (this==other) return true;
else if (other==null) return false;
else if (!getClass().isInstance(other)) return false;
Point oth = (Point)other;
return latitude==oth.latitude && longitude==oth.longitude;
}
}
/**
* @author Matthias Broecheler ([email protected])
*/
public static class GeoshapeSerializer implements AttributeSerializer<Geoshape> {
@Override
public void verifyAttribute(Geoshape value) {
//All values of Geoshape are valid
}
@Override
public Geoshape convert(Object value) {
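// Accepts a numeric array or a delimited string: 2 values yield a point,
// 3 a circle (lat, lon, radius in km), 4 a box (SW lat/lon, NE lat/lon).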
if (value.getClass().isArray() && (value.getClass().getComponentType().isPrimitive() ||
Number.class.isAssignableFrom(value.getClass().getComponentType())) ) {
Geoshape shape = null;
int len= Array.getLength(value);
double[] arr = new double[len];
for (int i=0;i<len;i++) arr[i]=((Number)Array.get(value,i)).doubleValue();
if (len==2) shape= point(arr[0],arr[1]);
else if (len==3) shape= circle(arr[0],arr[1],arr[2]);
else if (len==4) shape= box(arr[0],arr[1],arr[2],arr[3]);
else throw new IllegalArgumentException("Expected 2-4 coordinates to create Geoshape, but given: " + value);
return shape;
} else if (value instanceof String) {
String[] components=null;
for (String delimiter : new String[]{",",";"}) {
components = ((String)value).split(delimiter);
if (components.length>=2 && components.length<=4) break;
else components=null;
}
Preconditions.checkArgument(components!=null,"Could not parse coordinates from string: %s",value);
double[] coords = new double[components.length];
try {
for (int i=0;i<components.length;i++) {
coords[i]=Double.parseDouble(components[i]);
}
} catch (NumberFormatException e) {
throw new IllegalArgumentException("Could not parse coordinates from string: " + value, e);
}
return convert(coords);
} else return null;
}
@Override
public Geoshape read(ScanBuffer buffer) {
long l = VariableLong.readPositive(buffer);
assert l>0 && l<Integer.MAX_VALUE;
int length = (int)l;
float[][] coordinates = new float[2][];
for (int i = 0; i < 2; i++) {
coordinates[i]=buffer.getFloats(length);
}
return new Geoshape(coordinates);
}
@Override
public void write(WriteBuffer buffer, Geoshape attribute) {
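// Wire format: the point count as a variable-length positive long, then all
// latitudes followed by all longitudes as 4-byte floats.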
float[][] coordinates = attribute.coordinates;
assert (coordinates.length==2);
assert (coordinates[0].length==coordinates[1].length && coordinates[0].length>0);
int length = coordinates[0].length;
VariableLong.writePositive(buffer,length);
for (int i = 0; i < 2; i++) {
for (int j = 0; j < length; j++) {
buffer.putFloat(coordinates[i][j]);
}
}
}
}
}
| 0 (true) | titan-core_src_main_java_com_thinkaurelius_titan_core_attribute_Geoshape.java |
6,451 |
class TribeClusterStateListener implements ClusterStateListener {
private final InternalNode tribeNode;
private final String tribeName;
TribeClusterStateListener(InternalNode tribeNode) {
this.tribeNode = tribeNode;
this.tribeName = tribeNode.settings().get(TRIBE_NAME);
}
@Override
public void clusterChanged(final ClusterChangedEvent event) {
logger.debug("[{}] received cluster event, [{}]", tribeName, event.source());
clusterService.submitStateUpdateTask("cluster event from " + tribeName + ", " + event.source(), new ClusterStateUpdateTask() {
@Override
public ClusterState execute(ClusterState currentState) throws Exception {
ClusterState tribeState = event.state();
DiscoveryNodes.Builder nodes = DiscoveryNodes.builder(currentState.nodes());
// -- merge nodes
// go over existing nodes, and see if they need to be removed
for (DiscoveryNode discoNode : currentState.nodes()) {
String markedTribeName = discoNode.attributes().get(TRIBE_NAME);
if (markedTribeName != null && markedTribeName.equals(tribeName)) {
if (tribeState.nodes().get(discoNode.id()) == null) {
logger.info("[{}] removing node [{}]", tribeName, discoNode);
nodes.remove(discoNode.id());
}
}
}
// go over tribe nodes, and see if they need to be added
for (DiscoveryNode tribe : tribeState.nodes()) {
if (currentState.nodes().get(tribe.id()) == null) {
// a new node, add it, but also add the tribe name to the attributes
ImmutableMap<String, String> tribeAttr = MapBuilder.newMapBuilder(tribe.attributes()).put(TRIBE_NAME, tribeName).immutableMap();
DiscoveryNode discoNode = new DiscoveryNode(tribe.name(), tribe.id(), tribe.getHostName(), tribe.getHostAddress(), tribe.address(), tribeAttr, tribe.version());
logger.info("[{}] adding node [{}]", tribeName, discoNode);
nodes.put(discoNode);
}
}
// -- merge metadata
MetaData.Builder metaData = MetaData.builder(currentState.metaData());
RoutingTable.Builder routingTable = RoutingTable.builder(currentState.routingTable());
// go over existing indices, and see if they need to be removed
for (IndexMetaData index : currentState.metaData()) {
String markedTribeName = index.settings().get(TRIBE_NAME);
if (markedTribeName != null && markedTribeName.equals(tribeName)) {
IndexMetaData tribeIndex = tribeState.metaData().index(index.index());
if (tribeIndex == null) {
logger.info("[{}] removing index [{}]", tribeName, index.index());
metaData.remove(index.index());
routingTable.remove(index.index());
} else {
// always make sure to update the metadata and routing table, in case
// there are changes in them (new mapping, shards moving from initializing to started)
routingTable.add(tribeState.routingTable().index(index.index()));
Settings tribeSettings = ImmutableSettings.builder().put(tribeIndex.settings()).put(TRIBE_NAME, tribeName).build();
metaData.put(IndexMetaData.builder(tribeIndex).settings(tribeSettings));
}
}
}
// go over tribe one, and see if they need to be added
for (IndexMetaData tribeIndex : tribeState.metaData()) {
if (!currentState.metaData().hasIndex(tribeIndex.index())) {
// a new index, add it, and add the tribe name as a setting
logger.info("[{}] adding index [{}]", tribeName, tribeIndex.index());
Settings tribeSettings = ImmutableSettings.builder().put(tribeIndex.settings()).put(TRIBE_NAME, tribeName).build();
metaData.put(IndexMetaData.builder(tribeIndex).settings(tribeSettings));
routingTable.add(tribeState.routingTable().index(tribeIndex.index()));
}
}
return ClusterState.builder(currentState).nodes(nodes).metaData(metaData).routingTable(routingTable).build();
}
@Override
public void onFailure(String source, Throwable t) {
logger.warn("failed to process [{}]", t, source);
}
});
}
}
| 1 (no label) | src_main_java_org_elasticsearch_tribe_TribeService.java |
358 |
public class NodesStatsRequestBuilder extends NodesOperationRequestBuilder<NodesStatsRequest, NodesStatsResponse, NodesStatsRequestBuilder> {
public NodesStatsRequestBuilder(ClusterAdminClient clusterClient) {
super((InternalClusterAdminClient) clusterClient, new NodesStatsRequest());
}
/**
* Sets all the request flags.
*/
public NodesStatsRequestBuilder all() {
request.all();
return this;
}
/**
* Clears all stats flags.
*/
public NodesStatsRequestBuilder clear() {
request.clear();
return this;
}
/**
* Should the node indices stats be returned.
*/
public NodesStatsRequestBuilder setIndices(boolean indices) {
request.indices(indices);
return this;
}
public NodesStatsRequestBuilder setBreaker(boolean breaker) {
request.breaker(breaker);
return this;
}
/**
* Should the node indices stats be returned.
*/
public NodesStatsRequestBuilder setIndices(CommonStatsFlags indices) {
request.indices(indices);
return this;
}
/**
* Should the node OS stats be returned.
*/
public NodesStatsRequestBuilder setOs(boolean os) {
request.os(os);
return this;
}
/**
* Should the node OS stats be returned.
*/
public NodesStatsRequestBuilder setProcess(boolean process) {
request.process(process);
return this;
}
/**
* Should the node JVM stats be returned.
*/
public NodesStatsRequestBuilder setJvm(boolean jvm) {
request.jvm(jvm);
return this;
}
/**
* Should the node thread pool stats be returned.
*/
public NodesStatsRequestBuilder setThreadPool(boolean threadPool) {
request.threadPool(threadPool);
return this;
}
/**
* Should the node Network stats be returned.
*/
public NodesStatsRequestBuilder setNetwork(boolean network) {
request.network(network);
return this;
}
/**
* Should the node file system stats be returned.
*/
public NodesStatsRequestBuilder setFs(boolean fs) {
request.fs(fs);
return this;
}
/**
* Should the node Transport stats be returned.
*/
public NodesStatsRequestBuilder setTransport(boolean transport) {
request.transport(transport);
return this;
}
/**
* Should the node HTTP stats be returned.
*/
public NodesStatsRequestBuilder setHttp(boolean http) {
request.http(http);
return this;
}
@Override
protected void doExecute(ActionListener<NodesStatsResponse> listener) {
((ClusterAdminClient) client).nodesStats(request, listener);
}
}
| 0true
|
src_main_java_org_elasticsearch_action_admin_cluster_node_stats_NodesStatsRequestBuilder.java
|
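A minimal usage sketch for the builder above, assuming an existing ClusterAdminClient (e.g. obtained via client.admin().cluster()); the variable name clusterAdminClient and the blocking actionGet() call are illustrative, and imports are elided as elsewhere in this file:
// Hedged sketch: request only JVM, OS and thread-pool stats from all nodes.
NodesStatsResponse response = new NodesStatsRequestBuilder(clusterAdminClient)
        .clear()              // drop any default flags first
        .setJvm(true)
        .setOs(true)
        .setThreadPool(true)
        .execute()            // returns a future
        .actionGet();         // block until the stats arrive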
2,590 |
public class SocketConnector implements Runnable {
private static final int TIMEOUT = 3000;
private final TcpIpConnectionManager connectionManager;
private final Address address;
private final ILogger logger;
private final boolean silent;
public SocketConnector(TcpIpConnectionManager connectionManager, Address address, boolean silent) {
this.connectionManager = connectionManager;
this.address = address;
this.logger = connectionManager.ioService.getLogger(this.getClass().getName());
this.silent = silent;
}
public void run() {
if (!connectionManager.isLive()) {
if (logger.isFinestEnabled()) {
String message = "ConnectionManager is not live, connection attempt to " + address + " is cancelled!";
log(Level.FINEST, message);
}
return;
}
try {
if (logger.isFinestEnabled()) {
log(Level.FINEST, "Starting to connect to " + address);
}
final Address thisAddress = connectionManager.ioService.getThisAddress();
if (address.isIPv4()) {
// remote is IPv4; connect...
tryToConnect(address.getInetSocketAddress(), 0);
} else if (thisAddress.isIPv6() && thisAddress.getScopeId() != null) {
// Both remote and this addresses are IPv6.
// This is a local IPv6 address and scope id is known.
// find correct inet6 address for remote and connect...
final Inet6Address inetAddress = AddressUtil
.getInetAddressFor((Inet6Address) address.getInetAddress(), thisAddress.getScopeId());
tryToConnect(new InetSocketAddress(inetAddress, address.getPort()), 0);
} else {
// remote is IPv6 and this is either IPv4 or a global IPv6.
// find possible remote inet6 addresses and try each one to connect...
final Collection<Inet6Address> possibleInetAddresses = AddressUtil.getPossibleInetAddressesFor(
(Inet6Address) address.getInetAddress());
final Level level = silent ? Level.FINEST : Level.INFO;
//TODO: collection.toString() will likely not produce any useful output!
if (logger.isLoggable(level)) {
log(level, "Trying to connect possible IPv6 addresses: " + possibleInetAddresses);
}
boolean connected = false;
Exception error = null;
for (Inet6Address inetAddress : possibleInetAddresses) {
try {
tryToConnect(new InetSocketAddress(inetAddress, address.getPort()), TIMEOUT);
connected = true;
break;
} catch (Exception e) {
error = e;
}
}
if (!connected && error != null) {
// could not connect any of addresses
throw error;
}
}
} catch (Throwable e) {
logger.finest(e);
connectionManager.failedConnection(address, e, silent);
}
}
private void tryToConnect(final InetSocketAddress socketAddress, final int timeout)
throws Exception {
final SocketChannel socketChannel = SocketChannel.open();
connectionManager.initSocket(socketChannel.socket());
if (connectionManager.ioService.isSocketBind()) {
bindSocket(socketChannel);
}
final Level level = silent ? Level.FINEST : Level.INFO;
if (logger.isLoggable(level)) {
final String message = "Connecting to " + socketAddress + ", timeout: " + timeout
+ ", bind-any: " + connectionManager.ioService.isSocketBindAny();
log(level, message);
}
try {
socketChannel.configureBlocking(true);
try {
if (timeout > 0) {
socketChannel.socket().connect(socketAddress, timeout);
} else {
socketChannel.connect(socketAddress);
}
} catch (SocketException ex) {
//we want to include the socketAddress in the exception.
SocketException newEx = new SocketException(ex.getMessage() + " to address " + socketAddress);
newEx.setStackTrace(ex.getStackTrace());
throw newEx;
}
if (logger.isFinestEnabled()) {
log(Level.FINEST, "Successfully connected to: " + address + " using socket " + socketChannel.socket());
}
final SocketChannelWrapper socketChannelWrapper = connectionManager.wrapSocketChannel(socketChannel, true);
MemberSocketInterceptor memberSocketInterceptor = connectionManager.getMemberSocketInterceptor();
if (memberSocketInterceptor != null) {
if (logger.isFinestEnabled()) {
log(Level.FINEST, "Calling member socket interceptor: " + memberSocketInterceptor + " for " + socketChannel);
}
memberSocketInterceptor.onConnect(socketChannel.socket());
}
socketChannelWrapper.configureBlocking(false);
TcpIpConnection connection = connectionManager.assignSocketChannel(socketChannelWrapper);
connection.getWriteHandler().setProtocol(Protocols.CLUSTER);
connectionManager.sendBindRequest(connection, address, true);
} catch (Exception e) {
closeSocket(socketChannel);
log(level, "Could not connect to: " + socketAddress + ". Reason: " + e.getClass().getSimpleName()
+ "[" + e.getMessage() + "]");
throw e;
}
}
private void bindSocket(final SocketChannel socketChannel) throws IOException {
final InetAddress inetAddress;
if (connectionManager.ioService.isSocketBindAny()) {
inetAddress = null;
} else {
final Address thisAddress = connectionManager.ioService.getThisAddress();
inetAddress = thisAddress.getInetAddress();
}
final Socket socket = socketChannel.socket();
if (connectionManager.useAnyOutboundPort()) {
final SocketAddress socketAddress = new InetSocketAddress(inetAddress, 0);
socket.bind(socketAddress);
} else {
IOException ex = null;
final int retryCount = connectionManager.getOutboundPortCount() * 2;
for (int i = 0; i < retryCount; i++) {
final int port = connectionManager.acquireOutboundPort();
final SocketAddress socketAddress = new InetSocketAddress(inetAddress, port);
try {
socket.bind(socketAddress);
return;
} catch (IOException e) {
ex = e;
log(Level.FINEST, "Could not bind port[ " + port + "]: " + e.getMessage());
}
}
throw ex;
}
}
private void closeSocket(final SocketChannel socketChannel) {
if (socketChannel != null) {
try {
socketChannel.close();
} catch (final IOException ignored) {
}
}
}
private void log(Level level, String message) {
logger.log(level, message);
connectionManager.ioService.getSystemLogService().logConnection(message);
}
}
| 1no label
|
hazelcast_src_main_java_com_hazelcast_nio_SocketConnector.java
|
5,788 |
public class VersionFetchSubPhase implements FetchSubPhase {
@Override
public Map<String, ? extends SearchParseElement> parseElements() {
return ImmutableMap.of("version", new VersionParseElement());
}
@Override
public boolean hitsExecutionNeeded(SearchContext context) {
return false;
}
@Override
public void hitsExecute(SearchContext context, InternalSearchHit[] hits) throws ElasticsearchException {
}
@Override
public boolean hitExecutionNeeded(SearchContext context) {
return context.version();
}
@Override
public void hitExecute(SearchContext context, HitContext hitContext) throws ElasticsearchException {
// it might make sense to cache the TermDocs on a shared fetch context and just skip here;
// that would mean working against the high-level multi reader rather than the lower-level
// segment reader, as is the case below...
long version;
try {
version = Versions.loadVersion(
hitContext.readerContext().reader(),
new Term(UidFieldMapper.NAME, hitContext.fieldVisitor().uid().toBytesRef())
);
} catch (IOException e) {
throw new ElasticsearchException("Could not query index for _version", e);
}
if (version < 0) {
version = -1;
}
hitContext.hit().version(version);
}
}
| 1no label
|
src_main_java_org_elasticsearch_search_fetch_version_VersionFetchSubPhase.java
|
400 |
static enum EvictionPolicy {
NONE, LRU, LFU
}
| 0true
|
hazelcast-client_src_main_java_com_hazelcast_client_nearcache_ClientNearCache.java
|
1,223 |
public abstract class OStorageEmbedded extends OStorageAbstract {
protected final ORecordLockManager lockManager;
protected final String PROFILER_CREATE_RECORD;
protected final String PROFILER_READ_RECORD;
protected final String PROFILER_UPDATE_RECORD;
protected final String PROFILER_DELETE_RECORD;
public OStorageEmbedded(final String iName, final String iFilePath, final String iMode) {
super(iName, iFilePath, iMode, OGlobalConfiguration.STORAGE_LOCK_TIMEOUT.getValueAsInteger(), new OCacheLevelTwoLocatorLocal());
lockManager = new ORecordLockManager(OGlobalConfiguration.STORAGE_RECORD_LOCK_TIMEOUT.getValueAsInteger());
PROFILER_CREATE_RECORD = "db." + name + ".createRecord";
PROFILER_READ_RECORD = "db." + name + ".readRecord";
PROFILER_UPDATE_RECORD = "db." + name + ".updateRecord";
PROFILER_DELETE_RECORD = "db." + name + ".deleteRecord";
}
public abstract OCluster getClusterByName(final String iClusterName);
protected abstract ORawBuffer readRecord(final OCluster iClusterSegment, final ORecordId iRid, boolean iAtomicLock,
boolean loadTombstones);
/**
* Closes the storage freeing the lock manager first.
*/
@Override
public void close(final boolean iForce) {
if (checkForClose(iForce))
lockManager.clear();
super.close(iForce);
}
/**
* Executes the command request and return the result back.
*/
public Object command(final OCommandRequestText iCommand) {
final OCommandExecutor executor = OCommandManager.instance().getExecutor(iCommand);
// COPY THE CONTEXT FROM THE REQUEST
executor.setContext(iCommand.getContext());
executor.setProgressListener(iCommand.getProgressListener());
executor.parse(iCommand);
return executeCommand(iCommand, executor);
}
public Object executeCommand(final OCommandRequestText iCommand, final OCommandExecutor executor) {
if (iCommand.isIdempotent() && !executor.isIdempotent())
throw new OCommandExecutionException("Cannot execute non idempotent command");
long beginTime = Orient.instance().getProfiler().startChrono();
try {
return executor.execute(iCommand.getParameters());
} catch (OException e) {
// PASS THROUGH
throw e;
} catch (Exception e) {
throw new OCommandExecutionException("Error on execution of command: " + iCommand, e);
} finally {
if (Orient.instance().getProfiler().isRecording())
Orient
.instance()
.getProfiler()
.stopChrono("db." + ODatabaseRecordThreadLocal.INSTANCE.get().getName() + ".command." + iCommand.toString(),
"Command executed against the database", beginTime, "db.*.command.*");
}
}
@Override
public OPhysicalPosition[] higherPhysicalPositions(int currentClusterId, OPhysicalPosition physicalPosition) {
if (currentClusterId == -1)
return null;
checkOpeness();
lock.acquireSharedLock();
try {
final OCluster cluster = getClusterById(currentClusterId);
return cluster.higherPositions(physicalPosition);
} catch (IOException ioe) {
throw new OStorageException("Cluster Id " + currentClusterId + " is invalid in storage '" + name + '\'', ioe);
} finally {
lock.releaseSharedLock();
}
}
@Override
public OPhysicalPosition[] ceilingPhysicalPositions(int clusterId, OPhysicalPosition physicalPosition) {
if (clusterId == -1)
return null;
checkOpeness();
lock.acquireSharedLock();
try {
final OCluster cluster = getClusterById(clusterId);
return cluster.ceilingPositions(physicalPosition);
} catch (IOException ioe) {
throw new OStorageException("Cluster Id " + clusterId + " is invalid in storage '" + name + '\'', ioe);
} finally {
lock.releaseSharedLock();
}
}
@Override
public OPhysicalPosition[] lowerPhysicalPositions(int currentClusterId, OPhysicalPosition physicalPosition) {
if (currentClusterId == -1)
return null;
checkOpeness();
lock.acquireSharedLock();
try {
final OCluster cluster = getClusterById(currentClusterId);
return cluster.lowerPositions(physicalPosition);
} catch (IOException ioe) {
throw new OStorageException("Cluster Id " + currentClusterId + " is invalid in storage '" + name + '\'', ioe);
} finally {
lock.releaseSharedLock();
}
}
@Override
public OPhysicalPosition[] floorPhysicalPositions(int clusterId, OPhysicalPosition physicalPosition) {
if (clusterId == -1)
return null;
checkOpeness();
lock.acquireSharedLock();
try {
final OCluster cluster = getClusterById(clusterId);
return cluster.floorPositions(physicalPosition);
} catch (IOException ioe) {
throw new OStorageException("Cluster Id " + clusterId + " is invalid in storage '" + name + '\'', ioe);
} finally {
lock.releaseSharedLock();
}
}
public void acquireWriteLock(final ORID iRid) {
lockManager.acquireLock(Thread.currentThread(), iRid, LOCK.EXCLUSIVE);
}
public void releaseWriteLock(final ORID iRid) {
lockManager.releaseLock(Thread.currentThread(), iRid, LOCK.EXCLUSIVE);
}
public void acquireReadLock(final ORID iRid) {
lockManager.acquireLock(Thread.currentThread(), iRid, LOCK.SHARED);
}
public void releaseReadLock(final ORID iRid) {
lockManager.releaseLock(Thread.currentThread(), iRid, LOCK.SHARED);
}
@Override
public ORecordMetadata getRecordMetadata(ORID rid) {
if (rid.isNew())
throw new OStorageException("Passed record with id " + rid + " is new and can not be stored.");
checkOpeness();
final OCluster cluster = getClusterById(rid.getClusterId());
lock.acquireSharedLock();
try {
lockManager.acquireLock(Thread.currentThread(), rid, LOCK.SHARED);
try {
final OPhysicalPosition ppos = cluster.getPhysicalPosition(new OPhysicalPosition(rid.getClusterPosition()));
if (ppos == null || ppos.dataSegmentId < 0)
return null;
return new ORecordMetadata(rid, ppos.recordVersion);
} finally {
lockManager.releaseLock(Thread.currentThread(), rid, LOCK.SHARED);
}
} catch (IOException ioe) {
OLogManager.instance().error(this, "Retrieval of record '" + rid + "' cause: " + ioe.getMessage(), ioe);
} finally {
lock.releaseSharedLock();
}
return null;
}
/**
* Checks if the storage is open. If it's closed an exception is raised.
*/
protected void checkOpeness() {
if (status != STATUS.OPEN)
throw new OStorageException("Storage " + name + " is not opened.");
}
}
| 1no label
|
core_src_main_java_com_orientechnologies_orient_core_storage_OStorageEmbedded.java
|
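A sketch of scanning one cluster with the position methods above, assuming an open OStorageEmbedded instance named storage and a valid clusterId; the batch-advance pattern (seed with ceilingPhysicalPositions, continue with higherPhysicalPositions) follows the method contracts, but the surrounding variables are illustrative:
// Hedged sketch: iterate every physical position in a cluster, batch by batch.
OPhysicalPosition[] positions = storage.ceilingPhysicalPositions(clusterId,
    new OPhysicalPosition(OClusterPositionFactory.INSTANCE.valueOf(0)));
while (positions != null && positions.length > 0) {
  for (OPhysicalPosition position : positions) {
    // inspect position.clusterPosition, position.recordVersion, ...
  }
  // resume strictly after the last position of this batch
  positions = storage.higherPhysicalPositions(clusterId, positions[positions.length - 1]);
}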
375 |
public class ODatabaseFlat extends ODatabaseRecordTx {
public ODatabaseFlat(String iURL) {
super(iURL, ORecordFlat.RECORD_TYPE);
}
@SuppressWarnings("unchecked")
@Override
public ORecordIteratorCluster<ORecordFlat> browseCluster(final String iClusterName) {
return super.browseCluster(iClusterName, ORecordFlat.class);
}
@Override
public ORecordIteratorCluster<ORecordFlat> browseCluster(String iClusterName, OClusterPosition startClusterPosition,
OClusterPosition endClusterPosition, boolean loadTombstones) {
return super.browseCluster(iClusterName, ORecordFlat.class, startClusterPosition, endClusterPosition, loadTombstones);
}
@SuppressWarnings("unchecked")
@Override
public ORecordFlat newInstance() {
return new ORecordFlat();
}
@Override
public ODatabaseRecord commit() {
try {
return super.commit();
} finally {
getTransaction().close();
}
}
@Override
public ODatabaseRecord rollback() {
try {
return super.rollback();
} finally {
getTransaction().close();
}
}
}
| 1no label
|
core_src_main_java_com_orientechnologies_orient_core_db_record_ODatabaseFlat.java
|
1,538 |
public static class Map extends Mapper<NullWritable, FaunusVertex, WritableComparable, Text> {
private String key;
private boolean isVertex;
private WritableHandler handler;
private String elementKey;
private SafeMapperOutputs outputs;
@Override
public void setup(final Mapper.Context context) throws IOException, InterruptedException {
this.isVertex = context.getConfiguration().getClass(CLASS, Element.class, Element.class).equals(Vertex.class);
this.key = context.getConfiguration().get(KEY);
this.handler = new WritableHandler(context.getConfiguration().getClass(TYPE, Text.class, WritableComparable.class));
this.elementKey = context.getConfiguration().get(ELEMENT_KEY);
this.outputs = new SafeMapperOutputs(context);
}
private Text text = new Text();
private WritableComparable writable;
@Override
public void map(final NullWritable key, final FaunusVertex value, final Mapper<NullWritable, FaunusVertex, WritableComparable, Text>.Context context) throws IOException, InterruptedException {
if (this.isVertex) {
if (value.hasPaths()) {
this.text.set(ElementPicker.getPropertyAsString(value, this.elementKey));
final Object temp = ElementPicker.getProperty(value, this.key);
if (this.key.equals(Tokens._COUNT)) {
this.writable = this.handler.set(temp);
context.write(this.writable, this.text);
} else if (temp instanceof Number) {
this.writable = this.handler.set(multiplyPathCount((Number) temp, value.pathCount()));
context.write(this.writable, this.text);
} else {
this.writable = this.handler.set(temp);
for (int i = 0; i < value.pathCount(); i++) {
context.write(this.writable, this.text);
}
}
DEFAULT_COMPAT.incrementContextCounter(context, Counters.VERTICES_PROCESSED, 1L);
}
} else {
long edgesProcessed = 0;
for (final Edge e : value.getEdges(Direction.OUT)) {
final StandardFaunusEdge edge = (StandardFaunusEdge) e;
if (edge.hasPaths()) {
this.text.set(ElementPicker.getPropertyAsString(edge, this.elementKey));
final Object temp = ElementPicker.getProperty(edge, this.key);
if (this.key.equals(Tokens._COUNT)) {
this.writable = this.handler.set(temp);
context.write(this.writable, this.text);
} else if (temp instanceof Number) {
this.writable = this.handler.set(multiplyPathCount((Number) temp, edge.pathCount()));
context.write(this.writable, this.text);
} else {
this.writable = this.handler.set(temp);
for (int i = 0; i < edge.pathCount(); i++) {
context.write(this.writable, this.text);
}
}
edgesProcessed++;
}
}
DEFAULT_COMPAT.incrementContextCounter(context, Counters.OUT_EDGES_PROCESSED, edgesProcessed);
}
this.outputs.write(Tokens.GRAPH, NullWritable.get(), value);
}
@Override
public void cleanup(final Mapper<NullWritable, FaunusVertex, WritableComparable, Text>.Context context) throws IOException, InterruptedException {
this.outputs.close();
}
}
| 1no label
|
titan-hadoop-parent_titan-hadoop-core_src_main_java_com_thinkaurelius_titan_hadoop_mapreduce_transform_OrderMapReduce.java
|
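multiplyPathCount is referenced but not defined in this excerpt; the following is a plausible sketch of such a helper (scale a numeric property by the element's path count, roughly preserving the numeric type), offered purely as an assumption rather than the library's actual implementation:
// Hypothetical helper assumed by the mapper above.
static Number multiplyPathCount(final Number value, final long pathCount) {
    if (value instanceof Long)
        return value.longValue() * pathCount;
    if (value instanceof Integer)
        return value.intValue() * pathCount;   // int widens to long in the multiplication
    if (value instanceof Float)
        return value.floatValue() * pathCount;
    // fall back to double arithmetic for Double and anything else
    return value.doubleValue() * pathCount;
}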
85 |
public class ClientEngineImpl implements ClientEngine, CoreService,
ManagedService, MembershipAwareService, EventPublishingService<ClientEndpoint, ClientListener> {
public static final String SERVICE_NAME = "hz:core:clientEngine";
public static final int DESTROY_ENDPOINT_DELAY_MS = 1111;
public static final int ENDPOINT_REMOVE_DELAY_SECONDS = 10;
public static final int THREADS_PER_CORE = 10;
public static final int RIDICULOUS_THREADS_PER_CORE = 100000;
static final Data NULL = new Data();
private final Node node;
private final NodeEngineImpl nodeEngine;
private final Executor executor;
private final SerializationService serializationService;
private final ConcurrentMap<Connection, ClientEndpoint> endpoints =
new ConcurrentHashMap<Connection, ClientEndpoint>();
private final ILogger logger;
private final ConnectionListener connectionListener = new ConnectionListenerImpl();
public ClientEngineImpl(Node node) {
this.node = node;
this.serializationService = node.getSerializationService();
this.nodeEngine = node.nodeEngine;
int coreSize = Runtime.getRuntime().availableProcessors();
this.executor = nodeEngine.getExecutionService().register(ExecutionService.CLIENT_EXECUTOR,
coreSize * THREADS_PER_CORE, coreSize * RIDICULOUS_THREADS_PER_CORE,
ExecutorType.CONCRETE);
this.logger = node.getLogger(ClientEngine.class);
}
//needed for testing purposes
public ConnectionListener getConnectionListener() {
return connectionListener;
}
@Override
public int getClientEndpointCount() {
return endpoints.size();
}
public void handlePacket(ClientPacket packet) {
executor.execute(new ClientPacketProcessor(packet));
}
@Override
public Object toObject(Data data) {
return serializationService.toObject(data);
}
@Override
public Data toData(Object obj) {
return serializationService.toData(obj);
}
@Override
public InternalPartitionService getPartitionService() {
return nodeEngine.getPartitionService();
}
@Override
public ClusterService getClusterService() {
return nodeEngine.getClusterService();
}
@Override
public SerializationService getSerializationService() {
return serializationService;
}
@Override
public EventService getEventService() {
return nodeEngine.getEventService();
}
@Override
public ProxyService getProxyService() {
return nodeEngine.getProxyService();
}
void sendOperation(Operation op, Address target) {
getOperationService().send(op, target);
}
InvocationBuilder createInvocationBuilder(String serviceName, Operation op, final int partitionId) {
return getOperationService().createInvocationBuilder(serviceName, op, partitionId);
}
private OperationService getOperationService() {
return nodeEngine.getOperationService();
}
InvocationBuilder createInvocationBuilder(String serviceName, Operation op, Address target) {
return getOperationService().createInvocationBuilder(serviceName, op, target);
}
Map<Integer, Object> invokeOnAllPartitions(String serviceName, OperationFactory operationFactory)
throws Exception {
return getOperationService().invokeOnAllPartitions(serviceName, operationFactory);
}
Map<Integer, Object> invokeOnPartitions(String serviceName, OperationFactory operationFactory,
Collection<Integer> partitions) throws Exception {
return getOperationService().invokeOnPartitions(serviceName, operationFactory, partitions);
}
void sendResponse(ClientEndpoint endpoint, ClientResponse response) {
Data resultData = serializationService.toData(response);
Connection conn = endpoint.getConnection();
conn.write(new DataAdapter(resultData, serializationService.getSerializationContext()));
}
@Override
public TransactionManagerService getTransactionManagerService() {
return nodeEngine.getTransactionManagerService();
}
@Override
public Address getMasterAddress() {
return node.getMasterAddress();
}
@Override
public Address getThisAddress() {
return node.getThisAddress();
}
@Override
public MemberImpl getLocalMember() {
return node.getLocalMember();
}
@Override
public Config getConfig() {
return node.getConfig();
}
@Override
public ILogger getLogger(Class clazz) {
return node.getLogger(clazz);
}
@Override
public ILogger getLogger(String className) {
return node.getLogger(className);
}
Set<ClientEndpoint> getEndpoints(String uuid) {
Set<ClientEndpoint> endpointSet = new HashSet<ClientEndpoint>();
for (ClientEndpoint endpoint : endpoints.values()) {
if (uuid.equals(endpoint.getUuid())) {
endpointSet.add(endpoint);
}
}
return endpointSet;
}
ClientEndpoint getEndpoint(Connection conn) {
return endpoints.get(conn);
}
ClientEndpoint createEndpoint(Connection conn) {
if (!conn.live()) {
logger.severe("Can't create and endpoint for a dead connection");
return null;
}
String clientUuid = UuidUtil.createClientUuid(conn.getEndPoint());
ClientEndpoint endpoint = new ClientEndpoint(ClientEngineImpl.this, conn, clientUuid);
if (endpoints.putIfAbsent(conn, endpoint) != null) {
logger.severe("An endpoint already exists for connection:" + conn);
}
return endpoint;
}
ClientEndpoint removeEndpoint(final Connection connection) {
return removeEndpoint(connection, false);
}
ClientEndpoint removeEndpoint(final Connection connection, boolean closeImmediately) {
final ClientEndpoint endpoint = endpoints.remove(connection);
destroyEndpoint(endpoint, closeImmediately);
return endpoint;
}
private void destroyEndpoint(ClientEndpoint endpoint, boolean closeImmediately) {
if (endpoint != null) {
logger.info("Destroying " + endpoint);
try {
endpoint.destroy();
} catch (LoginException e) {
logger.warning(e);
}
final Connection connection = endpoint.getConnection();
if (closeImmediately) {
try {
connection.close();
} catch (Throwable e) {
logger.warning("While closing client connection: " + connection, e);
}
} else {
nodeEngine.getExecutionService().schedule(new Runnable() {
public void run() {
if (connection.live()) {
try {
connection.close();
} catch (Throwable e) {
logger.warning("While closing client connection: " + e.toString());
}
}
}
}, DESTROY_ENDPOINT_DELAY_MS, TimeUnit.MILLISECONDS);
}
sendClientEvent(endpoint);
}
}
@Override
public SecurityContext getSecurityContext() {
return node.securityContext;
}
void bind(final ClientEndpoint endpoint) {
final Connection conn = endpoint.getConnection();
if (conn instanceof TcpIpConnection) {
Address address = new Address(conn.getRemoteSocketAddress());
TcpIpConnectionManager connectionManager = (TcpIpConnectionManager) node.getConnectionManager();
connectionManager.bind((TcpIpConnection) conn, address, null, false);
}
sendClientEvent(endpoint);
}
private void sendClientEvent(ClientEndpoint endpoint) {
if (!endpoint.isFirstConnection()) {
final EventService eventService = nodeEngine.getEventService();
final Collection<EventRegistration> regs = eventService.getRegistrations(SERVICE_NAME, SERVICE_NAME);
eventService.publishEvent(SERVICE_NAME, regs, endpoint, endpoint.getUuid().hashCode());
}
}
@Override
public void dispatchEvent(ClientEndpoint event, ClientListener listener) {
if (event.isAuthenticated()) {
listener.clientConnected(event);
} else {
listener.clientDisconnected(event);
}
}
@Override
public void memberAdded(MembershipServiceEvent event) {
}
@Override
public void memberRemoved(MembershipServiceEvent event) {
if (event.getMember().localMember()) {
return;
}
final String uuid = event.getMember().getUuid();
try {
nodeEngine.getExecutionService().schedule(new Runnable() {
@Override
public void run() {
Iterator<ClientEndpoint> iterator = endpoints.values().iterator();
while (iterator.hasNext()) {
ClientEndpoint endpoint = iterator.next();
String ownerUuid = endpoint.getPrincipal().getOwnerUuid();
if (uuid.equals(ownerUuid)) {
iterator.remove();
destroyEndpoint(endpoint, true);
}
}
}
}, ENDPOINT_REMOVE_DELAY_SECONDS, TimeUnit.SECONDS);
} catch (RejectedExecutionException e) {
if (logger.isFinestEnabled()) {
logger.finest(e);
}
}
}
@Override
public void memberAttributeChanged(MemberAttributeServiceEvent event) {
}
String addClientListener(ClientListener clientListener) {
EventService eventService = nodeEngine.getEventService();
EventRegistration registration = eventService
.registerLocalListener(SERVICE_NAME, SERVICE_NAME, clientListener);
return registration.getId();
}
boolean removeClientListener(String registrationId) {
return nodeEngine.getEventService().deregisterListener(SERVICE_NAME, SERVICE_NAME, registrationId);
}
public ClientService getClientService() {
return new ClientServiceProxy(this);
}
public Collection<Client> getClients() {
final HashSet<Client> clients = new HashSet<Client>();
for (ClientEndpoint endpoint : endpoints.values()) {
if (!endpoint.isFirstConnection()) {
clients.add(endpoint);
}
}
return clients;
}
@Override
public void init(NodeEngine nodeEngine, Properties properties) {
ClassDefinitionBuilder builder = new ClassDefinitionBuilder(ClientPortableHook.ID, ClientPortableHook.PRINCIPAL);
builder.addUTFField("uuid").addUTFField("ownerUuid");
serializationService.getSerializationContext().registerClassDefinition(builder.build());
node.getConnectionManager().addConnectionListener(connectionListener);
}
@Override
public void reset() {
}
public void shutdown(boolean terminate) {
for (ClientEndpoint endpoint : endpoints.values()) {
try {
endpoint.destroy();
} catch (LoginException e) {
logger.finest(e.getMessage());
}
try {
final Connection conn = endpoint.getConnection();
if (conn.live()) {
conn.close();
}
} catch (Exception e) {
logger.finest(e);
}
}
endpoints.clear();
}
private final class ClientPacketProcessor implements Runnable {
final ClientPacket packet;
private ClientPacketProcessor(ClientPacket packet) {
this.packet = packet;
}
@Override
public void run() {
Connection conn = packet.getConn();
ClientEndpoint endpoint = getEndpoint(conn);
ClientRequest request = null;
try {
request = loadRequest();
if (request == null) {
handlePacketWithNullRequest();
} else if (request instanceof AuthenticationRequest) {
endpoint = createEndpoint(conn);
if (endpoint != null) {
processRequest(endpoint, request);
} else {
handleEndpointNotCreatedConnectionNotAlive();
}
} else if (endpoint == null) {
handleMissingEndpoint(conn);
} else if (endpoint.isAuthenticated()) {
processRequest(endpoint, request);
} else {
handleAuthenticationFailure(conn, endpoint, request);
}
} catch (Throwable e) {
handleProcessingFailure(endpoint, request, e);
}
}
private ClientRequest loadRequest() {
Data data = packet.getData();
return serializationService.toObject(data);
}
private void handleEndpointNotCreatedConnectionNotAlive() {
logger.warning("Dropped: " + packet + " -> endpoint not created for AuthenticationRequest, "
+ "connection not alive");
}
private void handlePacketWithNullRequest() {
logger.warning("Dropped: " + packet + " -> null request");
}
private void handleMissingEndpoint(Connection conn) {
if (conn.live()) {
logger.severe("Dropping: " + packet + " -> no endpoint found for live connection.");
} else {
if (logger.isFinestEnabled()) {
logger.finest("Dropping: " + packet + " -> no endpoint found for dead connection.");
}
}
}
private void handleProcessingFailure(ClientEndpoint endpoint, ClientRequest request, Throwable e) {
Level level = nodeEngine.isActive() ? Level.SEVERE : Level.FINEST;
if (logger.isLoggable(level)) {
if (request == null) {
logger.log(level, e.getMessage(), e);
} else {
logger.log(level, "While executing request: " + request + " -> " + e.getMessage(), e);
}
}
if (request != null && endpoint != null) {
endpoint.sendResponse(e, request.getCallId());
}
}
private void processRequest(ClientEndpoint endpoint, ClientRequest request) throws Exception {
request.setEndpoint(endpoint);
initService(request);
request.setClientEngine(ClientEngineImpl.this);
checkPermissions(endpoint, request);
request.process();
}
private void checkPermissions(ClientEndpoint endpoint, ClientRequest request) {
SecurityContext securityContext = getSecurityContext();
if (securityContext != null) {
Permission permission = request.getRequiredPermission();
if (permission != null) {
securityContext.checkPermission(endpoint.getSubject(), permission);
}
}
}
private void initService(ClientRequest request) {
String serviceName = request.getServiceName();
if (serviceName == null) {
return;
}
Object service = nodeEngine.getService(serviceName);
if (service == null) {
if (nodeEngine.isActive()) {
throw new IllegalArgumentException("No service registered with name: " + serviceName);
}
throw new HazelcastInstanceNotActiveException();
}
request.setService(service);
}
private void handleAuthenticationFailure(Connection conn, ClientEndpoint endpoint, ClientRequest request) {
Exception exception;
if (nodeEngine.isActive()) {
String message = "Client " + conn + " must authenticate before any operation.";
logger.severe(message);
exception = new AuthenticationException(message);
} else {
exception = new HazelcastInstanceNotActiveException();
}
endpoint.sendResponse(exception, request.getCallId());
removeEndpoint(conn);
}
}
private final class ConnectionListenerImpl implements ConnectionListener {
@Override
public void connectionAdded(Connection conn) {
//no-op
//unfortunately we can't do the endpoint creation here, because this event is only called when the
//connection is bound, but we need to use the endpoint connection before that.
}
@Override
public void connectionRemoved(Connection connection) {
if (connection.isClient() && connection instanceof TcpIpConnection && nodeEngine.isActive()) {
ClientEndpoint endpoint = endpoints.get(connection);
if (endpoint == null) {
return;
}
String localMemberUuid = node.getLocalMember().getUuid();
String ownerUuid = endpoint.getPrincipal().getOwnerUuid();
if (localMemberUuid.equals(ownerUuid)) {
doRemoveEndpoint(connection, endpoint);
}
}
}
private void doRemoveEndpoint(Connection connection, ClientEndpoint endpoint) {
removeEndpoint(connection, true);
if (!endpoint.isFirstConnection()) {
return;
}
NodeEngine nodeEngine = node.nodeEngine;
Collection<MemberImpl> memberList = nodeEngine.getClusterService().getMemberList();
OperationService operationService = nodeEngine.getOperationService();
for (MemberImpl member : memberList) {
ClientDisconnectionOperation op = new ClientDisconnectionOperation(endpoint.getUuid());
op.setNodeEngine(nodeEngine)
.setServiceName(SERVICE_NAME)
.setService(ClientEngineImpl.this)
.setResponseHandler(createEmptyResponseHandler());
if (member.localMember()) {
operationService.runOperationOnCallingThread(op);
} else {
operationService.send(op, member.getAddress());
}
}
}
}
}
| 1no label
|
hazelcast_src_main_java_com_hazelcast_client_ClientEngineImpl.java
|
35 |
public class SetCommand extends AbstractTextCommand {
ByteBuffer response;
private final String key;
private final int flag;
private final int expiration;
private final int valueLen;
private final boolean noreply;
private final ByteBuffer bbValue;
public SetCommand(TextCommandType type, String key, int flag, int expiration, int valueLen, boolean noreply) {
super(type);
this.key = key;
this.flag = flag;
this.expiration = expiration;
this.valueLen = valueLen;
this.noreply = noreply;
bbValue = ByteBuffer.allocate(valueLen);
}
public boolean readFrom(ByteBuffer cb) {
copy(cb);
if (!bbValue.hasRemaining()) {
while (cb.hasRemaining()) {
char c = (char) cb.get();
if (c == '\n') {
bbValue.flip();
return true;
}
}
}
return false;
}
void copy(ByteBuffer cb) {
if (cb.isDirect()) {
int n = Math.min(cb.remaining(), bbValue.remaining());
if (n > 0) {
cb.get(bbValue.array(), bbValue.position(), n);
bbValue.position(bbValue.position() + n);
}
} else {
IOUtil.copyToHeapBuffer(cb, bbValue);
}
}
public void setResponse(byte[] value) {
this.response = ByteBuffer.wrap(value);
}
public boolean writeTo(ByteBuffer bb) {
if (response == null) {
response = ByteBuffer.wrap(STORED);
}
while (bb.hasRemaining() && response.hasRemaining()) {
bb.put(response.get());
}
return !response.hasRemaining();
}
public boolean shouldReply() {
return !noreply;
}
public int getExpiration() {
return expiration;
}
public String getKey() {
return key;
}
public byte[] getValue() {
return bbValue.array();
}
public int getFlag() {
return flag;
}
@Override
public String toString() {
return "SetCommand [" + type + "]{"
+ "key='"
+ key
+ '\''
+ ", flag="
+ flag
+ ", expiration="
+ expiration
+ ", valueLen="
+ valueLen
+ ", value="
+ bbValue
+ '}'
+ super.toString();
}
}
| 0true
|
hazelcast_src_main_java_com_hazelcast_ascii_memcache_SetCommand.java
|
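A rough driver for readFrom above, assuming the command header "set k 0 0 5" was already parsed and that the data block arrives as the raw value bytes followed by CRLF; the enum constant TextCommandType.SET is taken from the surrounding codebase and is assumed here:
// Hedged sketch: feed the 5-byte value "hello" plus the trailing \r\n.
SetCommand cmd = new SetCommand(TextCommandType.SET, "k", 0, 0, 5, false);
ByteBuffer incoming = ByteBuffer.wrap("hello\r\n".getBytes());
boolean complete = cmd.readFrom(incoming);  // true: value filled and newline consumed
byte[] value = cmd.getValue();              // the 5 value bytes of "hello"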
1,477 |
labels = OMultiValue.array(iParameters, String.class, new OCallable<Object, Object>() {
@Override
public Object call(final Object iArgument) {
return OStringSerializerHelper.getStringContent(iArgument);
}
});
| 1no label
|
graphdb_src_main_java_com_orientechnologies_orient_graph_sql_functions_OSQLFunctionMove.java
|
146 |
(new java.security.PrivilegedExceptionAction<sun.misc.Unsafe>() {
public sun.misc.Unsafe run() throws Exception {
Class<sun.misc.Unsafe> k = sun.misc.Unsafe.class;
for (java.lang.reflect.Field f : k.getDeclaredFields()) {
f.setAccessible(true);
Object x = f.get(null);
if (k.isInstance(x))
return k.cast(x);
}
throw new NoSuchFieldError("the Unsafe");
}});
| 0true
|
src_main_java_jsr166e_extra_AtomicDouble.java
|
94 |
public interface NamedOperationManager {
Map<String, String> manageNamedParameters(Map<String, String> parameterMap);
}
| 0true
|
admin_broadleaf-contentmanagement-module_src_main_java_org_broadleafcommerce_cms_file_service_operation_NamedOperationManager.java
|
44 |
public class InvalidIDException extends TitanException {
public InvalidIDException(String msg) {
super(msg);
}
public InvalidIDException(String msg, Throwable cause) {
super(msg, cause);
}
public InvalidIDException(Throwable cause) {
super(cause);
}
}
| 0true
|
titan-core_src_main_java_com_thinkaurelius_titan_core_InvalidIDException.java
|
358 |
future.andThen(new ExecutionCallback<Integer>() {
@Override
public void onResponse(Integer response) {
result[0] = response.intValue();
semaphore.release();
}
@Override
public void onFailure(Throwable t) {
semaphore.release();
}
});
| 0true
|
hazelcast-client_src_test_java_com_hazelcast_client_mapreduce_ClientMapReduceTest.java
|
1,498 |
public class CommitVerticesMapReduce {
// public static final String ACTION = Tokens.makeNamespace(CommitVerticesMapReduce.class) + ".action";
public enum Counters {
VERTICES_KEPT,
VERTICES_DROPPED,
OUT_EDGES_KEPT,
IN_EDGES_KEPT
}
public static org.apache.hadoop.conf.Configuration createConfiguration(final Tokens.Action action) {
ModifiableHadoopConfiguration c = ModifiableHadoopConfiguration.withoutResources();
c.set(COMMIT_VERTICES_ACTION, action);
return c.getHadoopConfiguration();
}
public static class Map extends Mapper<NullWritable, FaunusVertex, LongWritable, Holder> {
private boolean drop;
private final Holder<FaunusVertex> holder = new Holder<FaunusVertex>();
private final LongWritable longWritable = new LongWritable();
private Configuration faunusConf;
@Override
public void setup(final Mapper.Context context) throws IOException, InterruptedException {
faunusConf = ModifiableHadoopConfiguration.of(DEFAULT_COMPAT.getContextConfiguration(context));
Tokens.Action configuredAction = faunusConf.get(COMMIT_VERTICES_ACTION);
drop = Tokens.Action.DROP.equals(configuredAction);
}
@Override
public void map(final NullWritable key, final FaunusVertex value, final Mapper<NullWritable, FaunusVertex, LongWritable, Holder>.Context context) throws IOException, InterruptedException {
final boolean keep;
final boolean hasPaths = value.hasPaths();
long verticesKept = 0;
long verticesDropped = 0;
// note: the three branches below reduce to keep = (this.drop != hasPaths)
if (this.drop && hasPaths)
keep = false;
else if (!this.drop && hasPaths)
keep = true;
else
keep = this.drop && !hasPaths;
if (keep) {
this.longWritable.set(value.getLongId());
context.write(this.longWritable, this.holder.set('v', value));
verticesKept++;
} else {
final long vertexId = value.getLongId();
this.holder.set('k', new FaunusVertex(faunusConf, vertexId));
Iterator<Edge> itty = value.getEdges(OUT).iterator();
while (itty.hasNext()) {
Edge edge = itty.next();
final Long id = (Long) edge.getVertex(IN).getId();
if (!id.equals(vertexId)) {
this.longWritable.set(id);
context.write(this.longWritable, this.holder);
}
}
itty = value.getEdges(IN).iterator();
while (itty.hasNext()) {
Edge edge = itty.next();
final Long id = (Long) edge.getVertex(OUT).getId();
if (!id.equals(vertexId)) {
this.longWritable.set(id);
context.write(this.longWritable, this.holder);
}
}
this.longWritable.set(value.getLongId());
context.write(this.longWritable, this.holder.set('d', value));
verticesDropped++;
}
DEFAULT_COMPAT.incrementContextCounter(context, Counters.VERTICES_DROPPED, verticesDropped);
DEFAULT_COMPAT.incrementContextCounter(context, Counters.VERTICES_KEPT, verticesKept);
}
}
public static class Combiner extends Reducer<LongWritable, Holder, LongWritable, Holder> {
private final Holder<FaunusVertex> holder = new Holder<FaunusVertex>();
private Configuration faunusConf;
@Override
public void setup(final Combiner.Context context) {
faunusConf = ModifiableHadoopConfiguration.of(DEFAULT_COMPAT.getContextConfiguration(context));
}
@Override
public void reduce(final LongWritable key, final Iterable<Holder> values, final Reducer<LongWritable, Holder, LongWritable, Holder>.Context context) throws IOException, InterruptedException {
FaunusVertex vertex = null;
final Set<Long> ids = new HashSet<Long>();
boolean isDeleted = false;
for (final Holder holder : values) {
char tag = holder.getTag();
if (tag == 'k') {
ids.add(holder.get().getLongId());
// todo: once vertex is found, do individual removes to save memory
} else {
vertex = (FaunusVertex) holder.get();
isDeleted = tag == 'd';
}
}
if (null != vertex) {
if (ids.size() > 0)
vertex.removeEdgesToFrom(ids);
context.write(key, this.holder.set(isDeleted ? 'd' : 'v', vertex));
} else {
// vertex not on the same machine as the vertices being deleted
for (final Long id : ids) {
context.write(key, this.holder.set('k', new FaunusVertex(faunusConf, id)));
}
}
}
}
public static class Reduce extends Reducer<LongWritable, Holder, NullWritable, FaunusVertex> {
private boolean trackState;
@Override
public void setup(final Reducer.Context context) {
this.trackState = context.getConfiguration().getBoolean(Tokens.TITAN_HADOOP_PIPELINE_TRACK_STATE, false);
}
@Override
public void reduce(final LongWritable key, final Iterable<Holder> values, final Reducer<LongWritable, Holder, NullWritable, FaunusVertex>.Context context) throws IOException, InterruptedException {
FaunusVertex vertex = null;
final Set<Long> ids = new HashSet<Long>();
for (final Holder holder : values) {
final char tag = holder.getTag();
if (tag == 'k') {
ids.add(holder.get().getLongId());
// todo: once vertex is found, do individual removes to save memory
} else if (tag == 'v') {
vertex = (FaunusVertex) holder.get();
} else {
vertex = (FaunusVertex) holder.get();
Iterator<Edge> itty = vertex.getEdges(Direction.BOTH).iterator();
while (itty.hasNext()) {
itty.next();
itty.remove();
}
vertex.updateLifeCycle(ElementLifeCycle.Event.REMOVED);
}
}
if (null != vertex) {
if (ids.size() > 0)
vertex.removeEdgesToFrom(ids);
if (this.trackState)
context.write(NullWritable.get(), vertex);
else if (!vertex.isRemoved())
context.write(NullWritable.get(), vertex);
DEFAULT_COMPAT.incrementContextCounter(context, Counters.OUT_EDGES_KEPT, Iterables.size(vertex.getEdges(OUT)));
DEFAULT_COMPAT.incrementContextCounter(context, Counters.IN_EDGES_KEPT, Iterables.size(vertex.getEdges(IN)));
}
}
}
}
| 1no label
|
titan-hadoop-parent_titan-hadoop-core_src_main_java_com_thinkaurelius_titan_hadoop_mapreduce_sideeffect_CommitVerticesMapReduce.java
|
1,878 |
public class ListGridRecord {
protected ListGrid listGrid;
protected String id;
protected List<Field> fields = new ArrayList<Field>();
protected List<Field> hiddenFields = new ArrayList<Field>();
/**
* Convenience map keyed by the field name. Used to guarantee field ordering with header fields within a ListGrid
*/
protected Map<String, Field> fieldMap;
public String getPath() {
return listGrid.getPath() + "/" + id;
}
public boolean getCanLinkToExternalEntity() {
return StringUtils.isNotBlank(listGrid.getExternalEntitySectionKey());
}
public String getExternalEntityPath() {
return listGrid.getExternalEntitySectionKey() + "/" + id;
}
public ListGrid getListGrid() {
return listGrid;
}
public void setListGrid(ListGrid listGrid) {
this.listGrid = listGrid;
}
public String getId() {
return id;
}
public void setId(String id) {
this.id = id;
}
public int getIndex() {
return listGrid.getStartIndex() + listGrid.getRecords().indexOf(this);
}
/**
* Normally you should not be looping through these fields. In order to preserve proper field ordering, instead you
* should loop through {@link ListGrid#getHeaderFields()} and then invoke the {@link #getField(String)} method
* with that header field name.
*
* @return
*/
public List<Field> getFields() {
return fields;
}
public void setFields(List<Field> fields) {
this.fields = fields;
}
public List<Field> getHiddenFields() {
return hiddenFields;
}
public void setHiddenFields(List<Field> hiddenFields) {
this.hiddenFields = hiddenFields;
}
/**
* Returns a {@link Field} in this record for a particular field name. Used when displaying a {@link ListGrid} in order
* to guarantee proper field ordering
*
* @param fieldName
* @return
*/
public Field getField(String fieldName) {
if (fieldMap == null) {
fieldMap = new LinkedHashMap<String, Field>();
for (Field field : fields) {
fieldMap.put(field.getName(), field);
}
for (Field hiddenField : hiddenFields) {
fieldMap.put(hiddenField.getName(), hiddenField);
}
}
Field field = fieldMap.get(fieldName);
// We'll return an empty field if this particular record doesn't have this polymorphic property.
// This prevents NPEs in list grids
if (field == null) {
field = new Field();
}
return field;
}
public void clearFieldMap() {
fieldMap = null;
}
public String getHiddenFieldsJson() {
StringBuilder sb = new StringBuilder();
sb.append("{");
sb.append("\"hiddenFields\":[");
for (int j = 0; j < hiddenFields.size(); j++) {
sb.append("{\"name\":\"");
sb.append(hiddenFields.get(j).getName());
sb.append("\",\"val\":\"");
sb.append(hiddenFields.get(j).getValue());
sb.append("\"}");
if (j < hiddenFields.size()-1) {
sb.append(",");
}
}
sb.append("]}");
return sb.toString();
}
}
| 1no label
|
admin_broadleaf-open-admin-platform_src_main_java_org_broadleafcommerce_openadmin_web_form_component_ListGridRecord.java
|
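For reference, getHiddenFieldsJson above emits JSON of the following shape for two hidden fields named "id" and "version" (field names and values illustrative):
{"hiddenFields":[{"name":"id","val":"42"},{"name":"version","val":"3"}]}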
1,117 |
public class OSQLFunctionSum extends OSQLFunctionMathAbstract {
public static final String NAME = "sum";
private Number sum;
public OSQLFunctionSum() {
super(NAME, 1, -1);
}
public Object execute(final OIdentifiable iCurrentRecord, Object iCurrentResult, final Object[] iParameters,
OCommandContext iContext) {
if (iParameters.length == 1) {
if (iParameters[0] instanceof Number)
sum((Number) iParameters[0]);
else if (OMultiValue.isMultiValue(iParameters[0]))
for (Object n : OMultiValue.getMultiValueIterable(iParameters[0]))
sum((Number) n);
} else {
sum = null;
for (int i = 0; i < iParameters.length; ++i)
sum((Number) iParameters[i]);
}
return sum;
}
protected void sum(final Number value) {
if (value != null) {
if (sum == null)
// FIRST TIME
sum = value;
else
sum = OType.increment(sum, value);
}
}
@Override
public boolean aggregateResults() {
return configuredParameters.length == 1;
}
public String getSyntax() {
return "Syntax error: sum(<field> [,<field>*])";
}
@Override
public Object getResult() {
return sum;
}
@Override
public Object mergeDistributedResult(List<Object> resultsToMerge) {
Number sum = null;
for (Object iParameter : resultsToMerge) {
final Number value = (Number) iParameter;
if (value != null) {
if (sum == null)
// FIRST TIME
sum = value;
else
sum = OType.increment(sum, value);
}
}
return sum;
}
}
| 1no label
|
core_src_main_java_com_orientechnologies_orient_core_sql_functions_math_OSQLFunctionSum.java
|
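A toy invocation of the aggregation above; passing null for the record and context is assumed to be harmless here because execute only touches its parameters (in the real engine the SQL executor supplies them), and Arrays is java.util.Arrays:
// Hedged sketch: one multi-value parameter is summed element by element.
OSQLFunctionSum sum = new OSQLFunctionSum();
Object result = sum.execute(null, null, new Object[] { Arrays.asList(1, 2, 3) }, null);
// result is 6, accumulated through OType.increment()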
286 |
public class PassthroughEncryptionModule implements EncryptionModule {
protected static final Logger LOG = LogManager.getLogger(PassthroughEncryptionModule.class);
protected RuntimeEnvironmentKeyResolver keyResolver = new SystemPropertyRuntimeEnvironmentKeyResolver();
public PassthroughEncryptionModule() {
if ("production".equals(keyResolver.resolveRuntimeEnvironmentKey())) {
LOG.warn("This passthrough encryption module provides NO ENCRYPTION and should NOT be used in production.");
}
}
public String decrypt(String cipherText) {
return cipherText;
}
public String encrypt(String plainText) {
return plainText;
}
}
| 0true
|
common_src_main_java_org_broadleafcommerce_common_encryption_PassthroughEncryptionModule.java
|
298 |
transportService.sendRequest(node, action.name(), request, transportOptions, new BaseTransportResponseHandler<Response>() {
@Override
public Response newInstance() {
return action.newResponse();
}
@Override
public String executor() {
if (request.listenerThreaded()) {
return ThreadPool.Names.GENERIC;
}
return ThreadPool.Names.SAME;
}
@Override
public void handleResponse(Response response) {
listener.onResponse(response);
}
@Override
public void handleException(TransportException exp) {
listener.onFailure(exp);
}
});
| 0true
|
src_main_java_org_elasticsearch_action_TransportActionNodeProxy.java
|
540 |
public class DeleteMappingRequest extends AcknowledgedRequest<DeleteMappingRequest> {
private String[] indices;
private IndicesOptions indicesOptions = IndicesOptions.fromOptions(false, false, true, false);
private String[] types;
DeleteMappingRequest() {
}
/**
* Constructs a new delete mapping request against one or more indices. If nothing is set then
* it will be executed against all indices.
*/
public DeleteMappingRequest(String... indices) {
this.indices = indices;
}
@Override
public ActionRequestValidationException validate() {
ActionRequestValidationException validationException = null;
if (CollectionUtils.isEmpty(types)) {
validationException = addValidationError("mapping type is missing", validationException);
} else {
validationException = checkForEmptyString(validationException, types);
}
if (CollectionUtils.isEmpty(indices)) {
validationException = addValidationError("index is missing", validationException);
} else {
validationException = checkForEmptyString(validationException, indices);
}
return validationException;
}
private ActionRequestValidationException checkForEmptyString(ActionRequestValidationException validationException, String[] strings) {
boolean containsEmptyString = false;
for (String string : strings) {
if (!Strings.hasText(string)) {
containsEmptyString = true;
}
}
if (containsEmptyString) {
validationException = addValidationError("types must not contain empty strings", validationException);
}
return validationException;
}
/**
* Sets the indices this delete mapping operation will execute on.
*/
public DeleteMappingRequest indices(String[] indices) {
this.indices = indices;
return this;
}
/**
* The indices the mappings will be removed from.
*/
public String[] indices() {
return indices;
}
public IndicesOptions indicesOptions() {
return indicesOptions;
}
public DeleteMappingRequest indicesOptions(IndicesOptions indicesOptions) {
this.indicesOptions = indicesOptions;
return this;
}
/**
* The mapping types.
*/
public String[] types() {
return types;
}
/**
* The type of the mappings to remove.
*/
public DeleteMappingRequest types(String... types) {
this.types = types;
return this;
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
indices = in.readStringArray();
indicesOptions = IndicesOptions.readIndicesOptions(in);
types = in.readStringArray();
readTimeout(in);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeStringArrayNullable(indices);
indicesOptions.writeIndicesOptions(out);
out.writeStringArrayNullable(types);
writeTimeout(out);
}
}
| 0true
|
src_main_java_org_elasticsearch_action_admin_indices_mapping_delete_DeleteMappingRequest.java
|
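A minimal sketch of building and validating the request above; the index and type names are placeholders:
// Hedged sketch: a valid request must carry at least one non-empty index and type.
DeleteMappingRequest request = new DeleteMappingRequest("my_index").types("my_type");
ActionRequestValidationException validation = request.validate();
if (validation != null) {
    throw validation;  // an index or type was missing or empty
}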
1,240 |
public final class CeylonClasspathUtil {
private CeylonClasspathUtil() {
// utility class
}
/**
 * Get the Ceylon classpath container from the selection in the Java package view
*
* @param selection
* the selection
* @return
* @throws JavaModelException
*/
public static CeylonProjectModulesContainer getCeylonClasspathContainer(IStructuredSelection selection) {
if (selection == null) {
return null;
}
for (@SuppressWarnings("rawtypes") Iterator it = selection.iterator();
it.hasNext();) {
Object element = it.next();
CeylonProjectModulesContainer cp = (CeylonProjectModulesContainer) CeylonPlugin.adapt(element,
CeylonProjectModulesContainer.class);
if (cp != null) {
return cp;
}
if (element instanceof ClassPathContainer) {
// FIXME: we shouldn't check against internal JDT API, but these objects are not
// adaptable to a useful class
return jdt2CeylonCPC((ClassPathContainer) element);
}
}
return null;
}
/**
 * Work around the non-adaptability of ClassPathContainer
 *
 * @param cpc
 * the container to transform into a CeylonProjectModulesContainer
 * @return the CeylonProjectModulesContainer if it is one, null otherwise
*/
public static CeylonProjectModulesContainer jdt2CeylonCPC(ClassPathContainer cpc) {
IClasspathEntry entry = cpc.getClasspathEntry();
try {
IClasspathContainer icp = JavaCore.getClasspathContainer(entry.getPath(), cpc
.getJavaProject());
if (icp instanceof CeylonProjectModulesContainer) {
return (CeylonProjectModulesContainer) icp;
}
}
catch (JavaModelException e) {
// unless there are issues with the JDT, this should never happen
e.printStackTrace();
}
return null;
}
public static boolean isCeylonClasspathContainer(IPath containerPath) {
return isLanguageModuleClasspathContainer(containerPath) ||
isProjectModulesClasspathContainer(containerPath);
}
public static boolean isLanguageModuleClasspathContainer(IPath containerPath) {
int size = containerPath.segmentCount();
if (size > 0) {
return (containerPath.segment(0).equals(CeylonLanguageModuleContainer.CONTAINER_ID));
}
return false;
}
public static boolean isProjectModulesClasspathContainer(IPath containerPath) {
int size = containerPath.segmentCount();
if (size > 0) {
return (containerPath.segment(0).equals(CeylonProjectModulesContainer.CONTAINER_ID));
}
return false;
}
/**
* Search the Ceylon classpath containers within the specified Java project
*
* @param javaProject
* the project to search into
 * @return the Ceylon classpath containers found, possibly empty
*/
public static List <IClasspathContainer> getCeylonClasspathContainers(
IJavaProject javaProject) {
List<IClasspathContainer> containers = new ArrayList<IClasspathContainer>();
if (FakeProjectManager.isFake(javaProject) || !javaProject.exists()) {
return containers;
}
try {
IClasspathEntry[] entries = javaProject.getRawClasspath();
for (int i = 0; i < entries.length; i++) {
IClasspathEntry entry = entries[i];
if (entry != null && entry.getEntryKind() == IClasspathEntry.CPE_CONTAINER) {
IPath path = entry.getPath();
if (isCeylonClasspathContainer(path)) {
IClasspathContainer cp = JavaCore.getClasspathContainer(path, javaProject);
if (cp instanceof CeylonProjectModulesContainer ||
cp instanceof CeylonLanguageModuleContainer) {
containers.add(cp);
}
}
}
}
}
catch (JavaModelException e) {
// unless there are issues with the JDT, this should never happen
e.printStackTrace();
}
return containers;
}
public static CeylonProjectModulesContainer getCeylonProjectModulesClasspathContainer(
IJavaProject javaProject) {
try {
IClasspathEntry[] entries = javaProject.getRawClasspath();
for (int i = 0; i < entries.length; i++) {
IClasspathEntry entry = entries[i];
if (entry != null && entry.getEntryKind() == IClasspathEntry.CPE_CONTAINER) {
IPath path = entry.getPath();
if (isProjectModulesClasspathContainer(path)) {
IClasspathContainer cp = JavaCore.getClasspathContainer(path, javaProject);
if (cp instanceof CeylonProjectModulesContainer) {
return (CeylonProjectModulesContainer) cp;
}
}
}
}
}
catch (JavaModelException e) {
// unless there are issues with the JDT, this should never happen
e.printStackTrace();
}
return null;
}
public static List<String> split(String str) {
String[] terms = str.split(",");
List<String> ret = new ArrayList<String>();
for (int i = 0; i < terms.length; i++) {
String t = terms[i].trim();
if (t.length() > 0) {
ret.add(t);
}
}
return ret;
}
public static String concat(Collection<String> list) {
if (list == null) {
return "";
}
StringBuffer b = new StringBuffer();
Iterator<String> it = list.iterator();
while (it.hasNext()) {
b.append(it.next());
if (it.hasNext()) {
b.append(",");
}
}
return b.toString();
}
/**
* Just a verbatim copy of the internal Eclipse function:
* org.eclipse.jdt.internal.corext.javadoc
* .JavaDocLocations#getLibraryJavadocLocation(IClasspathEntry)
*
* @param entry
* @return
*/
public static URL getLibraryJavadocLocation(IClasspathEntry entry) {
if (entry == null) {
throw new IllegalArgumentException("Entry must not be null"); //$NON-NLS-1$
}
int kind = entry.getEntryKind();
if (kind != IClasspathEntry.CPE_LIBRARY && kind != IClasspathEntry.CPE_VARIABLE) {
throw new IllegalArgumentException(
"Entry must be of kind CPE_LIBRARY or " + "CPE_VARIABLE"); //$NON-NLS-1$
}
IClasspathAttribute[] extraAttributes = entry.getExtraAttributes();
for (int i = 0; i < extraAttributes.length; i++) {
IClasspathAttribute attrib = extraAttributes[i];
if (IClasspathAttribute.JAVADOC_LOCATION_ATTRIBUTE_NAME.equals(attrib.getName())) {
try {
return new URL(attrib.getValue());
} catch (MalformedURLException e) {
return null;
}
}
}
return null;
}
/**
 * Search the Ceylon classpath entry within the specified Java project with the specified path
*
* @param containerPath
* the path of the container
* @param javaProject
* the project to search into
 * @return the Ceylon classpath entry if found, otherwise <code>null</code>
*/
public static IClasspathEntry getCeylonClasspathEntry(IPath containerPath,
IJavaProject javaProject) {
if (FakeProjectManager.isFake(javaProject) || !javaProject.exists()) {
return null;
}
try {
IClasspathEntry[] entries = javaProject.getRawClasspath();
for (int i = 0; i < entries.length; i++) {
IClasspathEntry entry = entries[i];
if (entry != null && entry.getEntryKind() == IClasspathEntry.CPE_CONTAINER) {
if (containerPath.equals(entry.getPath())) {
return entry;
}
}
}
}
catch (JavaModelException e) {
// unless there are issues with the JDT, this should never happen
e.printStackTrace();
}
return null;
}
}
| 1no label
|
plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_core_classpath_CeylonClasspathUtil.java
|
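split and concat above are near-inverses once whitespace and empty terms are trimmed; a small round-trip example:
// Hedged sketch of the round trip.
List<String> parts = CeylonClasspathUtil.split(" a, b ,, c ");  // ["a", "b", "c"]
String joined = CeylonClasspathUtil.concat(parts);              // "a,b,c"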
166 |
public interface RetryableRequest {
}
| 0true
|
hazelcast_src_main_java_com_hazelcast_client_RetryableRequest.java
|
1,612 |
public class ODeployDatabaseTask extends OAbstractReplicatedTask {
private static final long serialVersionUID = 1L;
// private static final String BACKUP_DIRECTORY = "tempBackups";
protected final static int CHUNK_MAX_SIZE = 1048576; // 1MB
public ODeployDatabaseTask() {
}
@Override
public Object execute(final OServer iServer, ODistributedServerManager iManager, final ODatabaseDocumentTx database)
throws Exception {
if (!getNodeSource().equals(iManager.getLocalNodeName())) {
final String databaseName = database.getName();
final Lock lock = iManager.getLock(databaseName);
if (lock.tryLock()) {
try {
ODistributedServerLog.warn(this, iManager.getLocalNodeName(), getNodeSource(), DIRECTION.OUT, "deploying database %s...",
databaseName);
final ByteArrayOutputStream out = new ByteArrayOutputStream();
database.backup(out, null, null);
final byte[] buffer = out.toByteArray();
ODistributedServerLog.warn(this, iManager.getLocalNodeName(), getNodeSource(), DIRECTION.OUT,
"sending the compressed database %s over the network, total %s", databaseName,
OFileUtils.getSizeAsString(buffer.length));
return new OBuffer(buffer);
// final File f = new File(BACKUP_DIRECTORY + "/" + database.getName());
// database.backup(new FileOutputStream(f), null);
//
// final ByteArrayOutputStream out = new ByteArrayOutputStream(CHUNK_MAX_SIZE);
// final FileInputStream in = new FileInputStream(f);
// try {
// final long fileSize = f.length();
//
// ODistributedServerLog.warn(this, iManager.getLocalNodeName(), getNodeSource(), DIRECTION.OUT,
// "copying %s to remote node...", OFileUtils.getSizeAsString(fileSize));
//
// for (int byteCopied = 0; byteCopied < fileSize;) {
// byteCopied += OIOUtils.copyStream(in, out, CHUNK_MAX_SIZE);
//
// if ((Boolean) iManager.sendRequest(database.getName(), null, new OCopyDatabaseChunkTask(out.toByteArray()),
// EXECUTION_MODE.RESPONSE)) {
// out.reset();
// }
// }
//
// return "deployed";
// } finally {
// OFileUtils.deleteRecursively(new File(BACKUP_DIRECTORY));
// }
} finally {
lock.unlock();
}
} else
ODistributedServerLog.debug(this, iManager.getLocalNodeName(), getNodeSource(), DIRECTION.NONE,
"skip deploying database %s because another node is doing it", databaseName);
} else
ODistributedServerLog.debug(this, iManager.getLocalNodeName(), getNodeSource(), DIRECTION.NONE,
"skip deploying database from the same node");
return new OBuffer(new byte[0]);
}
public RESULT_STRATEGY getResultStrategy() {
return RESULT_STRATEGY.UNION;
}
public QUORUM_TYPE getQuorumType() {
return QUORUM_TYPE.NONE;
}
@Override
public long getTimeout() {
return 60000;
}
@Override
public String getPayload() {
return null;
}
@Override
public String getName() {
return "deploy_db";
}
@Override
public void writeExternal(final ObjectOutput out) throws IOException {
}
@Override
public void readExternal(final ObjectInput in) throws IOException, ClassNotFoundException {
}
@Override
public OFixUpdateRecordTask getFixTask(ODistributedRequest iRequest, ODistributedResponse iBadResponse,
ODistributedResponse iGoodResponse) {
return null;
}
}
| 1no label | server_src_main_java_com_orientechnologies_orient_server_distributed_task_ODeployDatabaseTask.java | 155 |
archiveStructuredContentTemplate.send(archiveStructuredContentDestination, new MessageCreator() {
public Message createMessage(Session session) throws JMSException {
HashMap<String, String> objectMap = new HashMap<String,String>(2);
objectMap.put("nameKey", baseNameKey);
objectMap.put("typeKey", baseTypeKey);
return session.createObjectMessage(objectMap);
}
});
| 0true | admin_broadleaf-contentmanagement-module_src_main_java_org_broadleafcommerce_cms_structure_message_jms_JMSArchivedStructuredContentPublisher.java | 1 |
public class MultiPaxosServer
{
private AtomicBroadcastSerializer broadcastSerializer;
private ProtocolServer server;
public static void main( String[] args )
throws IOException, InvocationTargetException, IllegalAccessException
{
new MultiPaxosServer().start();
}
protected Cluster cluster;
protected AtomicBroadcast broadcast;
public void start()
throws IOException
{
broadcastSerializer = new AtomicBroadcastSerializer(new ObjectStreamFactory(), new ObjectStreamFactory());
final LifeSupport life = new LifeSupport();
try
{
MessageTimeoutStrategy timeoutStrategy = new MessageTimeoutStrategy( new FixedTimeoutStrategy( 5000 ) )
.timeout( HeartbeatMessage.sendHeartbeat, 200 );
NetworkedServerFactory serverFactory = new NetworkedServerFactory( life,
new MultiPaxosServerFactory( new ClusterConfiguration( "default", StringLogger.SYSTEM ),
new LogbackService( null, null ) ),
timeoutStrategy, new LogbackService( null, null ), new ObjectStreamFactory(), new ObjectStreamFactory() );
ServerIdElectionCredentialsProvider electionCredentialsProvider = new ServerIdElectionCredentialsProvider();
server = serverFactory.newNetworkedServer(
new Config( MapUtil.stringMap(), ClusterSettings.class ),
new InMemoryAcceptorInstanceStore(),
electionCredentialsProvider );
server.addBindingListener( electionCredentialsProvider );
server.addBindingListener( new BindingListener()
{
@Override
public void listeningAt( URI me )
{
System.out.println( "Listening at:" + me );
}
} );
cluster = server.newClient( Cluster.class );
cluster.addClusterListener( new ClusterListener()
{
@Override
public void enteredCluster( ClusterConfiguration clusterConfiguration )
{
System.out.println( "Entered cluster:" + clusterConfiguration );
}
@Override
public void joinedCluster( InstanceId instanceId, URI member )
{
System.out.println( "Joined cluster:" + instanceId + " (at URI " + member +")" );
}
@Override
public void leftCluster( InstanceId instanceId )
{
System.out.println( "Left cluster:" + instanceId );
}
@Override
public void leftCluster()
{
System.out.println( "Left cluster" );
}
@Override
public void elected( String role, InstanceId instanceId, URI electedMember )
{
System.out.println( instanceId + " at URI " + electedMember + " was elected as " + role );
}
@Override
public void unelected( String role, InstanceId instanceId, URI electedMember )
{
System.out.println( instanceId + " at URI " + electedMember + " was removed from " + role );
}
} );
Heartbeat heartbeat = server.newClient( Heartbeat.class );
heartbeat.addHeartbeatListener( new HeartbeatListener()
{
@Override
public void failed( InstanceId server )
{
System.out.println( server + " failed" );
}
@Override
public void alive( InstanceId server )
{
System.out.println( server + " alive" );
}
} );
broadcast = server.newClient( AtomicBroadcast.class );
broadcast.addAtomicBroadcastListener( new AtomicBroadcastListener()
{
@Override
public void receive( Payload value )
{
try
{
System.out.println( broadcastSerializer.receive( value ) );
}
catch ( IOException e )
{
e.printStackTrace();
}
catch ( ClassNotFoundException e )
{
e.printStackTrace();
}
}
} );
life.start();
String command;
BufferedReader reader = new BufferedReader( new InputStreamReader( System.in ) );
while ( (command = reader.readLine()) != null && !command.equals( "quit" ) )
{
String[] arguments = command.split( " " );
Method method = getCommandMethod( arguments[0] );
if ( method != null )
{
String[] realArgs = new String[arguments.length - 1];
System.arraycopy( arguments, 1, realArgs, 0, realArgs.length );
try
{
method.invoke( this, (Object[])realArgs );
}
catch ( IllegalAccessException e )
{
e.printStackTrace();
}
catch ( IllegalArgumentException e )
{
e.printStackTrace();
}
catch ( InvocationTargetException e )
{
e.printStackTrace();
}
}
}
cluster.leave();
}
finally
{
life.shutdown();
System.out.println( "Done" );
}
}
public void logging( String name, String level )
{
LoggerContext loggerContext = (LoggerContext) StaticLoggerBinder.getSingleton().getLoggerFactory();
List<Logger> loggers = loggerContext.getLoggerList();
for ( Logger logger : loggers )
{
if ( logger.getName().startsWith( name ) )
{
logger.setLevel( Level.toLevel( level ) );
}
}
}
public void config()
{
ClusterConfiguration configuration = ((ClusterContext) server.getStateMachines()
.getStateMachine( ClusterMessage.class )
.getContext()).getConfiguration();
Collection<InstanceId> failed = ((HeartbeatContext) server.getStateMachines().getStateMachine( HeartbeatMessage
.class ).getContext()).getFailed();
System.out.println( configuration + " Failed:" + failed );
}
private Method getCommandMethod( String name )
{
for ( Method method : MultiPaxosServer.class.getMethods() )
{
if ( method.getName().equals( name ) )
{
return method;
}
}
return null;
}
}
| 1no label | enterprise_cluster_src_test_java_org_neo4j_cluster_protocol_atomicbroadcast_multipaxos_MultiPaxosServer.java | 3,262 |
public class QueuePermission extends InstancePermission {
private static final int ADD = 0x4;
private static final int READ = 0x8;
private static final int REMOVE = 0x10; // each action flag must occupy its own power-of-two bit
private static final int LISTEN = 0x20;
private static final int ALL = ADD | REMOVE | READ | CREATE | DESTROY | LISTEN;
public QueuePermission(String name, String... actions) {
super(name, actions);
}
@Override
protected int initMask(String[] actions) {
int mask = NONE;
for (String action : actions) {
if (ActionConstants.ACTION_ALL.equals(action)) {
return ALL;
}
if (ActionConstants.ACTION_CREATE.equals(action)) {
mask |= CREATE;
} else if (ActionConstants.ACTION_DESTROY.equals(action)) {
mask |= DESTROY;
} else if (ActionConstants.ACTION_ADD.equals(action)) {
mask |= ADD;
} else if (ActionConstants.ACTION_READ.equals(action)) {
mask |= READ;
} else if (ActionConstants.ACTION_REMOVE.equals(action)) {
mask |= REMOVE;
} else if (ActionConstants.ACTION_LISTEN.equals(action)) {
mask |= LISTEN;
}
}
return mask;
}
}
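Permission masks like the one above rely on every action flag occupying its own power-of-two bit, so grants compose with | and checks use & without false positives (CREATE and DESTROY are assumed to be 0x1 and 0x2 in the parent InstancePermission). A standalone sketch of the pattern, independent of the Hazelcast API:

public class FlagMaskDemo {
static final int CREATE = 0x1, DESTROY = 0x2, ADD = 0x4, READ = 0x8;
static final int REMOVE = 0x10; // 0x16 (0b10110) would collide with the DESTROY and ADD bits
static final int LISTEN = 0x20; // 0x32 (0b110010) would collide with the DESTROY and REMOVE bits

public static void main(String[] args) {
int mask = ADD | READ;
System.out.println((mask & ADD) != 0);    // true: ADD is granted
System.out.println((mask & REMOVE) != 0); // false: REMOVE is not granted
}
}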
| 1no label | hazelcast_src_main_java_com_hazelcast_security_permission_QueuePermission.java | 270 |
public class EmailInfo implements Serializable {
private static final long serialVersionUID = 1L;
private String emailType;
private String emailTemplate;
private String subject;
private String fromAddress;
private String messageBody;
private List<Attachment> attachments = new ArrayList<Attachment>();
private String sendEmailReliableAsync;
private String sendAsyncPriority;
/**
* @return the emailType
*/
public String getEmailType() {
return emailType;
}
/**
* @param emailType the emailType to set
*/
public void setEmailType(String emailType) {
this.emailType = emailType;
}
/**
* @return the emailTemplate
*/
public String getEmailTemplate() {
return emailTemplate;
}
/**
* @param emailTemplate the emailTemplate to set
*/
public void setEmailTemplate(String emailTemplate) {
this.emailTemplate = emailTemplate;
}
/**
* @return the subject
*/
public String getSubject() {
return subject;
}
/**
* @param subject the subject to set
*/
public void setSubject(String subject) {
this.subject = subject;
}
/**
* @return the fromAddress
*/
public String getFromAddress() {
return fromAddress;
}
/**
* @param fromAddress the fromAddress to set
*/
public void setFromAddress(String fromAddress) {
this.fromAddress = fromAddress;
}
/**
* @return the sendEmailReliableAsync
*/
public String getSendEmailReliableAsync() {
return sendEmailReliableAsync;
}
/**
* @param sendEmailReliableAsync the sendEmailReliableAsync to set
*/
public void setSendEmailReliableAsync(String sendEmailReliableAsync) {
this.sendEmailReliableAsync = sendEmailReliableAsync;
}
/**
* @return the sendAsyncPriority
*/
public String getSendAsyncPriority() {
return sendAsyncPriority;
}
/**
* @param sendAsyncPriority the sendAsyncPriority to set
*/
public void setSendAsyncPriority(String sendAsyncPriority) {
this.sendAsyncPriority = sendAsyncPriority;
}
public String getMessageBody() {
return messageBody;
}
public void setMessageBody(String messageBody) {
this.messageBody = messageBody;
}
public List<Attachment> getAttachments() {
return attachments;
}
public void setAttachments(List<Attachment> attachments) {
this.attachments = attachments;
}
public synchronized EmailInfo clone() {
EmailInfo info = new EmailInfo();
info.setAttachments(attachments);
info.setEmailTemplate(emailTemplate);
info.setEmailType(emailType);
info.setFromAddress(fromAddress);
info.setMessageBody(messageBody);
info.setSendAsyncPriority(sendAsyncPriority);
info.setSendEmailReliableAsync(sendEmailReliableAsync);
info.setSubject(subject);
return info;
}
}
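One caveat in clone() above: the attachments list is copied by reference, so adding an attachment to the clone also mutates the original. If independent copies are needed, the minimal fix is a defensive copy of the list (sketch):

// inside clone(): give the copy its own list instead of sharing the reference
info.setAttachments(new ArrayList<Attachment>(attachments));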
| 0true | common_src_main_java_org_broadleafcommerce_common_email_service_info_EmailInfo.java | 3,696 |
public abstract class AbstractExecutorThreadFactory implements ThreadFactory {
protected final ClassLoader classLoader;
protected final ThreadGroup threadGroup;
public AbstractExecutorThreadFactory(ThreadGroup threadGroup, ClassLoader classLoader) {
this.threadGroup = threadGroup;
this.classLoader = classLoader;
}
@Override
public final Thread newThread(Runnable r) {
final Thread t = createThread(r);
ClassLoader cl = classLoader != null ? classLoader : Thread.currentThread().getContextClassLoader();
t.setContextClassLoader(cl);
if (t.isDaemon()) {
t.setDaemon(false);
}
if (t.getPriority() != Thread.NORM_PRIORITY) {
t.setPriority(Thread.NORM_PRIORITY);
}
return t;
}
protected abstract Thread createThread(Runnable r);
}
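A concrete factory only has to implement createThread; the final newThread above then fixes the context class loader, clears the daemon flag, and normalizes the priority. A minimal hypothetical subclass:

public class NamedThreadFactory extends AbstractExecutorThreadFactory {
private final String namePrefix;
private int counter; // access serialized through the synchronized method below

public NamedThreadFactory(ThreadGroup group, ClassLoader classLoader, String namePrefix) {
super(group, classLoader);
this.namePrefix = namePrefix;
}

@Override
protected synchronized Thread createThread(Runnable r) {
return new Thread(threadGroup, r, namePrefix + "-" + (++counter));
}
}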
| 1no label | hazelcast_src_main_java_com_hazelcast_util_executor_AbstractExecutorThreadFactory.java | 723 |
createIndexAction.execute(new CreateIndexRequest(request.index()).cause("auto(delete api)").masterNodeTimeout(request.timeout()), new ActionListener<CreateIndexResponse>() {
@Override
public void onResponse(CreateIndexResponse result) {
innerExecute(request, listener);
}
@Override
public void onFailure(Throwable e) {
if (ExceptionsHelper.unwrapCause(e) instanceof IndexAlreadyExistsException) {
// we have the index, do it
innerExecute(request, listener);
} else {
listener.onFailure(e);
}
}
});
| 0true | src_main_java_org_elasticsearch_action_delete_TransportDeleteAction.java | 733 |
public class DeleteByQueryRequestBuilder extends IndicesReplicationOperationRequestBuilder<DeleteByQueryRequest, DeleteByQueryResponse, DeleteByQueryRequestBuilder> {
private QuerySourceBuilder sourceBuilder;
public DeleteByQueryRequestBuilder(Client client) {
super((InternalClient) client, new DeleteByQueryRequest());
}
/**
* The types of documents the query will run against. Defaults to all types.
*/
public DeleteByQueryRequestBuilder setTypes(String... types) {
request.types(types);
return this;
}
/**
* A comma separated list of routing values to control the shards the action will be executed on.
*/
public DeleteByQueryRequestBuilder setRouting(String routing) {
request.routing(routing);
return this;
}
/**
* The routing values to control the shards that the action will be executed on.
*/
public DeleteByQueryRequestBuilder setRouting(String... routing) {
request.routing(routing);
return this;
}
/**
* The query to delete documents for.
*
* @see org.elasticsearch.index.query.QueryBuilders
*/
public DeleteByQueryRequestBuilder setQuery(QueryBuilder queryBuilder) {
sourceBuilder().setQuery(queryBuilder);
return this;
}
/**
* The source to execute. It is preferable to use either {@link #setSource(byte[])}
* or {@link #setQuery(QueryBuilder)}.
*/
public DeleteByQueryRequestBuilder setSource(String source) {
request().source(source);
return this;
}
/**
* The source to execute in the form of a map.
*/
public DeleteByQueryRequestBuilder setSource(Map<String, Object> source) {
request().source(source);
return this;
}
/**
* The source to execute in the form of a builder.
*/
public DeleteByQueryRequestBuilder setSource(XContentBuilder builder) {
request().source(builder);
return this;
}
/**
* The source to execute.
*/
public DeleteByQueryRequestBuilder setSource(byte[] source) {
request().source(source);
return this;
}
/**
* The source to execute.
*/
public DeleteByQueryRequestBuilder setSource(BytesReference source) {
request().source(source, false);
return this;
}
/**
* The source to execute.
*/
public DeleteByQueryRequestBuilder setSource(BytesReference source, boolean unsafe) {
request().source(source, unsafe);
return this;
}
/**
* The source to execute.
*/
public DeleteByQueryRequestBuilder setSource(byte[] source, int offset, int length, boolean unsafe) {
request().source(source, offset, length, unsafe);
return this;
}
/**
* The replication type to use with this operation.
*/
public DeleteByQueryRequestBuilder setReplicationType(ReplicationType replicationType) {
request.replicationType(replicationType);
return this;
}
/**
* The replication type to use with this operation.
*/
public DeleteByQueryRequestBuilder setReplicationType(String replicationType) {
request.replicationType(replicationType);
return this;
}
public DeleteByQueryRequestBuilder setConsistencyLevel(WriteConsistencyLevel consistencyLevel) {
request.consistencyLevel(consistencyLevel);
return this;
}
@Override
protected void doExecute(ActionListener<DeleteByQueryResponse> listener) {
if (sourceBuilder != null) {
request.source(sourceBuilder);
}
((Client) client).deleteByQuery(request, listener);
}
private QuerySourceBuilder sourceBuilder() {
if (sourceBuilder == null) {
sourceBuilder = new QuerySourceBuilder();
}
return sourceBuilder;
}
}
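Typical use of the builder above chains the setters and executes synchronously; prepareDeleteByQuery comes from the Client interface of the same Elasticsearch generation, and the index, type, and field names here are assumptions:

DeleteByQueryResponse response = client.prepareDeleteByQuery("orders")
.setTypes("order")
.setQuery(QueryBuilders.termQuery("status", "cancelled"))
.execute()
.actionGet();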
| 0true | src_main_java_org_elasticsearch_action_deletebyquery_DeleteByQueryRequestBuilder.java | 347 |
Thread t = new Thread(new Runnable() {
@Override
public void run() {
try {
Thread.sleep(request.delay.millis());
} catch (InterruptedException e) {
// ignore
}
// first, stop the cluster service
logger.trace("[cluster_shutdown]: stopping the cluster service so no re-routing will occur");
clusterService.stop();
final CountDownLatch latch = new CountDownLatch(nodes.size());
for (ObjectCursor<DiscoveryNode> cursor : nodes) {
final DiscoveryNode node = cursor.value;
if (node.id().equals(state.nodes().masterNodeId())) {
// don't shutdown the master yet...
latch.countDown();
} else {
logger.trace("[cluster_shutdown]: sending shutdown request to [{}]", node);
transportService.sendRequest(node, NodeShutdownRequestHandler.ACTION, new NodeShutdownRequest(request), new EmptyTransportResponseHandler(ThreadPool.Names.SAME) {
@Override
public void handleResponse(TransportResponse.Empty response) {
logger.trace("[cluster_shutdown]: received shutdown response from [{}]", node);
latch.countDown();
}
@Override
public void handleException(TransportException exp) {
logger.warn("[cluster_shutdown]: received failed shutdown response from [{}]", exp, node);
latch.countDown();
}
});
}
}
try {
latch.await();
} catch (InterruptedException e) {
// ignore
}
logger.info("[cluster_shutdown]: done shutting down all nodes except master, proceeding to master");
// now, kill the master
logger.trace("[cluster_shutdown]: shutting down the master [{}]", state.nodes().masterNode());
transportService.sendRequest(state.nodes().masterNode(), NodeShutdownRequestHandler.ACTION, new NodeShutdownRequest(request), new EmptyTransportResponseHandler(ThreadPool.Names.SAME) {
@Override
public void handleResponse(TransportResponse.Empty response) {
logger.trace("[cluster_shutdown]: received shutdown response from master");
}
@Override
public void handleException(TransportException exp) {
logger.warn("[cluster_shutdown]: received failed shutdown response master", exp);
}
});
}
});
| 0true | src_main_java_org_elasticsearch_action_admin_cluster_node_shutdown_TransportNodesShutdownAction.java | 8 |
public class LabelAbbreviationsTest {
@Test
public void getAbbreviation() throws SecurityException, NoSuchFieldException, IllegalArgumentException, IllegalAccessException {
AbbreviationsImpl availableAbbreviations = new AbbreviationsImpl("value");
availableAbbreviations.addPhrase("Amps", Collections.singletonList("A"));
availableAbbreviations.addPhrase("BCA1", Collections.<String>emptyList());
availableAbbreviations.addPhrase("Ch1", Collections.<String>emptyList());
availableAbbreviations.addPhrase("Serial", Collections.<String>emptyList());
AbbreviationSettings aSettings = new AbbreviationSettings("fullLabel", availableAbbreviations, new LabelAbbreviations());
String abbreviatedLabel = aSettings.getAbbreviatedLabel();
Assert.assertEquals(abbreviatedLabel, "Amps BCA1 Ch1 Serial");
LabelAbbreviations available2 = aSettings.getAbbreviations();
Assert.assertEquals(available2.getAbbreviation("BCA1"), "BCA1");
Assert.assertEquals(available2.getAbbreviation("Amps"), "Amps");
// Change the state of the control panel via currentAbbreviations
LabelAbbreviations currentAbbreviations = new LabelAbbreviations();
currentAbbreviations.addAbbreviation("Amps", "A | a | Amp");
currentAbbreviations.addAbbreviation("BCA1", "B | bca1");
currentAbbreviations.addAbbreviation("CAT", "C");
currentAbbreviations.addAbbreviation("DOG", "D");
currentAbbreviations.addAbbreviation("Ace", "ace");
currentAbbreviations.addAbbreviation("Abb", "a");
currentAbbreviations.addAbbreviation("Rabbit", "R");
AbbreviationSettings a2Settings = new AbbreviationSettings("fullLabel", availableAbbreviations, currentAbbreviations);
LabelAbbreviations available2afterSelect = a2Settings.getAbbreviations();
Assert.assertEquals(available2afterSelect.getAbbreviation("BCA1"), "B | bca1");
Assert.assertEquals(available2afterSelect.getAbbreviation("Amps"), "A | a | Amp");
Map<String, String> map = getAbbreviations(currentAbbreviations);
Assert.assertEquals(map.size(), 7);
}
private Map<String, String> getAbbreviations(
LabelAbbreviations currentAbbreviations) throws SecurityException, NoSuchFieldException, IllegalArgumentException, IllegalAccessException {
Field f = currentAbbreviations.getClass().getDeclaredField("abbreviations"); //NoSuchFieldException
f.setAccessible(true);
@SuppressWarnings("unchecked")
Map<String, String> map = (HashMap<String,String>) f.get(currentAbbreviations); //IllegalAccessException
return map;
}
}
| 0true | tableViews_src_test_java_gov_nasa_arc_mct_abbreviation_impl_LabelAbbreviationsTest.java | 4,492 |
class FinalizeRecoveryRequestHandler extends BaseTransportRequestHandler<RecoveryFinalizeRecoveryRequest> {
@Override
public RecoveryFinalizeRecoveryRequest newInstance() {
return new RecoveryFinalizeRecoveryRequest();
}
@Override
public String executor() {
return ThreadPool.Names.GENERIC;
}
@Override
public void messageReceived(RecoveryFinalizeRecoveryRequest request, TransportChannel channel) throws Exception {
RecoveryStatus onGoingRecovery = onGoingRecoveries.get(request.recoveryId());
if (onGoingRecovery == null) {
// shard is getting closed on us
throw new IndexShardClosedException(request.shardId());
}
if (onGoingRecovery.isCanceled()) {
onGoingRecovery.sentCanceledToSource = true;
throw new IndexShardClosedException(request.shardId());
}
onGoingRecovery.stage = RecoveryStatus.Stage.FINALIZE;
onGoingRecovery.indexShard.performRecoveryFinalization(false, onGoingRecovery);
onGoingRecovery.time = System.currentTimeMillis() - onGoingRecovery.startTime;
onGoingRecovery.stage = RecoveryStatus.Stage.DONE;
channel.sendResponse(TransportResponse.Empty.INSTANCE);
}
}
| 1no label | src_main_java_org_elasticsearch_indices_recovery_RecoveryTarget.java | 1,137 |
public class OSQLMethodAsInteger extends OAbstractSQLMethod {
public static final String NAME = "asinteger";
public OSQLMethodAsInteger() {
super(NAME);
}
@Override
public Object execute(OIdentifiable iCurrentRecord, OCommandContext iContext, Object ioResult, Object[] iMethodParams) {
if (ioResult instanceof Number) {
ioResult = ((Number) ioResult).intValue();
} else {
ioResult = ioResult != null ? Integer.valueOf(ioResult.toString().trim()) : null;
}
return ioResult;
}
}
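The conversion above truncates numbers via Number.intValue() and trims, then parses, anything else. A hypothetical query through the Java API (the Product class and price field are assumptions):

List<ODocument> result = database.query(
new OSQLSynchQuery<ODocument>("SELECT price.asInteger() AS intPrice FROM Product"));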
| 1no label | core_src_main_java_com_orientechnologies_orient_core_sql_method_misc_OSQLMethodAsInteger.java | 439 |
static final class Fields {
static final XContentBuilderString TOTAL = new XContentBuilderString("total");
static final XContentBuilderString MASTER_ONLY = new XContentBuilderString("master_only");
static final XContentBuilderString DATA_ONLY = new XContentBuilderString("data_only");
static final XContentBuilderString MASTER_DATA = new XContentBuilderString("master_data");
static final XContentBuilderString CLIENT = new XContentBuilderString("client");
}
| 0true | src_main_java_org_elasticsearch_action_admin_cluster_stats_ClusterStatsNodes.java | 3,259 |
public abstract class NumberComparatorBase<T> extends NestedWrappableComparator<T> {
/**
* Adds numeric value at the specified doc to the specified slot.
*
* @param slot The specified slot
* @param doc The specified doc
*/
public abstract void add(int slot, int doc);
/**
* Divides the value at the specified slot with the specified divisor.
*
* @param slot The specified slot
* @param divisor The specified divisor
*/
public abstract void divide(int slot, int divisor);
}
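The add/divide hooks above exist so multi-valued numeric fields can be sorted by the average of their values: each document value is accumulated into a sort slot, then divided by the value count. A standalone sketch of that accumulate-then-average idea (not the full Lucene FieldComparator contract):

public class AvgSlots {
private final double[] values = new double[8]; // one accumulator per sort slot

void add(int slot, double docValue) {
values[slot] += docValue; // accumulate this document's value
}

void divide(int slot, int divisor) {
values[slot] /= divisor; // the accumulated sum becomes an average
}

public static void main(String[] args) {
AvgSlots s = new AvgSlots();
s.add(0, 2.0);
s.add(0, 4.0);
s.divide(0, 2);
System.out.println(s.values[0]); // 3.0
}
}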
| 1no label | src_main_java_org_elasticsearch_index_fielddata_fieldcomparator_NumberComparatorBase.java | 3,291 |
protected static class Empty extends AtomicGeoPointFieldData<ScriptDocValues> {
private final int numDocs;
Empty(int numDocs) {
this.numDocs = numDocs;
}
@Override
public boolean isMultiValued() {
return false;
}
@Override
public boolean isValuesOrdered() {
return false;
}
@Override
public long getNumberUniqueValues() {
return 0;
}
@Override
public long getMemorySizeInBytes() {
return 0;
}
@Override
public BytesValues getBytesValues(boolean needsHashes) {
return BytesValues.EMPTY;
}
@Override
public GeoPointValues getGeoPointValues() {
return GeoPointValues.EMPTY;
}
@Override
public ScriptDocValues getScriptValues() {
return ScriptDocValues.EMPTY;
}
@Override
public int getNumDocs() {
return numDocs;
}
@Override
public void close() {
// no-op
}
}
| 1no label | src_main_java_org_elasticsearch_index_fielddata_plain_AbstractGeoPointIndexFieldData.java | 26 |
public class CacheLayer implements StoreReadLayer
{
private static final Function<? super SchemaRule, IndexDescriptor> TO_INDEX_RULE =
new Function<SchemaRule, IndexDescriptor>()
{
@Override
public IndexDescriptor apply( SchemaRule from )
{
IndexRule rule = (IndexRule) from;
// We know that we only have int range of property key ids.
return new IndexDescriptor( rule.getLabel(), rule.getPropertyKey() );
}
};
private final CacheLoader<Iterator<DefinedProperty>> nodePropertyLoader = new CacheLoader<Iterator<DefinedProperty>>()
{
@Override
public Iterator<DefinedProperty> load( long id ) throws EntityNotFoundException
{
return diskLayer.nodeGetAllProperties( id );
}
};
private final CacheLoader<Iterator<DefinedProperty>> relationshipPropertyLoader = new CacheLoader<Iterator<DefinedProperty>>()
{
@Override
public Iterator<DefinedProperty> load( long id ) throws EntityNotFoundException
{
return diskLayer.relationshipGetAllProperties( id );
}
};
private final CacheLoader<Iterator<DefinedProperty>> graphPropertyLoader = new CacheLoader<Iterator<DefinedProperty>>()
{
@Override
public Iterator<DefinedProperty> load( long id ) throws EntityNotFoundException
{
return diskLayer.graphGetAllProperties();
}
};
private final CacheLoader<int[]> nodeLabelLoader = new CacheLoader<int[]>()
{
@Override
public int[] load( long id ) throws EntityNotFoundException
{
return primitiveIntIteratorToIntArray( diskLayer.nodeGetLabels( id ) );
}
};
private final PersistenceCache persistenceCache;
private final SchemaCache schemaCache;
private final DiskLayer diskLayer;
private final IndexingService indexingService;
public CacheLayer(
DiskLayer diskLayer,
PersistenceCache persistenceCache,
IndexingService indexingService,
SchemaCache schemaCache )
{
this.diskLayer = diskLayer;
this.persistenceCache = persistenceCache;
this.indexingService = indexingService;
this.schemaCache = schemaCache;
}
@Override
public boolean nodeHasLabel( KernelStatement state, long nodeId, int labelId ) throws EntityNotFoundException
{
return persistenceCache.nodeHasLabel( state, nodeId, labelId, nodeLabelLoader );
}
@Override
public PrimitiveIntIterator nodeGetLabels( KernelStatement state, long nodeId ) throws EntityNotFoundException
{
return new PrimitiveIntIteratorForArray( persistenceCache.nodeGetLabels( state, nodeId, nodeLabelLoader ) );
}
@Override
public Iterator<IndexDescriptor> indexesGetForLabel( KernelStatement state, int labelId )
{
return toIndexDescriptors( schemaCache.schemaRulesForLabel( labelId ), SchemaRule.Kind.INDEX_RULE );
}
@Override
public Iterator<IndexDescriptor> indexesGetAll( KernelStatement state )
{
return toIndexDescriptors( schemaCache.schemaRules(), SchemaRule.Kind.INDEX_RULE );
}
@Override
public Iterator<IndexDescriptor> uniqueIndexesGetForLabel( KernelStatement state, int labelId )
{
return toIndexDescriptors( schemaCache.schemaRulesForLabel( labelId ),
SchemaRule.Kind.CONSTRAINT_INDEX_RULE );
}
@Override
public Iterator<IndexDescriptor> uniqueIndexesGetAll( KernelStatement state )
{
return toIndexDescriptors( schemaCache.schemaRules(), SchemaRule.Kind.CONSTRAINT_INDEX_RULE );
}
private static Iterator<IndexDescriptor> toIndexDescriptors( Iterable<SchemaRule> rules,
final SchemaRule.Kind kind )
{
Iterator<SchemaRule> filteredRules = filter( new Predicate<SchemaRule>()
{
@Override
public boolean accept( SchemaRule item )
{
return item.getKind() == kind;
}
}, rules.iterator() );
return map( TO_INDEX_RULE, filteredRules );
}
@Override
public Long indexGetOwningUniquenessConstraintId( KernelStatement state, IndexDescriptor index )
throws SchemaRuleNotFoundException
{
IndexRule rule = indexRule( index, SchemaStorage.IndexRuleKind.ALL );
if ( rule != null )
{
return rule.getOwningConstraint();
}
return diskLayer.indexGetOwningUniquenessConstraintId( index );
}
@Override
public long indexGetCommittedId( KernelStatement state, IndexDescriptor index, SchemaStorage.IndexRuleKind kind )
throws SchemaRuleNotFoundException
{
IndexRule rule = indexRule( index, kind );
if ( rule != null )
{
return rule.getId();
}
return diskLayer.indexGetCommittedId( index );
}
@Override
public IndexRule indexRule( IndexDescriptor index, SchemaStorage.IndexRuleKind kind )
{
for ( SchemaRule rule : schemaCache.schemaRulesForLabel( index.getLabelId() ) )
{
if ( rule instanceof IndexRule )
{
IndexRule indexRule = (IndexRule) rule;
if ( kind.isOfKind( indexRule ) && indexRule.getPropertyKey() == index.getPropertyKeyId() )
{
return indexRule;
}
}
}
return null;
}
@Override
public PrimitiveLongIterator nodeGetPropertyKeys( KernelStatement state, long nodeId ) throws EntityNotFoundException
{
return persistenceCache.nodeGetPropertyKeys( nodeId, nodePropertyLoader );
}
@Override
public Property nodeGetProperty( KernelStatement state, long nodeId, int propertyKeyId ) throws EntityNotFoundException
{
return persistenceCache.nodeGetProperty( nodeId, propertyKeyId, nodePropertyLoader );
}
@Override
public Iterator<DefinedProperty> nodeGetAllProperties( KernelStatement state, long nodeId ) throws EntityNotFoundException
{
return persistenceCache.nodeGetProperties( nodeId, nodePropertyLoader );
}
@Override
public PrimitiveLongIterator relationshipGetPropertyKeys( KernelStatement state, long relationshipId )
throws EntityNotFoundException
{
return new PropertyKeyIdIterator( relationshipGetAllProperties( state, relationshipId ) );
}
@Override
public Property relationshipGetProperty( KernelStatement state, long relationshipId, int propertyKeyId )
throws EntityNotFoundException
{
return persistenceCache.relationshipGetProperty( relationshipId, propertyKeyId,
relationshipPropertyLoader );
}
@Override
public Iterator<DefinedProperty> relationshipGetAllProperties( KernelStatement state, long nodeId )
throws EntityNotFoundException
{
return persistenceCache.relationshipGetProperties( nodeId, relationshipPropertyLoader );
}
@Override
public PrimitiveLongIterator graphGetPropertyKeys( KernelStatement state )
{
return persistenceCache.graphGetPropertyKeys( graphPropertyLoader );
}
@Override
public Property graphGetProperty( KernelStatement state, int propertyKeyId )
{
return persistenceCache.graphGetProperty( graphPropertyLoader, propertyKeyId );
}
@Override
public Iterator<DefinedProperty> graphGetAllProperties( KernelStatement state )
{
return persistenceCache.graphGetProperties( graphPropertyLoader );
}
@Override
public Iterator<UniquenessConstraint> constraintsGetForLabelAndPropertyKey(
KernelStatement state, int labelId, int propertyKeyId )
{
return schemaCache.constraintsForLabelAndProperty( labelId, propertyKeyId );
}
@Override
public Iterator<UniquenessConstraint> constraintsGetForLabel( KernelStatement state, int labelId )
{
return schemaCache.constraintsForLabel( labelId );
}
@Override
public Iterator<UniquenessConstraint> constraintsGetAll( KernelStatement state )
{
return schemaCache.constraints();
}
@Override
public PrimitiveLongIterator nodeGetUniqueFromIndexLookup(
KernelStatement state,
IndexDescriptor index,
Object value )
throws IndexNotFoundKernelException, IndexBrokenKernelException
{
return diskLayer.nodeGetUniqueFromIndexLookup( state, schemaCache.indexId( index ), value );
}
@Override
public PrimitiveLongIterator nodesGetForLabel( KernelStatement state, int labelId )
{
return diskLayer.nodesGetForLabel( state, labelId );
}
@Override
public PrimitiveLongIterator nodesGetFromIndexLookup( KernelStatement state, IndexDescriptor index, Object value )
throws IndexNotFoundKernelException
{
return diskLayer.nodesGetFromIndexLookup( state, schemaCache.indexId( index ), value );
}
@Override
public IndexDescriptor indexesGetForLabelAndPropertyKey( KernelStatement state, int labelId, int propertyKey )
throws SchemaRuleNotFoundException
{
return schemaCache.indexDescriptor( labelId, propertyKey );
}
@Override
public InternalIndexState indexGetState( KernelStatement state, IndexDescriptor descriptor )
throws IndexNotFoundKernelException
{
return indexingService.getProxyForRule( schemaCache.indexId( descriptor ) ).getState();
}
@Override
public String indexGetFailure( Statement state, IndexDescriptor descriptor ) throws IndexNotFoundKernelException
{
return diskLayer.indexGetFailure( descriptor );
}
@Override
public int labelGetForName( String labelName )
{
return diskLayer.labelGetForName( labelName );
}
@Override
public String labelGetName( int labelId ) throws LabelNotFoundKernelException
{
return diskLayer.labelGetName( labelId );
}
@Override
public int propertyKeyGetForName( String propertyKeyName )
{
return diskLayer.propertyKeyGetForName( propertyKeyName );
}
@Override
public int propertyKeyGetOrCreateForName( String propertyKeyName )
{
return diskLayer.propertyKeyGetOrCreateForName( propertyKeyName );
}
@Override
public String propertyKeyGetName( int propertyKeyId ) throws PropertyKeyIdNotFoundKernelException
{
return diskLayer.propertyKeyGetName( propertyKeyId );
}
@Override
public Iterator<Token> propertyKeyGetAllTokens()
{
return diskLayer.propertyKeyGetAllTokens().iterator();
}
@Override
public Iterator<Token> labelsGetAllTokens()
{
return diskLayer.labelGetAllTokens().iterator();
}
@Override
public int relationshipTypeGetForName( String relationshipTypeName )
{
return diskLayer.relationshipTypeGetForName( relationshipTypeName );
}
@Override
public String relationshipTypeGetName( int relationshipTypeId ) throws RelationshipTypeIdNotFoundKernelException
{
return diskLayer.relationshipTypeGetName( relationshipTypeId );
}
@Override
public int labelGetOrCreateForName( String labelName ) throws TooManyLabelsException
{
return diskLayer.labelGetOrCreateForName( labelName );
}
@Override
public int relationshipTypeGetOrCreateForName( String relationshipTypeName )
{
return diskLayer.relationshipTypeGetOrCreateForName( relationshipTypeName );
}
}
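The CacheLayer above is a read-through cache: every accessor consults persistenceCache first and falls back to diskLayer through the CacheLoader callbacks only on a miss. A standalone sketch of that pattern (hypothetical names, not the Neo4j API):

interface Loader<V> {
V load(long id) throws Exception;
}

class ReadThroughCache<V> {
private final java.util.Map<Long, V> cache = new java.util.HashMap<Long, V>();

V get(long id, Loader<V> loader) throws Exception {
V v = cache.get(id);
if (v == null) {
v = loader.load(id); // miss: delegate to the backing store, e.g. disk
cache.put(id, v);
}
return v;
}
}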
| 1no label | community_kernel_src_main_java_org_neo4j_kernel_impl_api_store_CacheLayer.java | 4,476 |
public class RecoverySource extends AbstractComponent {
public static class Actions {
public static final String START_RECOVERY = "index/shard/recovery/startRecovery";
}
private final TransportService transportService;
private final IndicesService indicesService;
private final RecoverySettings recoverySettings;
private final ClusterService clusterService;
private final TimeValue internalActionTimeout;
private final TimeValue internalActionLongTimeout;
@Inject
public RecoverySource(Settings settings, TransportService transportService, IndicesService indicesService,
RecoverySettings recoverySettings, ClusterService clusterService) {
super(settings);
this.transportService = transportService;
this.indicesService = indicesService;
this.clusterService = clusterService;
this.recoverySettings = recoverySettings;
transportService.registerHandler(Actions.START_RECOVERY, new StartRecoveryTransportRequestHandler());
this.internalActionTimeout = componentSettings.getAsTime("internal_action_timeout", TimeValue.timeValueMinutes(15));
this.internalActionLongTimeout = new TimeValue(internalActionTimeout.millis() * 2);
}
private RecoveryResponse recover(final StartRecoveryRequest request) {
final InternalIndexShard shard = (InternalIndexShard) indicesService.indexServiceSafe(request.shardId().index().name()).shardSafe(request.shardId().id());
// verify that our (the source) shard state is marking the shard to be in recovery mode as well, otherwise
// the index operations will not be routed to it properly
RoutingNode node = clusterService.state().readOnlyRoutingNodes().node(request.targetNode().id());
if (node == null) {
throw new DelayRecoveryException("source node does not have the node [" + request.targetNode() + "] in its state yet..");
}
ShardRouting targetShardRouting = null;
for (ShardRouting shardRouting : node) {
if (shardRouting.shardId().equals(request.shardId())) {
targetShardRouting = shardRouting;
break;
}
}
if (targetShardRouting == null) {
throw new DelayRecoveryException("source node does not have the shard listed in its state as allocated on the node");
}
if (!targetShardRouting.initializing()) {
throw new DelayRecoveryException("source node has the state of the target shard to be [" + targetShardRouting.state() + "], expecting to be [initializing]");
}
logger.trace("[{}][{}] starting recovery to {}, mark_as_relocated {}", request.shardId().index().name(), request.shardId().id(), request.targetNode(), request.markAsRelocated());
final RecoveryResponse response = new RecoveryResponse();
shard.recover(new Engine.RecoveryHandler() {
@Override
public void phase1(final SnapshotIndexCommit snapshot) throws ElasticsearchException {
long totalSize = 0;
long existingTotalSize = 0;
try {
StopWatch stopWatch = new StopWatch().start();
for (String name : snapshot.getFiles()) {
StoreFileMetaData md = shard.store().metaData(name);
boolean useExisting = false;
if (request.existingFiles().containsKey(name)) {
// we don't compute checksum for segments, so always recover them
if (!name.startsWith("segments") && md.isSame(request.existingFiles().get(name))) {
response.phase1ExistingFileNames.add(name);
response.phase1ExistingFileSizes.add(md.length());
existingTotalSize += md.length();
useExisting = true;
if (logger.isTraceEnabled()) {
logger.trace("[{}][{}] recovery [phase1] to {}: not recovering [{}], exists in local store and has checksum [{}], size [{}]", request.shardId().index().name(), request.shardId().id(), request.targetNode(), name, md.checksum(), md.length());
}
}
}
if (!useExisting) {
if (request.existingFiles().containsKey(name)) {
logger.trace("[{}][{}] recovery [phase1] to {}: recovering [{}], exists in local store, but is different: remote [{}], local [{}]", request.shardId().index().name(), request.shardId().id(), request.targetNode(), name, request.existingFiles().get(name), md);
} else {
logger.trace("[{}][{}] recovery [phase1] to {}: recovering [{}], does not exists in remote", request.shardId().index().name(), request.shardId().id(), request.targetNode(), name);
}
response.phase1FileNames.add(name);
response.phase1FileSizes.add(md.length());
}
totalSize += md.length();
}
response.phase1TotalSize = totalSize;
response.phase1ExistingTotalSize = existingTotalSize;
logger.trace("[{}][{}] recovery [phase1] to {}: recovering_files [{}] with total_size [{}], reusing_files [{}] with total_size [{}]", request.shardId().index().name(), request.shardId().id(), request.targetNode(), response.phase1FileNames.size(), new ByteSizeValue(totalSize), response.phase1ExistingFileNames.size(), new ByteSizeValue(existingTotalSize));
RecoveryFilesInfoRequest recoveryInfoFilesRequest = new RecoveryFilesInfoRequest(request.recoveryId(), request.shardId(), response.phase1FileNames, response.phase1FileSizes,
response.phase1ExistingFileNames, response.phase1ExistingFileSizes, response.phase1TotalSize, response.phase1ExistingTotalSize);
transportService.submitRequest(request.targetNode(), RecoveryTarget.Actions.FILES_INFO, recoveryInfoFilesRequest, TransportRequestOptions.options().withTimeout(internalActionTimeout), EmptyTransportResponseHandler.INSTANCE_SAME).txGet();
final CountDownLatch latch = new CountDownLatch(response.phase1FileNames.size());
final AtomicReference<Throwable> lastException = new AtomicReference<Throwable>();
int fileIndex = 0;
for (final String name : response.phase1FileNames) {
ThreadPoolExecutor pool;
long fileSize = response.phase1FileSizes.get(fileIndex);
if (fileSize > recoverySettings.SMALL_FILE_CUTOFF_BYTES) {
pool = recoverySettings.concurrentStreamPool();
} else {
pool = recoverySettings.concurrentSmallFileStreamPool();
}
pool.execute(new Runnable() {
@Override
public void run() {
IndexInput indexInput = null;
try {
final int BUFFER_SIZE = (int) recoverySettings.fileChunkSize().bytes();
byte[] buf = new byte[BUFFER_SIZE];
StoreFileMetaData md = shard.store().metaData(name);
// TODO: maybe use IOContext.READONCE?
indexInput = shard.store().openInputRaw(name, IOContext.READ);
boolean shouldCompressRequest = recoverySettings.compress();
if (CompressorFactory.isCompressed(indexInput)) {
shouldCompressRequest = false;
}
long len = indexInput.length();
long readCount = 0;
while (readCount < len) {
if (shard.state() == IndexShardState.CLOSED) { // check if the shard got closed on us
throw new IndexShardClosedException(shard.shardId());
}
int toRead = readCount + BUFFER_SIZE > len ? (int) (len - readCount) : BUFFER_SIZE;
long position = indexInput.getFilePointer();
if (recoverySettings.rateLimiter() != null) {
recoverySettings.rateLimiter().pause(toRead);
}
indexInput.readBytes(buf, 0, toRead, false);
BytesArray content = new BytesArray(buf, 0, toRead);
transportService.submitRequest(request.targetNode(), RecoveryTarget.Actions.FILE_CHUNK, new RecoveryFileChunkRequest(request.recoveryId(), request.shardId(), name, position, len, md.checksum(), content),
TransportRequestOptions.options().withCompress(shouldCompressRequest).withType(TransportRequestOptions.Type.RECOVERY).withTimeout(internalActionTimeout), EmptyTransportResponseHandler.INSTANCE_SAME).txGet();
readCount += toRead;
}
} catch (Throwable e) {
lastException.set(e);
} finally {
IOUtils.closeWhileHandlingException(indexInput);
latch.countDown();
}
}
});
fileIndex++;
}
latch.await();
if (lastException.get() != null) {
throw lastException.get();
}
// now, set the clean files request
Set<String> snapshotFiles = Sets.newHashSet(snapshot.getFiles());
transportService.submitRequest(request.targetNode(), RecoveryTarget.Actions.CLEAN_FILES, new RecoveryCleanFilesRequest(request.recoveryId(), shard.shardId(), snapshotFiles), TransportRequestOptions.options().withTimeout(internalActionTimeout), EmptyTransportResponseHandler.INSTANCE_SAME).txGet();
stopWatch.stop();
logger.trace("[{}][{}] recovery [phase1] to {}: took [{}]", request.shardId().index().name(), request.shardId().id(), request.targetNode(), stopWatch.totalTime());
response.phase1Time = stopWatch.totalTime().millis();
} catch (Throwable e) {
throw new RecoverFilesRecoveryException(request.shardId(), response.phase1FileNames.size(), new ByteSizeValue(totalSize), e);
}
}
@Override
public void phase2(Translog.Snapshot snapshot) throws ElasticsearchException {
if (shard.state() == IndexShardState.CLOSED) {
throw new IndexShardClosedException(request.shardId());
}
logger.trace("[{}][{}] recovery [phase2] to {}: start", request.shardId().index().name(), request.shardId().id(), request.targetNode());
StopWatch stopWatch = new StopWatch().start();
transportService.submitRequest(request.targetNode(), RecoveryTarget.Actions.PREPARE_TRANSLOG, new RecoveryPrepareForTranslogOperationsRequest(request.recoveryId(), request.shardId()), TransportRequestOptions.options().withTimeout(internalActionTimeout), EmptyTransportResponseHandler.INSTANCE_SAME).txGet();
stopWatch.stop();
response.startTime = stopWatch.totalTime().millis();
logger.trace("[{}][{}] recovery [phase2] to {}: start took [{}]", request.shardId().index().name(), request.shardId().id(), request.targetNode(), stopWatch.totalTime());
logger.trace("[{}][{}] recovery [phase2] to {}: sending transaction log operations", request.shardId().index().name(), request.shardId().id(), request.targetNode());
stopWatch = new StopWatch().start();
int totalOperations = sendSnapshot(snapshot);
stopWatch.stop();
logger.trace("[{}][{}] recovery [phase2] to {}: took [{}]", request.shardId().index().name(), request.shardId().id(), request.targetNode(), stopWatch.totalTime());
response.phase2Time = stopWatch.totalTime().millis();
response.phase2Operations = totalOperations;
}
@Override
public void phase3(Translog.Snapshot snapshot) throws ElasticsearchException {
if (shard.state() == IndexShardState.CLOSED) {
throw new IndexShardClosedException(request.shardId());
}
logger.trace("[{}][{}] recovery [phase3] to {}: sending transaction log operations", request.shardId().index().name(), request.shardId().id(), request.targetNode());
StopWatch stopWatch = new StopWatch().start();
int totalOperations = sendSnapshot(snapshot);
transportService.submitRequest(request.targetNode(), RecoveryTarget.Actions.FINALIZE, new RecoveryFinalizeRecoveryRequest(request.recoveryId(), request.shardId()), TransportRequestOptions.options().withTimeout(internalActionLongTimeout), EmptyTransportResponseHandler.INSTANCE_SAME).txGet();
if (request.markAsRelocated()) {
// TODO what happens if the recovery process fails afterwards, we need to mark this back to started
try {
shard.relocated("to " + request.targetNode());
} catch (IllegalIndexShardStateException e) {
// we can ignore this exception since, on the other node, when it moved to phase3
// it will also send shard started, which might cause the index shard we work against
// to be closed by the time we get to the relocated method
}
}
stopWatch.stop();
logger.trace("[{}][{}] recovery [phase3] to {}: took [{}]", request.shardId().index().name(), request.shardId().id(), request.targetNode(), stopWatch.totalTime());
response.phase3Time = stopWatch.totalTime().millis();
response.phase3Operations = totalOperations;
}
private int sendSnapshot(Translog.Snapshot snapshot) throws ElasticsearchException {
int ops = 0;
long size = 0;
int totalOperations = 0;
List<Translog.Operation> operations = Lists.newArrayList();
while (snapshot.hasNext()) {
if (shard.state() == IndexShardState.CLOSED) {
throw new IndexShardClosedException(request.shardId());
}
Translog.Operation operation = snapshot.next();
operations.add(operation);
ops += 1;
size += operation.estimateSize();
totalOperations++;
if (ops >= recoverySettings.translogOps() || size >= recoverySettings.translogSize().bytes()) {
// don't throttle translog, since we lock for phase3 indexing, so we need to move it as
// fast as possible. Note, since we index docs to replicas while the index files are recovered
// the lock can potentially be removed, in which case, it might make sense to re-enable
// throttling in this phase
// if (recoverySettings.rateLimiter() != null) {
// recoverySettings.rateLimiter().pause(size);
// }
RecoveryTranslogOperationsRequest translogOperationsRequest = new RecoveryTranslogOperationsRequest(request.recoveryId(), request.shardId(), operations);
transportService.submitRequest(request.targetNode(), RecoveryTarget.Actions.TRANSLOG_OPS, translogOperationsRequest, TransportRequestOptions.options().withCompress(recoverySettings.compress()).withType(TransportRequestOptions.Type.RECOVERY).withTimeout(internalActionLongTimeout), EmptyTransportResponseHandler.INSTANCE_SAME).txGet();
ops = 0;
size = 0;
operations.clear();
}
}
// send the leftover
if (!operations.isEmpty()) {
RecoveryTranslogOperationsRequest translogOperationsRequest = new RecoveryTranslogOperationsRequest(request.recoveryId(), request.shardId(), operations);
transportService.submitRequest(request.targetNode(), RecoveryTarget.Actions.TRANSLOG_OPS, translogOperationsRequest, TransportRequestOptions.options().withCompress(recoverySettings.compress()).withType(TransportRequestOptions.Type.RECOVERY).withTimeout(internalActionLongTimeout), EmptyTransportResponseHandler.INSTANCE_SAME).txGet();
}
return totalOperations;
}
});
return response;
}
class StartRecoveryTransportRequestHandler extends BaseTransportRequestHandler<StartRecoveryRequest> {
@Override
public StartRecoveryRequest newInstance() {
return new StartRecoveryRequest();
}
@Override
public String executor() {
return ThreadPool.Names.GENERIC;
}
@Override
public void messageReceived(final StartRecoveryRequest request, final TransportChannel channel) throws Exception {
RecoveryResponse response = recover(request);
channel.sendResponse(response);
}
}
}
| 1no label | src_main_java_org_elasticsearch_indices_recovery_RecoverySource.java | 224 |
public class RuntimeEnvironmentPropertiesManager implements BeanFactoryAware {
private static final Log LOG = LogFactory.getLog(RuntimeEnvironmentPropertiesManager.class);
protected ConfigurableBeanFactory beanFactory;
protected String prefix;
public String getPrefix() {
return prefix;
}
public String setPrefix(String prefix) {
return this.prefix = prefix;
}
public String getProperty(String key, String suffix) {
if (key == null) {
return null;
}
String name = (prefix == null) ? key + "." + suffix : prefix + "." + key + "." + suffix;
String rv = beanFactory.resolveEmbeddedValue("${" + name + "}");
if (rv == null || rv.equals("${" + name + "}")) {
LOG.warn("property ${" + name + "} not found, reverting to property without suffix " + suffix);
rv = getProperty(key);
}
return rv;
}
public String getProperty(String key) {
if (key == null) {
return null;
}
String name = (prefix == null) ? key : prefix + "." + key;
String rv = beanFactory.resolveEmbeddedValue("${" + name + "}");
// guard against null before equals(): resolveEmbeddedValue may return null
if (rv == null || rv.equals("${" + name + "}")) {
return null;
}
return rv;
}
@Override
public void setBeanFactory(BeanFactory beanFactory) throws BeansException {
this.beanFactory = (ConfigurableBeanFactory) beanFactory;
}
}
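Lookup in the manager above goes from most to least specific: ${prefix.key.suffix} first, then ${prefix.key}, then null if neither placeholder resolves. A hypothetical usage, assuming the bean was wired by Spring so setBeanFactory has run, with made-up property names:

// props is a RuntimeEnvironmentPropertiesManager obtained from the Spring context
props.setPrefix("payment");
// tries ${payment.gateway.url.sandbox} first, then falls back to ${payment.gateway.url}
String url = props.getProperty("gateway.url", "sandbox");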
| 0true | common_src_main_java_org_broadleafcommerce_common_config_RuntimeEnvironmentPropertiesManager.java | 191 |
public class TimeDTO {
@AdminPresentation(excluded = true)
private Calendar cal;
@AdminPresentation(friendlyName = "TimeDTO_Hour_Of_Day", fieldType = SupportedFieldType.BROADLEAF_ENUMERATION, broadleafEnumeration = "org.broadleafcommerce.common.time.HourOfDayType")
private Integer hour;
@AdminPresentation(friendlyName = "TimeDTO_Day_Of_Week", fieldType = SupportedFieldType.BROADLEAF_ENUMERATION, broadleafEnumeration = "org.broadleafcommerce.common.time.DayOfWeekType")
private Integer dayOfWeek;
@AdminPresentation(friendlyName = "TimeDTO_Month", fieldType = SupportedFieldType.BROADLEAF_ENUMERATION, broadleafEnumeration = "org.broadleafcommerce.common.time.MonthType")
private Integer month;
@AdminPresentation(friendlyName = "TimeDTO_Day_Of_Month", fieldType = SupportedFieldType.BROADLEAF_ENUMERATION, broadleafEnumeration = "org.broadleafcommerce.common.time.DayOfMonthType")
private Integer dayOfMonth;
@AdminPresentation(friendlyName = "TimeDTO_Minute", fieldType = SupportedFieldType.BROADLEAF_ENUMERATION, broadleafEnumeration = "org.broadleafcommerce.common.time.MinuteType")
private Integer minute;
@AdminPresentation(friendlyName = "TimeDTO_Date")
private Date date;
public TimeDTO() {
cal = SystemTime.asCalendar();
}
public TimeDTO(Calendar cal) {
this.cal = cal;
}
/**
 * @return the hour of day (0 - 23) as an HourOfDayType
 */
public HourOfDayType getHour() {
if (hour == null) {
hour = cal.get(Calendar.HOUR_OF_DAY);
}
return HourOfDayType.getInstance(hour.toString());
}
/**
 * @return the day of week using Calendar.DAY_OF_WEEK values:
 * 1 = Sunday, 7 = Saturday
 */
public DayOfWeekType getDayOfWeek() {
if (dayOfWeek == null) {
dayOfWeek = cal.get(Calendar.DAY_OF_WEEK);
}
return DayOfWeekType.getInstance(dayOfWeek.toString());
}
/**
* @return the current day of the month (1-31).
*/
public DayOfMonthType getDayOfMonth() {
if (dayOfMonth == null) {
dayOfMonth = cal.get(Calendar.DAY_OF_MONTH);
}
return DayOfMonthType.getInstance(dayOfMonth.toString());
}
/**
 * @return the current month following the zero-based Calendar.MONTH convention (0 - 11)
 */
public MonthType getMonth() {
if (month == null) {
month = cal.get(Calendar.MONTH);
}
return MonthType.getInstance(month.toString());
}
/**
 * @return the current minute of the hour (0 - 59)
 */
public MinuteType getMinute() {
if (minute == null) {
minute = cal.get(Calendar.MINUTE);
}
return MinuteType.getInstance(minute.toString());
}
public Date getDate() {
if (date == null) {
date = cal.getTime();
}
return date;
}
public void setCal(Calendar cal) {
this.cal = cal;
}
public void setHour(HourOfDayType hour) {
this.hour = Integer.valueOf(hour.getType());
}
public void setDayOfWeek(DayOfWeekType dayOfWeek) {
this.dayOfWeek = Integer.valueOf(dayOfWeek.getType());
}
public void setMonth(MonthType month) {
this.month = Integer.valueOf(month.getType());
}
public void setDayOfMonth(DayOfMonthType dayOfMonth) {
this.dayOfMonth = Integer.valueOf(dayOfMonth.getType());
}
public void setDate(Date date) {
this.date = date;
}
public void setMinute(MinuteType minute) {
this.minute = Integer.valueOf(minute.getType());
}
}
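Because the backing values come straight from java.util.Calendar, the month is zero-based (0 = January) while day-of-week runs 1 = Sunday through 7 = Saturday. A quick check with a hypothetical date:

Calendar cal = Calendar.getInstance();
cal.set(2014, Calendar.JANUARY, 15);
TimeDTO dto = new TimeDTO(cal);
System.out.println(cal.get(Calendar.MONTH)); // 0: January is month zero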
| 0true | common_src_main_java_org_broadleafcommerce_common_TimeDTO.java | 213 |
@LuceneTestCase.SuppressCodecs({"MockFixedIntBlock", "MockVariableIntBlock", "MockSep", "MockRandom", "Lucene3x"})
public class CustomPostingsHighlighterTests extends ElasticsearchLuceneTestCase {
@Test
public void testDiscreteHighlightingPerValue() throws Exception {
Directory dir = newDirectory();
IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
iwc.setMergePolicy(newLogMergePolicy());
RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc);
FieldType offsetsType = new FieldType(TextField.TYPE_STORED);
offsetsType.setIndexOptions(FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
Field body = new Field("body", "", offsetsType);
final String firstValue = "This is a test. Just a test highlighting from postings highlighter.";
Document doc = new Document();
doc.add(body);
body.setStringValue(firstValue);
final String secondValue = "This is the second value to perform highlighting on.";
Field body2 = new Field("body", "", offsetsType);
doc.add(body2);
body2.setStringValue(secondValue);
final String thirdValue = "This is the third value to test highlighting with postings.";
Field body3 = new Field("body", "", offsetsType);
doc.add(body3);
body3.setStringValue(thirdValue);
iw.addDocument(doc);
IndexReader ir = iw.getReader();
iw.close();
List<Object> fieldValues = new ArrayList<Object>();
fieldValues.add(firstValue);
fieldValues.add(secondValue);
fieldValues.add(thirdValue);
IndexSearcher searcher = newSearcher(ir);
Query query = new TermQuery(new Term("body", "highlighting"));
BytesRef[] queryTerms = filterTerms(extractTerms(query), "body", true);
TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER);
assertThat(topDocs.totalHits, equalTo(1));
int docId = topDocs.scoreDocs[0].doc;
//highlighting per value, considering whole values (simulating number_of_fragments=0)
CustomPostingsHighlighter highlighter = new CustomPostingsHighlighter(new CustomPassageFormatter("<b>", "</b>", new DefaultEncoder()), fieldValues, false, Integer.MAX_VALUE - 1, 0);
highlighter.setBreakIterator(new WholeBreakIterator());
Snippet[] snippets = highlighter.highlightDoc("body", queryTerms, searcher, docId, 5);
assertThat(snippets.length, equalTo(1));
assertThat(snippets[0].getText(), equalTo("This is a test. Just a test <b>highlighting</b> from postings highlighter."));
snippets = highlighter.highlightDoc("body", queryTerms, searcher, docId, 5);
assertThat(snippets.length, equalTo(1));
assertThat(snippets[0].getText(), equalTo("This is the second value to perform <b>highlighting</b> on."));
snippets = highlighter.highlightDoc("body", queryTerms, searcher, docId, 5);
assertThat(snippets.length, equalTo(1));
assertThat(snippets[0].getText(), equalTo("This is the third value to test <b>highlighting</b> with postings."));
//let's try without whole break iterator as well, to prove that highlighting works the same when working per value (not optimized though)
highlighter = new CustomPostingsHighlighter(new CustomPassageFormatter("<b>", "</b>", new DefaultEncoder()), fieldValues, false, Integer.MAX_VALUE - 1, 0);
snippets = highlighter.highlightDoc("body", queryTerms, searcher, docId, 5);
assertThat(snippets.length, equalTo(1));
assertThat(snippets[0].getText(), equalTo("Just a test <b>highlighting</b> from postings highlighter."));
snippets = highlighter.highlightDoc("body", queryTerms, searcher, docId, 5);
assertThat(snippets.length, equalTo(1));
assertThat(snippets[0].getText(), equalTo("This is the second value to perform <b>highlighting</b> on."));
snippets = highlighter.highlightDoc("body", queryTerms, searcher, docId, 5);
assertThat(snippets.length, equalTo(1));
assertThat(snippets[0].getText(), equalTo("This is the third value to test <b>highlighting</b> with postings."));
ir.close();
dir.close();
}
/*
Tests that scoring works properly even when using discrete per value highlighting
*/
@Test
public void testDiscreteHighlightingScoring() throws Exception {
Directory dir = newDirectory();
IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
iwc.setMergePolicy(newLogMergePolicy());
RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc);
FieldType offsetsType = new FieldType(TextField.TYPE_STORED);
offsetsType.setIndexOptions(FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
//good position but only one match
final String firstValue = "This is a test. Just a test1 highlighting from postings highlighter.";
Field body = new Field("body", "", offsetsType);
Document doc = new Document();
doc.add(body);
body.setStringValue(firstValue);
//two matches, not the best snippet due to its length though
final String secondValue = "This is the second highlighting value to perform highlighting on a longer text that gets scored lower.";
Field body2 = new Field("body", "", offsetsType);
doc.add(body2);
body2.setStringValue(secondValue);
//two matches and short, will be scored highest
final String thirdValue = "This is highlighting the third short highlighting value.";
Field body3 = new Field("body", "", offsetsType);
doc.add(body3);
body3.setStringValue(thirdValue);
//one match, same as first but at the end, will be scored lower due to its position
final String fourthValue = "Just a test4 highlighting from postings highlighter.";
Field body4 = new Field("body", "", offsetsType);
doc.add(body4);
body4.setStringValue(fourthValue);
iw.addDocument(doc);
IndexReader ir = iw.getReader();
iw.close();
String firstHlValue = "Just a test1 <b>highlighting</b> from postings highlighter.";
String secondHlValue = "This is the second <b>highlighting</b> value to perform <b>highlighting</b> on a longer text that gets scored lower.";
String thirdHlValue = "This is <b>highlighting</b> the third short <b>highlighting</b> value.";
String fourthHlValue = "Just a test4 <b>highlighting</b> from postings highlighter.";
IndexSearcher searcher = newSearcher(ir);
Query query = new TermQuery(new Term("body", "highlighting"));
BytesRef[] queryTerms = filterTerms(extractTerms(query), "body", true);
TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER);
assertThat(topDocs.totalHits, equalTo(1));
int docId = topDocs.scoreDocs[0].doc;
List<Object> fieldValues = new ArrayList<Object>();
fieldValues.add(firstValue);
fieldValues.add(secondValue);
fieldValues.add(thirdValue);
fieldValues.add(fourthValue);
boolean mergeValues = true;
CustomPostingsHighlighter highlighter = new CustomPostingsHighlighter(new CustomPassageFormatter("<b>", "</b>", new DefaultEncoder()), fieldValues, mergeValues, Integer.MAX_VALUE-1, 0);
Snippet[] snippets = highlighter.highlightDoc("body", queryTerms, searcher, docId, 5);
assertThat(snippets.length, equalTo(4));
assertThat(snippets[0].getText(), equalTo(firstHlValue));
assertThat(snippets[1].getText(), equalTo(secondHlValue));
assertThat(snippets[2].getText(), equalTo(thirdHlValue));
assertThat(snippets[3].getText(), equalTo(fourthHlValue));
//Let's highlight each separate value and check how the snippets are scored
mergeValues = false;
highlighter = new CustomPostingsHighlighter(new CustomPassageFormatter("<b>", "</b>", new DefaultEncoder()), fieldValues, mergeValues, Integer.MAX_VALUE-1, 0);
List<Snippet> snippets2 = new ArrayList<Snippet>();
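//one highlightDoc call per stored value: with merging disabled the highlighter moves to the next value on each call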
for (int i = 0; i < fieldValues.size(); i++) {
snippets2.addAll(Arrays.asList(highlighter.highlightDoc("body", queryTerms, searcher, docId, 5)));
}
assertThat(snippets2.size(), equalTo(4));
assertThat(snippets2.get(0).getText(), equalTo(firstHlValue));
assertThat(snippets2.get(1).getText(), equalTo(secondHlValue));
assertThat(snippets2.get(2).getText(), equalTo(thirdHlValue));
assertThat(snippets2.get(3).getText(), equalTo(fourthHlValue));
Comparator<Snippet> comparator = new Comparator<Snippet>() {
@Override
public int compare(Snippet o1, Snippet o2) {
return (int)Math.signum(o1.getScore() - o2.getScore());
}
};
//sorting both groups of snippets
Arrays.sort(snippets, comparator);
Collections.sort(snippets2, comparator);
//check that the snippets are in the same order, regardless of whether per-value discrete highlighting was used
//we can't compare the scores directly since they differ slightly: the multi-valued separator added when merging values
//causes slightly different lengths and start offsets, hence slightly different scores.
//That's not a problem though: what matters is that the score is computed the same way, so the resulting order is always the same.
for (int i = 0; i < snippets.length; i++) {
assertThat(snippets[i].getText(), equalTo(snippets2.get(i).getText()));
}
ir.close();
dir.close();
}
/*
Tests that we produce the same snippets and scores when manually merging values in our own custom highlighter rather than using the built-in code
*/
@Test
public void testMergeValuesScoring() throws Exception {
Directory dir = newDirectory();
IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
iwc.setMergePolicy(newLogMergePolicy());
RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc);
FieldType offsetsType = new FieldType(TextField.TYPE_STORED);
offsetsType.setIndexOptions(FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
//good position but only one match
final String firstValue = "This is a test. Just a test1 highlighting from postings highlighter.";
Field body = new Field("body", "", offsetsType);
Document doc = new Document();
doc.add(body);
body.setStringValue(firstValue);
//two matches, not the best snippet due to its length though
final String secondValue = "This is the second highlighting value to perform highlighting on a longer text that gets scored lower.";
Field body2 = new Field("body", "", offsetsType);
doc.add(body2);
body2.setStringValue(secondValue);
//two matches and short, will be scored highest
final String thirdValue = "This is highlighting the third short highlighting value.";
Field body3 = new Field("body", "", offsetsType);
doc.add(body3);
body3.setStringValue(thirdValue);
//one match, same as first but at the end, will be scored lower due to its position
final String fourthValue = "Just a test4 highlighting from postings highlighter.";
Field body4 = new Field("body", "", offsetsType);
doc.add(body4);
body4.setStringValue(fourthValue);
iw.addDocument(doc);
IndexReader ir = iw.getReader();
iw.close();
String firstHlValue = "Just a test1 <b>highlighting</b> from postings highlighter.";
String secondHlValue = "This is the second <b>highlighting</b> value to perform <b>highlighting</b> on a longer text that gets scored lower.";
String thirdHlValue = "This is <b>highlighting</b> the third short <b>highlighting</b> value.";
String fourthHlValue = "Just a test4 <b>highlighting</b> from postings highlighter.";
IndexSearcher searcher = newSearcher(ir);
Query query = new TermQuery(new Term("body", "highlighting"));
BytesRef[] queryTerms = filterTerms(extractTerms(query), "body", true);
TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER);
assertThat(topDocs.totalHits, equalTo(1));
int docId = topDocs.scoreDocs[0].doc;
List<Object> fieldValues = new ArrayList<Object>();
fieldValues.add(firstValue);
fieldValues.add(secondValue);
fieldValues.add(thirdValue);
fieldValues.add(fourthValue);
boolean mergeValues = true;
CustomPostingsHighlighter highlighter = new CustomPostingsHighlighter(new CustomPassageFormatter("<b>", "</b>", new DefaultEncoder()), fieldValues, mergeValues, Integer.MAX_VALUE-1, 0);
Snippet[] snippets = highlighter.highlightDoc("body", queryTerms, searcher, docId, 5);
assertThat(snippets.length, equalTo(4));
assertThat(snippets[0].getText(), equalTo(firstHlValue));
assertThat(snippets[1].getText(), equalTo(secondHlValue));
assertThat(snippets[2].getText(), equalTo(thirdHlValue));
assertThat(snippets[3].getText(), equalTo(fourthHlValue));
//now test our fork of the standard postings highlighter, which merges multiple values together using the paragraph separator
XPostingsHighlighter highlighter2 = new XPostingsHighlighter(Integer.MAX_VALUE - 1) {
@Override
protected char getMultiValuedSeparator(String field) {
return HighlightUtils.PARAGRAPH_SEPARATOR;
}
@Override
protected PassageFormatter getFormatter(String field) {
return new CustomPassageFormatter("<b>", "</b>", new DefaultEncoder());
}
};
Map<String, Object[]> highlightMap = highlighter2.highlightFieldsAsObjects(new String[]{"body"}, query, searcher, new int[]{docId}, new int[]{5});
Object[] objects = highlightMap.get("body");
assertThat(objects, notNullValue());
assertThat(objects.length, equalTo(1));
Snippet[] normalSnippets = (Snippet[])objects[0];
assertThat(normalSnippets.length, equalTo(4));
assertThat(normalSnippets[0].getText(), equalTo(firstHlValue));
assertThat(normalSnippets[1].getText(), equalTo(secondHlValue));
assertThat(normalSnippets[2].getText(), equalTo(thirdHlValue));
assertThat(normalSnippets[3].getText(), equalTo(fourthHlValue));
for (int i = 0; i < normalSnippets.length; i++) {
Snippet normalSnippet = normalSnippets[i];
Snippet customSnippet = snippets[i];
assertThat(customSnippet.getText(), equalTo(normalSnippet.getText()));
assertThat(customSnippet.getScore(), equalTo(normalSnippet.getScore()));
}
ir.close();
dir.close();
}
@Test
public void testRequireFieldMatch() throws Exception {
Directory dir = newDirectory();
IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
iwc.setMergePolicy(newLogMergePolicy());
RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc);
FieldType offsetsType = new FieldType(TextField.TYPE_STORED);
offsetsType.setIndexOptions(FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
Field body = new Field("body", "", offsetsType);
Field none = new Field("none", "", offsetsType);
Document doc = new Document();
doc.add(body);
doc.add(none);
String firstValue = "This is a test. Just a test highlighting from postings. Feel free to ignore.";
body.setStringValue(firstValue);
none.setStringValue(firstValue);
iw.addDocument(doc);
IndexReader ir = iw.getReader();
iw.close();
Query query = new TermQuery(new Term("none", "highlighting"));
SortedSet<Term> queryTerms = extractTerms(query);
IndexSearcher searcher = newSearcher(ir);
TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER);
assertThat(topDocs.totalHits, equalTo(1));
int docId = topDocs.scoreDocs[0].doc;
List<Object> values = new ArrayList<Object>();
values.add(firstValue);
CustomPassageFormatter passageFormatter = new CustomPassageFormatter("<b>", "</b>", new DefaultEncoder());
CustomPostingsHighlighter highlighter = new CustomPostingsHighlighter(passageFormatter, values, true, Integer.MAX_VALUE - 1, 0);
//no snippets with simulated require field match (we filter the terms ourselves)
boolean requireFieldMatch = true;
BytesRef[] filteredQueryTerms = filterTerms(queryTerms, "body", requireFieldMatch);
Snippet[] snippets = highlighter.highlightDoc("body", filteredQueryTerms, searcher, docId, 5);
assertThat(snippets.length, equalTo(0));
highlighter = new CustomPostingsHighlighter(passageFormatter, values, true, Integer.MAX_VALUE - 1, 0);
//one snippet without require field match, just passing in the query terms with no filtering on our side
requireFieldMatch = false;
filteredQueryTerms = filterTerms(queryTerms, "body", requireFieldMatch);
snippets = highlighter.highlightDoc("body", filteredQueryTerms, searcher, docId, 5);
assertThat(snippets.length, equalTo(1));
assertThat(snippets[0].getText(), equalTo("Just a test <b>highlighting</b> from postings."));
ir.close();
dir.close();
}
@Test
public void testNoMatchSize() throws Exception {
Directory dir = newDirectory();
IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
iwc.setMergePolicy(newLogMergePolicy());
RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc);
FieldType offsetsType = new FieldType(TextField.TYPE_STORED);
offsetsType.setIndexOptions(FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
Field body = new Field("body", "", offsetsType);
Field none = new Field("none", "", offsetsType);
Document doc = new Document();
doc.add(body);
doc.add(none);
String firstValue = "This is a test. Just a test highlighting from postings. Feel free to ignore.";
body.setStringValue(firstValue);
none.setStringValue(firstValue);
iw.addDocument(doc);
IndexReader ir = iw.getReader();
iw.close();
Query query = new TermQuery(new Term("none", "highlighting"));
SortedSet<Term> queryTerms = extractTerms(query);
IndexSearcher searcher = newSearcher(ir);
TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER);
assertThat(topDocs.totalHits, equalTo(1));
int docId = topDocs.scoreDocs[0].doc;
List<Object> values = new ArrayList<Object>();
values.add(firstValue);
BytesRef[] filteredQueryTerms = filterTerms(queryTerms, "body", true);
CustomPassageFormatter passageFormatter = new CustomPassageFormatter("<b>", "</b>", new DefaultEncoder());
CustomPostingsHighlighter highlighter = new CustomPostingsHighlighter(passageFormatter, values, true, Integer.MAX_VALUE - 1, 0);
Snippet[] snippets = highlighter.highlightDoc("body", filteredQueryTerms, searcher, docId, 5);
assertThat(snippets.length, equalTo(0));
highlighter = new CustomPostingsHighlighter(passageFormatter, values, true, Integer.MAX_VALUE - 1, atLeast(1));
snippets = highlighter.highlightDoc("body", filteredQueryTerms, searcher, docId, 5);
assertThat(snippets.length, equalTo(1));
assertThat(snippets[0].getText(), equalTo("This is a test."));
ir.close();
dir.close();
}
private static SortedSet<Term> extractTerms(Query query) {
SortedSet<Term> queryTerms = new TreeSet<Term>();
query.extractTerms(queryTerms);
return queryTerms;
}
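/*
Keeps only the terms belonging to the given field when requireFieldMatch is true.
This relies on Term's natural ordering (field first, then text): the subSet from
(field, "") inclusive to (field, UnicodeUtil.BIG_TERM) exclusive spans exactly the
terms of that field, since BIG_TERM sorts after any valid term bytes.
*/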
private static BytesRef[] filterTerms(SortedSet<Term> queryTerms, String field, boolean requireFieldMatch) {
SortedSet<Term> fieldTerms;
if (requireFieldMatch) {
Term floor = new Term(field, "");
Term ceiling = new Term(field, UnicodeUtil.BIG_TERM);
fieldTerms = queryTerms.subSet(floor, ceiling);
} else {
fieldTerms = queryTerms;
}
BytesRef[] terms = new BytesRef[fieldTerms.size()];
int termUpto = 0;
for(Term term : fieldTerms) {
terms[termUpto++] = term.bytes();
}
return terms;
}
}
| 0true
|
src_test_java_org_apache_lucene_search_postingshighlight_CustomPostingsHighlighterTests.java
|
57 |
public class HttpPostCommand extends HttpCommand {
boolean nextLine;
boolean readyToReadData;
private ByteBuffer data;
private ByteBuffer line = ByteBuffer.allocate(500);
private String contentType;
private final SocketTextReader socketTextRequestReader;
private boolean chunked;
public HttpPostCommand(SocketTextReader socketTextRequestReader, String uri) {
super(TextCommandType.HTTP_POST, uri);
this.socketTextRequestReader = socketTextRequestReader;
}
/**
* POST /path HTTP/1.0
* User-Agent: HTTPTool/1.0
* Content-Type: application/x-www-form-urlencoded
* Content-Length: 45
* <next_line>
* <next_line>
* byte[45]
* <next_line>
*
* @param cb the byte buffer to read from
* @return true once the request (headers and body) has been fully read
*/
public boolean readFrom(ByteBuffer cb) {
boolean complete = doActualRead(cb);
while (!complete && readyToReadData && chunked && cb.hasRemaining()) {
complete = doActualRead(cb);
}
if (complete) {
if (data != null) {
data.flip();
}
}
return complete;
}
public byte[] getData() {
if (data == null) {
return null;
} else {
return data.array();
}
}
public byte[] getContentType() {
if (contentType == null) {
return null;
} else {
return stringToBytes(contentType);
}
}
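/**
* Reads as much of the request as the buffer currently holds. For plain requests the
* body size comes from the Content-Length header; for chunked requests
* (Transfer-Encoding: chunked) the body arrives as a sequence of
* <hex chunk size><next_line><chunk bytes> blocks, terminated by a zero-size chunk.
*/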
public boolean doActualRead(ByteBuffer cb) {
if (readyToReadData) {
if (chunked && (data == null || !data.hasRemaining())) {
boolean hasLine = readLine(cb);
String lineStr = null;
if (hasLine) {
lineStr = toStringAndClear(line).trim();
}
if (hasLine) {
// chunk header: the chunk size as a hexadecimal string (an empty line means size 0)
int dataSize = lineStr.length() == 0 ? 0 : Integer.parseInt(lineStr, 16);
if (dataSize == 0) {
return true;
}
if (data != null) {
ByteBuffer newData = ByteBuffer.allocate(data.capacity() + dataSize);
newData.put(data.array());
data = newData;
} else {
data = ByteBuffer.allocate(dataSize);
}
}
}
IOUtil.copyToHeapBuffer(cb, data);
}
while (!readyToReadData && cb.hasRemaining()) {
byte b = cb.get();
char c = (char) b;
if (c == '\n') {
processLine(toStringAndClear(line).toLowerCase());
if (nextLine) {
readyToReadData = true;
}
nextLine = true;
} else if (c != '\r') {
nextLine = false;
line.put(b);
}
}
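// A non-chunked request is complete once the Content-Length sized buffer has been filled;
// completion of a chunked request is signaled by the zero-size chunk handled above.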
return !chunked && ((data != null) && !data.hasRemaining());
}
String toStringAndClear(ByteBuffer bb) {
if (bb == null) {
return "";
}
String result;
if (bb.position() == 0) {
result = "";
} else {
result = StringUtil.bytesToString(bb.array(), 0, bb.position());
}
bb.clear();
return result;
}
boolean readLine(ByteBuffer cb) {
while (cb.hasRemaining()) {
byte b = cb.get();
char c = (char) b;
if (c == '\n') {
return true;
} else if (c != '\r') {
line.put(b);
}
}
return false;
}
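// Recognizes the headers this command cares about: Content-Type, Content-Length
// (pre-allocates the body buffer), Transfer-Encoding: chunked, and Expect: 100-continue,
// which is answered right away with the RES_100 response.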
private void processLine(String currentLine) {
if (contentType == null && currentLine.startsWith(HEADER_CONTENT_TYPE)) {
contentType = currentLine.substring(currentLine.indexOf(' ') + 1);
} else if (data == null && currentLine.startsWith(HEADER_CONTENT_LENGTH)) {
data = ByteBuffer.allocate(Integer.parseInt(currentLine.substring(currentLine.indexOf(' ') + 1)));
} else if (!chunked && currentLine.startsWith(HEADER_CHUNKED)) {
chunked = true;
} else if (currentLine.startsWith(HEADER_EXPECT_100)) {
socketTextRequestReader.sendResponse(new NoOpCommand(RES_100));
}
}
}
| 0true
|
hazelcast_src_main_java_com_hazelcast_ascii_rest_HttpPostCommand.java
|
244 |
public interface BroadleafCurrency extends Serializable {
public String getCurrencyCode();
public void setCurrencyCode(String code);
public String getFriendlyName();
public void setFriendlyName(String friendlyName);
public boolean getDefaultFlag();
public void setDefaultFlag(boolean defaultFlag);
}
| 0true
|
common_src_main_java_org_broadleafcommerce_common_currency_domain_BroadleafCurrency.java
|
80 |
public static class FieldOrder {
// General Fields
public static final int NAME = 1000;
public static final int URL = 2000;
public static final int TITLE = 3000;
public static final int ALT_TEXT = 4000;
public static final int MIME_TYPE = 5000;
public static final int FILE_EXTENSION = 6000;
public static final int FILE_SIZE = 7000;
// Used by subclasses to know where the last field is.
public static final int LAST = 7000;
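// A subclass would continue numbering after LAST, e.g. (hypothetical field):
// public static final int CUSTOM_FIELD = FieldOrder.LAST + 1000;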
}
| 0true
|
admin_broadleaf-contentmanagement-module_src_main_java_org_broadleafcommerce_cms_file_domain_StaticAssetImpl.java
|
629 |
public class PlotView implements PlotAbstraction {
private final static Logger logger = LoggerFactory.getLogger(PlotView.class);
private static final Timer timer = new Timer();
// Manifestation holding this plot
private PlotViewManifestation plotUser;
// Used in time synchronization line mode.
private SynchronizationControl synControl;
private Class<? extends AbstractPlottingPackage> plotPackage;
private String plotName;
// Initial Settings.
private AxisOrientationSetting axisOrientation;
private XAxisMaximumLocationSetting xAxisMaximumLocationSetting;
private YAxisMaximumLocationSetting yAxisMaximumLocationSetting;
private boolean useOrdinalPositionForSubplots;
// Subsequent settings
private TimeAxisSubsequentBoundsSetting timeAxisSubsequentSetting;
private NonTimeAxisSubsequentBoundsSetting nonTimeAxisMinSubsequentSetting;
private NonTimeAxisSubsequentBoundsSetting nonTimeAxisMaxSubsequentSetting;
/* Appearance Constants */
// - Fonts
private Font timeAxisFont;
// Thickness of the plotted lines.
private int plotLineThickness;
private Color plotBackgroundFrameColor;
private Color plotAreaBackgroundColor;
// - Axis
// -- x-axis
// Point where x-axis intercepts y axis
private int timeAxisIntercept;
//color for drawing x-axis
private Color timeAxisColor;
// x-axis labels
private Color timeAxisLabelColor;
private Color timeAxisLabelTextColor;
// format of the date when shown on the x-axis
private String timeAxisDataFormat;
// -- y-axis
private Color nonTimeAxisColor;
// - Gridlines
private Color gridLineColor;
/* Scrolling and scaling behaviors */
// Number of sample to accumulate before autoscaling the y-axis. This
// prevents rapid changing of the y axis.
private int minSamplesForAutoScale;
private double scrollRescaleTimeMargin;
private double scrollRescaleNonTimeMinMargin;
private double scrollRescaleNonTimeMaxMargin;
private double dependentVariableAxisMinValue;
private double dependentVariableAxisMaxValue;
private long timeVariableAxisMinValue;
private long timeVariableAxisMaxValue;
private boolean compressionEnabled;
private boolean localControlsEnabled;
private int numberOfSubPlots;
// Plot line settings
private PlotLineDrawingFlags plotLineDraw;
private PlotLineConnectionType plotLineConnectionType;
/** The list of sub plots. */
public List<AbstractPlottingPackage> subPlots;
/** The plot panel. */
JPanel plotPanel;
/** Map for containing the data set name to sub-group. */
public Map<String, Set<AbstractPlottingPackage>> dataSetNameToSubGroupMap = new HashMap<String, Set<AbstractPlottingPackage>>();
/** Map for containing the data set name to display map. */
public Map<String, String> dataSetNameToDisplayMap = new HashMap<String, String>();
private AbbreviatingPlotLabelingAlgorithm plotLabelingAlgorithm = new AbbreviatingPlotLabelingAlgorithm();
/** List of plot subjects. */
List<PlotSubject> subPlotsToIgnoreNextUpdateFrom = new ArrayList<PlotSubject>();
/** Time axis at start of update cycle. */
GregorianCalendar timeAxisMaxAtStartOfDataUpdateCycle = new GregorianCalendar();
/** Lock updates flag. */
boolean lockUpdates = false;
private PinSupport pinSupport = new PinSupport() {
protected void informPinned(boolean pinned) {
if(pinned) {
pause();
} else {
unpause();
}
}
};
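// While any pin is held the plot stops consuming live data; releasing the last pin
// unpauses it and backfills from the data buffer (see pause()/unpause() below).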
private Pinnable timeSyncLinePin = createPin();
private Axis timeAxis = new Axis();
/** This listens to key events for the plot view and all sub-components so it can forward modifier key presses and releases to the local controls managers. */
private KeyListener keyListener = new KeyListener() {
@Override
public void keyTyped(KeyEvent e) {
}
@Override
public void keyReleased(KeyEvent e) {
if(e.getKeyCode() == KeyEvent.VK_CONTROL) {
for(AbstractPlottingPackage p : subPlots) {
((PlotterPlot) p).localControlsManager.informCtlKeyState(false);
}
} else if(e.getKeyCode() == KeyEvent.VK_ALT) {
for(AbstractPlottingPackage p : subPlots) {
((PlotterPlot) p).localControlsManager.informAltKeyState(false);
}
} else if(e.getKeyCode() == KeyEvent.VK_SHIFT) {
for(AbstractPlottingPackage p : subPlots) {
((PlotterPlot) p).localControlsManager.informShiftKeyState(false);
}
}
}
@Override
public void keyPressed(KeyEvent e) {
if(e.getKeyCode() == KeyEvent.VK_CONTROL) {
for(AbstractPlottingPackage p : subPlots) {
((PlotterPlot) p).localControlsManager.informCtlKeyState(true);
}
} else if(e.getKeyCode() == KeyEvent.VK_ALT) {
for(AbstractPlottingPackage p : subPlots) {
((PlotterPlot) p).localControlsManager.informAltKeyState(true);
}
} else if(e.getKeyCode() == KeyEvent.VK_SHIFT) {
for(AbstractPlottingPackage p : subPlots) {
((PlotterPlot) p).localControlsManager.informShiftKeyState(true);
}
}
}
};
private Pinnable timeAxisUserPin = timeAxis.createPin();
private ContainerListener containerListener = new ContainerListener() {
@Override
public void componentAdded(ContainerEvent e) {
addRecursiveListeners(e.getChild());
}
@Override
public void componentRemoved(ContainerEvent e) {
removeRecursiveListeners(e.getChild());
}
};
private TimerTask updateTimeBoundsTask;
private TimeXYAxis plotTimeAxis;
/**
* Sets the plot view manifestation.
* @param theManifestation - the plot view manifestation.
*/
public void setManifestation(PlotViewManifestation theManifestation) {
if (theManifestation == null ) {
throw new IllegalArgumentException("Plot must not have a null user");
}
plotUser = theManifestation;
for (AbstractPlottingPackage p: subPlots) {
p.setPlotView(this);
}
}
@Override
public JPanel getPlotPanel() {
return plotPanel;
}
@Override
public void addDataSet(String dataSetName) {
addDataSet(dataSetName.toLowerCase(), getNextColor(subPlots.size()-1));
}
@Override
public void addDataSet(String dataSetName, Color plottingColor) {
throwIllegalArgumentExceptionIfWeHaveNoPlots();
getLastPlot().addDataSet(dataSetName.toLowerCase(), plottingColor);
String name = dataSetName.toLowerCase();
Set<AbstractPlottingPackage> set = dataSetNameToSubGroupMap.get(name);
if(set == null) {
set = new HashSet<AbstractPlottingPackage>();
dataSetNameToSubGroupMap.put(name, set);
}
set.add(getLastPlot());
dataSetNameToDisplayMap.put(dataSetName.toLowerCase(), dataSetName);
}
@Override
public void addDataSet(String dataSetName, String displayName) {
throwIllegalArgumentExceptionIfWeHaveNoPlots();
getLastPlot().addDataSet(dataSetName.toLowerCase(), getNextColor(subPlots.size()-1), displayName);
String name = dataSetName.toLowerCase();
Set<AbstractPlottingPackage> set = dataSetNameToSubGroupMap.get(name);
if(set == null) {
set = new HashSet<AbstractPlottingPackage>();
dataSetNameToSubGroupMap.put(name, set);
}
set.add(getLastPlot());
dataSetNameToDisplayMap.put(dataSetName.toLowerCase(), displayName);
}
/**
* Adds the data set per subgroup index, data set name and display name.
* @param subGroupIndex - the subgroup index.
* @param dataSetName - data set name.
* @param displayName - base display name.
*/
public void addDataSet(int subGroupIndex, String dataSetName, String displayName) {
throwIllegalArgumentExceptionIfIndexIsNotInSubPlots(subGroupIndex);
String lowerCaseDataSetName = dataSetName.toLowerCase();
int actualIndex = subGroupIndex;
subPlots.get(actualIndex).addDataSet(lowerCaseDataSetName, getNextColor(actualIndex), displayName);
Set<AbstractPlottingPackage> set = dataSetNameToSubGroupMap.get(lowerCaseDataSetName);
if(set == null) {
set = new HashSet<AbstractPlottingPackage>();
dataSetNameToSubGroupMap.put(lowerCaseDataSetName, set);
}
set.add(subPlots.get(actualIndex));
dataSetNameToDisplayMap.put(lowerCaseDataSetName, displayName);
}
/**
* Adds the popup menus to plot legend entry.
*/
public void addPopupMenus() {
LegendEntryPopupMenuFactory popupManager = new LegendEntryPopupMenuFactory(plotUser);
for (int index = 0; index < subPlots.size(); index++) {
PlotterPlot plot = (PlotterPlot) subPlots.get(index);
for (String dataSetName : plot.plotDataManager.dataSeries.keySet()) {
plot.plotDataManager.dataSeries.get(dataSetName).legendEntry.setPopup(
popupManager
);
}
}
}
/**
* Get per-line settings currently in use for this stack of plots.
* Each element of the returned list corresponds,
* in order, to the sub-plots displayed, and maps subscription ID to a
* LineSettings object describing how its plot line should be drawn.
* @return a list of subscription->setting mappings for this plot
*/
public List<Map<String, LineSettings>> getLineSettings() {
List<Map<String,LineSettings>> settingsAssignments = new ArrayList<Map<String,LineSettings>>();
for (int subPlotIndex = 0; subPlotIndex < subPlots.size(); subPlotIndex++) {
Map<String, LineSettings> settingsMap = new HashMap<String, LineSettings>();
settingsAssignments.add(settingsMap);
PlotterPlot plot = (PlotterPlot) subPlots.get(subPlotIndex);
for (Entry<String, PlotDataSeries> entry : plot.plotDataManager.dataSeries.entrySet()) {
settingsMap.put(entry.getKey(), entry.getValue().legendEntry.getLineSettings());
}
}
return settingsAssignments;
}
/**
* Set line settings for use in this stack of plots.
* Each element corresponds, in order, to the sub-plots displayed, and maps
* subscription ID to the line settings described by a LineSettings object
* @param lineSettings a list of subscription->line setting mappings for this plot
*/
public void setLineSettings(
List<Map<String, LineSettings>> lineSettings) {
if (lineSettings != null) {
for (int subPlotIndex = 0; subPlotIndex < lineSettings.size() && subPlotIndex < subPlots.size(); subPlotIndex++) {
PlotterPlot plot = (PlotterPlot) subPlots.get(subPlotIndex);
for (Entry<String, LineSettings> entry : lineSettings.get(subPlotIndex).entrySet()) {
if (plot.plotDataManager.dataSeries.containsKey(entry.getKey())) {
plot.plotDataManager.dataSeries.get(entry.getKey()).legendEntry.setLineSettings(entry.getValue());
}
}
}
}
}
@Override
public boolean isKnownDataSet(String setName) {
assert setName != null : "data set is null";
return dataSetNameToSubGroupMap.containsKey(setName.toLowerCase());
}
@Override
public void refreshDisplay() {
for (AbstractPlottingPackage p: subPlots) {
p.refreshDisplay();
}
}
@Override
public void updateLegend(String dataSetName, FeedProvider.RenderingInfo info) {
String dataSetNameLower = dataSetName.toLowerCase();
for(AbstractPlottingPackage plot : dataSetNameToSubGroupMap.get(dataSetNameLower)) {
plot.updateLegend(dataSetNameLower, info);
}
}
@Override
public LimitAlarmState getNonTimeMaxAlarmState(int subGroupIndex) {
throwIllegalArgumentExceptionIfIndexIsNotInSubPlots(subGroupIndex);
return subPlots.get(subGroupIndex).getNonTimeMaxAlarmState();
}
@Override
public LimitAlarmState getNonTimeMinAlarmState(int subGroupIndex) {
throwIllegalArgumentExceptionIfIndexIsNotInSubPlots(subGroupIndex);
return subPlots.get(subGroupIndex).getNonTimeMinAlarmState();
}
@Override
public GregorianCalendar getMinTime() {
return getLastPlot().getCurrentTimeAxisMin();
}
@Override
public GregorianCalendar getMaxTime() {
return getLastPlot().getCurrentTimeAxisMax();
}
@Override
public AxisOrientationSetting getAxisOrientationSetting() {
return getLastPlot().getAxisOrientationSetting();
}
@Override
public NonTimeAxisSubsequentBoundsSetting getNonTimeAxisSubsequentMaxSetting() {
return getLastPlot().getNonTimeAxisSubsequentMaxSetting();
}
@Override
public NonTimeAxisSubsequentBoundsSetting getNonTimeAxisSubsequentMinSetting() {
return getLastPlot().getNonTimeAxisSubsequentMinSetting();
}
@Override
public double getNonTimeMax() {
return getLastPlot().getInitialNonTimeMaxSetting();
}
@Override
public double getNonTimeMaxPadding() {
return getLastPlot().getNonTimeMaxPadding();
}
@Override
public double getNonTimeMin() {
return getLastPlot().getInitialNonTimeMinSetting();
}
@Override
public double getNonTimeMinPadding() {
return getLastPlot().getNonTimeMinPadding();
}
@Override
public boolean useOrdinalPositionForSubplots() {
return getLastPlot().getOrdinalPositionInStackedPlot();
}
@Override
public TimeAxisSubsequentBoundsSetting getTimeAxisSubsequentSetting() {
return getLastPlot().getTimeAxisSubsequentSetting();
}
@Override
public long getTimeMax() {
return getLastPlot().getInitialTimeMaxSetting();
}
@Override
public long getTimeMin() {
return getLastPlot().getInitialTimeMinSetting();
}
@Override
public double getTimePadding() {
return getLastPlot().getTimePadding();
}
@Override
public XAxisMaximumLocationSetting getXAxisMaximumLocation() {
return getLastPlot().getXAxisMaximumLocation();
}
@Override
public YAxisMaximumLocationSetting getYAxisMaximumLocation() {
return getLastPlot().getYAxisMaximumLocation();
}
@Override
public void showTimeSyncLine(GregorianCalendar time) {
assert time != null;
timeSyncLinePin.setPinned(true);
for (AbstractPlottingPackage p: subPlots) {
p.showTimeSyncLine(time);
}
}
@Override
public void removeTimeSyncLine() {
timeSyncLinePin.setPinned(false);
for (AbstractPlottingPackage p: subPlots) {
p.removeTimeSyncLine();
}
}
@Override
public boolean isTimeSyncLineVisible() {
return getLastPlot().isTimeSyncLineVisible();
}
@Override
public void initiateGlobalTimeSync(GregorianCalendar time) {
synControl = plotUser.synchronizeTime(time.getTimeInMillis());
}
@Override
public void updateGlobalTimeSync(GregorianCalendar time) {
if(synControl == null) {
synControl = plotUser.synchronizeTime(time.getTimeInMillis());
} else {
synControl.update(time.getTimeInMillis());
}
}
@Override
public void notifyGlobalTimeSyncFinished() {
if (synControl!=null) {
synControl.synchronizationDone();
}
removeTimeSyncLine();
}
@Override
public boolean inTimeSyncMode() {
return getLastPlot().inTimeSyncMode();
}
private Color getNextColor(int subGroupIndex) {
throwIllegalArgumentExceptionIfIndexIsNotInSubPlots(subGroupIndex);
if (subPlots.get(subGroupIndex).getDataSetSize() < PlotLineColorPalette.getColorCount()) {
return PlotLineColorPalette.getColor(subPlots.get(subGroupIndex).getDataSetSize());
} else {
// Exceeded the number of colors in the palette.
return PlotConstants.ROLL_OVER_PLOT_LINE_COLOR;
}
}
@Override
public double getNonTimeMaxCurrentlyDisplayed() {
return getLastPlot().getNonTimeMaxDataValueCurrentlyDisplayed();
}
@Override
public double getNonTimeMinCurrentlyDisplayed() {
return getLastPlot().getNonTimeMinDataValueCurrentlyDisplayed();
}
@Override
public String toString() {
assert plotPackage != null : "Plot package not initialized";
return "Plot: " + plotName + "\n" + plotPackage.toString();
}
/**
* Construct plots using the builder pattern.
*
*/
public static class Builder {
// Required parameters
private Class<? extends AbstractPlottingPackage> plotPackage;
//Optional parameters
// default values give a "traditional" chart with time on the x-axis etc.
private String plotName = "Plot Name Undefined";
private AxisOrientationSetting axisOrientation = AxisOrientationSetting.X_AXIS_AS_TIME;
private XAxisMaximumLocationSetting xAxisMaximumLocationSetting = XAxisMaximumLocationSetting.MAXIMUM_AT_RIGHT;
private YAxisMaximumLocationSetting yAxisMaximumLocationSetting = YAxisMaximumLocationSetting.MAXIMUM_AT_TOP;
private TimeAxisSubsequentBoundsSetting timeAxisSubsequentSetting = TimeAxisSubsequentBoundsSetting.JUMP;
private NonTimeAxisSubsequentBoundsSetting nonTimeAxisMinSubsequentSetting = PlotConstants.DEFAULT_NON_TIME_AXIS_MIN_SUBSEQUENT_SETTING;
private NonTimeAxisSubsequentBoundsSetting nonTimeAxisMaxSubsequentSetting = PlotConstants.DEFAULT_NON_TIME_AXIS_MAX_SUBSEQUENT_SETTING;
private PlotLineDrawingFlags plotLineDraw = new PlotLineDrawingFlags(true, false); // TODO: Move to PlotConstants?
private PlotLineConnectionType plotLineConnectionType = PlotLineGlobalConfiguration.getDefaultConnectionType();
// initial settings
private Font timeAxisFont = PlotConstants.DEFAULT_TIME_AXIS_FONT;
private int plotLineThickness = PlotConstants.DEFAULT_PLOTLINE_THICKNESS ;
private Color plotBackgroundFrameColor = PlotConstants.DEFAULT_PLOT_FRAME_BACKGROUND_COLOR;
private Color plotAreaBackgroundColor = PlotConstants.DEFAULT_PLOT_AREA_BACKGROUND_COLOR;
private int timeAxisIntercept = PlotConstants.DEFAULT_TIME_AXIS_INTERCEPT;
private Color timeAxisColor = PlotConstants.DEFAULT_TIME_AXIS_COLOR;
private Color timeAxisLabelColor = PlotConstants.DEFAULT_TIME_AXIS_LABEL_COLOR;
private String timeAxisDateFormat = PlotConstants.DEFAULT_TIME_AXIS_DATA_FORMAT;
private Color nonTimeAxisColor = PlotConstants.DEFAULT_NON_TIME_AXIS_COLOR;
private Color gridLineColor = PlotConstants.DEFAULT_GRID_LINE_COLOR;
private int minSamplesForAutoScale = PlotConstants.DEFAULT_MIN_SAMPLES_FOR_AUTO_SCALE;
private double scrollRescaleMarginTimeAxis = PlotConstants.DEFAULT_TIME_AXIS_PADDING;
private double scrollRescaleMarginNonTimeMinAxis = PlotConstants.DEFAULT_NON_TIME_AXIS_PADDING_MIN;
private double scrollRescaleMarginNonTimeMaxAxis = PlotConstants.DEFAULT_NON_TIME_AXIS_PADDING_MAX;
private double dependentVariableAxisMinValue = PlotConstants.DEFAULT_NON_TIME_AXIS_MIN_VALUE;
private double dependentVariableAxisMaxValue = PlotConstants.DEFAULT_NON_TIME_AXIS_MAX_VALUE;
private long timeVariableAxisMinValue = new GregorianCalendar().getTimeInMillis();
private long timeVariableAxisMaxValue = timeVariableAxisMinValue + PlotConstants.DEFAUlT_PLOT_SPAN;
private boolean compressionEnabled = PlotConstants.COMPRESSION_ENABLED_BY_DEFAULT;
private int numberOfSubPlots = PlotConstants.DEFAULT_NUMBER_OF_SUBPLOTS;
private boolean localControlsEnabled = PlotConstants.LOCAL_CONTROLS_ENABLED_BY_DEFAULT;
private boolean useOrdinalPositionForSubplotSetting = true;
private boolean pinTimeAxisSetting = false;
private AbbreviatingPlotLabelingAlgorithm plotLabelingAlgorithm = new AbbreviatingPlotLabelingAlgorithm();
/**
* Specifies the required parameters for constructing a plot.
* @param selectedPlotPackage plotting package to render the plot
*/
public Builder(Class<? extends AbstractPlottingPackage> selectedPlotPackage) {
this.plotPackage = selectedPlotPackage;
}
/**
* Specify the plot's user readable name.
* @param initPlotName the initial plot name.
* @return builder the plot view.
*/
public Builder plotName(String initPlotName) {
plotName = initPlotName;
return this;
}
/**
* Specify if the time axis should be oriented on the x- or y-axis.
* @param theAxisOrientation the axis orientation setting.
* @return builder the plot view.
*/
public Builder axisOrientation(PlotConstants.AxisOrientationSetting theAxisOrientation) {
axisOrientation = theAxisOrientation;
return this;
}
/**
* Specify if the x-axis maximum value should be on the left or right of the plot.
* @param theXAxisSetting the X-Axis setting.
* @return builder the plot view.
*/
public Builder xAxisMaximumLocation(PlotConstants.XAxisMaximumLocationSetting theXAxisSetting) {
xAxisMaximumLocationSetting = theXAxisSetting;
return this;
}
/**
* Specify if the y-axis maximum value should be on the bottom or top of the plot.
* @param theYAxisSetting the Y-Axis setting.
* @return the builder the plot view.
*/
public Builder yAxisMaximumLocation(PlotConstants.YAxisMaximumLocationSetting theYAxisSetting) {
yAxisMaximumLocationSetting = theYAxisSetting;
return this;
}
/**
* Specify how the bounds of the time axis will behave as plotting commences.
* @param theTimeAxisSubsequentSetting the time axis subsequent bounds settings.
* @return the builder the plot view.
*/
public Builder timeAxisBoundsSubsequentSetting(PlotConstants.TimeAxisSubsequentBoundsSetting theTimeAxisSubsequentSetting) {
timeAxisSubsequentSetting = theTimeAxisSubsequentSetting;
return this;
}
/**
* Specify how the bounds of the non time axis will behave as plotting commences.
* @param theNonTimeAxisMinSubsequentSetting the non-time axis minimal subsequent setting.
* @return the builder the plot view.
*/
public Builder nonTimeAxisMinSubsequentSetting(PlotConstants.NonTimeAxisSubsequentBoundsSetting theNonTimeAxisMinSubsequentSetting) {
nonTimeAxisMinSubsequentSetting = theNonTimeAxisMinSubsequentSetting;
return this;
}
/**
* Specify whether the ordinal position should be used to construct subplots.
* @param useOrdinalPositionForSubplots whether ordinal position for subplots should be used.
* @return the builder the plot view.
*/
public Builder useOrdinalPositionForSubplots(boolean useOrdinalPositionForSubplots) {
useOrdinalPositionForSubplotSetting = useOrdinalPositionForSubplots;
return this;
}
/**
* Specify whether the initial time axis should be pinned.
* @param pin whether time axis should initially be pinned.
* @return the builder the plot view.
*/
public Builder pinTimeAxis(boolean pin) {
pinTimeAxisSetting = pin;
return this;
}
/**
* Specify how the bounds of the non time axis will behave as plotting commences.
* @param theNonTimeAxisMaxSubsequentSetting the non-time axis minimal subsequent setting.
* @return the builder the plot view.
*/
public Builder nonTimeAxisMaxSubsequentSetting(PlotConstants.NonTimeAxisSubsequentBoundsSetting theNonTimeAxisMaxSubsequentSetting) {
nonTimeAxisMaxSubsequentSetting = theNonTimeAxisMaxSubsequentSetting;
return this;
}
/**
* Specify the size of the font of the labels on the time axis.
* @param theTimeAxisFontSize font size.
* @return the builder the plot view.
*/
public Builder timeAxisFontSize(int theTimeAxisFontSize) {
timeAxisFont = new Font(timeAxisFont.getFontName(), Font.PLAIN, theTimeAxisFontSize);
return this;
}
/**
* Specify the font that will be used to draw the labels on the time axis.
* This parameter overrides the time axis font size parameter when specified.
* @param theTimeAxisFont the font to use.
* @return the builder the plot view.
*/
public Builder timeAxisFont(Font theTimeAxisFont) {
timeAxisFont = theTimeAxisFont;
return this;
}
/**
* Specify the thickness of the line used to plot data on the plot.
* @param theThickness the thickness.
* @return the builder the plot view.
*/
public Builder plotLineThickness(int theThickness) {
plotLineThickness = theThickness;
return this;
}
/**
* Specify the color of the frame surrounding the plot area.
* @param theBackgroundColor the color.
* @return the builder the plot view.
*/
public Builder plotBackgroundFrameColor(Color theBackgroundColor) {
plotBackgroundFrameColor = theBackgroundColor;
return this;
}
/**
* Specify the background color of the plot area.
* @param thePlotAreaColor the color.
* @return the builder the plot view.
*/
public Builder plotAreaBackgroundColor (Color thePlotAreaColor) {
plotAreaBackgroundColor = thePlotAreaColor;
return this;
}
/**
* Specify the point at which the time axis intercepts the non time axis.
* @param theIntercept the intercept point.
* @return the builder the plot view.
*/
public Builder timeAxisIntercept(int theIntercept) {
timeAxisIntercept = theIntercept;
return this;
}
/**
* Specify the color of the time axis.
* @param theTimeAxisColor the color.
* @return the builder the plot view.
*/
public Builder timeAxisColor(Color theTimeAxisColor) {
timeAxisColor = theTimeAxisColor;
return this;
}
/**
* Specify color of text on the time axis.
* @param theTimeAxisTextColor the color.
* @return the builder the plot view.
*/
public Builder timeAxisTextColor(Color theTimeAxisTextColor) {
timeAxisLabelColor = theTimeAxisTextColor;
return this;
}
/**
* Set the format of how time information is printed on time axis labels.
* @param theTimeAxisDateFormat the format.
* @return the builder the plot view.
*/
public Builder timeAxisDateFormat(String theTimeAxisDateFormat) {
timeAxisDateFormat = theTimeAxisDateFormat;
return this;
}
/**
* Set the color of the non time axis.
* @param theNonTimeAxisColor the color.
* @return the builder the plot view.
*/
public Builder nonTimeAxisColor(Color theNonTimeAxisColor) {
nonTimeAxisColor = theNonTimeAxisColor;
return this;
}
/**
* Set the color of the plot gridlines.
* @param theGridLineColor the color.
* @return the builder the plot view.
*/
public Builder gridLineColor(Color theGridLineColor) {
gridLineColor = theGridLineColor;
return this;
}
/**
* The minimum number of samples to accumulate out of range before an autoscale occurs. This
* prevents rapid autoscaling on every plot action.
* @param theMinSamplesForAutoScale the number of samples.
* @return the plot view.
*/
public Builder minSamplesForAutoScale(int theMinSamplesForAutoScale) {
minSamplesForAutoScale = theMinSamplesForAutoScale;
return this;
}
/**
* Percentage of padding to use when rescaling the time axis.
* @param theScrollRescaleMargin the margin.
* @return the builder the plot view.
*/
public Builder scrollRescaleMarginTimeAxis(double theScrollRescaleMargin) {
assert theScrollRescaleMargin <= 1 && theScrollRescaleMargin >=0 : "Attempting to set a scroll rescale margin (time padding) outside of 0 .. 1";
scrollRescaleMarginTimeAxis = theScrollRescaleMargin;
return this;
}
/** Percentage of padding to use when rescaling the non time axis min end.
* @param theScrollRescaleMargin the margin.
* @return the builder the plot view.
*/
public Builder scrollRescaleMarginNonTimeMinAxis(double theScrollRescaleMargin) {
assert theScrollRescaleMargin <= 1 && theScrollRescaleMargin >=0 : "Attempting to set a scroll rescale margin (non time padding) outside of 0 .. 1";
scrollRescaleMarginNonTimeMinAxis = theScrollRescaleMargin;
return this;
}
/** Percentage of padding to use when rescaling the non time axis max end.
* @param theScrollRescaleMargin the margin.
* @return the builder the plot view.
*/
public Builder scrollRescaleMarginNonTimeMaxAxis(double theScrollRescaleMargin) {
assert theScrollRescaleMargin <= 1 && theScrollRescaleMargin >=0 : "Attempting to set a scroll rescale margin (non time padding) outside of 0 .. 1";
scrollRescaleMarginNonTimeMaxAxis = theScrollRescaleMargin;
return this;
}
/**
* Specify the maximum extent of the dependent variable axis.
* @param theNonTimeVariableAxisMaxValue the non-time variable axis max value.
* @return the plot view.
*/
public Builder nonTimeVaribleAxisMaxValue(double theNonTimeVariableAxisMaxValue) {
dependentVariableAxisMaxValue = theNonTimeVariableAxisMaxValue;
return this;
}
/**
* Specify the minimum value of the dependent variable axis.
* @param theNonTimeVariableAxisMinValue the non-time axis minimum value.
* @return the builder the plot view.
*/
public Builder nonTimeVaribleAxisMinValue(double theNonTimeVariableAxisMinValue) {
dependentVariableAxisMinValue = theNonTimeVariableAxisMinValue;
return this;
}
/**
* Specify the initial minimum value of the time axis.
* @param theTimeVariableAxisMinValue the time variable axis minimal value.
* @return the builder the plot view.
*/
public Builder timeVariableAxisMinValue(long theTimeVariableAxisMinValue) {
timeVariableAxisMinValue = theTimeVariableAxisMinValue;
return this;
}
/**
* Specify the initial maximum value of the time axis.
* @param theTimeVariableAxisMaxValue the time variable axis maximum value.
* @return the builder the plot view.
*/
public Builder timeVariableAxisMaxValue(long theTimeVariableAxisMaxValue) {
timeVariableAxisMaxValue = theTimeVariableAxisMaxValue;
return this;
}
/**
* Specify if the plot is to compress its data to match the screen resolution.
* @param state true to compress, false otherwise.
* @return the builder the plot view.
*/
public Builder isCompressionEnabled(boolean state) {
compressionEnabled = state;
return this;
}
/**
* Specify the number of subplots in this plotview.
* @param theNumberOfSubPlots the number of sub-plots.
* @return the builder the plot view.
*/
public Builder numberOfSubPlots(int theNumberOfSubPlots) {
numberOfSubPlots = theNumberOfSubPlots;
return this;
}
/**
* Turn the plot local controls on and off.
* @param theIsEnabled true to enable local controls; false to disable them.
* @return builder the plot view.
*/
public Builder localControlsEnabled(boolean theIsEnabled) {
localControlsEnabled = theIsEnabled;
return this;
}
/**
* Specify the plot abbreviation labeling algorithm.
* @param thePlotLabelingAlgorithm the plot labeling algorithm.
* @return builder the plot view.
*/
public Builder plotLabelingAlgorithm(AbbreviatingPlotLabelingAlgorithm thePlotLabelingAlgorithm) {
plotLabelingAlgorithm = thePlotLabelingAlgorithm;
assert plotLabelingAlgorithm != null : "Plot labeling algorithm should NOT be NULL at this point.";
return this;
}
/**
* Specify whether to draw lines, markers, or both.
* @param plotLineDraw the plotting type
* @return the plot view.
*/
public Builder plotLineDraw(PlotLineDrawingFlags plotLineDraw) {
this.plotLineDraw = plotLineDraw;
return this;
}
/**
* Specify how consecutive plot points are connected when drawing plot lines.
* @param plotLineConnectionType the connection type to use.
* @return the builder the plot view.
*/
public Builder plotLineConnectionType(PlotLineConnectionType plotLineConnectionType) {
this.plotLineConnectionType = plotLineConnectionType;
return this;
}
/**
* Build a new plot instance and return it.
* @return the new plot instance.
*/
public PlotView build() {
return new PlotView(this);
}
}
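// Illustrative builder usage (names as used elsewhere in this class):
// PlotView plot = new PlotView.Builder(PlotterPlot.class)
// .plotName("Example plot")
// .axisOrientation(AxisOrientationSetting.X_AXIS_AS_TIME)
// .numberOfSubPlots(2)
// .build();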
// Private constructor. Construct using builder pattern.
private PlotView(Builder builder) {
plotPackage = builder.plotPackage;
plotName = builder.plotName;
axisOrientation = builder.axisOrientation;
xAxisMaximumLocationSetting = builder.xAxisMaximumLocationSetting;
yAxisMaximumLocationSetting = builder.yAxisMaximumLocationSetting;
timeAxisSubsequentSetting = builder.timeAxisSubsequentSetting;
nonTimeAxisMinSubsequentSetting = builder.nonTimeAxisMinSubsequentSetting;
nonTimeAxisMaxSubsequentSetting = builder.nonTimeAxisMaxSubsequentSetting;
useOrdinalPositionForSubplots = builder.useOrdinalPositionForSubplotSetting;
timeAxisFont = builder.timeAxisFont;
plotLineThickness = builder.plotLineThickness;
plotBackgroundFrameColor = builder.plotBackgroundFrameColor;
plotAreaBackgroundColor = builder.plotAreaBackgroundColor;
timeAxisIntercept = builder.timeAxisIntercept;
timeAxisColor = builder.timeAxisColor;
timeAxisLabelTextColor = builder.timeAxisLabelColor;
timeAxisDataFormat = builder.timeAxisDateFormat;
nonTimeAxisColor = builder.nonTimeAxisColor;
gridLineColor = builder.gridLineColor;
minSamplesForAutoScale = builder.minSamplesForAutoScale;
scrollRescaleTimeMargin = builder.scrollRescaleMarginTimeAxis;
scrollRescaleNonTimeMinMargin = builder.scrollRescaleMarginNonTimeMinAxis;
scrollRescaleNonTimeMaxMargin = builder.scrollRescaleMarginNonTimeMaxAxis;
dependentVariableAxisMinValue = builder.dependentVariableAxisMinValue;
dependentVariableAxisMaxValue = builder.dependentVariableAxisMaxValue;
timeVariableAxisMaxValue = builder.timeVariableAxisMaxValue;
timeVariableAxisMinValue = builder.timeVariableAxisMinValue;
compressionEnabled = builder.compressionEnabled;
numberOfSubPlots = builder.numberOfSubPlots;
localControlsEnabled = builder.localControlsEnabled;
plotLabelingAlgorithm = builder.plotLabelingAlgorithm;
setPlotLineDraw(builder.plotLineDraw);
setPlotLineConnectionType(builder.plotLineConnectionType);
plotPanel = new JPanel();
plotPanel.addAncestorListener(new AncestorListener() {
@Override
public synchronized void ancestorRemoved(AncestorEvent event) {
if(updateTimeBoundsTask != null) {
updateTimeBoundsTask.cancel();
updateTimeBoundsTask = null;
}
}
@Override
public void ancestorMoved(AncestorEvent event) {
}
@Override
public synchronized void ancestorAdded(AncestorEvent event) {
for(AbstractPlottingPackage p : subPlots) {
p.updateCompressionRatio();
}
updateTimeBoundsTask = new TimerTask() {
@Override
public void run() {
try {
timeReachedEnd();
} catch(Exception e) {
// We need to catch exceptions because they can kill the timer.
logger.error(e.toString(), e);
}
}
};
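// Poll once a second while the plot is showing so the time axis keeps up with the current
// MCT time; the task is cancelled in ancestorRemoved above.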
timer.schedule(updateTimeBoundsTask, 0, 1000);
}
});
GridBagLayout layout = new StackPlotLayout(this);
plotPanel.setLayout(layout);
subPlots = new ArrayList<AbstractPlottingPackage>(numberOfSubPlots);
// create the specified number of subplots
for (int i=0; i< numberOfSubPlots; i++) {
AbstractPlottingPackage newPlot;
try {
newPlot = plotPackage.newInstance();
boolean isTimeLabelEnabled = i == (numberOfSubPlots -1);
newPlot.createChart(axisOrientation,
xAxisMaximumLocationSetting,
yAxisMaximumLocationSetting,
timeAxisSubsequentSetting,
nonTimeAxisMinSubsequentSetting,
nonTimeAxisMaxSubsequentSetting,
timeAxisFont,
plotLineThickness,
plotBackgroundFrameColor,
plotAreaBackgroundColor,
timeAxisIntercept,
timeAxisColor,
timeAxisLabelColor,
timeAxisLabelTextColor,
timeAxisDataFormat,
nonTimeAxisColor,
gridLineColor,
minSamplesForAutoScale,
scrollRescaleTimeMargin,
scrollRescaleNonTimeMinMargin,
scrollRescaleNonTimeMaxMargin,
dependentVariableAxisMinValue,
dependentVariableAxisMaxValue,
timeVariableAxisMinValue,
timeVariableAxisMaxValue,
compressionEnabled,
isTimeLabelEnabled,
localControlsEnabled,
useOrdinalPositionForSubplots,
getPlotLineDraw(),
getPlotLineConnectionType(),
this, plotLabelingAlgorithm);
newPlot.setPlotLabelingAlgorithm(plotLabelingAlgorithm);
subPlots.add(newPlot);
newPlot.registerObservor(this);
logger.debug("plotLabelingAlgorithm.getPanelContextTitleList().size()="
+ plotLabelingAlgorithm.getPanelContextTitleList().size()
+ ", plotLabelingAlgorithm.getCanvasContextTitleList().size()="
+ plotLabelingAlgorithm.getCanvasContextTitleList().size());
} catch (InstantiationException e) {
e.printStackTrace();
} catch (IllegalAccessException e) {
e.printStackTrace();
}
}
if (axisOrientation == AxisOrientationSetting.Y_AXIS_AS_TIME) {
Collections.reverse(subPlots);
}
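// With time on the x-axis each sub-plot closes out its grid row (REMAINDER below),
// stacking the sub-plots vertically; with time on the y-axis they sit side by side.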
for (AbstractPlottingPackage subPlot: subPlots) {
JComponent subPanel = subPlot.getPlotPanel();
plotPanel.add(subPanel);
GridBagConstraints c = new GridBagConstraints();
c.fill = GridBagConstraints.BOTH;
c.weightx = 1;
c.weighty = 1;
if(axisOrientation == AxisOrientationSetting.X_AXIS_AS_TIME) {
c.gridwidth = GridBagConstraints.REMAINDER;
}
layout.setConstraints(subPanel, c);
}
// Note that using InputMap does not work for our situation.
// See http://stackoverflow.com/questions/4880704/listening-to-key-events-for-a-component-hierarchy
addRecursiveListeners(plotPanel);
if (builder.pinTimeAxisSetting) {
timeAxisUserPin.setPinned(true);
// update the corner reset buttons after the plot is visible
SwingUtilities.invokeLater(new Runnable() {
public void run() {
for(AbstractPlottingPackage subPlot : subPlots) {
PlotterPlot plot = (PlotterPlot) subPlot;
plot.updateResetButtons();
}
}
});
}
}
/**
* Gets the pinnable time axis by user.
* @return pinnable time axis.
*/
public Pinnable getTimeAxisUserPin() {
return timeAxisUserPin;
}
private void addRecursiveListeners(Component c) {
c.addKeyListener(keyListener);
if(c instanceof Container) {
Container cont = (Container) c;
cont.addContainerListener(containerListener);
for(Component child : cont.getComponents()) {
addRecursiveListeners(child);
}
}
}
private void removeRecursiveListeners(Component c) {
c.removeKeyListener(keyListener);
if(c instanceof Container) {
Container cont = (Container) c;
cont.removeContainerListener(containerListener);
for(Component child : cont.getComponents()) {
removeRecursiveListeners(child);
}
}
}
/**
* Gets the plot labeling algorithm.
* @return abbreviating plot labeling algorithm.
*/
public AbbreviatingPlotLabelingAlgorithm getPlotLabelingAlgorithm() {
return plotLabelingAlgorithm;
}
/**
* Sets the plot labeling algorithm.
* @param thePlotLabelingAlgorithm the plot labeling algorithm.
*/
public void setPlotLabelingAlgorithm(AbbreviatingPlotLabelingAlgorithm thePlotLabelingAlgorithm) {
plotLabelingAlgorithm = thePlotLabelingAlgorithm;
}
@Override
public AbstractPlottingPackage returnPlottingPackage() {
return getLastPlot();
}
@Override
public void setCompressionEnabled(boolean compression) {
for (AbstractPlottingPackage p: subPlots) {
p.setCompressionEnabled(compression);
}
}
@Override
public boolean isCompresionEnabled() {
return getLastPlot().isCompresionEnabled();
}
@Override
public void requestPlotData(GregorianCalendar startTime, GregorianCalendar endTime) {
plotUser.requestDataRefresh(startTime, endTime);
}
private void requestPredictivePlotData(GregorianCalendar startTime, GregorianCalendar endTime) {
plotUser.requestPredictiveData(startTime, endTime);
}
@Override
public void informUpdateDataEventStarted() {
timeAxisMaxAtStartOfDataUpdateCycle.setTimeInMillis(this.getLastPlot().getCurrentTimeAxisMax().getTimeInMillis());
for (AbstractPlottingPackage p: subPlots) {
p.informUpdateCachedDataStreamStarted();
}
}
@Override
public void informUpdateFromFeedEventStarted() {
timeAxisMaxAtStartOfDataUpdateCycle.setTimeInMillis(this.getLastPlot().getCurrentTimeAxisMax().getTimeInMillis());
for (AbstractPlottingPackage p: subPlots) {
p.informUpdateFromLiveDataStreamStarted();
}
}
@Override
public void informUpdateDataEventCompleted() {
for (AbstractPlottingPackage p: subPlots) {
p.informUpdateCacheDataStreamCompleted();
}
syncTimeAxisAcrossPlots();
}
@Override
public void informUpdateFromFeedEventCompleted() {
for (AbstractPlottingPackage p: subPlots) {
p.informUpdateFromLiveDataStreamCompleted();
}
syncTimeAxisAcrossPlots();
}
/**
* Synchronizes the time axis across all sub-plots: finds the sub-plot whose time axis has
* advanced furthest past the maximum recorded at the start of the update cycle and applies
* that sub-plot's bounds to every sub-plot, honoring an inverted axis orientation.
*/
void syncTimeAxisAcrossPlots() {
long maxAtStart = timeAxisMaxAtStartOfDataUpdateCycle.getTimeInMillis();
long currentMaxTime = maxAtStart;
long currentMinTime = maxAtStart;
for (AbstractPlottingPackage p: subPlots) {
long max = p.getCurrentTimeAxisMaxAsLong();
if (max > currentMaxTime) {
currentMaxTime = max;
currentMinTime = p.getCurrentTimeAxisMinAsLong();
}
}
if (currentMaxTime > maxAtStart) {
boolean inverted;
if(axisOrientation == AxisOrientationSetting.X_AXIS_AS_TIME) {
inverted = xAxisMaximumLocationSetting == XAxisMaximumLocationSetting.MAXIMUM_AT_LEFT;
} else {
inverted = yAxisMaximumLocationSetting == YAxisMaximumLocationSetting.MAXIMUM_AT_BOTTOM;
}
long start;
long end;
if(inverted) {
start = currentMaxTime;
end = currentMinTime;
} else {
start = currentMinTime;
end = currentMaxTime;
}
for (AbstractPlottingPackage p: subPlots) {
p.setTimeAxisStartAndStop(start, end);
}
}
}
@Override
public long getCurrentMCTTime() {
return plotUser.getCurrentMCTTime();
}
/**
* Gets the plot user view manifestation.
* @return plotUser the plot user view manifestation.
*/
public PlotViewManifestation getPlotUser() {
return plotUser;
}
/**
* Gets the last plot.
* @return abstract plotting package.
*/
AbstractPlottingPackage getLastPlot() {
throwIllegalArgumentExceptionIfWeHaveNoPlots();
return subPlots.get(subPlots.size() - 1);
}
private void throwIllegalArgumentExceptionIfWeHaveNoPlots() {
if (subPlots.size() < 1) {
throw new IllegalArgumentException("Plot contains no sub plots");
}
}
private void throwIllegalArgumentExceptionIfIndexIsNotInSubPlots(int subGroupIndex) {
if ((subPlots.size() - 1) < subGroupIndex) {
throw new IllegalArgumentException("Subgroup index is out of range: " + subGroupIndex + " > " + (subPlots.size() - 1));
}
}
}
@Override
public boolean plotMatchesSetting(PlotSettings settings) {
if (getAxisOrientationSetting() != settings.timeAxisSetting) {
return false;
}
if (getXAxisMaximumLocation() != settings.xAxisMaximumLocation) {
return false;
}
if (getYAxisMaximumLocation() != settings.yAxisMaximumLocation) {
return false;
}
if (getTimeAxisSubsequentSetting() != settings.timeAxisSubsequent) {
return false;
}
if (getNonTimeAxisSubsequentMinSetting() != settings.nonTimeAxisSubsequentMinSetting) {
return false;
}
if (getNonTimeAxisSubsequentMaxSetting() != settings.nonTimeAxisSubsequentMaxSetting) {
return false;
}
if (getTimeMax() != settings.maxTime) {
return false;
}
if (getTimeMin() != settings.minTime) {
return false;
}
if (getNonTimeMin() != settings.minNonTime) {
return false;
}
if (getNonTimeMax() != settings.maxNonTime) {
return false;
}
if (getTimePadding() != settings.timePadding) {
return false;
}
if (getNonTimeMaxPadding() != settings.nonTimeMaxPadding) {
return false;
}
if (getNonTimeMinPadding() != settings.nonTimeMinPadding) {
return false;
}
if (useOrdinalPositionForSubplots() != settings.ordinalPositionForStackedPlots) {
return false;
}
return true;
}
@Override
public void updateTimeAxis(PlotSubject subject, long startTime, long endTime) {
for (AbstractPlottingPackage plot: subPlots) {
if (plot != subject) {
plot.setTimeAxisStartAndStop(startTime, endTime);
}
}
}
@Override
public void updateResetButtons() {
for(AbstractPlottingPackage p : subPlots) {
p.updateResetButtons();
}
}
@Override
public void clearAllDataFromPlot() {
for (AbstractPlottingPackage plot: subPlots) {
plot.clearAllDataFromPlot();
}
}
@Override
public Pinnable createPin() {
return pinSupport.createPin();
}
private void pause() {
for(AbstractPlottingPackage plot : subPlots) {
plot.pause(true);
}
}
private void unpause() {
// Request data from buffer to fill in what was missed while paused.
plotUser.updateFromFeed(null);
for(AbstractPlottingPackage plot : subPlots) {
plot.pause(false);
}
}
@Override
public boolean isPinned() {
return pinSupport.isPinned();
}
@Override
public List<AbstractPlottingPackage> getSubPlots() {
return Collections.unmodifiableList(subPlots);
}
@Override
public Axis getTimeAxis() {
return timeAxis;
}
/**
* Adds data sets from the given map of feed ID to data points.
* @param dataForPlot data map.
*/
public void addData(Map<String, SortedMap<Long, Double>> dataForPlot) {
for(Entry<String, SortedMap<Long, Double>> feedData : dataForPlot.entrySet()) {
String feedID = feedData.getKey();
String dataSetNameLower = feedID.toLowerCase();
if (!isKnownDataSet(dataSetNameLower)) {
throw new IllegalArgumentException("Attempting to set value for an unknown data set " + feedID);
}
Set<AbstractPlottingPackage> feedPlots = dataSetNameToSubGroupMap.get(dataSetNameLower);
SortedMap<Long, Double> points = feedData.getValue();
for(AbstractPlottingPackage plot : feedPlots) {
plot.addData(dataSetNameLower, points);
}
}
}
/**
* Adds a single data point for the given feed ID, timestamp, and telemetry value.
* @param feedID the feed Id.
* @param time timestamp in milliseconds.
* @param value telemetry value as a double.
*/
public void addData(String feedID, long time, double value) {
SortedMap<Long, Double> points = new TreeMap<Long, Double>();
points.put(time, value);
addData(Collections.singletonMap(feedID, points));
}
/**
* Sets the plot X-Y time axis.
* @param axis the X-Y time axis.
*/
public void setPlotTimeAxis(TimeXYAxis axis) {
this.plotTimeAxis = axis;
}
/**
* Gets the plot X-Y time axis.
* @return X-Y time axis.
*/
public TimeXYAxis getPlotTimeAxis() {
return plotTimeAxis;
}
private void timeReachedEnd() {
long maxTime = getCurrentMCTTime();
double plotMax = Math.max(plotTimeAxis.getStart(), plotTimeAxis.getEnd());
double lag = maxTime - plotMax;
double scrollRescaleTimeMargin = this.scrollRescaleTimeMargin;
if (scrollRescaleTimeMargin == 0) {
scrollRescaleTimeMargin = (maxTime - plotMax) / Math.abs(plotTimeAxis.getEnd() - plotTimeAxis.getStart());
}
if(lag > 0 && !timeAxis.isPinned()) {
if(timeAxisSubsequentSetting == TimeAxisSubsequentBoundsSetting.JUMP) {
double increment = Math.abs(scrollRescaleTimeMargin * (plotTimeAxis.getEnd() - plotTimeAxis.getStart()));
plotTimeAxis.shift(Math.ceil(lag / increment) * increment);
for(AbstractPlottingPackage subPlot : subPlots) {
subPlot.setTimeAxisStartAndStop(plotTimeAxis.getStartAsLong(), plotTimeAxis.getEndAsLong());
}
} else if(timeAxisSubsequentSetting == TimeAxisSubsequentBoundsSetting.SCRUNCH) {
double max = plotTimeAxis.getEnd();
double min = plotTimeAxis.getStart();
double diff = max - min;
assert diff != 0 : "min = max = " + min;
double scrunchFactor = 1 + scrollRescaleTimeMargin;
if (max < min) {
min = max + (maxTime - max)*(scrunchFactor);
} else {
max = min + (maxTime - min)*(scrunchFactor);
}
plotTimeAxis.setStart(min);
plotTimeAxis.setEnd(max);
for(AbstractPlottingPackage subPlot : subPlots) {
subPlot.setTimeAxisStartAndStop(plotTimeAxis.getStartAsLong(), plotTimeAxis.getEndAsLong());
subPlot.updateCompressionRatio();
}
} else {
assert false : "Unrecognized timeAxisSubsequentSetting: " + timeAxisSubsequentSetting;
}
double newPlotMax = Math.max(plotTimeAxis.getStart(), plotTimeAxis.getEnd());
if(newPlotMax != plotMax) {
GregorianCalendar start = new GregorianCalendar();
GregorianCalendar end = new GregorianCalendar();
start.setTimeInMillis((long) plotMax);
end.setTimeInMillis((long) newPlotMax);
requestPredictivePlotData(start, end);
}
}
}
@Override
public PlotLineDrawingFlags getPlotLineDraw() {
return plotLineDraw;
}
@Override
public PlotLineConnectionType getPlotLineConnectionType() {
return plotLineConnectionType;
}
@Override
public void setPlotLineDraw(PlotLineDrawingFlags draw) {
plotLineDraw = draw;
}
@Override
public void setPlotLineConnectionType(PlotLineConnectionType type) {
plotLineConnectionType = type;
}
}
| 1no label
|
fastPlotViews_src_main_java_gov_nasa_arc_mct_fastplot_bridge_PlotView.java
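A minimal standalone sketch (hypothetical names, not the MCT API) of the two rescale policies implemented in timeReachedEnd() above: JUMP shifts the visible window forward by whole margin-sized increments, while SCRUNCH keeps the start pinned and stretches the end past the newest time by the margin.

public final class TimeAxisRescaleSketch {
    enum Policy { JUMP, SCRUNCH }

    // Returns {newStart, newEnd} for a window [start, end] that must cover maxTime.
    static double[] rescale(Policy policy, double start, double end, double maxTime, double margin) {
        double lag = maxTime - Math.max(start, end);
        if (lag <= 0) return new double[] { start, end };   // newest point still on screen
        if (policy == Policy.JUMP) {
            // Shift by whole margin-sized increments so the window "jumps" forward.
            double increment = Math.abs(margin * (end - start));
            double shift = Math.ceil(lag / increment) * increment;
            return new double[] { start + shift, end + shift };
        } else {
            // SCRUNCH: keep the start pinned, extend the end past maxTime by the margin.
            double scrunchFactor = 1 + margin;
            return new double[] { start, start + (maxTime - start) * scrunchFactor };
        }
    }

    public static void main(String[] args) {
        double[] jumped = rescale(Policy.JUMP, 0, 100, 130, 0.25);
        double[] scrunched = rescale(Policy.SCRUNCH, 0, 100, 130, 0.25);
        System.out.printf("JUMP -> [%.0f, %.0f], SCRUNCH -> [%.0f, %.1f]%n",
                jumped[0], jumped[1], scrunched[0], scrunched[1]); // [50, 150] and [0, 162.5]
    }
}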
|
90 |
public interface ObjectToInt<A> {int apply(A a); }
| 0true
|
src_main_java_jsr166e_ConcurrentHashMapV8.java
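A hypothetical call site for this single-abstract-method interface; jsr166e targets pre-Java-8 runtimes, so the idiomatic usage is an anonymous class (on Java 8+ a lambda would also satisfy it).

ObjectToInt<String> length = new ObjectToInt<String>() {
    public int apply(String s) { return s.length(); }
};
int n = length.apply("hello"); // n == 5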
|
2,238 |
public class ScriptScoreFunction extends ScoreFunction {
private final String sScript;
private final Map<String, Object> params;
private final SearchScript script;
public ScriptScoreFunction(String sScript, Map<String, Object> params, SearchScript script) {
super(CombineFunction.REPLACE);
this.sScript = sScript;
this.params = params;
this.script = script;
}
@Override
public void setNextReader(AtomicReaderContext ctx) {
script.setNextReader(ctx);
}
@Override
public double score(int docId, float subQueryScore) {
script.setNextDocId(docId);
script.setNextScore(subQueryScore);
return script.runAsDouble();
}
@Override
public Explanation explainScore(int docId, Explanation subQueryExpl) {
Explanation exp;
if (script instanceof ExplainableSearchScript) {
script.setNextDocId(docId);
script.setNextScore(subQueryExpl.getValue());
exp = ((ExplainableSearchScript) script).explain(subQueryExpl);
} else {
double score = score(docId, subQueryExpl.getValue());
exp = new Explanation(CombineFunction.toFloat(score), "script score function: composed of:");
exp.addDetail(subQueryExpl);
}
return exp;
}
@Override
public String toString() {
return "script[" + sScript + "], params [" + params + "]";
}
}
| 1no label
|
src_main_java_org_elasticsearch_common_lucene_search_function_ScriptScoreFunction.java
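A minimal sketch (hypothetical types, not the Elasticsearch API) of the per-document contract that score() implements above: bind the document and the subquery score, then let the script produce the value that replaces the query score, which is what CombineFunction.REPLACE means.

interface DocScript {
    void setNextDocId(int docId);
    void setNextScore(float score);
    double runAsDouble();
}

final class ReplaceScoring {
    static double score(DocScript script, int docId, float subQueryScore) {
        script.setNextDocId(docId);         // position the script on this document
        script.setNextScore(subQueryScore); // expose the subquery score (_score) to the script
        return script.runAsDouble();        // REPLACE: the script output is the final score
    }
}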
|
1,177 |
public class OQueryOperatorContainsValue extends OQueryOperatorEqualityNotNulls {
public OQueryOperatorContainsValue() {
super("CONTAINSVALUE", 5, false);
}
@Override
@SuppressWarnings("unchecked")
protected boolean evaluateExpression(final OIdentifiable iRecord, final OSQLFilterCondition iCondition, final Object iLeft,
final Object iRight, OCommandContext iContext) {
final OSQLFilterCondition condition;
if (iCondition.getLeft() instanceof OSQLFilterCondition)
condition = (OSQLFilterCondition) iCondition.getLeft();
else if (iCondition.getRight() instanceof OSQLFilterCondition)
condition = (OSQLFilterCondition) iCondition.getRight();
else
condition = null;
if (iLeft instanceof Map<?, ?>) {
final Map<String, ?> map = (Map<String, ?>) iLeft;
if (condition != null) {
// CHECK AGAINST A CONDITION
for (Object o : map.values()) {
o = loadIfNeed(o);
if ((Boolean) condition.evaluate((ORecordSchemaAware<?>) o, null, iContext))
return true;
}
} else
return map.containsValue(iRight);
} else if (iRight instanceof Map<?, ?>) {
final Map<String, ?> map = (Map<String, ?>) iRight;
if (condition != null) {
// CHECK AGAINST A CONDITION
for (Object o : map.values()) {
o = loadIfNeed(o);
if ((Boolean) condition.evaluate((ORecordSchemaAware<?>) o, null, iContext))
return true;
}
} else
// NO CONDITION: FALL BACK TO A PLAIN VALUE LOOKUP, MIRRORING THE LEFT-OPERAND BRANCH
return map.containsValue(iLeft);
}
return false;
}
@SuppressWarnings({ "unchecked", "rawtypes" })
private Object loadIfNeed(Object o) {
final ORecord<?> record = (ORecord<?>) o;
if (record.getRecord().getInternalStatus() == ORecordElement.STATUS.NOT_LOADED) {
try {
o = record.<ORecord> load();
} catch (ORecordNotFoundException e) {
throw new OException("Error during loading record with id : " + record.getIdentity());
}
}
return o;
}
@Override
public OIndexReuseType getIndexReuseType(final Object iLeft, final Object iRight) {
if (!(iRight instanceof OSQLFilterCondition) && !(iLeft instanceof OSQLFilterCondition))
return OIndexReuseType.INDEX_METHOD;
return OIndexReuseType.NO_INDEX;
}
@Override
public Object executeIndexQuery(OCommandContext iContext, OIndex<?> index, INDEX_OPERATION_TYPE iOperationType,
List<Object> keyParams, IndexResultListener resultListener, int fetchLimit) {
final OIndexDefinition indexDefinition = index.getDefinition();
final OIndexInternal<?> internalIndex = index.getInternal();
if (!internalIndex.canBeUsedInEqualityOperators())
return null;
final Object result;
if (indexDefinition.getParamCount() == 1) {
if (!((indexDefinition instanceof OPropertyMapIndexDefinition) && ((OPropertyMapIndexDefinition) indexDefinition)
.getIndexBy() == OPropertyMapIndexDefinition.INDEX_BY.VALUE))
return null;
final Object key = ((OIndexDefinitionMultiValue) indexDefinition).createSingleValue(keyParams.get(0));
if (key == null)
return null;
final Object indexResult = index.get(key);
result = convertIndexResult(indexResult);
} else {
// in case of composite keys several items can be returned in case of we perform search
// using part of composite key stored in index.
final OCompositeIndexDefinition compositeIndexDefinition = (OCompositeIndexDefinition) indexDefinition;
if (!((compositeIndexDefinition.getMultiValueDefinition() instanceof OPropertyMapIndexDefinition) && ((OPropertyMapIndexDefinition) compositeIndexDefinition
.getMultiValueDefinition()).getIndexBy() == OPropertyMapIndexDefinition.INDEX_BY.VALUE))
return null;
final Object keyOne = compositeIndexDefinition.createSingleValue(keyParams);
if (keyOne == null)
return null;
if (internalIndex.hasRangeQuerySupport()) {
final Object keyTwo = compositeIndexDefinition.createSingleValue(keyParams);
if (resultListener != null) {
index.getValuesBetween(keyOne, true, keyTwo, true, resultListener);
result = resultListener.getResult();
} else
result = index.getValuesBetween(keyOne, true, keyTwo, true);
} else {
if (indexDefinition.getParamCount() == keyParams.size()) {
final Object indexResult = index.get(keyOne);
result = convertIndexResult(indexResult);
} else
return null;
}
}
updateProfiler(iContext, index, keyParams, indexDefinition);
return result;
}
private Object convertIndexResult(Object indexResult) {
Object result;
if (indexResult instanceof Collection)
result = (Collection<?>) indexResult;
else if (indexResult == null)
result = Collections.emptyList();
else
result = Collections.singletonList((OIdentifiable) indexResult);
return result;
}
@Override
public ORID getBeginRidRange(Object iLeft, Object iRight) {
return null;
}
@Override
public ORID getEndRidRange(Object iLeft, Object iRight) {
return null;
}
}
| 1no label
|
core_src_main_java_com_orientechnologies_orient_core_sql_operator_OQueryOperatorContainsValue.java
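For the simple case with no nested condition, CONTAINSVALUE reduces to Map.containsValue(), as this hedged illustration shows (the class and data are hypothetical):

import java.util.HashMap;
import java.util.Map;

public class ContainsValueDemo {
    public static void main(String[] args) {
        Map<String, Object> doc = new HashMap<String, Object>();
        doc.put("city", "Rome");
        doc.put("zip", "00184");
        // SELECT ... WHERE addresses CONTAINSVALUE 'Rome'
        System.out.println(doc.containsValue("Rome"));  // true
        System.out.println(doc.containsValue("Paris")); // false
    }
}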
|
1,467 |
public class FindRefinementsVisitor extends Visitor implements NaturalVisitor {
private final Declaration declaration;
private Set<Tree.StatementOrArgument> declarationNodes =
new HashSet<Tree.StatementOrArgument>();
public FindRefinementsVisitor(Declaration declaration) {
this.declaration = declaration;
}
public Set<Tree.StatementOrArgument> getDeclarationNodes() {
return declarationNodes;
}
protected boolean isRefinement(Declaration dec) {
return (dec != null && dec.refines(declaration)) ||
(dec instanceof Setter && ((Setter) dec).getGetter()
.refines(declaration));
}
@Override
public void visit(Tree.SpecifierStatement that) {
if (that.getRefinement() &&
isRefinement(that.getDeclaration())) {
declarationNodes.add(that);
}
super.visit(that);
}
@Override
public void visit(Tree.Declaration that) {
if (isRefinement(that.getDeclarationModel())) {
declarationNodes.add(that);
}
super.visit(that);
}
}
| 1no label
|
plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_util_FindRefinementsVisitor.java
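A hedged usage sketch: construct the visitor with the declaration of interest, walk an AST with it, then read back the collected nodes (rootNode stands in for a compilation-unit node and is hypothetical):

FindRefinementsVisitor visitor = new FindRefinementsVisitor(declaration);
rootNode.visit(visitor); // traverse the tree, collecting refinements
for (Tree.StatementOrArgument node : visitor.getDeclarationNodes()) {
    System.out.println(node); // each node declares or specifies a refinement
}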
|
90 |
public interface Duration extends Comparable<Duration> {
/**
* Returns the length of this duration in the given {@link TimeUnit}.
*
* @param unit
* @return
*/
public long getLength(TimeUnit unit);
/**
* Whether this duration is of zero length.
* @return
*/
public boolean isZeroLength();
/**
* Returns the native unit used by this duration. The actual time length is specified in this unit of time.
*
* @return
*/
public TimeUnit getNativeUnit();
/**
* Returns a new duration that equals the length of this duration minus the length of the given duration
* in the unit of this duration.
*
* @param subtrahend
* @return
*/
public Duration sub(Duration subtrahend);
/**
* Returns a new duration that equals the combined length of this and the given duration in the
* unit of this duration.
*
* @param addend
* @return
*/
public Duration add(Duration addend);
/**
* Multiplies the length of this duration by the given multiplier. The multiplier must be a non-negative number.
*
* @param multiplier
* @return
*/
public Duration multiply(double multiplier);
}
| 0true
|
titan-core_src_main_java_com_thinkaurelius_titan_core_attribute_Duration.java
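A minimal sketch of an implementation satisfying the contract above (SimpleDuration is hypothetical; Titan ships its own implementations). Arithmetic results are expressed in the receiver's native unit, and comparison is done in nanoseconds to avoid truncation.

import java.util.concurrent.TimeUnit;

public final class SimpleDuration implements Comparable<SimpleDuration> {
    private final long length;
    private final TimeUnit unit;

    public SimpleDuration(long length, TimeUnit unit) {
        this.length = length;
        this.unit = unit;
    }

    public long getLength(TimeUnit target) { return target.convert(length, unit); }
    public boolean isZeroLength() { return length == 0; }
    public TimeUnit getNativeUnit() { return unit; }

    public SimpleDuration sub(SimpleDuration subtrahend) {
        return new SimpleDuration(length - subtrahend.getLength(unit), unit);
    }
    public SimpleDuration add(SimpleDuration addend) {
        return new SimpleDuration(length + addend.getLength(unit), unit);
    }
    public SimpleDuration multiply(double multiplier) {
        if (multiplier < 0) throw new IllegalArgumentException("multiplier must be non-negative");
        return new SimpleDuration(Math.round(length * multiplier), unit);
    }

    @Override
    public int compareTo(SimpleDuration other) {
        // Compare in the finest unit so mixed-unit durations order correctly.
        return Long.compare(getLength(TimeUnit.NANOSECONDS), other.getLength(TimeUnit.NANOSECONDS));
    }
}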
|
301 |
public abstract class TranslatableException extends Exception {
protected int code;
/**
* Create a new exception instance
*
* @param code an integer code that represents this exception state
* @param message the message that will be posted to stack traces on the console (not necessarily intended for the user)
*/
public TranslatableException(int code, String message) {
super(message);
this.code = code;
}
/**
* Retrieve the error code associated with this exception
*
* @return the error code
*/
public int getCode() {
return code;
}
/**
* <p>Return the message to show to the user. The framework will first look in the localized property bundles
* for any messages that match the supplied error code and exception type. If not found, the regular message
* submitted to the constructor will be returned.</p>
*
* <p>Message bundle properties have the following format:</p>
*
* <p>
* [simple class name of exception]_[integer error code]=[localized message for this exception and code]
* </p>
*
* @return The error message to display to the user
*/
@Override
public String getLocalizedMessage() {
String response = getMessage();
try {
String exCode = getClass().getSimpleName() + "_" + code;
BroadleafRequestContext context = BroadleafRequestContext.getBroadleafRequestContext();
if (context != null && context.getMessageSource() != null) {
response = context.getMessageSource().getMessage(exCode, null, context.getJavaLocale());
if (response.equals(exCode)) {
response = getMessage();
}
}
} catch (NoSuchMessageException e) {
response = getMessage();
}
return response;
}
/**
* Cause the message passed to the constructor to show up on stack trace logs
*
* @return the non-localized version of the exception message
*/
@Override
public String toString() {
String s = getClass().getName();
String message = getMessage();
return (message != null) ? (s + ": " + message) : s;
}
}
| 0true
|
common_src_main_java_org_broadleafcommerce_common_exception_TranslatableException.java
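A hedged sketch of how the code/bundle lookup described in getLocalizedMessage() lines up; the subclass, error code, and bundle entry below are all hypothetical:

// Hypothetical subclass: code 110 identifies one failure state.
public class InventoryUnavailableException extends TranslatableException {
    public InventoryUnavailableException() {
        super(110, "SKU is out of stock"); // non-localized stack-trace message
    }
}
// Matching message-bundle entry, keyed [simple class name]_[code]:
//   InventoryUnavailableException_110=This item is currently unavailable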
|
1,259 |
public class OClusterLocal extends OSharedResourceAdaptive implements OCluster {
public static final int RECORD_SIZE = 11 + OVersionFactory.instance().getVersionSize();
public static final String TYPE = "PHYSICAL";
private static final int RECORD_TYPE_OFFSET = 10;
private static final String DEF_EXTENSION = ".ocl";
private static final int DEF_SIZE = 1000000;
private static final byte RECORD_WAS_DELETED = (byte) -1;
private OMultiFileSegment fileSegment;
private int id;
private long beginOffsetData = -1;
private long endOffsetData = -1; // end of data offset; -1 = latest
protected OClusterLocalHole holeSegment;
private OStoragePhysicalClusterConfigurationLocal config;
private OStorageLocal storage;
private String name;
private long version;
public OClusterLocal() {
super(OGlobalConfiguration.ENVIRONMENT_CONCURRENT.getValueAsBoolean());
}
public void configure(final OStorage iStorage, OStorageClusterConfiguration iConfig) throws IOException {
config = (OStoragePhysicalClusterConfigurationLocal) iConfig;
init(iStorage, config.getId(), config.getName(), config.getLocation(), config.getDataSegmentId());
}
public void configure(final OStorage iStorage, final int iId, final String iClusterName, final String iLocation,
final int iDataSegmentId, final Object... iParameters) throws IOException {
config = new OStoragePhysicalClusterConfigurationLocal(iStorage.getConfiguration(), iId, iDataSegmentId);
config.name = iClusterName;
init(iStorage, iId, iClusterName, iLocation, iDataSegmentId);
}
public void create(int iStartSize) throws IOException {
acquireExclusiveLock();
try {
if (iStartSize == -1)
iStartSize = DEF_SIZE;
if (config.root.clusters.size() <= config.id)
config.root.clusters.add(config);
else
config.root.clusters.set(config.id, config);
fileSegment.create(iStartSize);
holeSegment.create();
fileSegment.files[0].writeHeaderLong(0, beginOffsetData);
fileSegment.files[0].writeHeaderLong(OBinaryProtocol.SIZE_LONG, endOffsetData);
fileSegment.files[0].writeHeaderLong(2 * OBinaryProtocol.SIZE_LONG, 2);
} finally {
releaseExclusiveLock();
}
}
public void open() throws IOException {
acquireExclusiveLock();
try {
fileSegment.open();
holeSegment.open();
beginOffsetData = fileSegment.files[0].readHeaderLong(0);
endOffsetData = fileSegment.files[0].readHeaderLong(OBinaryProtocol.SIZE_LONG);
version = fileSegment.files[0].readHeaderLong(2 * OBinaryProtocol.SIZE_LONG);
if (version < 1) {
convertDeletedRecords();
}
if (version < 2) {
if (endOffsetData < 0) {
endOffsetData = fileSegment.getFilledUpTo() / RECORD_SIZE - 1;
if (endOffsetData >= 0) {
long[] fetchPos;
for (long currentPos = endOffsetData * RECORD_SIZE; currentPos >= beginOffsetData; currentPos -= RECORD_SIZE) {
fetchPos = fileSegment.getRelativePosition(currentPos);
if (fileSegment.files[(int) fetchPos[0]].readByte(fetchPos[1] + RECORD_TYPE_OFFSET) != RECORD_WAS_DELETED)
// GOOD RECORD: SET IT AS END
break;
endOffsetData--;
}
}
fileSegment.files[0].writeHeaderLong(OBinaryProtocol.SIZE_LONG, endOffsetData);
}
fileSegment.files[0].writeHeaderLong(2 * OBinaryProtocol.SIZE_LONG, 2);
}
} finally {
releaseExclusiveLock();
}
}
private void convertDeletedRecords() throws IOException {
int holesCount = holeSegment.getHoles();
OLogManager.instance().info(this, "Please wait till %d holes will be converted to new format in cluster %s.", holesCount, name);
for (int i = 0; i < holesCount; i++) {
long relativeHolePosition = holeSegment.getEntryPosition(i);
if (relativeHolePosition > -1) {
final long[] pos = fileSegment.getRelativePosition(relativeHolePosition);
final OFile f = fileSegment.files[(int) pos[0]];
final long p = pos[1] + RECORD_TYPE_OFFSET;
f.writeByte(p, RECORD_WAS_DELETED);
}
if (i % 1000 == 0)
OLogManager.instance().info(this, "%d holes were converted in cluster %s ...", i, name);
}
OLogManager.instance().info(this, "Conversion of holes to new format was finished for cluster %s.", holesCount, name);
}
public void close() throws IOException {
acquireExclusiveLock();
try {
fileSegment.close();
holeSegment.close();
} finally {
releaseExclusiveLock();
}
}
@Override
public void close(boolean flush) throws IOException {
close();
}
@Override
public OModificationLock getExternalModificationLock() {
throw new UnsupportedOperationException("getExternalModificationLock");
}
@Override
public OPhysicalPosition createRecord(byte[] content, ORecordVersion recordVersion, byte recordType) throws IOException {
throw new UnsupportedOperationException("createRecord");
}
@Override
public boolean deleteRecord(OClusterPosition clusterPosition) throws IOException {
throw new UnsupportedOperationException("deleteRecord");
}
@Override
public void updateRecord(OClusterPosition clusterPosition, byte[] content, ORecordVersion recordVersion, byte recordType)
throws IOException {
throw new UnsupportedOperationException("updateRecord");
}
@Override
public ORawBuffer readRecord(OClusterPosition clusterPosition) throws IOException {
throw new UnsupportedOperationException("readRecord");
}
@Override
public boolean exists() {
throw new UnsupportedOperationException("exists");
}
public void delete() throws IOException {
acquireExclusiveLock();
try {
for (OFile f : fileSegment.files)
f.delete();
fileSegment.files = null;
holeSegment.delete();
} finally {
releaseExclusiveLock();
}
}
public void truncate() throws IOException {
storage.checkForClusterPermissions(getName());
acquireExclusiveLock();
try {
// REMOVE ALL DATA BLOCKS
final OClusterPosition begin = getFirstPosition();
if (begin.isPersistent()) {
final OClusterPosition end = getLastPosition();
final OPhysicalPosition ppos = new OPhysicalPosition();
for (ppos.clusterPosition = begin; ppos.clusterPosition.compareTo(end) <= 0; ppos.clusterPosition = ppos.clusterPosition
.inc()) {
final OPhysicalPosition pposToDelete = getPhysicalPosition(ppos);
if (pposToDelete != null && storage.checkForRecordValidity(pposToDelete)) {
final ODataLocal data = storage.getDataSegmentById(pposToDelete.dataSegmentId);
if (data != null)
data.deleteRecord(pposToDelete.dataSegmentPos);
}
}
}
fileSegment.truncate();
holeSegment.truncate();
beginOffsetData = -1;
endOffsetData = -1;
} finally {
releaseExclusiveLock();
}
}
public void set(ATTRIBUTES iAttribute, Object iValue) throws IOException {
if (iAttribute == null)
throw new IllegalArgumentException("attribute is null");
final String stringValue = iValue != null ? iValue.toString() : null;
acquireExclusiveLock();
try {
switch (iAttribute) {
case NAME:
setNameInternal(stringValue);
break;
case DATASEGMENT:
setDataSegmentInternal(stringValue);
break;
}
} finally {
releaseExclusiveLock();
}
}
/**
* Fills and returns the PhysicalPosition object received as a parameter with the physical position of the logical record iPosition
*
* @throws IOException
*/
public OPhysicalPosition getPhysicalPosition(final OPhysicalPosition iPPosition) throws IOException {
final OClusterPosition position = iPPosition.clusterPosition;
final long filePosition = position.longValue() * RECORD_SIZE;
acquireSharedLock();
try {
if (position.isNew() || position.compareTo(getLastPosition()) > 0)
return null;
final long[] pos = fileSegment.getRelativePosition(filePosition);
final OFile f = fileSegment.files[(int) pos[0]];
long p = pos[1];
iPPosition.dataSegmentId = f.readShort(p);
iPPosition.dataSegmentPos = f.readLong(p += OBinaryProtocol.SIZE_SHORT);
iPPosition.recordType = f.readByte(p += OBinaryProtocol.SIZE_LONG);
if (iPPosition.recordType == RECORD_WAS_DELETED)
return null;
p += OBinaryProtocol.SIZE_BYTE;
iPPosition.recordVersion.getSerializer().readFrom(f, p, iPPosition.recordVersion);
return iPPosition;
} finally {
releaseSharedLock();
}
}
@Override
public boolean useWal() {
return false;
}
@Override
public float recordGrowFactor() {
return 1;
}
@Override
public float recordOverflowGrowFactor() {
return 1;
}
@Override
public String compression() {
return ONothingCompression.NAME;
}
/**
* Update position in data segment (usually on defrag)
*
* @throws IOException
*/
public void updateDataSegmentPosition(OClusterPosition iPosition, final int iDataSegmentId, final long iDataSegmentPosition)
throws IOException {
long position = iPosition.longValue();
position = position * RECORD_SIZE;
acquireExclusiveLock();
try {
final long[] pos = fileSegment.getRelativePosition(position);
final OFile f = fileSegment.files[(int) pos[0]];
long p = pos[1];
f.writeShort(p, (short) iDataSegmentId);
f.writeLong(p += OBinaryProtocol.SIZE_SHORT, iDataSegmentPosition);
} finally {
releaseExclusiveLock();
}
}
public void updateVersion(OClusterPosition iPosition, final ORecordVersion iVersion) throws IOException {
long position = iPosition.longValue();
position = position * RECORD_SIZE;
acquireExclusiveLock();
try {
final long[] pos = fileSegment.getRelativePosition(position);
iVersion.getSerializer().writeTo(fileSegment.files[(int) pos[0]],
pos[1] + OBinaryProtocol.SIZE_SHORT + OBinaryProtocol.SIZE_LONG + OBinaryProtocol.SIZE_BYTE, iVersion);
} finally {
releaseExclusiveLock();
}
}
public void updateRecordType(OClusterPosition iPosition, final byte iRecordType) throws IOException {
long position = iPosition.longValue();
position = position * RECORD_SIZE;
acquireExclusiveLock();
try {
final long[] pos = fileSegment.getRelativePosition(position);
fileSegment.files[(int) pos[0]].writeByte(pos[1] + OBinaryProtocol.SIZE_SHORT + OBinaryProtocol.SIZE_LONG, iRecordType);
} finally {
releaseExclusiveLock();
}
}
/**
* Removes the logical position entry: pushes the position onto the hole segment and marks the record type as deleted.
*
* @throws IOException
*/
public void removePhysicalPosition(final OClusterPosition iPosition) throws IOException {
final long position = iPosition.longValue() * RECORD_SIZE;
acquireExclusiveLock();
try {
final long[] pos = fileSegment.getRelativePosition(position);
final OFile file = fileSegment.files[(int) pos[0]];
final long p = pos[1] + RECORD_TYPE_OFFSET;
holeSegment.pushPosition(position);
file.writeByte(p, RECORD_WAS_DELETED);
updateBoundsAfterDeletion(iPosition.longValue());
} finally {
releaseExclusiveLock();
}
}
public boolean removeHole(final long iPosition) throws IOException {
acquireExclusiveLock();
try {
return holeSegment.removeEntryWithPosition(iPosition * RECORD_SIZE);
} finally {
releaseExclusiveLock();
}
}
public int getDataSegmentId() {
acquireSharedLock();
try {
return config.getDataSegmentId();
} finally {
releaseSharedLock();
}
}
/**
* Adds a new entry.
*
* @throws IOException
*/
public boolean addPhysicalPosition(final OPhysicalPosition iPPosition) throws IOException {
final long[] pos;
long offset;
acquireExclusiveLock();
try {
offset = holeSegment.popLastEntryPosition();
boolean recycled;
if (offset > -1) {
// REUSE THE HOLE
pos = fileSegment.getRelativePosition(offset);
recycled = true;
} else {
// NO HOLES FOUND: ALLOCATE MORE SPACE
pos = allocateRecord();
offset = fileSegment.getAbsolutePosition(pos);
recycled = false;
}
final OFile file = fileSegment.files[(int) pos[0]];
long p = pos[1];
file.writeShort(p, (short) iPPosition.dataSegmentId);
file.writeLong(p += OBinaryProtocol.SIZE_SHORT, iPPosition.dataSegmentPos);
file.writeByte(p += OBinaryProtocol.SIZE_LONG, iPPosition.recordType);
if (recycled) {
// GET LAST VERSION
iPPosition.recordVersion.getSerializer().readFrom(file, p + OBinaryProtocol.SIZE_BYTE, iPPosition.recordVersion);
if (iPPosition.recordVersion.isTombstone())
iPPosition.recordVersion.revive();
iPPosition.recordVersion.increment();
} else
iPPosition.recordVersion.reset();
iPPosition.recordVersion.getSerializer().writeTo(file, p + OBinaryProtocol.SIZE_BYTE, iPPosition.recordVersion);
iPPosition.clusterPosition = OClusterPositionFactory.INSTANCE.valueOf(offset / RECORD_SIZE);
updateBoundsAfterInsertion(iPPosition.clusterPosition.longValue());
} finally {
releaseExclusiveLock();
}
return true;
}
/**
* Allocates space to store a new record.
*/
protected long[] allocateRecord() throws IOException {
return fileSegment.allocateSpace(RECORD_SIZE);
}
@Override
public OClusterPosition getFirstPosition() {
acquireSharedLock();
try {
return OClusterPositionFactory.INSTANCE.valueOf(beginOffsetData);
} finally {
releaseSharedLock();
}
}
/**
* Returns the position of the last record, as tracked by endOffsetData.
*/
@Override
public OClusterPosition getLastPosition() {
acquireSharedLock();
try {
return OClusterPositionFactory.INSTANCE.valueOf(endOffsetData);
} finally {
releaseSharedLock();
}
}
public long getEntries() {
acquireSharedLock();
try {
return fileSegment.getFilledUpTo() / RECORD_SIZE - holeSegment.getHoles();
} finally {
releaseSharedLock();
}
}
@Override
public long getTombstonesCount() {
return 0;
}
@Override
public void convertToTombstone(OClusterPosition iPosition) throws IOException {
throw new UnsupportedOperationException("convertToTombstone");
}
@Override
public boolean hasTombstonesSupport() {
return false;
}
public int getId() {
return id;
}
public OClusterEntryIterator absoluteIterator() {
return new OClusterEntryIterator(this);
}
public long getSize() {
acquireSharedLock();
try {
return fileSegment.getSize();
} finally {
releaseSharedLock();
}
}
public long getFilledUpTo() {
acquireSharedLock();
try {
return fileSegment.getFilledUpTo();
} finally {
releaseSharedLock();
}
}
@Override
public String toString() {
return name + " (id=" + id + ")";
}
public void lock() {
acquireSharedLock();
}
public void unlock() {
releaseSharedLock();
}
public String getType() {
return TYPE;
}
public boolean isHashBased() {
return false;
}
public long getRecordsSize() {
acquireSharedLock();
try {
long size = fileSegment.getFilledUpTo();
final OClusterEntryIterator it = absoluteIterator();
while (it.hasNext()) {
final OPhysicalPosition pos = it.next();
if (pos.dataSegmentPos > -1 && !pos.recordVersion.isTombstone())
size += storage.getDataSegmentById(pos.dataSegmentId).getRecordSize(pos.dataSegmentPos);
}
return size;
} catch (IOException e) {
throw new OIOException("Error on calculating cluster size for: " + getName(), e);
} finally {
releaseSharedLock();
}
}
public void synch() throws IOException {
acquireSharedLock();
try {
fileSegment.synch();
holeSegment.synch();
} finally {
releaseSharedLock();
}
}
public void setSoftlyClosed(boolean softlyClosed) throws IOException {
acquireExclusiveLock();
try {
fileSegment.setSoftlyClosed(softlyClosed);
holeSegment.setSoftlyClosed(softlyClosed);
} finally {
releaseExclusiveLock();
}
}
@Override
public boolean wasSoftlyClosed() throws IOException {
acquireSharedLock();
try {
boolean wasSoftlyClosed = fileSegment.wasSoftlyClosedAtPreviousTime();
wasSoftlyClosed = wasSoftlyClosed && holeSegment.wasSoftlyClosedAtPreviousTime();
return wasSoftlyClosed;
} finally {
releaseSharedLock();
}
}
public String getName() {
return name;
}
public OStoragePhysicalClusterConfiguration getConfig() {
return config;
}
private void setNameInternal(final String iNewName) {
if (storage.getClusterIdByName(iNewName) > -1)
throw new IllegalArgumentException("Cluster with name '" + iNewName + "' already exists");
for (int i = 0; i < fileSegment.files.length; i++) {
final String osFileName = fileSegment.files[i].getName();
if (osFileName.startsWith(name)) {
final File newFile = new File(storage.getStoragePath() + "/" + iNewName
+ osFileName.substring(osFileName.lastIndexOf(name) + name.length()));
for (OStorageFileConfiguration conf : config.infoFiles) {
if (conf.parent.name.equals(name))
conf.parent.name = iNewName;
if (conf.path.endsWith(osFileName))
conf.path = new String(conf.path.replace(osFileName, newFile.getName()));
}
boolean renamed = fileSegment.files[i].renameTo(newFile);
while (!renamed) {
OMemoryWatchDog.freeMemoryForResourceCleanup(100);
renamed = fileSegment.files[i].renameTo(newFile);
}
}
}
config.name = iNewName;
holeSegment.rename(name, iNewName);
storage.renameCluster(name, iNewName);
name = iNewName;
storage.getConfiguration().update();
}
/**
* Assigns a different data-segment id.
*
* @param iName
* Data-segment's name
*/
private void setDataSegmentInternal(final String iName) {
final int dataId = storage.getDataSegmentIdByName(iName);
config.setDataSegmentId(dataId);
storage.getConfiguration().update();
}
protected void updateBoundsAfterInsertion(final long iPosition) throws IOException {
if (iPosition < beginOffsetData || beginOffsetData == -1) {
// UPDATE BEGIN OF DATA
beginOffsetData = iPosition;
fileSegment.files[0].writeHeaderLong(0, beginOffsetData);
}
if (iPosition > endOffsetData) {
// UPDATE END OF DATA
endOffsetData = iPosition;
fileSegment.files[0].writeHeaderLong(OBinaryProtocol.SIZE_LONG, endOffsetData);
}
}
protected void updateBoundsAfterDeletion(final long iPosition) throws IOException {
final long position = iPosition * RECORD_SIZE;
if (iPosition == beginOffsetData) {
if (getEntries() == 0)
beginOffsetData = -1;
else {
// DISCOVER THE BEGIN OF DATA
beginOffsetData++;
long[] fetchPos;
for (long currentPos = position + RECORD_SIZE; currentPos < fileSegment.getFilledUpTo(); currentPos += RECORD_SIZE) {
fetchPos = fileSegment.getRelativePosition(currentPos);
if (fileSegment.files[(int) fetchPos[0]].readByte(fetchPos[1] + RECORD_TYPE_OFFSET) != RECORD_WAS_DELETED)
// GOOD RECORD: SET IT AS BEGIN
break;
beginOffsetData++;
}
}
fileSegment.files[0].writeHeaderLong(0, beginOffsetData);
}
if (iPosition == endOffsetData) {
if (getEntries() == 0)
endOffsetData = -1;
else {
// DISCOVER THE END OF DATA
endOffsetData--;
long[] fetchPos;
for (long currentPos = position - RECORD_SIZE; currentPos >= beginOffsetData; currentPos -= RECORD_SIZE) {
fetchPos = fileSegment.getRelativePosition(currentPos);
if (fileSegment.files[(int) fetchPos[0]].readByte(fetchPos[1] + RECORD_TYPE_OFFSET) != RECORD_WAS_DELETED)
// GOOD RECORD: SET IT AS END
break;
endOffsetData--;
}
}
fileSegment.files[0].writeHeaderLong(OBinaryProtocol.SIZE_LONG, endOffsetData);
}
}
protected void init(final OStorage iStorage, final int iId, final String iClusterName, final String iLocation,
final int iDataSegmentId, final Object... iParameters) throws IOException {
OFileUtils.checkValidName(iClusterName);
storage = (OStorageLocal) iStorage;
config.setDataSegmentId(iDataSegmentId);
config.id = iId;
config.name = iClusterName;
name = iClusterName;
id = iId;
if (fileSegment == null) {
fileSegment = new OMultiFileSegment(storage, config, DEF_EXTENSION, RECORD_SIZE);
config.setHoleFile(new OStorageClusterHoleConfiguration(config, OStorageVariableParser.DB_PATH_VARIABLE + "/" + config.name,
config.fileType, config.fileMaxSize));
holeSegment = new OClusterLocalHole(this, storage, config.getHoleFile());
}
}
public boolean isSoftlyClosed() {
// Look over files of the cluster
if (!fileSegment.wasSoftlyClosedAtPreviousTime())
return false;
// Look over the hole segment
if (!holeSegment.wasSoftlyClosedAtPreviousTime())
return false;
// Look over files of the corresponding data segment
final ODataLocal dataSegment = storage.getDataSegmentById(config.getDataSegmentId());
if (!dataSegment.wasSoftlyClosedAtPreviousTime())
return false;
// Look over the hole segment
if (!dataSegment.holeSegment.wasSoftlyClosedAtPreviousTime())
return false;
return true;
}
@Override
public OPhysicalPosition[] higherPositions(OPhysicalPosition position) throws IOException {
long filePosition = position.clusterPosition.longValue() * RECORD_SIZE;
if (filePosition < 0)
filePosition = 0;
acquireSharedLock();
try {
if (getFirstPosition().longValue() < 0)
return new OPhysicalPosition[0];
final long lastFilePosition = getLastPosition().longValue() * RECORD_SIZE;
if (filePosition >= lastFilePosition)
return new OPhysicalPosition[0];
byte recordType;
OFile f;
long[] pos;
do {
filePosition += RECORD_SIZE;
pos = fileSegment.getRelativePosition(filePosition);
f = fileSegment.files[(int) pos[0]];
long p = pos[1] + RECORD_TYPE_OFFSET;
recordType = f.readByte(p);
} while (recordType == RECORD_WAS_DELETED && filePosition < lastFilePosition);
if (recordType == RECORD_WAS_DELETED) {
return new OPhysicalPosition[0];
} else {
long p = pos[1];
final OPhysicalPosition physicalPosition = readPhysicalPosition(f, p);
return new OPhysicalPosition[] { physicalPosition };
}
} finally {
releaseSharedLock();
}
}
private OPhysicalPosition readPhysicalPosition(OFile f, long p) throws IOException {
final OPhysicalPosition physicalPosition = new OPhysicalPosition();
physicalPosition.clusterPosition = OClusterPositionFactory.INSTANCE.valueOf(p / RECORD_SIZE);
physicalPosition.dataSegmentId = f.readShort(p);
physicalPosition.dataSegmentPos = f.readLong(p += OBinaryProtocol.SIZE_SHORT);
physicalPosition.recordType = f.readByte(p += OBinaryProtocol.SIZE_LONG);
physicalPosition.recordVersion.getSerializer().readFrom(f, p += OBinaryProtocol.SIZE_BYTE, physicalPosition.recordVersion);
return physicalPosition;
}
@Override
public OPhysicalPosition[] ceilingPositions(OPhysicalPosition position) throws IOException {
long filePosition = position.clusterPosition.longValue() * RECORD_SIZE;
if (filePosition < 0)
filePosition = 0;
acquireSharedLock();
try {
if (getFirstPosition().longValue() < 0)
return new OPhysicalPosition[0];
final long lastFilePosition = getLastPosition().longValue() * RECORD_SIZE;
if (filePosition > lastFilePosition)
return new OPhysicalPosition[0];
byte recordType;
OFile f;
long[] pos;
do {
pos = fileSegment.getRelativePosition(filePosition);
f = fileSegment.files[(int) pos[0]];
long p = pos[1] + RECORD_TYPE_OFFSET;
recordType = f.readByte(p);
filePosition += RECORD_SIZE;
} while (recordType == RECORD_WAS_DELETED && filePosition <= lastFilePosition);
if (recordType == RECORD_WAS_DELETED) {
return new OPhysicalPosition[0];
} else {
long p = pos[1];
final OPhysicalPosition physicalPosition = readPhysicalPosition(f, p);
return new OPhysicalPosition[] { physicalPosition };
}
} finally {
releaseSharedLock();
}
}
@Override
public OPhysicalPosition[] lowerPositions(OPhysicalPosition position) throws IOException {
long filePosition = position.clusterPosition.longValue() * RECORD_SIZE;
acquireSharedLock();
try {
long firstFilePosition = getFirstPosition().longValue() * RECORD_SIZE;
if (filePosition <= firstFilePosition)
return new OPhysicalPosition[0];
byte recordType;
long[] pos;
OFile f;
do {
filePosition -= RECORD_SIZE;
pos = fileSegment.getRelativePosition(filePosition);
f = fileSegment.files[(int) pos[0]];
long p = pos[1] + RECORD_TYPE_OFFSET;
recordType = f.readByte(p);
} while (recordType == RECORD_WAS_DELETED && filePosition > firstFilePosition);
if (recordType == RECORD_WAS_DELETED) {
return new OPhysicalPosition[0];
} else {
long p = pos[1];
final OPhysicalPosition physicalPosition = readPhysicalPosition(f, p);
return new OPhysicalPosition[] { physicalPosition };
}
} finally {
releaseSharedLock();
}
}
@Override
public OPhysicalPosition[] floorPositions(OPhysicalPosition position) throws IOException {
long filePosition = position.clusterPosition.longValue() * RECORD_SIZE;
acquireSharedLock();
try {
long firstFilePosition = getFirstPosition().longValue() * RECORD_SIZE;
if (filePosition <= firstFilePosition)
return new OPhysicalPosition[0];
byte recordType;
long[] pos;
OFile f;
do {
pos = fileSegment.getRelativePosition(filePosition);
f = fileSegment.files[(int) pos[0]];
long p = pos[1] + RECORD_TYPE_OFFSET;
recordType = f.readByte(p);
filePosition -= RECORD_SIZE;
} while (recordType == RECORD_WAS_DELETED && filePosition >= firstFilePosition);
if (recordType == RECORD_WAS_DELETED) {
return new OPhysicalPosition[0];
} else {
long p = pos[1];
final OPhysicalPosition physicalPosition = readPhysicalPosition(f, p);
return new OPhysicalPosition[] { physicalPosition };
}
} finally {
releaseSharedLock();
}
}
}
| 1no label
|
core_src_main_java_com_orientechnologies_orient_core_storage_impl_local_OClusterLocal.java
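A hedged reconstruction of the fixed on-disk entry layout implied by the constants above (RECORD_TYPE_OFFSET = 10, RECORD_SIZE = 11 + version size): a short data-segment id at offset 0, a long data-segment position at offset 2, the record-type byte at offset 10, then the serialized version. The 4-byte version below is an assumption for illustration only.

import java.nio.ByteBuffer;

final class ClusterEntryLayout {
    static final int VERSION_SIZE = 4;                // assumption: a simple 4-byte version
    static final int RECORD_SIZE = 11 + VERSION_SIZE; // mirrors 11 + version-serializer size

    // Writes one entry: short id (offset 0), long position (offset 2),
    // type byte (offset 10, matching RECORD_TYPE_OFFSET), then the version.
    static ByteBuffer write(short dataSegmentId, long dataSegmentPos, byte recordType, int version) {
        ByteBuffer buf = ByteBuffer.allocate(RECORD_SIZE);
        buf.putShort(dataSegmentId); // 2 bytes
        buf.putLong(dataSegmentPos); // 8 bytes
        buf.put(recordType);         // 1 byte, lands at offset 10
        buf.putInt(version);
        return buf;
    }

    public static void main(String[] args) {
        ByteBuffer entry = write((short) 3, 42L, (byte) 'd', 1);
        System.out.println("record type at offset 10: " + (char) entry.get(10)); // prints 'd'
    }
}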
|
113 |
HazelcastTestSupport.assertTrueEventually(new AssertTask() {
@Override
public void run() throws Exception {
final NearCacheStats stats = map.getLocalMapStats().getNearCacheStats();
assertEquals(remainingSize, stats.getOwnedEntryCount());
}
});
| 0true
|
hazelcast-client_src_test_java_com_hazelcast_client_ClientNearCacheTest.java
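The test above relies on a polling assertion; below is a generic sketch of that pattern (names and timing are hypothetical, not the Hazelcast test-support API): retry the assertion until it passes or a deadline expires.

final class Eventually {
    interface Check { void run() throws Exception; }

    static void assertEventually(Check check, long timeoutMillis) throws Exception {
        long deadline = System.currentTimeMillis() + timeoutMillis;
        while (true) {
            try {
                check.run();
                return;                       // assertion passed
            } catch (AssertionError e) {
                if (System.currentTimeMillis() > deadline) throw e; // give up: rethrow last failure
                Thread.sleep(50);             // back off, then retry
            }
        }
    }
}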
|
88 |
private final class ClientPacketProcessor implements Runnable {
final ClientPacket packet;
private ClientPacketProcessor(ClientPacket packet) {
this.packet = packet;
}
@Override
public void run() {
Connection conn = packet.getConn();
ClientEndpoint endpoint = getEndpoint(conn);
ClientRequest request = null;
try {
request = loadRequest();
if (request == null) {
handlePacketWithNullRequest();
} else if (request instanceof AuthenticationRequest) {
endpoint = createEndpoint(conn);
if (endpoint != null) {
processRequest(endpoint, request);
} else {
handleEndpointNotCreatedConnectionNotAlive();
}
} else if (endpoint == null) {
handleMissingEndpoint(conn);
} else if (endpoint.isAuthenticated()) {
processRequest(endpoint, request);
} else {
handleAuthenticationFailure(conn, endpoint, request);
}
} catch (Throwable e) {
handleProcessingFailure(endpoint, request, e);
}
}
private ClientRequest loadRequest() {
Data data = packet.getData();
return serializationService.toObject(data);
}
private void handleEndpointNotCreatedConnectionNotAlive() {
logger.warning("Dropped: " + packet + " -> endpoint not created for AuthenticationRequest, "
+ "connection not alive");
}
private void handlePacketWithNullRequest() {
logger.warning("Dropped: " + packet + " -> null request");
}
private void handleMissingEndpoint(Connection conn) {
if (conn.live()) {
logger.severe("Dropping: " + packet + " -> no endpoint found for live connection.");
} else {
if (logger.isFinestEnabled()) {
logger.finest("Dropping: " + packet + " -> no endpoint found for dead connection.");
}
}
}
private void handleProcessingFailure(ClientEndpoint endpoint, ClientRequest request, Throwable e) {
Level level = nodeEngine.isActive() ? Level.SEVERE : Level.FINEST;
if (logger.isLoggable(level)) {
if (request == null) {
logger.log(level, e.getMessage(), e);
} else {
logger.log(level, "While executing request: " + request + " -> " + e.getMessage(), e);
}
}
if (request != null && endpoint != null) {
endpoint.sendResponse(e, request.getCallId());
}
}
private void processRequest(ClientEndpoint endpoint, ClientRequest request) throws Exception {
request.setEndpoint(endpoint);
initService(request);
request.setClientEngine(ClientEngineImpl.this);
checkPermissions(endpoint, request);
request.process();
}
private void checkPermissions(ClientEndpoint endpoint, ClientRequest request) {
SecurityContext securityContext = getSecurityContext();
if (securityContext != null) {
Permission permission = request.getRequiredPermission();
if (permission != null) {
securityContext.checkPermission(endpoint.getSubject(), permission);
}
}
}
private void initService(ClientRequest request) {
String serviceName = request.getServiceName();
if (serviceName == null) {
return;
}
Object service = nodeEngine.getService(serviceName);
if (service == null) {
if (nodeEngine.isActive()) {
throw new IllegalArgumentException("No service registered with name: " + serviceName);
}
throw new HazelcastInstanceNotActiveException();
}
request.setService(service);
}
private void handleAuthenticationFailure(Connection conn, ClientEndpoint endpoint, ClientRequest request) {
Exception exception;
if (nodeEngine.isActive()) {
String message = "Client " + conn + " must authenticate before any operation.";
logger.severe(message);
exception = new AuthenticationException(message);
} else {
exception = new HazelcastInstanceNotActiveException();
}
endpoint.sendResponse(exception, request.getCallId());
removeEndpoint(conn);
}
}
| 1no label
|
hazelcast_src_main_java_com_hazelcast_client_ClientEngineImpl.java
|
768 |
public class IndexRequest extends ShardReplicationOperationRequest<IndexRequest> {
/**
* Operation type controls the type of the index operation.
*/
public static enum OpType {
/**
* Index the source. If there is an existing document with the id, it will
* be replaced.
*/
INDEX((byte) 0),
/**
* Creates the resource. Simply adds it to the index; if a document with
* the same id already exists, the operation fails rather than replacing it.
*/
CREATE((byte) 1);
private final byte id;
private final String lowercase;
OpType(byte id) {
this.id = id;
this.lowercase = this.toString().toLowerCase(Locale.ENGLISH);
}
/**
* The internal representation of the operation type.
*/
public byte id() {
return id;
}
public String lowercase() {
return this.lowercase;
}
/**
* Constructs the operation type from its internal representation.
*/
public static OpType fromId(byte id) {
if (id == 0) {
return INDEX;
} else if (id == 1) {
return CREATE;
} else {
throw new ElasticsearchIllegalArgumentException("No type match for [" + id + "]");
}
}
}
private String type;
private String id;
@Nullable
private String routing;
@Nullable
private String parent;
@Nullable
private String timestamp;
private long ttl = -1;
private BytesReference source;
private boolean sourceUnsafe;
private OpType opType = OpType.INDEX;
private boolean refresh = false;
private long version = Versions.MATCH_ANY;
private VersionType versionType = VersionType.INTERNAL;
private XContentType contentType = Requests.INDEX_CONTENT_TYPE;
public IndexRequest() {
}
/**
* Constructs a new index request against the specific index. The {@link #type(String)}
* {@link #source(byte[])} must be set.
*/
public IndexRequest(String index) {
this.index = index;
}
/**
* Constructs a new index request against the specific index and type. The
* {@link #source(byte[])} must be set.
*/
public IndexRequest(String index, String type) {
this.index = index;
this.type = type;
}
/**
* Constructs a new index request against the index, type, id and using the source.
*
* @param index The index to index into
* @param type The type to index into
* @param id The id of document
*/
public IndexRequest(String index, String type, String id) {
this.index = index;
this.type = type;
this.id = id;
}
@Override
public ActionRequestValidationException validate() {
ActionRequestValidationException validationException = super.validate();
if (type == null) {
validationException = addValidationError("type is missing", validationException);
}
if (source == null) {
validationException = addValidationError("source is missing", validationException);
}
return validationException;
}
/**
* Before we fork on a local thread, make sure we copy over the bytes if they are unsafe
*/
@Override
public void beforeLocalFork() {
// only copy over the source if it is unsafe
safeSource();
}
/**
* Sets the content type that will be used when generating a document from user provided objects (like Map).
*/
public IndexRequest contentType(XContentType contentType) {
this.contentType = contentType;
return this;
}
/**
* The type of the indexed document.
*/
public String type() {
return type;
}
/**
* Sets the type of the indexed document.
*/
public IndexRequest type(String type) {
this.type = type;
return this;
}
/**
* The id of the indexed document. If not set, will be automatically generated.
*/
public String id() {
return id;
}
/**
* Sets the id of the indexed document. If not set, will be automatically generated.
*/
public IndexRequest id(String id) {
this.id = id;
return this;
}
/**
* Controls the shard routing of the request. Using this value to hash the shard
* and not the id.
*/
public IndexRequest routing(String routing) {
if (routing != null && routing.length() == 0) {
this.routing = null;
} else {
this.routing = routing;
}
return this;
}
/**
* Controls the shard routing of the request. Using this value to hash the shard
* and not the id.
*/
public String routing() {
return this.routing;
}
/**
* Sets the parent id of this document. If routing is not set, automatically set it as the
* routing as well.
*/
public IndexRequest parent(String parent) {
this.parent = parent;
if (routing == null) {
routing = parent;
}
return this;
}
public String parent() {
return this.parent;
}
/**
* Sets the timestamp either as millis since the epoch, or, in the configured date format.
*/
public IndexRequest timestamp(String timestamp) {
this.timestamp = timestamp;
return this;
}
public String timestamp() {
return this.timestamp;
}
/**
* Sets the relative ttl value. It must be > 0, as it makes little sense otherwise. Setting it
* to <tt>null</tt> will reset to have no ttl.
*/
public IndexRequest ttl(Long ttl) throws ElasticsearchGenerationException {
if (ttl == null) {
this.ttl = -1;
return this;
}
if (ttl <= 0) {
throw new ElasticsearchIllegalArgumentException("TTL value must be > 0. Illegal value provided [" + ttl + "]");
}
this.ttl = ttl;
return this;
}
public long ttl() {
return this.ttl;
}
/**
* The source of the document to index, recopied to a new array if it is unsafe.
*/
public BytesReference source() {
return source;
}
public BytesReference safeSource() {
if (sourceUnsafe) {
source = source.copyBytesArray();
sourceUnsafe = false;
}
return source;
}
public Map<String, Object> sourceAsMap() {
return XContentHelper.convertToMap(source, false).v2();
}
/**
* Index the Map as a {@link org.elasticsearch.client.Requests#INDEX_CONTENT_TYPE}.
*
* @param source The map to index
*/
public IndexRequest source(Map source) throws ElasticsearchGenerationException {
return source(source, contentType);
}
/**
* Index the Map as the provided content type.
*
* @param source The map to index
*/
public IndexRequest source(Map source, XContentType contentType) throws ElasticsearchGenerationException {
try {
XContentBuilder builder = XContentFactory.contentBuilder(contentType);
builder.map(source);
return source(builder);
} catch (IOException e) {
throw new ElasticsearchGenerationException("Failed to generate [" + source + "]", e);
}
}
/**
* Sets the document source to index.
* <p/>
* <p>Note, it's preferable to either set it using {@link #source(org.elasticsearch.common.xcontent.XContentBuilder)}
* or using the {@link #source(byte[])}.
*/
public IndexRequest source(String source) {
this.source = new BytesArray(source.getBytes(Charsets.UTF_8));
this.sourceUnsafe = false;
return this;
}
/**
* Sets the content source to index.
*/
public IndexRequest source(XContentBuilder sourceBuilder) {
source = sourceBuilder.bytes();
sourceUnsafe = false;
return this;
}
public IndexRequest source(String field1, Object value1) {
try {
XContentBuilder builder = XContentFactory.contentBuilder(contentType);
builder.startObject().field(field1, value1).endObject();
return source(builder);
} catch (IOException e) {
throw new ElasticsearchGenerationException("Failed to generate", e);
}
}
public IndexRequest source(String field1, Object value1, String field2, Object value2) {
try {
XContentBuilder builder = XContentFactory.contentBuilder(contentType);
builder.startObject().field(field1, value1).field(field2, value2).endObject();
return source(builder);
} catch (IOException e) {
throw new ElasticsearchGenerationException("Failed to generate", e);
}
}
public IndexRequest source(String field1, Object value1, String field2, Object value2, String field3, Object value3) {
try {
XContentBuilder builder = XContentFactory.contentBuilder(contentType);
builder.startObject().field(field1, value1).field(field2, value2).field(field3, value3).endObject();
return source(builder);
} catch (IOException e) {
throw new ElasticsearchGenerationException("Failed to generate", e);
}
}
public IndexRequest source(String field1, Object value1, String field2, Object value2, String field3, Object value3, String field4, Object value4) {
try {
XContentBuilder builder = XContentFactory.contentBuilder(contentType);
builder.startObject().field(field1, value1).field(field2, value2).field(field3, value3).field(field4, value4).endObject();
return source(builder);
} catch (IOException e) {
throw new ElasticsearchGenerationException("Failed to generate", e);
}
}
public IndexRequest source(Object... source) {
if (source.length % 2 != 0) {
throw new IllegalArgumentException("The number of object passed must be even but was [" + source.length + "]");
}
try {
XContentBuilder builder = XContentFactory.contentBuilder(contentType);
builder.startObject();
for (int i = 0; i < source.length; i++) {
builder.field(source[i++].toString(), source[i]);
}
builder.endObject();
return source(builder);
} catch (IOException e) {
throw new ElasticsearchGenerationException("Failed to generate", e);
}
}
/**
* Sets the document to index in bytes form.
*/
public IndexRequest source(BytesReference source, boolean unsafe) {
this.source = source;
this.sourceUnsafe = unsafe;
return this;
}
/**
* Sets the document to index in bytes form.
*/
public IndexRequest source(byte[] source) {
return source(source, 0, source.length);
}
/**
* Sets the document to index in bytes form (assumed to be safe to be used from different
* threads).
*
* @param source The source to index
* @param offset The offset in the byte array
* @param length The length of the data
*/
public IndexRequest source(byte[] source, int offset, int length) {
return source(source, offset, length, false);
}
/**
* Sets the document to index in bytes form.
*
* @param source The source to index
* @param offset The offset in the byte array
* @param length The length of the data
* @param unsafe Is the byte array safe to be used from a different thread
*/
public IndexRequest source(byte[] source, int offset, int length, boolean unsafe) {
this.source = new BytesArray(source, offset, length);
this.sourceUnsafe = unsafe;
return this;
}
/**
* Sets the type of operation to perform.
*/
public IndexRequest opType(OpType opType) {
this.opType = opType;
return this;
}
/**
* Sets a string representation of the {@link #opType(org.elasticsearch.action.index.IndexRequest.OpType)}. Can
* be either "index" or "create".
*/
public IndexRequest opType(String opType) throws ElasticsearchIllegalArgumentException {
if ("create".equals(opType)) {
return opType(OpType.CREATE);
} else if ("index".equals(opType)) {
return opType(OpType.INDEX);
} else {
throw new ElasticsearchIllegalArgumentException("No index opType matching [" + opType + "]");
}
}
/**
* Set to <tt>true</tt> to force this index to use {@link OpType#CREATE}.
*/
public IndexRequest create(boolean create) {
if (create) {
return opType(OpType.CREATE);
} else {
return opType(OpType.INDEX);
}
}
/**
* The type of operation to perform.
*/
public OpType opType() {
return this.opType;
}
/**
* Should a refresh be executed after this index operation, making the document
* immediately searchable. Note, heavy indexing should not set this to <tt>true</tt>. Defaults
* to <tt>false</tt>.
*/
public IndexRequest refresh(boolean refresh) {
this.refresh = refresh;
return this;
}
public boolean refresh() {
return this.refresh;
}
/**
* Sets the version, which will cause the index operation to only be performed if a matching
* version exists and no changes happened on the doc since then.
*/
public IndexRequest version(long version) {
this.version = version;
return this;
}
public long version() {
return this.version;
}
/**
* Sets the versioning type. Defaults to {@link VersionType#INTERNAL}.
*/
public IndexRequest versionType(VersionType versionType) {
this.versionType = versionType;
return this;
}
public VersionType versionType() {
return this.versionType;
}
public void process(MetaData metaData, String aliasOrIndex, @Nullable MappingMetaData mappingMd, boolean allowIdGeneration) throws ElasticsearchException {
// resolve the routing if needed
routing(metaData.resolveIndexRouting(routing, aliasOrIndex));
// resolve timestamp if provided externally
if (timestamp != null) {
timestamp = MappingMetaData.Timestamp.parseStringTimestamp(timestamp,
mappingMd != null ? mappingMd.timestamp().dateTimeFormatter() : TimestampFieldMapper.Defaults.DATE_TIME_FORMATTER);
}
// extract values if needed
if (mappingMd != null) {
MappingMetaData.ParseContext parseContext = mappingMd.createParseContext(id, routing, timestamp);
if (parseContext.shouldParse()) {
XContentParser parser = null;
try {
parser = XContentHelper.createParser(source);
mappingMd.parse(parser, parseContext);
if (parseContext.shouldParseId()) {
id = parseContext.id();
}
if (parseContext.shouldParseRouting()) {
routing = parseContext.routing();
}
if (parseContext.shouldParseTimestamp()) {
timestamp = parseContext.timestamp();
timestamp = MappingMetaData.Timestamp.parseStringTimestamp(timestamp, mappingMd.timestamp().dateTimeFormatter());
}
} catch (Exception e) {
throw new ElasticsearchParseException("failed to parse doc to extract routing/timestamp/id", e);
} finally {
if (parser != null) {
parser.close();
}
}
}
// might as well check for routing here
if (mappingMd.routing().required() && routing == null) {
throw new RoutingMissingException(index, type, id);
}
if (parent != null && !mappingMd.hasParentField()) {
throw new ElasticsearchIllegalArgumentException("Can't specify parent if no parent field has been configured");
}
} else {
if (parent != null) {
throw new ElasticsearchIllegalArgumentException("Can't specify parent if no parent field has been configured");
}
}
// generate id if not already provided and id generation is allowed
if (allowIdGeneration) {
if (id == null) {
id(Strings.randomBase64UUID());
// since we generate the id, change it to CREATE
opType(IndexRequest.OpType.CREATE);
}
}
// generate timestamp if not provided, we always have one post this stage...
if (timestamp == null) {
timestamp = Long.toString(System.currentTimeMillis());
}
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
type = in.readSharedString();
id = in.readOptionalString();
routing = in.readOptionalString();
parent = in.readOptionalString();
timestamp = in.readOptionalString();
ttl = in.readLong();
source = in.readBytesReference();
sourceUnsafe = false;
opType = OpType.fromId(in.readByte());
refresh = in.readBoolean();
version = in.readLong();
versionType = VersionType.fromValue(in.readByte());
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeSharedString(type);
out.writeOptionalString(id);
out.writeOptionalString(routing);
out.writeOptionalString(parent);
out.writeOptionalString(timestamp);
out.writeLong(ttl);
out.writeBytesReference(source);
out.writeByte(opType.id());
out.writeBoolean(refresh);
out.writeLong(version);
out.writeByte(versionType.getValue());
}
@Override
public String toString() {
String sSource = "_na_";
try {
sSource = XContentHelper.convertToJson(source, false);
} catch (Exception e) {
// ignore
}
return "index {[" + index + "][" + type + "][" + id + "], source[" + sSource + "]}";
}
}
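A minimal usage sketch of the setters above, assuming the single-argument IndexRequest constructor and a JSON string source; the index, type, id and values are illustrative only:
IndexRequest request = new IndexRequest("orders")
.type("order")
.id("1")
.source("{\"status\":\"NEW\"}")
.opType("create") // equivalent to opType(OpType.CREATE)
.version(2) // only applies if the doc is currently at version 2
.versionType(VersionType.INTERNAL)
.refresh(false); // keep false under heavy indexing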
| 1no label
|
src_main_java_org_elasticsearch_action_index_IndexRequest.java
|
1,488 |
public static class Map extends Mapper<NullWritable, FaunusVertex, NullWritable, FaunusVertex> {
private boolean isVertex;
private ElementChecker startChecker;
private ElementChecker endChecker;
@Override
public void setup(final Mapper.Context context) throws IOException, InterruptedException {
this.isVertex = context.getConfiguration().getClass(CLASS, Element.class, Element.class).equals(Vertex.class);
final String key = context.getConfiguration().get(KEY);
final Class valueClass = context.getConfiguration().getClass(VALUE_CLASS, String.class);
final Object startValue;
final Object endValue;
if (valueClass.equals(String.class)) {
startValue = context.getConfiguration().get(START_VALUE);
endValue = context.getConfiguration().get(END_VALUE);
} else if (Number.class.isAssignableFrom(valueClass)) {
startValue = context.getConfiguration().getFloat(START_VALUE, Float.MIN_VALUE);
endValue = context.getConfiguration().getFloat(END_VALUE, Float.MAX_VALUE);
} else {
throw new IOException("Class " + valueClass + " is an unsupported value class");
}
this.startChecker = new ElementChecker(key, Compare.GREATER_THAN_EQUAL, startValue);
this.endChecker = new ElementChecker(key, Compare.LESS_THAN, endValue);
}
@Override
public void map(final NullWritable key, final FaunusVertex value, final Mapper<NullWritable, FaunusVertex, NullWritable, FaunusVertex>.Context context) throws IOException, InterruptedException {
if (this.isVertex) {
if (value.hasPaths() && !(this.startChecker.isLegal(value) && this.endChecker.isLegal(value))) {
value.clearPaths();
DEFAULT_COMPAT.incrementContextCounter(context, Counters.VERTICES_FILTERED, 1L);
}
} else {
long counter = 0;
for (final Edge e : value.getEdges(Direction.BOTH)) {
final StandardFaunusEdge edge = (StandardFaunusEdge) e;
if (edge.hasPaths() && !(this.startChecker.isLegal(edge) && this.endChecker.isLegal(edge))) {
edge.clearPaths();
counter++;
}
}
DEFAULT_COMPAT.incrementContextCounter(context, Counters.EDGES_FILTERED, counter);
}
context.write(NullWritable.get(), value);
}
}
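For reference, the two checkers configured in setup above implement a half-open interval test; a standalone sketch of that predicate (names illustrative, not part of the class):
// keep an element when start <= value < end, mirroring the GREATER_THAN_EQUAL
// start checker and the LESS_THAN end checker
static <T extends Comparable<T>> boolean inInterval(T value, T start, T end) {
return value.compareTo(start) >= 0 && value.compareTo(end) < 0;
}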
| 1no label
|
titan-hadoop-parent_titan-hadoop-core_src_main_java_com_thinkaurelius_titan_hadoop_mapreduce_filter_IntervalFilterMap.java
|
124 |
client.getLifecycleService().addLifecycleListener(new LifecycleListener() {
@Override
public void stateChanged(LifecycleEvent event) {
connectedLatch.countDown();
}
});
| 0true
|
hazelcast-client_src_test_java_com_hazelcast_client_ClientReconnectTest.java
|
30 |
@Service("blLocaleFieldService")
public class LocaleFieldServiceImpl extends AbstractRuleBuilderFieldService {
@Override
public void init() {
fields.add(new FieldData.Builder()
.label("rule_localeName")
.name("friendlyName")
.operators("blcOperators_Text")
.options("[]")
.type(SupportedFieldType.STRING)
.build());
fields.add(new FieldData.Builder()
.label("rule_localeCode")
.name("localeCode")
.operators("blcOperators_Text")
.options("[]")
.type(SupportedFieldType.STRING)
.build());
}
@Override
public String getName() {
return RuleIdentifier.LOCALE;
}
@Override
public String getDtoClassName() {
return "org.broadleafcommerce.common.locale.domain.LocaleImpl";
}
}
| 0true
|
admin_broadleaf-admin-module_src_main_java_org_broadleafcommerce_admin_web_rulebuilder_service_LocaleFieldServiceImpl.java
|
1,336 |
@Service("blSolrIndexService")
public class SolrIndexServiceImpl implements SolrIndexService {
private static final Log LOG = LogFactory.getLog(SolrIndexServiceImpl.class);
@Value("${solr.index.product.pageSize}")
protected int pageSize;
@Resource(name = "blProductDao")
protected ProductDao productDao;
@Resource(name = "blFieldDao")
protected FieldDao fieldDao;
@Resource(name = "blLocaleService")
protected LocaleService localeService;
@Resource(name = "blSolrHelperService")
protected SolrHelperService shs;
@Resource(name = "blSolrSearchServiceExtensionManager")
protected SolrSearchServiceExtensionManager extensionManager;
@Resource(name = "blTransactionManager")
protected PlatformTransactionManager transactionManager;
public static String ATTR_MAP = "productAttributes";
@Override
@SuppressWarnings("rawtypes")
public void rebuildIndex() throws ServiceException, IOException {
LOG.info("Rebuilding the solr index...");
StopWatch s = new StopWatch();
// If we are in single core mode, we have to delete the documents before reindexing
if (SolrContext.isSingleCoreMode()) {
deleteAllDocuments();
}
// Populate the reindex core with the necessary information
BroadleafRequestContext savedContext = BroadleafRequestContext.getBroadleafRequestContext();
HashMap savedPricing = SkuPricingConsiderationContext.getSkuPricingConsiderationContext();
DynamicSkuPricingService savedPricingService = SkuPricingConsiderationContext.getSkuPricingService();
DynamicSkuActiveDatesService savedActiveDateServcie = SkuActiveDateConsiderationContext.getSkuActiveDatesService();
try {
Long numProducts = productDao.readCountAllActiveProducts();
if (LOG.isDebugEnabled()) {
LOG.debug("There are " + numProducts + " total products");
}
int page = 0;
while ((page * pageSize) < numProducts) {
buildIncrementalIndex(page, pageSize);
page++;
}
try {
if (LOG.isDebugEnabled()) {
LOG.debug("Optimizing the index...");
}
SolrContext.getReindexServer().optimize();
} catch (SolrServerException e) {
throw new ServiceException("Could not rebuild index", e);
}
} catch (ServiceException e) {
throw e;
} finally {
// Restore the current context, regardless of whether an exception happened or not
BroadleafRequestContext.setBroadleafRequestContext(savedContext);
SkuPricingConsiderationContext.setSkuPricingConsiderationContext(savedPricing);
SkuPricingConsiderationContext.setSkuPricingService(savedPricingService);
SkuActiveDateConsiderationContext.setSkuActiveDatesService(savedActiveDateServcie);
}
// Swap the active and the reindex cores
shs.swapActiveCores();
// If we are not in single core mode, we delete the documents for the unused core after swapping
if (!SolrContext.isSingleCoreMode()) {
deleteAllDocuments();
}
LOG.info(String.format("Finished building index in %s", s.toLapString()));
}
protected void deleteAllDocuments() throws ServiceException {
try {
String deleteQuery = "*:*";
LOG.debug("Deleting by query: " + deleteQuery);
SolrContext.getReindexServer().deleteByQuery(deleteQuery);
SolrContext.getReindexServer().commit();
} catch (Exception e) {
throw new ServiceException("Could not delete documents", e);
}
}
protected void buildIncrementalIndex(int page, int pageSize) throws ServiceException {
TransactionStatus status = TransactionUtils.createTransaction("readProducts",
TransactionDefinition.PROPAGATION_REQUIRED, transactionManager, true);
if (LOG.isDebugEnabled()) {
LOG.debug(String.format("Building index - page: [%s], pageSize: [%s]", page, pageSize));
}
StopWatch s = new StopWatch();
try {
List<Product> products = readAllActiveProducts(page, pageSize);
List<Field> fields = fieldDao.readAllProductFields();
List<Locale> locales = getAllLocales();
Collection<SolrInputDocument> documents = new ArrayList<SolrInputDocument>();
for (Product product : products) {
documents.add(buildDocument(product, fields, locales));
}
if (LOG.isTraceEnabled()) {
for (SolrInputDocument document : documents) {
LOG.trace(document);
}
}
if (!CollectionUtils.isEmpty(documents)) {
SolrContext.getReindexServer().add(documents);
SolrContext.getReindexServer().commit();
}
TransactionUtils.finalizeTransaction(status, transactionManager, false);
} catch (SolrServerException e) {
TransactionUtils.finalizeTransaction(status, transactionManager, true);
throw new ServiceException("Could not rebuild index", e);
} catch (IOException e) {
TransactionUtils.finalizeTransaction(status, transactionManager, true);
throw new ServiceException("Could not rebuild index", e);
} catch (RuntimeException e) {
TransactionUtils.finalizeTransaction(status, transactionManager, true);
throw e;
}
if (LOG.isDebugEnabled()) {
LOG.debug(String.format("Built index - page: [%s], pageSize: [%s] in [%s]", page, pageSize, s.toLapString()));
}
}
/**
* This method to read all active products will be slow if you have a large catalog. In this case, you will want to
* read the products in a different manner. For example, if you know the fields that will be indexed, you can configure
* a DAO object to only load those fields. You could also use a JDBC-based DAO for even faster access. This default
* implementation is only suitable for small catalogs.
*
* @return the list of all active products to be used by the index building task
*/
protected List<Product> readAllActiveProducts() {
return productDao.readAllActiveProducts();
}
/**
* This method to read active products utilizes paging to improve performance over {@link #readAllActiveProducts()}.
* While not optimal, this will reduce the memory required to load large catalogs.
*
* It could still be improved for specific implementations by only loading fields that will be indexed or by accessing
* the database via direct JDBC (instead of Hibernate).
*
* @return the list of all active products to be used by the index building task
* @since 2.2.0
*/
protected List<Product> readAllActiveProducts(int page, int pageSize) {
return productDao.readAllActiveProducts(page, pageSize);
}
/**
* @return a list of all possible locale prefixes to consider
*/
protected List<Locale> getAllLocales() {
return localeService.findAllLocales();
}
/**
* Given a product, fields that relate to that product, and a list of locales and pricelists, builds a
* SolrInputDocument to be added to the Solr index.
*
* @param product
* @param fields
* @param locales
* @return the document
*/
protected SolrInputDocument buildDocument(Product product, List<Field> fields, List<Locale> locales) {
SolrInputDocument document = new SolrInputDocument();
attachBasicDocumentFields(product, document);
// Keep track of searchable fields added to the index. We need to also add the search facets if
// they weren't already added as a searchable field.
List<String> addedProperties = new ArrayList<String>();
for (Field field : fields) {
try {
// Index the searchable fields
if (field.getSearchable()) {
List<FieldType> searchableFieldTypes = shs.getSearchableFieldTypes(field);
for (FieldType sft : searchableFieldTypes) {
Map<String, Object> propertyValues = getPropertyValues(product, field, sft, locales);
// Build out the field for every prefix
for (Entry<String, Object> entry : propertyValues.entrySet()) {
String prefix = entry.getKey();
prefix = StringUtils.isBlank(prefix) ? prefix : prefix + "_";
String solrPropertyName = shs.getPropertyNameForFieldSearchable(field, sft, prefix);
Object value = entry.getValue();
document.addField(solrPropertyName, value);
addedProperties.add(solrPropertyName);
}
}
}
// Index the faceted field type as well
FieldType facetType = field.getFacetFieldType();
if (facetType != null) {
Map<String, Object> propertyValues = getPropertyValues(product, field, facetType, locales);
// Build out the field for every prefix
for (Entry<String, Object> entry : propertyValues.entrySet()) {
String prefix = entry.getKey();
prefix = StringUtils.isBlank(prefix) ? prefix : prefix + "_";
String solrFacetPropertyName = shs.getPropertyNameForFieldFacet(field, prefix);
Object value = entry.getValue();
if (!addedProperties.contains(solrFacetPropertyName)) {
document.addField(solrFacetPropertyName, value);
}
}
}
} catch (Exception e) {
LOG.trace("Could not get value for property[" + field.getQualifiedFieldName() + "] for product id["
+ product.getId() + "]", e);
}
}
return document;
}
/**
* Adds the ID, category, and explicitCategory fields for the product to the document
*
* @param product
* @param document
*/
protected void attachBasicDocumentFields(Product product, SolrInputDocument document) {
// Add the namespace and ID fields for this product
document.addField(shs.getNamespaceFieldName(), shs.getCurrentNamespace());
document.addField(shs.getIdFieldName(), shs.getSolrDocumentId(document, product));
document.addField(shs.getProductIdFieldName(), product.getId());
extensionManager.getProxy().attachAdditionalBasicFields(product, document, shs);
// The explicit categories are the ones defined by the product itself
for (CategoryProductXref categoryXref : product.getAllParentCategoryXrefs()) {
document.addField(shs.getExplicitCategoryFieldName(), categoryXref.getCategory().getId());
String categorySortFieldName = shs.getCategorySortFieldName(categoryXref.getCategory());
int index = -1;
int count = 0;
for (CategoryProductXref productXref : categoryXref.getCategory().getAllProductXrefs()) {
if (productXref.getProduct().equals(product)) {
index = count;
break;
}
count++;
}
document.addField(categorySortFieldName, index);
}
// This is the entire tree of every category defined on the product
Set<Category> fullCategoryHierarchy = new HashSet<Category>();
for (CategoryProductXref categoryXref : product.getAllParentCategoryXrefs()) {
fullCategoryHierarchy.addAll(categoryXref.getCategory().buildFullCategoryHierarchy(null));
}
for (Category category : fullCategoryHierarchy) {
document.addField(shs.getCategoryFieldName(), category.getId());
}
}
/**
* Returns a map of prefix to value for the requested attributes. For example, if the requested field corresponds to
* a Sku's description and the locales list has the en_US locale and the es_ES locale, the resulting map could be
*
* { "en_US" : "A description",
* "es_ES" : "Una descripcion" }
*
* @param product
* @param field
* @param fieldType
* @param locales
* @return a map of prefix to value for the property
* @throws IllegalAccessException
* @throws InvocationTargetException
* @throws NoSuchMethodException
*/
protected Map<String, Object> getPropertyValues(Product product, Field field, FieldType fieldType,
List<Locale> locales) throws IllegalAccessException, InvocationTargetException, NoSuchMethodException {
String propertyName = field.getPropertyName();
Map<String, Object> values = new HashMap<String, Object>();
if (extensionManager != null) {
ExtensionResultStatusType result = extensionManager.getProxy().addPropertyValues(product, field, fieldType, values, propertyName, locales);
if (ExtensionResultStatusType.NOT_HANDLED.equals(result)) {
final Object propertyValue;
if (propertyName.contains(ATTR_MAP)) {
propertyValue = PropertyUtils.getMappedProperty(product, ATTR_MAP, propertyName.substring(ATTR_MAP.length() + 1));
} else {
propertyValue = PropertyUtils.getProperty(product, propertyName);
}
values.put("", propertyValue);
}
}
return values;
}
/**
* Converts a propertyName into one that references a value inside a map. For example, consider the property
* in Product that references a List<ProductAttribute>, "productAttributes". Also consider the utility method
* in Product called "mappedProductAttributes", which returns a map of the ProductAttributes keyed by the name
* property in the ProductAttribute. Given the parameters "productAttributes.heatRange", "productAttributes",
* "mappedProductAttributes" (which would represent a property called "productAttributes.heatRange" that
* references a specific ProductAttribute inside of a product whose "name" property is equal to "heatRange"),
* this method will convert this property to mappedProductAttributes(heatRange).value, which is then usable
* by the standard beanutils PropertyUtils class to get the value.
*
* @param propertyName
* @param listPropertyName
* @param mapPropertyName
* @return the converted property name
*/
protected String convertToMappedProperty(String propertyName, String listPropertyName, String mapPropertyName) {
String[] splitName = StringUtils.split(propertyName, ".");
StringBuilder convertedProperty = new StringBuilder();
for (int i = 0; i < splitName.length; i++) {
if (convertedProperty.length() > 0) {
convertedProperty.append(".");
}
if (splitName[i].equals(listPropertyName)) {
convertedProperty.append(mapPropertyName).append("(");
convertedProperty.append(splitName[i + 1]).append(").value");
i++;
} else {
convertedProperty.append(splitName[i]);
}
}
return convertedProperty.toString();
}
}
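A hypothetical call illustrating convertToMappedProperty, using the exact inputs from its Javadoc above:
String converted = convertToMappedProperty(
"productAttributes.heatRange", "productAttributes", "mappedProductAttributes");
// converted == "mappedProductAttributes(heatRange).value", which PropertyUtils
// can then resolve against a product instance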
| 1no label
|
core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_search_service_solr_SolrIndexServiceImpl.java
|
387 |
new Thread() {
public void run() {
if (!mm.tryLock(key)) {
tryLockFailed.countDown();
}
}
}.start();
| 0true
|
hazelcast-client_src_test_java_com_hazelcast_client_multimap_ClientMultiMapLockTest.java
|
300 |
@RunWith(HazelcastSerialClassRunner.class)
@Category(QuickTest.class)
public class ClientLockWithTerminationTest {
private HazelcastInstance node1;
private HazelcastInstance node2;
private HazelcastInstance client1;
private HazelcastInstance client2;
private String keyOwnedByNode1;
@Before
public void setup() throws InterruptedException {
node1 = Hazelcast.newHazelcastInstance();
node2 = Hazelcast.newHazelcastInstance();
client1 = HazelcastClient.newHazelcastClient();
client2 = HazelcastClient.newHazelcastClient();
keyOwnedByNode1 = HazelcastTestSupport.generateKeyOwnedBy(node1);
}
@After
public void tearDown() throws IOException {
Hazelcast.shutdownAll();
HazelcastClient.shutdownAll();
}
@Test
public void testLockOnClientCrash() throws InterruptedException {
ILock lock = client1.getLock(keyOwnedByNode1);
lock.lock();
client1.getLifecycleService().terminate();
lock = client2.getLock(keyOwnedByNode1);
boolean lockObtained = lock.tryLock();
assertTrue("Lock was Not Obtained, lock should be released on client crash", lockObtained);
}
@Test
@Category(ProblematicTest.class)
public void testLockOnClient_withNodeCrash() throws InterruptedException {
ILock lock = client1.getLock(keyOwnedByNode1);
lock.lock();
node1.getLifecycleService().terminate();
lock = client2.getLock(keyOwnedByNode1);
boolean lockObtained = lock.tryLock();
assertFalse("Lock was obtained by 2 different clients ", lockObtained);
}
}
| 0true
|
hazelcast-client_src_test_java_com_hazelcast_client_lock_ClientLockWithTerminationTest.java
|
142 |
@Test
public class DoubleSerializerTest {
private static final int FIELD_SIZE = 8;
private static final Double OBJECT = Math.PI;
private ODoubleSerializer doubleSerializer;
byte[] stream = new byte[FIELD_SIZE];
@BeforeClass
public void beforeClass() {
doubleSerializer = new ODoubleSerializer();
}
public void testFieldSize() {
Assert.assertEquals(doubleSerializer.getObjectSize(null), FIELD_SIZE);
}
public void testSerialize() {
doubleSerializer.serialize(OBJECT, stream, 0);
Assert.assertEquals(doubleSerializer.deserialize(stream, 0), OBJECT);
}
public void testSerializeNative() {
doubleSerializer.serializeNative(OBJECT, stream, 0);
Assert.assertEquals(doubleSerializer.deserializeNative(stream, 0), OBJECT);
}
public void testNativeDirectMemoryCompatibility() {
doubleSerializer.serializeNative(OBJECT, stream, 0);
ODirectMemoryPointer pointer = new ODirectMemoryPointer(stream);
try {
Assert.assertEquals(doubleSerializer.deserializeFromDirectMemory(pointer, 0), OBJECT);
} finally {
pointer.free();
}
}
}
| 0true
|
commons_src_test_java_com_orientechnologies_common_serialization_types_DoubleSerializerTest.java
|
396 |
public class ClusterSearchShardsResponse extends ActionResponse implements ToXContent {
private ClusterSearchShardsGroup[] groups;
private DiscoveryNode[] nodes;
ClusterSearchShardsResponse() {
}
public ClusterSearchShardsGroup[] getGroups() {
return groups;
}
public DiscoveryNode[] getNodes() {
return nodes;
}
public ClusterSearchShardsResponse(ClusterSearchShardsGroup[] groups, DiscoveryNode[] nodes) {
this.groups = groups;
this.nodes = nodes;
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
groups = new ClusterSearchShardsGroup[in.readVInt()];
for (int i = 0; i < groups.length; i++) {
groups[i] = ClusterSearchShardsGroup.readSearchShardsGroupResponse(in);
}
nodes = new DiscoveryNode[in.readVInt()];
for (int i = 0; i < nodes.length; i++) {
nodes[i] = DiscoveryNode.readNode(in);
}
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeVInt(groups.length);
for (ClusterSearchShardsGroup response : groups) {
response.writeTo(out);
}
out.writeVInt(nodes.length);
for (DiscoveryNode node : nodes) {
node.writeTo(out);
}
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject("nodes");
for (DiscoveryNode node : nodes) {
builder.startObject(node.getId(), XContentBuilder.FieldCaseConversion.NONE);
builder.field("name", node.name());
builder.field("transport_address", node.getAddress());
if (!node.attributes().isEmpty()) {
builder.startObject("attributes");
for (Map.Entry<String, String> attr : node.attributes().entrySet()) {
builder.field(attr.getKey(), attr.getValue());
}
builder.endObject();
}
builder.endObject();
}
builder.endObject();
builder.startArray("shards");
for (ClusterSearchShardsGroup group : groups) {
group.toXContent(builder, params);
}
builder.endArray();
return builder;
}
}
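A sketch of the wire round-trip implied by readFrom/writeTo; BytesStreamOutput is Elasticsearch's in-memory output stream, and the BytesStreamInput wrapper shown here is an assumption about this era's API:
BytesStreamOutput out = new BytesStreamOutput();
response.writeTo(out);
ClusterSearchShardsResponse copy = new ClusterSearchShardsResponse();
copy.readFrom(new BytesStreamInput(out.bytes())); // assumed input wrapper
// groups and nodes must deserialize in the same order they were written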
| 0true
|
src_main_java_org_elasticsearch_action_admin_cluster_shards_ClusterSearchShardsResponse.java
|
115 |
assertTrueEventually(new AssertTask() {
public void run() throws Exception {
NearCacheStats stats = clientMap.getLocalMapStats().getNearCacheStats();
assertEquals(0, stats.getOwnedEntryCount());
}
});
| 0true
|
hazelcast-client_src_test_java_com_hazelcast_client_ClientNearCacheTest.java
|
110 |
public class OUtils {
/** Null-safe equality: true when both references are null or a.equals(b) holds. */
public static boolean equals(final Object a, final Object b) {
if (a == b)
return true;
if (a != null)
return a.equals(b);
// a is null and b is not at this point, so a well-behaved equals returns false
return b.equals(a);
}
/** Upper-cases the first character of the given text; the rest is unchanged. */
public static String camelCase(final String iText) {
return Character.toUpperCase(iText.charAt(0)) + iText.substring(1);
}
}
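Expected behavior of the two helpers above, shown as assertions (illustrative):
assert OUtils.equals(null, null);
assert !OUtils.equals(null, "x");
assert OUtils.equals("x", "x");
assert "Test".equals(OUtils.camelCase("test")); // only the first character changes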
| 0true
|
commons_src_main_java_com_orientechnologies_common_io_OUtils.java
|
436 |
EventHandler<PortableMessage> handler = new EventHandler<PortableMessage>() {
@Override
public void handle(PortableMessage event) {
SerializationService serializationService = getContext().getSerializationService();
ClientClusterService clusterService = getContext().getClusterService();
E messageObject = serializationService.toObject(event.getMessage());
Member member = clusterService.getMember(event.getUuid());
Message<E> message = new Message<E>(name, messageObject, event.getPublishTime(), member);
listener.onMessage(message);
}
@Override
public void onListenerRegister() {
}
};
| 1no label
|
hazelcast-client_src_main_java_com_hazelcast_client_proxy_ClientTopicProxy.java
|
726 |
public class IndexDeleteResponse extends ActionResponse {
private String index;
private int successfulShards;
private int failedShards;
private ShardDeleteResponse[] deleteResponses;
IndexDeleteResponse(String index, int successfulShards, int failedShards, ShardDeleteResponse[] deleteResponses) {
this.index = index;
this.successfulShards = successfulShards;
this.failedShards = failedShards;
this.deleteResponses = deleteResponses;
}
IndexDeleteResponse() {
}
/**
* The index the delete by query operation was executed against.
*/
public String getIndex() {
return this.index;
}
/**
* The total number of shards the delete by query was executed on.
*/
public int getTotalShards() {
return failedShards + successfulShards;
}
/**
* The successful number of shards the delete by query was executed on.
*/
public int getSuccessfulShards() {
return successfulShards;
}
/**
* The failed number of shards the delete by query was executed on.
*/
public int getFailedShards() {
return failedShards;
}
public ShardDeleteResponse[] getResponses() {
return this.deleteResponses;
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
index = in.readString();
successfulShards = in.readVInt();
failedShards = in.readVInt();
deleteResponses = new ShardDeleteResponse[in.readVInt()];
for (int i = 0; i < deleteResponses.length; i++) {
deleteResponses[i] = new ShardDeleteResponse();
deleteResponses[i].readFrom(in);
}
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeString(index);
out.writeVInt(successfulShards);
out.writeVInt(failedShards);
out.writeVInt(deleteResponses.length);
for (ShardDeleteResponse deleteResponse : deleteResponses) {
deleteResponse.writeTo(out);
}
}
}
| 0true
|
src_main_java_org_elasticsearch_action_delete_index_IndexDeleteResponse.java
|
58 |
public class AssetNotFoundException extends RuntimeException {
private static final long serialVersionUID = -6349160176427682630L;
public AssetNotFoundException() {
//do nothing
}
public AssetNotFoundException(Throwable cause) {
super(cause);
}
public AssetNotFoundException(String message) {
super(message);
}
public AssetNotFoundException(String message, Throwable cause) {
super(message, cause);
}
}
| 0true
|
admin_broadleaf-contentmanagement-module_src_main_java_org_broadleafcommerce_cms_common_AssetNotFoundException.java
|
2,227 |
public class FiltersFunctionScoreQuery extends Query {
public static class FilterFunction {
public final Filter filter;
public final ScoreFunction function;
public FilterFunction(Filter filter, ScoreFunction function) {
this.filter = filter;
this.function = function;
}
@Override
public boolean equals(Object o) {
if (this == o)
return true;
if (o == null || getClass() != o.getClass())
return false;
FilterFunction that = (FilterFunction) o;
if (filter != null ? !filter.equals(that.filter) : that.filter != null)
return false;
if (function != null ? !function.equals(that.function) : that.function != null)
return false;
return true;
}
@Override
public int hashCode() {
int result = filter != null ? filter.hashCode() : 0;
result = 31 * result + (function != null ? function.hashCode() : 0);
return result;
}
}
public static enum ScoreMode {
First, Avg, Max, Sum, Min, Multiply
}
Query subQuery;
final FilterFunction[] filterFunctions;
final ScoreMode scoreMode;
final float maxBoost;
protected CombineFunction combineFunction;
public FiltersFunctionScoreQuery(Query subQuery, ScoreMode scoreMode, FilterFunction[] filterFunctions, float maxBoost) {
this.subQuery = subQuery;
this.scoreMode = scoreMode;
this.filterFunctions = filterFunctions;
this.maxBoost = maxBoost;
combineFunction = CombineFunction.MULT;
}
public FiltersFunctionScoreQuery setCombineFunction(CombineFunction combineFunction) {
this.combineFunction = combineFunction;
return this;
}
public Query getSubQuery() {
return subQuery;
}
public FilterFunction[] getFilterFunctions() {
return filterFunctions;
}
@Override
public Query rewrite(IndexReader reader) throws IOException {
Query newQ = subQuery.rewrite(reader);
if (newQ == subQuery)
return this;
FiltersFunctionScoreQuery bq = (FiltersFunctionScoreQuery) this.clone();
bq.subQuery = newQ;
return bq;
}
@Override
public void extractTerms(Set<Term> terms) {
subQuery.extractTerms(terms);
}
@Override
public Weight createWeight(IndexSearcher searcher) throws IOException {
Weight subQueryWeight = subQuery.createWeight(searcher);
return new CustomBoostFactorWeight(subQueryWeight, filterFunctions.length);
}
class CustomBoostFactorWeight extends Weight {
final Weight subQueryWeight;
final Bits[] docSets;
public CustomBoostFactorWeight(Weight subQueryWeight, int filterFunctionLength) throws IOException {
this.subQueryWeight = subQueryWeight;
this.docSets = new Bits[filterFunctionLength];
}
public Query getQuery() {
return FiltersFunctionScoreQuery.this;
}
@Override
public float getValueForNormalization() throws IOException {
float sum = subQueryWeight.getValueForNormalization();
sum *= getBoost() * getBoost();
return sum;
}
@Override
public void normalize(float norm, float topLevelBoost) {
subQueryWeight.normalize(norm, topLevelBoost * getBoost());
}
@Override
public Scorer scorer(AtomicReaderContext context, boolean scoreDocsInOrder, boolean topScorer, Bits acceptDocs) throws IOException {
// We ignore the scoreDocsInOrder parameter because we need to score in
// order when documents are scored with a script; the ShardLookup depends
// on in-order scoring.
Scorer subQueryScorer = subQueryWeight.scorer(context, true, false, acceptDocs);
if (subQueryScorer == null) {
return null;
}
for (int i = 0; i < filterFunctions.length; i++) {
FilterFunction filterFunction = filterFunctions[i];
filterFunction.function.setNextReader(context);
docSets[i] = DocIdSets.toSafeBits(context.reader(), filterFunction.filter.getDocIdSet(context, acceptDocs));
}
return new CustomBoostFactorScorer(this, subQueryScorer, scoreMode, filterFunctions, maxBoost, docSets, combineFunction);
}
@Override
public Explanation explain(AtomicReaderContext context, int doc) throws IOException {
Explanation subQueryExpl = subQueryWeight.explain(context, doc);
if (!subQueryExpl.isMatch()) {
return subQueryExpl;
}
// First: Gather explanations for all filters
List<ComplexExplanation> filterExplanations = new ArrayList<ComplexExplanation>();
for (FilterFunction filterFunction : filterFunctions) {
Bits docSet = DocIdSets.toSafeBits(context.reader(),
filterFunction.filter.getDocIdSet(context, context.reader().getLiveDocs()));
if (docSet.get(doc)) {
filterFunction.function.setNextReader(context);
Explanation functionExplanation = filterFunction.function.explainScore(doc, subQueryExpl);
double factor = functionExplanation.getValue();
float sc = CombineFunction.toFloat(factor);
ComplexExplanation filterExplanation = new ComplexExplanation(true, sc, "function score, product of:");
filterExplanation.addDetail(new Explanation(1.0f, "match filter: " + filterFunction.filter.toString()));
filterExplanation.addDetail(functionExplanation);
filterExplanations.add(filterExplanation);
}
}
if (filterExplanations.size() == 0) {
float sc = getBoost() * subQueryExpl.getValue();
Explanation res = new ComplexExplanation(true, sc, "function score, no filter match, product of:");
res.addDetail(subQueryExpl);
res.addDetail(new Explanation(getBoost(), "queryBoost"));
return res;
}
// Second: Compute the factor that would have been computed by the
// filters
double factor = 1.0;
switch (scoreMode) {
case First:
factor = filterExplanations.get(0).getValue();
break;
case Max:
factor = Double.NEGATIVE_INFINITY;
for (int i = 0; i < filterExplanations.size(); i++) {
factor = Math.max(filterExplanations.get(i).getValue(), factor);
}
break;
case Min:
factor = Double.POSITIVE_INFINITY;
for (int i = 0; i < filterExplanations.size(); i++) {
factor = Math.min(filterExplanations.get(i).getValue(), factor);
}
break;
case Multiply:
for (int i = 0; i < filterExplanations.size(); i++) {
factor *= filterExplanations.get(i).getValue();
}
break;
default: // Sum / Avg
double totalFactor = 0.0f;
int count = 0;
for (int i = 0; i < filterExplanations.size(); i++) {
totalFactor += filterExplanations.get(i).getValue();
count++;
}
if (count != 0) {
factor = totalFactor;
if (scoreMode == ScoreMode.Avg) {
factor /= count;
}
}
}
ComplexExplanation factorExplanation = new ComplexExplanation(true, CombineFunction.toFloat(factor),
"function score, score mode [" + scoreMode.toString().toLowerCase(Locale.ROOT) + "]");
for (int i = 0; i < filterExplanations.size(); i++) {
factorExplanation.addDetail(filterExplanations.get(i));
}
return combineFunction.explain(getBoost(), subQueryExpl, factorExplanation, maxBoost);
}
}
static class CustomBoostFactorScorer extends Scorer {
private final float subQueryBoost;
private final Scorer scorer;
private final FilterFunction[] filterFunctions;
private final ScoreMode scoreMode;
private final float maxBoost;
private final Bits[] docSets;
private final CombineFunction scoreCombiner;
private CustomBoostFactorScorer(CustomBoostFactorWeight w, Scorer scorer, ScoreMode scoreMode, FilterFunction[] filterFunctions,
float maxBoost, Bits[] docSets, CombineFunction scoreCombiner) throws IOException {
super(w);
this.subQueryBoost = w.getQuery().getBoost();
this.scorer = scorer;
this.scoreMode = scoreMode;
this.filterFunctions = filterFunctions;
this.maxBoost = maxBoost;
this.docSets = docSets;
this.scoreCombiner = scoreCombiner;
}
@Override
public int docID() {
return scorer.docID();
}
@Override
public int advance(int target) throws IOException {
return scorer.advance(target);
}
@Override
public int nextDoc() throws IOException {
return scorer.nextDoc();
}
@Override
public float score() throws IOException {
int docId = scorer.docID();
double factor = 1.0f;
float subQueryScore = scorer.score();
if (scoreMode == ScoreMode.First) {
for (int i = 0; i < filterFunctions.length; i++) {
if (docSets[i].get(docId)) {
factor = filterFunctions[i].function.score(docId, subQueryScore);
break;
}
}
} else if (scoreMode == ScoreMode.Max) {
double maxFactor = Double.NEGATIVE_INFINITY;
for (int i = 0; i < filterFunctions.length; i++) {
if (docSets[i].get(docId)) {
maxFactor = Math.max(filterFunctions[i].function.score(docId, subQueryScore), maxFactor);
}
}
if (maxFactor != Double.NEGATIVE_INFINITY) {
factor = maxFactor;
}
} else if (scoreMode == ScoreMode.Min) {
double minFactor = Double.POSITIVE_INFINITY;
for (int i = 0; i < filterFunctions.length; i++) {
if (docSets[i].get(docId)) {
minFactor = Math.min(filterFunctions[i].function.score(docId, subQueryScore), minFactor);
}
}
if (minFactor != Double.POSITIVE_INFINITY) {
factor = minFactor;
}
} else if (scoreMode == ScoreMode.Multiply) {
for (int i = 0; i < filterFunctions.length; i++) {
if (docSets[i].get(docId)) {
factor *= filterFunctions[i].function.score(docId, subQueryScore);
}
}
} else { // Sum / Avg
double totalFactor = 0.0f;
int count = 0;
for (int i = 0; i < filterFunctions.length; i++) {
if (docSets[i].get(docId)) {
totalFactor += filterFunctions[i].function.score(docId, subQueryScore);
count++;
}
}
if (count != 0) {
factor = totalFactor;
if (scoreMode == ScoreMode.Avg) {
factor /= count;
}
}
}
return scoreCombiner.combine(subQueryBoost, subQueryScore, factor, maxBoost);
}
@Override
public int freq() throws IOException {
return scorer.freq();
}
@Override
public long cost() {
return scorer.cost();
}
}
public String toString(String field) {
StringBuilder sb = new StringBuilder();
sb.append("function score (").append(subQuery.toString(field)).append(", functions: [");
for (FilterFunction filterFunction : filterFunctions) {
sb.append("{filter(").append(filterFunction.filter).append("), function [").append(filterFunction.function).append("]}");
}
sb.append("])");
sb.append(ToStringUtils.boost(getBoost()));
return sb.toString();
}
public boolean equals(Object o) {
if (o == null || getClass() != o.getClass())
return false;
FiltersFunctionScoreQuery other = (FiltersFunctionScoreQuery) o;
if (this.getBoost() != other.getBoost())
return false;
if (!this.subQuery.equals(other.subQuery)) {
return false;
}
return Arrays.equals(this.filterFunctions, other.filterFunctions);
}
public int hashCode() {
return subQuery.hashCode() + 31 * Arrays.hashCode(filterFunctions) ^ Float.floatToIntBits(getBoost());
}
}
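A worked example of the score-mode arithmetic in CustomBoostFactorScorer above, for one document matched by two filter functions (factors illustrative):
double f1 = 0.5, f2 = 2.0; // scores of the two matching functions
// Multiply: 0.5 * 2.0 = 1.0; Sum: 0.5 + 2.0 = 2.5; Avg: 2.5 / 2 = 1.25;
// Max: 2.0; Min: 0.5; First: 0.5 (the first matching filter wins).
double avgFactor = (f1 + f2) / 2; // 1.25, then handed to scoreCombiner.combine(...)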
| 1no label
|
src_main_java_org_elasticsearch_common_lucene_search_function_FiltersFunctionScoreQuery.java
|
933 |
public class OfferDeliveryType implements Serializable, BroadleafEnumerationType, Comparable<OfferDeliveryType> {
private static final long serialVersionUID = 1L;
private static final Map<String, OfferDeliveryType> TYPES = new LinkedHashMap<String, OfferDeliveryType>();
public static final OfferDeliveryType AUTOMATIC = new OfferDeliveryType("AUTOMATIC", "Automatically", 1000);
public static final OfferDeliveryType CODE = new OfferDeliveryType("CODE", "Using Shared Code", 2000);
public static final OfferDeliveryType MANUAL = new OfferDeliveryType("MANUAL", "Via Application or Shared Code", 3000);
public static OfferDeliveryType getInstance(final String type) {
return TYPES.get(type);
}
private String type;
private String friendlyType;
private int order;
public OfferDeliveryType() {
//do nothing
}
public OfferDeliveryType(final String type, final String friendlyType, int order) {
this.friendlyType = friendlyType;
setType(type);
setOrder(order);
}
public void setType(final String type) {
this.type = type;
if (!TYPES.containsKey(type)) {
TYPES.put(type, this);
}
}
public String getType() {
return type;
}
public String getFriendlyType() {
return friendlyType;
}
public int getOrder() {
return order;
}
public void setOrder(int order) {
this.order = order;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((type == null) ? 0 : type.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
OfferDeliveryType other = (OfferDeliveryType) obj;
if (type == null) {
if (other.type != null)
return false;
} else if (!type.equals(other.type))
return false;
return true;
}
@Override
public int compareTo(OfferDeliveryType arg0) {
return this.order - arg0.order;
}
}
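Because setType registers each instance in the TYPES map, string lookup resolves to the shared constants; for example:
OfferDeliveryType type = OfferDeliveryType.getInstance("CODE");
// type == OfferDeliveryType.CODE, with friendly type "Using Shared Code"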
| 1no label
|
core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_offer_service_type_OfferDeliveryType.java
|
6 |
Collections.sort(abbreviationsForPhrase, new Comparator<String>() {
@Override
public int compare(String o1, String o2) {
return o1.length() - o2.length();
}
});
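The comparator above orders abbreviations shortest-first; a quick illustration with made-up values:
Comparator<String> byLength = new Comparator<String>() {
@Override
public int compare(String o1, String o2) {
return o1.length() - o2.length();
}
};
List<String> abbrs = new ArrayList<String>(Arrays.asList("Tbl", "T", "Table"));
Collections.sort(abbrs, byLength); // ["T", "Tbl", "Table"]; stable for equal lengths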
| 0true
|
tableViews_src_main_java_gov_nasa_arc_mct_abbreviation_impl_AbbreviationsManager.java
|
746 |
public class TransportExplainAction extends TransportShardSingleOperationAction<ExplainRequest, ExplainResponse> {
private final IndicesService indicesService;
private final ScriptService scriptService;
private final CacheRecycler cacheRecycler;
private final PageCacheRecycler pageCacheRecycler;
@Inject
public TransportExplainAction(Settings settings, ThreadPool threadPool, ClusterService clusterService,
TransportService transportService, IndicesService indicesService,
ScriptService scriptService, CacheRecycler cacheRecycler, PageCacheRecycler pageCacheRecycler) {
super(settings, threadPool, clusterService, transportService);
this.indicesService = indicesService;
this.scriptService = scriptService;
this.cacheRecycler = cacheRecycler;
this.pageCacheRecycler = pageCacheRecycler;
}
@Override
protected void doExecute(ExplainRequest request, ActionListener<ExplainResponse> listener) {
request.nowInMillis = System.currentTimeMillis();
super.doExecute(request, listener);
}
protected String transportAction() {
return ExplainAction.NAME;
}
protected String executor() {
return ThreadPool.Names.GET; // Or use Names.SEARCH?
}
@Override
protected void resolveRequest(ClusterState state, ExplainRequest request) {
String concreteIndex = state.metaData().concreteIndex(request.index());
request.filteringAlias(state.metaData().filteringAliases(concreteIndex, request.index()));
request.index(state.metaData().concreteIndex(request.index()));
// Fail fast on the node that received the request.
if (request.routing() == null && state.getMetaData().routingRequired(request.index(), request.type())) {
throw new RoutingMissingException(request.index(), request.type(), request.id());
}
}
protected ExplainResponse shardOperation(ExplainRequest request, int shardId) throws ElasticsearchException {
IndexService indexService = indicesService.indexService(request.index());
IndexShard indexShard = indexService.shardSafe(shardId);
Term uidTerm = new Term(UidFieldMapper.NAME, Uid.createUidAsBytes(request.type(), request.id()));
Engine.GetResult result = indexShard.get(new Engine.Get(false, uidTerm));
if (!result.exists()) {
return new ExplainResponse(false);
}
SearchContext context = new DefaultSearchContext(
0,
new ShardSearchRequest().types(new String[]{request.type()})
.filteringAliases(request.filteringAlias())
.nowInMillis(request.nowInMillis),
null, result.searcher(), indexService, indexShard,
scriptService, cacheRecycler, pageCacheRecycler
);
SearchContext.setCurrent(context);
try {
context.parsedQuery(indexService.queryParserService().parseQuery(request.source()));
context.preProcess();
int topLevelDocId = result.docIdAndVersion().docId + result.docIdAndVersion().context.docBase;
Explanation explanation = context.searcher().explain(context.query(), topLevelDocId);
for (RescoreSearchContext ctx : context.rescore()) {
Rescorer rescorer = ctx.rescorer();
explanation = rescorer.explain(topLevelDocId, context, ctx, explanation);
}
if (request.fields() != null || (request.fetchSourceContext() != null && request.fetchSourceContext().fetchSource())) {
// The advantage is that we're not opening a second searcher to retrieve the _source.
// Also, because we are working with the same searcher from engineGetResult, we can
// be sure that the doc isn't deleted between the initial get and this call.
GetResult getResult = indexShard.getService().get(result, request.id(), request.type(), request.fields(), request.fetchSourceContext());
return new ExplainResponse(true, explanation, getResult);
} else {
return new ExplainResponse(true, explanation);
}
} catch (IOException e) {
throw new ElasticsearchException("Could not explain", e);
} finally {
context.release();
SearchContext.removeCurrent();
}
}
protected ExplainRequest newRequest() {
return new ExplainRequest();
}
protected ExplainResponse newResponse() {
return new ExplainResponse();
}
protected ClusterBlockException checkGlobalBlock(ClusterState state, ExplainRequest request) {
return state.blocks().globalBlockedException(ClusterBlockLevel.READ);
}
protected ClusterBlockException checkRequestBlock(ClusterState state, ExplainRequest request) {
return state.blocks().indexBlockedException(ClusterBlockLevel.READ, request.index());
}
protected ShardIterator shards(ClusterState state, ExplainRequest request) throws ElasticsearchException {
return clusterService.operationRouting().getShards(
clusterService.state(), request.index(), request.type(), request.id(), request.routing(), request.preference()
);
}
}
| 1no label
|
src_main_java_org_elasticsearch_action_explain_TransportExplainAction.java
|
480 |
final EventHandler<PortableDistributedObjectEvent> eventHandler = new EventHandler<PortableDistributedObjectEvent>() {
public void handle(PortableDistributedObjectEvent e) {
final ObjectNamespace ns = new DefaultObjectNamespace(e.getServiceName(), e.getName());
ClientProxyFuture future = proxies.get(ns);
ClientProxy proxy = future == null ? null : future.get();
if (proxy == null) {
proxy = getProxy(e.getServiceName(), e.getName());
}
DistributedObjectEvent event = new DistributedObjectEvent(e.getEventType(), e.getServiceName(), proxy);
if (DistributedObjectEvent.EventType.CREATED.equals(e.getEventType())) {
listener.distributedObjectCreated(event);
} else if (DistributedObjectEvent.EventType.DESTROYED.equals(e.getEventType())) {
listener.distributedObjectDestroyed(event);
}
}
@Override
public void onListenerRegister() {
}
};
| 1no label
|
hazelcast-client_src_main_java_com_hazelcast_client_spi_ProxyManager.java
|
1,575 |
public class AdornedTargetList implements PersistencePerspectiveItem {
private static final long serialVersionUID = 1L;
private String collectionFieldName;
private String linkedObjectPath;
private String targetObjectPath;
private String adornedTargetEntityClassname;
private String adornedTargetEntityPolymorphicType;
private String sortField;
private Boolean sortAscending;
private String linkedIdProperty;
private String targetIdProperty;
private Boolean inverse = Boolean.FALSE;
private String joinEntityClass;
private Boolean mutable = true;
public AdornedTargetList() {
//do nothing
}
public AdornedTargetList(String collectionFieldName, String linkedObjectPath, String linkedIdProperty, String targetObjectPath, String targetIdProperty, String adornedTargetEntityClassname) {
this(collectionFieldName, linkedObjectPath, linkedIdProperty, targetObjectPath, targetIdProperty, adornedTargetEntityClassname, null, null);
}
public AdornedTargetList(String collectionFieldName, String linkedObjectPath, String linkedIdProperty, String targetObjectPath, String targetIdProperty, String adornedTargetEntityClassname, String adornedTargetEntityPolymorphicType) {
this(collectionFieldName, linkedObjectPath, linkedIdProperty, targetObjectPath, targetIdProperty, adornedTargetEntityClassname, adornedTargetEntityPolymorphicType, null, null);
}
public AdornedTargetList(String collectionFieldName, String linkedObjectPath, String linkedIdProperty, String targetObjectPath, String targetIdProperty, String adornedTargetEntityClassname, String sortField, Boolean sortAscending) {
this(collectionFieldName, linkedObjectPath, linkedIdProperty, targetObjectPath, targetIdProperty, adornedTargetEntityClassname, null, sortField, sortAscending);
}
public AdornedTargetList(String collectionFieldName, String linkedObjectPath, String linkedIdProperty, String targetObjectPath, String targetIdProperty, String adornedTargetEntityClassname, String adornedTargetEntityPolymorphicType, String sortField, Boolean sortAscending) {
this.collectionFieldName = collectionFieldName;
this.linkedObjectPath = linkedObjectPath;
this.targetObjectPath = targetObjectPath;
this.adornedTargetEntityClassname = adornedTargetEntityClassname;
this.adornedTargetEntityPolymorphicType = adornedTargetEntityPolymorphicType;
this.sortField = sortField;
this.sortAscending = sortAscending;
this.linkedIdProperty = linkedIdProperty;
this.targetIdProperty = targetIdProperty;
}
public String getCollectionFieldName() {
return collectionFieldName;
}
public void setCollectionFieldName(String manyToField) {
this.collectionFieldName = manyToField;
}
public String getLinkedObjectPath() {
return linkedObjectPath;
}
public void setLinkedObjectPath(String linkedPropertyPath) {
this.linkedObjectPath = linkedPropertyPath;
}
public String getTargetObjectPath() {
return targetObjectPath;
}
public void setTargetObjectPath(String targetObjectPath) {
this.targetObjectPath = targetObjectPath;
}
public String getAdornedTargetEntityClassname() {
return adornedTargetEntityClassname;
}
public void setAdornedTargetEntityClassname(String adornedTargetEntityClassname) {
this.adornedTargetEntityClassname = adornedTargetEntityClassname;
}
public String getSortField() {
return sortField;
}
public void setSortField(String sortField) {
this.sortField = sortField;
}
public Boolean getSortAscending() {
return sortAscending;
}
public void setSortAscending(Boolean sortAscending) {
this.sortAscending = sortAscending;
}
public String getLinkedIdProperty() {
return linkedIdProperty;
}
public void setLinkedIdProperty(String linkedIdProperty) {
this.linkedIdProperty = linkedIdProperty;
}
public String getTargetIdProperty() {
return targetIdProperty;
}
public void setTargetIdProperty(String targetIdProperty) {
this.targetIdProperty = targetIdProperty;
}
public Boolean getInverse() {
return inverse;
}
public void setInverse(Boolean inverse) {
this.inverse = inverse;
}
public void accept(PersistencePerspectiveItemVisitor visitor) {
visitor.visit(this);
}
public String getAdornedTargetEntityPolymorphicType() {
return adornedTargetEntityPolymorphicType;
}
public void setAdornedTargetEntityPolymorphicType(String adornedTargetEntityPolymorphicType) {
this.adornedTargetEntityPolymorphicType = adornedTargetEntityPolymorphicType;
}
public String getJoinEntityClass() {
return joinEntityClass;
}
public void setJoinEntityClass(String joinEntityClass) {
this.joinEntityClass = joinEntityClass;
}
public Boolean getMutable() {
return mutable;
}
public void setMutable(Boolean mutable) {
this.mutable = mutable;
}
@Override
public PersistencePerspectiveItem clonePersistencePerspectiveItem() {
AdornedTargetList adornedTargetList = new AdornedTargetList();
adornedTargetList.collectionFieldName = collectionFieldName;
adornedTargetList.linkedObjectPath = linkedObjectPath;
adornedTargetList.targetObjectPath = targetObjectPath;
adornedTargetList.adornedTargetEntityClassname = adornedTargetEntityClassname;
adornedTargetList.adornedTargetEntityPolymorphicType = adornedTargetEntityPolymorphicType;
adornedTargetList.sortField = sortField;
adornedTargetList.sortAscending = sortAscending;
adornedTargetList.linkedIdProperty = linkedIdProperty;
adornedTargetList.targetIdProperty = targetIdProperty;
adornedTargetList.inverse = inverse;
adornedTargetList.joinEntityClass = joinEntityClass;
adornedTargetList.mutable = mutable;
return adornedTargetList;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (!(o instanceof AdornedTargetList)) return false;
AdornedTargetList that = (AdornedTargetList) o;
if (adornedTargetEntityClassname != null ? !adornedTargetEntityClassname.equals(that.adornedTargetEntityClassname) : that.adornedTargetEntityClassname != null)
return false;
if (adornedTargetEntityPolymorphicType != null ? !adornedTargetEntityPolymorphicType.equals(that.adornedTargetEntityPolymorphicType) : that.adornedTargetEntityPolymorphicType != null)
return false;
if (collectionFieldName != null ? !collectionFieldName.equals(that.collectionFieldName) : that.collectionFieldName != null)
return false;
if (inverse != null ? !inverse.equals(that.inverse) : that.inverse != null) return false;
if (linkedIdProperty != null ? !linkedIdProperty.equals(that.linkedIdProperty) : that.linkedIdProperty != null)
return false;
if (linkedObjectPath != null ? !linkedObjectPath.equals(that.linkedObjectPath) : that.linkedObjectPath != null)
return false;
if (sortAscending != null ? !sortAscending.equals(that.sortAscending) : that.sortAscending != null)
return false;
if (sortField != null ? !sortField.equals(that.sortField) : that.sortField != null) return false;
if (targetIdProperty != null ? !targetIdProperty.equals(that.targetIdProperty) : that.targetIdProperty != null)
return false;
if (targetObjectPath != null ? !targetObjectPath.equals(that.targetObjectPath) : that.targetObjectPath != null)
return false;
if (joinEntityClass != null ? !joinEntityClass.equals(that.joinEntityClass) : that.joinEntityClass != null)
return false;
if (mutable != null ? !mutable.equals(that.mutable) : that.mutable != null)
return false;
return true;
}
@Override
public int hashCode() {
int result = collectionFieldName != null ? collectionFieldName.hashCode() : 0;
result = 31 * result + (linkedObjectPath != null ? linkedObjectPath.hashCode() : 0);
result = 31 * result + (targetObjectPath != null ? targetObjectPath.hashCode() : 0);
result = 31 * result + (adornedTargetEntityClassname != null ? adornedTargetEntityClassname.hashCode() : 0);
result = 31 * result + (adornedTargetEntityPolymorphicType != null ? adornedTargetEntityPolymorphicType.hashCode() : 0);
result = 31 * result + (sortField != null ? sortField.hashCode() : 0);
result = 31 * result + (sortAscending != null ? sortAscending.hashCode() : 0);
result = 31 * result + (linkedIdProperty != null ? linkedIdProperty.hashCode() : 0);
result = 31 * result + (targetIdProperty != null ? targetIdProperty.hashCode() : 0);
result = 31 * result + (inverse != null ? inverse.hashCode() : 0);
result = 31 * result + (joinEntityClass != null ? joinEntityClass.hashCode() : 0);
result = 31 * result + (mutable != null ? mutable.hashCode() : 0);
return result;
}
}
| 1no label
|
admin_broadleaf-open-admin-platform_src_main_java_org_broadleafcommerce_openadmin_dto_AdornedTargetList.java
|
433 |
static final class Fields {
static final XContentBuilderString COUNT = new XContentBuilderString("count");
}
| 0true
|
src_main_java_org_elasticsearch_action_admin_cluster_stats_ClusterStatsIndices.java
|
559 |
public class TypedQueryBuilder<T> {
protected Class<T> rootClass;
protected String rootAlias;
protected List<TQRestriction> restrictions = new ArrayList<TQRestriction>();
protected Map<String, Object> paramMap = new HashMap<String, Object>();
/**
* Creates a new TypedQueryBuilder that will use rootAlias as the identification variable for rootClass
*
* @param rootClass
* @param rootAlias
*/
public TypedQueryBuilder(Class<T> rootClass, String rootAlias) {
this.rootClass = rootClass;
this.rootAlias = rootAlias;
}
/**
* Adds a simple restriction to the query. Note that all restrictions present on the TypedQueryBuilder will be joined
* with an AND clause.
*
* @param expression
* @param operation
* @param parameter
* @return this builder for chaining
*/
public TypedQueryBuilder<T> addRestriction(String expression, String operation, Object parameter) {
restrictions.add(new TQRestriction(expression, operation, parameter));
return this;
}
/**
* Adds an explicit TQRestriction object. Note that all restrictions present on the TypedQueryBuilder will be joined
* with an AND clause.
*
* @param restriction
* @return this builder for chaining
*/
public TypedQueryBuilder<T> addRestriction(TQRestriction restriction) {
restrictions.add(restriction);
return this;
}
/**
* Generates the query string based on the current contents of this builder. As the string is generated, this method
* will also populate the paramMap, which binds actual restriction values.
*
* Note that this method should typically not be invoked through DAOs. Instead, utilize {@link #toQuery(EntityManager)},
* which will automatically generate the TypedQuery and populate the required parameters.
*
* @return the QL string
*/
public String toQueryString() {
return toQueryString(false);
}
/**
* Generates the query string based on the current contents of this builder. As the string is generated, this method
* will also populate the paramMap, which binds actual restriction values.
*
* Note that this method should typically not be invoked through DAOs. Instead, utilize {@link #toQuery(EntityManager)},
* which will automatically generate the TypedQuery and populate the required parameters.
*
* If you are using this as a COUNT query, you should look at the corresponding {@link #toCountQuery(EntityManager)}
*
* @param count whether the resulting query string should be used as a count query
* @return the QL string
*/
public String toQueryString(boolean count) {
StringBuilder sb = getSelectClause(new StringBuilder(), count)
.append(" FROM ").append(rootClass.getName()).append(" ").append(rootAlias);
if (CollectionUtils.isNotEmpty(restrictions)) {
sb.append(" WHERE ");
for (int i = 0; i < restrictions.size(); i++) {
TQRestriction r = restrictions.get(i);
sb.append(r.toQl("p" + i, paramMap));
if (i != restrictions.size() - 1) {
sb.append(" AND ");
}
}
}
return sb.toString();
}
/**
* Adds the select query from {@link #toQueryString()}
*
* @return <b>sb</b> with the select query appended to it
*/
protected StringBuilder getSelectClause(StringBuilder sb, boolean count) {
sb.append("SELECT ");
if (count) {
return sb.append("COUNT(*)");
} else {
return sb.append(rootAlias);
}
}
/**
* Returns a TypedQuery that represents this builder object. It will already have all of the appropriate parameter
* values set and is able to be immediately queried against.
*
* @param em
* @return the TypedQuery
*/
public TypedQuery<T> toQuery(EntityManager em) {
TypedQuery<T> q = em.createQuery(toQueryString(), rootClass);
fillParameterMap(q);
return q;
}
public TypedQuery<Long> toCountQuery(EntityManager em) {
TypedQuery<Long> q = em.createQuery(toQueryString(true), Long.class);
fillParameterMap(q);
return q;
}
protected void fillParameterMap(TypedQuery<?> q) {
for (Entry<String, Object> entry : paramMap.entrySet()) {
if (entry.getValue() != null) {
q.setParameter(entry.getKey(), entry.getValue());
}
}
}
/**
* @return the paramMap
*/
public Map<String, Object> getParamMap() {
return paramMap;
}
}
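A usage sketch of the builder above; Product here is a hypothetical entity, and the rendered WHERE clause assumes TQRestriction.toQl (not shown) emits `expression operation :paramName`:
TypedQueryBuilder<Product> builder = new TypedQueryBuilder<Product>(Product.class, "p")
.addRestriction("p.name", "=", "hot sauce");
String ql = builder.toQueryString();
// e.g. "SELECT p FROM org.example.Product p WHERE p.name = :p0"
String countQl = builder.toQueryString(true);
// e.g. "SELECT COUNT(*) FROM org.example.Product p WHERE p.name = :p0"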
| 1no label
|
common_src_main_java_org_broadleafcommerce_common_util_dao_TypedQueryBuilder.java
|
637 |
@Component("blStatelessSessionFilter")
public class StatelessSessionFilter extends GenericFilterBean {
@Override
public void doFilter(ServletRequest request, ServletResponse response, FilterChain filterChain) throws IOException, ServletException {
BLCRequestUtils.setOKtoUseSession(new ServletWebRequest((HttpServletRequest) request, (HttpServletResponse) response), Boolean.FALSE);
filterChain.doFilter(request, response);
}
}
| 1no label
|
common_src_main_java_org_broadleafcommerce_common_web_filter_StatelessSessionFilter.java
|
3 |
public class OCompositeKey implements Comparable<OCompositeKey>, Serializable {
private static final long serialVersionUID = 1L;
/**
* List of heterogeneous values that are going to be stored in {@link OMVRBTree}.
*/
private final List<Object> keys;
private final Comparator<Object> comparator;
public OCompositeKey(final List<?> keys) {
this.keys = new ArrayList<Object>(keys.size());
this.comparator = ODefaultComparator.INSTANCE;
for (final Object key : keys)
addKey(key);
}
public OCompositeKey(final Object... keys) {
this.keys = new ArrayList<Object>(keys.length);
this.comparator = ODefaultComparator.INSTANCE;
for (final Object key : keys)
addKey(key);
}
public OCompositeKey() {
this.keys = new ArrayList<Object>();
this.comparator = ODefaultComparator.INSTANCE;
}
/**
* Clears the keys array for reuse of the object
*/
public void reset() {
if (this.keys != null)
this.keys.clear();
}
/**
* @return List of heterogeneous values that are going to be stored in {@link OMVRBTree}.
*/
public List<Object> getKeys() {
return Collections.unmodifiableList(keys);
}
/**
* Add new key value to the list of already registered values.
*
* If the passed-in value is an {@link OCompositeKey} itself, its keys are copied into the current key; the composite key
* object itself is not added.
*
* @param key
* Key to add.
*/
public void addKey(final Object key) {
if (key instanceof OCompositeKey) {
final OCompositeKey compositeKey = (OCompositeKey) key;
for (final Object inKey : compositeKey.keys) {
addKey(inKey);
}
} else {
keys.add(key);
}
}
/**
* Performs partial comparison of two composite keys.
*
* Two objects will be equal if the common subset of their keys is equal. For example if first object contains two keys and second
* contains four keys then only first two keys will be compared.
*
* @param otherKey
* Key to compare.
*
* @return a negative integer, zero, or a positive integer as this object is less than, equal to, or greater than the specified
* object.
*/
public int compareTo(final OCompositeKey otherKey) {
final Iterator<Object> inIter = keys.iterator();
final Iterator<Object> outIter = otherKey.keys.iterator();
while (inIter.hasNext() && outIter.hasNext()) {
final Object inKey = inIter.next();
final Object outKey = outIter.next();
if (outKey instanceof OAlwaysGreaterKey)
return -1;
if (outKey instanceof OAlwaysLessKey)
return 1;
final int result = comparator.compare(inKey, outKey);
if (result != 0)
return result;
}
return 0;
}
/**
* {@inheritDoc }
*/
@Override
public boolean equals(final Object o) {
if (this == o)
return true;
if (o == null || getClass() != o.getClass())
return false;
final OCompositeKey that = (OCompositeKey) o;
return keys.equals(that.keys);
}
/**
* {@inheritDoc }
*/
@Override
public int hashCode() {
return keys.hashCode();
}
/**
* {@inheritDoc }
*/
@Override
public String toString() {
return "OCompositeKey{" + "keys=" + keys + '}';
}
}
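// Usage sketch (illustrative, not part of the original row): compareTo performs a
// partial comparison, so a key that matches the common prefix compares as equal.
class OCompositeKeyUsageExample {
public static void main(String[] args) {
OCompositeKey full = new OCompositeKey("john", 42L);
OCompositeKey prefix = new OCompositeKey("john");
// Only the common subset (the first key) is compared, so this prints 0.
System.out.println(full.compareTo(prefix));
// Nested composite keys are flattened by addKey(), so this also prints 0.
System.out.println(new OCompositeKey(prefix, 42L).compareTo(full));
}
}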
| 0true
|
commons_src_main_java_com_orientechnologies_common_collection_OCompositeKey.java
|
205 |
public class ExistsFieldQueryExtension implements FieldQueryExtension {
public static final String NAME = "_exists_";
@Override
public Query query(QueryParseContext parseContext, String queryText) {
String fieldName = queryText;
Filter filter = null;
MapperService.SmartNameFieldMappers smartNameFieldMappers = parseContext.smartFieldMappers(fieldName);
if (smartNameFieldMappers != null) {
if (smartNameFieldMappers.hasMapper()) {
filter = smartNameFieldMappers.mapper().rangeFilter(null, null, true, true, parseContext);
}
}
if (filter == null) {
filter = new TermRangeFilter(fieldName, null, null, true, true);
}
// we always cache this one, really does not change...
filter = parseContext.cacheFilter(filter, null);
filter = wrapSmartNameFilter(filter, smartNameFieldMappers, parseContext);
return new XConstantScoreQuery(filter);
}
}
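// Usage sketch (illustrative): a query_string query such as
//   { "query_string": { "query": "_exists_:user" } }
// is routed through this extension, producing a cached open-ended range filter
// that matches every document with a value in the "user" field.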
| 1no label
|
src_main_java_org_apache_lucene_queryparser_classic_ExistsFieldQueryExtension.java
|
69 |
public interface StaticAssetDao {
public StaticAsset readStaticAssetById(Long id);
public List<StaticAsset> readAllStaticAssets();
public void delete(StaticAsset asset);
public StaticAsset addOrUpdateStaticAsset(StaticAsset asset, boolean clearLevel1Cache);
public StaticAsset readStaticAssetByFullUrl(String fullUrl, SandBox targetSandBox);
}
| 0true
|
admin_broadleaf-contentmanagement-module_src_main_java_org_broadleafcommerce_cms_file_dao_StaticAssetDao.java
|
595 |
public class PlotConstants {
/*
* Default Plot Properties.
*/
public static final int DEFAULT_NUMBER_OF_SUBPLOTS = 1;
public static final boolean LOCAL_CONTROLS_ENABLED_BY_DEFAULT = true;
public static final YAxisMaximumLocationSetting DEFAULT_Y_AXIS_MAX_LOCATION_SETTING = YAxisMaximumLocationSetting.MAXIMUM_AT_TOP;
public static final NonTimeAxisSubsequentBoundsSetting DEFAULT_NON_TIME_AXIS_MIN_SUBSEQUENT_SETTING = NonTimeAxisSubsequentBoundsSetting.SEMI_FIXED;
public static final NonTimeAxisSubsequentBoundsSetting DEFAULT_NON_TIME_AXIS_MAX_SUBSEQUENT_SETTING = NonTimeAxisSubsequentBoundsSetting.SEMI_FIXED;
public static final int MILLISECONDS_IN_SECOND = 1000;
public static final int MILLISECONDS_IN_MIN = MILLISECONDS_IN_SECOND * 60;
public static final int MILLISECONDS_IN_HOUR = MILLISECONDS_IN_MIN * 60;
public static final int MILLISECONDS_IN_DAY = MILLISECONDS_IN_HOUR * 24;
public static final int DEFAULT_PLOT_SPAN = 30 * 60 * 1000; // 30 minutes in milliseconds
public static final Color ROLL_OVER_PLOT_LINE_COLOR = Color.white;
public static final int DEFAULT_TIME_AXIS_FONT_SIZE = 10;
public static final Font DEFAULT_TIME_AXIS_FONT = new Font("Arial", Font.PLAIN, DEFAULT_TIME_AXIS_FONT_SIZE);
public static final int DEFAULT_PLOTLINE_THICKNESS = 1;
public static final int SELECTED_LINE_THICKNESS = 2;
public static final Color DEFAULT_PLOT_FRAME_BACKGROUND_COLOR = new Color(51, 51, 51);
public static final Color DEFAULT_PLOT_AREA_BACKGROUND_COLOR = Color.black;
public static final int DEFAULT_TIME_AXIS_INTERCEPT = 0;
public static final Color DEFAULT_TIME_AXIS_COLOR = Color.white;
public static final Color DEFAULT_TIME_AXIS_LABEL_COLOR = Color.white;
public static final Color DEFAULT_NON_TIME_AXIS_COLOR= Color.white;
public static final Color DEFAULT_GRID_LINE_COLOR = Color.LIGHT_GRAY;
public static final int DEFAULT_MIN_SAMPLES_FOR_AUTO_SCALE = 0;
public static final double DEFAULT_TIME_AXIS_PADDING = 0.25;
public static final double DEFAULT_TIME_AXIS_PADDING_JUMP_MIN = 0.05;
public static final double DEFAULT_TIME_AXIS_PADDING_JUMP_MAX = 0.25;
public static final double DEFAULT_TIME_AXIS_PADDING_SCRUNCH_MIN = 0.20;
public static final double DEFAULT_TIME_AXIS_PADDING_SCRUNCH_MAX = 0.25;
public static final double DEFAULT_NON_TIME_AXIS_PADDING_MAX = 0.05;
public static final double DEFAULT_NON_TIME_AXIS_PADDING_MIN = 0.05;
public static final double DEFAULT_NON_TIME_AXIS_MIN_VALUE = 0;
public static final double DEFAULT_NON_TIME_AXIS_MAX_VALUE = 1;
public static final long DEFAULT_TIME_AXIS_MIN_VALUE = new GregorianCalendar().getTimeInMillis();
public static final long DEFAULT_TIME_AXIS_MAX_VALUE = DEFAULT_TIME_AXIS_MIN_VALUE + DEFAULT_PLOT_SPAN;
public static final int MAX_NUMBER_OF_DATA_ITEMS_ON_A_PLOT = 30;
public static final int MAX_NUMBER_SUBPLOTS = 10;
public static final PlotLineDrawingFlags DEFAULT_PLOT_LINE_DRAW = new PlotLineDrawingFlags(true, false);
public static final int MAJOR_TICK_MARK_LENGTH = 3;
public static final int MINOR_TICK_MARK_LENGTH = 1;
public static final String GMT = "GMT";
public static final String DEFAULT_TIME_ZONE = GMT;
public static final String DEFAULT_TIME_AXIS_DATA_FORMAT = "DDD/HH:mm:ss"; // add a z to see the time zone.
// Field names for persistence
public static final String TIME_AXIS_SETTING = "PlotTimeAxisSetting";
public static final String X_AXIS_MAXIMUM_LOCATION_SETTING = "PlotXAxisMaximumLocation";
public static final String Y_AXIS_MAXIMUM_LOCATION_SETTING = "PlotYAxisMaximumLocation";
public static final String TIME_AXIS_SUBSEQUENT_SETTING = "PlotTimeAxisSubsequentSetting";
public static final String NON_TIME_AXIS_SUBSEQUENT_MIN_SETTING = "PlotNonTimeAxisSubsequentMinSetting";
public static final String NON_TIME_AXIS_SUBSEQUENT_MAX_SETTING = "PlotNonTimeAxisSubsequentMaxSetting";
public static final String NON_TIME_MAX = "NonTimeMax";
public static final String NON_TIME_MIN = "NonTimeMin";
public static final String TIME_MAX = "TimeMax";
public static final String TIME_MIN = "TimeMin";
public static final String TIME_PADDING = "TimePadding";
public static final String NON_TIME_MIN_PADDING = "NonTimeMinPadding";
public static final String NON_TIME_MAX_PADDING = "NonTimeMaxPadding";
public static final String GROUP_BY_ORDINAL_POSITION = "GroupByOrdinalPosition";
public static final String PIN_TIME_AXIS = "PinTimeAxis";
public static final String DRAW_LINES = "PlotLineDrawLines";
public static final String DRAW_MARKERS = "PlotLineDrawMarkers";
public static final String DRAW_CHARACTERS = "PlotLineDrawCharacters";
public static final String CONNECTION_TYPE = "PlotLineConnectionType";
public static final String COLOR_ASSIGNMENTS = "PlotColorAssignments";
public static final String LINE_SETTINGS = "PlotLineSettings";
// Delay before firing a request for data at a higher resolution on a window.
public final static int RESIZE_TIMER = 200; // in milliseconds.
// Limit button border settings
public static final int ARROW_BUTTON_BORDER_STYLE_TOP = 1;
public static final int ARROW_BUTTON_BORDER_STYLE_LEFT = 0;
public static final int ARROW_BUTTON_BORDER_STYLE_BOTTOM = 0;
public static final int ARROW_BUTTON_BORDER_STYLE_RIGHT = 0;
// The size below which the plot will not go before it starts to truncate the legends.
public static final int MINIMUM_PLOT_WIDTH = 200;
public static final int MINIMUM_PLOT_HEIGHT = 100;
public static final int Y_AXIS_WHEN_NON_TIME_LABEL_WIDTH = 28;
// Legends
public final static Color LEGEND_BACKGROUND_COLOR = DEFAULT_PLOT_FRAME_BACKGROUND_COLOR;
public static final int PLOT_LEGEND_BUFFER = 5;
public static final int PLOT_LEGEND_WIDTH = 120;
public static final int PLOT_MINIMUM_LEGEND_WIDTH = 40;
public static final int PLOT_LEGEND_OFFSET_FROM_LEFT_HAND_SIDE = 0;
public static final String LEGEND_NEWLINE_CHARACTER = "\n";
public static final String LEGEND_ELLIPSES = "...";
public static final int MAXIMUM_LEGEND_TEXT_SIZE = 20; //maximum width of a legend
public static final DecimalFormat DECIMAL_FORMAT = new DecimalFormat("#0.000");
// Sync line
public static final Color TIME_SYNC_LINE_COLOR = Color.orange;
public static final int TIME_SYNC_LINE_WIDTH = 2;
public static final int SYNC_LINE_STYLE = 9; // ChartConstants.LS_DASH_DOT;
public static final int SHIFT_KEY_MASK = InputEvent.SHIFT_MASK;
public static final int ALT_KEY_MASK = InputEvent.ALT_MASK;
public static final int CTL_KEY_MASK = InputEvent.CTRL_MASK;
// Data Cursor
public static final Color DATA_CURSOR_COLOR = new Color(235, 235, 235);//new Color(51, 102, 153);
public static final int SLOPE_LINE_STYLE = 0; // ChartConstants.LS_SOLID;
public static final int SLOPE_LINE_WIDTH = 1;
public static final String SLOPE_UNIT = "/min";
public static final String REGRESSION_LINE = "RegressionLine";
public static final int NUMBER_REGRESSION_POINTS = 20;
public static final int SLOPE_UNIT_DIVIDER_IN_MS = PlotConstants.MILLISECONDS_IN_MIN; // per minute.
public final static float dash1[] = {10.0f};
// Data Compression
// Sets the default value for data compression which can be overridden by the client.
public static final boolean COMPRESSION_ENABLED_BY_DEFAULT = true;
public static final int MAXIMUM_PLOT_DATA_BUFFER_SLIZE_REQUEST_SIZE = 12 * MILLISECONDS_IN_HOUR ;
// Panning and zooming controls
public static final double PANNING_NON_TIME_AXIS_PERCENTAGE = 25;
public static final double PANNING_TIME_AXIS_PERCENTAGE = 25;
public static final double ZOOMING_NON_TIME_AXIS_PERCENTAGE = 10;
public static final double ZOOMING_TIME_AXIS_PERCENTAGE = 10;
public static final int zoomingTimeAxisIncrementInMilliseconds = 30 * MILLISECONDS_IN_SECOND;
public static final int zoomingNonTimeAxisIncrement = 10;
public static final int LOCAL_CONTROL_HEIGHT = 25;
public static final int LOCAL_CONTROL_WIDTH = 28;
/**
* Orientation of the time axis.
*/
public enum AxisOrientationSetting {
X_AXIS_AS_TIME, Y_AXIS_AS_TIME
}
public enum AxisBounds {
MAX, MIN
}
public enum XAxisMaximumLocationSetting {
MAXIMUM_AT_RIGHT, MAXIMUM_AT_LEFT
}
public enum YAxisMaximumLocationSetting {
MAXIMUM_AT_TOP, MAXIMUM_AT_BOTTOM
}
/**
* Subsequent modes on the time axis.
*/
public enum TimeAxisSubsequentBoundsSetting {
JUMP, SCRUNCH
}
/**
* Subsequent modes on the non-time axis
*/
public enum NonTimeAxisSubsequentBoundsSetting {
AUTO, FIXED, SEMI_FIXED
}
/**
* State that limit alarms can be in.
*/
public enum LimitAlarmState{
NO_ALARM, ALARM_RAISED, ALARM_OPENED_BY_USER, ALARM_CLOSED_BY_USER
}
/**
* Panning actions
*/
public enum PanDirection {
PAN_LOWER_X_AXIS, PAN_HIGHER_X_AXIS, PAN_LOWER_Y_AXIS, PAN_HIGHER_Y_AXIS;
}
/**
* Zoom actions
*/
public enum ZoomDirection {
ZOOM_IN_HIGH_Y_AXIS, ZOOM_OUT_HIGH_Y_AXIS,
ZOOM_IN_CENTER_Y_AXIS, ZOOM_OUT_CENTER_Y_AXIS,
ZOOM_IN_LOW_Y_AXIS, ZOOM_OUT_LOW_Y_AXIS,
ZOOM_IN_LEFT_X_AXIS, ZOOM_OUT_LEFT_X_AXIS,
ZOOM_IN_CENTER_X_AXIS, ZOOM_OUT_CENTER_X_AXIS,
ZOOM_IN_RIGHT_X_AXIS, ZOOM_OUT_RIGHT_X_AXIS;
}
public enum AxisType {
TIME_IN_JUMP_MODE (DEFAULT_TIME_AXIS_PADDING_JUMP_MIN,
DEFAULT_TIME_AXIS_PADDING_JUMP_MAX),
TIME_IN_SCRUNCH_MODE (DEFAULT_TIME_AXIS_PADDING_SCRUNCH_MIN,
DEFAULT_TIME_AXIS_PADDING_SCRUNCH_MAX),
NON_TIME (DEFAULT_NON_TIME_AXIS_PADDING_MIN,
DEFAULT_NON_TIME_AXIS_PADDING_MAX);
private final double minimumDefaultPadding;
private final double maximumDefaultPadding;
AxisType(double minPadding, double maxPadding) {
this.minimumDefaultPadding = minPadding;
this.maximumDefaultPadding = maxPadding;
}
public double getMinimumDefaultPadding() {
return minimumDefaultPadding;
}
public String getMinimumDefaultPaddingAsText() {
String percentString = NumberFormat.getPercentInstance().format(this.minimumDefaultPadding);
return percentString.substring(0, percentString.length()-1);
}
public double getMaximumDefaultPadding() {
return maximumDefaultPadding;
}
public String getMaximumDefaultPaddingAsText() {
String percentString = NumberFormat.getPercentInstance().format(this.maximumDefaultPadding);
return percentString.substring(0, percentString.length()-1);
}
}
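// Example (illustrative, assuming an English locale): for NON_TIME the minimum
// default padding is 0.05, which getMinimumDefaultPaddingAsText() formats as
// "5%" and then truncates to "5".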
/**
* DISPLAY_ONLY optimizes the plot buffering for displaying multiple plots with the minimum buffer wait.
* Switching to USER_INTERACTION mode deepens and widens the plot buffer to support user interactions such
* as panning and zooming.
*/
public enum PlotDisplayState {
DISPLAY_ONLY, USER_INTERACTION;
}
/**
* Indicates whether we will be drawing plot lines, point markers, or both.
*/
public static class PlotLineDrawingFlags {
private boolean line, markers;
public PlotLineDrawingFlags(boolean line, boolean markers) {
this.line = line;
this.markers = markers;
}
public boolean drawLine() {
return line;
}
public boolean drawMarkers() {
return markers;
}
}
/**
* Indicates how to connect plot point with lines.
*/
public enum PlotLineConnectionType {
DIRECT, STEP_X_THEN_Y
}
/**
* Params for Labeling Algorithm
*/
/**
* The regular expression defining the delimiter pattern between words.
* Words are delimited by a sequence of one or more spaces or underscores.
*/
public static final String WORD_DELIMITERS = "[ _]+";
/**
* The compiled regular expression defining the delimiter pattern between
* words.
*/
public static final Pattern WORD_DELIMITER_PATTERN = Pattern.compile(WORD_DELIMITERS);
/**
* The separator to use when concatenating words together to form labels.
*/
public static final String WORD_SEPARATOR = " ";
/**
* The maximum thickness for a plot line's stroke
*/
public static final int MAX_LINE_THICKNESS = 5;
}
| 1no label
|
fastPlotViews_src_main_java_gov_nasa_arc_mct_fastplot_bridge_PlotConstants.java
|
98 |
@Repository("blPageDao")
public class PageDaoImpl implements PageDao {
private static SandBox DUMMY_SANDBOX = new SandBoxImpl();
static {
DUMMY_SANDBOX.setId(-1L);
}
@PersistenceContext(unitName = "blPU")
protected EntityManager em;
@Resource(name="blEntityConfiguration")
protected EntityConfiguration entityConfiguration;
@Override
public Page readPageById(Long id) {
return em.find(PageImpl.class, id);
}
@Override
public PageTemplate readPageTemplateById(Long id) {
return em.find(PageTemplateImpl.class, id);
}
@Override
public PageTemplate savePageTemplate(PageTemplate template) {
return em.merge(template);
}
@Override
public Map<String, PageField> readPageFieldsByPage(Page page) {
Query query = em.createNamedQuery("BC_READ_PAGE_FIELDS_BY_PAGE_ID");
query.setParameter("page", page);
query.setHint(QueryHints.HINT_CACHEABLE, true);
List<PageField> pageFields = query.getResultList();
Map<String, PageField> pageFieldMap = new HashMap<String, PageField>();
for (PageField pageField : pageFields) {
pageFieldMap.put(pageField.getFieldKey(), pageField);
}
return pageFieldMap;
}
@Override
public Page updatePage(Page page) {
return em.merge(page);
}
@Override
public void delete(Page page) {
if (!em.contains(page)) {
page = readPageById(page.getId());
}
em.remove(page);
}
@Override
public Page addPage(Page clonedPage) {
return em.merge(clonedPage);
}
@Override
public List<Page> findPageByURI(SandBox sandBox, Locale fullLocale, Locale languageOnlyLocale, String uri) {
Query query;
if (languageOnlyLocale == null) {
languageOnlyLocale = fullLocale;
}
// locale
if (sandBox == null) {
query = em.createNamedQuery("BC_READ_PAGE_BY_URI");
} else if (SandBoxType.PRODUCTION.equals(sandBox.getSandBoxType())) {
query = em.createNamedQuery("BC_READ_PAGE_BY_URI_AND_PRODUCTION_SANDBOX");
query.setParameter("sandbox", sandBox);
} else {
query = em.createNamedQuery("BC_READ_PAGE_BY_URI_AND_USER_SANDBOX");
query.setParameter("sandboxId", sandBox.getId());
}
query.setParameter("fullLocale", fullLocale);
query.setParameter("languageOnlyLocale", languageOnlyLocale);
query.setParameter("uri", uri);
return query.getResultList();
}
@Override
public List<Page> readAllPages() {
CriteriaBuilder builder = em.getCriteriaBuilder();
CriteriaQuery<Page> criteria = builder.createQuery(Page.class);
Root<PageImpl> page = criteria.from(PageImpl.class);
criteria.select(page);
try {
return em.createQuery(criteria).getResultList();
} catch (NoResultException e) {
return new ArrayList<Page>();
}
}
@Override
public List<PageTemplate> readAllPageTemplates() {
CriteriaBuilder builder = em.getCriteriaBuilder();
CriteriaQuery<PageTemplate> criteria = builder.createQuery(PageTemplate.class);
Root<PageTemplateImpl> template = criteria.from(PageTemplateImpl.class);
criteria.select(template);
try {
return em.createQuery(criteria).getResultList();
} catch (NoResultException e) {
return new ArrayList<PageTemplate>();
}
}
@Override
public List<Page> findPageByURI(SandBox sandBox, Locale locale, String uri) {
return findPageByURI(sandBox, locale, null, uri);
}
@Override
public void detachPage(Page page) {
em.detach(page);
}
}
| 0true
|
admin_broadleaf-contentmanagement-module_src_main_java_org_broadleafcommerce_cms_page_dao_PageDaoImpl.java
|
49 |
@SuppressWarnings("restriction")
public class OUnsafeByteArrayComparator implements Comparator<byte[]> {
public static final OUnsafeByteArrayComparator INSTANCE = new OUnsafeByteArrayComparator();
private static final Unsafe unsafe;
private static final int BYTE_ARRAY_OFFSET;
private static final boolean littleEndian = ByteOrder.nativeOrder().equals(ByteOrder.LITTLE_ENDIAN);
private static final int LONG_SIZE = Long.SIZE / Byte.SIZE;
static {
unsafe = (Unsafe) AccessController.doPrivileged(new PrivilegedAction<Object>() {
public Object run() {
try {
Field f = Unsafe.class.getDeclaredField("theUnsafe");
f.setAccessible(true);
return f.get(null);
} catch (NoSuchFieldException e) {
throw new Error("Could not access sun.misc.Unsafe", e);
} catch (IllegalAccessException e) {
throw new Error("Could not access sun.misc.Unsafe", e);
}
}
});
BYTE_ARRAY_OFFSET = unsafe.arrayBaseOffset(byte[].class);
final int byteArrayScale = unsafe.arrayIndexScale(byte[].class);
if (byteArrayScale != 1)
throw new Error("Unexpected byte[] index scale: " + byteArrayScale);
}
public int compare(byte[] arrayOne, byte[] arrayTwo) {
if (arrayOne.length > arrayTwo.length)
return 1;
if (arrayOne.length < arrayTwo.length)
return -1;
final int WORDS = arrayOne.length / LONG_SIZE;
for (int i = 0; i < WORDS * LONG_SIZE; i += LONG_SIZE) {
final long index = i + BYTE_ARRAY_OFFSET;
final long wOne = unsafe.getLong(arrayOne, index);
final long wTwo = unsafe.getLong(arrayTwo, index);
if (wOne == wTwo)
continue;
if (littleEndian)
return lessThanUnsigned(Long.reverseBytes(wOne), Long.reverseBytes(wTwo)) ? -1 : 1;
return lessThanUnsigned(wOne, wTwo) ? -1 : 1;
}
for (int i = WORDS * LONG_SIZE; i < arrayOne.length; i++) {
int diff = compareUnsignedByte(arrayOne[i], arrayTwo[i]);
if (diff != 0)
return diff;
}
return 0;
}
private static boolean lessThanUnsigned(long longOne, long longTwo) {
return (longOne + Long.MIN_VALUE) < (longTwo + Long.MIN_VALUE);
}
private static int compareUnsignedByte(byte byteOne, byte byteTwo) {
final int valOne = byteOne & 0xFF;
final int valTwo = byteTwo & 0xFF;
return valOne - valTwo;
}
}
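// Worked example of the unsigned trick above (illustrative): adding Long.MIN_VALUE
// flips the sign bit, mapping unsigned order onto signed order. For instance:
//   0xFFFFFFFFFFFFFFFFL + Long.MIN_VALUE == 0x7FFFFFFFFFFFFFFFL  (signed max)
//   0x0000000000000001L + Long.MIN_VALUE == 0x8000000000000001L  (near signed min)
// so lessThanUnsigned(1L, -1L) is true, matching the unsigned view in which
// 0xFF...FF is the largest value.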
| 0true
|
commons_src_main_java_com_orientechnologies_common_comparator_OUnsafeByteArrayComparator.java
|
1,124 |
public class OSQLFunctionIf extends OSQLFunctionAbstract {
public static final String NAME = "if";
public OSQLFunctionIf() {
super(NAME, 2, 3);
}
@Override
public Object execute(final OIdentifiable iCurrentRecord, final Object iCurrentResult, final Object[] iFuncParams,
final OCommandContext iContext) {
boolean result;
try {
Object condition = iFuncParams[0];
if (condition instanceof Boolean)
result = (Boolean) condition;
else if (condition instanceof String)
result = Boolean.parseBoolean(condition.toString());
else if (condition instanceof Number)
result = ((Number) condition).intValue() > 0;
else
return null;
return result ? iFuncParams[1] : iFuncParams[2];
} catch (Exception e) {
return null;
}
}
@Override
public String getSyntax() {
return "Syntax error: if(<field|value|expression>, <return_value_if_true> [,<return_value_if_false>])";
}
}
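// Usage sketch (assumed OrientDB SQL, illustrative only):
//   SELECT if(eval("price > 100"), "expensive", "cheap") FROM Product
// Per execute() above, a Boolean condition is used directly, a String is parsed
// with Boolean.parseBoolean(), and a Number is treated as true when > 0.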
| 1no label
|
core_src_main_java_com_orientechnologies_orient_core_sql_functions_misc_OSQLFunctionIf.java
|
121 |
public abstract class OAbstractProfiler extends OSharedResourceAbstract implements OProfilerMBean {
protected long recordingFrom = -1;
protected final Map<String, OProfilerHookValue> hooks = new ConcurrentHashMap<String, OProfilerHookValue>();
protected final ConcurrentHashMap<String, String> dictionary = new ConcurrentHashMap<String, String>();
protected final ConcurrentHashMap<String, METRIC_TYPE> types = new ConcurrentHashMap<String, METRIC_TYPE>();
public interface OProfilerHookValue {
public Object getValue();
}
public OAbstractProfiler() {
}
public OAbstractProfiler(final OAbstractProfiler profiler) {
hooks.putAll(profiler.hooks);
dictionary.putAll(profiler.dictionary);
types.putAll(profiler.types);
}
public void shutdown() {
stopRecording();
}
public boolean startRecording() {
if (isRecording())
return false;
recordingFrom = System.currentTimeMillis();
return true;
}
public boolean stopRecording() {
if (!isRecording())
return false;
recordingFrom = -1;
return true;
}
public boolean isRecording() {
return recordingFrom > -1;
}
public void updateCounter(final String iStatName, final String iDescription, final long iPlus) {
updateCounter(iStatName, iDescription, iPlus, iStatName);
}
@Override
public String getName() {
return "profiler";
}
@Override
public void startup() {
startRecording();
}
@Override
public String dump() {
return null;
}
@Override
public String dumpCounters() {
return null;
}
@Override
public OProfilerEntry getChrono(String string) {
return null;
}
@Override
public long startChrono() {
return 0;
}
@Override
public long stopChrono(String iName, String iDescription, long iStartTime) {
return 0;
}
@Override
public long stopChrono(String iName, String iDescription, long iStartTime, String iDictionary) {
return 0;
}
@Override
public String dumpChronos() {
return null;
}
@Override
public String[] getCountersAsString() {
return null;
}
@Override
public String[] getChronosAsString() {
return null;
}
@Override
public Date getLastReset() {
return null;
}
@Override
public void setAutoDump(int iNewValue) {
}
@Override
public String metadataToJSON() {
return null;
}
@Override
public Map<String, OPair<String, METRIC_TYPE>> getMetadata() {
final Map<String, OPair<String, METRIC_TYPE>> metadata = new HashMap<String, OPair<String, METRIC_TYPE>>();
for (Entry<String, String> entry : dictionary.entrySet())
metadata.put(entry.getKey(), new OPair<String, METRIC_TYPE>(entry.getValue(), types.get(entry.getKey())));
return metadata;
}
public void registerHookValue(final String iName, final String iDescription, final METRIC_TYPE iType,
final OProfilerHookValue iHookValue) {
registerHookValue(iName, iDescription, iType, iHookValue, iName);
}
public void registerHookValue(final String iName, final String iDescription, final METRIC_TYPE iType,
final OProfilerHookValue iHookValue, final String iMetadataName) {
if (iName != null) {
unregisterHookValue(iName);
updateMetadata(iMetadataName, iDescription, iType);
hooks.put(iName, iHookValue);
}
}
@Override
public void unregisterHookValue(final String iName) {
if (iName != null)
hooks.remove(iName);
}
@Override
public String getSystemMetric(final String iMetricName) {
final StringBuilder buffer = new StringBuilder();
buffer.append("system.");
buffer.append(iMetricName);
return buffer.toString();
}
@Override
public String getProcessMetric(final String iMetricName) {
final StringBuilder buffer = new StringBuilder();
buffer.append("process.");
buffer.append(iMetricName);
return buffer.toString();
}
@Override
public String getDatabaseMetric(final String iDatabaseName, final String iMetricName) {
final StringBuilder buffer = new StringBuilder();
buffer.append("db.");
buffer.append(iDatabaseName != null ? iDatabaseName : "*");
buffer.append('.');
buffer.append(iMetricName);
return buffer.toString();
}
@Override
public String toJSON(String command, final String iPar1) {
return null;
}
/**
* Updates the metric metadata.
*/
protected void updateMetadata(final String iName, final String iDescription, final METRIC_TYPE iType) {
if (iDescription != null && dictionary.putIfAbsent(iName, iDescription) == null)
types.put(iName, iType);
}
}
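// Usage sketch (illustrative; METRIC_TYPE.SIZE and the 'cache' object are assumptions):
//   profiler.registerHookValue(
//       profiler.getDatabaseMetric("mydb", "cache.size"),  // -> "db.mydb.cache.size"
//       "Current cache size", METRIC_TYPE.SIZE,
//       new OProfilerHookValue() {
//           public Object getValue() { return cache.size(); }
//       });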
| 0true
|
commons_src_main_java_com_orientechnologies_common_profiler_OAbstractProfiler.java
|
1,481 |
public class RoutingService extends AbstractLifecycleComponent<RoutingService> implements ClusterStateListener {
private static final String CLUSTER_UPDATE_TASK_SOURCE = "routing-table-updater";
private final ThreadPool threadPool;
private final ClusterService clusterService;
private final AllocationService allocationService;
private final TimeValue schedule;
private volatile boolean routingTableDirty = false;
private volatile Future scheduledRoutingTableFuture;
@Inject
public RoutingService(Settings settings, ThreadPool threadPool, ClusterService clusterService, AllocationService allocationService) {
super(settings);
this.threadPool = threadPool;
this.clusterService = clusterService;
this.allocationService = allocationService;
this.schedule = componentSettings.getAsTime("schedule", timeValueSeconds(10));
clusterService.addFirst(this);
}
@Override
protected void doStart() throws ElasticsearchException {
}
@Override
protected void doStop() throws ElasticsearchException {
}
@Override
protected void doClose() throws ElasticsearchException {
if (scheduledRoutingTableFuture != null) {
scheduledRoutingTableFuture.cancel(true);
scheduledRoutingTableFuture = null;
}
clusterService.remove(this);
}
@Override
public void clusterChanged(ClusterChangedEvent event) {
if (event.source().equals(CLUSTER_UPDATE_TASK_SOURCE)) {
// that's us, ignore this event
return;
}
if (event.state().nodes().localNodeMaster()) {
// we are master, schedule the routing table updater
if (scheduledRoutingTableFuture == null) {
// a new master (us), make sure we reroute shards
routingTableDirty = true;
scheduledRoutingTableFuture = threadPool.scheduleWithFixedDelay(new RoutingTableUpdater(), schedule);
}
if (event.nodesRemoved()) {
// if nodes were removed, we don't want to wait for the scheduled task
// since we want to get primary election as fast as possible
routingTableDirty = true;
reroute();
// Commented out since we make sure to reroute whenever shards changes state or metadata changes state
// } else if (event.routingTableChanged()) {
// routingTableDirty = true;
// reroute();
} else {
if (event.nodesAdded()) {
for (DiscoveryNode node : event.nodesDelta().addedNodes()) {
if (node.dataNode()) {
routingTableDirty = true;
break;
}
}
}
}
} else {
if (scheduledRoutingTableFuture != null) {
scheduledRoutingTableFuture.cancel(true);
scheduledRoutingTableFuture = null;
}
}
}
private void reroute() {
try {
if (!routingTableDirty) {
return;
}
if (lifecycle.stopped()) {
return;
}
clusterService.submitStateUpdateTask(CLUSTER_UPDATE_TASK_SOURCE, Priority.HIGH, new ClusterStateUpdateTask() {
@Override
public ClusterState execute(ClusterState currentState) {
RoutingAllocation.Result routingResult = allocationService.reroute(currentState);
if (!routingResult.changed()) {
// no state changed
return currentState;
}
return ClusterState.builder(currentState).routingResult(routingResult).build();
}
@Override
public void onFailure(String source, Throwable t) {
logger.error("unexpected failure during [{}]", t, source);
}
});
routingTableDirty = false;
} catch (Exception e) {
logger.warn("Failed to reroute routing table", e);
}
}
private class RoutingTableUpdater implements Runnable {
@Override
public void run() {
reroute();
}
}
}
| 1no label
|
src_main_java_org_elasticsearch_cluster_routing_RoutingService.java
|
264 |
private static class Task implements Serializable, Callable {
@Override
public Object call() throws Exception {
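// 'counter' is assumed to be a static AtomicInteger on the enclosing test
// class; this fragment is not compilable on its own.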
counter.incrementAndGet();
return null;
}
}
| 0true
|
hazelcast-client_src_test_java_com_hazelcast_client_executor_ExecutionDelayTest.java
|