conflict_resolution (string column, value lengths 27 to 16k characters)
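Each value in the conflict_resolution column is a Git merge conflict together with its chosen resolution, flattened onto a single line: the text between <<<<<<< and ======= is one side of the conflict, the text between ======= and >>>>>>> is the other side, and everything after the final >>>>>>> marker is the resolved code. For illustration only, a minimal hypothetical row (not taken from the dataset) in the same flattened Java style might read:

<<<<<<< int timeout = 30; ======= int timeout = 60; >>>>>>> int timeout = 60; // resolution keeps the incoming value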
<<<<<<< ======= import com.google.common.base.Function; import com.google.common.util.concurrent.AsyncFunction; import com.google.common.util.concurrent.Futures; >>>>>>> import com.google.common.base.Function; import com.google.common.util.concurrent.AsyncFunction; import com.google.common.util.concurrent.Futures; <<<<<<< ======= import org.thingsboard.server.dao.model.CustomerEntity; import org.thingsboard.server.dao.model.DeviceEntity; import org.thingsboard.server.dao.model.TenantDeviceTypeEntity; import org.thingsboard.server.dao.model.TenantEntity; import org.thingsboard.server.dao.relation.EntitySearchDirection; >>>>>>> import org.thingsboard.server.dao.model.TenantDeviceTypeEntity; import org.thingsboard.server.dao.relation.EntitySearchDirection; <<<<<<< import static org.thingsboard.server.dao.DaoUtil.toUUIDs; ======= import static org.thingsboard.server.dao.DaoUtil.*; >>>>>>> import static org.thingsboard.server.dao.DaoUtil.toUUIDs; <<<<<<< List<Device> devices = deviceDao.findDevicesByTenantId(tenantId.getId(), pageLink); return new TextPageData<>(devices, pageLink); ======= List<DeviceEntity> deviceEntities = deviceDao.findDevicesByTenantId(tenantId.getId(), pageLink); List<Device> devices = convertDataList(deviceEntities); return new TextPageData<>(devices, pageLink); } @Override public TextPageData<Device> findDevicesByTenantIdAndType(TenantId tenantId, String type, TextPageLink pageLink) { log.trace("Executing findDevicesByTenantIdAndType, tenantId [{}], type [{}], pageLink [{}]", tenantId, type, pageLink); validateId(tenantId, "Incorrect tenantId " + tenantId); validateString(type, "Incorrect type " + type); validatePageLink(pageLink, "Incorrect page link " + pageLink); List<DeviceEntity> deviceEntities = deviceDao.findDevicesByTenantIdAndType(tenantId.getId(), type, pageLink); List<Device> devices = convertDataList(deviceEntities); return new TextPageData<>(devices, pageLink); >>>>>>> List<Device> devices = deviceDao.findDevicesByTenantId(tenantId.getId(), pageLink); return new TextPageData<>(devices, pageLink); } @Override public TextPageData<Device> findDevicesByTenantIdAndType(TenantId tenantId, String type, TextPageLink pageLink) { log.trace("Executing findDevicesByTenantIdAndType, tenantId [{}], type [{}], pageLink [{}]", tenantId, type, pageLink); validateId(tenantId, "Incorrect tenantId " + tenantId); validateString(type, "Incorrect type " + type); validatePageLink(pageLink, "Incorrect page link " + pageLink); List<Device> devices = deviceDao.findDevicesByTenantIdAndType(tenantId.getId(), type, pageLink); return new TextPageData<>(devices, pageLink); <<<<<<< List<Device> devices = deviceDao.findDevicesByTenantIdAndCustomerId(tenantId.getId(), customerId.getId(), pageLink); return new TextPageData<>(devices, pageLink); ======= List<DeviceEntity> deviceEntities = deviceDao.findDevicesByTenantIdAndCustomerId(tenantId.getId(), customerId.getId(), pageLink); List<Device> devices = convertDataList(deviceEntities); return new TextPageData<>(devices, pageLink); } @Override public TextPageData<Device> findDevicesByTenantIdAndCustomerIdAndType(TenantId tenantId, CustomerId customerId, String type, TextPageLink pageLink) { log.trace("Executing findDevicesByTenantIdAndCustomerIdAndType, tenantId [{}], customerId [{}], type [{}], pageLink [{}]", tenantId, customerId, type, pageLink); validateId(tenantId, "Incorrect tenantId " + tenantId); validateId(customerId, "Incorrect customerId " + customerId); validateString(type, "Incorrect type " + type); validatePageLink(pageLink, 
"Incorrect page link " + pageLink); List<DeviceEntity> deviceEntities = deviceDao.findDevicesByTenantIdAndCustomerIdAndType(tenantId.getId(), customerId.getId(), type, pageLink); List<Device> devices = convertDataList(deviceEntities); return new TextPageData<>(devices, pageLink); >>>>>>> List<Device> devices = deviceDao.findDevicesByTenantIdAndCustomerId(tenantId.getId(), customerId.getId(), pageLink); return new TextPageData<>(devices, pageLink); } @Override public TextPageData<Device> findDevicesByTenantIdAndCustomerIdAndType(TenantId tenantId, CustomerId customerId, String type, TextPageLink pageLink) { log.trace("Executing findDevicesByTenantIdAndCustomerIdAndType, tenantId [{}], customerId [{}], type [{}], pageLink [{}]", tenantId, customerId, type, pageLink); validateId(tenantId, "Incorrect tenantId " + tenantId); validateId(customerId, "Incorrect customerId " + customerId); validateString(type, "Incorrect type " + type); validatePageLink(pageLink, "Incorrect page link " + pageLink); List<Device> devices = deviceDao.findDevicesByTenantIdAndCustomerIdAndType(tenantId.getId(), customerId.getId(), type, pageLink); return new TextPageData<>(devices, pageLink);
<<<<<<< import static edu.hm.hafner.analysis.assertions.Assertions.*; import static edu.hm.hafner.analysis.parser.violations.JUnitAdapter.*; ======= import static edu.hm.hafner.analysis.assertions.Assertions.*; >>>>>>> import static edu.hm.hafner.analysis.assertions.Assertions.*; import static edu.hm.hafner.analysis.parser.violations.JUnitAdapter.*; <<<<<<< softly.assertThat(report.getCounter(TOTAL_TESTS)).isEqualTo(6); softly.assertThat(report.getCounter(PASSED_TESTS)).isEqualTo(4); softly.assertThat(report.getCounter(FAILED_TESTS)).isEqualTo(2); softly.assertThat(report.getCounter(SKIPPED_TESTS)).isEqualTo(0); ======= >>>>>>> softly.assertThat(report.getCounter(TOTAL_TESTS)).isEqualTo(6); softly.assertThat(report.getCounter(PASSED_TESTS)).isEqualTo(4); softly.assertThat(report.getCounter(FAILED_TESTS)).isEqualTo(2); softly.assertThat(report.getCounter(SKIPPED_TESTS)).isEqualTo(0); <<<<<<< /** * Verifies that skipped tests will be counted. */ @Test void shouldCountSkipped() { Report report = parse("junit-skipped.xml"); assertThat(report).isEmpty(); assertThat(report.getCounter(TOTAL_TESTS)).isEqualTo(5); assertThat(report.getCounter(SKIPPED_TESTS)).isEqualTo(1); assertThat(report.getCounter(PASSED_TESTS)).isEqualTo(4); assertThat(report.getCounter(FAILED_TESTS)).isEqualTo(0); } ======= /** * Verifies that report of iOS can be parsed. * * @see <a href="https://issues.jenkins-ci.org/browse/JENKINS-63527">Issue 63527</a> */ @Test void issue63527() { Report report = parse("report.junit"); assertThat(report).hasSize(1); } >>>>>>> /** * Verifies that skipped tests will be counted. */ @Test void shouldCountSkipped() { Report report = parse("junit-skipped.xml"); assertThat(report).isEmpty(); assertThat(report.getCounter(TOTAL_TESTS)).isEqualTo(5); assertThat(report.getCounter(SKIPPED_TESTS)).isEqualTo(1); assertThat(report.getCounter(PASSED_TESTS)).isEqualTo(4); assertThat(report.getCounter(FAILED_TESTS)).isEqualTo(0); } /** * Verifies that report of iOS can be parsed. * * @see <a href="https://issues.jenkins-ci.org/browse/JENKINS-63527">Issue 63527</a> */ @Test void issue63527() { Report report = parse("report.junit"); assertThat(report).hasSize(1); }
<<<<<<< parsers.add(new ArmccCompilerParser()); ======= parsers.add(new YuiCompressorParser()); >>>>>>> parsers.add(new ArmccCompilerParser()); parsers.add(new YuiCompressorParser());
<<<<<<< else if ( line2.toLowerCase().startsWith( "<cast" ) ) { tr = new CASTAIPReader().parse( fileToParse ); } ======= else if ( line2.startsWith( "<castaip" ) ) { tr = new CASTAIPReader().parse( fileToParse ); } >>>>>>> else if ( line2.toLowerCase().startsWith( "<castaip" ) ) { tr = new CASTAIPReader().parse( fileToParse ); } <<<<<<< else System.out.println("Error: No matching parser found for file: " + filename); ======= else if (filename.endsWith(".hlg")) { tr = new HdivReader().parse(fileToParse); } >>>>>>> else if ( filename.endsWith( ".hlg" ) ) { tr = new HdivReader().parse(fileToParse); } else System.out.println("Error: No matching parser found for file: " + filename);
<<<<<<< final class VersionOperationImpl extends OperationImpl implements VersionOperation, NoopOperation { private static final byte[] REQUEST = "version\r\n".getBytes(); public VersionOperationImpl(OperationCallback c) { super(c); } @Override public void handleLine(String line) { assert line.startsWith("VERSION "); getCallback().receivedStatus( new OperationStatus(true, line.substring("VERSION ".length()))); transitionState(OperationState.COMPLETE); } @Override public void initialize() { setBuffer(ByteBuffer.wrap(REQUEST)); } ======= final class VersionOperationImpl extends OperationImpl implements VersionOperation, NoopOperation { private static final byte[] REQUEST="version\r\n".getBytes(); public VersionOperationImpl(OperationCallback c) { super(c); } @Override public void handleLine(String line) { assert line.startsWith("VERSION "); getCallback().receivedStatus( new OperationStatus(true, line.substring("VERSION ".length()))); transitionState(OperationState.COMPLETE); } @Override public void initialize() { setBuffer(ByteBuffer.wrap(REQUEST)); } @Override public String toString() { return "Cmd: version"; } >>>>>>> final class VersionOperationImpl extends OperationImpl implements VersionOperation, NoopOperation { private static final byte[] REQUEST = "version\r\n".getBytes(); public VersionOperationImpl(OperationCallback c) { super(c); } @Override public void handleLine(String line) { assert line.startsWith("VERSION "); getCallback().receivedStatus( new OperationStatus(true, line.substring("VERSION ".length()))); transitionState(OperationState.COMPLETE); } @Override public void initialize() { setBuffer(ByteBuffer.wrap(REQUEST)); } @Override public String toString() { return "Cmd: version"; }
<<<<<<< /** * Has this operation been cancelled? */ boolean isCancelled(); /** * True if an error occurred while processing this operation. */ boolean hasErrored(); /** * Get the exception that occurred (or null if no exception occurred). */ OperationException getException(); /** * Get the callback for this get operation. */ OperationCallback getCallback(); /** * Cancel this operation. */ void cancel(); /** * Get the current state of this operation. */ OperationState getState(); /** * Get the write buffer for this operation. */ ByteBuffer getBuffer(); /** * Invoked after having written all of the bytes from the supplied output * buffer. */ void writeComplete(); /** * Initialize this operation. This is used to prepare output byte buffers and * stuff. */ void initialize(); /** * Read data from the given byte buffer and dispatch to the appropriate read * mechanism. */ void readFromBuffer(ByteBuffer data) throws IOException; /** * Handle a raw data read. */ void handleRead(ByteBuffer data); /** * Get the node that should've been handling this operation. */ MemcachedNode getHandlingNode(); /** * Set a reference to the node that will be/is handling this operation. * * @param to a memcached node */ void setHandlingNode(MemcachedNode to); /** * Mark this operation as one which has exceeded its timeout value. */ void timeOut(); /** * True if the operation has timed out. * * <p> * A timed out operation may or may not have been sent to the server already, * but it exceeded either the specified or the default timeout value. */ boolean isTimedOut(); /** * True if the operation has timed out. * * The ttl allows the caller to specify how long the operation should have * been given since its creation, returning true if the operation has exceeded * that time period. * * <p> * A timed out operation may or may not have been sent to the server already, * but it exceeded either the specified or the default timeout value. * * <p> * In the rare case this may be called with a longer timeout value after * having been called with a shorter value that caused the operation to be * timed out, an IllegalArgumentException may be thrown. * * @param ttlMillis the max amount of time an operation may have existed since * its creation in milliseconds. */ boolean isTimedOut(long ttlMillis); /** * True if the operation has timed out and has not been sent. * * If the client has timed out this operation and knows it has not been * written to the network, this will be true. */ boolean isTimedOutUnsent(); ======= /** * Has this operation been cancelled? */ boolean isCancelled(); /** * True if an error occurred while processing this operation. */ boolean hasErrored(); /** * Get the exception that occurred (or null if no exception occurred). */ OperationException getException(); /** * Get the callback for this get operation. */ OperationCallback getCallback(); /** * Cancel this operation. */ void cancel(); /** * Get the current state of this operation. */ OperationState getState(); /** * Get the write buffer for this operation. */ ByteBuffer getBuffer(); /** * Invoked when we start writing all of the bytes from this operation to * the sockets write buffer. */ void writing(); /** * Invoked after having written all of the bytes from the supplied output * buffer. */ void writeComplete(); /** * Initialize this operation. This is used to prepare output byte buffers * and stuff. */ void initialize(); /** * Read data from the given byte buffer and dispatch to the appropriate * read mechanism. 
*/ void readFromBuffer(ByteBuffer data) throws IOException; /** * Handle a raw data read. */ void handleRead(ByteBuffer data); /** * Get the node that should've been handling this operation. */ MemcachedNode getHandlingNode(); /** * Set a reference to the node that will be/is handling this operation. * * @param to a memcached node */ void setHandlingNode(MemcachedNode to); /** * Mark this operation as one which has exceeded its timeout value. */ public void timeOut(); /** * True if the operation has timed out. * * <p>A timed out operation may or may not have been sent to the server * already, but it exceeded either the specified or the default timeout * value. */ public boolean isTimedOut(); /** * True if the operation has timed out. * * The ttl allows the caller to specify how long the operation should * have been given since its creation, returning true if the operation * has exceeded that time period. * * <p>A timed out operation may or may not have been sent to the server * already, but it exceeded either the specified or the default timeout * value. * * <p>In the rare case this may be called with a longer timeout value * after having been called with a shorter value that caused the * operation to be timed out, an IllegalArgumentException may be thrown. * * @param ttlMillis the max amount of time an operation may have existed * since its creation in milliseconds. */ public boolean isTimedOut(long ttlMillis); /** * True if the operation has timed out and has not been sent. * * If the client has timed out this operation and knows it has not been * written to the network, this will be true. * */ public boolean isTimedOutUnsent(); >>>>>>> /** * Has this operation been cancelled? */ boolean isCancelled(); /** * True if an error occurred while processing this operation. */ boolean hasErrored(); /** * Get the exception that occurred (or null if no exception occurred). */ OperationException getException(); /** * Get the callback for this get operation. */ OperationCallback getCallback(); /** * Cancel this operation. */ void cancel(); /** * Get the current state of this operation. */ OperationState getState(); /** * Get the write buffer for this operation. */ ByteBuffer getBuffer(); /** * Invoked when we start writing all of the bytes from this operation to * the sockets write buffer. */ void writing(); /** * Invoked after having written all of the bytes from the supplied output * buffer. */ void writeComplete(); /** * Initialize this operation. This is used to prepare output byte buffers and * stuff. */ void initialize(); /** * Read data from the given byte buffer and dispatch to the appropriate read * mechanism. */ void readFromBuffer(ByteBuffer data) throws IOException; /** * Handle a raw data read. */ void handleRead(ByteBuffer data); /** * Get the node that should've been handling this operation. */ MemcachedNode getHandlingNode(); /** * Set a reference to the node that will be/is handling this operation. * * @param to a memcached node */ void setHandlingNode(MemcachedNode to); /** * Mark this operation as one which has exceeded its timeout value. */ void timeOut(); /** * True if the operation has timed out. * * <p> * A timed out operation may or may not have been sent to the server already, * but it exceeded either the specified or the default timeout value. */ boolean isTimedOut(); /** * True if the operation has timed out. 
* * The ttl allows the caller to specify how long the operation should have * been given since its creation, returning true if the operation has exceeded * that time period. * * <p> * A timed out operation may or may not have been sent to the server already, * but it exceeded either the specified or the default timeout value. * * <p> * In the rare case this may be called with a longer timeout value after * having been called with a shorter value that caused the operation to be * timed out, an IllegalArgumentException may be thrown. * * @param ttlMillis the max amount of time an operation may have existed since * its creation in milliseconds. */ boolean isTimedOut(long ttlMillis); /** * True if the operation has timed out and has not been sent. * * If the client has timed out this operation and knows it has not been * written to the network, this will be true. */ boolean isTimedOutUnsent();
<<<<<<< public AsciiMemcachedNodeImpl(SocketAddress sa, SocketChannel c, int bufSize, BlockingQueue<Operation> rq, BlockingQueue<Operation> wq, BlockingQueue<Operation> iq, Long opQueueMaxBlockTimeNs, long dt) { // ASCII never does auth super(sa, c, bufSize, rq, wq, iq, opQueueMaxBlockTimeNs, false, dt); } @Override protected void optimize() { // make sure there are at least two get operations in a row before // attempting to optimize them. if (writeQ.peek() instanceof GetOperation) { optimizedOp = writeQ.remove(); if (writeQ.peek() instanceof GetOperation) { OptimizedGetImpl og = new OptimizedGetImpl((GetOperation) optimizedOp); optimizedOp = og; while (writeQ.peek() instanceof GetOperation) { GetOperationImpl o = (GetOperationImpl) writeQ.remove(); if (!o.isCancelled()) { og.addOperation(o); } } // Initialize the new mega get optimizedOp.initialize(); assert optimizedOp.getState() == OperationState.WRITING; ProxyCallback pcb = (ProxyCallback) og.getCallback(); getLogger().debug("Set up %s with %s keys and %s callbacks", this, pcb.numKeys(), pcb.numCallbacks()); } } } ======= public AsciiMemcachedNodeImpl(SocketAddress sa, SocketChannel c, int bufSize, BlockingQueue<Operation> rq, BlockingQueue<Operation> wq, BlockingQueue<Operation> iq, Long opQueueMaxBlockTimeNs, long dt) { super(sa, c, bufSize, rq, wq, iq, opQueueMaxBlockTimeNs, false, dt); /* ascii never does auth */ } @Override protected void optimize() { // make sure there are at least two get operations in a row before // attempting to optimize them. if(writeQ.peek() instanceof GetOperation) { optimizedOp=writeQ.remove(); if(writeQ.peek() instanceof GetOperation) { OptimizedGetImpl og=new OptimizedGetImpl( (GetOperation)optimizedOp); optimizedOp=og; while(writeQ.peek() instanceof GetOperation) { GetOperationImpl o=(GetOperationImpl) writeQ.remove(); if(!o.isCancelled()) { og.addOperation(o); } } // Initialize the new mega get optimizedOp.initialize(); assert optimizedOp.getState() == OperationState.WRITE_QUEUED; ProxyCallback pcb=(ProxyCallback) og.getCallback(); getLogger().debug("Set up %s with %s keys and %s callbacks", this, pcb.numKeys(), pcb.numCallbacks()); } } } >>>>>>> public AsciiMemcachedNodeImpl(SocketAddress sa, SocketChannel c, int bufSize, BlockingQueue<Operation> rq, BlockingQueue<Operation> wq, BlockingQueue<Operation> iq, Long opQueueMaxBlockTimeNs, long dt) { // ASCII never does auth super(sa, c, bufSize, rq, wq, iq, opQueueMaxBlockTimeNs, false, dt); } @Override protected void optimize() { // make sure there are at least two get operations in a row before // attempting to optimize them. if (writeQ.peek() instanceof GetOperation) { optimizedOp = writeQ.remove(); if (writeQ.peek() instanceof GetOperation) { OptimizedGetImpl og = new OptimizedGetImpl((GetOperation) optimizedOp); optimizedOp = og; while (writeQ.peek() instanceof GetOperation) { GetOperationImpl o = (GetOperationImpl) writeQ.remove(); if (!o.isCancelled()) { og.addOperation(o); } } // Initialize the new mega get optimizedOp.initialize(); assert optimizedOp.getState() == OperationState.WRITE_QUEUED; ProxyCallback pcb = (ProxyCallback) og.getCallback(); getLogger().debug("Set up %s with %s keys and %s callbacks", this, pcb.numKeys(), pcb.numCallbacks()); } } }
<<<<<<< class StoreOperationImpl extends SingleKeyOperationImpl implements StoreOperation, CASOperation { private static final int SET = 0x01; private static final int ADD = 0x02; private static final int REPLACE = 0x03; static final int SETQ = 0x11; static final int ADDQ = 0x12; static final int REPLACEQ = 0x13; // 4-byte flags, 4-byte expiration static final int EXTRA_LEN = 8; private final StoreType storeType; private final int flags; private final int exp; private final long cas; private final byte[] data; private static int cmdMap(StoreType t) { int rv; switch (t) { case set: rv = SET; break; case add: rv = ADD; break; case replace: rv = REPLACE; break; default: rv = -1; } // Check fall-through. assert rv != -1 : "Unhandled store type: " + t; return rv; } public StoreOperationImpl(StoreType t, String k, int f, int e, byte[] d, long c, OperationCallback cb) { super(cmdMap(t), generateOpaque(), k, cb); flags = f; exp = e; data = d; cas = c; storeType = t; } @Override public void initialize() { prepareBuffer(key, cas, data, flags, exp); } public byte[] getBytes() { return data; } public long getCasValue() { return cas; } public int getExpiration() { return exp; } public int getFlags() { return flags; } public byte[] getData() { return data; } public StoreType getStoreType() { return storeType; } ======= class StoreOperationImpl extends SingleKeyOperationImpl implements StoreOperation, CASOperation { private static final int SET=0x01; private static final int ADD=0x02; private static final int REPLACE=0x03; static final int SETQ=0x11; static final int ADDQ=0x12; static final int REPLACEQ=0x13; // 4-byte flags, 4-byte expiration static final int EXTRA_LEN = 8; private final StoreType storeType; private final int flags; private final int exp; private final long cas; private final byte[] data; private static int cmdMap(StoreType t) { int rv=-1; switch(t) { case set: rv=SET; break; case add: rv=ADD; break; case replace: rv=REPLACE; break; } // Check fall-through. assert rv != -1 : "Unhandled store type: " + t; return rv; } public StoreOperationImpl(StoreType t, String k, int f, int e, byte[] d, long c, OperationCallback cb) { super(cmdMap(t), generateOpaque(), k, cb); flags=f; exp=e; data=d; cas=c; storeType=t; } @Override public void initialize() { prepareBuffer(key, cas, data, flags, exp); } public long getCasValue() { return cas; } public int getExpiration() { return exp; } public int getFlags() { return flags; } public byte[] getData() { return data; } public StoreType getStoreType() { return storeType; } @Override public String toString() { return super.toString() + " Cas: " + cas + " Exp: " + exp + " Flags: " + flags + " Data Length: " + data.length; } >>>>>>> class StoreOperationImpl extends SingleKeyOperationImpl implements StoreOperation, CASOperation { private static final int SET = 0x01; private static final int ADD = 0x02; private static final int REPLACE = 0x03; static final int SETQ = 0x11; static final int ADDQ = 0x12; static final int REPLACEQ = 0x13; // 4-byte flags, 4-byte expiration static final int EXTRA_LEN = 8; private final StoreType storeType; private final int flags; private final int exp; private final long cas; private final byte[] data; private static int cmdMap(StoreType t) { int rv; switch (t) { case set: rv = SET; break; case add: rv = ADD; break; case replace: rv = REPLACE; break; default: rv = -1; } // Check fall-through. 
assert rv != -1 : "Unhandled store type: " + t; return rv; } public StoreOperationImpl(StoreType t, String k, int f, int e, byte[] d, long c, OperationCallback cb) { super(cmdMap(t), generateOpaque(), k, cb); flags = f; exp = e; data = d; cas = c; storeType = t; } @Override public void initialize() { prepareBuffer(key, cas, data, flags, exp); } public long getCasValue() { return cas; } public int getExpiration() { return exp; } public int getFlags() { return flags; } public byte[] getData() { return data; } public StoreType getStoreType() { return storeType; } @Override public String toString() { return super.toString() + " Cas: " + cas + " Exp: " + exp + " Flags: " + flags + " Data Length: " + data.length; }
<<<<<<< public ConcatenationType getStoreType() { return catType; } ======= @Override public String toString() { return super.toString() + " Cas: " + cas + " Data Length: " + data.length; } >>>>>>> public ConcatenationType getStoreType() { return catType; } @Override public String toString() { return super.toString() + " Cas: " + cas + " Data Length: " + data.length; }
<<<<<<< /** * Abstract implementation of a tap operation. */ public abstract class TapOperationImpl extends OperationImpl implements TapOperation { private static final int TAP_FLAG_ACK = 0x1; private int bytesProcessed; private int bodylen; private byte[] header; private byte[] message; static final int CMD = 0; protected TapOperationImpl(OperationCallback cb) { super(CMD, generateOpaque(), cb); this.header = new byte[BaseMessage.HEADER_LENGTH]; this.message = null; } public abstract void initialize(); @Override public void readFromBuffer(ByteBuffer data) throws IOException { while (data.remaining() > 0) { if (bytesProcessed < BaseMessage.HEADER_LENGTH) { header[bytesProcessed] = data.get(); bytesProcessed++; } else { if (message == null) { bodylen = (int) Util.fieldToValue(header, BaseMessage.TOTAL_BODY_INDEX, BaseMessage.TOTAL_BODY_FIELD_LENGTH); message = new byte[BaseMessage.HEADER_LENGTH + bodylen]; System.arraycopy(header, 0, message, 0, BaseMessage.HEADER_LENGTH); } if (bytesProcessed < message.length) { message[bytesProcessed] = data.get(); bytesProcessed++; } if (bytesProcessed >= message.length) { ResponseMessage response = new ResponseMessage(message); if (response.getOpcode() != TapOpcode.OPAQUE && response.getOpcode() != TapOpcode.NOOP) { if (response.getFlags() == TAP_FLAG_ACK) { ((Callback) getCallback()).gotAck(response.getOpcode(), response.getOpaque()); } ((Callback) getCallback()).gotData(response); } message = null; bytesProcessed = 0; } } } } ======= public abstract class TapOperationImpl extends OperationImpl implements TapOperation { private static final int TAP_FLAG_ACK = 0x1; private int bytesProcessed; private int bodylen; private byte[] header; private byte[] message; static final int CMD=0; protected TapOperationImpl(OperationCallback cb) { super(CMD, generateOpaque(), cb); this.header = new byte[BaseMessage.HEADER_LENGTH]; this.message = null; } public abstract void initialize(); @Override public void readFromBuffer(ByteBuffer data) throws IOException { while (data.remaining() > 0) { if (bytesProcessed < BaseMessage.HEADER_LENGTH) { header[bytesProcessed] = data.get(); bytesProcessed++; } else { if (message == null) { bodylen = (int) Util.fieldToValue(header, BaseMessage.TOTAL_BODY_INDEX, BaseMessage.TOTAL_BODY_FIELD_LENGTH); message = new byte[BaseMessage.HEADER_LENGTH + bodylen]; System.arraycopy(header, 0, message, 0, BaseMessage.HEADER_LENGTH); } if (bytesProcessed < message.length) { message[bytesProcessed] = data.get(); bytesProcessed++; } if (bytesProcessed >= message.length) { ResponseMessage response = new ResponseMessage(message); if (response.getFlags() == TAP_FLAG_ACK) { ((Callback)getCallback()).gotAck(response.getOpcode(), response.getOpaque()); } if (response.getOpcode() != TapOpcode.OPAQUE && response.getOpcode() != TapOpcode.NOOP) { ((Callback)getCallback()).gotData(response); } message = null; bytesProcessed = 0; } } } } >>>>>>> /** * Abstract implementation of a tap operation. 
*/ public abstract class TapOperationImpl extends OperationImpl implements TapOperation { private static final int TAP_FLAG_ACK = 0x1; private int bytesProcessed; private int bodylen; private byte[] header; private byte[] message; static final int CMD = 0; protected TapOperationImpl(OperationCallback cb) { super(CMD, generateOpaque(), cb); this.header = new byte[BaseMessage.HEADER_LENGTH]; this.message = null; } public abstract void initialize(); @Override public void readFromBuffer(ByteBuffer data) throws IOException { while (data.remaining() > 0) { if (bytesProcessed < BaseMessage.HEADER_LENGTH) { header[bytesProcessed] = data.get(); bytesProcessed++; } else { if (message == null) { bodylen = (int) Util.fieldToValue(header, BaseMessage.TOTAL_BODY_INDEX, BaseMessage.TOTAL_BODY_FIELD_LENGTH); message = new byte[BaseMessage.HEADER_LENGTH + bodylen]; System.arraycopy(header, 0, message, 0, BaseMessage.HEADER_LENGTH); } if (bytesProcessed < message.length) { message[bytesProcessed] = data.get(); bytesProcessed++; } if (bytesProcessed >= message.length) { ResponseMessage response = new ResponseMessage(message); if (response.getFlags() == TAP_FLAG_ACK) { ((Callback) getCallback()).gotAck(response.getOpcode(), response.getOpaque()); } if (response.getOpcode() != TapOpcode.OPAQUE && response.getOpcode() != TapOpcode.NOOP) { ((Callback)getCallback()).gotData(response); } message = null; bytesProcessed = 0; } } } }
<<<<<<< @Override protected void finishedPayload(byte[] pl) throws IOException { if (errorCode == SASL_CONTINUE) { getCallback().receivedStatus(new OperationStatus(true, new String(pl))); transitionState(OperationState.COMPLETE); } else if (errorCode == 0) { getCallback().receivedStatus(new OperationStatus(true, "")); transitionState(OperationState.COMPLETE); } else { super.finishedPayload(pl); } } ======= @Override public String toString() { return "SASL base operation"; } >>>>>>> @Override protected void finishedPayload(byte[] pl) throws IOException { if (errorCode == SASL_CONTINUE) { getCallback().receivedStatus(new OperationStatus(true, new String(pl))); transitionState(OperationState.COMPLETE); } else if (errorCode == 0) { getCallback().receivedStatus(new OperationStatus(true, "")); transitionState(OperationState.COMPLETE); } else { super.finishedPayload(pl); } } @Override public String toString() { return "SASL base operation"; }
<<<<<<< /** * Get the expiration to be set for this operation. */ int getExpiration(); /** * Get the bytes to be set during this operation. * * <p> * Note, this returns an exact reference to the bytes and the data * <em>must not</em> be modified. * </p> */ byte[] getBytes(); ======= /** * Get the bytes to be set during this operation. * * <p> * Note, this returns an exact reference to the bytes and the data * <em>must not</em> be modified. * </p> */ byte[] getData(); >>>>>>> /** * Get the expiration to be set for this operation. */ int getExpiration(); /** * Get the bytes to be set during this operation. * * <p> * Note, this returns an exact reference to the bytes and the data * <em>must not</em> be modified. * </p> */ byte[] getData();
<<<<<<< abstract class MultiKeyOperationImpl extends OperationImpl implements VBucketAware, KeyedOperation { protected final Map<String, Short> vbmap = new HashMap<String, Short>(); protected MultiKeyOperationImpl(int c, int o, OperationCallback cb) { super(c, o, cb); } public Collection<String> getKeys() { return vbmap.keySet(); } public Collection<MemcachedNode> getNotMyVbucketNodes() { return notMyVbucketNodes; } public void addNotMyVbucketNode(MemcachedNode node) { notMyVbucketNodes.add(node); } public void setNotMyVbucketNodes(Collection<MemcachedNode> nodes) { notMyVbucketNodes = nodes; } public void setVBucket(String k, short vb) { assert vbmap.containsKey(k) : "Key " + k + " not contained in operation"; vbmap.put(k, Short.valueOf(vb)); } public short getVBucket(String k) { assert vbmap.containsKey(k) : "Key " + k + " not contained in operation"; return vbmap.get(k); } @Override public String toString() { return super.toString() + " Keys: " + StringUtils.join(getKeys(), " "); } ======= abstract class MultiKeyOperationImpl extends OperationImpl implements VBucketAware, KeyedOperation { protected final Map<String, Short> vbmap = new HashMap<String, Short>(); protected MultiKeyOperationImpl(byte c, int o, OperationCallback cb) { super(c, o, cb); } public Collection<String> getKeys() { return vbmap.keySet(); } public Collection<MemcachedNode> getNotMyVbucketNodes() { return notMyVbucketNodes; } public void addNotMyVbucketNode(MemcachedNode node) { notMyVbucketNodes.add(node); } public void setNotMyVbucketNodes(Collection<MemcachedNode> nodes) { notMyVbucketNodes = nodes; } public void setVBucket(String k, short vb) { assert vbmap.containsKey(k) : "Key " + k + " not contained in operation"; vbmap.put(k, new Short(vb)); } public short getVBucket(String k) { assert vbmap.containsKey(k) : "Key " + k + " not contained in operation" ; return vbmap.get(k); } @Override public String toString() { return super.toString() + " Keys: " + StringUtils.join(getKeys(), " "); } >>>>>>> abstract class MultiKeyOperationImpl extends OperationImpl implements VBucketAware, KeyedOperation { protected final Map<String, Short> vbmap = new HashMap<String, Short>(); protected MultiKeyOperationImpl(byte c, int o, OperationCallback cb) { super(c, o, cb); } public Collection<String> getKeys() { return vbmap.keySet(); } public Collection<MemcachedNode> getNotMyVbucketNodes() { return notMyVbucketNodes; } public void addNotMyVbucketNode(MemcachedNode node) { notMyVbucketNodes.add(node); } public void setNotMyVbucketNodes(Collection<MemcachedNode> nodes) { notMyVbucketNodes = nodes; } public void setVBucket(String k, short vb) { assert vbmap.containsKey(k) : "Key " + k + " not contained in operation"; vbmap.put(k, Short.valueOf(vb)); } public short getVBucket(String k) { assert vbmap.containsKey(k) : "Key " + k + " not contained in operation"; return vbmap.get(k); } @Override public String toString() { return super.toString() + " Keys: " + StringUtils.join(getKeys(), " "); }
<<<<<<< private static final OperationCallback NOOP_CALLBACK = new NoopCallback(); private final int terminalOpaque = generateOpaque(); private final Map<Integer, OperationCallback> callbacks = new HashMap<Integer, OperationCallback>(); private final List<CASOperation> ops = new ArrayList<CASOperation>(); // If nothing else, this will be a NOOP. private int byteCount = MIN_RECV_PACKET; /** * Construct an optimized get starting with the given get operation. */ public OptimizedSetImpl(CASOperation firstStore) { super(-1, -1, NOOP_CALLBACK); addOperation(firstStore); } public void addOperation(CASOperation op) { ops.add(op); // Count the bytes required by this operation. Iterator<String> is = op.getKeys().iterator(); String k = is.next(); int keylen = KeyUtil.getKeyBytes(k).length; byteCount += MIN_RECV_PACKET + StoreOperationImpl.EXTRA_LEN + keylen + op.getBytes().length; } public int size() { return ops.size(); } public int bytes() { return byteCount; } @Override public void initialize() { // Now create a buffer. ByteBuffer bb = ByteBuffer.allocate(byteCount); for (CASOperation so : ops) { Iterator<String> is = so.getKeys().iterator(); String k = is.next(); byte[] keyBytes = KeyUtil.getKeyBytes(k); assert !is.hasNext(); int myOpaque = generateOpaque(); callbacks.put(myOpaque, so.getCallback()); byte[] data = so.getBytes(); // Custom header bb.put(REQ_MAGIC); bb.put((byte) cmdMap(so.getStoreType())); bb.putShort((short) keyBytes.length); bb.put((byte) StoreOperationImpl.EXTRA_LEN); // extralen bb.put((byte) 0); // data type bb.putShort(((VBucketAware) so).getVBucket(k)); // vbucket bb.putInt(keyBytes.length + data.length + StoreOperationImpl.EXTRA_LEN); bb.putInt(myOpaque); bb.putLong(so.getCasValue()); // cas // Extras bb.putInt(so.getFlags()); bb.putInt(so.getExpiration()); // the actual key bb.put(keyBytes); // And the value bb.put(data); } // Add the noop bb.put(REQ_MAGIC); bb.put((byte) NoopOperationImpl.CMD); bb.putShort((short) 0); bb.put((byte) 0); // extralen bb.put((byte) 0); // data type bb.putShort((short) 0); // reserved bb.putInt(0); bb.putInt(terminalOpaque); bb.putLong(0); // cas bb.flip(); setBuffer(bb); } private static int cmdMap(StoreType t) { int rv; switch (t) { case set: rv = StoreOperationImpl.SETQ; break; case add: rv = StoreOperationImpl.ADDQ; break; case replace: rv = StoreOperationImpl.REPLACEQ; break; default: rv = -1; } // Check fall-through. 
assert rv != -1 : "Unhandled store type: " + t; return rv; } @Override protected void finishedPayload(byte[] pl) throws IOException { if (responseOpaque == terminalOpaque) { for (OperationCallback cb : callbacks.values()) { cb.receivedStatus(STATUS_OK); cb.complete(); } transitionState(OperationState.COMPLETE); } else { OperationCallback cb = callbacks.remove(responseOpaque); assert cb != null : "No callback for " + responseOpaque; assert errorCode != 0 : "Got no error on a quiet mutation."; OperationStatus status = getStatusForErrorCode(errorCode, pl); assert status != null : "Got no status for a quiet mutation error"; cb.receivedStatus(status); cb.complete(); } resetInput(); } @Override protected boolean opaqueIsValid() { return responseOpaque == terminalOpaque || callbacks.containsKey(responseOpaque); } static class NoopCallback implements OperationCallback { public void complete() { // noop } public void receivedStatus(OperationStatus status) { // noop } } ======= private static final OperationCallback NOOP_CALLBACK = new NoopCallback(); private final int terminalOpaque=generateOpaque(); private final Map<Integer, OperationCallback> callbacks = new HashMap<Integer, OperationCallback>(); private final List<CASOperation> ops = new ArrayList<CASOperation>(); // If nothing else, this will be a NOOP. private int byteCount = MIN_RECV_PACKET; /** * Construct an optimized get starting with the given get operation. */ public OptimizedSetImpl(CASOperation firstStore) { super(-1, -1, NOOP_CALLBACK); addOperation(firstStore); } public void addOperation(CASOperation op) { ops.add(op); // Count the bytes required by this operation. Iterator<String> is = op.getKeys().iterator(); String k = is.next(); int keylen = KeyUtil.getKeyBytes(k).length; byteCount += MIN_RECV_PACKET + StoreOperationImpl.EXTRA_LEN + keylen + op.getData().length; } public int size() { return ops.size(); } public int bytes() { return byteCount; } @Override public void initialize() { // Now create a buffer. ByteBuffer bb=ByteBuffer.allocate(byteCount); for(CASOperation so : ops) { Iterator<String> is = so.getKeys().iterator(); String k = is.next(); byte[] keyBytes = KeyUtil.getKeyBytes(k); assert !is.hasNext(); int myOpaque = generateOpaque(); callbacks.put(myOpaque, so.getCallback()); byte[] data = so.getData(); // Custom header bb.put(REQ_MAGIC); bb.put((byte)cmdMap(so.getStoreType())); bb.putShort((short)keyBytes.length); bb.put((byte)StoreOperationImpl.EXTRA_LEN); // extralen bb.put((byte)0); // data type bb.putShort(((VBucketAware)so).getVBucket(k)); // vbucket bb.putInt(keyBytes.length + data.length + StoreOperationImpl.EXTRA_LEN); bb.putInt(myOpaque); bb.putLong(so.getCasValue()); // cas // Extras bb.putInt(so.getFlags()); bb.putInt(so.getExpiration()); // the actual key bb.put(keyBytes); // And the value bb.put(data); } // Add the noop bb.put(REQ_MAGIC); bb.put((byte)NoopOperationImpl.CMD); bb.putShort((short)0); bb.put((byte)0); // extralen bb.put((byte)0); // data type bb.putShort((short)0); // reserved bb.putInt(0); bb.putInt(terminalOpaque); bb.putLong(0); // cas bb.flip(); setBuffer(bb); } private static int cmdMap(StoreType t) { int rv=-1; switch(t) { case set: rv=StoreOperationImpl.SETQ; break; case add: rv=StoreOperationImpl.ADDQ; break; case replace: rv=StoreOperationImpl.REPLACEQ; break; } // Check fall-through. 
assert rv != -1 : "Unhandled store type: " + t; return rv; } @Override protected void finishedPayload(byte[] pl) throws IOException { if(responseOpaque == terminalOpaque) { for(OperationCallback cb : callbacks.values()) { cb.receivedStatus(STATUS_OK); cb.complete(); } transitionState(OperationState.COMPLETE); } else { OperationCallback cb = callbacks.remove(responseOpaque); assert cb != null : "No callback for " + responseOpaque; assert errorCode != 0 : "Got no error on a quiet mutation."; OperationStatus status=getStatusForErrorCode(errorCode, pl); assert status != null : "Got no status for a quiet mutation error"; cb.receivedStatus(status); cb.complete(); } resetInput(); } @Override protected boolean opaqueIsValid() { return responseOpaque == terminalOpaque || callbacks.containsKey(responseOpaque); } static class NoopCallback implements OperationCallback { public void complete() { // noop } public void receivedStatus(OperationStatus status) { // noop } } >>>>>>> private static final OperationCallback NOOP_CALLBACK = new NoopCallback(); private final int terminalOpaque = generateOpaque(); private final Map<Integer, OperationCallback> callbacks = new HashMap<Integer, OperationCallback>(); private final List<CASOperation> ops = new ArrayList<CASOperation>(); // If nothing else, this will be a NOOP. private int byteCount = MIN_RECV_PACKET; /** * Construct an optimized get starting with the given get operation. */ public OptimizedSetImpl(CASOperation firstStore) { super(-1, -1, NOOP_CALLBACK); addOperation(firstStore); } public void addOperation(CASOperation op) { ops.add(op); // Count the bytes required by this operation. Iterator<String> is = op.getKeys().iterator(); String k = is.next(); int keylen = KeyUtil.getKeyBytes(k).length; byteCount += MIN_RECV_PACKET + StoreOperationImpl.EXTRA_LEN + keylen + op.getData().length; } public int size() { return ops.size(); } public int bytes() { return byteCount; } @Override public void initialize() { // Now create a buffer. ByteBuffer bb = ByteBuffer.allocate(byteCount); for (CASOperation so : ops) { Iterator<String> is = so.getKeys().iterator(); String k = is.next(); byte[] keyBytes = KeyUtil.getKeyBytes(k); assert !is.hasNext(); int myOpaque = generateOpaque(); callbacks.put(myOpaque, so.getCallback()); byte[] data = so.getData(); // Custom header bb.put(REQ_MAGIC); bb.put((byte) cmdMap(so.getStoreType())); bb.putShort((short) keyBytes.length); bb.put((byte) StoreOperationImpl.EXTRA_LEN); // extralen bb.put((byte) 0); // data type bb.putShort(((VBucketAware) so).getVBucket(k)); // vbucket bb.putInt(keyBytes.length + data.length + StoreOperationImpl.EXTRA_LEN); bb.putInt(myOpaque); bb.putLong(so.getCasValue()); // cas // Extras bb.putInt(so.getFlags()); bb.putInt(so.getExpiration()); // the actual key bb.put(keyBytes); // And the value bb.put(data); } // Add the noop bb.put(REQ_MAGIC); bb.put((byte) NoopOperationImpl.CMD); bb.putShort((short) 0); bb.put((byte) 0); // extralen bb.put((byte) 0); // data type bb.putShort((short) 0); // reserved bb.putInt(0); bb.putInt(terminalOpaque); bb.putLong(0); // cas bb.flip(); setBuffer(bb); } private static int cmdMap(StoreType t) { int rv; switch (t) { case set: rv = StoreOperationImpl.SETQ; break; case add: rv = StoreOperationImpl.ADDQ; break; case replace: rv = StoreOperationImpl.REPLACEQ; break; default: rv = -1; } // Check fall-through. 
assert rv != -1 : "Unhandled store type: " + t; return rv; } @Override protected void finishedPayload(byte[] pl) throws IOException { if (responseOpaque == terminalOpaque) { for (OperationCallback cb : callbacks.values()) { cb.receivedStatus(STATUS_OK); cb.complete(); } transitionState(OperationState.COMPLETE); } else { OperationCallback cb = callbacks.remove(responseOpaque); assert cb != null : "No callback for " + responseOpaque; assert errorCode != 0 : "Got no error on a quiet mutation."; OperationStatus status = getStatusForErrorCode(errorCode, pl); assert status != null : "Got no status for a quiet mutation error"; cb.receivedStatus(status); cb.complete(); } resetInput(); } @Override protected boolean opaqueIsValid() { return responseOpaque == terminalOpaque || callbacks.containsKey(responseOpaque); } static class NoopCallback implements OperationCallback { public void complete() { // noop } public void receivedStatus(OperationStatus status) { // noop } }
<<<<<<< for (TbMsg tbMsg : queue.findUnprocessed(ruleNode.getId().getId(), systemContext.getQueuePartitionId())) { ======= for (TbMsg tbMsg : queue.findUnprocessed(tenantId, ruleNode.getId().getId(), 0L)) { >>>>>>> for (TbMsg tbMsg : queue.findUnprocessed(tenantId, ruleNode.getId().getId(), systemContext.getQueuePartitionId())) { <<<<<<< for (TbMsg tbMsg : queue.findUnprocessed(entityId.getId(), systemContext.getQueuePartitionId())) { ======= for (TbMsg tbMsg : queue.findUnprocessed(tenantId, entityId.getId(), 0L)) { >>>>>>> for (TbMsg tbMsg : queue.findUnprocessed(tenantId, entityId.getId(), systemContext.getQueuePartitionId())) {
<<<<<<< ======= import org.jboss.netty.bootstrap.ClientBootstrap; import org.jboss.netty.channel.Channel; import org.jboss.netty.channel.ChannelFactory; import org.jboss.netty.channel.ChannelFuture; import org.jboss.netty.channel.socket.nio.NioClientSocketChannelFactory; import org.jboss.netty.handler.codec.http.DefaultHttpRequest; import org.jboss.netty.handler.codec.http.HttpHeaders; import org.jboss.netty.handler.codec.http.HttpMethod; import org.jboss.netty.handler.codec.http.HttpRequest; import org.jboss.netty.handler.codec.http.HttpVersion; import java.lang.reflect.Method; >>>>>>> import java.lang.reflect.Method; <<<<<<< private final URI cometStreamURI; private Bucket bucket; private final String httpUser; private final String httpPass; private final ChannelFactory factory; private Channel channel; private final String host; private final int port; private ConfigurationParser configParser; private BucketUpdateResponseHandler handler; /** * The specification version which this client meets. This will be included in * requests to the server. */ public static final String CLIENT_SPEC_VER = "1.0"; /** * @param cometStreamURI the URI which will stream node changes * @param bucketname the bucketToMonitor name we are monitoring * @param username the username required for HTTP Basic Auth to the restful * service * @param password the password required for HTTP Basic Auth to the restful * service */ public BucketMonitor(URI cometStreamURI, String bucketname, String username, String password, ConfigurationParser configParser) { super(); if (cometStreamURI == null) { throw new IllegalArgumentException("cometStreamURI cannot be NULL"); ======= private final URI cometStreamURI; private Bucket bucket; private final String httpUser; private final String httpPass; private final ChannelFactory factory; private Channel channel; private final String host; private final int port; private ConfigurationParser configParser; private BucketUpdateResponseHandler handler; private final HttpMessageHeaders headers; /** * The specification version which this client meets. This will be included * in requests to the server. */ public static final String CLIENT_SPEC_VER = "1.0"; /** * * @param cometStreamURI the URI which will stream node changes * @param bucketname the bucketToMonitor name we are monitoring * @param username the username required for HTTP Basic Auth to the restful service * @param password the password required for HTTP Basic Auth to the restful service */ public BucketMonitor(URI cometStreamURI, String bucketname, String username, String password, ConfigurationParser configParser) { super(); if (cometStreamURI == null) { throw new IllegalArgumentException("cometStreamURI cannot be NULL"); } String scheme = cometStreamURI.getScheme() == null ? "http" : cometStreamURI.getScheme(); if (!scheme.equals("http")) { // an SslHandler is needed in the pipeline //System.err.println("Only HTTP is supported."); throw new UnsupportedOperationException("Only http is supported."); } this.cometStreamURI = cometStreamURI; this.httpUser = username; this.httpPass = password; this.configParser = configParser; this.host = cometStreamURI.getHost(); this.port = cometStreamURI.getPort() == -1 ? 
80 : cometStreamURI.getPort(); factory = new NioClientSocketChannelFactory(Executors.newCachedThreadPool(), Executors.newCachedThreadPool()); this.headers = new HttpMessageHeaders(); } /** * A strategy that selects and invokes the appropriate setHeader method on * the netty HttpHeader class, either setHeader(String, Object) or * setHeader(String, String). This indirection is needed as with netty 3.2.0 * setHeader(String, String) was changed to setHeader(String, Object) and * spymemcached users shall be saved from incompatibilities due to an * upgrade to the newer netty version. Once netty is upgraded to 3.2.0+ this * may strategy can be replaced with a direct invocation of setHeader. */ private static final class HttpMessageHeaders { private final Method m; private HttpMessageHeaders() { this(getHttpMessageHeaderStrategy()); } private HttpMessageHeaders(final Method m) { this.m = m; } private static Method getHttpMessageHeaderStrategy() { try { return HttpRequest.class.getMethod("setHeader", String.class, Object.class); } catch (final SecurityException e) { throw new RuntimeException( "Cannot check method due to security restrictions.", e); } catch (final NoSuchMethodException e) { try { return HttpRequest.class.getMethod("setHeader", String.class, String.class); } catch (final Exception e1) { throw new RuntimeException( "No suitable setHeader method found on netty" + " HttpRequest, the signature seems to have changed.", e1); } } } void setHeader(HttpRequest obj, String name, String value) { try { m.invoke(obj, name, value); } catch (final Exception e) { throw new RuntimeException("Could not invoke method " + m + " with args '" + name + "' and '" + value + "'.", e); } } >>>>>>> private final URI cometStreamURI; private Bucket bucket; private final String httpUser; private final String httpPass; private final ChannelFactory factory; private Channel channel; private final String host; private final int port; private ConfigurationParser configParser; private BucketUpdateResponseHandler handler; private final HttpMessageHeaders headers; /** * The specification version which this client meets. This will be included in * requests to the server. */ public static final String CLIENT_SPEC_VER = "1.0"; /** * @param cometStreamURI the URI which will stream node changes * @param bucketname the bucketToMonitor name we are monitoring * @param username the username required for HTTP Basic Auth to the restful * service * @param password the password required for HTTP Basic Auth to the restful * service */ public BucketMonitor(URI cometStreamURI, String bucketname, String username, String password, ConfigurationParser configParser) { super(); if (cometStreamURI == null) { throw new IllegalArgumentException("cometStreamURI cannot be NULL"); <<<<<<< createChannel(); this.handler = channel.getPipeline().get(BucketUpdateResponseHandler.class); handler.setBucketMonitor(this); HttpRequest request = prepareRequest(cometStreamURI, host); channel.write(request); try { String response = this.handler.getLastResponse(); logFiner("Getting server list returns this last chunked response:\n" + response); Bucket bucketToMonitor = this.configParser.parseBucket(response); setBucket(bucketToMonitor); } catch (ParseException ex) { Logger.getLogger(BucketMonitor.class.getName()).log(Level.WARNING, "Invalid client configuration received from server. 
Staying with " + "existing configuration.", ex); Logger.getLogger(BucketMonitor.class.getName()).log(Level.FINE, "Invalid client configuration received:\n" + handler.getLastResponse() + "\n"); ======= protected HttpRequest prepareRequest(URI uri, String h) { // Send the HTTP request. HttpRequest request = new DefaultHttpRequest( HttpVersion.HTTP_1_1, HttpMethod.GET, uri.toASCIIString()); headers.setHeader(request, HttpHeaders.Names.HOST, h); if (getHttpUser() != null) { String basicAuthHeader; try { basicAuthHeader = ConfigurationProviderHTTP.buildAuthHeader(getHttpUser(), getHttpPass()); headers.setHeader(request, HttpHeaders.Names.AUTHORIZATION, basicAuthHeader); } catch (UnsupportedEncodingException ex) { throw new RuntimeException("Could not encode specified credentials for HTTP request.", ex); } } headers.setHeader(request, HttpHeaders.Names.CONNECTION, HttpHeaders.Values.CLOSE); // No keep-alives for this headers.setHeader(request, HttpHeaders.Names.CACHE_CONTROL, HttpHeaders.Values.NO_CACHE); headers.setHeader(request, HttpHeaders.Names.ACCEPT, "application/json"); headers.setHeader(request, HttpHeaders.Names.USER_AGENT, "spymemcached vbucket client"); headers.setHeader(request, "X-memcachekv-Store-Client-Specification-Version", CLIENT_SPEC_VER); return request; >>>>>>> createChannel(); this.handler = channel.getPipeline().get(BucketUpdateResponseHandler.class); handler.setBucketMonitor(this); HttpRequest request = prepareRequest(cometStreamURI, host); channel.write(request); try { String response = this.handler.getLastResponse(); logFiner("Getting server list returns this last chunked response:\n" + response); Bucket bucketToMonitor = this.configParser.parseBucket(response); setBucket(bucketToMonitor); } catch (ParseException ex) { Logger.getLogger(BucketMonitor.class.getName()).log(Level.WARNING, "Invalid client configuration received from server. Staying with " + "existing configuration.", ex); Logger.getLogger(BucketMonitor.class.getName()).log(Level.FINE, "Invalid client configuration received:\n" + handler.getLastResponse() + "\n");
<<<<<<< public Mutator getType() { return mutator; } ======= @Override public String toString() { return super.toString() + " Amount: " + by + " Default: " + def + " Exp: " + exp; } >>>>>>> public Mutator getType() { return mutator; } @Override public String toString() { return super.toString() + " Amount: " + by + " Default: " + def + " Exp: " + exp; }
<<<<<<< public byte getMagic() { return magic; } ======= /** * Defines the magic value * @param magic - The new magic value */ TapMagic(byte magic) { this.magic = magic; } public static TapMagic getMagicByByte(byte b) { if (b == PROTOCOL_BINARY_REQ.magic) { return TapMagic.PROTOCOL_BINARY_REQ; } else if (b == PROTOCOL_BINARY_RES.magic) { return TapMagic.PROTOCOL_BINARY_RES; } else { throw new IllegalArgumentException("Bad magic value"); } } >>>>>>> public static TapMagic getMagicByByte(byte b) { if (b == PROTOCOL_BINARY_REQ.magic) { return TapMagic.PROTOCOL_BINARY_REQ; } else if (b == PROTOCOL_BINARY_RES.magic) { return TapMagic.PROTOCOL_BINARY_RES; } else { throw new IllegalArgumentException("Bad magic value"); } }
<<<<<<< /** * For the platform-bundled library, we currently don't enable SNI by default. */ public static boolean isSniEnabledByDefault() { return false; } ======= /** * Wrap the SocketFactory with the platform wrapper if needed for compatability. * For the platform-bundled library we never need to wrap. */ public static SSLSocketFactory wrapSocketFactoryIfNeeded(OpenSSLSocketFactoryImpl factory) { return factory; } >>>>>>> /** * For the platform-bundled library, we currently don't enable SNI by default. */ public static boolean isSniEnabledByDefault() { return false; } /** * Wrap the SocketFactory with the platform wrapper if needed for compatability. * For the platform-bundled library we never need to wrap. */ public static SSLSocketFactory wrapSocketFactoryIfNeeded(OpenSSLSocketFactoryImpl factory) { return factory; }
<<<<<<< /* == Signatures == */ put("Signature.MD5WithRSAEncryption", OpenSSLSignature.MD5RSA.class.getName()); put("Alg.Alias.Signature.MD5WithRSA", "MD5WithRSAEncryption"); put("Alg.Alias.Signature.MD5/RSA", "MD5WithRSAEncryption"); put("Alg.Alias.Signature.1.2.840.113549.1.1.4", "MD5WithRSAEncryption"); put("Alg.Alias.Signature.1.2.840.113549.2.5with1.2.840.113549.1.1.1", "MD5WithRSAEncryption"); ======= // Signatures put("Signature.MD5WithRSA", OpenSSLSignature.MD5RSA.class.getName()); put("Alg.Alias.Signature.MD5WithRSAEncryption", "MD5WithRSA"); put("Alg.Alias.Signature.MD5/RSA", "MD5WithRSA"); put("Alg.Alias.Signature.1.2.840.113549.1.1.4", "MD5WithRSA"); put("Alg.Alias.Signature.1.2.840.113549.2.5with1.2.840.113549.1.1.1", "MD5WithRSA"); >>>>>>> /* == Signatures == */ put("Signature.MD5WithRSA", OpenSSLSignature.MD5RSA.class.getName()); put("Alg.Alias.Signature.MD5WithRSAEncryption", "MD5WithRSA"); put("Alg.Alias.Signature.MD5/RSA", "MD5WithRSA"); put("Alg.Alias.Signature.1.2.840.113549.1.1.4", "MD5WithRSA"); put("Alg.Alias.Signature.1.2.840.113549.2.5with1.2.840.113549.1.1.1", "MD5WithRSA");
<<<<<<< final int result = LZ4JNI.LZ4_compressHC(src, srcOff, srcLen, dest, destOff, maxDestLen, compressionLevel); ======= final int result = LZ4JNI.LZ4_compressHC(src, null, srcOff, srcLen, dest, null, destOff, maxDestLen); if (result <= 0) { throw new LZ4Exception(); } return result; } @Override public int compress(ByteBuffer src, int srcOff, int srcLen, ByteBuffer dest, int destOff, int maxDestLen) { int result = LZ4JNI.LZ4_compressHC( ByteBufferUtils.getArray(src), src, srcOff, srcLen, ByteBufferUtils.getArray(dest), dest, destOff, maxDestLen); >>>>>>> final int result = LZ4JNI.LZ4_compressHC(src, null, srcOff, srcLen, dest, null, destOff, maxDestLen, compressionLevel); if (result <= 0) { throw new LZ4Exception(); } return result; } @Override public int compress(ByteBuffer src, int srcOff, int srcLen, ByteBuffer dest, int destOff, int maxDestLen) { int result = LZ4JNI.LZ4_compressHC( ByteBufferUtils.getArray(src), src, srcOff, srcLen, ByteBufferUtils.getArray(dest), dest, destOff, maxDestLen, compressionLevel);
<<<<<<< static native int LZ4_compress_limitedOutput(byte[] src, int srcOff, int srcLen, byte[] dest, int destOff, int maxDestLen); static native int LZ4_compressHC(byte[] src, int srcOff, int srcLen, byte[] dest, int destOff, int maxDestLen, int compressionLevel); static native int LZ4_decompress_fast(byte[] src, int srcOff, byte[] dest, int destOff, int destLen); static native int LZ4_decompress_fast_withPrefix64k(byte[] src, int srcOff, byte[] dest, int destOff, int destLen); static native int LZ4_decompress_safe(byte[] src, int srcOff, int srcLen, byte[] dest, int destOff, int maxDestLen); static native int LZ4_decompress_safe_withPrefix64k(byte[] src, int srcOff, int srcLen, byte[] dest, int destOff, int maxDestLen); ======= static native int LZ4_compress_limitedOutput(byte[] srcArray, ByteBuffer srcBuffer, int srcOff, int srcLen, byte[] destArray, ByteBuffer destBuffer, int destOff, int maxDestLen); static native int LZ4_compressHC(byte[] srcArray, ByteBuffer srcBuffer, int srcOff, int srcLen, byte[] destArray, ByteBuffer destBuffer, int destOff, int maxDestLen); static native int LZ4_decompress_fast(byte[] srcArray, ByteBuffer srcBuffer, int srcOff, byte[] destArray, ByteBuffer destBuffer, int destOff, int destLen); static native int LZ4_decompress_fast_withPrefix64k(byte[] srcArray, ByteBuffer srcBuffer, int srcOff, byte[] destArray, ByteBuffer destBuffer, int destOff, int destLen); static native int LZ4_decompress_safe(byte[] srcArray, ByteBuffer srcBuffer, int srcOff, int srcLen, byte[] destArray, ByteBuffer destBuffer, int destOff, int maxDestLen); static native int LZ4_decompress_safe_withPrefix64k(byte[] srcArray, ByteBuffer srcBuffer, int srcOff, int srcLen, byte[] destArray, ByteBuffer destBuffer, int destOff, int maxDestLen); >>>>>>> static native int LZ4_compress_limitedOutput(byte[] srcArray, ByteBuffer srcBuffer, int srcOff, int srcLen, byte[] destArray, ByteBuffer destBuffer, int destOff, int maxDestLen); static native int LZ4_compressHC(byte[] srcArray, ByteBuffer srcBuffer, int srcOff, int srcLen, byte[] destArray, ByteBuffer destBuffer, int destOff, int maxDestLen, int compressionLevel); static native int LZ4_decompress_fast(byte[] srcArray, ByteBuffer srcBuffer, int srcOff, byte[] destArray, ByteBuffer destBuffer, int destOff, int destLen); static native int LZ4_decompress_fast_withPrefix64k(byte[] srcArray, ByteBuffer srcBuffer, int srcOff, byte[] destArray, ByteBuffer destBuffer, int destOff, int destLen); static native int LZ4_decompress_safe(byte[] srcArray, ByteBuffer srcBuffer, int srcOff, int srcLen, byte[] destArray, ByteBuffer destBuffer, int destOff, int maxDestLen); static native int LZ4_decompress_safe_withPrefix64k(byte[] srcArray, ByteBuffer srcBuffer, int srcOff, int srcLen, byte[] destArray, ByteBuffer destBuffer, int destOff, int maxDestLen);
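The binding resolved above keeps both calling conventions in one native signature: each of source and destination is passed as a byte[] plus a ByteBuffer, with the unused slot set to null, and LZ4_compressHC additionally keeps the trailing compressionLevel argument. A rough caller-side sketch of that dispatch, assuming the package-private LZ4JNI, ByteBufferUtils and LZ4Exception types shown in the snippets; the helper itself is hypothetical, not part of the library API.

    // Hypothetical helper mirroring the ByteBuffer overload shown above:
    // pass the backing array when there is one, otherwise rely on the direct buffer slot.
    static int compressHC(java.nio.ByteBuffer src, int srcOff, int srcLen,
                          java.nio.ByteBuffer dest, int destOff, int maxDestLen,
                          int compressionLevel) {
        byte[] srcArray = ByteBufferUtils.getArray(src);   // assumed: returns null for direct buffers
        byte[] destArray = ByteBufferUtils.getArray(dest);
        int result = LZ4JNI.LZ4_compressHC(srcArray, src, srcOff, srcLen,
                                           destArray, dest, destOff, maxDestLen,
                                           compressionLevel);
        if (result <= 0) {
            throw new LZ4Exception();                      // same failure convention as the snippet
        }
        return result;                                     // number of compressed bytes written to dest
    }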
<<<<<<< public void testUnknownWithCmsConcurrent() { // TODO: Create File in platform independent way. File testFile = new File("src/test/data/dataset111.txt"); GcManager jvmManager = new GcManager(); File preprocessedFile = jvmManager.preprocess(testFile, null); jvmManager.store(preprocessedFile, false); JvmRun jvmRun = jvmManager.getJvmRun(new Jvm(null, null), Constants.DEFAULT_BOTTLENECK_THROUGHPUT_THRESHOLD); String lastLogLineUnprocessed = "130454.251: [Full GC (Allocation Failure) 130454.251: [CMS130456.427: " + "[CMS-concurrent-mark: 2.176/2.182 secs] [Times: user=2.18 sys=0.00, real=2.18 secs]"; Assert.assertEquals("Last unprocessed log line not correct.", lastLogLineUnprocessed, jvmManager.getLastLogLineUnprocessed()); Assert.assertEquals("Event type count not correct.", 2, jvmRun.getEventTypes().size()); Assert.assertTrue("Log line not recognized as " + LogEventType.UNKNOWN.toString() + ".", jvmRun.getEventTypes().contains(LogEventType.UNKNOWN)); Assert.assertTrue("Log line not recognized as " + LogEventType.CMS_CONCURRENT.toString() + ".", jvmRun.getEventTypes().contains(LogEventType.CMS_CONCURRENT)); // Not the last preprocessed line, but part of last unpreprocessed line Assert.assertTrue(Analysis.INFO_UNIDENTIFIED_LOG_LINE_LAST + " analysis not identified.", jvmRun.getAnalysisKeys().contains(Analysis.INFO_UNIDENTIFIED_LOG_LINE_LAST)); Assert.assertFalse(Analysis.WARN_UNIDENTIFIED_LOG_LINE_REPORT + " analysis incorrectly identified.", jvmRun.getAnalysisKeys().contains(Analysis.WARN_UNIDENTIFIED_LOG_LINE_REPORT)); } ======= public void testUnknownWithCmsConcurrent() { // TODO: Create File in platform independent way. File testFile = new File("src/test/data/dataset111.txt"); GcManager jvmManager = new GcManager(); File preprocessedFile = jvmManager.preprocess(testFile, null); jvmManager.store(preprocessedFile, false); JvmRun jvmRun = jvmManager.getJvmRun(new Jvm(null, null), Constants.DEFAULT_BOTTLENECK_THROUGHPUT_THRESHOLD, true); Assert.assertEquals("Event type count not correct.", 2, jvmRun.getEventTypes().size()); Assert.assertTrue("Log line not recognized as " + LogEventType.UNKNOWN.toString() + ".", jvmRun.getEventTypes().contains(LogEventType.UNKNOWN)); Assert.assertTrue("Log line not recognized as " + LogEventType.CMS_CONCURRENT.toString() + ".", jvmRun.getEventTypes().contains(LogEventType.CMS_CONCURRENT)); Assert.assertTrue(Analysis.WARN_UNIDENTIFIED_LOG_LINE_REPORT + " analysis not identified.", jvmRun.getAnalysisKeys().contains(Analysis.WARN_UNIDENTIFIED_LOG_LINE_REPORT)); } >>>>>>> public void testUnknownWithCmsConcurrent() { // TODO: Create File in platform independent way. 
File testFile = new File("src/test/data/dataset111.txt"); GcManager jvmManager = new GcManager(); File preprocessedFile = jvmManager.preprocess(testFile, null); jvmManager.store(preprocessedFile, false); JvmRun jvmRun = jvmManager.getJvmRun(new Jvm(null, null), Constants.DEFAULT_BOTTLENECK_THROUGHPUT_THRESHOLD); String lastLogLineUnprocessed = "130454.251: [Full GC (Allocation Failure) 130454.251: [CMS130456.427: " + "[CMS-concurrent-mark: 2.176/2.182 secs] [Times: user=2.18 sys=0.00, real=2.18 secs]"; Assert.assertEquals("Last unprocessed log line not correct.", lastLogLineUnprocessed, jvmManager.getLastLogLineUnprocessed()); Assert.assertEquals("Event type count not correct.", 2, jvmRun.getEventTypes().size()); Assert.assertTrue("Log line not recognized as " + LogEventType.UNKNOWN.toString() + ".", jvmRun.getEventTypes().contains(LogEventType.UNKNOWN)); Assert.assertTrue("Log line not recognized as " + LogEventType.CMS_CONCURRENT.toString() + ".", jvmRun.getEventTypes().contains(LogEventType.CMS_CONCURRENT)); // Not the last preprocessed line, but part of last unpreprocessed line Assert.assertTrue(Analysis.INFO_UNIDENTIFIED_LOG_LINE_LAST + " analysis not identified.", jvmRun.getAnalysisKeys().contains(Analysis.INFO_UNIDENTIFIED_LOG_LINE_LAST)); }
<<<<<<< private static final String ALBA_EXPORT_PATH = "preference_alba_export_path"; private static final String GROUPS_EXPORT_PATH = "preference_groups_export_path"; private static final String URI_DEFAULT = Uri.fromFile( Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_MUSIC)).toString(); ======= public static final String ALBA_EXPORT_PATH = "preference_alba_export_path"; public static final String GROUPS_EXPORT_PATH = "preference_groups_export_path"; public static final String URI_DEFAULT = Uri.EMPTY.toString(); >>>>>>> public static final String ALBA_EXPORT_PATH = "preference_alba_export_path"; public static final String GROUPS_EXPORT_PATH = "preference_groups_export_path"; public static final String URI_DEFAULT = Uri.fromFile( Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_MUSIC)).toString(); <<<<<<< public static boolean getSetupDone() { return preferences.getBoolean(SETUP_DONE, SETUP_DONE_DEFAULT); } public static void setSetupDone(boolean done) { preferences.edit().putBoolean(SETUP_DONE, done).apply(); } public static int getAlbumArtSize() { return Integer.parseInt(preferences.getString(EXPORT_ALBUM_ART_SIZE, "" + EXPORT_ALBUM_ART_SIZE_DEFAULT)); } public static void setAlbumArtSize(int size) { preferences.edit().putString(EXPORT_ALBUM_ART_SIZE, "" + size).apply(); } ======= public static void setAutoExportPath(Uri treeUri) { preferences.edit().putString(AUTO_EXPORT_PATH, treeUri.toString()).apply(); } >>>>>>> public static void setAutoExportPath(Uri treeUri) { preferences.edit().putString(AUTO_EXPORT_PATH, treeUri.toString()).apply(); } public static boolean getSetupDone() { return preferences.getBoolean(SETUP_DONE, SETUP_DONE_DEFAULT); } public static void setSetupDone(boolean done) { preferences.edit().putBoolean(SETUP_DONE, done).apply(); } public static int getAlbumArtSize() { return Integer.parseInt(preferences.getString(EXPORT_ALBUM_ART_SIZE, "" + EXPORT_ALBUM_ART_SIZE_DEFAULT)); } public static void setAlbumArtSize(int size) { preferences.edit().putString(EXPORT_ALBUM_ART_SIZE, "" + size).apply(); }
<<<<<<< // System.out.println("Raw value..." + rawValue + ", " + floatValue); ======= int rawValue = (int) (floatValue / 0.0000002384185791015625); // sLOGGER.log(Level.DEBUG, "Raw value..." + rawValue + ", " + // floatValue); >>>>>>> // System.out.println("Raw value..." + rawValue + ", " + floatValue); <<<<<<< int demand = (aBuffer.getInt(2)) >> 8; // int demand2 = // System.out.println(String.format(" Demand: %d, Command: %d", demand, commandType)); ======= short demand = aBuffer.getShort(3); // sLOGGER.log(Level.DEBUG, String.format(" Demand: %d", demand)); >>>>>>> int demand = (aBuffer.getInt(2)) >> 8; // int demand2 = sLOGGER.log(Level.DEBUG, String.format(" Demand: %d, Command: %d", demand, commandType)); <<<<<<< double position = demand / 4096.0; wrapper.setPositionGoal(position); // System.out.println(" Setting by position." + position); ======= sLOGGER.log(Level.DEBUG, " Setting by position."); >>>>>>> double position = demand / 4096.0; wrapper.setPositionGoal(position); sLOGGER.log(Level.DEBUG, " Setting by position." + position); <<<<<<< double speed = demand * 600.0 / 4096.0; wrapper.setSpeedGoal(speed); // System.out.println(" Setting by speed. " + speed); ======= sLOGGER.log(Level.DEBUG, " Setting by speed."); >>>>>>> double speed = demand * 600.0 / 4096.0; wrapper.setSpeedGoal(speed); sLOGGER.log(Level.DEBUG, " Setting by speed. " + speed); <<<<<<< System.out.println(" Setting by current." + demand); ======= sLOGGER.log(Level.DEBUG, " Setting by current."); >>>>>>> sLOGGER.log(Level.DEBUG, " Setting by current." + demand); <<<<<<< ======= // sLOGGER.log(Level.DEBUG, " Setting by voltage. " + // voltageDemand); >>>>>>>
<<<<<<< import com.github.dfa.diaspora_android.data.DiasporaPodList; import com.github.dfa.diaspora_android.data.DiasporaPodList.DiasporaPod; ======= import com.github.dfa.diaspora_android.ui.IntellihideToolbarActivityListener; import com.github.dfa.diaspora_android.util.theming.ColorPalette; import com.github.dfa.diaspora_android.util.ProxyHandler; >>>>>>> import com.github.dfa.diaspora_android.ui.IntellihideToolbarActivityListener; <<<<<<< import com.github.dfa.diaspora_android.util.DiasporaUrlHelper; ======= import com.github.dfa.diaspora_android.util.theming.ThemeHelper; import butterknife.BindView; import butterknife.ButterKnife; import uz.shift.colorpicker.LineColorPicker; import uz.shift.colorpicker.OnColorChangedListener; >>>>>>> import com.github.dfa.diaspora_android.util.DiasporaUrlHelper; import com.github.dfa.diaspora_android.util.ProxyHandler; import com.github.dfa.diaspora_android.util.theming.ColorPalette; import com.github.dfa.diaspora_android.util.theming.ThemeHelper; import butterknife.BindView; import butterknife.ButterKnife; import uz.shift.colorpicker.LineColorPicker; import uz.shift.colorpicker.OnColorChangedListener; <<<<<<< App app = ((App) getActivity().getApplication()); DiasporaUrlHelper diasporaUrlHelper = new DiasporaUrlHelper(app.getSettings()); ======= String podDomain = appSettings.getPodDomain(); >>>>>>> DiasporaUrlHelper diasporaUrlHelper = new DiasporaUrlHelper(app.getSettings());
<<<<<<< MenuItem item; ======= Log.i(App.TAG, "MainActivity.onPrepareOptionsMenu()"); MenuItem itemNotification = menu.findItem(R.id.action_notifications); if (itemNotification != null) { if (podUserProfile.getNotificationCount() > 0) { itemNotification.setIcon(R.drawable.ic_notifications_colored_48px); } else { itemNotification.setIcon(R.drawable.ic_notifications_white_48px); } >>>>>>> Log.i(App.TAG, "MainActivity.onPrepareOptionsMenu()"); MenuItem item;
<<<<<<< ======= import com.github.dfa.diaspora_android.util.theming.ThemeHelper; >>>>>>> import com.github.dfa.diaspora_android.util.theming.ThemeHelper; <<<<<<< CustomFragment top = getTopFragment(); if(top != null && top.getFragmentTag().equals(DiasporaStreamFragment.TAG)) { MainActivity.this.setTitle(rId); } ======= CustomFragment top = getTopFragment(); if (top != null && top.getFragmentTag().equals(DiasporaStreamFragment.TAG)) { MainActivity.this.setTitle(rId); } >>>>>>> CustomFragment top = getTopFragment(); if(top != null && top.getFragmentTag().equals(DiasporaStreamFragment.TAG)) { MainActivity.this.setTitle(rId); } <<<<<<< /** * Show DiasporaStreamFragment if necessary and load URL url * @param url URL to load in the DiasporaStreamFragment */ ======= /** * Show DiasporaStreamFragment if necessary and load URL url * * @param url URL to load in the DiasporaStreamFragment */ >>>>>>> /** * Show DiasporaStreamFragment if necessary and load URL url * @param url URL to load in the DiasporaStreamFragment */ <<<<<<< /** * Forward incoming intents to handleIntent() * @param intent incoming */ ======= /** * Forward incoming intents to handleIntent() * * @param intent incoming */ >>>>>>> /** * Forward incoming intents to handleIntent() * @param intent incoming */ <<<<<<< /** * Handle intents and execute intent specific actions * @param intent intent to get handled */ ======= /** * Handle intents and execute intent specific actions * * @param intent intent to get handled */ >>>>>>> /** * Handle intents and execute intent specific actions * @param intent intent to get handled */ <<<<<<< AppLog.v(this, "Intent has a delicious URL for us: "+loadUrl); ======= AppLog.v(this, "Intent has a delicious URL for us: " + loadUrl); >>>>>>> AppLog.v(this, "Intent has a delicious URL for us: " + loadUrl); <<<<<<< ((DiasporaStreamFragment) getFragment(DiasporaStreamFragment.TAG)).getWebView().clearCache(true); } else if (ACTION_RELOAD_ACTIVITY.equals(action)) { AppLog.v(this, "Recreate activity"); recreate(); return; ======= ((DiasporaStreamFragment) getFragment(DiasporaStreamFragment.TAG)).getWebView().clearCache(true); >>>>>>> ((DiasporaStreamFragment) getFragment(DiasporaStreamFragment.TAG)).getWebView().clearCache(true); <<<<<<< /** * Handle activity results * @param requestCode reqCode * @param resultCode resCode * @param data data */ ======= /** * Handle activity results * * @param requestCode reqCode * @param resultCode resCode * @param data data */ >>>>>>> /** * Handle activity results * * @param requestCode reqCode * @param resultCode resCode * @param data data */ <<<<<<< AppLog.v(this, "onActivityResult(): "+requestCode); ======= AppLog.v(this, "onActivityResult(): " + requestCode); >>>>>>> AppLog.v(this, "onActivityResult(): " + requestCode); <<<<<<< /** * Return the fragment which is currently displayed in R.id.fragment_container * @return top fragment or null if there is none displayed */ private CustomFragment getTopFragment() { Fragment top = fm.findFragmentById(R.id.fragment_container); if(top != null) { return (CustomFragment) top; ======= /** * Return the fragment which is currently displayed in R.id.fragment_container * * @return top fragment or null if there is none displayed */ private CustomFragment getTopFragment() { Fragment top = fm.findFragmentById(R.id.fragment_container); if (top != null) { return (CustomFragment) top; >>>>>>> /** * Return the fragment which is currently displayed in R.id.fragment_container * @return top fragment or null if there is none displayed */ private 
CustomFragment getTopFragment() { Fragment top = fm.findFragmentById(R.id.fragment_container); if (top != null) { return (CustomFragment) top; <<<<<<< CustomFragment top = getTopFragment(); if(top != null) { AppLog.v(this, "Top Fragment is not null"); if(!top.onBackPressed()) { AppLog.v(this, "Top Fragment.onBackPressed was false"); AppLog.v(this, "BackStackEntryCount: "+fm.getBackStackEntryCount()); if(fm.getBackStackEntryCount()>0) { fm.popBackStack(); } else { snackbarExitApp.show(); } ======= CustomFragment top = getTopFragment(); if (top != null) { AppLog.v(this, "Top Fragment is not null"); if (!top.onBackPressed()) { AppLog.v(this, "Top Fragment.onBackPressed was false"); AppLog.v(this, "BackStackEntryCount: " + fm.getBackStackEntryCount()); if (fm.getBackStackEntryCount() > 0) { fm.popBackStack(); } else { snackbarExitApp.show(); } >>>>>>> CustomFragment top = getTopFragment(); if (top != null) { AppLog.v(this, "Top Fragment is not null"); if (!top.onBackPressed()) { AppLog.v(this, "Top Fragment.onBackPressed was false"); AppLog.v(this, "BackStackEntryCount: " + fm.getBackStackEntryCount()); if (fm.getBackStackEntryCount() > 0) { fm.popBackStack(); } else { snackbarExitApp.show(); } <<<<<<< /** * Clear and repopulate top and bottom toolbar. * Also add menu items of the displayed fragment * @param menu top toolbar * @return boolean */ ======= /** * Clear and repopulate top and bottom toolbar. * Also add menu items of the displayed fragment * * @param menu top toolbar * @return boolean */ >>>>>>> /** * Clear and repopulate top and bottom toolbar. * Also add menu items of the displayed fragment * @param menu top toolbar * @return boolean */ <<<<<<< /** * Handle clicks on the optionsmenu * @param item item * @return boolean */ ======= /** * Handle clicks on the optionsmenu * * @param item item * @return boolean */ >>>>>>> /** * Handle clicks on the optionsmenu * @param item item * @return boolean */ <<<<<<< /** * Update the profile name in the navigation slider * @param name name */ ======= /** * Update the profile name in the navigation slider * * @param name name */ >>>>>>> /** * Update the profile name in the navigation slider * @param name name */ <<<<<<< /** * Update the profile picture in the navigation slider * @param avatarUrl url of the new profile pic */ ======= /** * Update the profile picture in the navigation slider * * @param avatarUrl url of the new profile pic */ >>>>>>> /** * Update the profile picture in the navigation slider * @param avatarUrl url of the new profile pic */ <<<<<<< /** * Handle hashtag clicks. Open the new-post-url and inject the clicked hashtag into the post-editor * @param intent intent */ ======= /** * Handle hashtag clicks. Open the new-post-url and inject the clicked hashtag into the post-editor * * @param intent intent */ >>>>>>> /** * Handle hashtag clicks. 
Open the new-post-url and inject the clicked hashtag into the post-editor * @param intent intent */ <<<<<<< /** * Open the new-post-url and inject text that was shared into the app into the post editors text field * @param intent shareTextIntent */ ======= /** * Open the new-post-url and inject text that was shared into the app into the post editors text field * * @param intent shareTextIntent */ >>>>>>> /** * Open the new-post-url and inject text that was shared into the app into the post editors text field * @param intent shareTextIntent */ <<<<<<< /** * Share an image shared to the app via diaspora * @param intent shareImageIntent */ //TODO: Implement some day ======= /** * Share an image shared to the app via diaspora * * @param intent shareImageIntent */ //TODO: Implement some day >>>>>>> /** * Share an image shared to the app via diaspora * @param intent shareImageIntent */ //TODO: Implement some day <<<<<<< /** * Invalidate the top toolbar to update the notification counter * @param notificationCount new notification count */ ======= /** * Invalidate the top toolbar to update the notification counter * * @param notificationCount new notification count */ >>>>>>> /** * Invalidate the top toolbar to update the notification counter * @param notificationCount new notification count */ <<<<<<< /** * Invalidate the top toolbar to update the unread messages counter * @param unreadMessageCount new unread messages count */ ======= /** * Invalidate the top toolbar to update the unread messages counter * * @param unreadMessageCount new unread messages count */ >>>>>>> /** * Invalidate the top toolbar to update the unread messages counter * @param unreadMessageCount new unread messages count */ <<<<<<< /** * React to results of requestPermission * @param requestCode resCode * @param permissions requested permissions * @param grantResults granted results */ ======= /** * React to results of requestPermission * * @param requestCode resCode * @param permissions requested permissions * @param grantResults granted results */ >>>>>>> /** * React to results of requestPermission * @param requestCode resCode * @param permissions requested permissions * @param grantResults granted results */ <<<<<<< /** * Return the string that will be shared into the new-post-editor * @return String */ public String getTextToBeShared() { return textToBeShared; } /** * Set the string that will be shared into the new-post-editor * @param textToBeShared */ public void setTextToBeShared(String textToBeShared) { this.textToBeShared = textToBeShared; } ======= /** * Return the string that will be shared into the new-post-editor * * @return String */ public String getTextToBeShared() { return textToBeShared; } /** * Set the string that will be shared into the new-post-editor * * @param textToBeShared */ public void setTextToBeShared(String textToBeShared) { this.textToBeShared = textToBeShared; } @Override protected void applyColorToViews() { ThemeHelper.updateToolbarColor(toolbarTop); ThemeHelper.updateActionMenuViewColor(toolbarBottom); navDrawerLayout.setBackgroundColor(appSettings.getPrimaryColor()); navProfilePictureArea.setBackgroundColor(appSettings.getPrimaryColor()); } @Override public void enableToolbarHiding() { AppLog.d(this, "Enable Intellihide"); AppBarLayout.LayoutParams params = (AppBarLayout.LayoutParams) toolbarTop.getLayoutParams(); //scroll|enterAlways|snap params.setScrollFlags(toolbarDefaultScrollFlags); appBarLayout.setExpanded(true, true); } @Override public void disableToolbarHiding() { AppLog.d(this, 
"Disable Intellihide"); AppBarLayout.LayoutParams params = (AppBarLayout.LayoutParams) toolbarTop.getLayoutParams(); params.setScrollFlags(0); // clear all scroll flags appBarLayout.setExpanded(true, true); } >>>>>>> /** * Return the string that will be shared into the new-post-editor * @return String */ public String getTextToBeShared() { return textToBeShared; } /** * Set the string that will be shared into the new-post-editor * @param textToBeShared */ public void setTextToBeShared(String textToBeShared) { this.textToBeShared = textToBeShared; } @Override protected void applyColorToViews() { ThemeHelper.updateToolbarColor(toolbarTop); ThemeHelper.updateActionMenuViewColor(toolbarBottom); navDrawerLayout.setBackgroundColor(appSettings.getPrimaryColor()); navProfilePictureArea.setBackgroundColor(appSettings.getPrimaryColor()); } @Override public void enableToolbarHiding() { AppLog.d(this, "Enable Intellihide"); AppBarLayout.LayoutParams params = (AppBarLayout.LayoutParams) toolbarTop.getLayoutParams(); //scroll|enterAlways|snap params.setScrollFlags(toolbarDefaultScrollFlags); appBarLayout.setExpanded(true, true); } @Override public void disableToolbarHiding() { AppLog.d(this, "Disable Intellihide"); AppBarLayout.LayoutParams params = (AppBarLayout.LayoutParams) toolbarTop.getLayoutParams(); params.setScrollFlags(0); // clear all scroll flags appBarLayout.setExpanded(true, true); }
<<<<<<< public Pair<SharedTorrent, AnnounceableFileTorrent> remove(String hash) { final Pair<SharedTorrent, AnnounceableFileTorrent> result; ======= public Pair<SharedTorrent, LoadedTorrent> remove(String hash) { >>>>>>> public Pair<SharedTorrent, LoadedTorrent> remove(String hash) { final Pair<SharedTorrent, LoadedTorrent> result; <<<<<<< final AnnounceableFileTorrent announceableFileTorrent = myAnnounceableTorrents.remove(hash); result = new Pair<SharedTorrent, AnnounceableFileTorrent>(sharedTorrent, announceableFileTorrent); ======= final LoadedTorrent loadedTorrent = myAnnounceableTorrents.remove(hash); return new Pair<SharedTorrent, LoadedTorrent>(sharedTorrent, loadedTorrent); >>>>>>> final LoadedTorrent loadedTorrent = myAnnounceableTorrents.remove(hash); result = new Pair<SharedTorrent, LoadedTorrent>(sharedTorrent, loadedTorrent);
<<<<<<< public List<String> getListenerLog() { return BeforeAndAfterLoggingWithListenerFixture.getLog(); } ======= public void simulateRun(final String href) throws Exception { new RunCommandSimulator().simulate(href, this.getClass()); } >>>>>>> public List<String> getListenerLog() { return BeforeAndAfterLoggingWithListenerFixture.getLog(); } public void simulateRun(final String href) throws Exception { new RunCommandSimulator().simulate(href, this.getClass()); }
<<<<<<< void setSpecificationDescription(String description); ======= void record(ResultSummary result); >>>>>>> void setSpecificationDescription(String description); void record(ResultSummary result);
<<<<<<< if (!isBeforeExample) { announceBeforeExample(exampleName, node.getElement(), resultRecorder, fixture); ======= ImplementationStatus status = getImplementationStatus(node); if (!isBeforeExample && status != ImplementationStatus.IGNORED) { announceBeforeExample(exampleName, node.getElement(), resultRecorder); >>>>>>> ImplementationStatus status = getImplementationStatus(node); if (!isBeforeExample && status != ImplementationStatus.IGNORED) { announceBeforeExample(exampleName, node.getElement(), resultRecorder, fixture); <<<<<<< node.getChildren().processSequentially(evaluator, resultRecorder, fixture); ======= resultRecorder.setImplementationStatus(status); if (status == ImplementationStatus.IGNORED) { resultRecorder.record(Result.IGNORED); } else { node.getChildren().processSequentially(evaluator, resultRecorder); } >>>>>>> resultRecorder.setImplementationStatus(status); if (status == ImplementationStatus.IGNORED) { resultRecorder.record(Result.IGNORED); } else { node.getChildren().processSequentially(evaluator, resultRecorder, fixture); } <<<<<<< if (!isBeforeExample) { announceAfterExample(exampleName, node.getElement(), resultRecorder, fixture); ======= if (!isBeforeExample && status != ImplementationStatus.IGNORED) { announceAfterExample(exampleName, node.getElement(), resultRecorder); >>>>>>> if (!isBeforeExample && status != ImplementationStatus.IGNORED) { announceAfterExample(exampleName, node.getElement(), resultRecorder, fixture);
<<<<<<< import org.concordion.api.ResultModifier; ======= >>>>>>> import org.concordion.api.ResultModifier; <<<<<<< private String specificationDescription = ""; boolean forExample = false; private ResultModifier resultModifier; ======= >>>>>>> private String specificationDescription = ""; boolean forExample = false; <<<<<<< @Deprecated public void print( PrintStream out) { print(out, this); } @Deprecated public void print(PrintStream out, Object fixture) { print(out, fixture, null); } public void print(PrintStream out, Object fixture, String example) { out.print(printToString(fixture, example)); } @Deprecated public String printToString(Object fixture) { return printToString(fixture, null); } public String printToString(Object fixture, String example) { StringBuilder builder = new StringBuilder(); builder.append("\n"); builder.append(specificationDescription); builder.append("\n"); String counts = printCountsToString(fixture); if (counts != null) { builder.append(counts).append("\n"); } // builder.append("\n"); return builder.toString(); } @Deprecated public String printCountsToString(Object fixture) { return printCountsToString(fixture, null); } public String printCountsToString(Object fixture, String example) { StringBuilder builder = new StringBuilder(); builder.append("Successes: "); builder.append(getSuccessCount()); builder.append(", Failures: "); builder.append(getFailureCount()); if (getIgnoredCount() > 0) { builder.append(", Ignored: "); builder.append(getIgnoredCount()); } if (hasExceptions()) { builder.append(", Exceptions: "); builder.append(getExceptionCount()); } if (fixture != null) { builder.append(FixtureState.getFixtureState(fixture.getClass(), this.getResultModifier()).printNoteToString()); } return builder.toString(); } ======= @Override >>>>>>> @Deprecated public void print( PrintStream out) { print(out, this); } @Deprecated public void print(PrintStream out, Object fixture) { print(out, fixture, null); } public void print(PrintStream out, Object fixture, String example) { out.print(printToString(fixture, example)); } @Deprecated public String printToString(Object fixture) { return printToString(fixture, null); } public String printToString(Object fixture, String example) { StringBuilder builder = new StringBuilder(); builder.append("\n"); builder.append(specificationDescription); builder.append("\n"); String counts = printCountsToString(fixture); if (counts != null) { builder.append(counts).append("\n"); } // builder.append("\n"); return builder.toString(); } @Deprecated public String printCountsToString(Object fixture) { return printCountsToString(fixture, null); } public String printCountsToString(Object fixture, String example) { StringBuilder builder = new StringBuilder(); builder.append("Successes: "); builder.append(getSuccessCount()); builder.append(", Failures: "); builder.append(getFailureCount()); if (getIgnoredCount() > 0) { builder.append(", Ignored: "); builder.append(getIgnoredCount()); } if (hasExceptions()) { builder.append(", Exceptions: "); builder.append(getExceptionCount()); } if (fixture != null) { builder.append(FixtureState.getFixtureState(fixture.getClass(), this.getResultModifier()).printNoteToString()); } return builder.toString(); } <<<<<<< public void setSpecificationDescription( String specificationDescription) { this.specificationDescription = specificationDescription; } public String getSpecificationDescription() { return specificationDescription; } public void setForExample(boolean isForExample) { this.forExample = isForExample; } 
public boolean isForExample() { return forExample; } public long getTotalCount() { return getSuccessCount() + getFailureCount() + getExceptionCount() + getIgnoredCount(); } public ResultModifier getResultModifier() { return resultModifier; } public void setResultModifier(ResultModifier resultModifier) { this.resultModifier = resultModifier; } ======= >>>>>>> public void setSpecificationDescription( String specificationDescription) { this.specificationDescription = specificationDescription; } public String getSpecificationDescription() { return specificationDescription; } public void setForExample(boolean isForExample) { this.forExample = isForExample; } public boolean isForExample() { return forExample; } public long getTotalCount() { return getSuccessCount() + getFailureCount() + getExceptionCount() + getIgnoredCount(); }
<<<<<<< uniqueConstraint.setTable(table); uniqueConstraint.setClustered(false); // No way to set true via Hibernate ======= uniqueConstraint.setRelation(table); >>>>>>> uniqueConstraint.setRelation(table); uniqueConstraint.setClustered(false); // No way to set true via Hibernate <<<<<<< uniqueConstraint.setTable(table); uniqueConstraint.setClustered(false); // No way to set true via Hibernate ======= uniqueConstraint.setRelation(table); >>>>>>> uniqueConstraint.setRelation(table); uniqueConstraint.setClustered(false); // No way to set true via Hibernate
<<<<<<< ======= import org.hibernate.Criteria; import org.hibernate.Query; >>>>>>> import org.hibernate.Criteria; import org.hibernate.Query; <<<<<<< ======= import org.hibernate.criterion.ProjectionList; import org.hibernate.criterion.Projections; import org.hibernate.criterion.Restrictions; import org.joda.time.DateTime; import org.n52.sos.ds.hibernate.dao.AbstractObservationDAO; import org.n52.sos.ds.hibernate.dao.DaoFactory; import org.n52.sos.ds.hibernate.dao.ProcedureDAO; import org.n52.sos.ds.hibernate.entities.AbstractObservation; import org.n52.sos.ds.hibernate.entities.FeatureOfInterest; import org.n52.sos.ds.hibernate.entities.ObservableProperty; import org.n52.sos.ds.hibernate.entities.ObservationInfo; import org.n52.sos.ds.hibernate.entities.Procedure; >>>>>>> import org.hibernate.criterion.ProjectionList; import org.hibernate.criterion.Projections; import org.hibernate.criterion.Restrictions; import org.joda.time.DateTime; import org.n52.sos.ds.hibernate.dao.AbstractObservationDAO; import org.n52.sos.ds.hibernate.dao.DaoFactory; import org.n52.sos.ds.hibernate.dao.ProcedureDAO; import org.n52.sos.ds.hibernate.entities.AbstractObservation; import org.n52.sos.ds.hibernate.entities.FeatureOfInterest; import org.n52.sos.ds.hibernate.entities.ObservableProperty; import org.n52.sos.ds.hibernate.entities.ObservationInfo; import org.n52.sos.ds.hibernate.entities.Procedure; <<<<<<< import org.n52.sos.exception.CodedException; ======= import org.n52.sos.ds.hibernate.entities.series.SeriesObservationInfo; import org.n52.sos.ds.hibernate.util.HibernateHelper; import org.n52.sos.ds.hibernate.util.ProcedureTimeExtrema; >>>>>>> import org.n52.sos.exception.CodedException; import org.n52.sos.ds.hibernate.entities.series.SeriesObservationInfo; import org.n52.sos.ds.hibernate.util.HibernateHelper; import org.n52.sos.ds.hibernate.util.ProcedureTimeExtrema; <<<<<<< ======= import org.n52.sos.util.CollectionHelper; import org.n52.sos.util.DateTimeHelper; import org.n52.sos.util.StringHelper; >>>>>>> import org.n52.sos.util.CollectionHelper; import org.n52.sos.util.DateTimeHelper; import org.n52.sos.util.StringHelper; <<<<<<< ======= /** * Add observedProperties restriction to Hibernate Criteria * * @param c * Hibernate Criteria to add restriction * @param observedProperties * ObservableProperty identifiers to add */ public void addObservablePropertyToCriteria(Criteria c, Collection<String> observedProperties) { c.createCriteria(Series.OBSERVABLE_PROPERTY).add( Restrictions.in(ObservableProperty.IDENTIFIER, observedProperties)); } /** * Add procedures restriction to Hibernate Criteria * * @param c * Hibernate Criteria to add restriction * @param procedures * Procedure identifiers to add */ public void addProcedureToCriteria(Criteria c, Collection<String> procedures) { c.createCriteria(Series.PROCEDURE).add(Restrictions.in(Procedure.IDENTIFIER, procedures)); } /** * Get default Hibernate Criteria for querying series, deleted flag == * <code>false</code> * * @param session * Hibernate Session * @return Default criteria */ public Criteria getDefaultSeriesCriteria(Session session) { return session.createCriteria(Series.class).add(Restrictions.eq(Series.DELETED, false)) .setResultTransformer(Criteria.DISTINCT_ROOT_ENTITY); } /** * Get default Hibernate Criteria for querying all series * * @param session * Hibernate Session * @return Default criteria */ public Criteria getDefaultAllSeriesCriteria(Session session) { return 
session.createCriteria(Series.class).setResultTransformer(Criteria.DISTINCT_ROOT_ENTITY); } /** * Update Series for procedure by setting deleted flag and return changed * series * * @param procedure * Procedure for which the series should be changed * @param deleteFlag * New deleted flag value * @param session * Hibernate session * @return Updated Series */ @SuppressWarnings("unchecked") public List<Series> updateSeriesSetAsDeletedForProcedureAndGetSeries(String procedure, boolean deleteFlag, Session session) { Criteria criteria = getDefaultAllSeriesCriteria(session); addProcedureToCriteria(criteria, procedure); List<Series> hSeries = criteria.list(); for (Series series : hSeries) { series.setDeleted(deleteFlag); session.saveOrUpdate(series); session.flush(); } return hSeries; } /** * Update series values which will be used by the Timeseries API. * Can be later used by the SOS. * * @param series Series object * @param hObservation Observation object * @param session Hibernate session */ public void updateSeriesWithFirstLatestValues(Series series, AbstractObservation hObservation, Session session) { boolean minChanged = false; boolean maxChanged = false; if (!series.isSetFirstTimeStamp() || (series.isSetFirstTimeStamp() && series.getFirstTimeStamp().after(hObservation.getPhenomenonTimeStart()))) { minChanged = true; series.setFirstTimeStamp(hObservation.getPhenomenonTimeStart()); } if (!series.isSetLastTimeStamp() || (series.isSetLastTimeStamp() && series.getLastTimeStamp().before(hObservation.getPhenomenonTimeEnd()))) { maxChanged = true; series.setLastTimeStamp(hObservation.getPhenomenonTimeEnd()); } if (hObservation instanceof NumericObservation) { if (minChanged) { series.setFirstNumericValue(((NumericObservation) hObservation).getValue()); } if (maxChanged) { series.setLastNumericValue(((NumericObservation) hObservation).getValue()); } if (!series.isSetUnit() && hObservation.isSetUnit()) { // TODO check if both unit are equal. If not throw exception? 
series.setUnit(hObservation.getUnit()); } } session.saveOrUpdate(series); session.flush(); } /** * Check {@link Series} if the deleted observation time stamp corresponds to * the first/last series time stamp * * @param series * Series to update * @param observation * Deleted observation * @param session * Hibernate session */ public void updateSeriesAfterObservationDeletion(Series series, SeriesObservation observation, Session session) { SeriesObservationDAO seriesObservationDAO = new SeriesObservationDAO(); if (series.getFirstTimeStamp().equals(observation.getPhenomenonTimeStart())) { SeriesObservation firstObservation = seriesObservationDAO.getFirstObservationFor(series, session); series.setFirstTimeStamp(firstObservation.getPhenomenonTimeStart()); if (firstObservation instanceof NumericObservation) { series.setFirstNumericValue(((NumericObservation) firstObservation).getValue()); } } else if (series.getLastTimeStamp().equals(observation.getPhenomenonTimeEnd())) { SeriesObservation latestObservation = seriesObservationDAO.getLastObservationFor(series, session); series.setLastTimeStamp(latestObservation.getPhenomenonTimeEnd()); if (latestObservation instanceof NumericObservation) { series.setLastNumericValue(((NumericObservation) latestObservation).getValue()); } } session.saveOrUpdate(series); } public ProcedureTimeExtrema getProcedureTimeExtrema(Session session, String procedure) { Criteria c = getDefaultSeriesCriteria(session); addProcedureToCriteria(c, procedure); ProjectionList projectionList = Projections.projectionList(); projectionList.add(Projections.min(Series.FIRST_TIME_STAMP)); projectionList.add(Projections.max(Series.LAST_TIME_STAMP)); c.setProjection(projectionList); LOGGER.debug("QUERY getProcedureTimeExtrema(procedureIdentifier): {}", HibernateHelper.getSqlString(c)); Object[] result = (Object[]) c.uniqueResult(); ProcedureTimeExtrema pte = new ProcedureTimeExtrema(); if (result != null) { pte.setMinTime(DateTimeHelper.makeDateTime(result[0])); pte.setMaxTime(DateTimeHelper.makeDateTime(result[1])); } return pte; } >>>>>>>
<<<<<<< import io.realm.realmtasks.model.TaskList; import io.realm.realmtasks.model.TaskListList; ======= import io.realm.realmtasks.auth.facebook.FacebookAuth; import io.realm.realmtasks.auth.google.GoogleAuth; import io.realm.realmtasks.model.TaskList; import io.realm.realmtasks.model.TaskListList; import static io.realm.realmtasks.RealmTasksApplication.AUTH_URL; >>>>>>> import io.realm.realmtasks.auth.facebook.FacebookAuth; import io.realm.realmtasks.auth.google.GoogleAuth; import io.realm.realmtasks.model.TaskList; import io.realm.realmtasks.model.TaskListList; import static io.realm.realmtasks.RealmTasksApplication.AUTH_URL; <<<<<<< createInitialDataIfNeeded(); ======= Realm realm = Realm.getDefaultInstance(); realm.executeTransaction(new Realm.Transaction() { @Override public void execute(Realm realm) { if (realm.isEmpty()) { final TaskListList taskListList = realm.createObject(TaskListList.class, 0); final TaskList taskList = new TaskList(); taskList.setId(RealmTasksApplication.DEFAULT_LIST_ID); taskList.setText(RealmTasksApplication.DEFAULT_LIST_NAME); taskListList.getItems().add(taskList); } } }); realm.close(); >>>>>>> createInitialDataIfNeeded(); <<<<<<< private static void createInitialDataIfNeeded() { final Realm realm = Realm.getDefaultInstance(); //noinspection TryFinallyCanBeTryWithResources try { if (realm.where(TaskListList.class).count() != 0) { return; } realm.executeTransaction(new Realm.Transaction() { @Override public void execute(Realm realm) { if (realm.where(TaskListList.class).count() == 0) { final TaskListList taskListList = realm.createObject(TaskListList.class, 0); final TaskList taskList = new TaskList(); taskList.setId(RealmTasksApplication.DEFAULT_LIST_ID); taskList.setText(RealmTasksApplication.DEFAULT_LIST_NAME); taskListList.getItems().add(taskList); } } }); } finally { realm.close(); } } ======= @Override public void onSuccess(User user) { showProgress(false); loginComplete(user); } @Override public void onError(ObjectServerError error) { showProgress(false); String errorMsg; switch (error.getErrorCode()) { case UNKNOWN_ACCOUNT: errorMsg = "Account does not exists."; break; case INVALID_CREDENTIALS: errorMsg = "The provided credentials are invalid!"; // This message covers also expired account token break; default: errorMsg = error.toString(); } Toast.makeText(SignInActivity.this, errorMsg, Toast.LENGTH_LONG).show(); } >>>>>>> @Override public void onSuccess(User user) { showProgress(false); loginComplete(user); } @Override public void onError(ObjectServerError error) { showProgress(false); String errorMsg; switch (error.getErrorCode()) { case UNKNOWN_ACCOUNT: errorMsg = "Account does not exists."; break; case INVALID_CREDENTIALS: errorMsg = "The provided credentials are invalid!"; // This message covers also expired account token break; default: errorMsg = error.toString(); } Toast.makeText(SignInActivity.this, errorMsg, Toast.LENGTH_LONG).show(); } private static void createInitialDataIfNeeded() { final Realm realm = Realm.getDefaultInstance(); //noinspection TryFinallyCanBeTryWithResources try { if (realm.where(TaskListList.class).count() != 0) { return; } realm.executeTransaction(new Realm.Transaction() { @Override public void execute(Realm realm) { if (realm.where(TaskListList.class).count() == 0) { final TaskListList taskListList = realm.createObject(TaskListList.class, 0); final TaskList taskList = new TaskList(); taskList.setId(RealmTasksApplication.DEFAULT_LIST_ID); taskList.setText(RealmTasksApplication.DEFAULT_LIST_NAME); 
taskListList.getItems().add(taskList); } } }); } finally { realm.close(); } }
<<<<<<< dbFlowModels = com.raizlabs.android.dbflow.sql.language.Select.all(SimpleAddressItem.class); mainActivity.logTime(startTime, "DBFlow Load"); ======= dbFlowModels = new Select().from(SimpleAddressItem.class).queryList(); MainActivity.logTime(startTime, "DBFlow load"); >>>>>>> dbFlowModels = new Select().from(SimpleAddressItem.class).queryList(); mainActivity.logTime(startTime, "DBFlow Load");
<<<<<<< import org.thingsboard.server.common.data.Tenant; ======= >>>>>>> import org.thingsboard.server.common.data.Tenant; <<<<<<< ======= import org.thingsboard.server.dao.model.CustomerEntity; import org.thingsboard.server.dao.model.TenantEntity; >>>>>>>
<<<<<<< /** Class that keeps a cache of queries * @since 5.0 */ static class ModuleCache<K> { Map<String, K> classCache = new HashMap<>(); boolean caching; String def_namespace; public ModuleCache(String namespace, boolean caching) { this.caching = caching; this.def_namespace = namespace; } /** Returns the post filter class named ModelName. Caches already * instantiaed matching models in map Cache_PostFilter. * If the matching model name doesn't contain '.', * then NAMESPACE_POSTFILTER is prefixed to the name. * @param Name The name of the post filter to return */ @SuppressWarnings("unchecked") K getModule(String Name) { K rtr = null; if (Name.indexOf(".") < 0 ) Name = def_namespace +'.' +Name; else if (Name.startsWith("uk.ac.gla.terrier")) Name = Name.replaceAll("uk.ac.gla.terrier", "org.terrier"); //check for already loaded post filters if (caching) rtr = classCache.get(Name); if (rtr != null) { return rtr; } try { rtr = (K) Class.forName(Name).newInstance(); } catch(Exception e) { logger.error("Problem with class named: "+Name,e); return null; } if (caching) classCache.put(Name, rtr); return rtr; } } /** Class that keeps a cache of queries, and helps parse controls to identify them * @since 5.0 */ static class ModuleManager<K> extends ModuleCache<K> { protected static final String[] tinySingleStringArray = new String[0]; protected static final String[][] tinyDoubleStringArray = new String[0][0]; /** An ordered list of post filters names. The controls at the same index in the PostFilters_Controls * list turn on the post process in this list. */ protected String[] Class_Order; /** A 2d array, contains (on 2nd level) the list of controls that turn on the PostFilters * at the same 1st level place on PostFilters_Order */ protected String[][] Class_Controls; protected String typeName; ModuleManager(String _typeName, String namespace, boolean _caching){ super(namespace, _caching); this.typeName = _typeName; this.load_module_controls(); } List<K> getActive(Map<String,String> controls) { List<K> classes = new ArrayList<>(); for(int i=0; i<Class_Order.length; i++) { String PostFilter_Name = Class_Order[i]; for(int j=0; j<Class_Controls[i].length; j++) { String ControlName = Class_Controls[i][j]; String value = (String)controls.get(ControlName); if (logger.isDebugEnabled()){ logger.debug(ControlName+ "("+PostFilter_Name+") => "+value); } if (value == null) continue; value = value.toLowerCase(); if(! (value.equals("off") || value.equals("false"))) { classes.add(getModule(PostFilter_Name)); //we've now run this post process module, no need to check the rest of the controls for it. break; } } } return classes; } /** parses the controls hashtable, looking for references to controls, and returns the appropriate * postfilters to be run. */ Iterator<K> getActiveIterator(Map<String,String> controls) { //TODO this implementation should check if controls have bene updated since the iterator was created. return getActive(controls).iterator(); } /** load in the allowed post filter controls, and the order to run post processes in */ protected void load_module_controls() { /* what we have is a mapping of controls to post filters, and an order post processes should be run in. 
what we need is the order to check the controls in, and which pp to run for each */ String[] order_pf, control_pf; String tmp = ApplicationSetup.getProperty("querying."+typeName+".order", "").trim(); if (tmp.length() > 0) order_pf = tmp.split("\\s*,\\s*"); else order_pf = new String[0]; tmp = ApplicationSetup.getProperty("querying."+typeName+".controls", "").trim(); if (tmp.length() > 0) control_pf = tmp.split("\\s*,\\s*"); else control_pf = new String[0]; String[] control_and_pf = new String[control_pf.length*2]; int count = 0; //iterate through controls and pf names putting in 1d array for(int i=0; i<control_pf.length; i++) { if (control_pf[i].indexOf(":") > 0) { String[] control_and_postfilter = control_pf[i].split(":"); control_and_pf[count] = control_and_postfilter[0];//control control_and_pf[count+1] = control_and_postfilter[1];//postfilter count+=2; } } /* basically, we now invert, so we have an array of pf names, in a separate array, a list of controls that can turn that pf on */ List<String> pf_order = new ArrayList<String>(); List<String[]> pf_controls = new ArrayList<String[]>(); for(int i=0; i<order_pf.length; i++) { List<String> controls_for_this_pf = new ArrayList<String>(); String tmpPF = order_pf[i]; for(int j=0;j<count;j+=2) { if (tmpPF.equals(control_and_pf[j+1])) { controls_for_this_pf.add(control_and_pf[j]); } } //ok, there are controls that can turn this pf on, so lets enable it if (controls_for_this_pf.size() > 0) { pf_controls.add(controls_for_this_pf.toArray(tinySingleStringArray)); pf_order.add(tmpPF); } } //cast back to arrays Class_Order = pf_order.toArray(tinySingleStringArray); Class_Controls = pf_controls.toArray(tinyDoubleStringArray); } } ======= /** A generaic query id for when no query id is given **/ private static final String GENERICQUERYID = "GenericQuery"; >>>>>>> /** Class that keeps a cache of queries * @since 5.0 */ static class ModuleCache<K> { Map<String, K> classCache = new HashMap<>(); boolean caching; String def_namespace; public ModuleCache(String namespace, boolean caching) { this.caching = caching; this.def_namespace = namespace; } /** Returns the post filter class named ModelName. Caches already * instantiaed matching models in map Cache_PostFilter. * If the matching model name doesn't contain '.', * then NAMESPACE_POSTFILTER is prefixed to the name. * @param Name The name of the post filter to return */ @SuppressWarnings("unchecked") K getModule(String Name) { K rtr = null; if (Name.indexOf(".") < 0 ) Name = def_namespace +'.' +Name; else if (Name.startsWith("uk.ac.gla.terrier")) Name = Name.replaceAll("uk.ac.gla.terrier", "org.terrier"); //check for already loaded post filters if (caching) rtr = classCache.get(Name); if (rtr != null) { return rtr; } try { rtr = (K) Class.forName(Name).newInstance(); } catch(Exception e) { logger.error("Problem with class named: "+Name,e); return null; } if (caching) classCache.put(Name, rtr); return rtr; } } /** Class that keeps a cache of queries, and helps parse controls to identify them * @since 5.0 */ static class ModuleManager<K> extends ModuleCache<K> { protected static final String[] tinySingleStringArray = new String[0]; protected static final String[][] tinyDoubleStringArray = new String[0][0]; /** An ordered list of post filters names. The controls at the same index in the PostFilters_Controls * list turn on the post process in this list. 
*/ protected String[] Class_Order; /** A 2d array, contains (on 2nd level) the list of controls that turn on the PostFilters * at the same 1st level place on PostFilters_Order */ protected String[][] Class_Controls; protected String typeName; ModuleManager(String _typeName, String namespace, boolean _caching){ super(namespace, _caching); this.typeName = _typeName; this.load_module_controls(); } List<K> getActive(Map<String,String> controls) { List<K> classes = new ArrayList<>(); for(int i=0; i<Class_Order.length; i++) { String PostFilter_Name = Class_Order[i]; for(int j=0; j<Class_Controls[i].length; j++) { String ControlName = Class_Controls[i][j]; String value = (String)controls.get(ControlName); if (logger.isDebugEnabled()){ logger.debug(ControlName+ "("+PostFilter_Name+") => "+value); } if (value == null) continue; value = value.toLowerCase(); if(! (value.equals("off") || value.equals("false"))) { classes.add(getModule(PostFilter_Name)); //we've now run this post process module, no need to check the rest of the controls for it. break; } } } return classes; } /** parses the controls hashtable, looking for references to controls, and returns the appropriate * postfilters to be run. */ Iterator<K> getActiveIterator(Map<String,String> controls) { //TODO this implementation should check if controls have bene updated since the iterator was created. return getActive(controls).iterator(); } /** load in the allowed post filter controls, and the order to run post processes in */ protected void load_module_controls() { /* what we have is a mapping of controls to post filters, and an order post processes should be run in. what we need is the order to check the controls in, and which pp to run for each */ String[] order_pf, control_pf; String tmp = ApplicationSetup.getProperty("querying."+typeName+".order", "").trim(); if (tmp.length() > 0) order_pf = tmp.split("\\s*,\\s*"); else order_pf = new String[0]; tmp = ApplicationSetup.getProperty("querying."+typeName+".controls", "").trim(); if (tmp.length() > 0) control_pf = tmp.split("\\s*,\\s*"); else control_pf = new String[0]; String[] control_and_pf = new String[control_pf.length*2]; int count = 0; //iterate through controls and pf names putting in 1d array for(int i=0; i<control_pf.length; i++) { if (control_pf[i].indexOf(":") > 0) { String[] control_and_postfilter = control_pf[i].split(":"); control_and_pf[count] = control_and_postfilter[0];//control control_and_pf[count+1] = control_and_postfilter[1];//postfilter count+=2; } } /* basically, we now invert, so we have an array of pf names, in a separate array, a list of controls that can turn that pf on */ List<String> pf_order = new ArrayList<String>(); List<String[]> pf_controls = new ArrayList<String[]>(); for(int i=0; i<order_pf.length; i++) { List<String> controls_for_this_pf = new ArrayList<String>(); String tmpPF = order_pf[i]; for(int j=0;j<count;j+=2) { if (tmpPF.equals(control_and_pf[j+1])) { controls_for_this_pf.add(control_and_pf[j]); } } //ok, there are controls that can turn this pf on, so lets enable it if (controls_for_this_pf.size() > 0) { pf_controls.add(controls_for_this_pf.toArray(tinySingleStringArray)); pf_order.add(tmpPF); } } //cast back to arrays Class_Order = pf_order.toArray(tinySingleStringArray); Class_Controls = pf_controls.toArray(tinyDoubleStringArray); } } /** A generaic query id for when no query id is given **/ private static final String GENERICQUERYID = "GenericQuery";
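For orientation, a short illustrative sketch of how calling code might drive the ModuleManager kept in this resolution. The "postfilters" type name, the org.terrier.querying namespace, the PostFilter element type and the "decorate" control are assumptions for the example; the constructor, getActive and the querying.<type>.order / querying.<type>.controls properties come from the code above (java.util.Map/HashMap imports assumed).

    // Illustrative only: resolve the post filters switched on by this request's controls.
    ModuleManager<PostFilter> postFilters =
            new ModuleManager<>("postfilters", "org.terrier.querying", true);

    Map<String, String> controls = new HashMap<>();
    controls.put("decorate", "on");        // any value other than "off"/"false" enables the mapped module

    for (PostFilter pf : postFilters.getActive(controls)) {
        // each module returned here was listed in querying.postfilters.order and
        // enabled by one of its controls from querying.postfilters.controls
        // ... apply pf to the result set ...
    }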
<<<<<<< TestSnowball.class, ======= TestRemoveDiacritics.class, >>>>>>> TestSnowball.class, TestRemoveDiacritics.class,
<<<<<<< T frame(T parent, Buffer buffer) throws IOException, FramingException; ======= T frame(P parent, Buffer buffer) throws IOException; >>>>>>> T frame(P parent, Buffer buffer) throws IOException, FramingException;
<<<<<<< import io.pkts.packet.PacketParseException; ======= import io.pkts.packet.Packet; >>>>>>> import io.pkts.packet.PacketParseException; import io.pkts.packet.Packet; <<<<<<< public PCapPacket getNextPacket() throws IOException, PacketParseException { ======= public Packet getNextPacket() throws IOException { >>>>>>> public Packet getNextPacket() throws IOException, PacketParseException {
<<<<<<< /** * Creates a new {@link SpectralClustering} instance. * * @param alpha The weight given to inter cluster similarity for the relaxed * correlation objective function. {@code beta} will be set to * {@code 1 - alpha}. * @param cutterGenerator A {@link Generator} of {@link EigenCut} instances. */ ======= private final WorkQueue workQueue; >>>>>>> private final WorkQueue workQueue; /** * Creates a new {@link SpectralClustering} instance. * * @param alpha The weight given to inter cluster similarity for the relaxed * correlation objective function. {@code beta} will be set to * {@code 1 - alpha}. * @param cutterGenerator A {@link Generator} of {@link EigenCut} instances. */
<<<<<<< public void user() { ensureBasicEnabled(); // for now, the user stream will switch to OAuth at some point in the future startHandler(new StreamHandlingThread(true) { public UserStream getStream() throws TwitterException { ======= public void user () { ensureAuthorizationEnabled (); startHandler(new StreamHandlingThread() { public StatusStream getStream() throws TwitterException { >>>>>>> public void user() { ensureAuthorizationEnabled (); startHandler(new StreamHandlingThread(true) { public UserStream getStream() throws TwitterException { <<<<<<< public UserStream getUserStream() throws TwitterException { ensureBasicEnabled(); if (!(statusListener instanceof UserStreamListener)) { logger.warn("Use of UserStreamListener is suggested."); } ======= public StatusStream getUserStream() throws TwitterException { ensureAuthorizationEnabled (); >>>>>>> public UserStream getUserStream() throws TwitterException { ensureAuthorizationEnabled (); if (!(statusListener instanceof UserStreamListener)) { logger.warn("Use of UserStreamListener is suggested."); }
<<<<<<< return factory.createUserList(get(conf.getRestBaseURL() + "users/lookup.json", new HttpParameter[]{ new HttpParameter("screen_name", z_T4JInternalStringUtil.join(screenNames)) , INCLUDE_ENTITIES})); ======= try { return factory.createUserList(get(conf.getRestBaseURL() + "users/lookup.json", new HttpParameter[]{ new HttpParameter("screen_name", T4JInternalStringUtil.join(screenNames)) , INCLUDE_ENTITIES})); } catch (TwitterException te) { if (404 == te.getStatusCode()) { return factory.createEmptyResponseList(te); } else { throw te; } } >>>>>>> try { return factory.createUserList(get(conf.getRestBaseURL() + "users/lookup.json", new HttpParameter[]{ new HttpParameter("screen_name", z_T4JInternalStringUtil.join(screenNames)) , INCLUDE_ENTITIES})); } catch (TwitterException te) { if (404 == te.getStatusCode()) { return factory.createEmptyResponseList(te); } else { throw te; } } <<<<<<< return factory.createUserList(get(conf.getRestBaseURL() + "users/lookup.json", new HttpParameter[]{ new HttpParameter("user_id", z_T4JInternalStringUtil.join(ids)) , INCLUDE_ENTITIES})); ======= try { return factory.createUserList(get(conf.getRestBaseURL() + "users/lookup.json", new HttpParameter[]{ new HttpParameter("user_id", T4JInternalStringUtil.join(ids)) , INCLUDE_ENTITIES})); } catch (TwitterException te) { if (404 == te.getStatusCode()) { return factory.createEmptyResponseList(te); } else { throw te; } } >>>>>>> try { return factory.createUserList(get(conf.getRestBaseURL() + "users/lookup.json", new HttpParameter[]{ new HttpParameter("user_id", z_T4JInternalStringUtil.join(ids)) , INCLUDE_ENTITIES})); } catch (TwitterException te) { if (404 == te.getStatusCode()) { return factory.createEmptyResponseList(te); } else { throw te; } } <<<<<<< return factory.createFriendshipList(get(conf.getRestBaseURL() + "friendships/lookup.json?screen_name=" + z_T4JInternalStringUtil.join(screenNames))); ======= try { return factory.createFriendshipList(get(conf.getRestBaseURL() + "friendships/lookup.json?screen_name=" + T4JInternalStringUtil.join(screenNames))); } catch (TwitterException te) { if (404 == te.getStatusCode()) { return factory.createEmptyResponseList(te); } else { throw te; } } >>>>>>> try { return factory.createFriendshipList(get(conf.getRestBaseURL() + "friendships/lookup.json?screen_name=" + z_T4JInternalStringUtil.join(screenNames))); } catch (TwitterException te) { if (404 == te.getStatusCode()) { return factory.createEmptyResponseList(te); } else { throw te; } } <<<<<<< return factory.createFriendshipList(get(conf.getRestBaseURL() + "friendships/lookup.json?user_id=" + z_T4JInternalStringUtil.join(ids))); ======= try { return factory.createFriendshipList(get(conf.getRestBaseURL() + "friendships/lookup.json?user_id=" + T4JInternalStringUtil.join(ids))); } catch (TwitterException te) { if (404 == te.getStatusCode()) { return factory.createEmptyResponseList(te); } else { throw te; } } >>>>>>> try { return factory.createFriendshipList(get(conf.getRestBaseURL() + "friendships/lookup.json?user_id=" + z_T4JInternalStringUtil.join(ids))); } catch (TwitterException te) { if (404 == te.getStatusCode()) { return factory.createEmptyResponseList(te); } else { throw te; } }
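The resolution above applies the same guard to all four lookups: run the request, and on a 404 return factory.createEmptyResponseList(te) instead of rethrowing. A hypothetical extraction of that guard, for illustration only: the Lookup interface and the emptyOn404 name are invented, the generic typing of createEmptyResponseList is assumed, and only getStatusCode(), createEmptyResponseList(te) and the 404 check are taken from the snippet.

    // Hypothetical refactoring of the repeated 404 guard shown in the resolution.
    interface Lookup<T> {
        ResponseList<T> call() throws TwitterException;
    }

    private <T> ResponseList<T> emptyOn404(Lookup<T> lookup) throws TwitterException {
        try {
            return lookup.call();
        } catch (TwitterException te) {
            if (404 == te.getStatusCode()) {
                return factory.createEmptyResponseList(te);   // same fallback as the resolution
            }
            throw te;
        }
    }

    // usage sketch, e.g. for lookupUsers(String[] screenNames):
    // return emptyOn404(() -> factory.createUserList(get(conf.getRestBaseURL() + "users/lookup.json", ...)));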
<<<<<<< * Returns a cursored collection of user objects for every user the specified user is following (otherwise known as their "friends").<br> * At this time, results are ordered with the most recent following first — however, this ordering is subject to unannounced change and eventual consistency issues. Results are given in groups of 20 users and multiple "pages" of results can be navigated through using the next_cursor value in subsequent requests. See <a href="https://dev.twitter.com/docs/misc/cursoring">Using cursors to navigate collections</a> for more information. * <br>This method calls https://api.twitter.com/1.1/friends/list.json * * @param screenName The screen name of the user for whom to return results for. * @param cursor Causes the results to be broken into pages of no more than 20 records at a time. * @param count The number of users to return per page, up to a maximum of 200. Defaults to 20. * @return list of friends * @throws TwitterException when Twitter service or network is unavailable * @see <a href="https://dev.twitter.com/docs/api/1.1/get/friends/list">GET friends/list | Twitter Developers</a> */ PagableResponseList<User> getFriendsList(String screenName, long cursor, int count) throws TwitterException; /** * Returns a cursored collection of user objects for users following the specified user.<br> ======= * Returns a cursored collection of user objects for every user the specified user is following (otherwise known as their "friends").<br> * At this time, results are ordered with the most recent following first — however, this ordering is subject to unannounced change and eventual consistency issues. Results are given in groups of 20 users and multiple "pages" of results can be navigated through using the next_cursor value in subsequent requests. See <a href="https://dev.twitter.com/docs/misc/cursoring">Using cursors to navigate collections</a> for more information. * <br>This method calls https://api.twitter.com/1.1/friends/list.json * * @param userId The ID of the user for whom to return results for. * @param cursor Causes the results to be broken into pages of no more than 20 records at a time. * @param count The number of users to return per page, up to a maximum of 200. Defaults to 20. * @return list of friends * @throws TwitterException when Twitter service or network is unavailable * @see <a href="https://dev.twitter.com/docs/api/1.1/get/friends/list">GET friends/list | Twitter Developers</a> * @since Twitter4J 4.0.2 */ PagableResponseList<User> getFriendsList(long userId, long cursor, int count) throws TwitterException; /** * Returns a cursored collection of user objects for every user the specified user is following (otherwise known as their "friends").<br> * At this time, results are ordered with the most recent following first — however, this ordering is subject to unannounced change and eventual consistency issues. Results are given in groups of 20 users and multiple "pages" of results can be navigated through using the next_cursor value in subsequent requests. See <a href="https://dev.twitter.com/docs/misc/cursoring">Using cursors to navigate collections</a> for more information. * <br>This method calls https://api.twitter.com/1.1/friends/list.json * * @param screenName The screen name of the user for whom to return results for. * @param cursor Causes the results to be broken into pages of no more than 20 records at a time. * @param count The number of users to return per page, up to a maximum of 200. Defaults to 20. 
* @return list of friends * @throws TwitterException when Twitter service or network is unavailable * @see <a href="https://dev.twitter.com/docs/api/1.1/get/friends/list">GET friends/list | Twitter Developers</a> * @since Twitter4J 4.0.2 */ PagableResponseList<User> getFriendsList(String screenName, long cursor, int count) throws TwitterException; /** * Returns a cursored collection of user objects for every user the specified user is following (otherwise known as their "friends").<br> * At this time, results are ordered with the most recent following first — however, this ordering is subject to unannounced change and eventual consistency issues. Results are given in groups of 20 users and multiple "pages" of results can be navigated through using the next_cursor value in subsequent requests. See <a href="https://dev.twitter.com/docs/misc/cursoring">Using cursors to navigate collections</a> for more information. * <br>This method calls https://api.twitter.com/1.1/friends/list.json * * @param userId The ID of the user for whom to return results for. * @param cursor Causes the results to be broken into pages of no more than 20 records at a time. * @param count The number of users to return per page, up to a maximum of 200. Defaults to 20. * @param skipStatus When set to either true, statuses will not be included in the returned user objects. * @param includeUserEntities The user object entities node will be disincluded when set to false. * @return list of friends * @throws TwitterException when Twitter service or network is unavailable * @see <a href="https://dev.twitter.com/docs/api/1.1/get/friends/list">GET friends/list | Twitter Developers</a> * @since Twitter4J 4.0.2 */ PagableResponseList<User> getFriendsList(long userId, long cursor, int count, boolean skipStatus, boolean includeUserEntities) throws TwitterException; /** * Returns a cursored collection of user objects for every user the specified user is following (otherwise known as their "friends").<br> >>>>>>> * Returns a cursored collection of user objects for every user the specified user is following (otherwise known as their "friends").<br>
<<<<<<<
import org.thingsboard.server.common.data.DataConstants;
=======
>>>>>>>
import org.thingsboard.server.common.data.DataConstants;
<<<<<<<
case MqttTopics.DEVICE_ATTRIBUTES_RESPONSES_TOPIC:
case MqttTopics.GATEWAY_PROVISION_RESPONSE_TOPIC:
case MqttTopics.DEVICE_PROVISION_RESPONSE_TOPIC:
=======
>>>>>>>
case MqttTopics.GATEWAY_PROVISION_RESPONSE_TOPIC:
case MqttTopics.DEVICE_PROVISION_RESPONSE_TOPIC:
<<<<<<< return factory.createStatusList(get(conf.getRestBaseURL() + "favorites/list.json?user_id=" + userId, paging.asPostParameterArray())); ======= ensureAuthorizationEnabled(); return factory.createStatusList(get(conf.getRestBaseURL() + "favorites/list.json" , mergeParameters(new HttpParameter[]{new HttpParameter("user_id", userId)} , paging.asPostParameterArray()))); >>>>>>> return factory.createStatusList(get(conf.getRestBaseURL() + "favorites/list.json" , mergeParameters(new HttpParameter[]{new HttpParameter("user_id", userId)} , paging.asPostParameterArray()))); <<<<<<< return factory.createStatusList(get(conf.getRestBaseURL() + "favorites/list.json?screen_name=" + screenName, paging.asPostParameterArray())); ======= ensureAuthorizationEnabled(); return factory.createStatusList(get(conf.getRestBaseURL() + "favorites/list.json" , mergeParameters(new HttpParameter[]{new HttpParameter("screen_name", screenName)} , paging.asPostParameterArray()))); >>>>>>> return factory.createStatusList(get(conf.getRestBaseURL() + "favorites/list.json" , mergeParameters(new HttpParameter[]{new HttpParameter("screen_name", screenName)} , paging.asPostParameterArray())));
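Both resolutions above move the user_id / screen_name argument out of the URL string and into the parameter array via mergeParameters, so it gets encoded together with the paging parameters. A rough sketch of that kind of parameter merging with plain arrays; Param and the merge helper are illustrative stand-ins, not the Twitter4J HttpParameter API:

import java.util.Arrays;

public class MergeParametersExample {

    // Simple name/value pair standing in for an HTTP request parameter.
    record Param(String name, String value) {}

    // Concatenate a fixed parameter set with a variable one (e.g. paging).
    static Param[] mergeParameters(Param[] fixed, Param[] paging) {
        Param[] merged = Arrays.copyOf(fixed, fixed.length + paging.length);
        System.arraycopy(paging, 0, merged, fixed.length, paging.length);
        return merged;
    }

    public static void main(String[] args) {
        Param[] merged = mergeParameters(
                new Param[]{new Param("user_id", "12345")},
                new Param[]{new Param("count", "20"), new Param("page", "2")});
        System.out.println(Arrays.toString(merged));
    }
}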
<<<<<<< ======= public boolean hasStableIds() { return mDelegate.hasStableIds(); } @Override public int getItemViewType(int position) { return mDelegate.getItemViewType(position); } @Override >>>>>>> public int getItemViewType(int position) { return mDelegate.getItemViewType(position); } @Override <<<<<<< ======= } @Override public int getViewTypeCount() { return mDelegate.getViewTypeCount(); >>>>>>> } @Override public int getViewTypeCount() { return mDelegate.getViewTypeCount(); } @Override public boolean hasStableIds() { return mDelegate.hasStableIds();
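The resolved wrapper above forwards each newly added method straight to mDelegate; the merge is mostly about where those forwarding methods land. A small sketch of the same delegation idea against a hypothetical ListAdapter-like interface (not the Android API):

public class DelegatingAdapterExample {

    // Minimal stand-in for an adapter interface.
    interface SimpleAdapter {
        int getCount();
        int getItemViewType(int position);
        int getViewTypeCount();
        boolean hasStableIds();
    }

    // Wrapper that adds no behaviour of its own and forwards everything.
    static class DelegatingAdapter implements SimpleAdapter {
        private final SimpleAdapter mDelegate;

        DelegatingAdapter(SimpleAdapter delegate) {
            this.mDelegate = delegate;
        }

        @Override public int getCount() { return mDelegate.getCount(); }
        @Override public int getItemViewType(int position) { return mDelegate.getItemViewType(position); }
        @Override public int getViewTypeCount() { return mDelegate.getViewTypeCount(); }
        @Override public boolean hasStableIds() { return mDelegate.hasStableIds(); }
    }

    public static void main(String[] args) {
        SimpleAdapter base = new SimpleAdapter() {
            @Override public int getCount() { return 3; }
            @Override public int getItemViewType(int position) { return 0; }
            @Override public int getViewTypeCount() { return 1; }
            @Override public boolean hasStableIds() { return true; }
        };
        SimpleAdapter wrapped = new DelegatingAdapter(base);
        System.out.println(wrapped.getCount() + " items, stable ids: " + wrapped.hasStableIds());
    }
}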
<<<<<<<
import nova.core.util.math.Vector3DUtil;
import nova.internal.Game;
import nova.internal.launch.NovaLauncher;
=======
import nova.core.util.transform.vector.Vector3d;
import nova.internal.core.Game;
import nova.internal.core.launch.NovaLauncher;
>>>>>>>
import nova.core.util.math.Vector3DUtil;
import nova.internal.core.Game;
import nova.internal.core.launch.NovaLauncher;
<<<<<<<
import nova.core.util.math.Vector2DUtil;
import nova.internal.Game;
import org.apache.commons.math3.geometry.euclidean.twod.Vector2D;
=======
import nova.core.util.transform.vector.Vector2d;
import nova.core.util.transform.vector.Vector2i;
import nova.internal.core.Game;
>>>>>>>
import nova.core.util.math.Vector2DUtil;
import nova.internal.core.Game;
import org.apache.commons.math3.geometry.euclidean.twod.Vector2D;
<<<<<<<
public Registry<? extends Identifiable> getRegistry(Class<? extends Identifiable> c) {
    if (!registryMap.containsKey(c)) {
        registryMap.put(c, new Registry());
=======
public <T extends Named> Registry<T> getRegistry(Class<T> type) {
    if (!registryMap.containsKey(type)) {
        registryMap.put(type, new Registry<T>());
>>>>>>>
public <T extends Identifiable> Registry<T> getRegistry(Class<T> type) {
    if (!registryMap.containsKey(type)) {
        registryMap.put(type, new Registry<T>());
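The resolution above settles on a generic signature, <T extends Identifiable> Registry<T> getRegistry(Class<T> type), so callers get a typed registry back instead of a wildcard. A compact sketch of that class-keyed, lazily created registry map; the types here are simplified placeholders for NOVA's Identifiable/Registry, not the real classes:

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class RegistryManagerExample {

    interface Identifiable { String getID(); }

    // Very small registry: just an ordered list of registered objects.
    static class Registry<T extends Identifiable> {
        private final List<T> entries = new ArrayList<>();
        void register(T entry) { entries.add(entry); }
        int size() { return entries.size(); }
    }

    // One registry per concrete type, created on first request.
    static class RegistryManager {
        private final Map<Class<?>, Registry<?>> registryMap = new HashMap<>();

        @SuppressWarnings("unchecked")
        <T extends Identifiable> Registry<T> getRegistry(Class<T> type) {
            return (Registry<T>) registryMap.computeIfAbsent(type, k -> new Registry<T>());
        }
    }

    static class Block implements Identifiable {
        private final String id;
        Block(String id) { this.id = id; }
        @Override public String getID() { return id; }
    }

    public static void main(String[] args) {
        RegistryManager manager = new RegistryManager();
        manager.getRegistry(Block.class).register(new Block("stone"));
        System.out.println(manager.getRegistry(Block.class).size()); // 1
    }
}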
<<<<<<<
import nova.core.util.math.RotationUtil;
import nova.internal.Game;
import nova.internal.launch.NovaLauncher;
=======
import nova.core.util.transform.matrix.Quaternion;
import nova.core.util.transform.vector.Vector3d;
import nova.core.util.transform.vector.Vector3i;
import nova.internal.core.Game;
import nova.internal.core.launch.NovaLauncher;
>>>>>>>
import nova.core.util.math.RotationUtil;
import nova.internal.core.Game;
import nova.internal.core.launch.NovaLauncher;
<<<<<<<
=======
import lombok.ToString;
import org.thingsboard.server.common.data.HasName;
>>>>>>>
import org.thingsboard.server.common.data.HasName;
<<<<<<<
ObjectMapper m = newMapperBuilder()
        .addMixIn(Temporal.class, MockObjectConfiguration.class)
        .configure(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS, false)
        .build();
String value = m.writeValueAsString(date);
assertNotNull("The value should not be null.", value);
=======
String value = newMapper()
        .configure(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS, false)
        .addMixIn(Temporal.class, MockObjectConfiguration.class)
        .writeValueAsString(date);
>>>>>>>
ObjectMapper m = newMapperBuilder()
        .addMixIn(Temporal.class, MockObjectConfiguration.class)
        .configure(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS, false)
        .build();
String value = m.writeValueAsString(date);
<<<<<<< ======= private static final long serialVersionUID = 1L; /** * Flag that indicates what leniency setting is enabled for this deserializer (either * due {@link JsonFormat} annotation on property or class, or due to per-type * "config override", or from global settings): leniency/strictness has effect * on accepting some non-default input value representations (such as integer values * for dates). *<p> * Note that global default setting is for leniency to be enabled, for Jackson 2.x, * and has to be explicitly change to force strict handling: this is to keep backwards * compatibility with earlier versions. * * @since 2.11 */ protected final boolean _isLenient; /** * @since 2.11 */ >>>>>>> /** * Flag that indicates what leniency setting is enabled for this deserializer (either * due {@link JsonFormat} annotation on property or class, or due to per-type * "config override", or from global settings): leniency/strictness has effect * on accepting some non-default input value representations (such as integer values * for dates). *<p> * Note that global default setting is for leniency to be enabled, for Jackson 2.x, * and has to be explicitly change to force strict handling: this is to keep backwards * compatibility with earlier versions. * * @since 2.11 */ protected final boolean _isLenient; <<<<<<< protected JSR310DeserializerBase(JSR310DeserializerBase<?> base) { super(base); ======= /** * @since 2.11 */ protected abstract JSR310DeserializerBase<T> withLeniency(Boolean leniency); /** * @return {@code true} if lenient handling is enabled; {code false} if not (strict mode) * * @since 2.11 */ protected boolean isLenient() { return _isLenient; >>>>>>> protected abstract JSR310DeserializerBase<T> withLeniency(Boolean leniency); /** * @return {@code true} if lenient handling is enabled; {code false} if not (strict mode) */ protected boolean isLenient() { return _isLenient;
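The resolution above keeps the _isLenient flag and the abstract withLeniency(...) hook, which is the usual immutable "copy with one property changed" pattern for contextualized deserializers. A stripped-down sketch of that pattern without any Jackson types; ValueParser and its subclass are made up for illustration:

public class WithLeniencyExample {

    // Immutable base: changing leniency produces a new instance.
    abstract static class ValueParser {
        protected final boolean _isLenient;

        protected ValueParser(boolean isLenient) { this._isLenient = isLenient; }

        protected boolean isLenient() { return _isLenient; }

        // Subclasses return a copy of themselves with the new setting.
        protected abstract ValueParser withLeniency(boolean leniency);

        abstract int parse(String text);
    }

    static class IntParser extends ValueParser {
        IntParser(boolean isLenient) { super(isLenient); }

        @Override protected IntParser withLeniency(boolean leniency) {
            return leniency == _isLenient ? this : new IntParser(leniency);
        }

        // Lenient mode tolerates surrounding whitespace; strict mode does not.
        @Override int parse(String text) {
            return Integer.parseInt(isLenient() ? text.trim() : text);
        }
    }

    public static void main(String[] args) {
        ValueParser strict = new IntParser(false);
        ValueParser lenient = strict.withLeniency(true);
        System.out.println(lenient.parse(" 42 ")); // 42; strict.parse(" 42 ") would throw
    }
}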
<<<<<<<
return _handleDateTimeFormatException(ctxt, e, _formatter, string);
=======
return _handleDateTimeException(ctxt, e, value);
>>>>>>>
return _handleDateTimeFormatException(ctxt, e, _formatter, value);
<<<<<<< ======= import com.fasterxml.jackson.core.util.VersionUtil; >>>>>>> import com.fasterxml.jackson.core.util.VersionUtil; <<<<<<< if (p.hasToken(JsonToken.VALUE_STRING)) { final String text = p.getText().trim(); if (text.length() == 0) { CoercionAction act = ctxt.findCoercionAction(logicalType(), _valueClass, CoercionInputShape.EmptyString); if (act == CoercionAction.Fail) { ctxt.reportInputMismatch(this, "Cannot coerce empty String (\"\") to %s (but could if enabling coercion using `CoercionConfig`)", _coercedTypeDesc()); } if (act == CoercionAction.AsEmpty) { return getEmptyValue(ctxt); } // None of the types has specific null value return null; } try { switch (_typeSelector) { case TYPE_PERIOD: return Period.parse(text); case TYPE_ZONE_ID: return ZoneId.of(text); case TYPE_ZONE_OFFSET: return ZoneOffset.of(text); ======= JsonFormat.Value format = findFormatOverrides(ctxt, property, handledType()); JSR310StringParsableDeserializer deser = this; if (format != null) { if (format.hasLenient()) { Boolean leniency = format.getLenient(); if (leniency != null) { deser = this.withLeniency(leniency); >>>>>>> JsonFormat.Value format = findFormatOverrides(ctxt, property, handledType()); JSR310StringParsableDeserializer deser = this; if (format != null) { if (format.hasLenient()) { Boolean leniency = format.getLenient(); if (leniency != null) { deser = this.withLeniency(leniency); <<<<<<< } catch (DateTimeException e) { return _handleDateTimeException(ctxt, e, text); ======= >>>>>>> <<<<<<< @Override public JsonDeserializer<?> createContextual(DeserializationContext ctxt, BeanProperty property) throws JsonMappingException ======= protected Object _fromString(JsonParser p, DeserializationContext ctxt, String string) throws IOException >>>>>>> protected Object _fromString(JsonParser p, DeserializationContext ctxt, String string) throws IOException
<<<<<<< ======= import java.time.OffsetDateTime; import java.time.ZoneOffset; import java.time.format.DateTimeParseException; import java.util.TimeZone; >>>>>>> <<<<<<< WithContextTimezoneDateFieldBean result = newMapper(TimeZone.getTimeZone("UTC")) .readerFor(WithContextTimezoneDateFieldBean.class) .without(DeserializationFeature.ADJUST_DATES_TO_CONTEXT_TIME_ZONE) .readValue(inputStr); expect(OffsetDateTime.of(2016, 5, 13, 14, 24, 40, 545000000, ZoneOffset.UTC), result.date); ======= WithContextTimezoneDateFieldBean result = newMapper().setTimeZone(TimeZone.getTimeZone("UTC")). disable(DeserializationFeature.ADJUST_DATES_TO_CONTEXT_TIME_ZONE).readValue(inputStr, WithContextTimezoneDateFieldBean.class); assertEquals("The value is not correct.", OffsetDateTime.of(2016, 5, 13, 14, 24, 40, 545000000, ZoneOffset.UTC), result.date); >>>>>>> WithContextTimezoneDateFieldBean result = newMapper(TimeZone.getTimeZone("UTC")) .readerFor(WithContextTimezoneDateFieldBean.class) .without(DeserializationFeature.ADJUST_DATES_TO_CONTEXT_TIME_ZONE) .readValue(inputStr); assertEquals("The value is not correct.", OffsetDateTime.of(2016, 5, 13, 14, 24, 40, 545000000, ZoneOffset.UTC), result.date); <<<<<<< WithoutContextTimezoneDateFieldBean result = newMapper(TimeZone.getTimeZone("UTC")) .readerFor(WithoutContextTimezoneDateFieldBean.class) .with(DeserializationFeature.ADJUST_DATES_TO_CONTEXT_TIME_ZONE) .readValue(inputStr); expect(OffsetDateTime.of(2016, 5, 13, 17, 24, 40, 545000000, ZoneOffset.ofHours(3)), result.date); ======= WithoutContextTimezoneDateFieldBean result = newMapper().setTimeZone(TimeZone.getTimeZone("UTC")). enable(DeserializationFeature.ADJUST_DATES_TO_CONTEXT_TIME_ZONE).readValue(inputStr, WithoutContextTimezoneDateFieldBean.class); assertEquals("The value is not correct.", OffsetDateTime.of(2016, 5, 13, 17, 24, 40, 545000000, ZoneOffset.ofHours(3)), result.date); >>>>>>> WithoutContextTimezoneDateFieldBean result = newMapper(TimeZone.getTimeZone("UTC")) .readerFor(WithoutContextTimezoneDateFieldBean.class) .with(DeserializationFeature.ADJUST_DATES_TO_CONTEXT_TIME_ZONE) .readValue(inputStr); assertEquals("The value is not correct.", OffsetDateTime.of(2016, 5, 13, 17, 24, 40, 545000000, ZoneOffset.ofHours(3)), result.date); <<<<<<< OffsetDateTime value = READER .with(DeserializationFeature.UNWRAP_SINGLE_VALUE_ARRAYS) .readValue(aposToQuotes("['2000-01-01T12:00+00']")); expect(OffsetDateTime.of(2000, 1, 1, 12, 0, 0, 0, ZoneOffset.UTC), value); ======= OffsetDateTime value = READER .with(DeserializationFeature.UNWRAP_SINGLE_VALUE_ARRAYS) .readValue("[\"2000-01-01T12:00+00\"]"); assertEquals("The value is not correct.", OffsetDateTime.of(2000, 1, 1, 12, 0, 0, 0, ZoneOffset.UTC), value); >>>>>>> OffsetDateTime value = READER .with(DeserializationFeature.UNWRAP_SINGLE_VALUE_ARRAYS) .readValue("[\"2000-01-01T12:00+00\"]"); assertEquals("The value is not correct.", OffsetDateTime.of(2000, 1, 1, 12, 0, 0, 0, ZoneOffset.UTC), value); <<<<<<< OffsetDateTime value= READER .with(DeserializationFeature.UNWRAP_SINGLE_VALUE_ARRAYS, DeserializationFeature.ACCEPT_EMPTY_ARRAY_AS_NULL_OBJECT) .readValue(aposToQuotes("[]")); ======= String json="[]"; OffsetDateTime value = READER .with(DeserializationFeature.UNWRAP_SINGLE_VALUE_ARRAYS) .with(DeserializationFeature.ACCEPT_EMPTY_ARRAY_AS_NULL_OBJECT) .readValue(aposToQuotes(json)); >>>>>>> OffsetDateTime value = READER .with(DeserializationFeature.UNWRAP_SINGLE_VALUE_ARRAYS, DeserializationFeature.ACCEPT_EMPTY_ARRAY_AS_NULL_OBJECT) .readValue(aposToQuotes("[]"));
<<<<<<< /* String json =*/ READER .with(DeserializationFeature.UNWRAP_SINGLE_VALUE_ARRAYS) .readValue("[]"); fail("expected JsonMappingException"); ======= mapperBuilder() .configure(DeserializationFeature.UNWRAP_SINGLE_VALUE_ARRAYS, true) .build() .readerFor(ZoneOffset.class).readValue("[]"); fail("expected JsonMappingException"); >>>>>>> mapperBuilder() .configure(DeserializationFeature.UNWRAP_SINGLE_VALUE_ARRAYS, true) .build() .readerFor(ZoneOffset.class).readValue("[]"); fail("expected JsonMappingException"); <<<<<<< // OK ======= verifyException(e, "Unexpected token (END_ARRAY)"); >>>>>>> verifyException(e, "Unexpected token (END_ARRAY)"); <<<<<<< ZoneOffset value = READER .with(DeserializationFeature.UNWRAP_SINGLE_VALUE_ARRAYS) .readValue(aposToQuotes("['+0300']")); ======= ZoneOffset value = mapperBuilder() .configure(DeserializationFeature.UNWRAP_SINGLE_VALUE_ARRAYS, true) .build() .readerFor(ZoneOffset.class).readValue("[\"+0300\"]"); >>>>>>> ZoneOffset value = READER .with(DeserializationFeature.UNWRAP_SINGLE_VALUE_ARRAYS) .readValue(aposToQuotes("['+0300']")); <<<<<<< ZoneOffset value = READER .with(DeserializationFeature.UNWRAP_SINGLE_VALUE_ARRAYS, DeserializationFeature.ACCEPT_EMPTY_ARRAY_AS_NULL_OBJECT) .readValue("[]"); assertNull(value); ======= ZoneOffset value = mapperBuilder() .configure(DeserializationFeature.UNWRAP_SINGLE_VALUE_ARRAYS, true) .configure(DeserializationFeature.ACCEPT_EMPTY_ARRAY_AS_NULL_OBJECT, true) .build() .readerFor(ZoneOffset.class).readValue("[]"); assertNull(value); >>>>>>> ZoneOffset value = READER .with(DeserializationFeature.UNWRAP_SINGLE_VALUE_ARRAYS, DeserializationFeature.ACCEPT_EMPTY_ARRAY_AS_NULL_OBJECT) .readValue("[]"); assertNull(value); <<<<<<< final ZoneOffset value = read(json); ======= final ZoneOffset value = READER.readValue(aposToQuotes(json)); >>>>>>> final ZoneOffset value = read(json); <<<<<<< private ZoneOffset read(final String json) throws IOException { return READER.readValue(aposToQuotes(json)); } ======= >>>>>>> private ZoneOffset read(final String json) throws IOException { return READER.readValue(aposToQuotes(json)); }
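The tests above exercise Jackson's UNWRAP_SINGLE_VALUE_ARRAYS and ACCEPT_EMPTY_ARRAY_AS_NULL_OBJECT features, sometimes through a shared READER and sometimes through a freshly built mapper. A minimal standalone usage sketch, assuming jackson-databind and the jackson-datatype-jsr310 module are on the classpath:

import java.time.ZoneOffset;

import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectReader;
import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;

public class UnwrapSingleValueArrayExample {
    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper().registerModule(new JavaTimeModule());
        ObjectReader reader = mapper.readerFor(ZoneOffset.class);

        // A one-element array is unwrapped to its single value.
        ZoneOffset offset = reader
                .with(DeserializationFeature.UNWRAP_SINGLE_VALUE_ARRAYS)
                .readValue("[\"+03:00\"]");
        System.out.println(offset); // +03:00

        // An empty array becomes null instead of failing.
        ZoneOffset empty = reader
                .with(DeserializationFeature.UNWRAP_SINGLE_VALUE_ARRAYS,
                        DeserializationFeature.ACCEPT_EMPTY_ARRAY_AS_NULL_OBJECT)
                .readValue("[]");
        System.out.println(empty); // null
    }
}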
<<<<<<<
import org.thingsboard.server.actors.service.ActorService;
import org.thingsboard.server.common.data.*;
=======
import org.thingsboard.server.common.data.BaseData;
>>>>>>>
import org.thingsboard.server.common.data.*;
<<<<<<<
=======
import com.fasterxml.jackson.annotation.JsonFormat.Value;
import com.fasterxml.jackson.annotation.OptBoolean;
>>>>>>>
import com.fasterxml.jackson.annotation.OptBoolean;
<<<<<<< ZSet zset = _getzset(key0, false); Iterable<ZSetEntry> entries = zset.subSet(_todouble(min1), _todouble(max2)); ======= if (key0 == null || min1 == null || max2 == null) { throw new RedisException("wrong number of arguments for 'zcount' command"); } BytesKeyZSet zset = _getzset(key0, false); Score min = _toscorerange(min1); Score max = _toscorerange(max2); NavigableSet<ZSetEntry> entries = zset.subSet(new ZSetEntry(null, min.value), true, new ZSetEntry(null, max.value), true); >>>>>>> if (key0 == null || min1 == null || max2 == null) { throw new RedisException("wrong number of arguments for 'zcount' command"); } ZSet zset = _getzset(key0, false); Score min = _toscorerange(min1); Score max = _toscorerange(max2); Iterable<ZSetEntry> entries = zset.subSet(_todouble(min1), _todouble(max2)); <<<<<<< ZSet destination = _getzset(args[0], true); for (int i = 2; i < numkeys + 2; i++) { ZSet zset = _getzset(args[i], false); if (i == 2) { ======= del(new byte[][] { destination0 }); BytesKeyZSet destination = _getzset(destination0, true); for (int i = 0; i < numkeys; i++) { BytesKeyZSet zset = _getzset(key2[i], false); if (i == 0) { >>>>>>> del(new byte[][] { destination0 }); ZSet destination = _getzset(destination0, true); for (int i = 0; i < numkeys; i++) { ZSet zset = _getzset(key2[i], false); if (i == 0) { <<<<<<< destination.remove(entry.getValue()); if (current != null) { double newscore = entry.getScore() * (weights == null ? 1 : weights[i - 2]); ======= destination.remove(entry); if (union || current != null) { double newscore = entry.getScore() * (weights == null ? 1 : weights[i]); >>>>>>> destination.remove(entry.getValue()); if (union || current != null) { double newscore = entry.getScore() * (weights == null ? 1 : weights[i]);
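The resolution above validates the arguments, parses the score range, and then counts the sub-set of ZSet entries whose scores fall between min and max. A self-contained sketch of that counting step using a NavigableSet; the ZSetEntry type and sentinel bounds here are stand-ins, not the original Redis-server classes:

import java.util.Comparator;
import java.util.NavigableSet;
import java.util.TreeSet;

public class ZCountExample {

    // Member plus score, ordered by score (then member for uniqueness).
    record ZSetEntry(String member, double score) {}

    public static void main(String[] args) {
        NavigableSet<ZSetEntry> zset = new TreeSet<>(
                Comparator.comparingDouble(ZSetEntry::score)
                        .thenComparing(ZSetEntry::member));
        zset.add(new ZSetEntry("a", 1.0));
        zset.add(new ZSetEntry("b", 2.5));
        zset.add(new ZSetEntry("c", 4.0));

        double min = 1.0, max = 3.0;
        // ZCOUNT with inclusive bounds: take the sub-set between two sentinel entries.
        NavigableSet<ZSetEntry> inRange = zset.subSet(
                new ZSetEntry("", min), true,
                new ZSetEntry("\uffff", max), true);
        System.out.println(inRange.size()); // 2 ("a" and "b")
    }
}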
<<<<<<<
import static biz.netcentric.cq.tools.actool.installationhistory.AcInstallationHistoryPojo.msHumanReadable;
=======
import java.util.ArrayList;
import java.util.Arrays;
>>>>>>>
import static biz.netcentric.cq.tools.actool.installationhistory.AcInstallationHistoryPojo.msHumanReadable;
import java.util.ArrayList;
import java.util.Arrays;
<<<<<<< public void shouldInstantiateDefaultFeignBlockingLoadBalancerClientWhenHttpClientDisabled() { ConfigurableApplicationContext context = initContext("feign.httpclient.enabled=false"); ======= void shouldInstantiateDefaultFeignBlockingLoadBalancerClientWhenHttpClientDisabled() { ConfigurableApplicationContext context = initContext( "spring.cloud.loadbalancer.ribbon.enabled=false", "feign.httpclient.enabled=false", "spring.cloud.loadbalancer.retry.enabled=false"); >>>>>>> void shouldInstantiateDefaultFeignBlockingLoadBalancerClientWhenHttpClientDisabled() { ConfigurableApplicationContext context = initContext("feign.httpclient.enabled=false", "spring.cloud.loadbalancer.retry.enabled=false"); <<<<<<< public void shouldInstantiateHttpFeignClientWhenEnabled() { ConfigurableApplicationContext context = initContext("spring.cloud.loadbalancer.ribbon.enabled=false"); ======= void shouldInstantiateHttpFeignClientWhenEnabled() { ConfigurableApplicationContext context = initContext( "spring.cloud.loadbalancer.ribbon.enabled=false", "spring.cloud.loadbalancer.retry.enabled=false"); >>>>>>> void shouldInstantiateHttpFeignClientWhenEnabled() { ConfigurableApplicationContext context = initContext("spring.cloud.loadbalancer.retry.enabled=false"); <<<<<<< public void shouldInstantiateOkHttpFeignClientWhenEnabled() { ConfigurableApplicationContext context = initContext("feign.httpclient.enabled=false", "feign.okhttp.enabled=true"); ======= void shouldInstantiateOkHttpFeignClientWhenEnabled() { ConfigurableApplicationContext context = initContext( "spring.cloud.loadbalancer.ribbon.enabled=false", "feign.httpclient.enabled=false", "feign.okhttp.enabled=true", "spring.cloud.loadbalancer.retry.enabled=false"); >>>>>>> void shouldInstantiateOkHttpFeignClientWhenEnabled() { ConfigurableApplicationContext context = initContext("feign.httpclient.enabled=false", "feign.okhttp.enabled=true", "spring.cloud.loadbalancer.retry.enabled=false"); <<<<<<< ======= assertThatBeanNotPresent(context, LoadBalancerFeignClient.class); } @Test void shouldInstantiateRetryableDefaultFeignBlockingLoadBalancerClientWhenHttpClientDisabled() { ConfigurableApplicationContext context = initContext( "spring.cloud.loadbalancer.ribbon.enabled=false", "feign.httpclient.enabled=false"); assertThatOneBeanPresent(context, BlockingLoadBalancerClient.class); assertLoadBalancedWithRetries(context, Client.Default.class); assertThatBeanNotPresent(context, LoadBalancerFeignClient.class); } @Test void shouldInstantiateRetryableHttpFeignClientWhenEnabled() { ConfigurableApplicationContext context = initContext( "spring.cloud.loadbalancer.ribbon.enabled=false"); assertThatOneBeanPresent(context, BlockingLoadBalancerClient.class); assertLoadBalancedWithRetries(context, ApacheHttpClient.class); assertThatBeanNotPresent(context, LoadBalancerFeignClient.class); } @Test void shouldInstantiateRetryableOkHttpFeignClientWhenEnabled() { ConfigurableApplicationContext context = initContext( "spring.cloud.loadbalancer.ribbon.enabled=false", "feign.httpclient.enabled=false", "feign.okhttp.enabled=true"); assertThatOneBeanPresent(context, BlockingLoadBalancerClient.class); assertLoadBalancedWithRetries(context, OkHttpClient.class); assertThatBeanNotPresent(context, LoadBalancerFeignClient.class); } @Test void shouldNotProcessLoadBalancerConfigurationWhenRibbonEnabled() { ConfigurableApplicationContext context = initContext( "spring.cloud.loadbalancer.ribbon.enabled=true", "spring.cloud.loadbalancer.retry.enabled=false"); assertThatOneBeanPresent(context, 
LoadBalancerFeignClient.class); assertThatBeanNotPresent(context, BlockingLoadBalancerClient.class); assertThatBeanNotPresent(context, FeignBlockingLoadBalancerClient.class); >>>>>>> } @Test void shouldInstantiateRetryableDefaultFeignBlockingLoadBalancerClientWhenHttpClientDisabled() { ConfigurableApplicationContext context = initContext("spring.cloud.loadbalancer.ribbon.enabled=false", "feign.httpclient.enabled=false"); assertThatOneBeanPresent(context, BlockingLoadBalancerClient.class); assertLoadBalancedWithRetries(context, Client.Default.class); } @Test void shouldInstantiateRetryableHttpFeignClientWhenEnabled() { ConfigurableApplicationContext context = initContext("spring.cloud.loadbalancer.ribbon.enabled=false"); assertThatOneBeanPresent(context, BlockingLoadBalancerClient.class); assertLoadBalancedWithRetries(context, ApacheHttpClient.class); } @Test void shouldInstantiateRetryableOkHttpFeignClientWhenEnabled() { ConfigurableApplicationContext context = initContext("spring.cloud.loadbalancer.ribbon.enabled=false", "feign.httpclient.enabled=false", "feign.okhttp.enabled=true"); assertThatOneBeanPresent(context, BlockingLoadBalancerClient.class); assertLoadBalancedWithRetries(context, OkHttpClient.class); <<<<<<< ======= private void assertLoadBalancedWithRetries(ConfigurableApplicationContext context, Class delegateClass) { Map<String, RetryableBlockingFeignLoadBalancerClient> retryableBeans = context .getBeansOfType(RetryableBlockingFeignLoadBalancerClient.class); assertThat(retryableBeans).hasSize(1); Map<String, FeignBlockingLoadBalancerClient> beans = context .getBeansOfType(FeignBlockingLoadBalancerClient.class); assertThat(beans).isEmpty(); assertThat(retryableBeans.get("feignRetryClient").getDelegate()) .isInstanceOf(delegateClass); } private void assertThatBeanNotPresent(ConfigurableApplicationContext context, Class<?> beanClass) { Map<String, ?> beans = context.getBeansOfType(beanClass); assertThat(beans).isEmpty(); } >>>>>>> private void assertLoadBalancedWithRetries(ConfigurableApplicationContext context, Class delegateClass) { Map<String, RetryableBlockingFeignLoadBalancerClient> retryableBeans = context .getBeansOfType(RetryableBlockingFeignLoadBalancerClient.class); assertThat(retryableBeans).hasSize(1); Map<String, FeignBlockingLoadBalancerClient> beans = context .getBeansOfType(FeignBlockingLoadBalancerClient.class); assertThat(beans).isEmpty(); assertThat(retryableBeans.get("feignRetryClient").getDelegate()).isInstanceOf(delegateClass); }
<<<<<<< @Conditional(FeignCircuitBreakerDisabledConditions.class) ======= @Conditional(DefaultFeignTargeterConditions.class) >>>>>>> @Conditional(FeignCircuitBreakerDisabledConditions.class) <<<<<<< ======= @Conditional(FeignCircuitBreakerDisabledConditions.class) @ConditionalOnClass(name = "feign.hystrix.HystrixFeign") @ConditionalOnProperty(value = "feign.hystrix.enabled", havingValue = "true", matchIfMissing = true) protected static class HystrixFeignTargeterConfiguration { @Bean @ConditionalOnMissingBean public Targeter feignTargeter() { return new HystrixTargeter(); } } @Configuration(proxyBeanMethods = false) >>>>>>> <<<<<<< @Configuration(proxyBeanMethods = false) @ConditionalOnClass(OAuth2ClientContext.class) @ConditionalOnProperty("feign.oauth2.enabled") protected static class Oauth2FeignConfiguration { @Bean @ConditionalOnMissingBean(OAuth2FeignRequestInterceptor.class) @ConditionalOnBean({ OAuth2ClientContext.class, OAuth2ProtectedResourceDetails.class }) public RequestInterceptor oauth2FeignRequestInterceptor(OAuth2ClientContext oAuth2ClientContext, OAuth2ProtectedResourceDetails resource) { return new OAuth2FeignRequestInterceptor(oAuth2ClientContext, resource); } } ======= static class DefaultFeignTargeterConditions extends AllNestedConditions { DefaultFeignTargeterConditions() { super(ConfigurationPhase.PARSE_CONFIGURATION); } @Conditional(FeignCircuitBreakerDisabledConditions.class) static class FeignCircuitBreakerDisabled { } @Conditional(HystrixDisabledConditions.class) static class HystrixDisabled { } } >>>>>>> @Configuration(proxyBeanMethods = false) @ConditionalOnClass(OAuth2ClientContext.class) @ConditionalOnProperty("feign.oauth2.enabled") protected static class Oauth2FeignConfiguration { @Bean @ConditionalOnMissingBean(OAuth2FeignRequestInterceptor.class) @ConditionalOnBean({ OAuth2ClientContext.class, OAuth2ProtectedResourceDetails.class }) public RequestInterceptor oauth2FeignRequestInterceptor(OAuth2ClientContext oAuth2ClientContext, OAuth2ProtectedResourceDetails resource) { return new OAuth2FeignRequestInterceptor(oAuth2ClientContext, resource); } }
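The resolution above keeps the OAuth2 Feign configuration, whose job is to register a RequestInterceptor that attaches a token to every outgoing request. A pared-down sketch of such an interceptor, assuming only feign-core on the classpath; the tokenSupplier and header value are illustrative, not the Spring Cloud OAuth2FeignRequestInterceptor internals:

import java.util.function.Supplier;

import feign.RequestInterceptor;
import feign.RequestTemplate;

public class BearerTokenInterceptorExample {

    // Adds an Authorization header to every request Feign builds.
    static class BearerTokenInterceptor implements RequestInterceptor {
        private final Supplier<String> tokenSupplier;

        BearerTokenInterceptor(Supplier<String> tokenSupplier) {
            this.tokenSupplier = tokenSupplier;
        }

        @Override
        public void apply(RequestTemplate template) {
            template.header("Authorization", "Bearer " + tokenSupplier.get());
        }
    }

    public static void main(String[] args) {
        RequestTemplate template = new RequestTemplate();
        new BearerTokenInterceptor(() -> "dummy-token").apply(template);
        System.out.println(template.headers()); // {Authorization=[Bearer dummy-token]}
    }
}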
<<<<<<< * OAuth2 client registration constants. */ public static final String OAUTH2_TENANT_ID_PROPERTY = TENANT_ID_PROPERTY; public static final String OAUTH2_CLIENT_REGISTRATION_INFO_COLUMN_FAMILY_NAME = "oauth2_client_registration_info"; public static final String OAUTH2_CLIENT_REGISTRATION_COLUMN_FAMILY_NAME = "oauth2_client_registration"; public static final String OAUTH2_CLIENT_REGISTRATION_TO_DOMAIN_COLUMN_FAMILY_NAME = "oauth2_client_registration_to_domain"; public static final String OAUTH2_CLIENT_REGISTRATION_TEMPLATE_COLUMN_FAMILY_NAME = "oauth2_client_registration_template"; public static final String OAUTH2_ENABLED_PROPERTY = "enabled"; public static final String OAUTH2_TEMPLATE_PROVIDER_ID_PROPERTY = "provider_id"; public static final String OAUTH2_CLIENT_REGISTRATION_INFO_ID_PROPERTY = "client_registration_info_id"; public static final String OAUTH2_DOMAIN_NAME_PROPERTY = "domain_name"; public static final String OAUTH2_DOMAIN_SCHEME_PROPERTY = "domain_scheme"; public static final String OAUTH2_CLIENT_ID_PROPERTY = "client_id"; public static final String OAUTH2_CLIENT_SECRET_PROPERTY = "client_secret"; public static final String OAUTH2_AUTHORIZATION_URI_PROPERTY = "authorization_uri"; public static final String OAUTH2_TOKEN_URI_PROPERTY = "token_uri"; public static final String OAUTH2_REDIRECT_URI_TEMPLATE_PROPERTY = "redirect_uri_template"; public static final String OAUTH2_SCOPE_PROPERTY = "scope"; public static final String OAUTH2_USER_INFO_URI_PROPERTY = "user_info_uri"; public static final String OAUTH2_USER_NAME_ATTRIBUTE_NAME_PROPERTY = "user_name_attribute_name"; public static final String OAUTH2_JWK_SET_URI_PROPERTY = "jwk_set_uri"; public static final String OAUTH2_CLIENT_AUTHENTICATION_METHOD_PROPERTY = "client_authentication_method"; public static final String OAUTH2_LOGIN_BUTTON_LABEL_PROPERTY = "login_button_label"; public static final String OAUTH2_LOGIN_BUTTON_ICON_PROPERTY = "login_button_icon"; public static final String OAUTH2_ALLOW_USER_CREATION_PROPERTY = "allow_user_creation"; public static final String OAUTH2_ACTIVATE_USER_PROPERTY = "activate_user"; public static final String OAUTH2_MAPPER_TYPE_PROPERTY = "type"; public static final String OAUTH2_EMAIL_ATTRIBUTE_KEY_PROPERTY = "basic_email_attribute_key"; public static final String OAUTH2_FIRST_NAME_ATTRIBUTE_KEY_PROPERTY = "basic_first_name_attribute_key"; public static final String OAUTH2_LAST_NAME_ATTRIBUTE_KEY_PROPERTY = "basic_last_name_attribute_key"; public static final String OAUTH2_TENANT_NAME_STRATEGY_PROPERTY = "basic_tenant_name_strategy"; public static final String OAUTH2_TENANT_NAME_PATTERN_PROPERTY = "basic_tenant_name_pattern"; public static final String OAUTH2_CUSTOMER_NAME_PATTERN_PROPERTY = "basic_customer_name_pattern"; public static final String OAUTH2_DEFAULT_DASHBOARD_NAME_PROPERTY = "basic_default_dashboard_name"; public static final String OAUTH2_ALWAYS_FULL_SCREEN_PROPERTY = "basic_always_full_screen"; public static final String OAUTH2_MAPPER_URL_PROPERTY = "custom_url"; public static final String OAUTH2_MAPPER_USERNAME_PROPERTY = "custom_username"; public static final String OAUTH2_MAPPER_PASSWORD_PROPERTY = "custom_password"; public static final String OAUTH2_MAPPER_SEND_TOKEN_PROPERTY = "custom_send_token"; public static final String OAUTH2_TEMPLATE_COMMENT_PROPERTY = "comment"; public static final String OAUTH2_ADDITIONAL_INFO_PROPERTY = ADDITIONAL_INFO_PROPERTY; public static final String OAUTH2_TEMPLATE_ADDITIONAL_INFO_PROPERTY = ADDITIONAL_INFO_PROPERTY; public static 
final String OAUTH2_TEMPLATE_LOGIN_BUTTON_ICON_PROPERTY = OAUTH2_LOGIN_BUTTON_ICON_PROPERTY; public static final String OAUTH2_TEMPLATE_LOGIN_BUTTON_LABEL_PROPERTY = OAUTH2_LOGIN_BUTTON_LABEL_PROPERTY; public static final String OAUTH2_TEMPLATE_HELP_LINK_PROPERTY = "help_link"; /** ======= * Rule node state constants. */ public static final String RULE_NODE_STATE_TABLE_NAME = "rule_node_state"; public static final String RULE_NODE_STATE_NODE_ID_PROPERTY = "rule_node_id"; public static final String RULE_NODE_STATE_ENTITY_TYPE_PROPERTY = "entity_type"; public static final String RULE_NODE_STATE_ENTITY_ID_PROPERTY = "entity_id"; public static final String RULE_NODE_STATE_DATA_PROPERTY = "state_data"; /** >>>>>>> * Rule node state constants. */ public static final String RULE_NODE_STATE_TABLE_NAME = "rule_node_state"; public static final String RULE_NODE_STATE_NODE_ID_PROPERTY = "rule_node_id"; public static final String RULE_NODE_STATE_ENTITY_TYPE_PROPERTY = "entity_type"; public static final String RULE_NODE_STATE_ENTITY_ID_PROPERTY = "entity_id"; public static final String RULE_NODE_STATE_DATA_PROPERTY = "state_data"; /** * OAuth2 client registration constants. */ public static final String OAUTH2_TENANT_ID_PROPERTY = TENANT_ID_PROPERTY; public static final String OAUTH2_CLIENT_REGISTRATION_INFO_COLUMN_FAMILY_NAME = "oauth2_client_registration_info"; public static final String OAUTH2_CLIENT_REGISTRATION_COLUMN_FAMILY_NAME = "oauth2_client_registration"; public static final String OAUTH2_CLIENT_REGISTRATION_TO_DOMAIN_COLUMN_FAMILY_NAME = "oauth2_client_registration_to_domain"; public static final String OAUTH2_CLIENT_REGISTRATION_TEMPLATE_COLUMN_FAMILY_NAME = "oauth2_client_registration_template"; public static final String OAUTH2_ENABLED_PROPERTY = "enabled"; public static final String OAUTH2_TEMPLATE_PROVIDER_ID_PROPERTY = "provider_id"; public static final String OAUTH2_CLIENT_REGISTRATION_INFO_ID_PROPERTY = "client_registration_info_id"; public static final String OAUTH2_DOMAIN_NAME_PROPERTY = "domain_name"; public static final String OAUTH2_DOMAIN_SCHEME_PROPERTY = "domain_scheme"; public static final String OAUTH2_CLIENT_ID_PROPERTY = "client_id"; public static final String OAUTH2_CLIENT_SECRET_PROPERTY = "client_secret"; public static final String OAUTH2_AUTHORIZATION_URI_PROPERTY = "authorization_uri"; public static final String OAUTH2_TOKEN_URI_PROPERTY = "token_uri"; public static final String OAUTH2_REDIRECT_URI_TEMPLATE_PROPERTY = "redirect_uri_template"; public static final String OAUTH2_SCOPE_PROPERTY = "scope"; public static final String OAUTH2_USER_INFO_URI_PROPERTY = "user_info_uri"; public static final String OAUTH2_USER_NAME_ATTRIBUTE_NAME_PROPERTY = "user_name_attribute_name"; public static final String OAUTH2_JWK_SET_URI_PROPERTY = "jwk_set_uri"; public static final String OAUTH2_CLIENT_AUTHENTICATION_METHOD_PROPERTY = "client_authentication_method"; public static final String OAUTH2_LOGIN_BUTTON_LABEL_PROPERTY = "login_button_label"; public static final String OAUTH2_LOGIN_BUTTON_ICON_PROPERTY = "login_button_icon"; public static final String OAUTH2_ALLOW_USER_CREATION_PROPERTY = "allow_user_creation"; public static final String OAUTH2_ACTIVATE_USER_PROPERTY = "activate_user"; public static final String OAUTH2_MAPPER_TYPE_PROPERTY = "type"; public static final String OAUTH2_EMAIL_ATTRIBUTE_KEY_PROPERTY = "basic_email_attribute_key"; public static final String OAUTH2_FIRST_NAME_ATTRIBUTE_KEY_PROPERTY = "basic_first_name_attribute_key"; public static final String 
OAUTH2_LAST_NAME_ATTRIBUTE_KEY_PROPERTY = "basic_last_name_attribute_key"; public static final String OAUTH2_TENANT_NAME_STRATEGY_PROPERTY = "basic_tenant_name_strategy"; public static final String OAUTH2_TENANT_NAME_PATTERN_PROPERTY = "basic_tenant_name_pattern"; public static final String OAUTH2_CUSTOMER_NAME_PATTERN_PROPERTY = "basic_customer_name_pattern"; public static final String OAUTH2_DEFAULT_DASHBOARD_NAME_PROPERTY = "basic_default_dashboard_name"; public static final String OAUTH2_ALWAYS_FULL_SCREEN_PROPERTY = "basic_always_full_screen"; public static final String OAUTH2_MAPPER_URL_PROPERTY = "custom_url"; public static final String OAUTH2_MAPPER_USERNAME_PROPERTY = "custom_username"; public static final String OAUTH2_MAPPER_PASSWORD_PROPERTY = "custom_password"; public static final String OAUTH2_MAPPER_SEND_TOKEN_PROPERTY = "custom_send_token"; public static final String OAUTH2_TEMPLATE_COMMENT_PROPERTY = "comment"; public static final String OAUTH2_ADDITIONAL_INFO_PROPERTY = ADDITIONAL_INFO_PROPERTY; public static final String OAUTH2_TEMPLATE_ADDITIONAL_INFO_PROPERTY = ADDITIONAL_INFO_PROPERTY; public static final String OAUTH2_TEMPLATE_LOGIN_BUTTON_ICON_PROPERTY = OAUTH2_LOGIN_BUTTON_ICON_PROPERTY; public static final String OAUTH2_TEMPLATE_LOGIN_BUTTON_LABEL_PROPERTY = OAUTH2_LOGIN_BUTTON_LABEL_PROPERTY; public static final String OAUTH2_TEMPLATE_HELP_LINK_PROPERTY = "help_link"; /**
<<<<<<< private HttpEntity toApacheHttpEntity(RequestTemplate requestTemplate) throws IOException, URISyntaxException { ======= private HttpEntity toApacheHttpEntity(RequestTemplate requestTemplate) throws IOException { >>>>>>> private HttpEntity toApacheHttpEntity(RequestTemplate requestTemplate) throws IOException { <<<<<<< BDDMockito.given(this.httpClient.execute(ArgumentMatchers.<HttpUriRequest>any())) .will(new Answer<HttpResponse>() { @Override public HttpResponse answer(InvocationOnMock invocationOnMock) throws Throwable { request.add((HttpUriRequest) invocationOnMock.getArguments()[0]); return new BasicHttpResponse(new BasicStatusLine(new ProtocolVersion("http", 1, 1), 200, null)); } ======= BDDMockito.given(this.httpClient.execute(ArgumentMatchers.any())) .will((Answer<HttpResponse>) invocationOnMock -> { request.add((HttpUriRequest) invocationOnMock.getArguments()[0]); return new BasicHttpResponse(new BasicStatusLine( new ProtocolVersion("http", 1, 1), 200, null)); >>>>>>> BDDMockito.given(this.httpClient.execute(ArgumentMatchers.any())) .will((Answer<HttpResponse>) invocationOnMock -> { request.add((HttpUriRequest) invocationOnMock.getArguments()[0]); return new BasicHttpResponse(new BasicStatusLine(new ProtocolVersion("http", 1, 1), 200, null));
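The second resolution above replaces the anonymous Answer class with a lambda passed to BDDMockito.will(...). A tiny standalone sketch of that stubbing style, assuming mockito-core on the classpath; the Echo interface is a made-up stand-in for the mocked HttpClient:

import org.mockito.ArgumentMatchers;
import org.mockito.BDDMockito;
import org.mockito.Mockito;
import org.mockito.stubbing.Answer;

public class LambdaAnswerExample {

    interface Echo {
        String call(String input);
    }

    public static void main(String[] args) {
        Echo echo = Mockito.mock(Echo.class);

        // The lambda replaces the old anonymous Answer<...> implementation.
        BDDMockito.given(echo.call(ArgumentMatchers.any()))
                .will((Answer<String>) invocation -> "echo: " + invocation.getArguments()[0]);

        System.out.println(echo.call("ping")); // echo: ping
    }
}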
<<<<<<<
import org.thingsboard.server.transport.mqtt.session.DeviceSessionCtx;
=======
import org.thingsboard.server.gen.transport.TransportProtos.ProvisionDeviceResponseMsg;
>>>>>>>
import org.thingsboard.server.transport.mqtt.session.DeviceSessionCtx;
import org.thingsboard.server.gen.transport.TransportProtos.ProvisionDeviceResponseMsg;
<<<<<<< import static org.hamcrest.Matchers.containsString; import static org.thingsboard.server.dao.model.ModelConstants.NULL_UUID; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; import java.util.ArrayList; import java.util.Collections; import java.util.List; import com.datastax.oss.driver.api.core.uuid.Uuids; ======= import com.datastax.driver.core.utils.UUIDs; import com.fasterxml.jackson.core.type.TypeReference; >>>>>>> import com.datastax.oss.driver.api.core.uuid.Uuids; import com.fasterxml.jackson.core.type.TypeReference; <<<<<<< import org.thingsboard.server.common.data.page.PageData; import org.thingsboard.server.common.data.page.PageLink; ======= import org.thingsboard.server.common.data.page.TextPageData; import org.thingsboard.server.common.data.page.TextPageLink; import org.thingsboard.server.common.data.relation.EntityRelation; import org.thingsboard.server.common.data.relation.RelationTypeGroup; >>>>>>> import org.thingsboard.server.common.data.page.PageData; import org.thingsboard.server.common.data.page.PageLink; import org.thingsboard.server.common.data.relation.EntityRelation; import org.thingsboard.server.common.data.relation.RelationTypeGroup; <<<<<<< doPost("/api/customer/" + Uuids.timeBased().toString() ======= doPost("/api/customer/" + UUIDs.timeBased().toString() >>>>>>> doPost("/api/customer/" + Uuids.timeBased().toString() <<<<<<< pageData = doGetTypedWithPageLink("/api/tenant/devices?", new TypeReference<PageData<Device>>(){}, pageLink); ======= pageData = doGetTypedWithPageLink("/api/tenant/devices?", new TypeReference<TextPageData<Device>>() { }, pageLink); >>>>>>> pageData = doGetTypedWithPageLink("/api/tenant/devices?", new TypeReference<PageData<Device>>(){}, pageLink); <<<<<<< pageLink = new PageLink(4, 0, title1); pageData = doGetTypedWithPageLink("/api/tenant/devices?", new TypeReference<PageData<Device>>(){}, pageLink); ======= pageLink = new TextPageLink(4, title1); pageData = doGetTypedWithPageLink("/api/tenant/devices?", new TypeReference<TextPageData<Device>>() { }, pageLink); >>>>>>> pageLink = new PageLink(4, 0, title1); pageData = doGetTypedWithPageLink("/api/tenant/devices?", new TypeReference<PageData<Device>>(){}, pageLink); <<<<<<< pageLink = new PageLink(4, 0, title2); pageData = doGetTypedWithPageLink("/api/tenant/devices?", new TypeReference<PageData<Device>>(){}, pageLink); ======= pageLink = new TextPageLink(4, title2); pageData = doGetTypedWithPageLink("/api/tenant/devices?", new TypeReference<TextPageData<Device>>() { }, pageLink); >>>>>>> pageLink = new PageLink(4, 0, title2); pageData = doGetTypedWithPageLink("/api/tenant/devices?", new TypeReference<PageData<Device>>(){}, pageLink); <<<<<<< new TypeReference<PageData<Device>>(){}, pageLink, type1); ======= new TypeReference<TextPageData<Device>>() { }, pageLink, type1); >>>>>>> new TypeReference<PageData<Device>>(){}, pageLink, type1); <<<<<<< new TypeReference<PageData<Device>>(){}, pageLink, type2); ======= new TypeReference<TextPageData<Device>>() { }, pageLink, type2); >>>>>>> new TypeReference<PageData<Device>>(){}, pageLink, type2); <<<<<<< new TypeReference<PageData<Device>>(){}, pageLink, type1); ======= new TypeReference<TextPageData<Device>>() { }, pageLink, type1); >>>>>>> new TypeReference<PageData<Device>>(){}, pageLink, type1); <<<<<<< new TypeReference<PageData<Device>>(){}, pageLink, type2); ======= new TypeReference<TextPageData<Device>>() { }, pageLink, type2); >>>>>>> new TypeReference<PageData<Device>>(){}, pageLink, 
type2); <<<<<<< pageData = doGetTypedWithPageLink("/api/customer/" + customerId.getId().toString() + "/devices?", new TypeReference<PageData<Device>>(){}, pageLink); ======= pageData = doGetTypedWithPageLink("/api/customer/" + customerId.getId().toString() + "/devices?", new TypeReference<TextPageData<Device>>() { }, pageLink); >>>>>>> pageData = doGetTypedWithPageLink("/api/customer/" + customerId.getId().toString() + "/devices?", new TypeReference<PageData<Device>>(){}, pageLink); <<<<<<< pageLink = new PageLink(4, 0, title1); pageData = doGetTypedWithPageLink("/api/customer/" + customerId.getId().toString() + "/devices?", new TypeReference<PageData<Device>>(){}, pageLink); ======= pageLink = new TextPageLink(4, title1); pageData = doGetTypedWithPageLink("/api/customer/" + customerId.getId().toString() + "/devices?", new TypeReference<TextPageData<Device>>() { }, pageLink); >>>>>>> pageLink = new PageLink(4, 0, title1); pageData = doGetTypedWithPageLink("/api/customer/" + customerId.getId().toString() + "/devices?", new TypeReference<PageData<Device>>(){}, pageLink); <<<<<<< pageLink = new PageLink(4, 0, title2); pageData = doGetTypedWithPageLink("/api/customer/" + customerId.getId().toString() + "/devices?", new TypeReference<PageData<Device>>(){}, pageLink); ======= pageLink = new TextPageLink(4, title2); pageData = doGetTypedWithPageLink("/api/customer/" + customerId.getId().toString() + "/devices?", new TypeReference<TextPageData<Device>>() { }, pageLink); >>>>>>> pageLink = new PageLink(4, 0, title2); pageData = doGetTypedWithPageLink("/api/customer/" + customerId.getId().toString() + "/devices?", new TypeReference<PageData<Device>>(){}, pageLink); <<<<<<< new TypeReference<PageData<Device>>(){}, pageLink, type1); ======= new TypeReference<TextPageData<Device>>() { }, pageLink, type1); >>>>>>> new TypeReference<PageData<Device>>(){}, pageLink, type1); <<<<<<< new TypeReference<PageData<Device>>(){}, pageLink, type2); ======= new TypeReference<TextPageData<Device>>() { }, pageLink, type2); >>>>>>> new TypeReference<PageData<Device>>(){}, pageLink, type2); <<<<<<< new TypeReference<PageData<Device>>(){}, pageLink, type1); ======= new TypeReference<TextPageData<Device>>() { }, pageLink, type1); >>>>>>> new TypeReference<PageData<Device>>(){}, pageLink, type1); <<<<<<< new TypeReference<PageData<Device>>(){}, pageLink, type2); ======= new TypeReference<TextPageData<Device>>() { }, pageLink, type2); >>>>>>> new TypeReference<PageData<Device>>(){}, pageLink, type2);
<<<<<<< ======= import static org.thingsboard.server.dao.DaoUtil.convertDataList; import static org.thingsboard.server.dao.DaoUtil.getData; import static org.thingsboard.server.dao.service.Validator.validateId; import java.util.List; import com.google.common.base.Function; import com.google.common.util.concurrent.Futures; import com.google.common.util.concurrent.ListenableFuture; >>>>>>> import com.google.common.util.concurrent.ListenableFuture; <<<<<<< import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; ======= import org.thingsboard.server.common.data.Customer; >>>>>>> import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; <<<<<<< ======= import org.thingsboard.server.dao.model.CustomerEntity; import org.thingsboard.server.dao.model.TenantEntity; >>>>>>> <<<<<<< ======= import org.thingsboard.server.dao.user.UserService; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; >>>>>>> <<<<<<< private PaginatedRemover<String, Tenant> tenantsRemover = new PaginatedRemover<String, Tenant>() { ======= private PaginatedRemover<String, TenantEntity> tenantsRemover = new PaginatedRemover<String, TenantEntity>() { >>>>>>> private PaginatedRemover<String, Tenant> tenantsRemover = new PaginatedRemover<String, Tenant>() {
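The resolution above switches the paginated remover from entity types to data types (PaginatedRemover<String, Tenant>), but the underlying idea is unchanged: fetch one page at a time and delete until a short page signals the end. A generic sketch of that loop; the page-fetch and remove callbacks are simplified stand-ins for the ThingsBoard DAO types:

import java.util.ArrayList;
import java.util.List;
import java.util.function.BiFunction;
import java.util.function.Consumer;

public class PaginatedRemoverExample {

    // Remove entities page by page so large tenants never load fully into memory.
    static <I, D> void removeAll(I id, int pageSize,
                                 BiFunction<I, Integer, List<D>> findPage,
                                 Consumer<D> removeOne) {
        List<D> page;
        do {
            page = findPage.apply(id, pageSize);
            page.forEach(removeOne);
        } while (page.size() == pageSize); // a short page means we are done
    }

    public static void main(String[] args) {
        List<String> store = new ArrayList<>(List.of("d1", "d2", "d3", "d4", "d5"));
        removeAll("tenant-1", 2,
                (tenantId, limit) -> new ArrayList<>(store.subList(0, Math.min(limit, store.size()))),
                store::remove);
        System.out.println(store); // []
    }
}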
<<<<<<< import org.apache.http.client.HttpClient; import org.apache.http.client.config.RequestConfig; import org.apache.http.config.RegistryBuilder; import org.apache.http.conn.HttpClientConnectionManager; import org.apache.http.impl.client.CloseableHttpClient; import org.springframework.beans.factory.annotation.Autowired; ======= >>>>>>> <<<<<<< import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.boot.context.properties.EnableConfigurationProperties; ======= >>>>>>> import org.springframework.boot.context.properties.EnableConfigurationProperties; <<<<<<< import feign.httpclient.ApacheHttpClient; import feign.okhttp.OkHttpClient; import okhttp3.ConnectionPool; import java.util.Timer; import java.util.TimerTask; import java.util.concurrent.TimeUnit; import javax.annotation.PreDestroy; ======= >>>>>>> <<<<<<< @EnableConfigurationProperties({ FeignHttpClientProperties.class }) ======= //Order is important here, last should be the default, first should be optional // see https://github.com/spring-cloud/spring-cloud-netflix/issues/2086#issuecomment-316281653 @Import({ HttpClientFeignLoadBalancedConfiguration.class, OkHttpFeignLoadBalancedConfiguration.class, DefaultFeignLoadBalancedConfiguration.class }) >>>>>>> @EnableConfigurationProperties({ FeignHttpClientProperties.class }) //Order is important here, last should be the default, first should be optional // see https://github.com/spring-cloud/spring-cloud-netflix/issues/2086#issuecomment-316281653 @Import({ HttpClientFeignLoadBalancedConfiguration.class, OkHttpFeignLoadBalancedConfiguration.class, DefaultFeignLoadBalancedConfiguration.class }) <<<<<<< public Client feignClient(CachingSpringLoadBalancerFactory cachingFactory, SpringClientFactory clientFactory) { return new LoadBalancerFeignClient(new Client.Default(null, null), cachingFactory, clientFactory); } @Bean @ConditionalOnMissingBean ======= >>>>>>>
<<<<<<<
return new RetryableBlockingFeignLoadBalancerClient(delegate, loadBalancerClient, loadBalancedRetryFactory);
=======
return new RetryableFeignBlockingLoadBalancerClient(delegate, loadBalancerClient, loadBalancedRetryFactories.get(0));
>>>>>>>
return new RetryableFeignBlockingLoadBalancerClient(delegate, loadBalancerClient, loadBalancedRetryFactory);
<<<<<<<
import org.springframework.util.ReflectionUtils;
import rx.Observable;
import rx.observables.GroupedObservable;
=======
import rx.Observable;
import rx.observables.GroupedObservable;
>>>>>>>
import rx.Observable;
import rx.observables.GroupedObservable;
import org.springframework.util.ReflectionUtils;
<<<<<<<
=======
import static org.springframework.cloud.netflix.turbine.amqp.Aggregator.getPayloadData;
>>>>>>>
import static org.springframework.cloud.netflix.turbine.amqp.Aggregator.getPayloadData;
<<<<<<< import org.springframework.cloud.client.loadbalancer.LoadBalancedBackOffPolicyFactory; ======= import org.mockito.ArgumentMatcher; >>>>>>> import org.springframework.cloud.client.loadbalancer.LoadBalancedBackOffPolicyFactory; import org.mockito.ArgumentMatcher; <<<<<<< boolean retryable, boolean retryOnAllOps, String serviceName, String host, int port, CloseableHttpClient delegate, ILoadBalancer lb, String statusCodes, LoadBalancedBackOffPolicyFactory loadBalancedBackOffPolicyFactory) throws Exception { ======= boolean retryable, boolean retryOnAllOps, String serviceName, String host, int port, HttpClient delegate, ILoadBalancer lb, String statusCodes) throws Exception { return setupClientForRetry(retriesNextServer, retriesSameServer, retryable, retryOnAllOps, serviceName, host, port, delegate, lb, statusCodes, false); } private RetryableRibbonLoadBalancingHttpClient setupClientForRetry(int retriesNextServer, int retriesSameServer, boolean retryable, boolean retryOnAllOps, String serviceName, String host, int port, HttpClient delegate, ILoadBalancer lb, String statusCodes, boolean isSecure) throws Exception { >>>>>>> boolean retryable, boolean retryOnAllOps, String serviceName, String host, int port, CloseableHttpClient delegate, ILoadBalancer lb, String statusCodes, LoadBalancedBackOffPolicyFactory loadBalancedBackOffPolicyFactory) throws Exception { return setupClientForRetry(retriesNextServer, retriesSameServer, retryable, retryOnAllOps, serviceName, host, port, delegate, lb, statusCodes, loadBalancedBackOffPolicyFactory, false); } private RetryableRibbonLoadBalancingHttpClient setupClientForRetry(int retriesNextServer, int retriesSameServer, boolean retryable, boolean retryOnAllOps, String serviceName, String host, int port, CloseableHttpClient delegate, ILoadBalancer lb, String statusCodes, LoadBalancedBackOffPolicyFactory loadBalancedBackOffPolicyFactory, boolean isSecure) throws Exception {
<<<<<<< import org.springframework.boot.autoconfigure.AutoConfigureAfter; import org.springframework.boot.autoconfigure.condition.ConditionalOnBean; import org.springframework.boot.autoconfigure.condition.ConditionalOnClass; ======= >>>>>>> <<<<<<< import org.springframework.cloud.client.loadbalancer.LoadBalancedRetryPolicyFactory; import org.springframework.cloud.commons.httpclient.ApacheHttpClientConnectionManagerFactory; import org.springframework.cloud.commons.httpclient.ApacheHttpClientFactory; import org.springframework.cloud.commons.httpclient.HttpClientConfiguration; import org.springframework.cloud.commons.httpclient.OkHttpClientConnectionPoolFactory; import org.springframework.cloud.commons.httpclient.OkHttpClientFactory; import org.springframework.cloud.netflix.ribbon.apache.RetryableRibbonLoadBalancingHttpClient; import org.springframework.cloud.netflix.ribbon.apache.RibbonLoadBalancingHttpClient; import org.springframework.cloud.netflix.ribbon.okhttp.OkHttpLoadBalancingClient; import org.springframework.cloud.netflix.ribbon.okhttp.RetryableOkHttpLoadBalancingClient; ======= import org.springframework.cloud.netflix.ribbon.apache.HttpClientRibbonConfiguration; import org.springframework.cloud.netflix.ribbon.okhttp.OkHttpRibbonConfiguration; >>>>>>> import org.springframework.cloud.commons.httpclient.HttpClientConfiguration; import org.springframework.cloud.netflix.ribbon.apache.HttpClientRibbonConfiguration; import org.springframework.cloud.netflix.ribbon.okhttp.OkHttpRibbonConfiguration; <<<<<<< import org.springframework.context.annotation.Import; import org.springframework.context.annotation.Lazy; ======= import org.springframework.context.annotation.Import; >>>>>>> import org.springframework.context.annotation.Import; <<<<<<< @Import(HttpClientConfiguration.class) ======= //Order is important here, last should be the default, first should be optional // see https://github.com/spring-cloud/spring-cloud-netflix/issues/2086#issuecomment-316281653 @Import({OkHttpRibbonConfiguration.class, RestClientRibbonConfiguration.class, HttpClientRibbonConfiguration.class}) >>>>>>> //Order is important here, last should be the default, first should be optional // see https://github.com/spring-cloud/spring-cloud-netflix/issues/2086#issuecomment-316281653 @Import({HttpClientConfiguration.class, OkHttpRibbonConfiguration.class, RestClientRibbonConfiguration.class, HttpClientRibbonConfiguration.class}) <<<<<<< @Configuration @ConditionalOnProperty(name = "ribbon.httpclient.enabled", matchIfMissing = true) protected static class ApacheHttpClientConfiguration { private final Timer connectionManagerTimer = new Timer( "RibbonApacheHttpClientConfiguration.connectionManagerTimer", true); private CloseableHttpClient httpClient; @Autowired(required = false) private RegistryBuilder registryBuilder; @Bean @ConditionalOnMissingBean(HttpClientConnectionManager.class) public HttpClientConnectionManager httpClientConnectionManager( IClientConfig config, ApacheHttpClientConnectionManagerFactory connectionManagerFactory) { Integer maxTotalConnections = config.getPropertyAsInteger( CommonClientConfigKey.MaxTotalConnections, DefaultClientConfigImpl.DEFAULT_MAX_TOTAL_CONNECTIONS); Integer maxConnectionsPerHost = config.getPropertyAsInteger( CommonClientConfigKey.MaxConnectionsPerHost, DefaultClientConfigImpl.DEFAULT_MAX_CONNECTIONS_PER_HOST); Integer timerRepeat = config.getPropertyAsInteger( CommonClientConfigKey.ConnectionCleanerRepeatInterval, 
DefaultClientConfigImpl.DEFAULT_CONNECTION_IDLE_TIMERTASK_REPEAT_IN_MSECS); Object timeToLiveObj = config .getProperty(CommonClientConfigKey.PoolKeepAliveTime); Long timeToLive = DefaultClientConfigImpl.DEFAULT_POOL_KEEP_ALIVE_TIME; Object ttlUnitObj = config .getProperty(CommonClientConfigKey.PoolKeepAliveTimeUnits); TimeUnit ttlUnit = DefaultClientConfigImpl.DEFAULT_POOL_KEEP_ALIVE_TIME_UNITS; if (timeToLiveObj instanceof Long) { timeToLive = (Long) timeToLiveObj; } if (ttlUnitObj instanceof TimeUnit) { ttlUnit = (TimeUnit) ttlUnitObj; } final HttpClientConnectionManager connectionManager = connectionManagerFactory .newConnectionManager(false, maxTotalConnections, maxConnectionsPerHost, timeToLive, ttlUnit, registryBuilder); this.connectionManagerTimer.schedule(new TimerTask() { @Override public void run() { connectionManager.closeExpiredConnections(); } }, 30000, timerRepeat); return connectionManager; } @Bean @ConditionalOnMissingBean(CloseableHttpClient.class) public CloseableHttpClient httpClient(ApacheHttpClientFactory httpClientFactory, HttpClientConnectionManager connectionManager, IClientConfig config) { Boolean followRedirects = config.getPropertyAsBoolean( CommonClientConfigKey.FollowRedirects, DefaultClientConfigImpl.DEFAULT_FOLLOW_REDIRECTS); Integer connectTimeout = config.getPropertyAsInteger( CommonClientConfigKey.ConnectTimeout, DefaultClientConfigImpl.DEFAULT_CONNECT_TIMEOUT); RequestConfig defaultRequestConfig = RequestConfig.custom() .setConnectTimeout(connectTimeout) .setRedirectsEnabled(followRedirects).build(); this.httpClient = httpClientFactory.createBuilder(). setDefaultRequestConfig(defaultRequestConfig). setConnectionManager(connectionManager).build(); return httpClient; } @PreDestroy public void destroy() throws Exception { connectionManagerTimer.cancel(); if(httpClient != null) { httpClient.close(); } } } @Configuration @ConditionalOnProperty(value = {"ribbon.okhttp.enabled"}) @ConditionalOnClass(name = "okhttp3.OkHttpClient") protected static class OkHttpClientConfiguration { private OkHttpClient httpClient; @Bean @ConditionalOnMissingBean(ConnectionPool.class) public ConnectionPool httpClientConnectionPool(IClientConfig config, OkHttpClientConnectionPoolFactory connectionPoolFactory) { Integer maxTotalConnections = config.getPropertyAsInteger( CommonClientConfigKey.MaxTotalConnections, DefaultClientConfigImpl.DEFAULT_MAX_TOTAL_CONNECTIONS); Object timeToLiveObj = config .getProperty(CommonClientConfigKey.PoolKeepAliveTime); Long timeToLive = DefaultClientConfigImpl.DEFAULT_POOL_KEEP_ALIVE_TIME; Object ttlUnitObj = config .getProperty(CommonClientConfigKey.PoolKeepAliveTimeUnits); TimeUnit ttlUnit = DefaultClientConfigImpl.DEFAULT_POOL_KEEP_ALIVE_TIME_UNITS; if (timeToLiveObj instanceof Long) { timeToLive = (Long) timeToLiveObj; } if (ttlUnitObj instanceof TimeUnit) { ttlUnit = (TimeUnit) ttlUnitObj; } return connectionPoolFactory.create(maxTotalConnections, timeToLive, ttlUnit); } @Bean @ConditionalOnMissingBean(OkHttpClient.class) public OkHttpClient client(OkHttpClientFactory httpClientFactory, ConnectionPool connectionPool, IClientConfig config) { Boolean followRedirects = config.getPropertyAsBoolean( CommonClientConfigKey.FollowRedirects, DefaultClientConfigImpl.DEFAULT_FOLLOW_REDIRECTS); Integer connectTimeout = config.getPropertyAsInteger( CommonClientConfigKey.ConnectTimeout, DefaultClientConfigImpl.DEFAULT_CONNECT_TIMEOUT); Integer readTimeout = config.getPropertyAsInteger(CommonClientConfigKey.ReadTimeout, 
DefaultClientConfigImpl.DEFAULT_READ_TIMEOUT); this.httpClient = httpClientFactory.createBuilder(false). connectTimeout(connectTimeout, TimeUnit.MILLISECONDS). readTimeout(readTimeout, TimeUnit.MILLISECONDS). followRedirects(followRedirects). connectionPool(connectionPool).build(); return this.httpClient; } @PreDestroy public void destroy() { if(httpClient != null) { httpClient.dispatcher().executorService().shutdown(); httpClient.connectionPool().evictAll(); } } } @Configuration @ConditionalOnProperty(name = "ribbon.httpclient.enabled", matchIfMissing = true) protected static class HttpClientRibbonConfiguration { @Value("${ribbon.client.name}") private String name = "client"; @Bean @ConditionalOnMissingBean(AbstractLoadBalancerAwareClient.class) @ConditionalOnMissingClass(value = "org.springframework.retry.support.RetryTemplate") public RibbonLoadBalancingHttpClient ribbonLoadBalancingHttpClient( IClientConfig config, ServerIntrospector serverIntrospector, ILoadBalancer loadBalancer, RetryHandler retryHandler, CloseableHttpClient httpClient) { RibbonLoadBalancingHttpClient client = new RibbonLoadBalancingHttpClient( httpClient, config, serverIntrospector); client.setLoadBalancer(loadBalancer); client.setRetryHandler(retryHandler); Monitors.registerObject("Client_" + this.name, client); return client; } @Bean @ConditionalOnMissingBean(AbstractLoadBalancerAwareClient.class) @ConditionalOnClass(name = "org.springframework.retry.support.RetryTemplate") public RetryableRibbonLoadBalancingHttpClient retryableRibbonLoadBalancingHttpClient( IClientConfig config, ServerIntrospector serverIntrospector, ILoadBalancer loadBalancer, RetryHandler retryHandler, LoadBalancedRetryPolicyFactory loadBalancedRetryPolicyFactory, CloseableHttpClient httpClient) { RetryableRibbonLoadBalancingHttpClient client = new RetryableRibbonLoadBalancingHttpClient( httpClient, config, serverIntrospector, loadBalancedRetryPolicyFactory); client.setLoadBalancer(loadBalancer); client.setRetryHandler(retryHandler); Monitors.registerObject("Client_" + this.name, client); return client; } } @Configuration @ConditionalOnProperty(value = {"ribbon.okhttp.enabled"}) @ConditionalOnClass(name = "okhttp3.OkHttpClient") protected static class OkHttpRibbonConfiguration { @Value("${ribbon.client.name}") private String name = "client"; @Bean @ConditionalOnMissingBean(AbstractLoadBalancerAwareClient.class) @ConditionalOnClass(name = "org.springframework.retry.support.RetryTemplate") public RetryableOkHttpLoadBalancingClient okHttpLoadBalancingClient( IClientConfig config, ServerIntrospector serverIntrospector, ILoadBalancer loadBalancer, RetryHandler retryHandler, LoadBalancedRetryPolicyFactory loadBalancedRetryPolicyFactory, OkHttpClient delegate) { RetryableOkHttpLoadBalancingClient client = new RetryableOkHttpLoadBalancingClient( delegate, config, serverIntrospector, loadBalancedRetryPolicyFactory); client.setLoadBalancer(loadBalancer); client.setRetryHandler(retryHandler); Monitors.registerObject("Client_" + this.name, client); return client; } @Bean @ConditionalOnMissingBean(AbstractLoadBalancerAwareClient.class) @ConditionalOnMissingClass(value = "org.springframework.retry.support.RetryTemplate") public OkHttpLoadBalancingClient retryableOkHttpLoadBalancingClient( IClientConfig config, ServerIntrospector serverIntrospector, ILoadBalancer loadBalancer, RetryHandler retryHandler, OkHttpClient delegate) { OkHttpLoadBalancingClient client = new OkHttpLoadBalancingClient(delegate, config, serverIntrospector); 
client.setLoadBalancer(loadBalancer); client.setRetryHandler(retryHandler); Monitors.registerObject("Client_" + this.name, client); return client; } } @Configuration @RibbonAutoConfiguration.ConditionalOnRibbonRestClient protected static class RestClientRibbonConfiguration { @Value("${ribbon.client.name}") private String name = "client"; /** * Create a Netflix {@link RestClient} integrated with Ribbon if none already * exists in the application context. It is not required for Ribbon to work * properly and is therefore created lazily if ever another component requires it. * * @param config the configuration to use by the underlying Ribbon instance * @param loadBalancer the load balancer to use by the underlying Ribbon instance * @param serverIntrospector server introspector to use by the underlying Ribbon * instance * @param retryHandler retry handler to use by the underlying Ribbon instance * @return a {@link RestClient} instances backed by Ribbon */ @Bean @Lazy @ConditionalOnMissingBean(AbstractLoadBalancerAwareClient.class) public RestClient ribbonRestClient(IClientConfig config, ILoadBalancer loadBalancer, ServerIntrospector serverIntrospector, RetryHandler retryHandler) { RestClient client = new OverrideRestClient(config, serverIntrospector); client.setLoadBalancer(loadBalancer); client.setRetryHandler(retryHandler); Monitors.registerObject("Client_" + this.name, client); return client; } } ======= >>>>>>>
<<<<<<< "feign.okhttp.enabled=true", "spring.cloud.httpclientfactories.ok.enabled=true" }) ======= "feign.httpclient.enabled=false", "feign.okhttp.enabled=true" }) >>>>>>> "feign.httpclient.enabled=false", "feign.okhttp.enabled=true", "spring.cloud.httpclientfactories.ok.enabled=true" })
<<<<<<<
        String reconstructedUrl = loadBalancerClient.reconstructURI(instance, originalUri).toString();
        Request newRequest = Request.create(request.httpMethod(), reconstructedUrl, request.headers(),
                request.body(), request.charset(), request.requestTemplate());
        return executeWithLoadBalancerLifecycleProcessing(delegate, options, newRequest, lbResponse,
                supportedLifecycleProcessors);
=======
        String reconstructedUrl = loadBalancerClient.reconstructURI(instance, originalUri)
                .toString();
        Request newRequest = buildRequest(request, reconstructedUrl);
        return delegate.execute(newRequest, options);
>>>>>>>
        String reconstructedUrl = loadBalancerClient.reconstructURI(instance, originalUri).toString();
        Request newRequest = buildRequest(request, reconstructedUrl);
        return executeWithLoadBalancerLifecycleProcessing(delegate, options, newRequest, lbResponse,
                supportedLifecycleProcessors);
<<<<<<<
=======
import feign.form.MultipartFormContentProcessor;
import feign.form.spring.SpringFormEncoder;
import feign.hystrix.HystrixFeign;
>>>>>>>
import feign.form.MultipartFormContentProcessor;
import feign.form.spring.SpringFormEncoder;
<<<<<<<
=======
    private Encoder springEncoder(ObjectProvider<AbstractFormWriter> formWriterProvider) {
        AbstractFormWriter formWriter = formWriterProvider.getIfAvailable();
        if (formWriter != null) {
            return new SpringEncoder(new SpringPojoFormEncoder(formWriter), this.messageConverters);
        }
        else {
            return new SpringEncoder(new SpringFormEncoder(), this.messageConverters);
        }
    }

    @Configuration(proxyBeanMethods = false)
    @ConditionalOnClass({ HystrixCommand.class, HystrixFeign.class })
    protected static class HystrixFeignConfiguration {

        @Bean
        @Scope("prototype")
        @ConditionalOnMissingBean
        @ConditionalOnProperty(name = "feign.hystrix.enabled")
        public Feign.Builder feignHystrixBuilder() {
            return HystrixFeign.builder();
        }

    }

    private class SpringPojoFormEncoder extends SpringFormEncoder {

        SpringPojoFormEncoder(AbstractFormWriter formWriter) {
            super();
            MultipartFormContentProcessor processor = (MultipartFormContentProcessor) getContentProcessor(
                    MULTIPART);
            processor.addFirstWriter(formWriter);
        }

    }
>>>>>>>
    private Encoder springEncoder(ObjectProvider<AbstractFormWriter> formWriterProvider) {
        AbstractFormWriter formWriter = formWriterProvider.getIfAvailable();
        if (formWriter != null) {
            return new SpringEncoder(new SpringPojoFormEncoder(formWriter), this.messageConverters);
        }
        else {
            return new SpringEncoder(new SpringFormEncoder(), this.messageConverters);
        }
    }

    private class SpringPojoFormEncoder extends SpringFormEncoder {

        SpringPojoFormEncoder(AbstractFormWriter formWriter) {
            super();
            MultipartFormContentProcessor processor = (MultipartFormContentProcessor) getContentProcessor(
                    MULTIPART);
            processor.addFirstWriter(formWriter);
        }

    }
<<<<<<<
=======
import static org.thingsboard.server.dao.DaoUtil.convertDataList;
import static org.thingsboard.server.dao.DaoUtil.getData;
import static org.thingsboard.server.dao.service.Validator.validateId;
import java.io.IOException;
import java.util.List;
import java.util.Optional;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Function;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
>>>>>>>
import static org.thingsboard.server.dao.service.Validator.validateId;
import java.io.IOException;
import java.util.List;
import java.util.Optional;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Function;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
<<<<<<<
import org.thingsboard.server.common.data.Tenant;
=======
import org.thingsboard.server.common.data.asset.Asset;
>>>>>>>
import org.thingsboard.server.common.data.Tenant;
import org.thingsboard.server.common.data.asset.Asset;
<<<<<<<
=======
import org.thingsboard.server.dao.model.AssetEntity;
import org.thingsboard.server.dao.model.CustomerEntity;
import org.thingsboard.server.dao.model.TenantEntity;
>>>>>>>
import org.thingsboard.server.dao.model.AssetEntity;
<<<<<<<
import java.util.List;
=======
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.thingsboard.server.dao.service.Validator;
>>>>>>>
<<<<<<<
=======
import java.util.Collections;
import java.util.Optional;
import com.netflix.loadbalancer.BaseLoadBalancer;
import com.netflix.loadbalancer.ILoadBalancer;
import com.netflix.loadbalancer.Server;
>>>>>>>
import java.util.Optional;
<<<<<<< ======= import org.thingsboard.server.dao.model.*; >>>>>>> import org.thingsboard.server.dao.model.*; <<<<<<< List<Asset> assets = assetDao.findAssetsByTenantId(tenantId.getId(), pageLink); return new TextPageData<>(assets, pageLink); ======= List<AssetEntity> assetEntities = assetDao.findAssetsByTenantId(tenantId.getId(), pageLink); List<Asset> assets = convertDataList(assetEntities); return new TextPageData<>(assets, pageLink); } @Override public TextPageData<Asset> findAssetsByTenantIdAndType(TenantId tenantId, String type, TextPageLink pageLink) { log.trace("Executing findAssetsByTenantIdAndType, tenantId [{}], type [{}], pageLink [{}]", tenantId, type, pageLink); validateId(tenantId, "Incorrect tenantId " + tenantId); validateString(type, "Incorrect type " + type); validatePageLink(pageLink, "Incorrect page link " + pageLink); List<AssetEntity> assetEntities = assetDao.findAssetsByTenantIdAndType(tenantId.getId(), type, pageLink); List<Asset> assets = convertDataList(assetEntities); return new TextPageData<>(assets, pageLink); >>>>>>> List<Asset> assets = assetDao.findAssetsByTenantId(tenantId.getId(), pageLink); return new TextPageData<>(assets, pageLink); } @Override public TextPageData<Asset> findAssetsByTenantIdAndType(TenantId tenantId, String type, TextPageLink pageLink) { log.trace("Executing findAssetsByTenantIdAndType, tenantId [{}], type [{}], pageLink [{}]", tenantId, type, pageLink); validateId(tenantId, "Incorrect tenantId " + tenantId); validateString(type, "Incorrect type " + type); validatePageLink(pageLink, "Incorrect page link " + pageLink); List<Asset> assets = assetDao.findAssetsByTenantIdAndType(tenantId.getId(), type, pageLink); return new TextPageData<>(assets, pageLink); <<<<<<< List<Asset> assets = assetDao.findAssetsByTenantIdAndCustomerId(tenantId.getId(), customerId.getId(), pageLink); return new TextPageData<Asset>(assets, pageLink); ======= List<AssetEntity> assetEntities = assetDao.findAssetsByTenantIdAndCustomerId(tenantId.getId(), customerId.getId(), pageLink); List<Asset> assets = convertDataList(assetEntities); return new TextPageData<>(assets, pageLink); } @Override public TextPageData<Asset> findAssetsByTenantIdAndCustomerIdAndType(TenantId tenantId, CustomerId customerId, String type, TextPageLink pageLink) { log.trace("Executing findAssetsByTenantIdAndCustomerIdAndType, tenantId [{}], customerId [{}], type [{}], pageLink [{}]", tenantId, customerId, type, pageLink); validateId(tenantId, "Incorrect tenantId " + tenantId); validateId(customerId, "Incorrect customerId " + customerId); validateString(type, "Incorrect type " + type); validatePageLink(pageLink, "Incorrect page link " + pageLink); List<AssetEntity> assetEntities = assetDao.findAssetsByTenantIdAndCustomerIdAndType(tenantId.getId(), customerId.getId(), type, pageLink); List<Asset> assets = convertDataList(assetEntities); return new TextPageData<>(assets, pageLink); >>>>>>> List<Asset> assets = assetDao.findAssetsByTenantIdAndCustomerId(tenantId.getId(), customerId.getId(), pageLink); return new TextPageData<Asset>(assets, pageLink); } @Override public TextPageData<Asset> findAssetsByTenantIdAndCustomerIdAndType(TenantId tenantId, CustomerId customerId, String type, TextPageLink pageLink) { log.trace("Executing findAssetsByTenantIdAndCustomerIdAndType, tenantId [{}], customerId [{}], type [{}], pageLink [{}]", tenantId, customerId, type, pageLink); validateId(tenantId, "Incorrect tenantId " + tenantId); validateId(customerId, "Incorrect customerId " + customerId); 
validateString(type, "Incorrect type " + type); validatePageLink(pageLink, "Incorrect page link " + pageLink); List<Asset> assets = assetDao.findAssetsByTenantIdAndCustomerIdAndType(tenantId.getId(), customerId.getId(), type, pageLink); return new TextPageData<>(assets, pageLink);
<<<<<<< public Mono<Organization> getByName(String name) { return repository.findByName(name, AclPermission.READ_ORGANIZATIONS); } private Set<Policy> crudOrgPolicy(User user) { Set<Policy> policySet = user.getPolicies().stream() .filter(policy -> policy.getPermission().equals(USER_MANAGE_ORGANIZATIONS.getValue()) ).collect(Collectors.toSet()); Set<Policy> documentPolicies = policySet.stream() .map(policy -> { AclPermission aclPermission = AclPermission .getPermissionByValue(policy.getPermission(), User.class); // Get all the child policies for the given policy and aclPermission return policyGenerator.getChildPolicies(policy, aclPermission, user); }).flatMap(Collection::stream) .collect(Collectors.toSet()); return documentPolicies; ======= public Mono<Organization> getBySlug(String slug) { return repository.findBySlug(slug); } @Override public Mono<String> getNextUniqueSlug(String initialSlug) { return repository.countSlugsByPrefix(initialSlug) .map(max -> initialSlug + (max == 0 ? "" : (max + 1))); >>>>>>> public Mono<Organization> getBySlug(String slug) { return repository.findBySlug(slug); } @Override public Mono<String> getNextUniqueSlug(String initialSlug) { return repository.countSlugsByPrefix(initialSlug) .map(max -> initialSlug + (max == 0 ? "" : (max + 1))); } private Set<Policy> crudOrgPolicy(User user) { Set<Policy> policySet = user.getPolicies().stream() .filter(policy -> policy.getPermission().equals(USER_MANAGE_ORGANIZATIONS.getValue()) ).collect(Collectors.toSet()); Set<Policy> documentPolicies = policySet.stream() .map(policy -> { AclPermission aclPermission = AclPermission .getPermissionByValue(policy.getPermission(), User.class); // Get all the child policies for the given policy and aclPermission return policyGenerator.getChildPolicies(policy, aclPermission, user); }).flatMap(Collection::stream) .collect(Collectors.toSet()); return documentPolicies; <<<<<<< boolean isManageOrgPolicyPresent = user.getPolicies().stream() .filter(policy -> policy.getPermission().equals(USER_MANAGE_ORGANIZATIONS.getValue())) .findFirst() .isPresent(); if (!isManageOrgPolicyPresent) { return Mono.error(new AppsmithException(AppsmithError.UNAUTHORIZED_ACCESS)); } // Set the admin policies for this organization & user organization.setPolicies(crudOrgPolicy(user)); Mono<Organization> organizationMono = Mono.just(organization) ======= Mono<Organization> setSlugMono; if (organization.getName() == null) { setSlugMono = Mono.just(organization); } else { setSlugMono = getNextUniqueSlug(organization.makeSlug()) .map(slug -> { organization.setSlug(slug); return organization; }); } Mono<Organization> organizationMono = setSlugMono >>>>>>> boolean isManageOrgPolicyPresent = user.getPolicies().stream() .filter(policy -> policy.getPermission().equals(USER_MANAGE_ORGANIZATIONS.getValue())) .findFirst() .isPresent(); if (!isManageOrgPolicyPresent) { return Mono.error(new AppsmithException(AppsmithError.UNAUTHORIZED_ACCESS)); } // Set the admin policies for this organization & user organization.setPolicies(crudOrgPolicy(user)); Mono<Organization> setSlugMono; if (organization.getName() == null) { setSlugMono = Mono.just(organization); } else { setSlugMono = getNextUniqueSlug(organization.makeSlug()) .map(slug -> { organization.setSlug(slug); return organization; }); } Mono<Organization> organizationMono = setSlugMono
<<<<<<<
    private final PolicyGenerator policyGenerator;
=======
    private final SequenceService sequenceService;
>>>>>>>
    private final PolicyGenerator policyGenerator;
    private final SequenceService sequenceService;
<<<<<<<
                             PluginExecutorHelper pluginExecutorHelper, PolicyGenerator policyGenerator) {
=======
                             PluginExecutorHelper pluginExecutorHelper, SequenceService sequenceService) {
>>>>>>>
                             PluginExecutorHelper pluginExecutorHelper, PolicyGenerator policyGenerator, SequenceService sequenceService) {
<<<<<<<
        this.policyGenerator = policyGenerator;
=======
        this.sequenceService = sequenceService;
>>>>>>>
        this.policyGenerator = policyGenerator;
        this.sequenceService = sequenceService;
<<<<<<<
        return datasourceMono
                .flatMap(datasource1 -> sessionUserService.getCurrentUser()
                        .flatMap(user -> {
                            // Create policies for this datasource -> This datasource should inherit its permissions and policies from
                            // the organization and this datasource should also allow the current user to crud this datasource.
                            return organizationService.findById(datasource1.getOrganizationId(), AclPermission.ORGANIZATION_MANAGE_APPLICATIONS)
                                    .switchIfEmpty(Mono.error(new AppsmithException(AppsmithError.NO_RESOURCE_FOUND, FieldName.ORGANIZATION, datasource1.getOrganizationId())))
                                    .map(org -> {
                                        Set<Policy> policySet = org.getPolicies().stream()
                                                .filter(policy -> policy.getPermission().equals(ORGANIZATION_MANAGE_APPLICATIONS.getValue())
                                                        || policy.getPermission().equals(ORGANIZATION_READ_APPLICATIONS.getValue())
                                                ).collect(Collectors.toSet());
                                        Set<Policy> documentPolicies = policyGenerator.getAllChildPolicies(user, policySet, Organization.class, Datasource.class);
                                        datasource1.setPolicies(documentPolicies);
                                        return datasource1;
                                    });
                        })
                )
                .flatMap(this::validateAndSaveDatasourceToRepository);
    }

    @Override
    public Mono<String> getNextUniqueName(String namePrefix) {
        return repository.countNamesByPrefix(namePrefix)
                .map(max -> namePrefix + (max == 0 ? "" : " " + (max + 1)));
=======
        return datasourceMono
                .flatMap(this::validateAndSaveDatasourceToRepository);
>>>>>>>
        return datasourceMono
                .flatMap(datasource1 -> sessionUserService.getCurrentUser()
                        .flatMap(user -> {
                            // Create policies for this datasource -> This datasource should inherit its permissions and policies from
                            // the organization and this datasource should also allow the current user to crud this datasource.
                            return organizationService.findById(datasource1.getOrganizationId(), AclPermission.ORGANIZATION_MANAGE_APPLICATIONS)
                                    .switchIfEmpty(Mono.error(new AppsmithException(AppsmithError.NO_RESOURCE_FOUND, FieldName.ORGANIZATION, datasource1.getOrganizationId())))
                                    .map(org -> {
                                        Set<Policy> policySet = org.getPolicies().stream()
                                                .filter(policy -> policy.getPermission().equals(ORGANIZATION_MANAGE_APPLICATIONS.getValue())
                                                        || policy.getPermission().equals(ORGANIZATION_READ_APPLICATIONS.getValue())
                                                ).collect(Collectors.toSet());
                                        Set<Policy> documentPolicies = policyGenerator.getAllChildPolicies(user, policySet, Organization.class, Datasource.class);
                                        datasource1.setPolicies(documentPolicies);
                                        return datasource1;
                                    });
                        })
                )
                .flatMap(this::validateAndSaveDatasourceToRepository);
<<<<<<<
import com.appsmith.server.repositories.GroupRepository;
=======
import com.appsmith.server.repositories.ApplicationRepository;
>>>>>>>
import com.appsmith.server.repositories.ApplicationRepository;
<<<<<<<
import java.util.Collection;
=======
import java.util.HashMap;
>>>>>>>
import java.util.Collection;
import java.util.HashMap;
<<<<<<<
    private final GroupRepository groupRepository;
=======
>>>>>>>
<<<<<<<
                       GroupRepository groupRepository,
=======
>>>>>>>
<<<<<<<
        this.groupRepository = groupRepository;
=======
>>>>>>>
<<<<<<<
    @Override
    public Mono<Collection<GrantedAuthority>> getAnonymousAuthorities() {
        return repository.findByEmail("anonymousUser")
                .map(user -> user.getAuthorities());
    }
=======
    @Override
    public Mono<User> create(User user) {
        return createUser(user, null);
    }
>>>>>>>
    @Override
    public Mono<Collection<GrantedAuthority>> getAnonymousAuthorities() {
        return repository.findByEmail("anonymousUser")
                .map(user -> user.getAuthorities());
    }

    @Override
    public Mono<User> create(User user) {
        return createUser(user, null);
    }
<<<<<<<
    public void createValidPage() {
        Policy managePagePolicy = Policy.builder().permission(MANAGE_PAGES.getValue())
                .users(Set.of("api_user"))
                .build();
        Policy readPagePolicy = Policy.builder().permission(READ_PAGES.getValue())
                .users(Set.of("api_user"))
                .build();
=======
    public void createValidPage() throws ParseException {
>>>>>>>
    public void createValidPage() throws ParseException {
        Policy managePagePolicy = Policy.builder().permission(MANAGE_PAGES.getValue())
                .users(Set.of("api_user"))
                .build();
        Policy readPagePolicy = Policy.builder().permission(READ_PAGES.getValue())
                .users(Set.of("api_user"))
                .build();
<<<<<<<
        assertThat(page.getPolicies()).isNotEmpty();
        assertThat(page.getPolicies()).containsOnly(managePagePolicy, readPagePolicy);
=======
        assertThat(page.getLayouts()).isNotEmpty();
        assertThat(page.getLayouts().get(0).getDsl()).isEqualTo(parsedJson);
>>>>>>>
        assertThat(page.getPolicies()).isNotEmpty();
        assertThat(page.getPolicies()).containsOnly(managePagePolicy, readPagePolicy);
        assertThat(page.getLayouts()).isNotEmpty();
        assertThat(page.getLayouts().get(0).getDsl()).isEqualTo(parsedJson);
<<<<<<<
=======
import java.net.URLEncoder;
>>>>>>>
import java.net.URLEncoder;
<<<<<<<
=======
import static org.thingsboard.server.dao.DaoUtil.convertDataList;
import static org.thingsboard.server.dao.DaoUtil.getData;
import static org.thingsboard.server.dao.service.Validator.validateId;
import static org.thingsboard.server.dao.service.Validator.validatePageLink;
import static org.thingsboard.server.dao.service.Validator.validateString;
import java.util.List;
import com.google.common.base.Function;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
>>>>>>>
import static org.thingsboard.server.dao.DaoUtil.getData;
import static org.thingsboard.server.dao.service.Validator.validateId;
import static org.thingsboard.server.dao.service.Validator.validatePageLink;
import static org.thingsboard.server.dao.service.Validator.validateString;
import java.util.List;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
<<<<<<<
import java.nio.channels.spi.AbstractInterruptibleChannel;
import java.nio.channels.spi.AbstractSelectableChannel;
=======
import java.nio.channels.SocketChannel;
import java.nio.channels.spi.AbstractInterruptibleChannel;
>>>>>>>
import java.nio.channels.spi.AbstractInterruptibleChannel;
import java.nio.channels.spi.AbstractSelectableChannel;
import java.nio.channels.SocketChannel;
<<<<<<<
        if (event.values[0] < steps) { // should always be
                                       // increasing
            if (Logger.LOG)
                Logger.log(event.sensor.getName() + " invalid received " + event.values[0] +
                        " steps was set to " + steps + " | acc " + event.accuracy + " time " +
                        new Date(event.timestamp / 1000000).toLocaleString());
            consecutiveIncorrectValues++;
            if (consecutiveIncorrectValues >= 10 && lastIncorrectValue < event.values[0]) {
                // received 10 incorrect step values in a row and this value is
                // increasing
                // --> assume this is now the correct value
                // --> don't return method to get to the reset part
                consecutiveIncorrectValues = 0;
            } else if (consecutiveIncorrectValues >= 10 && lastIncorrectValue == event.values[0]) {
                if (Logger.LOG)
                    Logger.log("re-register");
                SensorManager sm = (SensorManager) getSystemService(SENSOR_SERVICE);
                try {
                    sm.unregisterListener(this);
                } catch (Exception e) {
                    if (Logger.LOG)
                        Logger.log(e);
                    e.printStackTrace();
                }
                sm.registerListener(this, sm.getDefaultSensor(Sensor.TYPE_STEP_COUNTER),
                        SensorManager.SENSOR_DELAY_NORMAL, 0);
                return;
            } else {
                lastIncorrectValue = (int) event.values[0];
                return;
            }
        } else {
            if (consecutiveIncorrectValues > 1 && Logger.LOG)
                Logger.log(event.sensor.getName() + " correct received " + event.values[0] +
                        " steps was set to " + steps + " | acc " + event.accuracy + " time " +
                        new Date(event.timestamp / 1000000).toLocaleString());
            consecutiveIncorrectValues = 0;
        }

        // it seems like sometimes the sensor reports a lower value then it was
        // a midnight, even if there was no reboot in between. If this happens,
        // reset the offset for today
        if (today_offset > Integer.MIN_VALUE && today_offset + (int) event.values[0] < 0) {
            // try to save already taken steps
            int alreadyTaken = Math.max(0, today_offset + steps);
            Database db = new Database(this);
            // add the difference (which is negative) to the current database
            // value, so that the steps for today are now 0 (instead of some
            // negative step value). Then add the 'already taken' steps
            db.updateSteps(Util.getToday(), -(today_offset + (int) event.values[0]) + alreadyTaken);
            db.close();
            today_offset = -(int) event.values[0];
            if (Logger.LOG) {
                Logger.log("reset todays offset to " + today_offset +
                        " already taken steps today: " + alreadyTaken);
            }
        }
=======
>>>>>>>
<<<<<<<
=======
    @BindView(R.id.tvRating)
    TextView tvRating;
    @BindView(R.id.metaRatingView)
    TextView metascore_setter;
>>>>>>>
    @BindView(R.id.tvRating)
    TextView tvRating;
    @BindView(R.id.metaRatingView)
    TextView metascore_setter;
<<<<<<<
=======
    @BindView(R.id.layout_imdb)
    LinearLayout layout_imdb;
    @BindView(R.id.layout_flixi)
    LinearLayout layout_flixi;
    @BindView(R.id.layout_meta)
    LinearLayout layout_meta;
    @BindView(R.id.layout_tmdb)
    LinearLayout layout_tmdb;
    @BindView(R.id.layout_tomato)
    LinearLayout layout_tomato;
>>>>>>>
    @BindView(R.id.layout_imdb)
    LinearLayout layout_imdb;
    @BindView(R.id.layout_flixi)
    LinearLayout layout_flixi;
    @BindView(R.id.layout_meta)
    LinearLayout layout_meta;
    @BindView(R.id.layout_tmdb)
    LinearLayout layout_tmdb;
    @BindView(R.id.layout_tomato)
    LinearLayout layout_tomato;
<<<<<<<
    private String movie_id, trailor = null, trailer = null, movie_desc, quality, movie_tagline, movie_rating, movie_rating_tomatometer, movie_rating_audience, movie_rating_metascore, show_centre_img_url, movie_title, movie_id_final;
=======
    private String movie_id,trailor = null,trailer = null, movie_desc, quality, movie_tagline, movie_rating,movie_rating_imdb,movie_rating_tmdb, movie_rating_tomatometer,movie_rating_metascore, movie_rating_audience, show_centre_img_url, movie_title, movie_id_final;
>>>>>>>
    private String movie_id,trailor = null,trailer = null, movie_desc, quality, movie_tagline, movie_rating,movie_rating_imdb,movie_rating_tmdb, movie_rating_tomatometer,movie_rating_metascore, movie_rating_audience, show_centre_img_url, movie_title, movie_id_final;
<<<<<<<
                youtubeIcon.setColorFilter(trailorSwatch.getBodyTextColor(), PorterDuff.Mode.SRC_IN);
=======
                tvRating.setTextColor(trailorSwatch.getTitleTextColor());
                // det_rating.setTextColor(trailorSwatch.getBodyTextColor());
>>>>>>>
                youtubeIcon.setColorFilter(trailorSwatch.getBodyTextColor(), PorterDuff.Mode.SRC_IN);
<<<<<<<
                youtubeIcon.setColorFilter(trailorSwatch.getBodyTextColor(), PorterDuff.Mode.SRC_IN);
=======
                tvRating.setTextColor(trailorSwatch.getTitleTextColor());
                //det_rating.setTextColor(trailorSwatch.getBodyTextColor());
>>>>>>>
                youtubeIcon.setColorFilter(trailorSwatch.getBodyTextColor(), PorterDuff.Mode.SRC_IN);
<<<<<<<
                youtubeIcon.setColorFilter(trailorSwatch.getBodyTextColor(), PorterDuff.Mode.SRC_IN);
=======
                tvRating.setTextColor(trailorSwatch.getTitleTextColor());
                // det_rating.setTextColor(trailorSwatch.getBodyTextColor());
>>>>>>>
                youtubeIcon.setColorFilter(trailorSwatch.getBodyTextColor(), PorterDuff.Mode.SRC_IN);
<<<<<<< import com.jd.journalkeeper.core.entry.reserved.LeaderAnnouncementEntry; import com.jd.journalkeeper.core.entry.reserved.LeaderAnnouncementEntrySerializer; import com.jd.journalkeeper.exceptions.ServerBusyException; import com.jd.journalkeeper.utils.event.EventType; ======= >>>>>>> <<<<<<< import com.jd.journalkeeper.rpc.client.*; import com.jd.journalkeeper.rpc.server.*; import com.jd.journalkeeper.utils.threads.AsyncLoopThread; import com.jd.journalkeeper.utils.threads.ThreadBuilder; ======= import com.jd.journalkeeper.rpc.client.LastAppliedResponse; import com.jd.journalkeeper.rpc.client.QueryStateRequest; import com.jd.journalkeeper.rpc.client.QueryStateResponse; import com.jd.journalkeeper.rpc.client.UpdateClusterStateRequest; import com.jd.journalkeeper.rpc.client.UpdateClusterStateResponse; import com.jd.journalkeeper.rpc.server.AsyncAppendEntriesRequest; import com.jd.journalkeeper.rpc.server.AsyncAppendEntriesResponse; import com.jd.journalkeeper.rpc.server.RequestVoteRequest; import com.jd.journalkeeper.rpc.server.RequestVoteResponse; import com.jd.journalkeeper.rpc.server.ServerRpc; import com.jd.journalkeeper.utils.event.EventType; import com.jd.journalkeeper.utils.threads.LoopThread; >>>>>>> import com.jd.journalkeeper.rpc.client.*; import com.jd.journalkeeper.rpc.server.*; import com.jd.journalkeeper.utils.threads.AsyncLoopThread; import com.jd.journalkeeper.utils.threads.ThreadBuilder; <<<<<<< if(logger.isDebugEnabled()) { logger.debug("Append journal entry, {}", voterInfo()); } ======= if (logger.isDebugEnabled()) { logger.debug("Append journal entry, {}", voterInfo()); } >>>>>>> if(logger.isDebugEnabled()) { logger.debug("Append journal entry, {}", voterInfo()); } <<<<<<< count = followers.stream() .filter(follower -> follower.nextIndex - follower.repStartIndex <= config.getOnFlyEntries()) ======= // TODO 有并发问题 // TODO 没必要并发 // count = followers.parallelStream() count = new ArrayList<Follower>(followers).stream() >>>>>>> count = followers.stream() .filter(follower -> follower.nextIndex - follower.repStartIndex <= config.getOnFlyEntries()) <<<<<<< ======= checkTerm(request.getTerm()); if(voterState != VoterState.FOLLOWER) { convertToFollower(); } if(request.getEntries() != null && !request.getEntries().isEmpty()) { if (logger.isDebugEnabled()) { logger.debug("Received appendEntriesRequest, term: {}, leader: {}, prevLogIndex: {}, prevLogTerm: {}, " + "entries: {}, leaderCommit: {}, {}.", request.getTerm(), request.getLeader(), request.getPrevLogIndex(), request.getPrevLogTerm(), request.getEntries().size(), request.getLeaderCommit(), voterInfo()); } } >>>>>>> <<<<<<< ======= // reset heartbeat lastHeartbeat = System.currentTimeMillis(); if (logger.isDebugEnabled()) { logger.debug("Update lastHeartbeat, {}.", voterInfo()); } if(!request.getLeader().equals(leader)) { leader = request.getLeader(); } >>>>>>> <<<<<<< logger.debug("Send appendEntriesResponse, success: {}, journalIndex: {}, entryCount: {}, term: {}, " + "{}.", response.isSuccess(), response.getJournalIndex(), response.getEntryCount(), response.getTerm(), voterInfo()); ======= if (logger.isDebugEnabled()) { logger.debug("Send appendEntriesResponse, success: {}, journalIndex: {}, entryCount: {}, term: {}, " + "{}.", response.isSuccess(), response.getJournalIndex(), response.getEntryCount(), response.getTerm(), voterInfo()); } >>>>>>> if (logger.isDebugEnabled()) { logger.debug("Send appendEntriesResponse, success: {}, journalIndex: {}, entryCount: {}, term: {}, " + "{}.", response.isSuccess(), 
response.getJournalIndex(), response.getEntryCount(), response.getTerm(), voterInfo()); }
<<<<<<<
    @Override
    public CompletableFuture<GetSnapshotsResponse> getSnapshots() {
        if (voterState.getState() == VoterState.LEADER && leader != null) {
            return CompletableFuture.completedFuture(
                    snapshots.values()
                            .stream()
                            .map((state) -> new SnapshotEntry(state.getPath().toString(),
                                    state.getLastIncludedIndex(), state.getLastIncludedTerm(),
                                    state.getMinOffset(), state.timestamp())).collect(Collectors.toList()))
                    .thenApply(SnapshotsEntry::new)
                    .thenApply(GetSnapshotsResponse::new);
        } else {
            return CompletableFuture.completedFuture(new GetSnapshotsResponse(new NotLeaderException(leaderUri)));
        }
    }

    private void ensureLeadership(Leader<E, ER, Q, QR> finalLeader) {
        if(voterState() != VoterState.LEADER || finalLeader == null) {
            throw new NotLeaderException(leaderUri);
        }
=======
    private boolean isLeaderAvailable(Leader<E, ER, Q, QR> finalLeader) {
        return voterState() == VoterState.LEADER && finalLeader != null;
>>>>>>>
    private boolean isLeaderAvailable(Leader<E, ER, Q, QR> finalLeader) {
        return voterState() == VoterState.LEADER && finalLeader != null;
    }

    @Override
    public CompletableFuture<GetSnapshotsResponse> getSnapshots() {
        if (voterState.getState() == VoterState.LEADER && leader != null) {
            return CompletableFuture.completedFuture(
                    snapshots.values()
                            .stream()
                            .map((state) -> new SnapshotEntry(state.getPath().toString(),
                                    state.getLastIncludedIndex(), state.getLastIncludedTerm(),
                                    state.getMinOffset(), state.timestamp())).collect(Collectors.toList()))
                    .thenApply(SnapshotsEntry::new)
                    .thenApply(GetSnapshotsResponse::new);
        } else {
            return CompletableFuture.completedFuture(new GetSnapshotsResponse(new NotLeaderException(leaderUri)));
        }
    }

    private void ensureLeadership(Leader<E, ER, Q, QR> finalLeader) {
        if(voterState() != VoterState.LEADER || finalLeader == null) {
            throw new NotLeaderException(leaderUri);
        }
<<<<<<<
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicLong;
=======
>>>>>>>
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicLong;
<<<<<<<
import com.jd.journalkeeper.core.api.RaftJournal;
import com.jd.journalkeeper.core.api.ResponseConfig;
import com.jd.journalkeeper.core.server.Server;
import com.jd.journalkeeper.exceptions.ServerBusyException;
import com.jd.journalkeeper.utils.event.EventType;
import com.jd.journalkeeper.utils.event.EventWatcher;
=======
>>>>>>>
import com.jd.journalkeeper.core.api.RaftJournal;
import com.jd.journalkeeper.core.api.ResponseConfig;
import com.jd.journalkeeper.core.server.Server;
import com.jd.journalkeeper.exceptions.ServerBusyException;
import com.jd.journalkeeper.rpc.client.QueryStateResponse;
import com.jd.journalkeeper.utils.event.EventType;
import com.jd.journalkeeper.utils.event.EventWatcher;
<<<<<<<
import com.jd.journalkeeper.rpc.client.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
=======
import com.jd.journalkeeper.rpc.client.ClientServerRpc;
import com.jd.journalkeeper.rpc.client.ClientServerRpcAccessPoint;
import com.jd.journalkeeper.rpc.client.GetServersResponse;
import com.jd.journalkeeper.rpc.client.QueryStateRequest;
import com.jd.journalkeeper.rpc.client.UpdateClusterStateRequest;
import com.jd.journalkeeper.utils.event.EventWatcher;
>>>>>>>
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.jd.journalkeeper.rpc.client.ClientServerRpc;
import com.jd.journalkeeper.rpc.client.ClientServerRpcAccessPoint;
import com.jd.journalkeeper.rpc.client.GetServersResponse;
import com.jd.journalkeeper.rpc.client.QueryStateRequest;
import com.jd.journalkeeper.rpc.client.UpdateClusterStateRequest;
import com.jd.journalkeeper.utils.event.EventWatcher;
<<<<<<<
    private ConcurrentMap<String, PreparedStatement> preparedStatementMap = new ConcurrentHashMap<>();

    protected PreparedStatement prepare(String query) {
        return preparedStatementMap.computeIfAbsent(query, i -> getSession().prepare(i));
    }
=======
    @Autowired
    private BufferedRateLimiter rateLimiter;
>>>>>>>
    private ConcurrentMap<String, PreparedStatement> preparedStatementMap = new ConcurrentHashMap<>();

    @Autowired
    private BufferedRateLimiter rateLimiter;
<<<<<<< r.put("isVirtual", this.isVirtual()); ======= r.put("serial", this.getSerialNumber()); >>>>>>> r.put("isVirtual", this.isVirtual()); r.put("serial", this.getSerialNumber());
<<<<<<<
        List<VisualizableEntityAttribute> visualizableEntityAttributeList =
=======
        List<VisualizableSensor> visualizableSensorList =
>>>>>>>
        List<VisualizableEntityAttribute> visualizableEntityAttributeList =
<<<<<<<
        visualizablePeripheralMap.values().forEach(this::removeChild);
=======
>>>>>>>
<<<<<<<
        visualizablePeripheralMap.values().forEach(this::addChild);
=======
        visualizableSensorMap.values().forEach(i -> i.raiseToTop());
>>>>>>>
        visualizablePeripheralMap.values().forEach(PNode::raiseToTop);
<<<<<<<
import org.thingsboard.server.common.data.*;
=======
import org.thingsboard.server.common.data.Customer;
import org.thingsboard.server.common.data.Dashboard;
import org.thingsboard.server.common.data.DashboardInfo;
import org.thingsboard.server.common.data.DataConstants;
import org.thingsboard.server.common.data.Device;
import org.thingsboard.server.common.data.DeviceInfo;
import org.thingsboard.server.common.data.DeviceProfile;
import org.thingsboard.server.common.data.EntityType;
import org.thingsboard.server.common.data.EntityView;
import org.thingsboard.server.common.data.EntityViewInfo;
import org.thingsboard.server.common.data.HasName;
import org.thingsboard.server.common.data.HasTenantId;
import org.thingsboard.server.common.data.Tenant;
import org.thingsboard.server.common.data.TenantInfo;
import org.thingsboard.server.common.data.TenantProfile;
import org.thingsboard.server.common.data.User;
>>>>>>>
import org.thingsboard.server.common.data.*;
<<<<<<<
import org.thingsboard.server.common.data.id.*;
=======
import org.thingsboard.server.common.data.id.AlarmId;
import org.thingsboard.server.common.data.id.AssetId;
import org.thingsboard.server.common.data.id.CustomerId;
import org.thingsboard.server.common.data.id.DashboardId;
import org.thingsboard.server.common.data.id.DeviceId;
import org.thingsboard.server.common.data.id.DeviceProfileId;
import org.thingsboard.server.common.data.id.EntityId;
import org.thingsboard.server.common.data.id.EntityIdFactory;
import org.thingsboard.server.common.data.id.EntityViewId;
import org.thingsboard.server.common.data.id.RuleChainId;
import org.thingsboard.server.common.data.id.RuleNodeId;
import org.thingsboard.server.common.data.id.TenantId;
import org.thingsboard.server.common.data.id.TenantProfileId;
import org.thingsboard.server.common.data.id.UserId;
import org.thingsboard.server.common.data.id.WidgetTypeId;
import org.thingsboard.server.common.data.id.WidgetsBundleId;
>>>>>>>
import org.thingsboard.server.common.data.id.*;
import org.thingsboard.server.common.data.id.AlarmId;
import org.thingsboard.server.common.data.id.AssetId;
import org.thingsboard.server.common.data.id.CustomerId;
import org.thingsboard.server.common.data.id.DashboardId;
import org.thingsboard.server.common.data.id.DeviceId;
import org.thingsboard.server.common.data.id.DeviceProfileId;
import org.thingsboard.server.common.data.id.EntityId;
import org.thingsboard.server.common.data.id.EntityIdFactory;
import org.thingsboard.server.common.data.id.EntityViewId;
import org.thingsboard.server.common.data.id.RuleChainId;
import org.thingsboard.server.common.data.id.RuleNodeId;
import org.thingsboard.server.common.data.id.TenantId;
import org.thingsboard.server.common.data.id.TenantProfileId;
import org.thingsboard.server.common.data.id.UserId;
import org.thingsboard.server.common.data.id.WidgetTypeId;
import org.thingsboard.server.common.data.id.WidgetsBundleId;
<<<<<<< import br.com.caelum.vraptor.util.ISO8601Util; ======= import br.com.caelum.vraptor.view.GenericController; >>>>>>> import br.com.caelum.vraptor.util.ISO8601Util; import br.com.caelum.vraptor.view.GenericController; <<<<<<< @Test public void shouldByPassDeserializationWhenHasNoContent() { InputStream stream = new ByteArrayInputStream("".getBytes()); when(provider.parameterNamesFor(bark.getMethod())).thenReturn(new String[] { "pet" }); Object[] deserialized = deserializer.deserialize(stream, bark); assertThat(deserialized.length, is(1)); assertThat(deserialized[0], is(nullValue())); } ======= @Test public void shouldDeserializeFromGenericTypeOneParam() { InputStream stream = new ByteArrayInputStream( "{'entity':{'name':'Brutus','age':7,'birthday':'06/01/1987'}}".getBytes()); ResourceClass resourceClass = new DefaultResourceClass(ExtGenericController.class); Method method = new Mirror().on(GenericController.class).reflect().method("method").withAnyArgs(); ResourceMethod resource = new DefaultResourceMethod(resourceClass, method); when(provider.parameterNamesFor(resource.getMethod())).thenReturn(new String[] { "entity" }); Object[] deserialized = deserializer.deserialize(stream, resource); Dog dog = (Dog) deserialized[0]; assertThat(dog.name, equalTo("Brutus")); } @Test public void shouldDeserializeFromGenericTypeTwoParams() { InputStream stream = new ByteArrayInputStream( "{'entity':{'name':'Brutus','age':7,'birthday':'06/01/1987'}, 'param': 'test', 'over': 'value'}".getBytes()); ResourceClass resourceClass = new DefaultResourceClass(ExtGenericController.class); Method method = new Mirror().on(GenericController.class).reflect().method("anotherMethod").withAnyArgs(); ResourceMethod resource = new DefaultResourceMethod(resourceClass, method); when(provider.parameterNamesFor(resource.getMethod())).thenReturn(new String[] { "entity", "param", "over" }); Object[] deserialized = deserializer.deserialize(stream, resource); Dog dog = (Dog) deserialized[0]; String param = (String) deserialized[1]; assertThat(dog.name, equalTo("Brutus")); assertThat(param, equalTo("test")); assertThat(deserialized.length, equalTo(2)); } @Test public void shouldDeserializeWithoutGenericType() { InputStream stream = new ByteArrayInputStream( "{'param': 'test'}".getBytes()); ResourceClass resourceClass = new DefaultResourceClass(ExtGenericController.class); Method method = new Mirror().on(GenericController.class).reflect().method("methodWithoutGenericType").withArgs(String.class); ResourceMethod resource = new DefaultResourceMethod(resourceClass, method); when(provider.parameterNamesFor(resource.getMethod())).thenReturn(new String[] { "param" }); Object[] deserialized = deserializer.deserialize(stream, resource); String param = (String) deserialized[0]; assertThat(param, equalTo("test")); } >>>>>>> @Test public void shouldByPassDeserializationWhenHasNoContent() { InputStream stream = new ByteArrayInputStream("".getBytes()); when(provider.parameterNamesFor(bark.getMethod())).thenReturn(new String[] { "pet" }); Object[] deserialized = deserializer.deserialize(stream, bark); assertThat(deserialized.length, is(1)); assertThat(deserialized[0], is(nullValue())); } @Test public void shouldDeserializeFromGenericTypeOneParam() { InputStream stream = new ByteArrayInputStream( "{'entity':{'name':'Brutus','age':7,'birthday':'06/01/1987'}}".getBytes()); ResourceClass resourceClass = new DefaultResourceClass(ExtGenericController.class); Method method = new 
Mirror().on(GenericController.class).reflect().method("method").withAnyArgs(); ResourceMethod resource = new DefaultResourceMethod(resourceClass, method); when(provider.parameterNamesFor(resource.getMethod())).thenReturn(new String[] { "entity" }); Object[] deserialized = deserializer.deserialize(stream, resource); Dog dog = (Dog) deserialized[0]; assertThat(dog.name, equalTo("Brutus")); } @Test public void shouldDeserializeFromGenericTypeTwoParams() { InputStream stream = new ByteArrayInputStream( "{'entity':{'name':'Brutus','age':7,'birthday':'06/01/1987'}, 'param': 'test', 'over': 'value'}".getBytes()); ResourceClass resourceClass = new DefaultResourceClass(ExtGenericController.class); Method method = new Mirror().on(GenericController.class).reflect().method("anotherMethod").withAnyArgs(); ResourceMethod resource = new DefaultResourceMethod(resourceClass, method); when(provider.parameterNamesFor(resource.getMethod())).thenReturn(new String[] { "entity", "param", "over" }); Object[] deserialized = deserializer.deserialize(stream, resource); Dog dog = (Dog) deserialized[0]; String param = (String) deserialized[1]; assertThat(dog.name, equalTo("Brutus")); assertThat(param, equalTo("test")); assertThat(deserialized.length, equalTo(2)); } @Test public void shouldDeserializeWithoutGenericType() { InputStream stream = new ByteArrayInputStream( "{'param': 'test'}".getBytes()); ResourceClass resourceClass = new DefaultResourceClass(ExtGenericController.class); Method method = new Mirror().on(GenericController.class).reflect().method("methodWithoutGenericType").withArgs(String.class); ResourceMethod resource = new DefaultResourceMethod(resourceClass, method); when(provider.parameterNamesFor(resource.getMethod())).thenReturn(new String[] { "param" }); Object[] deserialized = deserializer.deserialize(stream, resource); String param = (String) deserialized[0]; assertThat(param, equalTo("test")); }