conflict_resolution (string, lengths 27 to 16k) |
---|
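Each row below is a single string: one or more standard git conflict hunks (`<<<<<<<` ours, `=======`, theirs, `>>>>>>>`) each followed immediately by the resolved code, with rows delimited by a trailing `|`. The following is a minimal parsing sketch, assuming every row follows exactly that bare-marker layout; the function name and the (ours, theirs, resolution) triple representation are illustrative choices, not part of the dataset.

```python
# Minimal sketch for splitting one raw row into (ours, theirs, resolution) triples.
# Assumptions (not guaranteed by the dataset): markers appear bare, in the order
# "<<<<<<< ... ======= ... >>>>>>> <resolution>", possibly repeated within a row.
from typing import List, Tuple


def split_conflicts(row: str) -> List[Tuple[str, str, str]]:
    triples = []
    rest = row
    while "<<<<<<<" in rest:
        _, rest = rest.split("<<<<<<<", 1)        # drop anything before the hunk
        ours, rest = rest.split("=======", 1)     # local side of the conflict
        theirs, rest = rest.split(">>>>>>>", 1)   # incoming side of the conflict
        nxt = rest.find("<<<<<<<")                # resolution runs to the next hunk (or row end)
        resolution = rest if nxt == -1 else rest[:nxt]
        triples.append((ours.strip(), theirs.strip(), resolution.strip()))
        rest = "" if nxt == -1 else rest[nxt:]
    return triples
```

Applied to the first row below, for example, this would yield two triples whose resolutions are the two `forwardTxHash(Chain chain, NulsHash hash, ...)` overloads.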
<<<<<<<
public static boolean forwardTxHash(Chain chain, byte[] hash) throws NulsException {
return forwardTxHash(chain, hash, null);
=======
public static boolean forwardTxHash(int chainId, NulsHash hash) throws NulsException {
return forwardTxHash(chainId, hash, null);
>>>>>>>
public static boolean forwardTxHash(Chain chain, NulsHash hash) throws NulsException {
return forwardTxHash(chain, hash, null);
<<<<<<<
public static boolean forwardTxHash(Chain chain, byte[] hash, String excludeNodes) throws NulsException {
=======
public static boolean forwardTxHash(int chainId, NulsHash hash, String excludeNodes) throws NulsException {
>>>>>>>
public static boolean forwardTxHash(Chain chain, NulsHash hash, String excludeNodes) throws NulsException { |
<<<<<<<
null,
taskName,
=======
null,
>>>>>>>
null,
taskName,
<<<<<<<
transform.getClass().getName(),
taskName,
=======
transform,
>>>>>>>
transform,
taskName,
<<<<<<<
null,
taskName,
=======
null,
>>>>>>>
null,
taskName,
<<<<<<<
null,
taskName,
=======
null,
>>>>>>>
null,
taskName,
<<<<<<<
@SuppressWarnings({"ForLoopReplaceableByForEach", "unchecked", "ConstantConditions"})
=======
@SuppressWarnings({"unchecked", "ConstantConditions", "ForLoopReplaceableByForEach"})
>>>>>>>
@SuppressWarnings({"unchecked", "ConstantConditions", "ForLoopReplaceableByForEach"}) |
<<<<<<<
=======
case 18:
UUID subjId0 = commState.getUuid();
if (subjId0 == UUID_NOT_READ)
return false;
subjId = subjId0;
commState.idx++;
>>>>>>>
case 18:
UUID subjId0 = commState.getUuid();
if (subjId0 == UUID_NOT_READ)
return false;
subjId = subjId0;
commState.idx++; |
<<<<<<<
evtLoop = cfg.getProtocol() == GridClientProtocol.TCP ?
new NioEventLoopGroup(workerCnt, new GridClientThreadFactory("nio", true)) : null;
=======
GridNioFilter codecFilter = new GridNioCodecFilter(new NioParser(msgReader), gridLog, true);
if (sslCtx != null) {
GridNioSslFilter sslFilter = new GridNioSslFilter(sslCtx, gridLog);
sslFilter.directMode(true);
sslFilter.clientMode(true);
filters = new GridNioFilter[]{codecFilter, sslFilter};
}
else
filters = new GridNioFilter[]{codecFilter};
srv = GridNioServer.builder().address(U.getLocalHost())
.port(-1)
.listener(new NioListener())
.filters(filters)
.logger(gridLog)
.selectorCount(Runtime.getRuntime().availableProcessors())
.sendQueueLimit(1024)
.byteOrder(ByteOrder.nativeOrder())
.tcpNoDelay(cfg.isTcpNoDelay())
.directBuffer(true)
.directMode(true)
.socketReceiveBufferSize(0)
.socketSendBufferSize(0)
.idleTimeout(TCP_IDLE_CONN_TIMEOUT)
.gridName("gridClient")
.messageWriter(msgWriter)
.build();
srv.start();
}
catch (IOException | GridException e) {
throw new GridClientException("Failed to start connection server.", e);
}
}
>>>>>>>
GridNioFilter codecFilter = new GridNioCodecFilter(new NioParser(msgReader), gridLog, true);
if (sslCtx != null) {
GridNioSslFilter sslFilter = new GridNioSslFilter(sslCtx, gridLog);
sslFilter.directMode(true);
sslFilter.clientMode(true);
filters = new GridNioFilter[]{codecFilter, sslFilter};
}
else
filters = new GridNioFilter[]{codecFilter};
srv = GridNioServer.builder().address(U.getLocalHost())
.port(-1)
.listener(new NioListener())
.filters(filters)
.logger(gridLog)
.selectorCount(Runtime.getRuntime().availableProcessors())
.sendQueueLimit(1024)
.byteOrder(ByteOrder.nativeOrder())
.tcpNoDelay(cfg.isTcpNoDelay())
.directBuffer(true)
.directMode(true)
.socketReceiveBufferSize(0)
.socketSendBufferSize(0)
.idleTimeout(TCP_IDLE_CONN_TIMEOUT)
.gridName("gridClient")
.messageWriter(msgWriter)
.build();
srv.start();
}
catch (IOException | GridException e) {
throw new GridClientException("Failed to start connection server.", e);
}
}
<<<<<<<
if (cfg.getProtocol() == GridClientProtocol.TCP) {
conn = new GridClientTcpConnection(clientId, addr, sslCtx, evtLoop,
cfg.getConnectTimeout(), cfg.getPingInterval(), cfg.getPingTimeout(),
cfg.isTcpNoDelay(), protoId == null ? cfg.getMarshaller() : null,
top, cfg.getCredentials(), protoId);
=======
switch (cfg.getProtocol()) {
case TCP: {
conn = new GridClientNioTcpConnection(srv, clientId, addr, sslCtx, pingExecutor,
cfg.getConnectTimeout(), cfg.getPingInterval(), cfg.getPingTimeout(),
cfg.isTcpNoDelay(), protoId == null ? cfg.getMarshaller() : null,
top, cred, protoId);
break;
}
case HTTP: {
conn = new GridClientHttpConnection(clientId, addr, sslCtx,
// Applying max idle time as read timeout for HTTP connections.
cfg.getConnectTimeout(), (int)cfg.getMaxConnectionIdleTime(), top,
executor == null ? cfg.getExecutorService() : executor, cred);
break;
}
default: {
throw new GridServerUnreachableException("Failed to create client (protocol is not supported): " +
cfg.getProtocol());
}
>>>>>>>
if (cfg.getProtocol() == GridClientProtocol.TCP) {
conn = new GridClientNioTcpConnection(srv, clientId, addr, sslCtx, pingExecutor,
cfg.getConnectTimeout(), cfg.getPingInterval(), cfg.getPingTimeout(),
cfg.isTcpNoDelay(), protoId == null ? cfg.getMarshaller() : null,
top, cred, protoId); |
<<<<<<<
/** Service configuration. */
private GridServiceConfiguration[] svcCfgs;
=======
/** Hadoop configuration. */
private GridHadoopConfiguration hadoopCfg;
>>>>>>>
/** Service configuration. */
private GridServiceConfiguration[] svcCfgs;
/** Hadoop configuration. */
private GridHadoopConfiguration hadoopCfg; |
<<<<<<<
new GridHadoopEmbeddedTaskExecutor(),
new GridHadoopExternalTaskExecutor(),
new GridHadoopShuffle());
=======
new GridHadoopTaskExecutor(),
new GridHadoopShuffler());
>>>>>>>
new GridHadoopEmbeddedTaskExecutor(),
new GridHadoopExternalTaskExecutor(),
new GridHadoopShuffler()); |
<<<<<<<
UUID subjId, String cloClsName, String taskName, GridPredicate<GridCacheEntry<K, V>>[] filter) throws GridException,
GridCacheEntryRemovedException, GridCacheFilterFailedException {
=======
UUID subjId, Object transformClo, GridPredicate<GridCacheEntry<K, V>>[] filter) throws GridException,
GridCacheEntryRemovedException, GridCacheFilterFailedException {
>>>>>>>
UUID subjId, Object transformClo, String taskName, GridPredicate<GridCacheEntry<K, V>>[] filter) throws GridException,
GridCacheEntryRemovedException, GridCacheFilterFailedException {
<<<<<<<
return innerGet0(tx, readSwap, readThrough, evt, failFast, unmarshal, updateMetrics, subjId, cloClsName,
taskName, filter);
=======
return innerGet0(tx, readSwap, readThrough, evt, failFast, unmarshal, updateMetrics, subjId, transformClo,
filter);
>>>>>>>
return innerGet0(tx, readSwap, readThrough, evt, failFast, unmarshal, updateMetrics, subjId, transformClo,
taskName, filter);
<<<<<<<
boolean unmarshal, boolean updateMetrics, UUID subjId, String cloClsName, String taskName,
GridPredicate<GridCacheEntry<K, V>>[] filter)
=======
boolean unmarshal, boolean updateMetrics, UUID subjId, Object transformClo,
GridPredicate<GridCacheEntry<K, V>>[] filter)
>>>>>>>
boolean unmarshal, boolean updateMetrics, UUID subjId, Object transformClo, String taskName,
GridPredicate<GridCacheEntry<K, V>>[] filter)
<<<<<<<
expiredVal != null || hasOldBytes, subjId, cloClsName, taskName);
=======
expiredVal != null || hasOldBytes, subjId, null);
>>>>>>>
expiredVal != null || hasOldBytes, subjId, null, taskName);
<<<<<<<
hasOldBytes || old != null, subjId, cloClsName, taskName);
=======
hasOldBytes || old != null, subjId,
transformClo != null ? transformClo.getClass().getName() : null);
>>>>>>>
hasOldBytes || old != null, subjId,
transformClo != null ? transformClo.getClass().getName() : null, taskName);
<<<<<<<
return innerGet0(tx, readSwap, readThrough, false, failFast, unmarshal, updateMetrics, subjId,
cloClsName, taskName, filter);
=======
return innerGet0(tx, readSwap, readThrough, false, failFast, unmarshal, updateMetrics, subjId,
transformClo, filter);
>>>>>>>
return innerGet0(tx, readSwap, readThrough, false, failFast, unmarshal, updateMetrics, subjId,
transformClo, taskName, filter);
<<<<<<<
old, hasOldBytes, subjId, cloClsName, taskName);
=======
old, hasOldBytes, subjId, transformClo != null ? transformClo.getClass().getName() : null);
>>>>>>>
old, hasOldBytes, subjId, transformClo != null ? transformClo.getClass().getName() : null,
taskName);
<<<<<<<
return innerGet0(tx, readSwap, readThrough, false, failFast, unmarshal, updateMetrics, subjId,
cloClsName, taskName, filter);
=======
return innerGet0(tx, readSwap, readThrough, false, failFast, unmarshal, updateMetrics, subjId,
transformClo, filter);
>>>>>>>
return innerGet0(tx, readSwap, readThrough, false, failFast, unmarshal, updateMetrics, subjId,
transformClo, taskName, filter);
<<<<<<<
newVer, EVT_CACHE_OBJECT_PUT, val, val != null, old, old != null || hasValueUnlocked(),
subjId, null, taskName);
=======
newVer, EVT_CACHE_OBJECT_PUT, val, val != null, old, old != null || hasValueUnlocked(),
subjId, null);
>>>>>>>
newVer, EVT_CACHE_OBJECT_PUT, val, val != null, old, old != null || hasValueUnlocked(),
subjId, null, taskName);
<<<<<<<
EVT_CACHE_OBJECT_REMOVED, null, false, old, old != null || hasValueUnlocked(), subjId, null,
taskName);
=======
EVT_CACHE_OBJECT_REMOVED, null, false, old, old != null || hasValueUnlocked(), subjId, null);
>>>>>>>
EVT_CACHE_OBJECT_REMOVED, null, false, old, old != null || hasValueUnlocked(), subjId, null,
taskName);
<<<<<<<
(GridCacheVersion)null, EVT_CACHE_OBJECT_READ, old, old != null || hadVal, old,
old != null || hadVal, subjId, transformCloClsName, taskName);
if (cctx.events().isRecordable(EVT_CACHE_OBJECT_PUT))
cctx.events().addEvent(partition(), key, cctx.localNodeId(), null,
(GridCacheVersion)null, EVT_CACHE_OBJECT_PUT, updated, updated != null, old,
old != null || hadVal, subjId, null, taskName);
}
=======
(GridCacheVersion)null, EVT_CACHE_OBJECT_READ, old, old != null || hadVal, old,
old != null || hadVal, subjId, transformCloClsName);
if (cctx.events().isRecordable(EVT_CACHE_OBJECT_PUT))
cctx.events().addEvent(partition(), key, cctx.localNodeId(), null,
(GridCacheVersion)null, EVT_CACHE_OBJECT_PUT, updated, updated != null, old,
old != null || hadVal, subjId, null);
}
>>>>>>>
(GridCacheVersion)null, EVT_CACHE_OBJECT_READ, old, old != null || hadVal, old,
old != null || hadVal, subjId, transformCloClsName, taskName);
if (cctx.events().isRecordable(EVT_CACHE_OBJECT_PUT))
cctx.events().addEvent(partition(), key, cctx.localNodeId(), null,
(GridCacheVersion)null, EVT_CACHE_OBJECT_PUT, updated, updated != null, old,
old != null || hadVal, subjId, null, taskName);
}
<<<<<<<
if (evt) {
if (transformCloClsName != null && cctx.events().isRecordable(EVT_CACHE_OBJECT_READ))
cctx.events().addEvent(partition(), key, cctx.localNodeId(), null,
(GridCacheVersion)null, EVT_CACHE_OBJECT_READ, old, old != null || hadVal, old,
old != null || hadVal, subjId, transformCloClsName, taskName);
if (cctx.events().isRecordable(EVT_CACHE_OBJECT_REMOVED))
cctx.events().addEvent(partition(), key, cctx.localNodeId(), null, (GridCacheVersion)null,
EVT_CACHE_OBJECT_REMOVED, null, false, old, old != null || hadVal, subjId, null, taskName);
}
=======
if (evt) {
if (transformCloClsName != null && cctx.events().isRecordable(EVT_CACHE_OBJECT_READ))
cctx.events().addEvent(partition(), key, cctx.localNodeId(), null,
(GridCacheVersion)null, EVT_CACHE_OBJECT_READ, old, old != null || hadVal, old,
old != null || hadVal, subjId, transformCloClsName);
if (cctx.events().isRecordable(EVT_CACHE_OBJECT_REMOVED))
cctx.events().addEvent(partition(), key, cctx.localNodeId(), null, (GridCacheVersion)null,
EVT_CACHE_OBJECT_REMOVED, null, false, old, old != null || hadVal, subjId, null);
}
>>>>>>>
if (evt) {
if (transformCloClsName != null && cctx.events().isRecordable(EVT_CACHE_OBJECT_READ))
cctx.events().addEvent(partition(), key, cctx.localNodeId(), null,
(GridCacheVersion)null, EVT_CACHE_OBJECT_READ, old, old != null || hadVal, old,
old != null || hadVal, subjId, transformCloClsName, taskName);
if (cctx.events().isRecordable(EVT_CACHE_OBJECT_REMOVED))
cctx.events().addEvent(partition(), key, cctx.localNodeId(), null, (GridCacheVersion)null,
EVT_CACHE_OBJECT_REMOVED, null, false, old, old != null || hadVal, subjId, null, taskName);
}
<<<<<<<
String transformCloClsName = null;
=======
Object transformClo = null;
>>>>>>>
Object transformClo = null;
<<<<<<<
if (op == TRANSFORM) {
transformCloClsName = writeObj.getClass().getName();
writeObj = ((GridClosure<V, V>) writeObj).apply(oldVal);
}
=======
if (op == TRANSFORM) {
transformClo = writeObj;
writeObj = ((GridClosure<V, V>) writeObj).apply(oldVal);
}
>>>>>>>
if (op == TRANSFORM) {
transformClo = writeObj;
writeObj = ((GridClosure<V, V>) writeObj).apply(oldVal);
}
<<<<<<<
transformCloClsName = writeObj.getClass().getName();
=======
transformClo = writeObj;
>>>>>>>
transformClo = writeObj;
<<<<<<<
if (evt) {
if (transformCloClsName != null && cctx.events().isRecordable(EVT_CACHE_OBJECT_READ))
cctx.events().addEvent(partition(), key, evtNodeId, null,
newVer, EVT_CACHE_OBJECT_READ, old, old != null || hadVal, old,
old != null || hadVal, subjId, transformCloClsName, taskName);
if (newVer != null && cctx.events().isRecordable(EVT_CACHE_OBJECT_PUT))
cctx.events().addEvent(partition(), key, evtNodeId, null,
newVer, EVT_CACHE_OBJECT_PUT, updated, updated != null, old,
old != null || hadVal, subjId, null, taskName);
}
=======
if (evt) {
if (transformClo != null && cctx.events().isRecordable(EVT_CACHE_OBJECT_READ))
cctx.events().addEvent(partition(), key, evtNodeId, null,
newVer, EVT_CACHE_OBJECT_READ, old, old != null || hadVal, old,
old != null || hadVal, subjId, transformClo.getClass().getName());
if (newVer != null && cctx.events().isRecordable(EVT_CACHE_OBJECT_PUT))
cctx.events().addEvent(partition(), key, evtNodeId, null,
newVer, EVT_CACHE_OBJECT_PUT, updated, updated != null, old,
old != null || hadVal, subjId, null);
}
>>>>>>>
if (evt) {
if (transformClo != null && cctx.events().isRecordable(EVT_CACHE_OBJECT_READ))
cctx.events().addEvent(partition(), key, evtNodeId, null,
newVer, EVT_CACHE_OBJECT_READ, old, old != null || hadVal, old,
old != null || hadVal, subjId, transformClo.getClass().getName(), taskName);
if (newVer != null && cctx.events().isRecordable(EVT_CACHE_OBJECT_PUT))
cctx.events().addEvent(partition(), key, evtNodeId, null,
newVer, EVT_CACHE_OBJECT_PUT, updated, updated != null, old,
old != null || hadVal, subjId, null, taskName);
}
<<<<<<<
if (evt) {
if (transformCloClsName != null && cctx.events().isRecordable(EVT_CACHE_OBJECT_READ))
cctx.events().addEvent(partition(), key, evtNodeId, null,
newVer, EVT_CACHE_OBJECT_READ, old, old != null || hadVal, old,
old != null || hadVal, subjId, transformCloClsName, taskName);
if (newVer != null && cctx.events().isRecordable(EVT_CACHE_OBJECT_REMOVED))
cctx.events().addEvent(partition(), key, evtNodeId, null, newVer,
EVT_CACHE_OBJECT_REMOVED, null, false, old, old != null || hadVal, subjId, null, taskName);
}
=======
if (evt) {
if (transformClo != null && cctx.events().isRecordable(EVT_CACHE_OBJECT_READ))
cctx.events().addEvent(partition(), key, evtNodeId, null,
newVer, EVT_CACHE_OBJECT_READ, old, old != null || hadVal, old,
old != null || hadVal, subjId, transformClo.getClass().getName());
if (newVer != null && cctx.events().isRecordable(EVT_CACHE_OBJECT_REMOVED))
cctx.events().addEvent(partition(), key, evtNodeId, null, newVer,
EVT_CACHE_OBJECT_REMOVED, null, false, old, old != null || hadVal, subjId, null);
}
>>>>>>>
if (evt) {
if (transformClo != null && cctx.events().isRecordable(EVT_CACHE_OBJECT_READ))
cctx.events().addEvent(partition(), key, evtNodeId, null,
newVer, EVT_CACHE_OBJECT_READ, old, old != null || hadVal, old,
old != null || hadVal, subjId, transformClo.getClass().getName(), taskName);
if (newVer != null && cctx.events().isRecordable(EVT_CACHE_OBJECT_REMOVED))
cctx.events().addEvent(partition(), key, evtNodeId, null, newVer,
EVT_CACHE_OBJECT_REMOVED, null, false, old, old != null || hadVal, subjId, null, taskName);
}
<<<<<<<
null, false, expiredVal, expiredVal != null || hasOldBytes, null, null, null);
=======
null, false, expiredVal, expiredVal != null || hasOldBytes, null, null);
>>>>>>>
null, false, expiredVal, expiredVal != null || hasOldBytes, null, null, null); |
<<<<<<<
@Nullable public GridNode node(UUID nid);
/**
* @return // TODO
*/
@Nullable public GridNode node();
/**
* Tells whether or not this projection is dynamic.
* <p>
* Dynamic projection is based on predicate and in any particular moment of time
* can consist of a different set of nodes. Static project does not change and always
* consist of the same set of nodes (excluding the node that have left the topology
* since the creation of the static projection).
*
* @return Whether or not projection is dynamic.
*/
public boolean dynamic();
=======
@Nullable public GridNode node(UUID id);
>>>>>>>
@Nullable public GridNode node(UUID nid);
/**
* @return // TODO
*/
@Nullable public GridNode node(); |
<<<<<<<
* // TODO
* @param prj
* @return
*/
public GridProjection forOthers(GridProjection prj);
/**
* Creates monadic projection with a given set of node IDs out of this projection.
* Note that nodes not in this projection at the moment of call will excluded.
=======
* Creates a grid projection over nodes with specified node IDs.
>>>>>>>
* // TODO
* @param prj
* @return
*/
public GridProjection forOthers(GridProjection prj);
/**
* Creates a grid projection over nodes with specified node IDs. |
<<<<<<<
GridCacheConfiguration[] clone = new GridCacheConfiguration[cacheCfgs.length + drSysCaches.size() +
(hasHadoop ? 1 : 0)];
=======
GridCacheConfiguration[] clone = new GridCacheConfiguration[cacheCfgs.length +
drSysCaches.size() +
(U.securityEnabled(cfg) ? 1 : 0)];
>>>>>>>
GridCacheConfiguration[] clone = new GridCacheConfiguration[cacheCfgs.length + drSysCaches.size() +
(hasHadoop ? 1 : 0) + (U.securityEnabled(cfg) ? 1 : 0)];
<<<<<<<
else if (!drSysCaches.isEmpty() || hasHadoop) {
// Populate system caches/
GridCacheConfiguration[] ccfgs = new GridCacheConfiguration[drSysCaches.size() + (hasHadoop ? 1 : 0)];
=======
else if (!drSysCaches.isEmpty() || U.securityEnabled(cfg)) {
GridCacheConfiguration[] ccfgs = new GridCacheConfiguration[drSysCaches.size() +
(U.securityEnabled(cfg) ? 1 : 0)];
>>>>>>>
else if (!drSysCaches.isEmpty() || hasHadoop || U.securityEnabled(cfg)) {
// Populate system caches/
GridCacheConfiguration[] ccfgs = new GridCacheConfiguration[drSysCaches.size() + (hasHadoop ? 1 : 0) +
(U.securityEnabled(cfg) ? 1 : 0)];
<<<<<<<
=======
* Creates system cache configuration used by data center replication component.
*
* @param cacheName Cache name.
* @return Replication cache configuration.
*/
private GridCacheConfiguration drSystemCache(String cacheName) {
GridCacheConfiguration cache = new GridCacheConfiguration();
cache.setName(cacheName);
cache.setCacheMode(REPLICATED);
cache.setAtomicityMode(TRANSACTIONAL);
cache.setSwapEnabled(false);
cache.setWriteSynchronizationMode(FULL_SYNC);
return cache;
}
/**
* Creates security system cache configuration.
*
* @return Security system cache configuration.
*/
private GridCacheConfiguration securitySystemCache() {
GridCacheConfiguration cache = new GridCacheConfiguration();
cache.setName(CU.SECURITY_SYS_CACHE_NAME);
cache.setCacheMode(REPLICATED);
cache.setAtomicityMode(TRANSACTIONAL);
cache.setSwapEnabled(false);
cache.setWriteSynchronizationMode(FULL_SYNC);
return cache;
}
/**
>>>>>>>
* Creates security system cache configuration.
*
* @return Security system cache configuration.
*/
private GridCacheConfiguration securitySystemCache() {
GridCacheConfiguration cache = new GridCacheConfiguration();
cache.setName(CU.SECURITY_SYS_CACHE_NAME);
cache.setCacheMode(REPLICATED);
cache.setAtomicityMode(TRANSACTIONAL);
cache.setSwapEnabled(false);
cache.setWriteSynchronizationMode(FULL_SYNC);
return cache;
}
/** |
<<<<<<<
import com.intellij.psi.xml.XmlAttributeValue;
import org.apache.camel.catalog.CamelCatalog;
=======
>>>>>>>
import com.intellij.psi.xml.XmlAttributeValue;
import org.apache.camel.catalog.CamelCatalog;
<<<<<<<
import org.apache.camel.idea.util.CamelService;
=======
import org.apache.camel.idea.util.IdeaUtils;
>>>>>>>
import org.apache.camel.idea.util.CamelService;
import org.apache.camel.idea.util.IdeaUtils;
<<<<<<<
Project project = element.getProject();
CamelCatalog camelCatalog = ServiceManager.getService(project, CamelCatalogService.class).get();
String componentName = StringUtils.asComponentName(val);
if (componentName != null) {
return generateCamelComponentDocumentation(componentName, val, camelCatalog);
} else {
// its maybe a method call for a Camel language
PsiMethodCallExpression call = PsiTreeUtil.getParentOfType(element, PsiMethodCallExpression.class);
if (call != null) {
PsiMethod method = call.resolveMethod();
if (method != null) {
// try to see if we have a Camel language with the method name
String name = asLanguageName(method.getName());
if (camelCatalog.findLanguageNames().contains(name)) {
// okay its a potential Camel language so see if the psi method call is using
// camel-core types so we know for a fact its really a Camel language
if (isPsiMethodCamelLanguage(method)) {
String html = camelCatalog.languageHtmlDoc(name);
if (html != null) {
return html;
}
=======
String componentName = StringUtils.asComponentName(val);
if (componentName != null) {
return generateCamelComponentDocumentation(componentName, val, -1);
} else {
// its maybe a method call for a Camel language
PsiMethodCallExpression call = PsiTreeUtil.getParentOfType(element, PsiMethodCallExpression.class);
if (call != null) {
PsiMethod method = call.resolveMethod();
if (method != null) {
// try to see if we have a Camel language with the method name
String name = asLanguageName(method.getName());
if (CamelCatalogService.getInstance().findLanguageNames().contains(name)) {
// okay its a potential Camel language so see if the psi method call is using
// camel-core types so we know for a fact its really a Camel language
if (isPsiMethodCamelLanguage(method)) {
String html = CamelCatalogService.getInstance().languageHtmlDoc(name);
if (html != null) {
return html;
>>>>>>>
String componentName = StringUtils.asComponentName(val);
if (componentName != null) {
return generateCamelComponentDocumentation(componentName, val, -1);
} else {
// its maybe a method call for a Camel language
PsiMethodCallExpression call = PsiTreeUtil.getParentOfType(element, PsiMethodCallExpression.class);
if (call != null) {
PsiMethod method = call.resolveMethod();
if (method != null) {
// try to see if we have a Camel language with the method name
String name = asLanguageName(method.getName());
if (CamelCatalogService.getInstance().findLanguageNames().contains(name)) {
// okay its a potential Camel language so see if the psi method call is using
// camel-core types so we know for a fact its really a Camel language
if (isPsiMethodCamelLanguage(method)) {
String html = CamelCatalogService.getInstance().languageHtmlDoc(name);
if (html != null) {
return html;
<<<<<<<
private String generateCamelComponentDocumentation(String componentName, String val, CamelCatalog camelCatalog) {
=======
private String generateCamelComponentDocumentation(String componentName, String val, int wrapLength) {
>>>>>>>
private String generateCamelComponentDocumentation(String componentName, String val, int wrapLength) {
<<<<<<<
String json = camelCatalog.componentJSonSchema(componentName);
=======
String json = CamelCatalogService.getInstance().componentJSonSchema(componentName);
if (json == null) {
return null;
}
>>>>>>>
String json = CamelCatalogService.getInstance().componentJSonSchema(componentName);
if (json == null) {
return null;
} |
<<<<<<<
import org.apache.camel.idea.service.CamelCatalogService;
=======
import static org.apache.camel.idea.util.IdeaUtils.isElementFromAnnotation;
import static org.apache.camel.idea.util.IdeaUtils.isElementFromConstructor;
import static org.apache.camel.idea.util.IdeaUtils.isElementFromSetterProperty;
import static org.apache.camel.idea.util.IdeaUtils.isFromFileType;
import static org.apache.camel.idea.util.IdeaUtils.isFromJavaMethodCall;
>>>>>>>
import org.apache.camel.idea.service.CamelCatalogService;
<<<<<<<
return Arrays.stream(SIMPLE_PREDICATE).anyMatch((n) -> getIdeaUtils().hasParentXmlTag(xml, n));
=======
return Arrays.stream(SIMPLE_PREDICATE).anyMatch(n -> IdeaUtils.hasParentXmlTag(xml, n));
>>>>>>>
return Arrays.stream(SIMPLE_PREDICATE).anyMatch(n -> getIdeaUtils().hasParentXmlTag(xml, n)); |
<<<<<<<
=======
import org.apache.jena.ontology.ConversionException;
import org.apache.jena.ontology.Individual;
import org.apache.jena.ontology.OntModel;
import org.apache.jena.rdf.model.ModelFactory;
import org.apache.jena.rdf.model.Property;
>>>>>>>
import org.apache.jena.ontology.ConversionException;
import org.apache.jena.ontology.Individual;
import org.apache.jena.ontology.OntModel;
import org.apache.jena.rdf.model.ModelFactory;
import org.apache.jena.rdf.model.Property; |
<<<<<<<
import com.hp.hpl.jena.ontology.OntModel;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;
import com.hp.hpl.jena.rdf.model.Resource;
import com.hp.hpl.jena.rdf.model.ResourceFactory;
import com.hp.hpl.jena.vocabulary.RDF;
import com.hp.hpl.jena.vocabulary.RDFS;
=======
import com.clarkparsia.owlapiv3.OWL;
import com.clarkparsia.pellet.owlapiv3.PelletReasoner;
import com.clarkparsia.pellet.owlapiv3.PelletReasonerFactory;
>>>>>>>
import com.clarkparsia.pellet.owlapiv3.PelletReasoner;
import com.clarkparsia.pellet.owlapiv3.PelletReasonerFactory; |
<<<<<<<
import org.alien4cloud.tosca.model.templates.NodeTemplate;
import org.alien4cloud.tosca.model.types.NodeType;
=======
import alien4cloud.component.ICSARRepositorySearchService;
import alien4cloud.model.service.ServiceResource;
import alien4cloud.service.ServiceResourceService;
import com.google.common.collect.Lists;
import org.alien4cloud.tosca.catalog.index.IToscaTypeSearchService;
import org.alien4cloud.tosca.catalog.index.ToscaTypeSearchService;
import org.alien4cloud.tosca.model.CSARDependency;
import org.alien4cloud.tosca.model.Csar;
import org.alien4cloud.tosca.model.definitions.CapabilityDefinition;
import org.alien4cloud.tosca.model.templates.ServiceNodeTemplate;
import org.alien4cloud.tosca.model.types.CapabilityType;
>>>>>>>
import org.alien4cloud.tosca.catalog.index.IToscaTypeSearchService;
import org.alien4cloud.tosca.model.CSARDependency;
import org.alien4cloud.tosca.model.Csar;
import org.alien4cloud.tosca.model.definitions.CapabilityDefinition;
import org.alien4cloud.tosca.model.templates.NodeTemplate;
import org.alien4cloud.tosca.model.templates.ServiceNodeTemplate;
import org.alien4cloud.tosca.model.types.CapabilityType;
import org.alien4cloud.tosca.model.types.NodeType;
<<<<<<<
@Inject
private LocationSecurityService locationSecurityService;
=======
@Inject
private IToscaTypeSearchService toscaTypeSearchService;
@Inject
private ICSARRepositorySearchService csarRepoSearchService;
>>>>>>>
@Inject
private LocationSecurityService locationSecurityService;
@Inject
private IToscaTypeSearchService toscaTypeSearchService;
@Inject
private ICSARRepositorySearchService csarRepoSearchService;
<<<<<<<
public Map<String, List<LocationResourceTemplate>> match(Map<String, NodeType> nodesTypes, Map<String, NodeTemplate> nodesToMatch, String locationId) {
return match(nodesTypes, nodesToMatch, locationId, null);
}
public Map<String, List<LocationResourceTemplate>> match(Map<String, NodeType> nodesTypes, Map<String, NodeTemplate> nodesToMatch,
String locationId, String environmentId) {
=======
public Map<String, List<LocationResourceTemplate>> match(Map<String, NodeType> nodesTypes, Map<String, NodeTemplate> nodesToMatch, String locationId) {
>>>>>>>
public Map<String, List<LocationResourceTemplate>> match(Map<String, NodeType> nodesTypes, Map<String, NodeTemplate> nodesToMatch, String locationId,
String environmentId) {
<<<<<<<
filterOnAuthorization(locationResources, environmentId);
=======
ServiceResource[] services = serviceResourceService.searchByLocation(locationId);
populateLocationResourcesWithServiceResource(locationResources, services, locationId);
>>>>>>>
// Authorization filtering of services resources
filterOnAuthorization(locationResources, environmentId);
// TODO Services resources not yet have authorizations
ServiceResource[] services = serviceResourceService.searchByLocation(locationId);
populateLocationResourcesWithServiceResource(locationResources, services, locationId);
<<<<<<<
private void filterOnAuthorization(LocationResources locationResources, String environmentId) {
locationResources.getNodeTemplates()
.removeIf(locationResourceTemplate -> !locationSecurityService.isAuthorised(locationResourceTemplate, environmentId));
}
=======
/**
* Populate this {@link LocationResources} using these {@link ServiceResource}s in order to make them available as {@link LocationResourceTemplate} for
* matching purpose.
*
* FIXME: ugly code to put ServiceResource in LocationResourceTemplates.
*/
private void populateLocationResourcesWithServiceResource(LocationResources locationResources, ServiceResource[] services, String locationId) {
for (ServiceResource serviceResource : services) {
LocationResourceTemplate lrt = new LocationResourceTemplate();
lrt.setService(true);
lrt.setEnabled(true);
// for a service we also want to display the version, so just add it to the name
lrt.setName(serviceResource.getName() + ":" + serviceResource.getVersion());
lrt.setId(serviceResource.getId());
ServiceNodeTemplate serviceNodeTemplate = new ServiceNodeTemplate(serviceResource.getNodeInstance());
lrt.setTemplate(serviceNodeTemplate);
lrt.setLocationId(locationId);
String serviceTypeName = serviceResource.getNodeInstance().getNodeTemplate().getType();
List<String> types = Lists.newArrayList(serviceTypeName);
lrt.setTypes(types);
NodeType serviceType = toscaTypeSearchService.findOrFail(NodeType.class, serviceTypeName, serviceResource.getNodeInstance().getTypeVersion());
types.addAll(serviceType.getDerivedFrom());
locationResources.getNodeTypes().put(serviceTypeName, serviceType);
Csar csar = toscaTypeSearchService.getArchive(serviceType.getArchiveName(), serviceType.getArchiveVersion());
Set<CSARDependency> dependencies = Sets.newHashSet();
if (csar.getDependencies() != null) {
dependencies.addAll(csar.getDependencies());
}
dependencies.add(new CSARDependency(csar.getName(), csar.getVersion()));
if (serviceType.getCapabilities() != null && !serviceType.getCapabilities().isEmpty()) {
for (CapabilityDefinition capabilityDefinition : serviceType.getCapabilities()) {
locationResources.getCapabilityTypes().put(capabilityDefinition.getType(),
csarRepoSearchService.getRequiredElementInDependencies(CapabilityType.class, capabilityDefinition.getType(), dependencies));
}
}
locationResources.getNodeTemplates().add(lrt);
}
}
>>>>>>>
private void filterOnAuthorization(LocationResources locationResources, String environmentId) {
locationResources.getNodeTemplates()
.removeIf(locationResourceTemplate -> !locationSecurityService.isAuthorised(locationResourceTemplate, environmentId));
}
/**
* Populate this {@link LocationResources} using these {@link ServiceResource}s in order to make them available as {@link LocationResourceTemplate} for
* matching purpose.
*
* TODO: Improve this ugly code to put ServiceResource in LocationResourceTemplates.
*/
private void populateLocationResourcesWithServiceResource(LocationResources locationResources, ServiceResource[] services, String locationId) {
for (ServiceResource serviceResource : services) {
LocationResourceTemplate lrt = new LocationResourceTemplate();
lrt.setService(true);
lrt.setEnabled(true);
// for a service we also want to display the version, so just add it to the name
lrt.setName(serviceResource.getName() + ":" + serviceResource.getVersion());
lrt.setId(serviceResource.getId());
ServiceNodeTemplate serviceNodeTemplate = new ServiceNodeTemplate(serviceResource.getNodeInstance());
lrt.setTemplate(serviceNodeTemplate);
lrt.setLocationId(locationId);
String serviceTypeName = serviceResource.getNodeInstance().getNodeTemplate().getType();
List<String> types = Lists.newArrayList(serviceTypeName);
lrt.setTypes(types);
NodeType serviceType = toscaTypeSearchService.findOrFail(NodeType.class, serviceTypeName, serviceResource.getNodeInstance().getTypeVersion());
types.addAll(serviceType.getDerivedFrom());
locationResources.getNodeTypes().put(serviceTypeName, serviceType);
Csar csar = toscaTypeSearchService.getArchive(serviceType.getArchiveName(), serviceType.getArchiveVersion());
Set<CSARDependency> dependencies = Sets.newHashSet();
if (csar.getDependencies() != null) {
dependencies.addAll(csar.getDependencies());
}
dependencies.add(new CSARDependency(csar.getName(), csar.getVersion()));
if (serviceType.getCapabilities() != null && !serviceType.getCapabilities().isEmpty()) {
for (CapabilityDefinition capabilityDefinition : serviceType.getCapabilities()) {
locationResources.getCapabilityTypes().put(capabilityDefinition.getType(),
csarRepoSearchService.getRequiredElementInDependencies(CapabilityType.class, capabilityDefinition.getType(), dependencies));
}
}
locationResources.getNodeTemplates().add(lrt);
}
} |
<<<<<<<
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.*;
import javax.annotation.Resource;
import javax.inject.Inject;
=======
import alien4cloud.dao.IGenericSearchDAO;
import alien4cloud.model.components.*;
import alien4cloud.model.templates.TopologyTemplate;
import alien4cloud.model.topology.Topology;
import alien4cloud.paas.wf.WorkflowsBuilderService;
import alien4cloud.security.model.User;
import alien4cloud.topology.TopologyDTO;
import alien4cloud.topology.TopologyServiceCore;
import alien4cloud.tosca.ArchiveUploadService;
import alien4cloud.tosca.parser.ParsingErrorLevel;
import alien4cloud.tosca.parser.ParsingResult;
import alien4cloud.utils.FileUtil;
import com.google.common.collect.Maps;
import cucumber.api.DataTable;
import cucumber.api.java.Before;
import cucumber.api.java.en.Given;
import cucumber.api.java.en.Then;
import cucumber.api.java.en.When;
import gherkin.formatter.model.DataTableRow;
import lombok.extern.slf4j.Slf4j;
>>>>>>>
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.*;
import javax.annotation.Resource;
import javax.inject.Inject;
<<<<<<<
import com.google.common.collect.Maps;
import alien4cloud.dao.IGenericSearchDAO;
import alien4cloud.model.components.*;
import alien4cloud.model.templates.TopologyTemplate;
import alien4cloud.model.topology.Topology;
import alien4cloud.paas.wf.WorkflowsBuilderService;
import alien4cloud.security.model.User;
import alien4cloud.topology.TopologyDTO;
import alien4cloud.topology.TopologyServiceCore;
import alien4cloud.tosca.ArchiveUploadService;
import alien4cloud.tosca.parser.ParsingResult;
import cucumber.api.DataTable;
import cucumber.api.java.Before;
import cucumber.api.java.en.Given;
import cucumber.api.java.en.Then;
import cucumber.api.java.en.When;
import gherkin.formatter.model.DataTableRow;
import lombok.extern.slf4j.Slf4j;
=======
import javax.annotation.Resource;
import javax.inject.Inject;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.*;
>>>>>>>
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import alien4cloud.dao.IGenericSearchDAO;
import alien4cloud.model.components.*;
import alien4cloud.model.templates.TopologyTemplate;
import alien4cloud.model.topology.Topology;
import alien4cloud.paas.wf.WorkflowsBuilderService;
import alien4cloud.security.model.User;
import alien4cloud.topology.TopologyDTO;
import alien4cloud.topology.TopologyServiceCore;
import alien4cloud.tosca.ArchiveUploadService;
import alien4cloud.tosca.parser.ParsingErrorLevel;
import alien4cloud.tosca.parser.ParsingResult;
import alien4cloud.utils.FileUtil;
import cucumber.api.DataTable;
import cucumber.api.java.Before;
import cucumber.api.java.en.Given;
import cucumber.api.java.en.Then;
import cucumber.api.java.en.When;
import gherkin.formatter.model.DataTableRow;
import lombok.extern.slf4j.Slf4j;
<<<<<<<
public void i_upload_CSAR_from_path(String arg1) throws Throwable {
ParsingResult<Csar> result = csarUploadService.upload(Paths.get(arg1), CSARSource.UPLOAD);
System.out.println(result);
=======
public void i_upload_CSAR_from_path(String path) throws Throwable {
checkUploadError(uploadCsarFromPath(Paths.get(path)));
}
private void checkUploadError(ParsingResult<Csar> csarParsingResult) {
System.out.println(csarParsingResult.getContext().getParsingErrors());
Assert.assertFalse(csarParsingResult.hasError(ParsingErrorLevel.ERROR));
}
private ParsingResult<Csar> uploadCsarFromPath(Path path) throws Throwable {
return csarUploadService.upload(path, CSARSource.UPLOAD);
>>>>>>>
public void i_upload_CSAR_from_path(String path) throws Throwable {
uploadCsar(Paths.get(path));
}
@When("^I upload unzipped CSAR from path \"(.*?)\"$")
public void i_upload_unzipped_CSAR_From_path(String path) throws Throwable {
Path source = Paths.get(path);
Path csarTargetPath = CSAR_TARGET_PATH.resolve(source.getFileName() + ".csar");
FileUtil.zip(source, csarTargetPath);
uploadCsar(csarTargetPath);
}
private void uploadCsar(Path path) throws Throwable {
ParsingResult<Csar> result = csarUploadService.upload(path, CSARSource.UPLOAD);
Assert.assertFalse(result.hasError(ParsingErrorLevel.ERROR));
if (result.getContext().getParsingErrors().size() > 0) {
System.out.println(result);
}
}
@Given("^I cleanup archives$")
public void i_cleanup_archives() throws Throwable {
for (Class<?> type : typesToClean) {
alienDAO.delete(type, QueryBuilders.matchAllQuery());
}
}
@When("^I get the topology related to the template with name \"(.*?)\"$")
public void iGetTheTopologyRelatedToTheTemplateWithName(String templateName) throws Throwable {
TopologyTemplate topologyTeplate = topologyServiceCore.searchTopologyTemplateByName(templateName);
Topology topology = alienDAO.customFind(Topology.class, QueryBuilders.matchQuery("delegateId", topologyTeplate.getId()));
topologyIds.addLast(topology.getId());
}
@When("^I get the edited topology$")
public void I_get_the_edited_topology() {
thrownException = null;
try {
editionContextManager.init(topologyIds.getLast());
Topology topology = editionContextManager.getTopology();
topologyEvaluationContext = new StandardEvaluationContext(topology);
} catch (Exception e) {
log.error("Exception ocrured while getting the topology", e);
thrownException = e;
exceptionEvaluationContext = new StandardEvaluationContext(e);
} finally {
editionContextManager.destroy();
} |
<<<<<<<
// here we check that the template is not used in a topology or template composition
for (TopologyTemplateVersion ttv : versionService.getByDelegateId(topologyTemplate.getId())) {
Topology topology = topologyServiceCore.getTopology(ttv.getTopologyId());
if (topology != null && topology.getSubstitutionMapping() != null && topology.getSubstitutionMapping().getSubstitutionType() != null) {
// this topology template expose some substitution stuffs
// we have to check that it is not used by another topology
Csar csar = csarService.getTopologySubstitutionCsar(topology.getId());
Topology[] dependentTopologies = csarService.getDependantTopologies(csar.getName(), csar.getVersion());
if (dependentTopologies != null && dependentTopologies.length > 0) {
throw new DeleteReferencedObjectException("This csar can not be deleted since it's a dependencie for others");
}
}
}
// none of the version is used as embeded topology, we have to delete each version
=======
// here we check that the template is not used in a topology or template composition
for (TopologyTemplateVersion ttv : versionService.getByDelegateId(topologyTemplate.getId())) {
Topology topology = topologyServiceCore.getTopology(ttv.getTopologyId());
if (topology != null && topology.getSubstitutionMapping() != null && topology.getSubstitutionMapping().getSubstitutionType() != null) {
// this topology template expose some substitution stuffs
// we have to check that it is not used by another topology
Csar csar = csarService.getTopologySubstitutionCsar(topology.getId());
if (csar != null) {
Topology[] dependentTopologies = csarService.getDependantTopologies(csar.getName(), csar.getVersion());
if (dependentTopologies != null && dependentTopologies.length > 0) {
throw new DeleteReferencedObjectException("This csar can not be deleted since it's a dependencie for others");
}
}
}
}
// none of the version is used as embeded topology, we have to delete each version
>>>>>>>
// here we check that the template is not used in a topology or template composition
for (TopologyTemplateVersion ttv : versionService.getByDelegateId(topologyTemplate.getId())) {
Topology topology = topologyServiceCore.getTopology(ttv.getTopologyId());
if (topology != null && topology.getSubstitutionMapping() != null && topology.getSubstitutionMapping().getSubstitutionType() != null) {
// this topology template expose some substitution stuffs
// we have to check that it is not used by another topology
Csar csar = csarService.getTopologySubstitutionCsar(topology.getId());
if (csar != null) {
Topology[] dependentTopologies = csarService.getDependantTopologies(csar.getName(), csar.getVersion());
if (dependentTopologies != null && dependentTopologies.length > 0) {
throw new DeleteReferencedObjectException("This csar can not be deleted since it's a dependencie for others");
}
}
}
}
// none of the version is used as embeded topology, we have to delete each version |
<<<<<<<
import alien4cloud.dao.model.GetMultipleDataResult;
import alien4cloud.exception.AlreadyExistException;
import alien4cloud.model.deployment.Deployment;
import alien4cloud.utils.MapUtil;
=======
>>>>>>> |
<<<<<<<
import org.alien4cloud.alm.deployment.configuration.services.DeploymentConfigurationDao;
=======
import org.alien4cloud.alm.deployment.configuration.flow.modifiers.matching.PolicyMatchingCandidateModifier;
import org.alien4cloud.alm.deployment.configuration.flow.modifiers.matching.PolicyMatchingCompositeModifier;
import org.alien4cloud.alm.deployment.configuration.flow.modifiers.matching.PolicyMatchingConfigAutoSelectModifier;
import org.alien4cloud.alm.deployment.configuration.flow.modifiers.matching.PolicyMatchingConfigCleanupModifier;
import org.alien4cloud.alm.deployment.configuration.flow.modifiers.matching.PolicyMatchingReplaceModifier;
>>>>>>>
import org.alien4cloud.alm.deployment.configuration.flow.modifiers.matching.PolicyMatchingCandidateModifier;
import org.alien4cloud.alm.deployment.configuration.flow.modifiers.matching.PolicyMatchingCompositeModifier;
import org.alien4cloud.alm.deployment.configuration.flow.modifiers.matching.PolicyMatchingConfigAutoSelectModifier;
import org.alien4cloud.alm.deployment.configuration.flow.modifiers.matching.PolicyMatchingConfigCleanupModifier;
import org.alien4cloud.alm.deployment.configuration.flow.modifiers.matching.PolicyMatchingReplaceModifier;
import org.alien4cloud.alm.deployment.configuration.services.DeploymentConfigurationDao;
<<<<<<<
private PolicyMatchingModifier policyMatchingModifier;
=======
private PolicyMatchingCandidateModifier policyMatchingCandidateModifier;
@Inject
private PolicyMatchingConfigCleanupModifier policyMatchingConfigCleanupModifier;
@Inject
private PolicyMatchingConfigAutoSelectModifier policyMatchingConfigAutoSelectModifier;
@Inject
private PolicyMatchingReplaceModifier policyMatchingReplaceModifier;
>>>>>>>
private PolicyMatchingCandidateModifier policyMatchingCandidateModifier;
@Inject
private PolicyMatchingConfigCleanupModifier policyMatchingConfigCleanupModifier;
@Inject
private PolicyMatchingConfigAutoSelectModifier policyMatchingConfigAutoSelectModifier;
@Inject
private PolicyMatchingReplaceModifier policyMatchingReplaceModifier;
<<<<<<<
topologyModifiers.add(new NodeMatchingModifier(nodeMatchingCandidateModifier, // find matching candidates (do not change topology)
nodeMatchingConfigCleanupModifier, // cleanup user configuration if some config are not valid anymore
nodeMatchingConfigAutoSelectModifier, // auto-select missing nodes
nodeMatchingReplaceModifier // Impact the topology to replace matched nodes as configured
));
=======
topologyModifiers.add(new NodeMatchingCompositeModifier(nodeMatchingCandidateModifier, // find matching candidates (do not change topology)
nodeMatchingConfigCleanupModifier, // cleanup user configuration if some config are not valid anymore
nodeMatchingConfigAutoSelectModifier, // auto-select missing nodes
nodeMatchingReplaceModifier // Impact the topology to replace matched nodes as configured
));
>>>>>>>
topologyModifiers.add(new NodeMatchingCompositeModifier(nodeMatchingCandidateModifier, // find matching candidates (do not change topology)
nodeMatchingConfigCleanupModifier, // cleanup user configuration if some config are not valid anymore
nodeMatchingConfigAutoSelectModifier, // auto-select missing nodes
nodeMatchingReplaceModifier // Impact the topology to replace matched nodes as configured
)); |
<<<<<<<
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import javax.annotation.Resource;
import javax.inject.Inject;
import javax.validation.Valid;
import alien4cloud.tosca.context.ToscaContextualAspect;
import org.alien4cloud.git.GitLocationDao;
import org.alien4cloud.git.LocalGitManager;
import org.alien4cloud.git.model.GitLocation;
import org.alien4cloud.tosca.model.Csar;
import org.alien4cloud.tosca.model.templates.NodeTemplate;
import org.alien4cloud.tosca.model.templates.Topology;
import org.alien4cloud.tosca.model.types.NodeType;
import org.alien4cloud.tosca.topology.TopologyDTOBuilder;
import org.elasticsearch.common.joda.time.DateTime;
import org.elasticsearch.common.joda.time.DateTimeZone;
import org.hibernate.validator.constraints.NotBlank;
import org.springframework.http.MediaType;
import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.web.bind.annotation.*;
import org.springframework.web.context.request.async.DeferredResult;
import com.google.common.collect.Maps;
=======
>>>>>>>
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import javax.annotation.Resource;
import javax.inject.Inject;
import javax.validation.Valid;
import org.alien4cloud.alm.deployment.configuration.model.SecretCredentialInfo;
import org.alien4cloud.git.GitLocationDao;
import org.alien4cloud.git.LocalGitManager;
import org.alien4cloud.git.model.GitLocation;
import org.alien4cloud.secret.services.SecretProviderService;
import org.alien4cloud.tosca.model.Csar;
import org.alien4cloud.tosca.model.templates.NodeTemplate;
import org.alien4cloud.tosca.model.templates.Topology;
import org.alien4cloud.tosca.model.types.NodeType;
import org.alien4cloud.tosca.topology.TopologyDTOBuilder;
import org.elasticsearch.common.joda.time.DateTime;
import org.elasticsearch.common.joda.time.DateTimeZone;
import org.hibernate.validator.constraints.NotBlank;
import org.springframework.http.MediaType;
import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.context.request.async.DeferredResult;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
<<<<<<<
@Inject
private ToscaContextualAspect toscaContextualAspect;
=======
@Inject
private LocationService locationService;
@Inject
private SecretProviderService secretProviderService;
>>>>>>>
@Inject
private ToscaContextualAspect toscaContextualAspect;
@Inject
private LocationService locationService;
@Inject
private SecretProviderService secretProviderService; |
<<<<<<<
import java.util.List;
import java.util.Map;
import java.util.Set;
=======
>>>>>>>
import java.util.List;
import java.util.Map;
import java.util.Set;
<<<<<<<
import alien4cloud.model.application.ApplicationVersion;
=======
>>>>>>>
import alien4cloud.model.application.ApplicationVersion;
<<<<<<<
import alien4cloud.paas.model.DeploymentStatus;
import alien4cloud.security.ApplicationEnvironmentRole;
import alien4cloud.security.ApplicationRole;
import alien4cloud.security.AuthorizationUtil;
=======
>>>>>>>
import alien4cloud.paas.model.DeploymentStatus;
import alien4cloud.security.ApplicationEnvironmentRole;
import alien4cloud.security.ApplicationRole;
import alien4cloud.security.AuthorizationUtil;
<<<<<<<
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
=======
>>>>>>>
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
<<<<<<<
if (!this.isDeployed(environment.getId())) {
delete(environment.getId());
} else {
// collect all deployed environment
deployedEnvironments.add(environment.getId());
}
}
// couln't delete deployed environment
if (deployedEnvironments.size() > 0) {
// error could not deployed all app environment for this applcation
log.error("Cannot delete these deployed environments : {}", deployedEnvironments.toString());
}
}
/**
* True when an application environment is deployed
*
* @return true if the environment is currently deployed
* @throws CloudDisabledException
*/
public boolean isDeployed(String appEnvironmentId) throws CloudDisabledException {
GetMultipleDataResult<Deployment> dataResult = alienDAO.search(
Deployment.class,
null,
MapUtil.newHashMap(new String[] { "deploymentSetup.environmentId", "endDate" }, new String[][] { new String[] { appEnvironmentId },
new String[] { null } }), 1);
if (dataResult.getData() != null && dataResult.getData().length > 0) {
return true;
=======
delete(environment.getId());
>>>>>>>
if (!this.isDeployed(environment.getId())) {
delete(environment.getId());
} else {
// collect all deployed environment
deployedEnvironments.add(environment.getId());
}
}
// couln't delete deployed environment
if (deployedEnvironments.size() > 0) {
// error could not deployed all app environment for this applcation
log.error("Cannot delete these deployed environments : {}", deployedEnvironments.toString()); |
<<<<<<<
=======
import alien4cloud.model.components.IndexedNodeType;
import alien4cloud.model.templates.TopologyTemplate;
import alien4cloud.model.topology.AbstractTopologyVersion;
import alien4cloud.model.topology.Topology;
>>>>>>>
<<<<<<<
=======
private TopologyTemplateVersionService topologyTemplateVersionService;
@Resource
>>>>>>>
<<<<<<<
@Resource
private IFileRepository artifactRepository;
=======
>>>>>>> |
<<<<<<<
import alien4cloud.model.components.AbstractPropertyValue;
=======
import alien4cloud.model.components.AttributeDefinition;
import alien4cloud.model.components.ConcatPropertyValue;
>>>>>>>
import alien4cloud.model.components.AbstractPropertyValue;
import alien4cloud.model.components.AttributeDefinition;
import alien4cloud.model.components.ConcatPropertyValue;
<<<<<<<
import alien4cloud.model.components.ScalarPropertyValue;
=======
import alien4cloud.model.components.PropertyDefinition;
import alien4cloud.model.components.ScalarPropertyValue;
>>>>>>>
import alien4cloud.model.components.PropertyDefinition;
import alien4cloud.model.components.ScalarPropertyValue;
<<<<<<<
import com.google.common.collect.Maps;
=======
import com.google.common.collect.Lists;
>>>>>>>
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
<<<<<<<
Matcher matcher = getPropertyPattern.matcher(str);
StringBuilder sb = new StringBuilder();
int cursor = 0;
while (matcher.find()) {
String nodeName = matcher.group(1);
String propertyName = matcher.group(2);
// get the actual value for the property.
sb.append(str.substring(cursor, matcher.start()));
cursor = matcher.end();
NodeTemplate template = topology.getNodeTemplates().get(nodeName);
if (template != null) {
String propertyValue = null;
AbstractPropertyValue abstractPropertyValue = template.getProperties().get(propertyName);
if (abstractPropertyValue instanceof ScalarPropertyValue) {
propertyValue = ((ScalarPropertyValue) abstractPropertyValue).getValue();
}
if (propertyValue != null) {
sb.append(propertyValue);
=======
// handle AttributeDefinition type
if (attributeValue instanceof AttributeDefinition) {
String runtimeAttributeValue = extractRuntimeInformationAttribute(runtimeInformations, currentInstance, new String[] { basePaaSTemplate.getId() },
attributeId);
if (runtimeAttributeValue != null) {
if (!runtimeAttributeValue.contains("=Error!]") && !runtimeAttributeValue.equals("") && !runtimeAttributeValue.equals(null)) {
return runtimeAttributeValue;
>>>>>>>
// handle AttributeDefinition type
if (attributeValue instanceof AttributeDefinition) {
String runtimeAttributeValue = extractRuntimeInformationAttribute(runtimeInformations, currentInstance, new String[] { basePaaSTemplate.getId() },
attributeId);
if (runtimeAttributeValue != null) {
if (!runtimeAttributeValue.contains("=Error!]") && !runtimeAttributeValue.equals("") && !runtimeAttributeValue.equals(null)) {
return runtimeAttributeValue;
<<<<<<<
case ToscaFunctionConstants.HOST:
return getHostNodeId(basePaaSTemplate);
case ToscaFunctionConstants.SELF:
return getSelfNodeId(basePaaSTemplate);
case ToscaFunctionConstants.SOURCE:
return getSourceNodeId(basePaaSTemplate);
case ToscaFunctionConstants.TARGET:
return getTargetNodeId(basePaaSTemplate);
default:
return stringToEval;
=======
case ToscaFunctionConstants.HOST:
return getHostNodeId(basePaaSTemplate);
case ToscaFunctionConstants.SELF:
return new String[] { getSelfNodeId(basePaaSTemplate) };
case ToscaFunctionConstants.SOURCE:
return new String[] { getSourceNodeId(basePaaSTemplate) };
case ToscaFunctionConstants.TARGET:
return new String[] { getTargetNodeId(basePaaSTemplate) };
default:
return new String[] { stringToEval };
>>>>>>>
case ToscaFunctionConstants.HOST:
return getHostNodeId(basePaaSTemplate);
case ToscaFunctionConstants.SELF:
return new String[] { getSelfNodeId(basePaaSTemplate) };
case ToscaFunctionConstants.SOURCE:
return new String[] { getSourceNodeId(basePaaSTemplate) };
case ToscaFunctionConstants.TARGET:
return new String[] { getTargetNodeId(basePaaSTemplate) };
default:
return new String[] { stringToEval }; |
<<<<<<<
import alien4cloud.model.components.*;
=======
import alien4cloud.model.application.ApplicationEnvironment;
import alien4cloud.model.application.ApplicationVersion;
import alien4cloud.model.application.DeploymentSetup;
import alien4cloud.model.cloud.CloudResourceMatcherConfig;
import alien4cloud.model.components.AbstractPropertyValue;
import alien4cloud.model.components.ComplexPropertyValue;
import alien4cloud.model.components.DeploymentArtifact;
import alien4cloud.model.components.IndexedCapabilityType;
import alien4cloud.model.components.IndexedNodeType;
import alien4cloud.model.components.IndexedRelationshipType;
import alien4cloud.model.components.ListPropertyValue;
import alien4cloud.model.components.PropertyDefinition;
import alien4cloud.model.components.ScalarPropertyValue;
>>>>>>>
import alien4cloud.model.components.*;
<<<<<<<
Topology topology = topologyServiceCore.getOrFail(topologyId);
topologyService.checkAuthorizations(topology, ApplicationRole.APPLICATION_MANAGER, ApplicationRole.APPLICATION_DEVOPS,
ApplicationRole.APPLICATION_USER);
=======
Topology topology = topologyServiceCore.getMandatoryTopology(topologyId);
topologyService
.checkAuthorizations(topology, ApplicationRole.APPLICATION_MANAGER, ApplicationRole.APPLICATION_DEVOPS, ApplicationRole.APPLICATION_USER);
workflowBuilderService.initWorkflows(workflowBuilderService.buildTopologyContext(topology));
>>>>>>>
Topology topology = topologyServiceCore.getOrFail(topologyId);
topologyService.checkAuthorizations(topology, ApplicationRole.APPLICATION_MANAGER, ApplicationRole.APPLICATION_DEVOPS,
ApplicationRole.APPLICATION_USER);
workflowBuilderService.initWorkflows(workflowBuilderService.buildTopologyContext(topology));
<<<<<<<
PropertyUtil.setPropertyValue(nodeTemp, propertyDefinition, propertyName, propertyValue);
=======
// case "rest" : take the default value
if (propertyValue == null) {
propertyValue = node.getProperties().get(propertyName).getDefault();
}
// if the default value is also empty, we set the property value to null
if (propertyValue == null) {
nodeTemp.getProperties().put(propertyName, null);
} else {
if (propertyValue instanceof String) {
nodeTemp.getProperties().put(propertyName, new ScalarPropertyValue((String) propertyValue));
} else if (propertyValue instanceof Map) {
nodeTemp.getProperties().put(propertyName, new ComplexPropertyValue((Map<String, Object>) propertyValue));
} else if (propertyValue instanceof List) {
nodeTemp.getProperties().put(propertyName, new ListPropertyValue((List<Object>) propertyValue));
} else {
throw new InvalidArgumentException("Property type " + propertyValue.getClass().getName() + " is invalid");
}
}
>>>>>>>
PropertyUtil.setPropertyValue(nodeTemp, propertyDefinition, propertyName, propertyValue); |
<<<<<<<
DeploymentTopology deploymentTopology = deploymentTopologyService.getOrFail(version.getId(), environment.getId());
String locationId = TopologyLocationUtils.getLocationIdOrFail(deploymentTopology);
=======
DeploymentTopology deploymentTopology = deploymentTopologyService.getDeployedTopology(version.getId(), environment.getId());
String locationId = TopologyLocationService.getLocationId(deploymentTopology);
>>>>>>>
DeploymentTopology deploymentTopology = deploymentTopologyService.getDeployedTopology(version.getId(), environment.getId());
String locationId = TopologyLocationUtils.getLocationIdOrFail(deploymentTopology); |
<<<<<<<
import java.beans.IntrospectionException;
import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.node.NodeBuilder;
import org.jclouds.openstack.nova.v2_0.domain.Server;
import org.springframework.beans.factory.config.PropertyPlaceholderConfigurer;
import org.springframework.beans.factory.config.YamlPropertiesFactoryBean;
import org.springframework.core.io.ClassPathResource;
import org.springframework.core.io.InputStreamResource;
import org.springframework.core.io.Resource;
import org.springframework.expression.EvaluationContext;
import org.springframework.expression.spel.support.StandardEvaluationContext;
import org.springframework.util.PropertyPlaceholderHelper;
import alien4cloud.exception.NotFoundException;
=======
>>>>>>>
import java.beans.IntrospectionException;
import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.node.NodeBuilder;
import org.springframework.beans.factory.config.PropertyPlaceholderConfigurer;
import org.springframework.beans.factory.config.YamlPropertiesFactoryBean;
import org.springframework.core.io.ClassPathResource;
import org.springframework.core.io.InputStreamResource;
import org.springframework.core.io.Resource;
import org.springframework.expression.EvaluationContext;
import org.springframework.expression.spel.support.StandardEvaluationContext;
import org.springframework.util.PropertyPlaceholderHelper; |
<<<<<<<
if (StringUtils.isNotEmpty(newPropertyDefinition.getDefault())) {
DeploymentSetup[] deploymentSetups = deploymentSetupService.getByTopologyId(topologyId);
for (DeploymentSetup deploymentSetup : deploymentSetups) {
if (deploymentSetup.getInputProperties() == null) {
deploymentSetup.setInputProperties(Maps.<String, String> newHashMap());
}
deploymentSetup.getInputProperties().put(inputId, newPropertyDefinition.getDefault());
alienDAO.save(deploymentSetup);
}
}
=======
topologyServiceCore.updateSubstitutionType(topology);
>>>>>>>
if (StringUtils.isNotEmpty(newPropertyDefinition.getDefault())) {
DeploymentSetup[] deploymentSetups = deploymentSetupService.getByTopologyId(topologyId);
for (DeploymentSetup deploymentSetup : deploymentSetups) {
if (deploymentSetup.getInputProperties() == null) {
deploymentSetup.setInputProperties(Maps.<String, String> newHashMap());
}
deploymentSetup.getInputProperties().put(inputId, newPropertyDefinition.getDefault());
alienDAO.save(deploymentSetup);
}
}
topologyServiceCore.updateSubstitutionType(topology);
<<<<<<<
DeploymentSetup[] deploymentSetups = deploymentSetupService.getByTopologyId(topologyId);
for (DeploymentSetup deploymentSetup : deploymentSetups) {
if (deploymentSetup.getInputProperties() != null && deploymentSetup.getInputProperties().containsKey(inputId)) {
String oldValue = deploymentSetup.getInputProperties().remove(inputId);
deploymentSetup.getInputProperties().put(newInputId, oldValue);
alienDAO.save(deploymentSetup);
}
}
=======
topologyServiceCore.updateSubstitutionType(topology);
>>>>>>>
DeploymentSetup[] deploymentSetups = deploymentSetupService.getByTopologyId(topologyId);
for (DeploymentSetup deploymentSetup : deploymentSetups) {
if (deploymentSetup.getInputProperties() != null && deploymentSetup.getInputProperties().containsKey(inputId)) {
String oldValue = deploymentSetup.getInputProperties().remove(inputId);
deploymentSetup.getInputProperties().put(newInputId, oldValue);
alienDAO.save(deploymentSetup);
}
}
topologyServiceCore.updateSubstitutionType(topology);
<<<<<<<
DeploymentSetup[] deploymentSetups = deploymentSetupService.getByTopologyId(topologyId);
for (DeploymentSetup deploymentSetup : deploymentSetups) {
if (deploymentSetup.getInputProperties() != null && deploymentSetup.getInputProperties().containsKey(inputId)) {
deploymentSetup.getInputProperties().remove(inputId);
alienDAO.save(deploymentSetup);
}
}
=======
topologyServiceCore.updateSubstitutionType(topology);
>>>>>>>
DeploymentSetup[] deploymentSetups = deploymentSetupService.getByTopologyId(topologyId);
for (DeploymentSetup deploymentSetup : deploymentSetups) {
if (deploymentSetup.getInputProperties() != null && deploymentSetup.getInputProperties().containsKey(inputId)) {
deploymentSetup.getInputProperties().remove(inputId);
alienDAO.save(deploymentSetup);
}
}
topologyServiceCore.updateSubstitutionType(topology); |
<<<<<<<
import alien4cloud.model.cloud.Network;
import alien4cloud.tosca.ToscaService;
=======
import alien4cloud.tosca.ToscaUtils;
>>>>>>>
import alien4cloud.model.cloud.Network;
import alien4cloud.tosca.ToscaUtils;
<<<<<<<
if (toscaService.isOfType(NormativeComputeConstants.COMPUTE_TYPE, types.get(nodeTemplateEntry.getKey()))) {
matchableNodeTemplates.computeTemplates.put(nodeTemplateEntry.getKey(), nodeTemplateEntry.getValue());
}
if (toscaService.isOfType(NormativeNetworkConstants.NETWORK_TYPE, types.get(nodeTemplateEntry.getKey()))) {
matchableNodeTemplates.networkTemplates.put(nodeTemplateEntry.getKey(), nodeTemplateEntry.getValue());
=======
if (ToscaUtils.isFromType(NormativeComputeConstants.COMPUTE_TYPE, types.get(nodeTemplateEntry.getKey()))) {
// TODO check also network and other cloud related resources ...
matchableNodeTemplates.put(nodeTemplateEntry.getKey(), nodeTemplateEntry.getValue());
>>>>>>>
if (ToscaUtils.isFromType(NormativeComputeConstants.COMPUTE_TYPE, types.get(nodeTemplateEntry.getKey()))) {
matchableNodeTemplates.computeTemplates.put(nodeTemplateEntry.getKey(), nodeTemplateEntry.getValue());
}
if (ToscaUtils.isFromType(NormativeNetworkConstants.NETWORK_TYPE, types.get(nodeTemplateEntry.getKey()))) {
matchableNodeTemplates.networkTemplates.put(nodeTemplateEntry.getKey(), nodeTemplateEntry.getValue());
<<<<<<<
=======
if (!ToscaUtils.isFromType(NormativeComputeConstants.COMPUTE_TYPE, nodeType)) {
throw new InvalidArgumentException("Node is not a compute but of type [" + nodeTemplate.getType() + "]");
}
>>>>>>> |
<<<<<<<
import alien4cloud.security.CloudRole;
import alien4cloud.security.Role;
import alien4cloud.security.services.ResourceRoleService;
import alien4cloud.tosca.properties.constraints.ConstraintUtil.ConstraintInformation;
import alien4cloud.utils.services.ConstraintPropertyService;
=======
import alien4cloud.security.model.CloudRole;
import alien4cloud.security.model.Role;
import alien4cloud.security.ResourceRoleService;
>>>>>>>
import alien4cloud.security.ResourceRoleService;
import alien4cloud.security.model.CloudRole;
import alien4cloud.security.model.Role;
import alien4cloud.tosca.properties.constraints.ConstraintUtil.ConstraintInformation;
import alien4cloud.utils.services.ConstraintPropertyService; |
<<<<<<<
=======
import alien4cloud.model.topology.AbstractPolicy;
import alien4cloud.model.topology.Capability;
import alien4cloud.model.topology.LocationPlacementPolicy;
import alien4cloud.model.topology.NodeGroup;
import alien4cloud.model.topology.NodeTemplate;
import alien4cloud.model.topology.Topology;
>>>>>>> |
<<<<<<<
import alien4cloud.tosca.parser.INodeParser;
import alien4cloud.tosca.parser.ParserUtils;
import alien4cloud.tosca.parser.ParsingContextExecution;
import alien4cloud.tosca.parser.impl.base.ReferencedParser;
=======
import alien4cloud.tosca.parser.*;
import alien4cloud.tosca.parser.mapping.Wd03OperationDefinition;
>>>>>>>
import alien4cloud.tosca.parser.*;
import alien4cloud.tosca.parser.impl.base.ReferencedParser; |
<<<<<<<
import alien4cloud.paas.model.AbstractMonitorEvent;
import alien4cloud.paas.model.DeploymentStatus;
import alien4cloud.paas.model.InstanceInformation;
import alien4cloud.paas.model.OperationExecRequest;
import alien4cloud.paas.model.PaaSDeploymentContext;
import alien4cloud.paas.model.PaaSDeploymentStatusMonitorEvent;
import alien4cloud.paas.model.PaaSInstanceStateMonitorEvent;
import alien4cloud.paas.model.PaaSInstanceStorageMonitorEvent;
import alien4cloud.paas.model.PaaSMessageMonitorEvent;
import alien4cloud.paas.model.PaaSNodeTemplate;
import alien4cloud.paas.model.PaaSTopology;
import alien4cloud.paas.model.PaaSTopologyDeploymentContext;
import alien4cloud.paas.plan.TopologyTreeBuilderService;
import alien4cloud.tosca.container.model.topology.Topology;
=======
import alien4cloud.paas.model.*;
import alien4cloud.model.topology.Topology;
>>>>>>>
import alien4cloud.paas.model.AbstractMonitorEvent;
import alien4cloud.paas.model.DeploymentStatus;
import alien4cloud.paas.model.InstanceInformation;
import alien4cloud.paas.model.OperationExecRequest;
import alien4cloud.paas.model.PaaSDeploymentContext;
import alien4cloud.paas.model.PaaSDeploymentStatusMonitorEvent;
import alien4cloud.paas.model.PaaSInstanceStateMonitorEvent;
import alien4cloud.paas.model.PaaSInstanceStorageMonitorEvent;
import alien4cloud.paas.model.PaaSMessageMonitorEvent;
import alien4cloud.paas.model.PaaSNodeTemplate;
import alien4cloud.paas.model.PaaSTopology;
import alien4cloud.paas.model.PaaSTopologyDeploymentContext;
import alien4cloud.paas.plan.TopologyTreeBuilderService; |
<<<<<<<
import org.apache.http.client.methods.*;
=======
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpDelete;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpHead;
import org.apache.http.client.methods.HttpPatch;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpPut;
>>>>>>>
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpHead;
import org.apache.http.client.methods.HttpPatch;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpPut; |
<<<<<<<
import io.nuls.core.rpc.util.TimeUtils;
=======
import io.nuls.core.rpc.protocol.MessageProcessor;
import io.nuls.core.rpc.util.RPCUtil;
import io.nuls.core.rpc.util.NulsDateUtils;
>>>>>>>
import io.nuls.core.rpc.util.TimeUtils;
import io.nuls.core.rpc.protocol.MessageProcessor;
import io.nuls.core.rpc.util.RPCUtil;
import io.nuls.core.rpc.util.NulsDateUtils; |
<<<<<<<
IndexedDataType dataType = dependencyService.getDataType(propertyType, new ArchiveDependencyContext(archiveRoot));
if (dataType == null) {
context.getParsingErrors().add(
new ParsingError(ErrorCode.VALIDATION_ERROR, "ToscaPropertyType", node.getStartMark(), "Type " + propertyType
+ " is not valid for the property definition", node.getEndMark(), "type"));
=======
if (!searchService.isElementExistInDependencies(IndexedDataType.class, propertyType, archiveRoot.getArchive().getDependencies())) {
context.getParsingErrors().add(new ParsingError(ErrorCode.TYPE_NOT_FOUND, "ToscaPropertyType", node.getStartMark(),
"Type " + propertyType + " is not valid for the property definition", node.getEndMark(), propertyType));
>>>>>>>
IndexedDataType dataType = dependencyService.getDataType(propertyType, new ArchiveDependencyContext(archiveRoot));
if (dataType == null) {
context.getParsingErrors().add(new ParsingError(ErrorCode.TYPE_NOT_FOUND, "ToscaPropertyType", node.getStartMark(),
"Type " + propertyType + " is not found.", node.getEndMark(), "type"));
<<<<<<<
=======
private void validateDefaultValue(PropertyDefinition propertyDefinition, ParsingContextExecution context, Node node) {
String defaultAsString = propertyDefinition.getDefault();
if (defaultAsString == null) {
return;
}
IPropertyType<?> toscaType = ToscaType.fromYamlTypeName(propertyDefinition.getType());
if (toscaType != null) {
Object defaultValue;
try {
defaultValue = toscaType.parse(defaultAsString);
} catch (InvalidPropertyValueException e) {
addNonCompatiblePropertyTypeError(propertyDefinition, defaultAsString, node, context);
return;
}
if (propertyDefinition.getConstraints() != null && !propertyDefinition.getConstraints().isEmpty()) {
for (int i = 0; i < propertyDefinition.getConstraints().size(); i++) {
PropertyConstraint constraint = propertyDefinition.getConstraints().get(i);
try {
constraint.validate(defaultValue);
} catch (ConstraintViolationException e) {
addNonCompatibleConstraintError(constraint, defaultAsString, node, context);
}
}
}
} else if (!AlienCustomTypes.checkDefaultIsObject(defaultAsString)) {
// TODO check if complex object that default is validated
addNonCompatiblePropertyTypeError(propertyDefinition, defaultAsString, node, context);
}
}
private void addNonCompatiblePropertyTypeError(PropertyDefinition propertyDefinition, String defaultAsString, Node node, ParsingContextExecution context) {
context.getParsingErrors()
.add(new ParsingError(ErrorCode.VALIDATION_ERROR, "ToscaPropertyDefaultValueType", node.getStartMark(),
"Default value " + defaultAsString + " is not valid or is not supported for the property type " + propertyDefinition.getType(),
node.getEndMark(), "default"));
}
private void addNonCompatibleConstraintError(PropertyConstraint constraint, String defaultAsString, Node node, ParsingContextExecution context) {
context.getParsingErrors()
.add(new ParsingError(ErrorCode.VALIDATION_ERROR, "ToscaPropertyDefaultValueConstraints", node.getStartMark(),
"Default value " + defaultAsString + " is not valid for the constraint " + constraint.getClass().getSimpleName(), node.getEndMark(),
"constraints"));
}
>>>>>>> |
<<<<<<<
@TermFilter
=======
@Setter
>>>>>>>
@TermFilter
@Setter |
<<<<<<<
DeploymentStatus currentStatus = deploymentRuntimeStateService.getDeploymentStatus(deployment);
if (DeploymentStatus.UNDEPLOYED.equals(currentStatus)) {
deploymentService.markUndeployed(deployment);
}
return currentStatus;
=======
return deploymentLockService.doWithDeploymentReadLock(deployment.getOrchestratorDeploymentId(), () -> {
final SettableFuture<DeploymentStatus> statusSettableFuture = SettableFuture.create();
// update the deployment status from PaaS if it cannot be found.
deploymentRuntimeStateService.getDeploymentStatus(deployment, new IPaaSCallback<DeploymentStatus>() {
@Override
public void onSuccess(DeploymentStatus data) {
statusSettableFuture.set(data);
}
@Override
public void onFailure(Throwable throwable) {
statusSettableFuture.setException(throwable);
}
});
try {
DeploymentStatus currentStatus = statusSettableFuture.get();
if (DeploymentStatus.UNDEPLOYED.equals(currentStatus)) {
deploymentService.markUndeployed(deployment);
}
return currentStatus;
} catch (Exception e) {
throw new PaaSTechnicalException("Could not retrieve status from PaaS", e);
}
});
>>>>>>>
return deploymentLockService.doWithDeploymentReadLock(deployment.getOrchestratorDeploymentId(), () -> {
DeploymentStatus currentStatus = deploymentRuntimeStateService.getDeploymentStatus(deployment);
if (DeploymentStatus.UNDEPLOYED.equals(currentStatus)) {
deploymentService.markUndeployed(deployment);
}
return currentStatus;
}); |
<<<<<<<
import alien4cloud.rest.model.RestError;
=======
import alien4cloud.rest.internal.PropertyRequest;
>>>>>>>
import alien4cloud.rest.internal.PropertyRequest;
import alien4cloud.rest.model.RestError;
<<<<<<<
=======
import alien4cloud.tosca.properties.constraints.ConstraintUtil.ConstraintInformation;
import alien4cloud.utils.MetaPropertiesServiceWrapper;
>>>>>>>
import alien4cloud.tosca.properties.constraints.ConstraintUtil.ConstraintInformation;
import alien4cloud.utils.MetaPropertiesServiceWrapper; |
<<<<<<<
package alien4cloud.topology;
import alien4cloud.json.deserializer.TaskDeserializer;
import alien4cloud.topology.task.AbstractTask;
import alien4cloud.utils.jackson.ConditionalAttributes;
import alien4cloud.utils.jackson.ConditionalOnAttribute;
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
import com.google.common.collect.Lists;
import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.Setter;
import org.apache.commons.collections4.CollectionUtils;
import java.util.List;
/**
* Validation result that contains a boolean determining if a topology is valid for deployment.
* If not, contains also a list of tasks of components to implement .
*
* @author igor ngouagna
*/
@Getter
@Setter
@NoArgsConstructor
@AllArgsConstructor
public class TopologyValidationResult {
private boolean isValid;
@ConditionalOnAttribute(ConditionalAttributes.REST)
@JsonDeserialize(contentUsing = TaskDeserializer.class)
private List<AbstractTask> taskList;
@ConditionalOnAttribute(ConditionalAttributes.REST)
@JsonDeserialize(contentUsing = TaskDeserializer.class)
private List<AbstractTask> warningList;
public <T extends AbstractTask> void addTasks(List<T> tasks) {
if (CollectionUtils.isEmpty(tasks)) {
return;
}
if (taskList == null) {
taskList = Lists.newArrayList();
}
taskList.addAll(tasks);
}
public <T extends AbstractTask> void addTask(T task) {
if (task == null) {
return;
}
if (taskList == null) {
taskList = Lists.newArrayList();
}
taskList.add(task);
}
public <T extends AbstractTask> void addWarnings(List<T> warnings) {
if (CollectionUtils.isEmpty(warnings)) {
return;
}
if (warningList == null) {
warningList = Lists.newArrayList();
}
warningList.addAll(warnings);
}
public <T extends AbstractTask> void addWarning(T warning) {
if (warning == null) {
return;
}
if (warningList == null) {
warningList = Lists.newArrayList();
}
warningList.add(warning);
}
=======
package alien4cloud.topology;
import java.util.List;
import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.Setter;
import org.apache.commons.collections4.CollectionUtils;
import alien4cloud.json.deserializer.TaskDeserializer;
import alien4cloud.topology.task.AbstractTask;
import alien4cloud.utils.jackson.ConditionalAttributes;
import alien4cloud.utils.jackson.ConditionalOnAttribute;
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
import com.google.common.collect.Lists;
/**
* Validation result that contains a boolean determining if a topology is valid for deployment.
* If not, contains also a list of tasks of components to implement .
*
* @author igor ngouagna
*/
@Getter
@Setter
@NoArgsConstructor
@AllArgsConstructor(suppressConstructorProperties = true)
public class TopologyValidationResult {
private boolean isValid;
@ConditionalOnAttribute(ConditionalAttributes.REST)
@JsonDeserialize(contentUsing = TaskDeserializer.class)
private List<AbstractTask> taskList;
@ConditionalOnAttribute(ConditionalAttributes.REST)
@JsonDeserialize(contentUsing = TaskDeserializer.class)
private List<AbstractTask> warningList;
public <T extends AbstractTask> void addTasks(List<T> tasks) {
if (CollectionUtils.isEmpty(tasks)) {
return;
}
if (taskList == null) {
taskList = Lists.newArrayList();
}
taskList.addAll(tasks);
}
public <T extends AbstractTask> void addTask(T task) {
if (task == null) {
return;
}
if (taskList == null) {
taskList = Lists.newArrayList();
}
taskList.add(task);
}
public <T extends AbstractTask> void addWarnings(List<T> warnings) {
if (CollectionUtils.isEmpty(warnings)) {
return;
}
if (warningList == null) {
warningList = Lists.newArrayList();
}
warningList.addAll(warnings);
}
public <T extends AbstractTask> void addWarning(T warning) {
if (warning == null) {
return;
}
if (warningList == null) {
warningList = Lists.newArrayList();
}
warningList.add(warning);
}
>>>>>>>
package alien4cloud.topology;
import alien4cloud.json.deserializer.TaskDeserializer;
import alien4cloud.topology.task.AbstractTask;
import alien4cloud.utils.jackson.ConditionalAttributes;
import alien4cloud.utils.jackson.ConditionalOnAttribute;
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
import com.google.common.collect.Lists;
import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.Setter;
import org.apache.commons.collections4.CollectionUtils;
import java.util.List;
/**
* Validation result that contains a boolean determining if a topology is valid for deployment.
* If not, contains also a list of tasks of components to implement .
*
* @author igor ngouagna
*/
@Getter
@Setter
@NoArgsConstructor
@AllArgsConstructor(suppressConstructorProperties = true)
public class TopologyValidationResult {
private boolean isValid;
@ConditionalOnAttribute(ConditionalAttributes.REST)
@JsonDeserialize(contentUsing = TaskDeserializer.class)
private List<AbstractTask> taskList;
@ConditionalOnAttribute(ConditionalAttributes.REST)
@JsonDeserialize(contentUsing = TaskDeserializer.class)
private List<AbstractTask> warningList;
public <T extends AbstractTask> void addTasks(List<T> tasks) {
if (CollectionUtils.isEmpty(tasks)) {
return;
}
if (taskList == null) {
taskList = Lists.newArrayList();
}
taskList.addAll(tasks);
}
public <T extends AbstractTask> void addTask(T task) {
if (task == null) {
return;
}
if (taskList == null) {
taskList = Lists.newArrayList();
}
taskList.add(task);
}
public <T extends AbstractTask> void addWarnings(List<T> warnings) {
if (CollectionUtils.isEmpty(warnings)) {
return;
}
if (warningList == null) {
warningList = Lists.newArrayList();
}
warningList.addAll(warnings);
}
public <T extends AbstractTask> void addWarning(T warning) {
if (warning == null) {
return;
}
if (warningList == null) {
warningList = Lists.newArrayList();
}
warningList.add(warning);
} |
<<<<<<<
=======
import com.android.launcher3.DragController;
import com.android.launcher3.DragLayer;
>>>>>>> |
<<<<<<<
/*
* Copyright (C) 2012-2019 52°North Initiative for Geospatial Open Source
=======
/**
* Copyright (C) 2012-2020 52°North Initiative for Geospatial Open Source
>>>>>>>
/*
* Copyright (C) 2012-2020 52°North Initiative for Geospatial Open Source |
<<<<<<<
import com.android.launcher3.config.ProviderConfig;
=======
import com.android.launcher3.config.FeatureFlags;
>>>>>>>
import com.android.launcher3.config.FeatureFlags;
import com.android.launcher3.config.ProviderConfig;
<<<<<<<
float scale = ProviderConfig.IS_DOGFOOD_BUILD ?
=======
float scale = FeatureFlags.LAUNCHER3_ICON_NORMALIZATION ?
>>>>>>>
float scale = FeatureFlags.LAUNCHER3_ICON_NORMALIZATION ? |
<<<<<<<
public Animator setStateWithAnimation(State toState, boolean animated,
boolean hasOverlaySearchBar, HashMap<View, Integer> layerViews) {
=======
public Animator setStateWithAnimation(State toState, int toPage, boolean animated,
HashMap<View, Integer> layerViews) {
>>>>>>>
public Animator setStateWithAnimation(State toState, boolean animated,
HashMap<View, Integer> layerViews) {
<<<<<<<
toState, animated, hasOverlaySearchBar, layerViews);
=======
toState, toPage, animated, layerViews);
>>>>>>>
toState, animated, layerViews); |
<<<<<<<
final Workspace.State toWorkspaceState,
final boolean animated, final Runnable onCompleteRunnable) {
=======
final Workspace.State fromWorkspaceState, final Workspace.State toWorkspaceState,
final int toWorkspacePage, final boolean animated, final Runnable onCompleteRunnable) {
>>>>>>>
final Workspace.State fromWorkspaceState, final Workspace.State toWorkspaceState,
final boolean animated, final Runnable onCompleteRunnable) {
<<<<<<<
Animator workspaceAnim = mLauncher.startWorkspaceStateChangeAnimation(toWorkspaceState,
animated, overlaySearchBarView != null /* hasOverlaySearchBar */, layerViews);
=======
Animator workspaceAnim = mLauncher.startWorkspaceStateChangeAnimation(toWorkspaceState, -1,
animated, layerViews);
>>>>>>>
Animator workspaceAnim = mLauncher.startWorkspaceStateChangeAnimation(toWorkspaceState,
animated, layerViews);
<<<<<<<
private void startAnimationToWorkspaceFromAllApps(final Workspace.State toWorkspaceState,
final boolean animated, final Runnable onCompleteRunnable) {
=======
private void startAnimationToWorkspaceFromAllApps(final Workspace.State fromWorkspaceState,
final Workspace.State toWorkspaceState, final int toWorkspacePage,
final boolean animated, final Runnable onCompleteRunnable) {
>>>>>>>
private void startAnimationToWorkspaceFromAllApps(final Workspace.State fromWorkspaceState,
final Workspace.State toWorkspaceState, final boolean animated,
final Runnable onCompleteRunnable) {
<<<<<<<
startAnimationToWorkspaceFromOverlay(toWorkspaceState, mLauncher.getAllAppsButton(),
appsView, appsView.getContentView(), appsView.getRevealView(),
appsView.getSearchBarView(), animated, onCompleteRunnable, cb);
=======
mCurrentAnimation = startAnimationToWorkspaceFromOverlay(fromWorkspaceState, toWorkspaceState,
toWorkspacePage, mLauncher.getAllAppsButton(), appsView, appsView.getContentView(),
appsView.getRevealView(), appsView.getSearchBarView(), animated,
onCompleteRunnable, cb);
>>>>>>>
mCurrentAnimation = startAnimationToWorkspaceFromOverlay(fromWorkspaceState, toWorkspaceState,
mLauncher.getAllAppsButton(), appsView, appsView.getContentView(),
appsView.getRevealView(), appsView.getSearchBarView(), animated,
onCompleteRunnable, cb);
<<<<<<<
private void startAnimationToWorkspaceFromWidgets(final Workspace.State toWorkspaceState,
final boolean animated, final Runnable onCompleteRunnable) {
=======
private void startAnimationToWorkspaceFromWidgets(final Workspace.State fromWorkspaceState,
final Workspace.State toWorkspaceState, final int toWorkspacePage,
final boolean animated, final Runnable onCompleteRunnable) {
>>>>>>>
private void startAnimationToWorkspaceFromWidgets(final Workspace.State fromWorkspaceState,
final Workspace.State toWorkspaceState, final boolean animated,
final Runnable onCompleteRunnable) {
<<<<<<<
startAnimationToWorkspaceFromOverlay(toWorkspaceState, mLauncher.getWidgetsButton(),
widgetsView, widgetsView.getContentView(), widgetsView.getRevealView(), null,
animated, onCompleteRunnable, cb);
}
=======
mCurrentAnimation = startAnimationToWorkspaceFromOverlay(fromWorkspaceState,
toWorkspaceState, toWorkspacePage, mLauncher.getWidgetsButton(), widgetsView,
widgetsView.getContentView(), widgetsView.getRevealView(), null, animated,
onCompleteRunnable, cb);
}
>>>>>>>
mCurrentAnimation = startAnimationToWorkspaceFromOverlay(fromWorkspaceState,
toWorkspaceState, mLauncher.getWidgetsButton(), widgetsView,
widgetsView.getContentView(), widgetsView.getRevealView(), null, animated,
onCompleteRunnable, cb);
}
<<<<<<<
private void startAnimationToWorkspaceFromOverlay(final Workspace.State toWorkspaceState,
final View buttonView, final View fromView, final View contentView,
final View revealView, final View overlaySearchBarView, final boolean animated,
final Runnable onCompleteRunnable, final PrivateTransitionCallbacks pCb) {
=======
private AnimatorSet startAnimationToWorkspaceFromOverlay(final Workspace.State fromWorkspaceState,
final Workspace.State toWorkspaceState, final int toWorkspacePage, final View buttonView,
final View fromView, final View contentView, final View revealView,
final View overlaySearchBarView, final boolean animated, final Runnable onCompleteRunnable,
final PrivateTransitionCallbacks pCb) {
final AnimatorSet animation = LauncherAnimUtils.createAnimatorSet();
>>>>>>>
private AnimatorSet startAnimationToWorkspaceFromOverlay(final Workspace.State fromWorkspaceState,
final Workspace.State toWorkspaceState, final View buttonView,
final View fromView, final View contentView, final View revealView,
final View overlaySearchBarView, final boolean animated, final Runnable onCompleteRunnable,
final PrivateTransitionCallbacks pCb) {
final AnimatorSet animation = LauncherAnimUtils.createAnimatorSet();
<<<<<<<
animated, overlaySearchBarView != null /* hasOverlaySearchBar */,
layerViews);
=======
toWorkspacePage, animated, layerViews);
>>>>>>>
animated, layerViews); |
<<<<<<<
ItemInfo info = d.dragInfo;
=======
>>>>>>>
<<<<<<<
return getContext().getString(R.string.workspace_scroll_format,
page + 1 - delta, getChildCount() - delta);
=======
int nScreens = getChildCount() - delta;
int extraScreenId = mScreenOrder.indexOf(EXTRA_EMPTY_SCREEN_ID);
if (extraScreenId >= 0 && nScreens > 1) {
if (page == extraScreenId) {
return getContext().getString(R.string.workspace_new_page);
}
nScreens--;
}
return String.format(getContext().getString(R.string.workspace_scroll_format),
page + 1 - delta, nScreens);
>>>>>>>
int nScreens = getChildCount() - delta;
int extraScreenId = mScreenOrder.indexOf(EXTRA_EMPTY_SCREEN_ID);
if (extraScreenId >= 0 && nScreens > 1) {
if (page == extraScreenId) {
return getContext().getString(R.string.workspace_new_page);
}
nScreens--;
}
return String.format(getContext().getString(R.string.workspace_scroll_format),
page + 1 - delta, nScreens); |
<<<<<<<
mStateTransitionAnimation.startAnimationToWorkspace(mState, Workspace.State.NORMAL,
animated, onCompleteRunnable);
// Show the search bar (only animate if we were showing the drop target bar in spring
// loaded mode)
if (mSearchDropTargetBar != null) {
mSearchDropTargetBar.showSearchBar(animated && wasInSpringLoadedMode);
}
=======
mStateTransitionAnimation.startAnimationToWorkspace(mState, mWorkspace.getState(),
Workspace.State.NORMAL, snapToPage, animated, onCompleteRunnable);
>>>>>>>
mStateTransitionAnimation.startAnimationToWorkspace(mState, mWorkspace.getState(),
Workspace.State.NORMAL, animated, onCompleteRunnable);
<<<<<<<
mStateTransitionAnimation.startAnimationToWorkspace(mState, Workspace.State.OVERVIEW,
animated, null /* onCompleteRunnable */);
=======
mStateTransitionAnimation.startAnimationToWorkspace(mState, mWorkspace.getState(),
Workspace.State.OVERVIEW,
WorkspaceStateTransitionAnimation.SCROLL_TO_CURRENT_PAGE, animated,
null /* onCompleteRunnable */);
>>>>>>>
mStateTransitionAnimation.startAnimationToWorkspace(mState, mWorkspace.getState(),
Workspace.State.OVERVIEW, animated, null /* onCompleteRunnable */);
<<<<<<<
public Animator startWorkspaceStateChangeAnimation(Workspace.State toState,
boolean animated, boolean hasOverlaySearchBar, HashMap<View, Integer> layerViews) {
=======
public Animator startWorkspaceStateChangeAnimation(Workspace.State toState, int toPage,
boolean animated, HashMap<View, Integer> layerViews) {
>>>>>>>
public Animator startWorkspaceStateChangeAnimation(Workspace.State toState,
boolean animated, HashMap<View, Integer> layerViews) {
<<<<<<<
Animator anim = mWorkspace.setStateWithAnimation(
toState, animated, hasOverlaySearchBar, layerViews);
=======
Animator anim = mWorkspace.setStateWithAnimation(toState, toPage, animated, layerViews);
>>>>>>>
Animator anim = mWorkspace.setStateWithAnimation(toState, animated, layerViews);
<<<<<<<
mStateTransitionAnimation.startAnimationToWorkspace(mState, Workspace.State.SPRING_LOADED,
true /* animated */, null /* onCompleteRunnable */);
=======
mStateTransitionAnimation.startAnimationToWorkspace(mState, mWorkspace.getState(),
Workspace.State.SPRING_LOADED,
WorkspaceStateTransitionAnimation.SCROLL_TO_CURRENT_PAGE, true /* animated */,
null /* onCompleteRunnable */);
>>>>>>>
mStateTransitionAnimation.startAnimationToWorkspace(mState, mWorkspace.getState(),
Workspace.State.SPRING_LOADED, true /* animated */,
null /* onCompleteRunnable */); |
<<<<<<<
ObjectAnimator oa = LauncherAnimUtils.ofViewAlphaAndScale(
mFolderIconImageView, 0, 1.5f, 1.5f);
if (Utilities.isLmpOrAbove()) {
=======
ObjectAnimator oa = LauncherAnimUtils.ofPropertyValuesHolder(mFolderIconImageView, alpha,
scaleX, scaleY);
if (Utilities.ATLEAST_LOLLIPOP) {
>>>>>>>
ObjectAnimator oa = LauncherAnimUtils.ofViewAlphaAndScale(
mFolderIconImageView, 0, 1.5f, 1.5f);
if (Utilities.ATLEAST_LOLLIPOP) { |
<<<<<<<
import io.nuls.core.exception.NulsException;
import io.nuls.core.exception.NulsRuntimeException;
import io.nuls.core.log.Log;
import io.nuls.core.model.BigIntegerUtils;
import io.nuls.core.model.ObjectUtils;
import io.nuls.core.model.StringUtils;
import io.nuls.core.parse.JSONUtils;
import io.nuls.core.rpc.model.ModuleE;
import io.nuls.core.rpc.model.message.Response;
import io.nuls.core.rpc.netty.processor.ResponseMessageProcessor;
import io.nuls.core.rpc.util.TimeUtils;
=======
import io.nuls.core.rpc.util.NulsDateUtils;
>>>>>>>
import io.nuls.core.exception.NulsException;
import io.nuls.core.exception.NulsRuntimeException;
import io.nuls.core.log.Log;
import io.nuls.core.model.BigIntegerUtils;
import io.nuls.core.model.ObjectUtils;
import io.nuls.core.model.StringUtils;
import io.nuls.core.parse.JSONUtils;
import io.nuls.core.rpc.model.ModuleE;
import io.nuls.core.rpc.model.message.Response;
import io.nuls.core.rpc.netty.processor.ResponseMessageProcessor;
import io.nuls.core.rpc.util.TimeUtils;
import io.nuls.core.rpc.util.NulsDateUtils;
<<<<<<<
=======
import io.nuls.core.rpc.model.ModuleE;
import io.nuls.core.rpc.model.message.Response;
import io.nuls.core.rpc.netty.processor.ResponseMessageProcessor;
import io.nuls.core.rpc.util.RPCUtil;
import io.nuls.core.basic.Result;
import io.nuls.core.constant.TxType;
import io.nuls.core.core.annotation.Autowired;
import io.nuls.core.exception.NulsException;
import io.nuls.core.exception.NulsRuntimeException;
import io.nuls.core.log.Log;
import io.nuls.core.model.BigIntegerUtils;
import io.nuls.core.model.ObjectUtils;
import io.nuls.core.model.StringUtils;
import io.nuls.core.parse.JSONUtils;
>>>>>>>
import io.nuls.core.rpc.model.ModuleE;
import io.nuls.core.rpc.model.message.Response;
import io.nuls.core.rpc.netty.processor.ResponseMessageProcessor;
import io.nuls.core.rpc.util.RPCUtil;
import io.nuls.core.basic.Result;
import io.nuls.core.constant.TxType;
import io.nuls.core.core.annotation.Autowired;
import io.nuls.core.exception.NulsException;
import io.nuls.core.exception.NulsRuntimeException;
import io.nuls.core.log.Log;
import io.nuls.core.model.BigIntegerUtils;
import io.nuls.core.model.ObjectUtils;
import io.nuls.core.model.StringUtils;
import io.nuls.core.parse.JSONUtils; |
<<<<<<<
if (child.getTag() == null || !(child.getTag() instanceof ItemInfo)) {
String msg = "Drag started with a view that has no tag set. This "
+ "will cause a crash (issue 11627249) down the line. "
+ "View: " + child + " tag: " + child.getTag();
throw new IllegalStateException(msg);
}
mDragController.startDrag(b, dragLayerX, dragLayerY, source, child.getTag(),
=======
DragView dv = mDragController.startDrag(b, dragLayerX, dragLayerY, source, child.getTag(),
>>>>>>>
if (child.getTag() == null || !(child.getTag() instanceof ItemInfo)) {
String msg = "Drag started with a view that has no tag set. This "
+ "will cause a crash (issue 11627249) down the line. "
+ "View: " + child + " tag: " + child.getTag();
throw new IllegalStateException(msg);
}
DragView dv = mDragController.startDrag(b, dragLayerX, dragLayerY, source, child.getTag(), |
<<<<<<<
private DragInfo mDragInfo = null;
private final SparseArray<AccessibilityAction> mActions = new SparseArray<>();
=======
private final SparseArray<AccessibilityAction> mActions =
new SparseArray<AccessibilityAction>();
>>>>>>>
private final SparseArray<AccessibilityAction> mActions = new SparseArray<>(); |
<<<<<<<
=======
final View buttonView = mLauncher.getWidgetsButton();
>>>>>>>
final View buttonView = mLauncher.getWidgetsButton();
<<<<<<<
startAnimationToWorkspaceFromOverlay(toWorkspaceState, appsView, appsView.getContentView(),
appsView.getRevealView(), appsView.getSearchBarView(),
animated, onCompleteRunnable, cb);
=======
startAnimationToWorkspaceFromOverlay(toWorkspaceState, toWorkspacePage,
mLauncher.getAllAppsButton(), appsView, appsView.getContentView(),
appsView.getRevealView(), appsView.getSearchBarView(), animated,
onCompleteRunnable, cb);
>>>>>>>
startAnimationToWorkspaceFromOverlay(toWorkspaceState, mLauncher.getAllAppsButton(),
appsView, appsView.getContentView(), appsView.getRevealView(),
appsView.getSearchBarView(), animated, onCompleteRunnable, cb);
<<<<<<<
startAnimationToWorkspaceFromOverlay(toWorkspaceState, widgetsView,
widgetsView.getContentView(), widgetsView.getRevealView(), null, animated,
onCompleteRunnable, cb);
=======
startAnimationToWorkspaceFromOverlay(toWorkspaceState, toWorkspacePage,
mLauncher.getWidgetsButton(), widgetsView, widgetsView.getContentView(),
widgetsView.getRevealView(), null, animated, onCompleteRunnable, cb);
>>>>>>>
startAnimationToWorkspaceFromOverlay(toWorkspaceState, mLauncher.getWidgetsButton(),
widgetsView, widgetsView.getContentView(), widgetsView.getRevealView(), null,
animated, onCompleteRunnable, cb);
<<<<<<<
final View fromView, final View contentView,
final View revealView, final View overlaySearchBarView, final boolean animated,
final Runnable onCompleteRunnable, final PrivateTransitionCallbacks pCb) {
=======
final int toWorkspacePage, final View buttonView, final View fromView,
final View contentView, final View revealView, final View overlaySearchBarView,
final boolean animated, final Runnable onCompleteRunnable,
final PrivateTransitionCallbacks pCb) {
>>>>>>>
final View buttonView, final View fromView, final View contentView,
final View revealView, final View overlaySearchBarView, final boolean animated,
final Runnable onCompleteRunnable, final PrivateTransitionCallbacks pCb) { |
<<<<<<<
PrivateTransitionCallbacks cb = new PrivateTransitionCallbacks() {
@Override
public float getMaterialRevealViewFinalAlpha(View revealView) {
return 0.3f;
}
};
=======
>>>>>>>
<<<<<<<
playCommonTransitionAnimations(fromWorkspaceState, toWorkspaceState, fromView, toView,
overlaySearchBarView, animated, initialized, animation, revealDuration, layerViews);
=======
// Create the workspace animation.
// NOTE: this call apparently also sets the state for the workspace if !animated
Animator workspaceAnim = mLauncher.startWorkspaceStateChangeAnimation(toWorkspaceState, -1,
animated, layerViews);
// Animate the search bar
startWorkspaceSearchBarAnimation(
toWorkspaceState, animated ? revealDuration : 0, animation);
Animator updateTransitionStepAnim = dispatchOnLauncherTransitionStepAnim(fromView, toView);
>>>>>>>
playCommonTransitionAnimations(toWorkspaceState, fromView, toView,
animated, initialized, animation, revealDuration, layerViews);
<<<<<<<
mLauncher.getAllAppsButton(), appsView, appsView.getContentView(),
appsView.getRevealView(), appsView.getSearchBarView(), animated,
onCompleteRunnable, cb);
=======
toWorkspacePage, mLauncher.getAllAppsButton(), appsView,
animated, onCompleteRunnable, cb);
>>>>>>>
mLauncher.getAllAppsButton(), appsView,
animated, onCompleteRunnable, cb);
<<<<<<<
mCurrentAnimation = startAnimationToWorkspaceFromOverlay(fromWorkspaceState,
toWorkspaceState, mLauncher.getWidgetsButton(), widgetsView,
widgetsView.getContentView(), widgetsView.getRevealView(), null, animated,
onCompleteRunnable, cb);
=======
mCurrentAnimation = startAnimationToWorkspaceFromOverlay(
fromWorkspaceState, toWorkspaceState,
toWorkspacePage, mLauncher.getWidgetsButton(), widgetsView,
animated, onCompleteRunnable, cb);
>>>>>>>
mCurrentAnimation = startAnimationToWorkspaceFromOverlay(
fromWorkspaceState, toWorkspaceState,
mLauncher.getWidgetsButton(), widgetsView,
animated, onCompleteRunnable, cb);
<<<<<<<
private AnimatorSet startAnimationToWorkspaceFromOverlay(final Workspace.State fromWorkspaceState,
final Workspace.State toWorkspaceState, final View buttonView,
final View fromView, final View contentView, final View revealView,
final View overlaySearchBarView, final boolean animated, final Runnable onCompleteRunnable,
=======
private AnimatorSet startAnimationToWorkspaceFromOverlay(
final Workspace.State fromWorkspaceState, final Workspace.State toWorkspaceState,
final int toWorkspacePage,
final View buttonView, final BaseContainerView fromView,
final boolean animated, final Runnable onCompleteRunnable,
>>>>>>>
private AnimatorSet startAnimationToWorkspaceFromOverlay(
final Workspace.State fromWorkspaceState, final Workspace.State toWorkspaceState,
final View buttonView, final BaseContainerView fromView,
final boolean animated, final Runnable onCompleteRunnable, |
<<<<<<<
mWorkspace.getCurrentPage(), cellInfo.cellX, cellInfo.cellY, false);
mFolders.put(folderInfo.id, folderInfo);
=======
mWorkspace.getCurrentScreen(), cellInfo.cellX, cellInfo.cellY, false);
sFolders.put(folderInfo.id, folderInfo);
>>>>>>>
mWorkspace.getCurrentPage(), cellInfo.cellX, cellInfo.cellY, false);
sFolders.put(folderInfo.id, folderInfo); |
<<<<<<<
void addAppWidgetFromPick(Intent data) {
=======
private void manageApps() {
startActivity(new Intent(android.provider.Settings.ACTION_MANAGE_ALL_APPLICATIONS_SETTINGS));
}
void addAppWidget(Intent data) {
>>>>>>>
private void manageApps() {
startActivity(new Intent(android.provider.Settings.ACTION_MANAGE_ALL_APPLICATIONS_SETTINGS));
}
void addAppWidgetFromPick(Intent data) { |
<<<<<<<
import android.util.Log;
=======
import java.lang.ref.WeakReference;
import java.net.URISyntaxException;
import java.text.Collator;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
>>>>>>>
import android.util.Log;
import java.lang.ref.WeakReference;
import java.net.URISyntaxException;
import java.text.Collator;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List; |
<<<<<<<
@Override
@SuppressWarnings("unchecked")
public List<DefaultConfiguration> retrieveAll() {
return getSession().createQuery("from DefaultConfiguration").list();
}
=======
@Override
public DefaultConfiguration loadCurrentConfiguration() {
DefaultConfiguration configuration;
List<DefaultConfiguration> list = retrieveAll();
if (list.size() == 0) {
configuration = DefaultConfiguration.getInitialConfig();
} else if (list.size() > 1) {
DefaultConfiguration config = list.get(0);
list.remove(0);
for (DefaultConfiguration defaultConfig : list) {
delete(defaultConfig);
}
configuration = config;
} else {
configuration = list.get(0);
}
return configuration;
}
>>>>>>>
@Override
@SuppressWarnings("unchecked")
public List<DefaultConfiguration> retrieveAll() {
return getSession().createQuery("from DefaultConfiguration").list();
}
@Override
public DefaultConfiguration loadCurrentConfiguration() {
DefaultConfiguration configuration;
List<DefaultConfiguration> list = retrieveAll();
if (list.size() == 0) {
configuration = DefaultConfiguration.getInitialConfig();
} else if (list.size() > 1) {
DefaultConfiguration config = list.get(0);
list.remove(0);
for (DefaultConfiguration defaultConfig : list) {
delete(defaultConfig);
}
configuration = config;
} else {
configuration = list.get(0);
}
return configuration;
} |
<<<<<<<
@Autowired
private UserService userService;
=======
@Autowired
private ChannelSeverityService channelSeverityService;
>>>>>>>
@Autowired
private UserService userService;
@Autowired
private ChannelSeverityService channelSeverityService; |
<<<<<<<
import java.net.MalformedURLException;
import java.net.URL;
import java.util.List;
import javax.servlet.http.HttpServletRequest;
import javax.validation.Valid;
import com.denimgroup.threadfix.data.entities.*;
import com.denimgroup.threadfix.service.*;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.validation.BindingResult;
import org.springframework.web.bind.annotation.ModelAttribute;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.SessionAttributes;
import org.springframework.web.bind.support.SessionStatus;
import com.denimgroup.threadfix.logging.SanitizedLogger;
=======
import com.denimgroup.threadfix.data.entities.ChannelSeverity;
import com.denimgroup.threadfix.data.entities.Finding;
import com.denimgroup.threadfix.data.entities.Permission;
import com.denimgroup.threadfix.data.entities.ScannerType;
import com.denimgroup.threadfix.logging.SanitizedLogger;
import com.denimgroup.threadfix.service.FindingService;
import com.denimgroup.threadfix.service.ManualFindingService;
import com.denimgroup.threadfix.service.util.PermissionUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.validation.BindingResult;
import org.springframework.web.bind.annotation.*;
import org.springframework.web.bind.support.SessionStatus;
import javax.validation.Valid;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.List;
>>>>>>>
import com.denimgroup.threadfix.data.entities.ChannelSeverity;
import com.denimgroup.threadfix.data.entities.Finding;
import com.denimgroup.threadfix.data.entities.Permission;
import com.denimgroup.threadfix.data.entities.ScannerType;
import com.denimgroup.threadfix.logging.SanitizedLogger;
import com.denimgroup.threadfix.service.ChannelVulnerabilityService;
import com.denimgroup.threadfix.service.FindingService;
import com.denimgroup.threadfix.service.ManualFindingService;
import com.denimgroup.threadfix.service.VulnerabilityService;
import com.denimgroup.threadfix.service.util.ControllerUtils;
import com.denimgroup.threadfix.service.util.PermissionUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.validation.BindingResult;
import org.springframework.web.bind.annotation.*;
import org.springframework.web.bind.support.SessionStatus;
import javax.servlet.http.HttpServletRequest;
import javax.validation.Valid;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.List;
<<<<<<<
private PermissionService permissionService = null;
private ManualFindingService manualFindingService = null;
@Autowired
private ChannelVulnerabilityService channelVulnerabilityService;
@Autowired
private VulnerabilityService vulnerabilityService;
=======
>>>>>>>
@Autowired
private ChannelVulnerabilityService channelVulnerabilityService;
@Autowired
private VulnerabilityService vulnerabilityService; |
<<<<<<<
=======
import com.denimgroup.threadfix.data.dao.ChannelSeverityDao;
import com.denimgroup.threadfix.data.dao.ChannelTypeDao;
import com.denimgroup.threadfix.data.dao.ChannelVulnerabilityDao;
import com.denimgroup.threadfix.data.dao.GenericVulnerabilityDao;
>>>>>>>
<<<<<<<
public ZaproxyChannelImporter() {
super(ChannelType.ZAPROXY);
=======
public ZaproxyChannelImporter(ChannelTypeDao channelTypeDao,
ChannelVulnerabilityDao channelVulnerabilityDao,
ChannelSeverityDao channelSeverityDao,
GenericVulnerabilityDao genericVulnerabilityDao) {
this.channelTypeDao = channelTypeDao;
this.channelVulnerabilityDao = channelVulnerabilityDao;
this.channelSeverityDao = channelSeverityDao;
this.genericVulnerabilityDao = genericVulnerabilityDao;
this.channelType = channelTypeDao.retrieveByName(ChannelType.ZAPROXY);
>>>>>>>
public ZaproxyChannelImporter() {
super(ChannelType.ZAPROXY); |
<<<<<<<
import com.denimgroup.threadfix.remote.response.RestResponse;
=======
import com.denimgroup.threadfix.service.ApplicationService;
>>>>>>>
import com.denimgroup.threadfix.remote.response.RestResponse;
import com.denimgroup.threadfix.service.ApplicationService; |
<<<<<<<
private String formatTime(Calendar calendar){
if (calendar!=null) {
return calendar.getDisplayName(Calendar.MONTH, Calendar.SHORT, Locale.ENGLISH) + " " + calendar.get(Calendar.DAY_OF_MONTH) + ", " + calendar.get(Calendar.YEAR);
}
else {
return null;
}
}
@Transient
@JsonProperty("humanTimes")
@JsonView(AllViews.VulnerabilityDetail.class)
public Map<String, String> getHumanTimes(){
return map("openTime", formatTime(openTime),
"closeTime", formatTime(closeTime),
"wafRuleGeneratedTime", formatTime(wafRuleGeneratedTime),
"defectSubmittedTime", formatTime(defectSubmittedTime),
"defectClosedTime", formatTime(defectClosedTime));
}
=======
@ManyToMany(cascade = CascadeType.ALL, fetch = FetchType.LAZY)
@JoinTable(name="Vulnerability_Tag",
joinColumns={@JoinColumn(name="Vulnerability_Id")},
inverseJoinColumns={@JoinColumn(name="Tag_Id")})
@JsonView({AllViews.VulnerabilityDetail.class, AllViews.UIVulnSearch.class, AllViews.VulnSearchApplications.class})
public List<Tag> getTags() {
return tags;
}
public void setTags(List<Tag> tags) {
this.tags = tags;
}
>>>>>>>
private String formatTime(Calendar calendar){
if (calendar!=null) {
return calendar.getDisplayName(Calendar.MONTH, Calendar.SHORT, Locale.ENGLISH) + " " + calendar.get(Calendar.DAY_OF_MONTH) + ", " + calendar.get(Calendar.YEAR);
}
else {
return null;
}
}
@Transient
@JsonProperty("humanTimes")
@JsonView(AllViews.VulnerabilityDetail.class)
public Map<String, String> getHumanTimes(){
return map("openTime", formatTime(openTime),
"closeTime", formatTime(closeTime),
"wafRuleGeneratedTime", formatTime(wafRuleGeneratedTime),
"defectSubmittedTime", formatTime(defectSubmittedTime),
"defectClosedTime", formatTime(defectClosedTime));
}
@ManyToMany(cascade = CascadeType.ALL, fetch = FetchType.LAZY)
@JoinTable(name="Vulnerability_Tag",
joinColumns={@JoinColumn(name="Vulnerability_Id")},
inverseJoinColumns={@JoinColumn(name="Tag_Id")})
@JsonView({AllViews.VulnerabilityDetail.class, AllViews.UIVulnSearch.class, AllViews.VulnSearchApplications.class})
public List<Tag> getTags() {
return tags;
}
public void setTags(List<Tag> tags) {
this.tags = tags;
} |
<<<<<<<
import io.nuls.core.rpc.util.TimeUtils;
import io.nuls.poc.constant.ConsensusConstant;
import io.nuls.poc.constant.ConsensusErrorCode;
import io.nuls.poc.model.bo.Chain;
import io.nuls.poc.model.dto.CmdRegisterDto;
import io.nuls.poc.utils.compare.BlockHeaderComparator;
=======
import io.nuls.core.rpc.util.RPCUtil;
import io.nuls.core.rpc.util.NulsDateUtils;
import io.nuls.core.exception.NulsException;
import io.nuls.core.log.Log;
import io.nuls.core.model.StringUtils;
import io.nuls.core.parse.JSONUtils;
>>>>>>>
import io.nuls.core.rpc.util.TimeUtils;
import io.nuls.poc.constant.ConsensusConstant;
import io.nuls.poc.constant.ConsensusErrorCode;
import io.nuls.poc.model.bo.Chain;
import io.nuls.poc.model.dto.CmdRegisterDto;
import io.nuls.poc.utils.compare.BlockHeaderComparator;
import io.nuls.core.rpc.util.RPCUtil;
import io.nuls.core.rpc.util.NulsDateUtils;
import io.nuls.core.exception.NulsException;
import io.nuls.core.log.Log;
import io.nuls.core.model.StringUtils;
import io.nuls.core.parse.JSONUtils; |
<<<<<<<
ROLLER_SOURCE_LOCATION = testRoot + ROLLER_FOLDER_NAME,
=======
RAILSGOAT_FOLDER_NAME = "railsgoat-master",
>>>>>>>
ROLLER_SOURCE_LOCATION = testRoot + ROLLER_FOLDER_NAME,
RAILSGOAT_FOLDER_NAME = "railsgoat-master", |
<<<<<<<
if (finding.getDependency() != null && finding.getDependency().getCve() != null) {
if (vulnFinding.getDependency() != null && vulnFinding.getDependency().getCve() != null) {
return finding.getDependency().getCve().equals(vulnFinding.getDependency().getCve());
}
} else if (vulnFinding.getDependency() == null) {
if (!finding.getIsStatic()) {
if (!vulnFinding.getIsStatic()
&& dynamicToDynamicMatch(finding, vulnFinding))
return true;
else if (vulnFinding.getIsStatic()
&& dynamicToStaticMatch(finding, vulnFinding))
return true;
} else if (finding.getIsStatic()) {
if (!vulnFinding.getIsStatic()
&& dynamicToStaticMatch(vulnFinding, finding))
return true;
else if (vulnFinding.getIsStatic()
&& staticToStaticMatch(finding, vulnFinding))
return true;
}
=======
if (!finding.getIsStatic()) {
if (!vulnFinding.getIsStatic()
&& dynamicToDynamicMatch(finding, vulnFinding)) {
return true;
} else if (vulnFinding.getIsStatic()
&& dynamicToStaticMatch(finding, vulnFinding)) {
return true;
}
} else if (finding.getIsStatic()) {
if (!vulnFinding.getIsStatic()
&& dynamicToStaticMatch(vulnFinding, finding)) {
return true;
} else if (vulnFinding.getIsStatic()
&& staticToStaticMatch(finding, vulnFinding)) {
return true;
}
>>>>>>>
if (finding.getDependency() != null && finding.getDependency().getCve() != null) {
if (vulnFinding.getDependency() != null && vulnFinding.getDependency().getCve() != null) {
return finding.getDependency().getCve().equals(vulnFinding.getDependency().getCve());
}
} else if (vulnFinding.getDependency() == null) {
if (!finding.getIsStatic()) {
if (!vulnFinding.getIsStatic()
&& dynamicToDynamicMatch(finding, vulnFinding)) {
return true;
} else if (vulnFinding.getIsStatic()
&& dynamicToStaticMatch(finding, vulnFinding)) {
return true;
}
} else if (finding.getIsStatic()) {
if (!vulnFinding.getIsStatic()
&& dynamicToStaticMatch(vulnFinding, finding)) {
return true;
} else if (vulnFinding.getIsStatic()
&& staticToStaticMatch(finding, vulnFinding)) {
return true;
}
} |
<<<<<<<
=======
import java.util.ArrayList;
import java.util.Arrays;
>>>>>>> |
<<<<<<<
import com.denimgroup.threadfix.data.enums.EventAction;
=======
import com.denimgroup.threadfix.data.enums.TagType;
>>>>>>>
import com.denimgroup.threadfix.data.enums.EventAction;
import com.denimgroup.threadfix.data.enums.TagType; |
<<<<<<<
canManageCustomCweText, canManageEmailReports, canManageGroups;
=======
canManageCustomCweText, canManageEmailReports, canManagePolicies;
>>>>>>>
canManageCustomCweText, canManageEmailReports, canManageGroups, canManagePolicies;
<<<<<<<
@Column
@JsonView(AllViews.TableRow.class)
public Boolean getCanManageGroups() {
return canManageGroups != null && canManageGroups;
}
public void setCanManageGroups(Boolean canManageGroups) {
this.canManageGroups = canManageGroups;
}
=======
@Column
@JsonView(AllViews.TableRow.class)
public Boolean getCanManagePolicies() {
return canManagePolicies != null && canManagePolicies;
}
public void setCanManagePolicies(Boolean canManagePolicies) {
this.canManagePolicies = canManagePolicies;
}
>>>>>>>
@Column
@JsonView(AllViews.TableRow.class)
public Boolean getCanManageGroups() {
return canManageGroups != null && canManageGroups;
}
public void setCanManageGroups(Boolean canManageGroups) {
this.canManageGroups = canManageGroups;
}
@Column
@JsonView(AllViews.TableRow.class)
public Boolean getCanManagePolicies() {
return canManagePolicies != null && canManagePolicies;
}
public void setCanManagePolicies(Boolean canManagePolicies) {
this.canManagePolicies = canManagePolicies;
} |
<<<<<<<
private List<CSVExportField> csvExportFields;
=======
private Boolean closeVulnWhenNoScannersReport = null;
>>>>>>>
private Boolean closeVulnWhenNoScannersReport = null;
private List<CSVExportField> csvExportFields; |
<<<<<<<
@Transient
public String getDisplayPath() {
String path = null;
if (calculatedUrlPath != null) {
path = calculatedUrlPath;
} else if (surfaceLocation != null) {
path = surfaceLocation.getPath();
}
return path;
}
=======
@Override
public String toString() {
return "[" + genericVulnerability +
" " + surfaceLocation.getPath() +
" " + surfaceLocation.getParameter() +
"]";
}
>>>>>>>
@Transient
public String getDisplayPath() {
String path = null;
if (calculatedUrlPath != null) {
path = calculatedUrlPath;
} else if (surfaceLocation != null) {
path = surfaceLocation.getPath();
}
return path;
}
@Override
public String toString() {
return "[" + genericVulnerability +
" " + surfaceLocation.getPath() +
" " + surfaceLocation.getParameter() +
"]";
} |
<<<<<<<
import static com.denimgroup.threadfix.CollectionUtils.list;
=======
import static com.denimgroup.threadfix.framework.impl.model.FieldSetLookupUtils.addSuperClassFieldsToModels;
>>>>>>>
import static com.denimgroup.threadfix.CollectionUtils.list;
import static com.denimgroup.threadfix.framework.impl.model.FieldSetLookupUtils.addSuperClassFieldsToModels;
<<<<<<<
public List<BeanField> getFieldsFromMethodCalls(@Nullable String methodCalls, @Nullable BeanField initialField) {
List<BeanField> fields = list();
=======
public List<ModelField> getFieldsFromMethodCalls(@Nullable String methodCalls, @Nullable ModelField initialField) {
List<ModelField> fields = new ArrayList<>();
>>>>>>>
public List<ModelField> getFieldsFromMethodCalls(@Nullable String methodCalls, @Nullable ModelField initialField) {
List<ModelField> fields = new ArrayList<>(); |
<<<<<<<
import com.denimgroup.threadfix.data.dao.DefectDao;
import com.denimgroup.threadfix.data.dao.FindingDao;
import com.denimgroup.threadfix.data.dao.ScanDao;
import com.denimgroup.threadfix.data.dao.VulnerabilityCommentDao;
import com.denimgroup.threadfix.data.dao.WafRuleDao;
=======
import java.io.File;
import java.util.Calendar;
import java.util.Collections;
import java.util.List;
>>>>>>>
import com.denimgroup.threadfix.data.dao.DefectDao;
import com.denimgroup.threadfix.data.dao.FindingDao;
import com.denimgroup.threadfix.data.dao.ScanDao;
import com.denimgroup.threadfix.data.dao.VulnerabilityCommentDao;
import com.denimgroup.threadfix.data.dao.WafRuleDao;
import java.io.File;
import java.util.Calendar;
import java.util.Collections;
import java.util.List; |
<<<<<<<
=======
static {
// TODO detect language first and use that to narrow down the frameworks
// TODO incorporate python code
// TODO add .NET code
register(new JavaAndJspFrameworkChecker());
register(new DotNetFrameworkChecker());
register(new WebFormsFrameworkChecker());
register(new RailsFrameworkChecker());
}
public static void register(FrameworkChecker checker) {
INSTANCE.frameworkCheckers.add(checker);
}
>>>>>>>
static {
// TODO detect language first and use that to narrow down the frameworks
// TODO incorporate python code
register(new JavaAndJspFrameworkChecker());
register(new DotNetFrameworkChecker());
register(new WebFormsFrameworkChecker());
register(new RailsFrameworkChecker());
}
public static void register(FrameworkChecker checker) {
INSTANCE.frameworkCheckers.add(checker);
} |
<<<<<<<
import java.util.*;
=======
import java.util.HashMap;
import java.util.List;
import java.util.Map;
>>>>>>>
import java.util.HashMap;
import java.util.List;
import java.util.Map;
<<<<<<<
=======
@Autowired
private GenericVulnerabilityDao genericVulnerabilityDao;
@Autowired
>>>>>>>
@Autowired
private GenericVulnerabilityService genericVulnerabilityService;
@Autowired
<<<<<<<
@Autowired
private GenericVulnerabilityService genericVulnerabilityService;
=======
@Autowired
private StatisticsCounterService statisticsCounterService;
>>>>>>>
@Autowired
private StatisticsCounterService statisticsCounterService;
<<<<<<<
@Autowired
public VulnerabilityFilterServiceImpl(
SeverityFilterService severityFilterService,
OrganizationDao organizationDao,
ApplicationDao applicationDao,
VulnerabilityDao vulnerabilityDao,
GenericSeverityDao genericSeverityDao,
VulnerabilityFilterDao vulnerabilityFilterDao,
ScanDao scanDao) {
this.vulnerabilityDao = vulnerabilityDao;
this.severityFilterService = severityFilterService;
this.applicationDao = applicationDao;
this.organizationDao = organizationDao;
this.vulnerabilityFilterDao = vulnerabilityFilterDao;
this.genericSeverityDao = genericSeverityDao;
this.scanDao = scanDao;
}
=======
//////////////////////////////////////////////////////////////////////////////////////////////////////
// Lookups
//////////////////////////////////////////////////////////////////////////////////////////////////////
>>>>>>>
//////////////////////////////////////////////////////////////////////////////////////////////////////
// Lookups
//////////////////////////////////////////////////////////////////////////////////////////////////////
<<<<<<<
GenericVulnerability vuln = genericVulnerabilityService.loadByName(
vulnerabilityFilter.getSourceGenericVulnerability().getName());
=======
GenericVulnerability vuln = getGenericVulnerability(
vulnerabilityFilter.getSourceGenericVulnerability().getName());
>>>>>>>
GenericVulnerability vuln = genericVulnerabilityService.loadByName(
vulnerabilityFilter.getSourceGenericVulnerability().getName());
<<<<<<<
=======
public GenericVulnerability getGenericVulnerability(String formString) {
GenericVulnerability returnValue = null;
String result = getRegexResult(formString, "\\(CWE ([0-9]+)\\)");
if (result != null) {
// Try ID
GenericVulnerability genericVulnerability =
genericVulnerabilityDao.retrieveByDisplayId(Integer.valueOf(result));
if (genericVulnerability != null && genericVulnerability.getId() != null) {
returnValue = genericVulnerability;
}
} else {
// Try full name
GenericVulnerability genericVulnerability =
genericVulnerabilityDao.retrieveByName(formString);
if (genericVulnerability != null && genericVulnerability.getId() != null) {
returnValue = genericVulnerability;
}
}
return returnValue;
}
>>>>>>>
<<<<<<<
// TODO incorporate this into new system
boolean hasHiddenFindings = false;
boolean hasUnhiddenFindings = false;
GenericSeverity maxUnhiddenSeverity = null;
if(vulnerability.getFindings() != null && !vulnerability.getFindings().isEmpty()){
for(Finding finding : vulnerability.getFindings()){
if(finding.isHidden() != null && finding.isHidden()){
hasHiddenFindings = true;
}else{
hasUnhiddenFindings = true;
if(maxUnhiddenSeverity == null || maxUnhiddenSeverity.getIntValue() < finding.getChannelSeverity().getSeverityMap().getGenericSeverity().getIntValue()){
maxUnhiddenSeverity = finding.getChannelSeverity().getSeverityMap().getGenericSeverity();
}
}
}
if(maxUnhiddenSeverity != null){
vulnerability.setGenericSeverity(maxUnhiddenSeverity);
}
}
boolean shouldHide = hiddenByVulnFilter || severityFilters.shouldHide(vulnerability.getGenericSeverity())
|| (hasHiddenFindings && !hasUnhiddenFindings);
vulnerability.setHidden(shouldHide);
if (shouldHide) {
ignoredIds.add(vulnerability.getId());
}
vulnerabilityDao.saveOrUpdate(vulnerability);
=======
//////////////////////////////////////////////////////////////////////////////////////////////////////
// Update Application
//////////////////////////////////////////////////////////////////////////////////////////////////////
private void updateApplicationVulnerabilities(Application application) {
if (application != null) {
statisticsCounterService.checkStatisticsCounters();
updateVulnerabilities(application);
updateScanCounts(application.getScans());
}
}
public void updateVulnerabilities(Application application) {
long start = System.currentTimeMillis();
applyVulnerabilityFilters(application);
applySeverityFilters(application);
vulnerabilityService.updateVulnerabilityReport(application);
System.out.println("Took " + (System.currentTimeMillis() - start) + " to update all vulnerability hidden statuses");
}
private void applySeverityFilters(Application application) {
List<SeverityFilter> severityFilters = severityFilterService.loadAllFilters();
List<SeverityFilter> applicationFilters = list();
for (SeverityFilter filter : severityFilters) {
if (filter.getApplication() != null && filter.getApplication().getId().equals(application.getId())) {
applicationFilters.add(filter);
}
}
for (SeverityFilter applicationFilter : applicationFilters) {
vulnerabilityFilterDao.applySeverityFilterToApplication(applicationFilter.getApplication().getId(), applicationFilter);
}
}
private void applyVulnerabilityFilters(Application application) {
List<VulnerabilityFilter> filters = vulnerabilityFilterDao.retrieveAll();
List<VulnerabilityFilter> applicationFilters = list();
for (VulnerabilityFilter filter : filters) {
if (filter.getApplication() != null && filter.getApplication().getId().equals(application.getId())) {
applicationFilters.add(filter);
>>>>>>>
/**
// TODO incorporate this into new system
boolean hasHiddenFindings = false;
boolean hasUnhiddenFindings = false;
GenericSeverity maxUnhiddenSeverity = null;
if(vulnerability.getFindings() != null && !vulnerability.getFindings().isEmpty()){
for(Finding finding : vulnerability.getFindings()){
if(finding.isHidden() != null && finding.isHidden()){
hasHiddenFindings = true;
}else{
hasUnhiddenFindings = true;
if(maxUnhiddenSeverity == null || maxUnhiddenSeverity.getIntValue() < finding.getChannelSeverity().getSeverityMap().getGenericSeverity().getIntValue()){
maxUnhiddenSeverity = finding.getChannelSeverity().getSeverityMap().getGenericSeverity();
}
}
}
if(maxUnhiddenSeverity != null){
vulnerability.setGenericSeverity(maxUnhiddenSeverity);
}
}
boolean shouldHide = hiddenByVulnFilter || severityFilters.shouldHide(vulnerability.getGenericSeverity())
|| (hasHiddenFindings && !hasUnhiddenFindings);
vulnerability.setHidden(shouldHide);
if (shouldHide) {
ignoredIds.add(vulnerability.getId());
}
vulnerabilityDao.saveOrUpdate(vulnerability);
*/
//////////////////////////////////////////////////////////////////////////////////////////////////////
// Update Application
//////////////////////////////////////////////////////////////////////////////////////////////////////
private void updateApplicationVulnerabilities(Application application) {
if (application != null) {
statisticsCounterService.checkStatisticsCounters();
updateVulnerabilities(application);
updateScanCounts(application.getScans());
}
}
public void updateVulnerabilities(Application application) {
long start = System.currentTimeMillis();
applyVulnerabilityFilters(application);
applySeverityFilters(application);
vulnerabilityService.updateVulnerabilityReport(application);
System.out.println("Took " + (System.currentTimeMillis() - start) + " to update all vulnerability hidden statuses");
}
private void applySeverityFilters(Application application) {
List<SeverityFilter> severityFilters = severityFilterService.loadAllFilters();
List<SeverityFilter> applicationFilters = list();
for (SeverityFilter filter : severityFilters) {
if (filter.getApplication() != null && filter.getApplication().getId().equals(application.getId())) {
applicationFilters.add(filter);
}
}
for (SeverityFilter applicationFilter : applicationFilters) {
vulnerabilityFilterDao.applySeverityFilterToApplication(applicationFilter.getApplication().getId(), applicationFilter);
}
}
private void applyVulnerabilityFilters(Application application) {
List<VulnerabilityFilter> filters = vulnerabilityFilterDao.retrieveAll();
List<VulnerabilityFilter> applicationFilters = list();
for (VulnerabilityFilter filter : filters) {
if (filter.getApplication() != null && filter.getApplication().getId().equals(application.getId())) {
applicationFilters.add(filter); |
<<<<<<<
public @ResponseBody Object leftReport(HttpServletRequest request) {
long start = System.currentTimeMillis();
log.info("Processing left report");
=======
public @ResponseBody Object leftReport(HttpServletRequest request) throws JsonProcessingException {
>>>>>>>
public @ResponseBody Object leftReport(HttpServletRequest request) throws JsonProcessingException {
long start = System.currentTimeMillis();
log.info("Processing left report");
<<<<<<<
public @ResponseBody RestResponse<List<Map<String, Object>>> rightReport(HttpServletRequest request) {
long start = System.currentTimeMillis();
log.info("Processing right report");
=======
public @ResponseBody RestResponse rightReport(HttpServletRequest request) {
>>>>>>>
public @ResponseBody RestResponse rightReport(HttpServletRequest request) {
long start = System.currentTimeMillis();
log.info("Processing right report");
<<<<<<<
log.info("Right report took " + (System.currentTimeMillis() - start) + " ms");
return RestResponse.success(resultBean.getReportList());
=======
return RestResponse.success(map(
"map", resultBean.getReportList(),
"genericSeverities", genericSeverityService.loadAll())
);
>>>>>>>
log.info("Right report took " + (System.currentTimeMillis() - start) + " ms");
return RestResponse.success(map(
"map", resultBean.getReportList(),
"genericSeverities", genericSeverityService.loadAll())
); |
<<<<<<<
import com.denimgroup.threadfix.logging.SanitizedLogger;
import com.denimgroup.threadfix.service.ScanMergeService;
import com.denimgroup.threadfix.service.VulnerabilityService;
=======
>>>>>>>
import com.denimgroup.threadfix.logging.SanitizedLogger;
import com.denimgroup.threadfix.service.ScanMergeService;
import com.denimgroup.threadfix.service.VulnerabilityService; |
<<<<<<<
private static final Logger LOG = Logger.getLogger(BurpScanAgent.class);
=======
private static final String TARGET_URL = "target_url";
private static final String WORKING_DIRECTORY = "working_directory";
private static final String STATE_FILE = "state_file";
private static final String EXPORT_RESULT_FILE_NAME = "burp_scan_result.xml";
private static final String STATE_FILE_NAME = "application.state";
private static final Logger log = Logger.getLogger(BurpScanAgent.class);
>>>>>>>
private static final Logger LOG = Logger.getLogger(BurpScanAgent.class);
private static final String TARGET_URL = "target_url";
private static final String WORKING_DIRECTORY = "working_directory";
private static final String STATE_FILE = "state_file";
private static final String EXPORT_RESULT_FILE_NAME = "burp_scan_result.xml";
private static final String STATE_FILE_NAME = "application.state";
<<<<<<<
String javaHome = System.getProperty("java.home") + File.separator + "bin" + File.separator + "java.exe";
String burpFile = System.getProperty("user.dir") + File.separator + "burp-agent.jar;" + this.burpExecutableFile;
=======
String javaHome = System.getProperty("java.home") + File.separator + "bin" + File.separator + "java";
String burpFile = System.getProperty("user.dir")+ File.separator + "burp-agent.jar;" + this.burpExecutableFile;
>>>>>>>
String javaHome = System.getProperty("java.home") + File.separator + "bin" + File.separator + "java";
String burpFile = System.getProperty("user.dir") + File.separator + "burp-agent.jar;" + this.burpExecutableFile;
<<<<<<<
LOG.warn("Unable to save acunetix config file to working dir");
e1.printStackTrace();
=======
log.warn("Unable to save Burp Suite state file to working dir" + e1.getMessage(), e1);
>>>>>>>
LOG.warn("Unable to save acunetix config file to working dir");
e1.printStackTrace(); |
<<<<<<<
import org.springframework.web.bind.annotation.ModelAttribute;
=======
import org.springframework.validation.BindingResult;
import org.springframework.web.bind.annotation.ModelAttribute;
>>>>>>>
import org.springframework.validation.BindingResult;
import org.springframework.web.bind.annotation.ModelAttribute;
<<<<<<<
import com.denimgroup.threadfix.data.entities.ApplicationCriticality;
=======
import com.denimgroup.threadfix.data.entities.ApplicationChannel;
import com.denimgroup.threadfix.data.entities.ApplicationCriticality;
import com.denimgroup.threadfix.data.entities.ChannelType;
>>>>>>>
import com.denimgroup.threadfix.data.entities.ApplicationCriticality;
<<<<<<<
import com.denimgroup.threadfix.service.ApplicationCriticalityService;
=======
import com.denimgroup.threadfix.service.ApplicationChannelService;
import com.denimgroup.threadfix.service.ApplicationCriticalityService;
>>>>>>>
import com.denimgroup.threadfix.service.ApplicationCriticalityService;
<<<<<<<
private ApplicationCriticalityService applicationCriticalityService = null;
=======
private ApplicationCriticalityService applicationCriticalityService = null;
private ApplicationChannelService applicationChannelService = null;
private PermissionService permissionService = null;
private ChannelTypeService channelTypeService = null;
private UploadScanController uploadScanController = null;
private ScanService scanService = null;
>>>>>>>
private ApplicationCriticalityService applicationCriticalityService = null;
private PermissionService permissionService = null;
private ChannelTypeService channelTypeService = null;
<<<<<<<
ApplicationCriticalityService applicationCriticalityService,
PermissionService permissionService, ApplicationService applicationService) {
=======
ScanService scanService, ApplicationChannelService applicationChannelService,
ChannelTypeService channelTypeService, PermissionService permissionService,
ApplicationService applicationService, UploadScanController uploadScanController,
ApplicationCriticalityService applicationCriticalityService) {
>>>>>>>
ChannelTypeService channelTypeService, PermissionService permissionService,
ApplicationService applicationService,
ApplicationCriticalityService applicationCriticalityService) { |
<<<<<<<
import com.alibaba.nacos.naming.consistency.ApplyAction;
=======
import com.alibaba.nacos.core.utils.ClassUtils;
import com.alibaba.nacos.naming.NamingApp;
import com.alibaba.nacos.consistency.DataOperation;
>>>>>>>
import com.alibaba.nacos.consistency.DataOperation;
<<<<<<<
NotifyCenter.publishEvent(ValueChangeEvent.builder().key(datum.key).action(ApplyAction.CHANGE).build());
=======
notifier.addTask(datum.key, DataOperation.CHANGE);
>>>>>>>
NotifyCenter.publishEvent(ValueChangeEvent.builder().key(datum.key).action(DataOperation.CHANGE).build());
Loggers.RAFT.info("data added/updated, key={}, term={}", datum.key, local.term);
}
/**
* Do delete. If this peer is the leader, commit the delete to the store; if not, reject the delete, since it should be signalled to the leader instead.
*
* @param datumKey datum key
* @param source source raft peer
* @throws Exception any exception during delete
*/
public void onDelete(String datumKey, RaftPeer source) throws Exception {
if (stopWork) {
throw new IllegalStateException("old raft protocol already stop work");
}
RaftPeer local = peers.local();
if (!peers.isLeader(source.ip)) {
Loggers.RAFT
.warn("peer {} tried to publish data but wasn't leader, leader: {}", JacksonUtils.toJson(source),
JacksonUtils.toJson(getLeader()));
throw new IllegalStateException("peer(" + source.ip + ") tried to publish data but wasn't leader");
}
if (source.term.get() < local.term.get()) {
Loggers.RAFT.warn("out of date publish, pub-term: {}, cur-term: {}", JacksonUtils.toJson(source),
JacksonUtils.toJson(local));
throw new IllegalStateException(
"out of date publish, pub-term:" + source.term + ", cur-term: " + local.term);
}
local.resetLeaderDue();
// do apply
String key = datumKey;
deleteDatum(key);
if (KeyBuilder.matchServiceMetaKey(key)) {
if (local.term.get() + PUBLISH_TERM_INCREASE_COUNT > source.term.get()) {
//set leader term:
getLeader().term.set(source.term.get());
local.term.set(getLeader().term.get());
} else {
local.term.addAndGet(PUBLISH_TERM_INCREASE_COUNT);
}
raftStore.updateTerm(local.term.get());
}
<<<<<<<
NotifyCenter.publishEvent(ValueChangeEvent.builder().key(newDatum.key).action(ApplyAction.CHANGE).build());
=======
notifier.addTask(newDatum.key, DataOperation.CHANGE);
>>>>>>>
NotifyCenter.publishEvent(ValueChangeEvent.builder().key(newDatum.key).action(DataOperation.CHANGE).build());
<<<<<<<
NotifyCenter.publishEvent(ValueChangeEvent.builder().key(datum.key).action(ApplyAction.CHANGE).build());
=======
notifier.addTask(datum.key, DataOperation.CHANGE);
>>>>>>>
NotifyCenter.publishEvent(ValueChangeEvent.builder().key(datum.key).action(DataOperation.CHANGE).build());
<<<<<<<
NotifyCenter.publishEvent(ValueChangeEvent.builder().key(URLDecoder.decode(key, "UTF-8")).action(ApplyAction.DELETE).build());
=======
notifier.addTask(URLDecoder.decode(key, "UTF-8"), DataOperation.DELETE);
>>>>>>>
NotifyCenter.publishEvent(ValueChangeEvent.builder().key(URLDecoder.decode(key, "UTF-8")).action(DataOperation.DELETE).build()); |
<<<<<<<
final String md5 = Md5Utils.getMD5(content, Constants.ENCODE);
if (!PropertyUtil.isDirectRead()) {
=======
final String md5 = MD5Utils.md5Hex(content, Constants.ENCODE);
if (!STANDALONE_MODE || PropertyUtil.isStandaloneUseMysql()) {
>>>>>>>
final String md5 = MD5Utils.md5Hex(content, Constants.ENCODE);
if (!PropertyUtil.isDirectRead()) { |
<<<<<<<
import com.alipay.sofa.jraft.util.Endpoint;
import com.google.protobuf.Message;
=======
import com.google.common.base.Joiner;
import org.slf4j.Logger;
>>>>>>>
import com.alipay.sofa.jraft.util.Endpoint;
import com.google.protobuf.Message;
import com.google.common.base.Joiner;
<<<<<<<
Response get(final GetRequest request, final int failoverRetries) {
=======
GetResponse get(final GetRequest request) {
>>>>>>>
Response get(final GetRequest request) {
<<<<<<<
catch (Throwable e) {
=======
catch (Throwable e) {
Loggers.RAFT.warn("Raft linear read failed, go to Leader read logic : {}", e.toString());
>>>>>>>
catch (Throwable e) {
Loggers.RAFT.warn("Raft linear read failed, go to Leader read logic : {}", e.toString());
<<<<<<<
final CompletableFuture<Response> future) {
commit(request.getGroup(), request, future, failoverRetries)
.whenComplete(new BiConsumer<Response, Throwable>() {
=======
final CompletableFuture<GetResponse> future) {
Log readLog = Log.newBuilder().setGroup(request.getGroup())
.setData(request.getData())
.putExtendInfo(JRaftConstants.JRAFT_EXTEND_INFO_KEY,
JRaftLogOperation.READ_OPERATION).build();
CompletableFuture<byte[]> f = new CompletableFuture<byte[]>();
commit(readLog, f)
.whenComplete(new BiConsumer<byte[], Throwable>() {
>>>>>>>
final CompletableFuture<Response> future) {
commit(request.getGroup(), request, future)
.whenComplete(new BiConsumer<Response, Throwable>() {
<<<<<<<
public <T> CompletableFuture<T> commit(String group, Message data, final CompletableFuture<T> future,
final int retryLeft) {
=======
public <T> CompletableFuture<T> commit(Log data, final CompletableFuture<T> future) {
>>>>>>>
public <T> CompletableFuture<T> commit(final String group, final Message data, final CompletableFuture<T> future) {
<<<<<<<
RetryRunner runner = () -> commit(group, data, future, retryLeft - 1);
FailoverClosureImpl closure = new FailoverClosureImpl(future, retryLeft, runner);
=======
FailoverClosureImpl closure = new FailoverClosureImpl(future);
>>>>>>>
FailoverClosureImpl closure = new FailoverClosureImpl(future); |
<<<<<<<
=======
import com.fasterxml.jackson.annotation.JsonInclude.Include;
import com.fasterxml.jackson.core.JsonProcessingException;
>>>>>>>
import com.fasterxml.jackson.annotation.JsonInclude.Include;
import com.fasterxml.jackson.core.JsonProcessingException;
<<<<<<<
mapper.registerSubtypes(new NamedType(clz, type));
}
=======
mapper.registerSubtypes(new NamedType(clz, type));
}
public static ObjectNode createEmptyJsonNode() {
return new ObjectNode(mapper.getNodeFactory());
}
public static ArrayNode createEmptyArrayNode() {
return new ArrayNode(mapper.getNodeFactory());
}
public static JsonNode transferToJsonNode(Object obj) {
return mapper.valueToTree(obj);
}
>>>>>>>
mapper.registerSubtypes(new NamedType(clz, type));
}
public static ObjectNode createEmptyJsonNode() {
return new ObjectNode(mapper.getNodeFactory());
}
public static ArrayNode createEmptyArrayNode() {
return new ArrayNode(mapper.getNodeFactory());
}
public static JsonNode transferToJsonNode(Object obj) {
return mapper.valueToTree(obj);
} |
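A minimal usage sketch of the node helpers kept in this resolution. It assumes the methods above are static members of a JacksonUtils class backed by a shared ObjectMapper; the field names below are illustrative only.

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;

public class JacksonUtilsSketch {
    public static void main(String[] args) {
        // Build an empty object node and populate it, as createEmptyJsonNode allows.
        ObjectNode node = JacksonUtils.createEmptyJsonNode();
        node.put("dataId", "example-data-id");
        // Collect nodes in an array node created the same way.
        ArrayNode array = JacksonUtils.createEmptyArrayNode();
        array.add(node);
        // Convert an arbitrary object into a tree via the shared mapper.
        JsonNode tree = JacksonUtils.transferToJsonNode(new int[]{1, 2, 3});
        System.out.println(array + " " + tree);
    }
}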
<<<<<<<
public static void addClient(String namespaceId,
String dom,
=======
public static void setTotalPush(int totalPush) {
PushService.totalPush = totalPush;
}
public static void addClient(String dom,
>>>>>>>
public static void setTotalPush(int totalPush) {
    PushService.totalPush = totalPush;
}
public static void addClient(String namespaceId,
                             String dom, |
<<<<<<<
=======
import com.alibaba.nacos.common.utils.Md5Utils;
import com.alibaba.nacos.config.server.constant.Constants;
import com.alibaba.nacos.config.server.enums.FileTypeEnum;
>>>>>>>
import com.alibaba.nacos.common.utils.Md5Utils;
import com.alibaba.nacos.config.server.constant.Constants;
import com.alibaba.nacos.config.server.enums.FileTypeEnum;
<<<<<<<
String md5 = MD5.getInstance().getMD5String(configInfo.getContent());
String sql = "INSERT INTO config_info_beta(data_id,group_id,tenant_id,app_name,content,md5,beta_ips,src_ip," +
"src_user,gmt_create,gmt_modified) VALUES(?,?,?,?,?,?,?,?,?,?,?)";
Object[] args = new Object[]{
configInfo.getDataId(), configInfo.getGroup(), tenantTmp, appNameTmp, configInfo.getContent(), md5,
betaIps, srcIp, srcUser, time, time
};
SqlContextUtils.addSqlContext(sql, args);
boolean result = databaseOperate.update(SqlContextUtils.getCurrentSqlContext());
if (!result) {
throw new NacosConfigException("【灰度】配置发布失败");
}
=======
String md5 = Md5Utils.getMD5(configInfo.getContent(), Constants.ENCODE);
jt.update(
"INSERT INTO config_info_beta(data_id,group_id,tenant_id,app_name,content,md5,beta_ips,src_ip,"
+ "src_user,gmt_create,gmt_modified) VALUES(?,?,?,?,?,?,?,?,?,?,?)",
configInfo.getDataId(), configInfo.getGroup(), tenantTmp, appNameTmp, configInfo.getContent(), md5,
betaIps, srcIp, srcUser, time, time);
>>>>>>>
String md5 = Md5Utils.getMD5(configInfo.getContent(), Constants.ENCODE);
String sql = "INSERT INTO config_info_beta(data_id,group_id,tenant_id,app_name,content,md5,beta_ips,src_ip," +
"src_user,gmt_create,gmt_modified) VALUES(?,?,?,?,?,?,?,?,?,?,?)";
Object[] args = new Object[]{
configInfo.getDataId(), configInfo.getGroup(), tenantTmp, appNameTmp, configInfo.getContent(), md5,
betaIps, srcIp, srcUser, time, time
};
SqlContextUtils.addSqlContext(sql, args);
boolean result = databaseOperate.update(SqlContextUtils.getCurrentSqlContext());
if (!result) {
throw new NacosConfigException("【灰度】配置发布失败");
}
<<<<<<<
String md5 = MD5.getInstance().getMD5String(configInfo.getContent());
String sql = "INSERT INTO config_info_tag(data_id,group_id,tenant_id,tag_id,app_name,content,md5,src_ip,src_user,"
+ "gmt_create,gmt_modified) VALUES(?,?,?,?,?,?,?,?,?,?,?)";
Object[] args = new Object[]{
configInfo.getDataId(), configInfo.getGroup(), tenantTmp, tagTmp, appNameTmp, configInfo.getContent(), md5,
srcIp, srcUser, time, time};
SqlContextUtils.addSqlContext(sql, args);
boolean result = databaseOperate.update(SqlContextUtils.getCurrentSqlContext());
if (!result) {
throw new NacosConfigException("【标签】配置添加失败");
}
=======
String md5 = Md5Utils.getMD5(configInfo.getContent(), Constants.ENCODE);
jt.update(
"INSERT INTO config_info_tag(data_id,group_id,tenant_id,tag_id,app_name,content,md5,src_ip,src_user,"
+ "gmt_create,gmt_modified) VALUES(?,?,?,?,?,?,?,?,?,?,?)",
configInfo.getDataId(), configInfo.getGroup(), tenantTmp, tagTmp, appNameTmp, configInfo.getContent(),
md5,
srcIp, srcUser, time, time);
>>>>>>>
String md5 = Md5Utils.getMD5(configInfo.getContent(), Constants.ENCODE);
String sql = "INSERT INTO config_info_tag(data_id,group_id,tenant_id,tag_id,app_name,content,md5,src_ip,src_user,"
+ "gmt_create,gmt_modified) VALUES(?,?,?,?,?,?,?,?,?,?,?)";
Object[] args = new Object[]{
configInfo.getDataId(), configInfo.getGroup(), tenantTmp, tagTmp, appNameTmp, configInfo.getContent(), md5,
srcIp, srcUser, time, time};
SqlContextUtils.addSqlContext(sql, args);
boolean result = databaseOperate.update(SqlContextUtils.getCurrentSqlContext());
if (!result) {
throw new NacosConfigException("【标签】配置添加失败");
}
<<<<<<<
String md5 = MD5.getInstance().getMD5String(configInfo.getContent());
final String sql = "UPDATE config_info_beta SET content=?, md5 = ?, src_ip=?,src_user=?,gmt_modified=?,app_name=? WHERE "
+ "data_id=? AND group_id=? AND tenant_id=?";
final Object[] args = new Object[]{
configInfo.getContent(), md5, srcIp, srcUser, time, appNameTmp, configInfo.getDataId(),
configInfo.getGroup(), tenantTmp
};
SqlContextUtils.addSqlContext(sql, args);
boolean result = databaseOperate.update(SqlContextUtils.getCurrentSqlContext());
if (!result) {
throw new NacosConfigException("【灰度】配置修改失败");
}
=======
String md5 = Md5Utils.getMD5(configInfo.getContent(), Constants.ENCODE);
jt.update(
"UPDATE config_info_beta SET content=?, md5 = ?, src_ip=?,src_user=?,gmt_modified=?,app_name=? WHERE "
+ "data_id=? AND group_id=? AND tenant_id=?",
configInfo.getContent(), md5, srcIp, srcUser, time, appNameTmp, configInfo.getDataId(),
configInfo.getGroup(), tenantTmp);
>>>>>>>
String md5 = Md5Utils.getMD5(configInfo.getContent(), Constants.ENCODE);
final String sql = "UPDATE config_info_beta SET content=?, md5 = ?, src_ip=?,src_user=?,gmt_modified=?,app_name=? WHERE "
+ "data_id=? AND group_id=? AND tenant_id=?";
final Object[] args = new Object[]{
configInfo.getContent(), md5, srcIp, srcUser, time, appNameTmp, configInfo.getDataId(),
configInfo.getGroup(), tenantTmp
};
SqlContextUtils.addSqlContext(sql, args);
boolean result = databaseOperate.update(SqlContextUtils.getCurrentSqlContext());
if (!result) {
throw new NacosConfigException("【灰度】配置修改失败");
}
<<<<<<<
String md5 = MD5.getInstance().getMD5String(configInfo.getContent());
final String sql = "UPDATE config_info_tag SET content=?, md5 = ?, src_ip=?,src_user=?,gmt_modified=?,app_name=? WHERE "
+ "data_id=? AND group_id=? AND tenant_id=? AND tag_id=?";
final Object[] args = new Object[]{
configInfo.getContent(), md5, srcIp, srcUser, time, appNameTmp, configInfo.getDataId(),
configInfo.getGroup(), tenantTmp, tagTmp
};
SqlContextUtils.addSqlContext(sql, args);
boolean result = databaseOperate.update(SqlContextUtils.getCurrentSqlContext());
if (!result) {
throw new NacosConfigException("【标签】配置修改失败");
}
=======
String md5 = Md5Utils.getMD5(configInfo.getContent(), Constants.ENCODE);
jt.update(
"UPDATE config_info_tag SET content=?, md5 = ?, src_ip=?,src_user=?,gmt_modified=?,app_name=? WHERE "
+ "data_id=? AND group_id=? AND tenant_id=? AND tag_id=?",
configInfo.getContent(), md5, srcIp, srcUser, time, appNameTmp, configInfo.getDataId(),
configInfo.getGroup(), tenantTmp, tagTmp);
>>>>>>>
String md5 = Md5Utils.getMD5(configInfo.getContent(), Constants.ENCODE);
final String sql = "UPDATE config_info_tag SET content=?, md5 = ?, src_ip=?,src_user=?,gmt_modified=?,app_name=? WHERE "
+ "data_id=? AND group_id=? AND tenant_id=? AND tag_id=?";
final Object[] args = new Object[]{
configInfo.getContent(), md5, srcIp, srcUser, time, appNameTmp, configInfo.getDataId(),
configInfo.getGroup(), tenantTmp, tagTmp
};
SqlContextUtils.addSqlContext(sql, args);
boolean result = databaseOperate.update(SqlContextUtils.getCurrentSqlContext());
if (!result) {
throw new NacosConfigException("【标签】配置修改失败");
}
<<<<<<<
final String md5Tmp = MD5.getInstance().getMD5String(configInfo.getContent());
=======
final String md5Tmp = Md5Utils.getMD5(configInfo.getContent(), Constants.ENCODE);
>>>>>>>
final String md5Tmp = Md5Utils.getMD5(configInfo.getContent(), Constants.ENCODE);
<<<<<<<
final String appNameTmp = StringUtils.isBlank(configInfo.getAppName()) ? StringUtils.EMPTY : configInfo.getAppName();
final String tenantTmp = StringUtils.isBlank(configInfo.getTenant()) ? StringUtils.EMPTY : configInfo.getTenant();
final String md5Tmp = MD5.getInstance().getMD5String(configInfo.getContent());
final String desc = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("desc");
final String use = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("use");
final String effect = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("effect");
final String type = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("type");
final String schema = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("schema");
=======
String appNameTmp = StringUtils.isBlank(configInfo.getAppName()) ? StringUtils.EMPTY : configInfo.getAppName();
String tenantTmp = StringUtils.isBlank(configInfo.getTenant()) ? StringUtils.EMPTY : configInfo.getTenant();
final String md5Tmp = Md5Utils.getMD5(configInfo.getContent(), Constants.ENCODE);
String desc = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("desc");
String use = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("use");
String effect = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("effect");
String type = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("type");
String schema = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("schema");
>>>>>>>
final String appNameTmp = StringUtils.isBlank(configInfo.getAppName()) ? StringUtils.EMPTY : configInfo.getAppName();
final String tenantTmp = StringUtils.isBlank(configInfo.getTenant()) ? StringUtils.EMPTY : configInfo.getTenant();
final String md5Tmp = Md5Utils.getMD5(configInfo.getContent(), Constants.ENCODE);
final String desc = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("desc");
final String use = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("use");
final String effect = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("effect");
final String type = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("type");
final String schema = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("schema");
<<<<<<<
final String md5Tmp = MD5.getInstance().getMD5String(configInfo.getContent());
final String sql;
final Object[] args;
if (id == null) {
sql = "INSERT INTO his_config_info (data_id,group_id,tenant_id,app_name,content,md5," +
"src_ip,src_user,gmt_modified,op_type) VALUES(?,?,?,?,?,?,?,?,?,?)";
args = new Object[]{
configInfo.getDataId(), configInfo.getGroup(), tenantTmp, appNameTmp, configInfo.getContent(),
md5Tmp, srcIp, srcUser, time, ops
};
} else {
sql = "INSERT INTO his_config_info (id,data_id,group_id,tenant_id,app_name,content,md5," +
"src_ip,src_user,gmt_modified,op_type) VALUES(?,?,?,?,?,?,?,?,?,?,?)";
args = new Object[]{
id, configInfo.getDataId(), configInfo.getGroup(), tenantTmp, appNameTmp, configInfo.getContent(),
md5Tmp, srcIp, srcUser, time, ops
};
=======
final String md5Tmp = Md5Utils.getMD5(configInfo.getContent(), Constants.ENCODE);
try {
jt.update(
"INSERT INTO his_config_info (id,data_id,group_id,tenant_id,app_name,content,md5,src_ip,src_user,gmt_modified,op_type) VALUES(?,?,?,?,?,?,?,?,?,?,?)",
id, configInfo.getDataId(), configInfo.getGroup(), tenantTmp, appNameTmp, configInfo.getContent(),
md5Tmp, srcIp, srcUser, time, ops);
} catch (DataAccessException e) {
fatalLog.error("[db-error] " + e.toString(), e);
throw e;
>>>>>>>
final String md5Tmp = Md5Utils.getMD5(configInfo.getContent(), Constants.ENCODE);
final String sql;
final Object[] args;
if (id == null) {
sql = "INSERT INTO his_config_info (data_id,group_id,tenant_id,app_name,content,md5," +
"src_ip,src_user,gmt_modified,op_type) VALUES(?,?,?,?,?,?,?,?,?,?)";
args = new Object[]{
configInfo.getDataId(), configInfo.getGroup(), tenantTmp, appNameTmp, configInfo.getContent(),
md5Tmp, srcIp, srcUser, time, ops
};
} else {
sql = "INSERT INTO his_config_info (id,data_id,group_id,tenant_id,app_name,content,md5," +
"src_ip,src_user,gmt_modified,op_type) VALUES(?,?,?,?,?,?,?,?,?,?,?)";
args = new Object[]{
id, configInfo.getDataId(), configInfo.getGroup(), tenantTmp, appNameTmp, configInfo.getContent(),
md5Tmp, srcIp, srcUser, time, ops
};
<<<<<<<
final String md5 = MD5.getInstance().getMD5String(
content);
=======
final String md5 = Md5Utils.getMD5(content, Constants.ENCODE);
>>>>>>>
final String md5 = Md5Utils.getMD5(content, Constants.ENCODE); |
<<<<<<<
chain.getLogger().debug("[OrphanTxProcessTask] 加入待打包队列....hash:{}", HashUtil.toHex(tx.getHash()));
=======
chain.getLoggerMap().get(TxConstant.LOG_NEW_TX_PROCESS).debug("[OrphanTxProcessTask] 加入待打包队列....hash:{}", tx.getHash().toHex());
>>>>>>>
chain.getLogger().debug("[OrphanTxProcessTask] 加入待打包队列....hash:{}", tx.getHash().toHex()); |
<<<<<<<
import com.alibaba.nacos.common.utils.ByteUtils;
import com.alibaba.nacos.common.utils.ConvertUtils;
=======
import com.alibaba.nacos.common.utils.ThreadUtils;
>>>>>>>
import com.alibaba.nacos.common.utils.ByteUtils;
import com.alibaba.nacos.common.utils.ThreadUtils;
<<<<<<<
private int failoverRetries = 1;
private String failoverRetriesStr = String.valueOf(failoverRetries);
private final Serializer serializer = SerializeFactory.getDefault();
=======
>>>>>>>
private final Serializer serializer = SerializeFactory.getDefault();
<<<<<<<
public Response getData(GetRequest request) throws Exception {
int retryCnt = Integer.parseInt(
request.getExtendInfoOrDefault(RaftSysConstants.REQUEST_FAILOVER_RETRIES,
failoverRetriesStr));
return raftServer.get(request, retryCnt);
=======
public GetResponse getData(GetRequest request) throws Exception {
return raftServer.get(request);
>>>>>>>
public Response getData(GetRequest request) throws Exception {
return raftServer.get(request);
<<<<<<<
CompletableFuture<Response> f = new CompletableFuture<>();
raftServer.commit(data.getGroup(), data, f,
retryCnt).whenComplete(new BiConsumer<Response, Throwable>() {
=======
CompletableFuture<Object> f = new CompletableFuture<>();
raftServer.commit(JRaftUtils
.injectExtendInfo(data, JRaftLogOperation.MODIFY_OPERATION), f).whenComplete(new BiConsumer<Object, Throwable>() {
>>>>>>>
CompletableFuture<Response> f = new CompletableFuture<>();
raftServer.commit(data.getGroup(), data, f).whenComplete(new BiConsumer<Response, Throwable>() { |
<<<<<<<
HttpClientManager.shutdown();
=======
>>>>>>>
HttpClientManager.shutdown();
<<<<<<<
LOGGER.error("Nacos failed to start, please see {} for more details.",
Paths.get(ApplicationUtils.getNacosHome(), "logs/nacos.log"));
context.close();
=======
context.close();
LOGGER.error("Nacos failed to start, please see {} for more details.",
Paths.get(ApplicationUtils.getNacosHome(), "logs/nacos.log"));
>>>>>>>
context.close();
LOGGER.error("Nacos failed to start, please see {} for more details.",
Paths.get(ApplicationUtils.getNacosHome(), "logs/nacos.log")); |
<<<<<<<
private static ScheduledExecutorService emptyServiceAutoCleanExecutor = Executors.newSingleThreadScheduledExecutor(
new ThreadFactory() {
@Override
public Thread newThread(Runnable r) {
Thread t = new Thread(r);
t.setName("com.alibaba.nacos.naming.service.empty.auto-clean");
t.setDaemon(true);
return t;
}
}
);
=======
private static ScheduledExecutorService distroNotifyExecutor = new ScheduledThreadPoolExecutor(1, new ThreadFactory() {
@Override
public Thread newThread(Runnable r) {
Thread t = new Thread(r);
t.setDaemon(true);
t.setName("com.alibaba.nacos.naming.distro.notifier");
return t;
}
});
>>>>>>>
private static ScheduledExecutorService emptyServiceAutoCleanExecutor = Executors.newSingleThreadScheduledExecutor(
new ThreadFactory() {
@Override
public Thread newThread(Runnable r) {
Thread t = new Thread(r);
t.setName("com.alibaba.nacos.naming.service.empty.auto-clean");
t.setDaemon(true);
return t;
}
}
);
private static ScheduledExecutorService distroNotifyExecutor = new ScheduledThreadPoolExecutor(1, new ThreadFactory() {
@Override
public Thread newThread(Runnable r) {
Thread t = new Thread(r);
t.setDaemon(true);
t.setName("com.alibaba.nacos.naming.distro.notifier");
return t;
}
}); |
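For reference, a self-contained sketch of the named, daemonized single-thread scheduler pattern that both executors kept in this resolution follow; the task body, delay and period are placeholders rather than project values.

import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

public class NamedDaemonSchedulerSketch {
    public static void main(String[] args) throws InterruptedException {
        // Same construction style as the empty-service auto-clean executor above.
        ScheduledExecutorService executor = Executors.newSingleThreadScheduledExecutor(r -> {
            Thread t = new Thread(r);
            t.setName("com.alibaba.nacos.naming.service.empty.auto-clean");
            t.setDaemon(true);
            return t;
        });
        executor.scheduleAtFixedRate(() -> System.out.println("placeholder clean-up task"),
                0, 30, TimeUnit.SECONDS);
        Thread.sleep(100);
        executor.shutdownNow();
    }
}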
<<<<<<<
=======
import com.alibaba.nacos.api.PropertyKeyConst;
import com.alibaba.nacos.api.SystemPropertyKeyConst;
import com.alibaba.nacos.api.common.Constants;
>>>>>>>
import com.alibaba.nacos.api.PropertyKeyConst;
import com.alibaba.nacos.api.SystemPropertyKeyConst;
import com.alibaba.nacos.api.common.Constants;
<<<<<<<
import com.alibaba.nacos.client.naming.utils.*;
=======
import com.alibaba.nacos.client.naming.utils.*;
import com.alibaba.nacos.client.utils.TemplateUtils;
>>>>>>>
import com.alibaba.nacos.client.naming.utils.*;
import com.alibaba.nacos.client.utils.TemplateUtils;
<<<<<<<
import java.util.*;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.TimeUnit;
=======
import java.util.*;
import java.util.concurrent.*;
>>>>>>>
import java.util.*;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.TimeUnit;
<<<<<<<
List<String> headers = Arrays.asList("Client-Version", UtilAndComs.VERSION,
"User-Agent", UtilAndComs.VERSION,
"Accept-Encoding", "gzip,deflate,sdch",
"Connection", "Keep-Alive",
"RequestId", UuidUtils.generateUuid());
=======
List<String> headers = builderHeaders();
>>>>>>>
List<String> headers = Arrays.asList("Client-Version", UtilAndComs.VERSION,
"User-Agent", UtilAndComs.VERSION,
"Accept-Encoding", "gzip,deflate,sdch",
"Connection", "Keep-Alive",
"RequestId", UuidUtils.generateUuid());
List<String> headers = builderHeaders();
<<<<<<<
List<String> headers = Arrays.asList("Client-Version", UtilAndComs.VERSION,
"User-Agent", UtilAndComs.VERSION,
"Accept-Encoding", "gzip,deflate,sdch",
"Connection", "Keep-Alive",
"RequestId", UuidUtils.generateUuid());
=======
List<String> headers = builderHeaders();
>>>>>>>
List<String> headers = Arrays.asList("Client-Version", UtilAndComs.VERSION,
"User-Agent", UtilAndComs.VERSION,
"Accept-Encoding", "gzip,deflate,sdch",
"Connection", "Keep-Alive",
"RequestId", UuidUtils.generateUuid());
List<String> headers = builderHeaders(); |
<<<<<<<
import com.alibaba.nacos.common.lifecycle.ResourceLifeCycleManager;
=======
import com.alibaba.nacos.common.utils.JacksonUtils;
>>>>>>>
import com.alibaba.nacos.common.utils.JacksonUtils;
import com.alibaba.nacos.common.lifecycle.ResourceLifeCycleManager;
<<<<<<<
Assert.assertEquals(true, JSON.parseObject(result.content).getBoolean("data"));
NacosFactory.destroyConfigService(iconfig);
agent.shutdown();
// Judge whether the register life cycle resource number equals to zero or not.
Assert.assertEquals(0, ResourceLifeCycleManager.getRegisterResourceNum());
=======
Assert.assertTrue(JacksonUtils.toObj(result.content).get("data").booleanValue());
>>>>>>>
Assert.assertTrue(JacksonUtils.toObj(result.content).get("data").booleanValue());
NacosFactory.destroyConfigService(iconfig);
agent.shutdown();
// Judge whether the register life cycle resource number equals to zero or not.
Assert.assertEquals(0, ResourceLifeCycleManager.getRegisterResourceNum()); |
<<<<<<<
Loggers.EVT_LOG.info("{} {SYNC} IP-{} : {}@{}",
domName, (ipAddress.isValid() ? "ENABLED" : "DISABLED"),
ipAddress.getIp(), ipAddress.getPort(), ipAddress.getClusterName());
=======
Loggers.EVT_LOG.info("{" + domName + "} {SYNC} " +
"{IP-" + (ipAddress.isValid() ? "ENABLED" : "DISABLED") + "} " + ipAddress.getIp()
+ ":" + ipAddress.getPort() + "@" + ipAddress.getClusterName());
>>>>>>>
Loggers.EVT_LOG.info("{} {SYNC} IP-{} : {}@{}",
domName, (ipAddress.isValid() ? "ENABLED" : "DISABLED"),
ipAddress.getIp(), ipAddress.getPort(), ipAddress.getClusterName());
<<<<<<<
public void easyAddIP4Dom(String namespaceId, String domName, List<IpAddress> ips, long timestamp, long term) throws Exception {
=======
public void easyAddIP4Dom(String domName, List<IpAddress> ips, long term) throws Exception {
easyUpdateIP4Dom(domName, ips, term, "add");
}
public void easyRemvIP4Dom(String domName, List<IpAddress> ips, long term) throws Exception {
easyUpdateIP4Dom(domName, ips, term, "remove");
}
public void easyUpdateIP4Dom(String domName, List<IpAddress> ips, long term, String action) throws Exception {
>>>>>>>
public void easyAddIP4Dom(String domName, List<IpAddress> ips, long term) throws Exception {
easyUpdateIP4Dom(domName, ips, term, "add");
}
public void easyRemvIP4Dom(String domName, List<IpAddress> ips, long term) throws Exception {
easyUpdateIP4Dom(domName, ips, term, "remove");
}
public void easyUpdateIP4Dom(String domName, List<IpAddress> ips, long term, String action) throws Exception {
<<<<<<<
if (ipAddressMap.size() <= 0) {
throw new IllegalArgumentException("ip list can not be empty, dom: " + dom.getName() + ", ip list: "
+ JSON.toJSONString(ipAddressMap.values()));
}
=======
}
if (ipAddressMap.size() <= 0 && UtilsAndCommons.UPDATE_INSTANCE_ACTION_ADD.equals(action)) {
throw new IllegalArgumentException("ip list can not be empty, dom: " + dom.getName() + ", ip list: "
+ JSON.toJSONString(ipAddressMap.values()));
}
Loggers.EVT_LOG.info("{" + dom + "} {POS} {IP-UPDATE}" + ips +
", action:" + action);
String key = UtilsAndCommons.getIPListStoreKey(dom);
String value = JSON.toJSONString(ipAddressMap.values());
Datum datum = new Datum();
datum.key = key;
datum.value = value;
>>>>>>>
}
if (ipAddressMap.size() <= 0 && UtilsAndCommons.UPDATE_INSTANCE_ACTION_ADD.equals(action)) {
throw new IllegalArgumentException("ip list can not be empty, dom: " + dom.getName() + ", ip list: "
+ JSON.toJSONString(ipAddressMap.values()));
}
Loggers.EVT_LOG.info("{" + dom + "} {POS} {IP-UPDATE}" + ips +
", action:" + action);
String key = UtilsAndCommons.getIPListStoreKey(dom);
String value = JSON.toJSONString(ipAddressMap.values());
Datum datum = new Datum();
datum.key = key;
datum.value = value;
<<<<<<<
public void easyRemvIP4Dom(String namespaceId, String domName, List<IpAddress> ips) throws Exception {
Lock lock = dom2LockMap.get(UtilsAndCommons.assembleFullServiceName(namespaceId, domName));
if (lock == null) {
throw new IllegalStateException("no lock for " + domName + ", operation is disabled now.");
}
try {
lock.lock();
Domain dom = chooseDomMap(namespaceId).get(domName);
if (dom == null) {
throw new IllegalArgumentException("domain doesn't exist: " + domName);
}
Datum datum = RaftCore.getDatum(UtilsAndCommons.getIPListStoreKey(dom));
String oldJson = StringUtils.EMPTY;
List<IpAddress> currentIPs = dom.allIPs();
if (currentIPs.size() <= 0) {
return;
}
Map<String, IpAddress> map = new ConcurrentHashMap<String, IpAddress>(currentIPs.size());
for (IpAddress ipAddress : currentIPs) {
map.put(ipAddress.toIPAddr(), ipAddress);
}
if (datum != null) {
oldJson = datum.value;
}
List<IpAddress> ipAddrs = setValid(oldJson, map);
ipAddrs.removeAll(ips);
RaftCore.doSignalPublish(UtilsAndCommons.getIPListStoreKey(dom), JSON.toJSONString(ipAddrs));
} finally {
lock.unlock();
}
}
public Domain getDomain(String namespaceId, String serviceName) {
if (serviceMap.get(namespaceId) == null) {
return null;
}
return serviceMap.get(namespaceId).get(serviceName);
=======
public Domain getDomain(String domName) {
return chooseDomMap().get(domName);
>>>>>>>
public Domain getDomain(String domName) {
return chooseDomMap().get(domName); |
<<<<<<<
try {
return toObj(StringUtils.newString4UTF8(json), cls);
} catch (Exception e) {
throw new NacosDeserializationException(e);
}
}
public static <T> T toObj(InputStream inputStream, Class<T> tClass) throws Exception {
return mapper.readValue(inputStream, tClass);
}
public static <T> T toObj(byte[] json, TypeReference<T> typeReference) {
try {
return toObj(StringUtils.newString4UTF8(json), typeReference);
} catch (Exception e) {
throw new NacosDeserializationException(e);
}
}
=======
try {
return toObj(StringUtils.newString4UTF8(json), cls);
}
catch (Exception e) {
throw new NacosDeserializationException(e);
}
}
public static <T> T toObj(byte[] json, TypeReference<T> typeReference) {
try {
return toObj(StringUtils.newString4UTF8(json), typeReference);
}
catch (Exception e) {
throw new NacosDeserializationException(e);
}
}
>>>>>>>
try {
return toObj(StringUtils.newString4UTF8(json), cls);
} catch (Exception e) {
throw new NacosDeserializationException(e);
}
}
public static <T> T toObj(InputStream inputStream, Class<T> tClass) throws Exception {
return mapper.readValue(inputStream, tClass);
}
public static <T> T toObj(byte[] json, TypeReference<T> typeReference) {
try {
return toObj(StringUtils.newString4UTF8(json), typeReference);
} catch (Exception e) {
throw new NacosDeserializationException(e);
}
} |
<<<<<<<
public void addClient(String namespaceId,
=======
public static void setTotalPush(int totalPush) {
PushService.totalPush = totalPush;
}
public static void addClient(String namespaceId,
>>>>>>>
public static void setTotalPush(int totalPush) {
    PushService.totalPush = totalPush;
}
public void addClient(String namespaceId, |
<<<<<<<
import com.alibaba.nacos.consistency.entity.GetRequest;
=======
import com.alibaba.nacos.common.utils.ThreadUtils;
>>>>>>>
import com.alibaba.nacos.consistency.SerializeFactory;
import com.alibaba.nacos.consistency.entity.GetRequest;
import com.alibaba.nacos.common.utils.ThreadUtils;
<<<<<<<
=======
import com.alipay.remoting.ConnectionEventType;
import com.alipay.remoting.rpc.RpcServer;
import com.alipay.sofa.jraft.CliService;
import com.alipay.sofa.jraft.RouteTable;
import com.alipay.sofa.jraft.Status;
import com.alipay.sofa.jraft.conf.Configuration;
>>>>>>>
import com.alipay.sofa.jraft.CliService;
import com.alipay.sofa.jraft.RouteTable;
import com.alipay.sofa.jraft.Status;
import com.alipay.sofa.jraft.conf.Configuration;
<<<<<<<
=======
import java.util.Set;
import java.util.concurrent.Executor;
>>>>>>>
import java.util.Set;
<<<<<<<
=======
public static void joinCluster(CliService cliService, Collection<String> members, Configuration conf, String group, PeerId self) {
ServerMemberManager memberManager = ApplicationUtils.getBean(ServerMemberManager.class);
if (!memberManager.isFirstIp()) {
return;
}
Set<PeerId> peerIds = new HashSet<>();
for (String s : members) {
peerIds.add(PeerId.parsePeer(s));
}
peerIds.remove(self);
for ( ; ; ) {
if (peerIds.isEmpty()) {
return;
}
conf = RouteTable.getInstance().getConfiguration(group);
Iterator<PeerId> iterator = peerIds.iterator();
while (iterator.hasNext()) {
final PeerId peerId = iterator.next();
if (conf.contains(peerId)) {
iterator.remove();
continue;
}
Status status = cliService.addPeer(group, conf, peerId);
if (status.isOk()) {
iterator.remove();
}
}
ThreadUtils.sleep(1000L);
}
}
public static void addRaftRequestProcessors(final RpcServer rpcServer, final Executor raftExecutor,
final Executor cliExecutor) {
// raft core processors
final AppendEntriesRequestProcessor appendEntriesRequestProcessor = new AppendEntriesRequestProcessor(
raftExecutor);
rpcServer.addConnectionEventProcessor(ConnectionEventType.CLOSE, appendEntriesRequestProcessor);
rpcServer.registerUserProcessor(appendEntriesRequestProcessor);
rpcServer.registerUserProcessor(new GetFileRequestProcessor(raftExecutor));
rpcServer.registerUserProcessor(new InstallSnapshotRequestProcessor(raftExecutor));
rpcServer.registerUserProcessor(new RequestVoteRequestProcessor(raftExecutor));
rpcServer.registerUserProcessor(new PingRequestProcessor());
rpcServer.registerUserProcessor(new TimeoutNowRequestProcessor(raftExecutor));
rpcServer.registerUserProcessor(new ReadIndexRequestProcessor(raftExecutor));
// raft cli service
rpcServer.registerUserProcessor(new AddPeerRequestProcessor(cliExecutor));
rpcServer.registerUserProcessor(new RemovePeerRequestProcessor(cliExecutor));
rpcServer.registerUserProcessor(new ResetPeerRequestProcessor(cliExecutor));
rpcServer.registerUserProcessor(new ChangePeersRequestProcessor(cliExecutor));
rpcServer.registerUserProcessor(new GetLeaderRequestProcessor(cliExecutor));
rpcServer.registerUserProcessor(new SnapshotRequestProcessor(cliExecutor));
rpcServer.registerUserProcessor(new TransferLeaderRequestProcessor(cliExecutor));
rpcServer.registerUserProcessor(new GetPeersRequestProcessor(cliExecutor));
rpcServer.registerUserProcessor(new AddLearnersRequestProcessor(cliExecutor));
rpcServer.registerUserProcessor(new RemoveLearnersRequestProcessor(cliExecutor));
rpcServer.registerUserProcessor(new ResetLearnersRequestProcessor(cliExecutor));
}
>>>>>>>
public static void joinCluster(CliService cliService, Collection<String> members, Configuration conf, String group, PeerId self) {
ServerMemberManager memberManager = ApplicationUtils.getBean(ServerMemberManager.class);
if (!memberManager.isFirstIp()) {
return;
}
Set<PeerId> peerIds = new HashSet<>();
for (String s : members) {
peerIds.add(PeerId.parsePeer(s));
}
peerIds.remove(self);
for ( ; ; ) {
if (peerIds.isEmpty()) {
return;
}
conf = RouteTable.getInstance().getConfiguration(group);
Iterator<PeerId> iterator = peerIds.iterator();
while (iterator.hasNext()) {
final PeerId peerId = iterator.next();
if (conf.contains(peerId)) {
iterator.remove();
continue;
}
Status status = cliService.addPeer(group, conf, peerId);
if (status.isOk()) {
iterator.remove();
}
}
ThreadUtils.sleep(1000L);
}
} |
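A hedged call-site sketch for the joinCluster helper retained above. The group name, peer addresses and the cliService variable are placeholders, and the class hosting joinCluster is not shown in this hunk, so the call is written unqualified.

// Illustration only: cliService must be an existing com.alipay.sofa.jraft.CliService,
// and every literal below is a placeholder value.
Configuration conf = RouteTable.getInstance().getConfiguration("example_group");
PeerId self = PeerId.parsePeer("127.0.0.1:7848");
joinCluster(cliService, Arrays.asList("127.0.0.1:7848", "127.0.0.2:7848"),
        conf, "example_group", self);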
<<<<<<<
import java.util.List;
import javax.servlet.http.HttpServletRequest;
=======
import com.alibaba.fastjson.JSONObject;
import com.alibaba.nacos.naming.core.DomainsManager;
import com.alibaba.nacos.naming.core.VirtualClusterDomain;
import com.alibaba.nacos.naming.exception.NacosException;
import com.alibaba.nacos.naming.healthcheck.HealthCheckMode;
import com.alibaba.nacos.naming.misc.UtilsAndCommons;
import com.alibaba.nacos.naming.web.BaseServlet;
import org.apache.commons.lang3.StringUtils;
>>>>>>>
import java.util.List;
import javax.servlet.http.HttpServletRequest;
import com.alibaba.fastjson.JSONObject;
import com.alibaba.nacos.naming.core.DomainsManager;
import com.alibaba.nacos.naming.core.VirtualClusterDomain;
import com.alibaba.nacos.naming.exception.NacosException;
import com.alibaba.nacos.naming.healthcheck.HealthCheckMode;
import com.alibaba.nacos.naming.misc.UtilsAndCommons;
import com.alibaba.nacos.naming.web.BaseServlet;
import org.apache.commons.lang3.StringUtils;
<<<<<<<
import com.alibaba.fastjson.JSONObject;
import com.alibaba.nacos.naming.core.DomainsManager;
import com.alibaba.nacos.naming.misc.UtilsAndCommons;
import com.alibaba.nacos.naming.web.BaseServlet;
=======
import javax.servlet.http.HttpServletRequest;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
>>>>>>>
import java.util.Map;
<<<<<<<
=======
@RequestMapping(value = "/update", method = RequestMethod.POST)
public String update(HttpServletRequest request) throws Exception {
String serviceName = BaseServlet.required(request, "serviceName");
float protectThreshold = NumberUtils.toFloat(BaseServlet.required(request, "protectThreshold"));
String healthCheckMode = BaseServlet.required(request, "healthCheckMode");
String metadata = BaseServlet.optional(request, "metadata", StringUtils.EMPTY);
VirtualClusterDomain domain = (VirtualClusterDomain) domainsManager.getDomain(serviceName);
if (domain == null) {
throw new NacosException(NacosException.INVALID_PARAM, "service " + serviceName + " not found!");
}
domain.setProtectThreshold(protectThreshold);
if (HealthCheckMode.server.name().equals(healthCheckMode)) {
domain.setEnableHealthCheck(true);
domain.setEnableClientBeat(false);
}
if (HealthCheckMode.client.name().equals(healthCheckMode)) {
domain.setEnableClientBeat(true);
domain.setEnableHealthCheck(false);
}
if (HealthCheckMode.none.name().equals(healthCheckMode)) {
domain.setEnableClientBeat(false);
domain.setEnableHealthCheck(false);
}
Map<String, String> metadataMap = UtilsAndCommons.parseMetadata(metadata);
domain.setMetadata(metadataMap);
domain.setLastModifiedMillis(System.currentTimeMillis());
domain.recalculateChecksum();
domain.valid();
domainsManager.easyAddOrReplaceDom(domain);
return "ok";
}
>>>>>>>
@RequestMapping(value = "/update", method = RequestMethod.POST)
public String update(HttpServletRequest request) throws Exception {
String serviceName = BaseServlet.required(request, "serviceName");
float protectThreshold = NumberUtils.toFloat(BaseServlet.required(request, "protectThreshold"));
String healthCheckMode = BaseServlet.required(request, "healthCheckMode");
String metadata = BaseServlet.optional(request, "metadata", StringUtils.EMPTY);
VirtualClusterDomain domain = (VirtualClusterDomain) domainsManager.getDomain(serviceName);
if (domain == null) {
throw new NacosException(NacosException.INVALID_PARAM, "service " + serviceName + " not found!");
}
domain.setProtectThreshold(protectThreshold);
if (HealthCheckMode.server.name().equals(healthCheckMode)) {
domain.setEnableHealthCheck(true);
domain.setEnableClientBeat(false);
}
if (HealthCheckMode.client.name().equals(healthCheckMode)) {
domain.setEnableClientBeat(true);
domain.setEnableHealthCheck(false);
}
if (HealthCheckMode.none.name().equals(healthCheckMode)) {
domain.setEnableClientBeat(false);
domain.setEnableHealthCheck(false);
}
Map<String, String> metadataMap = UtilsAndCommons.parseMetadata(metadata);
domain.setMetadata(metadataMap);
domain.setLastModifiedMillis(System.currentTimeMillis());
domain.recalculateChecksum();
domain.valid();
domainsManager.easyAddOrReplaceDom(domain);
return "ok";
} |
<<<<<<<
public static String localServer() {
=======
private static String localIpAddress = null;
public static String localIP() {
>>>>>>>
private static String localIpAddress = null;
public static String localIP() {
public static String localServer() {
<<<<<<<
InetAddress inetAddress = InetAddress.getLocalHost();
String serverAddress = inetAddress.getHostAddress();
if (PREFER_HOSTNAME_OVER_IP) {
if (inetAddress.getHostName().equals(inetAddress.getCanonicalHostName())) {
serverAddress = inetAddress.getHostName();
} else {
serverAddress = inetAddress.getCanonicalHostName();
}
}
return serverAddress + UtilsAndCommons.CLUSTER_CONF_IP_SPLITER + RunningConfig.getServerPort();
=======
if (StringUtils.isBlank(localIpAddress)) {
localIpAddress = InetAddress.getLocalHost().getHostAddress();
}
return localIpAddress + ":" + RunningConfig.getServerPort();
>>>>>>>
if (StringUtils.isBlank(localIpAddress)) {
localIpAddress = InetAddress.getLocalHost().getHostAddress();
}
return localIpAddress + ":" + RunningConfig.getServerPort();
InetAddress inetAddress = InetAddress.getLocalHost();
String serverAddress = inetAddress.getHostAddress();
if (PREFER_HOSTNAME_OVER_IP) {
if (inetAddress.getHostName().equals(inetAddress.getCanonicalHostName())) {
serverAddress = inetAddress.getHostName();
} else {
serverAddress = inetAddress.getCanonicalHostName();
}
}
return serverAddress + UtilsAndCommons.CLUSTER_CONF_IP_SPLITER + RunningConfig.getServerPort(); |
<<<<<<<
@DisplayName("csv ํ์ผ๋ก ์นตํ
์ผ์ ์ ์ฅํ๋ค.")
@Test
void saveAll() {
String content =
"name,abv,description,origin,imageUrl,tag,sweet,sour,bitter,liquor,liquorQuantity,oz,special,specialQuantity,from\n"
+ "๊ฐ๋ง๋,30,๊ฐํ๋์์ ์์คํค ๋์ ๋ณด๋์นด๋ฅผ ์ฌ์ฉํ ์นตํ
์ผ์
๋๋ค.,God Mother ๋๋ชจ๋ผ๋ ๋ป์
๋๋ค.,\"https://images.cocktailflow.com/v1/cocktail/w_300,h_540/cocktail_god_mother-1.png\",\"์๋ชฌ๋,๋ถ๋๋ฌ์\",1,,1,\"๋ณด๋์นด,์๋ง๋ ๋\",\"45,30\",1.5,\"์คํ,๋ฉ๋ก \",\"1,2\",http://www.flickriver.com/photos/31027007@N08/31782815995/\n"
+ "๊ฐํ๋,39,\"๋ฒ ์ด์ค๋ก ์ค์นด์น ์์คํค๋ฅผ ์ฌ์ฉํฉ๋๋ค. ๊ธฐ์ฃผ๋ฅผ ๋ณด๋์นด๋ก ๋ฐ๊พธ๋ฉด '๊ฐ๋ง๋'๋ก, ๋ธ๋๋(์ฝ๋)๋ก ๋ฐ๊พธ๋ฉด 'ํ๋ ์น ์ปค๋ฅ์
'์ด ๋ฉ๋๋ค.\",์ํ '๋๋ถ'์์ ๋นํ ์ฝ๋ ์ค๋ค(๋ง๋ก ๋ธ๋๋ ์ญ)๊ฐ ๋ง์ ์นตํ
์ผ์
๋๋ค.,https://s7d9.scene7.com/is/image/SAQ/godfather-ec?$saq-fiche-cocktail$,์๋ชฌ๋,1,,1,\"์์คํค,์๋ง๋ ๋\",\"45,22.5\",1.5,,,http://www.flickriver.com/search/godfather+cocktail/\n"
+ "๊ทธ๋์คํธํผ,15,\"ํฌ๋ฆผ์ ์ฌ์ฉํ๊ณ , ๋
น์๋น์ ๋๊ณ ์๋ ๋ฏผํธํฅ ์นตํ
์ผ์
๋๋ค.\",์นตํ
์ผ์ ์์ด ๋
น์์ด์ด์ ๋ฉ๋๊ธฐ๋ผ๋ ์ด๋ฆ์ด ๋ถ์์ต๋๋ค.,https://banner2.kisspng.com/20180221/qde/kisspng-cocktail-sidecar-gimlet-martini-grasshopper-cocktail-5a8defe0899285.9571797915192514245635.jpg,\"๋ฏผํธ,์ด์ฝ,๋ถ๋๋ฌ์\",1,1,,\"ํฌ๋ ๋ ๋ฉํธ,ํฌ๋ ๋ ์นด์นด์ค, ์ฐ์ \",\"30,30,30\",1,,,http://www.flickriver.com/search/grasshopper+cocktail/";
MultipartFile file = new MockMultipartFile("file", "칵테일.csv", "text/csv",
content.getBytes());
cocktailService.saveAll(file);
verify(tagRepository, times(3)).saveAll(anyCollection());
verify(cocktailRepository, times(3)).save(any());
}
=======
@DisplayName("์นตํ
์ผ์ ์ญ์ ํ๋ค.")
@Test
void deleteCocktail() {
cocktailService.deleteCocktail(1L);
verify(cocktailRepository).deleteById(1L);
}
>>>>>>>
@DisplayName("์นตํ
์ผ์ ์ญ์ ํ๋ค.")
@Test
void deleteCocktail() {
cocktailService.deleteCocktail(1L);
verify(cocktailRepository).deleteById(1L);
}
@DisplayName("csv ํ์ผ๋ก ์นตํ
์ผ์ ์ ์ฅํ๋ค.")
@Test
void saveAll() {
String content =
"name,abv,description,origin,imageUrl,tag,sweet,sour,bitter,liquor,liquorQuantity,oz,special,specialQuantity,from\n"
+ "๊ฐ๋ง๋,30,๊ฐํ๋์์ ์์คํค ๋์ ๋ณด๋์นด๋ฅผ ์ฌ์ฉํ ์นตํ
์ผ์
๋๋ค.,God Mother ๋๋ชจ๋ผ๋ ๋ป์
๋๋ค.,\"https://images.cocktailflow.com/v1/cocktail/w_300,h_540/cocktail_god_mother-1.png\",\"์๋ชฌ๋,๋ถ๋๋ฌ์\",1,,1,\"๋ณด๋์นด,์๋ง๋ ๋\",\"45,30\",1.5,\"์คํ,๋ฉ๋ก \",\"1,2\",http://www.flickriver.com/photos/31027007@N08/31782815995/\n"
+ "๊ฐํ๋,39,\"๋ฒ ์ด์ค๋ก ์ค์นด์น ์์คํค๋ฅผ ์ฌ์ฉํฉ๋๋ค. ๊ธฐ์ฃผ๋ฅผ ๋ณด๋์นด๋ก ๋ฐ๊พธ๋ฉด '๊ฐ๋ง๋'๋ก, ๋ธ๋๋(์ฝ๋)๋ก ๋ฐ๊พธ๋ฉด 'ํ๋ ์น ์ปค๋ฅ์
'์ด ๋ฉ๋๋ค.\",์ํ '๋๋ถ'์์ ๋นํ ์ฝ๋ ์ค๋ค(๋ง๋ก ๋ธ๋๋ ์ญ)๊ฐ ๋ง์ ์นตํ
์ผ์
๋๋ค.,https://s7d9.scene7.com/is/image/SAQ/godfather-ec?$saq-fiche-cocktail$,์๋ชฌ๋,1,,1,\"์์คํค,์๋ง๋ ๋\",\"45,22.5\",1.5,,,http://www.flickriver.com/search/godfather+cocktail/\n"
+ "๊ทธ๋์คํธํผ,15,\"ํฌ๋ฆผ์ ์ฌ์ฉํ๊ณ , ๋
น์๋น์ ๋๊ณ ์๋ ๋ฏผํธํฅ ์นตํ
์ผ์
๋๋ค.\",์นตํ
์ผ์ ์์ด ๋
น์์ด์ด์ ๋ฉ๋๊ธฐ๋ผ๋ ์ด๋ฆ์ด ๋ถ์์ต๋๋ค.,https://banner2.kisspng.com/20180221/qde/kisspng-cocktail-sidecar-gimlet-martini-grasshopper-cocktail-5a8defe0899285.9571797915192514245635.jpg,\"๋ฏผํธ,์ด์ฝ,๋ถ๋๋ฌ์\",1,1,,\"ํฌ๋ ๋ ๋ฉํธ,ํฌ๋ ๋ ์นด์นด์ค, ์ฐ์ \",\"30,30,30\",1,,,http://www.flickriver.com/search/grasshopper+cocktail/";
MultipartFile file = new MockMultipartFile("file", "칵테일.csv", "text/csv",
content.getBytes());
cocktailService.saveAll(file);
verify(tagRepository, times(3)).saveAll(anyCollection());
verify(cocktailRepository, times(3)).save(any());
} |
<<<<<<<
JasonHelper.next("success", action, new JSONObject(), event, context);
=======
JSONObject postObject = new JSONObject();
if(action.getJSONObject("options").has("form")){
for(int i = 0; i<textFields.size();i++){
EditText textField = (EditText) textFields.get(i);
postObject.put(textField.getTag().toString(),textField.getText().toString());
}
}
JasonHelper.next("success", action, postObject, context);
>>>>>>>
JSONObject postObject = new JSONObject();
if(action.getJSONObject("options").has("form")){
for(int i = 0; i<textFields.size();i++){
EditText textField = (EditText) textFields.get(i);
postObject.put(textField.getTag().toString(),textField.getText().toString());
}
}
JasonHelper.next("success", action, postObject, event, context); |
<<<<<<<
requestIdHeaderName = getProperty(IClientConfigKey.CommonKeys.RequestIdHeaderName, null, null);
if (requestIdHeaderName != null) {
requestIdProvider = new HttpRequestIdProvider(requestIdHeaderName, RxContexts.DEFAULT_CORRELATOR);
}
listener = HttpClientListener.newHttpListener(getName());
=======
requestIdHeaderName = getProperty(IClientConfigKey.Keys.RequestIdHeaderName, null, DefaultClientConfigImpl.DEFAULT_REQUEST_ID_HEADER_NAME);
requestIdProvider = new HttpRequestIdProvider(requestIdHeaderName, RxContexts.DEFAULT_CORRELATOR);
>>>>>>>
requestIdHeaderName = getProperty(IClientConfigKey.Keys.RequestIdHeaderName, null, null);
if (requestIdHeaderName != null) {
requestIdProvider = new HttpRequestIdProvider(requestIdHeaderName, RxContexts.DEFAULT_CORRELATOR);
}
listener = HttpClientListener.newHttpListener(getName());
<<<<<<<
HttpClientBuilder<I, O> clientBuilder;
if (requestIdProvider != null) {
clientBuilder = RxContexts.<I, O>newHttpClientBuilder(server.getHost(), server.getPort(),
requestIdProvider, RxContexts.DEFAULT_CORRELATOR, pipelineConfigurator);
} else {
clientBuilder = RxContexts.<I, O>newHttpClientBuilder(server.getHost(), server.getPort(),
RxContexts.DEFAULT_CORRELATOR, pipelineConfigurator);
}
Integer connectTimeout = getProperty(IClientConfigKey.CommonKeys.ConnectTimeout, null, DefaultClientConfigImpl.DEFAULT_CONNECT_TIMEOUT);
Integer readTimeout = getProperty(IClientConfigKey.CommonKeys.ReadTimeout, null, DefaultClientConfigImpl.DEFAULT_READ_TIMEOUT);
Boolean followRedirect = getProperty(IClientConfigKey.CommonKeys.FollowRedirects, null, null);
=======
HttpClientBuilder<I, O> clientBuilder = RxContexts.<I, O>newHttpClientBuilder(server.getHost(), server.getPort(),
requestIdHeaderName, RxContexts.DEFAULT_CORRELATOR)
.pipelineConfigurator(ContextPipelineConfigurators.httpClientConfigurator(requestIdProvider,
RxContexts.DEFAULT_CORRELATOR,
pipelineConfigurator));
Integer connectTimeout = getProperty(IClientConfigKey.Keys.ConnectTimeout, null, DefaultClientConfigImpl.DEFAULT_CONNECT_TIMEOUT);
Integer readTimeout = getProperty(IClientConfigKey.Keys.ReadTimeout, null, DefaultClientConfigImpl.DEFAULT_READ_TIMEOUT);
Boolean followRedirect = getProperty(IClientConfigKey.Keys.FollowRedirects, null, null);
>>>>>>>
HttpClientBuilder<I, O> clientBuilder;
if (requestIdProvider != null) {
clientBuilder = RxContexts.<I, O>newHttpClientBuilder(server.getHost(), server.getPort(),
requestIdProvider, RxContexts.DEFAULT_CORRELATOR, pipelineConfigurator);
} else {
clientBuilder = RxContexts.<I, O>newHttpClientBuilder(server.getHost(), server.getPort(),
RxContexts.DEFAULT_CORRELATOR, pipelineConfigurator);
}
Integer connectTimeout = getProperty(IClientConfigKey.Keys.ConnectTimeout, null, DefaultClientConfigImpl.DEFAULT_CONNECT_TIMEOUT);
Integer readTimeout = getProperty(IClientConfigKey.Keys.ReadTimeout, null, DefaultClientConfigImpl.DEFAULT_READ_TIMEOUT);
Boolean followRedirect = getProperty(IClientConfigKey.Keys.FollowRedirects, null, null); |
<<<<<<<
=======
import rx.Subscription;
import rx.functions.Func1;
>>>>>>>
import rx.Subscription; |
<<<<<<<
import com.netflix.client.config.DefaultClientConfigImpl;
=======
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import com.netflix.loadbalancer.BaseLoadBalancer;
>>>>>>>
import com.netflix.client.config.DefaultClientConfigImpl;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import com.netflix.loadbalancer.BaseLoadBalancer;
<<<<<<<
import com.google.common.util.concurrent.ThreadFactoryBuilder;
=======
>>>>>>> |