conflict_resolution (string, lengths 27 to 16k) |
---|
<<<<<<<
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import javax.annotation.Resource;
import javax.print.DocFlavor.STRING;
import stroom.entity.server.util.StroomEntityManager;
import stroom.util.logging.StroomLogger;
=======
import event.logging.BaseAdvancedQueryItem;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
>>>>>>>
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import javax.annotation.Resource;
import stroom.entity.server.util.StroomEntityManager; |
<<<<<<<
public Long findDelete(final FindStreamCriteria criteria) throws RuntimeException {
final Context context = new Context(null, System.currentTimeMillis());
final OldFindStreamCriteria oldFindStreamCriteria = expressionToFindCriteria.convert(criteria, context);
return findDelete(oldFindStreamCriteria);
}
=======
@Transactional(isolation = Isolation.READ_COMMITTED, propagation = Propagation.REQUIRES_NEW)
public Long findDelete(final FindStreamCriteria criteria) {
// Turn all folders in the criteria into feeds.
convertFoldersToFeeds(criteria, DocumentPermissionNames.READ);
>>>>>>>
@Transactional(isolation = Isolation.READ_COMMITTED, propagation = Propagation.REQUIRES_NEW)
public Long findDelete(final FindStreamCriteria criteria) throws RuntimeException {
final Context context = new Context(null, System.currentTimeMillis());
final OldFindStreamCriteria oldFindStreamCriteria = expressionToFindCriteria.convert(criteria, context);
return findDelete(oldFindStreamCriteria);
} |
<<<<<<<
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
=======
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;
>>>>>>>
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
<<<<<<<
=======
import stroom.util.logging.StroomLogger;
>>>>>>>
<<<<<<<
import java.nio.file.Files;
import java.nio.file.Paths;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
import java.time.temporal.ChronoUnit;
import java.time.temporal.TemporalUnit;
=======
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
>>>>>>>
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter; |
<<<<<<<
import stroom.entity.shared.EntityServiceSaveAction;
import stroom.pipeline.shared.PipelineDoc;
import stroom.docref.DocRef;
import stroom.query.api.v2.ExpressionItem;
=======
import stroom.pipeline.shared.PipelineEntity;
import stroom.process.shared.StreamProcessorFilterRow;
import stroom.process.shared.StreamProcessorRow;
import stroom.query.api.v2.DocRef;
>>>>>>>
import stroom.entity.shared.EntityServiceSaveAction;
import stroom.pipeline.shared.PipelineDoc;
import stroom.docref.DocRef;
import stroom.query.api.v2.ExpressionItem;
<<<<<<<
import stroom.data.meta.api.MetaDataSource;
import stroom.streamtask.shared.CreateProcessorAction;
import stroom.streamtask.shared.ProcessorFilter;
import stroom.streamtask.shared.ProcessorFilterRow;
import stroom.streamtask.shared.ProcessorRow;
=======
import stroom.streamtask.shared.StreamProcessorFilter;
>>>>>>>
import stroom.data.meta.api.MetaDataSource;
import stroom.streamtask.shared.CreateProcessorAction;
import stroom.streamtask.shared.ProcessorFilter;
import stroom.streamtask.shared.ProcessorFilterRow;
import stroom.streamtask.shared.ProcessorRow;
<<<<<<<
if (pipelineDoc != null) {
addOrEditProcessor(null);
=======
if (pipelineEntity != null) {
processorEditPresenterProvider.get().show(DocRefUtil.create(pipelineEntity), null, result -> {
if (result != null) {
refresh(result);
}
});
>>>>>>>
if (pipelineDoc != null) {
processorEditPresenterProvider.get().show(DocRefUtil.create(pipelineDoc), null, result -> {
if (result != null) {
refresh(result);
}
}); |
<<<<<<<
import stroom.query.api.Field;
import stroom.query.api.FieldBuilder;
import stroom.query.api.Format;
import stroom.query.api.OffsetRange;
import stroom.query.api.Query;
import stroom.query.api.QueryKey;
import stroom.query.api.ResultRequest;
import stroom.query.api.Row;
import stroom.query.api.SearchRequest;
import stroom.query.api.SearchResponse;
import stroom.query.api.TableResult;
import stroom.query.api.TableSettings;
import stroom.search.server.SearchResource;
import stroom.util.logging.StroomLogger;
=======
>>>>>>>
import stroom.query.api.Field;
import stroom.query.api.FieldBuilder;
import stroom.query.api.Format;
import stroom.query.api.OffsetRange;
import stroom.query.api.Query;
import stroom.query.api.QueryKey;
import stroom.query.api.ResultRequest;
import stroom.query.api.Row;
import stroom.query.api.SearchRequest;
import stroom.query.api.SearchResponse;
import stroom.query.api.TableResult;
import stroom.query.api.TableSettings;
import stroom.search.server.SearchResource;
<<<<<<<
import java.time.ZoneOffset;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.UUID;
public class TestTagCloudSearch extends AbstractCoreIntegrationTest {
private static final StroomLogger LOGGER = StroomLogger.getLogger(TestTagCloudSearch.class);
=======
import java.util.*;
public class TestTagCloudSearch extends AbstractSearchTest {
private static final Logger LOGGER = LoggerFactory.getLogger(TestTagCloudSearch.class);
>>>>>>>
import java.time.ZoneOffset;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.UUID;
public class TestTagCloudSearch extends AbstractSearchTest {
private static final Logger LOGGER = LoggerFactory.getLogger(TestTagCloudSearch.class); |
<<<<<<<
import java.util.ArrayList;
import java.util.List;
import javax.annotation.Resource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
=======
>>>>>>>
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
<<<<<<<
LOGGER.trace("recordStatistics() - abort {}", metaData, ex);
=======
LOGGER.error("recordStatistics() - abort %s", metaData, ex);
>>>>>>>
LOGGER.error("recordStatistics() - abort {}", metaData, ex); |
<<<<<<<
=======
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.PathParam;
import javax.ws.rs.container.ResourceContext;
import javax.ws.rs.container.ResourceInfo;
import javax.ws.rs.ext.WriterInterceptorContext;
>>>>>>>
<<<<<<<
RestResourceAutoLoggerImpl filter;
ObjectMapper objectMapper;
Random random = new Random();
private final HttpServletRequest request = new MockHttpServletRequest();
private final MockContainerRequestContext requestContext = new MockContainerRequestContext();
private final SecurityContext securityContext = new MockSecurityContext();
=======
private static final Integer BEFORE_ID = 78910;
private HttpServletRequest request = new MockHttpServletRequest();
private MockContainerRequestContext requestContext = new MockContainerRequestContext();
private SecurityContext securityContext = new MockSecurityContext();
>>>>>>>
private static final Integer BEFORE_ID = 78910;
private HttpServletRequest request = new MockHttpServletRequest();
private MockContainerRequestContext requestContext = new MockContainerRequestContext();
private SecurityContext securityContext = new MockSecurityContext();
<<<<<<<
private final StroomEventLoggingService eventLoggingService = new MockStroomEventLoggingService();
=======
private StroomEventLoggingService eventLoggingService = new MockStroomEventLoggingService();
>>>>>>>
private StroomEventLoggingService eventLoggingService = new MockStroomEventLoggingService();
<<<<<<<
private final RequestLoggingConfig config = new RequestLoggingConfig();
=======
@Mock
private DelegatingExceptionMapper delegatingExceptionMapper;
private LoggingConfig config = new LoggingConfig();
private TestResource testResource = new TestResource();
>>>>>>>
@Mock
private DelegatingExceptionMapper delegatingExceptionMapper;
private LoggingConfig config = new LoggingConfig();
private TestResource testResource = new TestResource();
<<<<<<<
TestRestResourceAutoLogger() {
injector = Guice.createInjector(new MockRsLoggingModule());
=======
TestRestResourceAutoLogger(){
injector = Guice.createInjector(new MockRSLoggingModule());
>>>>>>>
TestRestResourceAutoLogger() {
injector = Guice.createInjector(new MockRsLoggingModule());
<<<<<<<
} catch (Exception e) {
// Ignore errors
}
=======
} catch (Exception e) {}
>>>>>>>
} catch (Exception e) {
// Ignore any error
}
<<<<<<<
public static class TestObj implements Serializable {
=======
private static ObjectMapper createObjectMapper() {
final ObjectMapper mapper = new ObjectMapper();
mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
mapper.configure(SerializationFeature.INDENT_OUTPUT, false);
mapper.setSerializationInclusion(Include.NON_NULL);
return mapper;
}
public static class TestObj implements Serializable, HasIntegerId {
>>>>>>>
private static ObjectMapper createObjectMapper() {
final ObjectMapper mapper = new ObjectMapper();
mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
mapper.configure(SerializationFeature.INDENT_OUTPUT, false);
mapper.setSerializationInclusion(Include.NON_NULL);
return mapper;
}
public static class TestObj implements Serializable, HasIntegerId { |
<<<<<<<
import stroom.internalstatistics.MetaDataStatistic;
=======
import stroom.feed.MetaMap;
import stroom.proxy.repo.StroomHeaderStreamHandler;
import stroom.proxy.repo.StroomStreamHandler;
import stroom.proxy.repo.StroomZipEntry;
import stroom.proxy.repo.StroomZipFileType;
import stroom.proxy.repo.StroomZipNameSet;
import stroom.statistic.server.MetaDataStatistic;
>>>>>>>
import stroom.internalstatistics.MetaDataStatistic;
import stroom.feed.MetaMap;
import stroom.proxy.repo.StroomHeaderStreamHandler;
import stroom.proxy.repo.StroomStreamHandler;
import stroom.proxy.repo.StroomZipEntry;
import stroom.proxy.repo.StroomZipFileType;
import stroom.proxy.repo.StroomZipNameSet;
import stroom.statistic.server.MetaDataStatistic;
<<<<<<<
import stroom.util.zip.HeaderMap;
import stroom.util.zip.StroomHeaderArguments;
import stroom.util.zip.StroomHeaderStreamHandler;
import stroom.util.zip.StroomStreamHandler;
import stroom.util.zip.StroomZipEntry;
import stroom.util.zip.StroomZipFileType;
import stroom.util.zip.StroomZipNameSet;
=======
import stroom.util.logging.StroomLogger;
import stroom.util.zip.MetaMapFactory;
import stroom.util.zip.StroomHeaderArguments;
>>>>>>>
import stroom.util.zip.StroomHeaderArguments; |
<<<<<<<
import stroom.feed.server.FeedService;
=======
import stroom.entity.shared.NamedEntity;
>>>>>>>
import stroom.feed.server.FeedService;
import stroom.entity.shared.NamedEntity;
<<<<<<<
import stroom.streamstore.server.StreamAttributeKeyService;
=======
import stroom.stats.shared.StroomStatsStoreEntity;
>>>>>>>
import stroom.stats.shared.StroomStatsStoreEntity;
import stroom.streamstore.server.StreamAttributeKeyService;
<<<<<<<
import java.sql.Connection;
=======
import java.time.Instant;
>>>>>>>
import java.sql.Connection;
import java.time.Instant;
<<<<<<<
LOGGER.info("Loading sample data for directory: " + FileUtil.getCanonicalPath(importRootDir));
=======
LOGGER.info("Loading sample data for directory: {}", importRootDir.toAbsolutePath());
>>>>>>>
LOGGER.info("Loading sample data for directory: " + FileUtil.getCanonicalPath(importRootDir));
<<<<<<<
=======
private void createRandomExplorerData(final Folder parentFolder, final String path, final int depth, final int maxDepth) {
for (int i = 0; i < 100; i++) {
final String folderName = "TEST_FOLDER_" + path + i;
LOGGER.info("Creating folder: {}", folderName);
final Folder folder = folderService.create(DocRefUtil.create(parentFolder), folderName);
for (int j = 0; j < 20; j++) {
final String newPath = path + String.valueOf(i) + "_";
final String feedName = "TEST_FEED_" + newPath + j;
LOGGER.info("Creating feed: " + feedName);
feedService.create(DocRefUtil.create(folder), feedName);
if (depth < maxDepth) {
createRandomExplorerData(folder, newPath, depth + 1, maxDepth);
}
}
}
}
>>>>>>> |
<<<<<<<
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import javax.annotation.Resource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
=======
import org.springframework.context.annotation.Scope;
>>>>>>>
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.annotation.Scope;
<<<<<<<
=======
import stroom.util.logging.StroomLogger;
import stroom.util.spring.StroomScope;
import javax.inject.Inject;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
>>>>>>>
import stroom.util.spring.StroomScope;
import javax.inject.Inject;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry; |
<<<<<<<
import stroom.util.shared.PageRequest;
import stroom.util.shared.Sort;
=======
import stroom.query.common.v2.DateExpressionParser;
>>>>>>>
import stroom.util.shared.PageRequest;
import stroom.util.shared.Sort;
import stroom.query.common.v2.DateExpressionParser; |
<<<<<<<
import stroom.entity.shared.ResultList;
import stroom.query.api.DocRef;
=======
>>>>>>>
import stroom.query.api.DocRef; |
<<<<<<<
public class TabContentProvider<E> implements HasDocumentRead<E>, HasWrite<E>, HasPermissionCheck {
private final Map<TabData, Provider<?>> tabProviders = new HashMap<>();
private final Map<TabData, PresenterWidget<?>> presenterCache = new HashMap<>();
=======
public class TabContentProvider<E> implements HasRead<E>, HasWrite<E>, HasPermissionCheck {
private final Map<TabData, Provider<?>> tabProviders = new HashMap<>();
private final Map<TabData, PresenterWidget<?>> presenterCache = new HashMap<>();
>>>>>>>
public class TabContentProvider<E> implements HasDocumentRead<E>, HasWrite<E>, HasPermissionCheck {
private final Map<TabData, Provider<?>> tabProviders = new HashMap<>();
private final Map<TabData, PresenterWidget<?>> presenterCache = new HashMap<>();
<<<<<<<
private void read(final PresenterWidget<?> presenter,
final DocRef docRef,
final E entity) {
if (presenter != null && presenter instanceof HasDocumentRead<?>) {
final HasDocumentRead<E> hasDocumentRead = (HasDocumentRead<E>) presenter;
hasDocumentRead.read(docRef, entity);
=======
private void read(final PresenterWidget<?> presenter, final E entity) {
if (entity != null && presenter instanceof HasRead<?>) {
final HasRead<E> hasRead = (HasRead<E>) presenter;
hasRead.read(entity);
>>>>>>>
private void read(final PresenterWidget<?> presenter,
final DocRef docRef,
final E entity) {
if (entity != null && presenter instanceof HasDocumentRead<?>) {
final HasDocumentRead<E> hasDocumentRead = (HasDocumentRead<E>) presenter;
hasDocumentRead.read(docRef, entity); |
<<<<<<<
@Api(tags = "Api Keys")
=======
@Api(tags = {"ApiKey"})
>>>>>>>
@Api(tags = "Api Keys")
<<<<<<<
@ApiOperation(value = "Get all tokens.")
=======
>>>>>>>
<<<<<<<
@ApiOperation(value = "Submit a search request for tokens")
ResultPage<Token> search(@Context @NotNull HttpServletRequest httpServletRequest,
@ApiParam("SearchRequest") @NotNull @Valid SearchTokenRequest request);
=======
@ApiOperation(value = "Submit a search request for tokens")
TokenResultPage search(@Context @NotNull HttpServletRequest httpServletRequest,
@ApiParam("SearchRequest") @NotNull @Valid SearchTokenRequest request);
>>>>>>>
@ApiOperation(value = "Submit a search request for tokens")
TokenResultPage search(@Context @NotNull HttpServletRequest httpServletRequest,
@ApiParam("SearchRequest") @NotNull @Valid SearchTokenRequest request);
<<<<<<<
@ApiOperation(value = "Provides access to this service's current public key. " +
"A client may use these keys to verify JWTs issued by this service.")
=======
@ApiOperation(value = "Provides access to this service's current public key. " +
"A client may use these keys to verify JWTs issued by this service.")
>>>>>>>
@ApiOperation(value = "Provides access to this service's current public key. " +
"A client may use these keys to verify JWTs issued by this service.") |
<<<<<<<
static ThreadLocal<ArrayDeque<StackTraceElement[]>> threadTransactionStack = new ThreadLocal<>();
private HibernateJpaDialect jpaDialect = new StroomHibernateJpaDialect();
@Override
public HibernateJpaDialect getJpaDialect() {
return jpaDialect;
}
=======
static ThreadLocal<ArrayDeque<StackTraceElement[]>> threadTransactionStack = ThreadLocal.withInitial(ArrayDeque::new);
>>>>>>>
static ThreadLocal<ArrayDeque<StackTraceElement[]>> threadTransactionStack = ThreadLocal.withInitial(ArrayDeque::new);
private HibernateJpaDialect jpaDialect = new StroomHibernateJpaDialect();
@Override
public HibernateJpaDialect getJpaDialect() {
return jpaDialect;
} |
<<<<<<<
import stroom.util.thread.BufferFactory;
=======
import stroom.util.thread.ThreadLocalBuffer;
import stroom.util.zip.StroomHeaderArguments;
>>>>>>>
import stroom.util.thread.BufferFactory;
<<<<<<<
import java.io.UncheckedIOException;
=======
import java.nio.file.Files;
>>>>>>>
import java.io.UncheckedIOException;
import java.nio.file.Files;
<<<<<<<
stroomZipOutputStream = new StroomZipOutputStreamImpl(data, zipProgressMonitor, false);
long id = 0;
long fileCount = 0;
final String fileBasePath = FileUtil.getCanonicalPath(data).substring(0, FileUtil.getCanonicalPath(data).lastIndexOf(".zip"));
=======
>>>>>>>
<<<<<<<
streamProgressMonitor.info("Stream Input {}/{}", entryProgress, entryTotal);
=======
streamProgressMonitor.info("Stream Input %s/%s", entryProgress, entryTotal);
>>>>>>>
streamProgressMonitor.info("Stream Input {}/{}", entryProgress, entryTotal);
<<<<<<<
// Write out the manifest
if (part == 1 || part == -1) {
dataSource.getAttributeMap().write(stroomZipOutputStream
.addEntry(new StroomZipEntry(null, basePartName, StroomZipFileType.Manifest).getFullName()), true);
}
=======
// Write out the manifest
if (part == 1 || part == -1) {
dataSource.getAttributeMap().write(stroomZipOutputStream
.addEntry(new StroomZipEntry(null, basePartName, StroomZipFileType.Manifest)), true);
}
>>>>>>>
// Write out the manifest
if (part == 1 || part == -1) {
dataSource.getAttributeMap().write(stroomZipOutputStream
.addEntry(new StroomZipEntry(null, basePartName, StroomZipFileType.Manifest).getFullName()), true);
} |
<<<<<<<
expectedNodeResultCount.incrementAndGet();
final ClusterSearchTask clusterSearchTask = new ClusterSearchTask(task.getUserToken(), "Cluster Search", query, shards, sourceNode, storedFields,
=======
final ClusterSearchTask clusterSearchTask = new ClusterSearchTask(task.getUserIdentity(), "Cluster Search", query, shards, sourceNode, storedFields,
>>>>>>>
expectedNodeResultCount.incrementAndGet();
final ClusterSearchTask clusterSearchTask = new ClusterSearchTask(task.getUserIdentity(), "Cluster Search", query, shards, sourceNode, storedFields, |
<<<<<<<
import stroom.docref.DocRef;
import stroom.kafka.pipeline.AbstractKafkaProducerFilter;
import stroom.kafka.pipeline.KafkaProducerFactory;
=======
import stroom.docref.DocRef;
import stroom.kafka.AbstractKafkaProducerFilter;
import stroom.kafka.StroomKafkaProducerFactoryService;
>>>>>>>
import stroom.docref.DocRef;
import stroom.kafka.pipeline.AbstractKafkaProducerFilter;
import stroom.kafka.pipeline.KafkaProducerFactory;
<<<<<<<
=======
import stroom.properties.api.PropertyService;
>>>>>>>
<<<<<<<
final LocationFactoryProxy locationFactory,
final KafkaProducerFactory stroomKafkaProducerFactoryService,
final HBaseStatisticsConfig hBaseStatisticsConfig,
final StroomStatsStoreStore stroomStatsStoreStore) {
=======
final LocationFactoryProxy locationFactory,
final PropertyService propertyService,
final StroomKafkaProducerFactoryService stroomKafkaProducerFactoryService,
final TopicNameFactory topicNameFactory,
final StroomStatsStoreStore stroomStatsStoreStore) {
>>>>>>>
final LocationFactoryProxy locationFactory,
final KafkaProducerFactory kafkaProducerFactory,
final HBaseStatisticsConfig hBaseStatisticsConfig,
final StroomStatsStoreStore stroomStatsStoreStore) { |
<<<<<<<
public List<Data> getDataItems(java.lang.Object obj) {
if (obj == null || loggingConfig.getMaxDataElementStringLength() == 0){
=======
public List<Data> getDataItems(Object obj) {
if (obj == null){
>>>>>>>
public List<Data> getDataItems(Object obj) {
if (obj == null || loggingConfig.getMaxDataElementStringLength() == 0){ |
<<<<<<<
=======
import org.apache.shiro.authc.AuthenticationException;
import org.apache.shiro.authc.AuthenticationToken;
>>>>>>>
<<<<<<<
=======
private static String toToken(byte[] key, JwtClaims claims) {
final JsonWebSignature jws = new JsonWebSignature();
jws.setPayload(claims.toJson());
jws.setAlgorithmHeaderValue(HMAC_SHA256);
jws.setKey(new HmacKey(key));
jws.setDoKeyValidation(false);
try {
return jws.getCompactSerialization();
} catch (JoseException e) {
throw new RuntimeException(e.getMessage(), e);
}
}
>>>>>>>
private static String toToken(byte[] key, JwtClaims claims) {
final JsonWebSignature jws = new JsonWebSignature();
jws.setPayload(claims.toJson());
jws.setAlgorithmHeaderValue(HMAC_SHA256);
jws.setKey(new HmacKey(key));
jws.setDoKeyValidation(false);
try {
return jws.getCompactSerialization();
} catch (JoseException e) {
throw new RuntimeException(e.getMessage(), e);
}
} |
<<<<<<<
public Flowable<String[]> search(final StatisticStoreDoc statisticStoreEntity,
final FindEventCriteria criteria,
final FieldIndexMap fieldIndexMap) {
=======
public Flowable<Val[]> search(final StatisticStoreEntity statisticStoreEntity,
final FindEventCriteria criteria,
final FieldIndexMap fieldIndexMap) {
>>>>>>>
public Flowable<Val[]> search(final StatisticStoreDoc statisticStoreEntity,
final FindEventCriteria criteria,
final FieldIndexMap fieldIndexMap) { |
<<<<<<<
import stroom.explorer.shared.ExplorerConstants;
import stroom.explorer.shared.ExplorerNode;
import stroom.query.api.v1.DocRef;
=======
import stroom.entity.shared.Folder;
import stroom.explorer.shared.EntityData;
import stroom.explorer.shared.ExplorerData;
import stroom.query.api.v2.DocRef;
>>>>>>>
import stroom.explorer.shared.ExplorerConstants;
import stroom.explorer.shared.ExplorerNode;
import stroom.query.api.v2.DocRef; |
<<<<<<<
private final AboutResources resources;
=======
>>>>>>> |
<<<<<<<
if (stepLocation != null) {
step(StepType.REFRESH, new StepLocation(
meta.getId(),
stepLocation.getPartNo(),
stepLocation.getRecordNo()));
=======
if (stepLocation != null && stepLocation.getRecordNo() > 0) {
step(StepType.REFRESH, new StepLocation(meta.getId(), stepLocation.getPartNo(), stepLocation.getRecordNo()));
>>>>>>>
if (stepLocation != null && stepLocation.getRecordNo() > 0) {
step(StepType.REFRESH, new StepLocation(
meta.getId(),
stepLocation.getPartNo(),
stepLocation.getRecordNo())); |
<<<<<<<
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.annotation.Scope;
import org.springframework.stereotype.Component;
=======
import org.springframework.context.annotation.Scope;
import org.springframework.stereotype.Component;
>>>>>>>
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.annotation.Scope;
import org.springframework.stereotype.Component;
<<<<<<<
import stroom.pipeline.server.reader.InputStreamElement;
=======
>>>>>>>
import stroom.pipeline.server.reader.InputStreamElement; |
<<<<<<<
=======
import stroom.util.logging.LambdaLogger;
import stroom.util.logging.LambdaLoggerFactory;
import stroom.util.pipeline.scope.PipelineScopeRunnable;
import stroom.util.shared.OffsetRange;
>>>>>>>
import stroom.util.pipeline.scope.PipelineScopeRunnable;
import stroom.util.shared.OffsetRange;
<<<<<<<
=======
private static final LambdaLogger LOGGER = LambdaLoggerFactory.getLogger(DataResourceImpl.class);
private final DataFetcher dataFetcher;
>>>>>>>
private final DataFetcher dataFetcher;
<<<<<<<
DataResourceImpl(
final ResourceStore resourceStore,
final DataUploadTaskHandler dataUploadTaskHandler,
final DataDownloadTaskHandler dataDownloadTaskHandler,
final StreamEventLog streamEventLog,
final SecurityContext securityContext) {
=======
DataResourceImpl(final Store streamStore,
final FeedProperties feedProperties,
final Provider<FeedHolder> feedHolderProvider,
final Provider<MetaDataHolder> metaDataHolderProvider,
final Provider<PipelineHolder> pipelineHolderProvider,
final Provider<MetaHolder> metaHolderProvider,
final PipelineStore pipelineStore,
final Provider<PipelineFactory> pipelineFactoryProvider,
final Provider<ErrorReceiverProxy> errorReceiverProxyProvider,
final PipelineDataCache pipelineDataCache,
final PipelineScopeRunnable pipelineScopeRunnable,
final ResourceStore resourceStore,
final TaskManager taskManager,
final StreamEventLog streamEventLog,
final SecurityContext securityContext) {
dataFetcher = new DataFetcher(streamStore,
feedProperties,
feedHolderProvider,
metaDataHolderProvider,
pipelineHolderProvider,
metaHolderProvider,
pipelineStore,
pipelineFactoryProvider,
errorReceiverProxyProvider,
pipelineDataCache,
streamEventLog,
securityContext,
pipelineScopeRunnable);
>>>>>>>
DataResourceImpl(final Store streamStore,
final FeedProperties feedProperties,
final Provider<FeedHolder> feedHolderProvider,
final Provider<MetaDataHolder> metaDataHolderProvider,
final Provider<PipelineHolder> pipelineHolderProvider,
final Provider<MetaHolder> metaHolderProvider,
final PipelineStore pipelineStore,
final Provider<PipelineFactory> pipelineFactoryProvider,
final Provider<ErrorReceiverProxy> errorReceiverProxyProvider,
final PipelineDataCache pipelineDataCache,
final PipelineScopeRunnable pipelineScopeRunnable,
final ResourceStore resourceStore,
final DataUploadTaskHandler dataUploadTaskHandler,
final DataDownloadTaskHandler dataDownloadTaskHandler,
final StreamEventLog streamEventLog,
final SecurityContext securityContext) {
dataFetcher = new DataFetcher(streamStore,
feedProperties,
feedHolderProvider,
metaDataHolderProvider,
pipelineHolderProvider,
metaHolderProvider,
pipelineStore,
pipelineFactoryProvider,
errorReceiverProxyProvider,
pipelineDataCache,
streamEventLog,
securityContext,
pipelineScopeRunnable); |
<<<<<<<
import stroom.searchable.impl.SearchableResource;
import stroom.searchable.impl.spring.SearchableConfiguration;
=======
import stroom.security.server.AuthenticationResource;
>>>>>>>
import stroom.searchable.impl.SearchableResource;
import stroom.searchable.impl.spring.SearchableConfiguration;
import stroom.security.server.AuthenticationResource;
<<<<<<<
SpringUtil.addResource(environment.jersey(), applicationContext, StroomIndexQueryResource.class);
SpringUtil.addResource(environment.jersey(), applicationContext, StroomSolrIndexQueryResource.class);
SpringUtil.addResource(environment.jersey(), applicationContext, SqlStatisticsQueryResource.class);
SpringUtil.addResource(environment.jersey(), applicationContext, SearchableResource.class);
SpringUtil.addResource(environment.jersey(), applicationContext, AuthorisationResource.class);
=======
>>>>>>>
SpringUtil.addResource(environment.jersey(), applicationContext, SearchableResource.class);
<<<<<<<
SpringUtil.addResource(environment.jersey(), applicationContext, FeedStatusResource.class);
SpringUtil.addResource(environment.jersey(), applicationContext, AnnotationResource.class);
SpringUtil.addResource(environment.jersey(), applicationContext, UserResource.class);
=======
SpringUtil.addResource(environment.jersey(), applicationContext, SqlStatisticsQueryResource.class);
SpringUtil.addResource(environment.jersey(), applicationContext, StroomIndexQueryResource.class);
>>>>>>>
SpringUtil.addResource(environment.jersey(), applicationContext, SqlStatisticsQueryResource.class);
SpringUtil.addResource(environment.jersey(), applicationContext, StroomIndexQueryResource.class);
SpringUtil.addResource(environment.jersey(), applicationContext, StroomSolrIndexQueryResource.class);
SpringUtil.addResource(environment.jersey(), applicationContext, UserResource.class); |
<<<<<<<
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
import org.fusesource.restygwt.client.DirectRestService;
import java.util.List;
=======
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
import org.fusesource.restygwt.client.DirectRestService;
>>>>>>>
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
import org.fusesource.restygwt.client.DirectRestService;
import java.util.List;
<<<<<<<
@ApiOperation(
value = "Update a pipeline doc",
response = PipelineDoc.class)
PipelineDoc update(@ApiParam("PipelineDoc") PipelineDoc pipelineDoc);
=======
@ApiOperation("Update a pipeline doc")
PipelineDoc update(@ApiParam("PipelineDoc") PipelineDoc PipelineDoc);
>>>>>>>
@ApiOperation("Update a pipeline doc")
PipelineDoc update(@ApiParam("PipelineDoc") PipelineDoc pipelineDoc); |
<<<<<<<
=======
@Override
protected void finalize() throws Throwable {
destroy();
super.finalize();
}
}
/**
* Record and modify the test state in a static thread local as we want to
* reset in static beforeClass() method.
*/
private static ThreadLocal<State> stateThreadLocal = ThreadLocal.withInitial(() -> {
final State state = new State();
state.create();
return state;
});
public static State getState() {
return stateThreadLocal.get();
>>>>>>> |
<<<<<<<
import stroom.servicediscovery.ServiceDiscoveryConfig;
import stroom.servlet.HttpServletRequestHolder;
=======
import stroom.util.spring.StroomBeanStore;
import stroom.util.spring.StroomScope;
>>>>>>>
import stroom.servicediscovery.ServiceDiscoveryConfig;
<<<<<<<
=======
private static final String PROP_KEY_SERVICE_DISCOVERY_ENABLED = "stroom.serviceDiscovery.enabled";
>>>>>>>
<<<<<<<
DataSourceProviderRegistryImpl(final SecurityContext securityContext,
final ServiceDiscoveryConfig serviceDiscoveryConfig,
final StroomBeanStore stroomBeanStore,
final HttpServletRequestHolder httpServletRequestHolder,
final DataSourceUrlConfig dataSourceUrlConfig) {
final boolean isServiceDiscoveryEnabled = serviceDiscoveryConfig.isEnabled();
=======
public DataSourceProviderRegistryImpl(final SecurityContext securityContext,
final StroomPropertyService stroomPropertyService,
final StroomBeanStore stroomBeanStore) {
boolean isServiceDiscoveryEnabled = stroomPropertyService.getBooleanProperty(
PROP_KEY_SERVICE_DISCOVERY_ENABLED,
false);
>>>>>>>
DataSourceProviderRegistryImpl(final SecurityContext securityContext,
final ServiceDiscoveryConfig serviceDiscoveryConfig,
final StroomBeanStore stroomBeanStore,
final DataSourceUrlConfig dataSourceUrlConfig) {
final boolean isServiceDiscoveryEnabled = serviceDiscoveryConfig.isEnabled();
<<<<<<<
dataSourceUrlConfig,
httpServletRequestHolder);
=======
stroomPropertyService);
>>>>>>>
dataSourceUrlConfig); |
<<<<<<<
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
import org.springframework.util.StringUtils;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.xml.sax.XMLReader;
import org.xml.sax.ext.Attributes2Impl;
import stroom.entity.server.GenericEntityMarshaller;
=======
import org.springframework.stereotype.Component;
>>>>>>>
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
<<<<<<<
import stroom.entity.server.util.BaseEntityBeanWrapper;
import stroom.entity.server.util.BaseEntityUtil;
import stroom.entity.server.util.EntityServiceExceptionUtil;
import stroom.entity.server.util.XMLUtil;
import stroom.entity.shared.BaseEntity;
import stroom.entity.shared.DocRefUtil;
=======
import stroom.entity.shared.DocRef;
import stroom.entity.shared.DocRefs;
>>>>>>>
import stroom.entity.shared.DocRefUtil;
import stroom.entity.shared.DocRefs;
<<<<<<<
import stroom.query.api.DocRef;
import stroom.streamstore.server.fs.FileSystemUtil;
=======
import stroom.security.SecurityContext;
import stroom.security.shared.DocumentPermissionNames;
>>>>>>>
import stroom.query.api.DocRef;
import stroom.security.SecurityContext;
import stroom.security.shared.DocumentPermissionNames;
<<<<<<<
import stroom.util.date.DateUtil;
import stroom.util.io.StreamUtil;
import stroom.util.shared.EqualsUtil;
=======
import stroom.util.logging.StroomLogger;
import stroom.util.shared.Message;
>>>>>>>
import stroom.util.shared.Message;
<<<<<<<
=======
import stroom.util.shared.Severity;
>>>>>>>
import stroom.util.shared.Severity;
<<<<<<<
private static final Logger LOGGER = LoggerFactory.getLogger(ImportExportSerializerImpl.class);
private static final Attributes2Impl BLANK_ATTRIBUTES = new Attributes2Impl();
=======
private static final DocRef SYSTEM_FOLDER = new DocRef(Folder.ENTITY_TYPE, "0", "System");
protected static final StroomLogger LOGGER = StroomLogger.getLogger(ImportExportSerializerImpl.class);
>>>>>>>
private static final DocRef SYSTEM_FOLDER = new DocRef(Folder.ENTITY_TYPE, "0", "System");
private static final Logger LOGGER = LoggerFactory.getLogger(ImportExportSerializerImpl.class);
<<<<<<<
private <E extends DocumentEntity> boolean doPerformExport(final File dir, final EntityIdSet<Folder> entityIdSet,
final Folder folder, final Class<E> entityClass, final String entityType, final List<Property> propertyList,
final boolean ignoreErrors, final List<String> messageList) {
final Set<Folder> exportedFolders = new HashSet<>();
boolean exportedSomething = false;
// OK to export?
if (entityClass.equals(Folder.class)) {
// If it's the folder we query them and then check that they
// are in the set to export.
final FindFolderCriteria findFolderCriteria = new FindFolderCriteria();
findFolderCriteria.getFolderIdSet().setDeep(false);
if (folder == null) {
findFolderCriteria.getFolderIdSet().setMatchNull(true);
} else {
findFolderCriteria.getFolderIdSet().add(folder);
}
final List<Folder> list = folderService.find(findFolderCriteria);
for (final Folder entity : list) {
if (entityIdSet.isMatch(entity.getId())) {
performExport(dir, folder, Folder.class, entityType, propertyList, entity, ignoreErrors,
messageList);
exportedFolders.add(entity);
exportedSomething = true;
}
}
} else {
// Otherwise just check the parent is in the group we are supposed
// to export.
if (folder != null && entityIdSet.isMatch(folder.getId())) {
try {
final List<E> list = genericEntityService.findByFolder(entityType, DocRefUtil.create(folder), RESOURCE_FETCH_SET);
=======
// Create a map of all of the data required to import this document.
final Map<String, String> dataMap = new HashMap<>();
try (final Stream<Path> stream = Files.list(path.getParent())) {
final List<Path> parts = stream
.filter(p -> p.getFileName().toString().startsWith(matchingConfig))
.collect(Collectors.toList());
>>>>>>>
// Create a map of all of the data required to import this document.
final Map<String, String> dataMap = new HashMap<>();
try (final Stream<Path> stream = Files.list(path.getParent())) {
final List<Path> parts = stream
.filter(p -> p.getFileName().toString().startsWith(matchingConfig))
.collect(Collectors.toList());
<<<<<<<
LOGGER.info("performExport() - {} {}", entityType, entity);
try {
final String name = entity.getName();
final FileOutputStream fileOutputStream = new FileOutputStream(getXMLFile(dir, entityType, name));
=======
// Add the required folders for this new item.
if (importMode == ImportMode.IGNORE_CONFIRMATION
|| (importMode == ImportMode.ACTION_CONFIRMATION && importState.isAction())) {
if (pathIndex > 0) {
pathIndex = pathIndex - 1;
}
for (; pathIndex < relativePath.getNameCount() - 1; pathIndex++) {
final String folderName = relativePath.getName(pathIndex).toString();
Folder folder = folderService.create(nearestFolder, folderName);
nearestFolder = DocRef.create(folder);
folderRef = nearestFolder;
}
}
>>>>>>>
// Add the required folders for this new item.
if (importMode == ImportMode.IGNORE_CONFIRMATION
|| (importMode == ImportMode.ACTION_CONFIRMATION && importState.isAction())) {
if (pathIndex > 0) {
pathIndex = pathIndex - 1;
}
for (; pathIndex < relativePath.getNameCount() - 1; pathIndex++) {
final String folderName = relativePath.getName(pathIndex).toString();
Folder folder = folderService.create(nearestFolder, folderName);
nearestFolder = DocRefUtil.create(folder);
folderRef = nearestFolder;
}
}
<<<<<<<
@SuppressWarnings("unchecked")
private <E extends DocumentEntity> void performImport(final File file, final Folder folder,
final Class<E> entityClass, final String entityType, final List<Property> propertyList, final String name,
final Map<String, EntityActionConfirmation> confirmMap, final ImportMode importMode) {
init();
LOGGER.info("performImport() - {} {}", entityType, file);
try {
final String entityActionPath = toPath(folder, name);
final String confirmMapPath = toUniquePath(entityActionPath, entityType);
EntityActionConfirmation entityActionConfirmation = confirmMap.get(confirmMapPath);
if (entityActionConfirmation == null) {
entityActionConfirmation = new EntityActionConfirmation();
entityActionConfirmation.setPath(entityActionPath);
entityActionConfirmation.setEntityType(entityType);
confirmMap.put(confirmMapPath, entityActionConfirmation);
}
E entity = entityPathResolver.getEntity(entityType, folder, name, RESOURCE_FETCH_SET);
if (entity == null) {
if (importMode == ImportMode.CREATE_CONFIRMATION) {
entityActionConfirmation.setEntityAction(EntityAction.ADD);
// We are adding an item ... no point checking anything else
return;
}
entity = BaseEntityUtil.newInstance(entityClass);
entity.setFolder(folder);
} else {
if (importMode == ImportMode.CREATE_CONFIRMATION) {
entityActionConfirmation.setEntityAction(EntityAction.UPDATE);
}
}
if (importMode == ImportMode.ACTION_CONFIRMATION) {
if (!entityActionConfirmation.isAction()) {
return;
}
}
=======
private DocRef getExistingFolder(final DocRef docRef) {
// TODO : In v6 replace this method with calls to local explorer service to get folder.
final DocumentEntityService documentEntityService = getService(docRef);
if (documentEntityService == null) {
throw new RuntimeException("Unable to find service to import " + docRef);
}
>>>>>>>
private DocRef getExistingFolder(final DocRef docRef) {
// TODO : In v6 replace this method with calls to local explorer service to get folder.
final DocumentEntityService documentEntityService = getService(docRef);
if (documentEntityService == null) {
throw new RuntimeException("Unable to find service to import " + docRef);
}
<<<<<<<
genericEntityMarshaller.unmarshal(entityType, entity);
} catch (final EntityDependencyServiceException dependency) {
if (importMode == ImportMode.CREATE_CONFIRMATION) {
if (entityActionConfirmation.getEntityAction() != EntityAction.ADD) {
entityActionConfirmation.setEntityAction(EntityAction.UPDATE);
}
} else {
throw dependency;
}
}
// Did we update anything?
if (importMode == ImportMode.CREATE_CONFIRMATION
&& entityActionConfirmation.getEntityAction() == EntityAction.UPDATE) {
if (entityActionConfirmation.getUpdatedFieldList().size() == 0) {
entityActionConfirmation.setEntityAction(EntityAction.EQUAL);
} else {
try {
final Long originalUpdateTime = entity.getUpdateTime();
final List<Object> newDateString = propertyValues.get("updateTime");
if (newDateString != null && newDateString.size() == 1) {
Long newTime = null;
try {
newTime = DateUtil.parseNormalDateTimeString(newDateString.get(0).toString());
} catch (final Exception e) {
// Ignore.
}
try {
newTime = Long.valueOf(newDateString.get(0).toString());
} catch (final Exception e) {
// Ignore.
}
if (originalUpdateTime != null && newTime != null && originalUpdateTime > newTime) {
entityActionConfirmation.setWarning(true);
entityActionConfirmation.getMessageList().add(
"The item you are attempting to import is older than the current version.");
entityActionConfirmation.getMessageList()
.add("Current is "
+ DateUtil.createNormalDateTimeString(originalUpdateTime)
+ " (" + entity.getUpdateUser() + "), Import is "
+ DateUtil.createNormalDateTimeString(newTime));
}
}
} catch (final Exception ex) {
LOGGER.error("Unable to add date!", ex);
}
}
}
if (importMode == ImportMode.IGNORE_CONFIRMATION
|| (importMode == ImportMode.ACTION_CONFIRMATION && entityActionConfirmation.isAction())) {
genericEntityService.importEntity(entity, DocRefUtil.create(folder));
=======
performExport(dir, docRef, omitAuditFields, messageList);
} catch (final Exception e) {
messageList.add(new Message(Severity.ERROR, "Error created while exporting (" + docRef.toString() + ") : " + e.getMessage()));
>>>>>>>
performExport(dir, docRef, omitAuditFields, messageList);
} catch (final Exception e) {
messageList.add(new Message(Severity.ERROR, "Error created while exporting (" + docRef.toString() + ") : " + e.getMessage())); |
<<<<<<<
=======
import com.github.privacystreams.core.UQI;
import com.github.privacystreams.core.providers.mock.MockItem;
import com.github.privacystreams.core.purposes.Purpose;
>>>>>>>
import com.github.privacystreams.core.UQI;
import com.github.privacystreams.core.providers.mock.MockItem;
import com.github.privacystreams.core.purposes.Purpose; |
<<<<<<<
=======
private static final LambdaLogger LOGGER = LambdaLoggerFactory.getLogger(ContainerResourceInfo.class);
private final ResourceContext resourceContext;
>>>>>>>
private static final LambdaLogger LOGGER = LambdaLoggerFactory.getLogger(ContainerResourceInfo.class);
private final ResourceContext resourceContext;
<<<<<<<
public String getTypeId() {
=======
public boolean isAutologgerAnnotationPresent() {
return autologgerAnnotationPresent;
}
public Class<? extends EventActionDecorator> getEventActionDecoratorClass() {
return eventActionDecoratorClass;
}
public String getTypeId(){
>>>>>>>
public boolean isAutologgerAnnotationPresent() {
return autologgerAnnotationPresent;
}
public Class<? extends EventActionDecorator> getEventActionDecoratorClass() {
return eventActionDecoratorClass;
}
public String getTypeId() { |
<<<<<<<
=======
import stroom.util.logging.StroomLogger;
import stroom.util.scheduler.SimpleCron;
>>>>>>>
<<<<<<<
=======
private static final StroomLogger LOGGER = StroomLogger.getLogger(RollingStreamDestination.class);
private static final int ONE_MINUTE = 60000;
private final StreamKey key;
private final Long oldestAllowed;
private final long rollSize;
>>>>>>>
<<<<<<<
=======
private volatile long lastFlushTime;
private volatile byte[] footer;
private volatile boolean rolled;
private final ByteCountOutputStream outputStream;
private final RASegmentOutputStream segmentOutputStream;
>>>>>>>
<<<<<<<
=======
this.rollSize = rollSize;
>>>>>>>
<<<<<<<
=======
// Determine the oldest this destination can be.
Long time = null;
if (schedule != null) {
time = schedule.getNextTime(creationTime);
}
if (frequency != null) {
final long value = creationTime + frequency;
if (time == null || time > value) {
time = value;
}
}
oldestAllowed = time;
>>>>>>>
<<<<<<<
super.beforeRoll(exceptionConsumer);
=======
private boolean shouldRoll(final long currentTime) {
return (oldestAllowed != null && currentTime > oldestAllowed) ||
outputStream.getCount() > rollSize;
>>>>>>>
super.beforeRoll(exceptionConsumer); |
<<<<<<<
// Create a data retention rule decorator for adding data retention information to returned stream attribute maps.
List<DataRetentionRule> rules = Collections.emptyList();
final DataRetentionService dataRetentionService = dataRetentionServiceProvider.get();
if (dataRetentionService != null) {
final DataRetentionPolicy dataRetentionPolicy = dataRetentionService.load();
if (dataRetentionPolicy != null && dataRetentionPolicy.getRules() != null) {
rules = dataRetentionPolicy.getRules();
}
final StreamAttributeMapRetentionRuleDecorator ruleDecorator = new StreamAttributeMapRetentionRuleDecorator(dictionaryService, rules);
// Query the database for the attribute values
if (criteria.isUseCache()) {
loadAttributeMapFromDatabase(criteria, streamMDList, streamList, ruleDecorator);
} else {
loadAttributeMapFromFileSystem(criteria, streamMDList, streamList, ruleDecorator);
}
=======
// Query the database for the attribute values
if (criteria.isUseCache()) {
LOGGER.info("Loading attribute map from DB");
loadAttributeMapFromDatabase(criteria, streamMDList, streamList);
} else {
LOGGER.info("Loading attribute map from filesystem");
loadAttributeMapFromFileSystem(criteria, streamMDList, streamList);
>>>>>>>
// Create a data retention rule decorator for adding data retention information to returned stream attribute maps.
List<DataRetentionRule> rules = Collections.emptyList();
final DataRetentionService dataRetentionService = dataRetentionServiceProvider.get();
if (dataRetentionService != null) {
final DataRetentionPolicy dataRetentionPolicy = dataRetentionService.load();
if (dataRetentionPolicy != null && dataRetentionPolicy.getRules() != null) {
rules = dataRetentionPolicy.getRules();
}
final StreamAttributeMapRetentionRuleDecorator ruleDecorator = new StreamAttributeMapRetentionRuleDecorator(dictionaryService, rules);
// Query the database for the attribute values
if (criteria.isUseCache()) {
LOGGER.info("Loading attribute map from DB");
loadAttributeMapFromDatabase(criteria, streamMDList, streamList, ruleDecorator);
} else {
LOGGER.info("Loading attribute map from filesystem");
loadAttributeMapFromFileSystem(criteria, streamMDList, streamList, ruleDecorator);
} |
<<<<<<<
import java.nio.file.StandardOpenOption;
=======
import java.util.function.Consumer;
>>>>>>>
import java.nio.file.StandardOpenOption;
import java.util.function.Consumer;
<<<<<<<
outputStream = new ByteCountOutputStream(new BufferedOutputStream(Files.newOutputStream(file, StandardOpenOption.CREATE, StandardOpenOption.WRITE, StandardOpenOption.APPEND)));
=======
setOutputStream(new ByteCountOutputStream(new BufferedOutputStream(new FileOutputStream(file, true))));
>>>>>>>
setOutputStream(new ByteCountOutputStream(new BufferedOutputStream(Files.newOutputStream(file, StandardOpenOption.CREATE, StandardOpenOption.WRITE, StandardOpenOption.APPEND))));
<<<<<<<
outputStream = new ByteCountOutputStream(new BufferedOutputStream(Files.newOutputStream(file, StandardOpenOption.CREATE, StandardOpenOption.WRITE, StandardOpenOption.APPEND)));
=======
setOutputStream(new ByteCountOutputStream(new BufferedOutputStream(new FileOutputStream(file, true))));
>>>>>>>
setOutputStream(new ByteCountOutputStream(new BufferedOutputStream(Files.newOutputStream(file, StandardOpenOption.CREATE, StandardOpenOption.WRITE, StandardOpenOption.APPEND))));
<<<<<<<
Path source = file;
=======
Path source = null;
try {
source = file.toPath();
} catch (final Throwable t) {
exceptionConsumer.accept(wrapRollException(file, destFile, t));
}
>>>>>>>
Path source = file;
<<<<<<<
Path dest = destFile;
=======
Path dest = null;
try {
dest = destFile.toPath();
} catch (final Throwable t) {
exceptionConsumer.accept(wrapRollException(file, destFile, t));
}
>>>>>>>
Path dest = destFile;
<<<<<<<
private void write(final byte[] bytes) throws IOException {
outputStream.write(bytes, 0, bytes.length);
}
private void flush() throws IOException {
if (LOGGER.isDebugEnabled()) {
LOGGER.debug("Flushing: {}", key);
}
outputStream.flush();
}
private void close() throws IOException {
if (LOGGER.isDebugEnabled()) {
LOGGER.debug("Closing: {}", key);
}
if (outputStream != null) {
outputStream.close();
}
}
@Override
public String toString() {
return key;
}
private IOException handleException(final IOException existingException, final Throwable newException) {
LOGGER.error(newException.getMessage(), newException);
if (existingException != null) {
return existingException;
}
if (newException instanceof IOException) {
return (IOException) newException;
}
return new IOException(newException.getMessage(), newException);
}
private IOException handleRollException(final Path sourceFile, final Path destFile,
final IOException existingException, final Throwable newException) {
final String message = "Failed to roll file '" +
getFullPath(sourceFile) +
"' to '" +
getFullPath(destFile) +
"' - " +
newException.getMessage();
LOGGER.error(message, newException);
if (existingException != null) {
return existingException;
}
if (newException instanceof IOException) {
return (IOException) newException;
}
=======
private Throwable wrapRollException(final File sourceFile,
final File destFile,
final Throwable e) {
final String msg = String.format("Failed to roll file '%s' to '%s' - %s",
getFullPath(sourceFile),
getFullPath(destFile),
e.getMessage());
>>>>>>>
private Throwable wrapRollException(final Path sourceFile,
final Path destFile,
final Throwable e) {
final String msg = String.format("Failed to roll file '%s' to '%s' - %s",
getFullPath(sourceFile),
getFullPath(destFile),
e.getMessage()); |
<<<<<<<
import stroom.datasource.api.v1.DataSourceField;
import stroom.datasource.api.v1.DataSourceField.DataSourceFieldType;
=======
import stroom.dispatch.client.ClientDispatchAsync;
import stroom.entity.shared.DocRef;
>>>>>>>
import stroom.datasource.api.v1.DataSourceField;
import stroom.datasource.api.v1.DataSourceField.DataSourceFieldType;
import stroom.dispatch.client.ClientDispatchAsync;
import stroom.entity.shared.DocRef;
<<<<<<<
import stroom.query.api.v1.DocRef;
import stroom.query.api.v1.ExpressionTerm.Condition;
=======
import stroom.query.shared.ExpressionTerm.Condition;
import stroom.query.shared.IndexField;
import stroom.query.shared.IndexFieldType;
>>>>>>>
import stroom.query.api.v1.DocRef;
import stroom.query.api.v1.ExpressionTerm.Condition;
import stroom.query.shared.IndexField;
import stroom.query.shared.IndexFieldType;
<<<<<<<
private Term term;
private List<DataSourceField> indexFields;
=======
private Term term;
private List<IndexField> indexFields;
>>>>>>>
private Term term;
private List<DataSourceField> indexFields;
<<<<<<<
public void setFields(final List<DataSourceField> indexFields) {
=======
public void init(final ClientDispatchAsync dispatcher, final DocRef dataSource, final List<IndexField> indexFields) {
suggestOracle.setDispatcher(dispatcher);
suggestOracle.setDataSource(dataSource);
>>>>>>>
public void init(final ClientDispatchAsync dispatcher, final DocRef dataSource, final List<DataSourceField> indexFields) {
suggestOracle.setDispatcher(dispatcher);
suggestOracle.setDataSource(dataSource);
<<<<<<<
private void changeField(final DataSourceField field) {
=======
private void changeField(final IndexField field) {
suggestOracle.setField(field);
>>>>>>>
private void changeField(final DataSourceField field) {
suggestOracle.setField(field); |
<<<<<<<
import org.xml.sax.Attributes;
import org.xml.sax.SAXException;
=======
import stroom.query.common.v2.SearchDebugUtil;
import stroom.search.coprocessor.Values;
>>>>>>>
import stroom.query.common.v2.SearchDebugUtil;
<<<<<<<
consumer.accept(values);
=======
SearchDebugUtil.writeExtractionData(values);
consumer.accept(new Values(values));
>>>>>>>
SearchDebugUtil.writeExtractionData(values);
consumer.accept(values); |
<<<<<<<
import stroom.pipeline.server.errorhandler.TerminatedException;
import stroom.security.SecurityHelper;
=======
>>>>>>>
import stroom.security.SecurityHelper; |
<<<<<<<
import stroom.util.logging.LambdaLogger;
import stroom.util.logging.LambdaLoggerFactory;
=======
import stroom.security.api.SecurityContext;
>>>>>>>
import stroom.security.api.SecurityContext;
import stroom.util.logging.LambdaLogger;
import stroom.util.logging.LambdaLoggerFactory;
<<<<<<<
request.setAttribute(REQUEST_LOG_INFO_PROPERTY, new RequestInfo(
containerResourceInfo, stream.getRequestEntity()));
=======
request.setAttribute(REQUEST_LOG_INFO_PROPERTY, new RequestInfo(securityContext, containerResourceInfo, stream.getRequestEntity()));
>>>>>>>
request.setAttribute(REQUEST_LOG_INFO_PROPERTY,
new RequestInfo(securityContext, containerResourceInfo, stream.getRequestEntity())); |
<<<<<<<
import stroom.entity.shared.DocRefUtil;
import stroom.entity.shared.FindFolderCriteria;
=======
import stroom.entity.shared.DocRef;
import stroom.entity.shared.DocRefs;
import stroom.entity.shared.Folder;
>>>>>>>
import stroom.entity.shared.DocRefUtil;
import stroom.entity.shared.DocRefs;
import stroom.entity.shared.Folder;
<<<<<<<
import stroom.importexport.server.ImportExportSerializer.ImportMode;
import stroom.query.api.DocRef;
=======
import stroom.query.shared.DataSource;
>>>>>>>
import stroom.query.api.DocRef; |
<<<<<<<
// That places an interface requirement on the entity, which I think is best avoided.
public GenericDao(final Table<RecordType> table,
final TableField<RecordType, IdType> idField,
final Class<EntityType> entityTypeClass,
final DataSource connectionProvider) {
=======
// That places an interface requirement on the object, which I think is best avoided.
public GenericDao(@Nonnull final Table<RecordType> table,
@Nonnull final TableField<RecordType, IdType> idField,
@Nonnull final Class<ObjectType> objectTypeClass,
@Nonnull final DataSource connectionProvider) {
>>>>>>>
// That places an interface requirement on the object, which I think is best avoided.
public GenericDao(final Table<RecordType> table,
final TableField<RecordType, IdType> idField,
final Class<ObjectType> objectTypeClass,
final DataSource connectionProvider) {
<<<<<<<
public EntityType update(final EntityType entity) {
return JooqUtil.contextWithOptimisticLocking(connectionProvider, (context) -> {
RecordType record = context.newRecord(table, entity);
LAMBDA_LOGGER.debug(() -> LambdaLogger.buildMessage(
"Updating a {} with id {}", table.getName(), record.get(idField)));
=======
public ObjectType update(@Nonnull final ObjectType object) {
return JooqUtil.contextWithOptimisticLocking(connectionProvider, context -> {
final RecordType record = objectToRecordMapper.apply(object, context.newRecord(table));
LAMBDA_LOGGER.debug(() -> LambdaLogger.buildMessage("Updating a {} with id {}", table.getName(), record.get(idField)));
>>>>>>>
public ObjectType update(final ObjectType object) {
return JooqUtil.contextWithOptimisticLocking(connectionProvider, context -> {
final RecordType record = objectToRecordMapper.apply(object, context.newRecord(table));
LAMBDA_LOGGER.debug(() -> LambdaLogger.buildMessage(
"Updating a {} with id {}", table.getName(), record.get(idField))); |
<<<<<<<
import stroom.docref.DocRef;
=======
import stroom.core.client.HasSave;
>>>>>>>
import stroom.docref.DocRef;
import stroom.core.client.HasSave; |
<<<<<<<
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
=======
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.util.List;
import java.util.Map;
import java.util.zip.GZIPOutputStream;
import javax.net.ssl.HttpsURLConnection;
import stroom.util.ArgsUtil;
import stroom.util.CIStringHashMap;
import stroom.util.logging.StroomLogger;
>>>>>>>
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.util.List;
import java.util.Map;
import java.util.zip.GZIPOutputStream;
import javax.net.ssl.HttpsURLConnection;
import stroom.util.ArgsUtil;
import stroom.util.CIStringHashMap; |
<<<<<<<
import org.springframework.stereotype.Component;
import org.springframework.transaction.annotation.Transactional;
=======
>>>>>>> |
<<<<<<<
import java.util.List;
=======
import event.logging.ComplexLoggedOutcome;
import event.logging.ExportEventAction;
import event.logging.ImportEventAction;
import event.logging.MultiObject;
import event.logging.ViewEventAction;
>>>>>>>
import event.logging.ComplexLoggedOutcome;
import event.logging.ExportEventAction;
import event.logging.ImportEventAction;
import event.logging.MultiObject;
import event.logging.ViewEventAction;
import java.util.List;
import java.util.Set;
<<<<<<<
// streamEventLog.viewStream(
// streamId + ":" + streamsOffsetrequest.getFeedName(), request.getFileName(), null);
=======
>>>>>>>
<<<<<<<
// @Override
// public AbstractFetchDataResult fetchData( final long streamId,
// final Long streamsOffset,
// final Long streamsLength,
// final Long pageOffset,
// final Long pageSize) {
//
// final OffsetRange pageRange = new OffsetRange(pageOffset, pageSize);
// final OffsetRange streamRange = new OffsetRange(streamsOffset, streamsLength);
//
// // Used for organising errors but only relevant when the data is in fact errors
// final boolean isMarkerMode = true;
// final boolean showAsHtml = false; // Used for dashboards so false here.
// final Severity[] expandedSeverities = new Severity[]{
// Severity.INFO, Severity.WARNING, Severity.ERROR, Severity.FATAL_ERROR};
//
// //TODO Used for child streams. Needs implementing.
// String childStreamTypeName = null;
//
// return securityContext.secureResult(PermissionNames.VIEW_DATA_PERMISSION, () -> {
// dataFetcher.reset();
// return dataFetcher.getData(
// streamId,
// childStreamTypeName,
// streamRange,
// pageRange,
// isMarkerMode,
// null,
// showAsHtml,
// expandedSeverities);
// });
// }
=======
@AutoLogged(OperationType.MANUALLY_LOGGED)
@Override
public AbstractFetchDataResult fetch(final FetchDataRequest request) {
final String idStr = request.getSourceLocation() != null
? request.getSourceLocation().getIdentifierString()
: "?";
final StroomEventLoggingService stroomEventLoggingService = stroomEventLoggingServiceProvider.get();
return stroomEventLoggingService.loggedResult(
StroomEventLoggingUtil.buildTypeId(this, "fetch"),
"Viewing stream " + idStr,
ViewEventAction.builder()
.build(),
eventAction -> {
ComplexLoggedOutcome<AbstractFetchDataResult, ViewEventAction> outcome;
try {
// Do the fetch
final AbstractFetchDataResult fetchDataResult = dataServiceProvider.get()
.fetch(request);
outcome = ComplexLoggedOutcome.success(
fetchDataResult,
ViewEventAction.builder()
.withObjects(stroomEventLoggingService.convert(fetchDataResult))
.build());
} catch (ViewDataException vde) {
// Convert an ex into a fetch result
final AbstractFetchDataResult fetchDataResult = createErrorResult(vde);
outcome = ComplexLoggedOutcome.failure(
fetchDataResult,
ViewEventAction.builder()
.withObjects(stroomEventLoggingService.convert(fetchDataResult))
.build(),
vde.getMessage());
}
return outcome;
},
null);
}
@AutoLogged(OperationType.UNLOGGED) // Not an explicit user action
@Override
public Set<String> getChildStreamTypes(final long id, final long partNo) {
final Set<String> childStreamTypes = dataServiceProvider.get()
.getChildStreamTypes(id, partNo);
return childStreamTypes;
}
private FetchDataResult createErrorResult(final ViewDataException viewDataException) {
return new FetchDataResult(
null,
StreamTypeNames.RAW_EVENTS,
null,
viewDataException.getSourceLocation(),
OffsetRange.zero(),
Count.of(0L, true),
Count.of(0L, true),
0L,
null,
viewDataException.getMessage(),
false,
null);
}
@Override
public List<DataInfoSection> read(final Long id) {
// Provide the info when failing to read the info
return viewInfo(id);
}
>>>>>>>
@AutoLogged(OperationType.MANUALLY_LOGGED)
@Override
public AbstractFetchDataResult fetch(final FetchDataRequest request) {
final String idStr = request.getSourceLocation() != null
? request.getSourceLocation().getIdentifierString()
: "?";
final StroomEventLoggingService stroomEventLoggingService = stroomEventLoggingServiceProvider.get();
return stroomEventLoggingService.loggedResult(
StroomEventLoggingUtil.buildTypeId(this, "fetch"),
"Viewing stream " + idStr,
ViewEventAction.builder()
.build(),
eventAction -> {
ComplexLoggedOutcome<AbstractFetchDataResult, ViewEventAction> outcome;
try {
// Do the fetch
final AbstractFetchDataResult fetchDataResult = dataServiceProvider.get()
.fetch(request);
outcome = ComplexLoggedOutcome.success(
fetchDataResult,
ViewEventAction.builder()
.withObjects(stroomEventLoggingService.convert(fetchDataResult))
.build());
} catch (ViewDataException vde) {
// Convert an ex into a fetch result
final AbstractFetchDataResult fetchDataResult = createErrorResult(vde);
outcome = ComplexLoggedOutcome.failure(
fetchDataResult,
ViewEventAction.builder()
.withObjects(stroomEventLoggingService.convert(fetchDataResult))
.build(),
vde.getMessage());
}
return outcome;
},
null);
}
@AutoLogged(OperationType.UNLOGGED) // Not an explicit user action
@Override
public Set<String> getChildStreamTypes(final long id, final long partNo) {
final Set<String> childStreamTypes = dataServiceProvider.get()
.getChildStreamTypes(id, partNo);
return childStreamTypes;
}
private FetchDataResult createErrorResult(final ViewDataException viewDataException) {
return new FetchDataResult(
null,
StreamTypeNames.RAW_EVENTS,
null,
viewDataException.getSourceLocation(),
OffsetRange.zero(),
Count.of(0L, true),
Count.of(0L, true),
0L,
null,
viewDataException.getMessage(),
false,
null);
}
@Override
public List<DataInfoSection> read(final Long id) {
// Provide the info when failing to read the info
return viewInfo(id);
} |
<<<<<<<
GetPipelineForStreamHandler(final StreamStore streamStore, final PipelineService pipelineService, final FeedService feedService) {
=======
GetPipelineForStreamHandler(final StreamStore streamStore, final PipelineEntityService pipelineEntityService, final FeedService feedService, final SecurityContext securityContext) {
>>>>>>>
GetPipelineForStreamHandler(final StreamStore streamStore,
final PipelineService pipelineService,
final FeedService feedService,
final SecurityContext securityContext) {
<<<<<<<
// if (docRef == null) {
// // If we still don't have a pipeline docRef then just try and find the
// // first pipeline we can in the folder that the stream belongs
// // to.
// final Stream stream = getStream(action.getStreamId());
// if (stream != null) {
// final Feed feed = feedService.load(stream.getFeed());
// if (feed != null) {
//
//
// final Folder folder = feed.getFolder();
// if (folder != null) {
// final FindPipelineEntityCriteria findPipelineCriteria = new FindPipelineEntityCriteria();
// findPipelineCriteria.getFolderIdSet().add(folder);
// final List<PipelineEntity> pipelines = pipelineService.find(findPipelineCriteria);
// if (pipelines != null && pipelines.size() > 0) {
// final PipelineEntity pipelineEntity = pipelines.get(0);
// docRef = DocRefUtil.create(pipelineEntity);
// }
// }
// }
// }
// }
=======
if (docRef == null) {
// If we still don't have a pipeline docRef then just try and find the
// first pipeline we can in the folder that the stream belongs
// to.
final Stream stream = getStream(action.getStreamId());
if (stream != null) {
final Folder folder = getFolder(stream);
if (folder != null) {
final FindPipelineEntityCriteria findPipelineCriteria = new FindPipelineEntityCriteria();
findPipelineCriteria.getFolderIdSet().add(folder);
final List<PipelineEntity> pipelines = pipelineEntityService.find(findPipelineCriteria);
if (pipelines != null && pipelines.size() > 0) {
final PipelineEntity pipelineEntity = pipelines.get(0);
docRef = DocRefUtil.create(pipelineEntity);
}
}
}
}
>>>>>>>
// if (docRef == null) {
// // If we still don't have a pipeline docRef then just try and find the
// // first pipeline we can in the folder that the stream belongs
// // to.
// final Stream stream = getStream(action.getStreamId());
// if (stream != null) {
// final Feed feed = feedService.load(stream.getFeed());
// if (feed != null) {
//
//
// final Folder folder = feed.getFolder();
// if (folder != null) {
// final FindPipelineEntityCriteria findPipelineCriteria = new FindPipelineEntityCriteria();
// findPipelineCriteria.getFolderIdSet().add(folder);
// final List<PipelineEntity> pipelines = pipelineService.find(findPipelineCriteria);
// if (pipelines != null && pipelines.size() > 0) {
// final PipelineEntity pipelineEntity = pipelines.get(0);
// docRef = DocRefUtil.create(pipelineEntity);
// }
// }
// }
// }
// }
<<<<<<<
final FindStreamCriteria criteria = new FindStreamCriteria();
criteria.getFetchSet().add(StreamProcessor.ENTITY_TYPE);
criteria.getFetchSet().add(PipelineEntity.ENTITY_TYPE);
criteria.getFetchSet().add(Feed.ENTITY_TYPE);
criteria.obtainStreamIdSet().add(id);
final List<Stream> streamList = streamStore.find(criteria);
if (streamList != null && streamList.size() > 0) {
stream = streamList.get(0);
=======
securityContext.pushUser(ServerTask.INTERNAL_PROCESSING_USER_TOKEN);
try {
final FindStreamCriteria criteria = new FindStreamCriteria();
criteria.getFetchSet().add(StreamProcessor.ENTITY_TYPE);
criteria.getFetchSet().add(PipelineEntity.ENTITY_TYPE);
criteria.getFetchSet().add(Feed.ENTITY_TYPE);
criteria.getFetchSet().add(Folder.ENTITY_TYPE);
criteria.obtainStreamIdSet().add(id);
final List<Stream> streamList = streamStore.find(criteria);
if (streamList != null && streamList.size() > 0) {
stream = streamList.get(0);
}
} finally {
securityContext.popUser();
>>>>>>>
securityContext.pushUser(ServerTask.INTERNAL_PROCESSING_USER_TOKEN);
try {
final FindStreamCriteria criteria = new FindStreamCriteria();
criteria.getFetchSet().add(StreamProcessor.ENTITY_TYPE);
criteria.getFetchSet().add(PipelineEntity.ENTITY_TYPE);
criteria.getFetchSet().add(Feed.ENTITY_TYPE);
criteria.obtainStreamIdSet().add(id);
final List<Stream> streamList = streamStore.find(criteria);
if (streamList != null && streamList.size() > 0) {
stream = streamList.get(0);
}
} finally {
securityContext.popUser(); |
<<<<<<<
import stroom.widget.tickbox.client.view.TickBox;
=======
>>>>>>>
<<<<<<<
public ExportConfigPresenter(final EventBus eventBus, final ExportConfigView view, final ExportProxy proxy,
final EntityCheckTreePresenter folderCheckTreePresenter, final ClientDispatchAsync clientDispatchAsync) {
=======
public ExportConfigPresenter(final EventBus eventBus, final ExportConfigView view, final ExportProxy proxy, final LocationManager locationManager,
final EntityCheckTreePresenter treePresenter, final ClientDispatchAsync clientDispatchAsync) {
>>>>>>>
public ExportConfigPresenter(final EventBus eventBus, final ExportConfigView view, final ExportProxy proxy, final LocationManager locationManager,
final EntityCheckTreePresenter treePresenter, final ClientDispatchAsync clientDispatchAsync) { |
<<<<<<<
import stroom.AbstractCoreIntegrationTest;
import stroom.CommonIndexingTest;
import stroom.dashboard.shared.DataSourceFieldsMap;
import stroom.datasource.api.v1.DataSourceField;
import stroom.datasource.api.v1.DataSourceField.DataSourceFieldType;
=======
import stroom.AbstractCoreIntegrationTest;
import stroom.CommonIndexingTest;
>>>>>>>
import stroom.AbstractCoreIntegrationTest;
import stroom.CommonIndexingTest;
import stroom.dashboard.shared.DataSourceFieldsMap;
import stroom.datasource.api.v1.DataSourceField;
import stroom.datasource.api.v1.DataSourceField.DataSourceFieldType;
<<<<<<<
import stroom.query.api.v1.ExpressionBuilder;
import stroom.query.api.v1.ExpressionOperator;
import stroom.query.api.v1.ExpressionOperator.Op;
import stroom.query.api.v1.ExpressionTerm.Condition;
import stroom.search.server.IndexDataSourceFieldUtil;
import javax.annotation.Resource;
import java.util.List;
=======
import stroom.query.shared.ExpressionBuilder;
import stroom.query.shared.ExpressionOperator;
import stroom.query.shared.ExpressionOperator.Op;
import stroom.query.shared.ExpressionTerm.Condition;
import stroom.query.shared.IndexField;
import stroom.query.shared.IndexFieldsMap;
import javax.annotation.Resource;
import java.util.List;
>>>>>>>
import stroom.query.api.v1.ExpressionBuilder;
import stroom.query.api.v1.ExpressionOperator;
import stroom.query.api.v1.ExpressionOperator.Op;
import stroom.query.api.v1.ExpressionTerm.Condition;
import stroom.search.server.IndexDataSourceFieldUtil;
import javax.annotation.Resource;
import java.util.List;
import stroom.query.shared.ExpressionBuilder;
import stroom.query.shared.ExpressionOperator;
import stroom.query.shared.ExpressionOperator.Op;
import stroom.query.shared.ExpressionTerm.Condition;
import stroom.query.shared.IndexField;
import stroom.query.shared.IndexFieldsMap;
<<<<<<<
=======
import stroom.query.common.v2.SearchResponseCreatorCache;
import stroom.query.common.v2.SearchResponseCreatorManager;
>>>>>>>
import stroom.query.common.v2.SearchResponseCreatorCache;
import stroom.query.common.v2.SearchResponseCreatorManager;
<<<<<<<
@Inject
private SearchResultCreatorManager searchResultCreatorManager;
=======
@Resource(name="luceneSearchResponseCreatorManager")
private SearchResponseCreatorManager searchResponseCreatorManager;
>>>>>>>
@Inject
@Named("luceneSearchResponseCreatorManager")
private SearchResponseCreatorManager searchResponseCreatorManager;
<<<<<<<
extractValues, resultMapConsumer, maxShardTasks,
maxExtractionTasks, indexService, searchResultCreatorManager);
=======
extractValues, resultMapConsumer, sleepTimeMs, maxShardTasks,
maxExtractionTasks, indexService, searchResponseCreatorManager);
>>>>>>>
extractValues, resultMapConsumer, maxShardTasks,
maxExtractionTasks, indexService, searchResponseCreatorManager); |
<<<<<<<
public static final int DATE_LENGTH = "2000-01-01T00:00:00.000Z".length();
private static final String DEFAULT_PATTERN = "yyyy-MM-dd'T'HH:mm:ss.SSSXX";
private static final DateTimeFormatter NORMAL_STROOM_TIME_FORMATTER = DateTimeFormatter.ofPattern(DEFAULT_PATTERN);
private static final DateTimeFormatter FILE_TIME_STROOM_TIME_FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH'#'mm'#'ss,SSSXX");
=======
private static final int DATE_LENGTH = "2000-01-01T00:00:00.000Z".length();
private static final java.time.format.DateTimeFormatter NORMAL_STROOM_TIME_FORMATTER = java.time.format.DateTimeFormatter
.ofPattern("yyyy-MM-dd'T'HH:mm:ss.SSSXX");
private static final long MIN_MS = 1000 * 60;
private static final long HOUR_MS = MIN_MS * 60;
private static final long DAY_MS = HOUR_MS * 24;
private static final Logger LOGGER = LoggerFactory.getLogger(DateUtil.class);
private static final String NULL = "NULL";
private static final java.time.format.DateTimeFormatter FILE_TIME_STROOM_TIME_FORMATTER = java.time.format.DateTimeFormatter
.ofPattern("yyyy-MM-dd'T'HH'#'mm'#'ss,SSSXX");
private static final String GMT_BST_GUESS = "GMT/BST";
private static final ZoneId EUROPE_LONDON_TIME_ZONE = ZoneId.of("Europe/London");
>>>>>>>
private static final int DATE_LENGTH = "2000-01-01T00:00:00.000Z".length();
private static final String DEFAULT_PATTERN = "yyyy-MM-dd'T'HH:mm:ss.SSSXX";
private static final DateTimeFormatter NORMAL_STROOM_TIME_FORMATTER = DateTimeFormatter.ofPattern(DEFAULT_PATTERN);
private static final DateTimeFormatter FILE_TIME_STROOM_TIME_FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH'#'mm'#'ss,SSSXX");
<<<<<<<
if (date == null) {
throw new IllegalArgumentException("Unable to parse null date");
}
if (date.length() != DATE_LENGTH) {
return Long.parseLong(date);
=======
if (!looksLikeDate(date)) {
Long.parseLong(date);
>>>>>>>
if (date == null) {
throw new IllegalArgumentException("Unable to parse null date");
}
if (!looksLikeDate(date)) {
Long.parseLong(date); |
<<<<<<<
import io.swagger.annotations.Api;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
=======
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
>>>>>>>
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
<<<<<<<
pipelineScopeRunnable.scopeRunnable(() -> {
Map<PipelineElementType, Map<String, PipelinePropertyType>> pts = pipelineElementRegistryFactory.get()
.getPropertyTypes();
=======
pipelineScopeRunnableProvider.get().scopeRunnable(() -> {
Map<PipelineElementType, Map<String, PipelinePropertyType>> pts = elementRegistryFactoryProvider.get().get()
.getPropertyTypes();
>>>>>>>
pipelineScopeRunnableProvider.get().scopeRunnable(() -> {
Map<PipelineElementType, Map<String, PipelinePropertyType>> pts = elementRegistryFactoryProvider.get().get()
.getPropertyTypes();
<<<<<<<
pipelineScopeRunnable.scopeRunnable(() -> {
Map<PipelineElementType, Map<String, PipelinePropertyType>> pts = pipelineElementRegistryFactory.get()
.getPropertyTypes();
=======
pipelineScopeRunnableProvider.get().scopeRunnable(() -> {
Map<PipelineElementType, Map<String, PipelinePropertyType>> pts = elementRegistryFactoryProvider.get()
.get().getPropertyTypes();
>>>>>>>
pipelineScopeRunnableProvider.get().scopeRunnable(() -> {
Map<PipelineElementType, Map<String, PipelinePropertyType>> pts = elementRegistryFactoryProvider.get()
.get().getPropertyTypes(); |
<<<<<<<
import stroom.meta.shared.Meta;
=======
import stroom.pipeline.shared.StepLocation;
import stroom.query.api.v2.DocRef;
import stroom.streamstore.client.presenter.ClassificationUiHandlers;
import stroom.streamstore.client.presenter.ClassificationWrapperPresenter.ClassificationWrapperView;
import stroom.streamstore.shared.Stream;
import stroom.streamstore.shared.StreamType;
>>>>>>>
import stroom.meta.shared.Meta;
import stroom.pipeline.shared.StepLocation;
<<<<<<<
public void read(final DocRef pipeline, final Meta meta, final long eventId,
final String childStreamType) {
=======
public void read(final DocRef pipeline,
final StepLocation stepLocation,
final Stream stream,
final StreamType childStreamType) {
>>>>>>>
public void read(final DocRef pipeline,
final StepLocation stepLocation,
final Meta meta,
final String childStreamType) {
<<<<<<<
steppingPresenter.read(pipeline, meta, eventId, childStreamType);
=======
steppingPresenter.read(pipeline, stepLocation, stream, childStreamType);
>>>>>>>
steppingPresenter.read(pipeline, stepLocation, meta, childStreamType); |
<<<<<<<
taskManager.exec(new StreamUploadTask(null, null, "test.zip", file, DocRefUtil.create(feed),
DocRefUtil.create(StreamType.RAW_EVENTS), null, null));
=======
taskManager.exec(new StreamUploadTask(ServerTask.INTERNAL_PROCESSING_USER_TOKEN, "test.zip", file, DocRef.create(feed),
DocRef.create(StreamType.RAW_EVENTS), null, null));
>>>>>>>
taskManager.exec(new StreamUploadTask(ServerTask.INTERNAL_PROCESSING_USER_TOKEN, "test.zip", file, DocRefUtil.create(feed),
DocRefUtil.create(StreamType.RAW_EVENTS), null, null));
<<<<<<<
taskManager.exec(new StreamUploadTask(null, null, "test.dat", file, DocRefUtil.create(feed),
DocRefUtil.create(StreamType.RAW_EVENTS), null, "Tom:One\nJames:Two\n"));
=======
taskManager.exec(new StreamUploadTask(ServerTask.INTERNAL_PROCESSING_USER_TOKEN, "test.dat", file, DocRef.create(feed),
DocRef.create(StreamType.RAW_EVENTS), null, "Tom:One\nJames:Two\n"));
>>>>>>>
taskManager.exec(new StreamUploadTask(ServerTask.INTERNAL_PROCESSING_USER_TOKEN, "test.dat", file, DocRefUtil.create(feed),
DocRefUtil.create(StreamType.RAW_EVENTS), null, "Tom:One\nJames:Two\n"));
<<<<<<<
taskManager.exec(new StreamUploadTask(null, null, "test.zip", file, DocRefUtil.create(feed),
DocRefUtil.create(StreamType.RAW_EVENTS), null, extraMeta));
=======
taskManager.exec(new StreamUploadTask(ServerTask.INTERNAL_PROCESSING_USER_TOKEN, "test.zip", file, DocRef.create(feed),
DocRef.create(StreamType.RAW_EVENTS), null, extraMeta));
>>>>>>>
taskManager.exec(new StreamUploadTask(ServerTask.INTERNAL_PROCESSING_USER_TOKEN, "test.zip", file, DocRefUtil.create(feed),
DocRefUtil.create(StreamType.RAW_EVENTS), null, extraMeta)); |
<<<<<<<
=======
import stroom.streamstore.shared.StreamType;
import stroom.widget.htree.client.treelayout.util.DefaultTreeForTreeLayout;
>>>>>>>
import stroom.widget.htree.client.treelayout.util.DefaultTreeForTreeLayout;
<<<<<<<
void testMove() {
=======
public void testMove1() {
>>>>>>>
void testMove1() { |
<<<<<<<
import org.hsqldb.types.Types;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;
import stroom.entity.server.util.ConnectionUtil;
import stroom.entity.server.util.StroomDatabaseInfo;
import stroom.streamstore.shared.StreamAttributeValue;
import stroom.util.logging.LogExecutionTime;
=======
import org.hsqldb.types.Types;
import org.springframework.stereotype.Component;
import stroom.entity.server.util.StroomDatabaseInfo;
import stroom.streamstore.shared.StreamAttributeValue;
import stroom.util.logging.LogExecutionTime;
import stroom.util.logging.StroomLogger;
>>>>>>>
import org.hsqldb.types.Types;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
import stroom.entity.server.util.StroomDatabaseInfo;
import stroom.streamstore.shared.StreamAttributeValue;
import stroom.util.logging.LogExecutionTime;
<<<<<<<
private static final Logger LOGGER = LoggerFactory.getLogger(StreamAttributeValueServiceTransactionHelper.class);
=======
private static StroomLogger LOGGER = StroomLogger.getLogger(StreamAttributeValueServiceTransactionHelper.class);
private final DataSource dataSource;
private final StroomDatabaseInfo stroomDatabaseInfo;
>>>>>>>
private static final Logger LOGGER = LoggerFactory.getLogger(StreamAttributeValueServiceTransactionHelper.class); |
<<<<<<<
import event.logging.BaseAdvancedQueryItem;
import event.logging.BaseAdvancedQueryOperator.And;
import event.logging.BaseAdvancedQueryOperator.Or;
import event.logging.TermCondition;
import event.logging.util.EventLoggingUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
import org.springframework.transaction.annotation.Transactional;
=======
import event.logging.BaseAdvancedQueryItem;
import event.logging.BaseAdvancedQueryOperator.And;
import event.logging.BaseAdvancedQueryOperator.Or;
import event.logging.TermCondition;
import event.logging.util.EventLoggingUtil;
import org.joda.time.DateTimeZone;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
import org.springframework.transaction.annotation.Transactional;
>>>>>>>
import event.logging.BaseAdvancedQueryItem;
import event.logging.BaseAdvancedQueryOperator.And;
import event.logging.BaseAdvancedQueryOperator.Or;
import event.logging.TermCondition;
import event.logging.util.EventLoggingUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
import org.springframework.transaction.annotation.Transactional;
<<<<<<<
import stroom.entity.server.util.EntityServiceLogUtil;
import stroom.entity.server.util.SQLBuilder;
import stroom.entity.server.util.SQLUtil;
import stroom.entity.server.util.StroomDatabaseInfo;
=======
import stroom.entity.server.util.EntityServiceLogUtil;
import stroom.entity.server.util.FieldMap;
import stroom.entity.server.util.HqlBuilder;
import stroom.entity.server.util.SqlBuilder;
import stroom.entity.server.util.SqlUtil;
import stroom.entity.server.util.StroomDatabaseInfo;
>>>>>>>
import stroom.entity.server.util.EntityServiceLogUtil;
import stroom.entity.server.util.StroomDatabaseInfo;
import stroom.entity.server.util.FieldMap;
import stroom.entity.server.util.HqlBuilder;
import stroom.entity.server.util.SqlBuilder;
import stroom.entity.server.util.SqlUtil;
<<<<<<<
import stroom.query.api.v1.DocRef;
import stroom.query.api.v1.ExpressionTerm;
=======
import stroom.query.shared.ExpressionTerm.Condition;
>>>>>>>
import stroom.query.api.v1.DocRef;
import stroom.query.api.v1.ExpressionTerm;
<<<<<<<
import stroom.util.zip.HeaderMap;
=======
>>>>>>>
import stroom.util.zip.HeaderMap;
<<<<<<<
=======
static {
// Set the default timezone and locale for all date time operations.
DateTimeZone.setDefault(DateTimeZone.UTC);
Locale.setDefault(Locale.ROOT);
}
private static final FieldMap FIELD_MAP = new FieldMap()
.add(FindStreamCriteria.FIELD_ID, BaseEntity.ID, "id")
.add(FindStreamCriteria.FIELD_CREATE_MS, Stream.CREATE_MS, "createMs");
>>>>>>>
static {
// Set the default timezone and locale for all date time operations.
DateTimeZone.setDefault(DateTimeZone.UTC);
Locale.setDefault(Locale.ROOT);
}
private static final FieldMap FIELD_MAP = new FieldMap()
.add(FindStreamCriteria.FIELD_ID, BaseEntity.ID, "id")
.add(FindStreamCriteria.FIELD_CREATE_MS, Stream.CREATE_MS, "createMs");
<<<<<<<
LOGGER.error("Unable to delete stream target!", e);
=======
LOGGER.error(e.getMessage(), e);
>>>>>>>
LOGGER.error("Unable to delete stream target!", e.getMessage(), e);
<<<<<<<
LOGGER.error("Unable to close stream source!", e);
=======
LOGGER.error(e.getMessage(), e);
>>>>>>>
LOGGER.error("Unable to close stream source!", e.getMessage(), e); |
<<<<<<<
import stroom.util.io.FileUtil;
=======
import stroom.util.logging.StroomLogger;
import stroom.util.scheduler.SimpleCron;
>>>>>>>
import stroom.util.io.FileUtil;
import stroom.util.scheduler.SimpleCron;
<<<<<<<
=======
this.rollSize = rollSize;
>>>>>>>
<<<<<<<
=======
// Determine the oldest this destination can be.
Long time = null;
if (schedule != null) {
time = schedule.getNextTime(creationTime);
}
if (frequency != null) {
final long value = creationTime + frequency;
if (time == null || time > value) {
time = value;
}
}
oldestAllowed = time;
>>>>>>>
<<<<<<<
void afterRoll(Consumer<Throwable> exceptionConsumer) {
=======
Object getKey() {
return key;
}
@Override
public OutputStream getOutputStream() throws IOException {
return getOutputStream(null, null);
}
@Override
public OutputStream getOutputStream(final byte[] header, final byte[] footer) throws IOException {
try {
if (!rolled) {
this.footer = footer;
// If we haven't written yet then create the output stream and
// write a header if we have one.
if (header != null && outputStream != null && outputStream.getCount() == 0) {
// Write the header.
write(header);
}
return outputStream;
}
} catch (final Throwable e) {
throw handleException(null, e);
}
return null;
}
@Override
boolean tryFlushAndRoll(final boolean force, final long currentTime) throws IOException {
IOException exception = null;
try {
if (!rolled) {
// Flush the output if we need to.
if (force || shouldFlush(currentTime)) {
try {
flush();
} catch (final Throwable e) {
exception = handleException(exception, e);
}
}
// Roll the output if we need to.
if (force || shouldRoll(currentTime)) {
try {
roll();
} catch (final Throwable e) {
exception = handleException(exception, e);
}
}
}
} catch (final Throwable t) {
exception = handleException(exception, t);
}
if (exception != null) {
throw exception;
}
return rolled;
}
private boolean shouldFlush(final long currentTime) {
final long lastFlushTime = this.lastFlushTime;
this.lastFlushTime = currentTime;
return lastFlushTime > 0 && currentTime - lastFlushTime > ONE_MINUTE;
}
private boolean shouldRoll(final long currentTime) {
return (oldestAllowed != null && currentTime > oldestAllowed) ||
outputStream.getCount() > rollSize;
}
private void roll() throws IOException {
rolled = true;
>>>>>>>
void afterRoll(Consumer<Throwable> exceptionConsumer) { |
<<<<<<<
import stroom.document.client.event.DirtyEvent;
import stroom.document.client.event.DirtyEvent.DirtyHandler;
import stroom.document.client.event.HasDirtyHandlers;
import stroom.query.api.v1.DocRef;
=======
import stroom.entity.client.event.DirtyEvent;
import stroom.entity.client.event.DirtyEvent.DirtyHandler;
import stroom.entity.client.event.HasDirtyHandlers;
import stroom.query.api.v2.DocRef;
>>>>>>>
import stroom.document.client.event.DirtyEvent;
import stroom.document.client.event.DirtyEvent.DirtyHandler;
import stroom.document.client.event.HasDirtyHandlers;
import stroom.query.api.v2.DocRef; |
<<<<<<<
public void testEmail(){uqi.getData(Email.asUpdates(),Purpose.TEST("test")).debug();}
=======
public void testEmail(){uqi.getData(Email.asEmailUpdates(),Purpose.TEST("test")).debug();}
>>>>>>>
public void testEmailUpdates(){uqi.getData(Email.asGmailUpdates(),Purpose.TEST("test")).debug();}
public void testEmailList(){
uqi.getData(Email.asGmailList(System.currentTimeMillis()-Duration.hours(100),
System.currentTimeMillis()-Duration.hours(50),
100),Purpose.TEST("test")).debug();
}
<<<<<<<
// String getPostcode() throws PSException {
// return uqi
// .getData(Geolocation.asLastKnown(Geolocation.LEVEL_CITY), Purpose.FEATURE("get postcode for nearby search"));
//// .output(GeolocationOperators.(Geolocation.COORDINATES));
// }
=======
// String getPostcode() throws PSException {
// return uqi
// .getData(Geolocation.asLastKnown(Geolocation.LEVEL_CITY), Purpose.FEATURE("get postcode for nearby search"))
//// .output(GeolocationOperators.(Geolocation.COORDINATES));
// }
>>>>>>>
// String getPostcode() throws PSException {
// return uqi
// .getData(Geolocation.asLastKnown(Geolocation.LEVEL_CITY), Purpose.FEATURE("get postcode for nearby search"));
// .getData(Geolocation.asLastKnown(Geolocation.LEVEL_CITY), Purpose.FEATURE("get postcode for nearby search"))
//// .output(GeolocationOperators.(Geolocation.COORDINATES));
// }
<<<<<<<
// double getAverageSentimentOfSMS() throws PSException {
//// return uqi
//// .getData(Message.getAllSMS(), Purpose.FEATURE("calculate the sentiment across all Message messages"));
//// .setField("sentiment", StringOperators.sentiment(Message.CONTENT))
//// .outputItems(StatisticOperators.average("sentiment"));
// }
=======
// double getAverageSentimentOfSMS() throws PSException {
// return uqi
// .getData(Message.getAllSMS(), Purpose.FEATURE("calculate the sentiment across all Message messages"))
//// .setField("sentiment", StringOperators.sentiment(Message.CONTENT))
//// .outputItems(StatisticOperators.average("sentiment"));
// }
>>>>>>>
// double getAverageSentimentOfSMS() throws PSException {
//// return uqi
//// .getData(Message.getAllSMS(), Purpose.FEATURE("calculate the sentiment across all Message messages"));
// return uqi
// .getData(Message.getAllSMS(), Purpose.FEATURE("calculate the sentiment across all Message messages"))
//// .setField("sentiment", StringOperators.sentiment(Message.CONTENT))
//// .outputItems(StatisticOperators.average("sentiment"));
// } |
<<<<<<<
import stroom.importexport.api.ImportExportActionHandler;
import stroom.importexport.shared.Base64EncodedDocumentData;
=======
>>>>>>>
import stroom.importexport.api.ImportExportActionHandler;
import stroom.importexport.shared.Base64EncodedDocumentData; |
<<<<<<<
HashMap<Long, List<Event>> createEventMap(final List<String[]> storedDataList) {
try (final SecurityHelper securityHelper = SecurityHelper.elevate(securityContext)) {
// Put the events into a map to group them by stream id.
final Map<Long, List<Event>> storedDataMap = new HashMap<>();
for (final String[] storedData : storedDataList) {
final Long longStreamId = getLong(storedData, streamIdIndex);
final Long longEventId = getLong(storedData, eventIdIndex);
final boolean include = true;
if (longStreamId != null && longEventId != null && include) {
storedDataMap.computeIfAbsent(longStreamId, k -> new ArrayList<>()).add(new Event(longEventId, storedData));
}
}
// Filter the streams by ones that should be visible to the current
// user.
final HashMap<Long, List<Event>> filteredDataMap = new HashMap<>();
for (final Entry<Long, List<Event>> entry : storedDataMap.entrySet()) {
final Long streamId = entry.getKey();
final Stream stream = getStreamById(streamId);
// If the stream's id is undefined then it is a dummy we either
// couldn't find it or are not allowed to use it.
if (stream.isPersistent()) {
filteredDataMap.put(stream.getId(), entry.getValue());
=======
void addEvent(final Map<Long, List<Event>> storedDataMap, final String[] storedData) {
securityContext.elevatePermissions();
try {
final Long longStreamId = getLong(storedData, streamIdIndex);
final Long longEventId = getLong(storedData, eventIdIndex);
if (longStreamId != null && longEventId != null) {
// Filter the streams by ones that should be visible to the current user.
final Optional<Stream> optional = getStreamById(longStreamId);
if (optional.isPresent()) {
storedDataMap.compute(longStreamId, (k, v) -> {
if (v == null) {
v = new ArrayList<>();
}
v.add(new Event(longEventId, storedData));
return v;
});
>>>>>>>
void addEvent(final Map<Long, List<Event>> storedDataMap, final String[] storedData) {
try (final SecurityHelper securityHelper = SecurityHelper.elevate(securityContext)) {
final Long longStreamId = getLong(storedData, streamIdIndex);
final Long longEventId = getLong(storedData, eventIdIndex);
if (longStreamId != null && longEventId != null) {
// Filter the streams by ones that should be visible to the current user.
final Optional<Stream> optional = getStreamById(longStreamId);
if (optional.isPresent()) {
storedDataMap.compute(longStreamId, (k, v) -> {
if (v == null) {
v = new ArrayList<>();
}
v.add(new Event(longEventId, storedData));
return v;
});
<<<<<<<
return filteredDataMap;
=======
} finally {
securityContext.restorePermissions();
>>>>>>>
return filteredDataMap; |
<<<<<<<
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
=======
>>>>>>>
import org.slf4j.Logger;
import org.slf4j.LoggerFactory; |
<<<<<<<
clusterService.addListener(clusterListener);
=======
store.setDelegate(delegate);
>>>>>>>
clusterService.addListener(clusterListener);
store.setDelegate(delegate);
<<<<<<<
clusterService.removeListener(clusterListener);
=======
store.unsetDelegate(delegate);
>>>>>>>
clusterService.removeListener(clusterListener);
store.unsetDelegate(delegate);
<<<<<<<
//callback for reacting to cluster events
private class InternalClusterEventListener implements ClusterEventListener {
@Override
public void event(ClusterEvent event) {
switch (event.type()) {
//FIXME: worry about addition when the time comes
case INSTANCE_ADDED:
case INSTANCE_ACTIVATED:
break;
case INSTANCE_REMOVED:
case INSTANCE_DEACTIVATED:
break;
default:
log.warn("unknown cluster event {}", event);
}
}
}
=======
public class InternalDelegate implements MastershipStoreDelegate {
@Override
public void notify(MastershipEvent event) {
log.info("dispatching mastership event {}", event);
eventDispatcher.post(event);
}
}
>>>>>>>
//callback for reacting to cluster events
private class InternalClusterEventListener implements ClusterEventListener {
@Override
public void event(ClusterEvent event) {
switch (event.type()) {
//FIXME: worry about addition when the time comes
case INSTANCE_ADDED:
case INSTANCE_ACTIVATED:
break;
case INSTANCE_REMOVED:
case INSTANCE_DEACTIVATED:
break;
default:
log.warn("unknown cluster event {}", event);
}
}
}
public class InternalDelegate implements MastershipStoreDelegate {
@Override
public void notify(MastershipEvent event) {
log.info("dispatching mastership event {}", event);
eventDispatcher.post(event);
}
} |
<<<<<<<
import stroom.index.impl.db.IndexDbConfig;
import stroom.lifecycle.LifecycleConfig;
import stroom.node.NodeConfig;
=======
import stroom.job.JobSystemConfig;
import stroom.lifecycle.impl.LifecycleConfig;
import stroom.node.impl.NodeConfig;
>>>>>>>
import stroom.index.impl.db.IndexDbConfig;
import stroom.job.JobSystemConfig;
import stroom.lifecycle.impl.LifecycleConfig;
import stroom.node.impl.NodeConfig;
<<<<<<<
private IndexDbConfig indexDbConfig;
=======
private JobSystemConfig jobSystemConfig;
>>>>>>>
private IndexDbConfig indexDbConfig;
private JobSystemConfig jobSystemConfig;
<<<<<<<
this.indexDbConfig = new IndexDbConfig();
=======
this.jobSystemConfig = new JobSystemConfig();
>>>>>>>
this.indexDbConfig = new IndexDbConfig();
this.jobSystemConfig = new JobSystemConfig();
<<<<<<<
final IndexDbConfig indexDbConfig,
=======
final JobSystemConfig jobSystemConfig,
>>>>>>>
final IndexDbConfig indexDbConfig,
final JobSystemConfig jobSystemConfig,
<<<<<<<
this.indexDbConfig = indexDbConfig;
=======
this.jobSystemConfig = jobSystemConfig;
>>>>>>>
this.indexDbConfig = indexDbConfig;
this.jobSystemConfig = jobSystemConfig;
<<<<<<<
@JsonProperty("indexDb")
public IndexDbConfig getIndexDbConfig() {
return indexDbConfig;
}
public void setIndexDbConfig(final IndexDbConfig indexDbConfig) {
this.indexDbConfig = indexDbConfig;
}
=======
@JsonProperty("job")
public JobSystemConfig getJobSystemConfig() {
return jobSystemConfig;
}
public void setJobSystemConfig(final JobSystemConfig jobSystemConfig) {
this.jobSystemConfig = jobSystemConfig;
}
>>>>>>>
@JsonProperty("indexDb")
public IndexDbConfig getIndexDbConfig() {
return indexDbConfig;
}
public void setIndexDbConfig(final IndexDbConfig indexDbConfig) {
this.indexDbConfig = indexDbConfig;
}
@JsonProperty("job")
public JobSystemConfig getJobSystemConfig() {
return jobSystemConfig;
}
public void setJobSystemConfig(final JobSystemConfig jobSystemConfig) {
this.jobSystemConfig = jobSystemConfig;
} |
<<<<<<<
import stroom.query.api.DocRef;
=======
import stroom.importexport.server.Config;
import stroom.importexport.server.EntityPathResolver;
import stroom.importexport.server.ImportExportHelper;
import stroom.util.shared.Message;
import stroom.util.shared.Severity;
>>>>>>>
import stroom.importexport.server.Config;
import stroom.importexport.server.EntityPathResolver;
import stroom.importexport.server.ImportExportHelper;
import stroom.query.api.DocRef;
import stroom.util.shared.Message;
import stroom.util.shared.Severity; |
<<<<<<<
=======
import org.n52.sos.ds.hibernate.entities.HibernateRelations.HasDisabledFlag;
import org.n52.sos.util.Constants;
>>>>>>>
import org.n52.sos.util.Constants;
<<<<<<<
private Procedure typeOf;
private boolean isType;
private boolean isAggregation;
=======
>>>>>>>
private Procedure typeOf;
private boolean isType;
private boolean isAggregation; |
<<<<<<<
private final HttpServletRequestHolder httpServletRequestHolder;
=======
private final CurrentActivity currentActivity;
>>>>>>>
private final HttpServletRequestHolder httpServletRequestHolder;
private final CurrentActivity currentActivity;
<<<<<<<
StroomEventLoggingService(final SecurityContext security,
final HttpServletRequestHolder httpServletRequestHolder) {
=======
StroomEventLoggingService(final SecurityContext security,
final CurrentActivity currentActivity) {
>>>>>>>
StroomEventLoggingService(final SecurityContext security,
final HttpServletRequestHolder httpServletRequestHolder,
final CurrentActivity currentActivity) {
<<<<<<<
this.httpServletRequestHolder = httpServletRequestHolder;
=======
this.currentActivity = currentActivity;
>>>>>>>
this.httpServletRequestHolder = httpServletRequestHolder;
this.currentActivity = currentActivity; |
<<<<<<<
function = new ScheduledJobFunction(scheduledJob, running);
=======
final Provider<TaskConsumer> consumerProvider = scheduledJobsMap.get(scheduledJob);
function = new StroomBeanFunction(scheduledJob, consumerProvider.get(), running);
>>>>>>>
final Provider<TaskConsumer> consumerProvider = scheduledJobsMap.get(scheduledJob);
function = new ScheduledJobFunction(scheduledJob, consumerProvider.get(), running); |
<<<<<<<
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.annotation.Scope;
import org.springframework.stereotype.Component;
=======
import org.springframework.context.annotation.Scope;
import org.springframework.stereotype.Component;
import stroom.entity.shared.DocRef;
>>>>>>>
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.annotation.Scope;
import org.springframework.stereotype.Component;
<<<<<<<
import stroom.query.api.v2.DocRef;
=======
import stroom.security.SecurityContext;
>>>>>>>
import stroom.query.api.v2.DocRef;
import stroom.security.SecurityContext; |
<<<<<<<
import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
=======
>>>>>>>
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
<<<<<<<
=======
import stroom.util.logging.StroomLogger;
import stroom.util.zip.HeaderMap;
>>>>>>>
import stroom.util.zip.HeaderMap; |
<<<<<<<
public TableSettings(
String queryId,
List<Field> fields,
Boolean extractValues,
DocRef extractionPipeline,
Integer[] maxResults,
Boolean showDetail){
this.queryId = queryId;
this.fields = fields;
this.extractValues = extractValues;
this.extractionPipeline = extractionPipeline;
this.maxResults = maxResults;
this.showDetail = showDetail;
}
@XmlElement
=======
>>>>>>>
@XmlElement |
<<<<<<<
import stroom.jobsystem.shared.ClusterLock;
=======
>>>>>>>
import stroom.jobsystem.shared.ClusterLock;
<<<<<<<
import stroom.streamstore.server.StreamAttributeValueFlush;
=======
import stroom.streamstore.server.StreamAttributeKeyService;
>>>>>>>
import stroom.streamstore.server.StreamAttributeKeyService;
import stroom.streamstore.server.StreamAttributeValueFlush;
<<<<<<<
private static final List<String> TABLES_TO_CLEAR = Arrays.asList(
AppPermission.TABLE_NAME,
ClusterLock.TABLE_NAME,
Dashboard.TABLE_NAME,
Dictionary.TABLE_NAME,
DocumentPermission.TABLE_NAME,
Feed.TABLE_NAME,
Folder.TABLE_NAME,
Index.TABLE_NAME,
Index.TABLE_NAME_INDEX_VOLUME, //link table between IDX and VOL so no entity of its own
IndexShard.TABLE_NAME,
Job.TABLE_NAME,
JobNode.TABLE_NAME,
Node.TABLE_NAME,
Permission.TABLE_NAME,
PipelineEntity.TABLE_NAME,
Policy.TABLE_NAME,
QueryEntity.TABLE_NAME,
Rack.TABLE_NAME,
Res.TABLE_NAME,
Script.TABLE_NAME,
StatisticStoreEntity.TABLE_NAME,
Stream.TABLE_NAME,
StreamAttributeKey.TABLE_NAME,
StreamAttributeValue.TABLE_NAME,
StreamProcessor.TABLE_NAME,
StreamProcessorFilter.TABLE_NAME,
StreamProcessorFilterTracker.TABLE_NAME,
StroomStatsStoreEntity.TABLE_NAME,
StreamTask.TABLE_NAME,
StreamVolume.TABLE_NAME,
TextConverter.TABLE_NAME,
User.TABLE_NAME,
UserGroupUser.TABLE_NAME,
Visualisation.TABLE_NAME,
Volume.TABLE_NAME,
VolumeState.TABLE_NAME,
XMLSchema.TABLE_NAME,
XSLT.TABLE_NAME);
private final VolumeService volumeService;
private final ContentImportService contentImportService;
private final StreamAttributeKeyService streamAttributeKeyService;
private final StreamAttributeValueFlush streamAttributeValueFlush;
private final IndexShardManager indexShardManager;
private final IndexShardWriterCache indexShardWriterCache;
private final DatabaseCommonTestControlTransactionHelper databaseCommonTestControlTransactionHelper;
private final NodeConfig nodeConfig;
private final StreamTaskCreator streamTaskCreator;
private final StroomCacheManager stroomCacheManager;
=======
@Resource
private VolumeService volumeService;
@Resource
private IndexShardService indexShardService;
@Resource
private ContentImportService contentImportService;
@Resource
private StreamAttributeKeyService streamAttributeKeyService;
@Resource
private IndexShardManager indexShardManager;
@Resource
private IndexShardWriterCache indexShardWriterCache;
@Resource
private DatabaseCommonTestControlTransactionHelper databaseCommonTestControlTransactionHelper;
@Resource
private NodeConfig nodeConfig;
@Resource
private StreamTaskCreator streamTaskCreator;
@Resource
private LifecycleServiceImpl lifecycleServiceImpl;
@Resource
private StroomCacheManager stroomCacheManager;
@Resource
private ExplorerNodeService explorerNodeService;
>>>>>>>
private static final List<String> TABLES_TO_CLEAR = Arrays.asList(
AppPermission.TABLE_NAME,
ClusterLock.TABLE_NAME,
Dashboard.TABLE_NAME,
"doc",
DocumentPermission.TABLE_NAME,
Feed.TABLE_NAME,
Index.TABLE_NAME,
Index.TABLE_NAME_INDEX_VOLUME, //link table between IDX and VOL so no entity of its own
IndexShard.TABLE_NAME,
Job.TABLE_NAME,
JobNode.TABLE_NAME,
Node.TABLE_NAME,
Permission.TABLE_NAME,
PipelineEntity.TABLE_NAME,
Policy.TABLE_NAME,
QueryEntity.TABLE_NAME,
Rack.TABLE_NAME,
Res.TABLE_NAME,
Script.TABLE_NAME,
StatisticStoreEntity.TABLE_NAME,
Stream.TABLE_NAME,
StreamAttributeKey.TABLE_NAME,
StreamAttributeValue.TABLE_NAME,
StreamProcessor.TABLE_NAME,
StreamProcessorFilter.TABLE_NAME,
StreamProcessorFilterTracker.TABLE_NAME,
StroomStatsStoreEntity.TABLE_NAME,
StreamTask.TABLE_NAME,
StreamVolume.TABLE_NAME,
TextConverter.TABLE_NAME,
User.TABLE_NAME,
UserGroupUser.TABLE_NAME,
Visualisation.TABLE_NAME,
Volume.TABLE_NAME,
VolumeState.TABLE_NAME,
XMLSchema.TABLE_NAME,
XSLT.TABLE_NAME);
private final VolumeService volumeService;
private final ContentImportService contentImportService;
private final StreamAttributeKeyService streamAttributeKeyService;
private final StreamAttributeValueFlush streamAttributeValueFlush;
private final IndexShardManager indexShardManager;
private final IndexShardWriterCache indexShardWriterCache;
private final DatabaseCommonTestControlTransactionHelper databaseCommonTestControlTransactionHelper;
private final NodeConfig nodeConfig;
private final StreamTaskCreator streamTaskCreator;
private final StroomCacheManager stroomCacheManager;
<<<<<<<
=======
deleteEntity(StreamTask.class);
deleteEntity(StreamVolume.class);
deleteEntity(StreamAttributeValue.class);
deleteEntity(Stream.class);
deleteEntity(Policy.class);
deleteEntity(QueryEntity.class);
deleteEntity(Dashboard.class);
deleteEntity(Visualisation.class);
deleteEntity(Script.class);
deleteEntity(Res.class);
deleteEntity(StatisticStoreEntity.class);
deleteEntity(StroomStatsStoreEntity.class);
>>>>>>>
<<<<<<<
=======
deleteEntity(IndexShard.class);
deleteEntity(Index.class);
deleteEntity(Feed.class);
deleteEntity(XMLSchema.class);
deleteEntity(TextConverter.class);
deleteEntity(XSLT.class);
deleteEntity(StreamProcessorFilter.class);
deleteEntity(StreamProcessorFilterTracker.class);
deleteEntity(StreamProcessor.class);
deleteEntity(PipelineEntity.class);
deleteEntity(UserGroupUser.class);
deleteEntity(DocumentPermission.class);
deleteEntity(Permission.class);
deleteEntity(User.class);
// Delete all explorer nodes.
explorerNodeService.deleteAllNodes();
// deleteTable("sys_user_role");
// deleteTable("sys_user_group");
// deleteTable("sys_user");
// deleteTable("sys_group");
>>>>>>>
<<<<<<<
//ensure any hibernate entities are flushed down before we clear the tables
=======
databaseCommonTestControlTransactionHelper.truncateTable("doc");
>>>>>>>
//ensure any hibernate entities are flushed down before we clear the tables |
<<<<<<<
import org.apache.karaf.shell.api.action.Argument;
import org.apache.karaf.shell.api.action.Command;
import org.apache.karaf.shell.api.action.lifecycle.Service;
=======
import com.google.common.collect.ImmutableSet;
import org.apache.karaf.shell.commands.Argument;
import org.apache.karaf.shell.commands.Command;
>>>>>>>
import com.google.common.collect.ImmutableSet;
import org.apache.karaf.shell.api.action.Argument;
import org.apache.karaf.shell.api.action.Command;
import org.apache.karaf.shell.api.action.lifecycle.Service;
<<<<<<<
protected void doExecute() {
OpenstackVtap.Type type = getVtapTypeFromString(vTapType);
Set<OpenstackVtap> openstackVtaps = vTapService.getVtaps(type);
for (OpenstackVtap vTap : openstackVtaps) {
=======
protected void execute() {
OpenstackVtap.Type type = getVtapTypeFromString(vtapType);
Set<OpenstackVtap> openstackVtaps = vtapService.getVtaps(type);
for (OpenstackVtap vtap : openstackVtaps) {
>>>>>>>
protected void doExecute() {
OpenstackVtap.Type type = getVtapTypeFromString(vtapType);
Set<OpenstackVtap> openstackVtaps = vtapService.getVtaps(type);
for (OpenstackVtap vtap : openstackVtaps) { |
<<<<<<<
import stroom.explorer.shared.SharedDocRef;
import stroom.meta.shared.FindMetaCriteria;
import stroom.meta.shared.FindMetaRowAction;
import stroom.meta.shared.Meta;
import stroom.meta.shared.MetaRow;
import stroom.pipeline.shared.PipelineDoc;
import stroom.pipeline.shared.stepping.GetPipelineForMetaAction;
=======
import stroom.feed.shared.Feed;
import stroom.pipeline.shared.PipelineEntity;
import stroom.pipeline.shared.StepLocation;
import stroom.pipeline.shared.stepping.GetPipelineForStreamAction;
>>>>>>>
import stroom.explorer.shared.SharedDocRef;
import stroom.meta.shared.FindMetaCriteria;
import stroom.meta.shared.FindMetaRowAction;
import stroom.meta.shared.Meta;
import stroom.meta.shared.MetaRow;
import stroom.pipeline.shared.PipelineDoc;
import stroom.pipeline.shared.StepLocation;
import stroom.pipeline.shared.stepping.GetPipelineForMetaAction;
<<<<<<<
if (event.getPipelineRef() != null) {
choosePipeline(event.getPipelineRef(), event.getStreamId(), event.getEventId(),
event.getChildStreamType());
} else {
// If we don't have a pipeline id then try to guess one for the
// supplied stream.
dispatcher.exec(new GetPipelineForMetaAction(event.getStreamId(), event.getChildStreamId())).onSuccess(result ->
choosePipeline(result, event.getStreamId(), event.getEventId(), event.getChildStreamType()));
=======
if (event.getStreamId() != null) {
if (event.getPipelineRef() != null) {
choosePipeline(event.getPipelineRef(),
event.getStepLocation(),
event.getChildStreamType());
} else {
// If we don't have a pipeline id then try to guess one for the
// supplied stream.
dispatcher.exec(new GetPipelineForStreamAction(event.getStreamId(), event.getChildStreamId())).onSuccess(result -> choosePipeline(result, event.getStepLocation(), event.getChildStreamType()));
}
>>>>>>>
if (event.getPipelineRef() != null) {
choosePipeline(event.getPipelineRef(),
event.getStepLocation(),
event.getChildStreamType());
} else {
// If we don't have a pipeline id then try to guess one for the
// supplied stream.
dispatcher.exec(new GetPipelineForMetaAction(event.getStreamId(), event.getChildStreamId())).onSuccess(result ->
choosePipeline(result, event.getStepLocation(), event.getChildStreamType()));
<<<<<<<
private void choosePipeline(final SharedDocRef initialPipelineRef,
final long streamId,
final long eventId,
final String childStreamType) {
=======
private void choosePipeline(final SharedDocRef initialPipelineRef,
final StepLocation stepLocation,
final StreamType childStreamType) {
>>>>>>>
private void choosePipeline(final SharedDocRef initialPipelineRef,
final StepLocation stepLocation,
final String childStreamType) {
<<<<<<<
final FindMetaCriteria findMetaCriteria = new FindMetaCriteria();
findMetaCriteria.obtainSelectedIdSet().add(streamId);
=======
final FindStreamAttributeMapCriteria streamAttributeMapCriteria = new FindStreamAttributeMapCriteria();
streamAttributeMapCriteria.obtainFindStreamCriteria().obtainSelectedIdSet().add(stepLocation.getStreamId());
streamAttributeMapCriteria.getFetchSet().add(Feed.ENTITY_TYPE);
streamAttributeMapCriteria.getFetchSet().add(StreamType.ENTITY_TYPE);
streamAttributeMapCriteria.getFetchSet().add(StreamProcessor.ENTITY_TYPE);
streamAttributeMapCriteria.getFetchSet().add(PipelineEntity.ENTITY_TYPE);
>>>>>>>
final FindMetaCriteria findMetaCriteria = new FindMetaCriteria();
findMetaCriteria.obtainSelectedIdSet().add(stepLocation.getStreamId());
<<<<<<<
final MetaRow row = result.get(0);
openEditor(pipeline, row.getMeta(), eventId, childStreamType);
=======
final StreamAttributeMap row = result.get(0);
openEditor(pipeline, stepLocation, row.getStream(), childStreamType);
>>>>>>>
final MetaRow row = result.get(0);
openEditor(pipeline, stepLocation, row.getMeta(), childStreamType);
<<<<<<<
private void openEditor(final DocRef pipeline, final Meta meta, final long eventId,
final String childStreamType) {
=======
private void openEditor(final DocRef pipeline,
final StepLocation stepLocation,
final Stream stream,
final StreamType childStreamType) {
>>>>>>>
private void openEditor(final DocRef pipeline,
final StepLocation stepLocation,
final Meta meta,
final String childStreamType) {
<<<<<<<
editor.read(pipeline, meta, eventId, childStreamType);
=======
editor.read(pipeline, stepLocation, stream, childStreamType);
>>>>>>>
editor.read(pipeline, stepLocation, meta, childStreamType); |
<<<<<<<
LOGGER.error("init() - Added initial stream type {}", streamType, ex);
} finally {
sql.setLength(0);
=======
LOGGER.error("init() - Added initial stream type %s", streamType, ex);
>>>>>>>
LOGGER.error("init() - Added initial stream type {}", streamType, ex); |
<<<<<<<
import stroom.query.api.v1.DocRef;
=======
import stroom.entity.shared.PermissionInheritance;
import stroom.entity.shared.SharedDocRef;
import stroom.explorer.client.event.HighlightExplorerItemEvent;
import stroom.explorer.client.event.RefreshExplorerTreeEvent;
import stroom.explorer.shared.EntityData;
import stroom.query.api.v2.DocRef;
import stroom.security.client.ClientSecurityContext;
import stroom.task.client.TaskEndEvent;
import stroom.task.client.TaskStartEvent;
import stroom.widget.popup.client.event.HidePopupEvent;
import stroom.widget.util.client.Future;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
public abstract class EntityPlugin<E extends NamedEntity> extends Plugin {
private final ClientDispatchAsync dispatcher;
private final ClientSecurityContext securityContext;
private final Map<DocRef, EntityTabData> entityToTabDataMap = new HashMap<>();
private final Map<EntityTabData, DocRef> tabDataToEntityMap = new HashMap<>();
private final ContentManager contentManager;
private final EntityPluginEventManager entityPluginEventManager;
>>>>>>>
import stroom.query.api.v2.DocRef; |
<<<<<<<
import stroom.processor.impl.db.StreamTargetStroomStreamHandler;
import stroom.proxy.repo.StroomZipEntry;
import stroom.proxy.repo.StroomZipFileType;
=======
import stroom.meta.shared.AttributeMap;
import stroom.meta.shared.StandardHeaderArguments;
import stroom.receive.common.StreamTargetStroomStreamHandler;
>>>>>>>
import stroom.processor.impl.db.StreamTargetStroomStreamHandler;
import stroom.proxy.repo.StroomZipEntry;
import stroom.proxy.repo.StroomZipFileType;
import stroom.meta.shared.AttributeMap;
import stroom.meta.shared.StandardHeaderArguments;
import stroom.receive.common.StreamTargetStroomStreamHandler; |
<<<<<<<
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
=======
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
>>>>>>>
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
<<<<<<<
=======
static {
// Set the default timezone and locale for all date time operations.
DateTimeZone.setDefault(DateTimeZone.UTC);
Locale.setDefault(Locale.ROOT);
}
private static final Logger LOGGER = LoggerFactory.getLogger(DateUtil.class);
private static final String NULL = "NULL";
private static final DateTimeFormatter FILE_TIME_STROOM_TIME_FORMATTER = DateTimeFormat
.forPattern("yyyy-MM-dd'T'HH#mm#ss,SSS'Z'").withZone(DateTimeZone.UTC);
private static final String GMT_BST_GUESS = "GMT/BST";
private static final DateTimeZone EUROPE_LONDON_TIME_ZONE = DateTimeZone.forID("Europe/London");
>>>>>>>
private static final Logger LOGGER = LoggerFactory.getLogger(DateUtil.class);
<<<<<<<
LOGGER.debug(e.getMessage(), e);
}
if (dateTimeZone == null) {
dateTimeZone = ZoneOffset.UTC;
=======
LOGGER.debug("Unable to parse time zone!", e);
>>>>>>>
LOGGER.debug("Unable to parse time zone!", e);
}
if (dateTimeZone == null) {
dateTimeZone = ZoneOffset.UTC;
<<<<<<<
} catch (final IllegalArgumentException e) {
LOGGER.debug(e.getMessage(), e);
=======
} catch (final IllegalArgumentException e) {
LOGGER.debug("Unable to parse date!", e);
>>>>>>>
} catch (final IllegalArgumentException e) {
LOGGER.debug("Unable to parse date!", e); |
<<<<<<<
private static final StroomLogger LOGGER = StroomLogger.getLogger(SecurityContextImpl.class);
private static final UserRef INTERNAL_PROCESSING_USER = new UserRef(User.ENTITY_TYPE, "0", "INTERNAL_PROCESSING_USER", false, true);
=======
private static final Logger LOGGER = LoggerFactory.getLogger(SecurityContextImpl.class);
private static final UserRef INTERNAL_PROCESSING_USER = new UserRef(User.ENTITY_TYPE, "0", "INTERNAL_PROCESSING_USER", false);
>>>>>>>
private static final Logger LOGGER = LoggerFactory.getLogger(SecurityContextImpl.class);
private static final UserRef INTERNAL_PROCESSING_USER = new UserRef(User.ENTITY_TYPE, "0", "INTERNAL_PROCESSING_USER", false, true); |
<<<<<<<
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.regex.Pattern;
import javax.annotation.Resource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.MarkerFactory;
=======
>>>>>>>
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.MarkerFactory;
<<<<<<<
private static class ProcessInfoOutputStreamProvider extends AbstractElement
implements DestinationProvider, Destination {
private final StreamStore streamStore;
private final StreamCloser streamCloser;
private final MetaData metaData;
private final Stream stream;
private final StreamProcessor streamProcessor;
private final StreamTask streamTask;
private OutputStream processInfoOutputStream;
private StreamTarget processInfoStreamTarget;
public ProcessInfoOutputStreamProvider(final StreamStore streamStore, final StreamCloser streamCloser,
final MetaData metaData, final Stream stream, final StreamProcessor streamProcessor,
final StreamTask streamTask) {
this.streamStore = streamStore;
this.streamCloser = streamCloser;
this.metaData = metaData;
this.stream = stream;
this.streamProcessor = streamProcessor;
this.streamTask = streamTask;
}
@Override
public Destination borrowDestination() throws IOException {
return this;
}
@Override
public void returnDestination(final Destination destination) throws IOException {
}
@Override
public OutputStream getOutputStream() throws IOException {
return getOutputStream(null, null);
}
@Override
public OutputStream getOutputStream(final byte[] header, final byte[] footer) throws IOException {
if (processInfoOutputStream == null) {
// Create a processing info stream to write all processing
// information to.
final Stream errorStream = Stream.createProcessedStream(stream, stream.getFeed(), StreamType.ERROR,
streamProcessor, streamTask);
processInfoStreamTarget = streamStore.openStreamTarget(errorStream);
streamCloser.add(processInfoStreamTarget);
processInfoOutputStream = new WrappedOutputStream(processInfoStreamTarget.getOutputStream()) {
@Override
public void close() throws IOException {
super.flush();
super.close();
// Only do something if an output stream was used.
if (processInfoStreamTarget != null) {
// Write meta data.
final HeaderMap headerMap = metaData.getHeaderMap();
processInfoStreamTarget.getAttributeMap().putAll(headerMap);
// We let the streamCloser close the stream target
// with the stream store as it may want to delete it
}
}
};
streamCloser.add(processInfoOutputStream);
}
return processInfoOutputStream;
}
@Override
public List<Processor> createProcessors() {
return Collections.emptyList();
}
}
private static final Logger LOGGER = LoggerFactory.getLogger(PipelineStreamProcessor.class);
=======
private static final StroomLogger LOGGER = StroomLogger.getLogger(PipelineStreamProcessor.class);
>>>>>>>
private static final Logger LOGGER = LoggerFactory.getLogger(PipelineStreamProcessor.class);
<<<<<<<
// Loop around all the streams above looking for ones to delete
final FindStreamCriteria findDeleteStreamCriteria = new FindStreamCriteria();
for (final Stream stream : streamList) {
// If the stream is not associated with the latest stream task
// and is not already deleted then select it for deletion.
if (!latestStreamTaskId.equals(stream.getStreamTaskId())
&& !StreamStatus.DELETED.equals(stream.getStatus())) {
findDeleteStreamCriteria.obtainStreamIdSet().add(stream);
}
}
// If we have found any to delete then delete them now.
if (findDeleteStreamCriteria.obtainStreamIdSet().isConstrained()) {
final long deleteCount = streamStore.findDelete(findDeleteStreamCriteria);
LOGGER.info("checkSuperseded() - Removed {}", deleteCount);
=======
// Loop around all the streams found above looking for ones to delete
final FindStreamCriteria findDeleteStreamCriteria = new FindStreamCriteria();
for (final Stream stream : streamList) {
// If the stream is not associated with the latest stream task
// and is not already deleted then select it for deletion.
if ((latestStreamTaskId == null || !latestStreamTaskId.equals(stream.getStreamTaskId()))
&& !StreamStatus.DELETED.equals(stream.getStatus())) {
findDeleteStreamCriteria.obtainStreamIdSet().add(stream);
>>>>>>>
// Loop around all the streams found above looking for ones to delete
final FindStreamCriteria findDeleteStreamCriteria = new FindStreamCriteria();
for (final Stream stream : streamList) {
// If the stream is not associated with the latest stream task
// and is not already deleted then select it for deletion.
if ((latestStreamTaskId == null || !latestStreamTaskId.equals(stream.getStreamTaskId()))
&& !StreamStatus.DELETED.equals(stream.getStatus())) {
findDeleteStreamCriteria.obtainStreamIdSet().add(stream); |
<<<<<<<
import stroom.meta.shared.Meta;
import stroom.meta.shared.MetaService;
import stroom.meta.shared.FindMetaCriteria;
=======
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import stroom.data.meta.shared.Data;
import stroom.data.meta.shared.DataMetaService;
import stroom.data.meta.shared.FindDataCriteria;
>>>>>>>
import org.slf4j.Logger;
import org.slf4j.LoggerFactory; |
<<<<<<<
import stroom.query.api.v2.DocRef;
import stroom.refdata.offheapstore.RefDataStore;
import stroom.refdata.offheapstore.RefDataStoreProvider;
import stroom.refdata.offheapstore.RefStreamDefinition;
=======
import stroom.pipeline.task.StreamMetaDataProvider;
>>>>>>>
import stroom.refdata.offheapstore.RefDataStore;
import stroom.refdata.offheapstore.RefDataStoreProvider;
import stroom.refdata.offheapstore.RefStreamDefinition;
import stroom.pipeline.task.StreamMetaDataProvider;
<<<<<<<
this.refDataStore = refDataStoreProvider.get();
=======
this.metaDataHolder = metaDataHolder;
>>>>>>>
this.refDataStore = refDataStoreProvider.get();
this.metaDataHolder = metaDataHolder;
<<<<<<<
final PipelineEntity pipelineEntity = pipelineService
.loadByUuid(refStreamDefinition.getPipelineDocRef().getUuid());
pipelineHolder.setPipeline(pipelineEntity);
=======
final PipelineDoc pipelineDoc = pipelineStore.readDocument(mapStorePoolKey.getPipeline());
pipelineHolder.setPipeline(mapStorePoolKey.getPipeline());
>>>>>>>
final PipelineDoc pipelineDoc = pipelineStore
.readDocument(refStreamDefinition.getPipelineDocRef());
pipelineHolder.setPipeline(refStreamDefinition.getPipelineDocRef());
<<<<<<<
final PipelineEntity pipelineEntity = Objects.requireNonNull(pipelineHolder.getPipeline());
final DocRef pipelineDocRef = DocRefUtil.create(pipelineEntity);
final RefStreamDefinition refStreamDefinition = new RefStreamDefinition(
pipelineDocRef,
pipelineEntity.getVersion(),
streamHolder.getStream().getId());
=======
// Loop over the stream boundaries and process each
// sequentially.
final long streamCount = mainProvider.getStreamCount();
for (long streamNo = 0; streamNo < streamCount && !Thread.currentThread().isInterrupted(); streamNo++) {
streamHolder.setStreamNo(streamNo);
streamLocationFactory.setStreamNo(streamNo + 1);
>>>>>>>
final PipelineDoc pipelineDoc = Objects.requireNonNull(pipelineHolder.getPipeline());
final DocRef pipelineDocRef = DocRefUtil.create(pipelineDoc);
final RefStreamDefinition refStreamDefinition = new RefStreamDefinition(
pipelineDocRef,
pipelineDoc.getVersion(),
streamHolder.getStream().getId()); |
<<<<<<<
import javax.annotation.Resource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
=======
import org.springframework.stereotype.Component;
>>>>>>>
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
<<<<<<<
import org.springframework.stereotype.Component;
import stroom.node.server.GlobalProperties;
import stroom.util.spring.StroomBeanStore;
=======
import stroom.util.logging.StroomLogger;
>>>>>>>
<<<<<<<
public PropertyConfigurer() {
LOGGER.debug("Initialising: {}", this.getClass().getCanonicalName());
GlobalProperties.getInstance();
=======
@Inject
public PropertyConfigurer(final StroomPropertyService stroomPropertyService) {
this.stroomPropertyService = stroomPropertyService;
>>>>>>>
@Inject
public PropertyConfigurer(final StroomPropertyService stroomPropertyService) {
this.stroomPropertyService = stroomPropertyService; |
<<<<<<<
@Reference(cardinality = ReferenceCardinality.MANDATORY)
=======
private static final String VXLAN = "VXLAN";
private static final String VLAN = "VLAN";
@Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
>>>>>>>
private static final String VXLAN = "VXLAN";
private static final String VLAN = "VLAN";
@Reference(cardinality = ReferenceCardinality.MANDATORY) |
<<<<<<<
import org.apache.solr.client.solrj.SolrQuery;
import stroom.dashboard.expression.v1.FieldIndexMap;
import stroom.dictionary.api.WordListProvider;
import stroom.docref.DocRef;
import stroom.meta.shared.MetaService;
import stroom.pipeline.errorhandler.ErrorReceiver;
import stroom.pipeline.errorhandler.MessageUtil;
=======
import org.springframework.context.annotation.Scope;
import org.springframework.stereotype.Component;
import stroom.annotation.api.AnnotationDataSource;
import stroom.pipeline.server.errorhandler.MessageUtil;
>>>>>>>
import stroom.annotation.api.AnnotationDataSource;
import stroom.pipeline.errorhandler.MessageUtil;
<<<<<<<
import stroom.query.api.v2.Param;
import stroom.query.common.v2.Coprocessor;
import stroom.query.common.v2.CoprocessorSettings;
import stroom.query.common.v2.CoprocessorSettingsMap.CoprocessorKey;
import stroom.query.common.v2.Payload;
import stroom.search.extraction.ExtractionConfig;
import stroom.search.extraction.ExtractionTaskExecutor;
import stroom.search.extraction.ExtractionTaskHandler;
import stroom.search.extraction.ExtractionTaskProducer;
import stroom.search.extraction.StreamMapCreator;
import stroom.search.extraction.Values;
import stroom.search.solr.CachedSolrIndex;
import stroom.search.solr.SolrIndexCache;
import stroom.search.solr.search.SearchExpressionQueryBuilder.SearchExpressionQuery;
import stroom.search.solr.search.SolrSearchTask.ResultReceiver;
import stroom.search.solr.shared.SolrIndexField;
import stroom.security.api.SecurityContext;
import stroom.task.api.ExecutorProvider;
import stroom.task.api.TaskContext;
import stroom.task.api.TaskTerminatedException;
import stroom.task.shared.ThreadPool;
import stroom.task.shared.ThreadPoolImpl;
=======
import stroom.search.coprocessor.CompletionState;
import stroom.search.coprocessor.Coprocessors;
import stroom.search.coprocessor.CoprocessorsFactory;
import stroom.search.coprocessor.Error;
import stroom.search.coprocessor.NewCoprocessor;
import stroom.search.coprocessor.Receiver;
import stroom.search.coprocessor.ReceiverImpl;
import stroom.search.extraction.ExpressionFilter;
import stroom.search.extraction.ExtractionDecoratorFactory;
import stroom.search.resultsender.NodeResult;
import stroom.search.resultsender.ResultSender;
import stroom.search.resultsender.ResultSenderFactory;
import stroom.security.SecurityContext;
import stroom.security.SecurityHelper;
import stroom.task.server.TaskCallback;
import stroom.task.server.TaskContext;
import stroom.task.server.TaskTerminatedException;
>>>>>>>
import stroom.search.coprocessor.CompletionState;
import stroom.search.coprocessor.Coprocessors;
import stroom.search.coprocessor.CoprocessorsFactory;
import stroom.search.coprocessor.Error;
import stroom.search.coprocessor.NewCoprocessor;
import stroom.search.coprocessor.Receiver;
import stroom.search.coprocessor.ReceiverImpl;
import stroom.search.extraction.ExpressionFilter;
import stroom.search.extraction.ExtractionDecoratorFactory;
import stroom.search.resultsender.NodeResult;
import stroom.search.resultsender.ResultSender;
import stroom.search.resultsender.ResultSenderFactory;
import stroom.security.api.SecurityContext;
import stroom.task.api.TaskContext;
import stroom.task.api.TaskTerminatedException;
<<<<<<<
import stroom.util.shared.Location;
import stroom.util.shared.Severity;
=======
import stroom.util.shared.HasTerminate;
import stroom.util.spring.StroomScope;
import stroom.util.task.MonitorImpl;
import stroom.util.thread.ThreadUtil;
>>>>>>>
<<<<<<<
private final SolrIndexCache solrIndexCache;
private final WordListProvider wordListProvider;
=======
>>>>>>>
<<<<<<<
private final SolrCoprocessorFactory coprocessorFactory;
private final ExtractionTaskExecutor extractionTaskExecutor;
private final ExtractionConfig extractionConfig;
private final MetaService metaService;
=======
private final CoprocessorsFactory coprocessorsFactory;
private final SolrSearchFactory solrSearchFactory;
private final ExtractionDecoratorFactory extractionDecoratorFactory;
private final ResultSenderFactory resultSenderFactory;
>>>>>>>
private final CoprocessorsFactory coprocessorsFactory;
private final SolrSearchFactory solrSearchFactory;
private final ExtractionDecoratorFactory extractionDecoratorFactory;
private final ResultSenderFactory resultSenderFactory;
<<<<<<<
private final CountDownLatch searchCompleteLatch = new CountDownLatch(1);
private final Provider<ExtractionTaskHandler> extractionTaskHandlerProvider;
private final ExecutorProvider executorProvider;
private final SolrSearchTaskHandler solrSearchTaskHandler;
=======
private final CompletionState searchCompletionState = new CompletionState();
>>>>>>>
private final CompletionState searchCompletionState = new CompletionState();
<<<<<<<
SolrClusterSearchTaskHandler(final SolrIndexCache solrIndexCache,
final WordListProvider wordListProvider,
final TaskContext taskContext,
final SolrCoprocessorFactory coprocessorFactory,
final ExtractionTaskExecutor extractionTaskExecutor,
final ExtractionConfig extractionConfig,
final MetaService metaService,
final SecurityContext securityContext,
final SolrSearchConfig searchConfig,
final Provider<ExtractionTaskHandler> extractionTaskHandlerProvider,
final ExecutorProvider executorProvider,
final SolrSearchTaskHandler solrSearchTaskHandler) {
this.solrIndexCache = solrIndexCache;
this.wordListProvider = wordListProvider;
=======
SolrClusterSearchTaskHandler(final TaskContext taskContext,
final CoprocessorsFactory coprocessorsFactory,
final SolrSearchFactory solrSearchFactory,
final ExtractionDecoratorFactory extractionDecoratorFactory,
final ResultSenderFactory resultSenderFactory,
final SecurityContext securityContext) {
>>>>>>>
SolrClusterSearchTaskHandler(final TaskContext taskContext,
final CoprocessorsFactory coprocessorsFactory,
final SolrSearchFactory solrSearchFactory,
final ExtractionDecoratorFactory extractionDecoratorFactory,
final ResultSenderFactory resultSenderFactory,
final SecurityContext securityContext) {
<<<<<<<
this.coprocessorFactory = coprocessorFactory;
this.extractionTaskExecutor = extractionTaskExecutor;
this.extractionConfig = extractionConfig;
this.metaService = metaService;
=======
this.coprocessorsFactory = coprocessorsFactory;
this.solrSearchFactory = solrSearchFactory;
this.extractionDecoratorFactory = extractionDecoratorFactory;
this.resultSenderFactory = resultSenderFactory;
>>>>>>>
this.coprocessorsFactory = coprocessorsFactory;
this.solrSearchFactory = solrSearchFactory;
this.extractionDecoratorFactory = extractionDecoratorFactory;
this.resultSenderFactory = resultSenderFactory;
<<<<<<<
// Get an array of stored index fields that will be used for getting stored data.
final FieldIndexMap storedFieldIndexMap = new FieldIndexMap();
for (final String storedField : storedFields) {
storedFieldIndexMap.create(storedField, true);
}
// See if we need to filter steams and if any of the coprocessors need us to extract data.
boolean filterStreams;
Map<CoprocessorKey, Coprocessor> coprocessorMap = null;
Map<DocRef, Set<Coprocessor>> extractionCoprocessorsMap = null;
final FieldIndexMap extractionFieldIndexMap = new FieldIndexMap(true);
filterStreams = true;
// Create a map of index fields keyed by name.
final Map<String, SolrIndexField> indexFieldsMap = index.getFieldsMap();
// Compile all of the result component options to optimise pattern matching etc.
if (task.getCoprocessorMap() != null) {
coprocessorMap = new HashMap<>();
extractionCoprocessorsMap = new HashMap<>();
=======
// Create coprocessors.
final Coprocessors coprocessors = coprocessorsFactory.create(task.getCoprocessorMap(), storedFields, query.getParams(), this, task);
>>>>>>>
// Create coprocessors.
final Coprocessors coprocessors = coprocessorsFactory.create(task.getCoprocessorMap(), storedFields, query.getParams(), this);
<<<<<<<
searchResultCollector.onFailure(e);
=======
callback.onFailure(e);
>>>>>>>
callback.onFailure(e);
<<<<<<<
public void log(final Severity severity, final Location location, final String elementId, final String message,
final Throwable e) {
if (e != null) {
LOGGER.debug(e::getMessage, e);
}
if (!(e instanceof TaskTerminatedException)) {
final String msg = MessageUtil.getMessage(message, e);
try {
errors.put(msg);
} catch (final InterruptedException ie) {
// Continue to interrupt this thread.
Thread.currentThread().interrupt();
}
=======
public void accept(final Error error) {
if (error != null) {
LOGGER.debug(error::getMessage, error.getThrowable());
if (!(error.getThrowable() instanceof TaskTerminatedException)) {
final String msg = MessageUtil.getMessage(error.getMessage(), error.getThrowable());
errors.offer(msg);
}
>>>>>>>
public void accept(final Error error) {
if (error != null) {
LOGGER.debug(error::getMessage, error.getThrowable());
if (!(error.getThrowable() instanceof TaskTerminatedException)) {
final String msg = MessageUtil.getMessage(error.getMessage(), error.getThrowable());
errors.offer(msg);
} |
<<<<<<<
import org.hibernate.proxy.HibernateProxy;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.BeansException;
import org.springframework.beans.factory.BeanFactory;
import org.springframework.beans.factory.BeanFactoryAware;
=======
import org.hibernate.proxy.HibernateProxy;
import org.springframework.beans.BeansException;
import org.springframework.beans.factory.BeanFactory;
import org.springframework.beans.factory.BeanFactoryAware;
>>>>>>>
import org.hibernate.proxy.HibernateProxy;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.BeansException;
import org.springframework.beans.factory.BeanFactory;
import org.springframework.beans.factory.BeanFactoryAware;
<<<<<<<
SQLUtil.setParameters(query, sql);
rtn = (long) query.executeUpdate();
=======
SqlUtil.setParameters(query, sql);
rtn = Long.valueOf(query.executeUpdate());
>>>>>>>
SqlUtil.setParameters(query, sql);
rtn = (long) query.executeUpdate(); |
<<<<<<<
@ConfigurableElement(type = "RollingFileAppender", category = Category.DESTINATION, roles = {
PipelineElementType.ROLE_TARGET, PipelineElementType.ROLE_DESTINATION,
PipelineElementType.VISABILITY_STEPPING}, icon = ElementIcons.STREAM)
class RollingFileAppender extends AbstractRollingAppender {
=======
@ConfigurableElement(
type = "RollingFileAppender",
category = Category.DESTINATION,
roles = {
PipelineElementType.ROLE_TARGET,
PipelineElementType.ROLE_DESTINATION,
PipelineElementType.VISABILITY_STEPPING},
icon = ElementIcons.FILES)
public class RollingFileAppender extends AbstractRollingAppender {
>>>>>>>
@ConfigurableElement(
type = "RollingFileAppender",
category = Category.DESTINATION,
roles = {
PipelineElementType.ROLE_TARGET,
PipelineElementType.ROLE_DESTINATION,
PipelineElementType.VISABILITY_STEPPING},
icon = ElementIcons.FILES)
class RollingFileAppender extends AbstractRollingAppender { |
<<<<<<<
private final HttpServletRequest request = new MockHttpServletRequest();
private final MockContainerRequestContext requestContext = new MockContainerRequestContext();
private final SecurityContext securityContext = new MockSecurityContext();
=======
RestResourceAutoLoggerImpl filter;
ObjectMapper objectMapper;
Random random = new Random();
private HttpServletRequest request = new MockHttpServletRequest();
private MockContainerRequestContext requestContext = new MockContainerRequestContext();
private SecurityContext securityContext = new MockSecurityContext();
>>>>>>>
RestResourceAutoLoggerImpl filter;
ObjectMapper objectMapper;
Random random = new Random();
private final HttpServletRequest request = new MockHttpServletRequest();
private final MockContainerRequestContext requestContext = new MockContainerRequestContext();
private final SecurityContext securityContext = new MockSecurityContext();
<<<<<<<
private final StroomEventLoggingService eventLoggingService = new MockStroomEventLoggingService();
=======
private StroomEventLoggingService eventLoggingService = new MockStroomEventLoggingService();
>>>>>>>
private final StroomEventLoggingService eventLoggingService = new MockStroomEventLoggingService();
<<<<<<<
private final RequestLoggingConfig config = new RequestLoggingConfig();
=======
private RequestLoggingConfig config = new RequestLoggingConfig();
>>>>>>>
private final RequestLoggingConfig config = new RequestLoggingConfig();
<<<<<<<
RestResourceAutoLoggerImpl filter;
ObjectMapper objectMapper;
Random random = new Random();
private final Injector injector;
=======
private Injector injector;
>>>>>>>
private Injector injector;
<<<<<<<
TestRestResourceAutoLogger() {
injector = Guice.createInjector(new MockRsLoggingModule());
=======
TestRestResourceAutoLogger() {
injector = Guice.createInjector(new MockRSLoggingModule());
>>>>>>>
TestRestResourceAutoLogger() {
injector = Guice.createInjector(new MockRsLoggingModule());
<<<<<<<
} catch (Exception e) {
// Ignore errors
}
=======
} catch (Exception e) {
}
>>>>>>>
} catch (Exception e) {
// Ignore errors
} |
<<<<<<<
=======
import stroom.event.logging.api.EventActionDecorator;
import stroom.event.logging.api.ObjectInfoProvider;
import stroom.event.logging.api.ObjectType;
>>>>>>>
import stroom.event.logging.api.EventActionDecorator;
<<<<<<<
@Override
public void search(final String typeId,
final Query query,
final String resultType,
final PageResponse pageResponse,
final String descriptionVerb,
final Throwable ex) {
=======
public void search(final String typeId, final Query query, final String resultType, final PageResponse pageResponse,
final String descriptionVerb, final Throwable ex,
final EventActionDecorator<SearchEventAction> actionDecorator) {
>>>>>>>
@Override
public void search(final String typeId, final Query query, final String resultType, final PageResponse pageResponse,
final String descriptionVerb, final Throwable ex,
final EventActionDecorator<SearchEventAction> actionDecorator) { |
<<<<<<<
import stroom.entity.shared.DocRefUtil;
import stroom.entity.shared.EntityAction;
import stroom.entity.shared.EntityActionConfirmation;
import stroom.entity.shared.FindFolderCriteria;
=======
import stroom.entity.shared.DocRef;
import stroom.entity.shared.DocRefs;
>>>>>>>
import stroom.entity.shared.DocRefUtil;
import stroom.entity.shared.DocRefs;
<<<<<<<
import stroom.test.StroomCoreServerTestFileUtil;
import stroom.util.io.StreamUtil;
=======
import stroom.test.StroomCoreServerTestFileUtil;
>>>>>>>
import stroom.test.StroomCoreServerTestFileUtil;
<<<<<<<
public void testPipeline() {
final DocRef folder = DocRefUtil.create(folderService.create(null, FileSystemTestUtil.getUniqueTestString()));
=======
public void testPipeline() throws IOException {
final DocRef folder = DocRef.create(folderService.create(null, FileSystemTestUtil.getUniqueTestString()));
>>>>>>>
public void testPipeline() throws IOException {
final DocRef folder = DocRefUtil.create(folderService.create(null, FileSystemTestUtil.getUniqueTestString())); |
<<<<<<<
import stroom.security.shared.User;
import stroom.svg.client.SvgPresets;
=======
import stroom.security.shared.FindUserCriteria;
import stroom.widget.button.client.GlyphIcons;
>>>>>>>
import stroom.security.shared.FindUserCriteria;
import stroom.svg.client.SvgPresets; |
<<<<<<<
=======
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
>>>>>>>
<<<<<<<
return Response
.status(Response.Status.BAD_REQUEST)
.entity("Invalid sortBy field")
.build();
=======
return RestUtil.badRequest("Invalid sortBy field");
>>>>>>>
return RestUtil.badRequest("Invalid sortBy field");
<<<<<<<
return Response
.status(Response.Status.BAD_REQUEST)
.entity("Page offset must be greater than 0")
.build();
=======
RestUtil.badRequest("Page offset must be greater than 0");
>>>>>>>
RestUtil.badRequest("Page offset must be greater than 0");
<<<<<<<
return Response
.status(Response.Status.BAD_REQUEST)
.entity("Page size, if used, must be greater than 1")
.build();
=======
return RestUtil.badRequest("Page size, if used, must be greater than 1");
>>>>>>>
return RestUtil.badRequest("Page size, if used, must be greater than 1"); |
<<<<<<<
import stroom.data.store.api.Store;
import stroom.data.store.api.Target;
import stroom.data.store.api.TargetUtil;
=======
import org.junit.jupiter.api.extension.ExtendWith;
import stroom.data.store.api.StreamStore;
import stroom.data.store.api.StreamTarget;
import stroom.data.store.api.StreamTargetUtil;
import stroom.db.util.DbUtil;
>>>>>>>
<<<<<<<
import java.io.IOException;
import java.io.UncheckedIOException;
=======
import java.nio.file.Path;
>>>>>>>
import java.io.IOException;
import java.io.UncheckedIOException;
import java.nio.file.Path; |
<<<<<<<
import stroom.task.api.TaskHandler;
=======
import stroom.pipeline.factory.Element;
import stroom.refdata.store.RefDataStoreModule;
import stroom.task.TaskHandler;
>>>>>>>
import stroom.task.api.TaskHandler;
import stroom.pipeline.factory.Element;
import stroom.refdata.store.RefDataStoreModule; |
<<<<<<<
=======
import com.google.common.base.Preconditions;
import event.logging.BaseAdvancedQueryItem;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
>>>>>>>
import com.google.common.base.Preconditions;
import event.logging.BaseAdvancedQueryItem;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
<<<<<<<
public abstract class DocumentEntityServiceImpl<E extends DocumentEntity, C extends FindDocumentEntityCriteria> implements DocumentEntityService<E>, BaseEntityService<E>, FindService<E, C>, ProvidesNamePattern {
public static final String FOLDER = ExplorerConstants.FOLDER;
private static final String NAME_PATTERN_PROPERTY = "stroom.namePattern";
private static final String NAME_PATTERN_VALUE = "^[a-zA-Z0-9_\\- \\.\\(\\)]{1,}$";
=======
public abstract class DocumentEntityServiceImpl<E extends DocumentEntity, C extends FindDocumentEntityCriteria> implements DocumentEntityService<E>, FindService<E, C>, SupportsCriteriaLogging<C> {
protected static final Logger LOGGER = LoggerFactory.getLogger(ImportExportSerializerImpl.class);
public static final String NAME_PATTERN_PROPERTY = "stroom.namePattern";
public static final String NAME_PATTERN_VALUE = "^[a-zA-Z0-9_\\- \\.\\(\\)]{1,}$";
>>>>>>>
public abstract class DocumentEntityServiceImpl<E extends DocumentEntity, C extends FindDocumentEntityCriteria> implements DocumentEntityService<E>, BaseEntityService<E>, FindService<E, C>, ProvidesNamePattern {
protected static final Logger LOGGER = LoggerFactory.getLogger(ImportExportSerializerImpl.class);
public static final String FOLDER = ExplorerConstants.FOLDER;
private static final String NAME_PATTERN_PROPERTY = "stroom.namePattern";
private static final String NAME_PATTERN_VALUE = "^[a-zA-Z0-9_\\- \\.\\(\\)]{1,}$";
<<<<<<<
public DocRef importDocument(final DocRef docRef, final Map<String, String> dataMap, final ImportState importState, final ImportMode importMode) {
=======
public DocRef importDocument(final Folder folder,
final Map<String, String> dataMap,
final ImportState importState,
final ImportMode importMode) {
LOGGER.debug("importDocument: folder [%s]",
(folder != null ? folder.getName() + " - " + folder.getUuid() : "null"));
>>>>>>>
public DocRef importDocument(final DocRef docRef,
final Map<String, String> dataMap,
final ImportState importState,
final ImportMode importMode) {
LOGGER.debug("importDocument: folder [%s]",
(folder != null ? folder.getName() + " - " + folder.getUuid() : "null"));
<<<<<<<
importExportHelper.performImport(entity, dataMap, importState, importMode);
=======
importExportHelper.performImport(entity, dataMap, mainConfigPath, importState, importMode);
validateNameUniqueness(folder, importState, importMode, config, uuid);
// We don't want to overwrite any marshaled data so disable marshalling on creation.
setFolder(entity, DocRefUtil.create(folder));
>>>>>>>
importExportHelper.performImport(entity, dataMap, mainConfigPath, importState, importMode); |
<<<<<<<
private DelegateExtensionFunctionDefinition(final StroomXSLTFunctionLibrary library,
final String functionName,
final int minArgs,
final int maxArgs,
final SequenceType[] argTypes,
final SequenceType resultType,
final Class<?> delegateClass) {
=======
DelegateExtensionFunctionDefinition(final StroomXSLTFunctionLibrary library, final String functionName,
final int minArgs, final int maxArgs, final SequenceType[] argTypes, final SequenceType resultType,
final Class<?> delegateClass) {
>>>>>>>
DelegateExtensionFunctionDefinition(final StroomXSLTFunctionLibrary library, final String functionName,
final int minArgs, final int maxArgs, final SequenceType[] argTypes, final SequenceType resultType,
final Class<?> delegateClass) {
<<<<<<<
public DelegateExtensionFunctionCall getFunctionCall() {
return functionCall;
}
public static Builder startBuild() {
return new Builder();
}
public static class Builder {
private StroomXSLTFunctionLibrary library;
private String functionName;
private int minArgs = 0;
private int maxArgs = 0;
private SequenceType[] argTypes = new SequenceType[]{};
private SequenceType resultType;
private Class<?> delegateClass;
private Builder() {
}
public Builder library(final StroomXSLTFunctionLibrary value) {
this.library = value;
return this;
}
public Builder functionName(final String value) {
this.functionName = value;
return this;
}
public Builder minArgs(final int value) {
this.minArgs = value;
return this;
}
public Builder maxArgs(final int value) {
this.maxArgs = value;
return this;
}
public Builder argTypes(final SequenceType[] value) {
this.argTypes = value;
return this;
}
public Builder resultType(final SequenceType value) {
this.resultType = value;
return this;
}
public Builder delegateClass(final Class<?> value) {
this.delegateClass = value;
return this;
}
public DelegateExtensionFunctionDefinition build() {
return new DelegateExtensionFunctionDefinition(
library,
functionName,
minArgs,
maxArgs,
argTypes,
resultType,
delegateClass);
}
}
=======
>>>>>>>
public static Builder startBuild() {
return new Builder();
}
public static class Builder {
private StroomXSLTFunctionLibrary library;
private String functionName;
private int minArgs = 0;
private int maxArgs = 0;
private SequenceType[] argTypes = new SequenceType[]{};
private SequenceType resultType;
private Class<?> delegateClass;
private Builder() {
}
public Builder library(final StroomXSLTFunctionLibrary value) {
this.library = value;
return this;
}
public Builder functionName(final String value) {
this.functionName = value;
return this;
}
public Builder minArgs(final int value) {
this.minArgs = value;
return this;
}
public Builder maxArgs(final int value) {
this.maxArgs = value;
return this;
}
public Builder argTypes(final SequenceType[] value) {
this.argTypes = value;
return this;
}
public Builder resultType(final SequenceType value) {
this.resultType = value;
return this;
}
public Builder delegateClass(final Class<?> value) {
this.delegateClass = value;
return this;
}
public DelegateExtensionFunctionDefinition build() {
return new DelegateExtensionFunctionDefinition(
library,
functionName,
minArgs,
maxArgs,
argTypes,
resultType,
delegateClass);
}
} |
<<<<<<<
=======
import org.apache.commons.lang3.tuple.Pair;
import org.apache.felix.scr.annotations.Activate;
import org.apache.felix.scr.annotations.Component;
import org.apache.felix.scr.annotations.Deactivate;
import org.apache.felix.scr.annotations.Reference;
import org.apache.felix.scr.annotations.ReferenceCardinality;
>>>>>>>
import org.apache.commons.lang3.tuple.Pair; |
<<<<<<<
import stroom.docref.DocRef;
import stroom.security.DocumentPermissionCache;
import stroom.feed.shared.FeedDoc;
=======
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import stroom.docref.DocRef;
import stroom.entity.DocumentPermissionCache;
import stroom.feed.shared.Feed;
import stroom.pipeline.PipelineStore;
>>>>>>>
import stroom.docref.DocRef;
import stroom.security.DocumentPermissionCache;
import stroom.feed.shared.FeedDoc;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import stroom.entity.DocumentPermissionCache;
import stroom.feed.shared.Feed;
import stroom.pipeline.PipelineStore;
<<<<<<<
=======
import stroom.refdata.store.MapDefinition;
import stroom.refdata.store.RefDataStore;
import stroom.refdata.store.RefDataValue;
import stroom.refdata.store.RefDataValueProxy;
import stroom.refdata.store.RefStreamDefinition;
import stroom.refdata.store.StringValue;
import stroom.refdata.store.MultiRefDataValueProxy;
import stroom.security.Security;
>>>>>>>
import stroom.refdata.store.MapDefinition;
import stroom.refdata.store.RefDataStore;
import stroom.refdata.store.RefDataValue;
import stroom.refdata.store.RefDataValueProxy;
import stroom.refdata.store.RefStreamDefinition;
import stroom.refdata.store.StringValue;
import stroom.refdata.store.MultiRefDataValueProxy;
import stroom.security.Security;
<<<<<<<
import stroom.data.store.api.StreamSourceInputStream;
import stroom.data.store.api.StreamSourceInputStreamProvider;
import stroom.data.meta.api.Data;
import stroom.streamstore.shared.StreamTypeNames;
=======
import stroom.streamstore.fs.serializable.StreamSourceInputStream;
import stroom.streamstore.fs.serializable.StreamSourceInputStreamProvider;
import stroom.streamstore.shared.Stream;
import stroom.streamstore.shared.StreamType;
import stroom.util.logging.LambdaLogger;
import stroom.util.logging.LambdaLoggerFactory;
>>>>>>>
import stroom.data.meta.api.Data;
import stroom.streamstore.shared.StreamTypeNames;
import stroom.streamstore.fs.serializable.StreamSourceInputStream;
import stroom.streamstore.fs.serializable.StreamSourceInputStreamProvider;
import stroom.streamstore.shared.Stream;
import stroom.streamstore.shared.StreamType;
import stroom.util.logging.LambdaLogger;
import stroom.util.logging.LambdaLoggerFactory;
<<<<<<<
&& StreamTypeNames.CONTEXT.equals(pipelineReference.getStreamType())) {
getNestedStreamEventList(pipelineReference, mapName, keyName, referenceDataResult);
=======
&& StreamType.CONTEXT.getName().equals(pipelineReference.getStreamType())) {
getNestedStreamEventList(
pipelineReference,
lookupIdentifier.getPrimaryMapName(),
lookupIdentifier.getKey(),
referenceDataResult);
>>>>>>>
&& StreamType.CONTEXT.getName().equals(pipelineReference.getStreamType())) {
getNestedStreamEventList(
pipelineReference,
lookupIdentifier.getPrimaryMapName(),
lookupIdentifier.getKey(),
referenceDataResult);
<<<<<<<
cachedMapStore = new CachedMapStore(streamNo, mapStore);
nestedStreamCache.put(streamTypeName, cachedMapStore);
=======
>>>>>>>
<<<<<<<
private MapStore getContextData(final Data stream,
final StreamSourceInputStream contextStream,
final DocRef contextPipeline) {
=======
private void setValueProxyOnResult(final RefDataStore refDataStore,
final String mapName,
final String keyName,
final ReferenceDataResult result,
final RefStreamDefinition refStreamDefinition) {
final MapDefinition mapDefinition = new MapDefinition(refStreamDefinition, mapName);
// This stream def may/may not have this map name. To save us hitting the DB each time to
// find out we hold the result it in pipeline scope. This assumes we have already determined
// that the refStreamDef is fully loaded
Boolean doesMapDefExist = refDataLoaderHolder.isMapDefinitionAvailable(mapDefinition);
if (doesMapDefExist == null) {
// existence unknown so do a lookup in the DB
doesMapDefExist = refDataStore.exists(mapDefinition);
refDataLoaderHolder.markMapDefinitionAvailablility(mapDefinition, doesMapDefExist);
}
if (doesMapDefExist) {
// Define a proxy object to allow callers to get the required value from the store
// now that we know that the stream that may contain it is in there.
final RefDataValueProxy refDataValueProxy = refDataStore.getValueProxy(mapDefinition, keyName);
result.setRefDataValueProxy(refDataValueProxy);
} else {
// this stream doesn't have this map so return a null proxy to save a pointless lookup
result.setRefDataValueProxy(null);
}
}
private String getPipelineVersion(final PipelineReference pipelineReference) {
return refDataLoaderHolder.getPipelineVersion(pipelineReference, pipelineStore);
}
private void loadContextData(
final Stream stream,
final StreamSourceInputStream contextStream,
final DocRef contextPipeline,
final RefStreamDefinition refStreamDefinition,
final RefDataStore refDataStore) {
>>>>>>>
private void setValueProxyOnResult(final RefDataStore refDataStore,
final String mapName,
final String keyName,
final ReferenceDataResult result,
final RefStreamDefinition refStreamDefinition) {
final MapDefinition mapDefinition = new MapDefinition(refStreamDefinition, mapName);
// This stream def may/may not have this map name. To save us hitting the DB each time to
// find out we hold the result it in pipeline scope. This assumes we have already determined
// that the refStreamDef is fully loaded
Boolean doesMapDefExist = refDataLoaderHolder.isMapDefinitionAvailable(mapDefinition);
if (doesMapDefExist == null) {
// existence unknown so do a lookup in the DB
doesMapDefExist = refDataStore.exists(mapDefinition);
refDataLoaderHolder.markMapDefinitionAvailablility(mapDefinition, doesMapDefExist);
}
if (doesMapDefExist) {
// Define a proxy object to allow callers to get the required value from the store
// now that we know that the stream that may contain it is in there.
final RefDataValueProxy refDataValueProxy = refDataStore.getValueProxy(mapDefinition, keyName);
result.setRefDataValueProxy(refDataValueProxy);
} else {
// this stream doesn't have this map so return a null proxy to save a pointless lookup
result.setRefDataValueProxy(null);
}
}
private String getPipelineVersion(final PipelineReference pipelineReference) {
return refDataLoaderHolder.getPipelineVersion(pipelineReference, pipelineStore);
}
private void loadContextData(
final Stream stream,
final StreamSourceInputStream contextStream,
final DocRef contextPipeline,
final RefStreamDefinition refStreamDefinition,
final RefDataStore refDataStore) {
<<<<<<<
return contextDataLoader.load(contextStream, stream, feedHolder.getFeedName(), contextPipeline);
=======
// load the context data into the RefDataStore so it is available for lookups
contextDataLoader.load(
contextStream,
stream,
feedHolder.getFeed(),
contextPipeline,
refStreamDefinition,
refDataStore);
>>>>>>>
// load the context data into the RefDataStore so it is available for lookups
contextDataLoader.load(
contextStream,
stream,
feedHolder.getFeed(),
contextPipeline,
refStreamDefinition,
refDataStore);
<<<<<<<
final EffectiveStreamKey effectiveStreamKey = new EffectiveStreamKey(pipelineReference.getFeed().getName(),
pipelineReference.getStreamType(), fromMs, toMs);
=======
final EffectiveStreamKey effectiveStreamKey = new EffectiveStreamKey(
pipelineReference.getFeed(),
pipelineReference.getStreamType(),
fromMs,
toMs);
>>>>>>>
final EffectiveStreamKey effectiveStreamKey = new EffectiveStreamKey(
pipelineReference.getFeed(),
pipelineReference.getStreamType(),
fromMs,
toMs); |
<<<<<<<
=======
import stroom.streamstore.shared.StreamTypeService;
import stroom.util.scheduler.SimpleCron;
import stroom.util.shared.ModelStringUtil;
>>>>>>>
<<<<<<<
=======
private static final int MB = 1024 * 1024;
private static final int DEFAULT_ROLL_SIZE = 100 * MB;
private static final long SECOND = 1000;
private static final long MINUTE = 60 * SECOND;
private static final long HOUR = 60 * MINUTE;
>>>>>>>
private static final int MB = 1024 * 1024;
private static final int DEFAULT_ROLL_SIZE = 100 * MB;
private static final long SECOND = 1000;
private static final long MINUTE = 60 * SECOND;
private static final long HOUR = 60 * MINUTE;
<<<<<<<
=======
private Long frequency = HOUR;
private SimpleCron schedule;
private long rollSize = DEFAULT_ROLL_SIZE;
private boolean validatedSettings;
>>>>>>>
private Long frequency = HOUR;
private SimpleCron schedule;
private long rollSize = DEFAULT_ROLL_SIZE;
private boolean validatedSettings;
<<<<<<<
return new RollingStreamDestination(key,
getFrequency(),
getRollSize(),
System.currentTimeMillis(),
streamStore,
streamTarget,
nodeName);
=======
return new RollingStreamDestination(key,
frequency,
schedule,
rollSize,
streamStore,
streamTarget,
nodeName,
System.currentTimeMillis());
>>>>>>>
return new RollingStreamDestination(key,
getFrequency(),
getRollSize(),
System.currentTimeMillis(),
streamStore,
streamTarget,
nodeName);
<<<<<<<
=======
@PipelineProperty(description = "Choose how frequently streams are rolled.", defaultValue = "1h")
public void setFrequency(final String frequency) {
if (frequency == null || frequency.trim().length() == 0) {
this.frequency = null;
} else {
try {
final Long value = ModelStringUtil.parseDurationString(frequency);
if (value == null || value <= 0) {
throw new PipelineFactoryException("Incorrect value for frequency: " + frequency);
}
this.frequency = value;
} catch (final NumberFormatException e) {
throw new PipelineFactoryException("Incorrect value for frequency: " + frequency);
}
}
}
@PipelineProperty(description = "Provide a cron expression to determine when streams are rolled.")
public void setSchedule(final String expression) {
if (expression == null || expression.trim().length() == 0) {
this.schedule = null;
} else {
try {
this.schedule = SimpleCron.compile(expression);
} catch (final NumberFormatException e) {
throw new PipelineFactoryException("Incorrect value for schedule: " + expression);
}
}
}
>>>>>>>
@PipelineProperty(description = "Choose how frequently streams are rolled.", defaultValue = "1h")
public void setFrequency(final String frequency) {
if (frequency == null || frequency.trim().length() == 0) {
this.frequency = null;
} else {
try {
final Long value = ModelStringUtil.parseDurationString(frequency);
if (value == null || value <= 0) {
throw new PipelineFactoryException("Incorrect value for frequency: " + frequency);
}
this.frequency = value;
} catch (final NumberFormatException e) {
throw new PipelineFactoryException("Incorrect value for frequency: " + frequency);
}
}
}
@PipelineProperty(description = "Provide a cron expression to determine when streams are rolled.")
public void setSchedule(final String expression) {
if (expression == null || expression.trim().length() == 0) {
this.schedule = null;
} else {
try {
this.schedule = SimpleCron.compile(expression);
} catch (final NumberFormatException e) {
throw new PipelineFactoryException("Incorrect value for schedule: " + expression);
}
}
} |