conflict_resolution (string, lengths 27 to 16k)
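Each `---`-separated row below is one sample from this column: one or more git conflict blocks, where `<<<<<<<` opens our side, `=======` switches to their side, and `>>>>>>>` closes the block, each followed by the lines that were actually committed as the resolution of that hunk. As a reading aid, here is a minimal Java sketch of how one sample could be split back into (ours, theirs, resolution) triples. It is not part of the dataset; the class and record names are illustrative, and it assumes every sample keeps exactly the marker layout shown in the rows below.

```java
import java.util.ArrayList;
import java.util.List;

// Minimal sketch, not part of the dataset: walks one conflict_resolution
// sample line by line and pairs each conflict block with the resolved text
// that immediately follows it. All names here are illustrative.
public final class ConflictSampleParser {

    /** One conflicted hunk plus the text that was committed for it. */
    public record Conflict(String ours, String theirs, String resolution) {}

    public static List<Conflict> parse(final String sample) {
        final List<Conflict> conflicts = new ArrayList<>();
        StringBuilder ours = null;       // lines between <<<<<<< and =======
        StringBuilder theirs = null;     // lines between ======= and >>>>>>>
        StringBuilder resolution = null; // lines after >>>>>>>, up to the next <<<<<<<

        for (final String line : sample.split("\n", -1)) {
            if (line.startsWith("<<<<<<<")) {
                flush(conflicts, ours, theirs, resolution);
                ours = new StringBuilder();
                theirs = null;
                resolution = null;
            } else if (line.startsWith("=======") && ours != null && theirs == null) {
                theirs = new StringBuilder();
            } else if (line.startsWith(">>>>>>>") && theirs != null) {
                resolution = new StringBuilder();
            } else if (resolution != null) {
                resolution.append(line).append('\n');
            } else if (theirs != null) {
                theirs.append(line).append('\n');
            } else if (ours != null) {
                ours.append(line).append('\n');
            }
        }
        flush(conflicts, ours, theirs, resolution);
        return conflicts;
    }

    private static void flush(final List<Conflict> conflicts, final StringBuilder ours,
                              final StringBuilder theirs, final StringBuilder resolution) {
        // Emit a triple only once a complete <<<<<<< ... ======= ... >>>>>>> block has been seen.
        if (ours != null && theirs != null) {
            conflicts.add(new Conflict(ours.toString(), theirs.toString(),
                    resolution == null ? "" : resolution.toString()));
        }
    }
}
```

Run against the first row below, this yields two triples: the first pairs the `USE_SECURITY_GROUP` constant plus a `MANDATORY` reference (ours) against a `MANDATORY_UNARY` reference (theirs), resolved to the plain `MANDATORY` reference.
---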
<<<<<<<
private static final String USE_SECURITY_GROUP = "useSecurityGroup";
@Reference(cardinality = ReferenceCardinality.MANDATORY)
=======
@Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
>>>>>>>
@Reference(cardinality = ReferenceCardinality.MANDATORY)
<<<<<<<
@Reference(cardinality = ReferenceCardinality.MANDATORY)
protected ComponentConfigService componentConfigService;
@Reference(cardinality = ReferenceCardinality.MANDATORY)
=======
@Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
>>>>>>>
@Reference(cardinality = ReferenceCardinality.MANDATORY)
---
<<<<<<<
final MapBinder<String, Object> entityServiceByTypeBinder = MapBinder.newMapBinder(binder(), String.class, Object.class);
entityServiceByTypeBinder.addBinding(PipelineDoc.DOCUMENT_TYPE).to(PipelineStoreImpl.class);
entityServiceByTypeBinder.addBinding(TextConverterDoc.DOCUMENT_TYPE).to(stroom.pipeline.TextConverterStoreImpl.class);
entityServiceByTypeBinder.addBinding(XsltDoc.DOCUMENT_TYPE).to(stroom.pipeline.XsltStoreImpl.class);
final Multibinder<ScheduledJobs> jobs = Multibinder.newSetBinder(binder(), ScheduledJobs.class);
jobs.addBinding().to(PipelineJobs.class);
=======
EntityTypeBinder.create(binder())
.bind(PipelineDoc.DOCUMENT_TYPE, PipelineStoreImpl.class)
.bind(TextConverterDoc.DOCUMENT_TYPE, stroom.pipeline.TextConverterStoreImpl.class)
.bind(XsltDoc.DOCUMENT_TYPE, stroom.pipeline.XsltStoreImpl.class);
// Provide object info to the logging service.
ObjectInfoProviderBinder.create(binder())
.bind(Doc.class, DocObjectInfoProvider.class)
.bind(PipelineDoc.class, PipelineDocObjectInfoProvider.class);
>>>>>>>
final Multibinder<ScheduledJobs> jobs = Multibinder.newSetBinder(binder(), ScheduledJobs.class);
jobs.addBinding().to(PipelineJobs.class);
EntityTypeBinder.create(binder())
.bind(PipelineDoc.DOCUMENT_TYPE, PipelineStoreImpl.class)
.bind(TextConverterDoc.DOCUMENT_TYPE, stroom.pipeline.TextConverterStoreImpl.class)
.bind(XsltDoc.DOCUMENT_TYPE, stroom.pipeline.XsltStoreImpl.class);
// Provide object info to the logging service.
ObjectInfoProviderBinder.create(binder())
.bind(Doc.class, DocObjectInfoProvider.class)
.bind(PipelineDoc.class, PipelineDocObjectInfoProvider.class);
---
<<<<<<<
=======
@JsonInclude(Include.NON_EMPTY)
@XmlRootElement(name = "stroomStatsStore")
@XmlType(name = "StroomStatsStoreDoc", propOrder = {"type", "uuid", "name", "version", "createTime", "updateTime", "createUser", "updateUser", "description", "statisticType", "rollUpType", "precision", "enabled", "config"})
>>>>>>>
@JsonInclude(Include.NON_EMPTY)
<<<<<<<
=======
@XmlElement(name = "description")
@JsonProperty("description")
>>>>>>>
<<<<<<<
=======
@XmlElement(name = "statisticType")
@JsonProperty("statisticType")
>>>>>>>
<<<<<<<
=======
@XmlElement(name = "statisticRollUpType")
@JsonProperty("statisticRollUpType")
>>>>>>>
<<<<<<<
=======
@XmlElement(name = "precision")
@JsonProperty("precision")
>>>>>>>
<<<<<<<
=======
@XmlElement(name = "enabled")
@JsonProperty("enabled")
>>>>>>>
<<<<<<<
=======
@XmlElement(name = "config")
@JsonProperty("config")
>>>>>>>
---
<<<<<<<
import stroom.task.api.TaskHandler;
import stroom.util.lifecycle.jobmanagement.ScheduledJobs;
=======
import stroom.task.api.TaskHandlerBinder;
>>>>>>>
import stroom.task.api.TaskHandlerBinder;
import stroom.util.lifecycle.jobmanagement.ScheduledJobs;
<<<<<<<
final MapBinder<String, Object> entityServiceByTypeBinder = MapBinder.newMapBinder(binder(), String.class, Object.class);
entityServiceByTypeBinder.addBinding(DashboardDoc.DOCUMENT_TYPE).to(stroom.dashboard.DashboardStoreImpl.class);
// final Multibinder<FindService> findServiceBinder = Multibinder.newSetBinder(binder(), FindService.class);
// findServiceBinder.addBinding().to(stroom.dashboard.DashboardStoreImpl.class);
final Multibinder<ScheduledJobs> jobs = Multibinder.newSetBinder(binder(), ScheduledJobs.class);
jobs.addBinding().to(DashboardJobs.class);
=======
EntityTypeBinder.create(binder())
.bind(DashboardDoc.DOCUMENT_TYPE, DashboardStoreImpl.class);
>>>>>>>
final Multibinder<ScheduledJobs> jobs = Multibinder.newSetBinder(binder(), ScheduledJobs.class);
jobs.addBinding().to(DashboardJobs.class);
EntityTypeBinder.create(binder())
.bind(DashboardDoc.DOCUMENT_TYPE, DashboardStoreImpl.class);
---
<<<<<<<
import stroom.index.shared.IndexVolume.VolumeUseState;
=======
import stroom.index.shared.IndexVolumeGroup;
import stroom.security.api.SecurityContext;
>>>>>>>
import stroom.index.shared.IndexVolume.VolumeUseState;
import stroom.index.shared.IndexVolumeGroup;
---
<<<<<<<
import stroom.entity.server.util.SQLBuilder;
import stroom.entity.server.util.SQLUtil;
import stroom.entity.server.util.StroomEntityManager;
=======
import stroom.entity.server.util.HqlBuilder;
import stroom.entity.server.util.SqlBuilder;
import stroom.entity.server.util.StroomEntityManager;
>>>>>>>
import stroom.entity.server.util.StroomEntityManager;
import stroom.entity.server.util.HqlBuilder;
import stroom.entity.server.util.SqlBuilder;
import stroom.entity.server.util.StroomEntityManager;
---
<<<<<<<
final String uuid = req.getParameter(UUID_ARG);
boolean found = false;
if (uuid != null) {
final ResourceKey resourceKey = new ResourceKey(null, uuid);
try {
final Path file = getTempFile(resourceKey);
if (file != null && Files.isRegularFile(file)) {
if (FileUtil.getCanonicalPath(file).toLowerCase().endsWith(".zip")) {
resp.setContentType("application/zip");
} else {
resp.setContentType("application/octet-stream");
=======
// Get the current request.
final HttpServletRequest originalRequest = httpServletRequestHolder.get();
// Set this request.
httpServletRequestHolder.set(req);
try {
final String uuid = req.getParameter(UUID_ARG);
boolean found = false;
if (uuid != null) {
final ResourceKey resourceKey = new ResourceKey(null, uuid);
try {
final Path file = getTempFile(resourceKey);
if (file != null && Files.isRegularFile(file)) {
if (file.toAbsolutePath().toString().toLowerCase().endsWith(".zip")) {
resp.setContentType("application/zip");
} else {
resp.setContentType("application/octet-stream");
}
resp.getOutputStream().write(Files.readAllBytes(file));
found = true;
>>>>>>>
// Get the current request.
final HttpServletRequest originalRequest = httpServletRequestHolder.get();
// Set this request.
httpServletRequestHolder.set(req);
try {
final String uuid = req.getParameter(UUID_ARG);
boolean found = false;
if (uuid != null) {
final ResourceKey resourceKey = new ResourceKey(null, uuid);
try {
final Path file = getTempFile(resourceKey);
if (file != null && Files.isRegularFile(file)) {
if (FileUtil.getCanonicalPath(file).toLowerCase().endsWith(".zip")) {
resp.setContentType("application/zip");
} else {
resp.setContentType("application/octet-stream");
}
resp.getOutputStream().write(Files.readAllBytes(file));
found = true;
---
<<<<<<<
import stroom.explorer.shared.SharedDocRef;
=======
import stroom.entity.shared.SharedDocRef;
import stroom.pipeline.shared.StepLocation;
import stroom.streamstore.shared.StreamType;
>>>>>>>
import stroom.explorer.shared.SharedDocRef;
import stroom.pipeline.shared.StepLocation;
<<<<<<<
private final long streamId;
private final long eventId;
=======
private final Long streamId;
>>>>>>>
private final long streamId;
<<<<<<<
private final String childStreamType;
=======
private final StreamType childStreamType;
private final StepLocation stepLocation;
>>>>>>>
private final String childStreamType;
private final StepLocation stepLocation;
<<<<<<<
private BeginPipelineSteppingEvent(final long streamId, final long eventId, final Long childStreamId,
final String childStreamType, final SharedDocRef pipelineRef) {
=======
private BeginPipelineSteppingEvent(final Long streamId, final Long childStreamId,
final StreamType childStreamType, final StepLocation stepLocation, final SharedDocRef pipelineRef) {
>>>>>>>
private BeginPipelineSteppingEvent(final long streamId,
final Long childStreamId,
final String childStreamType,
final StepLocation stepLocation,
final SharedDocRef pipelineRef) {
<<<<<<<
public static void fire(final HasHandlers source, final long streamId, final long eventId, final Long childStreamId,
final String childStreamType, final SharedDocRef pipelineRef) {
source.fireEvent(new BeginPipelineSteppingEvent(streamId, eventId, childStreamId, childStreamType, pipelineRef));
=======
public static void fire(final HasHandlers source, final Long streamId, final Long childStreamId,
final StreamType childStreamType, final StepLocation stepLocation, final SharedDocRef pipelineRef) {
source.fireEvent(new BeginPipelineSteppingEvent(streamId, childStreamId, childStreamType, stepLocation, pipelineRef));
>>>>>>>
public static void fire(final HasHandlers source,
final long streamId,
final Long childStreamId,
final String childStreamType,
final StepLocation stepLocation,
final SharedDocRef pipelineRef) {
source.fireEvent(new BeginPipelineSteppingEvent(streamId, childStreamId, childStreamType, stepLocation, pipelineRef));
<<<<<<<
public long getEventId() {
return eventId;
}
=======
>>>>>>>
---
<<<<<<<
import stroom.meta.shared.Meta;
=======
import org.junit.jupiter.api.extension.ExtendWith;
import stroom.data.meta.shared.Data;
>>>>>>>
import org.junit.jupiter.api.extension.ExtendWith;
import stroom.meta.shared.Meta;
---
<<<<<<<
import stroom.document.client.event.DirtyEvent;
import stroom.document.client.event.DirtyEvent.DirtyHandler;
import stroom.document.client.event.HasDirtyHandlers;
=======
import stroom.dispatch.client.ClientDispatchAsync;
import stroom.entity.client.event.DirtyEvent;
import stroom.entity.client.event.DirtyEvent.DirtyHandler;
import stroom.entity.client.event.HasDirtyHandlers;
import stroom.pipeline.shared.FetchDocRefsAction;
>>>>>>>
import stroom.dispatch.client.ClientDispatchAsync;
import stroom.document.client.event.DirtyEvent;
import stroom.document.client.event.DirtyEvent.DirtyHandler;
import stroom.document.client.event.HasDirtyHandlers;
import stroom.pipeline.shared.FetchDocRefsAction;
---
<<<<<<<
=======
import stroom.io.SeekableOutputStream;
import stroom.streamstore.server.StreamException;
import stroom.streamstore.server.StreamTarget;
import stroom.streamstore.shared.Stream;
import stroom.streamstore.shared.StreamType;
import stroom.streamstore.shared.StreamVolume;
import stroom.util.logging.StroomLogger;
import stroom.feed.MetaMap;
>>>>>>>
---
<<<<<<<
import org.springframework.transaction.TransactionException;
=======
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
>>>>>>>
import org.springframework.transaction.TransactionException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
---
<<<<<<<
=======
@JsonInclude(Include.NON_EMPTY)
@XmlRootElement(name = "xslt")
@XmlType(name = "XsltDoc", propOrder = {"type", "uuid", "name", "version", "createTime", "updateTime", "createUser", "updateUser", "description"})
>>>>>>>
@JsonInclude(Include.NON_EMPTY)
---
<<<<<<<
import stroom.dictionary.server.DictionaryStore;
=======
import stroom.dictionary.shared.DictionaryService;
import stroom.entity.server.MarshalOptions;
import stroom.entity.server.SupportsCriteriaLogging;
>>>>>>>
import stroom.dictionary.server.DictionaryStore;
import stroom.dictionary.shared.DictionaryService;
import stroom.entity.server.MarshalOptions;
import stroom.entity.server.SupportsCriteriaLogging;
<<<<<<<
=======
} finally {
securityContext.restorePermissions();
marshalOptions.setDisabled(false);
>>>>>>>
} finally {
marshalOptions.setDisabled(false);
<<<<<<<
if (streamProcessor != null && criteria.getFetchSet().contains(PipelineEntity.ENTITY_TYPE)) {
// We will try and load the pipeline but will ignore permission
// failures as we don't mind users seeing streams even if they do
// not have visibility of the pipeline that created the stream.
try {
streamProcessor.setPipeline(pipelineService.load(streamProcessor.getPipeline()));
} catch (final PermissionException e) {
streamProcessor.setPipeline(null);
// The current user might not have permission to see this
// pipeline.
LOGGER.debug(e.getMessage());
throw e;
}
=======
private static <T> Optional<T> safeOptional(final Supplier<T> supplier) {
Optional<T> optional = Optional.empty();
try {
optional = Optional.ofNullable(supplier.get());
} catch (final Exception e) {
LOGGER.debug(e.getMessage());
>>>>>>>
private static <T> Optional<T> safeOptional(final Supplier<T> supplier) {
Optional<T> optional = Optional.empty();
try {
optional = Optional.ofNullable(supplier.get());
} catch (final Exception e) {
LOGGER.debug(e.getMessage());
<<<<<<<
=======
@Override
public void appendCriteria(final List<BaseAdvancedQueryItem> items, final FindStreamAttributeMapCriteria criteria) {
streamStore.appendCriteria(items, criteria.getFindStreamCriteria());
}
private static class EntityRef {
private final String type;
private final long id;
private EntityRef(final String type, final long id) {
this.type = type;
this.id = id;
}
@Override
public boolean equals(final Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
final EntityRef entityRef = (EntityRef) o;
return id == entityRef.id &&
Objects.equals(type, entityRef.type);
}
@Override
public int hashCode() {
return Objects.hash(type, id);
}
}
>>>>>>>
private static class EntityRef {
private final String type;
private final long id;
private EntityRef(final String type, final long id) {
this.type = type;
this.id = id;
}
@Override
public boolean equals(final Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
final EntityRef entityRef = (EntityRef) o;
return id == entityRef.id &&
Objects.equals(type, entityRef.type);
}
@Override
public int hashCode() {
return Objects.hash(type, id);
}
}
---
<<<<<<<
import java.io.IOException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
=======
import stroom.util.logging.StroomLogger;
>>>>>>>
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
---
<<<<<<<
import javax.annotation.Resource;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.UUID;
import java.util.function.LongSupplier;
import java.util.function.Supplier;
=======
import javax.annotation.Resource;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.UUID;
>>>>>>>
import javax.annotation.Resource;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.UUID;
<<<<<<<
=======
private static final String[] NON_ENV_VARS = {"feed", "pipeline", "streamId", "searchId", "node", "year", "month",
"day", "hour", "minute", "second", "millis", "ms", "uuid", "fileName", "fileStem", "fileExtension",
StroomProperties.STROOM_TEMP};
private static final Set<String> NON_ENV_VARS_SET = Collections
.unmodifiableSet(new HashSet<>(Arrays.asList(NON_ENV_VARS)));
public String replaceAll(String path) {
path = replaceContextVars(path);
path = replaceTimeVars(path);
path = replaceUUIDVars(path);
path = replaceSystemProperties(path);
return path;
}
public String replaceContextVars(String path) {
if (feedHolder != null && feedHolder.getFeed() != null) {
path = replace(path, "feed", feedHolder.getFeed().getName());
}
if (pipelineHolder != null && pipelineHolder.getPipeline() != null) {
path = replace(path, "pipeline", pipelineHolder.getPipeline().getName());
}
if (streamHolder != null && streamHolder.getStream() != null) {
path = replace(path, "streamId", String.valueOf(streamHolder.getStream().getId()));
}
if (searchIdHolder != null && searchIdHolder.getSearchId() != null) {
path = replace(path, "searchId", String.valueOf(searchIdHolder.getSearchId()));
}
if (nodeCache != null) {
path = replace(path, "node", String.valueOf(nodeCache.getDefaultNode().getName()));
}
return path;
}
>>>>>>>
<<<<<<<
private static String replace(final String path,
final String type,
final Supplier<String> replacementSupplier) {
=======
static String replace(final String path, final String type, final String replacement) {
>>>>>>>
private static String replace(final String path,
final String type,
final Supplier<String> replacementSupplier) {
<<<<<<<
newPath = newPath.substring(0, start) + replacementSupplier.get() + newPath.substring(end);
start = newPath.indexOf(param, end);
=======
newPath = newPath.substring(0, start) + replacement + newPath.substring(end);
start = newPath.indexOf(param, start);
>>>>>>>
newPath = newPath.substring(0, start) + replacementSupplier.get() + newPath.substring(end);
start = newPath.indexOf(param, start);
---
<<<<<<<
import stroom.dashboard.expression.v1.FieldIndexMap;
import stroom.docref.DocRef;
import stroom.pipeline.errorhandler.ErrorReceiver;
import stroom.query.common.v2.Coprocessor;
import stroom.search.extraction.ExtractionTask.ResultReceiver;
import stroom.task.shared.ThreadPool;
import stroom.task.shared.ThreadPoolImpl;
import stroom.util.shared.Severity;
import stroom.util.task.taskqueue.AbstractTaskProducer;
=======
import stroom.query.api.v2.DocRef;
import stroom.search.coprocessor.CompletionState;
import stroom.search.coprocessor.Error;
import stroom.search.coprocessor.Receiver;
import stroom.search.coprocessor.ReceiverImpl;
import stroom.search.coprocessor.Values;
import stroom.security.SecurityContext;
import stroom.security.SecurityHelper;
import stroom.task.server.ExecutorProvider;
import stroom.task.server.ThreadPoolImpl;
import stroom.util.shared.HasTerminate;
import stroom.util.shared.ThreadPool;
>>>>>>>
import stroom.docref.DocRef;
import stroom.search.coprocessor.CompletionState;
import stroom.search.coprocessor.Error;
import stroom.search.coprocessor.Receiver;
import stroom.search.coprocessor.ReceiverImpl;
import stroom.search.coprocessor.Values;
import stroom.security.api.SecurityContext;
import stroom.task.api.ExecutorProvider;
import stroom.task.shared.ThreadPool;
import stroom.task.shared.ThreadPoolImpl;
<<<<<<<
private final long now = System.currentTimeMillis();
private final AtomicInteger threadsUsed = new AtomicInteger();
private final int maxThreadsPerTask;
private final AtomicInteger tasksTotal = new AtomicInteger();
private final AtomicInteger tasksCompleted = new AtomicInteger();
private final CountDownLatch completionLatch = new CountDownLatch(1);
private final FieldIndexMap extractionFieldIndexMap;
private final Map<DocRef, Set<Coprocessor>> extractionCoprocessorsMap;
private final ErrorReceiver errorReceiver;
=======
private final HasTerminate clusterSearchTask;
private final Receiver parentReceiver;
private final Map<DocRef, Receiver> receivers;
>>>>>>>
private final Receiver parentReceiver;
private final Map<DocRef, Receiver> receivers;
<<<<<<<
private final AtomicBoolean completedEventMapping = new AtomicBoolean();
=======
private final CompletionState streamMapCreatorCompletionState = new CompletionState();
>>>>>>>
private final CompletionState streamMapCreatorCompletionState = new CompletionState();
<<<<<<<
private volatile boolean finishedAddingTasks;
public ExtractionTaskProducer(final TaskExecutor taskExecutor,
final StreamMapCreator streamMapCreator,
final LinkedBlockingQueue<Values> storedData,
final FieldIndexMap extractionFieldIndexMap,
final Map<DocRef, Set<Coprocessor>> extractionCoprocessorsMap,
final ErrorReceiver errorReceiver,
final int maxThreadsPerTask,
final Executor executor,
final Provider<ExtractionTaskHandler> handlerProvider) {
super(taskExecutor, executor);
this.extractionFieldIndexMap = extractionFieldIndexMap;
this.extractionCoprocessorsMap = extractionCoprocessorsMap;
this.errorReceiver = errorReceiver;
this.maxThreadsPerTask = maxThreadsPerTask;
=======
private final Topic<Values> topic;
ExtractionTaskProducer(final TaskExecutor taskExecutor,
final HasTerminate hasTerminate,
final StreamMapCreator streamMapCreator,
final Receiver parentReceiver,
final Map<DocRef, Receiver> receivers,
final int maxStoredDataQueueSize,
final int maxThreadsPerTask,
final ExecutorProvider executorProvider,
final Provider<ExtractionTaskHandler> handlerProvider,
final SecurityContext securityContext) {
super(taskExecutor, maxThreadsPerTask, executorProvider.getExecutor(THREAD_POOL));
this.parentReceiver = parentReceiver;
this.receivers = receivers;
this.clusterSearchTask = hasTerminate;
>>>>>>>
private final Topic<Values> topic;
ExtractionTaskProducer(final TaskExecutor taskExecutor,
final StreamMapCreator streamMapCreator,
final Receiver parentReceiver,
final Map<DocRef, Receiver> receivers,
final int maxStoredDataQueueSize,
final int maxThreadsPerTask,
final ExecutorProvider executorProvider,
final Provider<ExtractionTaskHandler> handlerProvider,
final SecurityContext securityContext) {
super(taskExecutor, maxThreadsPerTask, executorProvider.getExecutor(THREAD_POOL));
this.parentReceiver = parentReceiver;
this.receivers = receivers;
<<<<<<<
=======
// Create a queue to receive values and store them for asynchronous processing.
topic = new LinkedBlockingQueueTopic<>(maxStoredDataQueueSize, hasTerminate);
// // Group coprocessors by extraction pipeline.
// final Map<DocRef, Set<NewCoprocessor>> map = new HashMap<>();
// coprocessors.getSet().forEach(coprocessor ->
// map.computeIfAbsent(coprocessor.getSettings().getExtractionPipeline(), k ->
// new HashSet<>()).add(coprocessor));
//
// receiverMap = map.entrySet().stream().collect(Collectors.toMap(Entry::getKey, e -> {
// Set<NewCoprocessor> coprocessorSet = e.getValue();
//
// // Create a receiver that will send data to all coprocessors.
// Receiver receiver;
// if (e.getValue().size() == 1) {
// receiver = coprocessorSet.iterator().next();
// } else {
// receiver = new MultiReceiver(coprocessorSet);
// }
// return receiver;
// }));
>>>>>>>
// Create a queue to receive values and store them for asynchronous processing.
topic = new LinkedBlockingQueueTopic<>(maxStoredDataQueueSize);
// // Group coprocessors by extraction pipeline.
// final Map<DocRef, Set<NewCoprocessor>> map = new HashMap<>();
// coprocessors.getSet().forEach(coprocessor ->
// map.computeIfAbsent(coprocessor.getSettings().getExtractionPipeline(), k ->
// new HashSet<>()).add(coprocessor));
//
// receiverMap = map.entrySet().stream().collect(Collectors.toMap(Entry::getKey, e -> {
// Set<NewCoprocessor> coprocessorSet = e.getValue();
//
// // Create a receiver that will send data to all coprocessors.
// Receiver receiver;
// if (e.getValue().size() == 1) {
// receiver = coprocessorSet.iterator().next();
// } else {
// receiver = new MultiReceiver(coprocessorSet);
// }
// return receiver;
// }));
<<<<<<<
try {
while (!completedEventMapping.get()) {
=======
// Elevate permissions so users with only `Use` feed permission can `Read` streams.
try (final SecurityHelper securityHelper = SecurityHelper.elevate(securityContext)) {
while (!streamMapCreatorCompletionState.isComplete() && !hasTerminate.isTerminated()) {
>>>>>>>
try {
while (!streamMapCreatorCompletionState.isComplete() && !Thread.currentThread().isInterrupted()) {
<<<<<<<
final Values values = storedData.take();
if (values.complete()) {
// If we did not get any values then there are no more to get if the search task producer is complete.
completedEventMapping.set(true);
} else {
// If we have some values then map them.
streamMapCreator.addEvent(streamEventMap, values.getValues());
=======
final Values values = topic.get();
if (values != null) {
// If we have some values then map them.
streamMapCreator.addEvent(streamEventMap, values.getValues());
>>>>>>>
final Values values = topic.get();
if (values != null) {
// If we have some values then map them.
streamMapCreator.addEvent(streamEventMap, values.getValues());
<<<<<<<
// Tell the supplied executor that we are ready to deliver tasks.
signalAvailable();
} catch (final InterruptedException e) {
=======
} catch (final RuntimeException e) {
>>>>>>>
} catch (final RuntimeException e) {
<<<<<<<
completedEventMapping.set(true);
// Continue to interrupt this thread.
Thread.currentThread().interrupt();
} catch (final RuntimeException e) {
LOGGER.error(e.getMessage(), e);
completedEventMapping.set(true);
=======
receivers.values().forEach(receiver -> {
receiver.getErrorConsumer().accept(new Error(e.getMessage(), e));
receiver.getCompletionCountConsumer().accept(1L);
});
} finally {
// Tell the supplied executor that we are ready to deliver tasks.
signalAvailable();
>>>>>>>
receivers.values().forEach(receiver -> {
receiver.getErrorConsumer().accept(new Error(e.getMessage(), e));
receiver.getCompletionCountConsumer().accept(1L);
});
} finally {
// Tell the supplied executor that we are ready to deliver tasks.
signalAvailable();
<<<<<<<
private void terminate() {
finishedAddingTasks = true;
streamEventMap.clear();
// Drain the queue and increment the complete task count.
while (taskQueue.poll() != null) {
tasksCompleted.getAndIncrement();
}
completionLatch.countDown();
}
=======
>>>>>>>
<<<<<<<
final boolean completedEventMapping = this.completedEventMapping.get();
=======
final boolean completedEventMapping = this.streamMapCreatorCompletionState.isComplete();
>>>>>>>
final boolean completedEventMapping = this.streamMapCreatorCompletionState.isComplete();
<<<<<<<
tasksCreated++;
=======
tasksCreated.incrementAndGet();
>>>>>>>
tasksCreated.incrementAndGet();
---
<<<<<<<
import stroom.svg.client.SvgPresets;
=======
import stroom.svg.client.SvgIcon;
import stroom.svg.client.SvgPreset;
>>>>>>>
import stroom.svg.client.SvgPresets;
import stroom.svg.client.SvgIcon;
---
<<<<<<<
import io.grpc.ManagedChannelBuilder;
import io.grpc.netty.NettyChannelBuilder;
import org.onosproject.event.AbstractListenerManager;
import org.onosproject.grpc.api.GrpcChannelId;
import org.onosproject.grpc.api.GrpcController;
=======
import org.apache.felix.scr.annotations.Activate;
import org.apache.felix.scr.annotations.Component;
import org.apache.felix.scr.annotations.Deactivate;
import org.apache.felix.scr.annotations.Reference;
import org.apache.felix.scr.annotations.ReferenceCardinality;
import org.apache.felix.scr.annotations.Service;
import org.onosproject.grpc.ctl.AbstractGrpcClientController;
>>>>>>>
import org.onosproject.grpc.ctl.AbstractGrpcClientController;
<<<<<<<
@Reference(cardinality = ReferenceCardinality.MANDATORY)
private GrpcController grpcController;
@Reference(cardinality = ReferenceCardinality.MANDATORY)
=======
@Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
>>>>>>>
@Reference(cardinality = ReferenceCardinality.MANDATORY)
---
<<<<<<<
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
=======
import org.junit.Assert;
import org.junit.Test;
import org.springframework.aop.framework.Advised;
>>>>>>>
import org.junit.Assert;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.aop.framework.Advised;
---
<<<<<<<
import stroom.query.api.Field;
import stroom.query.api.FieldBuilder;
import stroom.query.api.Format;
import stroom.query.api.OffsetRange;
import stroom.query.api.Query;
import stroom.query.api.QueryKey;
import stroom.query.api.Result;
import stroom.query.api.ResultRequest;
import stroom.query.api.Row;
import stroom.query.api.SearchRequest;
import stroom.query.api.SearchResponse;
import stroom.query.api.TableResult;
import stroom.query.api.TableSettings;
import stroom.search.server.SearchResource;
=======
>>>>>>>
import stroom.query.api.Field;
import stroom.query.api.FieldBuilder;
import stroom.query.api.Format;
import stroom.query.api.OffsetRange;
import stroom.query.api.Query;
import stroom.query.api.QueryKey;
import stroom.query.api.Result;
import stroom.query.api.ResultRequest;
import stroom.query.api.Row;
import stroom.query.api.SearchRequest;
import stroom.query.api.SearchResponse;
import stroom.query.api.TableResult;
import stroom.query.api.TableSettings;
import stroom.search.server.SearchResource;
<<<<<<<
import java.time.ZoneOffset;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
public class TestEventSearch extends AbstractCoreIntegrationTest {
=======
import java.util.*;
public class TestEventSearch extends AbstractSearchTest {
>>>>>>>
import java.time.ZoneOffset;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
public class TestEventSearch extends AbstractSearchTest {
---
<<<<<<<
private static class TaskState {
private final Task<?> task;
private final Monitor monitor;
TaskState(final Task<?> task, final Monitor monitor) {
this.task = task;
this.monitor = monitor;
}
}
=======
static boolean isTerminated() {
final TaskState taskState = currentState();
return taskState != null && taskState.task.isTerminated();
}
static void terminate() {
final TaskState taskState = currentState();
if (taskState != null) {
taskState.task.terminate();
}
}
>>>>>>>
static boolean isTerminated() {
final TaskState taskState = currentState();
return taskState != null && taskState.task.isTerminated();
}
static void terminate() {
final TaskState taskState = currentState();
if (taskState != null) {
taskState.task.terminate();
}
}
private static class TaskState {
private final Task<?> task;
private final Monitor monitor;
TaskState(final Task<?> task, final Monitor monitor) {
this.task = task;
this.monitor = monitor;
}
}
---
<<<<<<<
private static final ViewDataResource VIEW_DATA_RESOURCE = GWT.create(ViewDataResource.class);
=======
>>>>>>>
---
<<<<<<<
import stroom.security.shared.User;
=======
>>>>>>>
<<<<<<<
import stroom.security.shared.UserService;
import stroom.util.task.ServerTask;
=======
import stroom.security.shared.UserStatus;
import stroom.util.cert.CertificateUtil;
import stroom.util.shared.EqualsUtil;
>>>>>>>
import stroom.util.task.ServerTask;
<<<<<<<
return token != null
&& (token instanceof JWTAuthenticationToken);
=======
return token != null && (token instanceof UsernamePasswordToken ||
token instanceof CertificateAuthenticationToken ||
token instanceof JWTAuthenticationToken);
>>>>>>>
return token != null
&& (token instanceof JWTAuthenticationToken);
<<<<<<<
return new SimpleAuthenticationInfo(user, user.getPasswordHash(), getName());
=======
check(user);
return new SimpleAuthenticationInfo(UserRefFactory.create(user), user.getPasswordHash(), getName());
>>>>>>>
return new SimpleAuthenticationInfo(UserRefFactory.create(user), user.getPasswordHash(), getName());
<<<<<<<
=======
private AuthenticationInfo authenticateWithCertificate(final CertificateAuthenticationToken token)
throws AuthenticationException {
try {
if (!allowCertificateAuthentication()) {
throw new EntityServiceException("Certificate authentication is not allowed");
}
final Pattern pattern = getPattern();
if (pattern == null) {
throw new EntityServiceException("No valid certificateDNPattern found");
}
final String dn = (String) token.getCredentials();
final String username = CertificateUtil.extractUserIdFromDN(dn, pattern);
if (LOGGER.isDebugEnabled()) {
final String cn = CertificateUtil.extractCNFromDN(dn);
LOGGER.debug("authenticate() - dn=" + dn + ", cn=" + cn + ", userId=" + username);
}
final User user = loadUserByUsername(username);
if (StringUtils.hasText(username) && user == null) {
throw new EntityServiceException(username + " does not exist");
}
if (user != null) {
check(user);
return new SimpleAuthenticationInfo(UserRefFactory.create(user), user.getPasswordHash(), getName());
}
} catch (final AuthenticationException e) {
throw e;
} catch (final Exception e) {
LOGGER.debug(e.getMessage(), e);
throw new BadCredentialsException(e.getMessage());
}
return null;
}
private AuthenticationInfo authenticateWithUsernamePassword(final UsernamePasswordToken token)
throws AuthenticationException {
try {
final String username = token.getUsername();
// Null username is invalid
if (username == null) {
throw new AccountException("Null user names are not allowed by this realm.");
}
final User user = loadUserByUsername(username);
if (StringUtils.hasText(username) && user == null) {
throw new BadCredentialsException("Bad Credentials");
}
if (user != null) {
check(user);
return new SimpleAuthenticationInfo(UserRefFactory.create(user), user.getPasswordHash(), getName());
}
} catch (final AuthenticationException e) {
throw e;
} catch (final Exception e) {
LOGGER.debug(e.getMessage(), e);
throw new BadCredentialsException(e.getMessage());
}
return null;
}
private boolean allowCertificateAuthentication() {
return stroomPropertyService.getBooleanProperty("stroom.security.allowCertificateAuthentication", false);
}
private Pattern getPattern() {
final String regex = stroomPropertyService.getProperty("stroom.security.certificateDNPattern");
if (!EqualsUtil.isEquals(cachedRegex, regex)) {
cachedRegex = regex;
cachedPattern = null;
if (regex != null) {
try {
cachedPattern = Pattern.compile(regex);
} catch (final RuntimeException e) {
final String message = "Problem compiling certificateDNPattern regex: " + e.getMessage();
LOGGER.error(message, e);
throw new EntityServiceException(message);
}
}
}
return cachedPattern;
}
private void check(final User user) {
if (UserStatus.LOCKED.equals(user.getStatus())) {
throw new LockedException("User account is locked");
} else if (UserStatus.DISABLED.equals(user.getStatus())) {
throw new DisabledException("User account has been deactivated");
} else if (UserStatus.EXPIRED.equals(user.getStatus())) {
throw new AccountExpiredException("User account has expired");
} else if (!UserStatus.ENABLED.equals(user.getStatus())) {
throw new DisabledException("User account is not enabled");
}
}
>>>>>>>
<<<<<<<
securityContext.popUser();
return UserRef.create(userGroup);
=======
return newUserGroup;
>>>>>>>
securityContext.popUser();
return newUserGroup;
---
<<<<<<<
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
=======
import com.google.common.collect.Maps;
import org.springframework.util.StringUtils;
>>>>>>>
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.collect.Maps;
import org.springframework.util.StringUtils;
<<<<<<<
import stroom.util.shared.Monitor;
import stroom.util.thread.BufferFactory;
=======
import stroom.util.zip.StroomHeaderArguments;
>>>>>>>
import stroom.util.thread.BufferFactory;
<<<<<<<
public final static long DEFAULT_MAX_STREAM_SIZE = ModelStringUtil.parseIECByteSizeString("10G");
private static final Logger LOGGER = LoggerFactory.getLogger(StroomZipRepositoryProcessor.class);
=======
public final static int DEFAULT_MAX_FILE_SCAN = 10000;
private final static long DEFAULT_MAX_STREAM_SIZE = ModelStringUtil.parseIECByteSizeString("10G");
private final StroomLogger LOGGER = StroomLogger.getLogger(StroomZipRepositoryProcessor.class);
>>>>>>>
public final static int DEFAULT_MAX_FILE_SCAN = 10000;
private final static long DEFAULT_MAX_STREAM_SIZE = ModelStringUtil.parseIECByteSizeString("10G");
private static final Logger LOGGER = LoggerFactory.getLogger(StroomZipRepositoryProcessor.class);
<<<<<<<
public abstract void startExecutor();
=======
public abstract void processFeedFiles(final StroomZipRepository stroomZipRepository,
final String feed,
final List<Path> fileList);
public abstract byte[] getReadBuffer();
/**
* Process a Stroom zip repository,
*
* @param stroomZipRepository The Stroom zip repository to process.
* @return True is there are more files to process, i.e. we reached our max
* file scan limit.
*/
public boolean process(final StroomZipRepository stroomZipRepository) {
boolean isComplete = true;
if (LOGGER.isDebugEnabled()) {
LOGGER.debug("process() - Scanning " + stroomZipRepository.getRootDir());
}
// Scan all of the zip files in the repository so that we can map zip files to feeds.
try (final Stream<Path> zipFiles = stroomZipRepository.walkZipFiles()) {
final List<Path> filesBatch = zipFiles.limit(maxFileScan).collect(Collectors.toList());
// Quit once we have hit the max
if (filesBatch.size() >= maxFileScan) {
isComplete = false;
if (LOGGER.isDebugEnabled()) {
LOGGER.debug("process() - Hit scan limit of " + maxFileScan);
}
}
LOGGER.info("Processing %s files, isComplete: [%s]",
filesBatch.size(), Boolean.valueOf(isComplete).toString());
//build the map of feed -> files, only scan a limited number of files
Map<String, List<Path>> feedToFilesMap = filesBatch.parallelStream()
.filter(file -> !taskContext.isTerminated()) //do no more work if we are terminated
.map(file -> fileScan(stroomZipRepository, file))
.filter(Optional::isPresent)
.map(Optional::get)
.collect(Collectors.groupingBy(
Map.Entry::getKey,
Collectors.mapping(
Entry::getValue,
Collectors.toList())));
if (LOGGER.isDebugEnabled()) {
int fileCount = feedToFilesMap.values().stream()
.mapToInt(List::size)
.sum();
LOGGER.debug("Found %s feeds across %s files", feedToFilesMap.keySet().size(), fileCount);
}
final Comparator<Path> fileComparator = (f1, f2) -> {
if (f1 == null || f2 == null || f1.getFileName().toString() == null || f2.getFileName().toString() == null) {
return 0;
}
return f1.getFileName().toString().compareTo(f2.getFileName().toString());
};
//spawn a task for each feed->files entry to load the data into a stream
CompletableFuture[] processFeedFilesFutures = feedToFilesMap.entrySet().stream()
.filter(entry -> !taskContext.isTerminated()) //do no more work if we are terminated
.map(entry -> {
final String feedName = entry.getKey();
final List<Path> fileList = new ArrayList<>(entry.getValue());
// Sort the map so the items are processed in order
fileList.sort(fileComparator);
//get the future for loading a list of files into a feed
return createProcessFeedFilesTask(stroomZipRepository, feedName, fileList);
})
.toArray(CompletableFuture[]::new);
try {
//wait for all the sub-tasks to complete
CompletableFuture.allOf(processFeedFilesFutures).get();
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
LOGGER.error("Proxy aggregation thread interrupted", e);
} catch (ExecutionException e) {
throw new RuntimeException(String.format("Error waiting for %s proxy aggregation jobs to complete",
processFeedFilesFutures.length), e);
}
if (LOGGER.isDebugEnabled()) {
LOGGER.debug("process() - Completed");
}
} catch (final IOException e) {
LOGGER.error(e.getMessage(), e);
}
return isComplete;
}
private CompletableFuture<Void> createProcessFeedFilesTask(final StroomZipRepository stroomZipRepository,
final String feed,
final List<Path> fileList) {
return CompletableFuture.runAsync(
() -> {
if (!taskContext.isTerminated()) {
taskContext.setName("Process feed files");
taskContext.info(String.format("Processing %s files for feed %s", fileList.size(), feed));
processFeedFiles(stroomZipRepository, feed, fileList);
} else {
LOGGER.info("run() - Quit Feed Aggregation %s", feed);
}
},
executor);
}
/**
* Peek at the stream to get the header file feed
*/
private Optional<Entry<String, Path>> fileScan(final StroomZipRepository stroomZipRepository, final Path file) {
//only a single thread is working on this file so we don't need any thread safety
StroomZipFile stroomZipFile = null;
try {
stroomZipFile = new StroomZipFile(file);
final Set<String> baseNameSet = stroomZipFile.getStroomZipNameSet().getBaseNameSet();
if (baseNameSet.isEmpty()) {
stroomZipRepository.addErrorMessage(stroomZipFile, "Unable to find any entry??", true);
return Optional.empty();
}
final String anyBaseName = baseNameSet.iterator().next();
>>>>>>>
public abstract void processFeedFiles(final StroomZipRepository stroomZipRepository,
final String feed,
final List<Path> fileList);
/**
* Process a Stroom zip repository,
*
* @param stroomZipRepository The Stroom zip repository to process.
* @return True is there are more files to process, i.e. we reached our max
* file scan limit.
*/
public boolean process(final StroomZipRepository stroomZipRepository) {
boolean isComplete = true;
if (LOGGER.isDebugEnabled()) {
LOGGER.debug("process() - Scanning " + stroomZipRepository.getRootDir());
}
// Scan all of the zip files in the repository so that we can map zip files to feeds.
try (final Stream<Path> zipFiles = stroomZipRepository.walkZipFiles()) {
final List<Path> filesBatch = zipFiles.limit(maxFileScan).collect(Collectors.toList());
// Quit once we have hit the max
if (filesBatch.size() >= maxFileScan) {
isComplete = false;
if (LOGGER.isDebugEnabled()) {
LOGGER.debug("process() - Hit scan limit of " + maxFileScan);
}
}
LOGGER.info("Processing %s files, isComplete: [%s]",
filesBatch.size(), Boolean.valueOf(isComplete).toString());
//build the map of feed -> files, only scan a limited number of files
Map<String, List<Path>> feedToFilesMap = filesBatch.parallelStream()
.filter(file -> !taskContext.isTerminated()) //do no more work if we are terminated
.map(file -> fileScan(stroomZipRepository, file))
.filter(Optional::isPresent)
.map(Optional::get)
.collect(Collectors.groupingBy(
Map.Entry::getKey,
Collectors.mapping(
Entry::getValue,
Collectors.toList())));
if (LOGGER.isDebugEnabled()) {
int fileCount = feedToFilesMap.values().stream()
.mapToInt(List::size)
.sum();
LOGGER.debug("Found %s feeds across %s files", feedToFilesMap.keySet().size(), fileCount);
}
final Comparator<Path> fileComparator = (f1, f2) -> {
if (f1 == null || f2 == null || f1.getFileName().toString() == null || f2.getFileName().toString() == null) {
return 0;
}
return f1.getFileName().toString().compareTo(f2.getFileName().toString());
};
//spawn a task for each feed->files entry to load the data into a stream
CompletableFuture[] processFeedFilesFutures = feedToFilesMap.entrySet().stream()
.filter(entry -> !taskContext.isTerminated()) //do no more work if we are terminated
.map(entry -> {
final String feedName = entry.getKey();
final List<Path> fileList = new ArrayList<>(entry.getValue());
// Sort the map so the items are processed in order
fileList.sort(fileComparator);
//get the future for loading a list of files into a feed
return createProcessFeedFilesTask(stroomZipRepository, feedName, fileList);
})
.toArray(CompletableFuture[]::new);
try {
//wait for all the sub-tasks to complete
CompletableFuture.allOf(processFeedFilesFutures).get();
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
LOGGER.error("Proxy aggregation thread interrupted", e);
} catch (ExecutionException e) {
throw new RuntimeException(String.format("Error waiting for %s proxy aggregation jobs to complete",
processFeedFilesFutures.length), e);
}
if (LOGGER.isDebugEnabled()) {
LOGGER.debug("process() - Completed");
}
} catch (final IOException e) {
LOGGER.error(e.getMessage(), e);
}
return isComplete;
}
private CompletableFuture<Void> createProcessFeedFilesTask(final StroomZipRepository stroomZipRepository,
final String feed,
final List<Path> fileList) {
return CompletableFuture.runAsync(
() -> {
if (!taskContext.isTerminated()) {
taskContext.setName("Process feed files");
taskContext.info(String.format("Processing %s files for feed %s", fileList.size(), feed));
processFeedFiles(stroomZipRepository, feed, fileList);
} else {
LOGGER.info("run() - Quit Feed Aggregation %s", feed);
}
},
executor);
}
/**
* Peek at the stream to get the header file feed
*/
private Optional<Entry<String, Path>> fileScan(final StroomZipRepository stroomZipRepository, final Path file) {
//only a single thread is working on this file so we don't need any thread safety
StroomZipFile stroomZipFile = null;
try {
stroomZipFile = new StroomZipFile(file);
final Set<String> baseNameSet = stroomZipFile.getStroomZipNameSet().getBaseNameSet();
if (baseNameSet.isEmpty()) {
stroomZipRepository.addErrorMessage(stroomZipFile, "Unable to find any entry??", true);
return Optional.empty();
}
final String anyBaseName = baseNameSet.iterator().next();
<<<<<<<
public void sendEntry(final List<? extends StroomStreamHandler> stroomStreamHandlerList, final MetaMap metaMap,
final StroomZipEntry targetEntry) throws IOException {
if (LOGGER.isDebugEnabled()) {
LOGGER.debug("sendEntry() - " + targetEntry);
}
final byte[] buffer = BufferFactory.create();
for (final StroomStreamHandler stroomStreamHandler : stroomStreamHandlerList) {
if (stroomStreamHandler instanceof StroomHeaderStreamHandler) {
((StroomHeaderStreamHandler) stroomStreamHandler).handleHeader(metaMap);
}
stroomStreamHandler.handleEntryStart(targetEntry);
}
final InitialByteArrayOutputStream initialByteArrayOutputStream = new InitialByteArrayOutputStream(buffer);
metaMap.write(initialByteArrayOutputStream, false);
final BufferPos bufferPos = initialByteArrayOutputStream.getBufferPos();
for (final StroomStreamHandler stroomStreamHandler : stroomStreamHandlerList) {
stroomStreamHandler.handleEntryData(bufferPos.getBuffer(), 0, bufferPos.getBufferPos());
}
for (final StroomStreamHandler stroomStreamHandler : stroomStreamHandlerList) {
stroomStreamHandler.handleEntryEnd();
}
}
=======
>>>>>>>
---
<<<<<<<
=======
import stroom.io.SeekableOutputStream;
import stroom.io.StreamCloser;
import stroom.util.io.FileUtil;
import javax.annotation.Nonnull;
>>>>>>>
<<<<<<<
=======
>>>>>>>
<<<<<<<
this.raFile = FileChannel.open(lockFile, StandardOpenOption.CREATE, StandardOpenOption.READ, StandardOpenOption.WRITE);
// Write a marker
mainBuffer.write(BlockGZIPConstants.BLOCK_GZIP_V1_IDENTIFIER);
// At the start of the block file write the block size an empty place
// for the index offset and the marker
// we
mainBuffer.writeLong(blockSize);
// Uncompressed Data Length
mainBuffer.writeLong(0);
// Index POS
mainBuffer.writeLong(0);
// End POS
mainBuffer.writeLong(0);
flushMainBuffer();
// Make sure the streams are closed.
streamCloser.add(mainBuffer).add(indexBuffer).add(raFile);
=======
this.raFile = new RandomAccessFile(lockFile, BlockGZIPConstants.READ_WRITE);
try {
// Write a marker
mainBuffer.write(BlockGZIPConstants.BLOCK_GZIP_V1_IDENTIFIER);
// At the start of the block file write the block size an empty place
// for the index offset and the marker
// we
mainBuffer.writeLong(blockSize);
// Uncompressed Data Length
mainBuffer.writeLong(0);
// Index POS
mainBuffer.writeLong(0);
// End POS
mainBuffer.writeLong(0);
flushMainBuffer();
// Make sure the streams are closed.
streamCloser.add(mainBuffer).add(indexBuffer).add(raFile);
} catch (final IOException e) {
streamCloser.close();
raFile.close();
throw e;
}
>>>>>>>
this.raFile = FileChannel.open(lockFile, StandardOpenOption.CREATE, StandardOpenOption.READ, StandardOpenOption.WRITE);
try {
// Write a marker
mainBuffer.write(BlockGZIPConstants.BLOCK_GZIP_V1_IDENTIFIER);
// At the start of the block file write the block size an empty place
// for the index offset and the marker
// we
mainBuffer.writeLong(blockSize);
// Uncompressed Data Length
mainBuffer.writeLong(0);
// Index POS
mainBuffer.writeLong(0);
// End POS
mainBuffer.writeLong(0);
flushMainBuffer();
// Make sure the streams are closed.
streamCloser.add(mainBuffer).add(indexBuffer).add(raFile);
} catch (final IOException e) {
streamCloser.close();
raFile.close();
throw e;
}
<<<<<<<
final long currentRawBlockStartPos = raFile.position();
=======
final long currentRawBlockStartPos = raFile.getChannel().position();
>>>>>>>
final long currentRawBlockStartPos = raFile.position();
---
<<<<<<<
=======
import com.orientechnologies.orient.core.db.document.ODatabaseDocument;
import com.orientechnologies.orient.core.hook.ORecordHook.RESULT;
import com.orientechnologies.orient.core.hook.ORecordHook.TYPE;
>>>>>>>
import com.orientechnologies.orient.core.db.document.ODatabaseDocument;
import com.orientechnologies.orient.core.hook.ORecordHook.RESULT;
import com.orientechnologies.orient.core.hook.ORecordHook.TYPE;
---
<<<<<<<
final PageResponse pageResponse = new PageResponse(offset,
limited.size(),
(long) fullList.size(),
true);
=======
final PageResponse pageResponse = new PageResponse(offset, limited.size(), (long) fullList.size(), true);
>>>>>>>
final PageResponse pageResponse = new PageResponse(
offset,
limited.size(),
(long) fullList.size(),
true);
<<<<<<<
@SuppressWarnings("checkstyle:needbraces")
=======
public PageResponse getPageResponse() {
return pageResponse;
}
public List<T> getValues() {
return values;
}
public int size() {
return values.size();
}
/**
* @return the first item or null if the list is empty
*/
@JsonIgnore
public T getFirst() {
if (values.size() > 0) {
return values.get(0);
} else {
return null;
}
}
@JsonIgnore
public int getPageStart() {
return (int) pageResponse.getOffset();
}
@JsonIgnore
public int getPageSize() {
if (pageResponse.getTotal() == null) {
return getPageStart() + values.size();
}
return pageResponse.getTotal().intValue();
}
@JsonIgnore
public boolean isExact() {
return pageResponse.isExact();
}
public Stream<T> stream() {
return values.stream();
}
public Stream<T> parallelStream() {
return values.parallelStream();
}
public void forEach(final Consumer<? super T> action) {
values.forEach(action);
}
>>>>>>>
public PageResponse getPageResponse() {
return pageResponse;
}
public List<T> getValues() {
return values;
}
public int size() {
return values.size();
}
/**
* @return the first item or null if the list is empty
*/
@JsonIgnore
public T getFirst() {
if (values.size() > 0) {
return values.get(0);
} else {
return null;
}
}
@JsonIgnore
public int getPageStart() {
return (int) pageResponse.getOffset();
}
@JsonIgnore
public int getPageSize() {
if (pageResponse.getTotal() == null) {
return getPageStart() + values.size();
}
return pageResponse.getTotal().intValue();
}
@JsonIgnore
public boolean isExact() {
return pageResponse.isExact();
}
public Stream<T> stream() {
return values.stream();
}
public Stream<T> parallelStream() {
return values.parallelStream();
}
public void forEach(final Consumer<? super T> action) {
values.forEach(action);
}
@SuppressWarnings("checkstyle:needbraces")
---
<<<<<<<
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
=======
>>>>>>>
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
<<<<<<<
=======
import stroom.entity.server.util.SqlBuilder;
>>>>>>>
import stroom.entity.server.util.SqlBuilder;
<<<<<<<
final String sql = getTempIdSelectSql(age, batchSize);
final long count = batchIdTransactionHelper.insertIntoTempIdTable(tempIdTable, sql.toString());
LOGGER.debug("Inserted {} ids in {}", count, logExecutionTime);
=======
final SqlBuilder sql = getTempIdSelectSql(age, batchSize);
final long count = batchIdTransactionHelper.insertIntoTempIdTable(tempIdTable, sql);
LOGGER.debug("Inserted %s ids in %s", count, logExecutionTime);
>>>>>>>
final SqlBuilder sql = getTempIdSelectSql(age, batchSize);
final long count = batchIdTransactionHelper.insertIntoTempIdTable(tempIdTable, sql);
LOGGER.debug("Inserted {} ids in {}", count, logExecutionTime);
<<<<<<<
final long total) {
info("Deleting {} (total={})", type, total);
=======
final long total) {
info("Deleting %s (total=%s)", type, total);
>>>>>>>
final long total) {
info("Deleting {} (total={})", type, total); |
<<<<<<<
import org.xml.sax.SAXException;
import stroom.docref.DocRef;
import stroom.feed.FeedStore;
=======
import stroom.docref.DocRef;
import stroom.entity.shared.DocRefUtil;
import stroom.feed.FeedService;
import stroom.feed.shared.Feed;
>>>>>>>
import org.xml.sax.SAXException;
import stroom.docref.DocRef;
import stroom.feed.FeedStore;
import stroom.docref.DocRef;
import stroom.entity.shared.DocRefUtil;
import stroom.feed.FeedService;
import stroom.feed.shared.Feed;
<<<<<<<
import stroom.streamstore.shared.StreamTypeNames;
=======
import stroom.refdata.store.MapDefinition;
import stroom.refdata.store.RefDataStore;
import stroom.refdata.store.RefDataStoreProvider;
import stroom.refdata.store.RefDataValue;
import stroom.refdata.store.RefStreamDefinition;
import stroom.refdata.store.StringValue;
import stroom.streamstore.shared.StreamType;
>>>>>>>
import stroom.streamstore.shared.StreamTypeNames;
import stroom.refdata.store.MapDefinition;
import stroom.refdata.store.RefDataStore;
import stroom.refdata.store.RefDataStoreProvider;
import stroom.refdata.store.RefDataValue;
import stroom.refdata.store.RefStreamDefinition;
import stroom.refdata.store.StringValue;
import stroom.streamstore.shared.StreamType;
<<<<<<<
final DocRef pipelineRef = new DocRef(PipelineDoc.DOCUMENT_TYPE, "1234");
final PipelineReference pipelineReference = new PipelineReference(pipelineRef, feed, StreamTypeNames.REFERENCE);
=======
final DocRef pipelineRef = pipelineStore.createDocument(TEST_PIPELINE_1);
final PipelineReference pipelineReference = new PipelineReference(
pipelineRef, DocRefUtil.create(feed), StreamType.REFERENCE.getName());
>>>>>>>
final DocRef pipelineRef = pipelineStore.createDocument(TEST_PIPELINE_1);
final PipelineReference pipelineReference = new PipelineReference(
pipelineRef, DocRefUtil.create(feed), StreamType.REFERENCE.getName());
---
<<<<<<<
=======
OCLASS_ATTRS.add(CustomAttributes.TAB.getName());
OCLASS_ATTRS.add(CustomAttributes.DESCRIPTION.getName());
>>>>>>>
OCLASS_ATTRS.add(CustomAttributes.TAB.getName());
---
<<<<<<<
import stroom.feed.server.FeedService;
=======
import stroom.feed.MetaMapFactory;
import stroom.feed.StroomHeaderArguments;
import stroom.feed.StroomStatusCode;
import stroom.feed.StroomStreamException;
>>>>>>>
import stroom.feed.server.FeedService;
import stroom.feed.MetaMapFactory;
import stroom.feed.StroomHeaderArguments;
import stroom.feed.StroomStatusCode;
import stroom.feed.StroomStreamException;
<<<<<<<
private final MetaMap metaMap;
private final MetaMapFilter metaMapFilter;
=======
private final DataReceiptPolicyChecker dataReceiptPolicyChecker;
>>>>>>>
private final MetaMapFilter metaMapFilter;
<<<<<<<
final MetaMap metaMap,
final MetaMapFilterFactory metaMapFilterFactory) {
=======
final DataReceiptPolicyChecker dataReceiptPolicyChecker) {
>>>>>>>
final MetaMapFilterFactory metaMapFilterFactory) {
<<<<<<<
this.metaMap = metaMap;
this.metaMapFilter = metaMapFilterFactory.create("dataFeed");
=======
this.dataReceiptPolicyChecker = dataReceiptPolicyChecker;
>>>>>>>
this.metaMapFilter = metaMapFilterFactory.create("dataFeed");
<<<<<<<
if (metaMapFilter.filter(metaMap)) {
=======
final MetaMap metaMap = MetaMapFactory.create(request);
// We need to examine the meta map and ensure we aren't dropping or rejecting this data.
final DataReceiptAction dataReceiptAction = dataReceiptPolicyChecker.check(metaMap);
if (DataReceiptAction.REJECT.equals(dataReceiptAction)) {
debug("Rejecting data", metaMap);
throw new StroomStreamException(StroomStatusCode.RECEIPT_POLICY_SET_TO_REJECT_DATA);
} else if (DataReceiptAction.RECEIVE.equals(dataReceiptAction)) {
>>>>>>>
final MetaMap metaMap = MetaMapFactory.create(request);
if (metaMapFilter.filter(metaMap)) {
---
<<<<<<<
=======
import javax.inject.Inject;
import javax.inject.Singleton;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.client.Invocation;
import javax.ws.rs.client.Invocation.Builder;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.Status;
>>>>>>>
<<<<<<<
=======
import java.util.Objects;
import java.util.function.Function;
import java.util.function.Supplier;
>>>>>>>
import java.util.Objects;
import java.util.function.Function;
import java.util.function.Supplier;
---
<<<<<<<
/*
* Copyright (C) 2012-2019 52°North Initiative for Geospatial Open Source
=======
/**
* Copyright (C) 2012-2020 52°North Initiative for Geospatial Open Source
>>>>>>>
/*
* Copyright (C) 2012-2020 52°North Initiative for Geospatial Open Source
---
<<<<<<<
/*
* Copyright (C) 2012-2019 52°North Initiative for Geospatial Open Source
=======
/**
* Copyright (C) 2012-2020 52°North Initiative for Geospatial Open Source
>>>>>>>
/*
* Copyright (C) 2012-2020 52°North Initiative for Geospatial Open Source
---
<<<<<<<
if (payloadMap != null) {
resultHandler.handle(payloadMap);
}
if (errors != null) {
getErrorSet().addAll(errors);
}
if (result.isComplete()) {
// All the results are in but we may still have work pending, so wait
waitForPendingWork();
completionState.complete();
}
} catch (final RuntimeException e) {
getErrorSet().add(e.getMessage());
completionState.complete();
=======
if (payloadMap != null) {
resultHandler.handle(payloadMap, task);
}
if (errors != null) {
getErrorSet().addAll(errors);
}
if (result.isComplete()) {
complete();
>>>>>>>
if (payloadMap != null) {
resultHandler.handle(payloadMap);
}
if (errors != null) {
getErrorSet().addAll(errors);
}
if (result.isComplete()) {
// All the results are in but we may still have work pending, so wait
complete();
}
} catch (final RuntimeException e) {
getErrorSet().add(e.getMessage());
complete();
<<<<<<<
private void waitForPendingWork() {
LOGGER.logDurationIfTraceEnabled(() -> {
LOGGER.trace("No remaining nodes so wait for the result handler to clear any pending work");
try {
resultHandler.waitForPendingWork();
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
LOGGER.debug("Thread interrupted waiting for resultHandler to finish pending work");
// we will just let it complete as we have been interrupted
}
}, "Waiting for resultHandler to finish pending work");
}
=======
@Override
>>>>>>> |
<<<<<<<
import stroom.task.api.job.ScheduledJobsBinder;
import stroom.util.lifecycle.LifecycleAwareBinder;
=======
>>>>>>>
import stroom.util.lifecycle.LifecycleAwareBinder;
<<<<<<<
ScheduledJobsBinder.create(binder()).bind(StreamTaskJobs.class);
LifecycleAwareBinder.create(binder()).bind(StreamTaskCreatorImpl.class);
=======
>>>>>>>
LifecycleAwareBinder.create(binder()).bind(StreamTaskCreatorImpl.class); |
<<<<<<<
import org.apache.wicket.util.string.Strings;
import org.joda.time.DateTimeZone;
import org.orienteer.core.boot.loader.OrienteerClassLoader;
=======
import org.joda.time.DateTimeZone;
>>>>>>>
import org.apache.wicket.util.string.Strings;
import org.orienteer.core.boot.loader.OrienteerClassLoader;
import org.joda.time.DateTimeZone; |
<<<<<<<
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import stroom.statistics.shared.StatisticType;
import stroom.util.DatabaseTool;
import stroom.util.logging.LogExecutionTime;
=======
>>>>>>> |
<<<<<<<
import stroom.security.shared.User;
import stroom.statistics.shared.StatisticStoreEntity;
import stroom.stats.shared.StroomStatsStoreEntity;
=======
import stroom.security.server.User;
import stroom.statistics.shared.StatisticStore;
>>>>>>>
import stroom.security.server.User;
import stroom.statistics.shared.StatisticStoreEntity;
import stroom.stats.shared.StroomStatsStoreEntity;
<<<<<<<
deleteEntity(QueryEntity.class);
=======
deleteEntity(Policy.class);
deleteEntity(Query.class);
>>>>>>>
deleteEntity(Policy.class);
deleteEntity(QueryEntity.class); |
<<<<<<<
ResultPage<MetaRow> findMetaRow(final FindMetaCriteria criteria);
=======
/**
* Find meta data records and attributes that match the specified criteria and are decorated with data retention information.
*
* @param criteria The criteria to find matching meta data records with.
* @return A list of matching meta data records that includes attributes.
*/
ResultPage<MetaRow> findDecoratedRows(FindMetaCriteria criteria);
>>>>>>>
/**
* Find meta data records and attributes that match the specified criteria and are decorated with data retention information.
*
* @param criteria The criteria to find matching meta data records with.
* @return A list of matching meta data records that includes attributes.
*/
ResultPage<MetaRow> findDecoratedRows(final FindMetaCriteria criteria);
<<<<<<<
Long getMaxDataIdWithCreationBeforePeriod(final Long timestampMs);
List<MetaInfoSection> fetchFullMetaInfo(final long id);
Optional<DataRetentionTracker> getRetentionTracker();
void setTracker(final DataRetentionTracker dataRetentionTracker);
=======
List<MetaInfoSection> fetchFullMetaInfo(long id);
/**
* Get a distinct list of processor UUIDs for meta data matching the supplied criteria.
*
* @param criteria The criteria to find matching meta data processor UUIDs for.
* @return A distinct list of processor UUIDs for meta data matching the supplied criteria.
*/
List<String> getProcessorUuidList(FindMetaCriteria criteria);
>>>>>>>
List<MetaInfoSection> fetchFullMetaInfo(long id);
/**
* Get a distinct list of processor UUIDs for meta data matching the supplied criteria.
*
* @param criteria The criteria to find matching meta data processor UUIDs for.
* @return A distinct list of processor UUIDs for meta data matching the supplied criteria.
*/
List<String> getProcessorUuidList(FindMetaCriteria criteria);
Optional<DataRetentionTracker> getRetentionTracker();
void setTracker(final DataRetentionTracker dataRetentionTracker); |
<<<<<<<
import stroom.node.NodeCache;
import stroom.processor.impl.db.StreamTaskService;
=======
import stroom.node.api.NodeInfo;
>>>>>>>
import stroom.node.api.NodeInfo;
import stroom.processor.impl.db.StreamTaskService; |
<<<<<<<
import io.dropwizard.configuration.EnvironmentVariableSubstitutor;
import io.dropwizard.configuration.SubstitutingSourceProvider;
=======
import io.dropwizard.servlets.tasks.LogConfigurationTask;
>>>>>>>
import io.dropwizard.configuration.EnvironmentVariableSubstitutor;
import io.dropwizard.configuration.SubstitutingSourceProvider;
import io.dropwizard.servlets.tasks.LogConfigurationTask;
<<<<<<<
public void initialize(Bootstrap<Config> bootstrap) {
// This allows us to use templating in the YAML configuration.
bootstrap.setConfigurationSourceProvider(new SubstitutingSourceProvider(
bootstrap.getConfigurationSourceProvider(),
new EnvironmentVariableSubstitutor(false)));
=======
public void initialize(final Bootstrap<Configuration> bootstrap) {
>>>>>>>
public void initialize(final Bootstrap<Configuration> bootstrap) {
// This allows us to use templating in the YAML configuration.
bootstrap.setConfigurationSourceProvider(new SubstitutingSourceProvider(
bootstrap.getConfigurationSourceProvider(),
new EnvironmentVariableSubstitutor(false)));
<<<<<<<
public void run(Config configuration, io.dropwizard.setup.Environment environment) throws Exception {
// The order in which the following are run is important.
Environment.configure(environment);
configureCors(environment);
SpringContexts springContexts = new SpringContexts();
Servlets servlets = new Servlets(environment.getApplicationContext());
Filters filters = new Filters(environment.getApplicationContext());
Listeners listeners = new Listeners(environment.servlets(), springContexts.rootContext);
springContexts.start(environment, configuration);
ServletMonitor servletMonitor = new ServletMonitor((servlets.upgradeDispatcherServletHolder));
Resources resources = new Resources(environment.jersey(), servletMonitor);
HealthChecks.registerHealthChecks(environment.healthChecks(), resources, servletMonitor);
AdminTasks.registerAdminTasks(environment);
=======
public void run(final Configuration configuration, final Environment environment) throws Exception {
// Add useful logging setup.
registerLogConfiguration(environment);
environment.healthChecks().register(LogLevelInspector.class.getName(), new LogLevelInspector());
// We want Stroom to use the root path so we need to move Dropwizard's path.
environment.jersey().setUrlPattern(ResourcePaths.ROOT_PATH + "/*");
// Set up a session manager for Jetty
SessionHandler sessions = new SessionHandler();
environment.servlets().setSessionHandler(sessions);
// Configure Cross-Origin Resource Sharing.
>>>>>>>
public void run(final Configuration configuration, final Environment environment) throws Exception {
// Add useful logging setup.
registerLogConfiguration(environment);
environment.healthChecks().register(LogLevelInspector.class.getName(), new LogLevelInspector());
// We want Stroom to use the root path so we need to move Dropwizard's path.
environment.jersey().setUrlPattern(ResourcePaths.ROOT_PATH + "/*");
// Set up a session manager for Jetty
SessionHandler sessions = new SessionHandler();
environment.servlets().setSessionHandler(sessions);
// Configure Cross-Origin Resource Sharing. |
<<<<<<<
public static final String SESSION_COOKIE_NAME = "STROOM_SESSION_ID";
=======
private static final boolean SUPER_DEV_AUTHENTICATION_REQUIRED_VALUE = false;
private static final String SUPER_DEV_CONTENT_SECURITY_POLICY_VALUE = "";
>>>>>>>
public static final String SESSION_COOKIE_NAME = "STROOM_SESSION_ID";
private static final boolean SUPER_DEV_AUTHENTICATION_REQUIRED_VALUE = false;
private static final String SUPER_DEV_CONTENT_SECURITY_POLICY_VALUE = "";
<<<<<<<
LOGGER.warn("" + ConsoleColour.red(
"" +
"\n _ _ " +
"\n | | | | " +
"\n ___ _ _ _ __ ___ _ __ __| | _____ __ _ __ ___ ___ __| | ___ " +
"\n / __| | | | '_ \\ / _ \\ '__| / _` |/ _ \\ \\ / / | '_ ` _ \\ / _ \\ / _` |/ _ \\" +
"\n \\__ \\ |_| | |_) | __/ | | (_| | __/\\ V / | | | | | | (_) | (_| | __/" +
"\n |___/\\__,_| .__/ \\___|_| \\__,_|\\___| \\_/ |_| |_| |_|\\___/ \\__,_|\\___|" +
"\n | | " +
"\n |_| " +
"\n"));
// disableAuthentication(appConfig);
// Super Dev Mode isn't compatible with HTTPS so ensure cookies are not secure.
appConfig.getSessionCookieConfig().setSecure(false);
// The standard content security policy is incompatible with GWT super dev mode
disableContentSecurity(appConfig);
}
}
private void disableAuthentication(final AppConfig appConfig) {
LOGGER.warn("\n" + ConsoleColour.red(
"" +
"\n ***************************************************************" +
"\n FOR DEVELOPER USE ONLY! DO NOT RUN IN PRODUCTION ENVIRONMENTS!" +
"\n" +
"\n ALL AUTHENTICATION IS DISABLED!" +
"\n ***************************************************************"));
final AuthenticationConfig authenticationConfig = appConfig.getSecurityConfig().getAuthenticationConfig();
// Auth needs HTTPS and GWT super dev mode cannot work in HTTPS
String msg = new ColouredStringBuilder()
=======
LOGGER.warn("\n" + ConsoleColour.red(
"\n _ _ " +
"\n | | | | " +
"\n ___ _ _ _ __ ___ _ __ __| | _____ __ _ __ ___ ___ __| | ___ " +
"\n / __| | | | '_ \\ / _ \\ '__| / _` |/ _ \\ \\ / / | '_ ` _ \\ / _ \\ / _` |/ _ \\" +
"\n \\__ \\ |_| | |_) | __/ | | (_| | __/\\ V / | | | | | | (_) | (_| | __/" +
"\n |___/\\__,_| .__/ \\___|_| \\__,_|\\___| \\_/ |_| |_| |_|\\___/ \\__,_|\\___|" +
"\n | | " +
"\n |_| " +
"\n" +
"\n ***************************************************************" +
"\n FOR DEVELOPER USE ONLY! DO NOT RUN IN PRODUCTION ENVIRONMENTS!\n" +
"\n ALL AUTHENTICATION IS DISABLED!" +
"\n ***************************************************************"));
// Auth needs HTTPS and GWT super dev mode cannot work in HTTPS
String msg = new ColouredStringBuilder()
>>>>>>>
LOGGER.warn("" + ConsoleColour.red(
"" +
"\n _ _ " +
"\n | | | | " +
"\n ___ _ _ _ __ ___ _ __ __| | _____ __ _ __ ___ ___ __| | ___ " +
"\n / __| | | | '_ \\ / _ \\ '__| / _` |/ _ \\ \\ / / | '_ ` _ \\ / _ \\ / _` |/ _ \\" +
"\n \\__ \\ |_| | |_) | __/ | | (_| | __/\\ V / | | | | | | (_) | (_| | __/" +
"\n |___/\\__,_| .__/ \\___|_| \\__,_|\\___| \\_/ |_| |_| |_|\\___/ \\__,_|\\___|" +
"\n | | " +
"\n |_| " +
"\n"));
// disableAuthentication(appConfig);
// Super Dev Mode isn't compatible with HTTPS so ensure cookies are not secure.
appConfig.getSessionCookieConfig().setSecure(false);
// The standard content security policy is incompatible with GWT super dev mode
disableContentSecurity(appConfig);
}
}
private void disableAuthentication(final AppConfig appConfig) {
LOGGER.warn("\n" + ConsoleColour.red(
"" +
"\n ***************************************************************" +
"\n FOR DEVELOPER USE ONLY! DO NOT RUN IN PRODUCTION ENVIRONMENTS!" +
"\n" +
"\n ALL AUTHENTICATION IS DISABLED!" +
"\n ***************************************************************"));
final AuthenticationConfig authenticationConfig = appConfig.getSecurityConfig().getAuthenticationConfig();
// Auth needs HTTPS and GWT super dev mode cannot work in HTTPS
String msg = new ColouredStringBuilder()
<<<<<<<
LOGGER.warn(msg);
authenticationConfig.setAuthenticationRequired(false);
}
=======
LOGGER.warn(msg);
final AuthenticationConfig authenticationConfig = appConfig.getSecurityConfig().getAuthenticationConfig();
authenticationConfig.setAuthenticationRequired(SUPER_DEV_AUTHENTICATION_REQUIRED_VALUE);
>>>>>>>
LOGGER.warn(msg);
authenticationConfig.setAuthenticationRequired(SUPER_DEV_AUTHENTICATION_REQUIRED_VALUE);
}
<<<<<<<
LOGGER.warn(msg);
contentSecurityConfig.setContentSecurityPolicy("");
=======
LOGGER.warn(msg);
final ContentSecurityConfig contentSecurityConfig = appConfig.getSecurityConfig().getContentSecurityConfig();
contentSecurityConfig.setContentSecurityPolicy("");
}
>>>>>>>
LOGGER.warn(msg);
contentSecurityConfig.setContentSecurityPolicy(SUPER_DEV_CONTENT_SECURITY_POLICY_VALUE); |
<<<<<<<
int entryCount = 5;
=======
>>>>>>>
<<<<<<<
final Instant startInstant = Instant.now();
LOGGER.info("-------------------------load starts here--------------------------------------");
List<RefStreamDefinition> refStreamDefs1 = loadBulkData(
refStreamDefCount, keyValueMapCount, rangeValueMapCount, entryCount, 0, mapNamFunc);
=======
List<RefStreamDefinition> refStreamDefs1 = null;
List<RefStreamDefinition> refStreamDefs2 = null;
if (doLoad) {
LOGGER.info("-------------------------load starts here--------------------------------------");
refStreamDefs1 = loadBulkData(
refStreamDefCount, keyValueMapCount, rangeValueMapCount, entryCount, 0, mapNamFunc);
if (doAsserts) {
assertDbCounts(
refStreamDefCount,
totalMapEntries,
totalKeyValueEntryCount,
totalRangeValueEntryCount,
totalValueEntryCount);
}
>>>>>>>
List<RefStreamDefinition> refStreamDefs1 = null;
List<RefStreamDefinition> refStreamDefs2 = null;
final Instant startInstant = Instant.now();
if (doLoad) {
LOGGER.info("-------------------------load starts here--------------------------------------");
refStreamDefs1 = loadBulkData(
refStreamDefCount, keyValueMapCount, rangeValueMapCount, entryCount, 0, mapNamFunc);
if (doAsserts) {
assertDbCounts(
refStreamDefCount,
totalMapEntries,
totalKeyValueEntryCount,
totalRangeValueEntryCount,
totalValueEntryCount);
}
<<<<<<<
LAMBDA_LOGGER.info("COmpleted both loads in {}",
Duration.between(startInstant, Instant.now()).toString());
LOGGER.info("-------------------------gets start here---------------------------------------");
Random random = new Random();
// for each ref stream def & map def, have N goes at picking a random key and getting the value for it
Stream.concat(refStreamDefs1.stream(), refStreamDefs2.stream()).forEach(refStreamDef -> {
Instant startTime = Instant.now();
Stream.of(KV_TYPE, RANGE_TYPE).forEach(valueType -> {
for (int i = 0; i < entryCount; i++) {
String mapName = mapNamFunc.buildMapName(refStreamDef, valueType, random.nextInt(keyValueMapCount));
MapDefinition mapDefinition = new MapDefinition(refStreamDef, mapName);
int entryIdx = random.nextInt(entryCount);
String queryKey;
String expectedValue;
if (valueType.equals(KV_TYPE)) {
queryKey = buildKey(entryIdx);
expectedValue = buildKeyStoreValue(mapName, entryIdx, queryKey);
} else {
Range<Long> range = buildRangeKey(entryIdx);
// in the DB the keys are ranges so we need to pick a value in that range
queryKey = Long.toString(random.nextInt(range.size().intValue()) + range.getFrom());
expectedValue = buildRangeStoreValue(mapName, entryIdx, range);
=======
// In case the load was done elsewhere
if (refStreamDefs1 == null) {
refStreamDefs1 = buildRefStreamDefs(refStreamDefCount, 0);
}
if (refStreamDefs2 == null) {
refStreamDefs2 = buildRefStreamDefs(refStreamDefCount, refStreamDefCount);
}
Random random = new Random();
// for each ref stream def & map def, have N goes at picking a random key and getting the value for it
Stream.concat(refStreamDefs1.stream(), refStreamDefs2.stream()).forEach(refStreamDef -> {
Instant startTime = Instant.now();
Stream.of(KV_TYPE, RANGE_TYPE).forEach(valueType -> {
for (int i = 0; i < entryCount; i++) {
String mapName = mapNamFunc.buildMapName(refStreamDef, valueType, random.nextInt(keyValueMapCount));
MapDefinition mapDefinition = new MapDefinition(refStreamDef, mapName);
int entryIdx = random.nextInt(entryCount);
String queryKey;
String expectedValue;
if (valueType.equals(KV_TYPE)) {
queryKey = buildKey(entryIdx);
expectedValue = buildKeyStoreValue(mapName, entryIdx, queryKey);
} else {
Range<Long> range = buildRangeKey(entryIdx);
// in the DB the keys are ranges so we need to pick a value in that range
queryKey = Long.toString(random.nextInt(range.size().intValue()) + range.getFrom());
expectedValue = buildRangeStoreValue(mapName, entryIdx, range);
}
// get the proxy then get the value
RefDataValueProxy valueProxy = refDataStore.getValueProxy(mapDefinition, queryKey);
Optional<RefDataValue> optRefDataValue = valueProxy.supplyValue();
assertThat(optRefDataValue).isNotEmpty();
String value = ((StringValue) (optRefDataValue.get())).getValue();
assertThat(value).isEqualTo(expectedValue);
//now do it in one hit
optRefDataValue = refDataStore.getValue(mapDefinition, queryKey);
assertThat(optRefDataValue).isNotEmpty();
value = ((StringValue) (optRefDataValue.get())).getValue();
assertThat(value).isEqualTo(expectedValue);
>>>>>>>
// In case the load was done elsewhere
if (refStreamDefs1 == null) {
refStreamDefs1 = buildRefStreamDefs(refStreamDefCount, 0);
}
if (refStreamDefs2 == null) {
refStreamDefs2 = buildRefStreamDefs(refStreamDefCount, refStreamDefCount);
}
Random random = new Random();
// for each ref stream def & map def, have N goes at picking a random key and getting the value for it
Stream.concat(refStreamDefs1.stream(), refStreamDefs2.stream()).forEach(refStreamDef -> {
Instant startTime = Instant.now();
Stream.of(KV_TYPE, RANGE_TYPE).forEach(valueType -> {
for (int i = 0; i < entryCount; i++) {
String mapName = mapNamFunc.buildMapName(refStreamDef, valueType, random.nextInt(keyValueMapCount));
MapDefinition mapDefinition = new MapDefinition(refStreamDef, mapName);
int entryIdx = random.nextInt(entryCount);
String queryKey;
String expectedValue;
if (valueType.equals(KV_TYPE)) {
queryKey = buildKey(entryIdx);
expectedValue = buildKeyStoreValue(mapName, entryIdx, queryKey);
} else {
Range<Long> range = buildRangeKey(entryIdx);
// in the DB the keys are ranges so we need to pick a value in that range
queryKey = Long.toString(random.nextInt(range.size().intValue()) + range.getFrom());
expectedValue = buildRangeStoreValue(mapName, entryIdx, range);
}
// get the proxy then get the value
RefDataValueProxy valueProxy = refDataStore.getValueProxy(mapDefinition, queryKey);
Optional<RefDataValue> optRefDataValue = valueProxy.supplyValue();
assertThat(optRefDataValue).isNotEmpty();
String value = ((StringValue) (optRefDataValue.get())).getValue();
assertThat(value).isEqualTo(expectedValue);
//now do it in one hit
optRefDataValue = refDataStore.getValue(mapDefinition, queryKey);
assertThat(optRefDataValue).isNotEmpty();
value = ((StringValue) (optRefDataValue.get())).getValue();
assertThat(value).isEqualTo(expectedValue);
<<<<<<<
final Instant startInstant = Instant.now();
for (int i = 0; i < refStreamDefinitionCount; i++) {
RefStreamDefinition refStreamDefinition = buildRefStreamDefinition(i + refStreamDefinitionOffset);
=======
buildRefStreamDefs(refStreamDefinitionCount, refStreamDefinitionOffset)
.forEach(refStreamDefinition -> {
refStreamDefinitions.add(refStreamDefinition);
>>>>>>>
final Instant startInstant = Instant.now();
buildRefStreamDefs(refStreamDefinitionCount, refStreamDefinitionOffset)
.forEach(refStreamDefinition -> {
refStreamDefinitions.add(refStreamDefinition);
<<<<<<<
loader.completeProcessing();
});
}
LAMBDA_LOGGER.info("Loaded {} ref stream definitions in {}",
refStreamDefinitionCount, Duration.between(startInstant, Instant.now()).toString());
LOGGER.info("Counts:, KeyValue: {}, KeyRangeValue: {}, ProcInfo: {}",
refDataStore.getKeyValueEntryCount(),
refDataStore.getKeyRangeValueEntryCount(),
refDataStore.getProcessingInfoEntryCount());
=======
>>>>>>>
LAMBDA_LOGGER.info("Loaded {} ref stream definitions in {}",
refStreamDefinitionCount, Duration.between(startInstant, Instant.now()).toString());
LOGGER.info("Counts:, KeyValue: {}, KeyRangeValue: {}, ProcInfo: {}",
refDataStore.getKeyValueEntryCount(),
refDataStore.getKeyRangeValueEntryCount(),
refDataStore.getProcessingInfoEntryCount()); |
<<<<<<<
import org.springframework.context.annotation.Scope;
import org.springframework.stereotype.Component;
=======
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
>>>>>>>
import org.springframework.context.annotation.Scope;
import org.springframework.stereotype.Component;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
<<<<<<<
import stroom.query.api.DocRef;
import stroom.util.logging.StroomLogger;
=======
>>>>>>>
import stroom.query.api.DocRef; |
<<<<<<<
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;
import stroom.entity.server.util.SQLBuilder;
import stroom.entity.server.util.SQLUtil;
import stroom.entity.server.util.StroomDatabaseInfo;
import stroom.entity.server.util.StroomEntityManager;
import stroom.entity.shared.BaseCriteria.OrderByDirection;
=======
import org.springframework.stereotype.Component;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;
import stroom.entity.server.util.SqlBuilder;
import stroom.entity.server.util.StroomDatabaseInfo;
import stroom.entity.server.util.StroomEntityManager;
>>>>>>>
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;
import stroom.entity.server.util.StroomDatabaseInfo;
import stroom.entity.server.util.StroomEntityManager;
import stroom.entity.shared.BaseCriteria.OrderByDirection;
import stroom.entity.server.util.SqlBuilder;
<<<<<<<
=======
import stroom.util.logging.StroomLogger;
>>>>>>> |
<<<<<<<
import stroom.security.client.api.ClientSecurityContext;
import stroom.data.client.presenter.ClassificationWrappedMetaPresenter;
import stroom.data.client.presenter.ProcessorTaskPresenter;
import stroom.docref.SharedObject;
=======
import stroom.streamstore.client.presenter.ClassificationWrappedStreamPresenter;
import stroom.streamstore.client.presenter.StreamTaskPresenter;
import stroom.streamstore.shared.Stream;
import stroom.streamtask.shared.StreamProcessor;
import stroom.svg.client.Icon;
import stroom.svg.client.SvgPreset;
import stroom.util.client.ImageUtil;
import stroom.util.shared.SharedObject;
>>>>>>>
import stroom.svg.client.Icon;
import stroom.svg.client.SvgPreset;
import stroom.util.client.ImageUtil;
<<<<<<<
final Provider<ProcessorTaskPresenter> streamTaskPresenterProvider) {
super(eventBus, view, securityContext);
=======
final Provider<StreamTaskPresenter> streamTaskPresenterProvider) {
super(eventBus, view);
>>>>>>>
final Provider<ProcessorTaskPresenter> streamTaskPresenterProvider) {
super(eventBus, view); |
<<<<<<<
import stroom.cluster.ClusterConfig;
import stroom.dashboard.QueryHistoryConfig;
=======
import stroom.cluster.api.ClusterConfig;
import stroom.data.retention.impl.DataRetentionConfig;
>>>>>>>
import stroom.cluster.ClusterConfig;
import stroom.dashboard.QueryHistoryConfig;
import stroom.cluster.api.ClusterConfig;
import stroom.data.retention.impl.DataRetentionConfig;
<<<<<<<
import stroom.policy.PolicyConfig;
import stroom.processor.ProcessorConfig;
import stroom.processor.ProxyAggregationConfig;
=======
import stroom.receive.ReceiveDataConfig;
>>>>>>>
import stroom.policy.PolicyConfig;
import stroom.processor.ProcessorConfig;
import stroom.processor.ProxyAggregationConfig;
import stroom.receive.ReceiveDataConfig;
<<<<<<<
import stroom.statistics.StatisticsConfig;
import stroom.statistics.internal.InternalStatisticsConfig;
import stroom.statistics.sql.SQLStatisticsConfig;
import stroom.statistics.stroomstats.internal.HBaseStatisticsConfig;
=======
import stroom.statistics.impl.InternalStatisticsConfig;
import stroom.statistics.impl.hbase.internal.HBaseStatisticsConfig;
import stroom.statistics.impl.sql.SQLStatisticsConfig;
import stroom.storedquery.impl.db.StoredQueryHistoryConfig;
import stroom.streamtask.ProcessConfig;
import stroom.streamtask.ProxyAggregationConfig;
>>>>>>>
import stroom.statistics.StatisticsConfig;
import stroom.statistics.internal.InternalStatisticsConfig;
import stroom.statistics.sql.SQLStatisticsConfig;
import stroom.statistics.stroomstats.internal.HBaseStatisticsConfig;
import stroom.statistics.impl.InternalStatisticsConfig;
import stroom.statistics.impl.hbase.internal.HBaseStatisticsConfig;
import stroom.statistics.impl.sql.SQLStatisticsConfig;
import stroom.storedquery.impl.db.StoredQueryHistoryConfig;
import stroom.streamtask.ProcessConfig;
import stroom.streamtask.ProxyAggregationConfig;
<<<<<<<
bind(PolicyConfig.class).toInstance(appConfig.getPolicyConfig());
bind(ProcessorConfig.class).toInstance(appConfig.getProcessorConfig());
=======
bind(DataRetentionConfig.class).toInstance(appConfig.getPolicyConfig());
bind(ProcessConfig.class).toInstance(appConfig.getProcessConfig());
>>>>>>>
bind(DataRetentionConfig.class).toInstance(appConfig.getPolicyConfig());
bind(ProcessConfig.class).toInstance(appConfig.getProcessConfig());
bind(PolicyConfig.class).toInstance(appConfig.getPolicyConfig());
bind(ProcessorConfig.class).toInstance(appConfig.getProcessorConfig()); |
<<<<<<<
import com.google.inject.multibindings.Multibinder;
import stroom.task.api.TaskHandler;
import stroom.util.lifecycle.jobmanagement.ScheduledJobs;
=======
import stroom.ruleset.shared.FetchDataRetentionPolicyAction;
import stroom.ruleset.shared.SaveDataRetentionPolicyAction;
import stroom.task.api.TaskHandlerBinder;
>>>>>>>
import com.google.inject.multibindings.Multibinder;
import stroom.ruleset.shared.FetchDataRetentionPolicyAction;
import stroom.ruleset.shared.SaveDataRetentionPolicyAction;
import stroom.task.api.TaskHandlerBinder;
import stroom.util.lifecycle.jobmanagement.ScheduledJobs;
<<<<<<<
final Multibinder<TaskHandler> taskHandlerBinder = Multibinder.newSetBinder(binder(), TaskHandler.class);
taskHandlerBinder.addBinding().to(stroom.policy.FetchDataRetentionPolicyHandler.class);
taskHandlerBinder.addBinding().to(stroom.policy.SaveDataRetentionPolicyHandler.class);
final Multibinder<ScheduledJobs> jobs = Multibinder.newSetBinder(binder(), ScheduledJobs.class);
jobs.addBinding().to(PolicyJobs.class);
=======
TaskHandlerBinder.create(binder())
.bind(FetchDataRetentionPolicyAction.class, stroom.policy.FetchDataRetentionPolicyHandler.class)
.bind(SaveDataRetentionPolicyAction.class, stroom.policy.SaveDataRetentionPolicyHandler.class);
>>>>>>>
final Multibinder<ScheduledJobs> jobs = Multibinder.newSetBinder(binder(), ScheduledJobs.class);
jobs.addBinding().to(PolicyJobs.class);
TaskHandlerBinder.create(binder())
.bind(FetchDataRetentionPolicyAction.class, stroom.policy.FetchDataRetentionPolicyHandler.class)
.bind(SaveDataRetentionPolicyAction.class, stroom.policy.SaveDataRetentionPolicyHandler.class); |
<<<<<<<
import stroom.importexport.shared.ImportState;
import stroom.security.Security;
=======
import stroom.importexport.shared.ImportState;
import stroom.security.Insecure;
>>>>>>>
import stroom.importexport.shared.ImportState;
import stroom.security.Security;
<<<<<<<
security.insecure(() -> {
try {
final List<ImportState> confirmList = importDataAction.getConfirmList();
if (confirmList != null && confirmList.size() > 0) {
for (final ImportState confirmation : confirmList) {
try {
final Event event = eventLoggingService.createAction("ImportConfig", "Importing Configuration");
final event.logging.Object object = new event.logging.Object();
object.setType(confirmation.getDocRef().getType());
object.setId(confirmation.getDocRef().getUuid());
object.setName(confirmation.getSourcePath());
object.getData().add(EventLoggingUtil.createData("ImportAction",
confirmation.getState().getDisplayValue()));
final MultiObject multiObject = new MultiObject();
multiObject.getObjects().add(object);
final Import imp = new Import();
imp.setSource(multiObject);
event.getEventDetail().setImport(imp);
eventLoggingService.log(event);
} catch (final RuntimeException e) {
LOGGER.error("Unable to import event!", e);
}
=======
try {
final List<ImportState> confirmList = importDataAction.getConfirmList();
if (confirmList != null && confirmList.size() > 0) {
for (final ImportState confirmation : confirmList) {
try {
final Event event = eventLoggingService.createAction("ImportConfig", "Importing Configuration");
String state = "Error";
if (confirmation.getState() != null) {
state = confirmation.getState().getDisplayValue();
}
final event.logging.Object object = new event.logging.Object();
object.setType(confirmation.getDocRef().getType());
object.setId(confirmation.getDocRef().getUuid());
object.setName(confirmation.getSourcePath());
object.getData().add(EventLoggingUtil.createData("ImportAction", state));
final MultiObject multiObject = new MultiObject();
multiObject.getObjects().add(object);
final Import imp = new Import();
imp.setSource(multiObject);
event.getEventDetail().setImport(imp);
eventLoggingService.log(event);
} catch (final Exception e) {
LOGGER.error("Unable to import event!", e);
>>>>>>>
security.insecure(() -> {
try {
final List<ImportState> confirmList = importDataAction.getConfirmList();
if (confirmList != null && confirmList.size() > 0) {
for (final ImportState confirmation : confirmList) {
try {
final Event event = eventLoggingService.createAction("ImportConfig", "Importing Configuration");
String state = "Error";
if (confirmation.getState() != null) {
state = confirmation.getState().getDisplayValue();
}
final event.logging.Object object = new event.logging.Object();
object.setType(confirmation.getDocRef().getType());
object.setId(confirmation.getDocRef().getUuid());
object.setName(confirmation.getSourcePath());
object.getData().add(EventLoggingUtil.createData("ImportAction", state));
final MultiObject multiObject = new MultiObject();
multiObject.getObjects().add(object);
final Import imp = new Import();
imp.setSource(multiObject);
event.getEventDetail().setImport(imp);
eventLoggingService.log(event);
} catch (final RuntimeException e) {
LOGGER.error("Unable to import event!", e);
} |
<<<<<<<
public class XSLTFilter extends AbstractXMLFilter implements SupportsCodeInjection {
private static final StroomLogger LOGGER = StroomLogger.getLogger(XSLTFilter.class);
=======
public class XSLTFilter extends AbstractXMLFilter {
private static final Logger LOGGER = LoggerFactory.getLogger(XSLTFilter.class);
>>>>>>>
public class XSLTFilter extends AbstractXMLFilter implements SupportsCodeInjection {
private static final Logger LOGGER = LoggerFactory.getLogger(XSLTFilter.class); |
<<<<<<<
private final MapStoreHolder mapStoreHolder;
=======
private final FeedService feedService;
>>>>>>>
private final MapStoreHolder mapStoreHolder;
<<<<<<<
final MapStoreHolder mapStoreHolder,
final PipelineStore pipelineStore,
=======
@Named("cachedFeedService") final FeedService feedService,
@Named("cachedPipelineStore") final PipelineStore pipelineStore,
>>>>>>>
final MapStoreHolder mapStoreHolder,
final PipelineStore pipelineStore,
<<<<<<<
this.mapStoreHolder = mapStoreHolder;
=======
this.feedService = feedService;
>>>>>>>
this.mapStoreHolder = mapStoreHolder;
<<<<<<<
this.feedProperties = feedProperties;
=======
this.refDataStore = refDataStoreProvider.getOffHeapStore();
>>>>>>>
this.feedProperties = feedProperties;
this.refDataStore = refDataStoreProvider.getOffHeapStore();
<<<<<<<
populateMaps(pipeline, stream, streamSource, feedName, stream.getTypeName(), mapStoreBuilder);
if (LOGGER.isDebugEnabled()) {
LOGGER.debug("Finished loading reference data: " + mapStorePoolKey.toString());
}
=======
populateMaps(
pipeline,
stream,
streamSource,
feed,
stream.getStreamType(),
task.getRefStreamDefinition());
LOGGER.debug("Finished loading reference data: {}", refStreamDefinition);
>>>>>>>
populateMaps(
pipeline,
stream,
streamSource,
feedName,
stream.getTypeName(),
task.getRefStreamDefinition());
LOGGER.debug("Finished loading reference data: {}", refStreamDefinition);
<<<<<<<
private void populateMaps(final Pipeline pipeline,
final Data stream,
final StreamSource streamSource,
final String feedName,
final String streamTypeName,
final MapStoreBuilder mapStoreBuilder) {
=======
private void populateMaps(final Pipeline pipeline,
final Stream stream,
final StreamSource streamSource,
final Feed feed,
final StreamType streamType,
final RefStreamDefinition refStreamDefinition) {
// final List<RefStreamDefinition> loadedRefStreamDefinitions = new ArrayList<>();
>>>>>>>
private void populateMaps(final Pipeline pipeline,
final Data stream,
final StreamSource streamSource,
final String feedName,
final String streamTypeName,
final RefStreamDefinition refStreamDefinition) { |
<<<<<<<
* Get an executor for use in places where we don't want to use tasks and task handlers.
*
* @return An executor.
*/
Executor getExecutor();
/**
=======
>>>>>>> |
<<<<<<<
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
=======
import org.springframework.context.annotation.Bean;
>>>>>>>
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.annotation.Bean; |
<<<<<<<
import stroom.data.meta.api.Data;
=======
import stroom.feed.shared.Feed;
import stroom.refdata.store.RefDataStore;
import stroom.refdata.store.RefStreamDefinition;
import stroom.streamstore.shared.Stream;
>>>>>>>
import stroom.refdata.store.RefDataStore;
import stroom.refdata.store.RefStreamDefinition;
<<<<<<<
public MapStore load(final InputStream inputStream, final Data stream, final String feedName,
final DocRef contextPipeline) {
return taskManager.exec(new ContextDataLoadTask(inputStream, stream, feedName, contextPipeline));
=======
public void load(final InputStream inputStream,
final Stream stream,
final Feed feed,
final DocRef contextPipeline,
final RefStreamDefinition refStreamDefinition,
final RefDataStore refDataStore) {
taskManager.exec(new ContextDataLoadTask(
inputStream, stream, feed, contextPipeline, refStreamDefinition, refDataStore));
>>>>>>>
public void load(final InputStream inputStream,
final Data data,
final String feedName,
final DocRef contextPipeline,
final RefStreamDefinition refStreamDefinition,
final RefDataStore refDataStore) {
taskManager.exec(new ContextDataLoadTask(
inputStream, data, feedName, contextPipeline, refStreamDefinition, refDataStore)); |
<<<<<<<
super(folderService);
LOGGER.debug("Initialising: {}", this.getClass().getCanonicalName());
=======
super(cachedFolderService);
LOGGER.debug("Initialising: %s", this.getClass().getCanonicalName());
>>>>>>>
super(cachedFolderService);
LOGGER.debug("Initialising: {}", this.getClass().getCanonicalName()); |
<<<<<<<
import stroom.core.client.ContentManager;
import stroom.core.client.presenter.Plugin;
import stroom.dispatch.client.AsyncCallbackAdaptor;
=======
import stroom.app.client.ContentManager;
import stroom.app.client.presenter.Plugin;
>>>>>>>
import stroom.core.client.ContentManager;
import stroom.core.client.presenter.Plugin;
<<<<<<<
import stroom.entity.shared.ResultList;
import stroom.entity.shared.SharedDocRef;
=======
>>>>>>>
import stroom.entity.shared.SharedDocRef;
<<<<<<<
dispatcher.execute(new GetPipelineForStreamAction(event.getStreamId(), event.getChildStreamId()),
new AsyncCallbackAdaptor<SharedDocRef>() {
@Override
public void onSuccess(final SharedDocRef result) {
choosePipeline(result, event.getStreamId(), event.getEventId(),
event.getChildStreamType());
}
});
=======
dispatcher.exec(new GetPipelineForStreamAction(event.getStreamId(), event.getChildStreamId())).onSuccess(result -> choosePipeline(result, event.getStreamId(), event.getEventId(),
event.getChildStreamType()));
>>>>>>>
dispatcher.exec(new GetPipelineForStreamAction(event.getStreamId(), event.getChildStreamId())).onSuccess(result -> choosePipeline(result, event.getStreamId(), event.getEventId(),
event.getChildStreamType())); |
<<<<<<<
import stroom.task.api.TaskHandler;
import stroom.util.lifecycle.jobmanagement.ScheduledJobs;
=======
import stroom.task.api.TaskHandlerBinder;
>>>>>>>
import stroom.task.api.TaskHandlerBinder;
import stroom.util.lifecycle.jobmanagement.ScheduledJobs;
<<<<<<<
final Multibinder<TaskHandler> taskHandlerBinder = Multibinder.newSetBinder(binder(), TaskHandler.class);
taskHandlerBinder.addBinding().to(stroom.search.AsyncSearchTaskHandler.class);
taskHandlerBinder.addBinding().to(stroom.search.ClusterSearchTaskHandler.class);
taskHandlerBinder.addBinding().to(stroom.search.EventSearchTaskHandler.class);
final Multibinder<ScheduledJobs> jobs = Multibinder.newSetBinder(binder(), ScheduledJobs.class);
jobs.addBinding().to(SearchJobs.class);
=======
TaskHandlerBinder.create(binder())
.bind(AsyncSearchTask.class, AsyncSearchTaskHandler.class)
.bind(ClusterSearchTask.class, ClusterSearchTaskHandler.class)
.bind(EventSearchTask.class, EventSearchTaskHandler.class);
>>>>>>>
final Multibinder<ScheduledJobs> jobs = Multibinder.newSetBinder(binder(), ScheduledJobs.class);
jobs.addBinding().to(SearchJobs.class);
TaskHandlerBinder.create(binder())
.bind(AsyncSearchTask.class, AsyncSearchTaskHandler.class)
.bind(ClusterSearchTask.class, ClusterSearchTaskHandler.class)
.bind(EventSearchTask.class, EventSearchTaskHandler.class); |
<<<<<<<
=======
@JsonInclude(Include.NON_EMPTY)
@XmlRootElement(name = "doc")
@XmlType(name = "Doc", propOrder = {"type", "uuid", "name", "version", "createTime", "updateTime", "createUser", "updateUser"})
@XmlAccessorType(XmlAccessType.FIELD)
>>>>>>>
@JsonInclude(Include.NON_EMPTY) |
<<<<<<<
=======
static void addOrienteerVersions(Path pathToPomXml) {
pomXmlUtils.addOrienteerVersions(pathToPomXml);
}
>>>>>>> |
<<<<<<<
=======
if (!doneCreateOrRefreshAdminRole) {
doneCreateOrRefreshAdminRole = true;
createOrRefreshAdminUserGroup();
}
UserRef userRef = userService.getUserRefByName(username);
>>>>>>>
<<<<<<<
UserRef userRef = userService.getUserByName(username);
if (userRef == null) {
// The requested system user does not exist.
if (UserService.INITIAL_ADMIN_ACCOUNT.equals(username)) {
userRef = createOrRefreshAdmin();
}
}
=======
if (userRef != null) {
user = userService.loadByUuidInsecure(userRef.getUuid());
}
>>>>>>>
UserRef userRef = userService.getUserRefByName(username);
if (userRef == null) {
// The requested system user does not exist.
if (UserService.INITIAL_ADMIN_ACCOUNT.equals(username)) {
userRef = createOrRefreshAdmin();
}
} |
<<<<<<<
public AbstractSearchTask(final Task<?> parentTask, final String sessionId, final String userName,
final FindStreamCriteria streamFilter, final Query query) {
super(null, sessionId, userName);
=======
public AbstractSearchTask(final Task<?> parentTask, final String userToken,
final FindStreamCriteria streamFilter, final Search search) {
super(null, userToken);
>>>>>>>
public AbstractSearchTask(final Task<?> parentTask, final String userToken,
final FindStreamCriteria streamFilter, final Query query) {
super(null, userToken); |
<<<<<<<
import stroom.pipeline.scope.PipelineScoped;
import stroom.processor.shared.Processor;
import stroom.processor.shared.ProcessorFilterTask;
=======
import stroom.streamtask.shared.Processor;
import stroom.streamtask.shared.ProcessorFilterTask;
import stroom.util.pipeline.scope.PipelineScoped;
>>>>>>>
import stroom.pipeline.scope.PipelineScoped;
import stroom.processor.shared.Processor;
import stroom.processor.shared.ProcessorFilterTask;
import stroom.streamtask.shared.Processor;
import stroom.streamtask.shared.ProcessorFilterTask;
import stroom.util.pipeline.scope.PipelineScoped; |
<<<<<<<
import stroom.util.guice.HasHealthCheckBinder;
=======
import stroom.util.guice.RestResourcesBinder;
>>>>>>>
import stroom.util.guice.HasHealthCheckBinder;
import stroom.util.guice.RestResourcesBinder;
<<<<<<<
GuiceUtil.buildMultiBinder(binder(), RestResource.class)
.addBinding(DbStatusResourceImpl.class);
HasHealthCheckBinder.create(binder())
.bind(DbHealthCheck.class);
=======
RestResourcesBinder.create(binder())
.bindResource(DbStatusResourceImpl.class);
>>>>>>>
RestResourcesBinder.create(binder())
.bindResource(DbStatusResourceImpl.class);
HasHealthCheckBinder.create(binder())
.bind(DbHealthCheck.class); |
<<<<<<<
import stroom.importexport.ImportExportHelper;
import stroom.persist.EntityManagerSupport;
import stroom.pipeline.shared.PipelineEntity;
import stroom.pipeline.shared.TextConverter;
import stroom.pipeline.shared.XSLT;
import stroom.refdata.ReferenceDataModule;
import stroom.security.SecurityContext;
=======
import stroom.pipeline.shared.PipelineDoc;
import stroom.pipeline.shared.TextConverterDoc;
import stroom.pipeline.shared.XsltDoc;
>>>>>>>
import stroom.pipeline.shared.PipelineDoc;
import stroom.pipeline.shared.TextConverterDoc;
import stroom.pipeline.shared.XsltDoc;
import stroom.refdata.ReferenceDataModule;
<<<<<<<
// install sub-modules
install(new ReferenceDataModule());
bind(PipelineService.class).to(PipelineServiceImpl.class);
bind(XSLTService.class).to(XSLTServiceImpl.class);
bind(TextConverterService.class).to(TextConverterServiceImpl.class);
bind(TextConverterService.class).to(TextConverterServiceImpl.class);
=======
bind(PipelineStore.class).to(PipelineStoreImpl.class);
bind(XsltStore.class).to(XsltStoreImpl.class);
bind(TextConverterStore.class).to(TextConverterStoreImpl.class);
bind(TextConverterStore.class).to(TextConverterStoreImpl.class);
>>>>>>>
// install sub-modules
install(new ReferenceDataModule());
bind(PipelineStore.class).to(PipelineStoreImpl.class);
bind(XsltStore.class).to(XsltStoreImpl.class);
bind(TextConverterStore.class).to(TextConverterStoreImpl.class); |
<<<<<<<
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
=======
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Scope;
import org.springframework.stereotype.Component;
>>>>>>>
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Scope;
import org.springframework.stereotype.Component;
<<<<<<<
private static final Logger LOGGER = LoggerFactory.getLogger(BenchmarkClusterExecutor.class);
=======
// 20 min timeout
public static final int TIME_OUT = 1000 * 60 * 20;
private static final StroomLogger LOGGER = StroomLogger.getLogger(BenchmarkClusterExecutor.class);
>>>>>>>
// 20 min timeout
public static final int TIME_OUT = 1000 * 60 * 20;
private static final Logger LOGGER = LoggerFactory.getLogger(BenchmarkClusterExecutor.class); |
<<<<<<<
import stroom.svg.client.Icon;
import stroom.svg.client.SvgPresets;
import stroom.widget.button.client.ButtonView;
=======
import stroom.util.shared.ModelStringUtil;
import stroom.widget.button.client.GlyphButtonView;
import stroom.widget.button.client.GlyphIcons;
>>>>>>>
import stroom.util.shared.ModelStringUtil;
import stroom.svg.client.Icon;
import stroom.svg.client.SvgPresets;
import stroom.widget.button.client.ButtonView; |
<<<<<<<
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
=======
import stroom.util.config.StroomProperties;
>>>>>>>
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import stroom.util.config.StroomProperties;
<<<<<<<
=======
import stroom.util.logging.StroomLogger;
>>>>>>>
<<<<<<<
public static final String LOCK_EXTENSION = ".lock";
public static final String ZIP_EXTENSION = ".zip";
public static final String ERROR_EXTENSION = ".err";
public static final String BAD_EXTENSION = ".bad";
// 1 hour
public static final int DEFAULT_LOCK_AGE_MS = 1000 * 60 * 60;
private static final Logger LOGGER = LoggerFactory.getLogger(StroomZipRepository.class);
private static final Pattern ZIP_PATTERN = Pattern.compile(".*\\.zip");
=======
public final static String LOCK_EXTENSION = ".lock";
public final static String ZIP_EXTENSION = ".zip";
public final static String ERROR_EXTENSION = ".err";
public final static String BAD_EXTENSION = ".bad";
public final static String DEFAULT_ZIP_FILENAME_DELIMITER = "%";
public final static String[] INVALID_ZIP_FILENAME_DELIMITERS = {"/", "\\", "$", "{", "}"};
// 1 hour
public final static int DEFAULT_LOCK_AGE_MS = 1000 * 60 * 60;
public final static int MAX_FILENAME_LENGTH = 255;
private final static StroomLogger LOGGER = StroomLogger.getLogger(StroomZipRepository.class);
private final static Pattern ZIP_PATTERN = Pattern.compile(".*\\.zip$");
private final static Pattern BASE_FILENAME_PATTERN = Pattern.compile("^(\\d{3})+$");
>>>>>>>
public final static String LOCK_EXTENSION = ".lock";
public final static String ZIP_EXTENSION = ".zip";
public final static String ERROR_EXTENSION = ".err";
public final static String BAD_EXTENSION = ".bad";
public final static String DEFAULT_ZIP_FILENAME_DELIMITER = "%";
public final static String[] INVALID_ZIP_FILENAME_DELIMITERS = {"/", "\\", "$", "{", "}"};
// 1 hour
public final static int DEFAULT_LOCK_AGE_MS = 1000 * 60 * 60;
public final static int MAX_FILENAME_LENGTH = 255;
private final static Logger LOGGER = LoggerFactory.getLogger(StroomZipRepository.class);
private final static Pattern ZIP_PATTERN = Pattern.compile(".*\\.zip$");
private final static Pattern BASE_FILENAME_PATTERN = Pattern.compile("^(\\d{3})+$");
<<<<<<<
private final AtomicLong fileCount = new AtomicLong(0);
private final AtomicBoolean finish = new AtomicBoolean(false);
private final int lockDeleteAgeMs;
=======
private final AtomicLong fileCount = new AtomicLong(0);
private final AtomicBoolean finish = new AtomicBoolean(false);
private final int lockDeleteAgeMs;
private final String zipFilenameDelimiter;
private final Pattern templatePartPattern;
>>>>>>>
private final AtomicLong fileCount = new AtomicLong(0);
private final AtomicBoolean finish = new AtomicBoolean(false);
private final int lockDeleteAgeMs;
private final String zipFilenameDelimiter;
private final Pattern templatePartPattern;
<<<<<<<
=======
//TODO may be used by Stroom
>>>>>>>
//TODO may be used by Stroom
<<<<<<<
final List<String> fileList = new ArrayList<>();
final List<String> dirList = new ArrayList<>();
buildFileLists(dir, fileList, dirList);
=======
final List<String> fileList = new ArrayList<>();
final List<String> dirList = new ArrayList<>();
buildZipFileLists(dir, fileList, dirList);
>>>>>>>
final List<String> fileList = new ArrayList<>();
final List<String> dirList = new ArrayList<>();
buildZipFileLists(dir, fileList, dirList);
<<<<<<<
@SuppressWarnings(value = "DM_DEFAULT_ENCODING")
// PrintWriter does not take a charset and this is only an error message
=======
@SuppressWarnings(value = "DM_DEFAULT_ENCODING")
>>>>>>>
@SuppressWarnings(value = "DM_DEFAULT_ENCODING") |
<<<<<<<
import java.io.UncheckedIOException;
=======
>>>>>>>
import java.io.UncheckedIOException;
<<<<<<<
final boolean close) {
long bytesWritten;
=======
final boolean close) {
long bytesWritten = 0;
>>>>>>>
final boolean close) {
long bytesWritten; |
<<<<<<<
import stroom.security.api.SecurityContext;
import stroom.security.api.UserTokenUtil;
import stroom.security.shared.UserToken;
import stroom.task.api.TaskManager;
import stroom.ui.config.shared.UiConfig;
=======
import stroom.security.SecurityContext;
import stroom.security.SecurityHelper;
import stroom.task.server.TaskManager;
import stroom.util.config.PropertyUtil;
>>>>>>>
import stroom.security.api.SecurityContext;
import stroom.task.api.TaskManager;
import stroom.ui.config.shared.UiConfig;
<<<<<<<
final UserToken userToken = UserTokenUtil.create(securityContext.getUserId());
=======
>>>>>>> |
<<<<<<<
import stroom.entity.client.EntityItemListBox;
=======
import stroom.entity.shared.DocRef;
>>>>>>>
<<<<<<<
import stroom.entity.shared.ResultList;
import stroom.entity.shared.SharedDocRef;
import stroom.item.client.ItemListBox;
import stroom.query.api.DocRef;
import stroom.query.api.ExpressionTerm.Condition;
=======
import stroom.query.shared.Condition;
>>>>>>>
import stroom.query.api.DocRef;
import stroom.query.api.ExpressionTerm.Condition;
<<<<<<<
=======
import java.util.List;
>>>>>>>
import java.util.ArrayList;
import java.util.List;
<<<<<<<
final EntityReferenceFindAction<FindStreamAttributeKeyCriteria> findAction = new EntityReferenceFindAction<FindStreamAttributeKeyCriteria>(
new FindStreamAttributeKeyCriteria());
dispatcher.execute(findAction, new AsyncCallbackAdaptor<ResultList<SharedDocRef>>() {
@Override
public void onSuccess(final ResultList<SharedDocRef> resultList) {
for (final SharedDocRef docRef : resultList) {
view.getStreamAttributeKey().addItem(docRef);
}
}
});
=======
final FindStreamAttributeKeyCriteria criteria = new FindStreamAttributeKeyCriteria();
criteria.setOrderBy(FindStreamAttributeKeyCriteria.ORDER_BY_NAME);
dispatcher.exec(new EntityReferenceFindAction<>(criteria)).onSuccess(view::setKeys);
>>>>>>>
final FindStreamAttributeKeyCriteria criteria = new FindStreamAttributeKeyCriteria();
criteria.setOrderBy(FindStreamAttributeKeyCriteria.ORDER_BY_NAME);
dispatcher.exec(new EntityReferenceFindAction<>(criteria)).onSuccess(result -> {
final List<DocRef> list = new ArrayList<>(result.getValues());
view.setKeys(list);
}); |
<<<<<<<
import stroom.task.api.ExecutorProvider;
import stroom.task.api.TaskContext;
import stroom.util.logging.LambdaLogUtil;
=======
import stroom.task.server.TaskContext;
import stroom.util.concurrent.ExecutorProvider;
>>>>>>>
import stroom.task.api.ExecutorProvider;
import stroom.task.api.TaskContext;
import stroom.util.logging.LambdaLogUtil;
<<<<<<<
import java.util.concurrent.Executor;
import java.util.concurrent.TimeUnit;
=======
>>>>>>>
import java.util.concurrent.Executor;
import java.util.concurrent.TimeUnit;
<<<<<<<
LOGGER.debug(() ->
LogUtil.message("Query finished in {}", Duration.between(queryStart, Instant.now())));
});
=======
LOGGER.debug(() -> "Query finished in " + Duration.between(queryStart, Instant.now()));
};
runnable = taskWrapperProvider.get().wrap(runnable);
executorProvider.getExecutor().execute(runnable);
>>>>>>>
LOGGER.debug(() -> "Query finished in " + Duration.between(queryStart, Instant.now()));
};
runnable = taskContext.subTask(runnable);
executor.execute(runnable); |
<<<<<<<
import stroom.core.client.gin.PluginModule;
=======
import stroom.app.client.gin.PluginModule;
import stroom.editor.client.view.EditorMenuPresenter;
>>>>>>>
import stroom.core.client.gin.PluginModule;
import stroom.editor.client.view.EditorMenuPresenter;
<<<<<<<
import stroom.process.client.presenter.ProcessorPresenter;
import stroom.process.client.presenter.ProcessorPresenter.ProcessorView;
import stroom.process.client.view.ProcessorViewImpl;
import stroom.xmleditor.client.view.XMLEditorMenuPresenter;
=======
>>>>>>>
import stroom.process.client.presenter.ProcessorPresenter;
import stroom.process.client.presenter.ProcessorPresenter.ProcessorView;
import stroom.process.client.view.ProcessorViewImpl; |
<<<<<<<
response.render(JavaScriptHeaderItem.forReference(new JavaScriptResourceReference(OArchitectEditorWidget.class, "js/actions.js")));
response.render(OnLoadHeaderItem.forScript(String.format("; init('%s', '%s', '%s', '%s');",
container.getMarkupId(), editor.getMarkupId(), sidebar.getMarkupId(), toolbar.getMarkupId())));
=======
PackageResourceReference configXml = new PackageResourceReference(OArchitectEditorWidget.class, "js/architect.js");
String configUrl = urlFor(configXml, null).toString();
String baseUrl = configUrl.substring(0, configUrl.indexOf("js/architect"));
TextTemplate configTemplate = new PackageTextTemplate(OArchitectEditorWidget.class, "config.tmpl.xml");
Map<String, Object> params = CommonUtils.toMap("basePath", baseUrl);
String config = configTemplate.asString(params);
response.render(OnLoadHeaderItem.forScript(String.format("init('%s', %s, '%s', '%s', '%s', '%s');",
baseUrl,
CommonUtils.escapeAndWrapAsJavaScriptString(config),
container.getMarkupId(),
editor.getMarkupId(),
sidebar.getMarkupId(),
toolbar.getMarkupId())));
>>>>>>>
response.render(JavaScriptHeaderItem.forReference(new JavaScriptResourceReference(OArchitectEditorWidget.class, "js/actions.js")));
response.render(OnLoadHeaderItem.forScript(String.format("; init('%s', '%s', '%s', '%s');",
container.getMarkupId(), editor.getMarkupId(), sidebar.getMarkupId(), toolbar.getMarkupId())));
PackageResourceReference configXml = new PackageResourceReference(OArchitectEditorWidget.class, "js/architect.js");
String configUrl = urlFor(configXml, null).toString();
String baseUrl = configUrl.substring(0, configUrl.indexOf("js/architect"));
TextTemplate configTemplate = new PackageTextTemplate(OArchitectEditorWidget.class, "config.tmpl.xml");
Map<String, Object> params = CommonUtils.toMap("basePath", baseUrl);
String config = configTemplate.asString(params);
response.render(OnLoadHeaderItem.forScript(String.format("init('%s', %s, '%s', '%s', '%s', '%s');",
baseUrl,
CommonUtils.escapeAndWrapAsJavaScriptString(config),
container.getMarkupId(),
editor.getMarkupId(),
sidebar.getMarkupId(),
toolbar.getMarkupId()))); |
<<<<<<<
=======
import stroom.authentication.resources.token.v1.TokenService;
import stroom.security.impl.AuthenticationConfig.JwtConfig;
>>>>>>>
<<<<<<<
=======
private String getUserIdFromToken(final String token) {
LOGGER.debug(() -> "Checking with the Authentication Service that a token is valid.");
return tokenService.verifyToken(token)
.orElseThrow(() -> new RuntimeException(
LogUtil.message("Unable to get user identity from token {} ", token)));
}
>>>>>>>
<<<<<<<
final JsonWebKeySet publicJsonWebKey = getJsonWebKeySet();
if (publicJsonWebKey == null) {
resultBuilder.withDetail(KEY, "Cannot get stroom-authentication-service's public key!\n");
=======
String publicJsonWebKey = tokenService.getPublicKey();
boolean canGetJwk = StringUtils.isNotBlank(publicJsonWebKey);
if (!canGetJwk) {
resultBuilder.withDetail(KEY, "Missing public key\n");
>>>>>>>
final JsonWebKeySet publicJsonWebKey = getJsonWebKeySet();
if (publicJsonWebKey == null) {
resultBuilder.withDetail(KEY, "Missing public key\n");
<<<<<<<
} catch (RuntimeException e) {
resultBuilder.withDetail(KEY, "Error fetching our identity provider's public key! " +
"This means we cannot verify clients' authentication tokens ourselves. " +
"This might mean the authentication service is down or unavailable. " +
=======
} catch (RuntimeException e) {
resultBuilder.withDetail(KEY, "Error fetching our public key! " +
>>>>>>>
} catch (RuntimeException e) {
resultBuilder.withDetail(KEY, "Error fetching our identity provider's public key! " +
"This means we cannot verify clients' authentication tokens ourselves. " +
"This might mean the authentication service is down or unavailable. " + |
<<<<<<<
import stroom.docref.DocRef;
import stroom.security.shared.PermissionNames;
import stroom.security.client.api.ClientSecurityContext;
=======
import stroom.query.api.v2.DocRef;
import stroom.security.client.ClientSecurityContext;
import stroom.streamstore.shared.Stream;
import stroom.streamtask.shared.StreamProcessor;
import stroom.task.client.TaskEndEvent;
>>>>>>>
import stroom.explorer.shared.SharedDocRef;
import stroom.security.client.api.ClientSecurityContext;
import stroom.security.shared.PermissionNames;
import stroom.task.client.TaskEndEvent;
<<<<<<<
protected DocumentEditPresenter<?, ?> createEditor() {
if (securityContext.hasAppPermission(PermissionNames.VIEW_DATA_PERMISSION) ||
securityContext.hasAppPermission(PermissionNames.MANAGE_PROCESSORS_PERMISSION)) {
=======
protected MyPresenterWidget<?> createEditor() {
if (securityContext.hasAppPermission(Stream.VIEW_DATA_PERMISSION) || securityContext.hasAppPermission(StreamProcessor.MANAGE_PROCESSORS_PERMISSION)) {
>>>>>>>
protected MyPresenterWidget<?> createEditor() {
if (securityContext.hasAppPermission(PermissionNames.VIEW_DATA_PERMISSION) ||
securityContext.hasAppPermission(PermissionNames.MANAGE_PROCESSORS_PERMISSION)) { |
<<<<<<<
import stroom.entity.shared.DocRefUtil;
import stroom.entity.shared.DocumentServiceReadAction;
import stroom.query.api.v1.DocRef;
=======
import stroom.entity.shared.EntityServiceLoadAction;
import stroom.query.api.v2.DocRef;
>>>>>>>
import stroom.entity.shared.DocRefUtil;
import stroom.entity.shared.DocumentServiceReadAction;
import stroom.query.api.v2.DocRef; |
<<<<<<<
import stroom.task.api.TaskContextFactory;
import stroom.task.api.TaskTerminatedException;
import javax.inject.Provider;
import java.util.*;
import java.util.concurrent.*;
=======
import stroom.task.server.TaskCallback;
import stroom.task.server.TaskManager;
import stroom.task.server.TaskTerminatedException;
import stroom.util.logging.LambdaLogger;
import stroom.util.logging.LambdaLoggerFactory;
import stroom.util.shared.Task;
import stroom.util.shared.VoidResult;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;
public class SolrSearchResultCollector implements Store, TaskCallback<NodeResult> {
private static final Logger LOGGER = LoggerFactory.getLogger(SolrSearchResultCollector.class);
private static final LambdaLogger LAMBDA_LOGGER = LambdaLoggerFactory.getLogger(SolrSearchResultCollector.class);
>>>>>>>
import stroom.task.api.TaskContextFactory;
import stroom.task.api.TaskTerminatedException;
import stroom.util.logging.LambdaLogger;
import stroom.util.logging.LambdaLoggerFactory;
import javax.inject.Provider;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionException;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.Executor;
import java.util.concurrent.TimeUnit;
<<<<<<<
private final Executor executor;
private final TaskContextFactory taskContextFactory;
private final Provider<SolrAsyncSearchTaskHandler> solrAsyncSearchTaskHandlerProvider;
private final SolrAsyncSearchTask task;
=======
private final CompletionState completionState = new CompletionState();
private final TaskManager taskManager;
private final Task<VoidResult> task;
>>>>>>>
private final Executor executor;
private final TaskContextFactory taskContextFactory;
private final Provider<SolrAsyncSearchTaskHandler> solrAsyncSearchTaskHandlerProvider;
private final SolrAsyncSearchTask task;
<<<<<<<
this.completionState = completionState;
=======
>>>>>>>
<<<<<<<
final Runnable runnable = taskContextFactory.context(TASK_NAME, taskContext -> {
// Don't begin execution if we have been asked to complete already.
if (!completionState.isComplete()) {
final SolrAsyncSearchTaskHandler asyncSearchTaskHandler = solrAsyncSearchTaskHandlerProvider.get();
asyncSearchTaskHandler.exec(taskContext, task);
=======
taskManager.execAsync(task, new TaskCallback<VoidResult>() {
@Override
public void onSuccess(final VoidResult result) {
// Do nothing here as the results go into the collector
}
@Override
public void onFailure(final Throwable t) {
// We can expect some tasks to throw a task terminated exception
// as they may be terminated before we even try to execute them.
if (!(t instanceof TaskTerminatedException)) {
LOGGER.error(t.getMessage(), t);
getErrorSet().add(t.getMessage());
complete();
throw new RuntimeException(t.getMessage(), t);
}
complete();
>>>>>>>
final Runnable runnable = taskContextFactory.context(TASK_NAME, taskContext -> {
// Don't begin execution if we have been asked to complete already.
if (!completionState.isComplete()) {
final SolrAsyncSearchTaskHandler asyncSearchTaskHandler = solrAsyncSearchTaskHandlerProvider.get();
asyncSearchTaskHandler.exec(taskContext, task);
<<<<<<<
complete();
}
public void complete() {
completionState.complete();
=======
task.terminate();
complete();
}
public void complete() {
completionState.complete();
>>>>>>>
completionState.complete();
}
public void complete() {
completionState.complete();
<<<<<<<
return completionState.isComplete();
}
@Override
public void awaitCompletion() throws InterruptedException {
completionState.awaitCompletion();
=======
return completionState.isComplete();
}
@Override
public boolean awaitCompletion(final long timeout, final TimeUnit unit) throws InterruptedException {
return completionState.awaitCompletion(timeout, unit);
>>>>>>>
return completionState.isComplete();
}
@Override
public void awaitCompletion() throws InterruptedException {
completionState.awaitCompletion();
<<<<<<<
=======
", complete=" + completionState.isComplete() +
>>>>>>>
", complete=" + completionState.isComplete() + |
<<<<<<<
public void onClick(Optional<AjaxRequestTarget> targetOptional) {
DashboardPanel<?> dashboard = findParent(DashboardPanel.class);
=======
public void onClick(AjaxRequestTarget target) {
IDashboardContainer container = findParent(IDashboardContainer.class);
DashboardPanel<?> dashboard = container.getCurrentDashboard().getSelfComponent();
>>>>>>>
public void onClick(Optional<AjaxRequestTarget> targetOptional) {
IDashboardContainer container = findParent(IDashboardContainer.class);
DashboardPanel<?> dashboard = container.getCurrentDashboard().getSelfComponent(); |
<<<<<<<
import java.util.List;
import org.apache.wicket.AttributeModifier;
=======
import com.orientechnologies.orient.core.record.impl.ODocument;
import org.apache.wicket.behavior.AttributeAppender;
>>>>>>>
import java.util.List;
import org.apache.wicket.AttributeModifier;
import org.apache.wicket.behavior.AttributeAppender;
<<<<<<<
import com.orientechnologies.orient.core.record.impl.ODocument;
=======
import ru.ydn.wicket.wicketorientdb.model.ODocumentPropertyModel;
import java.util.List;
>>>>>>>
import com.orientechnologies.orient.core.record.impl.ODocument;
<<<<<<<
link.add(new FAIcon("icon", new PropertyModel<String>(itemModel, "icon")),
new Label("name", new ODocumentNameModel(item.getModel())).setRenderBodyOnly(true),
new WebMarkupContainer("menuLevelGlyph").setVisibilityAllowed(hasSubItems));
=======
link.add(new FAIcon("icon", new ODocumentPropertyModel<String>(itemModel, "icon")),
new Label("name", new ODocumentNameModel(item.getModel())).setRenderBodyOnly(true));
>>>>>>>
link.add(new FAIcon("icon", new PropertyModel<String>(itemModel, "icon")),
new Label("name", new ODocumentNameModel(item.getModel())).setRenderBodyOnly(true)); |
<<<<<<<
private StroomCacheManager cacheManager;
@Resource
private ExplorerNodeService explorerNodeService;
=======
private StroomCacheManager stroomCacheManager;
>>>>>>>
private StroomCacheManager stroomCacheManager;
@Resource
private ExplorerNodeService explorerNodeService; |
<<<<<<<
referenceData.put(new MapStoreCacheKey(DocRefUtil.create(pipeline), 1), mapStoreBuilder.getMapStore());
=======
referenceData.put(new MapStoreCacheKey(DocRef.create(pipeline), 1, null), mapStoreBuilder.getMapStore());
>>>>>>>
referenceData.put(new MapStoreCacheKey(DocRefUtil.create(pipeline), 1, null), mapStoreBuilder.getMapStore());
<<<<<<<
referenceData.put(new MapStoreCacheKey(DocRefUtil.create(pipeline), 2), mapStoreBuilder.getMapStore());
=======
referenceData.put(new MapStoreCacheKey(DocRef.create(pipeline), 2, null), mapStoreBuilder.getMapStore());
>>>>>>>
referenceData.put(new MapStoreCacheKey(DocRefUtil.create(pipeline), 2, null), mapStoreBuilder.getMapStore());
<<<<<<<
referenceData.put(new MapStoreCacheKey(DocRefUtil.create(pipeline), 3), mapStoreBuilder.getMapStore());
=======
referenceData.put(new MapStoreCacheKey(DocRef.create(pipeline), 3, null), mapStoreBuilder.getMapStore());
>>>>>>>
referenceData.put(new MapStoreCacheKey(DocRefUtil.create(pipeline), 3, null), mapStoreBuilder.getMapStore());
<<<<<<<
referenceData.put(new MapStoreCacheKey(DocRefUtil.create(pipelineEntity), 0), mapStoreBuilder.getMapStore());
=======
referenceData.put(new MapStoreCacheKey(DocRef.create(pipelineEntity), 0, null), mapStoreBuilder.getMapStore());
>>>>>>>
referenceData.put(new MapStoreCacheKey(DocRefUtil.create(pipelineEntity), 0, null), mapStoreBuilder.getMapStore());
<<<<<<<
referenceData.put(new MapStoreCacheKey(DocRefUtil.create(pipelineEntity), 0), mapStoreBuilder.getMapStore());
=======
referenceData.put(new MapStoreCacheKey(DocRef.create(pipelineEntity), 0, null), mapStoreBuilder.getMapStore());
>>>>>>>
referenceData.put(new MapStoreCacheKey(DocRefUtil.create(pipelineEntity), 0, null), mapStoreBuilder.getMapStore()); |
<<<<<<<
import stroom.docref.HasDisplayValue;
import stroom.util.shared.HasAuditInfo;
import stroom.util.shared.HasIntegerId;
import stroom.util.shared.HasPrimitiveValue;
import stroom.util.shared.PrimitiveValueConverter;
=======
>>>>>>>
import stroom.docref.HasDisplayValue;
import stroom.util.shared.HasAuditInfo;
import stroom.util.shared.HasIntegerId;
import stroom.util.shared.HasPrimitiveValue;
import stroom.util.shared.PrimitiveValueConverter; |
<<<<<<<
setAllNotNormalBlock();
=======
setBlockLayer(BlockRenderLayer.SOLID, BlockRenderLayer.TRANSLUCENT);
}
@Override
public boolean isFullBlock(IBlockState state)
{
return false;
}
@Override
public boolean isFullCube(IBlockState state)
{
return false;
}
@Override
public boolean isOpaqueCube(IBlockState state)
{
return false;
}
@Override
public boolean isNormalCube(IBlockState state, IBlockAccess world, BlockPos pos)
{
return false;
>>>>>>>
setAllNotNormalBlock();
setBlockLayer(BlockRenderLayer.SOLID, BlockRenderLayer.TRANSLUCENT); |
<<<<<<<
import net.minecraft.client.Minecraft;
import net.minecraft.client.renderer.BlockRendererDispatcher;
import net.minecraft.client.renderer.IRenderTypeBuffer;
import net.minecraft.client.renderer.RenderType;
import net.minecraft.client.renderer.model.IBakedModel;
=======
import net.minecraft.client.renderer.IRenderTypeBuffer;
import net.minecraft.client.renderer.Quaternion;
import net.minecraft.client.renderer.RenderType;
import net.minecraft.client.renderer.Vector3f;
import net.minecraft.client.renderer.model.BakedQuad;
>>>>>>>
import net.minecraft.client.Minecraft;
import net.minecraft.client.renderer.BlockRendererDispatcher;
import net.minecraft.client.renderer.IRenderTypeBuffer;
import net.minecraft.client.renderer.RenderType;
import net.minecraft.client.renderer.model.BakedQuad;
import net.minecraft.client.renderer.model.IBakedModel;
<<<<<<<
import net.minecraft.util.math.vector.Quaternion;
import net.minecraft.util.math.vector.Vector3f;
import net.minecraftforge.client.model.data.EmptyModelData;
=======
import java.util.List;
>>>>>>>
import net.minecraft.util.math.vector.Quaternion;
import net.minecraft.util.math.vector.Vector3f;
import java.util.List; |
<<<<<<<
=======
import com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx;
import com.orientechnologies.orient.core.metadata.security.OUser;
import com.orientechnologies.orient.core.metadata.security.ORule.ResourceGeneric;
>>>>>>>
import com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx;
import com.orientechnologies.orient.core.metadata.security.OUser;
import com.orientechnologies.orient.core.metadata.security.ORule.ResourceGeneric;
<<<<<<<
import org.orienteer.core.orientd.plugin.OrienteerHazelcastPlugin;
import org.orienteer.core.wicket.pageStore.OrientDbDataStore;
import org.orienteer.core.resource.OContentShareResource;
=======
>>>>>>>
import org.orienteer.core.orientd.plugin.OrienteerHazelcastPlugin;
<<<<<<<
import ru.ydn.wicket.wicketorientdb.IOrientDbSettings;
import ru.ydn.wicket.wicketorientdb.LazyAuthorizationRequestCycleListener;
import ru.ydn.wicket.wicketorientdb.OrientDbWebApplication;
import ru.ydn.wicket.wicketorientdb.OrientDbWebSession;
=======
import ru.ydn.wicket.wicketorientdb.*;
import ru.ydn.wicket.wicketorientdb.security.OSecurityHelper;
import ru.ydn.wicket.wicketorientdb.security.OrientPermission;
>>>>>>>
import ru.ydn.wicket.wicketorientdb.*;
import ru.ydn.wicket.wicketorientdb.security.OSecurityHelper;
import ru.ydn.wicket.wicketorientdb.security.OrientPermission;
<<<<<<<
/**
* Create page manager provider for application
* @return {@link IPageManagerProvider} default - {@link DefaultPageManagerProvider}
*/
protected IPageManagerProvider createPageManagerProvider() {
return new DefaultPageManagerProvider(this) {
@Override
protected IDataStore newDataStore() {
return new OrientDbDataStore();
}
@Override
protected IPageStore newPageStore(IDataStore dataStore) {
ISerializer pageSerializer = application.getFrameworkSettings().getSerializer();
return new HazelcastPageStore(pageSerializer, dataStore);
}
};
}
=======
@Override
public boolean checkResource(ResourceGeneric resource, String specific, int iOperation) {
if(OSecurityHelper.FEATURE_RESOURCE.equals(resource)) {
if(Strings.isEmpty(specific)) return true;
else
return super.checkResource(resource, specific, iOperation)
|| OrienteerWebSession.get().getOPerspective().providesFeature(specific);
} else {
return super.checkResource(resource, specific, iOperation);
}
}
>>>>>>>
/**
* Create page manager provider for application
* @return {@link IPageManagerProvider} default - {@link DefaultPageManagerProvider}
*/
protected IPageManagerProvider createPageManagerProvider() {
return new DefaultPageManagerProvider(this) {
@Override
protected IDataStore newDataStore() {
return new OrientDbDataStore();
}
@Override
protected IPageStore newPageStore(IDataStore dataStore) {
ISerializer pageSerializer = application.getFrameworkSettings().getSerializer();
return new HazelcastPageStore(pageSerializer, dataStore);
}
};
}
@Override
public boolean checkResource(ResourceGeneric resource, String specific, int iOperation) {
if(OSecurityHelper.FEATURE_RESOURCE.equals(resource)) {
if(Strings.isEmpty(specific)) return true;
else
return super.checkResource(resource, specific, iOperation)
|| OrienteerWebSession.get().getOPerspective().providesFeature(specific);
} else {
return super.checkResource(resource, specific, iOperation);
}
} |
<<<<<<<
=======
import com.google.common.base.Preconditions;
>>>>>>>
import com.google.common.base.Preconditions;
<<<<<<<
import net.minecraft.item.BlockItemUseContext;
import net.minecraft.item.Item;
import net.minecraft.item.ItemGroup;
import net.minecraft.item.ItemStack;
import net.minecraft.pathfinding.PathType;
=======
import net.minecraft.fluid.Fluid;
import net.minecraft.fluid.Fluids;
import net.minecraft.fluid.IFluidState;
import net.minecraft.item.BlockItemUseContext;
import net.minecraft.item.Item;
import net.minecraft.item.ItemGroup;
import net.minecraft.item.ItemStack;
>>>>>>>
import net.minecraft.item.BlockItemUseContext;
import net.minecraft.item.Item;
import net.minecraft.item.ItemGroup;
import net.minecraft.item.ItemStack;
import net.minecraft.pathfinding.PathType;
import net.minecraft.fluid.Fluid;
import net.minecraft.fluid.Fluids;
import net.minecraft.fluid.IFluidState;
import net.minecraft.item.BlockItemUseContext;
import net.minecraft.item.Item;
import net.minecraft.item.ItemGroup;
import net.minecraft.item.ItemStack; |
<<<<<<<
.setLightLevel(s -> 13)
.notSolid(), BlockItemBalloon::new);
=======
.lightValue(13)
.notSolid(), BlockItemBalloon::new, BlockStateProperties.WATERLOGGED);
>>>>>>>
.setLightLevel(s -> 13)
.notSolid(), BlockItemBalloon::new, BlockStateProperties.WATERLOGGED); |
<<<<<<<
import blusunrize.immersiveengineering.api.ApiUtils;
import blusunrize.immersiveengineering.api.utils.shapes.CachedShapesWithTransform;
=======
>>>>>>>
import blusunrize.immersiveengineering.api.utils.shapes.CachedShapesWithTransform; |
<<<<<<<
import net.minecraft.util.math.vector.Matrix4f;
import net.minecraft.util.text.TextFormatting;
=======
>>>>>>>
import net.minecraft.util.math.vector.Matrix4f; |
<<<<<<<
/*
* Copyright (C) 2012-2019 52°North Initiative for Geospatial Open Source
=======
/**
* Copyright (C) 2012-2020 52°North Initiative for Geospatial Open Source
>>>>>>>
/*
* Copyright (C) 2012-2020 52°North Initiative for Geospatial Open Source |
<<<<<<<
Properties.create(Material.IRON).hardnessAndResistance(3, 15).notSolid(),
=======
Properties.create(Material.IRON).sound(SoundType.METAL).hardnessAndResistance(3, 15),
>>>>>>>
Properties.create(Material.IRON).sound(SoundType.METAL).hardnessAndResistance(3, 15).notSolid(), |
<<<<<<<
import net.minecraft.util.text.ITextComponent;
=======
import net.minecraft.util.math.RayTraceResult.Type;
>>>>>>>
import net.minecraft.util.text.ITextComponent;
import net.minecraft.util.math.RayTraceResult.Type;
<<<<<<<
if(!(rtr instanceof BlockRayTraceResult))
return null;
BlockRayTraceResult brtr = (BlockRayTraceResult)rtr;
=======
if(rtr.getType()==Type.MISS)
return new String[0];
>>>>>>>
if(rtr.getType()==Type.MISS)
return null;
<<<<<<<
return new ITextComponent[]{
TextUtils.formatFluidStack(tank.getFluid())
};
=======
{
String s = null;
if(!tank.getFluid().isEmpty())
s = tank.getFluid().getDisplayName().getFormattedText()+": "+tank.getFluidAmount()+"mB";
else
s = I18n.format(Lib.GUI+"empty");
return new String[]{s};
}
if(!(rtr instanceof BlockRayTraceResult))
return new String[0];
BlockRayTraceResult brtr = (BlockRayTraceResult)rtr;
>>>>>>>
return new ITextComponent[]{
TextUtils.formatFluidStack(tank.getFluid())
};
if(!(rtr instanceof BlockRayTraceResult))
return null;
BlockRayTraceResult brtr = (BlockRayTraceResult)rtr; |
<<<<<<<
boolean hammerUseSide(Direction side, PlayerEntity player, Vector3d hitVec);
=======
boolean hammerUseSide(Direction side, PlayerEntity player, Hand hand, Vec3d hitVec);
>>>>>>>
boolean hammerUseSide(Direction side, PlayerEntity player, Hand hand, Vector3d hitVec);
<<<<<<<
ActionResultType screwdriverUseSide(Direction side, PlayerEntity player, Vector3d hitVec);
=======
ActionResultType screwdriverUseSide(Direction side, PlayerEntity player, Hand hand, Vec3d hitVec);
>>>>>>>
ActionResultType screwdriverUseSide(Direction side, PlayerEntity player, Hand hand, Vector3d hitVec); |
<<<<<<<
import blusunrize.immersiveengineering.client.DynamicModelLoader;
import blusunrize.immersiveengineering.client.render.tile.DynamicModel.ModelType;
import blusunrize.immersiveengineering.client.utils.IERenderTypes;
=======
>>>>>>>
import blusunrize.immersiveengineering.client.DynamicModelLoader;
import blusunrize.immersiveengineering.client.render.tile.DynamicModel.ModelType;
import blusunrize.immersiveengineering.client.utils.IERenderTypes;
<<<<<<<
=======
import net.minecraft.util.math.Vec3d;
import org.lwjgl.opengl.GL11;
>>>>>>>
<<<<<<<
public ArcFurnaceRenderer(TileEntityRendererDispatcher rendererDispatcherIn)
{
super(rendererDispatcherIn);
DynamicModelLoader.requestTexture(HOT_METLA_FLOW);
DynamicModelLoader.requestTexture(HOT_METLA_STILL);
}
=======
>>>>>>>
public ArcFurnaceRenderer(TileEntityRendererDispatcher rendererDispatcherIn)
{
super(rendererDispatcherIn);
} |
<<<<<<<
this.fontRenderer.drawStringWithShadow("Gun configuration", 10, 15, 0xffffffff);
=======
this.fontRendererObj.drawStringWithShadow(I18n.format("blockbuster.gui.gun.title"), 10, 15, 0xffffffff);
>>>>>>>
this.fontRenderer.drawStringWithShadow(I18n.format("blockbuster.gui.gun.title"), 10, 15, 0xffffffff);
<<<<<<<
this.fontRenderer.drawStringWithShadow("Command on fire", this.fireCommand.area.x, this.fireCommand.area.y - 12, 0xffffff);
=======
this.fontRendererObj.drawStringWithShadow(I18n.format("blockbuster.gui.gun.fire_command"), this.fireCommand.area.x, this.fireCommand.area.y - 12, 0xffffff);
>>>>>>>
this.fontRenderer.drawStringWithShadow(I18n.format("blockbuster.gui.gun.fire_command"), this.fireCommand.area.x, this.fireCommand.area.y - 12, 0xffffff);
<<<<<<<
this.fontRenderer.drawStringWithShadow("Command on tick", this.tickCommand.area.x, this.tickCommand.area.y - 12, 0xffffff);
this.fontRenderer.drawStringWithShadow("Command on impact", this.impactCommand.area.x, this.impactCommand.area.y - 12, 0xffffff);
=======
this.fontRendererObj.drawStringWithShadow(I18n.format("blockbuster.gui.gun.impact_command"), this.impactCommand.area.x, this.impactCommand.area.y - 12, 0xffffff);
>>>>>>>
this.fontRenderer.drawStringWithShadow(I18n.format("blockbuster.gui.gun.impact_command"), this.impactCommand.area.x, this.impactCommand.area.y - 12, 0xffffff);
<<<<<<<
this.drawCenteredString(this.fontRenderer, "Gun transforms", this.gun.area.getX(0.5F), this.gun.area.y - 28, 0xffffff);
this.drawCenteredString(this.fontRenderer, "Projectile transforms", this.projectile.area.getX(0.5F), this.projectile.area.y - 28, 0xffffff);
=======
this.drawCenteredString(this.fontRendererObj, I18n.format("blockbuster.gui.gun.gun_transforms"), this.gun.area.getX(0.5F), this.gun.area.y - 28, 0xffffff);
this.drawCenteredString(this.fontRendererObj, I18n.format("blockbuster.gui.gun.projectile_transforms"), this.projectile.area.getX(0.5F), this.projectile.area.y - 28, 0xffffff);
>>>>>>>
this.drawCenteredString(this.fontRenderer, I18n.format("blockbuster.gui.gun.gun_transforms"), this.gun.area.getX(0.5F), this.gun.area.y - 28, 0xffffff);
this.drawCenteredString(this.fontRenderer, I18n.format("blockbuster.gui.gun.projectile_transforms"), this.projectile.area.getX(0.5F), this.projectile.area.y - 28, 0xffffff); |
<<<<<<<
=======
import org.apache.commons.lang3.mutable.Mutable;
import org.apache.commons.lang3.mutable.MutableInt;
import org.apache.commons.lang3.mutable.MutableObject;
import org.lwjgl.opengl.GL11;
>>>>>>>
import org.apache.commons.lang3.mutable.Mutable;
import org.apache.commons.lang3.mutable.MutableInt;
import org.apache.commons.lang3.mutable.MutableObject; |
<<<<<<<
TransformationMatrix tMatrix = matrix.toTransformationMatrix();
final float extend = getExtensionIntoBlock(getTile());
=======
final double extend = getCurrentExtension();
>>>>>>>
TransformationMatrix tMatrix = matrix.toTransformationMatrix();
final double extend = getCurrentExtension(); |
<<<<<<<
import net.minecraft.block.Blocks;
import net.minecraft.client.renderer.TransformationMatrix;
import net.minecraft.client.renderer.Vector3f;
import net.minecraft.client.renderer.Vector4f;
=======
>>>>>>>
import net.minecraft.block.Blocks;
import net.minecraft.client.renderer.TransformationMatrix;
<<<<<<<
import net.minecraft.client.renderer.texture.TextureAtlasSprite;
import net.minecraft.client.renderer.vertex.DefaultVertexFormats;
import net.minecraft.client.renderer.vertex.VertexFormat;
import net.minecraft.client.renderer.vertex.VertexFormatElement;
import net.minecraft.entity.Entity;
=======
>>>>>>>
import net.minecraft.client.renderer.texture.TextureAtlasSprite;
import net.minecraft.client.renderer.vertex.VertexFormat;
import net.minecraft.client.renderer.vertex.VertexFormatElement;
import net.minecraft.entity.Entity;
<<<<<<<
import net.minecraftforge.client.model.pipeline.BakedQuadBuilder;
import net.minecraftforge.client.model.pipeline.IVertexConsumer;
import net.minecraftforge.common.extensions.IForgeEntityMinecart;
=======
import net.minecraftforge.common.model.TRSRTransformation;
>>>>>>>
<<<<<<<
if(o instanceof ItemStack)
return ItemStack.areItemsEqual((ItemStack)o, stack)&&
(!checkNBT||Utils.compareItemNBT((ItemStack)o, stack));
else if(o instanceof Collection)
{
for(Object io : (Collection)o)
if(stackMatchesObject(stack, io, checkNBT))
return true;
}
else if(o instanceof IngredientWithSize)
return ((IngredientWithSize)o).test(stack);
else if(o instanceof Ingredient)
return ((Ingredient)o).test(stack);
else if(o instanceof ItemStack[])
{
for(ItemStack io : (ItemStack[])o)
if(ItemStack.areItemsEqual(io, stack)&&(!checkNBT||Utils.compareItemNBT(io, stack)))
return true;
}
else if(o instanceof FluidStack)
return FluidUtil.getFluidContained(stack)
.map(fs -> fs.containsFluid((FluidStack)o))
.orElse(false);
else if(o instanceof ResourceLocation)
return compareToOreName(stack, (ResourceLocation)o);
else if(o instanceof IngredientWithSize)
return ((IngredientWithSize)o).test(stack);
else
throw new IllegalArgumentException("Comparisong object "+o+" of class "+o.getClass()+" is invalid!");
return false;
=======
return ItemUtils.stackMatchesObject(stack, o, checkNBT);
>>>>>>>
return ItemUtils.stackMatchesObject(stack, o, checkNBT);
<<<<<<<
public static Function<BakedQuad, BakedQuad> transformQuad(TransformationMatrix transform, Int2IntFunction colorMultiplier)
=======
@Deprecated
public static Function<BakedQuad, BakedQuad> transformQuad(TRSRTransformation transform, Int2IntFunction colorMultiplier)
>>>>>>>
@Deprecated
public static Function<BakedQuad, BakedQuad> transformQuad(TransformationMatrix transform, Int2IntFunction colorMultiplier)
<<<<<<<
@OnlyIn(Dist.CLIENT)
private static class QuadTransformer implements Function<BakedQuad, BakedQuad>
{
@Nonnull
private final TransformationMatrix transform;
@Nullable
private final Int2IntFunction colorTransform;
private BakedQuadBuilder currentQuadBuilder;
//TODO BQBuilder seems to exclusively use BLOCK. Is that correct or a Forge bug?
private IVertexConsumer transformer = createConsumer(DefaultVertexFormats.BLOCK);
private QuadTransformer(TransformationMatrix transform, @Nullable Int2IntFunction colorTransform)
{
this.transform = transform;
this.colorTransform = colorTransform;
}
@Override
public BakedQuad apply(BakedQuad q)
{
currentQuadBuilder = new BakedQuadBuilder(q.func_187508_a());
q.pipe(transformer);
return currentQuadBuilder.build();
}
private IVertexConsumer createConsumer(VertexFormat f)
{
int posPos = -1;
int normPos = -1;
int colorPos = -1;
for(int i = 0; i < f.getElements().size(); i++)
if(f.getElements().get(i).getUsage()==VertexFormatElement.Usage.POSITION)
posPos = i;
else if(f.getElements().get(i).getUsage()==VertexFormatElement.Usage.NORMAL)
normPos = i;
else if(f.getElements().get(i).getUsage()==VertexFormatElement.Usage.COLOR)
colorPos = i;
if(posPos==-1)
return null;
final int posPosFinal = posPos;
final int normPosFinal = normPos;
final int colorPosFinal = colorPos;
return new IVertexConsumer()
{
int tintIndex = -1;
@Nonnull
@Override
public VertexFormat getVertexFormat()
{
return f;
}
@Override
public void setQuadTint(int tint)
{
currentQuadBuilder.setQuadTint(tint);
tintIndex = tint;
}
@Override
public void setQuadOrientation(@Nonnull Direction orientation)
{
Vec3i normal = orientation.getDirectionVec();
Vector3f newFront = new Vector3f(normal.getX(), normal.getY(), normal.getZ());
transform.transformNormal(newFront);
Direction newOrientation = Direction.getFacingFromVector(
newFront.getX(),
newFront.getY(),
newFront.getZ()
);
currentQuadBuilder.setQuadOrientation(newOrientation);
}
@Override
public void setApplyDiffuseLighting(boolean diffuse)
{
currentQuadBuilder.setApplyDiffuseLighting(diffuse);
}
@Override
public void setTexture(@Nonnull TextureAtlasSprite texture)
{
currentQuadBuilder.setTexture(texture);
}
@Override
public void put(int element, @Nonnull float... data)
{
if(element==posPosFinal&&transform!=null)
{
Vector4f newPos = new Vector4f(data[0], data[1], data[2], 1);
transform.transformPosition(newPos);
data = new float[3];
data[0] = newPos.getX();
data[1] = newPos.getY();
data[2] = newPos.getZ();
}
else if(element==normPosFinal)
{
Vector3f newNormal = new Vector3f(data[0], data[1], data[2]);
transform.transformNormal(newNormal);
data = new float[3];
data[0] = newNormal.getX();
data[1] = newNormal.getY();
data[2] = newNormal.getZ();
}
else if(element==colorPosFinal)
{
if(tintIndex!=-1&&colorTransform!=null)
{
int multiplier = colorTransform.apply(tintIndex);
if(multiplier!=0)
{
float r = (float)(multiplier >> 16&255)/255.0F;
float g = (float)(multiplier >> 8&255)/255.0F;
float b = (float)(multiplier&255)/255.0F;
float[] oldData = data;
data = new float[4];
data[0] = oldData[0]*r;
data[1] = oldData[1]*g;
data[2] = oldData[2]*b;
data[3] = oldData[3];
}
}
}
currentQuadBuilder.put(element, data);
}
};
}
}
=======
>>>>>>> |