conflict_resolution (string, lengths 27 to 16k)
<<<<<<< List<Action> actions = new ArrayList<Action>(); actions.add(AddContentTagAction.getInstance()); ======= List<Action> actions = new ArrayList<>(); actions.add(TagAbstractFileAction.getInstance()); actions.addAll(ContextMenuExtensionPoint.getActions()); >>>>>>> List<Action> actions = new ArrayList<>(); actions.add(AddContentTagAction.getInstance()); actions.addAll(ContextMenuExtensionPoint.getActions()); <<<<<<< actions.add(AddContentTagAction.getInstance()); ======= actions.add(TagAbstractFileAction.getInstance()); actions.addAll(ContextMenuExtensionPoint.getActions()); >>>>>>> actions.add(AddContentTagAction.getInstance()); actions.addAll(ContextMenuExtensionPoint.getActions()); <<<<<<< actions.add(AddContentTagAction.getInstance()); ======= actions.add(TagAbstractFileAction.getInstance()); actions.addAll(ContextMenuExtensionPoint.getActions()); >>>>>>> actions.add(AddContentTagAction.getInstance()); actions.addAll(ContextMenuExtensionPoint.getActions()); <<<<<<< actions.add(AddContentTagAction.getInstance()); ======= actions.add(TagAbstractFileAction.getInstance()); actions.addAll(ContextMenuExtensionPoint.getActions()); >>>>>>> actions.add(AddContentTagAction.getInstance()); actions.addAll(ContextMenuExtensionPoint.getActions()); <<<<<<< actions.add(AddContentTagAction.getInstance()); ======= actions.add(TagAbstractFileAction.getInstance()); actions.addAll(ContextMenuExtensionPoint.getActions()); >>>>>>> actions.add(AddContentTagAction.getInstance()); actions.addAll(ContextMenuExtensionPoint.getActions());
<<<<<<< } catch (NoCurrentCaseException ex) { logger.log(Level.SEVERE, "Exception while getting open case.", ex); //NON-NLS } ======= } catch (NoCurrentCaseException ex) { logger.log(Level.SEVERE, "Exception while getting open case.", ex); //NON-NLS } >>>>>>> } catch (NoCurrentCaseException ex) { <<<<<<< } catch (NoCurrentCaseException ex) { logger.log(Level.SEVERE, "Exception while getting open case.", ex); //NON-NLS } ======= } catch (NoCurrentCaseException ex) { logger.log(Level.SEVERE, "Exception while getting open case.", ex); //NON-NLS } >>>>>>> } catch (NoCurrentCaseException ex) { logger.log(Level.SEVERE, "Exception while getting open case.", ex); //NON-NLS
<<<<<<< ======= import java.io.File; import java.text.NumberFormat; import javax.swing.DefaultComboBoxModel; import javax.swing.JFileChooser; import javax.swing.JFormattedTextField; import javax.swing.event.DocumentEvent; import javax.swing.event.DocumentListener; >>>>>>> import java.io.File; import javax.swing.JFileChooser; <<<<<<< ======= import org.openide.util.NbBundle; import org.sleuthkit.autopsy.casemodule.GeneralFilter; >>>>>>> import org.sleuthkit.autopsy.casemodule.GeneralFilter; <<<<<<< private static final long serialVersionUID = 1L; ======= private final JFileChooser fc; >>>>>>> private static final long serialVersionUID = 1L; private final JFileChooser fc; <<<<<<< ======= fc = new JFileChooser(); fc.setFileSelectionMode(JFileChooser.FILES_ONLY); fc.setMultiSelectionEnabled(false); fc.setFileFilter(new GeneralFilter(GeneralFilter.GRAPHIC_IMAGE_EXTS, GeneralFilter.GRAPHIC_IMG_DECR)); int availableProcessors = Runtime.getRuntime().availableProcessors(); Integer fileIngestThreadCountChoices[]; int recommendedFileIngestThreadCount; if (availableProcessors >= 16) { fileIngestThreadCountChoices = new Integer[]{1, 2, 4, 6, 8, 12, 16}; if (availableProcessors >= 18) { recommendedFileIngestThreadCount = 16; } else { recommendedFileIngestThreadCount = 12; } } else if (availableProcessors >= 12 && availableProcessors <= 15) { fileIngestThreadCountChoices = new Integer[]{1, 2, 4, 6, 8, 12}; if (availableProcessors >= 14) { recommendedFileIngestThreadCount = 12; } else { recommendedFileIngestThreadCount = 8; } } else if (availableProcessors >= 8 && availableProcessors <= 11) { fileIngestThreadCountChoices = new Integer[]{1, 2, 4, 6, 8}; if (availableProcessors >= 10) { recommendedFileIngestThreadCount = 8; } else { recommendedFileIngestThreadCount = 6; } } else if (availableProcessors >= 6 && availableProcessors <= 7) { fileIngestThreadCountChoices = new Integer[]{1, 2, 4, 6}; recommendedFileIngestThreadCount = 4; } else if (availableProcessors >= 4 && availableProcessors <= 5) { fileIngestThreadCountChoices = new Integer[]{1, 2, 4}; recommendedFileIngestThreadCount = 2; } else if (availableProcessors >= 2 && availableProcessors <= 3) { fileIngestThreadCountChoices = new Integer[]{1, 2}; recommendedFileIngestThreadCount = 1; } else { fileIngestThreadCountChoices = new Integer[]{1}; recommendedFileIngestThreadCount = 1; } numberOfFileIngestThreadsComboBox.setModel(new DefaultComboBoxModel<>(fileIngestThreadCountChoices)); restartRequiredLabel.setText(NbBundle.getMessage(AutopsyOptionsPanel.class, "AutopsyOptionsPanel.restartRequiredLabel.text", recommendedFileIngestThreadCount)); // TODO listen to changes in form fields and call controller.changed() DocumentListener docListener = new DocumentListener() { @Override public void insertUpdate(DocumentEvent e) { firePropertyChange(OptionsPanelController.PROP_CHANGED, null, null); } @Override public void removeUpdate(DocumentEvent e) { firePropertyChange(OptionsPanelController.PROP_CHANGED, null, null); } @Override public void changedUpdate(DocumentEvent e) { firePropertyChange(OptionsPanelController.PROP_CHANGED, null, null); } }; this.jFormattedTextFieldProcTimeOutHrs.getDocument().addDocumentListener(docListener); >>>>>>> fc = new JFileChooser(); fc.setFileSelectionMode(JFileChooser.FILES_ONLY); fc.setMultiSelectionEnabled(false); fc.setFileFilter(new GeneralFilter(GeneralFilter.GRAPHIC_IMAGE_EXTS, GeneralFilter.GRAPHIC_IMG_DECR)); <<<<<<< ======= numberOfFileIngestThreadsComboBox.setSelectedItem(UserPreferences.numberOfFileIngestThreads()); 
if (UserPreferences.getIsTimeOutEnabled()) { // user specified time out jCheckBoxEnableProcTimeout.setSelected(true); jFormattedTextFieldProcTimeOutHrs.setEditable(true); int timeOutHrs = UserPreferences.getProcessTimeOutHrs(); jFormattedTextFieldProcTimeOutHrs.setValue((long) timeOutHrs); } else { // never time out jCheckBoxEnableProcTimeout.setSelected(false); jFormattedTextFieldProcTimeOutHrs.setEditable(false); int timeOutHrs = UserPreferences.getProcessTimeOutHrs(); jFormattedTextFieldProcTimeOutHrs.setValue((long) timeOutHrs); } agencyLogoPathField.setText(ModuleSettings.getConfigSetting(ReportBranding.MODULE_NAME, ReportBranding.AGENCY_LOGO_PATH_PROP)); >>>>>>> agencyLogoPathField.setText(ModuleSettings.getConfigSetting(ReportBranding.MODULE_NAME, ReportBranding.AGENCY_LOGO_PATH_PROP)); <<<<<<< ======= UserPreferences.setNumberOfFileIngestThreads((Integer) numberOfFileIngestThreadsComboBox.getSelectedItem()); UserPreferences.setIsTimeOutEnabled(jCheckBoxEnableProcTimeout.isSelected()); if (jCheckBoxEnableProcTimeout.isSelected()) { // only store time out if it is enabled long timeOutHrs = (long) jFormattedTextFieldProcTimeOutHrs.getValue(); UserPreferences.setProcessTimeOutHrs((int) timeOutHrs); } if (!agencyLogoPathField.getText().isEmpty()) { File image = new File(agencyLogoPathField.getText()); if (image.exists()) { ModuleSettings.setConfigSetting(ReportBranding.MODULE_NAME, ReportBranding.AGENCY_LOGO_PATH_PROP, agencyLogoPathField.getText()); } } >>>>>>> if (!agencyLogoPathField.getText().isEmpty()) { File image = new File(agencyLogoPathField.getText()); if (image.exists()) { ModuleSettings.setConfigSetting(ReportBranding.MODULE_NAME, ReportBranding.AGENCY_LOGO_PATH_PROP, agencyLogoPathField.getText()); } } <<<<<<< .addContainerGap(148, Short.MAX_VALUE)) ======= .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) .addComponent(jLabelNumThreads) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) .addComponent(numberOfFileIngestThreadsComboBox, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addComponent(restartRequiredLabel)) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) .addComponent(jLabelSetProcessTimeOut) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addComponent(jCheckBoxEnableProcTimeout) .addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) .addComponent(jFormattedTextFieldProcTimeOutHrs, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addComponent(jLabelProcessTimeOutUnits))) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) .addComponent(agencyLogoImageLabel) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) .addComponent(agencyLogoPathField, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addComponent(browseLogosButton)) .addContainerGap(52, Short.MAX_VALUE)) >>>>>>> .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(agencyLogoImageLabel) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) 
.addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) .addComponent(agencyLogoPathField) .addComponent(browseLogosButton)) .addGap(35, 35, 35)) <<<<<<< private void useBestViewerRBActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_useBestViewerRBActionPerformed ======= private void viewsHideSlackCBActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_viewsHideSlackCBActionPerformed firePropertyChange(OptionsPanelController.PROP_CHANGED, null, null); }//GEN-LAST:event_viewsHideSlackCBActionPerformed private void dataSourcesHideSlackCBActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_dataSourcesHideSlackCBActionPerformed >>>>>>> private void useBestViewerRBActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_useBestViewerRBActionPerformed firePropertyChange(OptionsPanelController.PROP_CHANGED, null, null); }//GEN-LAST:event_useBestViewerRBActionPerformed private void keepCurrentViewerRBActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_keepCurrentViewerRBActionPerformed firePropertyChange(OptionsPanelController.PROP_CHANGED, null, null); }//GEN-LAST:event_keepCurrentViewerRBActionPerformed private void dataSourcesHideKnownCBActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_dataSourcesHideKnownCBActionPerformed firePropertyChange(OptionsPanelController.PROP_CHANGED, null, null); }//GEN-LAST:event_dataSourcesHideKnownCBActionPerformed private void viewsHideKnownCBActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_viewsHideKnownCBActionPerformed firePropertyChange(OptionsPanelController.PROP_CHANGED, null, null); }//GEN-LAST:event_viewsHideKnownCBActionPerformed private void useLocalTimeRBActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_useLocalTimeRBActionPerformed firePropertyChange(OptionsPanelController.PROP_CHANGED, null, null); }//GEN-LAST:event_useLocalTimeRBActionPerformed private void useGMTTimeRBActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_useGMTTimeRBActionPerformed firePropertyChange(OptionsPanelController.PROP_CHANGED, null, null); }//GEN-LAST:event_useGMTTimeRBActionPerformed private void dataSourcesHideSlackCBActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_dataSourcesHideSlackCBActionPerformed
<<<<<<< ======= NewCaseVisualPanel2 currentComponent = getComponent(); final String caseNumber = currentComponent.getCaseNumber(); final String examiner = currentComponent.getExaminer(); try { SwingUtilities.invokeLater(new Runnable() { @Override public void run() { try { Case.create(createdDirectory, caseName, caseNumber, examiner); } catch (Exception ex) { Exceptions.printStackTrace(ex); } } }); //Case.create(createdDirectory, caseName, caseNumber, examiner); } catch (Exception ex) { throw new WizardValidationException(this.getComponent(), NbBundle.getMessage(this.getClass(), "NewCaseWizardPanel2.validate.errCreateCase.msg"), null); } >>>>>>>
<<<<<<< + " END) AS last60," + " SUM(CASE " + " WHEN artifact_type_id = " + TSK_WEB_ACCOUNT_TYPE.getTypeID() + " THEN 1 " + " ELSE 0 " + " END) AS countOfKnownAccountTypes," ======= + " END) AS pageViewsInLast60," >>>>>>> + " END) AS pageViewsInLast60," + " SUM(CASE " + " WHEN artifact_type_id = " + TSK_WEB_ACCOUNT_TYPE.getTypeID() + " THEN 1 " + " ELSE 0 " + " END) AS countOfKnownAccountTypes," <<<<<<< activityEnd, totalVisits, visitsInLast60, filesDownloaded, countOfKnownAccountTypes, dataSource)); ======= activityEnd, totalPageViews, pageViewsInLast60, filesDownloaded, dataSource)); >>>>>>> activityEnd, totalPageViews, pageViewsInLast60, filesDownloaded, countOfKnownAccountTypes, dataSource));
<<<<<<< .append("</td><td>").append(jobTotals.processTime).append("</td></tr>\n"); detailsSb.append("<tr><td>").append( ======= .append("</td><td>").append(processTime.get()).append("</td></tr>\n"); //NON-NLS detailsSb.append("<tr><td>").append( //NON-NLS >>>>>>> .append("</td><td>").append(jobTotals.processTime).append("</td></tr>\n"); //NON-NLS detailsSb.append("<tr><td>").append( //NON-NLS <<<<<<< .append("</td><td>").append(jobTotals.numFiles).append("</td></tr>\n"); detailsSb.append("</table>"); ======= .append("</td><td>").append(numFiles.get()).append("</td></tr>\n"); //NON-NLS detailsSb.append("</table>"); //NON-NLS >>>>>>> .append("</td><td>").append(jobTotals.numFiles).append("</td></tr>\n"); //NON-NLS detailsSb.append("</table>"); //NON-NLS
<<<<<<< return "MD5 Hash"; } }, ObjectID { @Override public String toString() { return "Object ID"; ======= return NbBundle.getMessage(this.getClass(), "AbstractAbstractFileNode.md5HashColLbl"); >>>>>>> return NbBundle.getMessage(this.getClass(), "AbstractAbstractFileNode.md5HashColLbl"); } }, ObjectID { @Override public String toString() { return "Object ID";
<<<<<<< import org.sleuthkit.datamodel.EventType; import org.sleuthkit.datamodel.TimelineEvent; ======= import org.sleuthkit.datamodel.TimelineEventType; >>>>>>> import org.sleuthkit.datamodel.TimelineEvent; import org.sleuthkit.datamodel.TimelineEventType; <<<<<<< Map<CombinedEventGroup, List<TimelineEvent>> groupedEventList = events.stream().collect(groupingBy(event -> new CombinedEventGroup(event.getTime(), event.getFileObjID(), event.getFullDescription()))); for(Entry<CombinedEventGroup, List<TimelineEvent>> entry: groupedEventList.entrySet()){ List<TimelineEvent> groupedEvents = entry.getValue(); CombinedEventGroup group = entry.getKey(); Map<EventType, Long> eventMap = new HashMap<>(); for(TimelineEvent event: groupedEvents) { eventMap.put(event.getEventType(), event.getEventID()); } // We want to merge together file sub-type events that are at //the same time, but create individual events for other event // sub-types if (hasFileTypeEvents(eventMap.keySet()) || eventMap.size() == 1) { combinedEvents.add(new CombinedEvent(group.time * 1000, eventMap)); } else { for(Entry<EventType, Long> singleEntry: eventMap.entrySet()) { Map<EventType, Long> singleEventMap = new HashMap<>(); singleEventMap.put(singleEntry.getKey(), singleEntry.getValue()); combinedEvents.add(new CombinedEvent(group.time * 1000, singleEventMap)); ======= TimelineDBUtils dbUtils = new TimelineDBUtils(sleuthkitCase); final String querySql = "SELECT time, " + dbUtils.csvAggFunction("CAST(tsk_events.event_id AS VARCHAR)") + " AS eventIDs, " + dbUtils.csvAggFunction("CAST(event_type_id AS VARCHAR)") + " AS eventTypes" + " FROM " + TimelineManager.getAugmentedEventsTablesSQL(filterState.getActiveFilter()) + " WHERE time >= " + startTime + " AND time <" + endTime + " AND " + eventManager.getSQLWhere(filterState.getActiveFilter()) + " GROUP BY time, full_description, file_obj_id ORDER BY time ASC, full_description"; try (SleuthkitCase.CaseDbQuery dbQuery = sleuthkitCase.executeQuery(querySql); ResultSet resultSet = dbQuery.getResultSet();) { while (resultSet.next()) { //make a map from event type to event ID List<Long> eventIDs = unGroupConcat(resultSet.getString("eventIDs"), Long::valueOf); List<TimelineEventType> eventTypes = unGroupConcat(resultSet.getString("eventTypes"), typesString -> eventManager.getEventType(Integer.valueOf(typesString)).orElseThrow(() -> new TskCoreException("Error mapping event type id " + typesString + ".S"))); // We want to merge together file sub-type events that are at //the same time, but create individual events for other event // sub-types Map<TimelineEventType, Long> eventMap = new HashMap<>(); if (hasFileTypeEvents(eventTypes)) { for (int i = 0; i < eventIDs.size(); i++) { eventMap.put(eventTypes.get(i), eventIDs.get(i)); } combinedEvents.add(new CombinedEvent(resultSet.getLong("time") * 1000, eventMap)); } else { for (int i = 0; i < eventIDs.size(); i++) { eventMap.put(eventTypes.get(i), eventIDs.get(i)); combinedEvents.add(new CombinedEvent(resultSet.getLong("time") * 1000, eventMap)); eventMap.clear(); } >>>>>>> Map<CombinedEventGroup, List<TimelineEvent>> groupedEventList = events.stream().collect(groupingBy(event -> new CombinedEventGroup(event.getTime(), event.getFileObjID(), event.getFullDescription()))); for(Entry<CombinedEventGroup, List<TimelineEvent>> entry: groupedEventList.entrySet()){ List<TimelineEvent> groupedEvents = entry.getValue(); CombinedEventGroup group = entry.getKey(); Map<TimelineEventType, Long> eventMap = new HashMap<>(); for(TimelineEvent event: groupedEvents) 
{ eventMap.put(event.getEventType(), event.getEventID()); } // We want to merge together file sub-type events that are at //the same time, but create individual events for other event // sub-types if (hasFileTypeEvents(eventMap.keySet()) || eventMap.size() == 1) { combinedEvents.add(new CombinedEvent(group.time * 1000, eventMap)); } else { for(Entry<TimelineEventType, Long> singleEntry: eventMap.entrySet()) { Map<TimelineEventType, Long> singleEventMap = new HashMap<>(); singleEventMap.put(singleEntry.getKey(), singleEntry.getValue()); combinedEvents.add(new CombinedEvent(group.time * 1000, singleEventMap)); <<<<<<< private boolean hasFileTypeEvents(Collection<EventType> eventTypes) { ======= private boolean hasFileTypeEvents(List<TimelineEventType> eventTypes) { >>>>>>> private boolean hasFileTypeEvents(Collection<TimelineEventType> eventTypes) {
<<<<<<< import java.util.Objects; import org.sleuthkit.autopsy.centralrepository.datamodel.EamArtifactInstance.KnownStatus; ======= import org.sleuthkit.datamodel.TskData; >>>>>>> import java.util.Objects; import org.sleuthkit.datamodel.TskData;
<<<<<<< import org.sleuthkit.autopsy.coreutils.ImageUtils; ======= >>>>>>> import org.sleuthkit.autopsy.coreutils.ImageUtils; <<<<<<< public class MediaViewImagePanel extends JPanel { private static final Logger LOGGER = Logger.getLogger(MediaViewImagePanel.class.getName()); private final boolean fxInited; ======= public class MediaViewImagePanel extends javax.swing.JPanel { >>>>>>> public class MediaViewImagePanel extends JPanel { private static final Logger LOGGER = Logger.getLogger(MediaViewImagePanel.class.getName()); private final boolean fxInited; <<<<<<< public MediaViewImagePanel() { initComponents(); fxInited = org.sleuthkit.autopsy.core.Installer.isJavaFxInited(); if (fxInited) { Platform.runLater(() -> { // build jfx ui (we could do this in FXML?) fxImageView = new ImageView(); // will hold image borderpane = new BorderPane(fxImageView); // centers and sizes imageview borderpane.setBackground(new Background(new BackgroundFill(javafx.scene.paint.Color.BLACK, CornerRadii.EMPTY, Insets.EMPTY))); fxPanel = new JFXPanel(); // bridge jfx-swing Scene scene = new Scene(borderpane, javafx.scene.paint.Color.BLACK); //root of jfx tree fxPanel.setScene(scene); //bind size of image to that of scene, while keeping proportions fxImageView.fitWidthProperty().bind(scene.widthProperty()); fxImageView.fitHeightProperty().bind(scene.heightProperty()); ======= public MediaViewImagePanel() { initComponents(); fxInited = org.sleuthkit.autopsy.core.Installer.isJavaFxInited(); if (fxInited) { Platform.runLater(() -> { fxImageView = new ImageView(); borderpane = new BorderPane(fxImageView); borderpane.setBackground(new Background(new BackgroundFill(javafx.scene.paint.Color.BLACK, CornerRadii.EMPTY, Insets.EMPTY))); fxPanel = new JFXPanel(); Scene scene = new Scene(borderpane, javafx.scene.paint.Color.BLACK); fxImageView.fitWidthProperty().bind(scene.widthProperty()); fxImageView.fitHeightProperty().bind(scene.heightProperty()); fxPanel.setScene(scene); // resizes the image to have width of 100 while preserving the ratio and using // higher quality filtering method; this ImageView is also cached to // improve performance >>>>>>> public MediaViewImagePanel() { initComponents(); fxInited = org.sleuthkit.autopsy.core.Installer.isJavaFxInited(); if (fxInited) { Platform.runLater(() -> { // build jfx ui (we could do this in FXML?) 
fxImageView = new ImageView(); // will hold image borderpane = new BorderPane(fxImageView); // centers and sizes imageview borderpane.setBackground(new Background(new BackgroundFill(javafx.scene.paint.Color.BLACK, CornerRadii.EMPTY, Insets.EMPTY))); fxPanel = new JFXPanel(); // bridge jfx-swing Scene scene = new Scene(borderpane, javafx.scene.paint.Color.BLACK); //root of jfx tree fxPanel.setScene(scene); //bind size of image to that of scene, while keeping proportions fxImageView.fitWidthProperty().bind(scene.widthProperty()); fxImageView.fitHeightProperty().bind(scene.heightProperty()); <<<<<<< EventQueue.invokeLater(() -> { add(fxPanel);//add jfx ui to JPanel ======= EventQueue.invokeLater(() -> { add(fxPanel); >>>>>>> EventQueue.invokeLater(() -> { add(fxPanel);//add jfx ui to JPanel <<<<<<< /* hide the panel during loading/transformations * TODO: repalce this with a progress indicator */ ======= final String fileName = file.getName(); //hide the panel during loading/transformations //TODO: repalce this with a progress indicator >>>>>>> //hide the panel during loading/transformations //TODO: repalce this with a progress indicator <<<<<<< try (InputStream inputStream = new BufferedInputStream(new ReadContentInputStream(file));) { BufferedImage bufferedImage = ImageIO.read(inputStream); if (bufferedImage == null) { LOGGER.log(Level.WARNING, "Could image reader not found for file: {0}", file.getName()); //NON-NLS ======= try (InputStream inputStream = new ReadContentInputStream(file);) { // fxImage = new Image(inputStream); //original input stream BufferedImage bi = ImageIO.read(inputStream); if (bi == null) { logger.log(Level.WARNING, "Could image reader not found for file: " + fileName); //NON-NLS >>>>>>> try (InputStream inputStream = new BufferedInputStream(new ReadContentInputStream(file));) { BufferedImage bufferedImage = ImageIO.read(inputStream); if (bufferedImage == null) { LOGGER.log(Level.WARNING, "Could image reader not found for file: {0}", file.getName()); //NON-NLS <<<<<<< fxImage = SwingFXUtils.toFXImage(bufferedImage, null); } catch (IllegalArgumentException | IOException ex) { LOGGER.log(Level.WARNING, "Could not load image file into media view: " + file.getName(), ex); //NON-NLS ======= //convert from awt imageto fx image fxImage = SwingFXUtils.toFXImage(bi, null); } catch (IllegalArgumentException | IOException ex) { logger.log(Level.WARNING, "Could not load image file into media view: " + fileName, ex); //NON-NLS >>>>>>> fxImage = SwingFXUtils.toFXImage(bufferedImage, null); } catch (IllegalArgumentException | IOException ex) { LOGGER.log(Level.WARNING, "Could not load image file into media view: " + file.getName(), ex); //NON-NLS <<<<<<< if (fxImage.isError()) { LOGGER.log(Level.WARNING, "Could not load image file into media view: " + file.getName(), fxImage.getException()); //NON-NLS ======= if (fxImage.isError()) { logger.log(Level.WARNING, "Could not load image file into media view: " + fileName, fxImage.getException()); //NON-NLS >>>>>>> if (fxImage.isError()) { LOGGER.log(Level.WARNING, "Could not load image file into media view: " + file.getName(), fxImage.getException()); //NON-NLS <<<<<<< * @return supported mime types ======= * returns supported mime types * * @return >>>>>>> * @return supported mime types
<<<<<<< import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttribute; ======= import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeInstance; >>>>>>> import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeInstance; <<<<<<< private final Long fileId; ======= /** * Constructor to create an instance given a CorrelationAttribute. * * @param correlationAttribute The correlation attribute to modify. */ public AddEditCentralRepoCommentAction(CorrelationAttributeInstance correlationAttribute) { super(Bundle.AddEditCentralRepoCommentAction_menuItemText_addEditCentralRepoComment()); this.correlationAttributeInstance = correlationAttribute; } >>>>>>> private final Long fileId; <<<<<<< correlationAttribute = EamArtifactUtil.getCorrelationAttributeFromContent(file); fileId = file.getId(); if (correlationAttribute == null) { ======= correlationAttributeInstance = EamArtifactUtil.getInstanceFromContent(file); if (correlationAttributeInstance == null) { >>>>>>> fileId = file.getId(); correlationAttributeInstance = EamArtifactUtil.getInstanceFromContent(file); if (correlationAttributeInstance == null) {
<<<<<<< import java.util.HashMap; import java.util.LinkedHashSet; ======= >>>>>>> import java.util.HashMap; <<<<<<< outlineView.getOutline().setAutoResizeMode(JTable.AUTO_RESIZE_ALL_COLUMNS); /* * Since we are modifying the columns, we don't want to listen * to added/removed events as un-hide/hide. */ tableListener.listenToVisibilityChanges(false); ======= outline.setAutoResizeMode(JTable.AUTO_RESIZE_ALL_COLUMNS); >>>>>>> outline.setAutoResizeMode(JTable.AUTO_RESIZE_ALL_COLUMNS); /* * Since we are modifying the columns, we don't want to listen * to added/removed events as un-hide/hide. */ tableListener.listenToVisibilityChanges(false); <<<<<<< Integer sortRank = preferences.getInt(getColumnSortRankKey(columnKey, propName), 0); ======= Integer sortRank = preferences.getInt(ResultViewerPersistence.getColumnSortRankKey(tfn, propName), 0); >>>>>>> Integer sortRank = preferences.getInt(ResultViewerPersistence.getColumnSortRankKey(tfn, propName), 0); <<<<<<< Boolean sortOrder = preferences.getBoolean(getColumnSortOrderKey(columnKey, propName), true); ======= Boolean sortOrder = preferences.getBoolean(ResultViewerPersistence.getColumnSortOrderKey(tfn, propName), true); >>>>>>> Boolean sortOrder = preferences.getBoolean(ResultViewerPersistence.getColumnSortOrderKey(tfn, propName), true); <<<<<<< Integer value = preferences.getInt(getColumnPositionKey(columnKey, prop.getName()), -1); ======= Integer value = preferences.getInt(ResultViewerPersistence.getColumnPositionKey(tfn, prop.getName()), -1); >>>>>>> Integer value = preferences.getInt(ResultViewerPersistence.getColumnPositionKey(tfn, prop.getName()), -1); <<<<<<< /** * Gets a key for the current node and a property of its child nodes to * store the column position into a preference file. * * @param keyBase The base of the key. Typically a fully quallified class * name. * @param propName The name of the specific property to make a key for. * * @return A generated key for the preference file */ private String getColumnPositionKey(String keyBase, String propName) { return getColumnKeyBase(keyBase, propName) + ".column"; } /** * Gets a key for the current node and a property of its child nodes to * store the column sort ordering into a preference file. * * @param keyBase The base of the key. Typically a fully quallified class * name. * @param propName The name of the specific property to make a key for. * * @return A generated key for the preference file */ static private String getColumnSortOrderKey(String keyBase, String propName) { return getColumnKeyBase(keyBase, propName) + ".sortOrder"; } /** * Gets a key for the current node and a property of its child nodes to * store the column sort rank into a preference file. * * @param keyBase The base of the key. Typically a fully quallified class * name. * @param propName The name of the specific property to make a key for. * * @return A generated key for the preference file */ static private String getColumnSortRankKey(String keyBase, String propName) { return getColumnKeyBase(keyBase, propName) + ".sortRank"; } static private String getColumnHiddenKey(String keyBase, String propName) { return getColumnKeyBase(keyBase, propName) + ".hidden"; } private static String getColumnKeyBase(String keyBase, String propName) { return stripNonAlphanumeric(keyBase) + "." + stripNonAlphanumeric(propName); } private static String stripNonAlphanumeric(String str) { return str.replaceAll("[^a-zA-Z0-9_]", ""); } ======= >>>>>>>
<<<<<<< import org.sleuthkit.autopsy.corecomponentinterfaces.FileTypeViewer; ======= import org.sleuthkit.datamodel.SleuthkitCase; import org.sleuthkit.datamodel.TskCoreException; >>>>>>> import org.sleuthkit.datamodel.SleuthkitCase; import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.autopsy.corecomponentinterfaces.FileTypeViewer;
<<<<<<< /* * Copyright (C) 2012-2018 52°North Initiative for Geospatial Open Source ======= /** * Copyright (C) 2012-2018 52°North Initiative for Geospatial Open Source >>>>>>> /* * Copyright (C) 2012-2018 52°North Initiative for Geospatial Open Source <<<<<<< private AbstractFeatureEntity featureOfInterest; private PhenomenonEntity observableProperty; private ProcedureEntity procedure; private OfferingEntity offering; private boolean hiddenChild = false; ======= private AbstractFeatureOfInterest featureOfInterest; private ObservableProperty observableProperty; private Procedure procedure; private Offering offering; private String seriesType; private boolean hiddenChild; private boolean publish = true; >>>>>>> private AbstractFeatureEntity featureOfInterest; private PhenomenonEntity observableProperty; private ProcedureEntity procedure; private OfferingEntity offering; private boolean hiddenChild = false; private boolean publish = true; <<<<<<< public AbstractFeatureEntity getFeatureOfInterest() { ======= public boolean isPublish() { return publish; } public void setPublish(boolean publish) { this.publish = publish; } public AbstractFeatureOfInterest getFeatureOfInterest() { >>>>>>> public boolean isPublish() { return publish; } public void setPublish(boolean publish) { this.publish = publish; } public AbstractFeatureEntity getFeatureOfInterest() { <<<<<<< /** * @param featureOfInterest * the featureOfInterest to set */ public void setFeatureOfInterest(AbstractFeatureEntity featureOfInterest) { ======= public void setFeatureOfInterest(AbstractFeatureOfInterest featureOfInterest) { >>>>>>> public void setFeatureOfInterest(AbstractFeatureEntity featureOfInterest) { <<<<<<< /** * @return the observableProperty */ public PhenomenonEntity getPhenomenon() { ======= public ObservableProperty getObservableProperty() { >>>>>>> public PhenomenonEntity getPhenomenon() { <<<<<<< /** * @param observableProperty * the observableProperty to set */ public void setPhenomenon(PhenomenonEntity observableProperty) { ======= public void setObservableProperty(ObservableProperty observableProperty) { >>>>>>> public void setPhenomenon(PhenomenonEntity observableProperty) { <<<<<<< /** * @return the procedure */ public ProcedureEntity getProcedure() { ======= public Procedure getProcedure() { >>>>>>> public ProcedureEntity getProcedure() { <<<<<<< private String seriesType; /** * @param procedure * the procedure to set */ public void setProcedure(ProcedureEntity procedure) { ======= public void setProcedure(Procedure procedure) { >>>>>>> /** * @param procedure * the procedure to set */ public void setProcedure(ProcedureEntity procedure) { <<<<<<< /** * @return the offering */ public OfferingEntity getOffering() { ======= public Offering getOffering() { >>>>>>> /** * @return the offering */ public OfferingEntity getOffering() { <<<<<<< /** * @param offering the offering to set */ public void setOffering(OfferingEntity offering) { ======= public void setOffering(Offering offering) { >>>>>>> public void setOffering(OfferingEntity offering) {
<<<<<<< ======= boolean enableOpenMultiUserCaseButton = UserPreferences.getIsMultiUserModeEnabled(); openMultiUserCaseButton.setEnabled(enableOpenMultiUserCaseButton); openMultiUserCaseLabel.setEnabled(enableOpenMultiUserCaseButton); >>>>>>> <<<<<<< ======= private void openMultiUserCaseButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_openMultiUserCaseButtonActionPerformed setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); MultiUserCasesDialog multiUserCaseWindow = MultiUserCasesDialog.getInstance(); multiUserCaseWindow.setLocationRelativeTo(WindowManager.getDefault().getMainWindow()); multiUserCaseWindow.setVisible(true); setCursor(null); }//GEN-LAST:event_openMultiUserCaseButtonActionPerformed >>>>>>>
<<<<<<< ======= import org.controlsfx.control.action.Action; import org.controlsfx.control.action.ActionUtils; >>>>>>> import org.controlsfx.control.action.Action; import org.controlsfx.control.action.ActionUtils; <<<<<<< final List<ContentTag> fileTags = tagsManager.getContentTags(file); if (tagName == categoryManager.getTagName(Category.ZERO)) { ======= final List<ContentTag> fileTags = tagsManager.getContentTagsByContent(file); if (tagName.equals(catZeroTagName)) { >>>>>>> final List<ContentTag> fileTags = tagsManager.getContentTags(file); if (tagName == categoryManager.getTagName(Category.ZERO)) {
<<<<<<< // first of all, update the current path group, regardless of what grouping is in view try { DrawableFile file = getDrawableDB().getFileFromID(fileId); String pathVal = file.getDrawablePath(); GroupKey<?> pathGroupKey = new GroupKey(DrawableAttribute.PATH,pathVal, file.getDataSource()); updateCurrentPathGroup(pathGroupKey); } catch (TskCoreException | TskDataException ex) { Exceptions.printStackTrace(ex); } ======= // first of all, update the current path group, regardless of what grouping is in view try { DrawableFile file = getDrawableDB().getFileFromID(fileId); String pathVal = file.getDrawablePath(); GroupKey<?> pathGroupKey = new GroupKey<>(DrawableAttribute.PATH,pathVal, file.getDataSource()); updateCurrentPathGroup(pathGroupKey); } catch (TskCoreException | TskDataException ex) { Exceptions.printStackTrace(ex); } >>>>>>> // first of all, update the current path group, regardless of what grouping is in view try { DrawableFile file = getDrawableDB().getFileFromID(fileId); String pathVal = file.getDrawablePath(); GroupKey<?> pathGroupKey = new GroupKey<>(DrawableAttribute.PATH,pathVal, file.getDataSource()); updateCurrentPathGroup(pathGroupKey); } catch (TskCoreException | TskDataException ex) { Exceptions.printStackTrace(ex); } <<<<<<< ======= >>>>>>> <<<<<<< if (analyzedGroups.contains(group) == false) { // Add to analyzedGroups only if this is the grouping being viewed. if (getGroupBy() == group.getGroupKey().getAttribute()) { analyzedGroups.add(group); sortAnalyzedGroups(); } ======= // Add to analyzedGroups only if it's the same group type as the one in view if ((analyzedGroups.contains(group) == false) && (getGroupBy() == group.getGroupKey().getAttribute())) { analyzedGroups.add(group); sortAnalyzedGroups(); >>>>>>> // Add to analyzedGroups only if it's the same group type as the one in view if ((analyzedGroups.contains(group) == false) && (getGroupBy() == group.getGroupKey().getAttribute())) { // Add to analyzedGroups only if this is the grouping being viewed. if (getGroupBy() == group.getGroupKey().getAttribute()) { analyzedGroups.add(group); sortAnalyzedGroups(); }
<<<<<<< actions.add(null); // creates a menu separator actions.add(AddContentTagAction.getInstance()); actions.add(AddBlackboardArtifactTagAction.getInstance()); ======= actions.add(null); // creates a menu separator actions.add(TagAbstractFileAction.getInstance()); actions.add(TagBlackboardArtifactAction.getInstance()); actions.addAll(ContextMenuExtensionPoint.getActions()); >>>>>>> actions.add(null); // creates a menu separator actions.add(AddContentTagAction.getInstance()); actions.add(AddBlackboardArtifactTagAction.getInstance()); actions.addAll(ContextMenuExtensionPoint.getActions()); <<<<<<< actions.add(AddContentTagAction.getInstance()); actions.add(AddBlackboardArtifactTagAction.getInstance()); ======= actions.add(TagAbstractFileAction.getInstance()); actions.add(TagBlackboardArtifactAction.getInstance()); actions.addAll(ContextMenuExtensionPoint.getActions()); >>>>>>> actions.add(AddContentTagAction.getInstance()); actions.add(AddBlackboardArtifactTagAction.getInstance()); actions.addAll(ContextMenuExtensionPoint.getActions()); <<<<<<< actions.add(AddContentTagAction.getInstance()); actions.add(AddBlackboardArtifactTagAction.getInstance()); ======= actions.add(TagAbstractFileAction.getInstance()); actions.add(TagBlackboardArtifactAction.getInstance()); actions.addAll(ContextMenuExtensionPoint.getActions()); >>>>>>> actions.add(AddContentTagAction.getInstance()); actions.add(AddBlackboardArtifactTagAction.getInstance()); actions.addAll(ContextMenuExtensionPoint.getActions()); <<<<<<< actions.add(AddContentTagAction.getInstance()); actions.add(AddBlackboardArtifactTagAction.getInstance()); ======= actions.add(TagAbstractFileAction.getInstance()); actions.add(TagBlackboardArtifactAction.getInstance()); actions.addAll(ContextMenuExtensionPoint.getActions()); >>>>>>> actions.add(AddContentTagAction.getInstance()); actions.add(AddBlackboardArtifactTagAction.getInstance()); actions.addAll(ContextMenuExtensionPoint.getActions()); <<<<<<< actions.add(AddContentTagAction.getInstance()); actions.add(AddBlackboardArtifactTagAction.getInstance()); ======= actions.add(TagAbstractFileAction.getInstance()); actions.add(TagBlackboardArtifactAction.getInstance()); actions.addAll(ContextMenuExtensionPoint.getActions()); >>>>>>> actions.add(AddContentTagAction.getInstance()); actions.add(AddBlackboardArtifactTagAction.getInstance()); actions.addAll(ContextMenuExtensionPoint.getActions());
<<<<<<< logger.log(Level.WARNING, "Skipping processing, module not initialized, file: {0}", abstractFile.getName()); putIngestStatus(jobId, abstractFile.getId(), IngestStatus.SKIPPED_ERROR_INDEXING); ======= logger.log(Level.WARNING, "Skipping processing, module not initialized, file: {0}", abstractFile.getName()); //NON-NLS putIngestStatus(abstractFile.getId(), IngestStatus.SKIPPED_ERROR_INDEXING); >>>>>>> logger.log(Level.WARNING, "Skipping processing, module not initialized, file: {0}", abstractFile.getName()); //NON-NLS putIngestStatus(jobId, abstractFile.getId(), IngestStatus.SKIPPED_ERROR_INDEXING); <<<<<<< logger.log(Level.WARNING, "Failed to extract strings and ingest, file ''{0}'' (id: {1}).", new Object[]{aFile.getName(), aFile.getId()}); putIngestStatus(jobId, aFile.getId(), IngestStatus.SKIPPED_ERROR_TEXTEXTRACT); ======= logger.log(Level.WARNING, "Failed to extract strings and ingest, file ''{0}'' (id: {1}).", new Object[]{aFile.getName(), aFile.getId()}); //NON-NLS putIngestStatus(aFile.getId(), IngestStatus.SKIPPED_ERROR_TEXTEXTRACT); >>>>>>> logger.log(Level.WARNING, "Failed to extract strings and ingest, file ''{0}'' (id: {1}).", new Object[]{aFile.getName(), aFile.getId()}); //NON-NLS putIngestStatus(jobId, aFile.getId(), IngestStatus.SKIPPED_ERROR_TEXTEXTRACT); <<<<<<< logger.log(Level.WARNING, "Failed to extract strings and ingest, file '" + aFile.getName() + "' (id: " + aFile.getId() + ").", ex); putIngestStatus(jobId, aFile.getId(), IngestStatus.SKIPPED_ERROR_INDEXING); ======= logger.log(Level.WARNING, "Failed to extract strings and ingest, file '" + aFile.getName() + "' (id: " + aFile.getId() + ").", ex); //NON-NLS putIngestStatus(aFile.getId(), IngestStatus.SKIPPED_ERROR_INDEXING); >>>>>>> logger.log(Level.WARNING, "Failed to extract strings and ingest, file '" + aFile.getName() + "' (id: " + aFile.getId() + ").", ex); //NON-NLS putIngestStatus(jobId, aFile.getId(), IngestStatus.SKIPPED_ERROR_INDEXING); <<<<<<< putIngestStatus(jobId, aFile.getId(), IngestStatus.SKIPPED_ERROR_INDEXING); logger.log(Level.WARNING, "Unable to index meta-data for file: " + aFile.getId(), ex); ======= putIngestStatus(aFile.getId(), IngestStatus.SKIPPED_ERROR_INDEXING); logger.log(Level.WARNING, "Unable to index meta-data for file: " + aFile.getId(), ex); //NON-NLS >>>>>>> putIngestStatus(jobId, aFile.getId(), IngestStatus.SKIPPED_ERROR_INDEXING); logger.log(Level.WARNING, "Unable to index meta-data for file: " + aFile.getId(), ex); //NON-NLS <<<<<<< putIngestStatus(jobId, aFile.getId(), IngestStatus.SKIPPED_ERROR_INDEXING); logger.log(Level.WARNING, "Unable to index meta-data for file: " + aFile.getId(), ex); ======= putIngestStatus(aFile.getId(), IngestStatus.SKIPPED_ERROR_INDEXING); logger.log(Level.WARNING, "Unable to index meta-data for file: " + aFile.getId(), ex); //NON-NLS >>>>>>> putIngestStatus(jobId, aFile.getId(), IngestStatus.SKIPPED_ERROR_INDEXING); logger.log(Level.WARNING, "Unable to index meta-data for file: " + aFile.getId(), ex); //NON-NLS <<<<<<< logger.log(Level.WARNING, "Failed to extract text and ingest, file ''{0}'' (id: {1}).", new Object[]{aFile.getName(), aFile.getId()}); putIngestStatus(jobId, aFile.getId(), IngestStatus.SKIPPED_ERROR_TEXTEXTRACT); ======= logger.log(Level.WARNING, "Failed to extract text and ingest, file ''{0}'' (id: {1}).", new Object[]{aFile.getName(), aFile.getId()}); //NON-NLS putIngestStatus(aFile.getId(), IngestStatus.SKIPPED_ERROR_TEXTEXTRACT); >>>>>>> logger.log(Level.WARNING, "Failed to extract text and ingest, file ''{0}'' (id: 
{1}).", new Object[]{aFile.getName(), aFile.getId()}); //NON-NLS putIngestStatus(jobId, aFile.getId(), IngestStatus.SKIPPED_ERROR_TEXTEXTRACT);
<<<<<<< class IngestModulesConfigWizardPanel extends ShortcutWizardDescriptorPanel { ======= class IngestModulesConfigWizardPanel implements WizardDescriptor.FinishablePanel<WizardDescriptor> { @NbBundle.Messages("IngestModulesConfigWizardPanel.name.text=Configure Ingest Modules") >>>>>>> class IngestModulesConfigWizardPanel extends ShortcutWizardDescriptorPanel { @NbBundle.Messages("IngestModulesConfigWizardPanel.name.text=Configure Ingest Modules")
<<<<<<< private static final String historyQuery = "SELECT moz_historyvisits.id,url,title,visit_count,(visit_date/1000000) as visit_date,from_visit,(SELECT url FROM moz_places WHERE id=moz_historyvisits.from_visit) as ref FROM moz_places, moz_historyvisits WHERE moz_places.id = moz_historyvisits.place_id AND hidden = 0"; private static final String cookieQuery = "SELECT name,value,host,expiry,(lastAccessed/1000000) as lastAccessed,(creationTime/1000000) as creationTime FROM moz_cookies"; private static final String cookieQueryV3 = "SELECT name,value,host,expiry,(lastAccessed/1000000) as lastAccessed FROM moz_cookies"; private static final String bookmarkQuery = "SELECT fk, moz_bookmarks.title, url FROM moz_bookmarks INNER JOIN moz_places ON moz_bookmarks.fk=moz_places.id"; private static final String downloadQuery = "SELECT target, source,(startTime/1000000) as startTime, maxBytes FROM moz_downloads"; ======= private static final String ffquery = "SELECT moz_historyvisits.id,url,title,visit_count,(visit_date/1000000) as visit_date,from_visit,(SELECT url FROM moz_places WHERE id=moz_historyvisits.from_visit) as ref FROM moz_places, moz_historyvisits WHERE moz_places.id = moz_historyvisits.place_id AND hidden = 0"; private static final String ffcookiequery = "SELECT name,value,host,expiry,(lastAccessed/1000000) as lastAccessed,(creationTime/1000000) as creationTime FROM moz_cookies"; private static final String ff3cookiequery = "SELECT name,value,host,expiry,(lastAccessed/1000000) as lastAccessed FROM moz_cookies"; private static final String ffbookmarkquery = "SELECT fk, moz_bookmarks.title, url, (moz_bookmarks.dateAdded/1000000) as dateAdded FROM moz_bookmarks INNER JOIN moz_places ON moz_bookmarks.fk=moz_places.id"; private static final String ffdownloadquery = "select target, source,(startTime/1000000) as startTime, maxBytes from moz_downloads"; >>>>>>> private static final String historyQuery = "SELECT moz_historyvisits.id,url,title,visit_count,(visit_date/1000000) as visit_date,from_visit,(SELECT url FROM moz_places WHERE id=moz_historyvisits.from_visit) as ref FROM moz_places, moz_historyvisits WHERE moz_places.id = moz_historyvisits.place_id AND hidden = 0"; private static final String cookieQuery = "SELECT name,value,host,expiry,(lastAccessed/1000000) as lastAccessed,(creationTime/1000000) as creationTime FROM moz_cookies"; private static final String cookieQueryV3 = "SELECT name,value,host,expiry,(lastAccessed/1000000) as lastAccessed FROM moz_cookies"; private static final String bookmarkQuery = "SELECT fk, moz_bookmarks.title, url, (moz_bookmarks.dateAdded/1000000) as dateAdded FROM moz_bookmarks INNER JOIN moz_places ON moz_bookmarks.fk=moz_places.id"; private static final String downloadQuery = "SELECT target, source,(startTime/1000000) as startTime, maxBytes FROM moz_downloads";
<<<<<<< import org.sleuthkit.autopsy.timeline.actions.Back; import org.sleuthkit.autopsy.timeline.actions.Forward; ======= import org.sleuthkit.autopsy.timeline.datamodel.FilteredEventsModel; import org.sleuthkit.autopsy.timeline.ui.IntervalSelector; >>>>>>> import org.sleuthkit.autopsy.timeline.ui.IntervalSelector; <<<<<<< private final ContextMenu contextMenu; ======= private ContextMenu chartContextMenu; public ContextMenu getChartContextMenu() { return chartContextMenu; } >>>>>>> private ContextMenu chartContextMenu; @Override public ContextMenu getChartContextMenu() { return chartContextMenu; } <<<<<<< //use one handler with an if chain because it maintains state ChartDragHandler<String, EventCountsChart> dragHandler = new ChartDragHandler<>(this, getXAxis()); setOnMousePressed(dragHandler); setOnMouseReleased(dragHandler); setOnMouseDragged(dragHandler); contextMenu = ActionUtils.createContextMenu( Arrays.asList(new ActionGroup( NbBundle.getMessage(this.getClass(), "EventCountsChart.contextMenu.zoomHistory.name"), new Back(controller), new Forward(controller)))); contextMenu.setAutoHide(true); setOnMouseClicked((MouseEvent clickEvent) -> { contextMenu.hide(); if (clickEvent.getButton() == MouseButton.SECONDARY && clickEvent.isStillSincePress()) { contextMenu.show(EventCountsChart.this, clickEvent.getScreenX(), clickEvent.getScreenY()); clickEvent.consume(); } }); this.controller.getEventsModel().zoomParametersProperty().addListener(o -> { clearIntervalSelector(); controller.selectEventIDs(Collections.emptyList()); }); ======= ChartDragHandler<String, EventCountsChart> chartDragHandler = new ChartDragHandler<>(this); setOnMousePressed(chartDragHandler); setOnMouseReleased(chartDragHandler); setOnMouseDragged(chartDragHandler); setOnMouseClicked(new MouseClickedHandler<>(this)); >>>>>>> ChartDragHandler<String, EventCountsChart> chartDragHandler = new ChartDragHandler<>(this); setOnMousePressed(chartDragHandler); setOnMouseReleased(chartDragHandler); setOnMouseDragged(chartDragHandler); setOnMouseClicked(new MouseClickedHandler<>(this)); <<<<<<< ======= public synchronized void setController(TimeLineController controller) { this.controller = controller; setModel(this.controller.getEventsModel()); } @Override >>>>>>> <<<<<<< public CountsIntervalSelector newIntervalSelector(double x, Axis<String> dateAxis) { return new CountsIntervalSelector(x, getHeight() - dateAxis.getHeight() - dateAxis.getTickLength(), dateAxis, controller); ======= public void setModel(FilteredEventsModel filteredEvents) { filteredEvents.zoomParametersProperty().addListener(o -> { clearIntervalSelector(); controller.selectEventIDs(Collections.emptyList()); }); } @Override public CountsIntervalSelector newIntervalSelector() { return new CountsIntervalSelector(this); >>>>>>> public CountsIntervalSelector newIntervalSelector() { return new CountsIntervalSelector(this);
<<<<<<< if (IngestManager.getDefault().isIngestRunning()) { final String msg = "<html>Ingest is ongoing on another data source. Adding a new source now might slow down the current ingest.<br />Do you want to proceed and add a new data source now?</html>"; if (JOptionPane.showConfirmDialog(null, msg, "Ingest in progress", JOptionPane.YES_NO_OPTION, JOptionPane.QUESTION_MESSAGE) == JOptionPane.NO_OPTION) { ======= final IngestConfigurator ingestConfig = Lookup.getDefault().lookup(IngestConfigurator.class); if (null != ingestConfig && ingestConfig.isIngestRunning()) { final String msg = NbBundle.getMessage(this.getClass(), "AddImageAction.ingestConfig.ongoingIngest.msg"); if (JOptionPane.showConfirmDialog(null, msg, NbBundle.getMessage(this.getClass(), "AddImageAction.ingestConfig.ongoingIngest.title"), JOptionPane.YES_NO_OPTION, JOptionPane.QUESTION_MESSAGE) == JOptionPane.NO_OPTION) { >>>>>>> if (IngestManager.getDefault().isIngestRunning()) { final String msg = NbBundle.getMessage(this.getClass(), "AddImageAction.ingestConfig.ongoingIngest.msg"); if (JOptionPane.showConfirmDialog(null, msg, NbBundle.getMessage(this.getClass(), "AddImageAction.ingestConfig.ongoingIngest.title"), JOptionPane.YES_NO_OPTION, JOptionPane.QUESTION_MESSAGE) == JOptionPane.NO_OPTION) {
<<<<<<< import org.sleuthkit.datamodel.HashHitInfo; ======= import org.sleuthkit.datamodel.SleuthkitCase; >>>>>>> import org.sleuthkit.datamodel.HashHitInfo; import org.sleuthkit.datamodel.SleuthkitCase;
<<<<<<< // Create a "Quick Tag" sub-menu. JMenu quickTagMenu = new JMenu(NbBundle.getMessage(this.getClass(), "AddTagAction.quickTag")); add(quickTagMenu); // Each tag name in the current set of tags gets its own menu item in // the "Quick Tags" sub-menu. Selecting one of these menu items adds // a tag with the associated tag name. List<JMenuItem> standardTagMenuitems = new ArrayList<>(); ======= // Create a menu item for each of the existing and visible tags. // Selecting one of these menu items adds a tag with the associated tag name. >>>>>>> // Create a menu item for each of the existing and visible tags. // Selecting one of these menu items adds a tag with the associated tag name. List<JMenuItem> standardTagMenuitems = new ArrayList<>(); <<<<<<< // Show custom tags before predefined tags in the menu if (standardTagNames.contains(tagDisplayName)) { standardTagMenuitems.add(tagNameItem); } else { quickTagMenu.add(tagNameItem); } ======= add(tagNameItem); >>>>>>> // Show custom tags before predefined tags in the menu if (standardTagNames.contains(tagDisplayName)) { standardTagMenuitems.add(tagNameItem); } else { add(tagNameItem); }
<<<<<<< import com.quorum.tessera.config.KeyData; import com.quorum.tessera.config.KeyVaultConfig; ======= import com.quorum.tessera.config.keypairs.ConfigKeyPair; >>>>>>> import com.quorum.tessera.config.KeyVaultConfig; import com.quorum.tessera.config.keypairs.ConfigKeyPair;
<<<<<<< public class TextFileExtractorException extends Exception { public TextFileExtractorException(String msg, Throwable ex) { super(msg, ex); } public TextFileExtractorException(String msg) { super(msg); } } /** * Return the encoding of the file * @return Detected encoding or UNKNOWN_CHARSET */ public Charset getEncoding() { if (encoding != null) return encoding; // Encoding detection is hard. We use several libraries since the data passed in is often messy. // First try CharsetDetector (from Tika / ICU4J) // It is a rule-baesd detection approach try (InputStream stream = new BufferedInputStream(new ReadContentInputStream(file))) { ======= public static Charset getEncoding(Content content) { try (InputStream stream = new BufferedInputStream(new ReadContentInputStream(content))) { // Tika first >>>>>>> /** * Return the encoding of the file * @return Detected encoding or UNKNOWN_CHARSET */ public Charset getEncoding() { if (encoding != null) return encoding; // Encoding detection is hard. We use several libraries since the data passed in is often messy. // First try CharsetDetector (from Tika / ICU4J) // It is a rule-baesd detection approach try (InputStream stream = new BufferedInputStream(new ReadContentInputStream(file))) {
<<<<<<< import java.awt.Component; ======= import java.awt.Image; >>>>>>> import java.awt.Component; import java.awt.Image; <<<<<<< private final List<SwingWorker<Void, Void>> thumbnailWorkers = new ArrayList<>(); private final DefaultListModel<AbstractFile> instancesListModel = new DefaultListModel<>(); ======= private final List<SwingWorker<Void, Void>> thumbnailWorkers = new ArrayList<>(); >>>>>>> private final List<SwingWorker<Void, Void>> thumbnailWorkers = new ArrayList<>(); private final DefaultListModel<AbstractFile> instancesListModel = new DefaultListModel<>(); <<<<<<< thumbnailWrapper = FileSearch.getVideoThumbnails(file); ======= FileSearch.getVideoThumbnails(thumbnailWrapper); videoThumbnailViewer.repaint(); >>>>>>> FileSearch.getVideoThumbnails(thumbnailWrapper); videoThumbnailViewer.repaint();
<<<<<<< import java.awt.Cursor; ======= import java.beans.PropertyChangeListener; >>>>>>> import java.awt.Cursor; import java.beans.PropertyChangeListener; <<<<<<< import java.util.ArrayList; ======= import java.util.EnumSet; >>>>>>> import java.util.ArrayList; import java.util.EnumSet; <<<<<<< import org.openide.nodes.Children; ======= import org.openide.util.NbBundle; >>>>>>> import org.openide.nodes.Children; import org.openide.util.NbBundle; <<<<<<< applyFiltersButton.addActionListener(actionEvent -> applyFilters()); ======= updateTimeZone(); updateFilters(); UserPreferences.addChangeListener(preferenceChangeEvent -> { if (preferenceChangeEvent.getKey().equals(UserPreferences.DISPLAY_TIMES_IN_LOCAL_TIME)) { updateTimeZone(); } }); this.ingestListener = pce -> { String eventType = pce.getPropertyName(); if (eventType.equals(DATA_ADDED.toString())) { updateFilters(); refreshButton.setEnabled(true); } }; applyFiltersButton.addActionListener(e -> applyFilters()); refreshButton.addActionListener(e -> applyFilters()); >>>>>>> updateTimeZone(); updateFilters(); UserPreferences.addChangeListener(preferenceChangeEvent -> { if (preferenceChangeEvent.getKey().equals(UserPreferences.DISPLAY_TIMES_IN_LOCAL_TIME)) { updateTimeZone(); } }); this.ingestListener = pce -> { String eventType = pce.getPropertyName(); if (eventType.equals(DATA_ADDED.toString())) { updateFilters(); refreshButton.setEnabled(true); } }; applyFiltersButton.addActionListener(e -> applyFilters()); refreshButton.addActionListener(e -> applyFilters()); <<<<<<< updateAndApplyFilters(); ======= IngestManager.getInstance().addIngestModuleEventListener(ingestListener); Case.addEventTypeSubscriber(EnumSet.of(CURRENT_CASE), evt -> { devicesMap.clear(); devicesPane.removeAll(); }); } @Override public void removeNotify() { super.removeNotify(); IngestManager.getInstance().removeIngestModuleEventListener(ingestListener); >>>>>>> IngestManager.getInstance().addIngestModuleEventListener(ingestListener); Case.addEventTypeSubscriber(EnumSet.of(CURRENT_CASE), evt -> { devicesMap.clear(); devicesPane.removeAll(); }); } @Override public void removeNotify() { super.removeNotify(); IngestManager.getInstance().removeIngestModuleEventListener(ingestListener); <<<<<<< applyFiltersButton.setIcon(new javax.swing.ImageIcon(getClass().getResource("/org/sleuthkit/autopsy/communications/images/control-double.png"))); // NOI18N ======= applyFiltersButton.setIcon(new javax.swing.ImageIcon(getClass().getResource("/org/sleuthkit/autopsy/communications/images/tick.png"))); // NOI18N >>>>>>> applyFiltersButton.setIcon(new javax.swing.ImageIcon(getClass().getResource("/org/sleuthkit/autopsy/communications/images/tick.png"))); // NOI18N <<<<<<< .addComponent(applyFiltersButton, javax.swing.GroupLayout.PREFERRED_SIZE, 83, javax.swing.GroupLayout.PREFERRED_SIZE)) .addComponent(jPanel4, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)) .addContainerGap()) ======= .addComponent(applyFiltersButton, javax.swing.GroupLayout.PREFERRED_SIZE, 83, javax.swing.GroupLayout.PREFERRED_SIZE) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(refreshButton)) .addComponent(jPanel4, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)) .addGap(0, 0, 0)) >>>>>>> .addComponent(applyFiltersButton, javax.swing.GroupLayout.PREFERRED_SIZE, 83, javax.swing.GroupLayout.PREFERRED_SIZE) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) 
.addComponent(refreshButton)) .addComponent(jPanel4, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)) .addGap(0, 0, 0)) <<<<<<< getRootPane().setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); new SwingWorker<AbstractNode, Void>() { @Override protected AbstractNode doInBackground() throws Exception { List<AccountDeviceInstanceKey> accountDeviceInstanceKeys = new ArrayList<>(); for (AccountDeviceInstance adi : commsManager.getAccountDeviceInstancesWithCommunications(commsFilter)) { long communicationsCount = commsManager.getCommunicationsCount(adi, commsFilter); accountDeviceInstanceKeys.add(new AccountDeviceInstanceKey(adi, commsFilter, communicationsCount)); }; return new AbstractNode(Children.create(new AccountsRootChildren(accountDeviceInstanceKeys, commsManager), true)); } @Override protected void done() { super.done(); //To change body of generated methods, choose Tools | Templates. setCursor(Cursor.getDefaultCursor()); getRootPane().setCursor(Cursor.getDefaultCursor()); try { em.setRootContext(get()); } catch (InterruptedException | ExecutionException ex) { logger.log(Level.SEVERE, "Error getting account device instances for filter: " + commsFilter, ex); } } }.execute(); ======= List<AccountDeviceInstanceKey> accountDeviceInstanceKeys = commsManager .getAccountDeviceInstancesWithCommunications(commsFilter) .stream() .map(adi -> new AccountDeviceInstanceKey(adi, commsFilter)) .collect(Collectors.toList()); em.setRootContext(new AbstractNode(new AccountsRootChildren(accountDeviceInstanceKeys, commsManager))); >>>>>>> getRootPane().setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); new SwingWorker<AbstractNode, Void>() { @Override protected AbstractNode doInBackground() throws Exception { List<AccountDeviceInstanceKey> accountDeviceInstanceKeys = new ArrayList<>(); for (AccountDeviceInstance adi : commsManager.getAccountDeviceInstancesWithCommunications(commsFilter)) { long communicationsCount = commsManager.getCommunicationsCount(adi, commsFilter); accountDeviceInstanceKeys.add(new AccountDeviceInstanceKey(adi, commsFilter, communicationsCount)); }; return new AbstractNode(Children.create(new AccountsRootChildren(accountDeviceInstanceKeys, commsManager), true)); } @Override protected void done() { super.done(); //To change body of generated methods, choose Tools | Templates. setCursor(Cursor.getDefaultCursor()); getRootPane().setCursor(Cursor.getDefaultCursor()); try { em.setRootContext(get()); } catch (InterruptedException | ExecutionException ex) { logger.log(Level.SEVERE, "Error getting account device instances for filter: " + commsFilter, ex); } } }.execute(); <<<<<<< ======= private final javax.swing.JScrollPane jScrollPane2 = new javax.swing.JScrollPane(); private final javax.swing.JScrollPane jScrollPane3 = new javax.swing.JScrollPane(); private final javax.swing.JButton refreshButton = new javax.swing.JButton(); >>>>>>> private final javax.swing.JScrollPane jScrollPane2 = new javax.swing.JScrollPane(); private final javax.swing.JScrollPane jScrollPane3 = new javax.swing.JScrollPane(); private final javax.swing.JButton refreshButton = new javax.swing.JButton();
<<<<<<< try { CaseDbConnectionInfo dbInfo = UserPreferences.getDatabaseConnectionInfo(); tbDbHostname.setText(dbInfo.getHost().trim()); tbDbPort.setText(dbInfo.getPort().trim()); tbDbUsername.setText(dbInfo.getUserName().trim()); tbDbPassword.setText(dbInfo.getPassword()); } catch (IllegalArgumentException ex) { logger.log(Level.SEVERE, "Error accessing case database connection info", ex); //NON-NLS } try { MessageServiceConnectionInfo msgServiceInfo = UserPreferences.getMessageServiceConnectionInfo(); tbMsgHostname.setText(msgServiceInfo.getHost().trim()); tbMsgPort.setText(msgServiceInfo.getPort().trim()); tbMsgUsername.setText(msgServiceInfo.getUserName().trim()); tbMsgPassword.setText(msgServiceInfo.getPassword()); } catch (IllegalArgumentException ex) { logger.log(Level.SEVERE, "Error accessing case database connection info", ex); //NON-NLS } ======= lbTestDatabase.setIcon(null); lbTestSolr.setIcon(null); lbTestMessageService.setIcon(null); lbTestDbWarning.setText(""); lbTestSolrWarning.setText(""); lbTestMessageWarning.setText(""); CaseDbConnectionInfo dbInfo = UserPreferences.getDatabaseConnectionInfo(); tbDbHostname.setText(dbInfo.getHost().trim()); tbDbPort.setText(dbInfo.getPort().trim()); tbDbUsername.setText(dbInfo.getUserName().trim()); tbDbPassword.setText(dbInfo.getPassword()); MessageServiceConnectionInfo msgServiceInfo = UserPreferences.getMessageServiceConnectionInfo(); tbMsgHostname.setText(msgServiceInfo.getHost().trim()); tbMsgPort.setText(Integer.toString(msgServiceInfo.getPort())); tbMsgUsername.setText(msgServiceInfo.getUserName().trim()); tbMsgPassword.setText(msgServiceInfo.getPassword()); >>>>>>> lbTestDatabase.setIcon(null); lbTestSolr.setIcon(null); lbTestMessageService.setIcon(null); lbTestDbWarning.setText(""); lbTestSolrWarning.setText(""); lbTestMessageWarning.setText(""); try { CaseDbConnectionInfo dbInfo = UserPreferences.getDatabaseConnectionInfo(); tbDbHostname.setText(dbInfo.getHost().trim()); tbDbPort.setText(dbInfo.getPort().trim()); tbDbUsername.setText(dbInfo.getUserName().trim()); tbDbPassword.setText(dbInfo.getPassword()); } catch (IllegalArgumentException ex) { logger.log(Level.SEVERE, "Error accessing case database connection info", ex); //NON-NLS } try { MessageServiceConnectionInfo msgServiceInfo = UserPreferences.getMessageServiceConnectionInfo(); tbMsgHostname.setText(msgServiceInfo.getHost().trim()); tbMsgPort.setText(Integer.toString(msgServiceInfo.getPort())); tbMsgUsername.setText(msgServiceInfo.getUserName().trim()); tbMsgPassword.setText(msgServiceInfo.getPassword()); } catch (IllegalArgumentException ex) { logger.log(Level.SEVERE, "Error accessing case database connection info", ex); //NON-NLS } <<<<<<< tbMsgPort.getText().trim()); try { UserPreferences.setMessageServiceConnectionInfo(msgServiceInfo); } catch (IllegalArgumentException ex) { logger.log(Level.SEVERE, "Error accessing messaging service connection info", ex); //NON-NLS } ======= port, tbMsgUsername.getText().trim(), new String(tbMsgPassword.getPassword())); UserPreferences.setMessageServiceConnectionInfo(msgServiceInfo); >>>>>>> port, tbMsgUsername.getText().trim(), new String(tbMsgPassword.getPassword())); try { UserPreferences.setMessageServiceConnectionInfo(msgServiceInfo); } catch (IllegalArgumentException ex) { logger.log(Level.SEVERE, "Error accessing messaging service connection info", ex); //NON-NLS }
<<<<<<< import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil; ======= import org.sleuthkit.datamodel.TskData; >>>>>>> import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil; import org.sleuthkit.datamodel.TskData; <<<<<<< public ImportHashDatabaseWorker(String filename, EamArtifactInstance.KnownStatus knownStatus, int globalSetID, EamArtifact.Type contentType) throws EamDbException { ======= public ImportHashDatabaseWorker(String filename, TskData.FileKnown knownStatus, int globalSetID) throws EamDbException, UnknownHostException { >>>>>>> public ImportHashDatabaseWorker(String filename, TskData.FileKnown knownStatus, int globalSetID, EamArtifact.Type contentType) throws EamDbException, UnknownHostException {
<<<<<<< private UNCPathUtilities uncPathUtilities = new UNCPathUtilities(); ======= private long jobId; private static class IngestJobTotals { private AtomicLong totalItemsRecovered = new AtomicLong(0); private AtomicLong totalItemsWithErrors = new AtomicLong(0); private AtomicLong totalWritetime = new AtomicLong(0); private AtomicLong totalParsetime = new AtomicLong(0); } private static synchronized IngestJobTotals getTotalsForIngestJobs(long ingestJobId) { IngestJobTotals totals = totalsForIngestJobs.get(ingestJobId); if (totals == null) { totals = new PhotoRecCarverFileIngestModule.IngestJobTotals(); totalsForIngestJobs.put(ingestJobId, totals); } return totals; } private static synchronized void initTotalsForIngestJob(long ingestJobId) { IngestJobTotals totals = new PhotoRecCarverFileIngestModule.IngestJobTotals(); totalsForIngestJobs.put(ingestJobId, totals); } >>>>>>> private UNCPathUtilities uncPathUtilities = new UNCPathUtilities(); private long jobId; private static class IngestJobTotals { private AtomicLong totalItemsRecovered = new AtomicLong(0); private AtomicLong totalItemsWithErrors = new AtomicLong(0); private AtomicLong totalWritetime = new AtomicLong(0); private AtomicLong totalParsetime = new AtomicLong(0); } private static synchronized IngestJobTotals getTotalsForIngestJobs(long ingestJobId) { IngestJobTotals totals = totalsForIngestJobs.get(ingestJobId); if (totals == null) { totals = new PhotoRecCarverFileIngestModule.IngestJobTotals(); totalsForIngestJobs.put(ingestJobId, totals); } return totals; } private static synchronized void initTotalsForIngestJob(long ingestJobId) { IngestJobTotals totals = new PhotoRecCarverFileIngestModule.IngestJobTotals(); totalsForIngestJobs.put(ingestJobId, totals); } <<<<<<< PhotoRecCarverFileIngestModule.pathsByJob.put(this.context.getJobId(), new WorkingPaths(outputDirPath, tempDirPath)); } catch (SecurityException | IOException | UnsupportedOperationException ex) { throw new IngestModule.IngestModuleException(NbBundle.getMessage(this.getClass(), "cannotCreateOutputDir.message", ex.getLocalizedMessage())); ======= PhotoRecCarverFileIngestModule.pathsByJob.put(this.jobId, new WorkingPaths(outputDirPath, tempDirPath)); // Initialize job totals initTotalsForIngestJob(jobId); } catch (SecurityException | IOException | UnsupportedOperationException ex) { throw new IngestModule.IngestModuleException(NbBundle.getMessage(PhotoRecCarverFileIngestModule.class, "cannotCreateOutputDir.message", ex.getLocalizedMessage())); >>>>>>> PhotoRecCarverFileIngestModule.pathsByJob.put(this.jobId, new WorkingPaths(outputDirPath, tempDirPath)); // Initialize job totals initTotalsForIngestJob(jobId); } catch (SecurityException | IOException | UnsupportedOperationException ex) { throw new IngestModule.IngestModuleException(NbBundle.getMessage(PhotoRecCarverFileIngestModule.class, "cannotCreateOutputDir.message", ex.getLocalizedMessage())); <<<<<<< long freeDiskSpace = IngestServices.getInstance().getFreeDiskSpace(); if ((freeDiskSpace != IngestMonitor.DISK_FREE_SPACE_UNKNOWN) && ((file.getSize() * 1.2) > freeDiskSpace)) { ======= if ((freeDiskSpace != -1) && ((file.getSize() * 1.2) > freeDiskSpace)) { totals.totalItemsWithErrors.incrementAndGet(); >>>>>>> long freeDiskSpace = IngestServices.getInstance().getFreeDiskSpace(); if ((freeDiskSpace != IngestMonitor.DISK_FREE_SPACE_UNKNOWN) && ((file.getSize() * 1.2) > freeDiskSpace)) { <<<<<<< MessageNotifyUtil.Notify.error(NbBundle.getMessage(this.getClass(), "PhotoRecIngestModule.UnableToCarve", file.getName()), 
NbBundle.getMessage(this.getClass(), "PhotoRecIngestModule.NotEnoughDiskSpace")); ======= MessageNotifyUtil.Notify.error(NbBundle.getMessage(PhotoRecCarverFileIngestModule.class, "PhotoRecIngestModule.UnableToCarve", file.getName()), NbBundle.getMessage(PhotoRecCarverFileIngestModule.class, "PhotoRecIngestModule.NotEnoughDiskSpace")); >>>>>>> MessageNotifyUtil.Notify.error(NbBundle.getMessage(this.getClass(), "PhotoRecIngestModule.UnableToCarve", file.getName()), NbBundle.getMessage(this.getClass(), "PhotoRecIngestModule.NotEnoughDiskSpace")); <<<<<<< FileIngestModuleProcessTerminator terminator = new FileIngestModuleProcessTerminator(this.context, true); int exitValue = ExecUtil.execute(processAndSettings, terminator); ======= int exitValue = ExecUtil.execute(processAndSettings, new FileIngestModuleProcessTerminator(this.context)); >>>>>>> FileIngestModuleProcessTerminator terminator = new FileIngestModuleProcessTerminator(this.context, true); int exitValue = ExecUtil.execute(processAndSettings, terminator); <<<<<<< ======= long writedelta = (System.currentTimeMillis() - writestart); totals.totalWritetime.addAndGet(writedelta); >>>>>>> long writedelta = (System.currentTimeMillis() - writestart); totals.totalWritetime.addAndGet(writedelta); <<<<<<< } catch (IOException ex) { ======= } catch (IOException ex) { totals.totalItemsWithErrors.incrementAndGet(); >>>>>>> } catch (IOException ex) { totals.totalItemsWithErrors.incrementAndGet(); <<<<<<< ======= private static synchronized void postSummary(long jobId) { IngestJobTotals jobTotals = totalsForIngestJobs.remove(jobId); StringBuilder detailsSb = new StringBuilder(); //details detailsSb.append("<table border='0' cellpadding='4' width='280'>"); //NON-NLS detailsSb.append("<tr><td>") //NON-NLS .append(NbBundle.getMessage(PhotoRecCarverFileIngestModule.class, "PhotoRecIngestModule.complete.numberOfCarved")) .append("</td>"); //NON-NLS detailsSb.append("<td>").append(jobTotals.totalItemsRecovered.get()).append("</td></tr>"); //NON-NLS detailsSb.append("<tr><td>") //NON-NLS .append(NbBundle.getMessage(PhotoRecCarverFileIngestModule.class, "PhotoRecIngestModule.complete.numberOfErrors")) .append("</td>"); //NON-NLS detailsSb.append("<td>").append(jobTotals.totalItemsWithErrors.get()).append("</td></tr>"); //NON-NLS detailsSb.append("<tr><td>") //NON-NLS .append(NbBundle.getMessage(PhotoRecCarverFileIngestModule.class, "PhotoRecIngestModule.complete.totalWritetime")) .append("</td><td>").append(jobTotals.totalWritetime.get()).append("</td></tr>\n"); //NON-NLS detailsSb.append("<tr><td>") //NON-NLS .append(NbBundle.getMessage(PhotoRecCarverFileIngestModule.class, "PhotoRecIngestModule.complete.totalParsetime")) .append("</td><td>").append(jobTotals.totalParsetime.get()).append("</td></tr>\n"); //NON-NLS detailsSb.append("</table>"); //NON-NLS IngestServices.getInstance().postMessage(IngestMessage.createMessage( IngestMessage.MessageType.INFO, PhotoRecCarverIngestModuleFactory.getModuleName(), NbBundle.getMessage(PhotoRecCarverFileIngestModule.class, "PhotoRecIngestModule.complete.photoRecResults"), detailsSb.toString())); } >>>>>>> private static synchronized void postSummary(long jobId) { IngestJobTotals jobTotals = totalsForIngestJobs.remove(jobId); StringBuilder detailsSb = new StringBuilder(); //details detailsSb.append("<table border='0' cellpadding='4' width='280'>"); //NON-NLS detailsSb.append("<tr><td>") //NON-NLS .append(NbBundle.getMessage(PhotoRecCarverFileIngestModule.class, "PhotoRecIngestModule.complete.numberOfCarved")) 
.append("</td>"); //NON-NLS detailsSb.append("<td>").append(jobTotals.totalItemsRecovered.get()).append("</td></tr>"); //NON-NLS detailsSb.append("<tr><td>") //NON-NLS .append(NbBundle.getMessage(PhotoRecCarverFileIngestModule.class, "PhotoRecIngestModule.complete.numberOfErrors")) .append("</td>"); //NON-NLS detailsSb.append("<td>").append(jobTotals.totalItemsWithErrors.get()).append("</td></tr>"); //NON-NLS detailsSb.append("<tr><td>") //NON-NLS .append(NbBundle.getMessage(PhotoRecCarverFileIngestModule.class, "PhotoRecIngestModule.complete.totalWritetime")) .append("</td><td>").append(jobTotals.totalWritetime.get()).append("</td></tr>\n"); //NON-NLS detailsSb.append("<tr><td>") //NON-NLS .append(NbBundle.getMessage(PhotoRecCarverFileIngestModule.class, "PhotoRecIngestModule.complete.totalParsetime")) .append("</td><td>").append(jobTotals.totalParsetime.get()).append("</td></tr>\n"); //NON-NLS detailsSb.append("</table>"); //NON-NLS IngestServices.getInstance().postMessage(IngestMessage.createMessage( IngestMessage.MessageType.INFO, PhotoRecCarverIngestModuleFactory.getModuleName(), NbBundle.getMessage(PhotoRecCarverFileIngestModule.class, "PhotoRecIngestModule.complete.photoRecResults"), detailsSb.toString())); } <<<<<<< } catch (SecurityException ex) { ======= postSummary(jobId); } catch (SecurityException ex) { >>>>>>> postSummary(jobId); } catch (SecurityException ex) {
<<<<<<< import org.apache.commons.lang.math.NumberUtils; ======= >>>>>>> import org.apache.commons.lang.math.NumberUtils; <<<<<<< double upgradeSolrIndexVersion4to5(double currentIndexVersion, String solr4IndexPath, String tempResultsDir) throws AutopsyService.AutopsyServiceException, SecurityException, IOException { ======= private void upgradeSolrIndexVersion4to5(String solr4IndexPath, String tempResultsDir) throws AutopsyService.AutopsyServiceException, SecurityException, IOException { >>>>>>> private double upgradeSolrIndexVersion4to5(double currentIndexVersion, String solr4IndexPath, String tempResultsDir) throws AutopsyService.AutopsyServiceException, SecurityException, IOException { <<<<<<< double upgradeSolrIndexVersion5to6(double currentIndexVersion, String solr5IndexPath, String tempResultsDir) throws AutopsyService.AutopsyServiceException, SecurityException, IOException { if (currentIndexVersion != 5.0) { return currentIndexVersion; } ======= private void upgradeSolrIndexVersion5to6(String solr5IndexPath, String tempResultsDir) throws AutopsyService.AutopsyServiceException, SecurityException, IOException { >>>>>>> private double upgradeSolrIndexVersion5to6(double currentIndexVersion, String solr5IndexPath, String tempResultsDir) throws AutopsyService.AutopsyServiceException, SecurityException, IOException { if (currentIndexVersion != 5.0) { return currentIndexVersion; }
<<<<<<< import java.nio.file.Paths; import java.util.ArrayList; ======= import java.nio.file.Paths; >>>>>>> import java.nio.file.Paths; import java.util.ArrayList; <<<<<<< import org.netbeans.api.progress.ProgressHandle; import org.netbeans.api.progress.ProgressHandleFactory; import org.opencv.core.Core; ======= import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.concurrent.BasicThreadFactory; >>>>>>> import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.concurrent.BasicThreadFactory; import org.netbeans.api.progress.ProgressHandle; import org.netbeans.api.progress.ProgressHandleFactory; import org.opencv.core.Core; <<<<<<< import org.sleuthkit.autopsy.datamodel.ContentUtils; ======= import org.sleuthkit.autopsy.modules.filetypeid.FileTypeDetector; import org.sleuthkit.autopsy.modules.filetypeid.FileTypeDetector.FileTypeDetectorInitException; >>>>>>> import org.sleuthkit.autopsy.datamodel.ContentUtils; import org.sleuthkit.autopsy.modules.filetypeid.FileTypeDetector; import org.sleuthkit.autopsy.modules.filetypeid.FileTypeDetector.FileTypeDetectorInitException; <<<<<<< * Utilities for creating and manipulating thumbnail and icon images. * ======= * Utilities for working with Images and creating thumbnails. Reuses thumbnails * by storing them in the case's cache directory. >>>>>>> * * Utilities for working with Images and creating thumbnails. Reuses thumbnails * by storing them in the case's cache directory. <<<<<<< private static final Logger LOGGER = Logger.getLogger(ImageUtils.class.getName()); ======= private static final Logger LOGGER = Logger.getLogger(ImageUtils.class.getName()); /** save thumbnails to disk as this format */ private static final String FORMAT = "png"; //NON-NLS >>>>>>> private static final Logger LOGGER = Logger.getLogger(ImageUtils.class.getName()); /** save thumbnails to disk as this format */ private static final String FORMAT = "png"; //NON-NLS <<<<<<< // if the file type is known and we don't support it, bail if (attributes.size() > 0) { return false; } } catch (TskCoreException ex) { logger.log(Level.WARNING, "Error while getting file signature from blackboard.", ex); //NON-NLS ======= >>>>>>> <<<<<<< final String extension = f.getNameExtension(); if (extension.isEmpty() == false) { // Note: thumbnail generator only supports JPG, GIF, and PNG for now if (SUPP_IMAGE_EXTENSIONS.contains(extension) || SUPP_VIDEO_EXTENSIONS.contains(extension)) { return true; } ======= final String extension = file.getNameExtension(); if (StringUtils.isNotBlank(extension) && SUPPORTED_EXTENSIONS.contains(extension)) { return true; >>>>>>> final String extension = file.getNameExtension(); if (StringUtils.isNotBlank(extension) && SUPPORTED_EXTENSIONS.contains(extension)) { return true; <<<<<<< //TODO: why do we allow Content here if we only handle AbstractFiles? Image icon = null; ======= return getThumbnail(content, iconSize); } /** * Get a thumbnail of a specified size. Generates the image if it is * not already cached. * * @param content * @param iconSize * * @return a thumbnail for the given image or a default one if there was a * problem making a thumbnail. */ public static Image getThumbnail(Content content, int iconSize) { >>>>>>> return getThumbnail(content, iconSize); } /** * Get a thumbnail of a specified size. Generates the image if it is * not already cached. * * @param content * @param iconSize * * @return a thumbnail for the given image or a default one if there was a * problem making a thumbnail. 
*/ public static Image getThumbnail(Content content, int iconSize) { <<<<<<< * * @return File object for cached image. Is guaranteed to exist. ======= * * @return File object for cached image. Is guaranteed to exist, as long as * there was not an error generating or saving the thumbnail. * * @deprecated use {@link #getCachedThumbnailFile(org.sleuthkit.datamodel.Content, int) * } instead. >>>>>>> * * @return File object for cached image. Is guaranteed to exist, as long as * there was not an error generating or saving the thumbnail. * * @deprecated use {@link #getCachedThumbnailFile(org.sleuthkit.datamodel.Content, int) * } instead. * <<<<<<< * Get a file object for where the cached icon should exist. The returned * file may not exist. * ======= * Get a thumbnail of a specified size. Generates the image if it is * not already cached. * * @param content * @param iconSize * * @return File object for cached image. Is guaranteed to exist, as long as * there was not an error generating or saving the thumbnail. */ @Nullable public static File getCachedThumbnailFile(Content content, int iconSize) { getThumbnail(content, iconSize); return getCachedThumbnailLocation(content.getId()); } /** * Get a file object for where the cached icon should exist. The returned * file may not exist. * >>>>>>> * * Get a thumbnail of a specified size. Generates the image if it is * not already cached. * * @param content * @param iconSize * * @return File object for cached image. Is guaranteed to exist, as long as * there was not an error generating or saving the thumbnail. */ @Nullable public static File getCachedThumbnailFile(Content content, int iconSize) { getThumbnail(content, iconSize); return getCachedThumbnailLocation(content.getId()); } /** * Get a file object for where the cached icon should exist. The returned * file may not exist. * <<<<<<< return getCachedThumbnailLocation(id); } /** * Get a file object for where the cached icon should exist. The returned * file may not exist. * * @param fileID * * @return * */ private static File getCachedThumbnailLocation(long fileID) { return Paths.get(Case.getCurrentCase().getCacheDirectory(), "thumbnails", fileID + ".png").toFile(); ======= return getCachedThumbnailLocation(id); } private static File getCachedThumbnailLocation(long id) { return Paths.get(Case.getCurrentCase().getCacheDirectory(), "thumbnails", id + ".png").toFile(); >>>>>>> return getCachedThumbnailLocation(id); } /** * Get a file object for where the cached icon should exist. The returned * file may not exist. * * @param fileID * * @return * */ private static File getCachedThumbnailLocation(long fileID) { return Paths.get(Case.getCurrentCase().getCacheDirectory(), "thumbnails", fileID + ".png").toFile(); <<<<<<< ======= /** * Check if the given file is a png based on header. * * @param file * * @return true if png file, false otherwise */ >>>>>>> /** * Check if the given file is a png based on header. 
* * @param file * * @return true if png file, false otherwise */ <<<<<<< private static BufferedImage generateImageThumbnail(Content content, int iconSize) { ======= @Nullable private static BufferedImage generateThumbnail(Content content, int iconSize) { try (InputStream inputStream = new BufferedInputStream(new ReadContentInputStream(content));) { BufferedImage bi = ImageIO.read(inputStream); >>>>>>> @Nullable private static BufferedImage generateImageThumbnail(Content content, int iconSize) { try (InputStream inputStream = new BufferedInputStream(new ReadContentInputStream(content));) { BufferedImage bi = ImageIO.read(inputStream); <<<<<<< return ScalrWrapper.resizeFast(bi, iconSize); } catch (IllegalArgumentException e) { // if resizing does not work due to extremely small height/width ratio, // crop the image instead. BufferedImage biCropped = ScalrWrapper.cropImage(bi, Math.min(iconSize, bi.getWidth()), Math.min(iconSize, bi.getHeight())); return biCropped; } catch (OutOfMemoryError e) { logger.log(Level.WARNING, "Could not scale image (too large): " + content.getName(), e); //NON-NLS ======= try { return ScalrWrapper.resizeFast(bi, iconSize); } catch (IllegalArgumentException e) { // if resizing does not work due to extreme aspect ratio, // crop the image instead. return ScalrWrapper.cropImage(bi, Math.min(iconSize, bi.getWidth()), Math.min(iconSize, bi.getHeight())); } } catch (OutOfMemoryError e) { LOGGER.log(Level.WARNING, "Could not scale image (too large): " + content.getName(), e); //NON-NLS >>>>>>> try { return ScalrWrapper.resizeFast(bi, iconSize); } catch (IllegalArgumentException e) { // if resizing does not work due to extreme aspect ratio, // crop the image instead. return ScalrWrapper.cropImage(bi, Math.min(iconSize, bi.getWidth()), Math.min(iconSize, bi.getHeight())); } } catch (OutOfMemoryError e) { LOGGER.log(Level.WARNING, "Could not scale image (too large): " + content.getName(), e); //NON-NLS <<<<<<< } finally { if (inputStream != null) { try { inputStream.close(); } catch (IOException ex) { logger.log(Level.WARNING, "Could not close input stream after resizing thumbnail: " + content.getName(), ex); //NON-NLS } } } } /** * copy the first 500kb to a temporary file * * @param file * @param tempFile * * @throws IOException */ public static void copyFileUsingStream(Content file, java.io.File tempFile) throws IOException { com.google.common.io.Files.createParentDirs(tempFile); ProgressHandle progress = ProgressHandleFactory.createHandle("extracting temporary file " + file.getName()); progress.start(100); try { ContentUtils.writeToFile(file, tempFile, progress, null, true); } catch (IOException ex) { logger.log(Level.WARNING, "Error buffering file", ex); //NON-NLS ======= >>>>>>> } } /** * copy the first 500kb to a temporary file * * @param file * @param tempFile * * @throws IOException */ public static void copyFileUsingStream(Content file, java.io.File tempFile) throws IOException { com.google.common.io.Files.createParentDirs(tempFile); ProgressHandle progress = ProgressHandleFactory.createHandle("extracting temporary file " + file.getName()); progress.start(100); try { ContentUtils.writeToFile(file, tempFile, progress, null, true); } catch (IOException ex) { logger.log(Level.WARNING, "Error buffering file", ex); //NON-NLS
<<<<<<< Map<Integer, CommonAttributeValueList> interCaseCommonFiles = eamDbAttrInst.findInterCaseValuesByCount(Case.getCurrentCase()); Set<String> mimeTypesToFilterOn = getMimeTypesToFilterOn(); return new CommonAttributeCountSearchResults(interCaseCommonFiles, this.frequencyPercentageThreshold, this.corAttrType, mimeTypesToFilterOn); ======= Set<String> mimeTypesToFilterOn = new HashSet<>(); if (isFilterByMedia()) { mimeTypesToFilterOn.addAll(MEDIA_PICS_VIDEO_MIME_TYPES); } if (isFilterByDoc()) { mimeTypesToFilterOn.addAll(TEXT_FILES_MIME_TYPES); } Map<Integer, CommonAttributeValueList> interCaseCommonFiles = eamDbAttrInst.findInterCaseValuesByCount(Case.getCurrentCase(), mimeTypesToFilterOn); return new CommonAttributeCountSearchResults(interCaseCommonFiles, this.frequencyPercentageThreshold, this.corAttrType); >>>>>>> Set<String> mimeTypesToFilterOn = getMimeTypesToFilterOn(); Map<Integer, CommonAttributeValueList> interCaseCommonFiles = eamDbAttrInst.findInterCaseValuesByCount(Case.getCurrentCase(), mimeTypesToFilterOn); return new CommonAttributeCountSearchResults(interCaseCommonFiles, this.frequencyPercentageThreshold, this.corAttrType); <<<<<<< Map<String, Map<String, CommonAttributeValueList>> interCaseCommonFiles = eamDbAttrInst.findInterCaseValuesByCase(Case.getCurrentCase()); Set<String> mimeTypesToFilterOn = getMimeTypesToFilterOn(); return new CommonAttributeCaseSearchResults(interCaseCommonFiles, this.frequencyPercentageThreshold, this.corAttrType, mimeTypesToFilterOn); ======= Set<String> mimeTypesToFilterOn = new HashSet<>(); if (isFilterByMedia()) { mimeTypesToFilterOn.addAll(MEDIA_PICS_VIDEO_MIME_TYPES); } if (isFilterByDoc()) { mimeTypesToFilterOn.addAll(TEXT_FILES_MIME_TYPES); } Map<String, Map<String, CommonAttributeValueList>> interCaseCommonFiles = eamDbAttrInst.findInterCaseValuesByCase(Case.getCurrentCase(), mimeTypesToFilterOn); return new CommonAttributeCaseSearchResults(interCaseCommonFiles, this.frequencyPercentageThreshold, this.corAttrType); >>>>>>> Set<String> mimeTypesToFilterOn = getMimeTypesToFilterOn(); Map<String, Map<String, CommonAttributeValueList>> interCaseCommonFiles = eamDbAttrInst.findInterCaseValuesByCase(Case.getCurrentCase(), mimeTypesToFilterOn); return new CommonAttributeCaseSearchResults(interCaseCommonFiles, this.frequencyPercentageThreshold, this.corAttrType);
<<<<<<< import java.util.Date; ======= import java.util.Collection; >>>>>>> import java.util.Date; import java.util.Collection; <<<<<<< ======= import org.apache.commons.io.FileUtils; import org.openide.util.Lookup; >>>>>>> import org.apache.commons.io.FileUtils; <<<<<<< /** * Property name for the event when a new BlackBoardArtifactTag is * added. The new value is tag added, the old value is empty */ ======= /** * Name for the property change event when a report is deleted from the * case. Both the old value and the new value supplied by the event * object are null. */ REPORT_DELETED, /** * Property name for the event when a new BlackBoardArtifactTag is * added. The new value is tag added, the old value is empty */ >>>>>>> /** * Name for the property change event when a report is deleted from the * case. Both the old value and the new value supplied by the event * object are null. */ REPORT_DELETED, /** * Property name for the event when a new BlackBoardArtifactTag is * added. The new value is tag added, the old value is empty */ <<<<<<< /** * Property name for the event when a new BlackBoardArtifactTag is * deleted. The new value is empty, the old value is a * {@link BlackBoardArtifactTagDeletedEvent.DeletedBlackboardArtifactTagInfo} * object with info about the deleted tag. */ ======= /** * Property name for the event when a new BlackBoardArtifactTag is * deleted. The new value is empty, the old value is the deleted tag */ >>>>>>> /** * Property name for the event when a new BlackBoardArtifactTag is * deleted. The new value is empty, the old value is a * {@link BlackBoardArtifactTagDeletedEvent.DeletedBlackboardArtifactTagInfo} * object with info about the deleted tag. */ <<<<<<< /** * Property name for the event when a new ContentTag is deleted. The new * value is empty, the old value is a * {@link ContentTagDeletedEvent.DeletedContentTagInfo} object with info * about the deleted tag. */ ======= /** * Property name for the event when a new ContentTag is deleted. The new * value is empty, the old value is the deleted tag */ >>>>>>> /** * Property name for the event when a new ContentTag is deleted. The new * value is empty, the old value is a * {@link ContentTagDeletedEvent.DeletedContentTagInfo} object with info * about the deleted tag. */ <<<<<<< * @param localPath The path of the report file, must be in the case * directory or one of its subdirectories. * @param srcModuleName The name of the module that created the report. * @param reportName The report name, may be empty. ======= * @param localPath The path of the report file, must be in the case * directory or one of its subdirectories. * @param sourceModuleName The name of the module that created the report. * @param reportName The report name, may be empty. * * @return A Report data transfer object (DTO) for the new row. >>>>>>> * @param localPath The path of the report file, must be in the case * directory or one of its subdirectories. * @param srcModuleName The name of the module that created the report. * @param reportName The report name, may be empty.
<<<<<<< import java.util.HashMap; ======= import java.util.Date; >>>>>>> import java.util.HashMap; import java.util.Date; <<<<<<< private javax.swing.JButton editAccountBtn; private javax.swing.JButton editAliasBtn; private javax.swing.JButton editMetadataBtn; ======= private javax.swing.JTextField examinerField; private javax.swing.JLabel examinerLbl; >>>>>>> private javax.swing.JButton editAccountBtn; private javax.swing.JButton editAliasBtn; private javax.swing.JButton editMetadataBtn; private javax.swing.JTextField examinerField; private javax.swing.JLabel examinerLbl; <<<<<<< "PersonaDetailsPanel_EmptyName_msg=Persona name cannot be empty.", "PersonaDetailsPanel_EmptyName_Title=Empty persona name",}) ======= "PersonaDetailsPanel_EmptyName_msg=Persona name cannot be empty", "PersonaDetailsPanel_EmptyName_Title=Empty persona name", "PersonaDetailsPanel_EmptyComment_msg=Persona comment cannot be empty", "PersonaDetailsPanel_EmptyComment_Title=Empty persona comment",}) >>>>>>> "PersonaDetailsPanel_EmptyName_msg=Persona name cannot be empty.", "PersonaDetailsPanel_EmptyName_Title=Empty persona name", "PersonaDetailsPanel_EmptyComment_msg=Persona comment cannot be empty.", "PersonaDetailsPanel_EmptyComment_Title=Empty persona comment",})
<<<<<<< import java.util.ArrayList; import java.util.Arrays; import java.util.List; ======= import java.nio.charset.StandardCharsets; >>>>>>> import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.nio.charset.StandardCharsets;
<<<<<<< OSSLog.logD("[httpdnsmini] - buildUrl: " + resolveUrl); InputStream in = null; ======= OSSLog.logDebug("[httpdnsmini] - buildUrl: " + resolveUrl); >>>>>>> InputStream in = null; OSSLog.logDebug("[httpdnsmini] - buildUrl: " + resolveUrl); <<<<<<< } finally { try { if(in != null) { in.close(); } } catch (IOException e) { e.printStackTrace(); } ======= OSSLog.logThrowable2Local(e); >>>>>>>
<<<<<<< // The data center where the bucket resides public String bucketLocation;  ======= // bucket's location private String bucketLocation;  /** * Sets bucket location * @param location */ public void setBucketLocation(String location) { this.bucketLocation = location; }  /** * Gets the bucket location * @return */ public String getBucketLocation() { return bucketLocation; } >>>>>>> // bucket's location public String bucketLocation;
<<<<<<< ======= //import com.aliyuncs.DefaultAcsClient; //import com.aliyuncs.exceptions.ClientException; //import com.aliyuncs.http.MethodType; //import com.aliyuncs.http.ProtocolType; //import com.aliyuncs.profile.DefaultProfile; //import com.aliyuncs.profile.IClientProfile; //import com.aliyuncs.sts.model.v20150401.AssumeRoleRequest; //import com.aliyuncs.sts.model.v20150401.AssumeRoleResponse; >>>>>>>
<<<<<<< // If the Bucket is private, a signed URL with a specified expiration time is required public void presignConstrainedURL(final Handler handler) { new Thread(new Runnable() { @Override public void run() { try { // Gets the signed url, the expiration time is 5 minutes String url = oss.presignConstrainedObjectURL(testBucket, testObject, 5 * 60); Log.d("signContrainedURL", "get url: " + url); // access it with the url Request request = new Request.Builder().url(url).build(); Response resp = null; resp = new OkHttpClient().newCall(request).execute(); if (resp.code() == 200) { Log.d("signContrainedURL", "object size: " + resp.body().contentLength()); handler.sendEmptyMessage(MainActivity.SIGN_SUC); } else { Log.e("signContrainedURL", "get object failed, error code: " + resp.code() + "error message: " + resp.message()); handler.sendEmptyMessage(MainActivity.FAIL); } }catch (IOException e) { e.printStackTrace(); handler.sendEmptyMessage(MainActivity.FAIL); }catch (ClientException e) { e.printStackTrace(); handler.sendEmptyMessage(MainActivity.FAIL); } } }).start(); ======= // If the bucket is private, the signed URL is required for the access. // Expiration time is specified in the signed URL. public void presignConstrainedURL() { try { // Gets the signed url, the expiration time is 5 minutes String url = oss.presignConstrainedObjectURL(testBucket, testObject, 5 * 60); Log.d("signContrainedURL", "get url: " + url); // access it with the url Request request = new Request.Builder().url(url).build(); Response resp = new OkHttpClient().newCall(request).execute(); if (resp.code() == 200) { Log.d("signContrainedURL", "object size: " + resp.body().contentLength()); } else { Log.e("signContrainedURL", "get object failed, error code: " + resp.code() + "error message: " + resp.message()); } } catch (IOException e) { e.printStackTrace(); } catch (ClientException e) { e.printStackTrace(); } >>>>>>> // If the bucket is private, the signed URL is required for the access. // Expiration time is specified in the signed URL. public void presignConstrainedURL(final Handler handler) { new Thread(new Runnable() { @Override public void run() { try { // Gets the signed url, the expiration time is 5 minutes String url = oss.presignConstrainedObjectURL(testBucket, testObject, 5 * 60); Log.d("signContrainedURL", "get url: " + url); // access it with the url Request request = new Request.Builder().url(url).build(); Response resp = null; resp = new OkHttpClient().newCall(request).execute(); if (resp.code() == 200) { Log.d("signContrainedURL", "object size: " + resp.body().contentLength()); handler.sendEmptyMessage(MainActivity.SIGN_SUC); } else { Log.e("signContrainedURL", "get object failed, error code: " + resp.code() + "error message: " + resp.message()); handler.sendEmptyMessage(MainActivity.FAIL); } }catch (IOException e) { e.printStackTrace(); handler.sendEmptyMessage(MainActivity.FAIL); }catch (ClientException e) { e.printStackTrace(); handler.sendEmptyMessage(MainActivity.FAIL); } } }).start();
<<<<<<< // Only gets a single file's metadata public void headObject(final Handler handler) { // Creates a synchronous request to get the file's metadata ======= // Gets file's metadata public void headObject() { // Creates a request to get the file's metadata >>>>>>> // Gets file's metadata public void headObject(final Handler handler) { // Creates a request to get the file's metadata
<<<<<<< // Asynchronous resumable upload without a checkpoint directory; resumption only applies within this upload public void resumableUpload(final Handler handler) { Log.d("thread",Thread.currentThread().getName()); // Creates the resumable upload request ======= // Resumable upload without checkpoint directory. public void resumableUpload() { // Creates the request >>>>>>> // Resumable upload without checkpoint directory. public void resumableUpload(final Handler handler) { Log.d("thread",Thread.currentThread().getName()); // Creates the request
<<<<<<< ======= /** * Adds an instance of {@link PartSummary} * @param partSummary * PartSummary instance */ public void addPart(PartSummary partSummary) { this.parts.add(partSummary); } >>>>>>>
<<<<<<< import com.alibaba.sdk.android.oss.model.ListBucketsRequest; import com.alibaba.sdk.android.oss.model.ListBucketsResult; ======= import com.alibaba.sdk.android.oss.model.ListMultipartUploadsRequest; import com.alibaba.sdk.android.oss.model.ListMultipartUploadsResult; >>>>>>> import com.alibaba.sdk.android.oss.model.ListBucketsRequest; import com.alibaba.sdk.android.oss.model.ListBucketsResult; import com.alibaba.sdk.android.oss.model.ListMultipartUploadsRequest; import com.alibaba.sdk.android.oss.model.ListMultipartUploadsResult;
<<<<<<< import org.swellrt.client.editor.doodad.WidgetDoodad; ======= import org.swellrt.model.doodad.WidgetDoodad; import org.swellrt.model.doodad.WidgetModelDoodad; >>>>>>> import org.swellrt.model.doodad.WidgetDoodad;
<<<<<<< public void testUserWithNoDomainReturnAddress() throws Exception { configureRedirectString("none"); attemptLogin("frodo", "password", true); verify(resp.getWriter()).write("[email protected]"); } ======= public void testAnonymousLogin() throws IOException { attemptLogin("_anonymous_", "", true); } >>>>>>> public void testUserWithNoDomainReturnAddress() throws Exception { configureRedirectString("none"); attemptLogin("frodo", "password", true); verify(resp.getWriter()).write("[email protected]"); } public void testAnonymousLogin() throws IOException { attemptLogin("_anonymous_", "", true); }
<<<<<<< ======= ParticipantId participantId = sessionManager.getLoggedInUser(req); if (participantId == null) { response.sendError(HttpServletResponse.SC_FORBIDDEN); return; } >>>>>>>
<<<<<<< import org.swellrt.client.editor.doodad.ExternalAnnotationHandler; import org.swellrt.client.editor.doodad.WidgetController; import org.swellrt.client.editor.doodad.WidgetDoodad; ======= import org.swellrt.model.doodad.ExternalAnnotationHandler; import org.swellrt.model.doodad.WidgetController; import org.swellrt.model.doodad.WidgetDoodad; import org.swellrt.model.doodad.WidgetModelDoodad; >>>>>>> import org.swellrt.model.doodad.ExternalAnnotationHandler; import org.swellrt.model.doodad.WidgetController; import org.swellrt.model.doodad.WidgetDoodad;
<<<<<<< context.setMixInAnnotations(GroupMembership.class, GroupMembershipMixin.class); context.setMixInAnnotations(FamilyMember.class, FamilyMemberMixin.class); context.setMixInAnnotations(Question.class, QuestionMixin.class); context.setMixInAnnotations(QuestionOption.class, QuestionOptionMixin.class); ======= context.setMixInAnnotations(StoryTag.class, StoryTagMixin.class); >>>>>>> context.setMixInAnnotations(GroupMembership.class, GroupMembershipMixin.class); context.setMixInAnnotations(FamilyMember.class, FamilyMemberMixin.class); context.setMixInAnnotations(Question.class, QuestionMixin.class); context.setMixInAnnotations(QuestionOption.class, QuestionOptionMixin.class); context.setMixInAnnotations(StoryTag.class, StoryTagMixin.class);
<<<<<<< private String appSecret; private String appId; private static final String API_VERSION = "2.5"; ======= private static final String API_VERSION = "2.8"; >>>>>>> private String appSecret; private String appId; private static final String API_VERSION = "2.8";
<<<<<<< public BoundEffect addParticleToLibrary(FileHandle handle) { ======= public void addParticle(FileHandle handle) { pathMap.put(handle.name(), handle.path()); String name = handle.nameWithoutExtension(); >>>>>>> public BoundEffect addParticle(FileHandle handle) { pathMap.put(handle.name(), handle.path()); String name = handle.nameWithoutExtension(); <<<<<<< return effect; ======= TalosMain.Instance().ProjectController().setDirty(); } public void updateParticle(FileHandle handle) { String name = handle.nameWithoutExtension(); if(vfxLibrary.containsKey(name)) { ParticleEffectDescriptor descriptor = new ParticleEffectDescriptor(); assetProvider.setParticleFolder(handle.parent().path()); descriptor.setAssetProvider(assetProvider); descriptor.load(handle); vfxLibrary.put(name, descriptor); skeletonContainer.updateEffect(name, descriptor); } } @Override public void write(Json json) { json.writeValue("skeleton", skeletonContainer); json.writeObjectStart("paths"); for(String fileName: pathMap.keys()) { json.writeValue(fileName, pathMap.get(fileName)); } json.writeObjectEnd(); } @Override public void read(Json json, JsonValue jsonData) { cleanWorkspace(); JsonValue paths = jsonData.get("paths"); pathMap.clear(); for(JsonValue path: paths) { pathMap.put(path.name(), path.asString()); } skeletonContainer = new SkeletonContainer(this); skeletonContainer.read(json, jsonData.get("skeleton")); } public void cleanWorkspace() { pathMap.clear(); skeletonContainer.clear(); } public String getPath(String fileName) { return pathMap.get(fileName); } public BvBAssetProvider getAssetProvider() { return assetProvider; } public ObjectMap<String, ParticleEffectDescriptor> getVfxLibrary() { return vfxLibrary; >>>>>>> TalosMain.Instance().ProjectController().setDirty(); return effect; } public void updateParticle(FileHandle handle) { String name = handle.nameWithoutExtension(); if(vfxLibrary.containsKey(name)) { ParticleEffectDescriptor descriptor = new ParticleEffectDescriptor(); assetProvider.setParticleFolder(handle.parent().path()); descriptor.setAssetProvider(assetProvider); descriptor.load(handle); vfxLibrary.put(name, descriptor); skeletonContainer.updateEffect(name, descriptor); } } @Override public void write(Json json) { json.writeValue("skeleton", skeletonContainer); json.writeObjectStart("paths"); for(String fileName: pathMap.keys()) { json.writeValue(fileName, pathMap.get(fileName)); } json.writeObjectEnd(); } @Override public void read(Json json, JsonValue jsonData) { cleanWorkspace(); JsonValue paths = jsonData.get("paths"); pathMap.clear(); for(JsonValue path: paths) { pathMap.put(path.name(), path.asString()); } skeletonContainer = new SkeletonContainer(this); skeletonContainer.read(json, jsonData.get("skeleton")); } public void cleanWorkspace() { pathMap.clear(); skeletonContainer.clear(); } public String getPath(String fileName) { return pathMap.get(fileName); } public BvBAssetProvider getAssetProvider() { return assetProvider; } public ObjectMap<String, ParticleEffectDescriptor> getVfxLibrary() { return vfxLibrary;
<<<<<<< Integer i2 = new Integer(2); static long RUN_LENGTH = 100000; // static long REFERENCE_BIPS = 48416; ======= Integer i2 = new Integer(2); static long RUN_LENGTH = 100 * 1000; // static long REFERENCE_BIPS = 48416; >>>>>>> Integer i2 = new Integer(2); static long RUN_LENGTH = 100 * 1000; // static long REFERENCE_BIPS = 48416; <<<<<<< long end = System.nanoTime(); return (end - start)/(1000*1000.0); } public double slf4jMessageFormatter_TwoArg(long len) { String s = ""; s += ""; // keep compiler happy long start = System.nanoTime(); for (int i = 0; i < len; i++) { s = MessageFormatter.format("This is some {} short message {} ", i1, i2); } long end = System.nanoTime(); return (end - start)/(1000*1000.0); } ======= long end = System.nanoTime(); return (end - start) / (1000 * 1000.0); } >>>>>>> long end = System.nanoTime(); return (end - start)/(1000*1000.0); }
<<<<<<< String expression = getExpressionWithAbsoluteStartAndEndTimeStamps(context); sb.append(MessageFormat.format("Alert {0} was triggered at {1}\n", getDisplayedName(context, context.getAlert().getName()), ======= String expression = AlertUtils.getExpressionWithAbsoluteStartAndEndTimeStamps(context); sb.append(MessageFormat.format("Alert {0} with id {1} was triggered at {2}\n", getDisplayedName(context, context.getAlert().getName()), context.getAlert().getId().intValue(), >>>>>>> String expression = AlertUtils.getExpressionWithAbsoluteStartAndEndTimeStamps(context); sb.append(MessageFormat.format("Alert {0} was triggered at {1}\n", getDisplayedName(context, context.getAlert().getName()),
<<<<<<< @Valid @XmlElement private final KeyVaultConfig azureKeyVaultConfig; public KeyConfiguration(final Path passwordFile, final List<String> passwords, final List<KeyData> keyData, final KeyVaultConfig azureKeyVaultConfig) { ======= public KeyConfiguration(final Path passwordFile, final List<String> passwords, final List<ConfigKeyPair> keyData) { >>>>>>> @Valid @XmlElement private final KeyVaultConfig azureKeyVaultConfig; public KeyConfiguration(final Path passwordFile, final List<String> passwords, final List<ConfigKeyPair> keyData, final KeyVaultConfig azureKeyVaultConfig) {
<<<<<<< sb.append(MessageFormat.format("Alert {0} with id {1} was triggered at {2}\n", TemplateReplacement.applyTemplateChanges(context, context.getAlert().getName()), context.getAlert().getId().intValue(), ======= sb.append(MessageFormat.format("Alert {0} was triggered at {1}\n", getDisplayedName(context, context.getAlert().getName()), >>>>>>> sb.append(MessageFormat.format("Alert {0} was triggered at {1}\n", TemplateReplacement.applyTemplateChanges(context, context.getAlert().getName()),
<<<<<<< import com.stratio.meta2.common.data.ClusterName; import com.stratio.meta2.metadata.ClusterMetadata; import com.stratio.meta2.metadata.ConnectorAttachedMetadata; import com.stratio.meta2.metadata.DataStoreMetadata; ======= >>>>>>> import com.stratio.meta2.common.data.ClusterName;
<<<<<<< }else if(toExecute.toLowerCase().startsWith("list connectors")){ listConnectors(); }else if(toExecute.toLowerCase().startsWith(EXPLAIN_PLAN_TOKEN)){ explainPlan(toExecute); } else { ======= } else if(executeApiCAll(toExecute)){ println(""); } else { >>>>>>> }else if(toExecute.toLowerCase().startsWith("list connectors")){ listConnectors(); }else if(toExecute.toLowerCase().startsWith(EXPLAIN_PLAN_TOKEN)){ explainPlan(toExecute); } else if(executeApiCAll(toExecute)){ println(""); } else {
<<<<<<< import com.stratio.meta.core.executor.StreamExecutor; import com.stratio.streaming.api.IStratioStreamingAPI; import org.apache.log4j.Logger; ======= >>>>>>> import com.stratio.meta.core.executor.StreamExecutor; import com.stratio.streaming.api.IStratioStreamingAPI; import org.apache.log4j.Logger; <<<<<<< MetaStep myStep = node; MetaPath myPath = myStep.getPath(); if(myPath == MetaPath.COMMAND){ result = CommandExecutor.execute(myStep.getStmt(), session); } else if(myPath == MetaPath.CASSANDRA){ result = CassandraExecutor.execute(myStep, session); } else if(myPath == MetaPath.DEEP){ result = DeepExecutor.execute(myStep.getStmt(), resultsFromChildren, isRoot(), session, deepSparkContext, engineConfig); } else if(myPath == MetaPath.STREAMING){ result = StreamExecutor.execute(myStep.getStmt(), stratioStreamingAPI); } else if(myPath == MetaPath.UNSUPPORTED){ result = QueryResult.createFailQueryResult("Query not supported."); } else { result = QueryResult.createFailQueryResult("Query not supported yet."); ======= /** * Execute the elements of the tree starting from the bottom up. * @param session The Cassandra session. * @param deepSparkContext The Deep context. * @param engineConfig The engine configuration. * @return A {@link com.stratio.meta.common.result.Result}. */ public Result executeTreeDownTop(Session session, DeepSparkContext deepSparkContext, EngineConfig engineConfig){ // Get results from my children List<Result> resultsFromChildren = new ArrayList<>(); for(Tree child: children){ resultsFromChildren.add(child.executeTreeDownTop(session,deepSparkContext, engineConfig)); } // Execute myself and return final result return executeMyself(session, deepSparkContext, engineConfig, resultsFromChildren); } /** * Execute the current node of the tree. * @param session The Cassandra session. * @param deepSparkContext The Deep context. * @param engineConfig The engine configuration. * @param resultsFromChildren The results from the children. * @return A {@link com.stratio.meta.common.result.Result}. */ public Result executeMyself(Session session, DeepSparkContext deepSparkContext, EngineConfig engineConfig, List<Result> resultsFromChildren){ Result result = null; if(node == null){ return QueryResult.createSuccessQueryResult(); } MetaStep myStep = node; MetaPath myPath = myStep.getPath(); if(myPath == MetaPath.COMMAND){ result = CommandExecutor.execute(myStep.getStmt(), session); } else if(myPath == MetaPath.CASSANDRA){ result = CassandraExecutor.execute(myStep, session); } else if(myPath == MetaPath.DEEP){ result = DeepExecutor.execute(myStep.getStmt(), resultsFromChildren, isRoot(), session, deepSparkContext, engineConfig); } // TODO: To be included when streaming integration is fully accomplished /*else if(myPath == MetaPath.STREAMING){ result = StreamExecutor.execute(myStep.getStmt()); }*/ else if(myPath == MetaPath.UNSUPPORTED){ result = Result.createUnsupportedOperationErrorResult("Query not supported."); } else { result = Result.createUnsupportedOperationErrorResult("Query not supported yet."); } return result; } /** * Determine if the tree has not node. * @return Whether the tree does not contain a node. */ public boolean isEmpty(){ return node == null; } /** * Get the node assigned to this vertex of the tree. * @return A {@link com.stratio.meta.core.utils.MetaStep}. */ public MetaStep getNode(){ return node; } /** * Get the list of childrens. * @return The list. 
*/ public List<Tree> getChildren(){ return children; >>>>>>> MetaStep myStep = node; MetaPath myPath = myStep.getPath(); if(myPath == MetaPath.COMMAND){ result = CommandExecutor.execute(myStep.getStmt(), session); } else if(myPath == MetaPath.CASSANDRA){ result = CassandraExecutor.execute(myStep, session); } else if(myPath == MetaPath.DEEP){ result = DeepExecutor.execute(myStep.getStmt(), resultsFromChildren, isRoot(), session, deepSparkContext, engineConfig); } else if(myPath == MetaPath.STREAMING){ result = StreamExecutor.execute(myStep.getStmt(), stratioStreamingAPI); } else if(myPath == MetaPath.UNSUPPORTED){ result = Result.createUnsupportedOperationErrorResult("Query not supported."); } else { result = Result.createUnsupportedOperationErrorResult("Query not supported yet.");
<<<<<<< import com.stratio.crossdata.core.parser.Parser; ======= import com.stratio.crossdata.common.utils.Constants; >>>>>>> import com.stratio.crossdata.core.parser.Parser; import com.stratio.crossdata.common.utils.Constants;
<<<<<<< import com.stratio.meta2.common.data.TableName; import com.stratio.meta2.core.engine.validator.Validation; import com.stratio.meta2.core.engine.validator.ValidationRequirements; ======= import com.stratio.meta2.core.validator.Validation; import com.stratio.meta2.core.validator.ValidationRequirements; import com.stratio.meta2.common.data.TableName; >>>>>>> import com.stratio.meta2.common.data.TableName; import com.stratio.meta2.core.validator.Validation; import com.stratio.meta2.core.validator.ValidationRequirements;
<<<<<<< ======= import com.stratio.meta2.common.metadata.CatalogMetadata; >>>>>>> import com.stratio.meta2.common.metadata.CatalogMetadata;
<<<<<<< import com.datastax.driver.core.ColumnMetadata; import com.datastax.driver.core.TableMetadata; import java.util.ArrayList; ======= >>>>>>> import com.datastax.driver.core.ColumnMetadata; import com.datastax.driver.core.TableMetadata; import java.util.ArrayList;
<<<<<<< import java.util.Iterator; import java.util.List; ======= >>>>>>> import java.util.Iterator;
<<<<<<< VALIDATE_PRIORITY, ======= VALIDATE_SCOPE, >>>>>>> VALIDATE_PRIORITY, VALIDATE_SCOPE,
<<<<<<< public AttachConnectorStatement(ConnectorName connectorName, ClusterName clusterName, String json, int priority, int pagination) { ======= public AttachConnectorStatement(ConnectorName connectorName, ClusterName clusterName, String json, Integer priority) { >>>>>>> public AttachConnectorStatement(ConnectorName connectorName, ClusterName clusterName, String json, int priority, int pagination) { <<<<<<< this.pagination = pagination; ======= this.priority = priority; >>>>>>> this.pagination = pagination; this.priority = priority; <<<<<<< .add(ValidationTypes.MUST_BE_CONNECTED) .add(ValidationTypes.PAGINATION_SUPPORT); ======= .add(ValidationTypes.MUST_BE_CONNECTED) .add(ValidationTypes.VALIDATE_PRIORITY); >>>>>>> .add(ValidationTypes.MUST_BE_CONNECTED) .add(ValidationTypes.PAGINATION_SUPPORT) .add(ValidationTypes.VALIDATE_PRIORITY);
<<<<<<< import com.stratio.crossdata.common.statements.structures.AbstractRelation; ======= import com.stratio.crossdata.common.statements.structures.Operator; >>>>>>> import com.stratio.crossdata.common.statements.structures.AbstractRelation; import com.stratio.crossdata.common.statements.structures.Operator; <<<<<<< if ((where != null) && (!where.isEmpty())) { sb.append(" WHEN "); sb.append(StringUtils.stringList(where, " AND ")); ======= if (whereInc) { sb.append(" WHERE "); if(withSQLSyntax) { sb.append(StringUtils.stringList(where, " AND ").replaceAll(Operator.MATCH.toString(),"LIKE")); } else{ sb.append(StringUtils.stringList(where, " AND ")); } >>>>>>> if ((where != null) && (!where.isEmpty())) { sb.append(" WHERE "); if(withSQLSyntax) { sb.append(StringUtils.stringList(where, " AND ").replaceAll(Operator.MATCH.toString(),"LIKE")); } else{ sb.append(StringUtils.stringList(where, " AND ")); }
<<<<<<<
import com.datastax.driver.core.DataType;
import com.stratio.meta.common.result.QueryResult;
import com.stratio.meta.common.result.Result;
import com.stratio.meta.common.utils.StringUtils;
import com.stratio.meta.core.engine.EngineConfig;
import com.stratio.meta.core.metadata.CustomIndexMetadata;
import com.stratio.meta.core.metadata.MetadataManager;
import com.stratio.meta.core.structures.IndexType;
import com.stratio.meta.core.utils.MetaPath;
import com.stratio.meta.core.utils.MetaStep;
import com.stratio.meta.core.utils.Tree;
import com.stratio.meta2.common.data.TableName;
import com.stratio.meta2.common.metadata.TableMetadata;
import com.stratio.meta2.common.statements.structures.terms.GenericTerm;
import com.stratio.meta2.common.statements.structures.terms.StringTerm;
import com.stratio.meta2.common.statements.structures.terms.Term;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
=======
import java.util.*;
import com.datastax.driver.core.ColumnMetadata;
import com.datastax.driver.core.DataType;
import com.datastax.driver.core.TableMetadata;
import com.stratio.meta.common.result.QueryResult;
import com.stratio.meta.common.result.Result;
import com.stratio.meta.common.utils.StringUtils;
import com.stratio.meta.core.engine.EngineConfig;
import com.stratio.meta.core.metadata.CustomIndexMetadata;
import com.stratio.meta.core.metadata.MetadataManager;
import com.stratio.meta.core.structures.IndexType;
import com.stratio.meta.core.utils.MetaPath;
import com.stratio.meta.core.utils.MetaStep;
import com.stratio.meta.core.utils.Tree;
import com.stratio.meta2.common.data.TableName;
import com.stratio.meta2.common.statements.structures.terms.GenericTerm;
import com.stratio.meta2.common.statements.structures.terms.StringTerm;
import com.stratio.meta2.common.statements.structures.terms.Term;
import com.stratio.meta2.core.engine.validator.ValidationRequirements;
>>>>>>>
import com.datastax.driver.core.DataType;
import com.stratio.meta.common.result.QueryResult;
import com.stratio.meta.common.result.Result;
import com.stratio.meta.common.utils.StringUtils;
import com.stratio.meta.core.engine.EngineConfig;
import com.stratio.meta.core.metadata.CustomIndexMetadata;
import com.stratio.meta.core.metadata.MetadataManager;
import com.stratio.meta.core.structures.IndexType;
import com.stratio.meta.core.utils.MetaPath;
import com.stratio.meta.core.utils.MetaStep;
import com.stratio.meta.core.utils.Tree;
import com.stratio.meta2.common.data.TableName;
import com.stratio.meta2.common.metadata.TableMetadata;
import com.stratio.meta2.common.statements.structures.terms.GenericTerm;
import com.stratio.meta2.common.statements.structures.terms.StringTerm;
import com.stratio.meta2.common.statements.structures.terms.Term;
import com.stratio.meta2.core.engine.validator.ValidationRequirements;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
<<<<<<<
        throw new UnsupportedOperationException();
        /*Result result = QueryResult.createSuccessQueryResult();
        if(!options.isEmpty()){
            result = Result.createValidationErrorResult(
                    "WITH OPTIONS clause not supported in index creation.");
=======
        Result result = QueryResult.createSuccessQueryResult();
        if (!options.isEmpty()) {
            result = Result
                    .createValidationErrorResult("WITH OPTIONS clause not supported in index creation.");
>>>>>>>
        throw new UnsupportedOperationException();
        /*Result result = QueryResult.createSuccessQueryResult();
        if(!options.isEmpty()){
            result = Result.createValidationErrorResult(
                    "WITH OPTIONS clause not supported in index creation.");
<<<<<<<
        //Validate FROM keyspace
        Result result = validateKeyspaceAndTable(metadata, targetKeyspace, keyspaceInc, keyspace, tableName);
        if(!result.hasError() && joinInc){
            if(join.getKeyspace() != null){
                result = validateKeyspaceAndTable(metadata, targetKeyspace, join.isKeyspaceInc(), join.getKeyspace(), join.getTablename());
            }
        }
=======
        if(true){ // TODO: To be removed
            return QueryResult.CreateSuccessQueryResult();
        }
        Result result = validateKeyspaceAndTable(metadata, targetKeyspace);
>>>>>>>
        //Validate FROM keyspace
        Result result = validateKeyspaceAndTable(metadata, targetKeyspace, keyspaceInc, keyspace, tableName);
        if(!result.hasError() && joinInc){
            if(join.getKeyspace() != null){
                result = validateKeyspaceAndTable(metadata, targetKeyspace, join.isKeyspaceInc(), join.getKeyspace(), join.getTablename());
            }
        }
        if(true){ // TODO: To be removed
            return QueryResult.CreateSuccessQueryResult();
        }
<<<<<<<
        operationsC1.add(Operations.FILTER_NON_INDEXED_GET);
        operationsC1.add(Operations.FILTER_NON_INDEXED_LT);
        //Streaming connector.
        Set<Operations> operationsC2 = new HashSet<>();
        operationsC2.add(Operations.PROJECT);
        operationsC2.add(Operations.SELECT_OPERATOR);
        operationsC2.add(Operations.FILTER_PK_EQ);
        operationsC2.add(Operations.SELECT_INNER_JOIN);
        operationsC2.add(Operations.SELECT_INNER_JOIN_PARTIALS_RESULTS);
        operationsC2.add(Operations.INSERT);
        operationsC2.add(Operations.FILTER_DISJUNCTION);
        operationsC1.add(Operations.FILTER_NON_INDEXED_IN);
=======
        operationsC1.add(Operations.SELECT_INNER_JOIN);
>>>>>>>
        operationsC1.add(Operations.FILTER_NON_INDEXED_GET);
        operationsC1.add(Operations.FILTER_NON_INDEXED_LT);
        operationsC1.add(Operations.SELECT_INNER_JOIN);
<<<<<<<
=======
>>>>>>>
<<<<<<<
import com.stratio.meta2.core.metadata.MetadataManager;
=======
import com.stratio.meta2.core.query.MetadataValidatedQuery;
>>>>>>>
import com.stratio.meta2.core.metadata.MetadataManager;
import com.stratio.meta2.core.query.MetadataValidatedQuery;
<<<<<<<
/*
 * Copyright (C) 2012-2019 52°North Initiative for Geospatial Open Source
=======
/**
 * Copyright (C) 2012-2020 52°North Initiative for Geospatial Open Source
>>>>>>>
/*
 * Copyright (C) 2012-2020 52°North Initiative for Geospatial Open Source
<<<<<<<
import com.stratio.crossdata.common.statements.structures.RelationTerm;
=======
import com.stratio.crossdata.common.utils.StringUtils;
>>>>>>>
import com.stratio.crossdata.common.statements.structures.RelationTerm;
import com.stratio.crossdata.common.utils.StringUtils;
<<<<<<<
import com.stratio.meta.core.utils.QueryStatus;
import com.stratio.streaming.api.IStratioStreamingAPI;
=======
import com.stratio.meta.common.result.QueryStatus;
>>>>>>>
import com.stratio.streaming.api.IStratioStreamingAPI;
import com.stratio.meta.common.result.QueryStatus;
<<<<<<<
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertNotNull;
import static org.testng.Assert.assertTrue;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.log4j.Logger;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
=======
import com.datastax.driver.core.ResultSet;
>>>>>>>
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertNotNull;
import static org.testng.Assert.assertTrue;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.log4j.Logger;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import com.datastax.driver.core.ResultSet;
<<<<<<< // Test with CORRECT statements @Test public void validateExplainPlanForSelect() { String inputText = "EXPLAIN PLAN FOR SELECT users.name FROM demo.users;"; validateOk(inputText, "validateExplainPlanForSelect"); } @Test public void validateExplainPlanForDropIndex() { String methodName = "validateExplainPlanForDropIndex"; String inputText = "EXPLAIN PLAN FOR DROP INDEX users_gender_idx;"; MetaStatement stmt = _pt.testRegularStatement(inputText, methodName); stmt.setSessionKeyspace("demo"); ((ExplainPlanStatement) stmt).getMetaStatement().setSessionKeyspace("demo"); Result result = stmt.validate(_metadataManager); assertNotNull(result, "Sentence validation not supported - " + methodName); assertFalse(result.hasError(), "Cannot validate sentence - " + methodName + ": " + result.getErrorMessage()); } // Test with WRONG statements @Test public void validateExplainPlanForWrongSelect() { String inputText = "EXPLAIN PLAN FOR SELECT idk.name FROM demo.idk;"; validateFail(inputText, "validateExplainPlanForWrongSelect"); } @Test public void validateExplainPlanForWrongDropIndex() { String inputText = "EXPLAIN PLAN FOR DROP INDEX idk;"; validateFail(inputText, "validateExplainPlanForWrongDropIndex"); } ======= // Test with CORRECT statements @Test public void validateExplainPlanForSelect(){ String inputText = "EXPLAIN PLAN FOR SELECT name FROM demo.users;"; validateOk(inputText, "validateExplainPlanForSelect"); } @Test public void validateExplainPlanForDropIndex(){ String methodName = "validateExplainPlanForDropIndex"; String inputText = "EXPLAIN PLAN FOR DROP INDEX users_gender_idx;"; MetaStatement stmt = pt.testRegularStatement(inputText, methodName); stmt.setSessionKeyspace("demo"); ((ExplainPlanStatement) stmt).getMetaStatement().setSessionKeyspace("demo"); Result result = stmt.validate(metadataManager); assertNotNull(result, "Sentence validation not supported - " + methodName); assertFalse(result.hasError(), "Cannot validate sentence - " + methodName + ": " + getErrorMessage(result)); } // Test with WRONG statements @Test public void validateExplainPlanForWrongSelect(){ String inputText = "EXPLAIN PLAN FOR SELECT name FROM demo.idk;"; validateFail(inputText, "validateExplainPlanForWrongSelect"); } @Test public void validateExplainPlanForWrongDropIndex(){ String inputText = "EXPLAIN PLAN FOR DROP INDEX idk;"; validateFail(inputText, "validateExplainPlanForWrongDropIndex"); } >>>>>>> // Test with CORRECT statements @Test public void validateExplainPlanForSelect() { String inputText = "EXPLAIN PLAN FOR SELECT users.name FROM demo.users;"; validateOk(inputText, "validateExplainPlanForSelect"); } @Test public void validateExplainPlanForDropIndex() { String methodName = "validateExplainPlanForDropIndex"; String inputText = "EXPLAIN PLAN FOR DROP INDEX users_gender_idx;"; MetaStatement stmt = pt.testRegularStatement(inputText, methodName); stmt.setSessionKeyspace("demo"); ((ExplainPlanStatement) stmt).getMetaStatement().setSessionKeyspace("demo"); Result result = stmt.validate(metadataManager); assertNotNull(result, "Sentence validation not supported - " + methodName); assertFalse(result.hasError(), "Cannot validate sentence - " + methodName + ": " + getErrorMessage(result)); } // Test with WRONG statements @Test public void validateExplainPlanForWrongSelect() { String inputText = "EXPLAIN PLAN FOR SELECT idk.name FROM demo.idk;"; validateFail(inputText, "validateExplainPlanForWrongSelect"); } @Test public void validateExplainPlanForWrongDropIndex() { String inputText = "EXPLAIN PLAN FOR DROP 
INDEX idk;"; validateFail(inputText, "validateExplainPlanForWrongDropIndex"); }
<<<<<<<
                actorRefUri = findAnyActorRef(clusterMetadata, Status.ONLINE, Operations.CREATE_CATALOG);
            } catch (PlanningException pe) {
                LOG.debug("Cannot determine any connector for the operation: " + Operations.CREATE_CATALOG);
            }
            executionType = ExecutionType.CREATE_TABLE_AND_CATALOG;
=======
            actorRefUri = findAnyActorRef(clusterMetadata, ConnectorStatus.ONLINE, Operations.CREATE_CATALOG);
>>>>>>>
            actorRefUri = findAnyActorRef(clusterMetadata, Status.ONLINE, Operations.CREATE_CATALOG);
<<<<<<<
=======
import static com.stratio.crossdata.common.statements.structures.SelectorType.FUNCTION;
import static com.stratio.crossdata.common.statements.structures.SelectorType.RELATION;
>>>>>>>
<<<<<<<
import com.stratio.crossdata.core.validator.requirements.ValidationTypes;
import com.stratio.crossdata.core.validator.requirements.ValidationRequirements;
=======
import com.stratio.crossdata.common.utils.StringUtils;
import com.stratio.crossdata.core.structures.Option;
import com.stratio.crossdata.core.validator.Validation;
import com.stratio.crossdata.core.validator.ValidationRequirements;
>>>>>>>
import com.stratio.crossdata.core.validator.requirements.ValidationTypes;
import com.stratio.crossdata.core.validator.requirements.ValidationRequirements;
import com.stratio.crossdata.common.utils.StringUtils;
import com.stratio.crossdata.core.structures.Option;
<<<<<<<
import com.stratio.meta2.core.grid.Grid;
import com.stratio.meta2.core.grid.GridInitializer;
import org.apache.log4j.Logger;
import org.apache.spark.SparkConf;
import org.apache.spark.SparkContext;
=======
>>>>>>>
<<<<<<< public NodeMetadata getNode(NodeName name) { shouldBeInit(); shouldExist(name); return (NodeMetadata) metadata.get(name); } public NodeMetadata getNodeIfExists(NodeName name) { shouldBeInit(); IMetadata iMetadata = metadata.get(name); NodeMetadata nodeMetadata = null; if(iMetadata != null){ nodeMetadata = (NodeMetadata) iMetadata; } return nodeMetadata; } public void setNodeStatus(NodeName nodeName, Status status){ shouldBeInit(); try { writeLock.lock(); NodeMetadata nodeMetadata = new NodeMetadata(nodeName, status); createNode(nodeMetadata, false); } catch (Exception ex) { throw new MetadataManagerException(ex); } finally { writeLock.unlock(); } } public void setNodeStatusIfExists(NodeName nodeName, Status status){ shouldBeInit(); try { writeLock.lock(); NodeMetadata nodeMetadata = getNodeIfExists(nodeName); if(nodeMetadata != null){ nodeMetadata.setStatus(status); beginTransaction(); createNode(nodeMetadata, false); commitTransaction(); } } catch (Exception ex) { throw new MetadataManagerException(ex); } finally { writeLock.unlock(); } } public Status getNodeStatus(NodeName nodeName){ return getNode(nodeName).getStatus(); } public List<NodeName> getNodeNames(Status status){ List<NodeName> onlineNodeNames = new ArrayList<>(); for(NodeMetadata nodeMetadata: getNodes(status)){ onlineNodeNames.add(nodeMetadata.getName()); } return onlineNodeNames; } public List<NodeMetadata> getNodes(Status status){ List<NodeMetadata> onlineNodes = new ArrayList<>(); for (NodeMetadata node: getNodes()) { if (node.getStatus() == status) { onlineNodes.add(node); } } return onlineNodes; } public List<NodeMetadata> getNodes() { List<NodeMetadata> nodes = new ArrayList<>(); for (Map.Entry<FirstLevelName, IMetadata> entry: metadata.entrySet()) { IMetadata iMetadata = entry.getValue(); if (iMetadata instanceof NodeMetadata) { nodes.add((NodeMetadata) iMetadata); } } return nodes; } public boolean checkGetConnectorName(NodeName nodeName) { boolean result = false; try { writeLock.lock(); if ((!exists(nodeName)) || (getNode(nodeName).getStatus() == Status.OFFLINE)) { setNodeStatus(nodeName, Status.INITIALIZING); result = true; } } catch (Exception e) { result = false; } finally { writeLock.unlock(); } return result; } ======= public void addCatalogToCluster(CatalogName catalog, ClusterName clusterName) { ClusterMetadata clusterMetadata = getCluster(clusterName); clusterMetadata.addPersistedCatalog(catalog); createCluster(clusterMetadata, false); } public void removeCatalogFromClusters(CatalogName catalog) { List<ClusterMetadata> clusters = getClusters(); for(ClusterMetadata cluster: clusters){ removeCatalogFromCluster(catalog, cluster); } } private void removeCatalogFromCluster(CatalogName catalog, ClusterMetadata cluster) { cluster.removePersistedCatalog(catalog); createCluster(cluster, false); } >>>>>>> public NodeMetadata getNode(NodeName name) { shouldBeInit(); shouldExist(name); return (NodeMetadata) metadata.get(name); } public NodeMetadata getNodeIfExists(NodeName name) { shouldBeInit(); IMetadata iMetadata = metadata.get(name); NodeMetadata nodeMetadata = null; if(iMetadata != null){ nodeMetadata = (NodeMetadata) iMetadata; } return nodeMetadata; } public void setNodeStatus(NodeName nodeName, Status status){ shouldBeInit(); try { writeLock.lock(); NodeMetadata nodeMetadata = new NodeMetadata(nodeName, status); createNode(nodeMetadata, false); } catch (Exception ex) { throw new MetadataManagerException(ex); } finally { writeLock.unlock(); } } public void setNodeStatusIfExists(NodeName nodeName, Status 
status){ shouldBeInit(); try { writeLock.lock(); NodeMetadata nodeMetadata = getNodeIfExists(nodeName); if(nodeMetadata != null){ nodeMetadata.setStatus(status); beginTransaction(); createNode(nodeMetadata, false); commitTransaction(); } } catch (Exception ex) { throw new MetadataManagerException(ex); } finally { writeLock.unlock(); } } public Status getNodeStatus(NodeName nodeName){ return getNode(nodeName).getStatus(); } public List<NodeName> getNodeNames(Status status){ List<NodeName> onlineNodeNames = new ArrayList<>(); for(NodeMetadata nodeMetadata: getNodes(status)){ onlineNodeNames.add(nodeMetadata.getName()); } return onlineNodeNames; } public List<NodeMetadata> getNodes(Status status){ List<NodeMetadata> onlineNodes = new ArrayList<>(); for (NodeMetadata node: getNodes()) { if (node.getStatus() == status) { onlineNodes.add(node); } } return onlineNodes; } public List<NodeMetadata> getNodes() { List<NodeMetadata> nodes = new ArrayList<>(); for (Map.Entry<FirstLevelName, IMetadata> entry: metadata.entrySet()) { IMetadata iMetadata = entry.getValue(); if (iMetadata instanceof NodeMetadata) { nodes.add((NodeMetadata) iMetadata); } } return nodes; } public boolean checkGetConnectorName(NodeName nodeName) { boolean result = false; try { writeLock.lock(); if ((!exists(nodeName)) || (getNode(nodeName).getStatus() == Status.OFFLINE)) { setNodeStatus(nodeName, Status.INITIALIZING); result = true; } } catch (Exception e) { result = false; } finally { writeLock.unlock(); } return result; } public void addCatalogToCluster(CatalogName catalog, ClusterName clusterName) { ClusterMetadata clusterMetadata = getCluster(clusterName); clusterMetadata.addPersistedCatalog(catalog); createCluster(clusterMetadata, false); } public void removeCatalogFromClusters(CatalogName catalog) { List<ClusterMetadata> clusters = getClusters(); for(ClusterMetadata cluster: clusters){ removeCatalogFromCluster(catalog, cluster); } } private void removeCatalogFromCluster(CatalogName catalog, ClusterMetadata cluster) { cluster.removePersistedCatalog(catalog); createCluster(cluster, false); }
<<<<<<< ======= import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; >>>>>>> import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; <<<<<<< @Test public void azureKeyPairIdsAllowedCharacterSetIsAlphanumericAndDash() { String keyVaultId = "0123456789-abcdefghijklmnopqrstuvwxyz-ABCDEFGHIJKLMNOPQRSTUVWXYZ"; AzureVaultKeyPair keyPair = new AzureVaultKeyPair(keyVaultId, keyVaultId); Set<ConstraintViolation<AzureVaultKeyPair>> violations = validator.validate(keyPair); assertThat(violations).hasSize(0); } @Test public void azureKeyPairIdsDisallowedCharactersCreateViolation() { String keyVaultId = "invalid_@!£$%^~^&_id"; AzureVaultKeyPair keyPair = new AzureVaultKeyPair(keyVaultId, keyVaultId); Set<ConstraintViolation<AzureVaultKeyPair>> violations = validator.validate(keyPair); assertThat(violations).hasSize(2); assertThat(violations).extracting("messageTemplate") .containsExactly("Azure Key Vault key IDs can only contain alphanumeric characters and dashes (-)", "Azure Key Vault key IDs can only contain alphanumeric characters and dashes (-)"); } @Test public void keyVaultVaultPairProvidedWithoutKeyVaultConfigCreatesViolation() { AzureVaultKeyPair keyPair = new AzureVaultKeyPair("publicVauldId", "privateVaultId"); KeyConfiguration keyConfiguration = new KeyConfiguration(null, null, singletonList(keyPair), null); Config config = new Config(null, null, null, keyConfiguration, null, null, false, false); Set<ConstraintViolation<Config>> violations = validator.validateProperty(config, "keys"); assertThat(violations).hasSize(1); ConstraintViolation<Config> violation = violations.iterator().next(); assertThat(violation.getMessageTemplate()).isEqualTo("{ValidKeyVaultConfiguration.message}"); } @Test public void nonKeyVaultPairProvidedWithoutKeyVaultConfigDoesNotCreateViolation() { DirectKeyPair keyPair = new DirectKeyPair("pub", "priv"); KeyConfiguration keyConfiguration = new KeyConfiguration(null, null, singletonList(keyPair), null); Config config = new Config(null, null, null, keyConfiguration, null, null, false, false); Set<ConstraintViolation<Config>> violations = validator.validateProperty(config, "keys"); assertThat(violations).hasSize(0); } @Test public void keyConfigurationIsNullCreatesNotNullViolation() { Config config = new Config(null, null, null, null, null, null, false, false); Set<ConstraintViolation<Config>> violations = validator.validateProperty(config, "keys"); assertThat(violations).hasSize(1); ConstraintViolation<Config> violation = violations.iterator().next(); assertThat(violation.getMessageTemplate()).isEqualTo("{javax.validation.constraints.NotNull.message}"); } @Test public void keyVaultConfigWithNoUrlCreatesNullViolation() { KeyVaultConfig keyVaultConfig = new KeyVaultConfig(null); Set<ConstraintViolation<KeyVaultConfig>> violations = validator.validate(keyVaultConfig); assertThat(violations).hasSize(1); ConstraintViolation<KeyVaultConfig> violation = violations.iterator().next(); assertThat(violation.getMessageTemplate()).isEqualTo("{javax.validation.constraints.NotNull.message}"); } @Test public void vaultKeyPairProvidedButKeyVaultConfigHasNullUrlCreatesNullViolation() { AzureVaultKeyPair keyPair = new AzureVaultKeyPair("pubId", "privId"); KeyVaultConfig keyVaultConfig = new KeyVaultConfig(null); KeyConfiguration keyConfiguration = new KeyConfiguration(null, null, singletonList(keyPair), keyVaultConfig); 
Set<ConstraintViolation<KeyConfiguration>> violations = validator.validate(keyConfiguration); assertThat(violations).hasSize(1); ConstraintViolation<KeyConfiguration> violation = violations.iterator().next(); assertThat(violation.getMessageTemplate()).isEqualTo("{javax.validation.constraints.NotNull.message}"); assertThat(violation.getPropertyPath().toString()).isEqualTo("azureKeyVaultConfig.url"); } ======= @Test public void keyConfigurationIsNullCreatesNotNullViolation() { Config config = new Config(null, null, null, null, null, null, false, false); Set<ConstraintViolation<Config>> violations = validator.validateProperty(config, "keys"); assertThat(violations).hasSize(1); ConstraintViolation<Config> violation = violations.iterator().next(); assertThat(violation.getMessageTemplate()).isEqualTo("{javax.validation.constraints.NotNull.message}"); } >>>>>>> @Test public void azureKeyPairIdsAllowedCharacterSetIsAlphanumericAndDash() { String keyVaultId = "0123456789-abcdefghijklmnopqrstuvwxyz-ABCDEFGHIJKLMNOPQRSTUVWXYZ"; AzureVaultKeyPair keyPair = new AzureVaultKeyPair(keyVaultId, keyVaultId); Set<ConstraintViolation<AzureVaultKeyPair>> violations = validator.validate(keyPair); assertThat(violations).hasSize(0); } @Test public void azureKeyPairIdsDisallowedCharactersCreateViolation() { String keyVaultId = "invalid_@!£$%^~^&_id"; AzureVaultKeyPair keyPair = new AzureVaultKeyPair(keyVaultId, keyVaultId); Set<ConstraintViolation<AzureVaultKeyPair>> violations = validator.validate(keyPair); assertThat(violations).hasSize(2); assertThat(violations).extracting("messageTemplate") .containsExactly("Azure Key Vault key IDs can only contain alphanumeric characters and dashes (-)", "Azure Key Vault key IDs can only contain alphanumeric characters and dashes (-)"); } @Test public void keyVaultVaultPairProvidedWithoutKeyVaultConfigCreatesViolation() { AzureVaultKeyPair keyPair = new AzureVaultKeyPair("publicVauldId", "privateVaultId"); KeyConfiguration keyConfiguration = new KeyConfiguration(null, null, singletonList(keyPair), null); Config config = new Config(null, null, null, keyConfiguration, null, null, false, false); Set<ConstraintViolation<Config>> violations = validator.validateProperty(config, "keys"); assertThat(violations).hasSize(1); ConstraintViolation<Config> violation = violations.iterator().next(); assertThat(violation.getMessageTemplate()).isEqualTo("{ValidKeyVaultConfiguration.message}"); } @Test public void nonKeyVaultPairProvidedWithoutKeyVaultConfigDoesNotCreateViolation() { DirectKeyPair keyPair = new DirectKeyPair("pub", "priv"); KeyConfiguration keyConfiguration = new KeyConfiguration(null, null, singletonList(keyPair), null); Config config = new Config(null, null, null, keyConfiguration, null, null, false, false); Set<ConstraintViolation<Config>> violations = validator.validateProperty(config, "keys"); assertThat(violations).hasSize(0); } @Test public void keyConfigurationIsNullCreatesNotNullViolation() { Config config = new Config(null, null, null, null, null, null, false, false); Set<ConstraintViolation<Config>> violations = validator.validateProperty(config, "keys"); assertThat(violations).hasSize(1); ConstraintViolation<Config> violation = violations.iterator().next(); assertThat(violation.getMessageTemplate()).isEqualTo("{javax.validation.constraints.NotNull.message}"); } @Test public void keyVaultConfigWithNoUrlCreatesNullViolation() { KeyVaultConfig keyVaultConfig = new KeyVaultConfig(null); Set<ConstraintViolation<KeyVaultConfig>> violations = 
validator.validate(keyVaultConfig); assertThat(violations).hasSize(1); ConstraintViolation<KeyVaultConfig> violation = violations.iterator().next(); assertThat(violation.getMessageTemplate()).isEqualTo("{javax.validation.constraints.NotNull.message}"); } @Test public void vaultKeyPairProvidedButKeyVaultConfigHasNullUrlCreatesNullViolation() { AzureVaultKeyPair keyPair = new AzureVaultKeyPair("pubId", "privId"); KeyVaultConfig keyVaultConfig = new KeyVaultConfig(null); KeyConfiguration keyConfiguration = new KeyConfiguration(null, null, singletonList(keyPair), keyVaultConfig); Set<ConstraintViolation<KeyConfiguration>> violations = validator.validate(keyConfiguration); assertThat(violations).hasSize(1); ConstraintViolation<KeyConfiguration> violation = violations.iterator().next(); assertThat(violation.getMessageTemplate()).isEqualTo("{javax.validation.constraints.NotNull.message}"); assertThat(violation.getPropertyPath().toString()).isEqualTo("azureKeyVaultConfig.url"); }
<<<<<<<
            Set<Operations> supportedOperations) {
        this(name, version, dataStoreRefs, clusterProperties, Status.OFFLINE, null, requiredProperties,
=======
            Set<Operations> supportedOperations) throws ManifestException {
        this(name, version, dataStoreRefs, clusterProperties, ConnectorStatus.OFFLINE, null, requiredProperties,
>>>>>>>
            Set<Operations> supportedOperations) throws ManifestException {
        this(name, version, dataStoreRefs, clusterProperties, Status.OFFLINE, null, requiredProperties,
<<<<<<<
        this.supportedOperations = convertManifestOperationsToMetadataOperations(supportedOperations);
        this.status = Status.OFFLINE;
        this.status = Status.OFFLINE;
=======
        this.connectorStatus = ConnectorStatus.OFFLINE;
>>>>>>>
        this.status = Status.OFFLINE;
<<<<<<< verify(partyInfoService, times(2)).getURLFromRecipientKey(recipientKey); verify(partyInfoService).getURLFromRecipientKey(new Key("key2".getBytes())); verify(partyInfoService, times(3)).getPartyInfo(); verify(partyInfoService, times(3)).getPartyInfo(); ======= } @Test public void testResendAll(){ EncodedPayload encodedPayload = new EncodedPayload(new Key(new byte[0]), new byte[0], new Nonce(new byte[0]), Arrays.asList("box1".getBytes(), "box2".getBytes()), new Nonce(new byte[0])); List<Key> recipientKeys = Arrays.asList(new Key("somekey".getBytes()), new Key("key2".getBytes())); EncodedPayloadWithRecipients encodedPayloadWithRecipients = new EncodedPayloadWithRecipients(encodedPayload, recipientKeys); when(transactionService.retrieveAllForRecipient(any())) .thenReturn(Arrays.asList(encodedPayloadWithRecipients)); Key recipientKey = new Key("somekey".getBytes()); when(partyInfoService.getURLFromRecipientKey(recipientKey)).thenReturn("http://someurl.com"); PartyInfo partyInfo = new PartyInfo("http://someurl.com",Collections.emptySet(), Collections.emptySet()); when(partyInfoService.getPartyInfo()).thenReturn(partyInfo); enclave.resendAll("someKey".getBytes()); verify(transactionService, times(1)).retrieveAllForRecipient(any()); verify(encoder).encode(any(EncodedPayloadWithRecipients.class)); verify(postDelegate, times(1)).doPost(any(),any(),any()); >>>>>>> verify(partyInfoService, times(2)).getURLFromRecipientKey(recipientKey); verify(partyInfoService).getURLFromRecipientKey(new Key("key2".getBytes())); verify(partyInfoService, times(3)).getPartyInfo(); verify(partyInfoService, times(3)).getPartyInfo(); } @Test public void testResendAll(){ EncodedPayload encodedPayload = new EncodedPayload(new Key(new byte[0]), new byte[0], new Nonce(new byte[0]), Arrays.asList("box1".getBytes(), "box2".getBytes()), new Nonce(new byte[0])); List<Key> recipientKeys = Arrays.asList(new Key("somekey".getBytes()), new Key("key2".getBytes())); EncodedPayloadWithRecipients encodedPayloadWithRecipients = new EncodedPayloadWithRecipients(encodedPayload, recipientKeys); when(transactionService.retrieveAllForRecipient(any())) .thenReturn(Arrays.asList(encodedPayloadWithRecipients)); Key recipientKey = new Key("somekey".getBytes()); when(partyInfoService.getURLFromRecipientKey(recipientKey)).thenReturn("http://someurl.com"); PartyInfo partyInfo = new PartyInfo("http://someurl.com",Collections.emptySet(), Collections.emptySet()); when(partyInfoService.getPartyInfo()).thenReturn(partyInfo); enclave.resendAll("someKey".getBytes()); verify(transactionService, times(1)).retrieveAllForRecipient(any()); verify(encoder).encode(any(EncodedPayloadWithRecipients.class)); verify(postDelegate, times(1)).doPost(any(),any(),any());
<<<<<<< import com.stratio.meta.common.result.MetaResult; import com.stratio.meta.core.metadata.CustomIndexMetadata; ======= import com.stratio.meta.common.result.QueryResult; import com.stratio.meta.common.result.Result; >>>>>>> import com.stratio.meta.core.metadata.CustomIndexMetadata; <<<<<<< import com.stratio.meta.core.structures.*; import com.stratio.meta.core.utils.*; ======= import com.stratio.meta.core.structures.*; import com.stratio.meta.core.utils.DeepResult; import com.stratio.meta.core.utils.MetaPath; import com.stratio.meta.core.utils.MetaStep; import com.stratio.meta.core.utils.ParserUtils; import com.stratio.meta.core.utils.Tree; >>>>>>> import com.stratio.meta.core.structures.*; import com.stratio.meta.common.result.QueryResult; import com.stratio.meta.common.result.Result; import com.stratio.meta.core.utils.DeepResult; import com.stratio.meta.core.utils.MetaPath; import com.stratio.meta.core.utils.MetaStep; import com.stratio.meta.core.utils.ParserUtils; import com.stratio.meta.core.utils.Tree; <<<<<<< //TODO: We should probably remove this an pass it as parameters. private MetadataManager _metadata = null; private TableMetadata _tableMetadata = null; public SelectStatement(SelectionClause selectionClause, String tablename, ======= public SelectStatement(SelectionClause selectionClause, String tableName, >>>>>>> //TODO: We should probably remove this an pass it as parameters. private MetadataManager _metadata = null; private TableMetadata _tableMetadata = null; public SelectStatement(SelectionClause selectionClause, String tablename, <<<<<<< tableMetadata = metadata.getTableMetadata(effectiveKeyspace, tablename); //Cache Metadata manager and table metadata for the getDriverStatement. _metadata = metadata; _tableMetadata = tableMetadata; ======= tableMetadata = metadata.getTableMetadata(effectiveKeyspace, tableName); >>>>>>> tableMetadata = metadata.getTableMetadata(effectiveKeyspace, tableName); //Cache Metadata manager and table metadata for the getDriverStatement. _metadata = metadata; _tableMetadata = tableMetadata; <<<<<<< /** * Validate the supported select options. * @return A {@link com.stratio.meta.common.result.MetaResult} with the validation result. */ private MetaResult validateOptions(){ MetaResult result = new MetaResult(); ======= private Result validateOptions(){ Result result = QueryResult.CreateSuccessQueryResult(); >>>>>>> /** * Validate the supported select options. * @return A {@link com.stratio.meta.common.result.Result} with the validation result. */ private Result validateOptions(){ Result result = QueryResult.CreateSuccessQueryResult(); <<<<<<< private MetaResult validateWhereClause(TableMetadata tableMetadata){ //TODO: Check that the MATCH operator is only used in Lucene mapped columns. MetaResult result = new MetaResult(); for(MetaRelation relation : where){ if(MetaRelation.TYPE_COMPARE == relation.getType()) { ======= private Result validateWhereClause(TableMetadata tableMetadata){ Result result = QueryResult.CreateSuccessQueryResult(); for(Relation relation : where){ if(Relation.TYPE_COMPARE == relation.getType()) { >>>>>>> private Result validateWhereClause(TableMetadata tableMetadata){ //TODO: Check that the MATCH operator is only used in Lucene mapped columns. 
Result result = QueryResult.CreateSuccessQueryResult(); for(Relation relation : where){ if(Relation.TYPE_COMPARE == relation.getType()) { <<<<<<< String luceneWhere = getLuceneWhereClause(_metadata, _tableMetadata); for(MetaRelation metaRelation: this.where){ ======= for(Relation metaRelation: this.where){ >>>>>>> String luceneWhere = getLuceneWhereClause(_metadata, _tableMetadata); for(Relation metaRelation: this.where){
<<<<<<<
=======
    public void testQ01Crossdata() throws ManifestException {
        init();
        String inputText = "[tpcc], " +
                " SELECT ol_o_id, ol_d_id,ol_w_id,sum(ol_quantity),avg(ol_quantity),sum(ol_amount) AS suma,avg(ol_amount),count(*) " +
                " FROM tpcc.order_line WHERE ol_d_id=4 AND ol_w_id=175 " +
                " GROUP BY ol_o_id, ol_d_id,ol_w_id ORDER BY ol_amount desc limit 10;";
        QueryWorkflow queryWorkflow = (QueryWorkflow) getPlannedQuery(
                inputText, "testQ01Crossdata", false, false, order_line);
        assertNotNull(queryWorkflow, "Null workflow received.");
        assertEquals(queryWorkflow.getResultType(), ResultType.RESULTS, "Invalid result type");
        assertEquals(queryWorkflow.getExecutionType(), ExecutionType.SELECT, "Invalid execution type");
    }
    @Test
>>>>>>>
<<<<<<<
    public static final String CLUSTER = "local";
    public static final String JOB_NAME = "stratioDeepWithMeta";
    public static final String CASSANDRA_HOST = "localhost";
    public static final int CASSANDRA_PORT = 9160;
=======
    public static final String cluster = "local[2]";
    public static final String jobName = "stratioDeepWithMeta";
    public static final String cassandraHost = "localhost";
    public static final int cassandraPort = 9160;
>>>>>>>
    public static final String CLUSTER = "local[2]";
    public static final String JOB_NAME = "stratioDeepWithMeta";
    public static final String CASSANDRA_HOST = "localhost";
    public static final int CASSANDRA_PORT = 9160;
<<<<<<<
=======
import com.stratio.meta.common.metadata.structures.TableMetadata;
import com.stratio.meta2.common.data.ClusterName;
import com.stratio.meta2.core.statements.SelectStatement;
>>>>>>>
import com.stratio.meta2.core.statements.SelectStatement;
<<<<<<< import com.stratio.meta2.core.query.MetadataPlannedQuery; ======= import com.stratio.meta2.core.query.MetadataInProgressQuery; import com.stratio.meta2.core.query.MetadataPlannedQuery; >>>>>>> import com.stratio.meta2.core.query.MetadataInProgressQuery; import com.stratio.meta2.core.query.MetadataPlannedQuery; <<<<<<< import com.stratio.meta2.core.statements.AttachConnectorStatement; import com.stratio.meta2.core.statements.CreateCatalogStatement; import com.stratio.meta2.core.statements.CreateIndexStatement; import com.stratio.meta2.core.statements.CreateTableStatement; import com.stratio.meta2.core.statements.DropCatalogStatement; import com.stratio.meta2.core.statements.DropIndexStatement; import com.stratio.meta2.core.statements.DropTableStatement; import com.stratio.meta2.core.statements.MetaStatement; ======= import com.stratio.meta2.core.statements.AttachConnectorStatement; import org.apache.log4j.Logger; import java.util.Map; >>>>>>> import com.stratio.meta2.core.statements.AttachConnectorStatement; import com.stratio.meta2.core.statements.CreateCatalogStatement; import com.stratio.meta2.core.statements.CreateIndexStatement; import com.stratio.meta2.core.statements.CreateTableStatement; import com.stratio.meta2.core.statements.DropCatalogStatement; import com.stratio.meta2.core.statements.DropIndexStatement; import com.stratio.meta2.core.statements.DropTableStatement; import com.stratio.meta2.core.statements.MetaStatement; import java.util.Map; <<<<<<< DataStoreMetadata datastoreMetadata = MetadataManager.MANAGER.getDataStore(new DataStoreName(attachClusterStatement .getDatastoreName())); ======= private void attachCluster(AttachClusterStatement attachClusterStatement){ DataStoreMetadata datastoreMetadata = MetadataManager.MANAGER .getDataStore(new DataStoreName(attachClusterStatement.getDatastoreName())); >>>>>>> private InProgressQuery coordinateSelect(SelectPlannedQuery selectPlannedQuery) { InProgressQuery inProgressQuery = null; return inProgressQuery; } private void attachCluster(AttachClusterStatement attachClusterStatement){ DataStoreMetadata datastoreMetadata = MetadataManager.MANAGER .getDataStore(new DataStoreName(attachClusterStatement.getDatastoreName()));
<<<<<<< logger.debug("TRACE: Validating = " + this.toString()); ======= >>>>>>> logger.debug("TRACE: Validating = " + this.toString()); <<<<<<< logger.debug("TRACE: Relation = " + relation.toString()); logger.debug("TRACE: relation.getIdentifiers().get(0).getTable = " + relation.getIdentifiers().get(0).getTable()); ======= >>>>>>> logger.debug("TRACE: Relation = " + relation.toString()); logger.debug("TRACE: relation.getIdentifiers().get(0).getTable = " + relation.getIdentifiers().get(0).getTable());
<<<<<<< protected ArrayList< ArrayList<TrackNode<K>> > trackSegments = null; /** Stores whether the default cost matrices from the paper should be used, * or if the user will supply their own. */ protected boolean defaultCosts = true; ======= protected ArrayList< ArrayList<Spot> > trackSegments = null; >>>>>>> protected ArrayList< ArrayList<TrackNode<K>> > trackSegments = null; /** Stores whether the default cost matrices from the paper should be used, * or if the user will supply their own. */ protected boolean defaultCosts = true; <<<<<<< public LAPTracker (TreeMap<Integer, ? extends Collection<TrackNode<K>> > objects) { this.objects = convertMapToArrayList(objects); } ======= >>>>>>> <<<<<<< public LAPTracker (TreeMap<Integer, ? extends Collection<TrackNode<K>> > objects, ArrayList<double[][]> linkingCosts) { ======= public LAPTracker(TreeMap<Integer, ? extends Collection<Spot> > objects, ArrayList<double[][]> linkingCosts, Settings settings) { >>>>>>> public LAPTracker (TreeMap<Integer, ? extends Collection<TrackNode<K>> > objects, ArrayList<double[][]> linkingCosts, Settings settings) { <<<<<<< LinkingCostMatrixCreator<K> objCosts = new LinkingCostMatrixCreator<K>(new ArrayList<TrackNode<K>>(objects.get(i)), new ArrayList<TrackNode<K>>(objects.get(i + 1))); ======= LinkingCostMatrixCreator objCosts = new LinkingCostMatrixCreator(new ArrayList<Spot>(objects.get(i)), new ArrayList<Spot>(objects.get(i + 1)), settings); >>>>>>> LinkingCostMatrixCreator<K> objCosts = new LinkingCostMatrixCreator<K>( new ArrayList<TrackNode<K>>(objects.get(i)), new ArrayList<TrackNode<K>>(objects.get(i + 1)), settings); <<<<<<< TrackSegmentCostMatrixCreator<K> segCosts = new TrackSegmentCostMatrixCreator<K>(trackSegments); ======= TrackSegmentCostMatrixCreator segCosts = new TrackSegmentCostMatrixCreator(trackSegments, settings); >>>>>>> TrackSegmentCostMatrixCreator<K> segCosts = new TrackSegmentCostMatrixCreator<K>(trackSegments, settings); <<<<<<< end.addParent(start); start.addChild(end); ======= end.addNext(start); start.addPrev(end); >>>>>>> end.addParent(start); start.addChild(end); <<<<<<< LAPTracker<Spot> lap = new LAPTracker<Spot>(wrap, false); ======= LAPTracker lap = new LAPTracker(wrap); >>>>>>> LAPTracker<Spot> lap = new LAPTracker<Spot>(wrap); <<<<<<< ArrayList<TrackNode<Spot>> x = wrap.get(i); ArrayList<TrackNode<Spot>> y = wrap.get(i+1); LinkingCostMatrixCreator<Spot> l = new LinkingCostMatrixCreator<Spot>(x, y); ======= ArrayList<Spot> x = wrap.get(i); ArrayList<Spot> y = wrap.get(i+1); LinkingCostMatrixCreator l = new LinkingCostMatrixCreator(x, y, settings); >>>>>>> ArrayList<TrackNode<Spot>> x = wrap.get(i); ArrayList<TrackNode<Spot>> y = wrap.get(i+1); LinkingCostMatrixCreator<Spot> l = new LinkingCostMatrixCreator<Spot>(x, y, settings); <<<<<<< TrackSegmentCostMatrixCreator<Spot> segCosts = new TrackSegmentCostMatrixCreator<Spot>(tSegs); ======= TrackSegmentCostMatrixCreator segCosts = new TrackSegmentCostMatrixCreator(tSegs, settings); >>>>>>> TrackSegmentCostMatrixCreator<Spot> segCosts = new TrackSegmentCostMatrixCreator<Spot>(tSegs, settings);
<<<<<<< public ConfigureViewsDescriptor( final TrackMate trackmate, final FeatureColorGenerator< Spot > spotColorGenerator, final PerEdgeFeatureColorGenerator edgeColorGenerator, final PerTrackFeatureColorGenerator trackColorGenerator, final ManualSpotColorGenerator manualSpotColorGenerator, final ManualEdgeColorGenerator manualEdgeColorGenerator, final TrackMateGUIController controller ) ======= public ConfigureViewsDescriptor( final TrackMate trackmate, final FeatureColorGenerator< Spot > spotColorGenerator, final PerEdgeFeatureColorGenerator edgeColorGenerator, final PerTrackFeatureColorGenerator trackColorGenerator, final FeatureColorGenerator< Spot > spotColorGeneratorPerTrackFeature, final TrackMateGUIController controller ) >>>>>>> public ConfigureViewsDescriptor( final TrackMate trackmate, final FeatureColorGenerator< Spot > spotColorGenerator, final PerEdgeFeatureColorGenerator edgeColorGenerator, final PerTrackFeatureColorGenerator trackColorGenerator, final FeatureColorGenerator< Spot > spotColorGeneratorPerTrackFeature, final ManualSpotColorGenerator manualSpotColorGenerator, final ManualEdgeColorGenerator manualEdgeColorGenerator, final TrackMateGUIController controller ) <<<<<<< panel.setManualSpotColorGenerator( manualSpotColorGenerator ); panel.setManualEdgeColorGenerator( manualEdgeColorGenerator ); ======= panel.setSpotColorGeneratorPerTrackFeature( spotColorGeneratorPerTrackFeature ); >>>>>>> panel.setManualSpotColorGenerator( manualSpotColorGenerator ); panel.setManualEdgeColorGenerator( manualEdgeColorGenerator ); panel.setSpotColorGeneratorPerTrackFeature( spotColorGeneratorPerTrackFeature );
<<<<<<< import net.imglib2.Cursor; import net.imglib2.algorithm.region.localneighborhood.AbstractNeighborhood; import net.imglib2.algorithm.region.localneighborhood.DiscNeighborhood; import net.imglib2.algorithm.region.localneighborhood.SphereNeighborhood; import net.imglib2.img.ImgPlus; ======= >>>>>>> <<<<<<< final AbstractNeighborhood<T, ImgPlus<T>> neighborhood; if (img.numDimensions() == 3) { neighborhood = new SphereNeighborhood<T>(img, diameters[nDiameters-2]/2); } else { neighborhood = new DiscNeighborhood<T>(img, diameters[nDiameters-2]/2); } final long[] coords = new long[3]; for (int i = 0; i < coords.length; i++) { coords[i] = Math.round( spot.getDoublePosition(i) / img.calibration(i) ); } neighborhood.setPosition(coords); ======= SpotNeighborhood<T> neighborhood = new SpotNeighborhood<T>(tmpSpot , img); SpotNeighborhoodCursor<T> cursor = neighborhood.cursor(); >>>>>>> SpotNeighborhood<T> neighborhood = new SpotNeighborhood<T>(tmpSpot , img); SpotNeighborhoodCursor<T> cursor = neighborhood.cursor(); <<<<<<< Cursor<T> cursor = neighborhood.cursor(); ======= >>>>>>>
<<<<<<< final Logger logger = Logger.IJ_LOGGER; // logPanel.getLogger(); if ( null == arg0 ) ======= final Logger logger = Logger.IJ_LOGGER; // logPanel.getLogger(); if ( null == filePath ) >>>>>>> final Logger logger = Logger.IJ_LOGGER; // logPanel.getLogger(); if ( null == filePath ) <<<<<<< if ( null == file ) { final File folder = new File( System.getProperty( "user.dir" ) ).getParentFile().getParentFile(); file = new File( folder.getPath() + File.separator + "TrackMateData.xml" ); } final File tmpFile = IOUtils.askForFileForLoading( file, "Load a TrackMate XML file", frame, logger ); if ( null == tmpFile ) { return; } file = tmpFile; } else { file = new File( arg0 ); } ======= if ( null == file || file.length() == 0 ) { final File folder = new File( System.getProperty( "user.dir" ) ).getParentFile().getParentFile(); file = new File( folder.getPath() + File.separator + "TrackMateData.xml" ); } final File tmpFile = IOUtils.askForFileForLoading( file, "Load a TrackMate XML file", frame, logger ); if ( null == tmpFile ) { return; } file = tmpFile; } else { file = new File( filePath ); if ( !file.exists() ) { IJ.error( TrackMate.PLUGIN_NAME_STR + " v" + TrackMate.PLUGIN_NAME_VERSION, "Could not find file with path " + filePath + "." ); return; } if ( !file.canRead() ) { IJ.error( TrackMate.PLUGIN_NAME_STR + " v" + TrackMate.PLUGIN_NAME_VERSION, "Could not read file with path " + filePath + "." ); return; } } >>>>>>> if ( null == file || file.length() == 0 ) { final File folder = new File( System.getProperty( "user.dir" ) ).getParentFile().getParentFile(); file = new File( folder.getPath() + File.separator + "TrackMateData.xml" ); } final File tmpFile = IOUtils.askForFileForLoading( file, "Load a TrackMate XML file", frame, logger ); if ( null == tmpFile ) { return; } file = tmpFile; } else { file = new File( filePath ); if ( !file.exists() ) { IJ.error( TrackMate.PLUGIN_NAME_STR + " v" + TrackMate.PLUGIN_NAME_VERSION, "Could not find file with path " + filePath + "." ); return; } if ( !file.canRead() ) { IJ.error( TrackMate.PLUGIN_NAME_STR + " v" + TrackMate.PLUGIN_NAME_VERSION, "Could not read file with path " + filePath + "." ); return; } } <<<<<<< final File file = new File( AppUtils.getBaseDirectory( TrackMate.class ), "samples/FakeTracks.xml" ); ======= >>>>>>> <<<<<<< plugIn.run( file.getAbsolutePath() ); ======= plugIn.run( "samples/FakeTracks.xml" ); >>>>>>> plugIn.run( "samples/FakeTracks.xml" );
<<<<<<<
=======
import net.imagej.ImgPlus;
import net.imglib2.meta.view.HyperSliceImgPlus;
import net.imglib2.type.NativeType;
import net.imglib2.type.numeric.RealType;
import org.scijava.Priority;
>>>>>>>
import org.scijava.Priority;
<<<<<<<
@Plugin( type = SpotAnalyzerFactory.class, priority = -1d )
=======
@Plugin( type = SpotAnalyzerFactory.class, priority = Priority.LOW )
>>>>>>>
@Plugin( type = SpotAnalyzerFactory.class, priority = Priority.LOW )
<<<<<<<
  public void testOptionalVarOnFunctionType() throws Throwable { doTest(true); }
=======
  public void testShiftRightAssign() throws Throwable { doTest(true); }
  public void testNestedTypedClassesAssignment() throws Throwable { doTest(true); }
  public void testUnsignedShiftRightAssign() throws Throwable { doTest(true); }
>>>>>>>
  public void testOptionalVarOnFunctionType() throws Throwable { doTest(true); }
  public void testShiftRightAssign() throws Throwable { doTest(true); }
  public void testNestedTypedClassesAssignment() throws Throwable { doTest(true); }
  public void testUnsignedShiftRightAssign() throws Throwable { doTest(true); }
<<<<<<<
=======
import com.github.nexus.api.Nexus;
import com.github.nexus.configuration.ConfigurationParser;
import com.github.nexus.configuration.PropertyLoader;
>>>>>>>
import com.github.nexus.api.Nexus;
import com.github.nexus.configuration.ConfigurationParser;
import com.github.nexus.configuration.PropertyLoader;
<<<<<<<
import com.intellij.plugins.haxe.model.HaxeMemberModel;
import com.intellij.plugins.haxe.model.HaxeMethodModel;
import com.intellij.plugins.haxe.model.type.HaxeTypeResolver;
import com.intellij.plugins.haxe.model.type.SpecificTypeReference;
import com.intellij.plugins.haxe.util.*;
import com.intellij.psi.*;
=======
import com.intellij.plugins.haxe.util.HaxePresentableUtil;
import com.intellij.plugins.haxe.util.HaxeResolveUtil;
import com.intellij.plugins.haxe.util.UsefulPsiTreeUtil;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiNamedElement;
import com.intellij.psi.PsiReference;
>>>>>>>
import com.intellij.plugins.haxe.model.HaxeMemberModel;
import com.intellij.plugins.haxe.model.HaxeMethodModel;
import com.intellij.plugins.haxe.model.type.HaxeTypeResolver;
import com.intellij.plugins.haxe.model.type.SpecificTypeReference;
import com.intellij.plugins.haxe.util.HaxePresentableUtil;
import com.intellij.plugins.haxe.util.HaxeResolveUtil;
import com.intellij.plugins.haxe.util.UsefulPsiTreeUtil;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiNamedElement;
import com.intellij.psi.PsiReference;
<<<<<<<
  IElementType CONSTRUCTOR_NAME = new HaxeElementType("CONSTRUCTOR_NAME");
=======
  IElementType CONDITIONAL = new HaxeElementType("CONDITIONAL");
>>>>>>>
  IElementType CONDITIONAL = new HaxeElementType("CONDITIONAL");
  IElementType CONSTRUCTOR_NAME = new HaxeElementType("CONSTRUCTOR_NAME");
<<<<<<<
  IElementType FINAL_META = new HaxeElementType("FINAL_META");
=======
  IElementType FAT_ARROW_EXPRESSION = new HaxeElementType("FAT_ARROW_EXPRESSION");
>>>>>>>
  IElementType FAT_ARROW_EXPRESSION = new HaxeElementType("FAT_ARROW_EXPRESSION");
  IElementType FINAL_META = new HaxeElementType("FINAL_META");
<<<<<<<
    else if (type == CONSTRUCTOR_NAME) {
      return new HaxeConstructorNameImpl(node);
    }
=======
    else if (type == CONDITIONAL) {
      return new HaxeConditionalImpl(node);
    }
>>>>>>>
    else if (type == CONDITIONAL) {
      return new HaxeConditionalImpl(node);
    }
    else if (type == CONSTRUCTOR_NAME) {
      return new HaxeConstructorNameImpl(node);
    }
<<<<<<<
    else if (type == FINAL_META) {
      return new HaxeFinalMetaImpl(node);
    }
=======
    else if (type == FAT_ARROW_EXPRESSION) {
      return new HaxeFatArrowExpressionImpl(node);
    }
>>>>>>>
    else if (type == FAT_ARROW_EXPRESSION) {
      return new HaxeFatArrowExpressionImpl(node);
    }
    else if (type == FINAL_META) {
      return new HaxeFinalMetaImpl(node);
    }